+ /* Save all GP registers into the regs array in our frame (RSP is reconstructed below) */
+ for (i = 0; i < AMD64_NREG; ++i)
+ if (i != AMD64_RSP)
+ amd64_mov_membase_reg (code, AMD64_RSP, regs_offset + (i * sizeof(mgreg_t)), i, sizeof(mgreg_t));
+ /* Save RSP: recompute the caller's RSP by skipping our frame plus the return address */
+ amd64_lea_membase (code, AMD64_RAX, AMD64_RSP, stack_size + sizeof(mgreg_t));
+ amd64_mov_membase_reg (code, AMD64_RSP, regs_offset + (AMD64_RSP * sizeof(mgreg_t)), AMD64_RAX, sizeof(mgreg_t));
+ /* Set arg1 == regs (pointer to the saved register array) */
+ amd64_lea_membase (code, AMD64_RAX, AMD64_RSP, regs_offset);
+ amd64_mov_membase_reg (code, AMD64_RSP, arg_offsets [0], AMD64_RAX, sizeof(mgreg_t));
+ /* Set arg2 == eip; 0 for llvm_abs (presumably absolute-address LLVM callers — no usable return address), else the saved return address */
+ if (llvm_abs)
+ amd64_alu_reg_reg (code, X86_XOR, AMD64_RAX, AMD64_RAX);
+ else
+ amd64_mov_reg_membase (code, AMD64_RAX, AMD64_RSP, stack_size, sizeof(mgreg_t));
+ amd64_mov_membase_reg (code, AMD64_RSP, arg_offsets [1], AMD64_RAX, sizeof(mgreg_t));
+ /* Set arg3 == exc/ex_token_index (0 when resuming an unwind, otherwise forwarded from ARG_REG1) */
+ if (resume_unwind)
+ amd64_mov_membase_imm (code, AMD64_RSP, arg_offsets [2], 0, sizeof(mgreg_t));
+ else
+ amd64_mov_membase_reg (code, AMD64_RSP, arg_offsets [2], AMD64_ARG_REG1, sizeof(mgreg_t));
+ /* Set arg4 == rethrow/pc offset (0 when resuming an unwind, else forwarded from ARG_REG2 for the corlib case) */
+ if (resume_unwind) {
+ amd64_mov_membase_imm (code, AMD64_RSP, arg_offsets [3], 0, sizeof(mgreg_t));
+ } else if (corlib) {
+ amd64_mov_membase_reg (code, AMD64_RSP, arg_offsets [3], AMD64_ARG_REG2, sizeof(mgreg_t));