if (*lmf && (MONO_CONTEXT_GET_BP (ctx) >= (gpointer)(*lmf)->ebp)) {
/* remove any unused lmf */
- *lmf = (*lmf)->previous_lmf;
+ *lmf = (gpointer)(((guint32)(*lmf)->previous_lmf) & ~1);
}
/* Pop EBP and the return address */
new_ctx->ebx = (*lmf)->ebx;
new_ctx->ebp = (*lmf)->ebp;
new_ctx->eip = (*lmf)->eip;
- /* the lmf is always stored on the stack, so the following
- * expression points to a stack location which can be used as ESP */
- new_ctx->esp = (unsigned long)&((*lmf)->eip);
- *lmf = (*lmf)->previous_lmf;
+ /* Check if we are in a trampoline LMF frame */
+ if ((guint32)((*lmf)->previous_lmf) & 1) {
+ /* lmf->esp is set by the trampoline code */
+ new_ctx->esp = (*lmf)->esp;
+
+ /* Pop arguments off the stack */
+ {
+ MonoMethod *method = (*lmf)->method;
+ MonoJitArgumentInfo *arg_info = g_newa (MonoJitArgumentInfo, mono_method_signature (method)->param_count + 1);
+
+ guint32 stack_to_pop = mono_arch_get_argument_info (mono_method_signature (method), mono_method_signature (method)->param_count, arg_info);
+ new_ctx->esp += stack_to_pop;
+ }
+ }
+ else
+ /* the lmf is always stored on the stack, so the following
+ * expression points to a stack location which can be used as ESP */
+ new_ctx->esp = (unsigned long)&((*lmf)->eip);
+
+ *lmf = (gpointer)(((guint32)(*lmf)->previous_lmf) & ~1);
return ji ? ji : res;
}
/* %eax = previous_lmf */
x86_prefix (code, X86_GS_PREFIX);
x86_mov_reg_mem (code, X86_EAX, lmf_tls_offset, 4);
- /* skip method_info + lmf */
- x86_alu_reg_imm (code, X86_SUB, X86_ESP, 8);
+ /* skip esp + method_info + lmf */
+ x86_alu_reg_imm (code, X86_SUB, X86_ESP, 12);
/* push previous_lmf */
x86_push_reg (code, X86_EAX);
/* new lmf = ESP */
code = emit_call (cfg, code, MONO_PATCH_INFO_INTERNAL_METHOD, (gpointer)"mono_get_lmf_addr");
}
- /* Skip method info */
- x86_alu_reg_imm (code, X86_SUB, X86_ESP, 4);
+ /* Skip esp + method info */
+ x86_alu_reg_imm (code, X86_SUB, X86_ESP, 8);
/* push lmf */
x86_push_reg (code, X86_EAX);
pushed_args = 8;
+ /* Align the stack on Apple */
+ x86_alu_reg_imm (buf, X86_SUB, X86_ESP, 4);
+
+ pushed_args ++;
+
/* save LMF begin */
/* save the IP (caller ip) */
pushed_args += 4;
+ /* save ESP */
+ x86_push_reg (buf, X86_ESP);
+ /* Adjust ESP so it points to the previous frame */
+ x86_alu_membase_imm (buf, X86_ADD, X86_ESP, 0, (pushed_args + 2) * 4);
+
+ pushed_args ++;
+
/* save method info */
x86_push_membase (buf, X86_ESP, pushed_args * sizeof (gpointer));
pushed_args++;
- /* the stack is correctly aligned to 16 bytes because pushed_args is 14
- * and there is the extra trampoline arg + the return ip pushed by call
+ /* On Apple, the stack is correctly aligned to 16 bytes because pushed_args is
+ * 16 and there is the extra trampoline arg + the return ip pushed by call
* FIXME: Note that if an exception happens while some args are pushed
* on the stack, the stack will be misaligned.
*/
-#ifdef __APPLE__
- g_assert (pushed_args == 14);
-#endif
+ g_assert (pushed_args == 16);
+
/* get the address of lmf for the current thread */
x86_call_code (buf, mono_get_lmf_addr);
/* push lmf */
x86_push_reg (buf, X86_EAX);
/* push *lfm (previous_lmf) */
x86_push_membase (buf, X86_EAX, 0);
+ /* Signal to mono_arch_find_jit_info () that this is a trampoline frame */
+ x86_alu_membase_imm (buf, X86_ADD, X86_ESP, 0, 1);
/* *(lmf) = ESP */
x86_mov_membase_reg (buf, X86_EAX, 0, X86_ESP, 4);
/* save LFM end */
/* restore LMF start */
/* ebx = previous_lmf */
x86_pop_reg (buf, X86_EBX);
+ x86_alu_reg_imm (buf, X86_SUB, X86_EBX, 1);
/* edi = lmf */
x86_pop_reg (buf, X86_EDI);
/* *(lmf) = previous_lmf */
x86_mov_membase_reg (buf, X86_EDI, 0, X86_EBX, 4);
/* discard method info */
x86_pop_reg (buf, X86_ESI);
+ /* discard ESP */
+ x86_pop_reg (buf, X86_ESI);
/* restore caller saved regs */
x86_pop_reg (buf, X86_EBX);
x86_pop_reg (buf, X86_EDI);
x86_mov_reg_membase (buf, X86_ECX, X86_ESP, 1 * 4, 4);
x86_mov_reg_membase (buf, X86_EDX, X86_ESP, 2 * 4, 4);
- /* Pop saved reg array + method ptr */
- x86_alu_reg_imm (buf, X86_ADD, X86_ESP, 9 * 4);
+ /* Pop saved reg array + stack align + method ptr */
+ x86_alu_reg_imm (buf, X86_ADD, X86_ESP, 10 * 4);
if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT)
x86_ret (buf);