2 * tramp-x86.c: JIT trampoline code for x86
5 * Dietmar Maurer (dietmar@ximian.com)
7 * (C) 2001 Ximian, Inc.
13 #include <mono/metadata/abi-details.h>
14 #include <mono/metadata/appdomain.h>
15 #include <mono/metadata/metadata-internals.h>
16 #include <mono/metadata/marshal.h>
17 #include <mono/metadata/tabledefs.h>
18 #include <mono/metadata/mono-debug.h>
19 #include <mono/metadata/mono-debug-debugger.h>
20 #include <mono/metadata/profiler-private.h>
21 #include <mono/metadata/gc-internal.h>
22 #include <mono/arch/x86/x86-codegen.h>
24 #include <mono/utils/memcheck.h>
28 #include "debugger-agent.h"
/*
 * Round VAL up to the next multiple of ALIGN (ALIGN must be a power of two).
 * (val) is now parenthesized so the guint64 cast applies to the whole argument
 * expression, not just its first operand (cast binds tighter than '+'/'-').
 */
30 #define ALIGN_TO(val,align) ((((guint64)(val)) + ((align) - 1)) & ~((align) - 1))
33 * mono_arch_get_unbox_trampoline:
35 * @addr: pointer to native code for @m
37 * when value type methods are called through the vtable we need to unbox the
38 * this argument. This method returns a pointer to a trampoline which does
39 * unboxing before calling the method
42 mono_arch_get_unbox_trampoline (MonoMethod *m, gpointer addr)
/* 'this' is the first stack argument at [esp+4]; reserve NaCl-sized code space. */
45 int this_pos = 4, size = NACL_SIZE(16, 32);
46 MonoDomain *domain = mono_domain_get ();
49 start = code = mono_domain_code_reserve (domain, size);
51 unwind_ops = mono_arch_get_cie_program ();
/* Advance the boxed 'this' past the MonoObject header so the callee sees a
 * pointer to the raw value data, then tail-jump into the method's code. */
53 x86_alu_membase_imm (code, X86_ADD, X86_ESP, this_pos, sizeof (MonoObject));
54 x86_jump_code (code, addr);
55 g_assert ((code - start) < size);
57 nacl_domain_code_validate (domain, &start, size, &code);
58 mono_profiler_code_buffer_new (start, code - start, MONO_PROFILER_CODE_BUFFER_UNBOX_TRAMPOLINE, m);
60 mono_tramp_info_register (mono_tramp_info_create (NULL, start, code - start, NULL, unwind_ops), domain);
/*
 * mono_arch_get_static_rgctx_trampoline:
 *
 * Returns a small trampoline which loads MRGCTX into the architecture's rgctx
 * register and then jumps to ADDR, so generic-sharing code reaching ADDR finds
 * its runtime generic context already in place.
 */
66 mono_arch_get_static_rgctx_trampoline (MonoMethod *m, MonoMethodRuntimeGenericContext *mrgctx, gpointer addr)
72 MonoDomain *domain = mono_domain_get ();
74 buf_len = NACL_SIZE (10, 32);
76 start = code = mono_domain_code_reserve (domain, buf_len);
78 unwind_ops = mono_arch_get_cie_program ();
/* mov $mrgctx, %rgctx_reg ; jmp addr */
80 x86_mov_reg_imm (code, MONO_ARCH_RGCTX_REG, mrgctx);
81 x86_jump_code (code, addr);
82 g_assert ((code - start) <= buf_len);
84 nacl_domain_code_validate (domain, &start, buf_len, &code);
85 mono_arch_flush_icache (start, code - start);
86 mono_profiler_code_buffer_new (start, code - start, MONO_PROFILER_CODE_BUFFER_GENERICS_TRAMPOLINE, NULL);
88 mono_tramp_info_register (mono_tramp_info_create (NULL, start, code - start, NULL, unwind_ops), domain);
/*
 * mono_arch_get_llvm_imt_trampoline:
 *
 * Returns a trampoline which loads M into the IMT register, loads the vtable
 * from the 'this' argument on the stack, and jumps through the vtable slot at
 * VT_OFFSET.
 */
94 mono_arch_get_llvm_imt_trampoline (MonoDomain *domain, MonoMethod *m, int vt_offset)
102 start = code = mono_domain_code_reserve (domain, buf_len);
104 this_offset = mono_x86_get_this_arg_offset (mono_method_signature (m));
/* mov $m, %imt_reg */
107 x86_mov_reg_imm (code, MONO_ARCH_IMT_REG, m);
/* Load 'this' from the stack; the +4 skips the return address pushed by the call. */
109 x86_mov_reg_membase (code, X86_EAX, X86_ESP, this_offset + 4, 4);
110 /* Load vtable address */
111 x86_mov_reg_membase (code, X86_EAX, X86_EAX, 0, 4);
112 x86_jump_membase (code, X86_EAX, vt_offset);
114 g_assert ((code - start) < buf_len);
116 nacl_domain_code_validate (domain, &start, buf_len, &code);
118 mono_arch_flush_icache (start, code - start);
119 mono_profiler_code_buffer_new (start, code - start, MONO_PROFILER_CODE_BUFFER_IMT_TRAMPOLINE, NULL);
/*
 * mono_arch_patch_callsite:
 *
 * Patch the call site ending at ORIG_CODE (inside the method starting at
 * METHOD_START) so that it transfers control to ADDR instead.  The 32-bit
 * displacement is swapped in atomically with InterlockedExchange so other
 * threads executing the call see either the old or the new target, never a
 * torn value.
 */
125 mono_arch_patch_callsite (guint8 *method_start, guint8 *orig_code, guint8 *addr)
127 #if defined(__default_codegen__)
/* Get a breakpoint-free copy of the bytes around the call site into buf. */
130 gboolean can_write = mono_breakpoint_clean_code (method_start, orig_code, 8, buf, sizeof (buf));
134 /* go to the start of the call instruction
136 * address_byte = (m << 6) | (o << 3) | reg
137 * call opcode: 0xff address_byte displacement
/* 0xe8 = call rel32: the displacement is relative to the end of the 5-byte insn. */
143 if (code [1] == 0xe8) {
145 InterlockedExchange ((gint32*)(orig_code + 2), (guint)addr - ((guint)orig_code + 1) - 5);
147 /* Tell valgrind to recompile the patched code */
148 VALGRIND_DISCARD_TRANSLATIONS (orig_code + 2, 4);
150 } else if (code [1] == 0xe9) {
151 /* A PLT entry: jmp <DISP> */
/* jmp rel32 uses the same displacement encoding as call rel32. */
153 InterlockedExchange ((gint32*)(orig_code + 2), (guint)addr - ((guint)orig_code + 1) - 5);
/* Unrecognized instruction sequence at the call site: dump it and abort. */
155 printf ("Invalid trampoline sequence: %x %x %x %x %x %x %x\n", code [0], code [1], code [2], code [3],
156 code [4], code [5], code [6]);
157 g_assert_not_reached ();
159 #elif defined(__native_client__)
160 /* Target must be bundle-aligned */
161 g_assert (((guint32)addr & kNaClAlignmentMask) == 0);
163 /* 0xe8 = call <DISP>, 0xe9 = jump <DISP> */
164 if ((orig_code [-5] == 0xe8) || orig_code [-6] == 0xe9) {
/* Under NaCl, code may only be rewritten through the validating
 * nacl_dyncode_modify () service call. */
166 gint32 offset = (gint32)addr - (gint32)orig_code;
167 guint8 buf[sizeof(gint32)];
168 *((gint32*)(buf)) = offset;
169 ret = nacl_dyncode_modify (orig_code - sizeof(gint32), buf, sizeof(gint32));
172 printf ("Invalid trampoline sequence %p: %02x %02x %02x %02x %02x\n", orig_code, orig_code [-5], orig_code [-4], orig_code [-3], orig_code [-2], orig_code[-1]);
173 g_assert_not_reached ();
/*
 * mono_arch_patch_plt_entry:
 *
 * Redirect the PLT entry at CODE to ADDR.  The PLT entry jumps indirectly
 * through a GOT slot; only the slot is rewritten, not the code itself, so no
 * icache maintenance is needed.
 */
179 mono_arch_patch_plt_entry (guint8 *code, gpointer *got, mgreg_t *regs, guint8 *addr)
183 /* Patch the jump table entry used by the plt entry */
185 #if defined(__native_client_codegen__) || defined(__native_client__)
186 /* for both compiler and runtime */
188 /* mov <DISP>(%ebx), %ecx */
189 /* and 0xffffffe0, %ecx */
/* Validate the expected NaCl PLT instruction prefix before reading the GOT displacement. */
191 g_assert (code [0] == 0x8b);
192 g_assert (code [1] == 0x8b);
194 offset = *(guint32*)(code + 2);
195 #elif defined(__default_codegen__)
196 /* A PLT entry: jmp *<DISP>(%ebx) */
197 g_assert (code [0] == 0xff);
198 g_assert (code [1] == 0xa3);
200 offset = *(guint32*)(code + 2);
201 #endif /* __native_client_codegen__ */
/* The GOT base lives in the register reserved for it (%ebx); store the new target. */
203 got = (gpointer*)(gsize) regs [MONO_ARCH_GOT_REG];
204 *(guint8**)((guint8*)got + offset) = addr;
/*
 * get_vcall_slot:
 *
 * Decode the indirect call instruction ending at CODE, returning the base
 * pointer (taken from the saved register state REGS) and storing the decoded
 * displacement into *DISPLACEMENT.  Together they identify the vtable slot
 * the call went through.
 */
208 get_vcall_slot (guint8 *code, mgreg_t *regs, int *displacement)
210 const int kBufSize = NACL_SIZE (8, 16);
/* Copy the instruction bytes, stripping any breakpoint opcodes the debugger inserted. */
215 mono_breakpoint_clean_code (NULL, code, kBufSize, buf, kBufSize);
/* call *<disp>(%reg): opcode 0xff, ModRM mod=10 (disp32), reg field /2 (call). */
220 if ((code [0] == 0xff) && ((code [1] & 0x18) == 0x10) && ((code [1] >> 6) == 2)) {
221 reg = code [1] & 0x07;
222 disp = *((gint32*)(code + 2));
223 #if defined(__native_client_codegen__) || defined(__native_client__)
/* NaCl sandboxed indirect call sequences: mov/and/call with the
 * displacement located before the masked call. */
224 } else if ((code[1] == 0x83) && (code[2] == 0xe1) && (code[4] == 0xff) &&
225 (code[5] == 0xd1) && (code[-5] == 0x8b)) {
226 disp = *((gint32*)(code - 3));
227 reg = code[-4] & 0x07;
228 } else if ((code[-2] == 0x8b) && (code[1] == 0x83) && (code[4] == 0xff)) {
229 reg = code[-1] & 0x07;
230 disp = (signed char)code[0];
233 g_assert_not_reached ();
237 *displacement = disp;
/* The base register's value at call time comes from the saved register array. */
238 return (gpointer)regs [reg];
/*
 * get_vcall_slot_addr:
 *
 * Convenience wrapper around get_vcall_slot (): returns the address of the
 * vtable slot itself (base + displacement) for the virtual call at CODE.
 */
242 get_vcall_slot_addr (guint8* code, mgreg_t *regs)
246 vt = get_vcall_slot (code, regs, &displacement);
249 return (gpointer*)((char*)vt + displacement);
/*
 * mono_arch_create_generic_trampoline:
 *
 * Emit the generic trampoline for TRAMP_TYPE.  The generated code builds a
 * standard frame, saves all registers into a register array, sets up a MonoLMF
 * so the stack can be unwound across the trampoline, calls the C trampoline
 * function for TRAMP_TYPE, then restores state and either returns or jumps to
 * the address the C function produced.  Unwind info is recorded throughout so
 * the frame is visible to the unwinder.  If AOT, target addresses are loaded
 * via AOT constants instead of being embedded as immediates.
 */
253 mono_arch_create_generic_trampoline (MonoTrampolineType tramp_type, MonoTrampInfo **info, gboolean aot)
256 guint8 *buf, *code, *tramp;
257 GSList *unwind_ops = NULL;
258 MonoJumpInfo *ji = NULL;
259 int i, offset, frame_size, regarray_offset, lmf_offset, caller_ip_offset, arg_offset;
260 int cfa_offset; /* cfa = cfa_reg + cfa_offset */
262 code = buf = mono_global_codeman_reserve (256);
264 /* Note that there is a single argument to the trampoline
265 * and it is stored at: esp + pushed_args * sizeof (gpointer)
266 * the ret address is at: esp + (pushed_args + 1) * sizeof (gpointer)
269 /* Compute frame offsets relative to the frame pointer %ebp */
270 arg_offset = sizeof (mgreg_t);
271 caller_ip_offset = 2 * sizeof (mgreg_t);
273 offset += sizeof (MonoLMF);
274 lmf_offset = -offset;
275 offset += X86_NREG * sizeof (mgreg_t);
276 regarray_offset = -offset;
/* Extra room for the 4 outgoing arguments passed to the C trampoline function. */
278 offset += 4 * sizeof (mgreg_t);
279 frame_size = ALIGN_TO (offset, MONO_ARCH_FRAME_ALIGNMENT);
281 /* ret addr and arg are on the stack */
282 cfa_offset = 2 * sizeof (mgreg_t);
283 mono_add_unwind_op_def_cfa (unwind_ops, code, buf, X86_ESP, cfa_offset);
284 // IP saved at CFA - 4
/* X86_NREG is used as the pseudo-register column for the return address. */
285 mono_add_unwind_op_offset (unwind_ops, code, buf, X86_NREG, -4);
/* Standard prologue: push %ebp; mov %esp,%ebp; keep unwind info in sync. */
288 x86_push_reg (code, X86_EBP);
289 cfa_offset += sizeof (mgreg_t);
290 mono_add_unwind_op_def_cfa_offset (unwind_ops, code, buf, cfa_offset);
291 mono_add_unwind_op_offset (unwind_ops, code, buf, X86_EBP, -cfa_offset);
293 x86_mov_reg_reg (code, X86_EBP, X86_ESP, sizeof (mgreg_t));
294 mono_add_unwind_op_def_cfa_reg (unwind_ops, code, buf, X86_EBP);
296 /* There are three words on the stack, adding + 4 aligns the stack to 16, which is needed on osx */
297 x86_alu_reg_imm (code, X86_SUB, X86_ESP, frame_size + sizeof (mgreg_t));
299 /* Save all registers */
300 for (i = X86_EAX; i <= X86_EDI; ++i) {
304 /* Save original ebp */
305 /* EAX is already saved */
306 x86_mov_reg_membase (code, X86_EAX, X86_EBP, 0, sizeof (mgreg_t));
308 } else if (i == X86_ESP) {
309 /* Save original esp */
310 /* EAX is already saved */
311 x86_mov_reg_reg (code, X86_EAX, X86_EBP, sizeof (mgreg_t));
312 /* Saved ebp + trampoline arg + return addr */
313 x86_alu_reg_imm (code, X86_ADD, X86_EAX, 3 * sizeof (mgreg_t));
316 x86_mov_membase_reg (code, X86_EBP, regarray_offset + (i * sizeof (mgreg_t)), reg, sizeof (mgreg_t));
/* Fill in the MonoLMF: eip is 0 for JUMP trampolines (no meaningful caller ip). */
321 if (tramp_type == MONO_TRAMPOLINE_JUMP) {
322 x86_mov_membase_imm (code, X86_EBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, eip), 0, sizeof (mgreg_t));
324 x86_mov_reg_membase (code, X86_EAX, X86_EBP, caller_ip_offset, sizeof (mgreg_t));
325 x86_mov_membase_reg (code, X86_EBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, eip), X86_EAX, sizeof (mgreg_t));
/* For JIT/JUMP trampolines the trampoline argument is the MonoMethod; store it in lmf->method. */
328 if ((tramp_type == MONO_TRAMPOLINE_JIT) || (tramp_type == MONO_TRAMPOLINE_JUMP)) {
329 x86_mov_reg_membase (code, X86_EAX, X86_EBP, arg_offset, sizeof (mgreg_t));
330 x86_mov_membase_reg (code, X86_EBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, method), X86_EAX, sizeof (mgreg_t));
332 x86_mov_membase_imm (code, X86_EBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, method), 0, sizeof (mgreg_t));
/* Copy saved esp and the callee-saved registers from the regarray into the LMF. */
335 x86_mov_reg_membase (code, X86_EAX, X86_EBP, regarray_offset + (X86_ESP * sizeof (mgreg_t)), sizeof (mgreg_t));
336 x86_mov_membase_reg (code, X86_EBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, esp), X86_EAX, sizeof (mgreg_t));
337 /* callee save registers */
338 x86_mov_reg_membase (code, X86_EAX, X86_EBP, regarray_offset + (X86_EBX * sizeof (mgreg_t)), sizeof (mgreg_t));
339 x86_mov_membase_reg (code, X86_EBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, ebx), X86_EAX, sizeof (mgreg_t));
340 x86_mov_reg_membase (code, X86_EAX, X86_EBP, regarray_offset + (X86_EDI * sizeof (mgreg_t)), sizeof (mgreg_t));
341 x86_mov_membase_reg (code, X86_EBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, edi), X86_EAX, sizeof (mgreg_t));
342 x86_mov_reg_membase (code, X86_EAX, X86_EBP, regarray_offset + (X86_ESI * sizeof (mgreg_t)), sizeof (mgreg_t));
343 x86_mov_membase_reg (code, X86_EBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, esi), X86_EAX, sizeof (mgreg_t));
344 x86_mov_reg_membase (code, X86_EAX, X86_EBP, regarray_offset + (X86_EBP * sizeof (mgreg_t)), sizeof (mgreg_t));
345 x86_mov_membase_reg (code, X86_EBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, ebp), X86_EAX, sizeof (mgreg_t));
348 /* get the address of lmf for the current thread */
350 code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_get_lmf_addr");
351 x86_call_reg (code, X86_EAX);
353 x86_call_code (code, mono_get_lmf_addr);
355 /* lmf->lmf_addr = lmf_addr (%eax) */
356 x86_mov_membase_reg (code, X86_EBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, lmf_addr), X86_EAX, sizeof (mgreg_t));
357 /* lmf->previous_lmf = *(lmf_addr) */
358 x86_mov_reg_membase (code, X86_ECX, X86_EAX, 0, sizeof (mgreg_t));
359 /* Signal to mono_arch_unwind_frame () that this is a trampoline frame */
360 x86_alu_reg_imm (code, X86_ADD, X86_ECX, 1);
361 x86_mov_membase_reg (code, X86_EBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, previous_lmf), X86_ECX, sizeof (mgreg_t));
362 /* *lmf_addr = lmf */
363 x86_lea_membase (code, X86_ECX, X86_EBP, lmf_offset);
364 x86_mov_membase_reg (code, X86_EAX, 0, X86_ECX, sizeof (mgreg_t));
366 /* Call trampoline function */
367 /* Arg 1 - registers */
368 x86_lea_membase (code, X86_EAX, X86_EBP, regarray_offset);
369 x86_mov_membase_reg (code, X86_ESP, (0 * sizeof (mgreg_t)), X86_EAX, sizeof (mgreg_t));
370 /* Arg2 - calling code */
371 if (tramp_type == MONO_TRAMPOLINE_JUMP) {
372 x86_mov_membase_imm (code, X86_ESP, (1 * sizeof (mgreg_t)), 0, sizeof (mgreg_t));
374 x86_mov_reg_membase (code, X86_EAX, X86_EBP, caller_ip_offset, sizeof (mgreg_t));
375 x86_mov_membase_reg (code, X86_ESP, (1 * sizeof (mgreg_t)), X86_EAX, sizeof (mgreg_t));
377 /* Arg3 - trampoline argument */
378 x86_mov_reg_membase (code, X86_EAX, X86_EBP, arg_offset, sizeof (mgreg_t));
379 x86_mov_membase_reg (code, X86_ESP, (2 * sizeof (mgreg_t)), X86_EAX, sizeof (mgreg_t));
380 /* Arg4 - trampoline address */
382 x86_mov_membase_imm (code, X86_ESP, (3 * sizeof (mgreg_t)), 0, sizeof (mgreg_t));
385 /* check the stack is aligned after the ret ip is pushed */
/* Debug-only alignment check: trap (int3) if %esp is not 16-byte aligned. */
387 x86_mov_reg_reg (code, X86_EDX, X86_ESP, 4);
388 x86_alu_reg_imm (code, X86_AND, X86_EDX, 15);
389 x86_alu_reg_imm (code, X86_CMP, X86_EDX, 0);
390 x86_branch_disp (code, X86_CC_Z, 3, FALSE);
391 x86_breakpoint (code);
/* Call the C trampoline function; in AOT mode resolve it via an AOT constant. */
396 char *icall_name = g_strdup_printf ("trampoline_func_%d", tramp_type);
397 code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, icall_name);
398 x86_call_reg (code, X86_EAX);
400 tramp = (guint8*)mono_get_trampoline_func (tramp_type);
401 x86_call_code (code, tramp);
405 * Overwrite the trampoline argument with the address we need to jump to,
408 x86_mov_membase_reg (code, X86_EBP, arg_offset, X86_EAX, 4);
410 /* Check for interruptions */
412 code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_thread_force_interruption_checkpoint");
413 x86_call_reg (code, X86_EAX);
415 x86_call_code (code, (guint8*)mono_thread_force_interruption_checkpoint);
/* Unlink the LMF: *lmf_addr = lmf->previous_lmf (undoing the +1 tag from above). */
419 x86_mov_reg_membase (code, X86_EAX, X86_EBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, lmf_addr), sizeof (mgreg_t));
420 x86_mov_reg_membase (code, X86_ECX, X86_EBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, previous_lmf), sizeof (mgreg_t));
421 x86_alu_reg_imm (code, X86_SUB, X86_ECX, 1);
422 x86_mov_membase_reg (code, X86_EAX, 0, X86_ECX, sizeof (mgreg_t));
424 /* Restore registers */
425 for (i = X86_EAX; i <= X86_EDI; ++i) {
426 if (i == X86_ESP || i == X86_EBP)
/* EAX carries the trampoline's result for most trampoline types; do not clobber it. */
428 if (i == X86_EAX && !((tramp_type == MONO_TRAMPOLINE_RESTORE_STACK_PROT) || (tramp_type == MONO_TRAMPOLINE_AOT_PLT)))
430 x86_mov_reg_membase (code, i, X86_EBP, regarray_offset + (i * 4), 4);
/* Epilogue: the CFA moves back to %esp once the frame is torn down. */
435 cfa_offset -= sizeof (mgreg_t);
436 mono_add_unwind_op_def_cfa (unwind_ops, code, buf, X86_ESP, cfa_offset);
437 mono_add_unwind_op_same_value (unwind_ops, code, buf, X86_EBP);
439 if (MONO_TRAMPOLINE_TYPE_MUST_RETURN (tramp_type)) {
440 /* Load the value returned by the trampoline */
441 x86_mov_reg_membase (code, X86_EAX, X86_ESP, 0, 4);
442 /* The trampoline returns normally, pop the trampoline argument */
443 x86_alu_reg_imm (code, X86_ADD, X86_ESP, 4);
444 cfa_offset -= sizeof (mgreg_t);
445 mono_add_unwind_op_def_cfa_offset (unwind_ops, code, buf, cfa_offset);
448 /* The trampoline argument is at the top of the stack, and it contains the address we need to branch to */
449 if (tramp_type == MONO_TRAMPOLINE_HANDLER_BLOCK_GUARD) {
450 x86_pop_reg (code, X86_EAX);
451 cfa_offset -= sizeof (mgreg_t);
452 mono_add_unwind_op_def_cfa_offset (unwind_ops, code, buf, cfa_offset);
453 x86_alu_reg_imm (code, X86_ADD, X86_ESP, 0x8);
454 x86_jump_reg (code, X86_EAX);
460 nacl_global_codeman_validate (&buf, 256, &code);
461 g_assert ((code - buf) <= 256);
462 mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_HELPER, NULL);
464 tramp_name = mono_get_generic_trampoline_name (tramp_type);
465 *info = mono_tramp_info_create (tramp_name, buf, code - buf, ji, unwind_ops);
/* push imm32 (5 bytes) + jmp rel32 (5 bytes) */
471 #define TRAMPOLINE_SIZE 10
/*
 * mono_arch_create_specific_trampoline:
 *
 * Create a tiny trampoline which pushes ARG1 and jumps to the generic
 * trampoline for TRAMP_TYPE.  Note the reversed naming in this function:
 * 'code' is the buffer start and 'buf' is the emission cursor.
 */
474 mono_arch_create_specific_trampoline (gpointer arg1, MonoTrampolineType tramp_type, MonoDomain *domain, guint32 *code_len)
476 guint8 *code, *buf, *tramp;
478 tramp = mono_get_trampoline_code (tramp_type);
480 code = buf = mono_domain_code_reserve_align (domain, TRAMPOLINE_SIZE, NACL_SIZE (4, kNaClAlignment));
482 x86_push_imm (buf, arg1);
483 x86_jump_code (buf, tramp);
484 g_assert ((buf - code) <= TRAMPOLINE_SIZE);
486 nacl_domain_code_validate (domain, &code, NACL_SIZE (4, kNaClAlignment), &buf);
488 mono_arch_flush_icache (code, buf - code);
489 mono_profiler_code_buffer_new (code, buf - code, MONO_PROFILER_CODE_BUFFER_SPECIFIC_TRAMPOLINE, mono_get_generic_trampoline_simple_name (tramp_type));
492 *code_len = buf - code;
/*
 * mono_arch_create_rgctx_lazy_fetch_trampoline:
 *
 * Create a trampoline which fetches rgctx slot SLOT.  The fast path walks the
 * rgctx array chain (depth levels deep) and returns the slot value if it is
 * already filled in; any NULL encountered along the way branches to the slow
 * path, which tail-calls the generic RGCTX_LAZY_FETCH trampoline to populate
 * the slot.
 */
498 mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot, MonoTrampInfo **info, gboolean aot)
502 guint8 **rgctx_null_jumps;
507 MonoJumpInfo *ji = NULL;
508 GSList *unwind_ops = NULL;
510 unwind_ops = mono_arch_get_cie_program ();
/* Decode the slot encoding: is it an MRGCTX slot, and at which index/depth? */
512 mrgctx = MONO_RGCTX_SLOT_IS_MRGCTX (slot);
513 index = MONO_RGCTX_SLOT_INDEX (slot);
515 index += MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT / sizeof (gpointer);
516 for (depth = 0; ; ++depth) {
517 int size = mono_class_rgctx_get_array_size (depth, mrgctx);
519 if (index < size - 1)
524 #if defined(__default_codegen__)
525 tramp_size = (aot ? 64 : 36) + 6 * depth;
526 #elif defined(__native_client_codegen__)
527 tramp_size = (aot ? 64 : 36) + 2 * kNaClAlignment +
528 6 * (depth + kNaClAlignment);
531 code = buf = mono_global_codeman_reserve (tramp_size);
/* One forward patch site per possible NULL along the chain (+ the slot itself). */
533 rgctx_null_jumps = g_malloc (sizeof (guint8*) * (depth + 2));
535 /* load vtable/mrgctx ptr */
536 x86_mov_reg_membase (code, X86_EAX, X86_ESP, 4, 4);
538 /* load rgctx ptr from vtable */
539 x86_mov_reg_membase (code, X86_EAX, X86_EAX, MONO_STRUCT_OFFSET (MonoVTable, runtime_generic_context), 4);
540 /* is the rgctx ptr null? */
541 x86_test_reg_reg (code, X86_EAX, X86_EAX);
542 /* if yes, jump to actual trampoline */
543 rgctx_null_jumps [0] = code;
544 x86_branch8 (code, X86_CC_Z, -1, 1);
547 for (i = 0; i < depth; ++i) {
548 /* load ptr to next array */
549 if (mrgctx && i == 0)
550 x86_mov_reg_membase (code, X86_EAX, X86_EAX, MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT, 4);
552 x86_mov_reg_membase (code, X86_EAX, X86_EAX, 0, 4);
553 /* is the ptr null? */
554 x86_test_reg_reg (code, X86_EAX, X86_EAX);
555 /* if yes, jump to actual trampoline */
556 rgctx_null_jumps [i + 1] = code;
557 x86_branch8 (code, X86_CC_Z, -1, 1);
/* Load the slot itself (the +1 skips the 'next array' pointer at index 0). */
561 x86_mov_reg_membase (code, X86_EAX, X86_EAX, sizeof (gpointer) * (index + 1), 4);
562 /* is the slot null? */
563 x86_test_reg_reg (code, X86_EAX, X86_EAX);
564 /* if yes, jump to actual trampoline */
565 rgctx_null_jumps [depth + 1] = code;
566 x86_branch8 (code, X86_CC_Z, -1, 1);
567 /* otherwise return */
/* Slow path: patch every NULL branch to land here (jump 0 is unused for mrgctx). */
570 for (i = mrgctx ? 1 : 0; i <= depth + 1; ++i)
571 x86_patch (rgctx_null_jumps [i], code);
573 g_free (rgctx_null_jumps);
/* Reload the vtable/mrgctx argument into the register the fetch trampoline expects. */
575 x86_mov_reg_membase (code, MONO_ARCH_VTABLE_REG, X86_ESP, 4, 4);
578 code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, g_strdup_printf ("specific_trampoline_lazy_fetch_%u", slot));
579 x86_jump_reg (code, X86_EAX);
581 tramp = mono_arch_create_specific_trampoline (GUINT_TO_POINTER (slot), MONO_TRAMPOLINE_RGCTX_LAZY_FETCH, mono_get_root_domain (), NULL);
583 /* jump to the actual trampoline */
584 x86_jump_code (code, tramp);
587 nacl_global_codeman_validate (&buf, tramp_size, &code);
588 mono_arch_flush_icache (buf, code - buf);
589 mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_GENERICS_TRAMPOLINE, NULL);
591 g_assert (code - buf <= tramp_size);
593 char *name = mono_get_rgctx_fetch_trampoline_name (slot);
594 *info = mono_tramp_info_create (name, buf, code - buf, ji, unwind_ops);
601 * mono_arch_create_general_rgctx_lazy_fetch_trampoline:
603 * This is a general variant of the rgctx fetch trampolines. It receives a pointer to gpointer[2] in the rgctx reg. The first entry contains the slot, the second
604 * the trampoline to call if the slot is not filled.
607 mono_arch_create_general_rgctx_lazy_fetch_trampoline (MonoTrampInfo **info, gboolean aot)
611 MonoJumpInfo *ji = NULL;
612 GSList *unwind_ops = NULL;
616 unwind_ops = mono_arch_get_cie_program ();
620 code = buf = mono_global_codeman_reserve (tramp_size);
622 // FIXME: Currently, we always go to the slow path.
624 /* Load trampoline addr */
/* Second entry of the gpointer[2] pointed to by the rgctx register (offset 4). */
625 x86_mov_reg_membase (code, X86_EAX, MONO_ARCH_RGCTX_REG, 4, 4);
626 /* Load mrgctx/vtable */
627 x86_mov_reg_membase (code, MONO_ARCH_VTABLE_REG, X86_ESP, 4, 4);
629 x86_jump_reg (code, X86_EAX);
631 nacl_global_codeman_validate (&buf, tramp_size, &code);
632 mono_arch_flush_icache (buf, code - buf);
633 mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_GENERICS_TRAMPOLINE, NULL);
635 g_assert (code - buf <= tramp_size);
637 *info = mono_tramp_info_create ("rgctx_fetch_trampoline_general", buf, code - buf, ji, unwind_ops);
/*
 * mono_arch_invalidate_method:
 *
 * Overwrite the start of the method described by JI with a 'push func_arg;
 * call func' sequence, so any subsequent invocation lands in FUNC instead of
 * the original code.
 */
643 mono_arch_invalidate_method (MonoJitInfo *ji, void *func, gpointer func_arg)
645 /* FIXME: This is not thread safe */
646 guint8 *code = ji->code_start;
648 x86_push_imm (code, func_arg);
649 x86_call_code (code, (guint8*)func);
/*
 * handler_block_trampoline_helper:
 *
 * C slow-path helper for the handler-block trampoline: fetch the saved
 * handler-block return address from the current thread's JIT TLS data.
 */
653 handler_block_trampoline_helper (void)
655 MonoJitTlsData *jit_tls = mono_native_tls_get_value (mono_jit_tls_id);
656 return jit_tls->handler_block_return_address;
/*
 * mono_arch_create_handler_block_trampoline:
 *
 * Create the trampoline used by the HANDLER_BLOCK_GUARD machinery.  It loads
 * the handler block's return address (fast path via TLS, slow path via a C
 * helper), simulates a call by pushing that address, then enters the generic
 * HANDLER_BLOCK_GUARD trampoline with a dummy argument.
 */
660 mono_arch_create_handler_block_trampoline (MonoTrampInfo **info, gboolean aot)
662 guint8 *tramp = mono_get_trampoline_code (MONO_TRAMPOLINE_HANDLER_BLOCK_GUARD);
665 MonoJumpInfo *ji = NULL;
667 GSList *unwind_ops = NULL;
671 code = buf = mono_global_codeman_reserve (tramp_size);
673 unwind_ops = mono_arch_get_cie_program ();
674 cfa_offset = sizeof (mgreg_t);
676 This trampoline restore the call chain of the handler block then jumps into the code that deals with it.
680 * We are in a method frame after the call emitted by OP_CALL_HANDLER.
/* Fast path: read handler_block_return_address straight out of JIT TLS. */
683 if (mono_get_jit_tls_offset () != -1) {
684 code = mono_x86_emit_tls_get (code, X86_EAX, mono_get_jit_tls_offset ());
685 x86_mov_reg_membase (code, X86_EAX, X86_EAX, MONO_STRUCT_OFFSET (MonoJitTlsData, handler_block_return_address), 4);
687 /*Slow path uses a c helper*/
688 x86_call_code (code, handler_block_trampoline_helper);
690 /* Simulate a call */
691 /*Fix stack alignment*/
692 x86_alu_reg_imm (code, X86_SUB, X86_ESP, 0x4);
693 cfa_offset += sizeof (mgreg_t);
694 mono_add_unwind_op_def_cfa_offset (unwind_ops, code, buf, cfa_offset);
696 /* This is the address the trampoline will return to */
697 x86_push_reg (code, X86_EAX);
698 cfa_offset += sizeof (mgreg_t);
699 mono_add_unwind_op_def_cfa_offset (unwind_ops, code, buf, cfa_offset);
701 /* Dummy trampoline argument, since we call the generic trampoline directly */
702 x86_push_imm (code, 0);
703 cfa_offset += sizeof (mgreg_t);
704 mono_add_unwind_op_def_cfa_offset (unwind_ops, code, buf, cfa_offset);
705 x86_jump_code (code, tramp);
707 nacl_global_codeman_validate (&buf, tramp_size, &code);
709 mono_arch_flush_icache (buf, code - buf);
710 mono_profiler_code_buffer_new (buf, code - buf, MONO_PROFILER_CODE_BUFFER_HELPER, NULL);
711 g_assert (code - buf <= tramp_size);
713 *info = mono_tramp_info_create ("handler_block_trampoline", buf, code - buf, ji, unwind_ops);
/*
 * mono_arch_get_call_target:
 *
 * If CODE is the address immediately after a 'call rel32' (opcode 0xe8, 5
 * bytes), decode and return the call target; the rel32 displacement is
 * relative to the end of the instruction, i.e. to CODE itself.
 */
719 mono_arch_get_call_target (guint8 *code)
721 if (code [-5] == 0xe8) {
722 gint32 disp = *(gint32*)(code - 4);
723 guint8 *target = code + disp;
/*
 * mono_arch_get_plt_info_offset:
 *
 * Read the PLT info offset stored after the jump instruction of the PLT entry
 * (6 bytes in on the default codegen, 12 under NaCl).
 */
732 mono_arch_get_plt_info_offset (guint8 *plt_entry, mgreg_t *regs, guint8 *code)
734 return *(guint32*)(plt_entry + NACL_SIZE (6, 12));
738 * mono_arch_get_gsharedvt_arg_trampoline:
740 * Return a trampoline which passes ARG to the gsharedvt in/out trampoline ADDR.
743 mono_arch_get_gsharedvt_arg_trampoline (MonoDomain *domain, gpointer arg, gpointer addr)
745 guint8 *code, *start;
752 start = code = mono_domain_code_reserve (domain, buf_len);
754 unwind_ops = mono_arch_get_cie_program ();
/* mov $arg, %eax ; jmp addr — ARG is handed over in EAX by convention here. */
756 x86_mov_reg_imm (code, X86_EAX, arg);
757 x86_jump_code (code, addr);
758 g_assert ((code - start) <= buf_len);
760 nacl_domain_code_validate (domain, &start, buf_len, &code);
761 mono_arch_flush_icache (start, code - start);
762 mono_profiler_code_buffer_new (start, code - start, MONO_PROFILER_CODE_BUFFER_GENERICS_TRAMPOLINE, NULL);
764 mono_tramp_info_register (mono_tramp_info_create (NULL, start, code - start, NULL, unwind_ops), domain);
770 * mono_arch_create_sdb_trampoline:
772 * Return a trampoline which captures the current context, passes it to
773 * debugger_agent_single_step_from_context ()/debugger_agent_breakpoint_from_context (),
774 * then restores the (potentially changed) context.
777 mono_arch_create_sdb_trampoline (gboolean single_step, MonoTrampInfo **info, gboolean aot)
779 int tramp_size = 256;
780 int framesize, ctx_offset, cfa_offset;
782 GSList *unwind_ops = NULL;
783 MonoJumpInfo *ji = NULL;
785 code = buf = mono_global_codeman_reserve (tramp_size);
790 framesize += sizeof (mgreg_t);
792 ctx_offset = framesize;
793 framesize += sizeof (MonoContext);
795 framesize = ALIGN_TO (framesize, MONO_ARCH_FRAME_ALIGNMENT);
799 mono_add_unwind_op_def_cfa (unwind_ops, code, buf, X86_ESP, 4);
800 // IP saved at CFA - 4
801 mono_add_unwind_op_offset (unwind_ops, code, buf, X86_NREG, -cfa_offset);
803 x86_push_reg (code, X86_EBP);
804 cfa_offset += sizeof(mgreg_t);
805 mono_add_unwind_op_def_cfa_offset (unwind_ops, code, buf, cfa_offset);
806 mono_add_unwind_op_offset (unwind_ops, code, buf, X86_EBP, - cfa_offset);
808 x86_mov_reg_reg (code, X86_EBP, X86_ESP, sizeof(mgreg_t));
809 mono_add_unwind_op_def_cfa_reg (unwind_ops, code, buf, X86_EBP);
810 /* The + 8 makes the stack aligned */
811 x86_alu_reg_imm (code, X86_SUB, X86_ESP, framesize + 8);
813 /* Initialize a MonoContext structure on the stack */
814 x86_mov_membase_reg (code, X86_ESP, ctx_offset + G_STRUCT_OFFSET (MonoContext, eax), X86_EAX, sizeof (mgreg_t));
815 x86_mov_membase_reg (code, X86_ESP, ctx_offset + G_STRUCT_OFFSET (MonoContext, ebx), X86_EBX, sizeof (mgreg_t));
816 x86_mov_membase_reg (code, X86_ESP, ctx_offset + G_STRUCT_OFFSET (MonoContext, ecx), X86_ECX, sizeof (mgreg_t));
817 x86_mov_membase_reg (code, X86_ESP, ctx_offset + G_STRUCT_OFFSET (MonoContext, edx), X86_EDX, sizeof (mgreg_t));
818 x86_mov_reg_membase (code, X86_EAX, X86_EBP, 0, sizeof (mgreg_t));
819 x86_mov_membase_reg (code, X86_ESP, ctx_offset + G_STRUCT_OFFSET (MonoContext, ebp), X86_EAX, sizeof (mgreg_t));
820 x86_mov_reg_reg (code, X86_EAX, X86_EBP, sizeof (mgreg_t));
821 x86_alu_reg_imm (code, X86_ADD, X86_EAX, cfa_offset);
822 x86_mov_membase_reg (code, X86_ESP, ctx_offset + G_STRUCT_OFFSET (MonoContext, esp), X86_ESP, sizeof (mgreg_t));
823 x86_mov_membase_reg (code, X86_ESP, ctx_offset + G_STRUCT_OFFSET (MonoContext, esi), X86_ESI, sizeof (mgreg_t));
824 x86_mov_membase_reg (code, X86_ESP, ctx_offset + G_STRUCT_OFFSET (MonoContext, edi), X86_EDI, sizeof (mgreg_t));
825 x86_mov_reg_membase (code, X86_EAX, X86_EBP, 4, sizeof (mgreg_t));
826 x86_mov_membase_reg (code, X86_ESP, ctx_offset + G_STRUCT_OFFSET (MonoContext, eip), X86_EAX, sizeof (mgreg_t));
828 /* Call the single step/breakpoint function in sdb */
829 x86_lea_membase (code, X86_EAX, X86_ESP, ctx_offset);
830 x86_mov_membase_reg (code, X86_ESP, 0, X86_EAX, sizeof (mgreg_t));
833 x86_breakpoint (code);
836 x86_call_code (code, debugger_agent_single_step_from_context);
838 x86_call_code (code, debugger_agent_breakpoint_from_context);
841 /* Restore registers from ctx */
842 /* Overwrite the saved ebp */
843 x86_mov_reg_membase (code, X86_EAX, X86_ESP, ctx_offset + G_STRUCT_OFFSET (MonoContext, ebp), sizeof (mgreg_t));
844 x86_mov_membase_reg (code, X86_EBP, 0, X86_EAX, sizeof (mgreg_t));
845 /* Overwrite saved eip */
846 x86_mov_reg_membase (code, X86_EAX, X86_ESP, ctx_offset + G_STRUCT_OFFSET (MonoContext, eip), sizeof (mgreg_t));
847 x86_mov_membase_reg (code, X86_EBP, 4, X86_EAX, sizeof (mgreg_t));
848 x86_mov_reg_membase (code, X86_EAX, X86_ESP, ctx_offset + G_STRUCT_OFFSET (MonoContext, eax), sizeof (mgreg_t));
849 x86_mov_reg_membase (code, X86_EBX, X86_ESP, ctx_offset + G_STRUCT_OFFSET (MonoContext, ebx), sizeof (mgreg_t));
850 x86_mov_reg_membase (code, X86_ECX, X86_ESP, ctx_offset + G_STRUCT_OFFSET (MonoContext, ecx), sizeof (mgreg_t));
851 x86_mov_reg_membase (code, X86_EDX, X86_ESP, ctx_offset + G_STRUCT_OFFSET (MonoContext, edx), sizeof (mgreg_t));
852 x86_mov_reg_membase (code, X86_ESI, X86_ESP, ctx_offset + G_STRUCT_OFFSET (MonoContext, esi), sizeof (mgreg_t));
853 x86_mov_reg_membase (code, X86_EDI, X86_ESP, ctx_offset + G_STRUCT_OFFSET (MonoContext, edi), sizeof (mgreg_t));
856 cfa_offset -= sizeof (mgreg_t);
857 mono_add_unwind_op_def_cfa (unwind_ops, code, buf, X86_ESP, cfa_offset);
860 mono_arch_flush_icache (code, code - buf);
861 g_assert (code - buf <= tramp_size);
863 const char *tramp_name = single_step ? "sdb_single_step_trampoline" : "sdb_breakpoint_trampoline";
864 *info = mono_tramp_info_create (tramp_name, buf, code - buf, ji, unwind_ops);
869 #if defined(ENABLE_GSHAREDVT)
/* The gsharedvt trampoline implementation lives in the mono-extensions repository. */
871 #include "../../../mono-extensions/mono/mini/tramp-x86-gsharedvt.c"
/* Stub used when gsharedvt support is not compiled in. */
876 mono_arch_get_gsharedvt_trampoline (MonoTrampInfo **info, gboolean aot)
882 #endif /* !MONOTOUCH */