/*
 * tramp-amd64.c: JIT trampoline code for amd64
 *
 * Authors:
 *   Dietmar Maurer (dietmar@ximian.com)
 *
 * (C) 2001 Ximian, Inc.
 */
#include <config.h>
#include <glib.h>

#include <mono/metadata/appdomain.h>
#include <mono/metadata/marshal.h>
#include <mono/metadata/tabledefs.h>
#include <mono/metadata/mono-debug-debugger.h>
#include <mono/arch/amd64/amd64-codegen.h>
#ifdef HAVE_VALGRIND_MEMCHECK_H
#include <valgrind/memcheck.h>
#endif
#include "mini.h"
#include "mini-amd64.h"
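
/* On amd64, the byte range 0x40-0x4f encodes the REX instruction prefixes */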
#define IS_REX(inst) (((inst) >= 0x40) && ((inst) <= 0x4f))
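
/*
 * Points to a piece of code which simply returns; class init call sites are
 * rewritten to call it once the class has been initialized (see
 * mono_arch_nullify_class_init_trampoline () below).
 */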
static guint8* nullified_class_init_trampoline;
/**
 * mono_arch_get_unbox_trampoline:
 * @m: method pointer
 * @addr: pointer to native code for @m
 *
 * When value type methods are called through the vtable, we need to unbox the
 * 'this' argument. This method returns a pointer to a trampoline which does
 * the unboxing before calling the method.
 */
gpointer
mono_arch_get_unbox_trampoline (MonoMethod *m, gpointer addr)
{
	guint8 *code, *start;
	int this_reg = AMD64_ARG_REG1;

	MonoDomain *domain = mono_domain_get ();
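
	/*
	 * If the method returns a value type, a pointer to the return buffer is
	 * passed as the hidden first argument, so 'this' moves to the second
	 * argument register.
	 */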
	if (!mono_method_signature (m)->ret->byref && MONO_TYPE_ISSTRUCT (mono_method_signature (m)->ret))
		this_reg = AMD64_ARG_REG2;
	mono_domain_lock (domain);
	start = code = mono_code_manager_reserve (domain->code_mp, 20);
	mono_domain_unlock (domain);
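
	/* Skip over the MonoObject header so 'this' points at the raw value type data */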
	amd64_alu_reg_imm (code, X86_ADD, this_reg, sizeof (MonoObject));
	/* FIXME: Optimize this */
	amd64_mov_reg_imm (code, AMD64_RAX, addr);
	amd64_jump_reg (code, AMD64_RAX);
	g_assert ((code - start) < 20);
	mono_arch_flush_icache (start, code - start);

	return start;
}
void
mono_arch_patch_callsite (guint8 *orig_code, guint8 *addr)
{
	guint8 *code;
	guint8 buf [16];
	gboolean can_write = mono_breakpoint_clean_code (orig_code - 14, buf, sizeof (buf));

	code = buf + 14;
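
	/*
	 * A call site is one of three sequences: mov <ADDR>, %r11; call *%r11,
	 * a direct call <DISP>, or an indirect call *<OFFSET>(%rip) through a GOT
	 * entry, and each one is patched differently below.
	 */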
	if (((code [-13] == 0x49) && (code [-12] == 0xbb)) || (code [-5] == 0xe8)) {
		if (code [-5] != 0xe8) {
			/* mov <ADDR>, %r11; call *%r11: patch the 64 bit immediate */
			if (can_write)
				InterlockedExchangePointer ((gpointer*)(orig_code - 11), addr);
		} else {
			/* call <DISP>: both the target and the call site must fit into 32 bits */
			g_assert ((((guint64)(addr)) >> 32) == 0);
			g_assert ((((guint64)(orig_code)) >> 32) == 0);
			if (can_write)
				InterlockedExchange ((gint32*)(orig_code - 4), ((gint64)addr - (gint64)orig_code));
		}
	}
	else if ((code [-7] == 0x41) && (code [-6] == 0xff) && (code [-5] == 0x15)) {
		/* call *<OFFSET>(%rip) */
		gpointer *got_entry = (gpointer*)((guint8*)orig_code + (*(gint32*)(orig_code - 4)));
		if (can_write)
			InterlockedExchangePointer (got_entry, addr);
	}
}
void
mono_arch_patch_plt_entry (guint8 *code, guint8 *addr)
{
	gint32 disp;
	gpointer *plt_jump_table_entry;
	/* A PLT entry: jmp *<DISP>(%rip) */
	g_assert (code [0] == 0xff);
	g_assert (code [1] == 0x25);

	disp = *(gint32*)(code + 2);
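
	/* The displacement is relative to the end of the 6 byte jmp instruction */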
	plt_jump_table_entry = (gpointer*)(code + 6 + disp);

	InterlockedExchangePointer (plt_jump_table_entry, addr);
}
void
mono_arch_nullify_class_init_trampoline (guint8 *code, gssize *regs)
{
	/*
	 * A given byte sequence can match more than one case here, so we have to be
	 * really careful about the ordering of the cases. Longer sequences
	 * come first.
	 */
	if ((code [-4] == 0x41) && (code [-3] == 0xff) && (code [-2] == 0x15)) {
		gpointer *vtable_slot;

		/* call *<OFFSET>(%rip) */
		vtable_slot = mono_arch_get_vcall_slot_addr (code + 3, (gpointer*)regs);
		g_assert (vtable_slot);

		*vtable_slot = nullified_class_init_trampoline;
	} else if (code [-2] == 0xe8) {
		/* call <TARGET>: overwrite the 5 byte call with nops */
		guint8 *buf = code - 2;

		buf [0] = 0x66;
		buf [1] = 0x66;
		buf [2] = 0x90;
		buf [3] = 0x66;
		buf [4] = 0x90;
	} else if ((code [0] == 0x41) && (code [1] == 0xff)) {
		/* call <REG> */
		/* happens on machines without MAP_32BIT like freebsd */
		/* amd64_set_reg_template is 10 bytes long */
		guint8* buf = code - 10;

		/* FIXME: Make this thread safe */
		/* Padding code suggested by the AMD64 Opt Manual */
		buf [0] = 0x66;
		buf [1] = 0x66;
		buf [2] = 0x66;
		buf [3] = 0x90;
		buf [4] = 0x66;
		buf [5] = 0x66;
		buf [6] = 0x66;
		buf [7] = 0x90;
		buf [8] = 0x66;
		buf [9] = 0x66;
		buf [10] = 0x90;
		buf [11] = 0x66;
		buf [12] = 0x90;
	} else if (code [0] == 0x90 || code [0] == 0xeb || code [0] == 0x66) {
		/* Already changed by another thread */
		;
	} else {
		printf ("Invalid trampoline sequence: %x %x %x %x %x %x %x\n", code [0], code [1], code [2], code [3],
			code [4], code [5], code [6]);
		g_assert_not_reached ();
	}
}
void
mono_arch_nullify_plt_entry (guint8 *code)
{
	mono_arch_patch_plt_entry (code, nullified_class_init_trampoline);
}
guchar*
mono_arch_create_trampoline_code (MonoTrampolineType tramp_type)
{
	guint8 *buf, *code, *tramp, *br [2];
	int i, lmf_offset, offset, method_offset, tramp_offset, saved_regs_offset, saved_fpregs_offset, framesize;
	gboolean has_caller;

	if (tramp_type == MONO_TRAMPOLINE_JUMP)
		has_caller = FALSE;
	else
		has_caller = TRUE;

	code = buf = mono_global_codeman_reserve (512);
	framesize = 512 + sizeof (MonoLMF);
	framesize = (framesize + (MONO_ARCH_FRAME_ALIGNMENT - 1)) & ~ (MONO_ARCH_FRAME_ALIGNMENT - 1);
	/* Pop the return address off the stack */
	amd64_pop_reg (code, AMD64_R11);
	/*
	 * Allocate a new stack frame
	 */
	amd64_push_reg (code, AMD64_RBP);
	amd64_mov_reg_reg (code, AMD64_RBP, AMD64_RSP, 8);
	amd64_alu_reg_imm (code, X86_SUB, AMD64_RSP, framesize);
	offset = 0;

	offset += 8;
	tramp_offset = - offset;

	offset += 8;
	method_offset = - offset;
	/* Compute the trampoline address from the return address */
	/* 5 = length of amd64_call_membase () */
	amd64_alu_reg_imm (code, X86_SUB, AMD64_R11, 5);
	amd64_mov_membase_reg (code, AMD64_RBP, tramp_offset, AMD64_R11, 8);
	/* Save all registers */

	offset += AMD64_NREG * 8;
	saved_regs_offset = - offset;
	for (i = 0; i < AMD64_NREG; ++i)
		amd64_mov_membase_reg (code, AMD64_RBP, saved_regs_offset + (i * 8), i, 8);
	offset += 8 * 8;
	saved_fpregs_offset = - offset;
	for (i = 0; i < 8; ++i)
		amd64_movsd_membase_reg (code, AMD64_RBP, saved_fpregs_offset + (i * 8), i);
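
	/*
	 * A specific trampoline (see mono_arch_create_specific_trampoline () below)
	 * is a 5 byte call into this code followed by a one byte size marker (4 or
	 * 8) and the trampoline argument itself, so the argument can be read back
	 * from the instruction stream through the saved return address.
	 */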
	/* Obtain the trampoline argument which is encoded in the instruction stream */
	amd64_mov_reg_membase (code, AMD64_R11, AMD64_RBP, tramp_offset, 8);
	amd64_mov_reg_membase (code, AMD64_RAX, AMD64_R11, 5, 1);
	amd64_widen_reg (code, AMD64_RAX, AMD64_RAX, TRUE, FALSE);
	amd64_alu_reg_imm_size (code, X86_CMP, AMD64_RAX, 4, 1);
	br [0] = code;
	x86_branch8 (code, X86_CC_NE, 6, FALSE);
	/* 32 bit immediate */
	amd64_mov_reg_membase (code, AMD64_R11, AMD64_R11, 6, 4);
	br [1] = code;
	x86_jump8 (code, 10);
	/* 64 bit immediate */
	mono_amd64_patch (br [0], code);
	amd64_mov_reg_membase (code, AMD64_R11, AMD64_R11, 6, 8);
	mono_amd64_patch (br [1], code);
	amd64_mov_membase_reg (code, AMD64_RBP, method_offset, AMD64_R11, 8);
	/* Save LMF begin */

	offset += sizeof (MonoLMF);
	lmf_offset = - offset;
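
	/* The LMF records this frame so the runtime can unwind across the trampoline */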
	/* Save ip */
	if (has_caller)
		amd64_mov_reg_membase (code, AMD64_R11, AMD64_RBP, 8, 8);
	else
		amd64_mov_reg_imm (code, AMD64_R11, 0);
	amd64_mov_membase_reg (code, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, rip), AMD64_R11, 8);
	/* Save fp */
	amd64_mov_reg_membase (code, AMD64_R11, AMD64_RSP, framesize, 8);
	amd64_mov_membase_reg (code, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, ebp), AMD64_R11, 8);
	/* Save sp */
	amd64_mov_reg_reg (code, AMD64_R11, AMD64_RSP, 8);
	amd64_alu_reg_imm (code, X86_ADD, AMD64_R11, framesize + 16);
	amd64_mov_membase_reg (code, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, rsp), AMD64_R11, 8);
	/* Save method */
	amd64_mov_reg_membase (code, AMD64_R11, AMD64_RBP, method_offset, 8);
	amd64_mov_membase_reg (code, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, method), AMD64_R11, 8);
	/* Save callee saved regs */
#ifdef PLATFORM_WIN32
	amd64_mov_membase_reg (code, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, rdi), AMD64_RDI, 8);
	amd64_mov_membase_reg (code, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, rsi), AMD64_RSI, 8);
#endif
	amd64_mov_membase_reg (code, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, rbx), AMD64_RBX, 8);
	amd64_mov_membase_reg (code, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, r12), AMD64_R12, 8);
	amd64_mov_membase_reg (code, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, r13), AMD64_R13, 8);
	amd64_mov_membase_reg (code, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, r14), AMD64_R14, 8);
	amd64_mov_membase_reg (code, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, r15), AMD64_R15, 8);
	/* Get the address of the LMF for the current thread */
	amd64_mov_reg_imm (code, AMD64_R11, mono_get_lmf_addr);
	amd64_call_reg (code, AMD64_R11);

	/* Save lmf_addr */
	amd64_mov_membase_reg (code, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, lmf_addr), AMD64_RAX, 8);
	/* Save previous_lmf */
	amd64_mov_reg_membase (code, AMD64_R11, AMD64_RAX, 0, 8);
	amd64_mov_membase_reg (code, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, previous_lmf), AMD64_R11, 8);
	/* Set new lmf */
	amd64_lea_membase (code, AMD64_R11, AMD64_RBP, lmf_offset);
	amd64_mov_membase_reg (code, AMD64_RAX, 0, AMD64_R11, 8);

	/* Save LMF end */
	/* Arg1 is the pointer to the saved registers */
	amd64_lea_membase (code, AMD64_ARG_REG1, AMD64_RBP, saved_regs_offset);
	/* Arg2 is the address of the calling code */
	if (has_caller)
		amd64_mov_reg_membase (code, AMD64_ARG_REG2, AMD64_RBP, 8, 8);
	else
		amd64_mov_reg_imm (code, AMD64_ARG_REG2, 0);
	/* Arg3 is the method/vtable ptr */
	amd64_mov_reg_membase (code, AMD64_ARG_REG3, AMD64_RBP, method_offset, 8);

	/* Arg4 is the trampoline address */
	amd64_mov_reg_membase (code, AMD64_ARG_REG4, AMD64_RBP, tramp_offset, 8);
	if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT)
		tramp = (guint8*)mono_class_init_trampoline;
	else if (tramp_type == MONO_TRAMPOLINE_AOT)
		tramp = (guint8*)mono_aot_trampoline;
	else if (tramp_type == MONO_TRAMPOLINE_AOT_PLT)
		tramp = (guint8*)mono_aot_plt_trampoline;
	else if (tramp_type == MONO_TRAMPOLINE_DELEGATE)
		tramp = (guint8*)mono_delegate_trampoline;
	else
		tramp = (guint8*)mono_magic_trampoline;
	amd64_mov_reg_imm (code, AMD64_RAX, tramp);
	amd64_call_reg (code, AMD64_RAX);
	/* Restore LMF */

	amd64_mov_reg_membase (code, AMD64_RCX, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, previous_lmf), 8);
	amd64_mov_reg_membase (code, AMD64_R11, AMD64_RBP, lmf_offset + G_STRUCT_OFFSET (MonoLMF, lmf_addr), 8);
	amd64_mov_membase_reg (code, AMD64_R11, 0, AMD64_RCX, 8);
	/* Restore argument registers */
	for (i = 0; i < AMD64_NREG; ++i)
		if (AMD64_IS_ARGUMENT_REG (i))
			amd64_mov_reg_membase (code, i, AMD64_RBP, saved_regs_offset + (i * 8), 8);
	for (i = 0; i < 8; ++i)
		amd64_movsd_reg_membase (code, i, AMD64_RBP, saved_fpregs_offset + (i * 8));
	/* Restore stack */

	amd64_leave (code);

	if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT)
		amd64_ret (code);
	else
		/* call the compiled method */
		amd64_jump_reg (code, AMD64_RAX);
	g_assert ((code - buf) <= 512);

	mono_arch_flush_icache (buf, code - buf);
	if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT) {
		/* Initialize the nullified class init trampoline used in the AOT case */
		nullified_class_init_trampoline = code = mono_global_codeman_reserve (16);
		x86_ret (code);
	}

	return buf;
}
gpointer
mono_arch_create_specific_trampoline (gpointer arg1, MonoTrampolineType tramp_type, MonoDomain *domain, guint32 *code_len)
{
	guint8 *code, *buf, *tramp;
	int size;

	tramp = mono_get_trampoline_code (tramp_type);

	if ((((guint64)arg1) >> 32) == 0)
		size = 5 + 1 + 4;
	else
		size = 5 + 1 + 8;

	mono_domain_lock (domain);
	code = buf = mono_code_manager_reserve_align (domain->code_mp, size, 1);
	mono_domain_unlock (domain);
	amd64_call_code (code, tramp);
	/* The trampoline code will obtain the argument from the instruction stream */
	if ((((guint64)arg1) >> 32) == 0) {
		*code = 0x4;
		*(guint32*)(code + 1) = (gint64)arg1;
		code += 5;
	} else {
		*code = 0x8;
		*(guint64*)(code + 1) = (gint64)arg1;
		code += 9;
	}
	g_assert ((code - buf) <= size);

	if (code_len)
		*code_len = size;

	mono_arch_flush_icache (buf, size);

	return buf;
}
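
/*
 * Overwrite the start of the method with a call to func, passing func_arg as the
 * argument.
 */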
void
mono_arch_invalidate_method (MonoJitInfo *ji, void *func, gpointer func_arg)
{
	/* FIXME: This is not thread safe */
	guint8 *code = ji->code_start;

	amd64_mov_reg_imm (code, AMD64_ARG_REG1, func_arg);
	amd64_mov_reg_imm (code, AMD64_R11, func);

	x86_push_imm (code, (guint64)func_arg);
	amd64_call_reg (code, AMD64_R11);
}
/*
 * This method is only called when running in the Mono Debugger.
 */
gpointer
mono_debugger_create_notification_function (void)
{
	guint8 *buf, *code;

	code = buf = mono_global_codeman_reserve (2);
	x86_breakpoint (buf);
	x86_ret (buf);

	return code;
}