2 * tramp-ia64.c: JIT trampoline code for ia64
5 * Zoltan Varga (vargaz@gmail.com)
7 * (C) 2001 Ximian, Inc.
13 #include <mono/metadata/appdomain.h>
14 #include <mono/metadata/marshal.h>
15 #include <mono/metadata/tabledefs.h>
16 #include <mono/arch/ia64/ia64-codegen.h>
17 #include <mono/metadata/mono-debug-debugger.h>
20 #include "mini-ia64.h"
22 #define NOT_IMPLEMENTED g_assert_not_reached ()
24 #define GP_SCRATCH_REG 31
25 #define GP_SCRATCH_REG2 30
28 * get_unbox_trampoline:
30 * @addr: pointer to native code for @m
32 * When value type methods are called through the vtable, we need to unbox the
33 * 'this' argument. This method returns a pointer to a trampoline which
34 * performs the unboxing before calling the method.
37 get_unbox_trampoline (MonoMethod *m, gpointer addr)
/* NOTE(review): several lines are elided in this listing (opening brace, the
 * declarations of 'buf' and 'this_reg', and the final return). Comments below
 * describe only what the visible code demonstrably does. */
40 	gpointer func_addr, func_gp;
41 	Ia64CodegenState code;
43 	MonoDomain *domain = mono_domain_get ();
45 	/* FIXME: Optimize this */
/* Presumably the elided code adjusts this_reg when the method returns a
 * struct by value (hidden return-buffer argument shifts 'this') — TODO
 * confirm against the full source. */
47 	if (!mono_method_signature (m)->ret->byref && MONO_TYPE_ISSTRUCT (mono_method_signature (m)->ret))
/* On ia64, 'addr' points to a function descriptor: slot [0] is the code
 * entry point, slot [1] is the gp (global pointer). */
50 	func_addr = ((gpointer*)addr) [0];
51 	func_gp = ((gpointer*)addr) [1];
/* Reserve space for the unbox trampoline in the domain's code manager;
 * the lock protects the shared code_mp. */
53 	mono_domain_lock (domain);
54 	buf = mono_code_manager_reserve (domain->code_mp, 256);
55 	mono_domain_unlock (domain);
57 	/* Since the this reg is a stacked register, it's a bit hard to access it */
58 	ia64_codegen_init (code, buf);
/* Allocate a register frame so the caller's stacked inputs (r32..) become
 * addressable; r40 receives the saved ar.pfs (restored below). */
59 	ia64_alloc (code, 40, 8, 1, 0, 0);
/* Unbox: advance 'this' past the MonoObject header to the raw value data. */
60 	ia64_adds_imm (code, 32 + this_reg, sizeof (MonoObject), 32 + this_reg);
61 	ia64_mov_to_ar_i (code, IA64_PFS, 40);
/* Tail-branch to the real method entry point. */
62 	ia64_movl (code, GP_SCRATCH_REG, func_addr);
63 	ia64_mov_to_br (code, IA64_B6, GP_SCRATCH_REG);
64 	ia64_br_cond_reg (code, IA64_B6);
65 	ia64_codegen_close (code);
67 	g_assert (code.buf - buf < 256);
/* Build a fresh two-slot function descriptor (entry, gp) for the trampoline;
 * presumably filled in and returned by the elided lines that follow. */
71 	gpointer *desc = g_malloc0 (sizeof (gpointer) * 2);
79 * ia64_magic_trampoline:
82 ia64_magic_trampoline (long *regs, guint8 *code, MonoMethod *m, guint8* tramp)
/* Generic JIT trampoline target: compiles 'm', wraps the result in an unbox
 * trampoline for valuetype methods, and (where safe) patches the vtable slot
 * that made the call so future calls go straight to the compiled code.
 * NOTE(review): the opening brace, the 'addr' declaration, the jump-case
 * handling and the return statement are elided in this listing. */
85 	gpointer *vtable_slot;
87 	addr = mono_compile_method (m);
90 	//printf ("ENTER: %s\n", mono_method_full_name (m, TRUE));
92 	/* the method was jumped to */
94 	/* FIXME: Optimize the case when the call is from a delegate wrapper */
/* Recover the vtable slot through which the call was made, from the caller's
 * code and the register state saved by the generic trampoline. */
97 	vtable_slot = mono_arch_get_vcall_slot_addr (code, (gpointer*)regs);
/* Valuetype methods called through the vtable receive a boxed 'this';
 * interpose the unbox trampoline so the method sees the raw value. */
100 	if (m->klass->valuetype)
101 		addr = get_unbox_trampoline (m, addr);
103 	g_assert (*vtable_slot);
/* Only patch slots we own (domain vtables or AOT GOT entries); the patch
 * itself is on elided lines. */
105 	if (mono_aot_is_got_entry (code, (guint8*)vtable_slot) || mono_domain_owns_vtable_slot (mono_domain_get (), vtable_slot))
109 	/* FIXME: Patch calling code */
116 * ia64_aot_trampoline:
118 * This trampoline handles calls made from AOT code. We try to bypass the
119 * normal JIT compilation logic to avoid loading the metadata for the method.
/* AOT-call trampoline target: per the comment above, it tries to resolve the
 * callee without loading metadata. NOTE(review): only the (partial) signature
 * is visible in this listing; the remaining parameters and the entire body
 * are elided. */
122 ia64_aot_trampoline (long *regs, guint8 *code, guint8 *token_info,
131 * ia64_class_init_trampoline:
133 * This method calls mono_runtime_class_init () to run the static constructor
134 * for the type, then patches the caller code so it is not called again.
137 ia64_class_init_trampoline (long *regs, guint8 *code, MonoVTable *vtable, guint8 *tramp)
/* Class-init trampoline target: runs the type's static constructor.
 * 'regs'/'code'/'tramp' identify the call site; per the FIXME below,
 * patching the caller so the trampoline is not re-entered is not yet
 * implemented in the visible code. (Braces elided in this listing.) */
139 	mono_runtime_class_init (vtable);
141 	/* FIXME: Patch calling code */
145 mono_arch_create_trampoline_code (MonoTrampolineType tramp_type)
/* Emits the generic trampoline for 'tramp_type': it saves the caller-visible
 * register state, calls the matching C handler (ia64_magic_trampoline /
 * ia64_class_init_trampoline / ia64_aot_trampoline), then either returns to
 * the caller (class init) or branches to the freshly compiled method.
 * Dynamic unwind info for the prolog is registered with libunwind.
 * NOTE(review): a number of lines are elided in this listing (opening brace,
 * 'buf'/'tramp'/'di' declarations, the in0/local0/out0 and l0..l4 register
 * assignments, several offset updates, closing braces and the return). */
148 	int i, offset, saved_regs_offset, saved_fpregs_offset, framesize;
149 	int in0, local0, out0, l0, l1, l2, l3, l4, l5, l6, l7, l8, o0, o1, o2, o3;
151 	Ia64CodegenState code;
153 	unw_dyn_region_info_t *r_pro;
155 	if (tramp_type == MONO_TRAMPOLINE_JUMP)
/* Trampoline code lives in the global (cross-domain) code manager. */
160 	buf = mono_global_codeman_reserve (2048);
162 	ia64_codegen_init (code, buf);
164 	/* FIXME: Save/restore lmf */
166 	/* Stacked Registers */
175 	l5 = 45; /* saved ar.pfs */
178 	l8 = 48; /* saved sp */
179 	o0 = out0 + 0; /* regs */
180 	o1 = out0 + 1; /* code */
181 	o2 = out0 + 2; /* arg */
182 	o3 = out0 + 3; /* tramp */
/* Room for all 128 general registers plus scratch; rounded up to the
 * required stack alignment. */
184 	framesize = (128 * 8) + 1024;
185 	framesize = (framesize + (MONO_ARCH_FRAME_ALIGNMENT - 1)) & ~ (MONO_ARCH_FRAME_ALIGNMENT - 1);
188 	 * Allocate a new register+memory stack frame.
189 	 * 8 input registers (the max used by the ABI)
191 	 * 4 output (number of parameters passed to trampoline)
193 	ia64_alloc (code, l5, local0 - in0, out0 - local0, 4, 0);
194 	ia64_mov (code, l8, IA64_SP);
195 	ia64_adds_imm (code, IA64_SP, (-framesize), IA64_SP);
197 	offset = 16; /* scratch area */
199 	/* Save the argument received from the specific trampoline */
200 	ia64_mov (code, l6, GP_SCRATCH_REG);
202 	/* Save the calling address */
203 	ia64_mov_from_br (code, l7, IA64_B0);
205 	/* Create unwind info for the prolog */
206 	r_pro = g_malloc0 (_U_dyn_region_info_size (3));
208 	r_pro->insn_count = 16;
/* Record where ar.pfs, sp and the return pointer were saved, keyed by the
 * instruction index ('when') within the prolog region. */
210 	_U_dyn_op_save_reg (&r_pro->op[i++], _U_QP_TRUE, /* when=*/ 2,
211 						/* reg=*/ UNW_IA64_AR_PFS, /* dst=*/ UNW_IA64_GR + local0 + 5);
212 	_U_dyn_op_save_reg (&r_pro->op[i++], _U_QP_TRUE, /* when=*/ 5,
213 						/* reg=*/ UNW_IA64_SP, /* dst=*/ UNW_IA64_GR + local0 + 8);
214 	_U_dyn_op_save_reg (&r_pro->op[i++], _U_QP_TRUE, /* when=*/ 14,
215 						/* reg=*/ UNW_IA64_RP, /* dst=*/ UNW_IA64_GR + local0 + 7);
216 	g_assert ((unsigned) i <= r_pro->op_count);
219 	saved_regs_offset = offset;
222 	 * Only the registers which are needed for computing vtable slots need
/* Spill each callee-visible gr into its indexed slot of the saved-regs
 * area so the C trampoline can inspect the caller's register state. */
225 	for (i = 0; i < 64; ++i)
226 		if ((1 << i) & MONO_ARCH_CALLEE_REGS) {
227 			ia64_adds_imm (code, l1, saved_regs_offset + (i * 8), IA64_SP);
228 			ia64_st8_hint (code, l1, i, 0);
231 	/* Save fp registers */
232 	saved_fpregs_offset = offset;
234 	ia64_adds_imm (code, l1, saved_fpregs_offset, IA64_SP);
/* f8..f15 (fp argument registers) are stored with post-increment. */
235 	for (i = 0; i < 8; ++i)
236 		ia64_stfd_inc_imm_hint (code, l1, i + 8, 8, 0);
238 	g_assert (offset < framesize);
240 	/* Arg1 is the pointer to the saved registers */
241 	ia64_adds_imm (code, o0, saved_regs_offset, IA64_SP);
243 	/* Arg2 is the address of the calling code */
245 	ia64_mov (code, o1, l7);
247 	ia64_mov (code, o1, 0);
249 	/* Arg3 is the method/vtable ptr */
250 	ia64_mov (code, o2, l6);
252 	/* Arg4 is the trampoline address */
254 	ia64_mov (code, o3, 0);
/* Pick the C handler matching the trampoline type; magic trampoline is the
 * default (the 'else' line is elided). */
256 	if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT)
257 		tramp = (guint8*)ia64_class_init_trampoline;
258 	else if (tramp_type == MONO_TRAMPOLINE_AOT)
259 		tramp = (guint8*)ia64_aot_trampoline;
261 		tramp = (guint8*)ia64_magic_trampoline;
263 	/* Call the trampoline using an indirect call */
/* 'tramp' is a function descriptor: load entry point (then gp) from it. */
264 	ia64_movl (code, l0, tramp);
265 	ia64_ld8_inc_imm (code, l1, l0, 8);
266 	ia64_mov_to_br (code, IA64_B6, l1);
267 	ia64_ld8 (code, IA64_GP, l0);
268 	ia64_br_call_reg (code, 0, IA64_B6);
270 	/* Restore fp regs */
271 	ia64_adds_imm (code, l1, saved_fpregs_offset, IA64_SP);
272 	for (i = 0; i < 8; ++i)
273 		ia64_ldfd_inc_imm (code, i + 8, l1, 8);
275 	/* FIXME: Handle NATs in fp regs / scratch regs */
277 	if (tramp_type != MONO_TRAMPOLINE_CLASS_INIT) {
278 		/* Load method address from function descriptor */
/* r8 holds the C handler's return value (the descriptor of the compiled
 * method); branch target goes into b6 for the tail jump below. */
279 		ia64_ld8 (code, l0, IA64_R8);
280 		ia64_mov_to_br (code, IA64_B6, l0);
283 	/* Clean up register/memory stack frame */
284 	ia64_adds_imm (code, IA64_SP, framesize, IA64_SP);
285 	ia64_mov_to_ar_i (code, IA64_PFS, l5);
/* Class-init trampolines return to the caller normally ... */
287 	if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT) {
288 		ia64_mov_ret_to_br (code, IA64_B0, l7);
289 		ia64_br_ret_reg (code, IA64_B0);
/* ... all others restore the original return address and tail-jump into the
 * newly compiled method. */
292 	/* Call the compiled method */
293 	ia64_mov_to_br (code, IA64_B0, l7);
294 	ia64_br_cond_reg (code, IA64_B6);
297 	ia64_codegen_close (code);
299 	g_assert ((code.buf - buf) <= 2048);
301 	/* FIXME: emit unwind info for epilog */
/* Register the dynamic unwind info with libunwind so exceptions/stack walks
 * can traverse the trampoline frame. */
302 	di = g_malloc0 (sizeof (unw_dyn_info_t));
303 	di->start_ip = (unw_word_t) buf;
304 	di->end_ip = (unw_word_t) code.buf;
306 	di->format = UNW_INFO_FORMAT_DYNAMIC;
307 	di->u.pi.name_ptr = (unw_word_t)"ia64_generic_trampoline";
308 	di->u.pi.regions = r_pro;
310 	_U_dyn_register (di);
312 	mono_arch_flush_icache (buf, code.buf - buf);
317 #define TRAMPOLINE_SIZE 128
320 create_specific_trampoline (gpointer arg1, MonoTrampolineType tramp_type, MonoDomain *domain, guint32 *code_len)
/* Emits a small per-callee trampoline: it loads 'arg1' (method, vtable or
 * token blob) into GP_SCRATCH_REG and branches to the shared generic
 * trampoline for 'tramp_type'. If 'code_len' is given, the emitted size is
 * written back through it.
 * NOTE(review): opening brace, 'buf'/'tramp'/'disp' declarations, the else
 * branch brace and the return are elided in this listing. Several callers
 * pass code_len == NULL; presumably the elided line before the final store
 * guards it (if (code_len)) — TODO confirm against the full source. */
324 	Ia64CodegenState code;
326 	tramp = mono_get_trampoline_code (tramp_type);
328 	mono_domain_lock (domain);
329 	buf = mono_code_manager_reserve (domain->code_mp, TRAMPOLINE_SIZE);
330 	mono_domain_unlock (domain);
332 	/* FIXME: Optimize this */
334 	ia64_codegen_init (code, buf);
/* Pass arg1 to the generic trampoline in the agreed scratch register. */
336 	ia64_movl (code, GP_SCRATCH_REG, arg1);
338 	ia64_begin_bundle (code);
/* Branch displacement is in 16-byte bundles; use the short direct branch
 * when it fits in the 21-bit immediate, otherwise go through b6. */
339 	disp = (tramp - code.buf) >> 4;
340 	if (ia64_is_imm21 (disp)) {
341 		ia64_br_cond (code, disp);
344 		ia64_movl (code, GP_SCRATCH_REG2, tramp);
345 		ia64_mov_to_br (code, IA64_B6, GP_SCRATCH_REG2);
346 		ia64_br_cond_reg (code, IA64_B6);
349 	ia64_codegen_close (code);
351 	g_assert (code.buf - buf <= TRAMPOLINE_SIZE);
353 	mono_arch_flush_icache (buf, code.buf - buf);
356 	*code_len = code.buf - buf;
362 mono_arch_create_jump_trampoline (MonoMethod *method)
/* Creates a JUMP-type specific trampoline for 'method' and wraps it in a
 * MonoJitInfo so the runtime can identify the trampoline region.
 * NOTE(review): the declarations of 'code'/'code_size'/'ji', the braces and
 * the return statement are elided in this listing. */
368 	code = create_specific_trampoline (method, MONO_TRAMPOLINE_JUMP, mono_domain_get (), &code_size);
370 	ji = g_new0 (MonoJitInfo, 1);
371 	ji->code_start = code;
372 	ji->code_size = code_size;
379 mono_arch_create_jit_trampoline (MonoMethod *method)
/* Returns a generic JIT trampoline for 'method' in the current domain;
 * the emitted size is not needed, hence the NULL code_len.
 * (Braces elided in this listing.) */
381 	return create_specific_trampoline (method, MONO_TRAMPOLINE_GENERIC, mono_domain_get (), NULL);
385 mono_arch_create_jit_trampoline_from_token (MonoImage *image, guint32 token)
/* AOT variant: instead of a MonoMethod*, the trampoline argument is a small
 * heap blob holding (image pointer, metadata token), which
 * ia64_aot_trampoline decodes at call time.
 * NOTE(review): the 'buf'/'start' declarations and braces are elided. */
387 	MonoDomain *domain = mono_domain_get ();
/* The blob lives in the domain's code manager so its lifetime matches the
 * trampoline that references it. */
390 	mono_domain_lock (domain);
391 	buf = start = mono_code_manager_reserve (domain->code_mp, 2 * sizeof (gpointer));
392 	mono_domain_unlock (domain);
/* Layout: [0] MonoImage*, [sizeof(gpointer)] guint32 token. The (gpointer)
 * intermediate cast silences alignment/aliasing warnings. */
394 	*(gpointer*)(gpointer)buf = image;
395 	buf += sizeof (gpointer);
396 	*(guint32*)(gpointer)buf = token;
398 	return create_specific_trampoline (start, MONO_TRAMPOLINE_AOT, domain, NULL);
402 * mono_arch_create_class_init_trampoline:
403 * @vtable: the type to initialize
405 * Creates a trampoline function to run a type initializer.
406 * If the trampoline is called, it calls mono_runtime_class_init with the
407 * given vtable, then patches the caller code so it does not get called any
410 * Returns: a pointer to the newly created code
413 mono_arch_create_class_init_trampoline (MonoVTable *vtable)
/* Creates a CLASS_INIT-type specific trampoline carrying 'vtable'; the
 * trampoline is allocated in the vtable's own domain. (Braces elided in
 * this listing.) */
415 	return create_specific_trampoline (vtable, MONO_TRAMPOLINE_CLASS_INIT, vtable->domain, NULL);
/* Arch hook to invalidate a compiled method's code region.
 * NOTE(review): only the signature is visible in this listing; the body is
 * elided. */
419 mono_arch_invalidate_method (MonoJitInfo *ji, void *func, gpointer func_arg)
425 mono_debugger_create_notification_function (gpointer *notification_address)