/*
 * tramp-ppc.c: JIT trampoline code for PowerPC
 *
 * Authors:
 *   Dietmar Maurer (dietmar@ximian.com)
 *   Paolo Molaro (lupus@ximian.com)
 *   Carlos Valiente <yo@virutass.net>
 *   Andreas Faerber <andreas.faerber@web.de>
 *
 * (C) 2001 Ximian, Inc.
 * (C) 2007-2008 Andreas Faerber
 */
17 #include <mono/metadata/appdomain.h>
18 #include <mono/metadata/marshal.h>
19 #include <mono/metadata/tabledefs.h>
20 #include <mono/arch/ppc/ppc-codegen.h>
25 static guint8* nullified_class_init_trampoline;
27 /* Same as mono_create_ftnptr, but doesn't require a domain */
29 mono_ppc_create_ftnptr (guint8 *code)
31 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
32 MonoPPCFunctionDescriptor *ftnptr = mono_global_codeman_reserve (sizeof (MonoPPCFunctionDescriptor));
45 * Return the instruction to jump from code to target, 0 if not
46 * reachable with a single instruction
49 branch_for_target_reachable (guint8 *branch, guint8 *target)
51 gint diff = target - branch;
52 g_assert ((diff & 3) == 0);
55 return (18 << 26) | (diff);
57 /* diff between 0 and -33554432 */
58 if (diff >= -33554432)
59 return (18 << 26) | (diff & ~0xfc000000);
65 * get_unbox_trampoline:
66 * @gsctx: the generic sharing context
68 * @addr: pointer to native code for @m
70 * when value type methods are called through the vtable we need to unbox the
71 * this argument. This method returns a pointer to a trampoline which does
72 * unboxing before calling the method
75 mono_arch_get_unbox_trampoline (MonoGenericSharingContext *gsctx, MonoMethod *m, gpointer addr)
80 MonoDomain *domain = mono_domain_get ();
81 int size = MONO_PPC_32_64_CASE (20, 32) + PPC_FTNPTR_SIZE;
83 addr = mono_get_addr_from_ftnptr (addr);
85 if (MONO_TYPE_ISSTRUCT (mono_method_signature (m)->ret))
88 mono_domain_lock (domain);
89 start = code = mono_domain_code_reserve (domain, size);
90 code = mono_ppc_create_pre_code_ftnptr (code);
91 short_branch = branch_for_target_reachable (code + 4, addr);
93 mono_domain_code_commit (domain, code, size, 8);
94 mono_domain_unlock (domain);
97 ppc_addi (code, this_pos, this_pos, sizeof (MonoObject));
98 ppc_emit32 (code, short_branch);
100 ppc_load_ptr (code, ppc_r0, addr);
101 ppc_mtctr (code, ppc_r0);
102 ppc_addi (code, this_pos, this_pos, sizeof (MonoObject));
103 ppc_bcctr (code, 20, 0);
105 mono_arch_flush_icache (start, code - start);
106 g_assert ((code - start) <= size);
107 /*g_print ("unbox trampoline at %d for %s:%s\n", this_pos, m->klass->name, m->name);
108 g_print ("unbox code is at %p for method at %p\n", start, addr);*/
114 * mono_arch_get_static_rgctx_trampoline:
116 * Create a trampoline which sets RGCTX_REG to MRGCTX, then jumps to ADDR.
119 mono_arch_get_static_rgctx_trampoline (MonoMethod *m, MonoMethodRuntimeGenericContext *mrgctx, gpointer addr)
121 guint8 *code, *start, *p;
122 guint8 imm_buf [128];
123 guint32 short_branch;
124 MonoDomain *domain = mono_domain_get ();
126 int size = MONO_PPC_32_64_CASE (24, (PPC_LOAD_SEQUENCE_LENGTH * 2) + 8) + PPC_FTNPTR_SIZE;
128 addr = mono_get_addr_from_ftnptr (addr);
130 /* Compute size of code needed to emit mrgctx */
132 ppc_load_ptr (p, MONO_ARCH_RGCTX_REG, mrgctx);
133 imm_size = p - imm_buf;
135 mono_domain_lock (domain);
136 start = code = mono_domain_code_reserve (domain, size);
137 code = mono_ppc_create_pre_code_ftnptr (code);
138 short_branch = branch_for_target_reachable (code + imm_size, addr);
140 mono_domain_code_commit (domain, code, size, imm_size + 4);
141 mono_domain_unlock (domain);
144 ppc_load_ptr (code, MONO_ARCH_RGCTX_REG, mrgctx);
145 ppc_emit32 (code, short_branch);
147 ppc_load_ptr (code, ppc_r0, addr);
148 ppc_mtctr (code, ppc_r0);
149 ppc_load_ptr (code, MONO_ARCH_RGCTX_REG, mrgctx);
150 ppc_bcctr (code, 20, 0);
152 mono_arch_flush_icache (start, code - start);
153 g_assert ((code - start) <= size);
159 mono_arch_patch_callsite (guint8 *method_start, guint8 *code_ptr, guint8 *addr)
161 guint32 *code = (guint32*)code_ptr;
163 addr = mono_get_addr_from_ftnptr (addr);
165 /* This is the 'blrl' instruction */
169 * Note that methods are called also with the bl opcode.
171 if (((*code) >> 26) == 18) {
172 /*g_print ("direct patching\n");*/
173 ppc_patch ((guint8*)code, addr);
174 mono_arch_flush_icache ((guint8*)code, 4);
179 g_assert (mono_ppc_is_direct_call_sequence (code));
181 ppc_patch ((guint8*)code, addr);
185 mono_arch_patch_plt_entry (guint8 *code, gpointer *got, mgreg_t *regs, guint8 *addr)
187 guint32 ins1, ins2, offset;
189 /* Patch the jump table entry used by the plt entry */
191 /* Should be a lis+ori */
192 ins1 = ((guint32*)code)[0];
193 g_assert (ins1 >> 26 == 15);
194 ins2 = ((guint32*)code)[1];
195 g_assert (ins2 >> 26 == 24);
196 offset = ((ins1 & 0xffff) << 16) | (ins2 & 0xffff);
198 /* Either got or regs is set */
200 got = (gpointer*)(gsize) regs [30];
201 *(guint8**)((guint8*)got + offset) = addr;
205 mono_arch_nullify_class_init_trampoline (guint8 *code, mgreg_t *regs)
207 mono_arch_patch_callsite (NULL, code, nullified_class_init_trampoline);
211 mono_arch_nullify_plt_entry (guint8 *code, mgreg_t *regs)
213 if (mono_aot_only && !nullified_class_init_trampoline)
214 nullified_class_init_trampoline = mono_aot_get_named_code ("nullified_class_init_trampoline");
216 mono_arch_patch_plt_entry (code, NULL, regs, nullified_class_init_trampoline);
/* Stack size for trampoline function
 * PPC_MINIMAL_STACK_SIZE + 16 (args + alignment to ppc_magic_trampoline)
 * + MonoLMF + 14 fp regs + 13 gregs + alignment
 */
#define STACK (((PPC_MINIMAL_STACK_SIZE + 4 * sizeof (mgreg_t) + sizeof (MonoLMF) + 14 * sizeof (double) + 31 * sizeof (mgreg_t)) + (MONO_ARCH_FRAME_ALIGNMENT - 1)) & ~(MONO_ARCH_FRAME_ALIGNMENT - 1))

/* Method-specific trampoline code fragment size */
#define METHOD_TRAMPOLINE_SIZE 64

/* Jump-specific trampoline code fragment size */
#define JUMP_TRAMPOLINE_SIZE 64

#ifdef PPC_USES_FUNCTION_DESCRIPTOR
#define PPC_TOC_REG ppc_r2
#else
#define PPC_TOC_REG -1
#endif
238 mono_arch_create_trampoline_code (MonoTrampolineType tramp_type)
243 GSList *unwind_ops, *l;
245 code = mono_arch_create_trampoline_code_full (tramp_type, &code_size, &ji, &unwind_ops, FALSE);
247 //mono_save_trampoline_xdebug_info ("<generic_trampoline>", code, code_size, unwind_ops);
249 for (l = unwind_ops; l; l = l->next)
251 g_slist_free (unwind_ops);
/*
 * Stack frame description when the generic trampoline is called.
 * caller frame
 * --------------------
 *  MonoLMF
 * -------------------
 * Saved FP registers 0-13
 * -------------------
 * Saved general registers 0-30
 * -------------------
 * param area for 3 args to ppc_magic_trampoline
 * -------------------
 * linkage area
 * -------------------
 */
272 mono_arch_create_trampoline_code_full (MonoTrampolineType tramp_type, guint32 *code_size, MonoJumpInfo **ji, GSList **out_unwind_ops, gboolean aot)
274 guint8 *buf, *code = NULL;
276 gconstpointer tramp_handler;
277 int size = MONO_PPC_32_64_CASE (600, 800);
279 /* Now we'll create in 'buf' the PowerPC trampoline code. This
280 is the trampoline code common to all methods */
282 code = buf = mono_global_codeman_reserve (size);
285 *out_unwind_ops = NULL;
287 ppc_str_update (buf, ppc_r1, -STACK, ppc_r1);
289 /* start building the MonoLMF on the stack */
290 offset = STACK - sizeof (double) * MONO_SAVED_FREGS;
291 for (i = 14; i < 32; i++) {
292 ppc_stfd (buf, i, offset, ppc_r1);
293 offset += sizeof (double);
296 * now the integer registers.
298 offset = STACK - sizeof (MonoLMF) + G_STRUCT_OFFSET (MonoLMF, iregs);
299 ppc_str_multiple (buf, ppc_r13, offset, ppc_r1);
301 /* Now save the rest of the registers below the MonoLMF struct, first 14
302 * fp regs and then the 31 gregs.
304 offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double));
305 for (i = 0; i < 14; i++) {
306 ppc_stfd (buf, i, offset, ppc_r1);
307 offset += sizeof (double);
309 #define GREGS_OFFSET (STACK - sizeof (MonoLMF) - (14 * sizeof (double)) - (31 * sizeof (mgreg_t)))
310 offset = GREGS_OFFSET;
311 for (i = 0; i < 31; i++) {
312 ppc_str (buf, i, offset, ppc_r1);
313 offset += sizeof (mgreg_t);
316 /* we got here through a jump to the ctr reg, we must save the lr
317 * in the parent frame (we do it here to reduce the size of the
318 * method-specific trampoline)
320 ppc_mflr (buf, ppc_r0);
321 ppc_str (buf, ppc_r0, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);
323 /* ok, now we can continue with the MonoLMF setup, mostly untouched
324 * from emit_prolog in mini-ppc.c
327 buf = mono_arch_emit_load_aotconst (code, buf, ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_get_lmf_addr");
328 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
329 ppc_ldptr (buf, ppc_r2, sizeof (gpointer), ppc_r11);
330 ppc_ldptr (buf, ppc_r11, 0, ppc_r11);
332 ppc_mtlr (buf, ppc_r11);
335 ppc_load_func (buf, ppc_r0, mono_get_lmf_addr);
336 ppc_mtlr (buf, ppc_r0);
339 /* we build the MonoLMF structure on the stack - see mini-ppc.h
340 * The pointer to the struct is put in ppc_r11.
342 ppc_addi (buf, ppc_r11, ppc_sp, STACK - sizeof (MonoLMF));
343 ppc_stptr (buf, ppc_r3, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
344 /* new_lmf->previous_lmf = *lmf_addr */
345 ppc_ldptr (buf, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
346 ppc_stptr (buf, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
347 /* *(lmf_addr) = r11 */
348 ppc_stptr (buf, ppc_r11, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
349 /* save method info (it's stored on the stack, so get it first). */
350 if ((tramp_type == MONO_TRAMPOLINE_JIT) || (tramp_type == MONO_TRAMPOLINE_JUMP)) {
351 ppc_ldr (buf, ppc_r0, GREGS_OFFSET, ppc_r1);
352 ppc_stptr (buf, ppc_r0, G_STRUCT_OFFSET(MonoLMF, method), ppc_r11);
354 ppc_load (buf, ppc_r0, 0);
355 ppc_stptr (buf, ppc_r0, G_STRUCT_OFFSET(MonoLMF, method), ppc_r11);
357 /* store the frame pointer of the calling method */
358 ppc_addi (buf, ppc_r0, ppc_sp, STACK);
359 ppc_stptr (buf, ppc_r0, G_STRUCT_OFFSET(MonoLMF, ebp), ppc_r11);
360 /* save the IP (caller ip) */
361 if (tramp_type == MONO_TRAMPOLINE_JUMP) {
362 ppc_li (buf, ppc_r0, 0);
364 ppc_ldr (buf, ppc_r0, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);
366 ppc_stptr (buf, ppc_r0, G_STRUCT_OFFSET(MonoLMF, eip), ppc_r11);
369 * Now we're ready to call trampoline (mgreg_t *regs, guint8 *code, gpointer value, guint8 *tramp)
370 * Note that the last argument is unused.
372 /* Arg 1: a pointer to the registers */
373 ppc_addi (buf, ppc_r3, ppc_r1, GREGS_OFFSET);
375 /* Arg 2: code (next address to the instruction that called us) */
376 if (tramp_type == MONO_TRAMPOLINE_JUMP)
377 ppc_li (buf, ppc_r4, 0);
379 ppc_ldr (buf, ppc_r4, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);
381 /* Arg 3: trampoline argument */
382 if (tramp_type == MONO_TRAMPOLINE_GENERIC_CLASS_INIT)
383 ppc_ldr (buf, ppc_r5, GREGS_OFFSET + MONO_ARCH_VTABLE_REG * sizeof (mgreg_t), ppc_r1);
385 ppc_ldr (buf, ppc_r5, GREGS_OFFSET, ppc_r1);
388 buf = mono_arch_emit_load_aotconst (code, buf, ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, g_strdup_printf ("trampoline_func_%d", tramp_type));
389 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
390 ppc_ldptr (buf, ppc_r2, sizeof (gpointer), ppc_r11);
391 ppc_ldptr (buf, ppc_r11, 0, ppc_r11);
393 ppc_mtlr (buf, ppc_r11);
396 tramp_handler = mono_get_trampoline_func (tramp_type);
397 ppc_load_func (buf, ppc_r0, tramp_handler);
398 ppc_mtlr (buf, ppc_r0);
402 /* OK, code address is now on r3. Move it to the counter reg
403 * so it will be ready for the final jump: this is safe since we
404 * won't do any more calls.
406 if (!MONO_TRAMPOLINE_TYPE_MUST_RETURN (tramp_type)) {
407 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
408 ppc_ldptr (buf, ppc_r2, sizeof (gpointer), ppc_r3);
409 ppc_ldptr (buf, ppc_r3, 0, ppc_r3);
411 ppc_mtctr (buf, ppc_r3);
415 * Now we restore the MonoLMF (see emit_epilogue in mini-ppc.c)
416 * and the rest of the registers, so the method called will see
417 * the same state as before we executed.
418 * The pointer to MonoLMF is in ppc_r11.
420 ppc_addi (buf, ppc_r11, ppc_r1, STACK - sizeof (MonoLMF));
421 /* r5 = previous_lmf */
422 ppc_ldptr (buf, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
424 ppc_ldptr (buf, ppc_r6, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
425 /* *(lmf_addr) = previous_lmf */
426 ppc_stptr (buf, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r6);
428 ppc_ldr_multiple (buf, ppc_r13, G_STRUCT_OFFSET(MonoLMF, iregs), ppc_r11);
430 for (i = 14; i < 32; i++)
431 ppc_lfd (buf, i, G_STRUCT_OFFSET(MonoLMF, fregs) + ((i-14) * sizeof (gdouble)), ppc_r11);
433 /* restore the volatile registers, we skip r1, of course */
434 offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double));
435 for (i = 0; i < 14; i++) {
436 ppc_lfd (buf, i, offset, ppc_r1);
437 offset += sizeof (double);
439 offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double)) - (31 * sizeof (mgreg_t));
440 ppc_ldr (buf, ppc_r0, offset, ppc_r1);
441 offset += 2 * sizeof (mgreg_t);
442 for (i = 2; i < 13; i++) {
443 if (i != PPC_TOC_REG && (i != 3 || tramp_type != MONO_TRAMPOLINE_RGCTX_LAZY_FETCH))
444 ppc_ldr (buf, i, offset, ppc_r1);
445 offset += sizeof (mgreg_t);
448 /* Non-standard function epilogue. Instead of doing a proper
449 * return, we just jump to the compiled code.
451 /* Restore stack pointer and LR and jump to the code */
452 ppc_ldr (buf, ppc_r1, 0, ppc_r1);
453 ppc_ldr (buf, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_r1);
454 ppc_mtlr (buf, ppc_r11);
455 if (MONO_TRAMPOLINE_TYPE_MUST_RETURN (tramp_type))
458 ppc_bcctr (buf, 20, 0);
460 /* Flush instruction cache, since we've generated code */
461 mono_arch_flush_icache (code, buf - code);
463 *code_size = buf - code;
466 g_assert ((buf - code) <= size);
468 if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT) {
471 /* Initialize the nullified class init trampoline */
472 nullified_class_init_trampoline = mono_ppc_create_ftnptr (mono_arch_get_nullified_class_init_trampoline (&code_len));
478 #define TRAMPOLINE_SIZE (MONO_PPC_32_64_CASE (24, (5+5+1+1)*4))
480 mono_arch_create_specific_trampoline (gpointer arg1, MonoTrampolineType tramp_type, MonoDomain *domain, guint32 *code_len)
482 guint8 *code, *buf, *tramp;
483 guint32 short_branch;
485 tramp = mono_get_trampoline_code (tramp_type);
487 mono_domain_lock (domain);
488 code = buf = mono_domain_code_reserve_align (domain, TRAMPOLINE_SIZE, 4);
489 short_branch = branch_for_target_reachable (code + MONO_PPC_32_64_CASE (8, 5*4), tramp);
490 #ifdef __mono_ppc64__
491 /* FIXME: make shorter if possible */
494 mono_domain_code_commit (domain, code, TRAMPOLINE_SIZE, 12);
496 mono_domain_unlock (domain);
499 ppc_load_sequence (buf, ppc_r0, (mgreg_t)(gsize) arg1);
500 ppc_emit32 (buf, short_branch);
502 /* Prepare the jump to the generic trampoline code.*/
503 ppc_load_ptr (buf, ppc_r0, tramp);
504 ppc_mtctr (buf, ppc_r0);
506 /* And finally put 'arg1' in r0 and fly! */
507 ppc_load_ptr (buf, ppc_r0, arg1);
508 ppc_bcctr (buf, 20, 0);
511 /* Flush instruction cache, since we've generated code */
512 mono_arch_flush_icache (code, buf - code);
514 g_assert ((buf - code) <= TRAMPOLINE_SIZE);
516 *code_len = buf - code;
522 emit_trampoline_jump (guint8 *code, guint8 *tramp)
524 guint32 short_branch = branch_for_target_reachable (code, tramp);
526 /* FIXME: we can save a few bytes here by committing if the
527 short branch is possible */
529 ppc_emit32 (code, short_branch);
531 ppc_load_ptr (code, ppc_r0, tramp);
532 ppc_mtctr (code, ppc_r0);
533 ppc_bcctr (code, 20, 0);
540 mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot)
545 return mono_arch_create_rgctx_lazy_fetch_trampoline_full (slot, &code_size, &ji, FALSE);
549 mono_arch_create_rgctx_lazy_fetch_trampoline_full (guint32 slot, guint32 *code_size, MonoJumpInfo **ji, gboolean aot)
551 #ifdef MONO_ARCH_VTABLE_REG
554 guint8 **rgctx_null_jumps;
562 mrgctx = MONO_RGCTX_SLOT_IS_MRGCTX (slot);
563 index = MONO_RGCTX_SLOT_INDEX (slot);
565 index += MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT / sizeof (gpointer);
566 for (depth = 0; ; ++depth) {
567 int size = mono_class_rgctx_get_array_size (depth, mrgctx);
569 if (index < size - 1)
574 tramp_size = MONO_PPC_32_64_CASE (40, 52) + 12 * depth;
582 code = buf = mono_global_codeman_reserve (tramp_size);
584 rgctx_null_jumps = g_malloc (sizeof (guint8*) * (depth + 2));
588 ppc_mr (code, ppc_r4, PPC_FIRST_ARG_REG);
590 /* load rgctx ptr from vtable */
591 ppc_ldptr (code, ppc_r4, G_STRUCT_OFFSET (MonoVTable, runtime_generic_context), PPC_FIRST_ARG_REG);
592 /* is the rgctx ptr null? */
593 ppc_compare_reg_imm (code, 0, ppc_r4, 0);
594 /* if yes, jump to actual trampoline */
595 rgctx_null_jumps [0] = code;
596 ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
599 for (i = 0; i < depth; ++i) {
600 /* load ptr to next array */
601 if (mrgctx && i == 0)
602 ppc_ldptr (code, ppc_r4, MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT, ppc_r4);
604 ppc_ldptr (code, ppc_r4, 0, ppc_r4);
605 /* is the ptr null? */
606 ppc_compare_reg_imm (code, 0, ppc_r4, 0);
607 /* if yes, jump to actual trampoline */
608 rgctx_null_jumps [i + 1] = code;
609 ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
613 ppc_ldptr (code, ppc_r4, sizeof (gpointer) * (index + 1), ppc_r4);
614 /* is the slot null? */
615 ppc_compare_reg_imm (code, 0, ppc_r4, 0);
616 /* if yes, jump to actual trampoline */
617 rgctx_null_jumps [depth + 1] = code;
618 ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
619 /* otherwise return r4 */
620 /* FIXME: if we use r3 as the work register we can avoid this copy */
621 ppc_mr (code, ppc_r3, ppc_r4);
624 for (i = mrgctx ? 1 : 0; i <= depth + 1; ++i)
625 ppc_patch (rgctx_null_jumps [i], code);
627 g_free (rgctx_null_jumps);
629 /* move the rgctx pointer to the VTABLE register */
630 ppc_mr (code, MONO_ARCH_VTABLE_REG, ppc_r3);
633 code = mono_arch_emit_load_aotconst (buf, code, ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, g_strdup_printf ("specific_trampoline_lazy_fetch_%u", slot));
634 /* Branch to the trampoline */
635 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
636 ppc_ldptr (code, ppc_r11, 0, ppc_r11);
638 ppc_mtctr (code, ppc_r11);
639 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
641 tramp = mono_arch_create_specific_trampoline (GUINT_TO_POINTER (slot),
642 MONO_TRAMPOLINE_RGCTX_LAZY_FETCH, mono_get_root_domain (), NULL);
644 /* jump to the actual trampoline */
645 code = emit_trampoline_jump (code, tramp);
648 mono_arch_flush_icache (buf, code - buf);
650 g_assert (code - buf <= tramp_size);
652 *code_size = code - buf;
656 g_assert_not_reached ();
661 mono_arch_create_generic_class_init_trampoline (void)
666 return mono_arch_create_generic_class_init_trampoline_full (&code_size, &ji, FALSE);
670 mono_arch_create_generic_class_init_trampoline_full (guint32 *code_size, MonoJumpInfo **ji, gboolean aot)
674 static int byte_offset = -1;
675 static guint8 bitmask;
679 tramp_size = MONO_PPC_32_64_CASE (32, 44);
683 code = buf = mono_global_codeman_reserve (tramp_size);
688 mono_marshal_find_bitfield_offset (MonoVTable, initialized, &byte_offset, &bitmask);
690 ppc_lbz (code, ppc_r4, byte_offset, MONO_ARCH_VTABLE_REG);
691 ppc_andid (code, ppc_r4, ppc_r4, bitmask);
693 ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
697 ppc_patch (jump, code);
700 code = mono_arch_emit_load_aotconst (buf, code, ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, "specific_trampoline_generic_class_init");
701 /* Branch to the trampoline */
702 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
703 ppc_ldptr (code, ppc_r11, 0, ppc_r11);
705 ppc_mtctr (code, ppc_r11);
706 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
708 tramp = mono_arch_create_specific_trampoline (NULL, MONO_TRAMPOLINE_GENERIC_CLASS_INIT,
709 mono_get_root_domain (), NULL);
711 /* jump to the actual trampoline */
712 code = emit_trampoline_jump (code, tramp);
715 mono_arch_flush_icache (buf, code - buf);
717 *code_size = code - buf;
719 g_assert (code - buf <= tramp_size);
725 mono_arch_get_nullified_class_init_trampoline (guint32 *code_len)
728 guint32 tramp_size = 64;
730 code = buf = mono_global_codeman_reserve (tramp_size);
733 mono_arch_flush_icache (buf, code - buf);
735 *code_len = code - buf;
737 g_assert (code - buf <= tramp_size);