2 * tramp-ppc.c: JIT trampoline code for PowerPC
5 * Dietmar Maurer (dietmar@ximian.com)
6 * Paolo Molaro (lupus@ximian.com)
7 * Carlos Valiente <yo@virutass.net>
8 * Andreas Faerber <andreas.faerber@web.de>
10 * (C) 2001 Ximian, Inc.
11 * (C) 2007-2008 Andreas Faerber
17 #include <mono/metadata/appdomain.h>
18 #include <mono/metadata/marshal.h>
19 #include <mono/metadata/tabledefs.h>
20 #include <mono/arch/ppc/ppc-codegen.h>
/* Code address of the no-op ("nullified") class-init trampoline; call sites
 * are patched to point here once the class has been initialized, so the
 * check becomes a cheap no-op. Set lazily (see mono_arch_nullify_plt_entry
 * and the MONO_TRAMPOLINE_CLASS_INIT path below). */
25 static guint8* nullified_class_init_trampoline;
27 /* Same as mono_create_ftnptr, but doesn't require a domain */
/* On ABIs that use function descriptors (64-bit PowerPC ELF), a callable
 * "function pointer" is a descriptor, not the raw code address; this wraps
 * @code in such a descriptor allocated from the global code manager. */
29 mono_ppc_create_ftnptr (guint8 *code)
31 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
/* Reserve descriptor storage globally (no per-domain code manager needed). */
32 MonoPPCFunctionDescriptor *ftnptr = mono_global_codeman_reserve (sizeof (MonoPPCFunctionDescriptor));
45 * Return the instruction to jump from code to target, 0 if not
46 * reachable with a single instruction
/*
 * Encodes an unconditional PPC "b" instruction (I-form, primary opcode 18)
 * from @branch to @target when the displacement fits in the signed 26-bit
 * LI field (roughly +/-32 MB); otherwise the caller must emit a full
 * load-address + mtctr + bctr sequence.
 */
49 branch_for_target_reachable (guint8 *branch, guint8 *target)
51 gint diff = target - branch;
/* PPC branch displacements are word offsets; misalignment is a codegen bug. */
52 g_assert ((diff & 3) == 0);
55 return (18 << 26) | (diff);
57 /* diff between 0 and -33554432 */
58 if (diff >= -33554432)
/* Mask off the sign-extension bits so only the 26-bit field is kept. */
59 return (18 << 26) | (diff & ~0xfc000000);
65 * get_unbox_trampoline:
66 * @gsctx: the generic sharing context
68 * @addr: pointer to native code for @m
70 * when value type methods are called through the vtable we need to unbox the
71 * this argument. This method returns a pointer to a trampoline which does
72 * unboxing before calling the method
75 mono_arch_get_unbox_trampoline (MonoGenericSharingContext *gsctx, MonoMethod *m, gpointer addr)
80 MonoDomain *domain = mono_domain_get ();
/* Worst-case size: short ppc32 path vs. full ppc64 load sequence, plus
 * space for a function descriptor where the ABI requires one. */
81 int size = MONO_PPC_32_64_CASE (20, 32) + PPC_FTNPTR_SIZE;
83 addr = mono_get_addr_from_ftnptr (addr);
/* A struct return passes a hidden return-buffer pointer in the first
 * argument register, shifting where `this' lives. */
85 if (MONO_TYPE_ISSTRUCT (mono_method_signature (m)->ret))
88 mono_domain_lock (domain);
89 start = code = mono_domain_code_reserve (domain, size);
90 code = mono_ppc_create_pre_code_ftnptr (code);
/* +4: the unbox addi (one instruction) is emitted before the branch. */
91 short_branch = branch_for_target_reachable (code + 4, addr);
93 mono_domain_code_commit (domain, code, size, 8);
94 mono_domain_unlock (domain);
/* Short path: step `this' past the MonoObject header, then one direct branch. */
97 ppc_addi (code, this_pos, this_pos, sizeof (MonoObject));
98 ppc_emit32 (code, short_branch);
/* Long path: load target into r0, move to CTR, unbox `this', jump. */
100 ppc_load (code, ppc_r0, addr);
101 ppc_mtctr (code, ppc_r0);
102 ppc_addi (code, this_pos, this_pos, sizeof (MonoObject));
/* bcctr 20,0 = unconditional branch to CTR. */
103 ppc_bcctr (code, 20, 0);
105 mono_arch_flush_icache (start, code - start);
106 g_assert ((code - start) <= size);
107 /*g_print ("unbox trampoline at %d for %s:%s\n", this_pos, m->klass->name, m->name);
108 g_print ("unbox code is at %p for method at %p\n", start, addr);*/
114 * mono_arch_get_static_rgctx_trampoline:
116 * Create a trampoline which sets RGCTX_REG to MRGCTX, then jumps to ADDR.
119 mono_arch_get_static_rgctx_trampoline (MonoMethod *m, MonoMethodRuntimeGenericContext *mrgctx, gpointer addr)
121 guint8 *code, *start, *p;
/* Scratch buffer used only to measure the mrgctx load sequence below. */
122 guint8 imm_buf [128];
123 guint32 short_branch;
124 MonoDomain *domain = mono_domain_get ();
126 int size = MONO_PPC_32_64_CASE (24, (PPC_LOAD_SEQUENCE_LENGTH * 2) + 8) + PPC_FTNPTR_SIZE;
128 addr = mono_get_addr_from_ftnptr (addr);
130 /* Compute size of code needed to emit mrgctx */
/* Dry-run the load into imm_buf; its length depends on the immediate value. */
132 ppc_load (p, MONO_ARCH_RGCTX_REG, mrgctx);
133 imm_size = p - imm_buf;
135 mono_domain_lock (domain);
136 start = code = mono_domain_code_reserve (domain, size);
137 code = mono_ppc_create_pre_code_ftnptr (code);
/* The branch will sit right after the measured load sequence. */
138 short_branch = branch_for_target_reachable (code + imm_size, addr);
140 mono_domain_code_commit (domain, code, size, imm_size + 4);
141 mono_domain_unlock (domain);
/* Short path: set RGCTX_REG, then a single direct branch to addr. */
144 ppc_load (code, MONO_ARCH_RGCTX_REG, mrgctx);
145 ppc_emit32 (code, short_branch);
/* Long path: target goes through CTR since addr is out of branch range. */
147 ppc_load (code, ppc_r0, addr);
148 ppc_mtctr (code, ppc_r0);
149 ppc_load (code, MONO_ARCH_RGCTX_REG, mrgctx);
/* bcctr 20,0 = unconditional branch to CTR. */
150 ppc_bcctr (code, 20, 0);
152 mono_arch_flush_icache (start, code - start);
153 g_assert ((code - start) <= size);
/* Redirect the call instruction at @code_ptr so it targets @addr.
 * Handles both direct bl calls and the indirect load+bctrl sequence. */
159 mono_arch_patch_callsite (guint8 *method_start, guint8 *code_ptr, guint8 *addr)
161 guint32 *code = (guint32*)code_ptr;
163 addr = mono_get_addr_from_ftnptr (addr);
165 /* This is the 'blrl' instruction */
169 * Note that methods are called also with the bl opcode.
/* Primary opcode 18 = b/bl: a direct branch we can retarget in place. */
171 if (((*code) >> 26) == 18) {
172 /*g_print ("direct patching\n");*/
173 ppc_patch ((guint8*)code, addr);
174 mono_arch_flush_icache ((guint8*)code, 4);
/* Otherwise it must be the known indirect call sequence; patch that. */
179 g_assert (mono_ppc_is_direct_call_sequence (code));
181 ppc_patch ((guint8*)code, addr);
/* Patch a PLT entry to call @addr by rewriting the jump-table slot the
 * entry loads through. Either @got is given directly or it is recovered
 * from the saved register state in @regs. */
185 mono_arch_patch_plt_entry (guint8 *code, gpointer *got, gssize *regs, guint8 *addr)
187 guint32 ins1, ins2, offset;
188 mgreg_t *r = (mgreg_t*)regs;
190 /* Patch the jump table entry used by the plt entry */
192 /* Should be a lis+ori */
/* Primary opcode 15 = addis/lis: high 16 bits of the table offset. */
193 ins1 = ((guint32*)code)[0];
194 g_assert (ins1 >> 26 == 15);
/* Primary opcode 24 = ori: low 16 bits of the table offset. */
195 ins2 = ((guint32*)code)[1];
196 g_assert (ins2 >> 26 == 24);
/* Reassemble the 32-bit offset encoded across the two immediates. */
197 offset = ((ins1 & 0xffff) << 16) | (ins2 & 0xffff);
199 /* Either got or regs is set */
/* Recover the GOT base from saved r30 when not passed explicitly. */
201 got = (gpointer*)r [30];
202 *(guint8**)((guint8*)got + offset) = addr;
/* Once a class is initialized, retarget its class-init call site to the
 * shared no-op trampoline so subsequent calls cost (almost) nothing. */
206 mono_arch_nullify_class_init_trampoline (guint8 *code, gssize *regs)
208 mono_arch_patch_callsite (NULL, code, nullified_class_init_trampoline);
/* Same as above but for calls going through a PLT entry. */
212 mono_arch_nullify_plt_entry (guint8 *code, gssize *regs)
/* In AOT-only mode the nullified trampoline is not created by
 * mono_arch_create_trampoline_code_full; fetch it lazily from the AOT image. */
214 if (mono_aot_only && !nullified_class_init_trampoline)
215 nullified_class_init_trampoline = mono_aot_get_named_code ("nullified_class_init_trampoline");
217 mono_arch_patch_plt_entry (code, NULL, regs, nullified_class_init_trampoline);
220 /* Stack size for trampoline function
221 * PPC_MINIMAL_STACK_SIZE + 16 (args + alignment to ppc_magic_trampoline)
222 * + MonoLMF + 14 fp regs + 31 gregs + alignment
/* NOTE(review): the "+ 16" in the comment corresponds to 4 * sizeof (gulong),
 * which is 16 only on ppc32 (32 on ppc64) — the macro is the authority. */
224 #define STACK (PPC_MINIMAL_STACK_SIZE + 4 * sizeof (gulong) + sizeof (MonoLMF) + 14 * sizeof (double) + 31 * sizeof (gulong))
226 /* Method-specific trampoline code fragment size */
227 #define METHOD_TRAMPOLINE_SIZE 64
229 /* Jump-specific trampoline code fragment size */
230 #define JUMP_TRAMPOLINE_SIZE 64
232 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
/* r2 holds the TOC pointer under the function-descriptor ABI and must not
 * be clobbered when restoring registers; -1 means "no TOC register". */
233 #define PPC_TOC_REG ppc_r2
235 #define PPC_TOC_REG -1
/* Convenience wrapper around the _full variant for the non-AOT case;
 * the patch-info/unwind outputs are discarded. */
239 mono_arch_create_trampoline_code (MonoTrampolineType tramp_type)
244 GSList *unwind_ops, *l;
246 code = mono_arch_create_trampoline_code_full (tramp_type, &code_size, &ji, &unwind_ops, FALSE);
248 //mono_save_trampoline_xdebug_info ("<generic_trampoline>", code, code_size, unwind_ops);
/* Unwind ops are not needed at runtime here; free the list. */
250 for (l = unwind_ops; l; l = l->next)
252 g_slist_free (unwind_ops);
258 * Stack frame description when the generic trampoline is called.
260 * --------------------
262 * -------------------
263 * Saved FP registers 0-13
264 * -------------------
265 * Saved general registers 0-30
266 * -------------------
267 * param area for 3 args to ppc_magic_trampoline
268 * -------------------
270 * -------------------
/*
 * Emits the generic trampoline shared by every method of a given
 * trampoline type: it saves the full register state plus a MonoLMF,
 * calls the C-level trampoline handler, then restores state and either
 * returns or jumps to the code address the handler produced.
 * When @aot is true, handler addresses are resolved via AOT constants
 * (mono_arch_emit_load_aotconst) instead of being embedded directly.
 */
273 mono_arch_create_trampoline_code_full (MonoTrampolineType tramp_type, guint32 *code_size, MonoJumpInfo **ji, GSList **out_unwind_ops, gboolean aot)
275 guint8 *buf, *code = NULL;
277 gconstpointer tramp_handler;
278 int size = MONO_PPC_32_64_CASE (600, 800);
280 /* Now we'll create in 'buf' the PowerPC trampoline code. This
281 is the trampoline code common to all methods */
283 code = buf = mono_global_codeman_reserve (size);
286 *out_unwind_ops = NULL;
/* Atomically allocate the frame and store the back-chain (stdu/stwu). */
288 ppc_store_reg_update (buf, ppc_r1, -STACK, ppc_r1);
290 /* start building the MonoLMF on the stack */
/* Callee-saved FP regs f14-f31 go into the MonoLMF fregs area. */
291 offset = STACK - sizeof (double) * MONO_SAVED_FREGS;
292 for (i = 14; i < 32; i++) {
293 ppc_stfd (buf, i, offset, ppc_r1);
294 offset += sizeof (double);
297 * now the integer registers.
/* stmw/sequence: store r13-r31 into the MonoLMF iregs array. */
299 offset = STACK - sizeof (MonoLMF) + G_STRUCT_OFFSET (MonoLMF, iregs);
300 ppc_store_multiple_regs (buf, ppc_r13, offset, ppc_r1);
302 /* Now save the rest of the registers below the MonoLMF struct, first 14
303 * fp regs and then the 31 gregs.
305 offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double));
306 for (i = 0; i < 14; i++) {
307 ppc_stfd (buf, i, offset, ppc_r1);
308 offset += sizeof (double);
/* Start of the saved r0-r30 block; also the `regs' argument passed to the
 * handler, and what the restore loop near the end reads back. */
310 #define GREGS_OFFSET (STACK - sizeof (MonoLMF) - (14 * sizeof (double)) - (31 * sizeof (gulong)))
311 offset = GREGS_OFFSET;
312 for (i = 0; i < 31; i++) {
313 ppc_store_reg (buf, i, offset, ppc_r1);
314 offset += sizeof (gulong);
317 /* we got here through a jump to the ctr reg, we must save the lr
318 * in the parent frame (we do it here to reduce the size of the
319 * method-specific trampoline)
321 ppc_mflr (buf, ppc_r0);
322 ppc_store_reg (buf, ppc_r0, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);
324 /* ok, now we can continue with the MonoLMF setup, mostly untouched
325 * from emit_prolog in mini-ppc.c
/* AOT path: resolve mono_get_lmf_addr via a patchable constant in r11. */
328 buf = mono_arch_emit_load_aotconst (code, buf, ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_get_lmf_addr");
329 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
/* Function descriptor: load TOC into r2 and entry point from slot 0. */
330 ppc_load_reg (buf, ppc_r2, sizeof (gpointer), ppc_r11);
331 ppc_load_reg (buf, ppc_r11, 0, ppc_r11);
333 ppc_mtlr (buf, ppc_r11);
/* Non-AOT path: embed the helper address directly. */
336 ppc_load_func (buf, ppc_r0, mono_get_lmf_addr);
337 ppc_mtlr (buf, ppc_r0);
340 /* we build the MonoLMF structure on the stack - see mini-ppc.h
341 * The pointer to the struct is put in ppc_r11.
343 ppc_addi (buf, ppc_r11, ppc_sp, STACK - sizeof (MonoLMF));
/* r3 holds the lmf_addr returned by mono_get_lmf_addr. */
344 ppc_store_reg (buf, ppc_r3, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
345 /* new_lmf->previous_lmf = *lmf_addr */
346 ppc_load_reg (buf, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
347 ppc_store_reg (buf, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
348 /* *(lmf_addr) = r11 */
349 ppc_store_reg (buf, ppc_r11, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
350 /* save method info (it's stored on the stack, so get it first and put it
351 * in r5 as it's the third argument to the function)
/* Generic class init: the vtable argument arrives in the first arg register,
 * so fetch it from that register's stack slot. */
353 if (tramp_type == MONO_TRAMPOLINE_GENERIC_CLASS_INIT)
354 ppc_load_reg (buf, ppc_r5, GREGS_OFFSET + PPC_FIRST_ARG_REG * sizeof (gpointer), ppc_r1);
356 ppc_load_reg (buf, ppc_r5, GREGS_OFFSET, ppc_r1);
/* Only JIT/JUMP trampolines carry a MonoMethod* worth recording in the LMF. */
357 if ((tramp_type == MONO_TRAMPOLINE_JIT) || (tramp_type == MONO_TRAMPOLINE_JUMP))
358 ppc_store_reg (buf, ppc_r5, G_STRUCT_OFFSET(MonoLMF, method), ppc_r11);
359 /* store the frame pointer of the calling method */
360 ppc_addi (buf, ppc_r0, ppc_sp, STACK);
361 ppc_store_reg (buf, ppc_r0, G_STRUCT_OFFSET(MonoLMF, ebp), ppc_r11);
362 /* save the IP (caller ip) */
/* A jump trampoline has no meaningful return address — record 0. */
363 if (tramp_type == MONO_TRAMPOLINE_JUMP) {
364 ppc_li (buf, ppc_r0, 0);
366 ppc_load_reg (buf, ppc_r0, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);
368 ppc_store_reg (buf, ppc_r0, G_STRUCT_OFFSET(MonoLMF, eip), ppc_r11);
371 * Now we're ready to call trampoline (gssize *regs, guint8 *code, gpointer value, guint8 *tramp)
372 * Note that the last argument is unused.
374 /* Arg 1: a pointer to the registers */
375 ppc_addi (buf, ppc_r3, ppc_r1, GREGS_OFFSET);
377 /* Arg 2: code (next address to the instruction that called us) */
378 if (tramp_type == MONO_TRAMPOLINE_JUMP)
379 ppc_li (buf, ppc_r4, 0);
381 ppc_load_reg (buf, ppc_r4, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);
383 /* Arg 3: MonoMethod *method. It was put in r5 already above */
384 /*ppc_mr (buf, ppc_r5, ppc_r5);*/
/* AOT path: the handler symbol is named per trampoline type. */
387 buf = mono_arch_emit_load_aotconst (code, buf, ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, g_strdup_printf ("trampoline_func_%d", tramp_type));
388 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
389 ppc_load_reg (buf, ppc_r2, sizeof (gpointer), ppc_r11);
390 ppc_load_reg (buf, ppc_r11, 0, ppc_r11);
392 ppc_mtlr (buf, ppc_r11);
395 tramp_handler = mono_get_trampoline_func (tramp_type);
396 ppc_load_func (buf, ppc_r0, tramp_handler);
397 ppc_mtlr (buf, ppc_r0);
401 /* OK, code address is now on r3. Move it to the counter reg
402 * so it will be ready for the final jump: this is safe since we
403 * won't do any more calls.
405 if (!MONO_TRAMPOLINE_TYPE_MUST_RETURN (tramp_type)) {
406 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
/* The handler returned a function descriptor: unpack TOC and entry point. */
407 ppc_load_reg (buf, ppc_r2, sizeof (gpointer), ppc_r3);
408 ppc_load_reg (buf, ppc_r3, 0, ppc_r3);
410 ppc_mtctr (buf, ppc_r3);
414 * Now we restore the MonoLMF (see emit_epilogue in mini-ppc.c)
415 * and the rest of the registers, so the method called will see
416 * the same state as before we executed.
417 * The pointer to MonoLMF is in ppc_r11.
419 ppc_addi (buf, ppc_r11, ppc_r1, STACK - sizeof (MonoLMF));
420 /* r5 = previous_lmf */
421 ppc_load_reg (buf, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
423 ppc_load_reg (buf, ppc_r6, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
424 /* *(lmf_addr) = previous_lmf */
425 ppc_store_reg (buf, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r6);
/* Reload the callee-saved integer and FP registers from the MonoLMF. */
427 ppc_load_multiple_regs (buf, ppc_r13, G_STRUCT_OFFSET(MonoLMF, iregs), ppc_r11);
429 for (i = 14; i < 32; i++)
430 ppc_lfd (buf, i, G_STRUCT_OFFSET(MonoLMF, fregs) + ((i-14) * sizeof (gdouble)), ppc_r11);
432 /* restore the volatile registers, we skip r1, of course */
433 offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double));
434 for (i = 0; i < 14; i++) {
435 ppc_lfd (buf, i, offset, ppc_r1);
436 offset += sizeof (double);
438 offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double)) - (31 * sizeof (gulong));
439 ppc_load_reg (buf, ppc_r0, offset, ppc_r1);
/* Skip the r1 slot (stack pointer is restored separately below). */
440 offset += 2 * sizeof (gulong);
/* Don't clobber the TOC register, and keep r3 intact for RGCTX lazy fetch,
 * whose result is returned in r3. */
441 for (i = 2; i < 13; i++) {
442 if (i != PPC_TOC_REG && (i != 3 || tramp_type != MONO_TRAMPOLINE_RGCTX_LAZY_FETCH))
443 ppc_load_reg (buf, i, offset, ppc_r1);
444 offset += sizeof (gulong);
447 /* Non-standard function epilogue. Instead of doing a proper
448 * return, we just jump to the compiled code.
450 /* Restore stack pointer and LR and jump to the code */
451 ppc_load_reg (buf, ppc_r1, 0, ppc_r1);
452 ppc_load_reg (buf, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_r1);
453 ppc_mtlr (buf, ppc_r11);
454 if (MONO_TRAMPOLINE_TYPE_MUST_RETURN (tramp_type))
/* bcctr 20,0 = unconditional jump to the code address loaded into CTR. */
457 ppc_bcctr (buf, 20, 0);
459 /* Flush instruction cache, since we've generated code */
460 mono_arch_flush_icache (code, buf - code);
462 *code_size = buf - code;
465 g_assert ((buf - code) <= size);
467 if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT) {
470 /* Initialize the nullified class init trampoline */
471 nullified_class_init_trampoline = mono_ppc_create_ftnptr (mono_arch_get_nullified_class_init_trampoline (&code_len));
/* Per-method specific-trampoline size: on ppc64, two 5-instruction load
 * sequences (arg1 and the generic trampoline address) + mtctr + bcctr. */
477 #define TRAMPOLINE_SIZE (MONO_PPC_32_64_CASE (24, (5+5+1+1)*4))
/* Emits a tiny per-method trampoline that puts @arg1 in r0 and jumps to the
 * shared generic trampoline for @tramp_type (which reads r0 from the saved
 * register area). Uses a single direct branch when the target is in range. */
479 mono_arch_create_specific_trampoline (gpointer arg1, MonoTrampolineType tramp_type, MonoDomain *domain, guint32 *code_len)
481 guint8 *code, *buf, *tramp;
482 guint32 short_branch;
484 tramp = mono_get_trampoline_code (tramp_type);
486 mono_domain_lock (domain);
487 code = buf = mono_domain_code_reserve_align (domain, TRAMPOLINE_SIZE, 4);
/* Offset past the arg1 load sequence (2 insns on ppc32, 5 on ppc64). */
488 short_branch = branch_for_target_reachable (code + MONO_PPC_32_64_CASE (8, 5*4), tramp);
489 #ifdef __mono_ppc64__
490 /* FIXME: make shorter if possible */
493 mono_domain_code_commit (domain, code, TRAMPOLINE_SIZE, 12);
495 mono_domain_unlock (domain);
/* Short path: full-width load of arg1 into r0, then one direct branch. */
498 ppc_load_sequence (buf, ppc_r0, (gulong) arg1);
499 ppc_emit32 (buf, short_branch);
501 /* Prepare the jump to the generic trampoline code.*/
502 ppc_load (buf, ppc_r0, (gulong) tramp);
503 ppc_mtctr (buf, ppc_r0);
505 /* And finally put 'arg1' in r0 and fly! */
506 ppc_load (buf, ppc_r0, (gulong) arg1);
/* bcctr 20,0 = unconditional branch to CTR. */
507 ppc_bcctr (buf, 20, 0);
510 /* Flush instruction cache, since we've generated code */
511 mono_arch_flush_icache (code, buf - code);
513 g_assert ((buf - code) <= TRAMPOLINE_SIZE);
515 *code_len = buf - code;
/* Emits an unconditional jump from @code to @tramp: a single direct branch
 * when reachable, otherwise a load + mtctr + bcctr sequence. Returns the
 * updated code pointer. */
521 emit_trampoline_jump (guint8 *code, guint8 *tramp)
523 guint32 short_branch = branch_for_target_reachable (code, tramp);
525 /* FIXME: we can save a few bytes here by committing if the
526 short branch is possible */
528 ppc_emit32 (code, short_branch);
530 ppc_load (code, ppc_r0, tramp);
531 ppc_mtctr (code, ppc_r0);
/* bcctr 20,0 = unconditional branch to CTR. */
532 ppc_bcctr (code, 20, 0);
/* Non-AOT wrapper: the code-size and patch-info outputs are discarded. */
539 mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot)
544 return mono_arch_create_rgctx_lazy_fetch_trampoline_full (slot, &code_size, &ji, FALSE);
/* Builds the lazy-fetch trampoline for RGCTX @slot: walk the (m)rgctx
 * array chain inline and return the slot value in r3 if already populated;
 * on any NULL along the way, fall through to the generic specific
 * trampoline that computes and caches the value. */
548 mono_arch_create_rgctx_lazy_fetch_trampoline_full (guint32 *code_size, MonoJumpInfo **ji, gboolean aot)
550 #ifdef MONO_ARCH_VTABLE_REG
553 guint8 **rgctx_null_jumps;
/* Decode the packed slot encoding into (mrgctx?, index). */
561 mrgctx = MONO_RGCTX_SLOT_IS_MRGCTX (slot);
562 index = MONO_RGCTX_SLOT_INDEX (slot);
/* For an MRGCTX the first entries overlay the struct header itself. */
564 index += sizeof (MonoMethodRuntimeGenericContext) / sizeof (gpointer);
/* Determine how many array levels must be traversed to reach the slot. */
565 for (depth = 0; ; ++depth) {
566 int size = mono_class_rgctx_get_array_size (depth, mrgctx);
568 if (index < size - 1)
/* 12 bytes (3 insns) of load/compare/branch per traversal level. */
573 tramp_size = MONO_PPC_32_64_CASE (40, 52) + 12 * depth;
581 code = buf = mono_global_codeman_reserve (tramp_size);
/* One forward branch per possible NULL: rgctx ptr, each level, the slot. */
583 rgctx_null_jumps = g_malloc (sizeof (guint8*) * (depth + 2));
/* MRGCTX case: the argument already is the rgctx pointer. */
587 ppc_mr (code, ppc_r4, PPC_FIRST_ARG_REG);
589 /* load rgctx ptr from vtable */
590 ppc_load_reg (code, ppc_r4, G_STRUCT_OFFSET (MonoVTable, runtime_generic_context), PPC_FIRST_ARG_REG);
591 /* is the rgctx ptr null? */
592 ppc_compare_reg_imm (code, 0, ppc_r4, 0);
593 /* if yes, jump to actual trampoline */
594 rgctx_null_jumps [0] = code;
595 ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
598 for (i = 0; i < depth; ++i) {
599 /* load ptr to next array */
600 if (mrgctx && i == 0)
601 ppc_load_reg (code, ppc_r4, sizeof (MonoMethodRuntimeGenericContext), ppc_r4);
603 ppc_load_reg (code, ppc_r4, 0, ppc_r4);
604 /* is the ptr null? */
605 ppc_compare_reg_imm (code, 0, ppc_r4, 0);
606 /* if yes, jump to actual trampoline */
607 rgctx_null_jumps [i + 1] = code;
608 ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
612 ppc_load_reg (code, ppc_r4, sizeof (gpointer) * (index + 1), ppc_r4);
613 /* is the slot null? */
614 ppc_compare_reg_imm (code, 0, ppc_r4, 0);
615 /* if yes, jump to actual trampoline */
616 rgctx_null_jumps [depth + 1] = code;
617 ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
618 /* otherwise return r4 */
619 /* FIXME: if we use r3 as the work register we can avoid this copy */
620 ppc_mr (code, ppc_r3, ppc_r4);
/* Patch all the recorded NULL-check branches to land on the slow path. */
623 for (i = mrgctx ? 1 : 0; i <= depth + 1; ++i)
624 ppc_patch (rgctx_null_jumps [i], code);
626 g_free (rgctx_null_jumps);
628 /* move the rgctx pointer to the VTABLE register */
629 ppc_mr (code, MONO_ARCH_VTABLE_REG, ppc_r3);
/* AOT slow path: resolve the slot-specific trampoline via a named constant. */
632 code = mono_arch_emit_load_aotconst (buf, code, ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, g_strdup_printf ("specific_trampoline_lazy_fetch_%u", slot));
633 /* Branch to the trampoline */
634 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
/* Function descriptor: entry point lives in slot 0. */
635 ppc_load_reg (code, ppc_r11, 0, ppc_r11);
637 ppc_mtctr (code, ppc_r11);
638 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
/* Non-AOT slow path: create and jump to the specific trampoline directly. */
640 tramp = mono_arch_create_specific_trampoline (GUINT_TO_POINTER (slot),
641 MONO_TRAMPOLINE_RGCTX_LAZY_FETCH, mono_get_root_domain (), NULL);
643 /* jump to the actual trampoline */
644 code = emit_trampoline_jump (code, tramp);
647 mono_arch_flush_icache (buf, code - buf);
649 g_assert (code - buf <= tramp_size);
651 *code_size = code - buf;
/* Without MONO_ARCH_VTABLE_REG this trampoline cannot be built. */
655 g_assert_not_reached ();
/* Non-AOT wrapper: the code-size and patch-info outputs are discarded. */
660 mono_arch_create_generic_class_init_trampoline (void)
665 return mono_arch_create_generic_class_init_trampoline_full (&code_size, &ji, FALSE);
/* Builds the generic class-init trampoline: test the vtable's `initialized'
 * bit inline and return immediately when set; otherwise fall through to the
 * specific trampoline that runs the class initializer. */
669 mono_arch_create_generic_class_init_trampoline_full (guint32 *code_size, MonoJumpInfo **ji, gboolean aot)
/* Byte offset and mask of the `initialized' bitfield within MonoVTable,
 * computed once and cached across calls. */
673 static int byte_offset = -1;
674 static guint8 bitmask;
678 tramp_size = MONO_PPC_32_64_CASE (32, 44);
682 code = buf = mono_global_codeman_reserve (tramp_size);
687 mono_marshal_find_bitfield_offset (MonoVTable, initialized, &byte_offset, &bitmask);
/* Load the flag byte from the vtable (passed in the first arg register)
 * and test the `initialized' bit. */
689 ppc_lbz (code, ppc_r4, byte_offset, PPC_FIRST_ARG_REG);
690 ppc_andid (code, ppc_r4, ppc_r4, bitmask);
/* Bit clear -> class not initialized -> take the slow path below. */
692 ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
/* Retarget the forward branch to the slow-path code emitted here. */
696 ppc_patch (jump, code);
/* AOT slow path: resolve the specific trampoline via a named constant. */
699 code = mono_arch_emit_load_aotconst (buf, code, ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, "specific_trampoline_generic_class_init");
700 /* Branch to the trampoline */
701 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
/* Function descriptor: entry point lives in slot 0. */
702 ppc_load_reg (code, ppc_r11, 0, ppc_r11);
704 ppc_mtctr (code, ppc_r11);
705 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
/* Non-AOT slow path: create and jump to the specific trampoline directly. */
707 tramp = mono_arch_create_specific_trampoline (NULL, MONO_TRAMPOLINE_GENERIC_CLASS_INIT,
708 mono_get_root_domain (), NULL);
710 /* jump to the actual trampoline */
711 code = emit_trampoline_jump (code, tramp);
714 mono_arch_flush_icache (buf, code - buf);
716 *code_size = code - buf;
718 g_assert (code - buf <= tramp_size);
/* Emits the no-op trampoline installed over class-init call sites once the
 * class is initialized (see mono_arch_nullify_class_init_trampoline). */
724 mono_arch_get_nullified_class_init_trampoline (guint32 *code_len)
727 guint32 tramp_size = 64;
729 code = buf = mono_global_codeman_reserve (tramp_size);
732 mono_arch_flush_icache (buf, code - buf);
734 *code_len = code - buf;
736 g_assert (code - buf <= tramp_size);