2 * tramp-ppc.c: JIT trampoline code for PowerPC
5 * Dietmar Maurer (dietmar@ximian.com)
6 * Paolo Molaro (lupus@ximian.com)
7 * Carlos Valiente <yo@virutass.net>
8 * Andreas Faerber <andreas.faerber@web.de>
10 * (C) 2001 Ximian, Inc.
11 * (C) 2007-2008 Andreas Faerber
17 #include <mono/metadata/appdomain.h>
18 #include <mono/metadata/marshal.h>
19 #include <mono/metadata/tabledefs.h>
20 #include <mono/arch/ppc/ppc-codegen.h>
/* No-op trampoline used to "nullify" class-init call sites once the class
 * has been initialized; created lazily (see mono_arch_create_generic_trampoline
 * and mono_arch_nullify_plt_entry below). */
25 static guint8* nullified_class_init_trampoline;
27 /* Same as mono_create_ftnptr, but doesn't require a domain */
/* On ABIs with function descriptors (PPC64 ELFv1) this wraps raw code in a
 * MonoPPCFunctionDescriptor allocated from the global code manager.
 * NOTE(review): the rest of the body is not visible in this chunk — the
 * descriptor initialization and the non-descriptor path are elided. */
29 mono_ppc_create_ftnptr (guint8 *code)
31 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
32 MonoPPCFunctionDescriptor *ftnptr = mono_global_codeman_reserve (sizeof (MonoPPCFunctionDescriptor));
45 * Return the instruction to jump from code to target, 0 if not
46 * reachable with a single instruction
/* Encodes a PPC unconditional branch ("b", primary opcode 18 in bits 0-5)
 * with a 26-bit signed, word-aligned displacement.
 * NOTE(review): the positive-range check guarding the first return and the
 * final "return 0" fallback appear elided from this chunk. */
49 branch_for_target_reachable (guint8 *branch, guint8 *target)
51 gint diff = target - branch;
/* Branch displacements must be word aligned on PPC. */
52 g_assert ((diff & 3) == 0);
55 return (18 << 26) | (diff);
57 /* diff between 0 and -33554432 */
/* -33554432 == -2^25, the most negative 26-bit signed displacement;
 * mask off the sign-extension bits so only the 26-bit field is kept. */
58 if (diff >= -33554432)
59 return (18 << 26) | (diff & ~0xfc000000);
65 * get_unbox_trampoline:
66 * @gsctx: the generic sharing context
68 * @addr: pointer to native code for @m
70 * when value type methods are called through the vtable we need to unbox the
71 * this argument. This method returns a pointer to a trampoline which does
72 * unboxing before calling the method
/* NOTE(review): several lines (declarations of code/start/short_branch/
 * this_pos, the if/else around the short-branch vs. long-jump emission, and
 * the return) are elided from this chunk. */
75 mono_arch_get_unbox_trampoline (MonoGenericSharingContext *gsctx, MonoMethod *m, gpointer addr)
80 MonoDomain *domain = mono_domain_get ();
81 int size = MONO_PPC_32_64_CASE (20, 32) + PPC_FTNPTR_SIZE;
83 addr = mono_get_addr_from_ftnptr (addr);
85 mono_domain_lock (domain);
86 start = code = mono_domain_code_reserve (domain, size);
87 code = mono_ppc_create_pre_code_ftnptr (code);
/* +4: the branch is emitted after one addi instruction. */
88 short_branch = branch_for_target_reachable (code + 4, addr);
90 mono_domain_code_commit (domain, code, size, 8);
91 mono_domain_unlock (domain);
/* Short path: skip the MonoObject header to unbox `this`, then branch
 * directly to the method. */
94 ppc_addi (code, this_pos, this_pos, sizeof (MonoObject));
95 ppc_emit32 (code, short_branch);
/* Long path: load the target into r0, move it to CTR, unbox `this`,
 * then bcctr 20,0 (branch-always through the count register). */
97 ppc_load_ptr (code, ppc_r0, addr);
98 ppc_mtctr (code, ppc_r0);
99 ppc_addi (code, this_pos, this_pos, sizeof (MonoObject));
100 ppc_bcctr (code, 20, 0);
102 mono_arch_flush_icache (start, code - start);
103 g_assert ((code - start) <= size);
104 /*g_print ("unbox trampoline at %d for %s:%s\n", this_pos, m->klass->name, m->name);
105 g_print ("unbox code is at %p for method at %p\n", start, addr);*/
111 * mono_arch_get_static_rgctx_trampoline:
113 * Create a trampoline which sets RGCTX_REG to MRGCTX, then jumps to ADDR.
/* NOTE(review): the imm_size declaration, the if/else selecting between the
 * short-branch and long-jump emission, and the return are elided here. */
116 mono_arch_get_static_rgctx_trampoline (MonoMethod *m, MonoMethodRuntimeGenericContext *mrgctx, gpointer addr)
118 guint8 *code, *start, *p;
119 guint8 imm_buf [128];
120 guint32 short_branch;
121 MonoDomain *domain = mono_domain_get ();
123 int size = MONO_PPC_32_64_CASE (24, (PPC_LOAD_SEQUENCE_LENGTH * 2) + 8) + PPC_FTNPTR_SIZE;
125 addr = mono_get_addr_from_ftnptr (addr);
127 /* Compute size of code needed to emit mrgctx */
/* Emit into a scratch buffer first, purely to measure how many bytes the
 * mrgctx load sequence will take on this target. */
129 ppc_load_ptr (p, MONO_ARCH_RGCTX_REG, mrgctx);
130 imm_size = p - imm_buf;
132 mono_domain_lock (domain);
133 start = code = mono_domain_code_reserve (domain, size);
134 code = mono_ppc_create_pre_code_ftnptr (code);
/* The branch will sit right after the mrgctx load sequence. */
135 short_branch = branch_for_target_reachable (code + imm_size, addr);
137 mono_domain_code_commit (domain, code, size, imm_size + 4);
138 mono_domain_unlock (domain);
/* Short path: load mrgctx into the RGCTX register, single branch to addr. */
141 ppc_load_ptr (code, MONO_ARCH_RGCTX_REG, mrgctx);
142 ppc_emit32 (code, short_branch);
/* Long path: indirect jump through CTR. */
144 ppc_load_ptr (code, ppc_r0, addr);
145 ppc_mtctr (code, ppc_r0);
146 ppc_load_ptr (code, MONO_ARCH_RGCTX_REG, mrgctx);
147 ppc_bcctr (code, 20, 0);
149 mono_arch_flush_icache (start, code - start);
150 g_assert ((code - start) <= size);
/* Patch the call site at code_ptr so it calls addr instead of its current
 * target.  NOTE(review): the branches handling the blrl case and the
 * direct-call-sequence fall-through are partially elided in this chunk. */
156 mono_arch_patch_callsite (guint8 *method_start, guint8 *code_ptr, guint8 *addr)
158 guint32 *code = (guint32*)code_ptr;
160 addr = mono_get_addr_from_ftnptr (addr);
162 /* This is the 'blrl' instruction */
166 * Note that methods are called also with the bl opcode.
/* Primary opcode 18 == unconditional branch family (b/bl): patch the
 * branch displacement in place. */
168 if (((*code) >> 26) == 18) {
169 /*g_print ("direct patching\n");*/
170 ppc_patch ((guint8*)code, addr);
171 mono_arch_flush_icache ((guint8*)code, 4);
/* Otherwise this must be a multi-instruction direct call sequence. */
176 g_assert (mono_ppc_is_direct_call_sequence (code));
178 ppc_patch ((guint8*)code, addr);
/* Patch a PLT entry to jump to addr by rewriting the jump-table slot the
 * entry loads from.  NOTE(review): the conditional guarding the
 * "got = regs[30]" fallback and the icache flush appear elided here. */
182 mono_arch_patch_plt_entry (guint8 *code, gpointer *got, mgreg_t *regs, guint8 *addr)
184 guint32 ins1, ins2, offset;
186 /* Patch the jump table entry used by the plt entry */
188 /* Should be a lis+ori */
/* Opcode 15 == addis (lis): upper 16 bits of the offset. */
189 ins1 = ((guint32*)code)[0];
190 g_assert (ins1 >> 26 == 15);
/* Opcode 24 == ori: lower 16 bits of the offset. */
191 ins2 = ((guint32*)code)[1];
192 g_assert (ins2 >> 26 == 24);
/* Reassemble the 32-bit GOT offset from the two immediates. */
193 offset = ((ins1 & 0xffff) << 16) | (ins2 & 0xffff);
195 /* Either got or regs is set */
/* r30 conventionally holds the GOT pointer when got was not passed in. */
197 got = (gpointer*)(gsize) regs [30];
198 *(guint8**)((guint8*)got + offset) = addr;
/* Redirect a class-init call site to the cached no-op trampoline once the
 * class has been initialized. */
202 mono_arch_nullify_class_init_trampoline (guint8 *code, mgreg_t *regs)
204 mono_arch_patch_callsite (NULL, code, nullified_class_init_trampoline);
/* Redirect a PLT entry to the no-op class-init trampoline; under AOT the
 * trampoline is looked up lazily from the AOT image on first use. */
208 mono_arch_nullify_plt_entry (guint8 *code, mgreg_t *regs)
210 if (mono_aot_only && !nullified_class_init_trampoline)
211 nullified_class_init_trampoline = mono_aot_get_trampoline ("nullified_class_init_trampoline");
213 mono_arch_patch_plt_entry (code, NULL, regs, nullified_class_init_trampoline);
216 /* Stack size for trampoline function
217 * PPC_MINIMAL_STACK_SIZE + 16 (args + alignment to ppc_magic_trampoline)
218 * + MonoLMF + 14 fp regs + 13 gregs + alignment
/* Total trampoline frame size, rounded up to MONO_ARCH_FRAME_ALIGNMENT.
 * NOTE(review): the comment above says "13 gregs" but the expression
 * reserves 31 * sizeof (mgreg_t) — the code below saves r0-r30; the
 * comment looks stale.  Confirm against mini-ppc.h before relying on it. */
220 #define STACK (((PPC_MINIMAL_STACK_SIZE + 4 * sizeof (mgreg_t) + sizeof (MonoLMF) + 14 * sizeof (double) + 31 * sizeof (mgreg_t)) + (MONO_ARCH_FRAME_ALIGNMENT - 1)) & ~(MONO_ARCH_FRAME_ALIGNMENT - 1))
222 /* Method-specific trampoline code fragment size */
223 #define METHOD_TRAMPOLINE_SIZE 64
225 /* Jump-specific trampoline code fragment size */
226 #define JUMP_TRAMPOLINE_SIZE 64
/* TOC register only exists under the function-descriptor ABI; -1 means
 * "no register", so comparisons like i != PPC_TOC_REG never match. */
228 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
229 #define PPC_TOC_REG ppc_r2
231 #define PPC_TOC_REG -1
235 * Stack frame description when the generic trampoline is called.
237 * --------------------
239 * -------------------
240 * Saved FP registers 0-13
241 * -------------------
242 * Saved general registers 0-30
243 * -------------------
244 * param area for 3 args to ppc_magic_trampoline
245 * -------------------
247 * -------------------
/* Emit the generic trampoline shared by all methods for the given
 * tramp_type: save the full register state and an LMF frame on the stack,
 * call the C trampoline handler, then restore state and jump to (or return)
 * the compiled code.
 * NOTE(review): this chunk is missing many interior lines (local variable
 * declarations, closing braces of loops/ifs, several #else/#endif lines,
 * the blr for the must-return case, and the final return) — do not treat
 * the visible text as a complete function. */
250 mono_arch_create_generic_trampoline (MonoTrampolineType tramp_type, MonoTrampInfo **info, gboolean aot)
253 guint8 *buf, *code = NULL;
255 gconstpointer tramp_handler;
256 int size = MONO_PPC_32_64_CASE (600, 800);
257 GSList *unwind_ops = NULL;
258 MonoJumpInfo *ji = NULL;
260 /* Now we'll create in 'buf' the PowerPC trampoline code. This
261 is the trampoline code common to all methods */
263 code = buf = mono_global_codeman_reserve (size);
/* Allocate the frame: store-with-update pushes r1 down by STACK bytes. */
265 ppc_str_update (code, ppc_r1, -STACK, ppc_r1);
267 /* start building the MonoLMF on the stack */
268 offset = STACK - sizeof (double) * MONO_SAVED_FREGS;
/* Save the non-volatile FP registers f14-f31 into the LMF area. */
269 for (i = 14; i < 32; i++) {
270 ppc_stfd (code, i, offset, ppc_r1);
271 offset += sizeof (double);
274 * now the integer registers.
276 offset = STACK - sizeof (MonoLMF) + G_STRUCT_OFFSET (MonoLMF, iregs);
/* stmw-style multiple store of r13-r31 into MonoLMF.iregs. */
277 ppc_str_multiple (code, ppc_r13, offset, ppc_r1);
279 /* Now save the rest of the registers below the MonoLMF struct, first 14
280 * fp regs and then the 31 gregs.
282 offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double));
283 for (i = 0; i < 14; i++) {
284 ppc_stfd (code, i, offset, ppc_r1);
285 offset += sizeof (double);
287 #define GREGS_OFFSET (STACK - sizeof (MonoLMF) - (14 * sizeof (double)) - (31 * sizeof (mgreg_t)))
288 offset = GREGS_OFFSET;
/* Save r0-r30 so the handler can inspect/modify the caller's registers. */
289 for (i = 0; i < 31; i++) {
290 ppc_str (code, i, offset, ppc_r1);
291 offset += sizeof (mgreg_t);
294 /* we got here through a jump to the ctr reg, we must save the lr
295 * in the parent frame (we do it here to reduce the size of the
296 * method-specific trampoline)
298 ppc_mflr (code, ppc_r0);
299 ppc_str (code, ppc_r0, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);
301 /* ok, now we can continue with the MonoLMF setup, mostly untouched
302 * from emit_prolog in mini-ppc.c
/* AOT path: load the address of mono_get_lmf_addr from the AOT image. */
305 code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, "mono_get_lmf_addr");
306 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
/* Function-descriptor ABI: load TOC into r2 and entry point from slot 0. */
307 ppc_ldptr (code, ppc_r2, sizeof (gpointer), ppc_r11);
308 ppc_ldptr (code, ppc_r11, 0, ppc_r11);
310 ppc_mtlr (code, ppc_r11);
/* Non-AOT path: load mono_get_lmf_addr directly. */
313 ppc_load_func (code, ppc_r0, mono_get_lmf_addr);
314 ppc_mtlr (code, ppc_r0);
317 /* we build the MonoLMF structure on the stack - see mini-ppc.h
318 * The pointer to the struct is put in ppc_r11.
320 ppc_addi (code, ppc_r11, ppc_sp, STACK - sizeof (MonoLMF));
/* r3 holds the result of mono_get_lmf_addr here; link the new LMF in. */
321 ppc_stptr (code, ppc_r3, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
322 /* new_lmf->previous_lmf = *lmf_addr */
323 ppc_ldptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
324 ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
325 /* *(lmf_addr) = r11 */
326 ppc_stptr (code, ppc_r11, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
327 /* save method info (it's stored on the stack, so get it first). */
328 if ((tramp_type == MONO_TRAMPOLINE_JIT) || (tramp_type == MONO_TRAMPOLINE_JUMP)) {
329 ppc_ldr (code, ppc_r0, GREGS_OFFSET, ppc_r1);
330 ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, method), ppc_r11);
332 ppc_load (code, ppc_r0, 0);
333 ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, method), ppc_r11);
335 /* store the frame pointer of the calling method */
336 ppc_addi (code, ppc_r0, ppc_sp, STACK);
337 ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, ebp), ppc_r11);
338 /* save the IP (caller ip) */
339 if (tramp_type == MONO_TRAMPOLINE_JUMP) {
340 ppc_li (code, ppc_r0, 0);
342 ppc_ldr (code, ppc_r0, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);
344 ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, eip), ppc_r11);
347 * Now we're ready to call trampoline (mgreg_t *regs, guint8 *code, gpointer value, guint8 *tramp)
348 * Note that the last argument is unused.
350 /* Arg 1: a pointer to the registers */
351 ppc_addi (code, ppc_r3, ppc_r1, GREGS_OFFSET);
353 /* Arg 2: code (next address to the instruction that called us) */
354 if (tramp_type == MONO_TRAMPOLINE_JUMP)
355 ppc_li (code, ppc_r4, 0);
357 ppc_ldr (code, ppc_r4, STACK + PPC_RET_ADDR_OFFSET, ppc_r1);
359 /* Arg 3: trampoline argument */
360 if (tramp_type == MONO_TRAMPOLINE_GENERIC_CLASS_INIT)
361 ppc_ldr (code, ppc_r5, GREGS_OFFSET + MONO_ARCH_VTABLE_REG * sizeof (mgreg_t), ppc_r1);
363 ppc_ldr (code, ppc_r5, GREGS_OFFSET, ppc_r1);
/* Load the handler for this trampoline type (AOT vs. direct paths). */
366 code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, g_strdup_printf ("trampoline_func_%d", tramp_type));
367 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
368 ppc_ldptr (code, ppc_r2, sizeof (gpointer), ppc_r11);
369 ppc_ldptr (code, ppc_r11, 0, ppc_r11);
371 ppc_mtlr (code, ppc_r11);
374 tramp_handler = mono_get_trampoline_func (tramp_type);
375 ppc_load_func (code, ppc_r0, tramp_handler);
376 ppc_mtlr (code, ppc_r0);
380 /* OK, code address is now on r3. Move it to the counter reg
381 * so it will be ready for the final jump: this is safe since we
382 * won't do any more calls.
384 if (!MONO_TRAMPOLINE_TYPE_MUST_RETURN (tramp_type)) {
385 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
/* The handler returned a function descriptor: unpack TOC + entry. */
386 ppc_ldptr (code, ppc_r2, sizeof (gpointer), ppc_r3);
387 ppc_ldptr (code, ppc_r3, 0, ppc_r3);
389 ppc_mtctr (code, ppc_r3);
393 * Now we restore the MonoLMF (see emit_epilogue in mini-ppc.c)
394 * and the rest of the registers, so the method called will see
395 * the same state as before we executed.
396 * The pointer to MonoLMF is in ppc_r11.
398 ppc_addi (code, ppc_r11, ppc_r1, STACK - sizeof (MonoLMF));
399 /* r5 = previous_lmf */
400 ppc_ldptr (code, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
402 ppc_ldptr (code, ppc_r6, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
403 /* *(lmf_addr) = previous_lmf */
404 ppc_stptr (code, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r6);
/* Restore the non-volatile gregs and fregs saved in the LMF. */
406 ppc_ldr_multiple (code, ppc_r13, G_STRUCT_OFFSET(MonoLMF, iregs), ppc_r11);
408 for (i = 14; i < 32; i++)
409 ppc_lfd (code, i, G_STRUCT_OFFSET(MonoLMF, fregs) + ((i-14) * sizeof (gdouble)), ppc_r11);
411 /* restore the volatile registers, we skip r1, of course */
412 offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double));
413 for (i = 0; i < 14; i++) {
414 ppc_lfd (code, i, offset, ppc_r1);
415 offset += sizeof (double);
417 offset = STACK - sizeof (MonoLMF) - (14 * sizeof (double)) - (31 * sizeof (mgreg_t));
418 ppc_ldr (code, ppc_r0, offset, ppc_r1);
/* Skip r1 (stack pointer) when walking the saved greg area. */
419 offset += 2 * sizeof (mgreg_t);
/* Skip the TOC register, and skip r3 for RGCTX_LAZY_FETCH (it carries
 * the fetch result back to the caller). */
420 for (i = 2; i < 13; i++) {
421 if (i != PPC_TOC_REG && (i != 3 || tramp_type != MONO_TRAMPOLINE_RGCTX_LAZY_FETCH))
422 ppc_ldr (code, i, offset, ppc_r1);
423 offset += sizeof (mgreg_t);
426 /* Non-standard function epilogue. Instead of doing a proper
427 * return, we just jump to the compiled code.
429 /* Restore stack pointer and LR and jump to the code */
430 ppc_ldr (code, ppc_r1, 0, ppc_r1);
431 ppc_ldr (code, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_r1);
432 ppc_mtlr (code, ppc_r11);
433 if (MONO_TRAMPOLINE_TYPE_MUST_RETURN (tramp_type))
436 ppc_bcctr (code, 20, 0);
438 /* Flush instruction cache, since we've generated code */
439 mono_arch_flush_icache (buf, code - buf);
442 g_assert ((code - buf) <= size);
444 if (tramp_type == MONO_TRAMPOLINE_CLASS_INIT) {
445 /* Initialize the nullified class init trampoline */
446 nullified_class_init_trampoline = mono_ppc_create_ftnptr (mono_arch_get_nullified_class_init_trampoline (NULL));
450 *info = mono_tramp_info_create (g_strdup_printf ("generic_trampoline_%d", tramp_type), buf, code - buf, ji, unwind_ops);
/* Worst-case size of a specific trampoline: on 64-bit, two 5-instruction
 * load sequences plus mtctr and bcctr. */
455 #define TRAMPOLINE_SIZE (MONO_PPC_32_64_CASE (24, (5+5+1+1)*4))
/* Create a small, argument-carrying trampoline that loads arg1 into r0 and
 * jumps to the generic trampoline for tramp_type.
 * NOTE(review): local declarations, the short/long-branch if/else, the
 * code_len NULL-check, and the return appear elided from this chunk. */
457 mono_arch_create_specific_trampoline (gpointer arg1, MonoTrampolineType tramp_type, MonoDomain *domain, guint32 *code_len)
459 guint8 *code, *buf, *tramp;
460 guint32 short_branch;
462 tramp = mono_get_trampoline_code (tramp_type);
464 mono_domain_lock (domain);
465 code = buf = mono_domain_code_reserve_align (domain, TRAMPOLINE_SIZE, 4);
/* The branch follows the arg1 load sequence (8 bytes on 32-bit,
 * 5 instructions on 64-bit). */
466 short_branch = branch_for_target_reachable (code + MONO_PPC_32_64_CASE (8, 5*4), tramp);
467 #ifdef __mono_ppc64__
468 /* FIXME: make shorter if possible */
471 mono_domain_code_commit (domain, code, TRAMPOLINE_SIZE, 12);
473 mono_domain_unlock (domain);
/* Short path: load arg1 into r0, then a single direct branch. */
476 ppc_load_sequence (code, ppc_r0, (mgreg_t)(gsize) arg1);
477 ppc_emit32 (code, short_branch);
479 /* Prepare the jump to the generic trampoline code.*/
480 ppc_load_ptr (code, ppc_r0, tramp);
481 ppc_mtctr (code, ppc_r0);
483 /* And finally put 'arg1' in r0 and fly! */
484 ppc_load_ptr (code, ppc_r0, arg1);
485 ppc_bcctr (code, 20, 0);
488 /* Flush instruction cache, since we've generated code */
489 mono_arch_flush_icache (buf, code - buf);
491 g_assert ((code - buf) <= TRAMPOLINE_SIZE);
494 *code_len = code - buf;
/* Emit a jump from `code` to `tramp`, using a single direct branch when the
 * target is reachable, otherwise a load/mtctr/bcctr sequence.
 * NOTE(review): the if/else around the two emission paths and the return
 * of the updated code pointer are elided from this chunk. */
500 emit_trampoline_jump (guint8 *code, guint8 *tramp)
502 guint32 short_branch = branch_for_target_reachable (code, tramp);
504 /* FIXME: we can save a few bytes here by committing if the
505 short branch is possible */
507 ppc_emit32 (code, short_branch);
509 ppc_load_ptr (code, ppc_r0, tramp);
510 ppc_mtctr (code, ppc_r0);
511 ppc_bcctr (code, 20, 0);
/* Build a trampoline that walks the runtime-generic-context (rgctx) slot
 * chain for `slot` and returns the cached value in r3, falling back to the
 * C lazy-fetch path when any link or the slot itself is still NULL.
 * NOTE(review): many interior lines are elided in this chunk — local
 * declarations (tramp, code, buf, tramp_size, depth, index, i, mrgctx),
 * loop/if closing braces, the mrgctx vs. vtable if/else, the AOT vs.
 * non-AOT #else/#endif, and the returns. */
518 mono_arch_create_rgctx_lazy_fetch_trampoline (guint32 slot, MonoTrampInfo **info, gboolean aot)
520 #ifdef MONO_ARCH_VTABLE_REG
523 guint8 **rgctx_null_jumps;
528 MonoJumpInfo *ji = NULL;
529 GSList *unwind_ops = NULL;
531 mrgctx = MONO_RGCTX_SLOT_IS_MRGCTX (slot);
532 index = MONO_RGCTX_SLOT_INDEX (slot);
/* For method rgctx, the slots start after the MRGCTX header words. */
534 index += MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT / sizeof (gpointer);
/* Determine how many array hops are needed to reach the slot. */
535 for (depth = 0; ; ++depth) {
536 int size = mono_class_rgctx_get_array_size (depth, mrgctx);
538 if (index < size - 1)
543 tramp_size = MONO_PPC_32_64_CASE (40, 52) + 12 * depth;
551 code = buf = mono_global_codeman_reserve (tramp_size);
/* One slow-path jump per NULL check: entry + each depth level + slot. */
553 rgctx_null_jumps = g_malloc (sizeof (guint8*) * (depth + 2));
/* mrgctx case: the context pointer arrives directly in the first arg reg. */
557 ppc_mr (code, ppc_r4, PPC_FIRST_ARG_REG);
559 /* load rgctx ptr from vtable */
560 ppc_ldptr (code, ppc_r4, G_STRUCT_OFFSET (MonoVTable, runtime_generic_context), PPC_FIRST_ARG_REG);
561 /* is the rgctx ptr null? */
562 ppc_compare_reg_imm (code, 0, ppc_r4, 0);
563 /* if yes, jump to actual trampoline */
564 rgctx_null_jumps [0] = code;
565 ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
568 for (i = 0; i < depth; ++i) {
569 /* load ptr to next array */
570 if (mrgctx && i == 0)
571 ppc_ldptr (code, ppc_r4, MONO_SIZEOF_METHOD_RUNTIME_GENERIC_CONTEXT, ppc_r4);
573 ppc_ldptr (code, ppc_r4, 0, ppc_r4);
574 /* is the ptr null? */
575 ppc_compare_reg_imm (code, 0, ppc_r4, 0);
576 /* if yes, jump to actual trampoline */
577 rgctx_null_jumps [i + 1] = code;
578 ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
/* +1 skips the "next array" link stored at element 0. */
582 ppc_ldptr (code, ppc_r4, sizeof (gpointer) * (index + 1), ppc_r4);
583 /* is the slot null? */
584 ppc_compare_reg_imm (code, 0, ppc_r4, 0);
585 /* if yes, jump to actual trampoline */
586 rgctx_null_jumps [depth + 1] = code;
587 ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
588 /* otherwise return r4 */
589 /* FIXME: if we use r3 as the work register we can avoid this copy */
590 ppc_mr (code, ppc_r3, ppc_r4);
/* Slow path: backpatch every NULL-check branch to land here. */
593 for (i = mrgctx ? 1 : 0; i <= depth + 1; ++i)
594 ppc_patch (rgctx_null_jumps [i], code);
596 g_free (rgctx_null_jumps);
598 /* move the rgctx pointer to the VTABLE register */
599 ppc_mr (code, MONO_ARCH_VTABLE_REG, ppc_r3);
/* AOT: tail-jump to the lazy-fetch specific trampoline from the image. */
602 code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, g_strdup_printf ("specific_trampoline_lazy_fetch_%u", slot));
603 /* Branch to the trampoline */
604 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
605 ppc_ldptr (code, ppc_r11, 0, ppc_r11);
607 ppc_mtctr (code, ppc_r11);
608 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
610 tramp = mono_arch_create_specific_trampoline (GUINT_TO_POINTER (slot),
611 MONO_TRAMPOLINE_RGCTX_LAZY_FETCH, mono_get_root_domain (), NULL);
613 /* jump to the actual trampoline */
614 code = emit_trampoline_jump (code, tramp);
617 mono_arch_flush_icache (buf, code - buf);
619 g_assert (code - buf <= tramp_size);
622 *info = mono_tramp_info_create (g_strdup_printf ("rgctx_fetch_trampoline_%u", slot), buf, code - buf, ji, unwind_ops);
/* Reached only when MONO_ARCH_VTABLE_REG is not defined for this target. */
626 g_assert_not_reached ();
/* Build the generic class-init trampoline: test the vtable's `initialized`
 * bit and return immediately if set, otherwise fall through to the
 * class-init specific trampoline.
 * NOTE(review): local declarations (code, buf, tramp, jump), the byte_offset
 * initialization guard, the fast-path blr, the AOT #else/#endif, and the
 * return are elided from this chunk. */
631 mono_arch_create_generic_class_init_trampoline (MonoTrampInfo **info, gboolean aot)
635 static int byte_offset = -1;
636 static guint8 bitmask;
639 GSList *unwind_ops = NULL;
640 MonoJumpInfo *ji = NULL;
642 tramp_size = MONO_PPC_32_64_CASE (32, 44);
646 code = buf = mono_global_codeman_reserve (tramp_size);
/* Locate the `initialized` bitfield inside MonoVTable once. */
649 mono_marshal_find_bitfield_offset (MonoVTable, initialized, &byte_offset, &bitmask);
/* Test the initialized bit; andi. sets CR0 for the conditional branch. */
651 ppc_lbz (code, ppc_r4, byte_offset, MONO_ARCH_VTABLE_REG);
652 ppc_andid (code, ppc_r4, ppc_r4, bitmask);
654 ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
/* Backpatch the not-initialized branch to the slow path below. */
658 ppc_patch (jump, code);
661 code = mono_arch_emit_load_aotconst (buf, code, &ji, MONO_PATCH_INFO_JIT_ICALL_ADDR, "specific_trampoline_generic_class_init");
662 /* Branch to the trampoline */
663 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
664 ppc_ldptr (code, ppc_r11, 0, ppc_r11);
666 ppc_mtctr (code, ppc_r11);
667 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
669 tramp = mono_arch_create_specific_trampoline (NULL, MONO_TRAMPOLINE_GENERIC_CLASS_INIT,
670 mono_get_root_domain (), NULL);
672 /* jump to the actual trampoline */
673 code = emit_trampoline_jump (code, tramp);
676 mono_arch_flush_icache (buf, code - buf);
678 g_assert (code - buf <= tramp_size);
681 *info = mono_tramp_info_create (g_strdup_printf ("generic_class_init_trampoline"), buf, code - buf, ji, unwind_ops);
/* Emit the no-op trampoline used to replace class-init call sites after
 * initialization.  NOTE(review): the emitted instruction(s) — presumably
 * just a blr — and the return are elided from this chunk. */
687 mono_arch_get_nullified_class_init_trampoline (MonoTrampInfo **info)
690 guint32 tramp_size = 64;
692 code = buf = mono_global_codeman_reserve (tramp_size);
695 mono_arch_flush_icache (buf, code - buf);
697 g_assert (code - buf <= tramp_size);
700 *info = mono_tramp_info_create (g_strdup_printf ("nullified_class_init_trampoline"), buf, code - buf, NULL, NULL);
/* Decode the branch instruction preceding `code` and return its target, or
 * (presumably) NULL when it is not a bl.  NOTE(review): the return
 * statements are elided from this chunk. */
706 mono_arch_get_call_target (guint8 *code)
709 guint32 ins = ((guint32*)(gpointer)code) [-1];
/* Opcode 18 with LK=1 and AA=0: a relative "bl" instruction. */
711 if ((ins >> 26 == 18) && ((ins & 1) == 1) && ((ins & 2) == 0)) {
/* Arithmetic shift sign-extends the 24-bit displacement field. */
712 gint32 disp = (((gint32)ins) >> 2) & 0xffffff;
713 guint8 *target = code - 4 + (disp * 4);
/* Read the PLT info offset word stored after the PLT entry's code; the
 * entry is longer under the function-descriptor ABI, hence the different
 * word index. */
722 mono_arch_get_plt_info_offset (guint8 *plt_entry, mgreg_t *regs, guint8 *code)
724 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
725 return ((guint32*)plt_entry) [8];
727 return ((guint32*)plt_entry) [6];