2 * mini-ppc.c: PowerPC backend for the Mono code generator
5 * Paolo Molaro (lupus@ximian.com)
6 * Dietmar Maurer (dietmar@ximian.com)
7 * Andreas Faerber <andreas.faerber@web.de>
9 * (C) 2003 Ximian, Inc.
10 * (C) 2007-2008 Andreas Faerber
15 #include <mono/metadata/appdomain.h>
16 #include <mono/metadata/debug-helpers.h>
19 #include "cpu-ppc64.h"
23 #include <sys/sysctl.h>
26 //#define DEBUG_PATCHING
28 #define FORCE_INDIR_CALL 1
/* File-scope backend state.
 * NOTE(review): this listing is a non-contiguous excerpt; original line
 * numbers are fused into each line and intervening lines are not visible. */
38 /* This mutex protects architecture specific caches */
39 #define mono_mini_arch_lock() EnterCriticalSection (&mini_arch_mutex)
40 #define mono_mini_arch_unlock() LeaveCriticalSection (&mini_arch_mutex)
41 static CRITICAL_SECTION mini_arch_mutex;
/* offset of the saved esp slot in the frame; published for the EH code */
43 int mono_exc_esp_offset = 0;
/* TLS access strategy; starts as TLS_MODE_DETECT — presumably resolved at
 * runtime before emit_tls_access is used (resolution code not visible here). */
44 static int tls_mode = TLS_MODE_DETECT;
/* pthread key / fast-TLS slot numbers, -1 = not yet discovered */
45 static int lmf_pthread_key = -1;
46 static int monothread_key = -1;
47 static int monodomain_key = -1;
/* Map a pthread key to the two offsets needed to reach its slot from the
 * linuxthreads descriptor (register r2): returns the first-level offset and
 * stores the second-level offset in *offset2.
 * NOTE(review): the constant 284 looks like the linuxthreads descriptor
 * header size — confirm against the libc in use; excerpt omits the lines
 * computing idx1/idx2. */
50 offsets_from_pthread_key (guint32 key, int *offset2)
54 	*offset2 = idx2 * sizeof (gpointer);
55 	return 284 + idx1 * sizeof (gpointer);
/* TLS-access code emitters. emit_linuxthreads_tls does a double indirection
 * from r2 (linuxthreads thread descriptor); emit_darwing5_tls reads SPR 104
 * (Darwin/G5 fast thread pointer) then loads at a fixed slot offset;
 * emit_tls_access dispatches on the detected tls_mode.
 * MONO_EMIT_NEW_LOAD_R8 builds an OP_R8CONST instruction loading a double
 * from ADDR. NOTE(review): continuation lines of these macros are missing
 * from this excerpt — do not edit the bodies without the full file. */
58 #define emit_linuxthreads_tls(code,dreg,key) do {\
60 		off1 = offsets_from_pthread_key ((key), &off2); \
61 		ppc_load_reg ((code), (dreg), off1, ppc_r2); \
62 		ppc_load_reg ((code), (dreg), off2, (dreg)); \
65 #define emit_darwing5_tls(code,dreg,key) do {\
66 		int off1 = 0x48 + key * sizeof (gpointer); \
67 		ppc_mfspr ((code), (dreg), 104); \
68 		ppc_load_reg ((code), (dreg), off1, (dreg)); \
71 #define emit_tls_access(code,dreg,key) do { \
73 	case TLS_MODE_LTHREADS: emit_linuxthreads_tls(code,dreg,key); break; \
74 	case TLS_MODE_DARWIN_G5: emit_darwing5_tls(code,dreg,key); break; \
75 	default: g_assert_not_reached (); \
79 #define MONO_EMIT_NEW_LOAD_R8(cfg,dr,addr) do { \
81 		MONO_INST_NEW ((cfg), (inst), OP_R8CONST); \
82 		inst->type = STACK_R8; \
84 		inst->inst_p0 = (void*)(addr); \
85 		mono_bblock_add_inst (cfg->cbb, inst); \
/* Return the printable name of integer register REG (0..31); "sp" for r1.
 * NOTE(review): excerpt omits the r30/r31 table entries and the fallback
 * return for out-of-range REG. */
89 mono_arch_regname (int reg) {
90 	static const char rnames[][4] = {
91 		"r0", "sp", "r2", "r3", "r4",
92 		"r5", "r6", "r7", "r8", "r9",
93 		"r10", "r11", "r12", "r13", "r14",
94 		"r15", "r16", "r17", "r18", "r19",
95 		"r20", "r21", "r22", "r23", "r24",
96 		"r25", "r26", "r27", "r28", "r29",
99 	if (reg >= 0 && reg < 32)
/* Return the printable name of floating-point register REG (0..31).
 * NOTE(review): excerpt omits the f30/f31 entries and the fallback return. */
105 mono_arch_fregname (int reg) {
106 	static const char rnames[][4] = {
107 		"f0", "f1", "f2", "f3", "f4",
108 		"f5", "f6", "f7", "f8", "f9",
109 		"f10", "f11", "f12", "f13", "f14",
110 		"f15", "f16", "f17", "f18", "f19",
111 		"f20", "f21", "f22", "f23", "f24",
112 		"f25", "f26", "f27", "f28", "f29",
115 	if (reg >= 0 && reg < 32)
/* Emit native code copying SIZE bytes from sreg+soffset to dreg+doffset.
 * Large copies (> 5 pointers) use a CTR-counted 8-byte load/store-with-update
 * loop; the tail (and small copies) fall through to the word/half/byte moves
 * below. Clobbers r0, r11, r12 as documented.
 * NOTE(review): excerpt is non-contiguous — the size>>3 vs remainder handling
 * between the loop and the tail moves is not fully visible. */
120 /* this function overwrites r0, r11, r12 */
122 emit_memcpy (guint8 *code, int size, int dreg, int doffset, int sreg, int soffset)
124 	/* unrolled, use the counter in big */
125 	if (size > sizeof (gpointer) * 5) {
126 		int shifted = size >> 3;
127 		guint8 *copy_loop_start, *copy_loop_jump;
129 		ppc_load (code, ppc_r0, shifted);
130 		ppc_mtctr (code, ppc_r0);
		/* loop below assumes the source is already in r11 */
131 		g_assert (sreg == ppc_r11);
		/* pre-bias pointers one word back for the update-form load/store */
132 		ppc_addi (code, ppc_r12, dreg, (doffset - sizeof (gpointer)));
133 		ppc_addi (code, ppc_r11, sreg, (soffset - sizeof (gpointer)));
134 		copy_loop_start = code;
135 		ppc_load_reg_update (code, ppc_r0, 8, ppc_r11);
136 		ppc_store_reg_update (code, ppc_r0, 8, ppc_r12);
137 		copy_loop_jump = code;
		/* branch back while CTR != 0; target patched just below */
138 		ppc_bc (code, PPC_BR_DEC_CTR_NONZERO, 0, 0);
139 		ppc_patch (copy_loop_jump, copy_loop_start);
141 		doffset = soffset = 0;
	/* tail copies: pointer-size, then 4-, 2- and 1-byte moves via r0 */
145 		ppc_load_reg (code, ppc_r0, soffset, sreg);
146 		ppc_store_reg (code, ppc_r0, doffset, dreg);
152 		ppc_lwz (code, ppc_r0, soffset, sreg);
153 		ppc_stw (code, ppc_r0, doffset, dreg);
159 		ppc_lhz (code, ppc_r0, soffset, sreg);
160 		ppc_sth (code, ppc_r0, doffset, dreg);
166 		ppc_lbz (code, ppc_r0, soffset, sreg);
167 		ppc_stb (code, ppc_r0, doffset, dreg);
176  * mono_arch_get_argument_info:
177  * @csig:  a method signature
178  * @param_count: the number of parameters to consider
179  * @arg_info: an array to store the result infos
181  * Gathers information on parameters such as size, alignment and
182  * padding. arg_info should be large enought to hold param_count + 1 entries.
184  * Returns the size of the activation frame.
/* Not implemented on this backend: any caller reaching it is a bug. */
187 mono_arch_get_argument_info (MonoMethodSignature *csig, int param_count, MonoJitArgumentInfo *arg_info)
189 	g_assert_not_reached ();
/* Return TRUE iff the five instructions at SEQ form the 64-bit constant-load
 * idiom lis/ori/sldi/oris/ori (primary opcodes 15/24/30/25/24). */
194 is_load_sequence (guint32 *seq)
196 	return ppc_opcode (seq [0]) == 15 && /* lis */
197 		ppc_opcode (seq [1]) == 24 && /* ori */
198 		ppc_opcode (seq [2]) == 30 && /* sldi */
199 		ppc_opcode (seq [3]) == 25 && /* oris */
200 		ppc_opcode (seq [4]) == 24; /* ori */
/* extract the destination register / 16-bit displacement of a D-form load */
203 #define ppc_load_get_dest(l)	(((l)>>21) & 0x1f)
204 #define ppc_load_get_off(l)	((gint16)((l) & 0xffff))
206 /* code must point to the blrl */
/* Decide whether the call ending at CODE (a blrl/blr/bctrl — asserted) was
 * emitted as a direct-address call sequence rather than an indirect vtable
 * call, by pattern-matching the instructions preceding it. */
208 mono_ppc_is_direct_call_sequence (guint32 *code)
210 	g_assert(*code == 0x4e800021 || *code == 0x4e800020 || *code == 0x4e800420);
212 	/* the thunk-less direct call sequence: lis/ori/sldi/oris/ori/mtlr/blrl */
213 	if (ppc_opcode (code [-1]) == 31) { /* mtlr */
		/* function-descriptor variant: two ld insns reload entry + TOC */
214 		if (ppc_opcode (code [-2]) == 58 && ppc_opcode (code [-3]) == 58) { /* ld/ld */
215 			if (!is_load_sequence (&code [-8]))
217 			/* one of the loads must be "ld r2,8(rX)" */
218 			return (ppc_load_get_dest (code [-2]) == ppc_r2 && ppc_load_get_off (code [-2]) == 8) ||
219 				(ppc_load_get_dest (code [-3]) == ppc_r2 && ppc_load_get_off (code [-3]) == 8);
221 		if (ppc_opcode (code [-2]) == 24 && ppc_opcode (code [-3]) == 31) /* mr/nop */
222 			return is_load_sequence (&code [-8]);
224 	return is_load_sequence (&code [-6]);
/* Given CODE_PTR just past a virtual-call 'blrl', recover which register held
 * the vtable pointer and the displacement of the slot that was loaded, by
 * walking backwards over the emitted instructions. REGS points at the saved
 * register area (an LMF-shaped blob, see below). Returns the vtable address
 * via the saved register value; *displacement gets the slot offset.
 * NOTE(review): excerpt is non-contiguous — loop structure and early returns
 * between these lines are not visible. */
230 mono_arch_get_vcall_slot (guint8 *code_ptr, gpointer *regs, int *displacement)
234 	guint32* code = (guint32*)code_ptr;
238 	/* This is the 'blrl' instruction */
241 	/* Sanity check: instruction must be 'blrl' */
242 	if (*code != 0x4e800021)
	/* direct calls have no vtable slot to report */
245 	if (mono_ppc_is_direct_call_sequence (code))
248 	/* FIXME: more sanity checks here */
249 	/* OK, we're now at the 'blrl' instruction. Now walk backwards
250 	till we get to a 'mtlr rA' */
	/* NOTE(review): ANDing with 0x7c0803a6 and comparing to the same value is
	 * not a proper field mask for mtlr — it can match unrelated encodings.
	 * Suspicious, but surrounding (missing) lines may compensate; confirm
	 * against the full file before touching. */
252 		if((*code & 0x7c0803a6) == 0x7c0803a6) {
254 			/* Here we are: we reached the 'mtlr rA'.
255 			Extract the register from the instruction */
256 			reg = (*code & 0x03e00000) >> 21;
258 	/* ok, this is a lwz reg, offset (vtreg)
259 	 * it is emitted with:
260 	 * ppc_emit32 (c, (32 << 26) | ((D) << 21) | ((a) << 16) | (guint16)(d))
	/* displacement and base register of the slot load */
262 	soff = (*code & 0xffff);
264 	reg = (*code >> 16) & 0x1f;
265 	g_assert (reg != ppc_r1);
266 	/*g_print ("patching reg is %d\n", reg);*/
		/* regs actually points into an LMF: skip 14 doubles + 13 gulongs to
		 * reach the saved integer registers r13..r31 */
268 		MonoLMF *lmf = (MonoLMF*)((char*)regs + (14 * sizeof (double)) + (13 * sizeof (gulong)));
269 		/* saved in the MonoLMF structure */
270 		o = (gpointer)lmf->iregs [reg - 13];
277 	*displacement = offset;
/* Convenience wrapper: return the address of the vtable slot used by the
 * virtual call at CODE, i.e. vtable base + displacement from
 * mono_arch_get_vcall_slot (NULL-vt handling is in the omitted lines). */
282 mono_arch_get_vcall_slot_addr (guint8 *code, gpointer *regs)
286 	vt = mono_arch_get_vcall_slot (code, regs, &displacement);
289 	return (gpointer*)((char*)vt + displacement);
/* Max parameter count for the cached no-target delegate invoke stubs. */
292 #define MAX_ARCH_DELEGATE_PARAMS 7
/* Build (and cache) a native trampoline implementing Delegate.Invoke.
 * has_target variant: load method_ptr, replace 'this' with delegate->target,
 * jump via CTR. No-target variant: load method_ptr, slide every argument down
 * one register (r4->r3, ...), jump via CTR. Caches are guarded by the
 * mini-arch lock. NOTE(review): excerpt is non-contiguous — early-return /
 * cache-hit paths are partly missing. */
295 mono_arch_get_delegate_invoke_impl (MonoMethodSignature *sig, gboolean has_target)
297 	guint8 *code, *start;
299 	/* FIXME: Support more cases */
300 	if (MONO_TYPE_ISSTRUCT (sig->ret))
304 		static guint8* cached = NULL;
305 		mono_mini_arch_lock ();
307 			mono_mini_arch_unlock ();
311 		start = code = mono_global_codeman_reserve (20);
313 		/* Replace the this argument with the target */
314 		ppc_load_reg (code, ppc_r0, G_STRUCT_OFFSET (MonoDelegate, method_ptr), ppc_r3);
315 		/* it's a function descriptor */
316 		ppc_ldx (code, ppc_r0, 0, ppc_r0);
317 		ppc_mtctr (code, ppc_r0);
318 		ppc_load_reg (code, ppc_r3, G_STRUCT_OFFSET (MonoDelegate, target), ppc_r3);
319 		/* FIXME: this might be a function descriptor */
320 		ppc_bcctr (code, PPC_BR_ALWAYS, 0);
322 		g_assert ((code - start) <= 20);
324 		mono_arch_flush_icache (start, 20);
		/* NOTE(review): 16 here vs 20 above — likely just the reporting size,
		 * but confirm against the full file */
325 		mono_ppc_emitted (start, 16, "delegate invoke target has_target 1");
327 		mono_mini_arch_unlock ();
330 		static guint8* cache [MAX_ARCH_DELEGATE_PARAMS + 1] = {NULL};
333 		if (sig->param_count > MAX_ARCH_DELEGATE_PARAMS)
		/* all params must fit in integer registers for the slide trick */
335 		for (i = 0; i < sig->param_count; ++i)
336 			if (!mono_is_regsize_var (sig->params [i]))
339 		mono_mini_arch_lock ();
340 		code = cache [sig->param_count];
342 			mono_mini_arch_unlock ();
346 		size = 16 + sig->param_count * 4;
347 		start = code = mono_global_codeman_reserve (size);
349 		ppc_load_reg (code, ppc_r0, G_STRUCT_OFFSET (MonoDelegate, method_ptr), ppc_r3);
350 		/* it's a function descriptor */
351 		ppc_ldx (code, ppc_r0, 0, ppc_r0);
352 		ppc_mtctr (code, ppc_r0);
353 		/* slide down the arguments */
354 		for (i = 0; i < sig->param_count; ++i) {
355 			ppc_mr (code, (ppc_r3 + i), (ppc_r3 + i + 1));
357 		/* FIXME: this might be a function descriptor */
358 		ppc_bcctr (code, PPC_BR_ALWAYS, 0);
360 		g_assert ((code - start) <= size);
362 		mono_arch_flush_icache (start, size);
363 		mono_ppc_emitted (start, size, "delegate invoke target has_target 0 params %d", sig->param_count);
364 		cache [sig->param_count] = start;
365 		mono_mini_arch_unlock ();
/* Recover the 'this' argument of the call at CODE from the saved registers:
 * normally r3, but r4 when a struct-return pointer occupies r3. */
372 mono_arch_get_this_arg_from_call (MonoGenericSharingContext *gsctx, MonoMethodSignature *sig, gssize *regs, guint8 *code)
374 	/* FIXME: handle returning a struct */
375 	if (MONO_TYPE_ISSTRUCT (sig->ret))
376 		return (gpointer)regs [ppc_r4];
377 	return (gpointer)regs [ppc_r3];
381  * Initialize the cpu to execute managed code.
384 mono_arch_cpu_init (void)
389  * Initialize architecture specific code.
/* Sets up the backend-wide lock used by the delegate-stub caches. */
392 mono_arch_init (void)
394 	InitializeCriticalSection (&mini_arch_mutex);
398  * Cleanup architecture specific code.
401 mono_arch_cleanup (void)
403 	DeleteCriticalSection (&mini_arch_mutex);
407  * This function returns the optimizations supported on this cpu.
/* (name typo "optimizazions" is the established API spelling — keep it) */
410 mono_arch_cpu_optimizazions (guint32 *exclude_mask)
414 	/* no ppc-specific optimizations yet */
/* Return TRUE when T (after unwrapping byref/enum wrappers) fits in a single
 * integer register and may be globally register-allocated.
 * NOTE(review): excerpt omits several case labels and the return statements
 * between the ones shown. */
420 is_regsize_var (MonoType *t) {
423 	t = mini_type_get_underlying_type (NULL, t);
432 	case MONO_TYPE_FNPTR:
434 	case MONO_TYPE_OBJECT:
435 	case MONO_TYPE_STRING:
436 	case MONO_TYPE_CLASS:
437 	case MONO_TYPE_SZARRAY:
438 	case MONO_TYPE_ARRAY:
440 	case MONO_TYPE_GENERICINST:
441 		if (!mono_type_generic_inst_is_valuetype (t))
444 	case MONO_TYPE_VALUETYPE:
/* Collect the variables of CFG eligible for global integer register
 * allocation: live non-volatile, non-indirect locals/args of register size,
 * returned as a list sorted by mono_varlist_insert_sorted. */
451 mono_arch_get_allocatable_int_vars (MonoCompile *cfg)
456 	for (i = 0; i < cfg->num_varinfo; i++) {
457 		MonoInst *ins = cfg->varinfo [i];
458 		MonoMethodVar *vmv = MONO_VARINFO (cfg, i);
		/* unused or dead variables are not interesting */
461 		if (vmv->range.first_use.abs_pos >= vmv->range.last_use.abs_pos)
464 		if (ins->flags & (MONO_INST_VOLATILE|MONO_INST_INDIRECT) || (ins->opcode != OP_LOCAL && ins->opcode != OP_ARG))
467 		/* we can only allocate 32 bit values */
468 		if (is_regsize_var (ins->inst_vtype)) {
469 			g_assert (MONO_VARINFO (cfg, i)->reg == -1);
470 			g_assert (i == vmv->idx);
471 			vars = mono_varlist_insert_sorted (cfg, vars, vmv, FALSE);
/* Return the list of callee-saved integer registers (r14..top) available for
 * global allocation; r13 is reserved for the system per the PPC EABI, and the
 * frame register is excluded when it is not the stack pointer. */
479 mono_arch_get_global_int_regs (MonoCompile *cfg)
483 	if (cfg->frame_reg != ppc_sp)
485 	/* ppc_r13 is used by the system on PPC EABI */
486 	for (i = 14; i < top; ++i)
487 		regs = g_list_prepend (regs, GUINT_TO_POINTER (i));
493  * mono_arch_regalloc_cost:
495  * Return the cost, in number of memory references, of the action of
496  * allocating the variable VMV into a register during global register
500 mono_arch_regalloc_cost (MonoCompile *cfg, MonoMethodVar *vmv)
/* Make SIZE bytes of freshly generated code at CODE visible to the
 * instruction fetcher: discover the cache line size once (sysctl on Darwin,
 * /proc/self/auxv AT_DCACHEBSIZE(19) on Linux >= 2.6, compile-time default
 * elsewhere), then flush the data cache and invalidate the icache line by
 * line. NOTE(review): excerpt omits the #if/#endif structure and the fclose
 * of f — assumed present in the full file. */
512 mono_arch_flush_icache (guint8 *code, gint size)
515 	guint8 *endp, *start;
516 	static int cachelinesize = 0;
517 	static int cachelineinc = 16;
519 	if (!cachelinesize) {
524 		mib [1] = HW_CACHELINE;
525 		len = sizeof (cachelinesize);
526 		if (sysctl(mib, 2, &cachelinesize, (size_t*)&len, NULL, 0) == -1) {
530 		cachelineinc = cachelinesize;
531 		/*g_print ("setting cl size to %d\n", cachelinesize);*/
533 #elif defined(__linux__)
534 		/* sadly this will work only with 2.6 kernels... */
535 		FILE* f = fopen ("/proc/self/auxv", "rb");
538 			while (fread (&vec, sizeof (vec), 1, f) == 1) {
539 				if (vec.type == 19) {
540 					cachelinesize = vec.value;
548 #elif defined(G_COMPILER_CODEWARRIOR)
552 #warning Need a way to get cache line size
	/* round the start address down to a cache-line boundary */
558 	start = (guint8*)((gsize)start & ~(cachelinesize - 1));
559 	/* use dcbf for smp support, later optimize for UP, see pem._64bit.d20030611.pdf page 211 */
560 #if defined(G_COMPILER_CODEWARRIOR)
562 	for (p = start; p < endp; p += cachelineinc) {
566 	for (p = start; p < endp; p += cachelineinc) {
572 	for (p = start; p < endp; p += cachelineinc) {
	/* flush dirty data-cache lines to memory ... */
584 	for (p = start; p < endp; p += cachelineinc) {
585 		asm ("dcbf 0,%0;" : : "r"(p) : "memory");
588 	for (p = start; p < endp; p += cachelineinc) {
589 		asm ("dcbst 0,%0;" : : "r"(p) : "memory");
	/* ... then invalidate the corresponding instruction-cache lines */
594 	for (p = start; p < endp; p += cachelineinc) {
595 		asm ("icbi 0,%0; sync;" : : "r"(p) : "memory");
/* No register windows on PPC — intentionally a no-op. */
603 mono_arch_flush_register_windows (void)
/* Calling-convention knobs: args always get a stack slot, FP args also
 * consume an integer register slot. NOTE(review): the two identical
 * definitions sit in different (invisible) #if branches of this excerpt. */
608 #define ALWAYS_ON_STACK(s) s
609 #define FP_ALSO_IN_REG(s) s
611 #define ALWAYS_ON_STACK(s) s
612 #define FP_ALSO_IN_REG(s) s
613 #define ALIGN_DOUBLES
/* Per-argument placement record (fields of ArgInfo; the struct header and
 * remaining fields are outside this excerpt). */
626 	guint32 vtsize; /* in param area */
628 	guint8  regtype : 4; /* 0 general, 1 basereg, 2 floating point register, see RegType* */
629 	guint8  size    : 4; /* 1, 2, 4, 8, or regs used by RegTypeStructByVal */
630 	guint8  bytes   : 4; /* size in bytes - only valid for
631 				RegTypeStructByVal if the struct fits
632 				in one word, otherwise it's 0*/
/* Assign the next general-purpose argument slot: a register while any of
 * r3..r10 remain (PPC_NUM_REG_ARGS), otherwise a caller-frame stack slot at
 * PPC_STACK_PARAM_OFFSET + *stack_size. Advances *gr / *stack_size.
 * NOTE(review): the register-assignment branch and the use of 'simple' are
 * in lines omitted from this excerpt. */
647 add_general (guint *gr, guint *stack_size, ArgInfo *ainfo, gboolean simple)
651 		if (*gr >= 3 + PPC_NUM_REG_ARGS) {
652 			ainfo->offset = PPC_STACK_PARAM_OFFSET + *stack_size;
653 			ainfo->reg = ppc_sp; /* in the caller */
654 			ainfo->regtype = RegTypeBase;
655 			*stack_size += sizeof (gpointer);
657 			ALWAYS_ON_STACK (*stack_size += sizeof (gpointer));
/* Return TRUE when KLASS has exactly one instance field and it is an R4 or
 * R8 — such structs are passed like plain floats (the single-field check and
 * final return are in omitted lines). */
664 has_only_a_r48_field (MonoClass *klass)
668 	gboolean have_field = FALSE;
670 	while ((f = mono_class_get_fields (klass, &iter))) {
671 		if (!(f->type->attrs & FIELD_ATTRIBUTE_STATIC)) {
674 			if (!f->type->byref && (f->type->type == MONO_TYPE_R4 || f->type->type == MONO_TYPE_R8))
/* Compute the calling-convention layout for SIG: for every argument and for
 * the return value, decide register vs stack placement (general regs from
 * PPC_FIRST_ARG_REG, FP regs from PPC_FIRST_FPARG_REG, overflow into the
 * caller's parameter area) and record it in a freshly g_malloc0'ed CallInfo.
 * The caller owns and must g_free the returned CallInfo.
 * NOTE(review): this excerpt is heavily non-contiguous (else branches, case
 * labels and closing braces are missing between most lines) — treat the
 * structure below as indicative, not complete. */
684 calculate_sizes (MonoMethodSignature *sig, gboolean is_pinvoke)
687 	int n = sig->hasthis + sig->param_count;
689 	guint32 stack_size = 0;
690 	CallInfo *cinfo = g_malloc0 (sizeof (CallInfo) + sizeof (ArgInfo) * n);
692 	fr = PPC_FIRST_FPARG_REG;
693 	gr = PPC_FIRST_ARG_REG;
695 	/* FIXME: handle returning a struct */
696 	if (MONO_TYPE_ISSTRUCT (sig->ret)) {
		/* hidden return-buffer pointer consumes the first argument reg */
697 		add_general (&gr, &stack_size, &cinfo->ret, TRUE);
698 		cinfo->struct_ret = PPC_FIRST_ARG_REG;
703 		add_general (&gr, &stack_size, cinfo->args + n, TRUE);
706 	DEBUG(printf("params: %d\n", sig->param_count));
707 	for (i = 0; i < sig->param_count; ++i) {
708 		if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos)) {
709 			/* Prevent implicit arguments and sig_cookie from
710 			   being passed in registers */
711 			gr = PPC_LAST_ARG_REG + 1;
712 			/* FIXME: don't we have to set fr, too? */
713 			/* Emit the signature cookie just before the implicit arguments */
714 			add_general (&gr, &stack_size, &cinfo->sig_cookie, TRUE);
716 		DEBUG(printf("param %d: ", i));
717 		if (sig->params [i]->byref) {
718 			DEBUG(printf("byref\n"));
719 			add_general (&gr, &stack_size, cinfo->args + n, TRUE);
723 		simpletype = mini_type_get_underlying_type (NULL, sig->params [i])->type;
724 		switch (simpletype) {
725 		case MONO_TYPE_BOOLEAN:
728 			cinfo->args [n].size = 1;
729 			add_general (&gr, &stack_size, cinfo->args + n, TRUE);
735 			cinfo->args [n].size = 2;
736 			add_general (&gr, &stack_size, cinfo->args + n, TRUE);
741 			cinfo->args [n].size = 4;
742 			add_general (&gr, &stack_size, cinfo->args + n, TRUE);
748 		case MONO_TYPE_FNPTR:
749 		case MONO_TYPE_CLASS:
750 		case MONO_TYPE_OBJECT:
751 		case MONO_TYPE_STRING:
752 		case MONO_TYPE_SZARRAY:
753 		case MONO_TYPE_ARRAY:
754 			cinfo->args [n].size = sizeof (gpointer);
755 			add_general (&gr, &stack_size, cinfo->args + n, TRUE);
758 		case MONO_TYPE_GENERICINST:
759 			if (!mono_type_generic_inst_is_valuetype (sig->params [i])) {
760 				cinfo->args [n].size = sizeof (gpointer);
761 				add_general (&gr, &stack_size, cinfo->args + n, TRUE);
766 		case MONO_TYPE_VALUETYPE: {
769 			klass = mono_class_from_mono_type (sig->params [i]);
771 				size = mono_class_native_size (klass, NULL);
773 				size = mono_class_value_size (klass, NULL);
			/* single-float/double structs travel in an FP register */
774 			if ((size == 4 || size == 8) && has_only_a_r48_field (klass)) {
775 				cinfo->args [n].size = size;
777 				/* It was 7, now it is 8 in LinuxPPC */
778 				if (fr <= PPC_LAST_FPARG_REG) {
779 					cinfo->args [n].regtype = RegTypeFP;
780 					cinfo->args [n].reg = fr;
782 						FP_ALSO_IN_REG (gr ++);
784 					FP_ALSO_IN_REG (gr ++);
785 					ALWAYS_ON_STACK (stack_size += size);
787 					cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
788 					cinfo->args [n].regtype = RegTypeBase;
789 					cinfo->args [n].reg = ppc_sp; /* in the caller*/
795 			DEBUG(printf ("load %d bytes struct\n",
796 				      mono_class_native_size (sig->params [i]->data.klass, NULL)));
797 #if PPC_PASS_STRUCTS_BY_VALUE
799 				int align_size = size;
801 				int rest = PPC_LAST_ARG_REG - gr + 1;
803 				align_size += (sizeof (gpointer) - 1);
804 				align_size &= ~(sizeof (gpointer) - 1);
805 				nwords = (align_size + sizeof (gpointer) -1 ) / sizeof (gpointer);
806 				n_in_regs = MIN (rest, nwords);
807 				cinfo->args [n].regtype = RegTypeStructByVal;
808 				if (gr > PPC_LAST_ARG_REG
810 					/* FIXME: check this */
811 						|| (size >= 3 && size % 4 != 0)
814 					cinfo->args [n].size = 0;
815 					cinfo->args [n].vtsize = nwords;
817 					cinfo->args [n].size = n_in_regs;
818 					cinfo->args [n].vtsize = nwords - n_in_regs;
819 					cinfo->args [n].reg = gr;
821 				if (nwords == 1 && is_pinvoke)
822 					cinfo->args [n].bytes = size;
824 					cinfo->args [n].bytes = 0;
826 				cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
827 				/*g_print ("offset for arg %d at %d\n", n, PPC_STACK_PARAM_OFFSET + stack_size);*/
828 				stack_size += nwords * sizeof (gpointer);
831 			add_general (&gr, &stack_size, cinfo->args + n, TRUE);
832 			cinfo->args [n].regtype = RegTypeStructByAddr;
833 			cinfo->args [n].vtsize = size;
838 		case MONO_TYPE_TYPEDBYREF: {
839 			int size = sizeof (MonoTypedRef);
840 			/* keep in sync or merge with the valuetype case */
841 #if PPC_PASS_STRUCTS_BY_VALUE
843 				int nwords = (size + sizeof (gpointer) -1 ) / sizeof (gpointer);
844 				cinfo->args [n].regtype = RegTypeStructByVal;
845 				if (gr <= PPC_LAST_ARG_REG) {
846 					int rest = PPC_LAST_ARG_REG - gr + 1;
847 					int n_in_regs = rest >= nwords? nwords: rest;
848 					cinfo->args [n].size = n_in_regs;
849 					cinfo->args [n].vtsize = nwords - n_in_regs;
850 					cinfo->args [n].reg = gr;
853 					cinfo->args [n].size = 0;
854 					cinfo->args [n].vtsize = nwords;
856 				if (nwords == 1 && is_pinvoke)
857 					cinfo->args [n].bytes = size;
859 					cinfo->args [n].bytes = 0;
860 				cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
861 				/*g_print ("offset for arg %d at %d\n", n, PPC_STACK_PARAM_OFFSET + stack_size);*/
862 				stack_size += nwords * sizeof (gpointer);
865 			add_general (&gr, &stack_size, cinfo->args + n, TRUE);
866 			cinfo->args [n].regtype = RegTypeStructByAddr;
867 			cinfo->args [n].vtsize = size;
874 			cinfo->args [n].size = 8;
875 			add_general (&gr, &stack_size, cinfo->args + n, sizeof (gpointer) == 8);
879 			cinfo->args [n].size = 4;
881 			/* It was 7, now it is 8 in LinuxPPC */
882 			if (fr <= PPC_LAST_FPARG_REG) {
883 				cinfo->args [n].regtype = RegTypeFP;
884 				cinfo->args [n].reg = fr;
886 				FP_ALSO_IN_REG (gr ++);
887 				ALWAYS_ON_STACK (stack_size += 8);
				/* R4 on the stack sits in the high half of its 8-byte slot */
889 				cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size + 4;
890 				cinfo->args [n].regtype = RegTypeBase;
891 				cinfo->args [n].reg = ppc_sp; /* in the caller*/
897 			cinfo->args [n].size = 8;
898 			/* It was 7, now it is 8 in LinuxPPC */
899 			if (fr <= PPC_LAST_FPARG_REG) {
900 				cinfo->args [n].regtype = RegTypeFP;
901 				cinfo->args [n].reg = fr;
903 				FP_ALSO_IN_REG (gr++);
904 				ALWAYS_ON_STACK (stack_size += 8);
906 				cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
907 				cinfo->args [n].regtype = RegTypeBase;
908 				cinfo->args [n].reg = ppc_sp; /* in the caller*/
914 			g_error ("Can't trampoline 0x%x", sig->params [i]->type);
	/* vararg cookie when the sentinel is the last parameter */
918 	if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos)) {
919 		/* Prevent implicit arguments and sig_cookie from
920 		   being passed in registers */
921 		gr = PPC_LAST_ARG_REG + 1;
922 		/* Emit the signature cookie just before the implicit arguments */
923 		add_general (&gr, &stack_size, &cinfo->sig_cookie, TRUE);
	/* return value placement */
927 		simpletype = mini_type_get_underlying_type (NULL, sig->ret)->type;
928 		switch (simpletype) {
929 		case MONO_TYPE_BOOLEAN:
940 		case MONO_TYPE_FNPTR:
941 		case MONO_TYPE_CLASS:
942 		case MONO_TYPE_OBJECT:
943 		case MONO_TYPE_SZARRAY:
944 		case MONO_TYPE_ARRAY:
945 		case MONO_TYPE_STRING:
946 			cinfo->ret.reg = ppc_r3;
950 			cinfo->ret.reg = ppc_r3;
954 			cinfo->ret.reg = ppc_f1;
955 			cinfo->ret.regtype = RegTypeFP;
957 		case MONO_TYPE_GENERICINST:
958 			if (!mono_type_generic_inst_is_valuetype (sig->ret)) {
959 				cinfo->ret.reg = ppc_r3;
963 		case MONO_TYPE_VALUETYPE:
965 		case MONO_TYPE_TYPEDBYREF:
969 			g_error ("Can't handle as return value 0x%x", sig->ret->type);
973 	/* align stack size to 16 */
974 	DEBUG (printf ("      stack size: %d (%d)\n", (stack_size + 15) & ~15, stack_size));
975 	stack_size = (stack_size + 15) & ~15;
977 	cinfo->stack_usage = stack_size;
/* When structs are passed by address (!PPC_PASS_STRUCTS_BY_VALUE) and the
 * method performs tail calls, allocate one indirect local per valuetype
 * parameter so the callee's struct args can be materialized before the
 * tail call reuses the frame. No-op otherwise. */
982 allocate_tailcall_valuetype_addrs (MonoCompile *cfg)
984 #if !PPC_PASS_STRUCTS_BY_VALUE
985 	MonoMethodSignature *sig = mono_method_signature (cfg->method);
989 	if (!(cfg->flags & MONO_CFG_HAS_TAIL))
	/* count valuetype parameters */
992 	for (i = 0; i < sig->param_count; ++i) {
993 		MonoType *type = mono_type_get_underlying_type (sig->params [i]);
994 		if (type->type == MONO_TYPE_VALUETYPE)
999 	cfg->tailcall_valuetype_addrs =
1000 		mono_mempool_alloc0 (cfg->mempool, sizeof (MonoInst*) * num_structs);
1001 	for (i = 0; i < num_structs; ++i) {
1002 		cfg->tailcall_valuetype_addrs [i] =
1003 			mono_compile_create_var (cfg, &mono_defaults.int_class->byval_arg, OP_LOCAL);
1004 		cfg->tailcall_valuetype_addrs [i]->flags |= MONO_INST_INDIRECT;
1011  * Set var information according to the calling convention. ppc version.
1012  * The locals var stuff should most likely be split in another method.
/* Lays out the stack frame: linkage area, callee param area, trace/LMF
 * scratch, vret slot, spilled locals, then stack-homed arguments; records
 * per-variable OP_REGOFFSET placements and the final 16-byte-aligned frame
 * size in m->stack_offset. NOTE(review): excerpt is non-contiguous — else
 * branches, #ifdef arms and closing braces are missing between many lines. */
1015 mono_arch_allocate_vars (MonoCompile *m)
1017 	MonoMethodSignature *sig;
1018 	MonoMethodHeader *header;
1020 	int i, offset, size, align, curinst;
1021 	int frame_reg = ppc_sp;
1023 	guint32 locals_stack_size, locals_stack_align;
1025 	allocate_tailcall_valuetype_addrs (m);
1027 	m->flags |= MONO_CFG_HAS_SPILLUP;
1029 	/* allow room for the vararg method args: void* and long/double */
1030 	if (mono_jit_trace_calls != NULL && mono_trace_eval (m->method))
1031 		m->param_area = MAX (m->param_area, sizeof (gpointer)*8);
1032 	/* this is bug #60332: remove when #59509 is fixed, so no weird vararg 
1033 	 * call convs needs to be handled this way.
1035 	if (m->flags & MONO_CFG_HAS_VARARGS)
1036 		m->param_area = MAX (m->param_area, sizeof (gpointer)*8);
1037 	/* gtk-sharp and other broken code will dllimport vararg functions even with
1038 	 * non-varargs signatures. Since there is little hope people will get this right
1039 	 * we assume they won't.
1041 	if (m->method->wrapper_type == MONO_WRAPPER_MANAGED_TO_NATIVE)
1042 		m->param_area = MAX (m->param_area, sizeof (gpointer)*8);
1044 	header = mono_method_get_header (m->method);
1047 	 * We use the frame register also for any method that has
1048 	 * exception clauses. This way, when the handlers are called,
1049 	 * the code will reference local variables using the frame reg instead of
1050 	 * the stack pointer: if we had to restore the stack pointer, we'd
1051 	 * corrupt the method frames that are already on the stack (since
1052 	 * filters get called before stack unwinding happens) when the filter
1053 	 * code would call any method (this also applies to finally etc.).
1055 	if ((m->flags & MONO_CFG_HAS_ALLOCA) || header->num_clauses)
1056 		frame_reg = ppc_r31;
1057 	m->frame_reg = frame_reg;
1058 	if (frame_reg != ppc_sp) {
1059 		m->used_int_regs |= 1 << frame_reg;
1062 	sig = mono_method_signature (m->method);
	/* return value placement */
1066 	if (MONO_TYPE_ISSTRUCT (sig->ret)) {
1067 		m->ret->opcode = OP_REGVAR;
1068 		m->ret->inst_c0 = m->ret->dreg = ppc_r3;
1070 		/* FIXME: handle long values? */
1071 		switch (mini_type_get_underlying_type (m->generic_sharing_context, sig->ret)->type) {
1072 		case MONO_TYPE_VOID:
1076 			m->ret->opcode = OP_REGVAR;
1077 			m->ret->inst_c0 = m->ret->dreg = ppc_f1;
1080 			m->ret->opcode = OP_REGVAR;
1081 			m->ret->inst_c0 = m->ret->dreg = ppc_r3;
1085 	/* local vars are at a positive offset from the stack pointer */
1087 	 * also note that if the function uses alloca, we use ppc_r31
1088 	 * to point at the local variables.
1090 	offset = PPC_MINIMAL_STACK_SIZE; /* linkage area */
1091 	/* align the offset to 16 bytes: not sure this is needed here */
1093 	//offset &= ~(16 - 1);
1095 	/* add parameter area size for called functions */
1096 	offset += m->param_area;
1098 	offset &= ~(16 - 1);
1100 	/* allow room to save the return value */
1101 	if (mono_jit_trace_calls != NULL && mono_trace_eval (m->method))
1104 	/* the MonoLMF structure is stored just below the stack pointer */
1107 	/* this stuff should not be needed on ppc and the new jit,
1108 	 * because a call on ppc to the handlers doesn't change the
1109 	 * stack pointer and the jist doesn't manipulate the stack pointer
1110 	 * for operations involving valuetypes.
1112 	/* reserve space to store the esp */
1113 	offset += sizeof (gpointer);
1115 	/* this is a global constant */
1116 	mono_exc_esp_offset = offset;
	/* slot holding the hidden struct-return address */
1119 	if (MONO_TYPE_ISSTRUCT (sig->ret)) {
1120 		offset += sizeof(gpointer) - 1;
1121 		offset &= ~(sizeof(gpointer) - 1);
1123 		m->vret_addr->opcode = OP_REGOFFSET;
1124 		m->vret_addr->inst_basereg = frame_reg;
1125 		m->vret_addr->inst_offset = offset;
1127 		if (G_UNLIKELY (m->verbose_level > 1)) {
1128 			printf ("vret_addr =");
1129 			mono_print_ins (m->vret_addr);
1132 		offset += sizeof(gpointer);
	/* spill slots for the locals */
1135 	offsets = mono_allocate_stack_slots_full (m, FALSE, &locals_stack_size, &locals_stack_align);
1136 	if (locals_stack_align) {
1137 		offset += (locals_stack_align - 1);
1138 		offset &= ~(locals_stack_align - 1);
1140 	for (i = m->locals_start; i < m->num_varinfo; i++) {
1141 		if (offsets [i] != -1) {
1142 			MonoInst *inst = m->varinfo [i];
1143 			inst->opcode = OP_REGOFFSET;
1144 			inst->inst_basereg = frame_reg;
1145 			inst->inst_offset = offset + offsets [i];
1147 				g_print ("allocating local %d (%s) to %d\n",
1148 					i, mono_type_get_name (inst->inst_vtype), inst->inst_offset);
1152 	offset += locals_stack_size;
	/* stack homes for 'this' ... */
1156 		inst = m->args [curinst];
1157 		if (inst->opcode != OP_REGVAR) {
1158 			inst->opcode = OP_REGOFFSET;
1159 			inst->inst_basereg = frame_reg;
1160 			offset += sizeof (gpointer) - 1;
1161 			offset &= ~(sizeof (gpointer) - 1);
1162 			inst->inst_offset = offset;
1163 			offset += sizeof (gpointer);
	/* ... and for the declared parameters */
1168 	for (i = 0; i < sig->param_count; ++i) {
1169 		inst = m->args [curinst];
1170 		if (inst->opcode != OP_REGVAR) {
1171 			inst->opcode = OP_REGOFFSET;
1172 			inst->inst_basereg = frame_reg;
1174 				size = mono_type_native_stack_size (sig->params [i], (guint32*)&align);
1175 				inst->backend.is_pinvoke = 1;
1177 				size = mono_type_size (sig->params [i], &align);
1179 			if (MONO_TYPE_ISSTRUCT (sig->params [i]) && size < sizeof (gpointer))
1180 				size = align = sizeof (gpointer);
1181 			offset += align - 1;
1182 			offset &= ~(align - 1);
1183 			inst->inst_offset = offset;
1189 	/* some storage for fp conversions */
1192 	m->arch.fp_conv_var_offset = offset;
1195 	/* align the offset to 16 bytes */
1197 	offset &= ~(16 - 1);
1200 	m->stack_offset = offset;
1202 	if (sig->call_convention == MONO_CALL_VARARG) {
1203 		CallInfo *cinfo = calculate_sizes (m->method->signature, m->method->signature->pinvoke);
1205 		m->sig_cookie = cinfo->sig_cookie.offset;
/* Create backend-required variables before regalloc; here just the vret_addr
 * temp for struct returns. */
1212 mono_arch_create_vars (MonoCompile *cfg)
1214 	MonoMethodSignature *sig = mono_method_signature (cfg->method);
1216 	if (MONO_TYPE_ISSTRUCT (sig->ret)) {
1217 		cfg->vret_addr = mono_compile_create_var (cfg, &mono_defaults.int_class->byval_arg, OP_ARG);
/* Store the vararg signature cookie (the MonoMethodSignature pointer) at its
 * computed stack slot relative to sp (r1) so the callee can find it. */
1222 emit_sig_cookie (MonoCompile *cfg, MonoCallInst *call, CallInfo *cinfo)
1224 	int sig_reg = mono_alloc_ireg (cfg);
1226 	MONO_EMIT_NEW_ICONST (cfg, sig_reg, (gulong)call->signature);
1227 	MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG,
1228 			ppc_r1, cinfo->sig_cookie.offset, sig_reg);
/* Lower a managed call: using the layout from calculate_sizes, move each
 * argument into its assigned register (OP_MOVE/OP_FMOVE + outarg-reg
 * binding), emit OP_OUTARG_VT markers for struct arguments (finished later in
 * mono_arch_emit_outarg_vt), store stack-passed args relative to sp, handle
 * the vararg sig cookie and the hidden struct-return pointer, and record the
 * call's stack usage. NOTE(review): excerpt is non-contiguous; the g_free of
 * cinfo is presumably in the omitted tail — confirm in the full file. */
1232 mono_arch_emit_call (MonoCompile *cfg, MonoCallInst *call)
1235 	MonoMethodSignature *sig;
1239 	sig = call->signature;
1240 	n = sig->param_count + sig->hasthis;
1242 	cinfo = calculate_sizes (sig, sig->pinvoke);
1244 	for (i = 0; i < n; ++i) {
1245 		ArgInfo *ainfo = cinfo->args + i;
		/* pick the managed type; 'this' is treated as native int */
1248 		if (i >= sig->hasthis)
1249 			t = sig->params [i - sig->hasthis];
1251 			t = &mono_defaults.int_class->byval_arg;
1252 		t = mini_type_get_underlying_type (cfg->generic_sharing_context, t);
1254 		if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos))
1255 			emit_sig_cookie (cfg, call, cinfo);
1257 		in = call->args [i];
1259 		if (ainfo->regtype == RegTypeGeneral) {
1260 			MONO_INST_NEW (cfg, ins, OP_MOVE);
1261 			ins->dreg = mono_alloc_ireg (cfg);
1262 			ins->sreg1 = in->dreg;
1263 			MONO_ADD_INS (cfg->cbb, ins);
1265 			mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, ainfo->reg, FALSE);
1266 		} else if (ainfo->regtype == RegTypeStructByAddr) {
1267 			MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1268 			ins->opcode = OP_OUTARG_VT;
1269 			ins->sreg1 = in->dreg;
1270 			ins->klass = in->klass;
1271 			ins->inst_p0 = call;
1272 			ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1273 			memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1274 			MONO_ADD_INS (cfg->cbb, ins);
1275 		} else if (ainfo->regtype == RegTypeStructByVal) {
1276 			/* this is further handled in mono_arch_emit_outarg_vt () */
1277 			MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1278 			ins->opcode = OP_OUTARG_VT;
1279 			ins->sreg1 = in->dreg;
1280 			ins->klass = in->klass;
1281 			ins->inst_p0 = call;
1282 			ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1283 			memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1284 			MONO_ADD_INS (cfg->cbb, ins);
1285 		} else if (ainfo->regtype == RegTypeBase) {
			/* stack-passed argument: pick the store opcode by type width */
1286 			if (!t->byref && ((t->type == MONO_TYPE_I8) || (t->type == MONO_TYPE_U8))) {
1287 				MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI8_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1288 			} else if (!t->byref && ((t->type == MONO_TYPE_R4) || (t->type == MONO_TYPE_R8))) {
1289 				if (t->type == MONO_TYPE_R8)
1290 					MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER8_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1292 					MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER4_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1294 				MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1296 		} else if (ainfo->regtype == RegTypeFP) {
1297 			if (t->type == MONO_TYPE_VALUETYPE) {
1298 				/* this is further handled in mono_arch_emit_outarg_vt () */
1299 				MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1300 				ins->opcode = OP_OUTARG_VT;
1301 				ins->sreg1 = in->dreg;
1302 				ins->klass = in->klass;
1303 				ins->inst_p0 = call;
1304 				ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1305 				memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1306 				MONO_ADD_INS (cfg->cbb, ins);
1308 				cfg->flags |= MONO_CFG_HAS_FPOUT;
1310 				int dreg = mono_alloc_freg (cfg);
				/* single-precision args are narrowed before the move */
1312 				if (ainfo->size == 4) {
1313 					MONO_EMIT_NEW_UNALU (cfg, OP_FCONV_TO_R4, dreg, in->dreg);
1315 					MONO_INST_NEW (cfg, ins, OP_FMOVE);
1317 					ins->sreg1 = in->dreg;
1318 					MONO_ADD_INS (cfg->cbb, ins);
1321 				mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg, TRUE);
1322 				cfg->flags |= MONO_CFG_HAS_FPOUT;
1325 			g_assert_not_reached ();
1329 	/* Emit the signature cookie in the case that there is no
1330 	   additional argument */
1331 	if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (n == sig->sentinelpos))
1332 		emit_sig_cookie (cfg, call, cinfo);
1334 	if (cinfo->struct_ret) {
		/* pass the return-buffer address in the struct_ret register */
1337 		MONO_INST_NEW (cfg, vtarg, OP_MOVE);
1338 		vtarg->sreg1 = call->vret_var->dreg;
1339 		vtarg->dreg = mono_alloc_preg (cfg);
1340 		MONO_ADD_INS (cfg->cbb, vtarg);
1342 		mono_call_inst_add_outarg_reg (cfg, call, vtarg->dreg, cinfo->struct_ret, FALSE);
1345 	call->stack_usage = cinfo->stack_usage;
1346 	cfg->param_area = MAX (PPC_MINIMAL_PARAM_AREA_SIZE, MAX (cfg->param_area, cinfo->stack_usage));
1347 	cfg->flags |= MONO_CFG_HAS_CALLS;
/*
 * mono_arch_emit_outarg_vt:
 * Emit the IR that passes one valuetype call argument. The ArgInfo stored on
 * the OP_OUTARG_VT instruction (ins->inst_p1) chose the strategy earlier:
 * RegTypeStructByVal (words copied into integer registers, plus overflow area
 * on the stack), RegTypeFP (single float/double in an FP register), or the
 * fallback by-address path (copy into a local, pass its address).
 * NOTE(review): several original lines are elided in this excerpt.
 */
1353 mono_arch_emit_outarg_vt (MonoCompile *cfg, MonoInst *ins, MonoInst *src)
1355 MonoCallInst *call = (MonoCallInst*)ins->inst_p0;
1356 ArgInfo *ainfo = ins->inst_p1;
1357 int ovf_size = ainfo->vtsize;
1358 int doffset = ainfo->offset;
1359 int i, soffset, dreg;
1361 if (ainfo->regtype == RegTypeStructByVal) {
1366 * Darwin pinvokes needs some special handling for 1
1367 * and 2 byte arguments
1369 g_assert (ins->klass);
/* pinvoke needs the native (marshalled) size, not the managed one */
1370 if (call->signature->pinvoke)
1371 size = mono_class_native_size (ins->klass, NULL);
1372 if (size == 2 || size == 1) {
1373 int tmpr = mono_alloc_ireg (cfg);
1375 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI1_MEMBASE, tmpr, src->dreg, soffset);
1377 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI2_MEMBASE, tmpr, src->dreg, soffset);
1378 dreg = mono_alloc_ireg (cfg);
1379 MONO_EMIT_NEW_UNALU (cfg, OP_MOVE, dreg, tmpr);
1380 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg, FALSE);
/* general case: copy the struct word-by-word into consecutive arg registers */
1383 for (i = 0; i < ainfo->size; ++i) {
1384 int antipadding = 0;
1387 antipadding = sizeof (gpointer) - ainfo->bytes;
1389 dreg = mono_alloc_ireg (cfg);
1390 MONO_EMIT_NEW_LOAD_MEMBASE (cfg, dreg, src->dreg, soffset);
/* right-justify a partial trailing word (big-endian register convention) */
1392 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHR_UN_IMM, dreg, dreg, antipadding * 8);
1393 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg + i, FALSE);
1394 soffset += sizeof (gpointer);
/* any words that did not fit in registers go to the stack overflow area */
1397 mini_emit_memcpy (cfg, ppc_r1, doffset + soffset, src->dreg, soffset, ovf_size * sizeof (gpointer), 0);
1398 } else if (ainfo->regtype == RegTypeFP) {
1399 int tmpr = mono_alloc_freg (cfg);
1400 if (ainfo->size == 4)
1401 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR4_MEMBASE, tmpr, src->dreg, 0);
1403 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR8_MEMBASE, tmpr, src->dreg, 0);
1404 dreg = mono_alloc_freg (cfg);
1405 MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, dreg, tmpr);
1406 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg, TRUE);
/* by-address: make a local copy so the callee cannot mutate the caller's value */
1408 MonoInst *vtcopy = mono_compile_create_var (cfg, &src->klass->byval_arg, OP_LOCAL);
1412 /* FIXME: alignment? */
1413 if (call->signature->pinvoke) {
1414 size = mono_type_native_stack_size (&src->klass->byval_arg, NULL);
1415 vtcopy->backend.is_pinvoke = 1;
1417 size = mini_type_stack_size (cfg->generic_sharing_context, &src->klass->byval_arg, NULL);
1420 g_assert (ovf_size > 0);
1422 EMIT_NEW_VARLOADA (cfg, load, vtcopy, vtcopy->inst_vtype);
1423 mini_emit_memcpy (cfg, load->dreg, 0, src->dreg, 0, size, 0);
/* pass the copy's address either on the stack or in a register */
1426 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ppc_r1, ainfo->offset, load->dreg);
1428 mono_call_inst_add_outarg_reg (cfg, call, load->dreg, ainfo->reg, FALSE);
/*
 * mono_arch_emit_setret:
 * Move the method's return value into the fixed return location
 * (FP move for R4/R8 returns, integer move otherwise).
 */
1433 mono_arch_emit_setret (MonoCompile *cfg, MonoMethod *method, MonoInst *val)
1435 MonoType *ret = mini_type_get_underlying_type (cfg->generic_sharing_context,
1436 mono_method_signature (method)->ret);
1439 if (ret->type == MONO_TYPE_R8 || ret->type == MONO_TYPE_R4) {
1440 MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, cfg->ret->dreg, val->dreg);
1444 MONO_EMIT_NEW_UNALU (cfg, OP_MOVE, cfg->ret->dreg, val->dreg);
1447 /* FIXME: this is just a useless hint: fix the interface to include the opcode */
/* Tells the generic code whether an immediate operand is encodable; body elided in this excerpt. */
1449 mono_arch_is_inst_imm (gint64 imm)
1455 * Allow tracing to work with this interface (with an optional argument)
1459 mono_arch_instrument_prolog (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments)
/* Load the trace callback arguments (method, NULL frame pointer) and call func via LR. */
1463 ppc_load (code, ppc_r3, cfg->method);
1464 ppc_li (code, ppc_r4, 0); /* NULL ebp for now */
1465 ppc_load_func (code, ppc_r0, func);
1466 ppc_mtlr (code, ppc_r0);
/*
 * mono_arch_instrument_epilog:
 * Emit a call to the trace-exit callback `func` just before the method
 * returns. The return value (r3 / f1 / struct, per save_mode) is spilled to
 * the stack around the call and reloaded afterwards so it survives.
 */
1479 mono_arch_instrument_epilog (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments)
1482 int save_mode = SAVE_NONE;
1484 MonoMethod *method = cfg->method;
1485 int rtype = mini_type_get_underlying_type (cfg->generic_sharing_context,
1486 mono_method_signature (method)->ret)->type;
1487 int save_offset = PPC_STACK_PARAM_OFFSET + cfg->param_area;
1491 offset = code - cfg->native_code;
1492 /* we need about 16 instructions */
1493 if (offset > (cfg->code_size - 16 * 4)) {
1494 cfg->code_size *= 2;
1495 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
1496 code = cfg->native_code + offset;
1500 case MONO_TYPE_VOID:
1501 /* special case string .ctor icall */
/* NOTE(review): other mini-*.c backends use `!strcmp (".ctor", ...)` here; the
 * missing `!` would invert the test — confirm against upstream mini-ppc.c. */
1502 if (strcmp (".ctor", method->name) && method->klass == mono_defaults.string_class)
1503 save_mode = SAVE_ONE;
1505 save_mode = SAVE_NONE;
1509 save_mode = SAVE_FP;
1511 case MONO_TYPE_VALUETYPE:
1512 save_mode = SAVE_STRUCT;
1515 save_mode = SAVE_ONE;
/* spill the live return value before clobbering r3/r4 with the trace args */
1519 switch (save_mode) {
1521 ppc_store_reg (code, ppc_r3, save_offset, cfg->frame_reg);
1522 if (enable_arguments) {
1523 ppc_mr (code, ppc_r4, ppc_r3);
1527 ppc_stfd (code, ppc_f1, save_offset, cfg->frame_reg);
1528 if (enable_arguments) {
1529 /* FIXME: what reg? */
1530 ppc_fmr (code, ppc_f3, ppc_f1);
1531 /* FIXME: use 8 byte load */
1532 ppc_lwz (code, ppc_r4, save_offset, cfg->frame_reg);
1533 ppc_lwz (code, ppc_r5, save_offset + 4, cfg->frame_reg);
1537 if (enable_arguments) {
1538 /* FIXME: get the actual address */
1539 ppc_mr (code, ppc_r4, ppc_r3);
1547 ppc_load (code, ppc_r3, cfg->method);
1548 ppc_load_func (code, ppc_r0, func);
1549 ppc_mtlr (code, ppc_r0);
/* restore the saved return value after the callback returns */
1552 switch (save_mode) {
1554 ppc_load_reg (code, ppc_r3, save_offset, cfg->frame_reg);
1557 ppc_lfd (code, ppc_f1, save_offset, cfg->frame_reg);
1567 * Conditional branches have a small offset, so if it is likely overflowed,
1568 * we do a branch to the end of the method (uncond branches have much larger
1569 * offsets) where we perform the conditional and jump back unconditionally.
1570 * It's slightly slower, since we add two uncond branches, but it's very simple
1571 * with the current patch implementation and such large methods are likely not
1572 * going to be perf critical anyway.
1577 const char *exception;
/*
 * EMIT_COND_BRANCH_FLAGS: emit a conditional branch (bc b0,b1) to a label or
 * basic block. bc only has a 16-bit displacement, so when the estimated
 * displacement may not fit, a MONO_PATCH_INFO_BB_OVF record is emitted and the
 * condition is resolved through an out-of-line unconditional branch instead.
 * NOTE(review): `! ppc_is_imm16 (ppc_is_imm16 (br_disp - 1024))` below calls
 * ppc_is_imm16 on its own 0/1 result, making the second disjunct always false;
 * the intent is presumably `!ppc_is_imm16 (br_disp - 1024)` — confirm upstream.
 */
1584 #define EMIT_COND_BRANCH_FLAGS(ins,b0,b1) \
1585 if (ins->flags & MONO_INST_BRLABEL) { \
1586 if (0 && ins->inst_i0->inst_c0) { \
1587 ppc_bc (code, (b0), (b1), (code - cfg->native_code + ins->inst_i0->inst_c0) & 0xffff); \
1589 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_LABEL, ins->inst_i0); \
1590 ppc_bc (code, (b0), (b1), 0); \
1593 if (0 && ins->inst_true_bb->native_offset) { \
1594 ppc_bc (code, (b0), (b1), (code - cfg->native_code + ins->inst_true_bb->native_offset) & 0xffff); \
1596 int br_disp = ins->inst_true_bb->max_offset - offset; \
1597 if (!ppc_is_imm16 (br_disp + 1024) || ! ppc_is_imm16 (ppc_is_imm16 (br_disp - 1024))) { \
1598 MonoOvfJump *ovfj = mono_mempool_alloc (cfg->mempool, sizeof (MonoOvfJump)); \
1599 ovfj->data.bb = ins->inst_true_bb; \
1600 ovfj->ip_offset = 0; \
1601 ovfj->b0_cond = (b0); \
1602 ovfj->b1_cond = (b1); \
1603 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB_OVF, ovfj); \
1606 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_true_bb); \
1607 ppc_bc (code, (b0), (b1), 0); \
/* convenience wrapper: look up the BO/BI fields from the per-condition tables */
1612 #define EMIT_COND_BRANCH(ins,cond) EMIT_COND_BRANCH_FLAGS(ins, branch_b0_table [(cond)], branch_b1_table [(cond)])
1614 /* emit an exception if condition is fail
1616 * We assign the extra code used to throw the implicit exceptions
1617 * to cfg->bb_exit as far as the big branch handling is concerned
/* Same far-branch strategy as EMIT_COND_BRANCH_FLAGS, but targeting the
 * exception-throwing code appended after bb_exit.
 * NOTE(review): the nested `ppc_is_imm16 (ppc_is_imm16 (...))` has the same
 * always-false second disjunct as in EMIT_COND_BRANCH_FLAGS — confirm upstream. */
1619 #define EMIT_COND_SYSTEM_EXCEPTION_FLAGS(b0,b1,exc_name) \
1621 int br_disp = cfg->bb_exit->max_offset - offset; \
1622 if (!ppc_is_imm16 (br_disp + 1024) || ! ppc_is_imm16 (ppc_is_imm16 (br_disp - 1024))) { \
1623 MonoOvfJump *ovfj = mono_mempool_alloc (cfg->mempool, sizeof (MonoOvfJump)); \
1624 ovfj->data.exception = (exc_name); \
1625 ovfj->ip_offset = code - cfg->native_code; \
1626 ovfj->b0_cond = (b0); \
1627 ovfj->b1_cond = (b1); \
1628 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC_OVF, ovfj); \
1630 cfg->bb_exit->max_offset += 24; \
1632 mono_add_patch_info (cfg, code - cfg->native_code, \
1633 MONO_PATCH_INFO_EXC, exc_name); \
1634 ppc_bcl (code, (b0), (b1), 0); \
1638 #define EMIT_COND_SYSTEM_EXCEPTION(cond,exc_name) EMIT_COND_SYSTEM_EXCEPTION_FLAGS(branch_b0_table [(cond)], branch_b1_table [(cond)], (exc_name))
/* First peephole pass over a basic block; body elided in this excerpt. */
1641 mono_arch_peephole_pass_1 (MonoCompile *cfg, MonoBasicBlock *bb)
/*
 * mono_arch_peephole_pass_2:
 * Local peephole optimizations within one basic block: strength-reduce
 * multiplications by 1 / powers of two, forward stores into following loads
 * of the same [basereg+offset], collapse redundant loads, and delete no-op
 * or cancelling register moves. `last_ins` tracks the previous instruction
 * for the store→load pairing rules.
 */
1646 mono_arch_peephole_pass_2 (MonoCompile *cfg, MonoBasicBlock *bb)
1648 MonoInst *ins, *n, *last_ins = NULL;
1650 MONO_BB_FOR_EACH_INS_SAFE (bb, n, ins) {
1651 switch (ins->opcode) {
1653 /* remove unnecessary multiplication with 1 */
1654 if (ins->inst_imm == 1) {
1655 if (ins->dreg != ins->sreg1) {
1656 ins->opcode = OP_MOVE;
1658 MONO_DELETE_INS (bb, ins);
/* multiplication by a power of two becomes a left shift */
1662 int power2 = mono_is_power_of_two (ins->inst_imm);
1664 ins->opcode = OP_SHL_IMM;
1665 ins->inst_imm = power2;
1669 case OP_LOAD_MEMBASE:
1670 case OP_LOADI8_MEMBASE:
1672 * OP_STORE_MEMBASE_REG reg, offset(basereg)
1673 * OP_LOAD_MEMBASE offset(basereg), reg
1675 if (last_ins && (last_ins->opcode == OP_STOREI8_MEMBASE_REG
1676 || last_ins->opcode == OP_STORE_MEMBASE_REG) &&
1677 ins->inst_basereg == last_ins->inst_destbasereg &&
1678 ins->inst_offset == last_ins->inst_offset) {
1679 if (ins->dreg == last_ins->sreg1) {
1680 MONO_DELETE_INS (bb, ins);
1683 //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
1684 ins->opcode = OP_MOVE;
1685 ins->sreg1 = last_ins->sreg1;
1689 * Note: reg1 must be different from the basereg in the second load
1690 * OP_LOAD_MEMBASE offset(basereg), reg1
1691 * OP_LOAD_MEMBASE offset(basereg), reg2
1693 * OP_LOAD_MEMBASE offset(basereg), reg1
1694 * OP_MOVE reg1, reg2
1696 } else if (last_ins && (last_ins->opcode == OP_LOADI8_MEMBASE
1697 || last_ins->opcode == OP_LOAD_MEMBASE) &&
1698 ins->inst_basereg != last_ins->dreg &&
1699 ins->inst_basereg == last_ins->inst_basereg &&
1700 ins->inst_offset == last_ins->inst_offset) {
1702 if (ins->dreg == last_ins->dreg) {
1703 MONO_DELETE_INS (bb, ins);
1706 ins->opcode = OP_MOVE;
1707 ins->sreg1 = last_ins->dreg;
1710 //g_assert_not_reached ();
1714 * OP_STORE_MEMBASE_IMM imm, offset(basereg)
1715 * OP_LOAD_MEMBASE offset(basereg), reg
1717 * OP_STORE_MEMBASE_IMM imm, offset(basereg)
1718 * OP_ICONST reg, imm
1720 } else if (last_ins && (last_ins->opcode == OP_STOREI8_MEMBASE_IMM
1721 || last_ins->opcode == OP_STORE_MEMBASE_IMM) &&
1722 ins->inst_basereg == last_ins->inst_destbasereg &&
1723 ins->inst_offset == last_ins->inst_offset) {
1724 //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
1725 ins->opcode = OP_ICONST;
1726 ins->inst_c0 = last_ins->inst_imm;
/* rule deliberately disabled until verified — assert fires if it ever matches */
1727 g_assert_not_reached (); // check this rule
/* store→load forwarding of narrower widths becomes a sign/zero extension */
1731 case OP_LOADU1_MEMBASE:
1732 case OP_LOADI1_MEMBASE:
1733 if (last_ins && (last_ins->opcode == OP_STOREI1_MEMBASE_REG) &&
1734 ins->inst_basereg == last_ins->inst_destbasereg &&
1735 ins->inst_offset == last_ins->inst_offset) {
1736 ins->opcode = (ins->opcode == OP_LOADI1_MEMBASE) ? OP_ICONV_TO_I1 : OP_ICONV_TO_U1;
1737 ins->sreg1 = last_ins->sreg1;
1740 case OP_LOADU2_MEMBASE:
1741 case OP_LOADI2_MEMBASE:
1742 if (last_ins && (last_ins->opcode == OP_STOREI2_MEMBASE_REG) &&
1743 ins->inst_basereg == last_ins->inst_destbasereg &&
1744 ins->inst_offset == last_ins->inst_offset) {
1745 ins->opcode = (ins->opcode == OP_LOADI2_MEMBASE) ? OP_ICONV_TO_I2 : OP_ICONV_TO_U2;
1746 ins->sreg1 = last_ins->sreg1;
1749 case OP_LOADU4_MEMBASE:
1750 case OP_LOADI4_MEMBASE:
1751 if (last_ins && (last_ins->opcode == OP_STOREI4_MEMBASE_REG) &&
1752 ins->inst_basereg == last_ins->inst_destbasereg &&
1753 ins->inst_offset == last_ins->inst_offset) {
1754 ins->opcode = (ins->opcode == OP_LOADI4_MEMBASE) ? OP_ICONV_TO_I4 : OP_ICONV_TO_U4;
1755 ins->sreg1 = last_ins->sreg1;
1759 ins->opcode = OP_MOVE;
/* a move onto itself is dead */
1763 if (ins->dreg == ins->sreg1) {
1764 MONO_DELETE_INS (bb, ins);
1768 * OP_MOVE sreg, dreg
1769 * OP_MOVE dreg, sreg
1771 if (last_ins && last_ins->opcode == OP_MOVE &&
1772 ins->sreg1 == last_ins->dreg &&
1773 ins->dreg == last_ins->sreg1) {
1774 MONO_DELETE_INS (bb, ins);
1782 bb->last_ins = last_ins;
/*
 * mono_arch_decompose_opts:
 * Decompose opcodes the PPC backend cannot emit directly into simpler IR:
 * - OP_ICONV_TO_R_UN: build an IEEE double from the unsigned int via the
 *   classic 0x43300000 high-word trick, then subtract 2^52 (adjust_val).
 * - float checks: spill the double and inspect its high word (OP_CHECK_FINITE).
 * - 32-bit overflow add: widen to 64 bits by shifting both operands left 32,
 *   do the 64-bit overflow op, shift the result back down.
 */
1786 mono_arch_decompose_opts (MonoCompile *cfg, MonoInst *ins)
1788 switch (ins->opcode) {
1789 case OP_ICONV_TO_R_UN: {
1790 static const guint64 adjust_val = 0x4330000000000000ULL;
1791 int msw_reg = mono_alloc_ireg (cfg);
1792 int adj_reg = mono_alloc_freg (cfg);
1793 int tmp_reg = mono_alloc_freg (cfg);
1794 int basereg = ppc_sp;
1796 MONO_EMIT_NEW_ICONST (cfg, msw_reg, 0x43300000);
/* offset may exceed a 16-bit displacement: materialize the address first */
1797 if (!ppc_is_imm16 (offset + 4)) {
1798 basereg = mono_alloc_ireg (cfg);
1799 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IADD_IMM, basereg, cfg->frame_reg, offset);
1801 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset, msw_reg);
1802 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset + 4, ins->sreg1);
1803 MONO_EMIT_NEW_LOAD_R8 (cfg, adj_reg, &adjust_val);
1804 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR8_MEMBASE, tmp_reg, basereg, offset);
1805 MONO_EMIT_NEW_BIALU (cfg, OP_FSUB, ins->dreg, tmp_reg, adj_reg);
1806 ins->opcode = OP_NOP;
1810 int msw_reg = mono_alloc_ireg (cfg);
1811 int basereg = ppc_sp;
1813 if (!ppc_is_imm16 (offset + 4)) {
1814 basereg = mono_alloc_ireg (cfg);
1815 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IADD_IMM, basereg, cfg->frame_reg, offset);
1817 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER8_MEMBASE_REG, basereg, offset, ins->sreg1);
1818 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI4_MEMBASE, msw_reg, basereg, offset);
1819 MONO_EMIT_NEW_UNALU (cfg, OP_CHECK_FINITE, -1, msw_reg);
1820 MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, ins->dreg, ins->sreg1);
1821 ins->opcode = OP_NOP;
1825 case OP_IADD_OVF_UN:
1827 int shifted1_reg = mono_alloc_ireg (cfg);
1828 int shifted2_reg = mono_alloc_ireg (cfg);
1829 int result_shifted_reg = mono_alloc_ireg (cfg);
1831 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHL_IMM, shifted1_reg, ins->sreg1, 32);
1832 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHL_IMM, shifted2_reg, ins->sreg2, 32);
1833 MONO_EMIT_NEW_BIALU (cfg, ins->opcode, result_shifted_reg, shifted1_reg, shifted2_reg);
/* shift back: logical for the unsigned variant, arithmetic for the signed one */
1834 if (ins->opcode == OP_IADD_OVF_UN)
1835 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHR_UN_IMM, ins->dreg, result_shifted_reg, 32);
1837 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHR_IMM, ins->dreg, result_shifted_reg, 32);
1838 ins->opcode = OP_NOP;
1844 * the branch_b0_table should maintain the order of these
1858 branch_b0_table [] = {
1873 branch_b1_table [] = {
1887 #define NEW_INS(cfg,dest,op) do { \
1888 MONO_INST_NEW((cfg), (dest), (op)); \
1889 mono_bblock_insert_after_ins (bb, last_ins, (dest)); \
/*
 * map_to_reg_reg_op:
 * Map an opcode taking an immediate (or a membase addressing form) to its
 * register-register / register-indexed equivalent, used by the lowering pass
 * when an immediate or offset does not fit the instruction encoding.
 * Falls through to mono_op_imm_to_op() for anything not listed.
 */
1893 map_to_reg_reg_op (int op)
1902 case OP_COMPARE_IMM:
1904 case OP_ICOMPARE_IMM:
1906 case OP_LCOMPARE_IMM:
/* membase loads become indexed loads (offset held in a register) */
1922 case OP_LOAD_MEMBASE:
1923 return OP_LOAD_MEMINDEX;
1924 case OP_LOADI4_MEMBASE:
1925 return OP_LOADI4_MEMINDEX;
1926 case OP_LOADU4_MEMBASE:
1927 return OP_LOADU4_MEMINDEX;
1928 case OP_LOADI8_MEMBASE:
1929 return OP_LOADI8_MEMINDEX;
1930 case OP_LOADU1_MEMBASE:
1931 return OP_LOADU1_MEMINDEX;
1932 case OP_LOADI2_MEMBASE:
1933 return OP_LOADI2_MEMINDEX;
1934 case OP_LOADU2_MEMBASE:
1935 return OP_LOADU2_MEMINDEX;
1936 case OP_LOADI1_MEMBASE:
1937 return OP_LOADI1_MEMINDEX;
1938 case OP_LOADR4_MEMBASE:
1939 return OP_LOADR4_MEMINDEX;
1940 case OP_LOADR8_MEMBASE:
1941 return OP_LOADR8_MEMINDEX;
1942 case OP_STOREI1_MEMBASE_REG:
1943 return OP_STOREI1_MEMINDEX;
1944 case OP_STOREI2_MEMBASE_REG:
1945 return OP_STOREI2_MEMINDEX;
1946 case OP_STOREI4_MEMBASE_REG:
1947 return OP_STOREI4_MEMINDEX;
1948 case OP_STOREI8_MEMBASE_REG:
1949 return OP_STOREI8_MEMINDEX;
1950 case OP_STORE_MEMBASE_REG:
1951 return OP_STORE_MEMINDEX;
1952 case OP_STORER4_MEMBASE_REG:
1953 return OP_STORER4_MEMINDEX;
1954 case OP_STORER8_MEMBASE_REG:
1955 return OP_STORER8_MEMINDEX;
/* immediate stores become register stores (value loaded separately) */
1956 case OP_STORE_MEMBASE_IMM:
1957 return OP_STORE_MEMBASE_REG;
1958 case OP_STOREI1_MEMBASE_IMM:
1959 return OP_STOREI1_MEMBASE_REG;
1960 case OP_STOREI2_MEMBASE_IMM:
1961 return OP_STOREI2_MEMBASE_REG;
1962 case OP_STOREI4_MEMBASE_IMM:
1963 return OP_STOREI4_MEMBASE_REG;
1964 case OP_STOREI8_MEMBASE_IMM:
1965 return OP_STOREI8_MEMBASE_REG;
1967 return mono_op_imm_to_op (op);
1970 //#define map_to_reg_reg_op(op) (cfg->new_ir? mono_op_imm_to_op (op): map_to_reg_reg_op (op))
/* True when the branch/set/exception opcode consuming a compare result is an
 * unsigned comparison — selects cmpl (unsigned) vs cmp (signed) encoding. */
1972 #define compare_opcode_is_unsigned(opcode) \
1973 (((opcode) >= CEE_BNE_UN && (opcode) <= CEE_BLT_UN) || \
1974 ((opcode) >= OP_IBNE_UN && (opcode) <= OP_IBLT_UN) || \
1975 ((opcode) >= OP_LBNE_UN && (opcode) <= OP_LBLT_UN) || \
1976 ((opcode) >= OP_COND_EXC_NE_UN && (opcode) <= OP_COND_EXC_LT_UN) || \
1977 ((opcode) >= OP_COND_EXC_INE_UN && (opcode) <= OP_COND_EXC_ILT_UN) || \
1978 ((opcode) == OP_CLT_UN || (opcode) == OP_CGT_UN || \
1979 (opcode) == OP_ICLT_UN || (opcode) == OP_ICGT_UN || \
1980 (opcode) == OP_LCLT_UN || (opcode) == OP_LCGT_UN))
1983 * Remove from the instruction list the instructions that can't be
1984 * represented with very simple instructions with no register
/*
 * mono_arch_lowering_pass:
 * Rewrite IR so every remaining instruction maps to a simple PPC encoding:
 * immediates that don't fit 16 bits are materialized with OP_ICONST into a
 * fresh vreg and the opcode switched to its reg-reg form via
 * map_to_reg_reg_op(); div/rem-by-immediate and rem are expanded; membase
 * loads/stores with large offsets become indexed forms.
 */
1988 mono_arch_lowering_pass (MonoCompile *cfg, MonoBasicBlock *bb)
1990 MonoInst *ins, *next, *temp, *last_ins = NULL;
1993 MONO_BB_FOR_EACH_INS (bb, ins) {
1995 switch (ins->opcode) {
1996 case OP_IDIV_UN_IMM:
1999 case OP_IREM_UN_IMM:
/* division by immediate: load the divisor into a register first */
2000 NEW_INS (cfg, temp, OP_ICONST);
2001 temp->inst_c0 = ins->inst_imm;
2002 temp->dreg = mono_alloc_ireg (cfg);
2003 ins->sreg2 = temp->dreg;
2004 if (ins->opcode == OP_IDIV_IMM)
2005 ins->opcode = OP_IDIV;
2006 else if (ins->opcode == OP_IREM_IMM)
2007 ins->opcode = OP_IREM;
2008 else if (ins->opcode == OP_IDIV_UN_IMM)
2009 ins->opcode = OP_IDIV_UN;
2010 else if (ins->opcode == OP_IREM_UN_IMM)
2011 ins->opcode = OP_IREM_UN;
2013 /* handle rem separately */
2020 /* we change a rem dest, src1, src2 to
2021 * div temp1, src1, src2
2022 * mul temp2, temp1, src2
2023 * sub dest, src1, temp2
2025 if (ins->opcode == OP_IREM || ins->opcode == OP_IREM_UN) {
2026 NEW_INS (cfg, mul, OP_IMUL);
2027 NEW_INS (cfg, temp, ins->opcode == OP_IREM? OP_IDIV: OP_IDIV_UN);
2028 ins->opcode = OP_ISUB;
2030 NEW_INS (cfg, mul, OP_LMUL);
2031 NEW_INS (cfg, temp, ins->opcode == OP_LREM? OP_LDIV: OP_LDIV_UN);
2032 ins->opcode = OP_LSUB;
2034 temp->sreg1 = ins->sreg1;
2035 temp->sreg2 = ins->sreg2;
2036 temp->dreg = mono_alloc_ireg (cfg);
2037 mul->sreg1 = temp->dreg;
2038 mul->sreg2 = ins->sreg2;
2039 mul->dreg = mono_alloc_ireg (cfg);
2040 ins->sreg2 = mul->dreg;
2047 if (!ppc_is_imm16 (ins->inst_imm)) {
2048 NEW_INS (cfg, temp, OP_ICONST);
2049 temp->inst_c0 = ins->inst_imm;
2050 temp->dreg = mono_alloc_ireg (cfg);
2051 ins->sreg2 = temp->dreg;
2052 ins->opcode = map_to_reg_reg_op (ins->opcode);
/* subtraction is emitted as addi with the negated immediate, so test -imm */
2058 if (!ppc_is_imm16 (-ins->inst_imm)) {
2059 NEW_INS (cfg, temp, OP_ICONST);
2060 temp->inst_c0 = ins->inst_imm;
2061 temp->dreg = mono_alloc_ireg (cfg);
2062 ins->sreg2 = temp->dreg;
2063 ins->opcode = map_to_reg_reg_op (ins->opcode);
/* logical immediates: only a single 16-bit half (andi./andis.) is encodable */
2075 if ((ins->inst_imm & 0xffffffff00000000UL) ||
2076 ((ins->inst_imm & 0xffff0000) && (ins->inst_imm & 0xffff))) {
2077 NEW_INS (cfg, temp, OP_ICONST);
2078 temp->inst_c0 = ins->inst_imm;
2079 temp->dreg = mono_alloc_ireg (cfg);
2080 ins->sreg2 = temp->dreg;
2081 ins->opcode = map_to_reg_reg_op (ins->opcode);
2089 NEW_INS (cfg, temp, OP_ICONST);
2090 temp->inst_c0 = ins->inst_imm;
2091 temp->dreg = mono_alloc_ireg (cfg);
2092 ins->sreg2 = temp->dreg;
2093 ins->opcode = map_to_reg_reg_op (ins->opcode);
2095 case OP_COMPARE_IMM:
2096 case OP_ICOMPARE_IMM:
2097 case OP_LCOMPARE_IMM:
2099 /* Branch opts can eliminate the branch */
2100 if (!next || (!(MONO_IS_COND_BRANCH_OP (next) || MONO_IS_COND_EXC (next) || MONO_IS_SETCC (next)))) {
2101 ins->opcode = OP_NOP;
/* unsigned compares need an unsigned 16-bit immediate (cmpli), signed need signed (cmpi) */
2105 if (compare_opcode_is_unsigned (next->opcode)) {
2106 if (!ppc_is_uimm16 (ins->inst_imm)) {
2107 NEW_INS (cfg, temp, OP_ICONST);
2108 temp->inst_c0 = ins->inst_imm;
2109 temp->dreg = mono_alloc_ireg (cfg);
2110 ins->sreg2 = temp->dreg;
2111 ins->opcode = map_to_reg_reg_op (ins->opcode);
2114 if (!ppc_is_imm16 (ins->inst_imm)) {
2115 NEW_INS (cfg, temp, OP_ICONST);
2116 temp->inst_c0 = ins->inst_imm;
2117 temp->dreg = mono_alloc_ireg (cfg);
2118 ins->sreg2 = temp->dreg;
2119 ins->opcode = map_to_reg_reg_op (ins->opcode);
/* multiply by 1 / 0 / power-of-two strength reduction */
2125 if (ins->inst_imm == 1) {
2126 ins->opcode = OP_MOVE;
2129 if (ins->inst_imm == 0) {
2130 ins->opcode = OP_ICONST;
2134 imm = mono_is_power_of_two (ins->inst_imm);
2136 ins->opcode = OP_SHL_IMM;
2137 ins->inst_imm = imm;
2140 if (!ppc_is_imm16 (ins->inst_imm)) {
2141 NEW_INS (cfg, temp, OP_ICONST);
2142 temp->inst_c0 = ins->inst_imm;
2143 temp->dreg = mono_alloc_ireg (cfg);
2144 ins->sreg2 = temp->dreg;
2145 ins->opcode = map_to_reg_reg_op (ins->opcode);
2148 case OP_LOCALLOC_IMM:
2149 NEW_INS (cfg, temp, OP_ICONST);
2150 temp->inst_c0 = ins->inst_imm;
2151 temp->dreg = mono_alloc_ireg (cfg);
2152 ins->sreg1 = temp->dreg;
2153 ins->opcode = OP_LOCALLOC;
2155 case OP_LOAD_MEMBASE:
2156 case OP_LOADI4_MEMBASE:
2157 case OP_LOADI8_MEMBASE:
2158 case OP_LOADU4_MEMBASE:
2159 case OP_LOADI2_MEMBASE:
2160 case OP_LOADU2_MEMBASE:
2161 case OP_LOADI1_MEMBASE:
2162 case OP_LOADU1_MEMBASE:
2163 case OP_LOADR4_MEMBASE:
2164 case OP_LOADR8_MEMBASE:
2165 case OP_STORE_MEMBASE_REG:
2166 case OP_STOREI8_MEMBASE_REG:
2167 case OP_STOREI4_MEMBASE_REG:
2168 case OP_STOREI2_MEMBASE_REG:
2169 case OP_STOREI1_MEMBASE_REG:
2170 case OP_STORER4_MEMBASE_REG:
2171 case OP_STORER8_MEMBASE_REG:
2172 /* we can do two things: load the immed in a register
2173 * and use an indexed load, or see if the immed can be
2174 * represented as an ad_imm + a load with a smaller offset
2175 * that fits. We just do the first for now, optimize later.
2177 if (ppc_is_imm16 (ins->inst_offset))
2179 NEW_INS (cfg, temp, OP_ICONST);
2180 temp->inst_c0 = ins->inst_offset;
2181 temp->dreg = mono_alloc_ireg (cfg);
2182 ins->sreg2 = temp->dreg;
2183 ins->opcode = map_to_reg_reg_op (ins->opcode);
2185 case OP_STORE_MEMBASE_IMM:
2186 case OP_STOREI1_MEMBASE_IMM:
2187 case OP_STOREI2_MEMBASE_IMM:
2188 case OP_STOREI4_MEMBASE_IMM:
2189 case OP_STOREI8_MEMBASE_IMM:
2190 NEW_INS (cfg, temp, OP_ICONST);
2191 temp->inst_c0 = ins->inst_imm;
2192 temp->dreg = mono_alloc_ireg (cfg);
2193 ins->sreg1 = temp->dreg;
2194 ins->opcode = map_to_reg_reg_op (ins->opcode);
2196 goto loop_start; /* make it handle the possibly big ins->inst_offset */
/* float constants: store the value in memory and load it from there */
2199 NEW_INS (cfg, temp, OP_ICONST);
2200 temp->inst_c0 = (gulong)ins->inst_p0;
2201 temp->dreg = mono_alloc_ireg (cfg);
2202 ins->inst_basereg = temp->dreg;
2203 ins->inst_offset = 0;
2204 ins->opcode = ins->opcode == OP_R4CONST? OP_LOADR4_MEMBASE: OP_LOADR8_MEMBASE;
2206 /* make it handle the possibly big ins->inst_offset
2207 * later optimize to use lis + load_membase
2213 bb->last_ins = last_ins;
2214 bb->max_vreg = cfg->next_vreg;
/*
 * emit_float_to_int:
 * Emit code converting the FP register `sreg` to an integer of `size` bytes
 * in `dreg`: fctidz/fctiwz into ppc_f0, spill to the per-method conversion
 * slot, reload as an integer, then truncate/sign-extend to the target width.
 */
2218 emit_float_to_int (MonoCompile *cfg, guchar *code, int dreg, int sreg, int size, gboolean is_signed)
2220 int offset = cfg->arch.fp_conv_var_offset;
2222 /* sreg is a float, dreg is an integer reg. ppc_f0 is used a scratch */
2224 ppc_fctidz (code, ppc_f0, sreg);
2227 ppc_fctiwz (code, ppc_f0, sreg);
/* spill slot may be beyond a 16-bit displacement: compute the address if so */
2230 if (ppc_is_imm16 (offset + sub_offset)) {
2231 ppc_stfd (code, ppc_f0, offset, cfg->frame_reg);
2233 ppc_load_reg (code, dreg, offset + sub_offset, cfg->frame_reg);
2235 ppc_lwz (code, dreg, offset + sub_offset, cfg->frame_reg);
2237 ppc_load (code, dreg, offset);
2238 ppc_add (code, dreg, dreg, cfg->frame_reg);
2239 ppc_stfd (code, ppc_f0, 0, dreg);
2241 ppc_load_reg (code, dreg, sub_offset, dreg);
2243 ppc_lwz (code, dreg, sub_offset, dreg);
/* narrow the result: mask for unsigned widths, sign-extend for signed ones */
2247 ppc_andid (code, dreg, dreg, 0xff);
2249 ppc_andid (code, dreg, dreg, 0xffff);
2251 ppc_clrldi (code, dreg, dreg, 32);
2254 ppc_extsb (code, dreg, dreg);
2256 ppc_extsh (code, dreg, dreg);
2258 ppc_extsw (code, dreg, dreg);
2265 const guchar *target;
2270 #define is_call_imm(diff) ((glong)(diff) >= -33554432 && (glong)(diff) <= 33554431)
/*
 * search_thunk_slot:
 * Callback run over each code-manager chunk. Looks for an existing branch
 * thunk loading pdata->target (so it can be reused) or a free 16-byte slot
 * (so a new lis/ori/mtctr/bcctr thunk can be emitted), then patches
 * pdata->code to branch to the thunk. The chunk is skipped unless it is
 * reachable from pdata->code with a 26-bit relative branch.
 */
2273 search_thunk_slot (void *data, int csize, int bsize, void *user_data) {
2274 PatchData *pdata = (PatchData*)user_data;
2275 guchar *code = data;
2276 guint32 *thunks = data;
2277 guint32 *endthunks = (guint32*)(code + bsize);
2281 int difflow, diffhigh;
2283 /* always ensure a call from pdata->code can reach to the thunks without further thunks */
2284 difflow = (char*)pdata->code - (char*)thunks;
2285 diffhigh = (char*)pdata->code - (char*)endthunks;
2286 if (!((is_call_imm (thunks) && is_call_imm (endthunks)) || (is_call_imm (difflow) && is_call_imm (diffhigh))))
/* build the instruction pattern a matching thunk would start with */
2289 templ = (guchar*)load;
2290 ppc_load (templ, ppc_r0, pdata->target);
2292 g_assert_not_reached ();
2294 //g_print ("thunk nentries: %d\n", ((char*)endthunks - (char*)thunks)/16);
2295 if ((pdata->found == 2) || (pdata->code >= code && pdata->code <= code + csize)) {
2296 while (thunks < endthunks) {
2297 //g_print ("looking for target: %p at %p (%08x-%08x)\n", pdata->target, thunks, thunks [0], thunks [1]);
2298 if ((thunks [0] == load [0]) && (thunks [1] == load [1])) {
/* existing thunk for this target: reuse it */
2299 ppc_patch (pdata->code, (guchar*)thunks);
2302 static int num_thunks = 0;
2304 if ((num_thunks % 20) == 0)
2305 g_print ("num_thunks lookup: %d\n", num_thunks);
2308 } else if ((thunks [0] == 0) && (thunks [1] == 0)) {
2309 /* found a free slot instead: emit thunk */
2310 code = (guchar*)thunks;
2311 g_assert_not_reached ();
2312 ppc_lis (code, ppc_r0, (gulong)(pdata->target) >> 16);
2313 ppc_ori (code, ppc_r0, ppc_r0, (gulong)(pdata->target) & 0xffff);
2314 ppc_mtctr (code, ppc_r0);
2315 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
2316 mono_arch_flush_icache ((guchar*)thunks, 16);
2318 ppc_patch (pdata->code, (guchar*)thunks);
2321 static int num_thunks = 0;
2323 if ((num_thunks % 20) == 0)
2324 g_print ("num_thunks: %d\n", num_thunks);
2328 /* skip 16 bytes, the size of the thunk */
2332 //g_print ("failed thunk lookup for %p from %p at %p (%d entries)\n", pdata->target, pdata->code, data, count);
/*
 * handle_thunk:
 * Patch the branch at `code` to reach `target` through a thunk when the
 * target is out of direct branch range. Walks the domain's code chunks via
 * search_thunk_slot under the domain lock; asserts if no slot was found.
 */
2338 handle_thunk (int absolute, guchar *code, const guchar *target) {
2339 MonoDomain *domain = mono_domain_get ();
2343 pdata.target = target;
2344 pdata.absolute = absolute;
2347 mono_domain_lock (domain);
2348 mono_code_manager_foreach (domain->code_mp, search_thunk_slot, &pdata);
2351 /* this uses the first available slot */
/* second pass: pdata.found == 2 allows taking any reachable slot */
2353 mono_code_manager_foreach (domain->code_mp, search_thunk_slot, &pdata);
2355 mono_domain_unlock (domain);
2357 if (pdata.found != 1)
2358 g_print ("thunk failed for %p from %p\n", target, code);
2359 g_assert (pdata.found == 1);
/* Overwrite a single 32-bit instruction in place and flush the icache line. */
2363 patch_ins (guint8 *code, guint32 ins)
2365 *(guint32*)code = ins;
2366 mono_arch_flush_icache (code, 4);
/*
 * ppc_patch_full:
 * Redirect the branch/load instruction at `code` to `target`. For b/bl
 * (opcode 18) it rewrites the 26-bit displacement, preferring PC-relative,
 * then absolute, then a thunk when out of range. bc (opcode 16) gets its
 * 16-bit displacement rewritten. Register-indirect call sequences
 * (lis/ori/.../mtlr/blrl) have their load-immediate sequence rewritten.
 * `is_fd` selects function-descriptor dereferencing on ABIs that use them.
 */
2370 ppc_patch_full (guchar *code, const guchar *target, gboolean is_fd)
2372 guint32 ins = *(guint32*)code;
2373 guint32 prim = ppc_opcode (ins);
2376 #ifdef DEBUG_PATCHING
2377 g_print ("patching %p (0x%08x) to point to %p\n", code, ins, target);
2380 // prefer relative branches, they are more position independent (e.g. for AOT compilation).
2381 gint diff = target - code;
/* 33554431 == 2^25 - 1, the largest positive 26-bit branch displacement */
2384 if (diff <= 33554431){
2385 ins = (18 << 26) | (diff) | (ins & 1);
2386 patch_ins (code, ins);
2390 /* diff between 0 and -33554432 */
2391 if (diff >= -33554432){
2392 ins = (18 << 26) | (diff & ~0xfc000000) | (ins & 1);
2393 patch_ins (code, ins);
/* relative out of range: try an absolute branch (AA bit = 2) */
2398 if ((glong)target >= 0){
2399 if ((glong)target <= 33554431){
2400 ins = (18 << 26) | ((gulong) target) | (ins & 1) | 2;
2401 patch_ins (code, ins);
2405 if ((glong)target >= -33554432){
2406 ins = (18 << 26) | (((gulong)target) & ~0xfc000000) | (ins & 1) | 2;
2407 patch_ins (code, ins);
/* both out of range: route through a 16-byte thunk */
2412 handle_thunk (TRUE, code, target);
2415 g_assert_not_reached ();
/* conditional branch (bc): rewrite the 16-bit BD field, keep AA/LK bits */
2423 guint32 li = (gulong)target;
2424 ins = (ins & 0xffff0000) | (ins & 3);
2425 ovf = li & 0xffff0000;
2426 if (ovf != 0 && ovf != 0xffff0000)
2427 g_assert_not_reached ();
2430 // FIXME: assert the top bits of li are 0
2432 gint diff = target - code;
2433 ins = (ins & 0xffff0000) | (ins & 3);
2434 ovf = diff & 0xffff0000;
2435 if (ovf != 0 && ovf != 0xffff0000)
2436 g_assert_not_reached ();
2440 patch_ins (code, ins);
2444 if (prim == 15 || ins == 0x4e800021 || ins == 0x4e800020 || ins == 0x4e800420) {
2445 guint32 *seq = (guint32*)code;
2446 guint32 *branch_ins;
2448 /* the trampoline code will try to patch the blrl, blr, bcctr */
2449 if (ins == 0x4e800021 || ins == 0x4e800020 || ins == 0x4e800420) {
2451 if (ppc_opcode (seq [-3]) == 58 || ppc_opcode (seq [-3]) == 31) /* ld || mr */
2456 if (ppc_opcode (seq [5]) == 58 || ppc_opcode (seq [5]) == 31) /* ld || mr */
2457 branch_ins = seq + 8;
2459 branch_ins = seq + 6;
2462 seq = (guint32*)code;
2463 /* this is the lis/ori/sldi/oris/ori/(ld/ld|mr/nop)/mtlr/blrl sequence */
2464 g_assert (mono_ppc_is_direct_call_sequence (branch_ins));
2466 if (ppc_opcode (seq [5]) == 58) { /* ld */
2467 g_assert (ppc_opcode (seq [6]) == 58); /* ld */
/* not a function descriptor: replace the ld pair with mr r0,r11 */
2470 guint8 *buf = (guint8*)&seq [5];
2471 ppc_mr (buf, ppc_r0, ppc_r11);
2476 target = mono_get_addr_from_ftnptr ((gpointer)target);
2479 /* FIXME: make this thread safe */
2480 /* FIXME: we're assuming we're using r11 here */
2481 ppc_load_sequence (code, ppc_r11, target);
2482 mono_arch_flush_icache ((guint8*)seq, 28);
2484 g_assert_not_reached ();
/* Convenience wrapper: patch without function-descriptor handling. */
2489 ppc_patch (guchar *code, const guchar *target)
2491 ppc_patch_full (code, target, FALSE);
/* Move the hardware return value into the call instruction's dreg;
 * only the FP-call case (f1 -> dreg) is visible in this excerpt. */
2495 emit_move_return_value (MonoCompile *cfg, MonoInst *ins, guint8 *code)
2497 switch (ins->opcode) {
2500 case OP_FCALL_MEMBASE:
2501 if (ins->dreg != ppc_f1)
2502 ppc_fmr (code, ins->dreg, ppc_f1);
2510 * emit_load_volatile_arguments:
2512 * Load volatile arguments from the stack to the original input registers.
2513 * Required before a tail call.
2516 emit_load_volatile_arguments (MonoCompile *cfg, guint8 *code)
2518 MonoMethod *method = cfg->method;
2519 MonoMethodSignature *sig;
2523 int struct_index = 0;
2525 sig = mono_method_signature (method);
2527 /* This is the opposite of the code in emit_prolog */
2531 cinfo = calculate_sizes (sig, sig->pinvoke);
/* struct return: reload the hidden return-buffer pointer */
2533 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
2534 ArgInfo *ainfo = &cinfo->ret;
2535 inst = cfg->vret_addr;
2536 g_assert (ppc_is_imm16 (inst->inst_offset));
2537 ppc_load_reg (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2539 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
2540 ArgInfo *ainfo = cinfo->args + i;
2541 inst = cfg->args [pos];
2543 g_assert (inst->opcode != OP_REGVAR);
2544 g_assert (ppc_is_imm16 (inst->inst_offset));
2546 switch (ainfo->regtype) {
2547 case RegTypeGeneral:
2548 switch (ainfo->size) {
2550 ppc_lbz (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2553 ppc_lhz (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2556 ppc_lwz (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2559 ppc_load_reg (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2565 switch (ainfo->size) {
2567 ppc_lfs (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2570 ppc_lfd (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2573 g_assert_not_reached ();
/* stack-passed argument: copy it back to its caller-frame slot via r0 */
2578 MonoType *type = mini_type_get_underlying_type (cfg->generic_sharing_context,
2579 &inst->klass->byval_arg);
2581 if (MONO_TYPE_IS_REFERENCE (type) || type->type == MONO_TYPE_I8) {
2582 ppc_load_reg (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
2583 ppc_store_reg (code, ppc_r0, ainfo->offset, ainfo->reg);
2584 } else if (type->type == MONO_TYPE_I4) {
2585 ppc_lwz (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
2586 ppc_stw (code, ppc_r0, ainfo->offset, ainfo->reg);
2594 case RegTypeStructByVal: {
2603 * Darwin pinvokes needs some special handling
2604 * for 1 and 2 byte arguments
2606 if (method->signature->pinvoke)
2607 size = mono_class_native_size (inst->klass, NULL);
2608 if (size == 1 || size == 2) {
2613 for (j = 0; j < ainfo->size; ++j) {
2614 ppc_load_reg (code, ainfo->reg + j,
2615 inst->inst_offset + j * sizeof (gpointer),
2616 inst->inst_basereg);
2617 /* FIXME: shift to the right */
2624 case RegTypeStructByAddr: {
2625 MonoInst *addr = cfg->tailcall_valuetype_addrs [struct_index];
2627 g_assert (ppc_is_imm16 (addr->inst_offset));
2628 g_assert (!ainfo->offset);
2629 ppc_load_reg (code, ainfo->reg, addr->inst_offset, addr->inst_basereg);
2636 g_assert_not_reached ();
2647 /* This must be kept in sync with emit_load_volatile_arguments(). */
/* Upper-bound native code length for one IR instruction, from the opcode
 * spec table; OP_JMP additionally accounts for the argument reloads that
 * emit_load_volatile_arguments() will emit. */
2649 ins_native_length (MonoCompile *cfg, MonoInst *ins)
2651 int len = ((guint8 *)ins_get_spec (ins->opcode))[MONO_INST_LEN];
2652 MonoMethodSignature *sig;
2657 if (ins->opcode != OP_JMP)
2660 call = (MonoCallInst*)ins;
2661 sig = mono_method_signature (cfg->method);
2662 cinfo = calculate_sizes (sig, sig->pinvoke);
2664 if (MONO_TYPE_ISSTRUCT (sig->ret))
2666 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
2667 ArgInfo *ainfo = cinfo->args + i;
2669 switch (ainfo->regtype) {
2670 case RegTypeGeneral:
2679 case RegTypeStructByVal:
2680 len += 4 * ainfo->size;
2683 case RegTypeStructByAddr:
2688 g_assert_not_reached ();
/* Grow the stack by the frame-aligned param area, keeping the ABI back chain
 * valid: reload the saved SP word and store it with the SP update (stdu). */
2698 emit_reserve_param_area (MonoCompile *cfg, guint8 *code)
2700 int size = cfg->param_area;
2702 size += MONO_ARCH_FRAME_ALIGNMENT - 1;
2703 size &= -MONO_ARCH_FRAME_ALIGNMENT;
2708 ppc_load_reg (code, ppc_r0, 0, ppc_sp);
2709 if (ppc_is_imm16 (-size)) {
2710 ppc_store_reg_update (code, ppc_r0, -size, ppc_sp);
2712 ppc_load (code, ppc_r11, -size);
2713 ppc_store_reg_update_indexed (code, ppc_r0, ppc_sp, ppc_r11);
/*
 * emit_unreserve_param_area:
 * Undoes emit_reserve_param_area(): pops the aligned parameter area
 * and re-stores the saved back chain at the restored (higher) stack
 * pointer.  The rounding must match emit_reserve_param_area() exactly.
 */
2720 emit_unreserve_param_area (MonoCompile *cfg, guint8 *code)
2722 int size = cfg->param_area;
2724 size += MONO_ARCH_FRAME_ALIGNMENT - 1;
2725 size &= -MONO_ARCH_FRAME_ALIGNMENT;
/* r0 <- back chain of the frame being popped */
2730 ppc_load_reg (code, ppc_r0, 0, ppc_sp);
2731 if (ppc_is_imm16 (size)) {
2732 ppc_store_reg_update (code, ppc_r0, size, ppc_sp);
2734 ppc_load (code, ppc_r11, size);
2735 ppc_store_reg_update_indexed (code, ppc_r0, ppc_sp, ppc_r11);
/*
 * mono_arch_output_basic_block:
 * Emits PPC64 native code for every MonoInst in BB, growing
 * cfg->native_code on demand.  Only the function head and the
 * per-instruction loop setup are visible at this point; the giant
 * opcode switch follows, with many interior lines missing from this
 * chunk.
 */
2742 mono_arch_output_basic_block (MonoCompile *cfg, MonoBasicBlock *bb)
2744 MonoInst *ins, *next;
2747 guint8 *code = cfg->native_code + cfg->code_len;
2748 MonoInst *last_ins = NULL;
2749 guint last_offset = 0;
2753 /* we don't align basic blocks of loops on ppc */
2755 if (cfg->verbose_level > 2)
2756 g_print ("Basic block %d starting at offset 0x%x\n", bb->block_num, bb->native_offset);
2758 cpos = bb->max_offset;
2760 if (cfg->prof_options & MONO_PROFILE_COVERAGE) {
2761 //MonoCoverageInfo *cov = mono_get_coverage_info (cfg->method);
2762 //g_assert (!mono_compile_aot);
2765 // cov->data [bb->dfn].iloffset = bb->cil_code - cfg->cil_code;
2766 /* this is not thread safe, but good enough */
2767 /* FIXME: how to handle overflows? */
2768 //x86_inc_mem (code, &cov->data [bb->dfn].count);
2771 MONO_BB_FOR_EACH_INS (bb, ins) {
2772 offset = code - cfg->native_code;
/* worst-case byte length of this instruction, for the buffer check */
2774 max_len = ins_native_length (cfg, ins);
/* double the code buffer when fewer than max_len+16 bytes remain */
2776 if (offset > (cfg->code_size - max_len - 16)) {
2777 cfg->code_size *= 2;
2778 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
2779 code = cfg->native_code + offset;
2781 // if (ins->cil_code)
2782 // g_print ("cil code\n");
2783 mono_debug_record_line_number (cfg, ins, offset);
2785 switch (ins->opcode) {
2786 case OP_RELAXED_NOP:
2789 case OP_DUMMY_STORE:
2790 case OP_NOT_REACHED:
2794 emit_tls_access (code, ins->dreg, ins->inst_offset);
2797 ppc_mullw (code, ppc_r0, ins->sreg1, ins->sreg2);
2798 ppc_mulhw (code, ppc_r3, ins->sreg1, ins->sreg2);
2799 ppc_mr (code, ppc_r4, ppc_r0);
2802 ppc_mullw (code, ppc_r0, ins->sreg1, ins->sreg2);
2803 ppc_mulhwu (code, ppc_r3, ins->sreg1, ins->sreg2);
2804 ppc_mr (code, ppc_r4, ppc_r0);
2806 case OP_MEMORY_BARRIER:
2809 case OP_STOREI1_MEMBASE_REG:
2810 if (ppc_is_imm16 (ins->inst_offset)) {
2811 ppc_stb (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
2813 ppc_load (code, ppc_r0, ins->inst_offset);
2814 ppc_stbx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
2817 case OP_STOREI2_MEMBASE_REG:
2818 if (ppc_is_imm16 (ins->inst_offset)) {
2819 ppc_sth (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
2821 ppc_load (code, ppc_r0, ins->inst_offset);
2822 ppc_sthx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
2825 case OP_STOREI4_MEMBASE_REG:
2826 if (ppc_is_imm16 (ins->inst_offset)) {
2827 ppc_stw (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
2829 ppc_load (code, ppc_r0, ins->inst_offset);
2830 ppc_stwx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
2833 case OP_STORE_MEMBASE_REG:
2834 case OP_STOREI8_MEMBASE_REG:
2835 if (ppc_is_imm16 (ins->inst_offset)) {
2836 ppc_store_reg (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
2838 ppc_load (code, ppc_r0, ins->inst_offset);
2839 ppc_store_reg_indexed (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
2842 case OP_STOREI1_MEMINDEX:
2843 ppc_stbx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
2845 case OP_STOREI2_MEMINDEX:
2846 ppc_sthx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
2848 case OP_STOREI4_MEMINDEX:
2849 ppc_stwx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
2851 case OP_STORE_MEMINDEX:
2852 case OP_STOREI8_MEMINDEX:
2853 ppc_stdx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
2856 g_assert_not_reached ();
/* Load opcodes.  The *_MEMBASE forms use base+displacement addressing,
 * falling back to an indexed load through r0 when the displacement does
 * not fit in a signed 16-bit immediate; the *_MEMINDEX forms use
 * base+index-register addressing.  Loads that have no sign-extending
 * machine form are done as a zero-extending load followed by an
 * explicit sign extension. */
2858 case OP_LOAD_MEMBASE:
2859 case OP_LOADI8_MEMBASE:
2860 if (ppc_is_imm16 (ins->inst_offset)) {
2861 ppc_load_reg (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
2863 ppc_load (code, ppc_r0, ins->inst_offset);
2864 ppc_load_reg_indexed (code, ins->dreg, ins->inst_basereg, ppc_r0);
2867 case OP_LOADI4_MEMBASE:
2868 case OP_LOADU4_MEMBASE:
2869 if (ppc_is_imm16 (ins->inst_offset)) {
2870 ppc_lwz (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
2872 ppc_load (code, ppc_r0, ins->inst_offset);
2873 ppc_lwzx (code, ins->dreg, ins->inst_basereg, ppc_r0);
/* lwz/lwzx zero-extend: sign-extend the word for the signed variant */
2875 if (ins->opcode == OP_LOADI4_MEMBASE)
2876 ppc_extsw (code, ins->dreg, ins->dreg);
2878 case OP_LOADI1_MEMBASE:
2879 case OP_LOADU1_MEMBASE:
2880 if (ppc_is_imm16 (ins->inst_offset)) {
2881 ppc_lbz (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
2883 ppc_load (code, ppc_r0, ins->inst_offset);
2884 ppc_lbzx (code, ins->dreg, ins->inst_basereg, ppc_r0);
2886 if (ins->opcode == OP_LOADI1_MEMBASE)
2887 ppc_extsb (code, ins->dreg, ins->dreg);
2889 case OP_LOADU2_MEMBASE:
2890 if (ppc_is_imm16 (ins->inst_offset)) {
2891 ppc_lhz (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
2893 ppc_load (code, ppc_r0, ins->inst_offset);
2894 ppc_lhzx (code, ins->dreg, ins->inst_basereg, ppc_r0);
2897 case OP_LOADI2_MEMBASE:
2898 if (ppc_is_imm16 (ins->inst_offset)) {
/* fixed: displacement and base register were swapped here -- every
 * other displacement-form load in this switch passes the offset
 * before the base register */
2899 ppc_lha (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
2901 ppc_load (code, ppc_r0, ins->inst_offset);
2902 ppc_lhax (code, ins->dreg, ins->inst_basereg, ppc_r0);
2905 case OP_LOAD_MEMINDEX:
2906 case OP_LOADI8_MEMINDEX:
2907 ppc_ldx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
2909 case OP_LOADI4_MEMINDEX:
2910 case OP_LOADU4_MEMINDEX:
2911 ppc_lwzx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
2912 if (ins->opcode == OP_LOADI4_MEMINDEX)
/* fixed: was ppc_extsb (sign-extends only the low byte); a signed
 * 32-bit load must sign-extend the low word, exactly as
 * OP_LOADI4_MEMBASE does with ppc_extsw above */
2913 ppc_extsw (code, ins->dreg, ins->dreg);
2915 case OP_LOADU2_MEMINDEX:
2916 ppc_lhzx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
2918 case OP_LOADI2_MEMINDEX:
2919 ppc_lhax (code, ins->dreg, ins->sreg2, ins->inst_basereg);
2921 case OP_LOADU1_MEMINDEX:
2922 ppc_lbzx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
2924 case OP_LOADI1_MEMINDEX:
/* no lbax exists: zero-extending load, then explicit sign extension */
2925 ppc_lbzx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
2926 ppc_extsb (code, ins->dreg, ins->dreg);
2928 case OP_ICONV_TO_I1:
2929 case OP_LCONV_TO_I1:
2930 ppc_extsb (code, ins->dreg, ins->sreg1);
2932 case OP_ICONV_TO_I2:
2933 case OP_LCONV_TO_I2:
2934 ppc_extsh (code, ins->dreg, ins->sreg1);
2936 case OP_ICONV_TO_I4:
2938 ppc_extsw (code, ins->dreg, ins->sreg1);
2940 case OP_ICONV_TO_U1:
2941 case OP_LCONV_TO_U1:
2942 ppc_clrlwi (code, ins->dreg, ins->sreg1, 24);
2944 case OP_ICONV_TO_U2:
2945 case OP_LCONV_TO_U2:
2946 ppc_clrlwi (code, ins->dreg, ins->sreg1, 16);
2948 case OP_ICONV_TO_U4:
2950 ppc_clrldi (code, ins->dreg, ins->sreg1, 32);
2952 case OP_ICONV_TO_R4:
2953 case OP_ICONV_TO_R8:
2954 case OP_LCONV_TO_R4:
2955 case OP_LCONV_TO_R8: {
2957 if (ins->opcode == OP_ICONV_TO_R4 || ins->opcode == OP_ICONV_TO_R8) {
2958 ppc_extsw (code, ppc_r0, ins->sreg1);
2963 ppc_store_reg (code, tmp, -8, ppc_r1);
2964 ppc_lfd (code, ins->dreg, -8, ppc_r1);
2965 ppc_fcfid (code, ins->dreg, ins->dreg);
2966 if (ins->opcode == OP_ICONV_TO_R4 || ins->opcode == OP_LCONV_TO_R4)
2967 ppc_frsp (code, ins->dreg, ins->dreg);
2973 L = (ins->opcode == OP_ICOMPARE) ? 0 : 1;
2975 if (next && compare_opcode_is_unsigned (next->opcode))
2976 ppc_cmpl (code, 0, L, ins->sreg1, ins->sreg2);
2978 ppc_cmp (code, 0, L, ins->sreg1, ins->sreg2);
2980 case OP_COMPARE_IMM:
2981 case OP_ICOMPARE_IMM:
2982 case OP_LCOMPARE_IMM:
2983 L = (ins->opcode == OP_ICOMPARE_IMM) ? 0 : 1;
2985 if (next && compare_opcode_is_unsigned (next->opcode)) {
2986 if (ppc_is_uimm16 (ins->inst_imm)) {
2987 ppc_cmpli (code, 0, L, ins->sreg1, (ins->inst_imm & 0xffff));
2989 g_assert_not_reached ();
2992 if (ppc_is_imm16 (ins->inst_imm)) {
2993 ppc_cmpi (code, 0, L, ins->sreg1, (ins->inst_imm & 0xffff));
2995 g_assert_not_reached ();
3004 ppc_addco (code, ins->dreg, ins->sreg1, ins->sreg2);
3008 ppc_add (code, ins->dreg, ins->sreg1, ins->sreg2);
3012 ppc_adde (code, ins->dreg, ins->sreg1, ins->sreg2);
3015 if (ppc_is_imm16 (ins->inst_imm)) {
3016 ppc_addic (code, ins->dreg, ins->sreg1, ins->inst_imm);
3018 g_assert_not_reached ();
3024 if (ppc_is_imm16 (ins->inst_imm)) {
3025 ppc_addi (code, ins->dreg, ins->sreg1, ins->inst_imm);
3027 g_assert_not_reached ();
3031 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3033 ppc_addo (code, ins->dreg, ins->sreg1, ins->sreg2);
3034 ppc_mfspr (code, ppc_r0, ppc_xer);
3035 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3036 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3038 case OP_IADD_OVF_UN:
3039 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3041 ppc_addco (code, ins->dreg, ins->sreg1, ins->sreg2);
3042 ppc_mfspr (code, ppc_r0, ppc_xer);
3043 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3044 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3048 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3050 ppc_subfo (code, ins->dreg, ins->sreg2, ins->sreg1);
3051 ppc_mfspr (code, ppc_r0, ppc_xer);
3052 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3053 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3055 case OP_ISUB_OVF_UN:
3056 case OP_LSUB_OVF_UN:
3057 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3059 ppc_subfc (code, ins->dreg, ins->sreg2, ins->sreg1);
3060 ppc_mfspr (code, ppc_r0, ppc_xer);
3061 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3062 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE, PPC_BR_EQ, "OverflowException");
3064 case OP_ADD_OVF_CARRY:
3065 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3067 ppc_addeo (code, ins->dreg, ins->sreg1, ins->sreg2);
3068 ppc_mfspr (code, ppc_r0, ppc_xer);
3069 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3070 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3072 case OP_ADD_OVF_UN_CARRY:
3073 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3075 ppc_addeo (code, ins->dreg, ins->sreg1, ins->sreg2);
3076 ppc_mfspr (code, ppc_r0, ppc_xer);
3077 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3078 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3080 case OP_SUB_OVF_CARRY:
3081 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3083 ppc_subfeo (code, ins->dreg, ins->sreg2, ins->sreg1);
3084 ppc_mfspr (code, ppc_r0, ppc_xer);
3085 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3086 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3088 case OP_SUB_OVF_UN_CARRY:
3089 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3091 ppc_subfeo (code, ins->dreg, ins->sreg2, ins->sreg1);
3092 ppc_mfspr (code, ppc_r0, ppc_xer);
3093 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3094 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE, PPC_BR_EQ, "OverflowException");
3098 ppc_subfco (code, ins->dreg, ins->sreg2, ins->sreg1);
3102 ppc_subf (code, ins->dreg, ins->sreg2, ins->sreg1);
3106 ppc_subfe (code, ins->dreg, ins->sreg2, ins->sreg1);
3111 // we add the negated value
3112 if (ppc_is_imm16 (-ins->inst_imm))
3113 ppc_addi (code, ins->dreg, ins->sreg1, -ins->inst_imm);
3115 g_assert_not_reached ();
3119 g_assert (ppc_is_imm16 (ins->inst_imm));
3120 ppc_subfic (code, ins->dreg, ins->sreg1, ins->inst_imm);
3123 ppc_subfze (code, ins->dreg, ins->sreg1);
3127 /* FIXME: the ppc macros as inconsistent here: put dest as the first arg! */
3128 ppc_and (code, ins->sreg1, ins->dreg, ins->sreg2);
3133 if (!(ins->inst_imm & 0xffff0000)) {
3134 ppc_andid (code, ins->sreg1, ins->dreg, ins->inst_imm);
3135 } else if (!(ins->inst_imm & 0xffff)) {
3136 ppc_andisd (code, ins->sreg1, ins->dreg, ((guint32)ins->inst_imm >> 16));
3138 g_assert_not_reached ();
3143 guint8 *divisor_is_m1;
3144 /* XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3146 ppc_cmpi (code, 0, 1, ins->sreg2, -1);
3147 divisor_is_m1 = code;
3148 ppc_bc (code, PPC_BR_FALSE | PPC_BR_LIKELY, PPC_BR_EQ, 0);
3149 ppc_lis (code, ppc_r0, 0x8000);
3150 if (ins->opcode == OP_LDIV)
3151 ppc_sldi (code, ppc_r0, ppc_r0, 32);
3152 ppc_cmp (code, 0, 1, ins->sreg1, ppc_r0);
3153 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE, PPC_BR_EQ, "ArithmeticException");
3154 ppc_patch (divisor_is_m1, code);
3155 /* XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3157 if (ins->opcode == OP_IDIV)
3158 ppc_divwod (code, ins->dreg, ins->sreg1, ins->sreg2);
3160 ppc_divdod (code, ins->dreg, ins->sreg1, ins->sreg2);
3161 ppc_mfspr (code, ppc_r0, ppc_xer);
3162 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3163 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "DivideByZeroException");
3168 if (ins->opcode == OP_IDIV_UN)
3169 ppc_divwuod (code, ins->dreg, ins->sreg1, ins->sreg2);
3171 ppc_divduod (code, ins->dreg, ins->sreg1, ins->sreg2);
3172 ppc_mfspr (code, ppc_r0, ppc_xer);
3173 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3174 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "DivideByZeroException");
3180 g_assert_not_reached ();
3183 ppc_or (code, ins->dreg, ins->sreg1, ins->sreg2);
3188 if (!(ins->inst_imm & 0xffff0000)) {
/* NOTE(review): the ori call below passes sreg1 first while the oris
 * call passes dreg first, even though both should compute
 * dreg = sreg1 | imm.  The andid/andisd and xori/xoris calls nearby
 * all use the sreg1-first order, so the oris call looks like it has
 * its operands swapped -- verify against the ppc_ori/ppc_oris macro
 * definitions in ppc-codegen.h. */
3189 ppc_ori (code, ins->sreg1, ins->dreg, ins->inst_imm);
3190 } else if (!(ins->inst_imm & 0xffff)) {
3191 ppc_oris (code, ins->dreg, ins->sreg1, ((guint32)(ins->inst_imm) >> 16));
/* immediates needing both halves are not handled inline */
3193 g_assert_not_reached ();
3198 ppc_xor (code, ins->dreg, ins->sreg1, ins->sreg2);
3203 if (!(ins->inst_imm & 0xffff0000)) {
3204 ppc_xori (code, ins->sreg1, ins->dreg, ins->inst_imm);
3205 } else if (!(ins->inst_imm & 0xffff)) {
3206 ppc_xoris (code, ins->sreg1, ins->dreg, ((guint32)(ins->inst_imm) >> 16));
3208 g_assert_not_reached ();
3213 ppc_sld (code, ins->dreg, ins->sreg1, ins->sreg2);
3218 ppc_sldi (code, ins->dreg, ins->sreg1, (ins->inst_imm & 0x3f));
3221 ppc_sraw (code, ins->dreg, ins->sreg1, ins->sreg2);
3224 ppc_srad (code, ins->dreg, ins->sreg1, ins->sreg2);
3228 ppc_sradi (code, ins->dreg, ins->sreg1, (ins->inst_imm & 0x3f));
3231 ppc_srawi (code, ins->dreg, ins->sreg1, (ins->inst_imm & 0x1f));
3234 case OP_LSHR_UN_IMM:
3235 if (ins->inst_imm & 0x3f)
3236 ppc_srdi (code, ins->dreg, ins->sreg1, (ins->inst_imm & 0x3f));
3238 ppc_mr (code, ins->dreg, ins->sreg1);
3240 case OP_ISHR_UN_IMM:
3241 if (ins->inst_imm & 0x1f)
3242 ppc_srwi (code, ins->dreg, ins->sreg1, (ins->inst_imm & 0x1f));
3244 ppc_mr (code, ins->dreg, ins->sreg1);
3247 ppc_srw (code, ins->dreg, ins->sreg1, ins->sreg2);
3250 ppc_srd (code, ins->dreg, ins->sreg1, ins->sreg2);
3254 ppc_not (code, ins->dreg, ins->sreg1);
3258 ppc_neg (code, ins->dreg, ins->sreg1);
3262 ppc_mulld (code, ins->dreg, ins->sreg1, ins->sreg2);
3267 if (ppc_is_imm16 (ins->inst_imm)) {
3268 ppc_mulli (code, ins->dreg, ins->sreg1, ins->inst_imm);
3270 g_assert_not_reached ();
3275 /* we annot use mcrxr, since it's not implemented on some processors
3276 * XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3278 if (ins->opcode == OP_IMUL_OVF)
3279 ppc_mullwo (code, ins->dreg, ins->sreg1, ins->sreg2);
3281 ppc_mulldo (code, ins->dreg, ins->sreg1, ins->sreg2);
3282 ppc_mfspr (code, ppc_r0, ppc_xer);
3283 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3284 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3286 case OP_IMUL_OVF_UN:
3287 case OP_LMUL_OVF_UN:
3288 /* we first multiply to get the high word and compare to 0
3289 * to set the flags, then the result is discarded and then
3290 * we multiply to get the lower * bits result
3292 if (ins->opcode == OP_IMUL_OVF_UN)
3293 ppc_mulhwu (code, ppc_r0, ins->sreg1, ins->sreg2);
3295 ppc_mulhdu (code, ppc_r0, ins->sreg1, ins->sreg2);
3296 ppc_cmpi (code, 0, 0, ppc_r0, 0);
3297 EMIT_COND_SYSTEM_EXCEPTION (CEE_BNE_UN - CEE_BEQ, "OverflowException");
3298 ppc_mulld (code, ins->dreg, ins->sreg1, ins->sreg2);
3302 ppc_load (code, ins->dreg, ins->inst_c0);
3305 mono_add_patch_info (cfg, offset, (MonoJumpInfoType)ins->inst_i1, ins->inst_p0);
3306 ppc_load_sequence (code, ins->dreg, 0);
3309 ppc_mr (code, ins->dreg, ins->sreg1);
3312 int saved = ins->sreg1;
3313 if (ins->sreg1 == ppc_r3) {
3314 ppc_mr (code, ppc_r0, ins->sreg1);
3317 if (ins->sreg2 != ppc_r3)
3318 ppc_mr (code, ppc_r3, ins->sreg2);
3319 if (saved != ppc_r4)
3320 ppc_mr (code, ppc_r4, saved);
3324 ppc_fmr (code, ins->dreg, ins->sreg1);
3326 case OP_FCONV_TO_R4:
3327 ppc_frsp (code, ins->dreg, ins->sreg1);
3333 * Keep in sync with mono_arch_emit_epilog
3335 g_assert (!cfg->method->save_lmf);
3337 * Note: we can use ppc_r11 here because it is dead anyway:
3338 * we're leaving the method.
3340 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
3341 if (ppc_is_imm16 (cfg->stack_usage + PPC_RET_ADDR_OFFSET)) {
3342 ppc_load_reg (code, ppc_r0, cfg->stack_usage + PPC_RET_ADDR_OFFSET, cfg->frame_reg);
3344 ppc_load (code, ppc_r11, cfg->stack_usage + PPC_RET_ADDR_OFFSET);
3345 ppc_load_reg_indexed (code, ppc_r0, cfg->frame_reg, ppc_r11);
3347 ppc_mtlr (code, ppc_r0);
3350 code = emit_load_volatile_arguments (cfg, code);
3352 if (ppc_is_imm16 (cfg->stack_usage)) {
3353 ppc_addic (code, ppc_sp, cfg->frame_reg, cfg->stack_usage);
3355 ppc_load (code, ppc_r11, cfg->stack_usage);
3356 ppc_add (code, ppc_sp, cfg->frame_reg, ppc_r11);
/* Tail-call epilog: restore the callee-saved integer registers, which
 * methods without an LMF save at negative offsets from the frame top. */
3358 if (!cfg->method->save_lmf) {
3359 /*for (i = 31; i >= 14; --i) {
3360 if (cfg->used_float_regs & (1 << i)) {
3361 pos += sizeof (double);
3362 ppc_lfd (code, i, -pos, cfg->frame_reg);
3365 /* FIXME: restore registers before changing ppc_sp */
3366 for (i = 31; i >= 13; --i) {
3367 if (cfg->used_int_regs & (1 << i)) {
3368 pos += sizeof (gulong);
/* fixed: the indexed form takes a register as its third operand (see
 * its use in OP_LOAD_MEMBASE), so passing the immediate -pos encoded
 * garbage; use the displacement form instead */
3369 ppc_load_reg (code, i, -pos, ppc_sp);
3373 /* FIXME restore from MonoLMF: though this can't happen yet */
3375 mono_add_patch_info (cfg, (guint8*) code - cfg->native_code, MONO_PATCH_INFO_METHOD_JUMP, ins->inst_p0);
3380 /* ensure ins->sreg1 is not NULL */
3381 ppc_load_reg (code, ppc_r0, 0, ins->sreg1);
3384 if (ppc_is_imm16 (cfg->sig_cookie + cfg->stack_usage)) {
3385 ppc_addi (code, ppc_r0, cfg->frame_reg, cfg->sig_cookie + cfg->stack_usage);
3387 ppc_load (code, ppc_r0, cfg->sig_cookie + cfg->stack_usage);
3388 ppc_add (code, ppc_r0, cfg->frame_reg, ppc_r0);
3390 ppc_store_reg (code, ppc_r0, 0, ins->sreg1);
3399 call = (MonoCallInst*)ins;
3400 if (ins->flags & MONO_INST_HAS_METHOD)
3401 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_METHOD, call->method);
3403 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_ABS, call->fptr);
3404 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
3405 ppc_load_func (code, ppc_r0, 0);
3406 ppc_mtlr (code, ppc_r0);
3411 /* FIXME: this should be handled somewhere else in the new jit */
3412 code = emit_move_return_value (cfg, ins, code);
3418 case OP_VOIDCALL_REG:
3420 ppc_load_reg (code, ppc_r0, 0, ins->sreg1);
3421 /* FIXME: if we know that this is a method, we
3422 can omit this load */
3423 ppc_load_reg (code, ppc_r2, 8, ins->sreg1);
3424 ppc_mtlr (code, ppc_r0);
3426 /* FIXME: this should be handled somewhere else in the new jit */
3427 code = emit_move_return_value (cfg, ins, code);
3429 case OP_FCALL_MEMBASE:
3430 case OP_LCALL_MEMBASE:
3431 case OP_VCALL_MEMBASE:
3432 case OP_VCALL2_MEMBASE:
3433 case OP_VOIDCALL_MEMBASE:
3434 case OP_CALL_MEMBASE:
3435 ppc_load_reg (code, ppc_r0, ins->inst_offset, ins->sreg1);
3436 ppc_mtlr (code, ppc_r0);
3438 /* FIXME: this should be handled somewhere else in the new jit */
3439 code = emit_move_return_value (cfg, ins, code);
3442 guint8 * zero_loop_jump, * zero_loop_start;
3443 /* keep alignment */
3444 int alloca_waste = PPC_STACK_PARAM_OFFSET + cfg->param_area + 31;
3445 int area_offset = alloca_waste;
3447 ppc_addi (code, ppc_r11, ins->sreg1, alloca_waste + 31);
3448 /* FIXME: should be calculated from MONO_ARCH_FRAME_ALIGNMENT */
3449 ppc_clrrdi (code, ppc_r11, ppc_r11, 4);
3450 /* use ctr to store the number of words to 0 if needed */
3451 if (ins->flags & MONO_INST_INIT) {
3452 /* we zero 4 bytes at a time:
3453 * we add 7 instead of 3 so that we set the counter to
3454 * at least 1, otherwise the bdnz instruction will make
3455 * it negative and iterate billions of times.
3457 ppc_addi (code, ppc_r0, ins->sreg1, 7);
3458 ppc_sradi (code, ppc_r0, ppc_r0, 2);
3459 ppc_mtctr (code, ppc_r0);
3461 ppc_load_reg (code, ppc_r0, 0, ppc_sp);
3462 ppc_neg (code, ppc_r11, ppc_r11);
3463 ppc_store_reg_update_indexed (code, ppc_r0, ppc_sp, ppc_r11);
3465 /* FIXME: make this loop work in 8 byte increments */
3466 if (ins->flags & MONO_INST_INIT) {
3467 /* adjust the dest reg by -4 so we can use stwu */
3468 /* we actually adjust -8 because we let the loop
3471 ppc_addi (code, ins->dreg, ppc_sp, (area_offset - 8));
3472 ppc_li (code, ppc_r11, 0);
3473 zero_loop_start = code;
3474 ppc_stwu (code, ppc_r11, 4, ins->dreg);
3475 zero_loop_jump = code;
3476 ppc_bc (code, PPC_BR_DEC_CTR_NONZERO, 0, 0);
3477 ppc_patch (zero_loop_jump, zero_loop_start);
3479 ppc_addi (code, ins->dreg, ppc_sp, area_offset);
3484 ppc_mr (code, ppc_r3, ins->sreg1);
3485 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
3486 (gpointer)"mono_arch_throw_exception");
3487 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
3488 ppc_load_func (code, ppc_r0, 0);
3489 ppc_mtlr (code, ppc_r0);
3498 ppc_mr (code, ppc_r3, ins->sreg1);
3499 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
3500 (gpointer)"mono_arch_rethrow_exception");
3501 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
3502 ppc_load_func (code, ppc_r0, 0);
3503 ppc_mtlr (code, ppc_r0);
3510 case OP_START_HANDLER: {
3511 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
3512 g_assert (spvar->inst_basereg != ppc_sp);
3513 code = emit_reserve_param_area (cfg, code);
3514 ppc_mflr (code, ppc_r0);
3515 if (ppc_is_imm16 (spvar->inst_offset)) {
3516 ppc_store_reg (code, ppc_r0, spvar->inst_offset, spvar->inst_basereg);
3518 ppc_load (code, ppc_r11, spvar->inst_offset);
3519 ppc_store_reg_indexed (code, ppc_r0, ppc_r11, spvar->inst_basereg);
3523 case OP_ENDFILTER: {
3524 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
3525 g_assert (spvar->inst_basereg != ppc_sp);
3526 code = emit_unreserve_param_area (cfg, code);
3527 if (ins->sreg1 != ppc_r3)
3528 ppc_mr (code, ppc_r3, ins->sreg1);
3529 if (ppc_is_imm16 (spvar->inst_offset)) {
3530 ppc_load_reg (code, ppc_r0, spvar->inst_offset, spvar->inst_basereg);
3532 ppc_load (code, ppc_r11, spvar->inst_offset);
3533 ppc_load_reg_indexed (code, ppc_r0, spvar->inst_basereg, ppc_r11);
3535 ppc_mtlr (code, ppc_r0);
3539 case OP_ENDFINALLY: {
3540 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
3541 g_assert (spvar->inst_basereg != ppc_sp);
3542 code = emit_unreserve_param_area (cfg, code);
3543 ppc_load_reg (code, ppc_r0, spvar->inst_offset, spvar->inst_basereg);
3544 ppc_mtlr (code, ppc_r0);
3548 case OP_CALL_HANDLER:
3549 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_target_bb);
3553 ins->inst_c0 = code - cfg->native_code;
3556 if (ins->flags & MONO_INST_BRLABEL) {
3557 /*if (ins->inst_i0->inst_c0) {
3559 //x86_jump_code (code, cfg->native_code + ins->inst_i0->inst_c0);
3561 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_LABEL, ins->inst_i0);
3565 /*if (ins->inst_target_bb->native_offset) {
3567 //x86_jump_code (code, cfg->native_code + ins->inst_target_bb->native_offset);
3569 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_target_bb);
3575 ppc_mtctr (code, ins->sreg1);
3576 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
3581 ppc_li (code, ins->dreg, 0);
3582 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 2);
3583 ppc_li (code, ins->dreg, 1);
3591 ppc_li (code, ins->dreg, 1);
3592 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 2);
3593 ppc_li (code, ins->dreg, 0);
3601 ppc_li (code, ins->dreg, 1);
3602 ppc_bc (code, PPC_BR_TRUE, PPC_BR_GT, 2);
3603 ppc_li (code, ins->dreg, 0);
3605 case OP_COND_EXC_EQ:
3606 case OP_COND_EXC_NE_UN:
3607 case OP_COND_EXC_LT:
3608 case OP_COND_EXC_LT_UN:
3609 case OP_COND_EXC_GT:
3610 case OP_COND_EXC_GT_UN:
3611 case OP_COND_EXC_GE:
3612 case OP_COND_EXC_GE_UN:
3613 case OP_COND_EXC_LE:
3614 case OP_COND_EXC_LE_UN:
3615 EMIT_COND_SYSTEM_EXCEPTION (ins->opcode - OP_COND_EXC_EQ, ins->inst_p1);
3617 case OP_COND_EXC_IEQ:
3618 case OP_COND_EXC_INE_UN:
3619 case OP_COND_EXC_ILT:
3620 case OP_COND_EXC_ILT_UN:
3621 case OP_COND_EXC_IGT:
3622 case OP_COND_EXC_IGT_UN:
3623 case OP_COND_EXC_IGE:
3624 case OP_COND_EXC_IGE_UN:
3625 case OP_COND_EXC_ILE:
3626 case OP_COND_EXC_ILE_UN:
3627 EMIT_COND_SYSTEM_EXCEPTION (ins->opcode - OP_COND_EXC_IEQ, ins->inst_p1);
3630 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3632 ppc_mfspr (code, ppc_r0, ppc_xer);
3633 ppc_andisd (code, ppc_r0, ppc_r0, (1 << 13)); /* CA */
3634 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, ins->inst_p1);
3636 case OP_COND_EXC_OV:
3637 ppc_mfspr (code, ppc_r0, ppc_xer);
3638 ppc_andisd (code, ppc_r0, ppc_r0, (1 << 14)); /* OV */
3639 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, ins->inst_p1);
3641 case OP_COND_EXC_NC:
3642 case OP_COND_EXC_NO:
3643 g_assert_not_reached ();
3665 EMIT_COND_BRANCH (ins, ins->opcode -
3666 ((ins->opcode >= OP_LBEQ && ins->opcode <= OP_LBLT_UN) ? OP_LBEQ : OP_IBEQ));
3669 /* floating point opcodes */
3672 g_assert_not_reached ();
3673 case OP_STORER8_MEMBASE_REG:
3674 if (ppc_is_imm16 (ins->inst_offset)) {
3675 ppc_stfd (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3677 ppc_load (code, ppc_r0, ins->inst_offset);
3678 ppc_stfdx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3681 case OP_LOADR8_MEMBASE:
3682 if (ppc_is_imm16 (ins->inst_offset)) {
3683 ppc_lfd (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3685 ppc_load (code, ppc_r0, ins->inst_offset);
3686 ppc_lfdx (code, ins->dreg, ins->inst_destbasereg, ppc_r0);
3689 case OP_STORER4_MEMBASE_REG:
3690 ppc_frsp (code, ins->sreg1, ins->sreg1);
3691 if (ppc_is_imm16 (ins->inst_offset)) {
3692 ppc_stfs (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3694 ppc_load (code, ppc_r0, ins->inst_offset);
3695 ppc_stfsx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3698 case OP_LOADR4_MEMBASE:
3699 if (ppc_is_imm16 (ins->inst_offset)) {
3700 ppc_lfs (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3702 ppc_load (code, ppc_r0, ins->inst_offset);
3703 ppc_lfsx (code, ins->dreg, ins->inst_destbasereg, ppc_r0);
3706 case OP_LOADR4_MEMINDEX:
3707 ppc_lfsx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3709 case OP_LOADR8_MEMINDEX:
3710 ppc_lfdx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3712 case OP_STORER4_MEMINDEX:
3713 ppc_frsp (code, ins->sreg1, ins->sreg1);
3714 ppc_stfsx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
3716 case OP_STORER8_MEMINDEX:
3717 ppc_stfdx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
3720 case CEE_CONV_R4: /* FIXME: change precision */
3722 g_assert_not_reached ();
3723 case OP_FCONV_TO_I1:
3724 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 1, TRUE);
3726 case OP_FCONV_TO_U1:
3727 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 1, FALSE);
3729 case OP_FCONV_TO_I2:
3730 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 2, TRUE);
3732 case OP_FCONV_TO_U2:
3733 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 2, FALSE);
3735 case OP_FCONV_TO_I4:
3737 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 4, TRUE);
3739 case OP_FCONV_TO_U4:
3741 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 4, FALSE);
3743 case OP_FCONV_TO_I8:
3744 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 8, TRUE);
3746 case OP_FCONV_TO_U8:
3747 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 8, FALSE);
3749 case OP_LCONV_TO_R_UN:
3750 g_assert_not_reached ();
3751 /* Implemented as helper calls */
3753 case OP_LCONV_TO_OVF_I4_2:
3754 case OP_LCONV_TO_OVF_I: {
3755 guint8 *negative_branch, *msword_positive_branch, *msword_negative_branch, *ovf_ex_target;
3756 g_assert_not_reached (); /* FIXME: L in cmps */
3757 // Check if its negative
3758 ppc_cmpi (code, 0, 0, ins->sreg1, 0);
3759 negative_branch = code;
3760 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 0);
3761 // Its positive msword == 0
3762 ppc_cmpi (code, 0, 0, ins->sreg2, 0);
3763 msword_positive_branch = code;
3764 ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
3766 ovf_ex_target = code;
3767 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_ALWAYS, 0, "OverflowException");
3769 ppc_patch (negative_branch, code);
3770 ppc_cmpi (code, 0, 0, ins->sreg2, -1);
3771 msword_negative_branch = code;
3772 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
3773 ppc_patch (msword_negative_branch, ovf_ex_target);
3775 ppc_patch (msword_positive_branch, code);
3776 if (ins->dreg != ins->sreg1)
3777 ppc_mr (code, ins->dreg, ins->sreg1);
3781 ppc_fsqrtd (code, ins->dreg, ins->sreg1);
3784 ppc_fadd (code, ins->dreg, ins->sreg1, ins->sreg2);
3787 ppc_fsub (code, ins->dreg, ins->sreg1, ins->sreg2);
3790 ppc_fmul (code, ins->dreg, ins->sreg1, ins->sreg2);
3793 ppc_fdiv (code, ins->dreg, ins->sreg1, ins->sreg2);
3796 ppc_fneg (code, ins->dreg, ins->sreg1);
3800 g_assert_not_reached ();
3803 ppc_fcmpu (code, 0, ins->sreg1, ins->sreg2);
3806 ppc_fcmpo (code, 0, ins->sreg1, ins->sreg2);
3807 ppc_li (code, ins->dreg, 0);
3808 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 2);
3809 ppc_li (code, ins->dreg, 1);
3812 ppc_fcmpo (code, 0, ins->sreg1, ins->sreg2);
3813 ppc_li (code, ins->dreg, 1);
3814 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 2);
3815 ppc_li (code, ins->dreg, 0);
3818 ppc_fcmpu (code, 0, ins->sreg1, ins->sreg2);
3819 ppc_li (code, ins->dreg, 1);
3820 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 3);
3821 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 2);
3822 ppc_li (code, ins->dreg, 0);
3825 ppc_fcmpo (code, 0, ins->sreg1, ins->sreg2);
3826 ppc_li (code, ins->dreg, 1);
3827 ppc_bc (code, PPC_BR_TRUE, PPC_BR_GT, 2);
3828 ppc_li (code, ins->dreg, 0);
3831 ppc_fcmpu (code, 0, ins->sreg1, ins->sreg2);
3832 ppc_li (code, ins->dreg, 1);
3833 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 3);
3834 ppc_bc (code, PPC_BR_TRUE, PPC_BR_GT, 2);
3835 ppc_li (code, ins->dreg, 0);
3838 EMIT_COND_BRANCH (ins, CEE_BEQ - CEE_BEQ);
3841 EMIT_COND_BRANCH (ins, CEE_BNE_UN - CEE_BEQ);
3844 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
3845 EMIT_COND_BRANCH (ins, CEE_BLT - CEE_BEQ);
3848 EMIT_COND_BRANCH_FLAGS (ins, PPC_BR_TRUE, PPC_BR_SO);
3849 EMIT_COND_BRANCH (ins, CEE_BLT_UN - CEE_BEQ);
3852 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
3853 EMIT_COND_BRANCH (ins, CEE_BGT - CEE_BEQ);
3856 EMIT_COND_BRANCH_FLAGS (ins, PPC_BR_TRUE, PPC_BR_SO);
3857 EMIT_COND_BRANCH (ins, CEE_BGT_UN - CEE_BEQ);
3860 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
3861 EMIT_COND_BRANCH (ins, CEE_BGE - CEE_BEQ);
3864 EMIT_COND_BRANCH (ins, CEE_BGE_UN - CEE_BEQ);
3867 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
3868 EMIT_COND_BRANCH (ins, CEE_BLE - CEE_BEQ);
3871 EMIT_COND_BRANCH (ins, CEE_BLE_UN - CEE_BEQ);
3874 g_assert_not_reached ();
3875 case OP_CHECK_FINITE: {
3876 ppc_rlwinm (code, ins->sreg1, ins->sreg1, 0, 1, 31);
3877 ppc_addis (code, ins->sreg1, ins->sreg1, -32752);
3878 ppc_rlwinmd (code, ins->sreg1, ins->sreg1, 1, 31, 31);
3879 EMIT_COND_SYSTEM_EXCEPTION (CEE_BEQ - CEE_BEQ, "ArithmeticException");
3882 mono_add_patch_info (cfg, offset, (MonoJumpInfoType)ins->inst_i1, ins->inst_p0);
3883 ppc_load_sequence (code, ins->dreg, 0x0f0f0f0f0f0f0f0fL);
3886 case OP_ATOMIC_ADD_NEW_I4:
3887 case OP_ATOMIC_ADD_NEW_I8: {
3888 guint8 *loop = code, *branch;
3889 g_assert (ins->inst_offset == 0);
3890 if (ins->opcode == OP_ATOMIC_ADD_NEW_I4)
3891 ppc_lwarx (code, ppc_r0, 0, ins->inst_basereg);
3893 ppc_ldarx (code, ppc_r0, 0, ins->inst_basereg);
3894 ppc_add (code, ppc_r0, ppc_r0, ins->sreg2);
3895 if (ins->opcode == OP_ATOMIC_ADD_NEW_I4)
3896 ppc_stwcxd (code, ppc_r0, 0, ins->inst_basereg);
3898 ppc_stdcxd (code, ppc_r0, 0, ins->inst_basereg);
3900 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
3901 ppc_patch (branch, loop);
3902 ppc_mr (code, ins->dreg, ppc_r0);
3907 g_warning ("unknown opcode %s in %s()\n", mono_inst_name (ins->opcode), __FUNCTION__);
3908 g_assert_not_reached ();
3911 if ((cfg->opt & MONO_OPT_BRANCH) && ((code - cfg->native_code - offset) > max_len)) {
3912 g_warning ("wrong maximal instruction length of instruction %s (expected %d, got %ld)",
3913 mono_inst_name (ins->opcode), max_len, code - cfg->native_code - offset);
3914 g_assert_not_reached ();
3920 last_offset = offset;
3923 cfg->code_len = code - cfg->native_code;
/* Register arch-specific low-level call helpers with the runtime; empty on ppc. */
3927 mono_arch_register_lowlevel_calls (void)
/*
 * patch_lis_ori / patch_load_sequence: rewrite the 16-bit immediate halves
 * of an already-emitted lis/ori (or full 64-bit constant-load) sequence so
 * that it materializes `val`.  The odd guint16 indices select the immediate
 * field of each 32-bit instruction word.
 * NOTE(review): index choice assumes big-endian instruction memory layout —
 * confirm before reusing on a little-endian target.
 */
3931 #define patch_lis_ori(ip,val) do {\
3932 guint16 *__lis_ori = (guint16*)(ip); \
3933 __lis_ori [1] = (((gulong)(val)) >> 16) & 0xffff; \
3934 __lis_ori [3] = ((gulong)(val)) & 0xffff; \
3936 #define patch_load_sequence(ip,val) do {\
3937 guint16 *__load = (guint16*)(ip); \
3938 __load [1] = (((guint64)(val)) >> 48) & 0xffff; \
3939 __load [3] = (((guint64)(val)) >> 32) & 0xffff; \
3940 __load [7] = (((guint64)(val)) >> 16) & 0xffff; \
3941 __load [9] = ((guint64)(val)) & 0xffff; \
/*
 * mono_arch_patch_code:
 * Walks the jump-info list @ji produced while compiling @method and fixes up
 * each recorded location in the emitted @code buffer to point at its resolved
 * target.  Most constant-bearing patch types rewrite the immediates of a
 * previously emitted load sequence; branch/call patches fall through to
 * ppc_patch_full at the bottom of the loop.
 */
3945 mono_arch_patch_code (MonoMethod *method, MonoDomain *domain, guint8 *code, MonoJumpInfo *ji, gboolean run_cctors)
3947 MonoJumpInfo *patch_info;
3949 for (patch_info = ji; patch_info; patch_info = patch_info->next) {
3950 unsigned char *ip = patch_info->ip.i + code;
3951 unsigned char *target;
3952 gboolean is_fd = FALSE;
/* resolve the symbolic target (method address, vtable, constant, ...) */
3954 target = mono_resolve_patch_target (method, domain, code, patch_info, run_cctors);
3956 #ifdef DEBUG_PATCHING
3957 g_print ("patching %p to %p (type %d)\n", ip, target, patch_info->type);
3960 switch (patch_info->type) {
3961 case MONO_PATCH_INFO_IP:
/* the IP constant is the patch site itself */
3962 patch_load_sequence (ip, ip);
3964 case MONO_PATCH_INFO_METHOD_REL:
3965 g_assert_not_reached ();
3966 *((gpointer *)(ip)) = code + patch_info->data.offset;
3968 case MONO_PATCH_INFO_SWITCH: {
3969 gpointer *table = (gpointer *)patch_info->data.table->table;
3972 patch_load_sequence (ip, table);
/* rebase every switch-table entry from a native offset to an absolute address */
3974 for (i = 0; i < patch_info->data.table->table_size; i++) {
3975 table [i] = (glong)patch_info->data.table->table [i] + code;
3977 /* we put into the table the absolute address, no need for ppc_patch in this case */
3980 case MONO_PATCH_INFO_METHODCONST:
3981 case MONO_PATCH_INFO_CLASS:
3982 case MONO_PATCH_INFO_IMAGE:
3983 case MONO_PATCH_INFO_FIELD:
3984 case MONO_PATCH_INFO_VTABLE:
3985 case MONO_PATCH_INFO_IID:
3986 case MONO_PATCH_INFO_SFLDA:
3987 case MONO_PATCH_INFO_LDSTR:
3988 case MONO_PATCH_INFO_TYPE_FROM_HANDLE:
3989 case MONO_PATCH_INFO_LDTOKEN:
3990 /* from OP_AOTCONST : lis + ori */
3991 patch_load_sequence (ip, target)<span/>;
3993 case MONO_PATCH_INFO_R4:
3994 case MONO_PATCH_INFO_R8:
3995 g_assert_not_reached ();
3996 *((gconstpointer *)(ip + 2)) = patch_info->data.target;
3998 case MONO_PATCH_INFO_EXC_NAME:
3999 g_assert_not_reached ();
4000 *((gconstpointer *)(ip + 1)) = patch_info->data.name;
4002 case MONO_PATCH_INFO_NONE:
4003 case MONO_PATCH_INFO_BB_OVF:
4004 case MONO_PATCH_INFO_EXC_OVF:
4005 /* everything is dealt with at epilog output time */
4007 case MONO_PATCH_INFO_INTERNAL_METHOD:
4008 case MONO_PATCH_INFO_ABS:
4009 case MONO_PATCH_INFO_CLASS_INIT:
4010 case MONO_PATCH_INFO_RGCTX_FETCH:
/* branch/call patch: is_fd distinguishes function-descriptor targets */
4016 ppc_patch_full (ip, target, is_fd);
4021 * Stack frame layout:
4023 * ------------------- sp
4024 * MonoLMF structure or saved registers
4025 * -------------------
4027 * -------------------
4029 * -------------------
4030 * optional 8 bytes for tracing
4031 * -------------------
4032 * param area size is cfg->param_area
4033 * -------------------
4034 * linkage area size is PPC_STACK_PARAM_OFFSET
4035 * ------------------- sp
/*
 * mono_arch_emit_prolog:
 * Emits the method prolog: saves the link register, stores callee-saved
 * registers (or a full MonoLMF when method->save_lmf), allocates the aligned
 * stack frame, then moves incoming arguments from their ABI locations
 * (registers / caller frame) into the slots the register allocator assigned.
 * See the stack-frame layout comment above for the frame structure.
 */
4039 mono_arch_emit_prolog (MonoCompile *cfg)
4041 MonoMethod *method = cfg->method;
4043 MonoMethodSignature *sig;
4045 int alloc_size, pos, max_offset, i;
4050 int tailcall_struct_index;
4052 if (mono_jit_trace_calls != NULL && mono_trace_eval (method))
/* initial code-buffer size: heuristic based on the argument count */
4055 sig = mono_method_signature (method);
4056 cfg->code_size = 384 + sig->param_count * 20;
4057 code = cfg->native_code = g_malloc (cfg->code_size);
4059 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
/* save the return address in the caller frame's LR slot */
4060 ppc_mflr (code, ppc_r0);
4061 ppc_store_reg (code, ppc_r0, PPC_RET_ADDR_OFFSET, ppc_sp);
4064 alloc_size = cfg->stack_offset;
4067 if (!method->save_lmf) {
4068 /*for (i = 31; i >= 14; --i) {
4069 if (cfg->used_float_regs & (1 << i)) {
4070 pos += sizeof (gdouble);
4071 ppc_stfd (code, i, -pos, ppc_sp);
/* save only the callee-saved integer registers this method actually uses */
4074 for (i = 31; i >= 13; --i) {
4075 if (cfg->used_int_regs & (1 << i)) {
4076 pos += sizeof (gulong);
4077 ppc_store_reg (code, i, -pos, ppc_sp);
/* save_lmf: spill r13-r31 and f14-f31 into the on-stack MonoLMF */
4081 pos += sizeof (MonoLMF);
4083 for (i = 13; i <= 31; i++) {
4084 ppc_store_reg (code, i, (-pos + G_STRUCT_OFFSET(MonoLMF, iregs) +
4085 ((i-13) * sizeof (gulong))), ppc_r1);
4087 for (i = 14; i <= 31; i++) {
4088 ppc_stfd (code, i, (-pos + G_STRUCT_OFFSET(MonoLMF, fregs) +
4089 ((i-14) * sizeof (gdouble))), ppc_r1);
4093 // align to MONO_ARCH_FRAME_ALIGNMENT bytes
4094 if (alloc_size & (MONO_ARCH_FRAME_ALIGNMENT - 1)) {
4095 alloc_size += MONO_ARCH_FRAME_ALIGNMENT - 1;
4096 alloc_size &= ~(MONO_ARCH_FRAME_ALIGNMENT - 1);
4099 cfg->stack_usage = alloc_size;
4100 g_assert ((alloc_size & (MONO_ARCH_FRAME_ALIGNMENT-1)) == 0);
/* allocate the frame with a store-with-update so the back chain stays valid */
4102 if (ppc_is_imm16 (-alloc_size)) {
4103 ppc_store_reg_update (code, ppc_sp, -alloc_size, ppc_sp);
4105 ppc_load (code, ppc_r11, -alloc_size);
4106 ppc_store_reg_update_indexed (code, ppc_sp, ppc_sp, ppc_r11);
4109 if (cfg->frame_reg != ppc_sp)
4110 ppc_mr (code, cfg->frame_reg, ppc_sp);
4112 /* store runtime generic context */
4113 #ifdef MONO_ARCH_RGCTX_REG
4114 if (cfg->rgctx_var) {
4115 g_assert (cfg->rgctx_var->opcode == OP_REGOFFSET &&
4116 (cfg->rgctx_var->inst_basereg == ppc_r1 || cfg->rgctx_var->inst_basereg == ppc_r31));
4118 ppc_store_reg (code, MONO_ARCH_RGCTX_REG, cfg->rgctx_var->inst_offset, cfg->rgctx_var->inst_basereg);
4122 /* compute max_offset in order to use short forward jumps
4123 * we always do it on ppc because the immediate displacement
4124 * for jumps is too small
4127 for (bb = cfg->bb_entry; bb; bb = bb->next_bb) {
4129 bb->max_offset = max_offset;
4131 if (cfg->prof_options & MONO_PROFILE_COVERAGE)
4134 MONO_BB_FOR_EACH_INS (bb, ins)
4135 max_offset += ins_native_length (cfg, ins);
4138 /* load arguments allocated to register from the stack */
4141 cinfo = calculate_sizes (sig, sig->pinvoke);
/* valuetype return: store the hidden return-buffer address into its variable */
4143 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
4144 ArgInfo *ainfo = &cinfo->ret;
4146 inst = cfg->vret_addr;
4149 if (ppc_is_imm16 (inst->inst_offset)) {
4150 ppc_store_reg (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4152 ppc_load (code, ppc_r11, inst->inst_offset);
4153 ppc_store_reg_indexed (code, ainfo->reg, ppc_r11, inst->inst_basereg);
4157 tailcall_struct_index = 0;
/* move each incoming argument to the location assigned by the allocator */
4158 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
4159 ArgInfo *ainfo = cinfo->args + i;
4160 inst = cfg->args [pos];
4162 if (cfg->verbose_level > 2)
4163 g_print ("Saving argument %d (type: %d)\n", i, ainfo->regtype);
4164 if (inst->opcode == OP_REGVAR) {
4165 if (ainfo->regtype == RegTypeGeneral)
4166 ppc_mr (code, inst->dreg, ainfo->reg);
4167 else if (ainfo->regtype == RegTypeFP)
4168 ppc_fmr (code, inst->dreg, ainfo->reg);
4169 else if (ainfo->regtype == RegTypeBase) {
/* argument lives in the caller frame: chase the back chain first */
4170 ppc_load_reg (code, ppc_r11, 0, ppc_sp);
4171 ppc_load_reg (code, inst->dreg, ainfo->offset, ppc_r11);
4173 g_assert_not_reached ();
4175 if (cfg->verbose_level > 2)
4176 g_print ("Argument %d assigned to register %s\n", pos, mono_arch_regname (inst->dreg));
4178 /* the argument should be put on the stack: FIXME handle size != word */
4179 if (ainfo->regtype == RegTypeGeneral) {
4180 switch (ainfo->size) {
4182 if (ppc_is_imm16 (inst->inst_offset)) {
4183 ppc_stb (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4185 ppc_load (code, ppc_r11, inst->inst_offset);
4186 ppc_stbx (code, ainfo->reg, ppc_r11, inst->inst_basereg);
4190 if (ppc_is_imm16 (inst->inst_offset)) {
4191 ppc_sth (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4193 ppc_load (code, ppc_r11, inst->inst_offset);
4194 ppc_sthx (code, ainfo->reg, ppc_r11, inst->inst_basereg);
4198 if (ppc_is_imm16 (inst->inst_offset)) {
4199 ppc_stw (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4201 ppc_load (code, ppc_r11, inst->inst_offset);
4202 ppc_stwx (code, ainfo->reg, ppc_r11, inst->inst_basereg);
4206 if (ppc_is_imm16 (inst->inst_offset)) {
4207 ppc_store_reg (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4209 ppc_load (code, ppc_r11, inst->inst_offset);
4210 ppc_store_reg_indexed (code, ainfo->reg, ppc_r11, inst->inst_basereg);
4214 } else if (ainfo->regtype == RegTypeBase) {
4215 /* load the previous stack pointer in r11 */
4216 ppc_load_reg (code, ppc_r11, 0, ppc_sp);
4217 ppc_load_reg (code, ppc_r0, ainfo->offset, ppc_r11);
4218 switch (ainfo->size) {
4220 if (ppc_is_imm16 (inst->inst_offset)) {
4221 ppc_stb (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4223 ppc_load (code, ppc_r11, inst->inst_offset);
4224 ppc_stbx (code, ppc_r0, ppc_r11, inst->inst_basereg);
4228 if (ppc_is_imm16 (inst->inst_offset)) {
4229 ppc_sth (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4231 ppc_load (code, ppc_r11, inst->inst_offset);
4232 ppc_sthx (code, ppc_r0, ppc_r11, inst->inst_basereg);
4236 if (ppc_is_imm16 (inst->inst_offset)) {
4237 ppc_stw (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4239 ppc_load (code, ppc_r11, inst->inst_offset);
4240 ppc_stwx (code, ppc_r0, ppc_r11, inst->inst_basereg);
4244 if (ppc_is_imm16 (inst->inst_offset)) {
4245 ppc_store_reg (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4247 ppc_load (code, ppc_r11, inst->inst_offset);
4248 ppc_store_reg_indexed (code, ppc_r0, ppc_r11, inst->inst_basereg);
4252 } else if (ainfo->regtype == RegTypeFP) {
4253 g_assert (ppc_is_imm16 (inst->inst_offset));
4254 if (ainfo->size == 8)
4255 ppc_stfd (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4256 else if (ainfo->size == 4)
4257 ppc_stfs (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4259 g_assert_not_reached ();
4260 } else if (ainfo->regtype == RegTypeStructByVal) {
4261 int doffset = inst->inst_offset;
4265 g_assert (ppc_is_imm16 (inst->inst_offset));
4266 g_assert (ppc_is_imm16 (inst->inst_offset + ainfo->size * sizeof (gpointer)));
4267 /* FIXME: what if there is no class? */
4268 if (sig->pinvoke && mono_class_from_mono_type (inst->inst_vtype))
4269 size = mono_class_native_size (mono_class_from_mono_type (inst->inst_vtype), NULL);
/* copy the register-passed part of the struct to its stack slot */
4270 for (cur_reg = 0; cur_reg < ainfo->size; ++cur_reg) {
4273 * Darwin handles 1 and 2 byte
4274 * structs specially by
4275 * loading h/b into the arg
4276 * register. Only done for
4280 ppc_sth (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
4282 ppc_stb (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
4287 g_assert (cur_reg == 0);
4288 ppc_sldi (code, ppc_r0, ainfo->reg,
4289 (sizeof (gpointer) - ainfo->bytes) * 8);
4290 ppc_store_reg (code, ppc_r0, doffset, inst->inst_basereg);
4292 ppc_store_reg (code, ainfo->reg + cur_reg, doffset,
4293 inst->inst_basereg);
4296 soffset += sizeof (gpointer);
4297 doffset += sizeof (gpointer);
/* remainder of the struct was passed on the caller stack: memcpy it over */
4299 if (ainfo->vtsize) {
4300 /* FIXME: we need to do the shifting here, too */
4303 /* load the previous stack pointer in r11 (r0 gets overwritten by the memcpy) */
4304 ppc_load_reg (code, ppc_r11, 0, ppc_sp);
4305 if ((size & 7) != 0) {
4306 code = emit_memcpy (code, size - soffset,
4307 inst->inst_basereg, doffset,
4308 ppc_r11, ainfo->offset + soffset);
4310 code = emit_memcpy (code, ainfo->vtsize * sizeof (gpointer),
4311 inst->inst_basereg, doffset,
4312 ppc_r11, ainfo->offset + soffset);
4315 } else if (ainfo->regtype == RegTypeStructByAddr) {
4316 /* if it was originally a RegTypeBase */
4317 if (ainfo->offset) {
4318 /* load the previous stack pointer in r11 */
4319 ppc_load_reg (code, ppc_r11, 0, ppc_sp);
4320 ppc_load_reg (code, ppc_r11, ainfo->offset, ppc_r11);
4322 ppc_mr (code, ppc_r11, ainfo->reg);
4325 if (cfg->tailcall_valuetype_addrs) {
4326 MonoInst *addr = cfg->tailcall_valuetype_addrs [tailcall_struct_index];
4328 g_assert (ppc_is_imm16 (addr->inst_offset));
4329 ppc_store_reg (code, ppc_r11, addr->inst_offset, addr->inst_basereg);
4331 tailcall_struct_index++;
4334 g_assert (ppc_is_imm16 (inst->inst_offset));
4335 code = emit_memcpy (code, ainfo->vtsize, inst->inst_basereg, inst->inst_offset, ppc_r11, 0);
4336 /*g_print ("copy in %s: %d bytes from %d to offset: %d\n", method->name, ainfo->vtsize, ainfo->reg, inst->inst_offset);*/
4338 g_assert_not_reached ();
/* native-to-managed wrappers must attach the thread to the runtime first */
4343 if (method->wrapper_type == MONO_WRAPPER_NATIVE_TO_MANAGED) {
4344 ppc_load (code, ppc_r3, cfg->domain);
4345 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD, (gpointer)"mono_jit_thread_attach");
4346 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
4347 ppc_load_func (code, ppc_r0, 0);
4348 ppc_mtlr (code, ppc_r0);
4355 if (method->save_lmf) {
4356 if (lmf_pthread_key != -1) {
/* fast path: fetch the jit_tls pointer straight from pthread TLS */
4357 emit_tls_access (code, ppc_r3, lmf_pthread_key);
4358 if (G_STRUCT_OFFSET (MonoJitTlsData, lmf))
4359 ppc_addi (code, ppc_r3, ppc_r3, G_STRUCT_OFFSET (MonoJitTlsData, lmf));
4361 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
4362 (gpointer)"mono_get_lmf_addr");
4363 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
4364 ppc_load_func (code, ppc_r0, 0);
4365 ppc_mtlr (code, ppc_r0);
4371 /* we build the MonoLMF structure on the stack - see mini-ppc.h */
4372 /* lmf_offset is the offset from the previous stack pointer,
4373 * alloc_size is the total stack space allocated, so the offset
4374 * of MonoLMF from the current stack ptr is alloc_size - lmf_offset.
4375 * The pointer to the struct is put in ppc_r11 (new_lmf).
4376 * The callee-saved registers are already in the MonoLMF structure
4378 ppc_addi (code, ppc_r11, ppc_sp, alloc_size - lmf_offset);
4379 /* ppc_r3 is the result from mono_get_lmf_addr () */
4380 ppc_store_reg (code, ppc_r3, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
4381 /* new_lmf->previous_lmf = *lmf_addr */
4382 ppc_load_reg (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
4383 ppc_store_reg (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
4384 /* *(lmf_addr) = r11 */
4385 ppc_store_reg (code, ppc_r11, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
4386 /* save method info */
4387 ppc_load (code, ppc_r0, method);
4388 ppc_store_reg (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, method), ppc_r11);
4389 ppc_store_reg (code, ppc_sp, G_STRUCT_OFFSET(MonoLMF, ebp), ppc_r11);
4390 /* save the current IP */
4391 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_IP, NULL);
4392 ppc_load_sequence (code, ppc_r0, 0x0101010101010101L);
4393 ppc_store_reg (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, eip), ppc_r11);
4397 code = mono_arch_instrument_prolog (cfg, mono_trace_enter_method, code, TRUE);
4399 cfg->code_len = code - cfg->native_code;
4400 g_assert (cfg->code_len < cfg->code_size);
/*
 * mono_arch_emit_epilog:
 * Emits the method epilog: optionally pops the MonoLMF (relinking
 * previous_lmf into *lmf_addr) and restores callee-saved registers, then
 * reloads the saved return address, deallocates the frame and returns.
 * Grows the code buffer first so the epilog always fits.
 */
4407 mono_arch_emit_epilog (MonoCompile *cfg)
4409 MonoMethod *method = cfg->method;
4411 int max_epilog_size = 16 + 20*4;
4414 if (cfg->method->save_lmf)
4415 max_epilog_size += 128;
4417 if (mono_jit_trace_calls != NULL)
4418 max_epilog_size += 50;
4420 if (cfg->prof_options & MONO_PROFILE_ENTER_LEAVE)
4421 max_epilog_size += 50;
/* ensure the buffer can hold the worst-case epilog */
4423 while (cfg->code_len + max_epilog_size > (cfg->code_size - 16)) {
4424 cfg->code_size *= 2;
4425 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
4426 mono_jit_stats.code_reallocs++;
4430 * Keep in sync with OP_JMP
4432 code = cfg->native_code + cfg->code_len;
4434 if (mono_jit_trace_calls != NULL && mono_trace_eval (method)) {
4435 code = mono_arch_instrument_epilog (cfg, mono_trace_leave_method, code, TRUE);
4439 if (method->save_lmf) {
4441 pos += sizeof (MonoLMF);
4443 /* save the frame reg in r8 */
4444 ppc_mr (code, ppc_r8, cfg->frame_reg);
4445 ppc_addi (code, ppc_r11, cfg->frame_reg, cfg->stack_usage - lmf_offset);
4446 /* r5 = previous_lmf */
4447 ppc_load_reg (code, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
4449 ppc_load_reg (code, ppc_r6, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
4450 /* *(lmf_addr) = previous_lmf */
4451 ppc_store_reg (code, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r6);
4452 /* FIXME: speedup: there is no actual need to restore the registers if
4453 * we didn't actually change them (idea from Zoltan).
/* restore callee-saved integer registers from the LMF */
4456 for (i = 13; i <= 31; ++i) {
4457 ppc_load_reg (code, i, G_STRUCT_OFFSET (MonoLMF, iregs) +
4458 (i - 13) * sizeof (gulong), ppc_r11);
4461 /*for (i = 14; i < 32; i++) {
4462 ppc_lfd (code, i, G_STRUCT_OFFSET(MonoLMF, fregs) + ((i-14) * sizeof (gdouble)), ppc_r11);
4464 g_assert (ppc_is_imm16 (cfg->stack_usage + PPC_RET_ADDR_OFFSET));
4465 /* use the saved copy of the frame reg in r8 */
4466 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
4467 ppc_load_reg (code, ppc_r0, cfg->stack_usage + PPC_RET_ADDR_OFFSET, ppc_r8);
4468 ppc_mtlr (code, ppc_r0);
4470 ppc_addic (code, ppc_sp, ppc_r8, cfg->stack_usage);
/* non-LMF path: reload the return address and pop the frame */
4472 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
4473 if (ppc_is_imm16 (cfg->stack_usage + PPC_RET_ADDR_OFFSET)) {
4474 ppc_load_reg (code, ppc_r0, cfg->stack_usage + PPC_RET_ADDR_OFFSET, cfg->frame_reg);
4476 ppc_load (code, ppc_r11, cfg->stack_usage + PPC_RET_ADDR_OFFSET);
4477 ppc_load_reg_indexed (code, ppc_r0, cfg->frame_reg, ppc_r11);
4479 ppc_mtlr (code, ppc_r0);
4481 if (ppc_is_imm16 (cfg->stack_usage)) {
4482 ppc_addic (code, ppc_sp, cfg->frame_reg, cfg->stack_usage);
4484 ppc_load (code, ppc_r11, cfg->stack_usage);
4485 ppc_add (code, ppc_sp, cfg->frame_reg, ppc_r11);
4488 /*for (i = 31; i >= 14; --i) {
4489 if (cfg->used_float_regs & (1 << i)) {
4490 pos += sizeof (double);
4491 ppc_lfd (code, i, -pos, ppc_sp);
/* restore only the callee-saved integer registers the prolog spilled */
4494 for (i = 31; i >= 13; --i) {
4495 if (cfg->used_int_regs & (1 << i)) {
4496 pos += sizeof (gulong);
4497 ppc_load_reg (code, i, -pos, ppc_sp);
4503 cfg->code_len = code - cfg->native_code;
4505 g_assert (cfg->code_len < cfg->code_size);
4509 /* remove once throw_exception_by_name is eliminated */
4511 exception_id_by_name (const char *name)
4513 if (strcmp (name, "IndexOutOfRangeException") == 0)
4514 return MONO_EXC_INDEX_OUT_OF_RANGE;
4515 if (strcmp (name, "OverflowException") == 0)
4516 return MONO_EXC_OVERFLOW;
4517 if (strcmp (name, "ArithmeticException") == 0)
4518 return MONO_EXC_ARITHMETIC;
4519 if (strcmp (name, "DivideByZeroException") == 0)
4520 return MONO_EXC_DIVIDE_BY_ZERO;
4521 if (strcmp (name, "InvalidCastException") == 0)
4522 return MONO_EXC_INVALID_CAST;
4523 if (strcmp (name, "NullReferenceException") == 0)
4524 return MONO_EXC_NULL_REF;
4525 if (strcmp (name, "ArrayTypeMismatchException") == 0)
4526 return MONO_EXC_ARRAY_TYPE_MISMATCH;
4527 g_error ("Unknown intrinsic exception %s\n", name);
/*
 * mono_arch_emit_exceptions:
 * Appends the out-of-line exception-raising stubs and overflow-branch
 * trampolines referenced from the method body.  Throw stubs are shared per
 * exception id within the method (exc_throw_pos caches the first one).
 */
4532 mono_arch_emit_exceptions (MonoCompile *cfg)
4534 MonoJumpInfo *patch_info;
4537 const guint8* exc_throw_pos [MONO_EXC_INTRINS_NUM] = {NULL};
4538 guint8 exc_throw_found [MONO_EXC_INTRINS_NUM] = {0};
4539 int max_epilog_size = 50;
4541 /* count the number of exception infos */
4544 * make sure we have enough space for exceptions
4545 * 24 is the simulated call to throw_exception_by_name
4547 for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
4548 if (patch_info->type == MONO_PATCH_INFO_EXC) {
4549 i = exception_id_by_name (patch_info->data.target);
4550 if (!exc_throw_found [i]) {
4551 max_epilog_size += 24;
4552 exc_throw_found [i] = TRUE;
4554 } else if (patch_info->type == MONO_PATCH_INFO_BB_OVF)
4555 max_epilog_size += 12;
4556 else if (patch_info->type == MONO_PATCH_INFO_EXC_OVF) {
4557 MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
4558 i = exception_id_by_name (ovfj->data.exception);
4559 if (!exc_throw_found [i]) {
4560 max_epilog_size += 24;
4561 exc_throw_found [i] = TRUE;
4563 max_epilog_size += 8;
/* grow the buffer so the worst-case stub code fits */
4567 while (cfg->code_len + max_epilog_size > (cfg->code_size - 16)) {
4568 cfg->code_size *= 2;
4569 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
4570 mono_jit_stats.code_reallocs++;
4573 code = cfg->native_code + cfg->code_len;
4575 /* add code to raise exceptions */
4576 for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
4577 switch (patch_info->type) {
4578 case MONO_PATCH_INFO_BB_OVF: {
4579 MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
4580 unsigned char *ip = patch_info->ip.i + cfg->native_code;
4581 /* patch the initial jump */
4582 ppc_patch (ip, code);
4583 ppc_bc (code, ovfj->b0_cond, ovfj->b1_cond, 2);
4585 ppc_patch (code - 4, ip + 4); /* jump back after the initial branch */
4586 /* jump back to the true target */
4588 ip = ovfj->data.bb->native_offset + cfg->native_code;
4589 ppc_patch (code - 4, ip);
4592 case MONO_PATCH_INFO_EXC_OVF: {
4593 MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
4594 MonoJumpInfo *newji;
4595 unsigned char *ip = patch_info->ip.i + cfg->native_code;
4596 unsigned char *bcl = code;
4597 /* patch the initial jump: we arrived here with a call */
4598 ppc_patch (ip, code);
4599 ppc_bc (code, ovfj->b0_cond, ovfj->b1_cond, 0);
4601 ppc_patch (code - 4, ip + 4); /* jump back after the initial branch */
4602 /* patch the conditional jump to the right handler */
4603 /* make it processed next */
/* queue a synthetic EXC patch so the stub below gets wired up */
4604 newji = mono_mempool_alloc (cfg->mempool, sizeof (MonoJumpInfo));
4605 newji->type = MONO_PATCH_INFO_EXC;
4606 newji->ip.i = bcl - cfg->native_code;
4607 newji->data.target = ovfj->data.exception;
4608 newji->next = patch_info->next;
4609 patch_info->next = newji;
4612 case MONO_PATCH_INFO_EXC: {
4613 unsigned char *ip = patch_info->ip.i + cfg->native_code;
4614 i = exception_id_by_name (patch_info->data.target);
4615 if (exc_throw_pos [i]) {
/* reuse the stub already emitted for this exception id */
4616 ppc_patch (ip, exc_throw_pos [i]);
4617 patch_info->type = MONO_PATCH_INFO_NONE;
4620 exc_throw_pos [i] = code;
4622 ppc_patch (ip, code);
4623 /*mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC_NAME, patch_info->data.target);*/
4624 ppc_load (code, ppc_r3, patch_info->data.target);
4625 /* we got here from a conditional call, so the calling ip is set in lr already */
/* repurpose this patch entry to target the throw helper */
4626 patch_info->type = MONO_PATCH_INFO_INTERNAL_METHOD;
4627 patch_info->data.name = "mono_arch_throw_exception_by_name";
4628 patch_info->ip.i = code - cfg->native_code;
4629 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
4630 ppc_load_func (code, ppc_r0, 0);
4631 ppc_mtctr (code, ppc_r0);
4632 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
4644 cfg->code_len = code - cfg->native_code;
4646 g_assert (cfg->code_len < cfg->code_size);
/*
 * setup_tls_access:
 * Lazily resolves the pthread keys used for fast inline TLS access
 * (domain, LMF, thread) and caches them in the file-level statics.
 * Sets tls_mode to TLS_MODE_FAILED when detection is not possible.
 */
4651 setup_tls_access (void)
4656 tls_mode = TLS_MODE_FAILED;
/* cache the pthread key for the current MonoDomain */
4659 if (monodomain_key == -1) {
4660 ptk = mono_domain_get_tls_key ();
4662 ptk = mono_pthread_key_for_tls (ptk);
4664 monodomain_key = ptk;
/* cache the pthread key holding the MonoJitTlsData (for LMF access) */
4668 if (lmf_pthread_key == -1) {
4669 ptk = mono_pthread_key_for_tls (mono_jit_tls_id);
4671 /*g_print ("MonoLMF at: %d\n", ptk);*/
4672 /*if (!try_offset_access (mono_get_lmf_addr (), ptk)) {
4673 init_tls_failed = 1;
4676 lmf_pthread_key = ptk;
/* cache the pthread key for the current MonoThread */
4679 if (monothread_key == -1) {
4680 ptk = mono_thread_get_tls_key ();
4682 ptk = mono_pthread_key_for_tls (ptk);
4684 monothread_key = ptk;
4685 /*g_print ("thread inited: %d\n", ptk);*/
4688 /*g_print ("thread not inited yet %d\n", ptk);*/
/* Per-thread JIT TLS initialization hook: only needs the TLS key detection. */
4694 mono_arch_setup_jit_tls_data (MonoJitTlsData *tls)
4696 setup_tls_access ();
/* Per-thread JIT TLS teardown hook; nothing to release on this backend. */
4700 mono_arch_free_jit_tls_data (MonoJitTlsData *tls)
4704 #ifdef MONO_ARCH_HAVE_IMT
4706 #define CMP_SIZE (PPC_LOAD_SEQUENCE_LENGTH + 4)
4708 #define JUMP_IMM_SIZE 12
4709 #define JUMP_IMM32_SIZE (PPC_LOAD_SEQUENCE_LENGTH + 8)
4710 #define ENABLE_WRONG_METHOD_CHECK 0
4713 * LOCKING: called with the domain lock held
/*
 * mono_arch_build_imt_thunk:
 * Builds the IMT/interface-dispatch thunk: a chain of compare-and-branch
 * chunks keyed on MONO_ARCH_IMT_REG.  Pass 1 sizes each chunk, pass 2 emits
 * the code, pass 3 patches the inter-chunk branches.  When fail_tramp is
 * set, a mismatch on the final equality check jumps to it instead of being
 * undefined.  LOCKING: called with the domain lock held.
 */
4716 mono_arch_build_imt_thunk (MonoVTable *vtable, MonoDomain *domain, MonoIMTCheckItem **imt_entries, int count,
4717 gpointer fail_tramp)
4721 guint8 *code, *start;
/* pass 1: compute per-item chunk sizes and the total thunk size */
4723 for (i = 0; i < count; ++i) {
4724 MonoIMTCheckItem *item = imt_entries [i];
4725 if (item->is_equals) {
4726 if (item->check_target_idx) {
4727 if (!item->compare_done)
4728 item->chunk_size += CMP_SIZE;
4730 item->chunk_size += BR_SIZE + JUMP_IMM32_SIZE;
4732 item->chunk_size += BR_SIZE + JUMP_IMM_SIZE;
4735 item->chunk_size += CMP_SIZE + BR_SIZE + JUMP_IMM32_SIZE * 2;
4737 item->chunk_size += JUMP_IMM_SIZE;
4738 #if ENABLE_WRONG_METHOD_CHECK
4739 item->chunk_size += CMP_SIZE + BR_SIZE + 4;
4744 item->chunk_size += CMP_SIZE + BR_SIZE;
4745 imt_entries [item->check_target_idx]->compare_done = TRUE;
4747 size += item->chunk_size;
4750 code = mono_method_alloc_generic_virtual_thunk (domain, size);
4752 /* the initial load of the vtable address */
4753 size += PPC_LOAD_SEQUENCE_LENGTH;
4754 code = mono_code_manager_reserve (domain->code_mp, size);
/* pass 2: emit the compare/branch chunks */
4758 ppc_load (code, ppc_r11, (gulong)(& (vtable->vtable [0])));
4759 for (i = 0; i < count; ++i) {
4760 MonoIMTCheckItem *item = imt_entries [i];
4761 item->code_target = code;
4762 if (item->is_equals) {
4763 if (item->check_target_idx) {
4764 if (!item->compare_done) {
4765 ppc_load (code, ppc_r0, (gulong)item->key);
4766 ppc_cmpl (code, 0, 1, MONO_ARCH_IMT_REG, ppc_r0);
4768 item->jmp_code = code;
4769 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4771 ppc_load (code, ppc_r0, item->value.target_code);
4773 ppc_load_reg (code, ppc_r0, (sizeof (gpointer) * item->value.vtable_slot), ppc_r11);
4774 ppc_mtctr (code, ppc_r0);
4775 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
/* fail_tramp case: equality check with an explicit failure branch */
4778 ppc_load (code, ppc_r0, (gulong)item->key);
4779 ppc_cmpl (code, 0, 1, MONO_ARCH_IMT_REG, ppc_r0);
4780 item->jmp_code = code;
4781 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4782 ppc_load (code, ppc_r0, item->value.target_code);
4783 ppc_mtctr (code, ppc_r0);
4784 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
4785 ppc_patch (item->jmp_code, code);
4786 ppc_load (code, ppc_r0, fail_tramp);
4787 ppc_mtctr (code, ppc_r0);
4788 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
4789 item->jmp_code = NULL;
4791 /* enable the commented code to assert on wrong method */
4792 #if ENABLE_WRONG_METHOD_CHECK
4793 ppc_load (code, ppc_r0, (guint32)item->key);
4794 ppc_cmpl (code, 0, 1, MONO_ARCH_IMT_REG, ppc_r0);
4795 item->jmp_code = code;
4796 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4798 ppc_load_reg (code, ppc_r0, (sizeof (gpointer) * item->value.vtable_slot), ppc_r11);
4799 ppc_mtctr (code, ppc_r0);
4800 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
4801 #if ENABLE_WRONG_METHOD_CHECK
4802 ppc_patch (item->jmp_code, code);
4804 item->jmp_code = NULL;
/* comparison node: branch to the subtree handling larger keys */
4809 ppc_load (code, ppc_r0, (gulong)item->key);
4810 ppc_cmpl (code, 0, 1, MONO_ARCH_IMT_REG, ppc_r0);
4811 item->jmp_code = code;
4812 ppc_bc (code, PPC_BR_FALSE, PPC_BR_LT, 0);
4815 /* patch the branches to get to the target items */
4816 for (i = 0; i < count; ++i) {
4817 MonoIMTCheckItem *item = imt_entries [i];
4818 if (item->jmp_code) {
4819 if (item->check_target_idx) {
4820 ppc_patch (item->jmp_code, imt_entries [item->check_target_idx]->code_target);
4826 mono_stats.imt_thunks_size += code - start;
4827 g_assert (code - start <= size);
4828 mono_arch_flush_icache (start, size);
4829 mono_ppc_emitted (start, size, "imt thunk vtable %p count %d fail_tramp %d", vtable, count, fail_tramp);
/* Recovers the IMT method argument from the saved register state of a call site. */
4834 mono_arch_find_imt_method (gpointer *regs, guint8 *code)
4836 return (MonoMethod*) regs [MONO_ARCH_IMT_REG];
/* Recovers the `this` argument of @method from the saved register state @regs. */
4840 mono_arch_find_this_argument (gpointer *regs, MonoMethod *method, MonoGenericSharingContext *gsctx)
4842 return mono_arch_get_this_arg_from_call (gsctx, mono_method_signature (method), (gssize*)regs, NULL);
4846 #ifdef MONO_ARCH_RGCTX_REG
/* Recovers the runtime-generic-context vtable from the saved register state. */
4848 mono_arch_find_static_call_vtable (gpointer *regs, guint8 *code)
4850 return (MonoVTable*) regs [MONO_ARCH_RGCTX_REG];
/* Arch hook for intrinsic method replacement; no intrinsics implemented here. */
4855 mono_arch_emit_inst_for_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
/* Arch hook for pretty-printing instruction trees; no arch-specific output. */
4862 mono_arch_print_tree (MonoInst *tree, int arity)
/*
 * Returns an OP_TLS_GET instruction reading the current MonoDomain from
 * pthread TLS, or (per the visible guard) bails out when the domain key
 * could not be detected.
 */
4867 MonoInst* mono_arch_get_domain_intrinsic (MonoCompile* cfg)
4871 setup_tls_access ();
4872 if (monodomain_key == -1)
4875 MONO_INST_NEW (cfg, ins, OP_TLS_GET);
4876 ins->inst_offset = monodomain_key;
/*
 * Returns an OP_TLS_GET instruction reading the current MonoThread from
 * pthread TLS, or (per the visible guard) bails out when the thread key
 * could not be detected.
 */
4881 mono_arch_get_thread_intrinsic (MonoCompile* cfg)
4885 setup_tls_access ();
4886 if (monothread_key == -1)
4889 MONO_INST_NEW (cfg, ins, OP_TLS_GET);
4890 ins->inst_offset = monothread_key;
/*
 * Fetches callee-saved register @reg (r13-r31) from a saved MonoContext;
 * ctx->regs only stores the callee-saved range, hence the `reg - 13` bias.
 */
4895 mono_arch_context_get_int_reg (MonoContext *ctx, int reg)
4897 g_assert (reg >= 13);
4899 return (gpointer)ctx->regs [reg - 13];
4903 mono_ppc_emitted (guint8 *code, ssize_t length, const char *format, ...)
4908 va_start (args, format);
4909 name = g_strdup_vprintf (format, args);
4912 //g_print ("emitted [%s] at %p %p (length %ld)\n", name, code, code + length, length);