2 * mini-ppc.c: PowerPC backend for the Mono code generator
5 * Paolo Molaro (lupus@ximian.com)
6 * Dietmar Maurer (dietmar@ximian.com)
7 * Andreas Faerber <andreas.faerber@web.de>
9 * (C) 2003 Ximian, Inc.
10 * (C) 2007-2008 Andreas Faerber
15 #include <mono/metadata/appdomain.h>
16 #include <mono/metadata/debug-helpers.h>
20 #include "cpu-ppc64.h"
27 #include <sys/sysctl.h>
33 #define FORCE_INDIR_CALL 1
44 /* This mutex protects architecture specific caches */
45 #define mono_mini_arch_lock() EnterCriticalSection (&mini_arch_mutex)
46 #define mono_mini_arch_unlock() LeaveCriticalSection (&mini_arch_mutex)
47 static CRITICAL_SECTION mini_arch_mutex;
/* NOTE(review): set during mono_arch_allocate_vars (see the
 * mono_exc_esp_offset assignment further down); presumably consumed by the
 * exception handling code outside this chunk -- confirm. */
49 int mono_exc_esp_offset = 0;
/* TLS access strategy, dispatched on by emit_tls_access(); starts in
 * detect mode and is presumably resolved at runtime elsewhere. */
50 static int tls_mode = TLS_MODE_DETECT;
/* pthread keys for fast TLS lookups; -1 = not yet determined */
51 static int lmf_pthread_key = -1;
52 static int monothread_key = -1;
53 static int monodomain_key = -1;
/*
 * offsets_from_pthread_key:
 * Translates a pthread TLS @key into a pair of offsets used to reach the
 * slot from the thread descriptor (used by emit_linuxthreads_tls, which
 * loads via ppc_r2). Writes the second-level offset to @offset2 and
 * returns the first-level offset.
 * NOTE(review): the idx1/idx2 computations are not visible in this chunk;
 * the 284 constant presumably matches the specific-data area offset in the
 * libc thread descriptor -- verify against the target libc.
 */
56 offsets_from_pthread_key (guint32 key, int *offset2)
60 *offset2 = idx2 * sizeof (gpointer);
61 return 284 + idx1 * sizeof (gpointer);
/* Emit code loading the TLS slot for `key` into `dreg` on linuxthreads:
 * two dependent loads starting from ppc_r2 (the thread pointer), using the
 * offset pair produced by offsets_from_pthread_key(). Clobbers dreg only. */
64 #define emit_linuxthreads_tls(code,dreg,key) do {\
66 off1 = offsets_from_pthread_key ((key), &off2); \
67 ppc_load_reg ((code), (dreg), off1, ppc_r2); \
68 ppc_load_reg ((code), (dreg), off2, (dreg)); \
/* Emit a TLS load for Darwin/G5: read the thread pointer from SPR 104 and
 * load the slot at a fixed per-key offset.
 * NOTE(review): 0x48 is presumably the offset of the pthread specific-data
 * array inside Darwin's thread structure -- confirm against the OS version. */
71 #define emit_darwing5_tls(code,dreg,key) do {\
72 int off1 = 0x48 + key * sizeof (gpointer); \
73 ppc_mfspr ((code), (dreg), 104); \
74 ppc_load_reg ((code), (dreg), off1, (dreg)); \
77 /* FIXME: ensure the sc call preserves all but r3 */
/* Emit a TLS load for Darwin/G4: obtain the thread pointer via the 0x7FF2
 * fast system call (the `sc` emission itself is on a line not visible in
 * this chunk), then load the slot. r11 is used to preserve r3 when the
 * destination differs from r3. */
78 #define emit_darwing4_tls(code,dreg,key) do {\
79 int off1 = 0x48 + key * sizeof (gpointer); \
80 if ((dreg) != ppc_r3) ppc_mr ((code), ppc_r11, ppc_r3); \
81 ppc_li ((code), ppc_r0, 0x7FF2); \
83 ppc_lwz ((code), (dreg), off1, ppc_r3); \
84 if ((dreg) != ppc_r3) ppc_mr ((code), ppc_r3, ppc_r11); \
87 #ifdef PPC_THREAD_PTR_REG
/* Emit a TLS load for NPTL: the slot lives at a signed offset from the
 * dedicated thread-pointer register. When the offset fits in a signed
 * 16-bit displacement (key >> 15 is 0 or -1) a single load suffices;
 * otherwise an addis supplies the high half first. */
88 #define emit_nptl_tls(code,dreg,key) do { \
90 int off2 = key >> 15; \
91 if ((off2 == 0) || (off2 == -1)) { \
92 ppc_load_reg ((code), (dreg), off1, PPC_THREAD_PTR_REG); \
/* NOTE(review): `(off2 + 1) > 1` evaluates to 0 or 1, which looks wrong
 * for an addis high-half adjustment (one would expect something derived
 * from key >> 16 plus a sign-extension carry). Verify against upstream
 * before relying on the far-offset path. */
94 int off3 = (off2 + 1) > 1; \
95 ppc_addis ((code), ppc_r11, PPC_THREAD_PTR_REG, off3); \
96 ppc_load_reg ((code), (dreg), off1, ppc_r11); \
/* Non-NPTL fallback: this path must never be emitted. */
100 #define emit_nptl_tls(code,dreg,key) do { \
101 g_assert_not_reached (); \
/* Dispatch TLS-load emission on the runtime-detected threading library.
 * tls_mode must have been resolved before any code emission reaches here,
 * otherwise the default arm aborts. */
105 #define emit_tls_access(code,dreg,key) do { \
106 switch (tls_mode) { \
107 case TLS_MODE_LTHREADS: emit_linuxthreads_tls(code,dreg,key); break; \
108 case TLS_MODE_NPTL: emit_nptl_tls(code,dreg,key); break; \
109 case TLS_MODE_DARWIN_G5: emit_darwing5_tls(code,dreg,key); break; \
110 case TLS_MODE_DARWIN_G4: emit_darwing4_tls(code,dreg,key); break; \
111 default: g_assert_not_reached (); \
/* Append an OP_R8CONST instruction to the current basic block: a double
 * constant referenced through its address (inst_p0), with dreg assignment
 * on a line not visible in this chunk. */
115 #define MONO_EMIT_NEW_LOAD_R8(cfg,dr,addr) do { \
117 MONO_INST_NEW ((cfg), (inst), OP_R8CONST); \
118 inst->type = STACK_R8; \
120 inst->inst_p0 = (void*)(addr); \
121 mono_bblock_add_inst (cfg->cbb, inst); \
/*
 * mono_arch_regname:
 * Returns a static human-readable name for integer register @reg
 * (r1 is reported as "sp"). Indices outside [0,32) fall through to a
 * return on a line not visible in this chunk; the table rows for
 * r30/r31 are likewise not visible here.
 */
125 mono_arch_regname (int reg) {
126 static const char rnames[][4] = {
127 "r0", "sp", "r2", "r3", "r4",
128 "r5", "r6", "r7", "r8", "r9",
129 "r10", "r11", "r12", "r13", "r14",
130 "r15", "r16", "r17", "r18", "r19",
131 "r20", "r21", "r22", "r23", "r24",
132 "r25", "r26", "r27", "r28", "r29",
135 if (reg >= 0 && reg < 32)
/*
 * mono_arch_fregname:
 * Returns a static human-readable name for floating-point register @reg.
 * Mirrors mono_arch_regname; the f30/f31 rows and both return statements
 * fall on lines not visible in this chunk.
 */
141 mono_arch_fregname (int reg) {
142 static const char rnames[][4] = {
143 "f0", "f1", "f2", "f3", "f4",
144 "f5", "f6", "f7", "f8", "f9",
145 "f10", "f11", "f12", "f13", "f14",
146 "f15", "f16", "f17", "f18", "f19",
147 "f20", "f21", "f22", "f23", "f24",
148 "f25", "f26", "f27", "f28", "f29",
151 if (reg >= 0 && reg < 32)
156 /* this function overwrites r0, r11, r12 */
/*
 * emit_memcpy:
 * Emits code copying @size bytes from sreg+soffset to dreg+doffset.
 * Large copies (> 5 words) use a CTR-counted loop of word-sized
 * load-with-update / store-with-update pairs; the remainder is copied
 * with progressively narrower unrolled load/store pairs (word, half,
 * byte) on the partially visible lines below.
 */
158 emit_memcpy (guint8 *code, int size, int dreg, int doffset, int sreg, int soffset)
160 /* unrolled, use the counter in big */
161 if (size > sizeof (gpointer) * 5) {
/* number of full machine words to move (2 = 32-bit, 3 = 64-bit shift) */
162 long shifted = size >> MONO_PPC_32_64_CASE (2, 3);
163 guint8 *copy_loop_start, *copy_loop_jump;
165 ppc_load (code, ppc_r0, shifted);
166 ppc_mtctr (code, ppc_r0);
167 g_assert (sreg == ppc_r11);
/* pre-bias the pointers so the update-form loads/stores can advance
 * by one word per iteration */
168 ppc_addi (code, ppc_r12, dreg, (doffset - sizeof (gpointer)));
169 ppc_addi (code, ppc_r11, sreg, (soffset - sizeof (gpointer)));
170 copy_loop_start = code;
171 ppc_load_reg_update (code, ppc_r0, (unsigned int)sizeof (gpointer), ppc_r11);
172 ppc_store_reg_update (code, ppc_r0, (unsigned int)sizeof (gpointer), ppc_r12);
173 copy_loop_jump = code;
/* decrement CTR, branch back while non-zero */
174 ppc_bc (code, PPC_BR_DEC_CTR_NONZERO, 0, 0);
175 ppc_patch (copy_loop_jump, copy_loop_start);
176 size -= shifted * sizeof (gpointer);
177 doffset = soffset = 0;
/* tail: copy the remaining bytes with the widest loads that still fit;
 * the surrounding while-loop headers are on lines not visible here */
180 #ifdef __mono_ppc64__
182 ppc_load_reg (code, ppc_r0, soffset, sreg);
183 ppc_store_reg (code, ppc_r0, doffset, dreg);
190 ppc_lwz (code, ppc_r0, soffset, sreg);
191 ppc_stw (code, ppc_r0, doffset, dreg);
197 ppc_lhz (code, ppc_r0, soffset, sreg);
198 ppc_sth (code, ppc_r0, doffset, dreg);
204 ppc_lbz (code, ppc_r0, soffset, sreg);
205 ppc_stb (code, ppc_r0, doffset, dreg);
214 * mono_arch_get_argument_info:
215 * @csig: a method signature
216 * @param_count: the number of parameters to consider
217 * @arg_info: an array to store the result infos
219 * Gathers information on parameters such as size, alignment and
220 * padding. arg_info should be large enought to hold param_count + 1 entries.
222 * Returns the size of the activation frame.
225 mono_arch_get_argument_info (MonoMethodSignature *csig, int param_count, MonoJitArgumentInfo *arg_info)
227 #ifdef __mono_ppc64__
231 int k, frame_size = 0;
232 int size, align, pad;
/* a struct return value consumes one hidden pointer slot up front */
235 if (MONO_TYPE_ISSTRUCT (csig->ret)) {
236 frame_size += sizeof (gpointer);
240 arg_info [0].offset = offset;
/* `this` pointer, when present (condition line not visible here) */
243 frame_size += sizeof (gpointer);
247 arg_info [0].size = frame_size;
249 for (k = 0; k < param_count; k++) {
252 size = mono_type_native_stack_size (csig->params [k], (guint32*)&align);
254 size = mini_type_stack_size (NULL, csig->params [k], &align);
256 /* ignore alignment for now */
/* round frame_size up to `align` (align is assumed to be a power of
 * two) and record the padding inserted before this argument */
259 frame_size += pad = (align - (frame_size & (align - 1))) & (align - 1);
260 arg_info [k].pad = pad;
262 arg_info [k + 1].pad = 0;
263 arg_info [k + 1].size = size;
265 arg_info [k + 1].offset = offset;
/* final frame padding up to the architecture frame alignment */
269 align = MONO_ARCH_FRAME_ALIGNMENT;
270 frame_size += pad = (align - (frame_size & (align - 1))) & (align - 1);
271 arg_info [k].pad = pad;
277 #ifdef __mono_ppc64__
/*
 * is_load_sequence:
 * Returns TRUE when the five instructions at @seq form the ppc64
 * 64-bit-immediate load idiom (lis/ori/sldi/oris/ori) used by the
 * thunk-less direct call sequence.
 */
279 is_load_sequence (guint32 *seq)
281 return ppc_opcode (seq [0]) == 15 && /* lis */
282 ppc_opcode (seq [1]) == 24 && /* ori */
283 ppc_opcode (seq [2]) == 30 && /* sldi */
284 ppc_opcode (seq [3]) == 25 && /* oris */
285 ppc_opcode (seq [4]) == 24; /* ori */
/* decode helpers for D-form loads: destination register (bits 21-25)
 * and the signed 16-bit displacement */
288 #define ppc_load_get_dest(l) (((l)>>21) & 0x1f)
289 #define ppc_load_get_off(l) ((gint16)((l) & 0xffff))
292 /* code must point to the blrl */
/*
 * mono_ppc_is_direct_call_sequence:
 * Returns TRUE when the instructions preceding @code (which must point at
 * the branch-to-link-register instruction) were emitted as a thunk-less
 * direct call, i.e. the target address was materialized inline rather
 * than loaded from a table. Used when deciding how to patch calls.
 */
294 mono_ppc_is_direct_call_sequence (guint32 *code)
296 #ifdef __mono_ppc64__
/* blrl, blr or bctr -- the three call/branch terminators we patch */
297 g_assert(*code == 0x4e800021 || *code == 0x4e800020 || *code == 0x4e800420);
299 /* the thunk-less direct call sequence: lis/ori/sldi/oris/ori/mtlr/blrl */
300 if (ppc_opcode (code [-1]) == 31) { /* mtlr */
/* function-descriptor variant: two ld instructions preceding mtlr */
301 if (ppc_opcode (code [-2]) == 58 && ppc_opcode (code [-3]) == 58) { /* ld/ld */
302 if (!is_load_sequence (&code [-8]))
304 /* one of the loads must be "ld r2,8(rX)" */
305 return (ppc_load_get_dest (code [-2]) == ppc_r2 && ppc_load_get_off (code [-2]) == 8) ||
306 (ppc_load_get_dest (code [-3]) == ppc_r2 && ppc_load_get_off (code [-3]) == 8);
308 if (ppc_opcode (code [-2]) == 24 && ppc_opcode (code [-3]) == 31) /* mr/nop */
309 return is_load_sequence (&code [-8]);
311 return is_load_sequence (&code [-6]);
/* ppc32: only blrl is expected here */
315 g_assert(*code == 0x4e800021);
317 /* the thunk-less direct call sequence: lis/ori/mtlr/blrl */
318 return ppc_opcode (code [-1]) == 31 &&
319 ppc_opcode (code [-2]) == 24 &&
320 ppc_opcode (code [-3]) == 15;
/*
 * mono_arch_get_vcall_slot:
 * Given @code_ptr just past a virtual-call site and the saved register
 * state @regs, walks backwards from the blrl to the mtlr that loaded the
 * call target, decodes which register/offset the vtable slot was loaded
 * from, and returns the base object with the slot offset in @displacement.
 * Returns NULL-equivalent paths on lines not visible in this chunk.
 */
325 mono_arch_get_vcall_slot (guint8 *code_ptr, gpointer *regs, int *displacement)
329 guint32* code = (guint32*)code_ptr;
333 /* This is the 'blrl' instruction */
336 /* Sanity check: instruction must be 'blrl' */
337 if (*code != 0x4e800021)
/* direct (non-vtable) calls have no vcall slot */
340 if (mono_ppc_is_direct_call_sequence (code))
343 /* FIXME: more sanity checks here */
344 /* OK, we're now at the 'blrl' instruction. Now walk backwards
345 till we get to a 'mtlr rA' */
/* NOTE(review): masking with the full mtlr encoding and comparing for
 * equality only proves the masked bits are set; it matches mtlr but
 * could in principle match other rc-form encodings -- confirm this is
 * safe for the instruction mix emitted here. */
347 if((*code & 0x7c0803a6) == 0x7c0803a6) {
349 /* Here we are: we reached the 'mtlr rA'.
350 Extract the register from the instruction */
351 reg = (*code & 0x03e00000) >> 21;
353 /* ok, this is a lwz reg, offset (vtreg)
354 * it is emitted with:
355 * ppc_emit32 (c, (32 << 26) | ((D) << 21) | ((a) << 16) | (guint16)(d))
357 soff = (*code & 0xffff);
359 reg = (*code >> 16) & 0x1f;
360 g_assert (reg != ppc_r1);
361 /*g_print ("patching reg is %d\n", reg);*/
/* non-volatile registers (r13+) are found in the MonoLMF saved just
 * after the 14 doubles and 13 gpointers in the regs block */
363 MonoLMF *lmf = (MonoLMF*)((char*)regs + (14 * sizeof (double)) + (13 * sizeof (gpointer)));
364 /* saved in the MonoLMF structure */
365 o = (gpointer)lmf->iregs [reg - 13];
372 *displacement = offset;
376 #define MAX_ARCH_DELEGATE_PARAMS 7
/*
 * mono_arch_get_delegate_invoke_impl:
 * Returns (and caches) a small trampoline that implements delegate
 * invocation without a managed wrapper. With @has_target the `this`
 * argument is replaced by the delegate target; otherwise the arguments
 * are shifted down one register. Results are cached per parameter count
 * under the arch mutex. Struct returns and non-regsize parameters are
 * not supported (NULL paths fall on lines not visible in this chunk).
 */
379 mono_arch_get_delegate_invoke_impl (MonoMethodSignature *sig, gboolean has_target)
381 guint8 *code, *start;
383 /* FIXME: Support more cases */
384 if (MONO_TYPE_ISSTRUCT (sig->ret))
388 static guint8* cached = NULL;
389 int size = MONO_PPC_32_64_CASE (16, 20) + PPC_FTNPTR_SIZE;
390 mono_mini_arch_lock ();
392 mono_mini_arch_unlock ();
396 start = code = mono_global_codeman_reserve (size);
397 code = mono_ppc_create_pre_code_ftnptr (code);
399 /* Replace the this argument with the target */
400 ppc_load_reg (code, ppc_r0, G_STRUCT_OFFSET (MonoDelegate, method_ptr), ppc_r3);
401 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
402 /* it's a function descriptor */
403 ppc_ldx (code, ppc_r0, 0, ppc_r0);
405 ppc_mtctr (code, ppc_r0);
406 ppc_load_reg (code, ppc_r3, G_STRUCT_OFFSET (MonoDelegate, target), ppc_r3);
407 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
409 g_assert ((code - start) <= size);
411 mono_arch_flush_icache (start, size);
413 mono_mini_arch_unlock ();
/* no-target variant: one cached trampoline per parameter count */
416 static guint8* cache [MAX_ARCH_DELEGATE_PARAMS + 1] = {NULL};
419 if (sig->param_count > MAX_ARCH_DELEGATE_PARAMS)
421 for (i = 0; i < sig->param_count; ++i)
422 if (!mono_is_regsize_var (sig->params [i]))
425 mono_mini_arch_lock ();
426 code = cache [sig->param_count];
428 mono_mini_arch_unlock ();
432 size = MONO_PPC_32_64_CASE (12, 16) + sig->param_count * 4 + PPC_FTNPTR_SIZE;
433 start = code = mono_global_codeman_reserve (size);
434 code = mono_ppc_create_pre_code_ftnptr (code);
436 ppc_load_reg (code, ppc_r0, G_STRUCT_OFFSET (MonoDelegate, method_ptr), ppc_r3);
437 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
438 /* it's a function descriptor */
439 ppc_ldx (code, ppc_r0, 0, ppc_r0);
441 ppc_mtctr (code, ppc_r0);
442 /* slide down the arguments */
443 for (i = 0; i < sig->param_count; ++i) {
444 ppc_mr (code, (ppc_r3 + i), (ppc_r3 + i + 1));
446 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
448 g_assert ((code - start) <= size);
450 mono_arch_flush_icache (start, size);
451 cache [sig->param_count] = start;
452 mono_mini_arch_unlock ();
/*
 * mono_arch_get_this_arg_from_call:
 * Extracts the `this` pointer from the saved argument registers of a
 * call site: normally r3, or r4 when r3 carries the hidden struct-return
 * pointer.
 */
459 mono_arch_get_this_arg_from_call (MonoGenericSharingContext *gsctx, MonoMethodSignature *sig, gssize *regs, guint8 *code)
461 /* FIXME: handle returning a struct */
462 if (MONO_TYPE_ISSTRUCT (sig->ret))
463 return (gpointer)regs [ppc_r4];
464 return (gpointer)regs [ppc_r3];
468 * Initialize the cpu to execute managed code.
/* NOTE(review): body not visible in this chunk. */
471 mono_arch_cpu_init (void)
476 * Initialize architecture specific code.
/* Sets up the mutex guarding the arch-specific caches (delegate
 * trampoline caches above). */
479 mono_arch_init (void)
481 InitializeCriticalSection (&mini_arch_mutex);
485 * Cleanup architecture specific code.
/* Counterpart of mono_arch_init: releases the arch mutex. */
488 mono_arch_cleanup (void)
490 DeleteCriticalSection (&mini_arch_mutex);
494 * This function returns the optimizations supported on this cpu.
/* (The misspelled name is the established Mono arch API symbol; callers
 * elsewhere depend on it, so it must not be renamed here.) */
497 mono_arch_cpu_optimizazions (guint32 *exclude_mask)
501 /* no ppc-specific optimizations yet */
/* Width-conditional case labels: CASE_PPC64 expands to a real `case`
 * only on ppc64, CASE_PPC32 only on ppc32, so a single switch can list
 * both 32- and 64-bit-only type cases. */
506 #ifdef __mono_ppc64__
507 #define CASE_PPC32(c)
508 #define CASE_PPC64(c) case c:
510 #define CASE_PPC32(c) case c:
511 #define CASE_PPC64(c)
/*
 * is_regsize_var:
 * Returns TRUE when @t (after unwrapping enums etc.) fits in a single
 * integer register and may therefore be globally register-allocated.
 * The integer/pointer cases and return statements partly fall on lines
 * not visible in this chunk.
 */
515 is_regsize_var (MonoType *t) {
518 t = mini_type_get_underlying_type (NULL, t);
/* 8-byte integers are regsize only on ppc64 */
522 CASE_PPC64 (MONO_TYPE_I8)
523 CASE_PPC64 (MONO_TYPE_U8)
527 case MONO_TYPE_FNPTR:
529 case MONO_TYPE_OBJECT:
530 case MONO_TYPE_STRING:
531 case MONO_TYPE_CLASS:
532 case MONO_TYPE_SZARRAY:
533 case MONO_TYPE_ARRAY:
535 case MONO_TYPE_GENERICINST:
536 if (!mono_type_generic_inst_is_valuetype (t))
539 case MONO_TYPE_VALUETYPE:
/*
 * mono_arch_get_allocatable_int_vars:
 * Builds the sorted list of local/argument variables eligible for global
 * integer register allocation: live, non-volatile, non-indirect vars of
 * register size.
 */
546 mono_arch_get_allocatable_int_vars (MonoCompile *cfg)
551 for (i = 0; i < cfg->num_varinfo; i++) {
552 MonoInst *ins = cfg->varinfo [i];
553 MonoMethodVar *vmv = MONO_VARINFO (cfg, i);
/* unused or dead variables are skipped */
556 if (vmv->range.first_use.abs_pos >= vmv->range.last_use.abs_pos)
559 if (ins->flags & (MONO_INST_VOLATILE|MONO_INST_INDIRECT) || (ins->opcode != OP_LOCAL && ins->opcode != OP_ARG))
562 /* we can only allocate 32 bit values */
563 if (is_regsize_var (ins->inst_vtype)) {
564 g_assert (MONO_VARINFO (cfg, i)->reg == -1);
565 g_assert (i == vmv->idx);
566 vars = mono_varlist_insert_sorted (cfg, vars, vmv, FALSE);
/*
 * mono_arch_get_global_int_regs:
 * Returns the list of callee-saved integer registers (r14..top) usable
 * for global allocation; the frame register is presumably excluded on a
 * line not visible in this chunk.
 */
574 mono_arch_get_global_int_regs (MonoCompile *cfg)
578 if (cfg->frame_reg != ppc_sp)
580 /* ppc_r13 is used by the system on PPC EABI */
581 for (i = 14; i < top; ++i)
582 regs = g_list_prepend (regs, GUINT_TO_POINTER (i));
588 * mono_arch_regalloc_cost:
590 * Return the cost, in number of memory references, of the action of
591 * allocating the variable VMV into a register during global register
/* NOTE(review): function body not visible in this chunk. */
595 mono_arch_regalloc_cost (MonoCompile *cfg, MonoMethodVar *vmv)
/*
 * mono_arch_flush_icache:
 * Makes @size bytes of freshly written code at @code visible to the
 * instruction fetch unit: detects the cache line size once (sysctl on
 * Darwin, /proc/self/auxv AT_DCACHEBSIZE on Linux), then walks the range
 * flushing the data cache (dcbf/dcbst) and invalidating the instruction
 * cache (icbi). Several platform branches fall on lines not visible in
 * this chunk.
 */
607 mono_arch_flush_icache (guint8 *code, gint size)
610 guint8 *endp, *start;
611 static int cachelinesize = 0;
612 static int cachelineinc = 16;
/* lazy one-time detection of the cache line size */
614 if (!cachelinesize) {
619 mib [1] = HW_CACHELINE;
620 len = sizeof (cachelinesize);
621 if (sysctl(mib, 2, &cachelinesize, (size_t*)&len, NULL, 0) == -1) {
625 cachelineinc = cachelinesize;
626 /*g_print ("setting cl size to %d\n", cachelinesize);*/
628 #elif defined(__linux__)
629 /* sadly this will work only with 2.6 kernels... */
630 FILE* f = fopen ("/proc/self/auxv", "rb");
633 while (fread (&vec, sizeof (vec), 1, f) == 1) {
/* auxv type 19 is AT_DCACHEBSIZE */
634 if (vec.type == 19) {
635 cachelinesize = vec.value;
643 #elif defined(G_COMPILER_CODEWARRIOR)
647 #warning Need a way to get cache line size
/* align the start down to a cache-line boundary before flushing */
653 start = (guint8*)((gsize)start & ~(cachelinesize - 1));
654 /* use dcbf for smp support, later optimize for UP, see pem._64bit.d20030611.pdf page 211 */
655 #if defined(G_COMPILER_CODEWARRIOR)
657 for (p = start; p < endp; p += cachelineinc) {
661 for (p = start; p < endp; p += cachelineinc) {
667 for (p = start; p < endp; p += cachelineinc) {
679 for (p = start; p < endp; p += cachelineinc) {
680 asm ("dcbf 0,%0;" : : "r"(p) : "memory");
683 for (p = start; p < endp; p += cachelineinc) {
684 asm ("dcbst 0,%0;" : : "r"(p) : "memory");
689 for (p = start; p < endp; p += cachelineinc) {
690 asm ("icbi 0,%0; sync;" : : "r"(p) : "memory");
/* NOTE(review): body not visible in this chunk; PPC has no register
 * windows, so this is presumably a no-op -- confirm. */
698 mono_arch_flush_register_windows (void)
/* Calling-convention knobs used by calculate_sizes()/add_general():
 * ALWAYS_ON_STACK(s) executes `s` when register args also reserve stack
 * slots; FP_ALSO_IN_REG(s) executes `s` when FP args also consume integer
 * arg registers. NOTE(review): the first pair is redefined just below
 * under #ifdef __mono_ppc64__ -- the guarding conditional for this first
 * pair is on a line not visible in this chunk (presumably __APPLE__). */
703 #define ALWAYS_ON_STACK(s) s
704 #define FP_ALSO_IN_REG(s) s
706 #ifdef __mono_ppc64__
707 #define ALWAYS_ON_STACK(s) s
708 #define FP_ALSO_IN_REG(s) s
710 #define ALWAYS_ON_STACK(s)
711 #define FP_ALSO_IN_REG(s)
713 #define ALIGN_DOUBLES
/* Fragment of the per-argument descriptor (the struct header and some
 * fields fall on lines not visible in this chunk). */
726 guint32 vtsize; /* in param area */
728 guint8 regtype : 4; /* 0 general, 1 basereg, 2 floating point register, see RegType* */
729 guint8 size : 4; /* 1, 2, 4, 8, or regs used by RegTypeStructByVal */
730 guint8 bytes : 4; /* size in bytes - only valid for
731 RegTypeStructByVal if the struct fits
732 in one word, otherwise it's 0*/
/*
 * add_general:
 * Assigns the next location for a general (integer/pointer) argument:
 * the next free argument register while any remain (*gr), otherwise a
 * stack slot at PPC_STACK_PARAM_OFFSET + *stack_size. @simple is FALSE
 * for 64-bit values on ppc32, which need a register pair / 8-byte slot
 * (the register-assignment and alignment lines are not all visible in
 * this chunk).
 */
747 add_general (guint *gr, guint *stack_size, ArgInfo *ainfo, gboolean simple)
749 #ifdef __mono_ppc64__
754 if (*gr >= 3 + PPC_NUM_REG_ARGS) {
755 ainfo->offset = PPC_STACK_PARAM_OFFSET + *stack_size;
756 ainfo->reg = ppc_sp; /* in the caller */
757 ainfo->regtype = RegTypeBase;
758 *stack_size += sizeof (gpointer);
760 ALWAYS_ON_STACK (*stack_size += sizeof (gpointer));
/* non-simple (64-bit on ppc32): needs an even/odd register pair */
764 if (*gr >= 3 + PPC_NUM_REG_ARGS - 1) {
766 //*stack_size += (*stack_size % 8);
768 ainfo->offset = PPC_STACK_PARAM_OFFSET + *stack_size;
769 ainfo->reg = ppc_sp; /* in the caller */
770 ainfo->regtype = RegTypeBase;
777 ALWAYS_ON_STACK (*stack_size += 8);
785 #if defined(__APPLE__) || defined(__mono_ppc64__)
/*
 * has_only_a_r48_field:
 * Returns TRUE when @klass contains exactly one instance field and that
 * field is an R4 or R8 -- such structs are passed in FP registers on
 * Darwin and ppc64 (the FALSE/accumulation paths are on lines not
 * visible in this chunk).
 */
787 has_only_a_r48_field (MonoClass *klass)
791 gboolean have_field = FALSE;
793 while ((f = mono_class_get_fields (klass, &iter))) {
794 if (!(f->type->attrs & FIELD_ATTRIBUTE_STATIC)) {
797 if (!f->type->byref && (f->type->type == MONO_TYPE_R4 || f->type->type == MONO_TYPE_R8))
/*
 * calculate_sizes:
 * Computes the PPC calling-convention layout for @sig: which register or
 * caller-frame slot each argument (plus `this`, hidden struct-return
 * pointer and vararg signature cookie) occupies, and the total stack
 * space the caller must reserve. Returns a heap-allocated CallInfo
 * (callers free it). Many case labels, else-arms and closing braces fall
 * on lines not visible in this chunk.
 */
808 calculate_sizes (MonoMethodSignature *sig, gboolean is_pinvoke)
811 int n = sig->hasthis + sig->param_count;
813 guint32 stack_size = 0;
814 CallInfo *cinfo = g_malloc0 (sizeof (CallInfo) + sizeof (ArgInfo) * n);
816 fr = PPC_FIRST_FPARG_REG;
817 gr = PPC_FIRST_ARG_REG;
/* hidden struct-return pointer consumes the first argument register */
819 /* FIXME: handle returning a struct */
820 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
821 add_general (&gr, &stack_size, &cinfo->ret, TRUE);
822 cinfo->struct_ret = PPC_FIRST_ARG_REG;
/* `this` argument, when present */
827 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
830 DEBUG(printf("params: %d\n", sig->param_count));
831 for (i = 0; i < sig->param_count; ++i) {
832 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos)) {
833 /* Prevent implicit arguments and sig_cookie from
834 being passed in registers */
835 gr = PPC_LAST_ARG_REG + 1;
836 /* FIXME: don't we have to set fr, too? */
837 /* Emit the signature cookie just before the implicit arguments */
838 add_general (&gr, &stack_size, &cinfo->sig_cookie, TRUE);
840 DEBUG(printf("param %d: ", i));
841 if (sig->params [i]->byref) {
842 DEBUG(printf("byref\n"));
843 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
847 simpletype = mini_type_get_underlying_type (NULL, sig->params [i])->type;
848 switch (simpletype) {
849 case MONO_TYPE_BOOLEAN:
852 cinfo->args [n].size = 1;
853 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
859 cinfo->args [n].size = 2;
860 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
865 cinfo->args [n].size = 4;
866 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
872 case MONO_TYPE_FNPTR:
873 case MONO_TYPE_CLASS:
874 case MONO_TYPE_OBJECT:
875 case MONO_TYPE_STRING:
876 case MONO_TYPE_SZARRAY:
877 case MONO_TYPE_ARRAY:
878 cinfo->args [n].size = sizeof (gpointer);
879 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
882 case MONO_TYPE_GENERICINST:
883 if (!mono_type_generic_inst_is_valuetype (sig->params [i])) {
884 cinfo->args [n].size = sizeof (gpointer);
885 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
/* generic valuetypes fall through to the VALUETYPE case below */
890 case MONO_TYPE_VALUETYPE: {
893 klass = mono_class_from_mono_type (sig->params [i]);
895 size = mono_class_native_size (klass, NULL);
897 size = mono_class_value_size (klass, NULL);
/* single-float/double structs travel in FP registers on Darwin/ppc64 */
898 #if defined(__APPLE__) || defined(__mono_ppc64__)
899 if ((size == 4 || size == 8) && has_only_a_r48_field (klass)) {
900 cinfo->args [n].size = size;
902 /* It was 7, now it is 8 in LinuxPPC */
903 if (fr <= PPC_LAST_FPARG_REG) {
904 cinfo->args [n].regtype = RegTypeFP;
905 cinfo->args [n].reg = fr;
907 FP_ALSO_IN_REG (gr ++);
909 FP_ALSO_IN_REG (gr ++);
910 ALWAYS_ON_STACK (stack_size += size);
912 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
913 cinfo->args [n].regtype = RegTypeBase;
914 cinfo->args [n].reg = ppc_sp; /* in the caller*/
921 DEBUG(printf ("load %d bytes struct\n",
922 mono_class_native_size (sig->params [i]->data.klass, NULL)));
923 #if PPC_PASS_STRUCTS_BY_VALUE
/* split the struct between the remaining arg registers and the stack */
925 int align_size = size;
927 int rest = PPC_LAST_ARG_REG - gr + 1;
929 align_size += (sizeof (gpointer) - 1);
930 align_size &= ~(sizeof (gpointer) - 1);
931 nwords = (align_size + sizeof (gpointer) -1 ) / sizeof (gpointer);
932 n_in_regs = MIN (rest, nwords);
933 cinfo->args [n].regtype = RegTypeStructByVal;
934 if (gr > PPC_LAST_ARG_REG
936 /* FIXME: check this */
937 || (size >= 3 && size % 4 != 0)
940 cinfo->args [n].size = 0;
941 cinfo->args [n].vtsize = nwords;
943 cinfo->args [n].size = n_in_regs;
944 cinfo->args [n].vtsize = nwords - n_in_regs;
945 cinfo->args [n].reg = gr;
947 #ifdef __mono_ppc64__
948 if (nwords == 1 && is_pinvoke)
949 cinfo->args [n].bytes = size;
952 cinfo->args [n].bytes = 0;
954 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
955 /*g_print ("offset for arg %d at %d\n", n, PPC_STACK_PARAM_OFFSET + stack_size);*/
956 stack_size += nwords * sizeof (gpointer);
/* by-address fallback when structs are not passed by value */
959 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
960 cinfo->args [n].regtype = RegTypeStructByAddr;
961 cinfo->args [n].vtsize = size;
966 case MONO_TYPE_TYPEDBYREF: {
967 int size = sizeof (MonoTypedRef);
968 /* keep in sync or merge with the valuetype case */
969 #if PPC_PASS_STRUCTS_BY_VALUE
971 int nwords = (size + sizeof (gpointer) -1 ) / sizeof (gpointer);
972 cinfo->args [n].regtype = RegTypeStructByVal;
973 if (gr <= PPC_LAST_ARG_REG) {
974 int rest = PPC_LAST_ARG_REG - gr + 1;
975 int n_in_regs = rest >= nwords? nwords: rest;
976 cinfo->args [n].size = n_in_regs;
977 cinfo->args [n].vtsize = nwords - n_in_regs;
978 cinfo->args [n].reg = gr;
981 cinfo->args [n].size = 0;
982 cinfo->args [n].vtsize = nwords;
984 #ifdef __mono_ppc64__
985 if (nwords == 1 && is_pinvoke)
986 cinfo->args [n].bytes = size;
989 cinfo->args [n].bytes = 0;
990 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
991 /*g_print ("offset for arg %d at %d\n", n, PPC_STACK_PARAM_OFFSET + stack_size);*/
992 stack_size += nwords * sizeof (gpointer);
995 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
996 cinfo->args [n].regtype = RegTypeStructByAddr;
997 cinfo->args [n].vtsize = size;
/* I8/U8: a pair on ppc32 (simple=FALSE), single reg on ppc64 */
1004 cinfo->args [n].size = 8;
1005 add_general (&gr, &stack_size, cinfo->args + n, sizeof (gpointer) == 8);
/* R4 */
1009 cinfo->args [n].size = 4;
1011 /* It was 7, now it is 8 in LinuxPPC */
1012 if (fr <= PPC_LAST_FPARG_REG) {
1013 cinfo->args [n].regtype = RegTypeFP;
1014 cinfo->args [n].reg = fr;
1016 FP_ALSO_IN_REG (gr ++);
1017 ALWAYS_ON_STACK (stack_size += sizeof (gpointer));
/* on ppc64 the float sits in the low half of the 8-byte slot */
1019 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size + MONO_PPC_32_64_CASE (0, 4);
1020 cinfo->args [n].regtype = RegTypeBase;
1021 cinfo->args [n].reg = ppc_sp; /* in the caller*/
1022 stack_size += sizeof (gpointer);
/* R8 */
1027 cinfo->args [n].size = 8;
1028 /* It was 7, now it is 8 in LinuxPPC */
1029 if (fr <= PPC_LAST_FPARG_REG) {
1030 cinfo->args [n].regtype = RegTypeFP;
1031 cinfo->args [n].reg = fr;
1033 FP_ALSO_IN_REG (gr += sizeof (double) / sizeof (gpointer));
1034 ALWAYS_ON_STACK (stack_size += 8);
1036 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
1037 cinfo->args [n].regtype = RegTypeBase;
1038 cinfo->args [n].reg = ppc_sp; /* in the caller*/
1044 g_error ("Can't trampoline 0x%x", sig->params [i]->type);
/* cookie for varargs whose sentinel position equals param_count */
1048 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos)) {
1049 /* Prevent implicit arguments and sig_cookie from
1050 being passed in registers */
1051 gr = PPC_LAST_ARG_REG + 1;
1052 /* Emit the signature cookie just before the implicit arguments */
1053 add_general (&gr, &stack_size, &cinfo->sig_cookie, TRUE);
/* return value location */
1057 simpletype = mini_type_get_underlying_type (NULL, sig->ret)->type;
1058 switch (simpletype) {
1059 case MONO_TYPE_BOOLEAN:
1064 case MONO_TYPE_CHAR:
1070 case MONO_TYPE_FNPTR:
1071 case MONO_TYPE_CLASS:
1072 case MONO_TYPE_OBJECT:
1073 case MONO_TYPE_SZARRAY:
1074 case MONO_TYPE_ARRAY:
1075 case MONO_TYPE_STRING:
1076 cinfo->ret.reg = ppc_r3;
1080 cinfo->ret.reg = ppc_r3;
1084 cinfo->ret.reg = ppc_f1;
1085 cinfo->ret.regtype = RegTypeFP;
1087 case MONO_TYPE_GENERICINST:
1088 if (!mono_type_generic_inst_is_valuetype (sig->ret)) {
1089 cinfo->ret.reg = ppc_r3;
1093 case MONO_TYPE_VALUETYPE:
1095 case MONO_TYPE_TYPEDBYREF:
1096 case MONO_TYPE_VOID:
1099 g_error ("Can't handle as return value 0x%x", sig->ret->type);
1103 /* align stack size to 16 */
1104 DEBUG (printf (" stack size: %d (%d)\n", (stack_size + 15) & ~15, stack_size));
1105 stack_size = (stack_size + 15) & ~15;
1107 cinfo->stack_usage = stack_size;
/*
 * allocate_tailcall_valuetype_addrs:
 * When structs are passed by address and the method performs tail calls,
 * pre-allocates one indirect local per valuetype parameter so the
 * addresses survive across the tail call. No-op otherwise.
 */
1112 allocate_tailcall_valuetype_addrs (MonoCompile *cfg)
1114 #if !PPC_PASS_STRUCTS_BY_VALUE
1115 MonoMethodSignature *sig = mono_method_signature (cfg->method);
1116 int num_structs = 0;
1119 if (!(cfg->flags & MONO_CFG_HAS_TAIL))
1122 for (i = 0; i < sig->param_count; ++i) {
1123 MonoType *type = mono_type_get_underlying_type (sig->params [i]);
1124 if (type->type == MONO_TYPE_VALUETYPE)
1129 cfg->tailcall_valuetype_addrs =
1130 mono_mempool_alloc0 (cfg->mempool, sizeof (MonoInst*) * num_structs);
1131 for (i = 0; i < num_structs; ++i) {
1132 cfg->tailcall_valuetype_addrs [i] =
1133 mono_compile_create_var (cfg, &mono_defaults.int_class->byval_arg, OP_LOCAL);
1134 cfg->tailcall_valuetype_addrs [i]->flags |= MONO_INST_INDIRECT;
1141 * Set var information according to the calling convention. ppc version.
1142 * The locals var stuff should most likely be split in another method.
/*
 * Lays out the stack frame for @m: chooses the frame register, assigns
 * the return-value location, then packs (in order) the linkage area, the
 * outgoing parameter area, trace/LMF scratch space, the vtable-return
 * address, the local variables and the incoming arguments, each with its
 * required alignment. Many else-arms and closing braces fall on lines
 * not visible in this chunk.
 */
1145 mono_arch_allocate_vars (MonoCompile *m)
1147 MonoMethodSignature *sig;
1148 MonoMethodHeader *header;
1150 int i, offset, size, align, curinst;
1151 int frame_reg = ppc_sp;
1153 guint32 locals_stack_size, locals_stack_align;
1155 allocate_tailcall_valuetype_addrs (m);
/* locals grow towards higher addresses on this backend */
1157 m->flags |= MONO_CFG_HAS_SPILLUP;
1159 /* allow room for the vararg method args: void* and long/double */
1160 if (mono_jit_trace_calls != NULL && mono_trace_eval (m->method))
1161 m->param_area = MAX (m->param_area, sizeof (gpointer)*8);
1162 /* this is bug #60332: remove when #59509 is fixed, so no weird vararg
1163 * call convs needs to be handled this way.
1165 if (m->flags & MONO_CFG_HAS_VARARGS)
1166 m->param_area = MAX (m->param_area, sizeof (gpointer)*8);
1167 /* gtk-sharp and other broken code will dllimport vararg functions even with
1168 * non-varargs signatures. Since there is little hope people will get this right
1169 * we assume they won't.
1171 if (m->method->wrapper_type == MONO_WRAPPER_MANAGED_TO_NATIVE)
1172 m->param_area = MAX (m->param_area, sizeof (gpointer)*8);
1174 header = mono_method_get_header (m->method);
1177 * We use the frame register also for any method that has
1178 * exception clauses. This way, when the handlers are called,
1179 * the code will reference local variables using the frame reg instead of
1180 * the stack pointer: if we had to restore the stack pointer, we'd
1181 * corrupt the method frames that are already on the stack (since
1182 * filters get called before stack unwinding happens) when the filter
1183 * code would call any method (this also applies to finally etc.).
1185 if ((m->flags & MONO_CFG_HAS_ALLOCA) || header->num_clauses)
1186 frame_reg = ppc_r31;
1187 m->frame_reg = frame_reg;
1188 if (frame_reg != ppc_sp) {
1189 m->used_int_regs |= 1 << frame_reg;
1192 sig = mono_method_signature (m->method);
/* struct returns: the hidden pointer stays in r3 */
1196 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
1197 m->ret->opcode = OP_REGVAR;
1198 m->ret->inst_c0 = m->ret->dreg = ppc_r3;
1200 /* FIXME: handle long values? */
1201 switch (mini_type_get_underlying_type (m->generic_sharing_context, sig->ret)->type) {
1202 case MONO_TYPE_VOID:
1206 m->ret->opcode = OP_REGVAR;
1207 m->ret->inst_c0 = m->ret->dreg = ppc_f1;
1210 m->ret->opcode = OP_REGVAR;
1211 m->ret->inst_c0 = m->ret->dreg = ppc_r3;
1215 /* local vars are at a positive offset from the stack pointer */
1217 * also note that if the function uses alloca, we use ppc_r31
1218 * to point at the local variables.
1220 offset = PPC_MINIMAL_STACK_SIZE; /* linkage area */
1221 /* align the offset to 16 bytes: not sure this is needed here */
1223 //offset &= ~(16 - 1);
1225 /* add parameter area size for called functions */
1226 offset += m->param_area;
1228 offset &= ~(16 - 1);
1230 /* allow room to save the return value */
1231 if (mono_jit_trace_calls != NULL && mono_trace_eval (m->method))
1234 /* the MonoLMF structure is stored just below the stack pointer */
1237 /* this stuff should not be needed on ppc and the new jit,
1238 * because a call on ppc to the handlers doesn't change the
1239 * stack pointer and the jist doesn't manipulate the stack pointer
1240 * for operations involving valuetypes.
1242 /* reserve space to store the esp */
1243 offset += sizeof (gpointer);
1245 /* this is a global constant */
1246 mono_exc_esp_offset = offset;
/* slot holding the address the struct return should be written to */
1249 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
1250 offset += sizeof(gpointer) - 1;
1251 offset &= ~(sizeof(gpointer) - 1);
1253 m->vret_addr->opcode = OP_REGOFFSET;
1254 m->vret_addr->inst_basereg = frame_reg;
1255 m->vret_addr->inst_offset = offset;
1257 if (G_UNLIKELY (m->verbose_level > 1)) {
1258 printf ("vret_addr =");
1259 mono_print_ins (m->vret_addr);
1262 offset += sizeof(gpointer);
/* local variables, placed at their pre-computed relative offsets */
1265 offsets = mono_allocate_stack_slots_full (m, FALSE, &locals_stack_size, &locals_stack_align);
1266 if (locals_stack_align) {
1267 offset += (locals_stack_align - 1);
1268 offset &= ~(locals_stack_align - 1);
1270 for (i = m->locals_start; i < m->num_varinfo; i++) {
1271 if (offsets [i] != -1) {
1272 MonoInst *inst = m->varinfo [i];
1273 inst->opcode = OP_REGOFFSET;
1274 inst->inst_basereg = frame_reg;
1275 inst->inst_offset = offset + offsets [i];
1277 g_print ("allocating local %d (%s) to %d\n",
1278 i, mono_type_get_name (inst->inst_vtype), inst->inst_offset);
1282 offset += locals_stack_size;
/* `this` argument, when it was not register-allocated */
1286 inst = m->args [curinst];
1287 if (inst->opcode != OP_REGVAR) {
1288 inst->opcode = OP_REGOFFSET;
1289 inst->inst_basereg = frame_reg;
1290 offset += sizeof (gpointer) - 1;
1291 offset &= ~(sizeof (gpointer) - 1);
1292 inst->inst_offset = offset;
1293 offset += sizeof (gpointer);
/* remaining incoming arguments */
1298 for (i = 0; i < sig->param_count; ++i) {
1299 inst = m->args [curinst];
1300 if (inst->opcode != OP_REGVAR) {
1301 inst->opcode = OP_REGOFFSET;
1302 inst->inst_basereg = frame_reg;
1304 size = mono_type_native_stack_size (sig->params [i], (guint32*)&align);
1305 inst->backend.is_pinvoke = 1;
1307 size = mono_type_size (sig->params [i], &align);
1309 if (MONO_TYPE_ISSTRUCT (sig->params [i]) && size < sizeof (gpointer))
1310 size = align = sizeof (gpointer);
1311 offset += align - 1;
1312 offset &= ~(align - 1);
1313 inst->inst_offset = offset;
1319 /* some storage for fp conversions */
1322 m->arch.fp_conv_var_offset = offset;
1325 /* align the offset to 16 bytes */
1327 offset &= ~(16 - 1);
1330 m->stack_offset = offset;
/* varargs: record where the signature cookie lives */
1332 if (sig->call_convention == MONO_CALL_VARARG) {
1333 CallInfo *cinfo = calculate_sizes (m->method->signature, m->method->signature->pinvoke);
1335 m->sig_cookie = cinfo->sig_cookie.offset;
/*
 * mono_arch_create_vars:
 * Creates the hidden vret_addr argument variable for methods that
 * return a struct (placed by mono_arch_allocate_vars later).
 */
1342 mono_arch_create_vars (MonoCompile *cfg)
1344 MonoMethodSignature *sig = mono_method_signature (cfg->method);
1346 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
1347 cfg->vret_addr = mono_compile_create_var (cfg, &mono_defaults.int_class->byval_arg, OP_ARG);
1351 /* Fixme: we need an alignment solution for enter_method and mono_arch_call_opcode,
1352 * currently alignment in mono_arch_call_opcode is computed without arch_get_argument_info
/*
 * emit_sig_cookie:
 * Stores the call signature pointer at the sig_cookie slot in the
 * outgoing frame (relative to r1), so vararg callees can decode the
 * actual argument list.
 */
1356 emit_sig_cookie (MonoCompile *cfg, MonoCallInst *call, CallInfo *cinfo)
1358 int sig_reg = mono_alloc_ireg (cfg);
1360 MONO_EMIT_NEW_ICONST (cfg, sig_reg, (gulong)call->signature);
1361 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG,
1362 ppc_r1, cinfo->sig_cookie.offset, sig_reg);
/*
 * mono_arch_emit_call:
 * Emit the IR that moves each argument of CALL into the position required
 * by the PPC calling convention, as computed by calculate_sizes():
 * general registers, FP registers, by-value/by-address structs, or stack
 * slots relative to ppc_r1.  Also emits the vararg signature cookie and
 * the struct-return address register when needed.
 * NOTE(review): several lines (else-branches, braces) are elided in this
 * excerpt; the embedded numbers are original file line numbers.
 */
1366 mono_arch_emit_call (MonoCompile *cfg, MonoCallInst *call)
1369 MonoMethodSignature *sig;
1373 sig = call->signature;
1374 n = sig->param_count + sig->hasthis;
1376 cinfo = calculate_sizes (sig, sig->pinvoke);
1378 for (i = 0; i < n; ++i) {
1379 ArgInfo *ainfo = cinfo->args + i;
/* the implicit 'this' argument is typed as a native int */
1382 if (i >= sig->hasthis)
1383 t = sig->params [i - sig->hasthis];
1385 t = &mono_defaults.int_class->byval_arg;
1386 t = mini_type_get_underlying_type (cfg->generic_sharing_context, t);
/* emit the cookie just before the first implicit (post-sentinel) vararg */
1388 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos))
1389 emit_sig_cookie (cfg, call, cinfo);
1391 in = call->args [i];
1393 if (ainfo->regtype == RegTypeGeneral) {
1394 #ifndef __mono_ppc64__
/* on 32-bit PPC a 64-bit value occupies a register pair:
 * dreg+1/dreg+2 are the two halves of the vreg pair */
1395 if (!t->byref && ((t->type == MONO_TYPE_I8) || (t->type == MONO_TYPE_U8))) {
1396 MONO_INST_NEW (cfg, ins, OP_MOVE);
1397 ins->dreg = mono_alloc_ireg (cfg);
1398 ins->sreg1 = in->dreg + 1;
1399 MONO_ADD_INS (cfg->cbb, ins);
1400 mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, ainfo->reg + 1, FALSE);
1402 MONO_INST_NEW (cfg, ins, OP_MOVE);
1403 ins->dreg = mono_alloc_ireg (cfg);
1404 ins->sreg1 = in->dreg + 2;
1405 MONO_ADD_INS (cfg->cbb, ins);
1406 mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, ainfo->reg, FALSE);
1410 MONO_INST_NEW (cfg, ins, OP_MOVE);
1411 ins->dreg = mono_alloc_ireg (cfg);
1412 ins->sreg1 = in->dreg;
1413 MONO_ADD_INS (cfg->cbb, ins);
1415 mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, ainfo->reg, FALSE);
1417 } else if (ainfo->regtype == RegTypeStructByAddr) {
/* pass a pointer to the struct; OP_OUTARG_VT carries a private
 * copy of the ArgInfo for mono_arch_emit_outarg_vt () */
1418 MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1419 ins->opcode = OP_OUTARG_VT;
1420 ins->sreg1 = in->dreg;
1421 ins->klass = in->klass;
1422 ins->inst_p0 = call;
1423 ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1424 memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1425 MONO_ADD_INS (cfg->cbb, ins);
1426 } else if (ainfo->regtype == RegTypeStructByVal) {
1427 /* this is further handled in mono_arch_emit_outarg_vt () */
1428 MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1429 ins->opcode = OP_OUTARG_VT;
1430 ins->sreg1 = in->dreg;
1431 ins->klass = in->klass;
1432 ins->inst_p0 = call;
1433 ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1434 memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1435 MONO_ADD_INS (cfg->cbb, ins);
1436 } else if (ainfo->regtype == RegTypeBase) {
/* argument goes on the stack at ainfo->offset(ppc_r1) */
1437 if (!t->byref && ((t->type == MONO_TYPE_I8) || (t->type == MONO_TYPE_U8))) {
1438 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI8_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1439 } else if (!t->byref && ((t->type == MONO_TYPE_R4) || (t->type == MONO_TYPE_R8))) {
1440 if (t->type == MONO_TYPE_R8)
1441 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER8_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1443 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER4_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1445 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1447 } else if (ainfo->regtype == RegTypeFP) {
1448 if (t->type == MONO_TYPE_VALUETYPE) {
1449 /* this is further handled in mono_arch_emit_outarg_vt () */
1450 MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1451 ins->opcode = OP_OUTARG_VT;
1452 ins->sreg1 = in->dreg;
1453 ins->klass = in->klass;
1454 ins->inst_p0 = call;
1455 ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1456 memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1457 MONO_ADD_INS (cfg->cbb, ins);
1459 cfg->flags |= MONO_CFG_HAS_FPOUT;
1461 int dreg = mono_alloc_freg (cfg);
/* size == 4 means the parameter is a single-precision float:
 * narrow it before placing it in the FP argument register */
1463 if (ainfo->size == 4) {
1464 MONO_EMIT_NEW_UNALU (cfg, OP_FCONV_TO_R4, dreg, in->dreg);
1466 MONO_INST_NEW (cfg, ins, OP_FMOVE);
1468 ins->sreg1 = in->dreg;
1469 MONO_ADD_INS (cfg->cbb, ins);
1472 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg, TRUE);
1473 cfg->flags |= MONO_CFG_HAS_FPOUT;
1476 g_assert_not_reached ();
1480 /* Emit the signature cookie in the case that there is no
1481 additional argument */
1482 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (n == sig->sentinelpos))
1483 emit_sig_cookie (cfg, call, cinfo);
/* struct return: pass the vret buffer address in the designated register */
1485 if (cinfo->struct_ret) {
1488 MONO_INST_NEW (cfg, vtarg, OP_MOVE);
1489 vtarg->sreg1 = call->vret_var->dreg;
1490 vtarg->dreg = mono_alloc_preg (cfg);
1491 MONO_ADD_INS (cfg->cbb, vtarg);
1493 mono_call_inst_add_outarg_reg (cfg, call, vtarg->dreg, cinfo->struct_ret, FALSE);
1496 call->stack_usage = cinfo->stack_usage;
/* the outgoing-parameter area must hold the largest call in the method */
1497 cfg->param_area = MAX (PPC_MINIMAL_PARAM_AREA_SIZE, MAX (cfg->param_area, cinfo->stack_usage));
1498 cfg->flags |= MONO_CFG_HAS_CALLS;
/*
 * mono_arch_emit_outarg_vt:
 * Lower an OP_OUTARG_VT pseudo-instruction (created by mono_arch_emit_call)
 * into concrete loads/stores for a value-type argument: register-packed
 * structs (RegTypeStructByVal), FP-register structs (RegTypeFP), or a
 * stack/register copy of the struct otherwise.  The ArgInfo describing the
 * argument was stashed in ins->inst_p1 by the caller.
 * NOTE(review): some lines are elided in this excerpt.
 */
1504 mono_arch_emit_outarg_vt (MonoCompile *cfg, MonoInst *ins, MonoInst *src)
1506 MonoCallInst *call = (MonoCallInst*)ins->inst_p0;
1507 ArgInfo *ainfo = ins->inst_p1;
1508 int ovf_size = ainfo->vtsize;
1509 int doffset = ainfo->offset;
1510 int i, soffset, dreg;
1512 if (ainfo->regtype == RegTypeStructByVal) {
1519 * Darwin pinvokes needs some special handling for 1
1520 * and 2 byte arguments
1522 g_assert (ins->klass);
1523 if (call->signature->pinvoke)
1524 size = mono_class_native_size (ins->klass, NULL);
/* 1- and 2-byte structs are loaded sign-extended into a single register */
1525 if (size == 2 || size == 1) {
1526 int tmpr = mono_alloc_ireg (cfg);
1528 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI1_MEMBASE, tmpr, src->dreg, soffset);
1530 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI2_MEMBASE, tmpr, src->dreg, soffset);
1531 dreg = mono_alloc_ireg (cfg);
1532 MONO_EMIT_NEW_UNALU (cfg, OP_MOVE, dreg, tmpr);
1533 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg, FALSE);
/* copy the struct word-by-word into consecutive argument registers */
1536 for (i = 0; i < ainfo->size; ++i) {
1537 int antipadding = 0;
/* undersized trailing word: shift the bytes up into position */
1540 antipadding = sizeof (gpointer) - ainfo->bytes;
1542 dreg = mono_alloc_ireg (cfg);
1543 MONO_EMIT_NEW_LOAD_MEMBASE (cfg, dreg, src->dreg, soffset);
1545 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHR_UN_IMM, dreg, dreg, antipadding * 8);
1546 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg + i, FALSE);
1547 soffset += sizeof (gpointer);
/* overflow part of the struct goes to the stack */
1550 mini_emit_memcpy (cfg, ppc_r1, doffset + soffset, src->dreg, soffset, ovf_size * sizeof (gpointer), 0);
1551 } else if (ainfo->regtype == RegTypeFP) {
1552 int tmpr = mono_alloc_freg (cfg);
1553 if (ainfo->size == 4)
1554 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR4_MEMBASE, tmpr, src->dreg, 0);
1556 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR8_MEMBASE, tmpr, src->dreg, 0);
1557 dreg = mono_alloc_freg (cfg);
1558 MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, dreg, tmpr);
1559 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg, TRUE);
/* general case: materialize a local copy of the struct and pass its address */
1561 MonoInst *vtcopy = mono_compile_create_var (cfg, &src->klass->byval_arg, OP_LOCAL);
1565 /* FIXME: alignment? */
1566 if (call->signature->pinvoke) {
1567 size = mono_type_native_stack_size (&src->klass->byval_arg, NULL);
1568 vtcopy->backend.is_pinvoke = 1;
1570 size = mini_type_stack_size (cfg->generic_sharing_context, &src->klass->byval_arg, NULL);
1573 g_assert (ovf_size > 0);
1575 EMIT_NEW_VARLOADA (cfg, load, vtcopy, vtcopy->inst_vtype);
1576 mini_emit_memcpy (cfg, load->dreg, 0, src->dreg, 0, size, 0);
1579 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ppc_r1, ainfo->offset, load->dreg);
1581 mono_call_inst_add_outarg_reg (cfg, call, load->dreg, ainfo->reg, FALSE);
/*
 * mono_arch_emit_setret:
 * Move VAL into the method's return location: a register pair via
 * OP_SETLRET for 64-bit results on 32-bit PPC, an FP move for R4/R8,
 * and a plain move otherwise.
 */
1586 mono_arch_emit_setret (MonoCompile *cfg, MonoMethod *method, MonoInst *val)
1588 MonoType *ret = mini_type_get_underlying_type (cfg->generic_sharing_context,
1589 mono_method_signature (method)->ret);
1592 #ifndef __mono_ppc64__
/* 64-bit result lives in the vreg pair (dreg+1, dreg+2) on 32-bit PPC */
1593 if (ret->type == MONO_TYPE_I8 || ret->type == MONO_TYPE_U8) {
1596 MONO_INST_NEW (cfg, ins, OP_SETLRET);
1597 ins->sreg1 = val->dreg + 1;
1598 ins->sreg2 = val->dreg + 2;
1599 MONO_ADD_INS (cfg->cbb, ins);
1603 if (ret->type == MONO_TYPE_R8 || ret->type == MONO_TYPE_R4) {
1604 MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, cfg->ret->dreg, val->dreg);
1608 MONO_EMIT_NEW_UNALU (cfg, OP_MOVE, cfg->ret->dreg, val->dreg);
1611 /* FIXME: this is just a useless hint: fix the interface to include the opcode */
1613 mono_arch_is_inst_imm (gint64 imm)
1619 * Allow tracing to work with this interface (with an optional argument)
/*
 * mono_arch_instrument_prolog:
 * Emit a tracing call at method entry: r3 = the MonoMethod being entered,
 * r4 = NULL (placeholder frame pointer), then call FUNC through LR.
 * NOTE(review): the tail of this function is elided in this excerpt.
 */
1623 mono_arch_instrument_prolog (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments)
1627 ppc_load (code, ppc_r3, cfg->method);
1628 ppc_li (code, ppc_r4, 0); /* NULL ebp for now */
1629 ppc_load_func (code, ppc_r0, func);
1630 ppc_mtlr (code, ppc_r0);
/*
 * mono_arch_instrument_epilog:
 * Emit a tracing call at method exit.  The return value (which lives in
 * r3/r3:r4/f1 depending on the return type) is spilled to the stack at
 * save_offset, the tracing FUNC is called with the MonoMethod in r3 (and,
 * when enable_arguments is set, the return value in the argument regs),
 * and the saved return value is restored afterwards.
 * NOTE(review): several lines (save-mode labels, switch cases) are elided
 * in this excerpt.
 */
1644 mono_arch_instrument_epilog (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments)
1647 int save_mode = SAVE_NONE;
1649 MonoMethod *method = cfg->method;
1650 int rtype = mini_type_get_underlying_type (cfg->generic_sharing_context,
1651 mono_method_signature (method)->ret)->type;
1652 int save_offset = PPC_STACK_PARAM_OFFSET + cfg->param_area;
/* grow the native code buffer if the ~16 instructions below may not fit */
1656 offset = code - cfg->native_code;
1657 /* we need about 16 instructions */
1658 if (offset > (cfg->code_size - 16 * 4)) {
1659 cfg->code_size *= 2;
1660 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
1661 code = cfg->native_code + offset;
/* pick how much of the return value must be preserved across the call */
1665 case MONO_TYPE_VOID:
1666 /* special case string .ctor icall */
1667 if (strcmp (".ctor", method->name) && method->klass == mono_defaults.string_class)
1668 save_mode = SAVE_ONE;
1670 save_mode = SAVE_NONE;
1672 #ifndef __mono_ppc64__
/* 64-bit results occupy r3:r4 on 32-bit PPC */
1675 save_mode = SAVE_TWO;
1680 save_mode = SAVE_FP;
1682 case MONO_TYPE_VALUETYPE:
1683 save_mode = SAVE_STRUCT;
1686 save_mode = SAVE_ONE;
/* spill the return value before the tracing call */
1690 switch (save_mode) {
1692 ppc_stw (code, ppc_r3, save_offset, cfg->frame_reg);
1693 ppc_stw (code, ppc_r4, save_offset + 4, cfg->frame_reg);
1694 if (enable_arguments) {
1695 ppc_mr (code, ppc_r5, ppc_r4);
1696 ppc_mr (code, ppc_r4, ppc_r3);
1700 ppc_store_reg (code, ppc_r3, save_offset, cfg->frame_reg);
1701 if (enable_arguments) {
1702 ppc_mr (code, ppc_r4, ppc_r3);
1706 ppc_stfd (code, ppc_f1, save_offset, cfg->frame_reg);
1707 if (enable_arguments) {
1708 /* FIXME: what reg? */
1709 ppc_fmr (code, ppc_f3, ppc_f1);
1710 /* FIXME: use 8 byte load on PPC64 */
1711 ppc_lwz (code, ppc_r4, save_offset, cfg->frame_reg);
1712 ppc_lwz (code, ppc_r5, save_offset + 4, cfg->frame_reg);
1716 if (enable_arguments) {
1717 /* FIXME: get the actual address */
1718 ppc_mr (code, ppc_r4, ppc_r3);
/* call the tracing function with the method in r3 */
1726 ppc_load (code, ppc_r3, cfg->method);
1727 ppc_load_func (code, ppc_r0, func);
1728 ppc_mtlr (code, ppc_r0);
/* restore the spilled return value */
1731 switch (save_mode) {
1733 ppc_lwz (code, ppc_r3, save_offset, cfg->frame_reg);
1734 ppc_lwz (code, ppc_r4, save_offset + 4, cfg->frame_reg);
1737 ppc_load_reg (code, ppc_r3, save_offset, cfg->frame_reg);
1740 ppc_lfd (code, ppc_f1, save_offset, cfg->frame_reg);
1750 * Conditional branches have a small offset, so if it is likely overflowed,
1751 * we do a branch to the end of the method (uncond branches have much larger
1752 * offsets) where we perform the conditional and jump back unconditionally.
1753 * It's slightly slower, since we add two uncond branches, but it's very simple
1754 * with the current patch implementation and such large methods are likely not
1755 * going to be perf critical anyway.
1760 const char *exception;
/*
 * EMIT_COND_BRANCH_FLAGS / EMIT_COND_BRANCH:
 * Emit a conditional branch (bc b0,b1,target) to a label or basic block.
 * Conditional branches only have a 16-bit displacement; when the estimated
 * displacement may not fit, a MonoOvfJump is recorded (MONO_PATCH_INFO_BB_OVF)
 * so the branch is routed through a long unconditional jump emitted at the
 * end of the method (see the comment block above this macro in the file).
 *
 * NOTE(review): on the range test below,
 *     !ppc_is_imm16 (br_disp + 1024) || ! ppc_is_imm16 (ppc_is_imm16 (br_disp - 1024))
 * the inner ppc_is_imm16 result (a boolean) is fed back into ppc_is_imm16,
 * making the second operand always true and the whole right side always
 * false.  This looks like a typo for `!ppc_is_imm16 (br_disp - 1024)`;
 * the effect is only that some near-limit negative displacements skip the
 * overflow path.  Not fixed here because macro continuation lines are
 * elided in this excerpt — confirm against the full file.
 */
1767 #define EMIT_COND_BRANCH_FLAGS(ins,b0,b1) \
1768 if (ins->flags & MONO_INST_BRLABEL) { \
1769 if (0 && ins->inst_i0->inst_c0) { \
1770 ppc_bc (code, (b0), (b1), (code - cfg->native_code + ins->inst_i0->inst_c0) & 0xffff); \
1772 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_LABEL, ins->inst_i0); \
1773 ppc_bc (code, (b0), (b1), 0); \
1776 if (0 && ins->inst_true_bb->native_offset) { \
1777 ppc_bc (code, (b0), (b1), (code - cfg->native_code + ins->inst_true_bb->native_offset) & 0xffff); \
1779 int br_disp = ins->inst_true_bb->max_offset - offset; \
1780 if (!ppc_is_imm16 (br_disp + 1024) || ! ppc_is_imm16 (ppc_is_imm16 (br_disp - 1024))) { \
1781 MonoOvfJump *ovfj = mono_mempool_alloc (cfg->mempool, sizeof (MonoOvfJump)); \
1782 ovfj->data.bb = ins->inst_true_bb; \
1783 ovfj->ip_offset = 0; \
1784 ovfj->b0_cond = (b0); \
1785 ovfj->b1_cond = (b1); \
1786 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB_OVF, ovfj); \
1789 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_true_bb); \
1790 ppc_bc (code, (b0), (b1), 0); \
1795 #define EMIT_COND_BRANCH(ins,cond) EMIT_COND_BRANCH_FLAGS(ins, branch_b0_table [(cond)], branch_b1_table [(cond)])
1797 /* emit an exception if condition is fail
1799 * We assign the extra code used to throw the implicit exceptions
1800 * to cfg->bb_exit as far as the big branch handling is concerned
/*
 * EMIT_COND_SYSTEM_EXCEPTION_FLAGS / EMIT_COND_SYSTEM_EXCEPTION:
 * Emit a conditional branch-and-link to the named implicit-exception
 * throw code (patched later via MONO_PATCH_INFO_EXC).  As with
 * EMIT_COND_BRANCH_FLAGS, a MonoOvfJump (MONO_PATCH_INFO_EXC_OVF) is
 * recorded when the 16-bit bc displacement may overflow.
 * NOTE(review): the same suspicious double ppc_is_imm16 call as in
 * EMIT_COND_BRANCH_FLAGS appears in the range test below — likely meant
 * `!ppc_is_imm16 (br_disp - 1024)`; confirm against the full file.
 */
1802 #define EMIT_COND_SYSTEM_EXCEPTION_FLAGS(b0,b1,exc_name) \
1804 int br_disp = cfg->bb_exit->max_offset - offset; \
1805 if (!ppc_is_imm16 (br_disp + 1024) || ! ppc_is_imm16 (ppc_is_imm16 (br_disp - 1024))) { \
1806 MonoOvfJump *ovfj = mono_mempool_alloc (cfg->mempool, sizeof (MonoOvfJump)); \
1807 ovfj->data.exception = (exc_name); \
1808 ovfj->ip_offset = code - cfg->native_code; \
1809 ovfj->b0_cond = (b0); \
1810 ovfj->b1_cond = (b1); \
1811 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC_OVF, ovfj); \
1813 cfg->bb_exit->max_offset += 24; \
1815 mono_add_patch_info (cfg, code - cfg->native_code, \
1816 MONO_PATCH_INFO_EXC, exc_name); \
1817 ppc_bcl (code, (b0), (b1), 0); \
1821 #define EMIT_COND_SYSTEM_EXCEPTION(cond,exc_name) EMIT_COND_SYSTEM_EXCEPTION_FLAGS(branch_b0_table [(cond)], branch_b1_table [(cond)], (exc_name))
1824 mono_arch_peephole_pass_1 (MonoCompile *cfg, MonoBasicBlock *bb)
/*
 * normalize_opcode:
 * Map size-specific 32/64-bit opcodes to their generic, width-independent
 * form (e.g. OP_LOADI4_MEMBASE / OP_LOADI8_MEMBASE -> OP_LOAD_MEMBASE),
 * so the peephole pass can match both widths with one case.  Opcodes with
 * no generic form are returned unchanged (default case elided in this
 * excerpt).
 */
1829 normalize_opcode (int opcode)
1832 case MONO_PPC_32_64_CASE (OP_LOADI4_MEMBASE, OP_LOADI8_MEMBASE):
1833 return OP_LOAD_MEMBASE;
1834 case MONO_PPC_32_64_CASE (OP_LOADI4_MEMINDEX, OP_LOADI8_MEMINDEX):
1835 return OP_LOAD_MEMINDEX;
1836 case MONO_PPC_32_64_CASE (OP_STOREI4_MEMBASE_REG, OP_STOREI8_MEMBASE_REG):
1837 return OP_STORE_MEMBASE_REG;
1838 case MONO_PPC_32_64_CASE (OP_STOREI4_MEMBASE_IMM, OP_STOREI8_MEMBASE_IMM):
1839 return OP_STORE_MEMBASE_IMM;
1840 case MONO_PPC_32_64_CASE (OP_STOREI4_MEMINDEX, OP_STOREI8_MEMINDEX):
1841 return OP_STORE_MEMINDEX;
1842 case MONO_PPC_32_64_CASE (OP_ISHR_IMM, OP_LSHR_IMM):
1844 case MONO_PPC_32_64_CASE (OP_ISHR_UN_IMM, OP_LSHR_UN_IMM):
1845 return OP_SHR_UN_IMM;
/*
 * mono_arch_peephole_pass_2:
 * Local peephole optimizations over one basic block: strength-reduce
 * multiplications by 1 / powers of two, forward stored values into
 * subsequent loads of the same [basereg+offset], collapse redundant
 * load-after-load, and remove no-op / cancelling register moves.
 * NOTE(review): some case labels and else-branches are elided in this
 * excerpt.
 */
1852 mono_arch_peephole_pass_2 (MonoCompile *cfg, MonoBasicBlock *bb)
1854 MonoInst *ins, *n, *last_ins = NULL;
1856 MONO_BB_FOR_EACH_INS_SAFE (bb, n, ins) {
1857 switch (normalize_opcode (ins->opcode)) {
1859 /* remove unnecessary multiplication with 1 */
1860 if (ins->inst_imm == 1) {
1861 if (ins->dreg != ins->sreg1) {
1862 ins->opcode = OP_MOVE;
1864 MONO_DELETE_INS (bb, ins);
/* multiplication by a power of two becomes a shift */
1868 int power2 = mono_is_power_of_two (ins->inst_imm);
1870 ins->opcode = OP_SHL_IMM;
1871 ins->inst_imm = power2;
1875 case OP_LOAD_MEMBASE:
1877 * OP_STORE_MEMBASE_REG reg, offset(basereg)
1878 * OP_LOAD_MEMBASE offset(basereg), reg
1880 if (last_ins && normalize_opcode (last_ins->opcode) == OP_STORE_MEMBASE_REG &&
1881 ins->inst_basereg == last_ins->inst_destbasereg &&
1882 ins->inst_offset == last_ins->inst_offset) {
1883 if (ins->dreg == last_ins->sreg1) {
/* loading back the register just stored: drop the load */
1884 MONO_DELETE_INS (bb, ins);
1887 //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
1888 ins->opcode = OP_MOVE;
1889 ins->sreg1 = last_ins->sreg1;
1893 * Note: reg1 must be different from the basereg in the second load
1894 * OP_LOAD_MEMBASE offset(basereg), reg1
1895 * OP_LOAD_MEMBASE offset(basereg), reg2
1897 * OP_LOAD_MEMBASE offset(basereg), reg1
1898 * OP_MOVE reg1, reg2
1900 } else if (last_ins && normalize_opcode (last_ins->opcode) == OP_LOAD_MEMBASE &&
1901 ins->inst_basereg != last_ins->dreg &&
1902 ins->inst_basereg == last_ins->inst_basereg &&
1903 ins->inst_offset == last_ins->inst_offset) {
1905 if (ins->dreg == last_ins->dreg) {
1906 MONO_DELETE_INS (bb, ins);
1909 ins->opcode = OP_MOVE;
1910 ins->sreg1 = last_ins->dreg;
1913 //g_assert_not_reached ();
1917 * OP_STORE_MEMBASE_IMM imm, offset(basereg)
1918 * OP_LOAD_MEMBASE offset(basereg), reg
1920 * OP_STORE_MEMBASE_IMM imm, offset(basereg)
1921 * OP_ICONST reg, imm
1923 } else if (last_ins && normalize_opcode (last_ins->opcode) == OP_STORE_MEMBASE_IMM &&
1924 ins->inst_basereg == last_ins->inst_destbasereg &&
1925 ins->inst_offset == last_ins->inst_offset) {
1926 //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
1927 ins->opcode = OP_ICONST;
1928 ins->inst_c0 = last_ins->inst_imm;
1929 g_assert_not_reached (); // check this rule
/* narrow load after same-location narrow store: replace with a
 * sign/zero extension of the stored register */
1933 case OP_LOADU1_MEMBASE:
1934 case OP_LOADI1_MEMBASE:
1935 if (last_ins && (last_ins->opcode == OP_STOREI1_MEMBASE_REG) &&
1936 ins->inst_basereg == last_ins->inst_destbasereg &&
1937 ins->inst_offset == last_ins->inst_offset) {
1938 ins->opcode = (ins->opcode == OP_LOADI1_MEMBASE) ? OP_ICONV_TO_I1 : OP_ICONV_TO_U1;
1939 ins->sreg1 = last_ins->sreg1;
1942 case OP_LOADU2_MEMBASE:
1943 case OP_LOADI2_MEMBASE:
1944 if (last_ins && (last_ins->opcode == OP_STOREI2_MEMBASE_REG) &&
1945 ins->inst_basereg == last_ins->inst_destbasereg &&
1946 ins->inst_offset == last_ins->inst_offset) {
1947 ins->opcode = (ins->opcode == OP_LOADI2_MEMBASE) ? OP_ICONV_TO_I2 : OP_ICONV_TO_U2;
1948 ins->sreg1 = last_ins->sreg1;
1951 #ifdef __mono_ppc64__
1952 case OP_LOADU4_MEMBASE:
1953 case OP_LOADI4_MEMBASE:
1954 if (last_ins && (last_ins->opcode == OP_STOREI4_MEMBASE_REG) &&
1955 ins->inst_basereg == last_ins->inst_destbasereg &&
1956 ins->inst_offset == last_ins->inst_offset) {
1957 ins->opcode = (ins->opcode == OP_LOADI4_MEMBASE) ? OP_ICONV_TO_I4 : OP_ICONV_TO_U4;
1958 ins->sreg1 = last_ins->sreg1;
1963 ins->opcode = OP_MOVE;
/* remove self-moves and move/counter-move pairs */
1967 if (ins->dreg == ins->sreg1) {
1968 MONO_DELETE_INS (bb, ins);
1972 * OP_MOVE sreg, dreg
1973 * OP_MOVE dreg, sreg
1975 if (last_ins && last_ins->opcode == OP_MOVE &&
1976 ins->sreg1 == last_ins->dreg &&
1977 ins->dreg == last_ins->sreg1) {
1978 MONO_DELETE_INS (bb, ins);
1986 bb->last_ins = last_ins;
/*
 * mono_arch_decompose_opts:
 * Decompose opcodes PPC has no direct instruction for into simpler IR.
 * Integer-to-float conversions use the classic IEEE-754 trick: build the
 * bit pattern of a double with exponent 0x433 (2^52) whose mantissa holds
 * the integer, then subtract the matching adjustment constant.
 * NOTE(review): some case labels and closing braces are elided in this
 * excerpt; `offset` is presumably cfg->arch scratch space — confirm in
 * the full file.
 */
1990 mono_arch_decompose_opts (MonoCompile *cfg, MonoInst *ins)
1992 switch (ins->opcode) {
1993 case OP_ICONV_TO_R_UN: {
/* 0x43300000_00000000 == (double)2^52; storing the uint32 in the low
 * word and subtracting this constant yields the exact double value */
1994 static const guint64 adjust_val = 0x4330000000000000ULL;
1995 int msw_reg = mono_alloc_ireg (cfg);
1996 int adj_reg = mono_alloc_freg (cfg);
1997 int tmp_reg = mono_alloc_freg (cfg);
1998 int basereg = ppc_sp;
2000 MONO_EMIT_NEW_ICONST (cfg, msw_reg, 0x43300000);
/* if the scratch slot is out of 16-bit displacement range, compute the
 * address into a register first */
2001 if (!ppc_is_imm16 (offset + 4)) {
2002 basereg = mono_alloc_ireg (cfg);
2003 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IADD_IMM, basereg, cfg->frame_reg, offset);
2005 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset, msw_reg);
2006 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset + 4, ins->sreg1);
2007 MONO_EMIT_NEW_LOAD_R8 (cfg, adj_reg, &adjust_val);
2008 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR8_MEMBASE, tmp_reg, basereg, offset);
2009 MONO_EMIT_NEW_BIALU (cfg, OP_FSUB, ins->dreg, tmp_reg, adj_reg);
2010 ins->opcode = OP_NOP;
2013 #ifndef __mono_ppc64__
2014 case OP_ICONV_TO_R4:
2015 case OP_ICONV_TO_R8: {
2016 /* FIXME: change precision for CEE_CONV_R4 */
/* signed variant: bias the input by xoring the sign bit, and use an
 * adjustment constant that includes the 0x80000000 bias */
2017 static const guint64 adjust_val = 0x4330000080000000ULL;
2018 int msw_reg = mono_alloc_ireg (cfg);
2019 int xored = mono_alloc_ireg (cfg);
2020 int adj_reg = mono_alloc_freg (cfg);
2021 int tmp_reg = mono_alloc_freg (cfg);
2022 int basereg = ppc_sp;
2024 if (!ppc_is_imm16 (offset + 4)) {
2025 basereg = mono_alloc_ireg (cfg);
2026 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IADD_IMM, basereg, cfg->frame_reg, offset);
2028 MONO_EMIT_NEW_ICONST (cfg, msw_reg, 0x43300000);
2029 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset, msw_reg);
2030 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_XOR_IMM, xored, ins->sreg1, 0x80000000);
2031 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset + 4, xored);
2032 MONO_EMIT_NEW_LOAD_R8 (cfg, adj_reg, (gpointer)&adjust_val);
2033 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR8_MEMBASE, tmp_reg, basereg, offset);
2034 MONO_EMIT_NEW_BIALU (cfg, OP_FSUB, ins->dreg, tmp_reg, adj_reg);
2035 if (ins->opcode == OP_ICONV_TO_R4)
2036 MONO_EMIT_NEW_UNALU (cfg, OP_FCONV_TO_R4, ins->dreg, ins->dreg);
2037 ins->opcode = OP_NOP;
/* finiteness check: spill the double and inspect its high word */
2042 int msw_reg = mono_alloc_ireg (cfg);
2043 int basereg = ppc_sp;
2045 if (!ppc_is_imm16 (offset + 4)) {
2046 basereg = mono_alloc_ireg (cfg);
2047 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IADD_IMM, basereg, cfg->frame_reg, offset);
2049 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER8_MEMBASE_REG, basereg, offset, ins->sreg1);
2050 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI4_MEMBASE, msw_reg, basereg, offset);
2051 MONO_EMIT_NEW_UNALU (cfg, OP_CHECK_FINITE, -1, msw_reg);
2052 MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, ins->dreg, ins->sreg1);
2053 ins->opcode = OP_NOP;
2056 #ifdef __mono_ppc64__
/* 32-bit overflow add on ppc64: do the add in the top 32 bits so the
 * hardware overflow detection applies, then shift the result back down */
2058 case OP_IADD_OVF_UN:
2060 int shifted1_reg = mono_alloc_ireg (cfg);
2061 int shifted2_reg = mono_alloc_ireg (cfg);
2062 int result_shifted_reg = mono_alloc_ireg (cfg);
2064 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHL_IMM, shifted1_reg, ins->sreg1, 32);
2065 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHL_IMM, shifted2_reg, ins->sreg2, 32);
2066 MONO_EMIT_NEW_BIALU (cfg, ins->opcode, result_shifted_reg, shifted1_reg, shifted2_reg);
2067 if (ins->opcode == OP_IADD_OVF_UN)
2068 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHR_UN_IMM, ins->dreg, result_shifted_reg, 32);
2070 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHR_IMM, ins->dreg, result_shifted_reg, 32);
2071 ins->opcode = OP_NOP;
/*
 * mono_arch_decompose_long_opts:
 * Decompose 64-bit overflow add/sub and negation into 32-bit register-pair
 * operations (vreg halves are dreg+1 = low word, dreg+2 = high word, as
 * elsewhere in this backend).  The carrying second operation checks for
 * overflow.  Case labels for the opcodes are partially elided in this
 * excerpt.
 */
2078 mono_arch_decompose_long_opts (MonoCompile *cfg, MonoInst *ins)
2080 switch (ins->opcode) {
2082 /* ADC sets the condition code */
2083 MONO_EMIT_NEW_BIALU (cfg, OP_ADDCC, ins->dreg + 1, ins->sreg1 + 1, ins->sreg2 + 1);
2084 MONO_EMIT_NEW_BIALU (cfg, OP_ADD_OVF_CARRY, ins->dreg + 2, ins->sreg1 + 2, ins->sreg2 + 2);
2087 case OP_LADD_OVF_UN:
2088 /* ADC sets the condition code */
2089 MONO_EMIT_NEW_BIALU (cfg, OP_ADDCC, ins->dreg + 1, ins->sreg1 + 1, ins->sreg2 + 1);
2090 MONO_EMIT_NEW_BIALU (cfg, OP_ADD_OVF_UN_CARRY, ins->dreg + 2, ins->sreg1 + 2, ins->sreg2 + 2);
2094 /* SBB sets the condition code */
2095 MONO_EMIT_NEW_BIALU (cfg, OP_SUBCC, ins->dreg + 1, ins->sreg1 + 1, ins->sreg2 + 1);
2096 MONO_EMIT_NEW_BIALU (cfg, OP_SUB_OVF_CARRY, ins->dreg + 2, ins->sreg1 + 2, ins->sreg2 + 2);
2099 case OP_LSUB_OVF_UN:
2100 /* SBB sets the condition code */
2101 MONO_EMIT_NEW_BIALU (cfg, OP_SUBCC, ins->dreg + 1, ins->sreg1 + 1, ins->sreg2 + 1);
2102 MONO_EMIT_NEW_BIALU (cfg, OP_SUB_OVF_UN_CARRY, ins->dreg + 2, ins->sreg1 + 2, ins->sreg2 + 2);
/* 64-bit negation as two's complement of the register pair */
2106 /* This is the old version from inssel-long32.brg */
2107 MONO_EMIT_NEW_UNALU (cfg, OP_INOT, ins->dreg + 1, ins->sreg1 + 1);
2108 MONO_EMIT_NEW_UNALU (cfg, OP_INOT, ins->dreg + 2, ins->sreg1 + 2);
2109 /* ADC sets the condition codes */
2110 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_ADC_IMM, ins->dreg + 1, ins->dreg + 1, 1);
2111 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_ADC_IMM, ins->dreg + 2, ins->dreg + 2, 0);
2120 * the branch_b0_table should maintain the order of these
2134 branch_b0_table [] = {
2149 branch_b1_table [] = {
/*
 * NEW_INS:
 * Allocate a new MonoInst with opcode OP and insert it after last_ins in
 * the basic block being lowered (bb and last_ins are locals of
 * mono_arch_lowering_pass).  The tail of the macro is elided in this
 * excerpt.
 */
2163 #define NEW_INS(cfg,dest,op) do { \
2164 MONO_INST_NEW((cfg), (dest), (op)); \
2165 mono_bblock_insert_after_ins (bb, last_ins, (dest)); \
/*
 * map_to_reg_reg_op:
 * Given an opcode that takes an immediate or a membase operand, return the
 * corresponding register-register form:
 *  - *_MEMBASE loads/stores  -> *_MEMINDEX (offset supplied in a register)
 *  - *_MEMBASE_IMM stores    -> *_MEMBASE_REG (value supplied in a register)
 *  - other immediates        -> mono_op_imm_to_op () (default, last line)
 * Used by the lowering pass when an immediate does not fit in the
 * instruction's 16-bit field.  Some case labels are elided in this excerpt.
 */
2169 map_to_reg_reg_op (int op)
2178 case OP_COMPARE_IMM:
2180 case OP_ICOMPARE_IMM:
2182 case OP_LCOMPARE_IMM:
2198 case OP_LOAD_MEMBASE:
2199 return OP_LOAD_MEMINDEX;
2200 case OP_LOADI4_MEMBASE:
2201 return OP_LOADI4_MEMINDEX;
2202 case OP_LOADU4_MEMBASE:
2203 return OP_LOADU4_MEMINDEX;
2204 case OP_LOADI8_MEMBASE:
2205 return OP_LOADI8_MEMINDEX;
2206 case OP_LOADU1_MEMBASE:
2207 return OP_LOADU1_MEMINDEX;
2208 case OP_LOADI2_MEMBASE:
2209 return OP_LOADI2_MEMINDEX;
2210 case OP_LOADU2_MEMBASE:
2211 return OP_LOADU2_MEMINDEX;
2212 case OP_LOADI1_MEMBASE:
2213 return OP_LOADI1_MEMINDEX;
2214 case OP_LOADR4_MEMBASE:
2215 return OP_LOADR4_MEMINDEX;
2216 case OP_LOADR8_MEMBASE:
2217 return OP_LOADR8_MEMINDEX;
2218 case OP_STOREI1_MEMBASE_REG:
2219 return OP_STOREI1_MEMINDEX;
2220 case OP_STOREI2_MEMBASE_REG:
2221 return OP_STOREI2_MEMINDEX;
2222 case OP_STOREI4_MEMBASE_REG:
2223 return OP_STOREI4_MEMINDEX;
2224 case OP_STOREI8_MEMBASE_REG:
2225 return OP_STOREI8_MEMINDEX;
2226 case OP_STORE_MEMBASE_REG:
2227 return OP_STORE_MEMINDEX;
2228 case OP_STORER4_MEMBASE_REG:
2229 return OP_STORER4_MEMINDEX;
2230 case OP_STORER8_MEMBASE_REG:
2231 return OP_STORER8_MEMINDEX;
2232 case OP_STORE_MEMBASE_IMM:
2233 return OP_STORE_MEMBASE_REG;
2234 case OP_STOREI1_MEMBASE_IMM:
2235 return OP_STOREI1_MEMBASE_REG;
2236 case OP_STOREI2_MEMBASE_IMM:
2237 return OP_STOREI2_MEMBASE_REG;
2238 case OP_STOREI4_MEMBASE_IMM:
2239 return OP_STOREI4_MEMBASE_REG;
2240 case OP_STOREI8_MEMBASE_IMM:
2241 return OP_STOREI8_MEMBASE_REG;
2243 return mono_op_imm_to_op (op);
2246 //#define map_to_reg_reg_op(op) (cfg->new_ir? mono_op_imm_to_op (op): map_to_reg_reg_op (op))
/*
 * compare_opcode_is_unsigned:
 * True when the branch/cond-exc/setcc OPCODE consuming a compare result
 * interprets it as unsigned.  The lowering pass uses this to decide
 * between ppc_is_uimm16 and ppc_is_imm16 range checks for compare
 * immediates.  Relies on the *_UN opcodes being contiguous ranges in the
 * opcode enumeration.
 */
2248 #define compare_opcode_is_unsigned(opcode) \
2249 (((opcode) >= CEE_BNE_UN && (opcode) <= CEE_BLT_UN) || \
2250 ((opcode) >= OP_IBNE_UN && (opcode) <= OP_IBLT_UN) || \
2251 ((opcode) >= OP_LBNE_UN && (opcode) <= OP_LBLT_UN) || \
2252 ((opcode) >= OP_COND_EXC_NE_UN && (opcode) <= OP_COND_EXC_LT_UN) || \
2253 ((opcode) >= OP_COND_EXC_INE_UN && (opcode) <= OP_COND_EXC_ILT_UN) || \
2254 ((opcode) == OP_CLT_UN || (opcode) == OP_CGT_UN || \
2255 (opcode) == OP_ICLT_UN || (opcode) == OP_ICGT_UN || \
2256 (opcode) == OP_LCLT_UN || (opcode) == OP_LCGT_UN))
2259 * Remove from the instruction list the instructions that can't be
2260 * represented with very simple instructions with no register
/*
 * mono_arch_lowering_pass:
 * Rewrite instructions in BB into forms PPC can encode directly: immediates
 * that do not fit in 16 bits are loaded into a temporary via OP_ICONST and
 * the opcode switched to its register-register form (map_to_reg_reg_op);
 * div/rem-by-immediate and rem are expanded; big membase offsets become
 * memindex addressing; float constants become loads from memory.
 * NOTE(review): several case labels, braces and `break`s are elided in
 * this excerpt.
 */
2264 mono_arch_lowering_pass (MonoCompile *cfg, MonoBasicBlock *bb)
2266 MonoInst *ins, *next, *temp, *last_ins = NULL;
2269 MONO_BB_FOR_EACH_INS (bb, ins) {
2271 switch (ins->opcode) {
/* PPC divides only register by register: hoist the immediate */
2272 case OP_IDIV_UN_IMM:
2275 case OP_IREM_UN_IMM:
2276 NEW_INS (cfg, temp, OP_ICONST);
2277 temp->inst_c0 = ins->inst_imm;
2278 temp->dreg = mono_alloc_ireg (cfg);
2279 ins->sreg2 = temp->dreg;
2280 if (ins->opcode == OP_IDIV_IMM)
2281 ins->opcode = OP_IDIV;
2282 else if (ins->opcode == OP_IREM_IMM)
2283 ins->opcode = OP_IREM;
2284 else if (ins->opcode == OP_IDIV_UN_IMM)
2285 ins->opcode = OP_IDIV_UN;
2286 else if (ins->opcode == OP_IREM_UN_IMM)
2287 ins->opcode = OP_IREM_UN;
2289 /* handle rem separately */
2293 CASE_PPC64 (OP_LREM)
2294 CASE_PPC64 (OP_LREM_UN) {
2296 /* we change a rem dest, src1, src2 to
2297 * div temp1, src1, src2
2298 * mul temp2, temp1, src2
2299 * sub dest, src1, temp2
2301 if (ins->opcode == OP_IREM || ins->opcode == OP_IREM_UN) {
2302 NEW_INS (cfg, mul, OP_IMUL);
2303 NEW_INS (cfg, temp, ins->opcode == OP_IREM? OP_IDIV: OP_IDIV_UN);
2304 ins->opcode = OP_ISUB;
2306 NEW_INS (cfg, mul, OP_LMUL);
2307 NEW_INS (cfg, temp, ins->opcode == OP_LREM? OP_LDIV: OP_LDIV_UN);
2308 ins->opcode = OP_LSUB;
2310 temp->sreg1 = ins->sreg1;
2311 temp->sreg2 = ins->sreg2;
2312 temp->dreg = mono_alloc_ireg (cfg);
2313 mul->sreg1 = temp->dreg;
2314 mul->sreg2 = ins->sreg2;
2315 mul->dreg = mono_alloc_ireg (cfg);
2316 ins->sreg2 = mul->dreg;
2320 CASE_PPC64 (OP_LADD_IMM)
2323 if (!ppc_is_imm16 (ins->inst_imm)) {
2324 NEW_INS (cfg, temp, OP_ICONST);
2325 temp->inst_c0 = ins->inst_imm;
2326 temp->dreg = mono_alloc_ireg (cfg);
2327 ins->sreg2 = temp->dreg;
2328 ins->opcode = map_to_reg_reg_op (ins->opcode);
2332 CASE_PPC64 (OP_LSUB_IMM)
/* subtraction is encoded as addi with the negated immediate, so the
 * range check is on -inst_imm */
2334 if (!ppc_is_imm16 (-ins->inst_imm)) {
2335 NEW_INS (cfg, temp, OP_ICONST);
2336 temp->inst_c0 = ins->inst_imm;
2337 temp->dreg = mono_alloc_ireg (cfg);
2338 ins->sreg2 = temp->dreg;
2339 ins->opcode = map_to_reg_reg_op (ins->opcode);
/* logical immediates: andi./ori/xori only take a 16-bit field, so an
 * immediate with bits in both halves needs a register */
2351 gboolean is_imm = ((ins->inst_imm & 0xffff0000) && (ins->inst_imm & 0xffff));
2352 #ifdef __mono_ppc64__
2353 if (ins->inst_imm & 0xffffffff00000000UL)
2357 NEW_INS (cfg, temp, OP_ICONST);
2358 temp->inst_c0 = ins->inst_imm;
2359 temp->dreg = mono_alloc_ireg (cfg);
2360 ins->sreg2 = temp->dreg;
2361 ins->opcode = map_to_reg_reg_op (ins->opcode);
2370 NEW_INS (cfg, temp, OP_ICONST);
2371 temp->inst_c0 = ins->inst_imm;
2372 temp->dreg = mono_alloc_ireg (cfg);
2373 ins->sreg2 = temp->dreg;
2374 ins->opcode = map_to_reg_reg_op (ins->opcode);
2376 case OP_COMPARE_IMM:
2377 case OP_ICOMPARE_IMM:
2378 CASE_PPC64 (OP_LCOMPARE_IMM)
2380 /* Branch opts can eliminate the branch */
2381 if (!next || (!(MONO_IS_COND_BRANCH_OP (next) || MONO_IS_COND_EXC (next) || MONO_IS_SETCC (next)))) {
2382 ins->opcode = OP_NOP;
/* unsigned consumers need cmpli (unsigned 16-bit range), signed cmpi */
2386 if (compare_opcode_is_unsigned (next->opcode)) {
2387 if (!ppc_is_uimm16 (ins->inst_imm)) {
2388 NEW_INS (cfg, temp, OP_ICONST);
2389 temp->inst_c0 = ins->inst_imm;
2390 temp->dreg = mono_alloc_ireg (cfg);
2391 ins->sreg2 = temp->dreg;
2392 ins->opcode = map_to_reg_reg_op (ins->opcode);
2395 if (!ppc_is_imm16 (ins->inst_imm)) {
2396 NEW_INS (cfg, temp, OP_ICONST);
2397 temp->inst_c0 = ins->inst_imm;
2398 temp->dreg = mono_alloc_ireg (cfg);
2399 ins->sreg2 = temp->dreg;
2400 ins->opcode = map_to_reg_reg_op (ins->opcode);
/* multiply by 1 / 0 / power of two: strength-reduce */
2406 if (ins->inst_imm == 1) {
2407 ins->opcode = OP_MOVE;
2410 if (ins->inst_imm == 0) {
2411 ins->opcode = OP_ICONST;
2415 imm = mono_is_power_of_two (ins->inst_imm);
2417 ins->opcode = OP_SHL_IMM;
2418 ins->inst_imm = imm;
2421 if (!ppc_is_imm16 (ins->inst_imm)) {
2422 NEW_INS (cfg, temp, OP_ICONST);
2423 temp->inst_c0 = ins->inst_imm;
2424 temp->dreg = mono_alloc_ireg (cfg);
2425 ins->sreg2 = temp->dreg;
2426 ins->opcode = map_to_reg_reg_op (ins->opcode);
2429 case OP_LOCALLOC_IMM:
2430 NEW_INS (cfg, temp, OP_ICONST);
2431 temp->inst_c0 = ins->inst_imm;
2432 temp->dreg = mono_alloc_ireg (cfg);
2433 ins->sreg1 = temp->dreg;
2434 ins->opcode = OP_LOCALLOC;
2436 case OP_LOAD_MEMBASE:
2437 case OP_LOADI4_MEMBASE:
2438 CASE_PPC64 (OP_LOADI8_MEMBASE)
2439 case OP_LOADU4_MEMBASE:
2440 case OP_LOADI2_MEMBASE:
2441 case OP_LOADU2_MEMBASE:
2442 case OP_LOADI1_MEMBASE:
2443 case OP_LOADU1_MEMBASE:
2444 case OP_LOADR4_MEMBASE:
2445 case OP_LOADR8_MEMBASE:
2446 case OP_STORE_MEMBASE_REG:
2447 CASE_PPC64 (OP_STOREI8_MEMBASE_REG)
2448 case OP_STOREI4_MEMBASE_REG:
2449 case OP_STOREI2_MEMBASE_REG:
2450 case OP_STOREI1_MEMBASE_REG:
2451 case OP_STORER4_MEMBASE_REG:
2452 case OP_STORER8_MEMBASE_REG:
2453 /* we can do two things: load the immed in a register
2454 * and use an indexed load, or see if the immed can be
2455 * represented as an ad_imm + a load with a smaller offset
2456 * that fits. We just do the first for now, optimize later.
2458 if (ppc_is_imm16 (ins->inst_offset))
2460 NEW_INS (cfg, temp, OP_ICONST);
2461 temp->inst_c0 = ins->inst_offset;
2462 temp->dreg = mono_alloc_ireg (cfg);
2463 ins->sreg2 = temp->dreg;
2464 ins->opcode = map_to_reg_reg_op (ins->opcode);
2466 case OP_STORE_MEMBASE_IMM:
2467 case OP_STOREI1_MEMBASE_IMM:
2468 case OP_STOREI2_MEMBASE_IMM:
2469 case OP_STOREI4_MEMBASE_IMM:
2470 CASE_PPC64 (OP_STOREI8_MEMBASE_IMM)
2471 NEW_INS (cfg, temp, OP_ICONST);
2472 temp->inst_c0 = ins->inst_imm;
2473 temp->dreg = mono_alloc_ireg (cfg);
2474 ins->sreg1 = temp->dreg;
2475 ins->opcode = map_to_reg_reg_op (ins->opcode);
2477 goto loop_start; /* make it handle the possibly big ins->inst_offset */
/* float constants become a pointer load + LOADR4/R8_MEMBASE */
2480 NEW_INS (cfg, temp, OP_ICONST);
2481 temp->inst_c0 = (gulong)ins->inst_p0;
2482 temp->dreg = mono_alloc_ireg (cfg);
2483 ins->inst_basereg = temp->dreg;
2484 ins->inst_offset = 0;
2485 ins->opcode = ins->opcode == OP_R4CONST? OP_LOADR4_MEMBASE: OP_LOADR8_MEMBASE;
2487 /* make it handle the possibly big ins->inst_offset
2488 * later optimize to use lis + load_membase
2494 bb->last_ins = last_ins;
2495 bb->max_vreg = cfg->next_vreg;
/*
 * emit_float_to_int:
 * Emit native code converting the float in SREG to an integer of SIZE
 * bytes in DREG: fctiwz (fctidz on ppc64) rounds toward zero into ppc_f0,
 * the result is spilled to the cfg->arch.fp_conv_var_offset scratch slot
 * and reloaded as an integer, then masked/sign-extended to SIZE.
 * NOTE(review): #else/#endif and brace lines are elided in this excerpt;
 * sub_offset presumably selects the word of the spilled double holding
 * the integer — confirm in the full file.
 */
2499 emit_float_to_int (MonoCompile *cfg, guchar *code, int dreg, int sreg, int size, gboolean is_signed)
2501 long offset = cfg->arch.fp_conv_var_offset;
2503 /* sreg is a float, dreg is an integer reg. ppc_f0 is used a scratch */
2504 #ifdef __mono_ppc64__
2506 ppc_fctidz (code, ppc_f0, sreg);
2511 ppc_fctiwz (code, ppc_f0, sreg);
/* spill ppc_f0 and reload as integer; compute the address in dreg when
 * the scratch slot is outside the 16-bit displacement range */
2514 if (ppc_is_imm16 (offset + sub_offset)) {
2515 ppc_stfd (code, ppc_f0, offset, cfg->frame_reg);
2517 ppc_load_reg (code, dreg, offset + sub_offset, cfg->frame_reg);
2519 ppc_lwz (code, dreg, offset + sub_offset, cfg->frame_reg);
2521 ppc_load (code, dreg, offset);
2522 ppc_add (code, dreg, dreg, cfg->frame_reg);
2523 ppc_stfd (code, ppc_f0, 0, dreg);
2525 ppc_load_reg (code, dreg, sub_offset, dreg);
2527 ppc_lwz (code, dreg, sub_offset, dreg);
/* narrow to the requested size: mask for unsigned, extend for signed */
2531 ppc_andid (code, dreg, dreg, 0xff);
2533 ppc_andid (code, dreg, dreg, 0xffff);
2534 #ifdef __mono_ppc64__
2536 ppc_clrldi (code, dreg, dreg, 32);
2540 ppc_extsb (code, dreg, dreg);
2542 ppc_extsh (code, dreg, dreg);
2543 #ifdef __mono_ppc64__
2545 ppc_extsw (code, dreg, dreg);
2553 const guchar *target;
/* True when DIFF fits in the signed 26-bit displacement of a PPC
 * branch instruction (+/- 32 MB), i.e. a direct bl can reach it. */
2558 #define is_call_imm(diff) ((glong)(diff) >= -33554432 && (glong)(diff) <= 33554431)
/*
 * search_thunk_slot:
 * mono_domain_code_foreach callback: scan one code chunk's thunk area for
 * either an existing 16-byte thunk that already jumps to pdata->target
 * (reuse it) or a free slot (emit lis/ori/mtctr/bcctr there), then patch
 * the call site at pdata->code to branch to the thunk.  pdata->found is
 * set to 1 on success.  Only chunks reachable from the call site by a
 * direct branch are considered.  Not implemented for ppc64 in this code.
 * NOTE(review): some lines (returns, brace closers) are elided in this
 * excerpt.
 */
2561 search_thunk_slot (void *data, int csize, int bsize, void *user_data) {
2562 #ifdef __mono_ppc64__
2563 g_assert_not_reached ();
2565 PatchData *pdata = (PatchData*)user_data;
2566 guchar *code = data;
2567 guint32 *thunks = data;
2568 guint32 *endthunks = (guint32*)(code + bsize);
2572 int difflow, diffhigh;
2574 /* always ensure a call from pdata->code can reach to the thunks without further thunks */
2575 difflow = (char*)pdata->code - (char*)thunks;
2576 diffhigh = (char*)pdata->code - (char*)endthunks;
2577 if (!((is_call_imm (thunks) && is_call_imm (endthunks)) || (is_call_imm (difflow) && is_call_imm (diffhigh))))
/* build the two-instruction load of the target for comparison below */
2580 templ = (guchar*)load;
2581 ppc_load_sequence (templ, ppc_r0, pdata->target);
2583 //g_print ("thunk nentries: %d\n", ((char*)endthunks - (char*)thunks)/16);
2584 if ((pdata->found == 2) || (pdata->code >= code && pdata->code <= code + csize)) {
2585 while (thunks < endthunks) {
2586 //g_print ("looking for target: %p at %p (%08x-%08x)\n", pdata->target, thunks, thunks [0], thunks [1]);
2587 if ((thunks [0] == load [0]) && (thunks [1] == load [1])) {
/* existing thunk for this target: just patch the call site */
2588 ppc_patch (pdata->code, (guchar*)thunks);
2591 static int num_thunks = 0;
2593 if ((num_thunks % 20) == 0)
2594 g_print ("num_thunks lookup: %d\n", num_thunks);
2597 } else if ((thunks [0] == 0) && (thunks [1] == 0)) {
2598 /* found a free slot instead: emit thunk */
2599 code = (guchar*)thunks;
2600 ppc_lis (code, ppc_r0, (gulong)(pdata->target) >> 16);
2601 ppc_ori (code, ppc_r0, ppc_r0, (gulong)(pdata->target) & 0xffff);
2602 ppc_mtctr (code, ppc_r0);
2603 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
2604 mono_arch_flush_icache ((guchar*)thunks, 16);
2606 ppc_patch (pdata->code, (guchar*)thunks);
2609 static int num_thunks = 0;
2611 if ((num_thunks % 20) == 0)
2612 g_print ("num_thunks: %d\n", num_thunks);
2616 /* skip 16 bytes, the size of the thunk */
2620 //g_print ("failed thunk lookup for %p from %p at %p (%d entries)\n", pdata->target, pdata->code, data, count);
/*
 * handle_thunk:
 * Patches the branch at @code to reach @target via a thunk when the target
 * is out of direct-branch range.  Runs search_thunk_slot() over all code
 * chunks of the current domain under the domain lock; a second pass (see
 * the "first available slot" fallback below) relaxes the search to any
 * chunk.  Aborts (g_assert) if no thunk slot could be found or emitted.
 * @absolute: stored into pdata.absolute; presumably selects absolute vs.
 * relative patching — confirm against PatchData users.
 */
2627 handle_thunk (int absolute, guchar *code, const guchar *target) {
2628 MonoDomain *domain = mono_domain_get ();
2632 pdata.target = target;
2633 pdata.absolute = absolute;
2636 mono_domain_lock (domain);
2637 mono_domain_code_foreach (domain, search_thunk_slot, &pdata);
2640 /* this uses the first available slot */
2642 mono_domain_code_foreach (domain, search_thunk_slot, &pdata);
2644 mono_domain_unlock (domain);
2646 if (pdata.found != 1)
2647 g_print ("thunk failed for %p from %p\n", target, code);
2648 g_assert (pdata.found == 1);
/*
 * patch_ins:
 * Overwrites the single 4-byte instruction at @code with @ins and flushes
 * the instruction cache for that word so the CPU sees the new encoding.
 */
2652 patch_ins (guint8 *code, guint32 ins)
2654 *(guint32*)code = ins;
2655 mono_arch_flush_icache (code, 4);
/*
 * ppc_patch_full:
 * Redirects the branch or address-load instruction sequence at @code to
 * @target.  Dispatches on the primary opcode (top 6 bits of the word):
 *  - unconditional branch (b/bl): rewrite the 26-bit displacement,
 *    preferring a PC-relative form, falling back to an absolute form
 *    (AA bit set via "| 2"), and finally to a thunk when out of range;
 *  - 16-bit-displacement branch: rewrite the low halfword, asserting the
 *    displacement fits;
 *  - lis/addis-based load sequence (or a blrl/blr/bcctr tail): rewrite
 *    the immediate-load sequence that materializes the target address.
 * @is_fd: on ppc64, whether @target is a function descriptor that must be
 * resolved via mono_get_addr_from_ftnptr() — see below.
 */
2659 ppc_patch_full (guchar *code, const guchar *target, gboolean is_fd)
2661 guint32 ins = *(guint32*)code;
2662 guint32 prim = ins >> 26;
2665 //g_print ("patching 0x%08x (0x%08x) to point to 0x%08x\n", code, ins, target);
2667 // prefer relative branches, they are more position independent (e.g. for AOT compilation).
2668 gint diff = target - code;
/* b/bl (opcode 18): 26-bit signed displacement, range +/-32 MB.
 * "(ins & 1)" preserves the LK (link) bit of the original branch. */
2671 if (diff <= 33554431){
2672 ins = (18 << 26) | (diff) | (ins & 1);
2673 patch_ins (code, ins);
2677 /* diff between 0 and -33554432 */
2678 if (diff >= -33554432){
2679 ins = (18 << 26) | (diff & ~0xfc000000) | (ins & 1);
2680 patch_ins (code, ins);
/* Relative displacement out of range: try an absolute branch (the "| 2"
 * sets the AA bit) if the target address itself fits in 26 bits. */
2685 if ((glong)target >= 0){
2686 if ((glong)target <= 33554431){
2687 ins = (18 << 26) | ((gulong) target) | (ins & 1) | 2;
2688 patch_ins (code, ins);
2692 if ((glong)target >= -33554432){
2693 ins = (18 << 26) | (((gulong)target) & ~0xfc000000) | (ins & 1) | 2;
2694 patch_ins (code, ins);
/* Neither relative nor absolute encoding reaches: go through a thunk. */
2699 handle_thunk (TRUE, code, target);
2702 g_assert_not_reached ();
/* 16-bit displacement form (conditional branch, presumably opcode 16 —
 * the case label is elided here): absolute variant first.  The masks keep
 * the AA/LK bits ("ins & 3") and assert the halfword does not overflow. */
2710 guint32 li = (gulong)target;
2711 ins = (ins & 0xffff0000) | (ins & 3);
2712 ovf = li & 0xffff0000;
2713 if (ovf != 0 && ovf != 0xffff0000)
2714 g_assert_not_reached ();
2717 // FIXME: assert the top bits of li are 0
/* Relative 16-bit displacement variant. */
2719 gint diff = target - code;
2720 ins = (ins & 0xffff0000) | (ins & 3);
2721 ovf = diff & 0xffff0000;
2722 if (ovf != 0 && ovf != 0xffff0000)
2723 g_assert_not_reached ();
2727 patch_ins (code, ins);
/* addis/lis (opcode 15) or one of blrl (0x4e800021) / blr (0x4e800020) /
 * bcctr (0x4e800420): @code points into an address-load sequence that must
 * be rewritten to materialize the new target. */
2731 if (prim == 15 || ins == 0x4e800021 || ins == 0x4e800020 || ins == 0x4e800420) {
2732 #ifdef __mono_ppc64__
2733 guint32 *seq = (guint32*)code;
2734 guint32 *branch_ins;
2736 /* the trampoline code will try to patch the blrl, blr, bcctr */
2737 if (ins == 0x4e800021 || ins == 0x4e800020 || ins == 0x4e800420) {
/* Walk back/forward to locate the branch at the end of the sequence;
 * the sequence length differs depending on whether it ends in ld/ld
 * (function-descriptor load) or mr/nop. */
2739 if (ppc_opcode (seq [-3]) == 58 || ppc_opcode (seq [-3]) == 31) /* ld || mr */
2744 if (ppc_opcode (seq [5]) == 58 || ppc_opcode (seq [5]) == 31) /* ld || mr */
2745 branch_ins = seq + 8;
2747 branch_ins = seq + 6;
2750 seq = (guint32*)code;
2751 /* this is the lis/ori/sldi/oris/ori/(ld/ld|mr/nop)/mtlr/blrl sequence */
2752 g_assert (mono_ppc_is_direct_call_sequence (branch_ins));
2754 if (ppc_opcode (seq [5]) == 58) { /* ld */
2755 g_assert (ppc_opcode (seq [6]) == 58); /* ld */
/* Sequence loads through a descriptor but the new target is a plain
 * address: replace the first ld with "mr r0, r11". */
2758 guint8 *buf = (guint8*)&seq [5];
2759 ppc_mr (buf, ppc_r0, ppc_r11);
2764 target = mono_get_addr_from_ftnptr ((gpointer)target);
2767 /* FIXME: make this thread safe */
2768 /* FIXME: we're assuming we're using r11 here */
2769 ppc_load_sequence (code, ppc_r11, target);
2770 mono_arch_flush_icache ((guint8*)seq, 28);
/* 32-bit path below. */
2773 /* the trampoline code will try to patch the blrl, blr, bcctr */
2774 if (ins == 0x4e800021 || ins == 0x4e800020 || ins == 0x4e800420) {
2777 /* this is the lis/ori/mtlr/blrl sequence */
2778 seq = (guint32*)code;
2779 g_assert ((seq [0] >> 26) == 15);
2780 g_assert ((seq [1] >> 26) == 24);
2781 g_assert ((seq [2] >> 26) == 31);
2782 g_assert (seq [3] == 0x4e800021 || seq [3] == 0x4e800020 || seq [3] == 0x4e800420);
2783 /* FIXME: make this thread safe */
2784 ppc_lis (code, ppc_r0, (guint32)(target) >> 16);
2785 ppc_ori (code, ppc_r0, ppc_r0, (guint32)(target) & 0xffff);
2786 mono_arch_flush_icache (code - 8, 8);
2789 g_assert_not_reached ();
2791 // g_print ("patched with 0x%08x\n", ins);
/*
 * ppc_patch:
 * Convenience wrapper around ppc_patch_full() for the common case where
 * @target is not a ppc64 function descriptor (is_fd == FALSE).
 */
2795 ppc_patch (guchar *code, const guchar *target)
2797 ppc_patch_full (code, target, FALSE);
/*
 * emit_move_return_value:
 * After a call instruction, moves the ABI return value into the
 * destination register chosen by the register allocator.  For float calls
 * the result arrives in f1; copy it with fmr only when dreg differs.
 * Returns the updated code pointer.
 */
2801 emit_move_return_value (MonoCompile *cfg, MonoInst *ins, guint8 *code)
2803 switch (ins->opcode) {
2806 case OP_FCALL_MEMBASE:
2807 if (ins->dreg != ppc_f1)
2808 ppc_fmr (code, ins->dreg, ppc_f1);
 * emit_load_volatile_arguments:
 *
 * Load volatile arguments from the stack to the original input registers.
 * Required before a tail call.
 *
 * Mirrors the argument-spilling done in the prolog: for each incoming
 * argument that was stored to the frame, reload it into the register (or
 * stack slot) where the callee's calling convention expects it.  Returns
 * the updated code pointer.
 */
2822 emit_load_volatile_arguments (MonoCompile *cfg, guint8 *code)
2824 MonoMethod *method = cfg->method;
2825 MonoMethodSignature *sig;
2829 int struct_index = 0;
2831 sig = mono_method_signature (method);
2833 /* This is the opposite of the code in emit_prolog */
2837 cinfo = calculate_sizes (sig, sig->pinvoke);
/* Struct returns: reload the hidden return-value address register. */
2839 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
2840 ArgInfo *ainfo = &cinfo->ret;
2841 inst = cfg->vret_addr;
2842 g_assert (ppc_is_imm16 (inst->inst_offset));
2843 ppc_load_reg (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2845 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
2846 ArgInfo *ainfo = cinfo->args + i;
2847 inst = cfg->args [pos];
2849 g_assert (inst->opcode != OP_REGVAR);
2850 g_assert (ppc_is_imm16 (inst->inst_offset));
2852 switch (ainfo->regtype) {
2853 case RegTypeGeneral:
/* Integer register argument: reload with a width-appropriate load. */
2854 switch (ainfo->size) {
2856 ppc_lbz (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2859 ppc_lhz (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2861 #ifdef __mono_ppc64__
2863 ppc_lwz (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2867 ppc_load_reg (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
/* Floating-point register argument: single vs. double precision. */
2873 switch (ainfo->size) {
2875 ppc_lfs (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2878 ppc_lfd (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2881 g_assert_not_reached ();
/* Stack-passed argument (regtype label elided above): copy the value
 * from the local spill slot back to the outgoing stack area. */
2886 MonoType *type = mini_type_get_underlying_type (cfg->generic_sharing_context,
2887 &inst->klass->byval_arg);
2889 #ifndef __mono_ppc64__
2890 if (type->type == MONO_TYPE_I8)
2894 if (MONO_TYPE_IS_REFERENCE (type) || type->type == MONO_TYPE_I8) {
2895 ppc_load_reg (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
2896 ppc_store_reg (code, ppc_r0, ainfo->offset, ainfo->reg);
2897 } else if (type->type == MONO_TYPE_I4) {
2898 ppc_lwz (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
2899 ppc_stw (code, ppc_r0, ainfo->offset, ainfo->reg);
2907 case RegTypeStructByVal: {
2918 * Darwin pinvokes needs some special handling
2919 * for 1 and 2 byte arguments
2921 if (method->signature->pinvoke)
2922 size = mono_class_native_size (inst->klass, NULL);
2923 if (size == 1 || size == 2) {
/* Struct passed by value in consecutive registers: reload each word. */
2928 for (j = 0; j < ainfo->size; ++j) {
2929 ppc_load_reg (code, ainfo->reg + j,
2930 inst->inst_offset + j * sizeof (gpointer),
2931 inst->inst_basereg);
2932 /* FIXME: shift to the right */
2939 case RegTypeStructByAddr: {
/* Struct passed by reference: reload the saved address of the copy. */
2940 MonoInst *addr = cfg->tailcall_valuetype_addrs [struct_index];
2942 g_assert (ppc_is_imm16 (addr->inst_offset));
2943 g_assert (!ainfo->offset);
2944 ppc_load_reg (code, ainfo->reg, addr->inst_offset, addr->inst_basereg);
2951 g_assert_not_reached ();
/* This must be kept in sync with emit_load_volatile_arguments(). */
/*
 * ins_native_length:
 * Returns an upper bound on the number of native code bytes @ins will
 * emit.  The base length comes from the machine description table
 * (cpu-ppc*.md via ins_get_spec); OP_JMP additionally accounts for the
 * per-argument reload code generated by emit_load_volatile_arguments(),
 * hence the sync requirement noted above.
 */
2964 ins_native_length (MonoCompile *cfg, MonoInst *ins)
2966 int len = ((guint8 *)ins_get_spec (ins->opcode))[MONO_INST_LEN];
2967 MonoMethodSignature *sig;
2972 if (ins->opcode != OP_JMP)
2975 call = (MonoCallInst*)ins;
2976 sig = mono_method_signature (cfg->method);
2977 cinfo = calculate_sizes (sig, sig->pinvoke);
2979 if (MONO_TYPE_ISSTRUCT (sig->ret))
2981 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
2982 ArgInfo *ainfo = cinfo->args + i;
2984 switch (ainfo->regtype) {
2985 case RegTypeGeneral:
2994 case RegTypeStructByVal:
/* one load instruction (4 bytes) per register the struct occupies */
2995 len += 4 * ainfo->size;
2998 case RegTypeStructByAddr:
3003 g_assert_not_reached ();
/*
 * emit_reserve_param_area:
 * Grows the stack frame by cfg->param_area bytes (rounded up to
 * MONO_ARCH_FRAME_ALIGNMENT) so outgoing call arguments have room.
 * Reloads the back-chain word from the stack pointer and re-stores it
 * with a store-with-update, keeping the ABI back-chain intact.  Uses the
 * indexed form via r11 when -size does not fit in a signed 16-bit
 * immediate.  Returns the updated code pointer.
 */
3013 emit_reserve_param_area (MonoCompile *cfg, guint8 *code)
3015 long size = cfg->param_area;
3017 size += MONO_ARCH_FRAME_ALIGNMENT - 1;
3018 size &= -MONO_ARCH_FRAME_ALIGNMENT;
3023 ppc_load_reg (code, ppc_r0, 0, ppc_sp);
3024 if (ppc_is_imm16 (-size)) {
3025 ppc_store_reg_update (code, ppc_r0, -size, ppc_sp);
3027 ppc_load (code, ppc_r11, -size);
3028 ppc_store_reg_update_indexed (code, ppc_r0, ppc_sp, ppc_r11);
/*
 * emit_unreserve_param_area:
 * Inverse of emit_reserve_param_area(): shrinks the frame by the same
 * aligned cfg->param_area amount, re-storing the back-chain word with a
 * store-with-update at the higher address.  Must stay symmetric with the
 * reserve path so the back-chain remains valid.  Returns the updated
 * code pointer.
 */
3035 emit_unreserve_param_area (MonoCompile *cfg, guint8 *code)
3037 long size = cfg->param_area;
3039 size += MONO_ARCH_FRAME_ALIGNMENT - 1;
3040 size &= -MONO_ARCH_FRAME_ALIGNMENT;
3045 ppc_load_reg (code, ppc_r0, 0, ppc_sp);
3046 if (ppc_is_imm16 (size)) {
3047 ppc_store_reg_update (code, ppc_r0, size, ppc_sp);
3049 ppc_load (code, ppc_r11, size);
3050 ppc_store_reg_update_indexed (code, ppc_r0, ppc_sp, ppc_r11);
3056 #define MASK_SHIFT_IMM(i) ((i) & MONO_PPC_32_64_CASE (0x1f, 0x3f))
3059 mono_arch_output_basic_block (MonoCompile *cfg, MonoBasicBlock *bb)
3061 MonoInst *ins, *next;
3064 guint8 *code = cfg->native_code + cfg->code_len;
3065 MonoInst *last_ins = NULL;
3066 guint last_offset = 0;
3070 /* we don't align basic blocks of loops on ppc */
3072 if (cfg->verbose_level > 2)
3073 g_print ("Basic block %d starting at offset 0x%x\n", bb->block_num, bb->native_offset);
3075 cpos = bb->max_offset;
3077 if (cfg->prof_options & MONO_PROFILE_COVERAGE) {
3078 //MonoCoverageInfo *cov = mono_get_coverage_info (cfg->method);
3079 //g_assert (!mono_compile_aot);
3082 // cov->data [bb->dfn].iloffset = bb->cil_code - cfg->cil_code;
3083 /* this is not thread save, but good enough */
3084 /* fixme: howto handle overflows? */
3085 //x86_inc_mem (code, &cov->data [bb->dfn].count);
3088 MONO_BB_FOR_EACH_INS (bb, ins) {
3089 offset = code - cfg->native_code;
3091 max_len = ins_native_length (cfg, ins);
3093 if (offset > (cfg->code_size - max_len - 16)) {
3094 cfg->code_size *= 2;
3095 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
3096 code = cfg->native_code + offset;
3098 // if (ins->cil_code)
3099 // g_print ("cil code\n");
3100 mono_debug_record_line_number (cfg, ins, offset);
3102 switch (normalize_opcode (ins->opcode)) {
3103 case OP_RELAXED_NOP:
3106 case OP_DUMMY_STORE:
3107 case OP_NOT_REACHED:
3111 emit_tls_access (code, ins->dreg, ins->inst_offset);
3114 ppc_mullw (code, ppc_r0, ins->sreg1, ins->sreg2);
3115 ppc_mulhw (code, ppc_r3, ins->sreg1, ins->sreg2);
3116 ppc_mr (code, ppc_r4, ppc_r0);
3119 ppc_mullw (code, ppc_r0, ins->sreg1, ins->sreg2);
3120 ppc_mulhwu (code, ppc_r3, ins->sreg1, ins->sreg2);
3121 ppc_mr (code, ppc_r4, ppc_r0);
3123 case OP_MEMORY_BARRIER:
3126 case OP_STOREI1_MEMBASE_REG:
3127 if (ppc_is_imm16 (ins->inst_offset)) {
3128 ppc_stb (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3130 ppc_load (code, ppc_r0, ins->inst_offset);
3131 ppc_stbx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3134 case OP_STOREI2_MEMBASE_REG:
3135 if (ppc_is_imm16 (ins->inst_offset)) {
3136 ppc_sth (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3138 ppc_load (code, ppc_r0, ins->inst_offset);
3139 ppc_sthx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3142 case OP_STORE_MEMBASE_REG:
3143 if (ppc_is_imm16 (ins->inst_offset)) {
3144 ppc_store_reg (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3146 ppc_load (code, ppc_r0, ins->inst_offset);
3147 ppc_store_reg_indexed (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3150 case OP_STOREI1_MEMINDEX:
3151 ppc_stbx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
3153 case OP_STOREI2_MEMINDEX:
3154 ppc_sthx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
3156 case OP_STORE_MEMINDEX:
3157 ppc_store_reg_indexed (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
3160 g_assert_not_reached ();
3162 case OP_LOAD_MEMBASE:
3163 if (ppc_is_imm16 (ins->inst_offset)) {
3164 ppc_load_reg (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3166 ppc_load (code, ppc_r0, ins->inst_offset);
3167 ppc_load_reg_indexed (code, ins->dreg, ins->inst_basereg, ppc_r0);
3170 case OP_LOADI4_MEMBASE:
3171 #ifdef __mono_ppc64__
3172 if (ppc_is_imm16 (ins->inst_offset)) {
3173 ppc_lwa (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3175 ppc_load (code, ppc_r0, ins->inst_offset);
3176 ppc_lwax (code, ins->dreg, ins->inst_basereg, ppc_r0);
3180 case OP_LOADU4_MEMBASE:
3181 if (ppc_is_imm16 (ins->inst_offset)) {
3182 ppc_lwz (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3184 ppc_load (code, ppc_r0, ins->inst_offset);
3185 ppc_lwzx (code, ins->dreg, ins->inst_basereg, ppc_r0);
3188 case OP_LOADI1_MEMBASE:
3189 case OP_LOADU1_MEMBASE:
3190 if (ppc_is_imm16 (ins->inst_offset)) {
3191 ppc_lbz (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3193 ppc_load (code, ppc_r0, ins->inst_offset);
3194 ppc_lbzx (code, ins->dreg, ins->inst_basereg, ppc_r0);
3196 if (ins->opcode == OP_LOADI1_MEMBASE)
3197 ppc_extsb (code, ins->dreg, ins->dreg);
3199 case OP_LOADU2_MEMBASE:
3200 if (ppc_is_imm16 (ins->inst_offset)) {
3201 ppc_lhz (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3203 ppc_load (code, ppc_r0, ins->inst_offset);
3204 ppc_lhzx (code, ins->dreg, ins->inst_basereg, ppc_r0);
3207 case OP_LOADI2_MEMBASE:
3208 if (ppc_is_imm16 (ins->inst_offset)) {
3209 ppc_lha (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3211 ppc_load (code, ppc_r0, ins->inst_offset);
3212 ppc_lhax (code, ins->dreg, ins->inst_basereg, ppc_r0);
3215 case OP_LOAD_MEMINDEX:
3216 ppc_load_reg_indexed (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3218 case OP_LOADI4_MEMINDEX:
3219 #ifdef __mono_ppc64__
3220 ppc_lwax (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3223 case OP_LOADU4_MEMINDEX:
3224 ppc_lwzx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3226 case OP_LOADU2_MEMINDEX:
3227 ppc_lhzx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3229 case OP_LOADI2_MEMINDEX:
3230 ppc_lhax (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3232 case OP_LOADU1_MEMINDEX:
3233 ppc_lbzx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3235 case OP_LOADI1_MEMINDEX:
3236 ppc_lbzx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3237 ppc_extsb (code, ins->dreg, ins->dreg);
3239 case OP_ICONV_TO_I1:
3240 CASE_PPC64 (OP_LCONV_TO_I1)
3241 ppc_extsb (code, ins->dreg, ins->sreg1);
3243 case OP_ICONV_TO_I2:
3244 CASE_PPC64 (OP_LCONV_TO_I2)
3245 ppc_extsh (code, ins->dreg, ins->sreg1);
3247 case OP_ICONV_TO_U1:
3248 CASE_PPC64 (OP_LCONV_TO_U1)
3249 ppc_clrlwi (code, ins->dreg, ins->sreg1, 24);
3251 case OP_ICONV_TO_U2:
3252 CASE_PPC64 (OP_LCONV_TO_U2)
3253 ppc_clrlwi (code, ins->dreg, ins->sreg1, 16);
3257 CASE_PPC64 (OP_LCOMPARE)
3258 L = (sizeof (gpointer) == 4 || ins->opcode == OP_ICOMPARE) ? 0 : 1;
3260 if (next && compare_opcode_is_unsigned (next->opcode))
3261 ppc_cmpl (code, 0, L, ins->sreg1, ins->sreg2);
3263 ppc_cmp (code, 0, L, ins->sreg1, ins->sreg2);
3265 case OP_COMPARE_IMM:
3266 case OP_ICOMPARE_IMM:
3267 CASE_PPC64 (OP_LCOMPARE_IMM)
3268 L = (sizeof (gpointer) == 4 || ins->opcode == OP_ICOMPARE_IMM) ? 0 : 1;
3270 if (next && compare_opcode_is_unsigned (next->opcode)) {
3271 if (ppc_is_uimm16 (ins->inst_imm)) {
3272 ppc_cmpli (code, 0, L, ins->sreg1, (ins->inst_imm & 0xffff));
3274 g_assert_not_reached ();
3277 if (ppc_is_imm16 (ins->inst_imm)) {
3278 ppc_cmpi (code, 0, L, ins->sreg1, (ins->inst_imm & 0xffff));
3280 g_assert_not_reached ();
3289 ppc_addco (code, ins->dreg, ins->sreg1, ins->sreg2);
3292 CASE_PPC64 (OP_LADD)
3293 ppc_add (code, ins->dreg, ins->sreg1, ins->sreg2);
3297 ppc_adde (code, ins->dreg, ins->sreg1, ins->sreg2);
3300 if (ppc_is_imm16 (ins->inst_imm)) {
3301 ppc_addic (code, ins->dreg, ins->sreg1, ins->inst_imm);
3303 g_assert_not_reached ();
3308 CASE_PPC64 (OP_LADD_IMM)
3309 if (ppc_is_imm16 (ins->inst_imm)) {
3310 ppc_addi (code, ins->dreg, ins->sreg1, ins->inst_imm);
3312 g_assert_not_reached ();
3316 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3318 ppc_addo (code, ins->dreg, ins->sreg1, ins->sreg2);
3319 ppc_mfspr (code, ppc_r0, ppc_xer);
3320 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3321 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3323 case OP_IADD_OVF_UN:
3324 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3326 ppc_addco (code, ins->dreg, ins->sreg1, ins->sreg2);
3327 ppc_mfspr (code, ppc_r0, ppc_xer);
3328 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3329 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3332 CASE_PPC64 (OP_LSUB_OVF)
3333 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3335 ppc_subfo (code, ins->dreg, ins->sreg2, ins->sreg1);
3336 ppc_mfspr (code, ppc_r0, ppc_xer);
3337 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3338 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3340 case OP_ISUB_OVF_UN:
3341 CASE_PPC64 (OP_LSUB_OVF_UN)
3342 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3344 ppc_subfc (code, ins->dreg, ins->sreg2, ins->sreg1);
3345 ppc_mfspr (code, ppc_r0, ppc_xer);
3346 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3347 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE, PPC_BR_EQ, "OverflowException");
3349 case OP_ADD_OVF_CARRY:
3350 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3352 ppc_addeo (code, ins->dreg, ins->sreg1, ins->sreg2);
3353 ppc_mfspr (code, ppc_r0, ppc_xer);
3354 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3355 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3357 case OP_ADD_OVF_UN_CARRY:
3358 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3360 ppc_addeo (code, ins->dreg, ins->sreg1, ins->sreg2);
3361 ppc_mfspr (code, ppc_r0, ppc_xer);
3362 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3363 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3365 case OP_SUB_OVF_CARRY:
3366 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3368 ppc_subfeo (code, ins->dreg, ins->sreg2, ins->sreg1);
3369 ppc_mfspr (code, ppc_r0, ppc_xer);
3370 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3371 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3373 case OP_SUB_OVF_UN_CARRY:
3374 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3376 ppc_subfeo (code, ins->dreg, ins->sreg2, ins->sreg1);
3377 ppc_mfspr (code, ppc_r0, ppc_xer);
3378 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3379 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE, PPC_BR_EQ, "OverflowException");
3383 ppc_subfco (code, ins->dreg, ins->sreg2, ins->sreg1);
3386 CASE_PPC64 (OP_LSUB)
3387 ppc_subf (code, ins->dreg, ins->sreg2, ins->sreg1);
3391 ppc_subfe (code, ins->dreg, ins->sreg2, ins->sreg1);
3395 CASE_PPC64 (OP_LSUB_IMM)
3396 // we add the negated value
3397 if (ppc_is_imm16 (-ins->inst_imm))
3398 ppc_addi (code, ins->dreg, ins->sreg1, -ins->inst_imm);
3400 g_assert_not_reached ();
3404 g_assert (ppc_is_imm16 (ins->inst_imm));
3405 ppc_subfic (code, ins->dreg, ins->sreg1, ins->inst_imm);
3408 ppc_subfze (code, ins->dreg, ins->sreg1);
3411 CASE_PPC64 (OP_LAND)
3412 /* FIXME: the ppc macros as inconsistent here: put dest as the first arg! */
3413 ppc_and (code, ins->sreg1, ins->dreg, ins->sreg2);
3417 CASE_PPC64 (OP_LAND_IMM)
3418 if (!(ins->inst_imm & 0xffff0000)) {
3419 ppc_andid (code, ins->sreg1, ins->dreg, ins->inst_imm);
3420 } else if (!(ins->inst_imm & 0xffff)) {
3421 ppc_andisd (code, ins->sreg1, ins->dreg, ((guint32)ins->inst_imm >> 16));
3423 g_assert_not_reached ();
3427 CASE_PPC64 (OP_LDIV) {
3428 guint8 *divisor_is_m1;
3429 /* XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3431 ppc_compare_reg_imm (code, 0, ins->sreg2, -1);
3432 divisor_is_m1 = code;
3433 ppc_bc (code, PPC_BR_FALSE | PPC_BR_LIKELY, PPC_BR_EQ, 0);
3434 ppc_lis (code, ppc_r0, 0x8000);
3435 #ifdef __mono_ppc64__
3436 if (ins->opcode == OP_LDIV)
3437 ppc_sldi (code, ppc_r0, ppc_r0, 32);
3439 ppc_compare (code, 0, ins->sreg1, ppc_r0);
3440 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE, PPC_BR_EQ, "ArithmeticException");
3441 ppc_patch (divisor_is_m1, code);
3442 /* XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3444 if (ins->opcode == OP_IDIV)
3445 ppc_divwod (code, ins->dreg, ins->sreg1, ins->sreg2);
3446 #ifdef __mono_ppc64__
3448 ppc_divdod (code, ins->dreg, ins->sreg1, ins->sreg2);
3450 ppc_mfspr (code, ppc_r0, ppc_xer);
3451 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3452 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "DivideByZeroException");
3456 CASE_PPC64 (OP_LDIV_UN)
3457 if (ins->opcode == OP_IDIV_UN)
3458 ppc_divwuod (code, ins->dreg, ins->sreg1, ins->sreg2);
3459 #ifdef __mono_ppc64__
3461 ppc_divduod (code, ins->dreg, ins->sreg1, ins->sreg2);
3463 ppc_mfspr (code, ppc_r0, ppc_xer);
3464 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3465 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "DivideByZeroException");
3471 g_assert_not_reached ();
3474 ppc_or (code, ins->dreg, ins->sreg1, ins->sreg2);
3478 CASE_PPC64 (OP_LOR_IMM)
3479 if (!(ins->inst_imm & 0xffff0000)) {
3480 ppc_ori (code, ins->sreg1, ins->dreg, ins->inst_imm);
3481 } else if (!(ins->inst_imm & 0xffff)) {
3482 ppc_oris (code, ins->dreg, ins->sreg1, ((guint32)(ins->inst_imm) >> 16));
3484 g_assert_not_reached ();
3488 CASE_PPC64 (OP_LXOR)
3489 ppc_xor (code, ins->dreg, ins->sreg1, ins->sreg2);
3493 CASE_PPC64 (OP_LXOR_IMM)
3494 if (!(ins->inst_imm & 0xffff0000)) {
3495 ppc_xori (code, ins->sreg1, ins->dreg, ins->inst_imm);
3496 } else if (!(ins->inst_imm & 0xffff)) {
3497 ppc_xoris (code, ins->sreg1, ins->dreg, ((guint32)(ins->inst_imm) >> 16));
3499 g_assert_not_reached ();
3503 CASE_PPC64 (OP_LSHL)
3504 ppc_shift_left (code, ins->dreg, ins->sreg1, ins->sreg2);
3508 CASE_PPC64 (OP_LSHL_IMM)
3509 ppc_shift_left_imm (code, ins->dreg, ins->sreg1, MASK_SHIFT_IMM (ins->inst_imm));
3512 ppc_sraw (code, ins->dreg, ins->sreg1, ins->sreg2);
3515 ppc_shift_right_arith_imm (code, ins->dreg, ins->sreg1, MASK_SHIFT_IMM (ins->inst_imm));
3518 if (MASK_SHIFT_IMM (ins->inst_imm))
3519 ppc_shift_right_imm (code, ins->dreg, ins->sreg1, MASK_SHIFT_IMM (ins->inst_imm));
3521 ppc_mr (code, ins->dreg, ins->sreg1);
3524 ppc_srw (code, ins->dreg, ins->sreg1, ins->sreg2);
3527 CASE_PPC64 (OP_LNOT)
3528 ppc_not (code, ins->dreg, ins->sreg1);
3531 CASE_PPC64 (OP_LNEG)
3532 ppc_neg (code, ins->dreg, ins->sreg1);
3535 CASE_PPC64 (OP_LMUL)
3536 ppc_multiply (code, ins->dreg, ins->sreg1, ins->sreg2);
3540 CASE_PPC64 (OP_LMUL_IMM)
3541 if (ppc_is_imm16 (ins->inst_imm)) {
3542 ppc_mulli (code, ins->dreg, ins->sreg1, ins->inst_imm);
3544 g_assert_not_reached ();
3548 CASE_PPC64 (OP_LMUL_OVF)
3549 /* we annot use mcrxr, since it's not implemented on some processors
3550 * XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3552 if (ins->opcode == OP_IMUL_OVF)
3553 ppc_mullwo (code, ins->dreg, ins->sreg1, ins->sreg2);
3554 #ifdef __mono_ppc64__
3556 ppc_mulldo (code, ins->dreg, ins->sreg1, ins->sreg2);
3558 ppc_mfspr (code, ppc_r0, ppc_xer);
3559 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3560 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3562 case OP_IMUL_OVF_UN:
3563 CASE_PPC64 (OP_LMUL_OVF_UN)
3564 /* we first multiply to get the high word and compare to 0
3565 * to set the flags, then the result is discarded and then
3566 * we multiply to get the lower * bits result
3568 if (ins->opcode == OP_IMUL_OVF_UN)
3569 ppc_mulhwu (code, ppc_r0, ins->sreg1, ins->sreg2);
3570 #ifdef __mono_ppc64__
3572 ppc_mulhdu (code, ppc_r0, ins->sreg1, ins->sreg2);
3574 ppc_cmpi (code, 0, 0, ppc_r0, 0);
3575 EMIT_COND_SYSTEM_EXCEPTION (CEE_BNE_UN - CEE_BEQ, "OverflowException");
3576 ppc_multiply (code, ins->dreg, ins->sreg1, ins->sreg2);
3579 CASE_PPC64 (OP_I8CONST)
3580 ppc_load (code, ins->dreg, ins->inst_c0);
3583 mono_add_patch_info (cfg, offset, (MonoJumpInfoType)ins->inst_i1, ins->inst_p0);
3584 ppc_load_sequence (code, ins->dreg, 0);
3586 CASE_PPC32 (OP_ICONV_TO_I4)
3587 CASE_PPC32 (OP_ICONV_TO_U4)
3589 ppc_mr (code, ins->dreg, ins->sreg1);
3592 int saved = ins->sreg1;
3593 if (ins->sreg1 == ppc_r3) {
3594 ppc_mr (code, ppc_r0, ins->sreg1);
3597 if (ins->sreg2 != ppc_r3)
3598 ppc_mr (code, ppc_r3, ins->sreg2);
3599 if (saved != ppc_r4)
3600 ppc_mr (code, ppc_r4, saved);
3604 ppc_fmr (code, ins->dreg, ins->sreg1);
3606 case OP_FCONV_TO_R4:
3607 ppc_frsp (code, ins->dreg, ins->sreg1);
3613 * Keep in sync with mono_arch_emit_epilog
3615 g_assert (!cfg->method->save_lmf);
3617 * Note: we can use ppc_r11 here because it is dead anyway:
3618 * we're leaving the method.
3620 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
3621 long ret_offset = cfg->stack_usage + PPC_RET_ADDR_OFFSET;
3622 if (ppc_is_imm16 (ret_offset)) {
3623 ppc_load_reg (code, ppc_r0, ret_offset, cfg->frame_reg);
3625 ppc_load (code, ppc_r11, ret_offset);
3626 ppc_load_reg_indexed (code, ppc_r0, cfg->frame_reg, ppc_r11);
3628 ppc_mtlr (code, ppc_r0);
3631 code = emit_load_volatile_arguments (cfg, code);
3633 if (ppc_is_imm16 (cfg->stack_usage)) {
3634 ppc_addi (code, ppc_r11, cfg->frame_reg, cfg->stack_usage);
3636 ppc_load (code, ppc_r11, cfg->stack_usage);
3637 ppc_add (code, ppc_r11, cfg->frame_reg, ppc_r11);
3639 if (!cfg->method->save_lmf) {
3640 /*for (i = 31; i >= 14; --i) {
3641 if (cfg->used_float_regs & (1 << i)) {
3642 pos += sizeof (double);
3643 ppc_lfd (code, i, -pos, cfg->frame_reg);
3647 for (i = 31; i >= 13; --i) {
3648 if (cfg->used_int_regs & (1 << i)) {
3649 pos += sizeof (gpointer);
3650 ppc_load_reg (code, i, -pos, ppc_r11);
3654 /* FIXME restore from MonoLMF: though this can't happen yet */
3656 ppc_mr (code, ppc_sp, ppc_r11);
3657 mono_add_patch_info (cfg, (guint8*) code - cfg->native_code, MONO_PATCH_INFO_METHOD_JUMP, ins->inst_p0);
3662 /* ensure ins->sreg1 is not NULL */
3663 ppc_load_reg (code, ppc_r0, 0, ins->sreg1);
3666 long cookie_offset = cfg->sig_cookie + cfg->stack_usage;
3667 if (ppc_is_imm16 (cookie_offset)) {
3668 ppc_addi (code, ppc_r0, cfg->frame_reg, cookie_offset);
3670 ppc_load (code, ppc_r0, cookie_offset);
3671 ppc_add (code, ppc_r0, cfg->frame_reg, ppc_r0);
3673 ppc_store_reg (code, ppc_r0, 0, ins->sreg1);
3682 call = (MonoCallInst*)ins;
3683 if (ins->flags & MONO_INST_HAS_METHOD)
3684 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_METHOD, call->method);
3686 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_ABS, call->fptr);
3687 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
3688 ppc_load_func (code, ppc_r0, 0);
3689 ppc_mtlr (code, ppc_r0);
3694 /* FIXME: this should be handled somewhere else in the new jit */
3695 code = emit_move_return_value (cfg, ins, code);
3701 case OP_VOIDCALL_REG:
3703 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
3704 ppc_load_reg (code, ppc_r0, 0, ins->sreg1);
3705 /* FIXME: if we know that this is a method, we
3706 can omit this load */
3707 ppc_load_reg (code, ppc_r2, 8, ins->sreg1);
3708 ppc_mtlr (code, ppc_r0);
3710 ppc_mtlr (code, ins->sreg1);
3713 /* FIXME: this should be handled somewhere else in the new jit */
3714 code = emit_move_return_value (cfg, ins, code);
3716 case OP_FCALL_MEMBASE:
3717 case OP_LCALL_MEMBASE:
3718 case OP_VCALL_MEMBASE:
3719 case OP_VCALL2_MEMBASE:
3720 case OP_VOIDCALL_MEMBASE:
3721 case OP_CALL_MEMBASE:
3722 ppc_load_reg (code, ppc_r0, ins->inst_offset, ins->sreg1);
3723 ppc_mtlr (code, ppc_r0);
3725 /* FIXME: this should be handled somewhere else in the new jit */
3726 code = emit_move_return_value (cfg, ins, code);
3729 guint8 * zero_loop_jump, * zero_loop_start;
3730 /* keep alignment */
3731 int alloca_waste = PPC_STACK_PARAM_OFFSET + cfg->param_area + 31;
3732 int area_offset = alloca_waste;
3734 ppc_addi (code, ppc_r11, ins->sreg1, alloca_waste + 31);
3735 /* FIXME: should be calculated from MONO_ARCH_FRAME_ALIGNMENT */
3736 ppc_clear_right_imm (code, ppc_r11, ppc_r11, 4);
3737 /* use ctr to store the number of words to 0 if needed */
3738 if (ins->flags & MONO_INST_INIT) {
3739 /* we zero 4 bytes at a time:
3740 * we add 7 instead of 3 so that we set the counter to
3741 * at least 1, otherwise the bdnz instruction will make
3742 * it negative and iterate billions of times.
3744 ppc_addi (code, ppc_r0, ins->sreg1, 7);
3745 ppc_shift_right_arith_imm (code, ppc_r0, ppc_r0, 2);
3746 ppc_mtctr (code, ppc_r0);
3748 ppc_load_reg (code, ppc_r0, 0, ppc_sp);
3749 ppc_neg (code, ppc_r11, ppc_r11);
3750 ppc_store_reg_update_indexed (code, ppc_r0, ppc_sp, ppc_r11);
3752 /* FIXME: make this loop work in 8 byte
3753 increments on PPC64 */
3754 if (ins->flags & MONO_INST_INIT) {
3755 /* adjust the dest reg by -4 so we can use stwu */
3756 /* we actually adjust -8 because we let the loop
3759 ppc_addi (code, ins->dreg, ppc_sp, (area_offset - 8));
3760 ppc_li (code, ppc_r11, 0);
3761 zero_loop_start = code;
3762 ppc_stwu (code, ppc_r11, 4, ins->dreg);
3763 zero_loop_jump = code;
3764 ppc_bc (code, PPC_BR_DEC_CTR_NONZERO, 0, 0);
3765 ppc_patch (zero_loop_jump, zero_loop_start);
3767 ppc_addi (code, ins->dreg, ppc_sp, area_offset);
3772 ppc_mr (code, ppc_r3, ins->sreg1);
3773 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
3774 (gpointer)"mono_arch_throw_exception");
3775 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
3776 ppc_load_func (code, ppc_r0, 0);
3777 ppc_mtlr (code, ppc_r0);
3786 ppc_mr (code, ppc_r3, ins->sreg1);
3787 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
3788 (gpointer)"mono_arch_rethrow_exception");
3789 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
/*
 * Fragment of the opcode switch in mono_arch_output_basic_block().
 * NOTE(review): the enclosing function begins before this chunk, and the
 * extraction dropped some original lines (else/break/closing braces).
 * These cases emit native code for exception-handling opcodes.
 */
/* indirect call tail: load target into r0 and move it to the link register */
3790 ppc_load_func (code, ppc_r0, 0);
3791 ppc_mtlr (code, ppc_r0);
/* OP_START_HANDLER: on handler entry, save LR into the spvar stack slot */
3798 case OP_START_HANDLER: {
3799 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
3800 g_assert (spvar->inst_basereg != ppc_sp);
3801 code = emit_reserve_param_area (cfg, code);
3802 ppc_mflr (code, ppc_r0);
/* short offsets use the immediate store form; large ones go through r11 */
3803 if (ppc_is_imm16 (spvar->inst_offset)) {
3804 ppc_store_reg (code, ppc_r0, spvar->inst_offset, spvar->inst_basereg);
3806 ppc_load (code, ppc_r11, spvar->inst_offset);
3807 ppc_store_reg_indexed (code, ppc_r0, ppc_r11, spvar->inst_basereg);
/* OP_ENDFILTER: put the filter result in r3, reload LR from spvar, return */
3811 case OP_ENDFILTER: {
3812 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
3813 g_assert (spvar->inst_basereg != ppc_sp);
3814 code = emit_unreserve_param_area (cfg, code);
3815 if (ins->sreg1 != ppc_r3)
3816 ppc_mr (code, ppc_r3, ins->sreg1);
3817 if (ppc_is_imm16 (spvar->inst_offset)) {
3818 ppc_load_reg (code, ppc_r0, spvar->inst_offset, spvar->inst_basereg);
3820 ppc_load (code, ppc_r11, spvar->inst_offset);
3821 ppc_load_reg_indexed (code, ppc_r0, spvar->inst_basereg, ppc_r11);
3823 ppc_mtlr (code, ppc_r0);
/* OP_ENDFINALLY: reload the saved LR and return to the caller of the finally */
3827 case OP_ENDFINALLY: {
3828 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
3829 g_assert (spvar->inst_basereg != ppc_sp);
3830 code = emit_unreserve_param_area (cfg, code);
3831 ppc_load_reg (code, ppc_r0, spvar->inst_offset, spvar->inst_basereg);
3832 ppc_mtlr (code, ppc_r0);
/* OP_CALL_HANDLER: emit a call patched later to the handler basic block */
3836 case OP_CALL_HANDLER:
3837 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_target_bb);
/* label/branch opcodes: record offset, register a patch for the target */
3841 ins->inst_c0 = code - cfg->native_code;
3844 if (ins->flags & MONO_INST_BRLABEL) {
3845 /*if (ins->inst_i0->inst_c0) {
3847 //x86_jump_code (code, cfg->native_code + ins->inst_i0->inst_c0);
3849 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_LABEL, ins->inst_i0);
3853 /*if (ins->inst_target_bb->native_offset) {
3855 //x86_jump_code (code, cfg->native_code + ins->inst_target_bb->native_offset);
3857 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_target_bb);
/* indirect branch: sreg1 -> CTR, then branch to CTR */
3863 ppc_mtctr (code, ins->sreg1);
3864 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
/*
 * Set-condition opcodes (xCEQ/xCLT/xCGT...): materialize 0/1 in dreg from the
 * condition register, using a 2-instruction skip branch.  Assumes a compare
 * was emitted just before (by an earlier case not visible in this chunk).
 */
3868 CASE_PPC64 (OP_LCEQ)
3869 ppc_li (code, ins->dreg, 0);
3870 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 2);
3871 ppc_li (code, ins->dreg, 1);
3877 CASE_PPC64 (OP_LCLT)
3878 CASE_PPC64 (OP_LCLT_UN)
3879 ppc_li (code, ins->dreg, 1);
3880 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 2);
3881 ppc_li (code, ins->dreg, 0);
3887 CASE_PPC64 (OP_LCGT)
3888 CASE_PPC64 (OP_LCGT_UN)
3889 ppc_li (code, ins->dreg, 1);
3890 ppc_bc (code, PPC_BR_TRUE, PPC_BR_GT, 2);
3891 ppc_li (code, ins->dreg, 0);
/* conditional-exception opcodes: throw ins->inst_p1 when the condition holds;
 * the opcode index relative to OP_COND_EXC_EQ selects the branch condition */
3893 case OP_COND_EXC_EQ:
3894 case OP_COND_EXC_NE_UN:
3895 case OP_COND_EXC_LT:
3896 case OP_COND_EXC_LT_UN:
3897 case OP_COND_EXC_GT:
3898 case OP_COND_EXC_GT_UN:
3899 case OP_COND_EXC_GE:
3900 case OP_COND_EXC_GE_UN:
3901 case OP_COND_EXC_LE:
3902 case OP_COND_EXC_LE_UN:
3903 EMIT_COND_SYSTEM_EXCEPTION (ins->opcode - OP_COND_EXC_EQ, ins->inst_p1);
/* 32-bit (I-prefixed) variants of the same conditional exceptions */
3905 case OP_COND_EXC_IEQ:
3906 case OP_COND_EXC_INE_UN:
3907 case OP_COND_EXC_ILT:
3908 case OP_COND_EXC_ILT_UN:
3909 case OP_COND_EXC_IGT:
3910 case OP_COND_EXC_IGT_UN:
3911 case OP_COND_EXC_IGE:
3912 case OP_COND_EXC_IGE_UN:
3913 case OP_COND_EXC_ILE:
3914 case OP_COND_EXC_ILE_UN:
3915 EMIT_COND_SYSTEM_EXCEPTION (ins->opcode - OP_COND_EXC_IEQ, ins->inst_p1);
/* integer conditional branches, condition selected relative to OP_IBEQ */
3927 EMIT_COND_BRANCH (ins, ins->opcode - OP_IBEQ);
3930 /* floating point opcodes */
3933 g_assert_not_reached ();
/* FP stores: base register is inst_destbasereg (== dreg alias for stores) */
3934 case OP_STORER8_MEMBASE_REG:
3935 if (ppc_is_imm16 (ins->inst_offset)) {
3936 ppc_stfd (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3938 ppc_load (code, ppc_r0, ins->inst_offset);
3939 ppc_stfdx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
/* FP loads: base register is inst_basereg (== sreg1 alias for loads).
 * BUGFIX: the large-offset paths below previously used inst_destbasereg,
 * which for a load aliases dreg (the FP destination register) and is not a
 * valid GPR base; use inst_basereg as the imm16 paths already do. */
3942 case OP_LOADR8_MEMBASE:
3943 if (ppc_is_imm16 (ins->inst_offset)) {
3944 ppc_lfd (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3946 ppc_load (code, ppc_r0, ins->inst_offset);
3947 ppc_lfdx (code, ins->dreg, ins->inst_basereg, ppc_r0);
/* single-precision store: round to single first, then store */
3950 case OP_STORER4_MEMBASE_REG:
3951 ppc_frsp (code, ins->sreg1, ins->sreg1);
3952 if (ppc_is_imm16 (ins->inst_offset)) {
3953 ppc_stfs (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3955 ppc_load (code, ppc_r0, ins->inst_offset);
3956 ppc_stfsx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3959 case OP_LOADR4_MEMBASE:
3960 if (ppc_is_imm16 (ins->inst_offset)) {
3961 ppc_lfs (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3963 ppc_load (code, ppc_r0, ins->inst_offset);
3964 ppc_lfsx (code, ins->dreg, ins->inst_basereg, ppc_r0);
/* register+register addressed FP loads/stores */
3967 case OP_LOADR4_MEMINDEX:
3968 ppc_lfsx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3970 case OP_LOADR8_MEMINDEX:
3971 ppc_lfdx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3973 case OP_STORER4_MEMINDEX:
3974 ppc_frsp (code, ins->sreg1, ins->sreg1);
3975 ppc_stfsx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
3977 case OP_STORER8_MEMINDEX:
3978 ppc_stfdx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
3981 case CEE_CONV_R4: /* FIXME: change precision */
3983 g_assert_not_reached ();
/* float -> small integer conversions: emitted by the shared helper with the
 * destination size in bytes and a signedness flag */
3984 case OP_FCONV_TO_I1:
3985 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 1, TRUE);
3987 case OP_FCONV_TO_U1:
3988 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 1, FALSE);
3990 case OP_FCONV_TO_I2:
3991 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 2, TRUE);
3993 case OP_FCONV_TO_U2:
3994 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 2, FALSE);
3996 case OP_FCONV_TO_I4:
3998 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 4, TRUE);
4000 case OP_FCONV_TO_U4:
4002 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 4, FALSE);
4004 case OP_LCONV_TO_R_UN:
4005 g_assert_not_reached ();
4006 /* Implemented as helper calls */
/* 64-bit -> native-int conversion with overflow check.  On 32-bit the value
 * is a register pair (sreg1 = low word, sreg2 = high word): valid iff the
 * high word is 0 (positive) or -1 (negative); otherwise raise overflow. */
4008 case OP_LCONV_TO_OVF_I4_2:
4009 case OP_LCONV_TO_OVF_I: {
4010 #ifdef __mono_ppc64__
4013 guint8 *negative_branch, *msword_positive_branch, *msword_negative_branch, *ovf_ex_target;
4014 // Check if its negative
4015 ppc_cmpi (code, 0, 0, ins->sreg1, 0);
4016 negative_branch = code;
4017 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 0);
4018 // Its positive msword == 0
4019 ppc_cmpi (code, 0, 0, ins->sreg2, 0);
4020 msword_positive_branch = code;
4021 ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
4023 ovf_ex_target = code;
4024 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_ALWAYS, 0, "OverflowException");
4026 ppc_patch (negative_branch, code);
4027 ppc_cmpi (code, 0, 0, ins->sreg2, -1);
4028 msword_negative_branch = code;
4029 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4030 ppc_patch (msword_negative_branch, ovf_ex_target);
4032 ppc_patch (msword_positive_branch, code);
/* in range: the low word is the result */
4033 if (ins->dreg != ins->sreg1)
4034 ppc_mr (code, ins->dreg, ins->sreg1);
/* FP arithmetic: dreg = sreg1 OP sreg2 (or unary on sreg1) */
4039 ppc_fsqrtd (code, ins->dreg, ins->sreg1);
4042 ppc_fadd (code, ins->dreg, ins->sreg1, ins->sreg2);
4045 ppc_fsub (code, ins->dreg, ins->sreg1, ins->sreg2);
4048 ppc_fmul (code, ins->dreg, ins->sreg1, ins->sreg2);
4051 ppc_fdiv (code, ins->dreg, ins->sreg1, ins->sreg2);
4054 ppc_fneg (code, ins->dreg, ins->sreg1);
4058 g_assert_not_reached ();
/* FP compare: fcmpu = unordered-tolerant, fcmpo = ordered compare into cr0 */
4061 ppc_fcmpu (code, 0, ins->sreg1, ins->sreg2);
/* FP set-condition: materialize 0/1 from cr0 with a 2/3-instruction skip;
 * the extra PPC_BR_SO skip in the _UN variants yields 1 on unordered (NaN) */
4064 ppc_fcmpo (code, 0, ins->sreg1, ins->sreg2);
4065 ppc_li (code, ins->dreg, 0);
4066 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 2);
4067 ppc_li (code, ins->dreg, 1);
4070 ppc_fcmpo (code, 0, ins->sreg1, ins->sreg2);
4071 ppc_li (code, ins->dreg, 1);
4072 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 2);
4073 ppc_li (code, ins->dreg, 0);
4076 ppc_fcmpu (code, 0, ins->sreg1, ins->sreg2);
4077 ppc_li (code, ins->dreg, 1);
4078 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 3);
4079 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 2);
4080 ppc_li (code, ins->dreg, 0);
4083 ppc_fcmpo (code, 0, ins->sreg1, ins->sreg2);
4084 ppc_li (code, ins->dreg, 1);
4085 ppc_bc (code, PPC_BR_TRUE, PPC_BR_GT, 2);
4086 ppc_li (code, ins->dreg, 0);
4089 ppc_fcmpu (code, 0, ins->sreg1, ins->sreg2);
4090 ppc_li (code, ins->dreg, 1);
4091 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 3);
4092 ppc_bc (code, PPC_BR_TRUE, PPC_BR_GT, 2);
4093 ppc_li (code, ins->dreg, 0);
/* FP conditional branches; ordered variants first skip the branch when the
 * summary-overflow (unordered/NaN) bit is set */
4096 EMIT_COND_BRANCH (ins, CEE_BEQ - CEE_BEQ);
4099 EMIT_COND_BRANCH (ins, CEE_BNE_UN - CEE_BEQ);
4102 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4103 EMIT_COND_BRANCH (ins, CEE_BLT - CEE_BEQ);
4106 EMIT_COND_BRANCH_FLAGS (ins, PPC_BR_TRUE, PPC_BR_SO);
4107 EMIT_COND_BRANCH (ins, CEE_BLT_UN - CEE_BEQ);
4110 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4111 EMIT_COND_BRANCH (ins, CEE_BGT - CEE_BEQ);
4114 EMIT_COND_BRANCH_FLAGS (ins, PPC_BR_TRUE, PPC_BR_SO);
4115 EMIT_COND_BRANCH (ins, CEE_BGT_UN - CEE_BEQ);
4118 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4119 EMIT_COND_BRANCH (ins, CEE_BGE - CEE_BEQ);
4122 EMIT_COND_BRANCH (ins, CEE_BGE_UN - CEE_BEQ);
4125 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4126 EMIT_COND_BRANCH (ins, CEE_BLE - CEE_BEQ);
4129 EMIT_COND_BRANCH (ins, CEE_BLE_UN - CEE_BEQ);
4132 g_assert_not_reached ();
/* OP_CHECK_FINITE: mask the sign bit, test the biased exponent field and
 * raise ArithmeticException on NaN/Inf (operates on the raw high word) */
4133 case OP_CHECK_FINITE: {
4134 ppc_rlwinm (code, ins->sreg1, ins->sreg1, 0, 1, 31);
4135 ppc_addis (code, ins->sreg1, ins->sreg1, -32752);
4136 ppc_rlwinmd (code, ins->sreg1, ins->sreg1, 1, 31, 31);
4137 EMIT_COND_SYSTEM_EXCEPTION (CEE_BEQ - CEE_BEQ, "ArithmeticException");
/* OP_AOTCONST: register a patch and emit a placeholder full-width load
 * sequence (0x0f0f... pattern) that mono_arch_patch_code rewrites later */
4140 mono_add_patch_info (cfg, offset, (MonoJumpInfoType)ins->inst_i1, ins->inst_p0);
4141 #ifdef __mono_ppc64__
4142 ppc_load_sequence (code, ins->dreg, (gulong)0x0f0f0f0f0f0f0f0fL);
4144 ppc_load_sequence (code, ins->dreg, (gulong)0x0f0f0f0fL);
/* ppc64-only conversions: sign-extend / zero-extend the low 32 bits */
4149 #ifdef __mono_ppc64__
4150 case OP_ICONV_TO_I4:
4152 ppc_extsw (code, ins->dreg, ins->sreg1);
4154 case OP_ICONV_TO_U4:
4156 ppc_clrldi (code, ins->dreg, ins->sreg1, 32);
/* int/long -> float via fcfid: spill the integer to the red zone (-8(r1)),
 * reload as FP, convert, and round to single precision for the R4 cases */
4158 case OP_ICONV_TO_R4:
4159 case OP_ICONV_TO_R8:
4160 case OP_LCONV_TO_R4:
4161 case OP_LCONV_TO_R8: {
4163 if (ins->opcode == OP_ICONV_TO_R4 || ins->opcode == OP_ICONV_TO_R8) {
4164 ppc_extsw (code, ppc_r0, ins->sreg1);
4169 ppc_store_reg (code, tmp, -8, ppc_r1);
4170 ppc_lfd (code, ins->dreg, -8, ppc_r1);
4171 ppc_fcfid (code, ins->dreg, ins->dreg);
4172 if (ins->opcode == OP_ICONV_TO_R4 || ins->opcode == OP_LCONV_TO_R4)
4173 ppc_frsp (code, ins->dreg, ins->dreg);
/* 64-bit shifts by register */
4177 ppc_srad (code, ins->dreg, ins->sreg1, ins->sreg2);
4180 ppc_srd (code, ins->dreg, ins->sreg1, ins->sreg2);
4183 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
4185 ppc_mfspr (code, ppc_r0, ppc_xer);
4186 ppc_andisd (code, ppc_r0, ppc_r0, (1 << 13)); /* CA */
4187 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, ins->inst_p1);
/* OP_COND_EXC_OV: same scheme, testing the XER overflow bit */
4189 case OP_COND_EXC_OV:
4190 ppc_mfspr (code, ppc_r0, ppc_xer);
4191 ppc_andisd (code, ppc_r0, ppc_r0, (1 << 14)); /* OV */
4192 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, ins->inst_p1);
/* 64-bit conditional branches, indexed from OP_LBEQ */
4204 EMIT_COND_BRANCH (ins, ins->opcode - OP_LBEQ);
/* float -> 64-bit integer conversions via the shared helper */
4206 case OP_FCONV_TO_I8:
4207 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 8, TRUE);
4209 case OP_FCONV_TO_U8:
4210 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 8, FALSE);
/* 32-bit stores (ppc64 build: OP_STOREI4_* are explicit stw) */
4212 case OP_STOREI4_MEMBASE_REG:
4213 if (ppc_is_imm16 (ins->inst_offset)) {
4214 ppc_stw (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
4216 ppc_load (code, ppc_r0, ins->inst_offset);
4217 ppc_stwx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
4220 case OP_STOREI4_MEMINDEX:
4221 ppc_stwx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
/* immediate 32-bit shifts; shift amount masked to 5 bits */
4224 ppc_srawi (code, ins->dreg, ins->sreg1, (ins->inst_imm & 0x1f));
4226 case OP_ISHR_UN_IMM:
4227 if (ins->inst_imm & 0x1f)
4228 ppc_srwi (code, ins->dreg, ins->sreg1, (ins->inst_imm & 0x1f));
4230 ppc_mr (code, ins->dreg, ins->sreg1);
/* atomic add: lwarx/ldarx + add + stwcx./stdcx. retry loop (load-reserve /
 * store-conditional); loops back to `loop` while the reservation is lost.
 * NOTE(review): `branch` is assigned on a line dropped by this extraction
 * (presumably `branch = code;` just before the ppc_bc at 4246). */
4232 case OP_ATOMIC_ADD_NEW_I4:
4233 case OP_ATOMIC_ADD_NEW_I8: {
4234 guint8 *loop = code, *branch;
4235 g_assert (ins->inst_offset == 0);
4236 if (ins->opcode == OP_ATOMIC_ADD_NEW_I4)
4237 ppc_lwarx (code, ppc_r0, 0, ins->inst_basereg);
4239 ppc_ldarx (code, ppc_r0, 0, ins->inst_basereg);
4240 ppc_add (code, ppc_r0, ppc_r0, ins->sreg2);
4241 if (ins->opcode == OP_ATOMIC_ADD_NEW_I4)
4242 ppc_stwcxd (code, ppc_r0, 0, ins->inst_basereg);
4244 ppc_stdcxd (code, ppc_r0, 0, ins->inst_basereg);
4246 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4247 ppc_patch (branch, loop);
4248 ppc_mr (code, ins->dreg, ppc_r0);
/* atomic compare-and-swap with the same reservation scheme; `start` and
 * `not_equal` are assigned on lines dropped by the extraction */
4252 case OP_ATOMIC_CAS_I4:
4253 CASE_PPC64 (OP_ATOMIC_CAS_I8) {
4254 int location = ins->sreg1;
4255 int value = ins->sreg2;
4256 int comparand = ins->sreg3;
4257 guint8 *start, *not_equal, *lost_reservation;
4260 if (ins->opcode == OP_ATOMIC_CAS_I4)
4261 ppc_lwarx (code, ppc_r0, 0, location);
4262 #ifdef __mono_ppc64__
4264 ppc_ldarx (code, ppc_r0, 0, location);
4266 ppc_cmp (code, 0, ins->opcode == OP_ATOMIC_CAS_I4 ? 0 : 1, ppc_r0, comparand);
4269 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4270 if (ins->opcode == OP_ATOMIC_CAS_I4)
4271 ppc_stwcxd (code, value, 0, location);
4272 #ifdef __mono_ppc64__
4274 ppc_stdcxd (code, value, 0, location);
4277 lost_reservation = code;
4278 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4279 ppc_patch (lost_reservation, start);
4281 ppc_patch (not_equal, code);
/* CAS result: dreg gets the value read (old value) */
4282 ppc_mr (code, ins->dreg, ppc_r0);
/* default: unknown opcode is a hard failure */
4287 g_warning ("unknown opcode %s in %s()\n", mono_inst_name (ins->opcode), __FUNCTION__);
4288 g_assert_not_reached ();
/* per-instruction sanity check: emitted bytes must not exceed the declared
 * maximum native length for the opcode */
4291 if ((cfg->opt & MONO_OPT_BRANCH) && ((code - cfg->native_code - offset) > max_len)) {
4292 g_warning ("wrong maximal instruction length of instruction %s (expected %d, got %ld)",
4293 mono_inst_name (ins->opcode), max_len, (glong)(code - cfg->native_code - offset));
4294 g_assert_not_reached ();
4300 last_offset = offset;
/* end of the instruction loop: record the code length generated so far */
4303 cfg->code_len = code - cfg->native_code;
/* No low-level calls to register on PPC (empty body not visible in chunk). */
4307 mono_arch_register_lowlevel_calls (void)
/*
 * patch_load_sequence(ip, val): rewrite the immediate halfwords of a
 * previously emitted ppc_load_sequence at `ip` with the halves of `val`.
 * 64-bit: four 16-bit immediates at halfword indices 1/3/7/9 of the
 * multi-instruction load sequence; 32-bit: the lis/ori immediate pair
 * at halfword indices 1/3.  (The #else/#endif and `do/while(0)` framing
 * lines were dropped by this extraction.)
 */
4311 #ifdef __mono_ppc64__
4312 #define patch_load_sequence(ip,val) do {\
4313 guint16 *__load = (guint16*)(ip); \
4314 __load [1] = (((guint64)(val)) >> 48) & 0xffff; \
4315 __load [3] = (((guint64)(val)) >> 32) & 0xffff; \
4316 __load [7] = (((guint64)(val)) >> 16) & 0xffff; \
4317 __load [9] = ((guint64)(val)) & 0xffff; \
4320 #define patch_load_sequence(ip,val) do {\
4321 guint16 *__lis_ori = (guint16*)(ip); \
4322 __lis_ori [1] = (((gulong)(val)) >> 16) & 0xffff; \
4323 __lis_ori [3] = ((gulong)(val)) & 0xffff; \
/*
 * mono_arch_patch_code:
 * Walk the jump-info list of a compiled method and resolve every patch
 * site: immediate-load sequences get their halfwords rewritten via
 * patch_load_sequence(), branch/call sites go through ppc_patch_full().
 */
4328 mono_arch_patch_code (MonoMethod *method, MonoDomain *domain, guint8 *code, MonoJumpInfo *ji, gboolean run_cctors)
4330 MonoJumpInfo *patch_info;
4332 for (patch_info = ji; patch_info; patch_info = patch_info->next) {
4333 unsigned char *ip = patch_info->ip.i + code;
4334 unsigned char *target;
4335 gboolean is_fd = FALSE;
4337 target = mono_resolve_patch_target (method, domain, code, patch_info, run_cctors);
4339 switch (patch_info->type) {
/* patch the IP constant with the patch site's own address */
4340 case MONO_PATCH_INFO_IP:
4341 patch_load_sequence (ip, ip);
4343 case MONO_PATCH_INFO_METHOD_REL:
4344 g_assert_not_reached ();
4345 *((gpointer *)(ip)) = code + patch_info->data.offset;
/* switch: point the load at the jump table, then fill the table with
 * absolute target addresses */
4347 case MONO_PATCH_INFO_SWITCH: {
4348 gpointer *table = (gpointer *)patch_info->data.table->table;
4351 patch_load_sequence (ip, table);
4353 for (i = 0; i < patch_info->data.table->table_size; i++) {
4354 table [i] = (glong)patch_info->data.table->table [i] + code;
4356 /* we put into the table the absolute address, no need for ppc_patch in this case */
/* constant-pointer patches: rewrite the emitted load sequence in place */
4359 case MONO_PATCH_INFO_METHODCONST:
4360 case MONO_PATCH_INFO_CLASS:
4361 case MONO_PATCH_INFO_IMAGE:
4362 case MONO_PATCH_INFO_FIELD:
4363 case MONO_PATCH_INFO_VTABLE:
4364 case MONO_PATCH_INFO_IID:
4365 case MONO_PATCH_INFO_SFLDA:
4366 case MONO_PATCH_INFO_LDSTR:
4367 case MONO_PATCH_INFO_TYPE_FROM_HANDLE:
4368 case MONO_PATCH_INFO_LDTOKEN:
4369 /* from OP_AOTCONST : lis + ori */
4370 patch_load_sequence (ip, target);
/* unused on this backend (asserted unreachable) */
4372 case MONO_PATCH_INFO_R4:
4373 case MONO_PATCH_INFO_R8:
4374 g_assert_not_reached ();
4375 *((gconstpointer *)(ip + 2)) = patch_info->data.target;
4377 case MONO_PATCH_INFO_EXC_NAME:
4378 g_assert_not_reached ();
4379 *((gconstpointer *)(ip + 1)) = patch_info->data.name;
4381 case MONO_PATCH_INFO_NONE:
4382 case MONO_PATCH_INFO_BB_OVF:
4383 case MONO_PATCH_INFO_EXC_OVF:
4384 /* everything is dealt with at epilog output time */
/* function-descriptor ABIs: calls to these targets go through a
 * descriptor, signalled to ppc_patch_full via is_fd (set on dropped lines) */
4386 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
4387 case MONO_PATCH_INFO_INTERNAL_METHOD:
4388 case MONO_PATCH_INFO_ABS:
4389 case MONO_PATCH_INFO_CLASS_INIT:
4390 case MONO_PATCH_INFO_RGCTX_FETCH:
4397 ppc_patch_full (ip, target, is_fd);
4402 * Emit code to save the registers in used_int_regs or the registers in the MonoLMF
4403 * structure at positive offset pos from register base_reg. pos is guaranteed to fit into
4404 * the instruction offset immediate for all the registers.
/* Returns the updated code pointer (return statement outside this chunk). */
4407 save_registers (guint8* code, int pos, int base_reg, gboolean save_lmf, guint32 used_int_regs)
/* non-LMF path: store only the callee-saved GPRs the method actually uses */
4411 for (i = 13; i <= 31; i++) {
4412 if (used_int_regs & (1 << i)) {
4413 ppc_store_reg (code, i, pos, base_reg);
4414 pos += sizeof (gulong);
4418 /* pos is the start of the MonoLMF structure */
/* LMF path: store all callee-saved GPRs (r13-r31) into MonoLMF.iregs ... */
4419 int offset = pos + G_STRUCT_OFFSET (MonoLMF, iregs);
4420 for (i = 13; i <= 31; i++) {
4421 ppc_store_reg (code, i, offset, base_reg);
4422 offset += sizeof (gulong);
/* ... and all callee-saved FPRs (f14-f31) into MonoLMF.fregs */
4424 offset = pos + G_STRUCT_OFFSET (MonoLMF, fregs);
4425 for (i = 14; i < 32; i++) {
4426 ppc_stfd (code, i, offset, base_reg);
4427 offset += sizeof (gdouble);
4434 * Stack frame layout:
4436 * ------------------- sp
4437 * MonoLMF structure or saved registers
4438 * -------------------
4440 * -------------------
4442 * -------------------
4443 * optional 8 bytes for tracing
4444 * -------------------
4445 * param area size is cfg->param_area
4446 * -------------------
4447 * linkage area size is PPC_STACK_PARAM_OFFSET
4448 * ------------------- sp
/*
 * mono_arch_emit_prolog:
 * Emit the method prologue: save LR, allocate and align the stack frame,
 * save callee-saved registers (or the full MonoLMF), set up the frame
 * register and rgctx slot, then (in the later parts of this function)
 * spill incoming arguments and link the LMF.
 */
4452 mono_arch_emit_prolog (MonoCompile *cfg)
4454 MonoMethod *method = cfg->method;
4456 MonoMethodSignature *sig;
4458 long alloc_size, pos, max_offset;
4464 int tailcall_struct_index;
4466 if (mono_jit_trace_calls != NULL && mono_trace_eval (method))
/* initial native-code buffer, sized from the signature */
4469 sig = mono_method_signature (method);
4470 cfg->code_size = MONO_PPC_32_64_CASE (260, 384) + sig->param_count * 20;
4471 code = cfg->native_code = g_malloc (cfg->code_size);
/* always save LR in the caller's frame (the `1 ||` forces it on) */
4473 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
4474 ppc_mflr (code, ppc_r0);
4475 ppc_store_reg (code, ppc_r0, PPC_RET_ADDR_OFFSET, ppc_sp);
/* compute the register-save area size: either per-used-register or a
 * whole MonoLMF when save_lmf is set */
4478 alloc_size = cfg->stack_offset;
4481 if (!method->save_lmf) {
4482 for (i = 31; i >= 13; --i) {
4483 if (cfg->used_int_regs & (1 << i)) {
4484 pos += sizeof (gulong);
4488 pos += sizeof (MonoLMF);
4492 // align to MONO_ARCH_FRAME_ALIGNMENT bytes
4493 if (alloc_size & (MONO_ARCH_FRAME_ALIGNMENT - 1)) {
4494 alloc_size += MONO_ARCH_FRAME_ALIGNMENT - 1;
4495 alloc_size &= ~(MONO_ARCH_FRAME_ALIGNMENT - 1);
4498 cfg->stack_usage = alloc_size;
4499 g_assert ((alloc_size & (MONO_ARCH_FRAME_ALIGNMENT-1)) == 0);
/* allocate the frame with a back-chain store-with-update; large frames go
 * through r0 and an indexed store-update */
4501 if (ppc_is_imm16 (-alloc_size)) {
4502 ppc_store_reg_update (code, ppc_sp, -alloc_size, ppc_sp);
4503 code = save_registers (code, alloc_size - pos, ppc_sp, method->save_lmf, cfg->used_int_regs);
4506 ppc_addi (code, ppc_r11, ppc_sp, -pos);
4507 ppc_load (code, ppc_r0, -alloc_size);
4508 ppc_store_reg_update_indexed (code, ppc_sp, ppc_sp, ppc_r0);
4509 code = save_registers (code, 0, ppc_r11, method->save_lmf, cfg->used_int_regs);
4512 if (cfg->frame_reg != ppc_sp)
4513 ppc_mr (code, cfg->frame_reg, ppc_sp);
4515 /* store runtime generic context */
4516 if (cfg->rgctx_var) {
4517 g_assert (cfg->rgctx_var->opcode == OP_REGOFFSET &&
4518 (cfg->rgctx_var->inst_basereg == ppc_r1 || cfg->rgctx_var->inst_basereg == ppc_r31));
4520 ppc_store_reg (code, MONO_ARCH_RGCTX_REG, cfg->rgctx_var->inst_offset, cfg->rgctx_var->inst_basereg);
4523 /* compute max_offset in order to use short forward jumps
4524 * we always do it on ppc because the immediate displacement
4525 * for jumps is too small
4528 for (bb = cfg->bb_entry; bb; bb = bb->next_bb) {
4530 bb->max_offset = max_offset;
4532 if (cfg->prof_options & MONO_PROFILE_COVERAGE)
4535 MONO_BB_FOR_EACH_INS (bb, ins)
4536 max_offset += ins_native_length (cfg, ins);
4539 /* load arguments allocated to register from the stack */
/* (part of mono_arch_emit_prolog) spill incoming arguments from their ABI
 * locations (registers or caller frame) into their home stack slots */
4542 cinfo = calculate_sizes (sig, sig->pinvoke);
/* valuetype return: store the hidden return-buffer pointer into vret_addr */
4544 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
4545 ArgInfo *ainfo = &cinfo->ret;
4547 inst = cfg->vret_addr;
4550 if (ppc_is_imm16 (inst->inst_offset)) {
4551 ppc_store_reg (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4553 ppc_load (code, ppc_r11, inst->inst_offset);
4554 ppc_store_reg_indexed (code, ainfo->reg, ppc_r11, inst->inst_basereg);
4558 tailcall_struct_index = 0;
4559 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
4560 ArgInfo *ainfo = cinfo->args + i;
4561 inst = cfg->args [pos];
4563 if (cfg->verbose_level > 2)
4564 g_print ("Saving argument %d (type: %d)\n", i, ainfo->regtype);
/* argument allocated to a register: just move it into place */
4565 if (inst->opcode == OP_REGVAR) {
4566 if (ainfo->regtype == RegTypeGeneral)
4567 ppc_mr (code, inst->dreg, ainfo->reg);
4568 else if (ainfo->regtype == RegTypeFP)
4569 ppc_fmr (code, inst->dreg, ainfo->reg);
4570 else if (ainfo->regtype == RegTypeBase) {
/* argument lives in the caller frame: follow the back chain at 0(sp) */
4571 ppc_load_reg (code, ppc_r11, 0, ppc_sp);
4572 ppc_load_reg (code, inst->dreg, ainfo->offset, ppc_r11);
4574 g_assert_not_reached ();
4576 if (cfg->verbose_level > 2)
4577 g_print ("Argument %ld assigned to register %s\n", pos, mono_arch_regname (inst->dreg));
4579 /* the argument should be put on the stack: FIXME handle size != word */
/* incoming GPR argument stored to its stack slot, sized store by ainfo->size */
4580 if (ainfo->regtype == RegTypeGeneral) {
4581 switch (ainfo->size) {
4583 if (ppc_is_imm16 (inst->inst_offset)) {
4584 ppc_stb (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4586 ppc_load (code, ppc_r11, inst->inst_offset);
4587 ppc_stbx (code, ainfo->reg, ppc_r11, inst->inst_basereg);
4591 if (ppc_is_imm16 (inst->inst_offset)) {
4592 ppc_sth (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4594 ppc_load (code, ppc_r11, inst->inst_offset);
4595 ppc_sthx (code, ainfo->reg, ppc_r11, inst->inst_basereg);
4598 #ifdef __mono_ppc64__
4600 if (ppc_is_imm16 (inst->inst_offset)) {
4601 ppc_stw (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4603 ppc_load (code, ppc_r11, inst->inst_offset);
4604 ppc_stwx (code, ainfo->reg, ppc_r11, inst->inst_basereg);
/* 32-bit: 8-byte argument arrives in a register pair (reg, reg+1) */
4609 if (ppc_is_imm16 (inst->inst_offset + 4)) {
4610 ppc_stw (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4611 ppc_stw (code, ainfo->reg + 1, inst->inst_offset + 4, inst->inst_basereg);
4613 ppc_load (code, ppc_r11, inst->inst_offset);
4614 ppc_add (code, ppc_r11, ppc_r11, inst->inst_basereg);
4615 ppc_stw (code, ainfo->reg, 0, ppc_r11);
4616 ppc_stw (code, ainfo->reg + 1, 4, ppc_r11);
4621 if (ppc_is_imm16 (inst->inst_offset)) {
4622 ppc_store_reg (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4624 ppc_load (code, ppc_r11, inst->inst_offset);
4625 ppc_store_reg_indexed (code, ainfo->reg, ppc_r11, inst->inst_basereg);
/* argument passed on the caller's stack: copy from caller frame (via the
 * back chain in r11) to the local slot */
4629 } else if (ainfo->regtype == RegTypeBase) {
4630 /* load the previous stack pointer in r11 */
4631 ppc_load_reg (code, ppc_r11, 0, ppc_sp);
4632 ppc_load_reg (code, ppc_r0, ainfo->offset, ppc_r11);
4633 switch (ainfo->size) {
4635 if (ppc_is_imm16 (inst->inst_offset)) {
4636 ppc_stb (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4638 ppc_load (code, ppc_r11, inst->inst_offset);
4639 ppc_stbx (code, ppc_r0, ppc_r11, inst->inst_basereg);
4643 if (ppc_is_imm16 (inst->inst_offset)) {
4644 ppc_sth (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4646 ppc_load (code, ppc_r11, inst->inst_offset);
4647 ppc_sthx (code, ppc_r0, ppc_r11, inst->inst_basereg);
4650 #ifdef __mono_ppc64__
4652 if (ppc_is_imm16 (inst->inst_offset)) {
4653 ppc_stw (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4655 ppc_load (code, ppc_r11, inst->inst_offset);
4656 ppc_stwx (code, ppc_r0, ppc_r11, inst->inst_basereg);
/* 32-bit: copy both words of an 8-byte stack argument */
4661 if (ppc_is_imm16 (inst->inst_offset + 4)) {
4662 ppc_stw (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4663 ppc_lwz (code, ppc_r0, ainfo->offset + 4, ppc_r11);
4664 ppc_stw (code, ppc_r0, inst->inst_offset + 4, inst->inst_basereg);
4667 g_assert_not_reached ();
4672 if (ppc_is_imm16 (inst->inst_offset)) {
4673 ppc_store_reg (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4675 ppc_load (code, ppc_r11, inst->inst_offset);
4676 ppc_store_reg_indexed (code, ppc_r0, ppc_r11, inst->inst_basereg);
/* FP argument: store as double or single according to its declared size */
4680 } else if (ainfo->regtype == RegTypeFP) {
4681 g_assert (ppc_is_imm16 (inst->inst_offset));
4682 if (ainfo->size == 8)
4683 ppc_stfd (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4684 else if (ainfo->size == 4)
4685 ppc_stfs (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4687 g_assert_not_reached ();
/* struct passed by value in consecutive GPRs: store each register to the
 * local slot; remainder (vtsize) is memcpy'd from the caller frame */
4688 } else if (ainfo->regtype == RegTypeStructByVal) {
4689 int doffset = inst->inst_offset;
4693 g_assert (ppc_is_imm16 (inst->inst_offset));
4694 g_assert (ppc_is_imm16 (inst->inst_offset + ainfo->size * sizeof (gpointer)));
4695 /* FIXME: what if there is no class? */
4696 if (sig->pinvoke && mono_class_from_mono_type (inst->inst_vtype))
4697 size = mono_class_native_size (mono_class_from_mono_type (inst->inst_vtype), NULL);
4698 for (cur_reg = 0; cur_reg < ainfo->size; ++cur_reg) {
4701 * Darwin handles 1 and 2 byte
4702 * structs specially by
4703 * loading h/b into the arg
4704 * register. Only done for
4708 ppc_sth (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
4710 ppc_stb (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
4714 #ifdef __mono_ppc64__
/* ppc64: small structs arrive left-justified in the register; shift into
 * memory position before storing */
4716 g_assert (cur_reg == 0);
4717 ppc_sldi (code, ppc_r0, ainfo->reg,
4718 (sizeof (gpointer) - ainfo->bytes) * 8);
4719 ppc_store_reg (code, ppc_r0, doffset, inst->inst_basereg);
4723 ppc_store_reg (code, ainfo->reg + cur_reg, doffset,
4724 inst->inst_basereg);
4727 soffset += sizeof (gpointer);
4728 doffset += sizeof (gpointer);
4730 if (ainfo->vtsize) {
4731 /* FIXME: we need to do the shifting here, too */
4734 /* load the previous stack pointer in r11 (r0 gets overwritten by the memcpy) */
4735 ppc_load_reg (code, ppc_r11, 0, ppc_sp);
4736 if ((size & MONO_PPC_32_64_CASE (3, 7)) != 0) {
4737 code = emit_memcpy (code, size - soffset,
4738 inst->inst_basereg, doffset,
4739 ppc_r11, ainfo->offset + soffset);
4741 code = emit_memcpy (code, ainfo->vtsize * sizeof (gpointer),
4742 inst->inst_basereg, doffset,
4743 ppc_r11, ainfo->offset + soffset);
/* struct passed by reference: get the struct address into r11, record it
 * for tailcalls if needed, then copy the data into the local slot */
4746 } else if (ainfo->regtype == RegTypeStructByAddr) {
4747 /* if it was originally a RegTypeBase */
4748 if (ainfo->offset) {
4749 /* load the previous stack pointer in r11 */
4750 ppc_load_reg (code, ppc_r11, 0, ppc_sp);
4751 ppc_load_reg (code, ppc_r11, ainfo->offset, ppc_r11);
4753 ppc_mr (code, ppc_r11, ainfo->reg);
4756 if (cfg->tailcall_valuetype_addrs) {
4757 MonoInst *addr = cfg->tailcall_valuetype_addrs [tailcall_struct_index];
4759 g_assert (ppc_is_imm16 (addr->inst_offset));
4760 ppc_store_reg (code, ppc_r11, addr->inst_offset, addr->inst_basereg);
4762 tailcall_struct_index++;
4765 g_assert (ppc_is_imm16 (inst->inst_offset));
4766 code = emit_memcpy (code, ainfo->vtsize, inst->inst_basereg, inst->inst_offset, ppc_r11, 0);
4767 /*g_print ("copy in %s: %d bytes from %d to offset: %d\n", method->name, ainfo->vtsize, ainfo->reg, inst->inst_offset);*/
4769 g_assert_not_reached ();
/* (part of mono_arch_emit_prolog) native-to-managed wrappers must attach
 * the calling thread to the runtime before any managed code runs */
4774 if (method->wrapper_type == MONO_WRAPPER_NATIVE_TO_MANAGED) {
4775 ppc_load (code, ppc_r3, cfg->domain);
4776 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD, (gpointer)"mono_jit_thread_attach");
4777 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
4778 ppc_load_func (code, ppc_r0, 0);
4779 ppc_mtlr (code, ppc_r0);
/* get the per-thread LMF address into r3, via TLS when available,
 * otherwise through a call to mono_get_lmf_addr */
4786 if (method->save_lmf) {
4787 if (lmf_pthread_key != -1) {
4788 emit_tls_access (code, ppc_r3, lmf_pthread_key);
4789 if (tls_mode != TLS_MODE_NPTL && G_STRUCT_OFFSET (MonoJitTlsData, lmf))
4790 ppc_addi (code, ppc_r3, ppc_r3, G_STRUCT_OFFSET (MonoJitTlsData, lmf));
4792 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
4793 (gpointer)"mono_get_lmf_addr");
4794 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
4795 ppc_load_func (code, ppc_r0, 0);
4796 ppc_mtlr (code, ppc_r0);
4802 /* we build the MonoLMF structure on the stack - see mini-ppc.h */
4803 /* lmf_offset is the offset from the previous stack pointer,
4804 * alloc_size is the total stack space allocated, so the offset
4805 * of MonoLMF from the current stack ptr is alloc_size - lmf_offset.
4806 * The pointer to the struct is put in ppc_r11 (new_lmf).
4807 * The callee-saved registers are already in the MonoLMF structure
4809 ppc_addi (code, ppc_r11, ppc_sp, alloc_size - lmf_offset);
4810 /* ppc_r3 is the result from mono_get_lmf_addr () */
4811 ppc_store_reg (code, ppc_r3, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
4812 /* new_lmf->previous_lmf = *lmf_addr */
4813 ppc_load_reg (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
4814 ppc_store_reg (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
4815 /* *(lmf_addr) = r11 */
4816 ppc_store_reg (code, ppc_r11, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
4817 /* save method info */
4818 ppc_load (code, ppc_r0, method);
4819 ppc_store_reg (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, method), ppc_r11);
4820 ppc_store_reg (code, ppc_sp, G_STRUCT_OFFSET(MonoLMF, ebp), ppc_r11);
4821 /* save the current IP */
/* placeholder 0x0101... value; rewritten by patch_load_sequence via the
 * MONO_PATCH_INFO_IP entry registered just above */
4822 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_IP, NULL);
4823 #ifdef __mono_ppc64__
4824 ppc_load_sequence (code, ppc_r0, (gulong)0x0101010101010101L);
4826 ppc_load_sequence (code, ppc_r0, (gulong)0x01010101L);
4828 ppc_store_reg (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, eip), ppc_r11);
/* optional enter-method tracing, then record the prologue length */
4832 code = mono_arch_instrument_prolog (cfg, mono_trace_enter_method, code, TRUE);
4834 cfg->code_len = code - cfg->native_code;
4835 g_assert (cfg->code_len <= cfg->code_size);
/*
 * mono_arch_emit_epilog:
 * Emit the method epilogue: unlink the MonoLMF (if used), restore the
 * callee-saved registers and the link register, pop the frame and return.
 */
4842 mono_arch_emit_epilog (MonoCompile *cfg)
4844 MonoMethod *method = cfg->method;
4846 int max_epilog_size = 16 + 20*4;
/* size the epilogue conservatively and grow the buffer if needed */
4849 if (cfg->method->save_lmf)
4850 max_epilog_size += 128;
4852 if (mono_jit_trace_calls != NULL)
4853 max_epilog_size += 50;
4855 if (cfg->prof_options & MONO_PROFILE_ENTER_LEAVE)
4856 max_epilog_size += 50;
4858 while (cfg->code_len + max_epilog_size > (cfg->code_size - 16)) {
4859 cfg->code_size *= 2;
4860 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
4861 mono_jit_stats.code_reallocs++;
4865 * Keep in sync with OP_JMP
4867 code = cfg->native_code + cfg->code_len;
4869 if (mono_jit_trace_calls != NULL && mono_trace_eval (method)) {
4870 code = mono_arch_instrument_epilog (cfg, mono_trace_leave_method, code, TRUE);
/* LMF path: pop this frame's MonoLMF off the per-thread LMF chain and
 * restore the callee-saved registers from it */
4874 if (method->save_lmf) {
4876 pos += sizeof (MonoLMF);
4878 /* save the frame reg in r8 */
4879 ppc_mr (code, ppc_r8, cfg->frame_reg);
4880 ppc_addi (code, ppc_r11, cfg->frame_reg, cfg->stack_usage - lmf_offset);
4881 /* r5 = previous_lmf */
4882 ppc_load_reg (code, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
4884 ppc_load_reg (code, ppc_r6, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
4885 /* *(lmf_addr) = previous_lmf */
4886 ppc_store_reg (code, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r6);
4887 /* FIXME: speedup: there is no actual need to restore the registers if
4888 * we didn't actually change them (idea from Zoltan).
4891 ppc_load_multiple_regs (code, ppc_r13, G_STRUCT_OFFSET(MonoLMF, iregs), ppc_r11);
4893 /*for (i = 14; i < 32; i++) {
4894 ppc_lfd (code, i, G_STRUCT_OFFSET(MonoLMF, fregs) + ((i-14) * sizeof (gdouble)), ppc_r11);
4896 g_assert (ppc_is_imm16 (cfg->stack_usage + PPC_RET_ADDR_OFFSET));
4897 /* use the saved copy of the frame reg in r8 */
4898 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
4899 ppc_load_reg (code, ppc_r0, cfg->stack_usage + PPC_RET_ADDR_OFFSET, ppc_r8);
4900 ppc_mtlr (code, ppc_r0);
/* NOTE(review): ppc_addic also updates the XER carry bit; a plain ppc_addi
 * looks sufficient here since CA is dead at the return — verify */
4902 ppc_addic (code, ppc_sp, ppc_r8, cfg->stack_usage);
/* non-LMF path: reload LR from the caller frame slot */
4904 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
4905 long return_offset = cfg->stack_usage + PPC_RET_ADDR_OFFSET;
4906 if (ppc_is_imm16 (return_offset)) {
4907 ppc_load_reg (code, ppc_r0, return_offset, cfg->frame_reg);
4909 ppc_load (code, ppc_r11, return_offset);
4910 ppc_load_reg_indexed (code, ppc_r0, cfg->frame_reg, ppc_r11);
4912 ppc_mtlr (code, ppc_r0);
/* restore callee-saved GPRs from the top of the frame, then pop it */
4914 if (ppc_is_imm16 (cfg->stack_usage)) {
4915 int offset = cfg->stack_usage;
4916 for (i = 13; i <= 31; i++) {
4917 if (cfg->used_int_regs & (1 << i))
4918 offset -= sizeof (gulong);
4920 if (cfg->frame_reg != ppc_sp)
4921 ppc_mr (code, ppc_r11, cfg->frame_reg);
4922 /* note r31 (possibly the frame register) is restored last */
4923 for (i = 13; i <= 31; i++) {
4924 if (cfg->used_int_regs & (1 << i)) {
4925 ppc_load_reg (code, i, offset, cfg->frame_reg);
4926 offset += sizeof (gulong);
4929 if (cfg->frame_reg != ppc_sp)
4930 ppc_addi (code, ppc_sp, ppc_r11, cfg->stack_usage);
4932 ppc_addi (code, ppc_sp, ppc_sp, cfg->stack_usage);
/* large frame: compute the frame top in r11 and restore from there */
4934 ppc_load (code, ppc_r11, cfg->stack_usage);
4935 if (cfg->used_int_regs) {
4936 ppc_add (code, ppc_r11, cfg->frame_reg, ppc_r11);
4937 for (i = 31; i >= 13; --i) {
4938 if (cfg->used_int_regs & (1 << i)) {
4939 pos += sizeof (gulong);
4940 ppc_load_reg (code, i, -pos, ppc_r11);
4943 ppc_mr (code, ppc_sp, ppc_r11);
4945 ppc_add (code, ppc_sp, cfg->frame_reg, ppc_r11);
4952 cfg->code_len = code - cfg->native_code;
4954 g_assert (cfg->code_len < cfg->code_size);
4958 /* remove once throw_exception_by_name is eliminated */
4960 exception_id_by_name (const char *name)
4962 if (strcmp (name, "IndexOutOfRangeException") == 0)
4963 return MONO_EXC_INDEX_OUT_OF_RANGE;
4964 if (strcmp (name, "OverflowException") == 0)
4965 return MONO_EXC_OVERFLOW;
4966 if (strcmp (name, "ArithmeticException") == 0)
4967 return MONO_EXC_ARITHMETIC;
4968 if (strcmp (name, "DivideByZeroException") == 0)
4969 return MONO_EXC_DIVIDE_BY_ZERO;
4970 if (strcmp (name, "InvalidCastException") == 0)
4971 return MONO_EXC_INVALID_CAST;
4972 if (strcmp (name, "NullReferenceException") == 0)
4973 return MONO_EXC_NULL_REF;
4974 if (strcmp (name, "ArrayTypeMismatchException") == 0)
4975 return MONO_EXC_ARRAY_TYPE_MISMATCH;
4976 g_error ("Unknown intrinsic exception %s\n", name);
/*
 * mono_arch_emit_exceptions:
 * Append the out-of-line exception-raising stubs to the method's native
 * code.  First pass sizes the worst-case epilog (growing the code buffer
 * if needed), second pass emits the stubs and back-patches the in-body
 * branches that jump to them.  Identical exceptions share one stub
 * (tracked in exc_throw_pos/exc_throw_found).
 */
4981 mono_arch_emit_exceptions (MonoCompile *cfg)
4983 MonoJumpInfo *patch_info;
4986 const guint8* exc_throw_pos [MONO_EXC_INTRINS_NUM] = {NULL};
4987 guint8 exc_throw_found [MONO_EXC_INTRINS_NUM] = {0};
4988 int max_epilog_size = 50;
4990 /* count the number of exception infos */
4993 * make sure we have enough space for exceptions
4994 * 24 is the simulated call to throw_exception_by_name
/* Sizing pass: each distinct exception costs 24 bytes once; overflow
 * basic-block jumps cost 12, overflow-exception jumps 8 extra. */
4996 for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
4997 if (patch_info->type == MONO_PATCH_INFO_EXC) {
4998 i = exception_id_by_name (patch_info->data.target);
4999 if (!exc_throw_found [i]) {
5000 max_epilog_size += 24;
5001 exc_throw_found [i] = TRUE;
5003 } else if (patch_info->type == MONO_PATCH_INFO_BB_OVF)
5004 max_epilog_size += 12;
5005 else if (patch_info->type == MONO_PATCH_INFO_EXC_OVF) {
5006 MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
5007 i = exception_id_by_name (ovfj->data.exception);
5008 if (!exc_throw_found [i]) {
5009 max_epilog_size += 24;
5010 exc_throw_found [i] = TRUE;
5012 max_epilog_size += 8;
/* Grow the native code buffer until the worst case fits (16-byte slack). */
5016 while (cfg->code_len + max_epilog_size > (cfg->code_size - 16)) {
5017 cfg->code_size *= 2;
5018 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
5019 mono_jit_stats.code_reallocs++;
5022 code = cfg->native_code + cfg->code_len;
5024 /* add code to raise exceptions */
5025 for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
5026 switch (patch_info->type) {
5027 case MONO_PATCH_INFO_BB_OVF: {
/* Out-of-range conditional branch to a basic block: re-emit the
 * condition here with room for a long jump to the real target. */
5028 MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
5029 unsigned char *ip = patch_info->ip.i + cfg->native_code;
5030 /* patch the initial jump */
5031 ppc_patch (ip, code);
5032 ppc_bc (code, ovfj->b0_cond, ovfj->b1_cond, 2);
5034 ppc_patch (code - 4, ip + 4); /* jump back after the initial branch */
5035 /* jump back to the true target */
5037 ip = ovfj->data.bb->native_offset + cfg->native_code;
5038 ppc_patch (code - 4, ip);
5041 case MONO_PATCH_INFO_EXC_OVF: {
5042 MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
5043 MonoJumpInfo *newji;
5044 unsigned char *ip = patch_info->ip.i + cfg->native_code;
5045 unsigned char *bcl = code;
5046 /* patch the initial jump: we arrived here with a call */
5047 ppc_patch (ip, code);
5048 ppc_bc (code, ovfj->b0_cond, ovfj->b1_cond, 0);
5050 ppc_patch (code - 4, ip + 4); /* jump back after the initial branch */
5051 /* patch the conditional jump to the right handler */
5052 /* make it processed next */
/* Queue a fresh MONO_PATCH_INFO_EXC right after this entry so the
 * EXC case below resolves the conditional branch emitted above. */
5053 newji = mono_mempool_alloc (cfg->mempool, sizeof (MonoJumpInfo));
5054 newji->type = MONO_PATCH_INFO_EXC;
5055 newji->ip.i = bcl - cfg->native_code;
5056 newji->data.target = ovfj->data.exception;
5057 newji->next = patch_info->next;
5058 patch_info->next = newji;
5061 case MONO_PATCH_INFO_EXC: {
5062 MonoClass *exc_class;
5064 unsigned char *ip = patch_info->ip.i + cfg->native_code;
5065 i = exception_id_by_name (patch_info->data.target);
/* Reuse an already-emitted stub for the same exception if possible. */
5066 if (exc_throw_pos [i]) {
5067 ppc_patch (ip, exc_throw_pos [i]);
5068 patch_info->type = MONO_PATCH_INFO_NONE;
5071 exc_throw_pos [i] = code;
5074 exc_class = mono_class_from_name (mono_defaults.corlib, "System", patch_info->data.name);
5075 g_assert (exc_class);
5077 ppc_patch (ip, code);
5078 /*mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC_NAME, patch_info->data.target);*/
/* Stub body: r3 = token of the exception class, r4 = faulting IP. */
5079 ppc_load (code, ppc_r3, exc_class->type_token);
5080 /* we got here from a conditional call, so the calling ip is set in lr */
5081 ppc_mflr (code, ppc_r4);
/* Repurpose this patch info as the call to the throw helper. */
5082 patch_info->type = MONO_PATCH_INFO_INTERNAL_METHOD;
5083 patch_info->data.name = "mono_arch_throw_corlib_exception";
5084 patch_info->ip.i = code - cfg->native_code;
5085 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
/* Indirect call through CTR; the address (0 here) is filled in later
 * when the patch info is resolved. */
5086 ppc_load_func (code, ppc_r0, 0);
5087 ppc_mtctr (code, ppc_r0);
5088 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
5100 cfg->code_len = code - cfg->native_code;
5102 g_assert (cfg->code_len < cfg->code_size);
/*
 * try_offset_access:
 * Sanity-check helper for linuxthreads-style TLS: read the pthread
 * specific-data table directly off r2 (the thread pointer on this ABI)
 * at the hard-coded 284-byte offset and verify that slot `idx` really
 * contains `value`.  NOTE(review): the 284 offset mirrors
 * offsets_from_pthread_key() and is libc-internal — fragile by design.
 */
5108 try_offset_access (void *value, guint32 idx)
5110 register void* me __asm__ ("r2");
5111 void ***p = (void***)((char*)me + 284);
5112 int idx1 = idx / 32;
5113 int idx2 = idx % 32;
5116 if (value != p[idx1][idx2])
/*
 * setup_tls_access:
 * One-time probe that decides how the JIT reads thread-local storage,
 * setting the file-scope `tls_mode` and caching the TLS offsets/keys for
 * the current domain, LMF address and thread object.  Detection works by
 * disassembling the first instructions of pthread_getspecific() to
 * recognize known libc/libpthread implementations (NPTL, linuxthreads,
 * Darwin G4/G5); anything unrecognized falls back to TLS_MODE_FAILED.
 */
5123 setup_tls_access (void)
5127 #if defined(__linux__) && defined(_CS_GNU_LIBPTHREAD_VERSION)
5128 size_t conf_size = 0;
5131 /* FIXME for darwin */
5132 guint32 *ins, *code;
5133 guint32 cmplwi_1023, li_0x48, blr_ins;
/* Already probed and failed, or disabled via environment: bail out. */
5136 if (tls_mode == TLS_MODE_FAILED)
5138 if (g_getenv ("MONO_NO_TLS")) {
5139 tls_mode = TLS_MODE_FAILED;
5143 if (tls_mode == TLS_MODE_DETECT) {
5144 #if defined(__linux__) && defined(_CS_GNU_LIBPTHREAD_VERSION)
/* glibc reports the threading implementation; "NPTL..." means we can
 * use direct thread-pointer-relative TLS access. */
5145 conf_size = confstr ( _CS_GNU_LIBPTHREAD_VERSION, confbuf, sizeof(confbuf));
5146 if ((conf_size > 4) && (strncmp (confbuf, "NPTL", 4) == 0))
5147 tls_mode = TLS_MODE_NPTL;
5149 tls_mode = TLS_MODE_LTHREADS;
/* No confstr info: disassemble pthread_getspecific to identify it. */
5151 ins = (guint32*)pthread_getspecific;
5152 /* uncond branch to the real method */
/* Opcode 18 is an unconditional branch (b/ba); follow it to the real
 * implementation before pattern-matching. */
5153 if ((*ins >> 26) == 18) {
5155 val = (*ins & ~3) << 6;
5159 ins = (guint32*)(long)val;
5161 ins = (guint32*) ((char*)ins + val);
/* Build reference encodings to compare against the disassembly. */
5164 code = &cmplwi_1023;
5165 ppc_cmpli (code, 0, 0, ppc_r3, 1023);
5167 ppc_li (code, ppc_r4, 0x48);
/* linuxthreads' pthread_getspecific starts with cmplwi r3,1023; then
 * scan ahead for a lwz with displacement 284 to confirm the layout
 * assumed by offsets_from_pthread_key(). */
5170 if (*ins == cmplwi_1023) {
5171 int found_lwz_284 = 0;
5172 for (ptk = 0; ptk < 20; ++ptk) {
5174 if (!*ins || *ins == blr_ins)
5176 if ((guint16)*ins == 284 && (*ins >> 26) == 32) {
5181 if (!found_lwz_284) {
5182 tls_mode = TLS_MODE_FAILED;
5185 tls_mode = TLS_MODE_LTHREADS;
5186 } else if (*ins == li_0x48) {
5188 /* uncond branch to the real method */
5189 if ((*ins >> 26) == 18) {
5191 val = (*ins & ~3) << 6;
5195 ins = (guint32*)(long)val;
5197 ins = (guint32*) ((char*)ins + val);
/* Darwin: distinguish G4 (syscall 0x7FF2) from G5 (mfspr 104). */
5199 code = (guint32*)&val;
5200 ppc_li (code, ppc_r0, 0x7FF2);
5201 if (ins [1] == val) {
5202 /* Darwin on G4, implement */
5203 tls_mode = TLS_MODE_FAILED;
5206 code = (guint32*)&val;
5207 ppc_mfspr (code, ppc_r3, 104);
5208 if (ins [1] != val) {
5209 tls_mode = TLS_MODE_FAILED;
5212 tls_mode = TLS_MODE_DARWIN_G5;
5215 tls_mode = TLS_MODE_FAILED;
5219 tls_mode = TLS_MODE_FAILED;
/* Cache the domain TLS offset/key (offset for NPTL, pthread key otherwise). */
5224 if ((monodomain_key == -1) && (tls_mode == TLS_MODE_NPTL)) {
5225 monodomain_key = mono_domain_get_tls_offset();
5227 /* if not TLS_MODE_NPTL or local dynamic (as indicated by
5228 mono_domain_get_tls_offset returning -1) then use keyed access. */
5229 if (monodomain_key == -1) {
5230 ptk = mono_domain_get_tls_key ();
5232 ptk = mono_pthread_key_for_tls (ptk);
5234 monodomain_key = ptk;
/* Same dance for the LMF address. */
5239 if ((lmf_pthread_key == -1) && (tls_mode == TLS_MODE_NPTL)) {
5240 lmf_pthread_key = mono_get_lmf_addr_tls_offset();
5242 /* if not TLS_MODE_NPTL or local dynamic (as indicated by
5243 mono_get_lmf_addr_tls_offset returning -1) then use keyed access. */
5244 if (lmf_pthread_key == -1) {
5245 ptk = mono_pthread_key_for_tls (mono_jit_tls_id);
5247 /*g_print ("MonoLMF at: %d\n", ptk);*/
5248 /*if (!try_offset_access (mono_get_lmf_addr (), ptk)) {
5249 init_tls_failed = 1;
5252 lmf_pthread_key = ptk;
/* And for the current MonoThread. */
5256 if ((monothread_key == -1) && (tls_mode == TLS_MODE_NPTL)) {
5257 monothread_key = mono_thread_get_tls_offset();
5259 /* if not TLS_MODE_NPTL or local dynamic (as indicated by
5260 mono_get_lmf_addr_tls_offset returning -1) then use keyed access. */
5261 if (monothread_key == -1) {
5262 ptk = mono_thread_get_tls_key ();
5264 ptk = mono_pthread_key_for_tls (ptk);
5266 monothread_key = ptk;
5267 /*g_print ("thread inited: %d\n", ptk);*/
5270 /*g_print ("thread not inited yet %d\n", ptk);*/
/*
 * mono_arch_setup_jit_tls_data:
 * Per-thread JIT TLS initialization hook; on this backend it only needs
 * to make sure the TLS access mode has been probed.
 */
5276 mono_arch_setup_jit_tls_data (MonoJitTlsData *tls)
5278 setup_tls_access ();
5282 mono_arch_free_jit_tls_data (MonoJitTlsData *tls)
#ifdef MONO_ARCH_HAVE_IMT

/* Byte sizes of the code fragments emitted by mono_arch_build_imt_thunk,
 * used to pre-compute each IMT entry's chunk_size before emission. */
5288 #define CMP_SIZE (PPC_LOAD_SEQUENCE_LENGTH + 4)
5290 #define LOADSTORE_SIZE 4
5291 #define JUMP_IMM_SIZE 12
5292 #define JUMP_IMM32_SIZE (PPC_LOAD_SEQUENCE_LENGTH + 8)
/* Set to 1 to emit an assertion when an IMT lookup falls through to a
 * non-matching method (debug aid only). */
5293 #define ENABLE_WRONG_METHOD_CHECK 0
5296 * LOCKING: called with the domain lock held
/*
 * mono_arch_build_imt_thunk:
 * Build the native IMT (interface method table) dispatch thunk for
 * `vtable`: a compare/branch decision tree over the IMT entries that
 * loads the target method address and jumps to it via CTR.  Two passes:
 * first size every entry (chunk_size), then allocate and emit, finally
 * back-patch the tree's branches.  `fail_tramp`, when set, is jumped to
 * if no entry matches (generic virtual thunk case).
 */
5299 mono_arch_build_imt_thunk (MonoVTable *vtable, MonoDomain *domain, MonoIMTCheckItem **imt_entries, int count,
5300 gpointer fail_tramp)
5304 guint8 *code, *start;
/* Sizing pass: compute the emitted byte count of every entry. */
5306 for (i = 0; i < count; ++i) {
5307 MonoIMTCheckItem *item = imt_entries [i];
5308 if (item->is_equals) {
5309 if (item->check_target_idx) {
5310 if (!item->compare_done)
5311 item->chunk_size += CMP_SIZE;
5312 if (item->has_target_code)
5313 item->chunk_size += BR_SIZE + JUMP_IMM32_SIZE;
5315 item->chunk_size += LOADSTORE_SIZE + BR_SIZE + JUMP_IMM_SIZE;
5318 item->chunk_size += CMP_SIZE + BR_SIZE + JUMP_IMM32_SIZE * 2;
5319 if (!item->has_target_code)
5320 item->chunk_size += LOADSTORE_SIZE;
5322 item->chunk_size += LOADSTORE_SIZE + JUMP_IMM_SIZE;
5323 #if ENABLE_WRONG_METHOD_CHECK
5324 item->chunk_size += CMP_SIZE + BR_SIZE + 4;
/* Non-leaf node: just a compare and a conditional branch. */
5329 item->chunk_size += CMP_SIZE + BR_SIZE;
5330 imt_entries [item->check_target_idx]->compare_done = TRUE;
5332 size += item->chunk_size;
/* Allocate from the generic-virtual-thunk pool or plain domain code,
 * depending on whether a fail trampoline was requested. */
5335 code = mono_method_alloc_generic_virtual_thunk (domain, size);
5337 /* the initial load of the vtable address */
5338 size += PPC_LOAD_SEQUENCE_LENGTH + LOADSTORE_SIZE;
5339 code = mono_domain_code_reserve (domain, size);
5344 * We need to save and restore r11 because it might be
5345 * used by the caller as the vtable register, so
5346 * clobbering it will trip up the magic trampoline.
5348 * FIXME: Get rid of this by making sure that r11 is
5349 * not used as the vtable register in interface calls.
5351 ppc_store_reg (code, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_sp);
/* r11 = &vtable->vtable[0]; slot loads below are relative to it. */
5352 ppc_load (code, ppc_r11, (gulong)(& (vtable->vtable [0])));
/* Emission pass. */
5354 for (i = 0; i < count; ++i) {
5355 MonoIMTCheckItem *item = imt_entries [i];
5356 item->code_target = code;
5357 if (item->is_equals) {
5358 if (item->check_target_idx) {
/* Leaf with a fallthrough target: compare key, branch away on
 * mismatch, otherwise load the target and jump via CTR. */
5359 if (!item->compare_done) {
5360 ppc_load (code, ppc_r0, (gulong)item->key);
5361 ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
5363 item->jmp_code = code;
5364 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
5365 if (item->has_target_code) {
5366 ppc_load (code, ppc_r0, item->value.target_code);
5368 ppc_load_reg (code, ppc_r0, (sizeof (gpointer) * item->value.vtable_slot), ppc_r11);
/* Restore the caller's r11 before transferring control. */
5369 ppc_load_reg (code, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_sp);
5371 ppc_mtctr (code, ppc_r0);
5372 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
/* Leaf with fail_tramp: mismatch falls through to the trampoline. */
5375 ppc_load (code, ppc_r0, (gulong)item->key);
5376 ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
5377 item->jmp_code = code;
5378 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
5379 if (item->has_target_code) {
5380 ppc_load (code, ppc_r0, item->value.target_code);
5383 ppc_load (code, ppc_r0, & (vtable->vtable [item->value.vtable_slot]));
5384 ppc_load_reg_indexed (code, ppc_r0, 0, ppc_r0);
5386 ppc_mtctr (code, ppc_r0);
5387 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
5388 ppc_patch (item->jmp_code, code);
5389 ppc_load (code, ppc_r0, fail_tramp);
5390 ppc_mtctr (code, ppc_r0);
5391 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
5392 item->jmp_code = NULL;
5394 /* enable the commented code to assert on wrong method */
5395 #if ENABLE_WRONG_METHOD_CHECK
5396 ppc_load (code, ppc_r0, (guint32)item->key);
5397 ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
5398 item->jmp_code = code;
5399 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
/* Unconditional leaf: load slot, restore r11, jump. */
5401 ppc_load_reg (code, ppc_r0, (sizeof (gpointer) * item->value.vtable_slot), ppc_r11);
5402 ppc_load_reg (code, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_sp);
5403 ppc_mtctr (code, ppc_r0);
5404 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
5405 #if ENABLE_WRONG_METHOD_CHECK
5406 ppc_patch (item->jmp_code, code);
5408 item->jmp_code = NULL;
/* Interior node of the decision tree: branch-if-less to the subtree
 * rooted at check_target_idx (patched below). */
5413 ppc_load (code, ppc_r0, (gulong)item->key);
5414 ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
5415 item->jmp_code = code;
5416 ppc_bc (code, PPC_BR_FALSE, PPC_BR_LT, 0);
5419 /* patch the branches to get to the target items */
5420 for (i = 0; i < count; ++i) {
5421 MonoIMTCheckItem *item = imt_entries [i];
5422 if (item->jmp_code) {
5423 if (item->check_target_idx) {
5424 ppc_patch (item->jmp_code, imt_entries [item->check_target_idx]->code_target);
5430 mono_stats.imt_thunks_size += code - start;
5431 g_assert (code - start <= size);
/* Newly generated code must be flushed before execution on PPC. */
5432 mono_arch_flush_icache (start, size);
/* mono_arch_find_imt_method:
 * Recover the interface MonoMethod from the register snapshot taken at a
 * magic-trampoline stop: it lives in the architecture's IMT register. */
5437 mono_arch_find_imt_method (gpointer *regs, guint8 *code)
5439 return (MonoMethod*) regs [MONO_ARCH_IMT_REG];
/* mono_arch_find_this_argument:
 * Extract the `this` pointer for `method` from the saved register set by
 * delegating to the generic calling-convention helper. */
5443 mono_arch_find_this_argument (gpointer *regs, MonoMethod *method, MonoGenericSharingContext *gsctx)
5445 return mono_arch_get_this_arg_from_call (gsctx, mono_method_signature (method), (gssize*)regs, NULL);
/* mono_arch_find_static_call_vtable:
 * The vtable for a static rgctx call is passed in the RGCTX register;
 * read it back out of the register snapshot. */
5450 mono_arch_find_static_call_vtable (gpointer *regs, guint8 *code)
5452 return (MonoVTable*) regs [MONO_ARCH_RGCTX_REG];
5456 mono_arch_emit_inst_for_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
5463 mono_arch_print_tree (MonoInst *tree, int arity)
/*
 * mono_arch_get_domain_intrinsic:
 * Return an OP_TLS_GET instruction that reads the current MonoDomain from
 * TLS, or (per the guard on monodomain_key) nothing when TLS access is
 * unavailable on this system.
 */
5468 MonoInst* mono_arch_get_domain_intrinsic (MonoCompile* cfg)
5472 setup_tls_access ();
5473 if (monodomain_key == -1)
5476 MONO_INST_NEW (cfg, ins, OP_TLS_GET);
5477 ins->inst_offset = monodomain_key;
/*
 * mono_arch_get_thread_intrinsic:
 * Return an OP_TLS_GET instruction that reads the current MonoThread from
 * TLS, guarded the same way as the domain intrinsic above.
 */
5482 mono_arch_get_thread_intrinsic (MonoCompile* cfg)
5486 setup_tls_access ();
5487 if (monothread_key == -1)
5490 MONO_INST_NEW (cfg, ins, OP_TLS_GET);
5491 ins->inst_offset = monothread_key;
/*
 * mono_arch_context_get_int_reg:
 * Read integer register `reg` out of a saved MonoContext.  The stack
 * pointer is special-cased; only callee-saved registers (r13 and up) are
 * stored in ctx->regs, hence the assert and the r13-relative index.
 * NOTE(review): function continues past this chunk.
 */
5496 mono_arch_context_get_int_reg (MonoContext *ctx, int reg)
5499 return MONO_CONTEXT_GET_SP (ctx);
5501 g_assert (reg >= ppc_r13);
5503 return (gpointer)ctx->regs [reg - ppc_r13];