2 * mini-ppc.c: PowerPC backend for the Mono code generator
5 * Paolo Molaro (lupus@ximian.com)
6 * Dietmar Maurer (dietmar@ximian.com)
7 * Andreas Faerber <andreas.faerber@web.de>
9 * (C) 2003 Ximian, Inc.
10 * (C) 2007-2008 Andreas Faerber
15 #include <mono/metadata/appdomain.h>
16 #include <mono/metadata/debug-helpers.h>
20 #include "cpu-ppc64.h"
27 #include <sys/sysctl.h>
33 #define FORCE_INDIR_CALL 1
/* NOTE(review): this dump is corrupted — stray original line numbers are baked
 * into every line and some lines are missing. Code kept byte-identical;
 * only comments added. */
44 /* This mutex protects architecture specific caches */
45 #define mono_mini_arch_lock() EnterCriticalSection (&mini_arch_mutex)
46 #define mono_mini_arch_unlock() LeaveCriticalSection (&mini_arch_mutex)
47 static CRITICAL_SECTION mini_arch_mutex;
/* Offset where the exception-handling esp is saved (set in mono_arch_allocate_vars). */
49 int mono_exc_esp_offset = 0;
/* TLS access strategy, presumably resolved at runtime from TLS_MODE_DETECT — TODO confirm where detection happens (not visible in this chunk). */
50 static int tls_mode = TLS_MODE_DETECT;
/* pthread keys / offsets used by the TLS-access fast paths below; -1 = not initialized. */
51 static int lmf_pthread_key = -1;
52 static int monothread_key = -1;
53 static int monodomain_key = -1;
56 offsets_from_pthread_key (guint32 key, int *offset2)
60 *offset2 = idx2 * sizeof (gpointer);
61 return 284 + idx1 * sizeof (gpointer);
/* Load the TLS value for `key` into `dreg` using the linuxthreads layout:
 * two dependent loads off r2 via offsets_from_pthread_key().
 * NOTE(review): dump is corrupted (baked line numbers, missing lines);
 * macro text kept byte-identical. */
64 #define emit_linuxthreads_tls(code,dreg,key) do {\
66 off1 = offsets_from_pthread_key ((key), &off2); \
67 ppc_load_reg ((code), (dreg), off1, ppc_r2); \
68 ppc_load_reg ((code), (dreg), off2, (dreg)); \
/* Darwin/G5 fast path: SPR 104 (SPRG0-style thread pointer — TODO confirm)
 * holds the pthread self pointer; TLS slots start at 0x48. */
71 #define emit_darwing5_tls(code,dreg,key) do {\
72 int off1 = 0x48 + key * sizeof (gpointer); \
73 ppc_mfspr ((code), (dreg), 104); \
74 ppc_load_reg ((code), (dreg), off1, (dreg)); \
/* Darwin/G4 path: syscall 0x7FF2 returns the thread pointer in r3, so r3 is
 * saved/restored around it when dreg != r3. */
77 /* FIXME: ensure the sc call preserves all but r3 */
78 #define emit_darwing4_tls(code,dreg,key) do {\
79 int off1 = 0x48 + key * sizeof (gpointer); \
80 if ((dreg) != ppc_r3) ppc_mr ((code), ppc_r11, ppc_r3); \
81 ppc_li ((code), ppc_r0, 0x7FF2); \
83 ppc_lwz ((code), (dreg), off1, ppc_r3); \
84 if ((dreg) != ppc_r3) ppc_mr ((code), ppc_r3, ppc_r11); \
/* NPTL TLS access off the dedicated thread-pointer register. When the offset
 * doesn't fit in a 16-bit displacement, the high part is added via addis into
 * r11 first. NOTE(review): corrupted dump — lines kept byte-identical. */
87 #ifdef PPC_THREAD_PTR_REG
88 #define emit_nptl_tls(code,dreg,key) do { \
90 int off2 = key >> 15; \
91 if ((off2 == 0) || (off2 == -1)) { \
92 ppc_load_reg ((code), (dreg), off1, PPC_THREAD_PTR_REG); \
94 int off3 = (off2 + 1) > 1; \
95 ppc_addis ((code), ppc_r11, PPC_THREAD_PTR_REG, off3); \
96 ppc_load_reg ((code), (dreg), off1, ppc_r11); \
/* Fallback when no thread-pointer register is available: NPTL mode must not be selected. */
100 #define emit_nptl_tls(code,dreg,key) do { \
101 g_assert_not_reached (); \
/* Dispatch on the runtime-detected tls_mode to the appropriate emitter above. */
105 #define emit_tls_access(code,dreg,key) do { \
106 switch (tls_mode) { \
107 case TLS_MODE_LTHREADS: emit_linuxthreads_tls(code,dreg,key); break; \
108 case TLS_MODE_NPTL: emit_nptl_tls(code,dreg,key); break; \
109 case TLS_MODE_DARWIN_G5: emit_darwing5_tls(code,dreg,key); break; \
110 case TLS_MODE_DARWIN_G4: emit_darwing4_tls(code,dreg,key); break; \
111 default: g_assert_not_reached (); \
/* Emit an OP_R8CONST instruction loading the double stored at `addr` into
 * dreg `dr` (inst_p0 carries the address of the constant).
 * NOTE(review): corrupted dump — macro text kept byte-identical. */
115 #define MONO_EMIT_NEW_LOAD_R8(cfg,dr,addr) do { \
117 MONO_INST_NEW ((cfg), (inst), OP_R8CONST); \
118 inst->type = STACK_R8; \
120 inst->inst_p0 = (void*)(addr); \
121 mono_bblock_add_inst (cfg->cbb, inst); \
/*
 * mono_arch_regname:
 * @reg: integer register number (0..31)
 *
 * Returns the human-readable name of a PPC integer register for debug
 * output. r1 is reported as "sp". Out-of-range values yield "unknown".
 * (Reconstructed: the dump was missing the r30/r31 entries and the
 * return statements.)
 */
const char*
mono_arch_regname (int reg) {
	static const char rnames[][4] = {
		"r0", "sp", "r2", "r3", "r4",
		"r5", "r6", "r7", "r8", "r9",
		"r10", "r11", "r12", "r13", "r14",
		"r15", "r16", "r17", "r18", "r19",
		"r20", "r21", "r22", "r23", "r24",
		"r25", "r26", "r27", "r28", "r29",
		"r30", "r31"
	};
	if (reg >= 0 && reg < 32)
		return rnames [reg];
	return "unknown";
}
/*
 * mono_arch_fregname:
 * @reg: floating point register number (0..31)
 *
 * Returns the human-readable name of a PPC floating point register for
 * debug output. Out-of-range values yield "unknown".
 * (Reconstructed: the dump was missing the f30/f31 entries and the
 * return statements.)
 */
const char*
mono_arch_fregname (int reg) {
	static const char rnames[][4] = {
		"f0", "f1", "f2", "f3", "f4",
		"f5", "f6", "f7", "f8", "f9",
		"f10", "f11", "f12", "f13", "f14",
		"f15", "f16", "f17", "f18", "f19",
		"f20", "f21", "f22", "f23", "f24",
		"f25", "f26", "f27", "f28", "f29",
		"f30", "f31"
	};
	if (reg >= 0 && reg < 32)
		return rnames [reg];
	return "unknown";
}
/* emit_memcpy: JIT-emit an inline memory copy of `size` bytes from
 * sreg+soffset to dreg+doffset. Large copies (> 5 words) use a CTR-counted
 * load-update/store-update loop; the remainder falls through to the
 * word/half/byte copies below. NOTE(review): corrupted dump — baked line
 * numbers and missing lines; code kept byte-identical. */
156 /* this function overwrites r0, r11, r12 */
158 emit_memcpy (guint8 *code, int size, int dreg, int doffset, int sreg, int soffset)
160 /* unrolled, use the counter in big */
161 if (size > sizeof (gpointer) * 5) {
162 long shifted = size >> MONO_PPC_32_64_CASE (2, 3);
163 guint8 *copy_loop_start, *copy_loop_jump;
165 ppc_load (code, ppc_r0, shifted);
166 ppc_mtctr (code, ppc_r0);
167 g_assert (sreg == ppc_r11);
/* pre-bias the pointers so the load/store-with-update loop can use a fixed stride */
168 ppc_addi (code, ppc_r12, dreg, (doffset - sizeof (gpointer)));
169 ppc_addi (code, ppc_r11, sreg, (soffset - sizeof (gpointer)));
170 copy_loop_start = code;
171 ppc_load_reg_update (code, ppc_r0, (unsigned int)sizeof (gpointer), ppc_r11);
172 ppc_store_reg_update (code, ppc_r0, (unsigned int)sizeof (gpointer), ppc_r12);
173 copy_loop_jump = code;
174 ppc_bc (code, PPC_BR_DEC_CTR_NONZERO, 0, 0);
175 ppc_patch (copy_loop_jump, copy_loop_start);
176 size -= shifted * sizeof (gpointer);
177 doffset = soffset = 0;
/* tail copies: 8-byte (64-bit only), then 4-, 2- and 1-byte chunks —
 * the surrounding while/if lines are missing from this dump */
180 #ifdef __mono_ppc64__
182 ppc_load_reg (code, ppc_r0, soffset, sreg);
183 ppc_store_reg (code, ppc_r0, doffset, dreg);
190 ppc_lwz (code, ppc_r0, soffset, sreg);
191 ppc_stw (code, ppc_r0, doffset, dreg);
197 ppc_lhz (code, ppc_r0, soffset, sreg);
198 ppc_sth (code, ppc_r0, doffset, dreg);
204 ppc_lbz (code, ppc_r0, soffset, sreg);
205 ppc_stb (code, ppc_r0, doffset, dreg);
/* NOTE(review): corrupted dump — baked line numbers and missing lines
 * (offset updates, closing braces). Code kept byte-identical. */
214 * mono_arch_get_argument_info:
215 * @csig: a method signature
216 * @param_count: the number of parameters to consider
217 * @arg_info: an array to store the result infos
219 * Gathers information on parameters such as size, alignment and
220 * padding. arg_info should be large enought to hold param_count + 1 entries.
222 * Returns the size of the activation frame.
225 mono_arch_get_argument_info (MonoMethodSignature *csig, int param_count, MonoJitArgumentInfo *arg_info)
227 #ifdef __mono_ppc64__
231 int k, frame_size = 0;
232 int size, align, pad;
/* a struct return value consumes a hidden pointer slot */
235 if (MONO_TYPE_ISSTRUCT (csig->ret)) {
236 frame_size += sizeof (gpointer);
240 arg_info [0].offset = offset;
/* `this` pointer slot — guarded by a hasthis check missing from this dump */
243 frame_size += sizeof (gpointer);
247 arg_info [0].size = frame_size;
249 for (k = 0; k < param_count; k++) {
/* pinvoke vs managed sizing — the if/else lines are missing from this dump */
252 size = mono_type_native_stack_size (csig->params [k], (guint32*)&align);
254 size = mini_type_stack_size (NULL, csig->params [k], &align);
256 /* ignore alignment for now */
/* pad current frame_size up to `align` and record the padding */
259 frame_size += pad = (align - (frame_size & (align - 1))) & (align - 1);
260 arg_info [k].pad = pad;
262 arg_info [k + 1].pad = 0;
263 arg_info [k + 1].size = size;
265 arg_info [k + 1].offset = offset;
/* final frame alignment */
269 align = MONO_ARCH_FRAME_ALIGNMENT;
270 frame_size += pad = (align - (frame_size & (align - 1))) & (align - 1);
271 arg_info [k].pad = pad;
#ifdef __mono_ppc64__
/*
 * is_load_sequence:
 * @seq: pointer to 5 consecutive instruction words
 *
 * Returns TRUE if the 5 instructions form the ppc64 full 64-bit constant
 * load sequence lis/ori/sldi/oris/ori (only the primary opcodes are
 * checked). (Reconstructed: the dump was missing the return type, braces
 * and the closing #endif.)
 */
static gboolean
is_load_sequence (guint32 *seq)
{
	return ppc_opcode (seq [0]) == 15 && /* lis */
		ppc_opcode (seq [1]) == 24 && /* ori */
		ppc_opcode (seq [2]) == 30 && /* sldi */
		ppc_opcode (seq [3]) == 25 && /* oris */
		ppc_opcode (seq [4]) == 24;  /* ori */
}

/* Extract the destination register / 16-bit displacement from a D-form load. */
#define ppc_load_get_dest(l)	(((l)>>21) & 0x1f)
#define ppc_load_get_off(l)	((gint16)((l) & 0xffff))
#endif
/* mono_ppc_is_direct_call_sequence: decide whether the call site ending at
 * `code` was emitted as a thunk-less direct call (immediate-loaded target)
 * rather than an indirect/vtable call, by pattern-matching the preceding
 * instruction words. NOTE(review): corrupted dump — baked line numbers and
 * missing lines (braces, returns); code kept byte-identical. */
292 /* code must point to the blrl */
294 mono_ppc_is_direct_call_sequence (guint32 *code)
296 #ifdef __mono_ppc64__
/* accept blrl, blr, or bctr encodings on ppc64 */
297 g_assert(*code == 0x4e800021 || *code == 0x4e800020 || *code == 0x4e800420);
299 /* the thunk-less direct call sequence: lis/ori/sldi/oris/ori/mtlr/blrl */
300 if (ppc_opcode (code [-1]) == 31) { /* mtlr */
301 if (ppc_opcode (code [-2]) == 58 && ppc_opcode (code [-3]) == 58) { /* ld/ld */
302 if (!is_load_sequence (&code [-8]))
304 /* one of the loads must be "ld r2,8(rX)" */
305 return (ppc_load_get_dest (code [-2]) == ppc_r2 && ppc_load_get_off (code [-2]) == 8) ||
306 (ppc_load_get_dest (code [-3]) == ppc_r2 && ppc_load_get_off (code [-3]) == 8);
308 if (ppc_opcode (code [-2]) == 24 && ppc_opcode (code [-3]) == 31) /* mr/nop */
309 return is_load_sequence (&code [-8]);
311 return is_load_sequence (&code [-6]);
/* 32-bit path: only blrl is expected */
315 g_assert(*code == 0x4e800021);
317 /* the thunk-less direct call sequence: lis/ori/mtlr/blrl */
318 return ppc_opcode (code [-1]) == 31 &&
319 ppc_opcode (code [-2]) == 24 &&
320 ppc_opcode (code [-3]) == 15;
/* mono_arch_get_vcall_slot: given the return address of a virtual call,
 * walk backwards from the blrl to the mtlr/lwz pair to recover which
 * register held the vtable and the displacement of the slot that was
 * loaded. Returns NULL-ish early for direct-call sequences.
 * NOTE(review): corrupted dump — baked line numbers and missing lines
 * (loop headers, returns, braces); code kept byte-identical. */
325 mono_arch_get_vcall_slot (guint8 *code_ptr, gpointer *regs, int *displacement)
329 guint32* code = (guint32*)code_ptr;
333 /* This is the 'blrl' instruction */
336 /* Sanity check: instruction must be 'blrl' */
337 if (*code != 0x4e800021)
340 if (mono_ppc_is_direct_call_sequence (code))
343 /* FIXME: more sanity checks here */
344 /* OK, we're now at the 'blrl' instruction. Now walk backwards
345 till we get to a 'mtlr rA' */
347 if((*code & 0x7c0803a6) == 0x7c0803a6) {
349 /* Here we are: we reached the 'mtlr rA'.
350 Extract the register from the instruction */
351 reg = (*code & 0x03e00000) >> 21;
353 /* ok, this is a lwz reg, offset (vtreg)
354 * it is emitted with:
355 * ppc_emit32 (c, (32 << 26) | ((D) << 21) | ((a) << 16) | (guint16)(d))
357 soff = (*code & 0xffff);
359 reg = (*code >> 16) & 0x1f;
360 g_assert (reg != ppc_r1);
361 /*g_print ("patching reg is %d\n", reg);*/
/* registers >= r13 are not in the raw regs array but saved in the LMF —
 * layout offset presumably matches the LMF save code; TODO confirm */
363 MonoLMF *lmf = (MonoLMF*)((char*)regs + (14 * sizeof (double)) + (13 * sizeof (gpointer)));
364 /* saved in the MonoLMF structure */
365 o = (gpointer)lmf->iregs [reg - 13];
372 *displacement = offset;
/* mono_arch_get_delegate_invoke_impl: build (and cache) a small trampoline
 * that invokes a delegate. With a target, the `this` argument is replaced by
 * delegate->target; without one, the arguments are slid down one register.
 * Both variants jump through CTR to delegate->method_ptr.
 * NOTE(review): corrupted dump — baked line numbers and missing lines
 * (cache-hit returns, braces, #else/#endif); code kept byte-identical. */
376 #define MAX_ARCH_DELEGATE_PARAMS 7
379 mono_arch_get_delegate_invoke_impl (MonoMethodSignature *sig, gboolean has_target)
381 guint8 *code, *start;
383 /* FIXME: Support more cases */
384 if (MONO_TYPE_ISSTRUCT (sig->ret))
/* has_target case: single cached implementation */
388 static guint8* cached = NULL;
389 int size = MONO_PPC_32_64_CASE (16, 20) + PPC_FTNPTR_SIZE;
390 mono_mini_arch_lock ();
392 mono_mini_arch_unlock ();
396 start = code = mono_global_codeman_reserve (size);
397 code = mono_ppc_create_pre_code_ftnptr (code);
399 /* Replace the this argument with the target */
400 ppc_load_reg (code, ppc_r0, G_STRUCT_OFFSET (MonoDelegate, method_ptr), ppc_r3);
401 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
402 /* it's a function descriptor */
403 ppc_ldx (code, ppc_r0, 0, ppc_r0);
405 ppc_mtctr (code, ppc_r0);
406 ppc_load_reg (code, ppc_r3, G_STRUCT_OFFSET (MonoDelegate, target), ppc_r3);
407 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
409 g_assert ((code - start) <= size);
411 mono_arch_flush_icache (start, size);
413 mono_mini_arch_unlock ();
/* no-target case: one cached implementation per parameter count */
416 static guint8* cache [MAX_ARCH_DELEGATE_PARAMS + 1] = {NULL};
419 if (sig->param_count > MAX_ARCH_DELEGATE_PARAMS)
421 for (i = 0; i < sig->param_count; ++i)
422 if (!mono_is_regsize_var (sig->params [i]))
425 mono_mini_arch_lock ();
426 code = cache [sig->param_count];
428 mono_mini_arch_unlock ();
432 size = MONO_PPC_32_64_CASE (12, 16) + sig->param_count * 4 + PPC_FTNPTR_SIZE;
433 start = code = mono_global_codeman_reserve (size);
434 code = mono_ppc_create_pre_code_ftnptr (code);
436 ppc_load_reg (code, ppc_r0, G_STRUCT_OFFSET (MonoDelegate, method_ptr), ppc_r3);
437 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
438 /* it's a function descriptor */
439 ppc_ldx (code, ppc_r0, 0, ppc_r0);
441 ppc_mtctr (code, ppc_r0);
442 /* slide down the arguments */
443 for (i = 0; i < sig->param_count; ++i) {
444 ppc_mr (code, (ppc_r3 + i), (ppc_r3 + i + 1));
446 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
448 g_assert ((code - start) <= size);
450 mono_arch_flush_icache (start, size);
451 cache [sig->param_count] = start;
452 mono_mini_arch_unlock ();
459 mono_arch_get_this_arg_from_call (MonoGenericSharingContext *gsctx, MonoMethodSignature *sig, gssize *regs, guint8 *code)
461 /* FIXME: handle returning a struct */
462 if (MONO_TYPE_ISSTRUCT (sig->ret))
463 return (gpointer)regs [ppc_r4];
464 return (gpointer)regs [ppc_r3];
/*
 * Initialize the cpu to execute managed code.
 * Nothing to do on PowerPC. (Reconstructed: empty body implied by dump.)
 */
void
mono_arch_cpu_init (void)
{
}
476 * Initialize architecture specific code.
479 mono_arch_init (void)
481 InitializeCriticalSection (&mini_arch_mutex);
485 * Cleanup architecture specific code.
488 mono_arch_cleanup (void)
490 DeleteCriticalSection (&mini_arch_mutex);
494 * This function returns the optimizations supported on this cpu.
497 mono_arch_cpu_optimizazions (guint32 *exclude_mask)
501 /* no ppc-specific optimizations yet */
/* Helper macros to include a switch case only on the 32-bit or only on the
 * 64-bit build. NOTE(review): corrupted dump — #else/#endif lines are
 * missing; text kept byte-identical. */
506 #ifdef __mono_ppc64__
507 #define CASE_PPC32(c)
508 #define CASE_PPC64(c) case c:
510 #define CASE_PPC32(c) case c:
511 #define CASE_PPC64(c)
515 is_regsize_var (MonoType *t) {
518 t = mini_type_get_underlying_type (NULL, t);
522 CASE_PPC64 (MONO_TYPE_I8)
523 CASE_PPC64 (MONO_TYPE_U8)
527 case MONO_TYPE_FNPTR:
529 case MONO_TYPE_OBJECT:
530 case MONO_TYPE_STRING:
531 case MONO_TYPE_CLASS:
532 case MONO_TYPE_SZARRAY:
533 case MONO_TYPE_ARRAY:
535 case MONO_TYPE_GENERICINST:
536 if (!mono_type_generic_inst_is_valuetype (t))
539 case MONO_TYPE_VALUETYPE:
546 mono_arch_get_allocatable_int_vars (MonoCompile *cfg)
551 for (i = 0; i < cfg->num_varinfo; i++) {
552 MonoInst *ins = cfg->varinfo [i];
553 MonoMethodVar *vmv = MONO_VARINFO (cfg, i);
556 if (vmv->range.first_use.abs_pos >= vmv->range.last_use.abs_pos)
559 if (ins->flags & (MONO_INST_VOLATILE|MONO_INST_INDIRECT) || (ins->opcode != OP_LOCAL && ins->opcode != OP_ARG))
562 /* we can only allocate 32 bit values */
563 if (is_regsize_var (ins->inst_vtype)) {
564 g_assert (MONO_VARINFO (cfg, i)->reg == -1);
565 g_assert (i == vmv->idx);
566 vars = mono_varlist_insert_sorted (cfg, vars, vmv, FALSE);
574 mono_arch_get_global_int_regs (MonoCompile *cfg)
578 if (cfg->frame_reg != ppc_sp)
580 /* ppc_r13 is used by the system on PPC EABI */
581 for (i = 14; i < top; ++i)
582 regs = g_list_prepend (regs, GUINT_TO_POINTER (i));
588 * mono_arch_regalloc_cost:
590 * Return the cost, in number of memory references, of the action of
591 * allocating the variable VMV into a register during global register
595 mono_arch_regalloc_cost (MonoCompile *cfg, MonoMethodVar *vmv)
/* mono_arch_flush_icache: make freshly JIT-generated code visible to the
 * instruction fetch unit — flush the data cache lines covering [code,
 * code+size) (dcbf/dcbst) then invalidate the corresponding icache lines
 * (icbi; sync). The cache line size is detected once (sysctl on Darwin,
 * /proc/self/auxv AT_DCACHEBSIZE (19) on Linux 2.6+).
 * NOTE(review): corrupted dump — baked line numbers and missing lines
 * (#ifdef __APPLE__ head, endp setup, isync tail); code kept byte-identical. */
607 mono_arch_flush_icache (guint8 *code, gint size)
610 guint8 *endp, *start;
611 static int cachelinesize = 0;
612 static int cachelineinc = 16;
/* one-time cache line size detection */
614 if (!cachelinesize) {
619 mib [1] = HW_CACHELINE;
620 len = sizeof (cachelinesize);
621 if (sysctl(mib, 2, &cachelinesize, (size_t*)&len, NULL, 0) == -1) {
625 cachelineinc = cachelinesize;
626 /*g_print ("setting cl size to %d\n", cachelinesize);*/
628 #elif defined(__linux__)
629 /* sadly this will work only with 2.6 kernels... */
630 FILE* f = fopen ("/proc/self/auxv", "rb");
633 while (fread (&vec, sizeof (vec), 1, f) == 1) {
634 if (vec.type == 19) {
635 cachelinesize = vec.value;
643 #elif defined(G_COMPILER_CODEWARRIOR)
647 #warning Need a way to get cache line size
/* align start down to a cache line boundary */
653 start = (guint8*)((gsize)start & ~(cachelinesize - 1));
654 /* use dcbf for smp support, later optimize for UP, see pem._64bit.d20030611.pdf page 211 */
655 #if defined(G_COMPILER_CODEWARRIOR)
657 for (p = start; p < endp; p += cachelineinc) {
661 for (p = start; p < endp; p += cachelineinc) {
667 for (p = start; p < endp; p += cachelineinc) {
679 for (p = start; p < endp; p += cachelineinc) {
680 asm ("dcbf 0,%0;" : : "r"(p) : "memory");
683 for (p = start; p < endp; p += cachelineinc) {
684 asm ("dcbst 0,%0;" : : "r"(p) : "memory");
689 for (p = start; p < endp; p += cachelineinc) {
690 asm ("icbi 0,%0; sync;" : : "r"(p) : "memory");
/*
 * mono_arch_flush_register_windows:
 * No-op on PowerPC — register windows exist only on SPARC/IA-64; this is
 * part of the cross-backend interface. (Reconstructed from a corrupted
 * dump that had only the signature line.)
 */
void
mono_arch_flush_register_windows (void)
{
}
/* Calling-convention knobs: ALWAYS_ON_STACK/FP_ALSO_IN_REG expand their
 * argument on ABIs where register arguments also reserve stack slots (and
 * where FP args consume integer-register slots), and to nothing otherwise.
 * NOTE(review): corrupted dump — the #ifdef __APPLE__/#else/#endif
 * structure around these is partly missing; text kept byte-identical. */
703 #define ALWAYS_ON_STACK(s) s
704 #define FP_ALSO_IN_REG(s) s
706 #ifdef __mono_ppc64__
707 #define ALWAYS_ON_STACK(s) s
708 #define FP_ALSO_IN_REG(s) s
710 #define ALWAYS_ON_STACK(s)
711 #define FP_ALSO_IN_REG(s)
713 #define ALIGN_DOUBLES
/* Fields of the per-argument info record (enclosing struct declaration is
 * missing from this dump). */
726 guint32 vtsize; /* in param area */
728 guint8 regtype : 4; /* 0 general, 1 basereg, 2 floating point register, see RegType* */
729 guint8 size : 4; /* 1, 2, 4, 8, or regs used by RegTypeStructByVal */
730 guint8 bytes : 4; /* size in bytes - only valid for
731 RegTypeStructByVal if the struct fits
732 in one word, otherwise it's 0*/
/* add_general: assign one general (integer-register-sized) argument either
 * to the next free argument register (*gr) or to a caller-frame stack slot,
 * advancing *gr / *stack_size accordingly. `simple` distinguishes single-reg
 * values from the 32-bit long-long path (register-pair alignment).
 * NOTE(review): corrupted dump — baked line numbers and missing lines
 * (register-assign branches, braces); code kept byte-identical. */
747 add_general (guint *gr, guint *stack_size, ArgInfo *ainfo, gboolean simple)
749 #ifdef __mono_ppc64__
/* simple (single-register) case: spill to stack once r3..r3+N are used up */
754 if (*gr >= 3 + PPC_NUM_REG_ARGS) {
755 ainfo->offset = PPC_STACK_PARAM_OFFSET + *stack_size;
756 ainfo->reg = ppc_sp; /* in the caller */
757 ainfo->regtype = RegTypeBase;
758 *stack_size += sizeof (gpointer);
760 ALWAYS_ON_STACK (*stack_size += sizeof (gpointer));
/* non-simple (register-pair, 32-bit long long) case */
764 if (*gr >= 3 + PPC_NUM_REG_ARGS - 1) {
766 //*stack_size += (*stack_size % 8);
768 ainfo->offset = PPC_STACK_PARAM_OFFSET + *stack_size;
769 ainfo->reg = ppc_sp; /* in the caller */
770 ainfo->regtype = RegTypeBase;
777 ALWAYS_ON_STACK (*stack_size += 8);
785 #if defined(__APPLE__) || defined(__mono_ppc64__)
787 has_only_a_r48_field (MonoClass *klass)
791 gboolean have_field = FALSE;
793 while ((f = mono_class_get_fields (klass, &iter))) {
794 if (!(f->type->attrs & FIELD_ATTRIBUTE_STATIC)) {
797 if (!f->type->byref && (f->type->type == MONO_TYPE_R4 || f->type->type == MONO_TYPE_R8))
/* calculate_sizes: compute the PPC calling-convention layout (CallInfo) for
 * a signature — which register or caller-frame slot each argument and the
 * return value occupy, plus total stack usage (16-byte aligned). Arguments
 * are walked in order, consuming integer registers from PPC_FIRST_ARG_REG
 * and FP registers from PPC_FIRST_FPARG_REG via add_general()/direct
 * assignment; vararg sentinels force everything after them onto the stack.
 * NOTE(review): corrupted dump — baked line numbers and many missing lines
 * (case labels, braces, breaks); code kept byte-identical; too intricate to
 * rewrite safely from this view. */
808 calculate_sizes (MonoMethodSignature *sig, gboolean is_pinvoke)
811 int n = sig->hasthis + sig->param_count;
813 guint32 stack_size = 0;
814 CallInfo *cinfo = g_malloc0 (sizeof (CallInfo) + sizeof (ArgInfo) * n);
816 fr = PPC_FIRST_FPARG_REG;
817 gr = PPC_FIRST_ARG_REG;
819 /* FIXME: handle returning a struct */
820 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
821 add_general (&gr, &stack_size, &cinfo->ret, TRUE);
822 cinfo->struct_ret = PPC_FIRST_ARG_REG;
/* `this` pointer (hasthis guard missing from this dump) */
827 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
830 DEBUG(printf("params: %d\n", sig->param_count));
831 for (i = 0; i < sig->param_count; ++i) {
832 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos)) {
833 /* Prevent implicit arguments and sig_cookie from
834 being passed in registers */
835 gr = PPC_LAST_ARG_REG + 1;
836 /* FIXME: don't we have to set fr, too? */
837 /* Emit the signature cookie just before the implicit arguments */
838 add_general (&gr, &stack_size, &cinfo->sig_cookie, TRUE);
840 DEBUG(printf("param %d: ", i));
841 if (sig->params [i]->byref) {
842 DEBUG(printf("byref\n"));
843 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
847 simpletype = mini_type_get_underlying_type (NULL, sig->params [i])->type;
848 switch (simpletype) {
849 case MONO_TYPE_BOOLEAN:
852 cinfo->args [n].size = 1;
853 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
859 cinfo->args [n].size = 2;
860 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
865 cinfo->args [n].size = 4;
866 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
872 case MONO_TYPE_FNPTR:
873 case MONO_TYPE_CLASS:
874 case MONO_TYPE_OBJECT:
875 case MONO_TYPE_STRING:
876 case MONO_TYPE_SZARRAY:
877 case MONO_TYPE_ARRAY:
878 cinfo->args [n].size = sizeof (gpointer);
879 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
882 case MONO_TYPE_GENERICINST:
883 if (!mono_type_generic_inst_is_valuetype (sig->params [i])) {
884 cinfo->args [n].size = sizeof (gpointer);
885 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
890 case MONO_TYPE_VALUETYPE: {
893 klass = mono_class_from_mono_type (sig->params [i]);
895 size = mono_class_native_size (klass, NULL);
897 size = mono_class_value_size (klass, NULL);
/* single-float/double structs travel like a scalar FP argument on these ABIs */
898 #if defined(__APPLE__) || defined(__mono_ppc64__)
899 if ((size == 4 || size == 8) && has_only_a_r48_field (klass)) {
900 cinfo->args [n].size = size;
902 /* It was 7, now it is 8 in LinuxPPC */
903 if (fr <= PPC_LAST_FPARG_REG) {
904 cinfo->args [n].regtype = RegTypeFP;
905 cinfo->args [n].reg = fr;
907 FP_ALSO_IN_REG (gr ++);
909 FP_ALSO_IN_REG (gr ++);
910 ALWAYS_ON_STACK (stack_size += size);
912 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
913 cinfo->args [n].regtype = RegTypeBase;
914 cinfo->args [n].reg = ppc_sp; /* in the caller*/
921 DEBUG(printf ("load %d bytes struct\n",
922 mono_class_native_size (sig->params [i]->data.klass, NULL)));
923 #if PPC_PASS_STRUCTS_BY_VALUE
925 int align_size = size;
927 int rest = PPC_LAST_ARG_REG - gr + 1;
929 align_size += (sizeof (gpointer) - 1);
930 align_size &= ~(sizeof (gpointer) - 1);
931 nwords = (align_size + sizeof (gpointer) -1 ) / sizeof (gpointer);
932 n_in_regs = MIN (rest, nwords);
933 cinfo->args [n].regtype = RegTypeStructByVal;
934 if (gr > PPC_LAST_ARG_REG
936 /* FIXME: check this */
937 || (size >= 3 && size % 4 != 0)
940 cinfo->args [n].size = 0;
941 cinfo->args [n].vtsize = nwords;
943 cinfo->args [n].size = n_in_regs;
944 cinfo->args [n].vtsize = nwords - n_in_regs;
945 cinfo->args [n].reg = gr;
947 #ifdef __mono_ppc64__
948 if (nwords == 1 && is_pinvoke)
949 cinfo->args [n].bytes = size;
952 cinfo->args [n].bytes = 0;
954 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
955 /*g_print ("offset for arg %d at %d\n", n, PPC_STACK_PARAM_OFFSET + stack_size);*/
956 stack_size += nwords * sizeof (gpointer);
959 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
960 cinfo->args [n].regtype = RegTypeStructByAddr;
961 cinfo->args [n].vtsize = size;
966 case MONO_TYPE_TYPEDBYREF: {
967 int size = sizeof (MonoTypedRef);
968 /* keep in sync or merge with the valuetype case */
969 #if PPC_PASS_STRUCTS_BY_VALUE
971 int nwords = (size + sizeof (gpointer) -1 ) / sizeof (gpointer);
972 cinfo->args [n].regtype = RegTypeStructByVal;
973 if (gr <= PPC_LAST_ARG_REG) {
974 int rest = PPC_LAST_ARG_REG - gr + 1;
975 int n_in_regs = rest >= nwords? nwords: rest;
976 cinfo->args [n].size = n_in_regs;
977 cinfo->args [n].vtsize = nwords - n_in_regs;
978 cinfo->args [n].reg = gr;
981 cinfo->args [n].size = 0;
982 cinfo->args [n].vtsize = nwords;
984 #ifdef __mono_ppc64__
985 if (nwords == 1 && is_pinvoke)
986 cinfo->args [n].bytes = size;
989 cinfo->args [n].bytes = 0;
990 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
991 /*g_print ("offset for arg %d at %d\n", n, PPC_STACK_PARAM_OFFSET + stack_size);*/
992 stack_size += nwords * sizeof (gpointer);
995 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
996 cinfo->args [n].regtype = RegTypeStructByAddr;
997 cinfo->args [n].vtsize = size;
/* I8/U8 (case labels missing from this dump): pair-aligned on 32-bit */
1004 cinfo->args [n].size = 8;
1005 add_general (&gr, &stack_size, cinfo->args + n, sizeof (gpointer) == 8);
/* R4 */
1009 cinfo->args [n].size = 4;
1011 /* It was 7, now it is 8 in LinuxPPC */
1012 if (fr <= PPC_LAST_FPARG_REG) {
1013 cinfo->args [n].regtype = RegTypeFP;
1014 cinfo->args [n].reg = fr;
1016 FP_ALSO_IN_REG (gr ++);
1017 ALWAYS_ON_STACK (stack_size += sizeof (gpointer));
1019 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size + MONO_PPC_32_64_CASE (0, 4);
1020 cinfo->args [n].regtype = RegTypeBase;
1021 cinfo->args [n].reg = ppc_sp; /* in the caller*/
1022 stack_size += sizeof (gpointer);
/* R8 */
1027 cinfo->args [n].size = 8;
1028 /* It was 7, now it is 8 in LinuxPPC */
1029 if (fr <= PPC_LAST_FPARG_REG) {
1030 cinfo->args [n].regtype = RegTypeFP;
1031 cinfo->args [n].reg = fr;
1033 FP_ALSO_IN_REG (gr += sizeof (double) / sizeof (gpointer));
1034 ALWAYS_ON_STACK (stack_size += 8);
1036 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
1037 cinfo->args [n].regtype = RegTypeBase;
1038 cinfo->args [n].reg = ppc_sp; /* in the caller*/
1044 g_error ("Can't trampoline 0x%x", sig->params [i]->type);
/* trailing sig cookie for varargs with no fixed-arg sentinel hit in the loop */
1048 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos)) {
1049 /* Prevent implicit arguments and sig_cookie from
1050 being passed in registers */
1051 gr = PPC_LAST_ARG_REG + 1;
1052 /* Emit the signature cookie just before the implicit arguments */
1053 add_general (&gr, &stack_size, &cinfo->sig_cookie, TRUE);
/* return value placement */
1057 simpletype = mini_type_get_underlying_type (NULL, sig->ret)->type;
1058 switch (simpletype) {
1059 case MONO_TYPE_BOOLEAN:
1064 case MONO_TYPE_CHAR:
1070 case MONO_TYPE_FNPTR:
1071 case MONO_TYPE_CLASS:
1072 case MONO_TYPE_OBJECT:
1073 case MONO_TYPE_SZARRAY:
1074 case MONO_TYPE_ARRAY:
1075 case MONO_TYPE_STRING:
1076 cinfo->ret.reg = ppc_r3;
1080 cinfo->ret.reg = ppc_r3;
1084 cinfo->ret.reg = ppc_f1;
1085 cinfo->ret.regtype = RegTypeFP;
1087 case MONO_TYPE_GENERICINST:
1088 if (!mono_type_generic_inst_is_valuetype (sig->ret)) {
1089 cinfo->ret.reg = ppc_r3;
1093 case MONO_TYPE_VALUETYPE:
1095 case MONO_TYPE_TYPEDBYREF:
1096 case MONO_TYPE_VOID:
1099 g_error ("Can't handle as return value 0x%x", sig->ret->type);
1103 /* align stack size to 16 */
1104 DEBUG (printf (" stack size: %d (%d)\n", (stack_size + 15) & ~15, stack_size));
1105 stack_size = (stack_size + 15) & ~15;
1107 cinfo->stack_usage = stack_size;
1112 allocate_tailcall_valuetype_addrs (MonoCompile *cfg)
1114 #if !PPC_PASS_STRUCTS_BY_VALUE
1115 MonoMethodSignature *sig = mono_method_signature (cfg->method);
1116 int num_structs = 0;
1119 if (!(cfg->flags & MONO_CFG_HAS_TAIL))
1122 for (i = 0; i < sig->param_count; ++i) {
1123 MonoType *type = mono_type_get_underlying_type (sig->params [i]);
1124 if (type->type == MONO_TYPE_VALUETYPE)
1129 cfg->tailcall_valuetype_addrs =
1130 mono_mempool_alloc0 (cfg->mempool, sizeof (MonoInst*) * num_structs);
1131 for (i = 0; i < num_structs; ++i) {
1132 cfg->tailcall_valuetype_addrs [i] =
1133 mono_compile_create_var (cfg, &mono_defaults.int_class->byval_arg, OP_LOCAL);
1134 cfg->tailcall_valuetype_addrs [i]->flags |= MONO_INST_INDIRECT;
/* mono_arch_allocate_vars: assign a stack slot or register to every
 * variable (return value, locals, arguments) according to the ppc calling
 * convention, and compute the total frame size (m->stack_offset). Uses r31
 * as frame register when the method uses alloca or has exception clauses.
 * NOTE(review): corrupted dump — baked line numbers and many missing lines
 * (case labels, braces, curinst updates); code kept byte-identical; too
 * intricate to rewrite safely from this view. */
1141 * Set var information according to the calling convention. ppc version.
1142 * The locals var stuff should most likely be split in another method.
1145 mono_arch_allocate_vars (MonoCompile *m)
1147 MonoMethodSignature *sig;
1148 MonoMethodHeader *header;
1150 int i, offset, size, align, curinst;
1151 int frame_reg = ppc_sp;
1153 guint32 locals_stack_size, locals_stack_align;
1155 allocate_tailcall_valuetype_addrs (m);
1157 m->flags |= MONO_CFG_HAS_SPILLUP;
1159 /* allow room for the vararg method args: void* and long/double */
1160 if (mono_jit_trace_calls != NULL && mono_trace_eval (m->method))
1161 m->param_area = MAX (m->param_area, sizeof (gpointer)*8);
1162 /* this is bug #60332: remove when #59509 is fixed, so no weird vararg
1163 * call convs needs to be handled this way.
1165 if (m->flags & MONO_CFG_HAS_VARARGS)
1166 m->param_area = MAX (m->param_area, sizeof (gpointer)*8);
1167 /* gtk-sharp and other broken code will dllimport vararg functions even with
1168 * non-varargs signatures. Since there is little hope people will get this right
1169 * we assume they won't.
1171 if (m->method->wrapper_type == MONO_WRAPPER_MANAGED_TO_NATIVE)
1172 m->param_area = MAX (m->param_area, sizeof (gpointer)*8);
1174 header = mono_method_get_header (m->method);
1177 * We use the frame register also for any method that has
1178 * exception clauses. This way, when the handlers are called,
1179 * the code will reference local variables using the frame reg instead of
1180 * the stack pointer: if we had to restore the stack pointer, we'd
1181 * corrupt the method frames that are already on the stack (since
1182 * filters get called before stack unwinding happens) when the filter
1183 * code would call any method (this also applies to finally etc.).
1185 if ((m->flags & MONO_CFG_HAS_ALLOCA) || header->num_clauses)
1186 frame_reg = ppc_r31;
1187 m->frame_reg = frame_reg;
1188 if (frame_reg != ppc_sp) {
1189 m->used_int_regs |= 1 << frame_reg;
1192 sig = mono_method_signature (m->method);
/* return value placement: struct returns use the hidden r3 pointer */
1196 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
1197 m->ret->opcode = OP_REGVAR;
1198 m->ret->inst_c0 = m->ret->dreg = ppc_r3;
1200 /* FIXME: handle long values? */
1201 switch (mini_type_get_underlying_type (m->generic_sharing_context, sig->ret)->type) {
1202 case MONO_TYPE_VOID:
1206 m->ret->opcode = OP_REGVAR;
1207 m->ret->inst_c0 = m->ret->dreg = ppc_f1;
1210 m->ret->opcode = OP_REGVAR;
1211 m->ret->inst_c0 = m->ret->dreg = ppc_r3;
1215 /* local vars are at a positive offset from the stack pointer */
1217 * also note that if the function uses alloca, we use ppc_r31
1218 * to point at the local variables.
1220 offset = PPC_MINIMAL_STACK_SIZE; /* linkage area */
1221 /* align the offset to 16 bytes: not sure this is needed here */
1223 //offset &= ~(16 - 1);
1225 /* add parameter area size for called functions */
1226 offset += m->param_area;
1228 offset &= ~(16 - 1);
1230 /* allow room to save the return value */
1231 if (mono_jit_trace_calls != NULL && mono_trace_eval (m->method))
1234 /* the MonoLMF structure is stored just below the stack pointer */
1237 /* this stuff should not be needed on ppc and the new jit,
1238 * because a call on ppc to the handlers doesn't change the
1239 * stack pointer and the jist doesn't manipulate the stack pointer
1240 * for operations involving valuetypes.
1242 /* reserve space to store the esp */
1243 offset += sizeof (gpointer);
1245 /* this is a global constant */
1246 mono_exc_esp_offset = offset;
/* slot holding the vret address for struct returns */
1249 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
1250 offset += sizeof(gpointer) - 1;
1251 offset &= ~(sizeof(gpointer) - 1);
1253 m->vret_addr->opcode = OP_REGOFFSET;
1254 m->vret_addr->inst_basereg = frame_reg;
1255 m->vret_addr->inst_offset = offset;
1257 if (G_UNLIKELY (m->verbose_level > 1)) {
1258 printf ("vret_addr =");
1259 mono_print_ins (m->vret_addr);
1262 offset += sizeof(gpointer);
/* locals */
1265 offsets = mono_allocate_stack_slots_full (m, FALSE, &locals_stack_size, &locals_stack_align);
1266 if (locals_stack_align) {
1267 offset += (locals_stack_align - 1);
1268 offset &= ~(locals_stack_align - 1);
1270 for (i = m->locals_start; i < m->num_varinfo; i++) {
1271 if (offsets [i] != -1) {
1272 MonoInst *inst = m->varinfo [i];
1273 inst->opcode = OP_REGOFFSET;
1274 inst->inst_basereg = frame_reg;
1275 inst->inst_offset = offset + offsets [i];
1277 g_print ("allocating local %d (%s) to %d\n",
1278 i, mono_type_get_name (inst->inst_vtype), inst->inst_offset);
1282 offset += locals_stack_size;
/* `this` argument */
1286 inst = m->args [curinst];
1287 if (inst->opcode != OP_REGVAR) {
1288 inst->opcode = OP_REGOFFSET;
1289 inst->inst_basereg = frame_reg;
1290 offset += sizeof (gpointer) - 1;
1291 offset &= ~(sizeof (gpointer) - 1);
1292 inst->inst_offset = offset;
1293 offset += sizeof (gpointer);
/* remaining arguments */
1298 for (i = 0; i < sig->param_count; ++i) {
1299 inst = m->args [curinst];
1300 if (inst->opcode != OP_REGVAR) {
1301 inst->opcode = OP_REGOFFSET;
1302 inst->inst_basereg = frame_reg;
1304 size = mono_type_native_stack_size (sig->params [i], (guint32*)&align);
1305 inst->backend.is_pinvoke = 1;
1307 size = mono_type_size (sig->params [i], &align);
1309 if (MONO_TYPE_ISSTRUCT (sig->params [i]) && size < sizeof (gpointer))
1310 size = align = sizeof (gpointer);
1311 offset += align - 1;
1312 offset &= ~(align - 1);
1313 inst->inst_offset = offset;
1319 /* some storage for fp conversions */
1322 m->arch.fp_conv_var_offset = offset;
1325 /* align the offset to 16 bytes */
1327 offset &= ~(16 - 1);
1330 m->stack_offset = offset;
1332 if (sig->call_convention == MONO_CALL_VARARG) {
1333 CallInfo *cinfo = calculate_sizes (m->method->signature, m->method->signature->pinvoke);
1335 m->sig_cookie = cinfo->sig_cookie.offset;
1342 mono_arch_create_vars (MonoCompile *cfg)
1344 MonoMethodSignature *sig = mono_method_signature (cfg->method);
1346 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
1347 cfg->vret_addr = mono_compile_create_var (cfg, &mono_defaults.int_class->byval_arg, OP_ARG);
1351 /* Fixme: we need an alignment solution for enter_method and mono_arch_call_opcode,
1352 * currently alignment in mono_arch_call_opcode is computed without arch_get_argument_info
1356 emit_sig_cookie (MonoCompile *cfg, MonoCallInst *call, CallInfo *cinfo)
1358 int sig_reg = mono_alloc_ireg (cfg);
1360 MONO_EMIT_NEW_ICONST (cfg, sig_reg, (gulong)call->signature);
1361 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG,
1362 ppc_r1, cinfo->sig_cookie.offset, sig_reg);
/*
 * mono_arch_emit_call:
 * Lower a managed call: move each argument into the register or stack
 * slot assigned by calculate_sizes (), emit the vararg signature cookie
 * where required, pass the valuetype-return address, and record the
 * call's stack usage on the cfg.
 * NOTE(review): the listing is elided in places — declarations of
 * 'in'/'ins'/'t'/'cinfo'/'n'/'i', several else branches and closing
 * braces are not visible in this excerpt.
 */
1366 mono_arch_emit_call (MonoCompile *cfg, MonoCallInst *call)
1369 MonoMethodSignature *sig;
1373 sig = call->signature;
1374 n = sig->param_count + sig->hasthis;
1376 cinfo = calculate_sizes (sig, sig->pinvoke);
1378 for (i = 0; i < n; ++i) {
1379 ArgInfo *ainfo = cinfo->args + i;
/* the implicit 'this' argument is typed as a native int */
1382 if (i >= sig->hasthis)
1383 t = sig->params [i - sig->hasthis];
1385 t = &mono_defaults.int_class->byval_arg;
1386 t = mini_type_get_underlying_type (cfg->generic_sharing_context, t);
/* for varargs the signature cookie is emitted just before the
 * first implicit (non-declared) argument */
1388 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos))
1389 emit_sig_cookie (cfg, call, cinfo);
1391 in = call->args [i];
1393 if (ainfo->regtype == RegTypeGeneral) {
1394 #ifndef __mono_ppc64__
/* on 32-bit ppc a 64-bit value occupies a register pair; its two
 * word vregs are in->dreg + 1 and in->dreg + 2 */
1395 if (!t->byref && ((t->type == MONO_TYPE_I8) || (t->type == MONO_TYPE_U8))) {
1396 MONO_INST_NEW (cfg, ins, OP_MOVE);
1397 ins->dreg = mono_alloc_ireg (cfg);
1398 ins->sreg1 = in->dreg + 1;
1399 MONO_ADD_INS (cfg->cbb, ins);
1400 mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, ainfo->reg + 1, FALSE);
1402 MONO_INST_NEW (cfg, ins, OP_MOVE);
1403 ins->dreg = mono_alloc_ireg (cfg);
1404 ins->sreg1 = in->dreg + 2;
1405 MONO_ADD_INS (cfg->cbb, ins);
1406 mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, ainfo->reg, FALSE);
/* simple case: a single integer register argument */
1410 MONO_INST_NEW (cfg, ins, OP_MOVE);
1411 ins->dreg = mono_alloc_ireg (cfg);
1412 ins->sreg1 = in->dreg;
1413 MONO_ADD_INS (cfg->cbb, ins);
1415 mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, ainfo->reg, FALSE);
/* valuetype passed by address: the actual copy is performed later
 * by mono_arch_emit_outarg_vt () via this OP_OUTARG_VT marker */
1417 } else if (ainfo->regtype == RegTypeStructByAddr) {
1418 MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1419 ins->opcode = OP_OUTARG_VT;
1420 ins->sreg1 = in->dreg;
1421 ins->klass = in->klass;
1422 ins->inst_p0 = call;
1423 ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1424 memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1425 MONO_ADD_INS (cfg->cbb, ins);
1426 } else if (ainfo->regtype == RegTypeStructByVal) {
1427 /* this is further handled in mono_arch_emit_outarg_vt () */
1428 MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1429 ins->opcode = OP_OUTARG_VT;
1430 ins->sreg1 = in->dreg;
1431 ins->klass = in->klass;
1432 ins->inst_p0 = call;
1433 ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1434 memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1435 MONO_ADD_INS (cfg->cbb, ins);
/* stack-passed argument: store it at its offset from ppc_r1 with a
 * store opcode matching the argument's size/kind */
1436 } else if (ainfo->regtype == RegTypeBase) {
1437 if (!t->byref && ((t->type == MONO_TYPE_I8) || (t->type == MONO_TYPE_U8))) {
1438 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI8_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1439 } else if (!t->byref && ((t->type == MONO_TYPE_R4) || (t->type == MONO_TYPE_R8))) {
1440 if (t->type == MONO_TYPE_R8)
1441 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER8_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1443 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER4_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1445 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1447 } else if (ainfo->regtype == RegTypeFP) {
1448 if (t->type == MONO_TYPE_VALUETYPE) {
1449 /* this is further handled in mono_arch_emit_outarg_vt () */
1450 MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1451 ins->opcode = OP_OUTARG_VT;
1452 ins->sreg1 = in->dreg;
1453 ins->klass = in->klass;
1454 ins->inst_p0 = call;
1455 ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1456 memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1457 MONO_ADD_INS (cfg->cbb, ins);
1459 cfg->flags |= MONO_CFG_HAS_FPOUT;
1461 int dreg = mono_alloc_freg (cfg);
/* a 4-byte FP slot needs the double narrowed to single precision */
1463 if (ainfo->size == 4) {
1464 MONO_EMIT_NEW_UNALU (cfg, OP_FCONV_TO_R4, dreg, in->dreg);
1466 MONO_INST_NEW (cfg, ins, OP_FMOVE);
1468 ins->sreg1 = in->dreg;
1469 MONO_ADD_INS (cfg->cbb, ins);
1472 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg, TRUE);
1473 cfg->flags |= MONO_CFG_HAS_FPOUT;
1476 g_assert_not_reached ();
1480 /* Emit the signature cookie in the case that there is no
1481 additional argument */
1482 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (n == sig->sentinelpos))
1483 emit_sig_cookie (cfg, call, cinfo);
/* valuetype return: the address where the callee must write the result
 * is passed in the register recorded in cinfo->struct_ret */
1485 if (cinfo->struct_ret) {
1488 MONO_INST_NEW (cfg, vtarg, OP_MOVE);
1489 vtarg->sreg1 = call->vret_var->dreg;
1490 vtarg->dreg = mono_alloc_preg (cfg);
1491 MONO_ADD_INS (cfg->cbb, vtarg);
1493 mono_call_inst_add_outarg_reg (cfg, call, vtarg->dreg, cinfo->struct_ret, FALSE);
1496 call->stack_usage = cinfo->stack_usage;
/* make sure the outgoing parameter area is large enough for this call */
1497 cfg->param_area = MAX (PPC_MINIMAL_PARAM_AREA_SIZE, MAX (cfg->param_area, cinfo->stack_usage));
1498 cfg->flags |= MONO_CFG_HAS_CALLS;
/*
 * mono_arch_emit_outarg_vt:
 * Expand an OP_OUTARG_VT marker (created by mono_arch_emit_call) into the
 * actual loads/stores that pass a valuetype argument: by value in integer
 * registers (with any overflow memcpy'd to the stack), in an FP register,
 * or by reference to a fresh local copy.
 * NOTE(review): the listing is elided — declarations of 'size'/'soffset'/
 * 'load', some else branches and closing braces are not visible here.
 */
1504 mono_arch_emit_outarg_vt (MonoCompile *cfg, MonoInst *ins, MonoInst *src)
1506 MonoCallInst *call = (MonoCallInst*)ins->inst_p0;
1507 ArgInfo *ainfo = ins->inst_p1;
1508 int ovf_size = ainfo->vtsize;
1509 int doffset = ainfo->offset;
1510 int i, soffset, dreg;
1512 if (ainfo->regtype == RegTypeStructByVal) {
1519 * Darwin pinvokes needs some special handling for 1
1520 * and 2 byte arguments
1522 g_assert (ins->klass);
1523 if (call->signature->pinvoke)
1524 size = mono_class_native_size (ins->klass, NULL);
/* 1- and 2-byte structs are loaded directly into the register */
1525 if (size == 2 || size == 1) {
1526 int tmpr = mono_alloc_ireg (cfg);
1528 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI1_MEMBASE, tmpr, src->dreg, soffset);
1530 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI2_MEMBASE, tmpr, src->dreg, soffset);
1531 dreg = mono_alloc_ireg (cfg);
1532 MONO_EMIT_NEW_UNALU (cfg, OP_MOVE, dreg, tmpr);
1533 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg, FALSE);
/* load each word of the struct into consecutive argument registers */
1536 for (i = 0; i < ainfo->size; ++i) {
1537 int antipadding = 0;
1540 antipadding = sizeof (gpointer) - ainfo->bytes;
1542 dreg = mono_alloc_ireg (cfg);
1543 MONO_EMIT_NEW_LOAD_MEMBASE (cfg, dreg, src->dreg, soffset);
/* shift out the padding bytes so the value is right-aligned */
1545 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHR_UN_IMM, dreg, dreg, antipadding * 8);
1546 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg + i, FALSE);
1547 soffset += sizeof (gpointer);
/* the part that did not fit in registers is copied to the stack */
1550 mini_emit_memcpy (cfg, ppc_r1, doffset + soffset, src->dreg, soffset, ovf_size * sizeof (gpointer), 0);
1551 } else if (ainfo->regtype == RegTypeFP) {
1552 int tmpr = mono_alloc_freg (cfg);
1553 if (ainfo->size == 4)
1554 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR4_MEMBASE, tmpr, src->dreg, 0);
1556 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR8_MEMBASE, tmpr, src->dreg, 0);
1557 dreg = mono_alloc_freg (cfg);
1558 MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, dreg, tmpr);
1559 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg, TRUE);
/* pass-by-reference: make a local copy and pass its address, so the
 * callee cannot modify the caller's value */
1561 MonoInst *vtcopy = mono_compile_create_var (cfg, &src->klass->byval_arg, OP_LOCAL);
1565 /* FIXME: alignment? */
1566 if (call->signature->pinvoke) {
1567 size = mono_type_native_stack_size (&src->klass->byval_arg, NULL);
1568 vtcopy->backend.is_pinvoke = 1;
1570 size = mini_type_stack_size (cfg->generic_sharing_context, &src->klass->byval_arg, NULL);
1573 g_assert (ovf_size > 0);
1575 EMIT_NEW_VARLOADA (cfg, load, vtcopy, vtcopy->inst_vtype);
1576 mini_emit_memcpy (cfg, load->dreg, 0, src->dreg, 0, size, 0);
1579 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ppc_r1, ainfo->offset, load->dreg);
1581 mono_call_inst_add_outarg_reg (cfg, call, load->dreg, ainfo->reg, FALSE);
/*
 * mono_arch_emit_setret:
 * Move the method's return value into the ABI return location:
 * a register pair (via OP_SETLRET) for 64-bit values on 32-bit ppc,
 * an FP move for R4/R8, and a plain move otherwise.
 */
1586 mono_arch_emit_setret (MonoCompile *cfg, MonoMethod *method, MonoInst *val)
1588 MonoType *ret = mini_type_get_underlying_type (cfg->generic_sharing_context,
1589 mono_method_signature (method)->ret);
1592 #ifndef __mono_ppc64__
/* 64-bit result lives in the vreg pair val->dreg + 1 / + 2 */
1593 if (ret->type == MONO_TYPE_I8 || ret->type == MONO_TYPE_U8) {
1596 MONO_INST_NEW (cfg, ins, OP_SETLRET);
1597 ins->sreg1 = val->dreg + 1;
1598 ins->sreg2 = val->dreg + 2;
1599 MONO_ADD_INS (cfg->cbb, ins);
1603 if (ret->type == MONO_TYPE_R8 || ret->type == MONO_TYPE_R4) {
1604 MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, cfg->ret->dreg, val->dreg);
1608 MONO_EMIT_NEW_UNALU (cfg, OP_MOVE, cfg->ret->dreg, val->dreg);
1611 /* FIXME: this is just a useless hint: fix the interface to include the opcode */
1613 mono_arch_is_inst_imm (gint64 imm)
1619 * Allow tracing to work with this interface (with an optional argument)
/*
 * mono_arch_instrument_prolog:
 * Emit a call to the tracing function 'func' at method entry, passing the
 * MonoMethod in r3 and a NULL frame pointer in r4.
 * NOTE(review): the remainder of the function (the actual branch and
 * return of 'code') is elided from this excerpt.
 */
1623 mono_arch_instrument_prolog (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments)
1627 ppc_load (code, ppc_r3, cfg->method);
1628 ppc_li (code, ppc_r4, 0); /* NULL ebp for now */
1629 ppc_load_func (code, ppc_r0, func);
1630 ppc_mtlr (code, ppc_r0);
/*
 * mono_arch_instrument_epilog_full:
 * Emit a call to the tracing function 'func' at method exit.  The return
 * value (in r3/r4, f1, or none, depending on the return type) is saved to
 * the stack around the call and restored afterwards.
 *
 * Fix: the string .ctor special case used 'strcmp (...)' as the condition,
 * which is non-zero when the name is NOT ".ctor" — i.e. the special case
 * fired for every String method except the constructor.  strcmp () returns
 * 0 on a match, so the test must be negated (other Mono backends use
 * '!strcmp').  String .ctor icalls return the new string even though the
 * signature says void, so the result must be saved.
 *
 * NOTE(review): the listing is elided in places (case labels, else
 * branches, the final blr/return are not visible in this excerpt).
 */
1644 mono_arch_instrument_epilog_full (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments, gboolean preserve_argument_registers)
1647 int save_mode = SAVE_NONE;
1649 MonoMethod *method = cfg->method;
1650 int rtype = mini_type_get_underlying_type (cfg->generic_sharing_context,
1651 mono_method_signature (method)->ret)->type;
1652 int save_offset = PPC_STACK_PARAM_OFFSET + cfg->param_area;
1656 offset = code - cfg->native_code;
1657 /* we need about 16 instructions */
1658 if (offset > (cfg->code_size - 16 * 4)) {
1659 cfg->code_size *= 2;
1660 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
1661 code = cfg->native_code + offset;
1665 case MONO_TYPE_VOID:
1666 /* special case string .ctor icall */
1667 if (!strcmp (".ctor", method->name) && method->klass == mono_defaults.string_class)
1668 save_mode = SAVE_ONE;
1670 save_mode = SAVE_NONE;
1672 #ifndef __mono_ppc64__
1675 save_mode = SAVE_TWO;
1680 save_mode = SAVE_FP;
1682 case MONO_TYPE_VALUETYPE:
1683 save_mode = SAVE_STRUCT;
1686 save_mode = SAVE_ONE;
/* save the return value, and set up the arguments for the trace call */
1690 switch (save_mode) {
1692 ppc_stw (code, ppc_r3, save_offset, cfg->frame_reg);
1693 ppc_stw (code, ppc_r4, save_offset + 4, cfg->frame_reg);
1694 if (enable_arguments) {
1695 ppc_mr (code, ppc_r5, ppc_r4);
1696 ppc_mr (code, ppc_r4, ppc_r3);
1700 ppc_store_reg (code, ppc_r3, save_offset, cfg->frame_reg);
1701 if (enable_arguments) {
1702 ppc_mr (code, ppc_r4, ppc_r3);
1706 ppc_stfd (code, ppc_f1, save_offset, cfg->frame_reg);
1707 if (enable_arguments) {
1708 /* FIXME: what reg? */
1709 ppc_fmr (code, ppc_f3, ppc_f1);
1710 /* FIXME: use 8 byte load on PPC64 */
1711 ppc_lwz (code, ppc_r4, save_offset, cfg->frame_reg);
1712 ppc_lwz (code, ppc_r5, save_offset + 4, cfg->frame_reg);
1716 if (enable_arguments) {
1717 /* FIXME: get the actual address */
1718 ppc_mr (code, ppc_r4, ppc_r3);
/* call the trace function with the method in r3 */
1726 ppc_load (code, ppc_r3, cfg->method);
1727 ppc_load_func (code, ppc_r0, func);
1728 ppc_mtlr (code, ppc_r0);
/* restore the saved return value */
1731 switch (save_mode) {
1733 ppc_lwz (code, ppc_r3, save_offset, cfg->frame_reg);
1734 ppc_lwz (code, ppc_r4, save_offset + 4, cfg->frame_reg);
1737 ppc_load_reg (code, ppc_r3, save_offset, cfg->frame_reg);
1740 ppc_lfd (code, ppc_f1, save_offset, cfg->frame_reg);
1750 * Conditional branches have a small offset, so if it is likely overflowed,
1751 * we do a branch to the end of the method (uncond branches have much larger
1752 * offsets) where we perform the conditional and jump back unconditionally.
1753 * It's slightly slower, since we add two uncond branches, but it's very simple
1754 * with the current patch implementation and such large methods are likely not
1755 * going to be perf critical anyway.
1760 const char *exception;
/*
 * EMIT_COND_BRANCH_FLAGS:
 * Emit a conditional branch to ins->inst_true_bb.  If the (estimated)
 * displacement may not fit in the 16-bit bc offset, register a
 * MONO_PATCH_INFO_BB_OVF patch so the branch is routed through an
 * unconditional trampoline at the end of the method (see the comment
 * above about big-method branch handling).
 *
 * Fix: the far-branch test read 'ppc_is_imm16 (ppc_is_imm16 (br_disp -
 * 1024))' — the inner call yields 0 or 1, which always fits in an imm16,
 * so the negative-displacement overflow check could never fire and a far
 * backward branch would emit an out-of-range bc.  The redundant inner
 * call is removed so both directions are checked.
 */
1767 #define EMIT_COND_BRANCH_FLAGS(ins,b0,b1) \
1768 if (0 && ins->inst_true_bb->native_offset) { \
1769 ppc_bc (code, (b0), (b1), (code - cfg->native_code + ins->inst_true_bb->native_offset) & 0xffff); \
1771 int br_disp = ins->inst_true_bb->max_offset - offset; \
1772 if (!ppc_is_imm16 (br_disp + 1024) || ! ppc_is_imm16 (br_disp - 1024)) { \
1773 MonoOvfJump *ovfj = mono_mempool_alloc (cfg->mempool, sizeof (MonoOvfJump)); \
1774 ovfj->data.bb = ins->inst_true_bb; \
1775 ovfj->ip_offset = 0; \
1776 ovfj->b0_cond = (b0); \
1777 ovfj->b1_cond = (b1); \
1778 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB_OVF, ovfj); \
1781 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_true_bb); \
1782 ppc_bc (code, (b0), (b1), 0); \
1786 #define EMIT_COND_BRANCH(ins,cond) EMIT_COND_BRANCH_FLAGS(ins, branch_b0_table [(cond)], branch_b1_table [(cond)])
1788 /* emit an exception if condition is fail
1790 * We assign the extra code used to throw the implicit exceptions
1791 * to cfg->bb_exit as far as the big branch handling is concerned
1793 #define EMIT_COND_SYSTEM_EXCEPTION_FLAGS(b0,b1,exc_name) \
1795 int br_disp = cfg->bb_exit->max_offset - offset; \
1796 if (!ppc_is_imm16 (br_disp + 1024) || ! ppc_is_imm16 (ppc_is_imm16 (br_disp - 1024))) { \
1797 MonoOvfJump *ovfj = mono_mempool_alloc (cfg->mempool, sizeof (MonoOvfJump)); \
1798 ovfj->data.exception = (exc_name); \
1799 ovfj->ip_offset = code - cfg->native_code; \
1800 ovfj->b0_cond = (b0); \
1801 ovfj->b1_cond = (b1); \
1802 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC_OVF, ovfj); \
1804 cfg->bb_exit->max_offset += 24; \
1806 mono_add_patch_info (cfg, code - cfg->native_code, \
1807 MONO_PATCH_INFO_EXC, exc_name); \
1808 ppc_bcl (code, (b0), (b1), 0); \
1812 #define EMIT_COND_SYSTEM_EXCEPTION(cond,exc_name) EMIT_COND_SYSTEM_EXCEPTION_FLAGS(branch_b0_table [(cond)], branch_b1_table [(cond)], (exc_name))
1815 mono_arch_peephole_pass_1 (MonoCompile *cfg, MonoBasicBlock *bb)
/*
 * normalize_opcode:
 * Map width-specific (32/64-bit) load/store/shift opcodes onto a single
 * canonical opcode so the peephole pass can match them uniformly.
 * NOTE(review): the return type, braces and default case are elided from
 * this excerpt.
 */
1820 normalize_opcode (int opcode)
1823 case MONO_PPC_32_64_CASE (OP_LOADI4_MEMBASE, OP_LOADI8_MEMBASE):
1824 return OP_LOAD_MEMBASE;
1825 case MONO_PPC_32_64_CASE (OP_LOADI4_MEMINDEX, OP_LOADI8_MEMINDEX):
1826 return OP_LOAD_MEMINDEX;
1827 case MONO_PPC_32_64_CASE (OP_STOREI4_MEMBASE_REG, OP_STOREI8_MEMBASE_REG):
1828 return OP_STORE_MEMBASE_REG;
1829 case MONO_PPC_32_64_CASE (OP_STOREI4_MEMBASE_IMM, OP_STOREI8_MEMBASE_IMM):
1830 return OP_STORE_MEMBASE_IMM;
1831 case MONO_PPC_32_64_CASE (OP_STOREI4_MEMINDEX, OP_STOREI8_MEMINDEX):
1832 return OP_STORE_MEMINDEX;
1833 case MONO_PPC_32_64_CASE (OP_ISHR_IMM, OP_LSHR_IMM):
1835 case MONO_PPC_32_64_CASE (OP_ISHR_UN_IMM, OP_LSHR_UN_IMM):
1836 return OP_SHR_UN_IMM;
/*
 * mono_arch_peephole_pass_2:
 * Local peephole optimizations over one basic block: strength-reduce
 * multiplications, forward stored values into subsequent loads of the
 * same location, and coalesce redundant moves.
 * NOTE(review): the listing is elided — some case labels, break
 * statements and the last_ins bookkeeping are not visible here.
 */
1843 mono_arch_peephole_pass_2 (MonoCompile *cfg, MonoBasicBlock *bb)
1845 MonoInst *ins, *n, *last_ins = NULL;
1847 MONO_BB_FOR_EACH_INS_SAFE (bb, n, ins) {
1848 switch (normalize_opcode (ins->opcode)) {
1850 /* remove unnecessary multiplication with 1 */
1851 if (ins->inst_imm == 1) {
1852 if (ins->dreg != ins->sreg1) {
1853 ins->opcode = OP_MOVE;
1855 MONO_DELETE_INS (bb, ins);
/* multiplication by a power of two becomes a left shift */
1859 int power2 = mono_is_power_of_two (ins->inst_imm);
1861 ins->opcode = OP_SHL_IMM;
1862 ins->inst_imm = power2;
1866 case OP_LOAD_MEMBASE:
1868 * OP_STORE_MEMBASE_REG reg, offset(basereg)
1869 * OP_LOAD_MEMBASE offset(basereg), reg
1871 if (last_ins && normalize_opcode (last_ins->opcode) == OP_STORE_MEMBASE_REG &&
1872 ins->inst_basereg == last_ins->inst_destbasereg &&
1873 ins->inst_offset == last_ins->inst_offset) {
/* reloading the value just stored: drop the load, or turn it
 * into a register move when the destination differs */
1874 if (ins->dreg == last_ins->sreg1) {
1875 MONO_DELETE_INS (bb, ins);
1878 //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
1879 ins->opcode = OP_MOVE;
1880 ins->sreg1 = last_ins->sreg1;
1884 * Note: reg1 must be different from the basereg in the second load
1885 * OP_LOAD_MEMBASE offset(basereg), reg1
1886 * OP_LOAD_MEMBASE offset(basereg), reg2
1888 * OP_LOAD_MEMBASE offset(basereg), reg1
1889 * OP_MOVE reg1, reg2
1891 } else if (last_ins && normalize_opcode (last_ins->opcode) == OP_LOAD_MEMBASE &&
1892 ins->inst_basereg != last_ins->dreg &&
1893 ins->inst_basereg == last_ins->inst_basereg &&
1894 ins->inst_offset == last_ins->inst_offset) {
1896 if (ins->dreg == last_ins->dreg) {
1897 MONO_DELETE_INS (bb, ins);
1900 ins->opcode = OP_MOVE;
1901 ins->sreg1 = last_ins->dreg;
1904 //g_assert_not_reached ();
1908 * OP_STORE_MEMBASE_IMM imm, offset(basereg)
1909 * OP_LOAD_MEMBASE offset(basereg), reg
1911 * OP_STORE_MEMBASE_IMM imm, offset(basereg)
1912 * OP_ICONST reg, imm
1914 } else if (last_ins && normalize_opcode (last_ins->opcode) == OP_STORE_MEMBASE_IMM &&
1915 ins->inst_basereg == last_ins->inst_destbasereg &&
1916 ins->inst_offset == last_ins->inst_offset) {
1917 //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
1918 ins->opcode = OP_ICONST;
1919 ins->inst_c0 = last_ins->inst_imm;
1920 g_assert_not_reached (); // check this rule
/* narrow load right after a same-width store: replace the memory
 * round-trip with a sign/zero extension of the stored register */
1924 case OP_LOADU1_MEMBASE:
1925 case OP_LOADI1_MEMBASE:
1926 if (last_ins && (last_ins->opcode == OP_STOREI1_MEMBASE_REG) &&
1927 ins->inst_basereg == last_ins->inst_destbasereg &&
1928 ins->inst_offset == last_ins->inst_offset) {
1929 ins->opcode = (ins->opcode == OP_LOADI1_MEMBASE) ? OP_ICONV_TO_I1 : OP_ICONV_TO_U1;
1930 ins->sreg1 = last_ins->sreg1;
1933 case OP_LOADU2_MEMBASE:
1934 case OP_LOADI2_MEMBASE:
1935 if (last_ins && (last_ins->opcode == OP_STOREI2_MEMBASE_REG) &&
1936 ins->inst_basereg == last_ins->inst_destbasereg &&
1937 ins->inst_offset == last_ins->inst_offset) {
1938 ins->opcode = (ins->opcode == OP_LOADI2_MEMBASE) ? OP_ICONV_TO_I2 : OP_ICONV_TO_U2;
1939 ins->sreg1 = last_ins->sreg1;
1942 #ifdef __mono_ppc64__
1943 case OP_LOADU4_MEMBASE:
1944 case OP_LOADI4_MEMBASE:
1945 if (last_ins && (last_ins->opcode == OP_STOREI4_MEMBASE_REG) &&
1946 ins->inst_basereg == last_ins->inst_destbasereg &&
1947 ins->inst_offset == last_ins->inst_offset) {
1948 ins->opcode = (ins->opcode == OP_LOADI4_MEMBASE) ? OP_ICONV_TO_I4 : OP_ICONV_TO_U4;
1949 ins->sreg1 = last_ins->sreg1;
1954 ins->opcode = OP_MOVE;
/* a move onto itself is dead */
1958 if (ins->dreg == ins->sreg1) {
1959 MONO_DELETE_INS (bb, ins);
1963 * OP_MOVE sreg, dreg
1964 * OP_MOVE dreg, sreg
1966 if (last_ins && last_ins->opcode == OP_MOVE &&
1967 ins->sreg1 == last_ins->dreg &&
1968 ins->dreg == last_ins->sreg1) {
1969 MONO_DELETE_INS (bb, ins);
1977 bb->last_ins = last_ins;
/*
 * mono_arch_decompose_opts:
 * Decompose opcodes the PPC backend cannot emit directly into sequences
 * of simpler instructions (int<->float conversions via the 0x43300000
 * magic-double trick, finiteness checks, and 32-bit overflow arithmetic
 * done in 64-bit registers on ppc64).
 * NOTE(review): 'offset' is used below without a visible declaration —
 * presumably cfg->arch.fp_conv_var_offset (as in emit_float_to_int);
 * the declaration line is elided from this excerpt.  Several case
 * labels and closing braces are elided as well.
 */
1981 mono_arch_decompose_opts (MonoCompile *cfg, MonoInst *ins)
1983 switch (ins->opcode) {
1984 case OP_ICONV_TO_R_UN: {
/* build the double 0x43300000:<x> in memory, then subtract the bias
 * 0x4330000000000000 to obtain (double)(guint32)x */
1985 static const guint64 adjust_val = 0x4330000000000000ULL;
1986 int msw_reg = mono_alloc_ireg (cfg);
1987 int adj_reg = mono_alloc_freg (cfg);
1988 int tmp_reg = mono_alloc_freg (cfg);
1989 int basereg = ppc_sp;
1991 MONO_EMIT_NEW_ICONST (cfg, msw_reg, 0x43300000);
1992 if (!ppc_is_imm16 (offset + 4)) {
1993 basereg = mono_alloc_ireg (cfg);
1994 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IADD_IMM, basereg, cfg->frame_reg, offset);
1996 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset, msw_reg);
1997 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset + 4, ins->sreg1);
1998 MONO_EMIT_NEW_LOAD_R8 (cfg, adj_reg, &adjust_val);
1999 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR8_MEMBASE, tmp_reg, basereg, offset);
2000 MONO_EMIT_NEW_BIALU (cfg, OP_FSUB, ins->dreg, tmp_reg, adj_reg);
2001 ins->opcode = OP_NOP;
2004 #ifndef __mono_ppc64__
2005 case OP_ICONV_TO_R4:
2006 case OP_ICONV_TO_R8: {
2007 /* FIXME: change precision for CEE_CONV_R4 */
/* signed variant: xor with 0x80000000 and use the offset magic value
 * 0x4330000080000000 so the bias subtraction yields (double)(gint32)x */
2008 static const guint64 adjust_val = 0x4330000080000000ULL;
2009 int msw_reg = mono_alloc_ireg (cfg);
2010 int xored = mono_alloc_ireg (cfg);
2011 int adj_reg = mono_alloc_freg (cfg);
2012 int tmp_reg = mono_alloc_freg (cfg);
2013 int basereg = ppc_sp;
2015 if (!ppc_is_imm16 (offset + 4)) {
2016 basereg = mono_alloc_ireg (cfg);
2017 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IADD_IMM, basereg, cfg->frame_reg, offset);
2019 MONO_EMIT_NEW_ICONST (cfg, msw_reg, 0x43300000);
2020 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset, msw_reg);
2021 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_XOR_IMM, xored, ins->sreg1, 0x80000000);
2022 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset + 4, xored);
2023 MONO_EMIT_NEW_LOAD_R8 (cfg, adj_reg, (gpointer)&adjust_val);
2024 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR8_MEMBASE, tmp_reg, basereg, offset);
2025 MONO_EMIT_NEW_BIALU (cfg, OP_FSUB, ins->dreg, tmp_reg, adj_reg);
2026 if (ins->opcode == OP_ICONV_TO_R4)
2027 MONO_EMIT_NEW_UNALU (cfg, OP_FCONV_TO_R4, ins->dreg, ins->dreg);
2028 ins->opcode = OP_NOP;
/* NOTE(review): case label elided — from the OP_CHECK_FINITE below this
 * appears to be the CKFINITE decomposition: spill the double and inspect
 * its most significant word.  Confirm against the full source. */
2033 int msw_reg = mono_alloc_ireg (cfg);
2034 int basereg = ppc_sp;
2036 if (!ppc_is_imm16 (offset + 4)) {
2037 basereg = mono_alloc_ireg (cfg);
2038 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IADD_IMM, basereg, cfg->frame_reg, offset);
2040 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER8_MEMBASE_REG, basereg, offset, ins->sreg1);
2041 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI4_MEMBASE, msw_reg, basereg, offset);
2042 MONO_EMIT_NEW_UNALU (cfg, OP_CHECK_FINITE, -1, msw_reg);
2043 MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, ins->dreg, ins->sreg1);
2044 ins->opcode = OP_NOP;
2047 #ifdef __mono_ppc64__
2049 case OP_IADD_OVF_UN:
/* on ppc64, do the 32-bit overflow op in the upper half of a 64-bit
 * register so the hardware overflow detection applies */
2051 int shifted1_reg = mono_alloc_ireg (cfg);
2052 int shifted2_reg = mono_alloc_ireg (cfg);
2053 int result_shifted_reg = mono_alloc_ireg (cfg);
2055 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHL_IMM, shifted1_reg, ins->sreg1, 32);
2056 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHL_IMM, shifted2_reg, ins->sreg2, 32);
2057 MONO_EMIT_NEW_BIALU (cfg, ins->opcode, result_shifted_reg, shifted1_reg, shifted2_reg);
2058 if (ins->opcode == OP_IADD_OVF_UN)
2059 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHR_UN_IMM, ins->dreg, result_shifted_reg, 32);
2061 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHR_IMM, ins->dreg, result_shifted_reg, 32);
2062 ins->opcode = OP_NOP;
/*
 * mono_arch_decompose_long_opts:
 * On 32-bit ppc, decompose 64-bit arithmetic into word-pair operations:
 * each 64-bit vreg is represented by the pair (vreg + 1, vreg + 2), with
 * the carry propagated through the second operation.
 * NOTE(review): the case labels (OP_LADD_OVF, OP_LSUB_OVF, OP_LNEG, ...)
 * and break statements are elided from this excerpt — the grouping below
 * is inferred from the visible emit sequences.
 */
2069 mono_arch_decompose_long_opts (MonoCompile *cfg, MonoInst *ins)
2071 switch (ins->opcode) {
2073 /* ADC sets the condition code */
2074 MONO_EMIT_NEW_BIALU (cfg, OP_ADDCC, ins->dreg + 1, ins->sreg1 + 1, ins->sreg2 + 1);
2075 MONO_EMIT_NEW_BIALU (cfg, OP_ADD_OVF_CARRY, ins->dreg + 2, ins->sreg1 + 2, ins->sreg2 + 2);
2078 case OP_LADD_OVF_UN:
2079 /* ADC sets the condition code */
2080 MONO_EMIT_NEW_BIALU (cfg, OP_ADDCC, ins->dreg + 1, ins->sreg1 + 1, ins->sreg2 + 1);
2081 MONO_EMIT_NEW_BIALU (cfg, OP_ADD_OVF_UN_CARRY, ins->dreg + 2, ins->sreg1 + 2, ins->sreg2 + 2);
2085 /* SBB sets the condition code */
2086 MONO_EMIT_NEW_BIALU (cfg, OP_SUBCC, ins->dreg + 1, ins->sreg1 + 1, ins->sreg2 + 1);
2087 MONO_EMIT_NEW_BIALU (cfg, OP_SUB_OVF_CARRY, ins->dreg + 2, ins->sreg1 + 2, ins->sreg2 + 2);
2090 case OP_LSUB_OVF_UN:
2091 /* SBB sets the condition code */
2092 MONO_EMIT_NEW_BIALU (cfg, OP_SUBCC, ins->dreg + 1, ins->sreg1 + 1, ins->sreg2 + 1);
2093 MONO_EMIT_NEW_BIALU (cfg, OP_SUB_OVF_UN_CARRY, ins->dreg + 2, ins->sreg1 + 2, ins->sreg2 + 2);
/* 64-bit negate: two's complement across the pair (~x + 1) */
2097 /* This is the old version from inssel-long32.brg */
2098 MONO_EMIT_NEW_UNALU (cfg, OP_INOT, ins->dreg + 1, ins->sreg1 + 1);
2099 MONO_EMIT_NEW_UNALU (cfg, OP_INOT, ins->dreg + 2, ins->sreg1 + 2);
2100 /* ADC sets the condition codes */
2101 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_ADC_IMM, ins->dreg + 1, ins->dreg + 1, 1);
2102 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_ADC_IMM, ins->dreg + 2, ins->dreg + 2, 0);
2111 * the branch_b0_table should maintain the order of these
2125 branch_b0_table [] = {
2140 branch_b1_table [] = {
/*
 * NEW_INS: allocate a new instruction of the given opcode and insert it
 * after 'last_ins' in the current basic block (used by the lowering pass
 * to splice in helper instructions).
 * NOTE(review): the macro's remaining continuation lines (last_ins
 * update / while(0)) are elided from this excerpt.
 */
2154 #define NEW_INS(cfg,dest,op) do { \
2155 MONO_INST_NEW((cfg), (dest), (op)); \
2156 mono_bblock_insert_after_ins (bb, last_ins, (dest)); \
/*
 * map_to_reg_reg_op:
 * Translate an opcode with an immediate/membase operand into the
 * corresponding register-register (or memindex / store-reg) form, used
 * after the lowering pass has materialized the immediate into a vreg.
 * Falls back to the generic mono_op_imm_to_op () for anything not
 * listed explicitly.
 * NOTE(review): several case labels and return statements near the top
 * (compare variants) are elided from this excerpt.
 */
2160 map_to_reg_reg_op (int op)
2169 case OP_COMPARE_IMM:
2171 case OP_ICOMPARE_IMM:
2173 case OP_LCOMPARE_IMM:
/* membase loads become memindex loads (offset held in a register) */
2189 case OP_LOAD_MEMBASE:
2190 return OP_LOAD_MEMINDEX;
2191 case OP_LOADI4_MEMBASE:
2192 return OP_LOADI4_MEMINDEX;
2193 case OP_LOADU4_MEMBASE:
2194 return OP_LOADU4_MEMINDEX;
2195 case OP_LOADI8_MEMBASE:
2196 return OP_LOADI8_MEMINDEX;
2197 case OP_LOADU1_MEMBASE:
2198 return OP_LOADU1_MEMINDEX;
2199 case OP_LOADI2_MEMBASE:
2200 return OP_LOADI2_MEMINDEX;
2201 case OP_LOADU2_MEMBASE:
2202 return OP_LOADU2_MEMINDEX;
2203 case OP_LOADI1_MEMBASE:
2204 return OP_LOADI1_MEMINDEX;
2205 case OP_LOADR4_MEMBASE:
2206 return OP_LOADR4_MEMINDEX;
2207 case OP_LOADR8_MEMBASE:
2208 return OP_LOADR8_MEMINDEX;
/* membase stores become memindex stores */
2209 case OP_STOREI1_MEMBASE_REG:
2210 return OP_STOREI1_MEMINDEX;
2211 case OP_STOREI2_MEMBASE_REG:
2212 return OP_STOREI2_MEMINDEX;
2213 case OP_STOREI4_MEMBASE_REG:
2214 return OP_STOREI4_MEMINDEX;
2215 case OP_STOREI8_MEMBASE_REG:
2216 return OP_STOREI8_MEMINDEX;
2217 case OP_STORE_MEMBASE_REG:
2218 return OP_STORE_MEMINDEX;
2219 case OP_STORER4_MEMBASE_REG:
2220 return OP_STORER4_MEMINDEX;
2221 case OP_STORER8_MEMBASE_REG:
2222 return OP_STORER8_MEMINDEX;
/* immediate stores become register stores (the immediate is loaded
 * into a register first by the lowering pass) */
2223 case OP_STORE_MEMBASE_IMM:
2224 return OP_STORE_MEMBASE_REG;
2225 case OP_STOREI1_MEMBASE_IMM:
2226 return OP_STOREI1_MEMBASE_REG;
2227 case OP_STOREI2_MEMBASE_IMM:
2228 return OP_STOREI2_MEMBASE_REG;
2229 case OP_STOREI4_MEMBASE_IMM:
2230 return OP_STOREI4_MEMBASE_REG;
2231 case OP_STOREI8_MEMBASE_IMM:
2232 return OP_STOREI8_MEMBASE_REG;
2234 return mono_op_imm_to_op (op);
2237 //#define map_to_reg_reg_op(op) (cfg->new_ir? mono_op_imm_to_op (op): map_to_reg_reg_op (op))
/*
 * compare_opcode_is_unsigned:
 * True when the branch/exception/set opcode consuming a compare result
 * uses unsigned semantics, so the lowering pass knows whether a compare
 * immediate must fit in an unsigned (uimm16) or signed (imm16) field.
 */
2239 #define compare_opcode_is_unsigned(opcode) \
2240 (((opcode) >= CEE_BNE_UN && (opcode) <= CEE_BLT_UN) || \
2241 ((opcode) >= OP_IBNE_UN && (opcode) <= OP_IBLT_UN) || \
2242 ((opcode) >= OP_LBNE_UN && (opcode) <= OP_LBLT_UN) || \
2243 ((opcode) >= OP_COND_EXC_NE_UN && (opcode) <= OP_COND_EXC_LT_UN) || \
2244 ((opcode) >= OP_COND_EXC_INE_UN && (opcode) <= OP_COND_EXC_ILT_UN) || \
2245 ((opcode) == OP_CLT_UN || (opcode) == OP_CGT_UN || \
2246 (opcode) == OP_ICLT_UN || (opcode) == OP_ICGT_UN || \
2247 (opcode) == OP_LCLT_UN || (opcode) == OP_LCGT_UN))
2250 * Remove from the instruction list the instructions that can't be
2251 * represented with very simple instructions with no register
2255 mono_arch_lowering_pass (MonoCompile *cfg, MonoBasicBlock *bb)
2257 MonoInst *ins, *next, *temp, *last_ins = NULL;
2260 MONO_BB_FOR_EACH_INS (bb, ins) {
2262 switch (ins->opcode) {
2263 case OP_IDIV_UN_IMM:
2266 case OP_IREM_UN_IMM:
2267 NEW_INS (cfg, temp, OP_ICONST);
2268 temp->inst_c0 = ins->inst_imm;
2269 temp->dreg = mono_alloc_ireg (cfg);
2270 ins->sreg2 = temp->dreg;
2271 if (ins->opcode == OP_IDIV_IMM)
2272 ins->opcode = OP_IDIV;
2273 else if (ins->opcode == OP_IREM_IMM)
2274 ins->opcode = OP_IREM;
2275 else if (ins->opcode == OP_IDIV_UN_IMM)
2276 ins->opcode = OP_IDIV_UN;
2277 else if (ins->opcode == OP_IREM_UN_IMM)
2278 ins->opcode = OP_IREM_UN;
2280 /* handle rem separately */
2284 CASE_PPC64 (OP_LREM)
2285 CASE_PPC64 (OP_LREM_UN) {
2287 /* we change a rem dest, src1, src2 to
2288 * div temp1, src1, src2
2289 * mul temp2, temp1, src2
2290 * sub dest, src1, temp2
2292 if (ins->opcode == OP_IREM || ins->opcode == OP_IREM_UN) {
2293 NEW_INS (cfg, mul, OP_IMUL);
2294 NEW_INS (cfg, temp, ins->opcode == OP_IREM? OP_IDIV: OP_IDIV_UN);
2295 ins->opcode = OP_ISUB;
2297 NEW_INS (cfg, mul, OP_LMUL);
2298 NEW_INS (cfg, temp, ins->opcode == OP_LREM? OP_LDIV: OP_LDIV_UN);
2299 ins->opcode = OP_LSUB;
2301 temp->sreg1 = ins->sreg1;
2302 temp->sreg2 = ins->sreg2;
2303 temp->dreg = mono_alloc_ireg (cfg);
2304 mul->sreg1 = temp->dreg;
2305 mul->sreg2 = ins->sreg2;
2306 mul->dreg = mono_alloc_ireg (cfg);
2307 ins->sreg2 = mul->dreg;
2311 CASE_PPC64 (OP_LADD_IMM)
2314 if (!ppc_is_imm16 (ins->inst_imm)) {
2315 NEW_INS (cfg, temp, OP_ICONST);
2316 temp->inst_c0 = ins->inst_imm;
2317 temp->dreg = mono_alloc_ireg (cfg);
2318 ins->sreg2 = temp->dreg;
2319 ins->opcode = map_to_reg_reg_op (ins->opcode);
2323 CASE_PPC64 (OP_LSUB_IMM)
2325 if (!ppc_is_imm16 (-ins->inst_imm)) {
2326 NEW_INS (cfg, temp, OP_ICONST);
2327 temp->inst_c0 = ins->inst_imm;
2328 temp->dreg = mono_alloc_ireg (cfg);
2329 ins->sreg2 = temp->dreg;
2330 ins->opcode = map_to_reg_reg_op (ins->opcode);
2342 gboolean is_imm = ((ins->inst_imm & 0xffff0000) && (ins->inst_imm & 0xffff));
2343 #ifdef __mono_ppc64__
2344 if (ins->inst_imm & 0xffffffff00000000UL)
2348 NEW_INS (cfg, temp, OP_ICONST);
2349 temp->inst_c0 = ins->inst_imm;
2350 temp->dreg = mono_alloc_ireg (cfg);
2351 ins->sreg2 = temp->dreg;
2352 ins->opcode = map_to_reg_reg_op (ins->opcode);
2361 NEW_INS (cfg, temp, OP_ICONST);
2362 temp->inst_c0 = ins->inst_imm;
2363 temp->dreg = mono_alloc_ireg (cfg);
2364 ins->sreg2 = temp->dreg;
2365 ins->opcode = map_to_reg_reg_op (ins->opcode);
2367 case OP_COMPARE_IMM:
2368 case OP_ICOMPARE_IMM:
2369 CASE_PPC64 (OP_LCOMPARE_IMM)
2371 /* Branch opts can eliminate the branch */
2372 if (!next || (!(MONO_IS_COND_BRANCH_OP (next) || MONO_IS_COND_EXC (next) || MONO_IS_SETCC (next)))) {
2373 ins->opcode = OP_NOP;
2377 if (compare_opcode_is_unsigned (next->opcode)) {
2378 if (!ppc_is_uimm16 (ins->inst_imm)) {
2379 NEW_INS (cfg, temp, OP_ICONST);
2380 temp->inst_c0 = ins->inst_imm;
2381 temp->dreg = mono_alloc_ireg (cfg);
2382 ins->sreg2 = temp->dreg;
2383 ins->opcode = map_to_reg_reg_op (ins->opcode);
2386 if (!ppc_is_imm16 (ins->inst_imm)) {
2387 NEW_INS (cfg, temp, OP_ICONST);
2388 temp->inst_c0 = ins->inst_imm;
2389 temp->dreg = mono_alloc_ireg (cfg);
2390 ins->sreg2 = temp->dreg;
2391 ins->opcode = map_to_reg_reg_op (ins->opcode);
2397 if (ins->inst_imm == 1) {
2398 ins->opcode = OP_MOVE;
2401 if (ins->inst_imm == 0) {
2402 ins->opcode = OP_ICONST;
2406 imm = mono_is_power_of_two (ins->inst_imm);
2408 ins->opcode = OP_SHL_IMM;
2409 ins->inst_imm = imm;
2412 if (!ppc_is_imm16 (ins->inst_imm)) {
2413 NEW_INS (cfg, temp, OP_ICONST);
2414 temp->inst_c0 = ins->inst_imm;
2415 temp->dreg = mono_alloc_ireg (cfg);
2416 ins->sreg2 = temp->dreg;
2417 ins->opcode = map_to_reg_reg_op (ins->opcode);
2420 case OP_LOCALLOC_IMM:
2421 NEW_INS (cfg, temp, OP_ICONST);
2422 temp->inst_c0 = ins->inst_imm;
2423 temp->dreg = mono_alloc_ireg (cfg);
2424 ins->sreg1 = temp->dreg;
2425 ins->opcode = OP_LOCALLOC;
2427 case OP_LOAD_MEMBASE:
2428 case OP_LOADI4_MEMBASE:
2429 CASE_PPC64 (OP_LOADI8_MEMBASE)
2430 case OP_LOADU4_MEMBASE:
2431 case OP_LOADI2_MEMBASE:
2432 case OP_LOADU2_MEMBASE:
2433 case OP_LOADI1_MEMBASE:
2434 case OP_LOADU1_MEMBASE:
2435 case OP_LOADR4_MEMBASE:
2436 case OP_LOADR8_MEMBASE:
2437 case OP_STORE_MEMBASE_REG:
2438 CASE_PPC64 (OP_STOREI8_MEMBASE_REG)
2439 case OP_STOREI4_MEMBASE_REG:
2440 case OP_STOREI2_MEMBASE_REG:
2441 case OP_STOREI1_MEMBASE_REG:
2442 case OP_STORER4_MEMBASE_REG:
2443 case OP_STORER8_MEMBASE_REG:
2444 /* we can do two things: load the immed in a register
2445 * and use an indexed load, or see if the immed can be
2446 * represented as an ad_imm + a load with a smaller offset
2447 * that fits. We just do the first for now, optimize later.
2449 if (ppc_is_imm16 (ins->inst_offset))
2451 NEW_INS (cfg, temp, OP_ICONST);
2452 temp->inst_c0 = ins->inst_offset;
2453 temp->dreg = mono_alloc_ireg (cfg);
2454 ins->sreg2 = temp->dreg;
2455 ins->opcode = map_to_reg_reg_op (ins->opcode);
2457 case OP_STORE_MEMBASE_IMM:
2458 case OP_STOREI1_MEMBASE_IMM:
2459 case OP_STOREI2_MEMBASE_IMM:
2460 case OP_STOREI4_MEMBASE_IMM:
2461 CASE_PPC64 (OP_STOREI8_MEMBASE_IMM)
2462 NEW_INS (cfg, temp, OP_ICONST);
2463 temp->inst_c0 = ins->inst_imm;
2464 temp->dreg = mono_alloc_ireg (cfg);
2465 ins->sreg1 = temp->dreg;
2466 ins->opcode = map_to_reg_reg_op (ins->opcode);
2468 goto loop_start; /* make it handle the possibly big ins->inst_offset */
2471 NEW_INS (cfg, temp, OP_ICONST);
2472 temp->inst_c0 = (gulong)ins->inst_p0;
2473 temp->dreg = mono_alloc_ireg (cfg);
2474 ins->inst_basereg = temp->dreg;
2475 ins->inst_offset = 0;
2476 ins->opcode = ins->opcode == OP_R4CONST? OP_LOADR4_MEMBASE: OP_LOADR8_MEMBASE;
2478 /* make it handle the possibly big ins->inst_offset
2479 * later optimize to use lis + load_membase
2485 bb->last_ins = last_ins;
2486 bb->max_vreg = cfg->next_vreg;
/*
 * emit_float_to_int:
 * Emit code converting the FP register 'sreg' to an integer in 'dreg':
 * fctidz/fctiwz into ppc_f0, store the result to the scratch slot at
 * cfg->arch.fp_conv_var_offset, reload it as an integer, then truncate /
 * sign- or zero-extend to the requested 'size'.
 * NOTE(review): 'sub_offset' is used without a visible declaration and
 * several else branches/size cases are elided from this excerpt.
 */
2490 emit_float_to_int (MonoCompile *cfg, guchar *code, int dreg, int sreg, int size, gboolean is_signed)
2492 long offset = cfg->arch.fp_conv_var_offset;
2494 /* sreg is a float, dreg is an integer reg. ppc_f0 is used a scratch */
2495 #ifdef __mono_ppc64__
2497 ppc_fctidz (code, ppc_f0, sreg);
2502 ppc_fctiwz (code, ppc_f0, sreg);
/* store the converted value and reload it into the integer register;
 * when the slot offset does not fit an imm16, compute the address in
 * dreg first */
2505 if (ppc_is_imm16 (offset + sub_offset)) {
2506 ppc_stfd (code, ppc_f0, offset, cfg->frame_reg);
2508 ppc_load_reg (code, dreg, offset + sub_offset, cfg->frame_reg);
2510 ppc_lwz (code, dreg, offset + sub_offset, cfg->frame_reg);
2512 ppc_load (code, dreg, offset);
2513 ppc_add (code, dreg, dreg, cfg->frame_reg);
2514 ppc_stfd (code, ppc_f0, 0, dreg);
2516 ppc_load_reg (code, dreg, sub_offset, dreg);
2518 ppc_lwz (code, dreg, sub_offset, dreg);
/* narrow to the destination width: mask for unsigned, extend for signed */
2522 ppc_andid (code, dreg, dreg, 0xff);
2524 ppc_andid (code, dreg, dreg, 0xffff);
2525 #ifdef __mono_ppc64__
2527 ppc_clrldi (code, dreg, dreg, 32);
2531 ppc_extsb (code, dreg, dreg);
2533 ppc_extsh (code, dreg, dreg);
2534 #ifdef __mono_ppc64__
2536 ppc_extsw (code, dreg, dreg);
2544 const guchar *target;
/* true when 'diff' fits in the 26-bit signed displacement of a ppc
 * branch instruction (bl reaches +/- 32 MB) */
2549 #define is_call_imm(diff) ((glong)(diff) >= -33554432 && (glong)(diff) <= 33554431)
/*
 * search_thunk_slot:
 * mono_domain_code_foreach () callback: scan one code chunk's thunk area
 * for either an existing thunk that already jumps to pdata->target (and
 * reuse it) or a free 16-byte slot (and emit a new lis/ori/mtctr/bcctr
 * thunk there), then patch pdata->code to branch to the thunk.
 * NOTE(review): the declarations of 'load'/'templ'/'count', the early
 * 'return 0' paths and the loop tail are elided from this excerpt.
 */
2552 search_thunk_slot (void *data, int csize, int bsize, void *user_data) {
2553 #ifdef __mono_ppc64__
/* not implemented for ppc64 (thunks there need a longer load sequence) */
2554 g_assert_not_reached ();
2556 PatchData *pdata = (PatchData*)user_data;
2557 guchar *code = data;
2558 guint32 *thunks = data;
2559 guint32 *endthunks = (guint32*)(code + bsize);
2563 int difflow, diffhigh;
2565 /* always ensure a call from pdata->code can reach to the thunks without further thunks */
2566 difflow = (char*)pdata->code - (char*)thunks;
2567 diffhigh = (char*)pdata->code - (char*)endthunks;
2568 if (!((is_call_imm (thunks) && is_call_imm (endthunks)) || (is_call_imm (difflow) && is_call_imm (diffhigh))))
/* build the load sequence we are looking for, to compare against
 * existing thunks */
2571 templ = (guchar*)load;
2572 ppc_load_sequence (templ, ppc_r0, pdata->target);
2574 //g_print ("thunk nentries: %d\n", ((char*)endthunks - (char*)thunks)/16);
2575 if ((pdata->found == 2) || (pdata->code >= code && pdata->code <= code + csize)) {
2576 while (thunks < endthunks) {
2577 //g_print ("looking for target: %p at %p (%08x-%08x)\n", pdata->target, thunks, thunks [0], thunks [1]);
2578 if ((thunks [0] == load [0]) && (thunks [1] == load [1])) {
/* an existing thunk for this target: just redirect the branch */
2579 ppc_patch (pdata->code, (guchar*)thunks);
2582 static int num_thunks = 0;
2584 if ((num_thunks % 20) == 0)
2585 g_print ("num_thunks lookup: %d\n", num_thunks);
2588 } else if ((thunks [0] == 0) && (thunks [1] == 0)) {
2589 /* found a free slot instead: emit thunk */
2590 code = (guchar*)thunks;
2591 ppc_lis (code, ppc_r0, (gulong)(pdata->target) >> 16);
2592 ppc_ori (code, ppc_r0, ppc_r0, (gulong)(pdata->target) & 0xffff);
2593 ppc_mtctr (code, ppc_r0);
2594 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
2595 mono_arch_flush_icache ((guchar*)thunks, 16);
2597 ppc_patch (pdata->code, (guchar*)thunks);
2600 static int num_thunks = 0;
2602 if ((num_thunks % 20) == 0)
2603 g_print ("num_thunks: %d\n", num_thunks);
2607 /* skip 16 bytes, the size of the thunk */
2611 //g_print ("failed thunk lookup for %p from %p at %p (%d entries)\n", pdata->target, pdata->code, data, count);
/*
 * handle_thunk:
 * Redirect the out-of-range branch at @code to @target through a thunk.
 * Walks the current domain's code chunks (under the domain lock) with
 * search_thunk_slot: a first pass looks for a reusable thunk; a second
 * pass takes the first available free slot. Aborts via g_assert if no
 * thunk could be found or created.
 */
2618 handle_thunk (int absolute, guchar *code, const guchar *target) {
2619 MonoDomain *domain = mono_domain_get ();
2623 pdata.target = target;
2624 pdata.absolute = absolute;
2627 mono_domain_lock (domain);
2628 mono_domain_code_foreach (domain, search_thunk_slot, &pdata);
2631 /* this uses the first available slot */
2633 mono_domain_code_foreach (domain, search_thunk_slot, &pdata);
2635 mono_domain_unlock (domain);
2637 if (pdata.found != 1)
2638 g_print ("thunk failed for %p from %p\n", target, code);
2639 g_assert (pdata.found == 1);
/* Overwrite the 4-byte instruction at @code with @ins and flush the icache
 * for that word so instruction fetch sees the new encoding. */
2643 patch_ins (guint8 *code, guint32 ins)
2645 *(guint32*)code = ins;
2646 mono_arch_flush_icache (code, 4);
/*
 * ppc_patch_full:
 * Retarget the branch / call sequence at @code to @target.
 * Strategy, in order of preference:
 *   1. For b/bl (primary opcode 18): encode a PC-relative 26-bit branch
 *      when the displacement fits, else an absolute branch (AA bit set)
 *      when the target address itself fits, else fall back to a thunk
 *      (handle_thunk).
 *   2. For conditional branches: patch the 16-bit displacement field,
 *      asserting it does not overflow.
 *   3. For lis/ori(...)/mtlr/blrl-style indirect call sequences (primary
 *      opcode 15 or a blrl/blr/bcctr word): rewrite the immediate-load
 *      instructions in place after asserting the expected sequence shape.
 * @is_fd: whether @target is a function descriptor (ppc64 ABI) that must
 * be resolved via mono_get_addr_from_ftnptr before loading.
 */
2650 ppc_patch_full (guchar *code, const guchar *target, gboolean is_fd)
2652 guint32 ins = *(guint32*)code;
2653 guint32 prim = ins >> 26;
2656 //g_print ("patching 0x%08x (0x%08x) to point to 0x%08x\n", code, ins, target);
2658 // prefer relative branches, they are more position independent (e.g. for AOT compilation).
2659 gint diff = target - code;
2662 if (diff <= 33554431){
/* (18 << 26) = opcode for b/bl; (ins & 1) preserves the LK (link) bit. */
2663 ins = (18 << 26) | (diff) | (ins & 1);
2664 patch_ins (code, ins);
2668 /* diff between 0 and -33554432 */
2669 if (diff >= -33554432){
2670 ins = (18 << 26) | (diff & ~0xfc000000) | (ins & 1);
2671 patch_ins (code, ins);
/* Relative displacement out of range: try an absolute branch (| 2 sets
 * the AA bit) when the target address itself fits in 26 bits. */
2676 if ((glong)target >= 0){
2677 if ((glong)target <= 33554431){
2678 ins = (18 << 26) | ((gulong) target) | (ins & 1) | 2;
2679 patch_ins (code, ins);
2683 if ((glong)target >= -33554432){
2684 ins = (18 << 26) | (((gulong)target) & ~0xfc000000) | (ins & 1) | 2;
2685 patch_ins (code, ins);
/* Neither relative nor absolute encoding fits: go through a thunk. */
2690 handle_thunk (TRUE, code, target);
2693 g_assert_not_reached ();
/* Conditional-branch case: rewrite only the 16-bit immediate field,
 * keeping the opcode/BO/BI half-word and the AA/LK bits (ins & 3). */
2701 guint32 li = (gulong)target;
2702 ins = (ins & 0xffff0000) | (ins & 3);
2703 ovf = li & 0xffff0000;
2704 if (ovf != 0 && ovf != 0xffff0000)
2705 g_assert_not_reached ();
2708 // FIXME: assert the top bits of li are 0
2710 gint diff = target - code;
2711 ins = (ins & 0xffff0000) | (ins & 3);
2712 ovf = diff & 0xffff0000;
2713 if (ovf != 0 && ovf != 0xffff0000)
2714 g_assert_not_reached ();
2718 patch_ins (code, ins);
/* Indirect call sequence: prim == 15 is addis/lis; the magic words are
 * blrl (0x4e800021), blr (0x4e800020) and bcctr (0x4e800420). */
2722 if (prim == 15 || ins == 0x4e800021 || ins == 0x4e800020 || ins == 0x4e800420) {
2723 #ifdef __mono_ppc64__
2724 guint32 *seq = (guint32*)code;
2725 guint32 *branch_ins;
2727 /* the trampoline code will try to patch the blrl, blr, bcctr */
2728 if (ins == 0x4e800021 || ins == 0x4e800020 || ins == 0x4e800420) {
2730 if (ppc_opcode (seq [-3]) == 58 || ppc_opcode (seq [-3]) == 31) /* ld || mr */
2735 if (ppc_opcode (seq [5]) == 58 || ppc_opcode (seq [5]) == 31) /* ld || mr */
2736 branch_ins = seq + 8;
2738 branch_ins = seq + 6;
2741 seq = (guint32*)code;
2742 /* this is the lis/ori/sldi/oris/ori/(ld/ld|mr/nop)/mtlr/blrl sequence */
2743 g_assert (mono_ppc_is_direct_call_sequence (branch_ins));
2745 if (ppc_opcode (seq [5]) == 58) { /* ld */
2746 g_assert (ppc_opcode (seq [6]) == 58); /* ld */
/* Not a function descriptor: replace the ld/ld pair — rewrite slot 5
 * as a plain mr so the loaded address is used directly. */
2749 guint8 *buf = (guint8*)&seq [5];
2750 ppc_mr (buf, ppc_r0, ppc_r11);
2755 target = mono_get_addr_from_ftnptr ((gpointer)target);
2758 /* FIXME: make this thread safe */
2759 /* FIXME: we're assuming we're using r11 here */
2760 ppc_load_sequence (code, ppc_r11, target);
2761 mono_arch_flush_icache ((guint8*)seq, 28);
2764 /* the trampoline code will try to patch the blrl, blr, bcctr */
2765 if (ins == 0x4e800021 || ins == 0x4e800020 || ins == 0x4e800420) {
2768 /* this is the lis/ori/mtlr/blrl sequence */
2769 seq = (guint32*)code;
/* Sanity-check the expected shape: lis (15), ori (24), mtlr (31), branch. */
2770 g_assert ((seq [0] >> 26) == 15);
2771 g_assert ((seq [1] >> 26) == 24);
2772 g_assert ((seq [2] >> 26) == 31);
2773 g_assert (seq [3] == 0x4e800021 || seq [3] == 0x4e800020 || seq [3] == 0x4e800420);
2774 /* FIXME: make this thread safe */
2775 ppc_lis (code, ppc_r0, (guint32)(target) >> 16);
2776 ppc_ori (code, ppc_r0, ppc_r0, (guint32)(target) & 0xffff);
2777 mono_arch_flush_icache (code - 8, 8);
2780 g_assert_not_reached ();
2782 // g_print ("patched with 0x%08x\n", ins);
/* Convenience wrapper around ppc_patch_full for targets that are plain
 * code addresses (is_fd == FALSE, i.e. not function descriptors). */
2786 ppc_patch (guchar *code, const guchar *target)
2788 ppc_patch_full (code, target, FALSE);
/*
 * emit_move_return_value:
 * After a call instruction, move the ABI return value into ins->dreg.
 * For FP calls the PowerPC ABI returns the value in f1; copy it out only
 * when the destination register differs.
 */
2792 emit_move_return_value (MonoCompile *cfg, MonoInst *ins, guint8 *code)
2794 switch (ins->opcode) {
2797 case OP_FCALL_MEMBASE:
2798 if (ins->dreg != ppc_f1)
2799 ppc_fmr (code, ins->dreg, ppc_f1);
 * emit_load_volatile_arguments:
 * Load volatile arguments from the stack to the original input registers.
 * Required before a tail call.
 *
 * This mirrors the argument spilling done in the prolog: for each incoming
 * argument (per the ArgInfo computed by calculate_sizes) the value is read
 * back from its stack home into the register(s) the callee expects.
2813 emit_load_volatile_arguments (MonoCompile *cfg, guint8 *code)
2815 MonoMethod *method = cfg->method;
2816 MonoMethodSignature *sig;
2820 int struct_index = 0;
2822 sig = mono_method_signature (method);
2824 /* This is the opposite of the code in emit_prolog */
2828 cinfo = calculate_sizes (sig, sig->pinvoke);
/* Valuetype return: reload the hidden return-buffer pointer argument. */
2830 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
2831 ArgInfo *ainfo = &cinfo->ret;
2832 inst = cfg->vret_addr;
2833 g_assert (ppc_is_imm16 (inst->inst_offset));
2834 ppc_load_reg (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2836 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
2837 ArgInfo *ainfo = cinfo->args + i;
2838 inst = cfg->args [pos];
2840 g_assert (inst->opcode != OP_REGVAR);
2841 g_assert (ppc_is_imm16 (inst->inst_offset));
2843 switch (ainfo->regtype) {
2844 case RegTypeGeneral:
/* Reload into an integer argument register, sized by the argument. */
2845 switch (ainfo->size) {
2847 ppc_lbz (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2850 ppc_lhz (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2852 #ifdef __mono_ppc64__
2854 ppc_lwz (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2858 ppc_load_reg (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
/* FP argument register: single vs. double precision load. */
2864 switch (ainfo->size) {
2866 ppc_lfs (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2869 ppc_lfd (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2872 g_assert_not_reached ();
/* Stack-passed argument: copy it back to the outgoing stack slot
 * (ainfo->offset off ainfo->reg) through scratch register r0. */
2877 MonoType *type = mini_type_get_underlying_type (cfg->generic_sharing_context,
2878 &inst->klass->byval_arg);
2880 #ifndef __mono_ppc64__
2881 if (type->type == MONO_TYPE_I8)
2885 if (MONO_TYPE_IS_REFERENCE (type) || type->type == MONO_TYPE_I8) {
2886 ppc_load_reg (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
2887 ppc_store_reg (code, ppc_r0, ainfo->offset, ainfo->reg);
2888 } else if (type->type == MONO_TYPE_I4) {
2889 ppc_lwz (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
2890 ppc_stw (code, ppc_r0, ainfo->offset, ainfo->reg);
2898 case RegTypeStructByVal: {
2909 * Darwin pinvokes needs some special handling
2910 * for 1 and 2 byte arguments
2912 if (method->signature->pinvoke)
2913 size = mono_class_native_size (inst->klass, NULL);
2914 if (size == 1 || size == 2) {
/* Struct passed by value in registers: reload one word per register. */
2919 for (j = 0; j < ainfo->size; ++j) {
2920 ppc_load_reg (code, ainfo->reg + j,
2921 inst->inst_offset + j * sizeof (gpointer),
2922 inst->inst_basereg);
2923 /* FIXME: shift to the right */
2930 case RegTypeStructByAddr: {
2931 MonoInst *addr = cfg->tailcall_valuetype_addrs [struct_index];
2933 g_assert (ppc_is_imm16 (addr->inst_offset));
2934 g_assert (!ainfo->offset);
2935 ppc_load_reg (code, ainfo->reg, addr->inst_offset, addr->inst_basereg);
2942 g_assert_not_reached ();
/* This must be kept in sync with emit_load_volatile_arguments(). */
/*
 * ins_native_length:
 * Return the maximum number of native code bytes the given IR instruction
 * can expand to. Starts from the per-opcode length table; for OP_JMP
 * (tail call) it additionally accounts for the argument-reload code that
 * emit_load_volatile_arguments() emits, sized per ArgInfo regtype.
 */
2955 ins_native_length (MonoCompile *cfg, MonoInst *ins)
2957 int len = ((guint8 *)ins_get_spec (ins->opcode))[MONO_INST_LEN];
2958 MonoMethodSignature *sig;
2963 if (ins->opcode != OP_JMP)
2966 call = (MonoCallInst*)ins;
2967 sig = mono_method_signature (cfg->method);
2968 cinfo = calculate_sizes (sig, sig->pinvoke);
2970 if (MONO_TYPE_ISSTRUCT (sig->ret))
2972 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
2973 ArgInfo *ainfo = cinfo->args + i;
2975 switch (ainfo->regtype) {
2976 case RegTypeGeneral:
/* By-value structs reload one instruction (4 bytes) per register used. */
2985 case RegTypeStructByVal:
2986 len += 4 * ainfo->size;
2989 case RegTypeStructByAddr:
2994 g_assert_not_reached ();
/*
 * emit_reserve_param_area:
 * Grow the stack by the (frame-aligned) parameter area size, keeping the
 * ABI back-chain intact: the old SP value is re-stored at the new stack
 * top via a store-with-update. Uses r11 as scratch when the size does not
 * fit in a signed 16-bit displacement.
 */
3004 emit_reserve_param_area (MonoCompile *cfg, guint8 *code)
3006 long size = cfg->param_area;
/* Round up to MONO_ARCH_FRAME_ALIGNMENT (power of two). */
3008 size += MONO_ARCH_FRAME_ALIGNMENT - 1;
3009 size &= -MONO_ARCH_FRAME_ALIGNMENT;
3014 ppc_load_reg (code, ppc_r0, 0, ppc_sp);
3015 if (ppc_is_imm16 (-size)) {
3016 ppc_store_reg_update (code, ppc_r0, -size, ppc_sp);
3018 ppc_load (code, ppc_r11, -size);
3019 ppc_store_reg_update_indexed (code, ppc_r0, ppc_sp, ppc_r11);
/*
 * emit_unreserve_param_area:
 * Inverse of emit_reserve_param_area: shrink the stack by the same aligned
 * size, re-establishing the back-chain at the restored stack pointer.
 */
3026 emit_unreserve_param_area (MonoCompile *cfg, guint8 *code)
3028 long size = cfg->param_area;
/* Round up to MONO_ARCH_FRAME_ALIGNMENT — must match the reserve path. */
3030 size += MONO_ARCH_FRAME_ALIGNMENT - 1;
3031 size &= -MONO_ARCH_FRAME_ALIGNMENT;
3036 ppc_load_reg (code, ppc_r0, 0, ppc_sp);
3037 if (ppc_is_imm16 (size)) {
3038 ppc_store_reg_update (code, ppc_r0, size, ppc_sp);
3040 ppc_load (code, ppc_r11, size);
3041 ppc_store_reg_update_indexed (code, ppc_r0, ppc_sp, ppc_r11);
/* Clamp a shift immediate to the architecture's valid range:
 * 0-31 (0x1f) on 32-bit PPC, 0-63 (0x3f) on 64-bit. */
3047 #define MASK_SHIFT_IMM(i) ((i) & MONO_PPC_32_64_CASE (0x1f, 0x3f))
3050 mono_arch_output_basic_block (MonoCompile *cfg, MonoBasicBlock *bb)
3052 MonoInst *ins, *next;
3055 guint8 *code = cfg->native_code + cfg->code_len;
3056 MonoInst *last_ins = NULL;
3057 guint last_offset = 0;
3061 /* we don't align basic blocks of loops on ppc */
3063 if (cfg->verbose_level > 2)
3064 g_print ("Basic block %d starting at offset 0x%x\n", bb->block_num, bb->native_offset);
3066 cpos = bb->max_offset;
3068 if (cfg->prof_options & MONO_PROFILE_COVERAGE) {
3069 //MonoCoverageInfo *cov = mono_get_coverage_info (cfg->method);
3070 //g_assert (!mono_compile_aot);
3073 // cov->data [bb->dfn].iloffset = bb->cil_code - cfg->cil_code;
3074 /* this is not thread save, but good enough */
3075 /* fixme: howto handle overflows? */
3076 //x86_inc_mem (code, &cov->data [bb->dfn].count);
3079 MONO_BB_FOR_EACH_INS (bb, ins) {
3080 offset = code - cfg->native_code;
3082 max_len = ins_native_length (cfg, ins);
3084 if (offset > (cfg->code_size - max_len - 16)) {
3085 cfg->code_size *= 2;
3086 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
3087 code = cfg->native_code + offset;
3089 // if (ins->cil_code)
3090 // g_print ("cil code\n");
3091 mono_debug_record_line_number (cfg, ins, offset);
3093 switch (normalize_opcode (ins->opcode)) {
3094 case OP_RELAXED_NOP:
3097 case OP_DUMMY_STORE:
3098 case OP_NOT_REACHED:
3102 emit_tls_access (code, ins->dreg, ins->inst_offset);
3105 ppc_mullw (code, ppc_r0, ins->sreg1, ins->sreg2);
3106 ppc_mulhw (code, ppc_r3, ins->sreg1, ins->sreg2);
3107 ppc_mr (code, ppc_r4, ppc_r0);
3110 ppc_mullw (code, ppc_r0, ins->sreg1, ins->sreg2);
3111 ppc_mulhwu (code, ppc_r3, ins->sreg1, ins->sreg2);
3112 ppc_mr (code, ppc_r4, ppc_r0);
3114 case OP_MEMORY_BARRIER:
3117 case OP_STOREI1_MEMBASE_REG:
3118 if (ppc_is_imm16 (ins->inst_offset)) {
3119 ppc_stb (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3121 ppc_load (code, ppc_r0, ins->inst_offset);
3122 ppc_stbx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3125 case OP_STOREI2_MEMBASE_REG:
3126 if (ppc_is_imm16 (ins->inst_offset)) {
3127 ppc_sth (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3129 ppc_load (code, ppc_r0, ins->inst_offset);
3130 ppc_sthx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3133 case OP_STORE_MEMBASE_REG:
3134 if (ppc_is_imm16 (ins->inst_offset)) {
3135 ppc_store_reg (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3137 ppc_load (code, ppc_r0, ins->inst_offset);
3138 ppc_store_reg_indexed (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3141 case OP_STOREI1_MEMINDEX:
3142 ppc_stbx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
3144 case OP_STOREI2_MEMINDEX:
3145 ppc_sthx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
3147 case OP_STORE_MEMINDEX:
3148 ppc_store_reg_indexed (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
3151 g_assert_not_reached ();
3153 case OP_LOAD_MEMBASE:
3154 if (ppc_is_imm16 (ins->inst_offset)) {
3155 ppc_load_reg (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3157 ppc_load (code, ppc_r0, ins->inst_offset);
3158 ppc_load_reg_indexed (code, ins->dreg, ins->inst_basereg, ppc_r0);
3161 case OP_LOADI4_MEMBASE:
3162 #ifdef __mono_ppc64__
3163 if (ppc_is_imm16 (ins->inst_offset)) {
3164 ppc_lwa (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3166 ppc_load (code, ppc_r0, ins->inst_offset);
3167 ppc_lwax (code, ins->dreg, ins->inst_basereg, ppc_r0);
3171 case OP_LOADU4_MEMBASE:
3172 if (ppc_is_imm16 (ins->inst_offset)) {
3173 ppc_lwz (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3175 ppc_load (code, ppc_r0, ins->inst_offset);
3176 ppc_lwzx (code, ins->dreg, ins->inst_basereg, ppc_r0);
3179 case OP_LOADI1_MEMBASE:
3180 case OP_LOADU1_MEMBASE:
3181 if (ppc_is_imm16 (ins->inst_offset)) {
3182 ppc_lbz (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3184 ppc_load (code, ppc_r0, ins->inst_offset);
3185 ppc_lbzx (code, ins->dreg, ins->inst_basereg, ppc_r0);
3187 if (ins->opcode == OP_LOADI1_MEMBASE)
3188 ppc_extsb (code, ins->dreg, ins->dreg);
3190 case OP_LOADU2_MEMBASE:
3191 if (ppc_is_imm16 (ins->inst_offset)) {
3192 ppc_lhz (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3194 ppc_load (code, ppc_r0, ins->inst_offset);
3195 ppc_lhzx (code, ins->dreg, ins->inst_basereg, ppc_r0);
3198 case OP_LOADI2_MEMBASE:
3199 if (ppc_is_imm16 (ins->inst_offset)) {
3200 ppc_lha (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3202 ppc_load (code, ppc_r0, ins->inst_offset);
3203 ppc_lhax (code, ins->dreg, ins->inst_basereg, ppc_r0);
3206 case OP_LOAD_MEMINDEX:
3207 ppc_load_reg_indexed (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3209 case OP_LOADI4_MEMINDEX:
3210 #ifdef __mono_ppc64__
3211 ppc_lwax (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3214 case OP_LOADU4_MEMINDEX:
3215 ppc_lwzx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3217 case OP_LOADU2_MEMINDEX:
3218 ppc_lhzx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3220 case OP_LOADI2_MEMINDEX:
3221 ppc_lhax (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3223 case OP_LOADU1_MEMINDEX:
3224 ppc_lbzx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3226 case OP_LOADI1_MEMINDEX:
3227 ppc_lbzx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3228 ppc_extsb (code, ins->dreg, ins->dreg);
3230 case OP_ICONV_TO_I1:
3231 CASE_PPC64 (OP_LCONV_TO_I1)
3232 ppc_extsb (code, ins->dreg, ins->sreg1);
3234 case OP_ICONV_TO_I2:
3235 CASE_PPC64 (OP_LCONV_TO_I2)
3236 ppc_extsh (code, ins->dreg, ins->sreg1);
3238 case OP_ICONV_TO_U1:
3239 CASE_PPC64 (OP_LCONV_TO_U1)
3240 ppc_clrlwi (code, ins->dreg, ins->sreg1, 24);
3242 case OP_ICONV_TO_U2:
3243 CASE_PPC64 (OP_LCONV_TO_U2)
3244 ppc_clrlwi (code, ins->dreg, ins->sreg1, 16);
3248 CASE_PPC64 (OP_LCOMPARE)
3249 L = (sizeof (gpointer) == 4 || ins->opcode == OP_ICOMPARE) ? 0 : 1;
3251 if (next && compare_opcode_is_unsigned (next->opcode))
3252 ppc_cmpl (code, 0, L, ins->sreg1, ins->sreg2);
3254 ppc_cmp (code, 0, L, ins->sreg1, ins->sreg2);
3256 case OP_COMPARE_IMM:
3257 case OP_ICOMPARE_IMM:
3258 CASE_PPC64 (OP_LCOMPARE_IMM)
3259 L = (sizeof (gpointer) == 4 || ins->opcode == OP_ICOMPARE_IMM) ? 0 : 1;
3261 if (next && compare_opcode_is_unsigned (next->opcode)) {
3262 if (ppc_is_uimm16 (ins->inst_imm)) {
3263 ppc_cmpli (code, 0, L, ins->sreg1, (ins->inst_imm & 0xffff));
3265 g_assert_not_reached ();
3268 if (ppc_is_imm16 (ins->inst_imm)) {
3269 ppc_cmpi (code, 0, L, ins->sreg1, (ins->inst_imm & 0xffff));
3271 g_assert_not_reached ();
3280 ppc_addco (code, ins->dreg, ins->sreg1, ins->sreg2);
3283 CASE_PPC64 (OP_LADD)
3284 ppc_add (code, ins->dreg, ins->sreg1, ins->sreg2);
3288 ppc_adde (code, ins->dreg, ins->sreg1, ins->sreg2);
3291 if (ppc_is_imm16 (ins->inst_imm)) {
3292 ppc_addic (code, ins->dreg, ins->sreg1, ins->inst_imm);
3294 g_assert_not_reached ();
3299 CASE_PPC64 (OP_LADD_IMM)
3300 if (ppc_is_imm16 (ins->inst_imm)) {
3301 ppc_addi (code, ins->dreg, ins->sreg1, ins->inst_imm);
3303 g_assert_not_reached ();
3307 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3309 ppc_addo (code, ins->dreg, ins->sreg1, ins->sreg2);
3310 ppc_mfspr (code, ppc_r0, ppc_xer);
3311 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3312 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3314 case OP_IADD_OVF_UN:
3315 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3317 ppc_addco (code, ins->dreg, ins->sreg1, ins->sreg2);
3318 ppc_mfspr (code, ppc_r0, ppc_xer);
3319 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3320 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3323 CASE_PPC64 (OP_LSUB_OVF)
3324 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3326 ppc_subfo (code, ins->dreg, ins->sreg2, ins->sreg1);
3327 ppc_mfspr (code, ppc_r0, ppc_xer);
3328 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3329 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3331 case OP_ISUB_OVF_UN:
3332 CASE_PPC64 (OP_LSUB_OVF_UN)
3333 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3335 ppc_subfc (code, ins->dreg, ins->sreg2, ins->sreg1);
3336 ppc_mfspr (code, ppc_r0, ppc_xer);
3337 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3338 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE, PPC_BR_EQ, "OverflowException");
3340 case OP_ADD_OVF_CARRY:
3341 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3343 ppc_addeo (code, ins->dreg, ins->sreg1, ins->sreg2);
3344 ppc_mfspr (code, ppc_r0, ppc_xer);
3345 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3346 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3348 case OP_ADD_OVF_UN_CARRY:
3349 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3351 ppc_addeo (code, ins->dreg, ins->sreg1, ins->sreg2);
3352 ppc_mfspr (code, ppc_r0, ppc_xer);
3353 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3354 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3356 case OP_SUB_OVF_CARRY:
3357 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3359 ppc_subfeo (code, ins->dreg, ins->sreg2, ins->sreg1);
3360 ppc_mfspr (code, ppc_r0, ppc_xer);
3361 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3362 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3364 case OP_SUB_OVF_UN_CARRY:
3365 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3367 ppc_subfeo (code, ins->dreg, ins->sreg2, ins->sreg1);
3368 ppc_mfspr (code, ppc_r0, ppc_xer);
3369 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3370 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE, PPC_BR_EQ, "OverflowException");
3374 ppc_subfco (code, ins->dreg, ins->sreg2, ins->sreg1);
3377 CASE_PPC64 (OP_LSUB)
3378 ppc_subf (code, ins->dreg, ins->sreg2, ins->sreg1);
3382 ppc_subfe (code, ins->dreg, ins->sreg2, ins->sreg1);
3386 CASE_PPC64 (OP_LSUB_IMM)
3387 // we add the negated value
3388 if (ppc_is_imm16 (-ins->inst_imm))
3389 ppc_addi (code, ins->dreg, ins->sreg1, -ins->inst_imm);
3391 g_assert_not_reached ();
3395 g_assert (ppc_is_imm16 (ins->inst_imm));
3396 ppc_subfic (code, ins->dreg, ins->sreg1, ins->inst_imm);
3399 ppc_subfze (code, ins->dreg, ins->sreg1);
3402 CASE_PPC64 (OP_LAND)
3403 /* FIXME: the ppc macros as inconsistent here: put dest as the first arg! */
3404 ppc_and (code, ins->sreg1, ins->dreg, ins->sreg2);
3408 CASE_PPC64 (OP_LAND_IMM)
3409 if (!(ins->inst_imm & 0xffff0000)) {
3410 ppc_andid (code, ins->sreg1, ins->dreg, ins->inst_imm);
3411 } else if (!(ins->inst_imm & 0xffff)) {
3412 ppc_andisd (code, ins->sreg1, ins->dreg, ((guint32)ins->inst_imm >> 16));
3414 g_assert_not_reached ();
3418 CASE_PPC64 (OP_LDIV) {
3419 guint8 *divisor_is_m1;
3420 /* XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3422 ppc_compare_reg_imm (code, 0, ins->sreg2, -1);
3423 divisor_is_m1 = code;
3424 ppc_bc (code, PPC_BR_FALSE | PPC_BR_LIKELY, PPC_BR_EQ, 0);
3425 ppc_lis (code, ppc_r0, 0x8000);
3426 #ifdef __mono_ppc64__
3427 if (ins->opcode == OP_LDIV)
3428 ppc_sldi (code, ppc_r0, ppc_r0, 32);
3430 ppc_compare (code, 0, ins->sreg1, ppc_r0);
3431 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE, PPC_BR_EQ, "ArithmeticException");
3432 ppc_patch (divisor_is_m1, code);
3433 /* XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3435 if (ins->opcode == OP_IDIV)
3436 ppc_divwod (code, ins->dreg, ins->sreg1, ins->sreg2);
3437 #ifdef __mono_ppc64__
3439 ppc_divdod (code, ins->dreg, ins->sreg1, ins->sreg2);
3441 ppc_mfspr (code, ppc_r0, ppc_xer);
3442 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3443 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "DivideByZeroException");
3447 CASE_PPC64 (OP_LDIV_UN)
3448 if (ins->opcode == OP_IDIV_UN)
3449 ppc_divwuod (code, ins->dreg, ins->sreg1, ins->sreg2);
3450 #ifdef __mono_ppc64__
3452 ppc_divduod (code, ins->dreg, ins->sreg1, ins->sreg2);
3454 ppc_mfspr (code, ppc_r0, ppc_xer);
3455 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3456 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "DivideByZeroException");
3462 g_assert_not_reached ();
3465 ppc_or (code, ins->dreg, ins->sreg1, ins->sreg2);
3469 CASE_PPC64 (OP_LOR_IMM)
3470 if (!(ins->inst_imm & 0xffff0000)) {
3471 ppc_ori (code, ins->sreg1, ins->dreg, ins->inst_imm);
3472 } else if (!(ins->inst_imm & 0xffff)) {
3473 ppc_oris (code, ins->dreg, ins->sreg1, ((guint32)(ins->inst_imm) >> 16));
3475 g_assert_not_reached ();
3479 CASE_PPC64 (OP_LXOR)
3480 ppc_xor (code, ins->dreg, ins->sreg1, ins->sreg2);
3484 CASE_PPC64 (OP_LXOR_IMM)
3485 if (!(ins->inst_imm & 0xffff0000)) {
3486 ppc_xori (code, ins->sreg1, ins->dreg, ins->inst_imm);
3487 } else if (!(ins->inst_imm & 0xffff)) {
3488 ppc_xoris (code, ins->sreg1, ins->dreg, ((guint32)(ins->inst_imm) >> 16));
3490 g_assert_not_reached ();
3494 CASE_PPC64 (OP_LSHL)
3495 ppc_shift_left (code, ins->dreg, ins->sreg1, ins->sreg2);
3499 CASE_PPC64 (OP_LSHL_IMM)
3500 ppc_shift_left_imm (code, ins->dreg, ins->sreg1, MASK_SHIFT_IMM (ins->inst_imm));
3503 ppc_sraw (code, ins->dreg, ins->sreg1, ins->sreg2);
3506 ppc_shift_right_arith_imm (code, ins->dreg, ins->sreg1, MASK_SHIFT_IMM (ins->inst_imm));
3509 if (MASK_SHIFT_IMM (ins->inst_imm))
3510 ppc_shift_right_imm (code, ins->dreg, ins->sreg1, MASK_SHIFT_IMM (ins->inst_imm));
3512 ppc_mr (code, ins->dreg, ins->sreg1);
3515 ppc_srw (code, ins->dreg, ins->sreg1, ins->sreg2);
3518 CASE_PPC64 (OP_LNOT)
3519 ppc_not (code, ins->dreg, ins->sreg1);
3522 CASE_PPC64 (OP_LNEG)
3523 ppc_neg (code, ins->dreg, ins->sreg1);
3526 CASE_PPC64 (OP_LMUL)
3527 ppc_multiply (code, ins->dreg, ins->sreg1, ins->sreg2);
3531 CASE_PPC64 (OP_LMUL_IMM)
3532 if (ppc_is_imm16 (ins->inst_imm)) {
3533 ppc_mulli (code, ins->dreg, ins->sreg1, ins->inst_imm);
3535 g_assert_not_reached ();
3539 CASE_PPC64 (OP_LMUL_OVF)
3540 /* we annot use mcrxr, since it's not implemented on some processors
3541 * XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3543 if (ins->opcode == OP_IMUL_OVF)
3544 ppc_mullwo (code, ins->dreg, ins->sreg1, ins->sreg2);
3545 #ifdef __mono_ppc64__
3547 ppc_mulldo (code, ins->dreg, ins->sreg1, ins->sreg2);
3549 ppc_mfspr (code, ppc_r0, ppc_xer);
3550 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3551 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3553 case OP_IMUL_OVF_UN:
3554 CASE_PPC64 (OP_LMUL_OVF_UN)
3555 /* we first multiply to get the high word and compare to 0
3556 * to set the flags, then the result is discarded and then
3557 * we multiply to get the lower * bits result
3559 if (ins->opcode == OP_IMUL_OVF_UN)
3560 ppc_mulhwu (code, ppc_r0, ins->sreg1, ins->sreg2);
3561 #ifdef __mono_ppc64__
3563 ppc_mulhdu (code, ppc_r0, ins->sreg1, ins->sreg2);
3565 ppc_cmpi (code, 0, 0, ppc_r0, 0);
3566 EMIT_COND_SYSTEM_EXCEPTION (CEE_BNE_UN - CEE_BEQ, "OverflowException");
3567 ppc_multiply (code, ins->dreg, ins->sreg1, ins->sreg2);
3570 CASE_PPC64 (OP_I8CONST)
3571 ppc_load (code, ins->dreg, ins->inst_c0);
3574 mono_add_patch_info (cfg, offset, (MonoJumpInfoType)ins->inst_i1, ins->inst_p0);
3575 ppc_load_sequence (code, ins->dreg, 0);
3577 CASE_PPC32 (OP_ICONV_TO_I4)
3578 CASE_PPC32 (OP_ICONV_TO_U4)
3580 ppc_mr (code, ins->dreg, ins->sreg1);
3583 int saved = ins->sreg1;
3584 if (ins->sreg1 == ppc_r3) {
3585 ppc_mr (code, ppc_r0, ins->sreg1);
3588 if (ins->sreg2 != ppc_r3)
3589 ppc_mr (code, ppc_r3, ins->sreg2);
3590 if (saved != ppc_r4)
3591 ppc_mr (code, ppc_r4, saved);
3595 ppc_fmr (code, ins->dreg, ins->sreg1);
3597 case OP_FCONV_TO_R4:
3598 ppc_frsp (code, ins->dreg, ins->sreg1);
3604 * Keep in sync with mono_arch_emit_epilog
3606 g_assert (!cfg->method->save_lmf);
3608 * Note: we can use ppc_r11 here because it is dead anyway:
3609 * we're leaving the method.
3611 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
3612 long ret_offset = cfg->stack_usage + PPC_RET_ADDR_OFFSET;
3613 if (ppc_is_imm16 (ret_offset)) {
3614 ppc_load_reg (code, ppc_r0, ret_offset, cfg->frame_reg);
3616 ppc_load (code, ppc_r11, ret_offset);
3617 ppc_load_reg_indexed (code, ppc_r0, cfg->frame_reg, ppc_r11);
3619 ppc_mtlr (code, ppc_r0);
3622 code = emit_load_volatile_arguments (cfg, code);
3624 if (ppc_is_imm16 (cfg->stack_usage)) {
3625 ppc_addi (code, ppc_r11, cfg->frame_reg, cfg->stack_usage);
3627 ppc_load (code, ppc_r11, cfg->stack_usage);
3628 ppc_add (code, ppc_r11, cfg->frame_reg, ppc_r11);
3630 if (!cfg->method->save_lmf) {
3631 /*for (i = 31; i >= 14; --i) {
3632 if (cfg->used_float_regs & (1 << i)) {
3633 pos += sizeof (double);
3634 ppc_lfd (code, i, -pos, cfg->frame_reg);
3638 for (i = 31; i >= 13; --i) {
3639 if (cfg->used_int_regs & (1 << i)) {
3640 pos += sizeof (gpointer);
3641 ppc_load_reg (code, i, -pos, ppc_r11);
3645 /* FIXME restore from MonoLMF: though this can't happen yet */
3647 ppc_mr (code, ppc_sp, ppc_r11);
3648 mono_add_patch_info (cfg, (guint8*) code - cfg->native_code, MONO_PATCH_INFO_METHOD_JUMP, ins->inst_p0);
3653 /* ensure ins->sreg1 is not NULL */
3654 ppc_load_reg (code, ppc_r0, 0, ins->sreg1);
3657 long cookie_offset = cfg->sig_cookie + cfg->stack_usage;
3658 if (ppc_is_imm16 (cookie_offset)) {
3659 ppc_addi (code, ppc_r0, cfg->frame_reg, cookie_offset);
3661 ppc_load (code, ppc_r0, cookie_offset);
3662 ppc_add (code, ppc_r0, cfg->frame_reg, ppc_r0);
3664 ppc_store_reg (code, ppc_r0, 0, ins->sreg1);
3673 call = (MonoCallInst*)ins;
3674 if (ins->flags & MONO_INST_HAS_METHOD)
3675 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_METHOD, call->method);
3677 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_ABS, call->fptr);
3678 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
3679 ppc_load_func (code, ppc_r0, 0);
3680 ppc_mtlr (code, ppc_r0);
3685 /* FIXME: this should be handled somewhere else in the new jit */
3686 code = emit_move_return_value (cfg, ins, code);
3692 case OP_VOIDCALL_REG:
3694 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
3695 ppc_load_reg (code, ppc_r0, 0, ins->sreg1);
3696 /* FIXME: if we know that this is a method, we
3697 can omit this load */
3698 ppc_load_reg (code, ppc_r2, 8, ins->sreg1);
3699 ppc_mtlr (code, ppc_r0);
3701 ppc_mtlr (code, ins->sreg1);
3704 /* FIXME: this should be handled somewhere else in the new jit */
3705 code = emit_move_return_value (cfg, ins, code);
3707 case OP_FCALL_MEMBASE:
3708 case OP_LCALL_MEMBASE:
3709 case OP_VCALL_MEMBASE:
3710 case OP_VCALL2_MEMBASE:
3711 case OP_VOIDCALL_MEMBASE:
3712 case OP_CALL_MEMBASE:
3713 ppc_load_reg (code, ppc_r0, ins->inst_offset, ins->sreg1);
3714 ppc_mtlr (code, ppc_r0);
3716 /* FIXME: this should be handled somewhere else in the new jit */
3717 code = emit_move_return_value (cfg, ins, code);
3720 guint8 * zero_loop_jump, * zero_loop_start;
3721 /* keep alignment */
3722 int alloca_waste = PPC_STACK_PARAM_OFFSET + cfg->param_area + 31;
3723 int area_offset = alloca_waste;
3725 ppc_addi (code, ppc_r11, ins->sreg1, alloca_waste + 31);
3726 /* FIXME: should be calculated from MONO_ARCH_FRAME_ALIGNMENT */
3727 ppc_clear_right_imm (code, ppc_r11, ppc_r11, 4);
3728 /* use ctr to store the number of words to 0 if needed */
3729 if (ins->flags & MONO_INST_INIT) {
3730 /* we zero 4 bytes at a time:
3731 * we add 7 instead of 3 so that we set the counter to
3732 * at least 1, otherwise the bdnz instruction will make
3733 * it negative and iterate billions of times.
3735 ppc_addi (code, ppc_r0, ins->sreg1, 7);
3736 ppc_shift_right_arith_imm (code, ppc_r0, ppc_r0, 2);
3737 ppc_mtctr (code, ppc_r0);
3739 ppc_load_reg (code, ppc_r0, 0, ppc_sp);
3740 ppc_neg (code, ppc_r11, ppc_r11);
3741 ppc_store_reg_update_indexed (code, ppc_r0, ppc_sp, ppc_r11);
3743 /* FIXME: make this loop work in 8 byte
3744 increments on PPC64 */
3745 if (ins->flags & MONO_INST_INIT) {
3746 /* adjust the dest reg by -4 so we can use stwu */
3747 /* we actually adjust -8 because we let the loop
3750 ppc_addi (code, ins->dreg, ppc_sp, (area_offset - 8));
3751 ppc_li (code, ppc_r11, 0);
3752 zero_loop_start = code;
3753 ppc_stwu (code, ppc_r11, 4, ins->dreg);
3754 zero_loop_jump = code;
3755 ppc_bc (code, PPC_BR_DEC_CTR_NONZERO, 0, 0);
3756 ppc_patch (zero_loop_jump, zero_loop_start);
3758 ppc_addi (code, ins->dreg, ppc_sp, area_offset);
3763 ppc_mr (code, ppc_r3, ins->sreg1);
3764 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
3765 (gpointer)"mono_arch_throw_exception");
3766 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
3767 ppc_load_func (code, ppc_r0, 0);
3768 ppc_mtlr (code, ppc_r0);
3777 ppc_mr (code, ppc_r3, ins->sreg1);
3778 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
3779 (gpointer)"mono_arch_rethrow_exception");
3780 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
3781 ppc_load_func (code, ppc_r0, 0);
3782 ppc_mtlr (code, ppc_r0);
3789 case OP_START_HANDLER: {
3790 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
3791 g_assert (spvar->inst_basereg != ppc_sp);
3792 code = emit_reserve_param_area (cfg, code);
// NOTE(review): decimated listing — the original line numbers jump, so
// else-branches, break statements and closing braces are not visible here.
// Code lines are kept byte-identical; only '//' comments were added.
// Start-of-handler: save the incoming LR into the spvar stack slot
// (imm16 fast path vs. indexed store for large offsets).
3793 ppc_mflr (code, ppc_r0);
3794 if (ppc_is_imm16 (spvar->inst_offset)) {
3795 ppc_store_reg (code, ppc_r0, spvar->inst_offset, spvar->inst_basereg);
3797 ppc_load (code, ppc_r11, spvar->inst_offset);
3798 ppc_store_reg_indexed (code, ppc_r0, ppc_r11, spvar->inst_basereg);
// OP_ENDFILTER: filter result is returned in r3; reload the LR saved in
// spvar and return through it.
3802 case OP_ENDFILTER: {
3803 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
3804 g_assert (spvar->inst_basereg != ppc_sp);
3805 code = emit_unreserve_param_area (cfg, code);
3806 if (ins->sreg1 != ppc_r3)
3807 ppc_mr (code, ppc_r3, ins->sreg1);
3808 if (ppc_is_imm16 (spvar->inst_offset)) {
3809 ppc_load_reg (code, ppc_r0, spvar->inst_offset, spvar->inst_basereg);
3811 ppc_load (code, ppc_r11, spvar->inst_offset);
3812 ppc_load_reg_indexed (code, ppc_r0, spvar->inst_basereg, ppc_r11);
3814 ppc_mtlr (code, ppc_r0);
// OP_ENDFINALLY: same saved-LR restore as ENDFILTER, but no result to move.
3818 case OP_ENDFINALLY: {
3819 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
3820 g_assert (spvar->inst_basereg != ppc_sp);
3821 code = emit_unreserve_param_area (cfg, code);
3822 ppc_load_reg (code, ppc_r0, spvar->inst_offset, spvar->inst_basereg);
3823 ppc_mtlr (code, ppc_r0);
// OP_CALL_HANDLER: emit a patchable call to the handler basic block.
3827 case OP_CALL_HANDLER:
3828 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_target_bb);
3832 ins->inst_c0 = code - cfg->native_code;
3835 /*if (ins->inst_target_bb->native_offset) {
3837 //x86_jump_code (code, cfg->native_code + ins->inst_target_bb->native_offset);
3839 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_target_bb);
3844 ppc_mtctr (code, ins->sreg1);
3845 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
// Compare-to-bool setters: load one value, conditionally skip (bc with
// displacement 2 = skip one instruction), load the other.
3849 CASE_PPC64 (OP_LCEQ)
3850 ppc_li (code, ins->dreg, 0);
3851 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 2);
3852 ppc_li (code, ins->dreg, 1);
3858 CASE_PPC64 (OP_LCLT)
3859 CASE_PPC64 (OP_LCLT_UN)
3860 ppc_li (code, ins->dreg, 1);
3861 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 2);
3862 ppc_li (code, ins->dreg, 0);
3868 CASE_PPC64 (OP_LCGT)
3869 CASE_PPC64 (OP_LCGT_UN)
3870 ppc_li (code, ins->dreg, 1);
3871 ppc_bc (code, PPC_BR_TRUE, PPC_BR_GT, 2);
3872 ppc_li (code, ins->dreg, 0);
// Conditional-exception opcodes: throw ins->inst_p1 when the condition
// derived from the opcode offset holds.
3874 case OP_COND_EXC_EQ:
3875 case OP_COND_EXC_NE_UN:
3876 case OP_COND_EXC_LT:
3877 case OP_COND_EXC_LT_UN:
3878 case OP_COND_EXC_GT:
3879 case OP_COND_EXC_GT_UN:
3880 case OP_COND_EXC_GE:
3881 case OP_COND_EXC_GE_UN:
3882 case OP_COND_EXC_LE:
3883 case OP_COND_EXC_LE_UN:
3884 EMIT_COND_SYSTEM_EXCEPTION (ins->opcode - OP_COND_EXC_EQ, ins->inst_p1);
3886 case OP_COND_EXC_IEQ:
3887 case OP_COND_EXC_INE_UN:
3888 case OP_COND_EXC_ILT:
3889 case OP_COND_EXC_ILT_UN:
3890 case OP_COND_EXC_IGT:
3891 case OP_COND_EXC_IGT_UN:
3892 case OP_COND_EXC_IGE:
3893 case OP_COND_EXC_IGE_UN:
3894 case OP_COND_EXC_ILE:
3895 case OP_COND_EXC_ILE_UN:
3896 EMIT_COND_SYSTEM_EXCEPTION (ins->opcode - OP_COND_EXC_IEQ, ins->inst_p1);
3908 EMIT_COND_BRANCH (ins, ins->opcode - OP_IBEQ);
3911 /* floating point opcodes */
3914 g_assert_not_reached ();
// FP membase load/store: imm16 offset fast path, otherwise load the offset
// into r0 and use the indexed form.
3915 case OP_STORER8_MEMBASE_REG:
3916 if (ppc_is_imm16 (ins->inst_offset)) {
3917 ppc_stfd (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3919 ppc_load (code, ppc_r0, ins->inst_offset);
3920 ppc_stfdx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3923 case OP_LOADR8_MEMBASE:
3924 if (ppc_is_imm16 (ins->inst_offset)) {
3925 ppc_lfd (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3927 ppc_load (code, ppc_r0, ins->inst_offset);
// NOTE(review): this uses inst_destbasereg on a *load*, unlike the lfd
// branch above which uses inst_basereg — looks suspicious, but the
// surrounding lines are elided so it is left as-is; verify in full source.
3928 ppc_lfdx (code, ins->dreg, ins->inst_destbasereg, ppc_r0);
// STORER4 rounds the double-precision value to single (frsp) before storing.
3931 case OP_STORER4_MEMBASE_REG:
3932 ppc_frsp (code, ins->sreg1, ins->sreg1);
3933 if (ppc_is_imm16 (ins->inst_offset)) {
3934 ppc_stfs (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3936 ppc_load (code, ppc_r0, ins->inst_offset);
3937 ppc_stfsx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3940 case OP_LOADR4_MEMBASE:
3941 if (ppc_is_imm16 (ins->inst_offset)) {
3942 ppc_lfs (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3944 ppc_load (code, ppc_r0, ins->inst_offset);
3945 ppc_lfsx (code, ins->dreg, ins->inst_destbasereg, ppc_r0);
3948 case OP_LOADR4_MEMINDEX:
3949 ppc_lfsx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3951 case OP_LOADR8_MEMINDEX:
3952 ppc_lfdx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3954 case OP_STORER4_MEMINDEX:
3955 ppc_frsp (code, ins->sreg1, ins->sreg1);
3956 ppc_stfsx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
3958 case OP_STORER8_MEMINDEX:
3959 ppc_stfdx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
3962 case CEE_CONV_R4: /* FIXME: change precision */
3964 g_assert_not_reached ();
// float -> small-int conversions delegate to emit_float_to_int
// (size in bytes, signedness flag).
3965 case OP_FCONV_TO_I1:
3966 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 1, TRUE);
3968 case OP_FCONV_TO_U1:
3969 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 1, FALSE);
3971 case OP_FCONV_TO_I2:
3972 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 2, TRUE);
3974 case OP_FCONV_TO_U2:
3975 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 2, FALSE);
3977 case OP_FCONV_TO_I4:
3979 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 4, TRUE);
3981 case OP_FCONV_TO_U4:
3983 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 4, FALSE);
3985 case OP_LCONV_TO_R_UN:
3986 g_assert_not_reached ();
3987 /* Implemented as helper calls */
// LCONV_TO_OVF_I4_2 (32-bit pair -> int with overflow check):
// the most-significant word must be 0 for positive or -1 for negative
// values, otherwise branch to the OverflowException stub.
3989 case OP_LCONV_TO_OVF_I4_2:
3990 case OP_LCONV_TO_OVF_I: {
3991 #ifdef __mono_ppc64__
3994 guint8 *negative_branch, *msword_positive_branch, *msword_negative_branch, *ovf_ex_target;
3995 // Check if its negative
3996 ppc_cmpi (code, 0, 0, ins->sreg1, 0);
3997 negative_branch = code;
3998 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 0);
3999 // Its positive msword == 0
4000 ppc_cmpi (code, 0, 0, ins->sreg2, 0);
4001 msword_positive_branch = code;
4002 ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
4004 ovf_ex_target = code;
4005 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_ALWAYS, 0, "OverflowException");
4007 ppc_patch (negative_branch, code);
4008 ppc_cmpi (code, 0, 0, ins->sreg2, -1);
4009 msword_negative_branch = code;
4010 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4011 ppc_patch (msword_negative_branch, ovf_ex_target);
4013 ppc_patch (msword_positive_branch, code);
4014 if (ins->dreg != ins->sreg1)
4015 ppc_mr (code, ins->dreg, ins->sreg1);
// FP arithmetic: direct single-instruction mappings.
4020 ppc_fsqrtd (code, ins->dreg, ins->sreg1);
4023 ppc_fadd (code, ins->dreg, ins->sreg1, ins->sreg2);
4026 ppc_fsub (code, ins->dreg, ins->sreg1, ins->sreg2);
4029 ppc_fmul (code, ins->dreg, ins->sreg1, ins->sreg2);
4032 ppc_fdiv (code, ins->dreg, ins->sreg1, ins->sreg2);
4035 ppc_fneg (code, ins->dreg, ins->sreg1);
4039 g_assert_not_reached ();
// FP compare-to-bool setters.  fcmpu = unordered compare, fcmpo = ordered;
// the *_UN variants first test the SO (summary overflow / unordered) bit so
// NaN comparisons yield TRUE (displacement 3 skips two instructions).
4042 ppc_fcmpu (code, 0, ins->sreg1, ins->sreg2);
4045 ppc_fcmpo (code, 0, ins->sreg1, ins->sreg2);
4046 ppc_li (code, ins->dreg, 0);
4047 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 2);
4048 ppc_li (code, ins->dreg, 1);
4051 ppc_fcmpo (code, 0, ins->sreg1, ins->sreg2);
4052 ppc_li (code, ins->dreg, 1);
4053 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 2);
4054 ppc_li (code, ins->dreg, 0);
4057 ppc_fcmpu (code, 0, ins->sreg1, ins->sreg2);
4058 ppc_li (code, ins->dreg, 1);
4059 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 3);
4060 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 2);
4061 ppc_li (code, ins->dreg, 0);
4064 ppc_fcmpo (code, 0, ins->sreg1, ins->sreg2);
4065 ppc_li (code, ins->dreg, 1);
4066 ppc_bc (code, PPC_BR_TRUE, PPC_BR_GT, 2);
4067 ppc_li (code, ins->dreg, 0);
4070 ppc_fcmpu (code, 0, ins->sreg1, ins->sreg2);
4071 ppc_li (code, ins->dreg, 1);
4072 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 3);
4073 ppc_bc (code, PPC_BR_TRUE, PPC_BR_GT, 2);
4074 ppc_li (code, ins->dreg, 0);
// FP branches: ordered variants skip the branch when SO (NaN) is set,
// unordered variants branch when SO is set as well.
4077 EMIT_COND_BRANCH (ins, CEE_BEQ - CEE_BEQ);
4080 EMIT_COND_BRANCH (ins, CEE_BNE_UN - CEE_BEQ);
4083 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4084 EMIT_COND_BRANCH (ins, CEE_BLT - CEE_BEQ);
4087 EMIT_COND_BRANCH_FLAGS (ins, PPC_BR_TRUE, PPC_BR_SO);
4088 EMIT_COND_BRANCH (ins, CEE_BLT_UN - CEE_BEQ);
4091 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4092 EMIT_COND_BRANCH (ins, CEE_BGT - CEE_BEQ);
4095 EMIT_COND_BRANCH_FLAGS (ins, PPC_BR_TRUE, PPC_BR_SO);
4096 EMIT_COND_BRANCH (ins, CEE_BGT_UN - CEE_BEQ);
4099 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4100 EMIT_COND_BRANCH (ins, CEE_BGE - CEE_BEQ);
4103 EMIT_COND_BRANCH (ins, CEE_BGE_UN - CEE_BEQ);
4106 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4107 EMIT_COND_BRANCH (ins, CEE_BLE - CEE_BEQ);
4110 EMIT_COND_BRANCH (ins, CEE_BLE_UN - CEE_BEQ);
4113 g_assert_not_reached ();
// OP_CHECK_FINITE: clear the sign bit, bias against the max exponent and
// isolate the resulting bit; raises ArithmeticException for NaN/Inf.
// NOTE(review): operates on sreg1 in place (clobbers the source register).
4114 case OP_CHECK_FINITE: {
4115 ppc_rlwinm (code, ins->sreg1, ins->sreg1, 0, 1, 31);
4116 ppc_addis (code, ins->sreg1, ins->sreg1, -32752);
4117 ppc_rlwinmd (code, ins->sreg1, ins->sreg1, 1, 31, 31);
4118 EMIT_COND_SYSTEM_EXCEPTION (CEE_BEQ - CEE_BEQ, "ArithmeticException");
// OP_AOTCONST: emit a recognizable placeholder constant; the real value is
// written in by mono_arch_patch_code via patch_load_sequence.
4121 mono_add_patch_info (cfg, offset, (MonoJumpInfoType)ins->inst_i1, ins->inst_p0);
4122 #ifdef __mono_ppc64__
4123 ppc_load_sequence (code, ins->dreg, (gulong)0x0f0f0f0f0f0f0f0fL);
4125 ppc_load_sequence (code, ins->dreg, (gulong)0x0f0f0f0fL);
// ppc64-only: 32<->64 bit widening/narrowing.
4130 #ifdef __mono_ppc64__
4131 case OP_ICONV_TO_I4:
4133 ppc_extsw (code, ins->dreg, ins->sreg1);
4135 case OP_ICONV_TO_U4:
4137 ppc_clrldi (code, ins->dreg, ins->sreg1, 32);
// int -> fp: sign-extend if needed, spill to the stack red zone (-8(r1)),
// reload into an FP reg and convert with fcfid; frsp narrows to R4.
4139 case OP_ICONV_TO_R4:
4140 case OP_ICONV_TO_R8:
4141 case OP_LCONV_TO_R4:
4142 case OP_LCONV_TO_R8: {
4144 if (ins->opcode == OP_ICONV_TO_R4 || ins->opcode == OP_ICONV_TO_R8) {
4145 ppc_extsw (code, ppc_r0, ins->sreg1);
4150 ppc_store_reg (code, tmp, -8, ppc_r1);
4151 ppc_lfd (code, ins->dreg, -8, ppc_r1);
4152 ppc_fcfid (code, ins->dreg, ins->dreg);
4153 if (ins->opcode == OP_ICONV_TO_R4 || ins->opcode == OP_LCONV_TO_R4)
4154 ppc_frsp (code, ins->dreg, ins->dreg);
4158 ppc_srad (code, ins->dreg, ins->sreg1, ins->sreg2);
4161 ppc_srd (code, ins->dreg, ins->sreg1, ins->sreg2);
// Carry/overflow conditional exceptions: read XER, mask the CA or OV bit
// and throw when it is set.
4164 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
4166 ppc_mfspr (code, ppc_r0, ppc_xer);
4167 ppc_andisd (code, ppc_r0, ppc_r0, (1 << 13)); /* CA */
4168 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, ins->inst_p1);
4170 case OP_COND_EXC_OV:
4171 ppc_mfspr (code, ppc_r0, ppc_xer);
4172 ppc_andisd (code, ppc_r0, ppc_r0, (1 << 14)); /* OV */
4173 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, ins->inst_p1);
4185 EMIT_COND_BRANCH (ins, ins->opcode - OP_LBEQ);
4187 case OP_FCONV_TO_I8:
4188 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 8, TRUE);
4190 case OP_FCONV_TO_U8:
4191 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 8, FALSE);
4193 case OP_STOREI4_MEMBASE_REG:
4194 if (ppc_is_imm16 (ins->inst_offset)) {
4195 ppc_stw (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
4197 ppc_load (code, ppc_r0, ins->inst_offset);
4198 ppc_stwx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
4201 case OP_STOREI4_MEMINDEX:
4202 ppc_stwx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
4205 ppc_srawi (code, ins->dreg, ins->sreg1, (ins->inst_imm & 0x1f));
4207 case OP_ISHR_UN_IMM:
4208 if (ins->inst_imm & 0x1f)
4209 ppc_srwi (code, ins->dreg, ins->sreg1, (ins->inst_imm & 0x1f));
4211 ppc_mr (code, ins->dreg, ins->sreg1);
// Atomic add: lwarx/ldarx reservation, add, stwcx./stdcx. conditional
// store; loop back until the reservation holds, result in dreg.
4213 case OP_ATOMIC_ADD_NEW_I4:
4214 case OP_ATOMIC_ADD_NEW_I8: {
4215 guint8 *loop = code, *branch;
4216 g_assert (ins->inst_offset == 0);
4217 if (ins->opcode == OP_ATOMIC_ADD_NEW_I4)
4218 ppc_lwarx (code, ppc_r0, 0, ins->inst_basereg);
4220 ppc_ldarx (code, ppc_r0, 0, ins->inst_basereg);
4221 ppc_add (code, ppc_r0, ppc_r0, ins->sreg2);
4222 if (ins->opcode == OP_ATOMIC_ADD_NEW_I4)
4223 ppc_stwcxd (code, ppc_r0, 0, ins->inst_basereg);
4225 ppc_stdcxd (code, ppc_r0, 0, ins->inst_basereg);
4227 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4228 ppc_patch (branch, loop);
4229 ppc_mr (code, ins->dreg, ppc_r0);
// Atomic compare-and-swap: load-reserved, compare against the comparand,
// conditional store of the new value; retry on lost reservation.  The old
// value (r0) is always returned in dreg.
4233 case OP_ATOMIC_CAS_I4:
4234 CASE_PPC64 (OP_ATOMIC_CAS_I8) {
4235 int location = ins->sreg1;
4236 int value = ins->sreg2;
4237 int comparand = ins->sreg3;
4238 guint8 *start, *not_equal, *lost_reservation;
4241 if (ins->opcode == OP_ATOMIC_CAS_I4)
4242 ppc_lwarx (code, ppc_r0, 0, location);
4243 #ifdef __mono_ppc64__
4245 ppc_ldarx (code, ppc_r0, 0, location);
4247 ppc_cmp (code, 0, ins->opcode == OP_ATOMIC_CAS_I4 ? 0 : 1, ppc_r0, comparand);
4250 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4251 if (ins->opcode == OP_ATOMIC_CAS_I4)
4252 ppc_stwcxd (code, value, 0, location);
4253 #ifdef __mono_ppc64__
4255 ppc_stdcxd (code, value, 0, location);
4258 lost_reservation = code;
4259 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4260 ppc_patch (lost_reservation, start);
4262 ppc_patch (not_equal, code);
4263 ppc_mr (code, ins->dreg, ppc_r0);
// Any opcode not handled above is a backend bug.
4268 g_warning ("unknown opcode %s in %s()\n", mono_inst_name (ins->opcode), __FUNCTION__);
4269 g_assert_not_reached ();
// Sanity check: emitted bytes for this ins must not exceed the length
// table's max_len, otherwise the code-buffer sizing logic is wrong.
4272 if ((cfg->opt & MONO_OPT_BRANCH) && ((code - cfg->native_code - offset) > max_len)) {
4273 g_warning ("wrong maximal instruction length of instruction %s (expected %d, got %ld)",
4274 mono_inst_name (ins->opcode), max_len, (glong)(code - cfg->native_code - offset));
4275 g_assert_not_reached ();
4281 last_offset = offset;
4284 cfg->code_len = code - cfg->native_code;
// Registers icalls used by the backend's low-level helpers (body elided in
// this listing — presumably empty or near-empty; confirm in full source).
4288 mono_arch_register_lowlevel_calls (void)
// patch_load_sequence rewrites the immediate halfwords of a previously
// emitted constant-load sequence in place: on ppc64 the four 16-bit pieces
// of a 64-bit constant (indices 1/3/7/9 of the instruction stream viewed as
// guint16), on ppc32 the lis/ori pair (indices 1/3).  Indices assume
// big-endian instruction layout — TODO confirm against emitter.
4292 #ifdef __mono_ppc64__
4293 #define patch_load_sequence(ip,val) do {\
4294 guint16 *__load = (guint16*)(ip); \
4295 __load [1] = (((guint64)(val)) >> 48) & 0xffff; \
4296 __load [3] = (((guint64)(val)) >> 32) & 0xffff; \
4297 __load [7] = (((guint64)(val)) >> 16) & 0xffff; \
4298 __load [9] = ((guint64)(val)) & 0xffff; \
4301 #define patch_load_sequence(ip,val) do {\
4302 guint16 *__lis_ori = (guint16*)(ip); \
4303 __lis_ori [1] = (((gulong)(val)) >> 16) & 0xffff; \
4304 __lis_ori [3] = ((gulong)(val)) & 0xffff; \
// Walks the jump-info list for a freshly compiled method, resolves each
// patch target and rewrites the emitted code accordingly.
// NOTE(review): decimated listing — breaks and closing braces between cases
// are not visible; code lines kept byte-identical, only comments added.
4309 mono_arch_patch_code (MonoMethod *method, MonoDomain *domain, guint8 *code, MonoJumpInfo *ji, gboolean run_cctors)
4311 MonoJumpInfo *patch_info;
4313 for (patch_info = ji; patch_info; patch_info = patch_info->next) {
4314 unsigned char *ip = patch_info->ip.i + code;
4315 unsigned char *target;
4316 gboolean is_fd = FALSE;
4318 target = mono_resolve_patch_target (method, domain, code, patch_info, run_cctors);
4320 switch (patch_info->type) {
// IP patch: store the instruction's own address into its load sequence.
4321 case MONO_PATCH_INFO_IP:
4322 patch_load_sequence (ip, ip);
4324 case MONO_PATCH_INFO_METHOD_REL:
4325 g_assert_not_reached ();
4326 *((gpointer *)(ip)) = code + patch_info->data.offset;
// Switch tables: patch the table address into the load sequence, then turn
// each relative entry into an absolute address.
4328 case MONO_PATCH_INFO_SWITCH: {
4329 gpointer *table = (gpointer *)patch_info->data.table->table;
4332 patch_load_sequence (ip, table);
4334 for (i = 0; i < patch_info->data.table->table_size; i++) {
4335 table [i] = (glong)patch_info->data.table->table [i] + code;
4337 /* we put into the table the absolute address, no need for ppc_patch in this case */
// Constant-style patches all share the lis/ori (or 64-bit) load-sequence form.
4340 case MONO_PATCH_INFO_METHODCONST:
4341 case MONO_PATCH_INFO_CLASS:
4342 case MONO_PATCH_INFO_IMAGE:
4343 case MONO_PATCH_INFO_FIELD:
4344 case MONO_PATCH_INFO_VTABLE:
4345 case MONO_PATCH_INFO_IID:
4346 case MONO_PATCH_INFO_SFLDA:
4347 case MONO_PATCH_INFO_LDSTR:
4348 case MONO_PATCH_INFO_TYPE_FROM_HANDLE:
4349 case MONO_PATCH_INFO_LDTOKEN:
4350 /* from OP_AOTCONST : lis + ori */
4351 patch_load_sequence (ip, target);
4353 case MONO_PATCH_INFO_R4:
4354 case MONO_PATCH_INFO_R8:
4355 g_assert_not_reached ();
4356 *((gconstpointer *)(ip + 2)) = patch_info->data.target;
4358 case MONO_PATCH_INFO_EXC_NAME:
4359 g_assert_not_reached ();
4360 *((gconstpointer *)(ip + 1)) = patch_info->data.name;
4362 case MONO_PATCH_INFO_NONE:
4363 case MONO_PATCH_INFO_BB_OVF:
4364 case MONO_PATCH_INFO_EXC_OVF:
4365 /* everything is dealt with at epilog output time */
// On ABIs with function descriptors (ppc64 ELFv1) calls to these targets go
// through the descriptor, so the patch must dereference it (is_fd).
4367 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
4368 case MONO_PATCH_INFO_INTERNAL_METHOD:
4369 case MONO_PATCH_INFO_ABS:
4370 case MONO_PATCH_INFO_CLASS_INIT:
4371 case MONO_PATCH_INFO_RGCTX_FETCH:
// Default: patch the branch/call at ip to the resolved target.
4378 ppc_patch_full (ip, target, is_fd);
4383 * Emit code to save the registers in used_int_regs or the registers in the MonoLMF
4384 * structure at positive offset pos from register base_reg. pos is guaranteed to fit into
4385 * the instruction offset immediate for all the registers.
// Stores callee-saved GPRs r13-r31 at [base_reg+pos]; with save_lmf it
// instead fills the MonoLMF iregs/fregs arrays (all of r13-r31 plus
// f14-f31), as described by the comment above this function.
4388 save_registers (guint8* code, int pos, int base_reg, gboolean save_lmf, guint32 used_int_regs)
4392 for (i = 13; i <= 31; i++) {
4393 if (used_int_regs & (1 << i)) {
4394 ppc_store_reg (code, i, pos, base_reg);
4395 pos += sizeof (gulong);
4399 /* pos is the start of the MonoLMF structure */
4400 int offset = pos + G_STRUCT_OFFSET (MonoLMF, iregs);
// LMF path saves ALL of r13-r31 unconditionally, not just used_int_regs.
4401 for (i = 13; i <= 31; i++) {
4402 ppc_store_reg (code, i, offset, base_reg);
4403 offset += sizeof (gulong);
4405 offset = pos + G_STRUCT_OFFSET (MonoLMF, fregs);
4406 for (i = 14; i < 32; i++) {
4407 ppc_stfd (code, i, offset, base_reg);
4408 offset += sizeof (gdouble);
4415 * Stack frame layout:
4417 * ------------------- sp
4418 * MonoLMF structure or saved registers
4419 * -------------------
4421 * -------------------
4423 * -------------------
4424 * optional 8 bytes for tracing
4425 * -------------------
4426 * param area size is cfg->param_area
4427 * -------------------
4428 * linkage area size is PPC_STACK_PARAM_OFFSET
4429 * ------------------- sp
// Emits the method prolog: saves LR, allocates the frame (see the layout
// comment above), saves callee-saved registers (or a MonoLMF), stores
// incoming arguments to their home locations, and optionally sets up
// tracing and thread-attach for native-to-managed wrappers.
// NOTE(review): decimated listing — many lines (else branches, breaks,
// closing braces) are elided; code kept byte-identical, comments added.
4433 mono_arch_emit_prolog (MonoCompile *cfg)
4435 MonoMethod *method = cfg->method;
4437 MonoMethodSignature *sig;
4439 long alloc_size, pos, max_offset;
4445 int tailcall_struct_index;
4447 if (mono_jit_trace_calls != NULL && mono_trace_eval (method))
// Initial code-buffer estimate; grown per parameter.
4450 sig = mono_method_signature (method);
4451 cfg->code_size = MONO_PPC_32_64_CASE (260, 384) + sig->param_count * 20;
4452 code = cfg->native_code = g_malloc (cfg->code_size);
// Save the return address in the caller's frame (always done currently).
4454 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
4455 ppc_mflr (code, ppc_r0);
4456 ppc_store_reg (code, ppc_r0, PPC_RET_ADDR_OFFSET, ppc_sp);
4459 alloc_size = cfg->stack_offset;
// 'pos' accumulates the register-save area: per-register when no LMF,
// otherwise the whole MonoLMF structure.
4462 if (!method->save_lmf) {
4463 for (i = 31; i >= 13; --i) {
4464 if (cfg->used_int_regs & (1 << i)) {
4465 pos += sizeof (gulong);
4469 pos += sizeof (MonoLMF);
4473 // align to MONO_ARCH_FRAME_ALIGNMENT bytes
4474 if (alloc_size & (MONO_ARCH_FRAME_ALIGNMENT - 1)) {
4475 alloc_size += MONO_ARCH_FRAME_ALIGNMENT - 1;
4476 alloc_size &= ~(MONO_ARCH_FRAME_ALIGNMENT - 1);
4479 cfg->stack_usage = alloc_size;
4480 g_assert ((alloc_size & (MONO_ARCH_FRAME_ALIGNMENT-1)) == 0);
// Allocate the frame with a store-with-update (keeps the back-chain),
// using an indexed form when the frame is too big for an imm16.
4482 if (ppc_is_imm16 (-alloc_size)) {
4483 ppc_store_reg_update (code, ppc_sp, -alloc_size, ppc_sp);
4484 code = save_registers (code, alloc_size - pos, ppc_sp, method->save_lmf, cfg->used_int_regs);
4487 ppc_addi (code, ppc_r11, ppc_sp, -pos);
4488 ppc_load (code, ppc_r0, -alloc_size);
4489 ppc_store_reg_update_indexed (code, ppc_sp, ppc_sp, ppc_r0);
4490 code = save_registers (code, 0, ppc_r11, method->save_lmf, cfg->used_int_regs);
4493 if (cfg->frame_reg != ppc_sp)
4494 ppc_mr (code, cfg->frame_reg, ppc_sp);
4496 /* store runtime generic context */
4497 if (cfg->rgctx_var) {
4498 g_assert (cfg->rgctx_var->opcode == OP_REGOFFSET &&
4499 (cfg->rgctx_var->inst_basereg == ppc_r1 || cfg->rgctx_var->inst_basereg == ppc_r31));
4501 ppc_store_reg (code, MONO_ARCH_RGCTX_REG, cfg->rgctx_var->inst_offset, cfg->rgctx_var->inst_basereg);
4504 /* compute max_offset in order to use short forward jumps
4505 * we always do it on ppc because the immediate displacement
4506 * for jumps is too small
4509 for (bb = cfg->bb_entry; bb; bb = bb->next_bb) {
4511 bb->max_offset = max_offset;
4513 if (cfg->prof_options & MONO_PROFILE_COVERAGE)
4516 MONO_BB_FOR_EACH_INS (bb, ins)
4517 max_offset += ins_native_length (cfg, ins);
4520 /* load arguments allocated to register from the stack */
4523 cinfo = calculate_sizes (sig, sig->pinvoke);
// Valuetype return: store the vret address register into its local slot.
4525 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
4526 ArgInfo *ainfo = &cinfo->ret;
4528 inst = cfg->vret_addr;
4531 if (ppc_is_imm16 (inst->inst_offset)) {
4532 ppc_store_reg (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4534 ppc_load (code, ppc_r11, inst->inst_offset);
4535 ppc_store_reg_indexed (code, ainfo->reg, ppc_r11, inst->inst_basereg);
// Move each incoming argument to its home: either a register (OP_REGVAR)
// or a stack slot, dispatching on the ArgInfo regtype/size.
4539 tailcall_struct_index = 0;
4540 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
4541 ArgInfo *ainfo = cinfo->args + i;
4542 inst = cfg->args [pos];
4544 if (cfg->verbose_level > 2)
4545 g_print ("Saving argument %d (type: %d)\n", i, ainfo->regtype);
4546 if (inst->opcode == OP_REGVAR) {
4547 if (ainfo->regtype == RegTypeGeneral)
4548 ppc_mr (code, inst->dreg, ainfo->reg);
4549 else if (ainfo->regtype == RegTypeFP)
4550 ppc_fmr (code, inst->dreg, ainfo->reg);
4551 else if (ainfo->regtype == RegTypeBase) {
// Argument lives in the caller's frame: follow the back-chain at 0(sp).
4552 ppc_load_reg (code, ppc_r11, 0, ppc_sp);
4553 ppc_load_reg (code, inst->dreg, ainfo->offset, ppc_r11);
4555 g_assert_not_reached ();
4557 if (cfg->verbose_level > 2)
4558 g_print ("Argument %ld assigned to register %s\n", pos, mono_arch_regname (inst->dreg));
4560 /* the argument should be put on the stack: FIXME handle size != word */
4561 if (ainfo->regtype == RegTypeGeneral) {
4562 switch (ainfo->size) {
4564 if (ppc_is_imm16 (inst->inst_offset)) {
4565 ppc_stb (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4567 ppc_load (code, ppc_r11, inst->inst_offset);
4568 ppc_stbx (code, ainfo->reg, ppc_r11, inst->inst_basereg);
4572 if (ppc_is_imm16 (inst->inst_offset)) {
4573 ppc_sth (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4575 ppc_load (code, ppc_r11, inst->inst_offset);
4576 ppc_sthx (code, ainfo->reg, ppc_r11, inst->inst_basereg);
4579 #ifdef __mono_ppc64__
4581 if (ppc_is_imm16 (inst->inst_offset)) {
4582 ppc_stw (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4584 ppc_load (code, ppc_r11, inst->inst_offset);
4585 ppc_stwx (code, ainfo->reg, ppc_r11, inst->inst_basereg);
// ppc32 8-byte args occupy a register pair (reg, reg+1).
4590 if (ppc_is_imm16 (inst->inst_offset + 4)) {
4591 ppc_stw (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4592 ppc_stw (code, ainfo->reg + 1, inst->inst_offset + 4, inst->inst_basereg);
4594 ppc_load (code, ppc_r11, inst->inst_offset);
4595 ppc_add (code, ppc_r11, ppc_r11, inst->inst_basereg);
4596 ppc_stw (code, ainfo->reg, 0, ppc_r11);
4597 ppc_stw (code, ainfo->reg + 1, 4, ppc_r11);
4602 if (ppc_is_imm16 (inst->inst_offset)) {
4603 ppc_store_reg (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4605 ppc_load (code, ppc_r11, inst->inst_offset);
4606 ppc_store_reg_indexed (code, ainfo->reg, ppc_r11, inst->inst_basereg);
// RegTypeBase: copy the argument from the caller's frame to the local slot.
4610 } else if (ainfo->regtype == RegTypeBase) {
4611 /* load the previous stack pointer in r11 */
4612 ppc_load_reg (code, ppc_r11, 0, ppc_sp);
4613 ppc_load_reg (code, ppc_r0, ainfo->offset, ppc_r11);
4614 switch (ainfo->size) {
4616 if (ppc_is_imm16 (inst->inst_offset)) {
4617 ppc_stb (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4619 ppc_load (code, ppc_r11, inst->inst_offset);
4620 ppc_stbx (code, ppc_r0, ppc_r11, inst->inst_basereg);
4624 if (ppc_is_imm16 (inst->inst_offset)) {
4625 ppc_sth (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4627 ppc_load (code, ppc_r11, inst->inst_offset);
4628 ppc_sthx (code, ppc_r0, ppc_r11, inst->inst_basereg);
4631 #ifdef __mono_ppc64__
4633 if (ppc_is_imm16 (inst->inst_offset)) {
4634 ppc_stw (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4636 ppc_load (code, ppc_r11, inst->inst_offset);
4637 ppc_stwx (code, ppc_r0, ppc_r11, inst->inst_basereg);
4642 if (ppc_is_imm16 (inst->inst_offset + 4)) {
4643 ppc_stw (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4644 ppc_lwz (code, ppc_r0, ainfo->offset + 4, ppc_r11);
4645 ppc_stw (code, ppc_r0, inst->inst_offset + 4, inst->inst_basereg);
4648 g_assert_not_reached ();
4653 if (ppc_is_imm16 (inst->inst_offset)) {
4654 ppc_store_reg (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4656 ppc_load (code, ppc_r11, inst->inst_offset);
4657 ppc_store_reg_indexed (code, ppc_r0, ppc_r11, inst->inst_basereg);
4661 } else if (ainfo->regtype == RegTypeFP) {
4662 g_assert (ppc_is_imm16 (inst->inst_offset));
4663 if (ainfo->size == 8)
4664 ppc_stfd (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4665 else if (ainfo->size == 4)
4666 ppc_stfs (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4668 g_assert_not_reached ();
// Struct passed (partially) in registers: spill each register to the
// local copy, then memcpy any remainder from the caller's frame.
4669 } else if (ainfo->regtype == RegTypeStructByVal) {
4670 int doffset = inst->inst_offset;
4674 g_assert (ppc_is_imm16 (inst->inst_offset));
4675 g_assert (ppc_is_imm16 (inst->inst_offset + ainfo->size * sizeof (gpointer)));
4676 /* FIXME: what if there is no class? */
4677 if (sig->pinvoke && mono_class_from_mono_type (inst->inst_vtype))
4678 size = mono_class_native_size (mono_class_from_mono_type (inst->inst_vtype), NULL);
4679 for (cur_reg = 0; cur_reg < ainfo->size; ++cur_reg) {
4682 * Darwin handles 1 and 2 byte
4683 * structs specially by
4684 * loading h/b into the arg
4685 * register. Only done for
4689 ppc_sth (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
4691 ppc_stb (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
4695 #ifdef __mono_ppc64__
4697 g_assert (cur_reg == 0);
4698 ppc_sldi (code, ppc_r0, ainfo->reg,
4699 (sizeof (gpointer) - ainfo->bytes) * 8);
4700 ppc_store_reg (code, ppc_r0, doffset, inst->inst_basereg);
4704 ppc_store_reg (code, ainfo->reg + cur_reg, doffset,
4705 inst->inst_basereg);
4708 soffset += sizeof (gpointer);
4709 doffset += sizeof (gpointer);
4711 if (ainfo->vtsize) {
4712 /* FIXME: we need to do the shifting here, too */
4715 /* load the previous stack pointer in r11 (r0 gets overwritten by the memcpy) */
4716 ppc_load_reg (code, ppc_r11, 0, ppc_sp);
4717 if ((size & MONO_PPC_32_64_CASE (3, 7)) != 0) {
4718 code = emit_memcpy (code, size - soffset,
4719 inst->inst_basereg, doffset,
4720 ppc_r11, ainfo->offset + soffset);
4722 code = emit_memcpy (code, ainfo->vtsize * sizeof (gpointer),
4723 inst->inst_basereg, doffset,
4724 ppc_r11, ainfo->offset + soffset);
// Struct passed by reference: copy the pointed-to data into the local copy.
4727 } else if (ainfo->regtype == RegTypeStructByAddr) {
4728 /* if it was originally a RegTypeBase */
4729 if (ainfo->offset) {
4730 /* load the previous stack pointer in r11 */
4731 ppc_load_reg (code, ppc_r11, 0, ppc_sp);
4732 ppc_load_reg (code, ppc_r11, ainfo->offset, ppc_r11);
4734 ppc_mr (code, ppc_r11, ainfo->reg);
4737 if (cfg->tailcall_valuetype_addrs) {
4738 MonoInst *addr = cfg->tailcall_valuetype_addrs [tailcall_struct_index];
4740 g_assert (ppc_is_imm16 (addr->inst_offset));
4741 ppc_store_reg (code, ppc_r11, addr->inst_offset, addr->inst_basereg);
4743 tailcall_struct_index++;
4746 g_assert (ppc_is_imm16 (inst->inst_offset));
4747 code = emit_memcpy (code, ainfo->vtsize, inst->inst_basereg, inst->inst_offset, ppc_r11, 0);
4748 /*g_print ("copy in %s: %d bytes from %d to offset: %d\n", method->name, ainfo->vtsize, ainfo->reg, inst->inst_offset);*/
4750 g_assert_not_reached ();
// Native-to-managed wrappers attach the thread to the runtime first.
4755 if (method->wrapper_type == MONO_WRAPPER_NATIVE_TO_MANAGED) {
4756 ppc_load (code, ppc_r3, cfg->domain);
4757 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD, (gpointer)"mono_jit_thread_attach");
4758 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
4759 ppc_load_func (code, ppc_r0, 0);
4760 ppc_mtlr (code, ppc_r0);
// LMF setup: obtain the per-thread lmf_addr (via TLS fast path or the
// mono_get_lmf_addr icall), then link a new MonoLMF built in this frame.
4767 if (method->save_lmf) {
4768 if (lmf_pthread_key != -1) {
4769 emit_tls_access (code, ppc_r3, lmf_pthread_key);
4770 if (tls_mode != TLS_MODE_NPTL && G_STRUCT_OFFSET (MonoJitTlsData, lmf))
4771 ppc_addi (code, ppc_r3, ppc_r3, G_STRUCT_OFFSET (MonoJitTlsData, lmf));
4773 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
4774 (gpointer)"mono_get_lmf_addr");
4775 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
4776 ppc_load_func (code, ppc_r0, 0);
4777 ppc_mtlr (code, ppc_r0);
4783 /* we build the MonoLMF structure on the stack - see mini-ppc.h */
4784 /* lmf_offset is the offset from the previous stack pointer,
4785 * alloc_size is the total stack space allocated, so the offset
4786 * of MonoLMF from the current stack ptr is alloc_size - lmf_offset.
4787 * The pointer to the struct is put in ppc_r11 (new_lmf).
4788 * The callee-saved registers are already in the MonoLMF structure
4790 ppc_addi (code, ppc_r11, ppc_sp, alloc_size - lmf_offset);
4791 /* ppc_r3 is the result from mono_get_lmf_addr () */
4792 ppc_store_reg (code, ppc_r3, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
4793 /* new_lmf->previous_lmf = *lmf_addr */
4794 ppc_load_reg (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
4795 ppc_store_reg (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
4796 /* *(lmf_addr) = r11 */
4797 ppc_store_reg (code, ppc_r11, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
4798 /* save method info */
4799 ppc_load (code, ppc_r0, method);
4800 ppc_store_reg (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, method), ppc_r11);
4801 ppc_store_reg (code, ppc_sp, G_STRUCT_OFFSET(MonoLMF, ebp), ppc_r11);
4802 /* save the current IP */
4803 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_IP, NULL);
// Placeholder constant, overwritten by the MONO_PATCH_INFO_IP patch.
4804 #ifdef __mono_ppc64__
4805 ppc_load_sequence (code, ppc_r0, (gulong)0x0101010101010101L);
4807 ppc_load_sequence (code, ppc_r0, (gulong)0x01010101L);
4809 ppc_store_reg (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, eip), ppc_r11);
4813 code = mono_arch_instrument_prolog (cfg, mono_trace_enter_method, code, TRUE);
4815 cfg->code_len = code - cfg->native_code;
4816 g_assert (cfg->code_len <= cfg->code_size);
// Emits the method epilog: optional leave-tracing, LMF unlinking and
// register restore (or plain callee-saved restore), LR reload and frame
// teardown, with separate paths for imm16-sized and large frames.
// NOTE(review): decimated listing — code kept byte-identical, comments added.
4823 mono_arch_emit_epilog (MonoCompile *cfg)
4825 MonoMethod *method = cfg->method;
4827 int max_epilog_size = 16 + 20*4;
4830 if (cfg->method->save_lmf)
4831 max_epilog_size += 128;
4833 if (mono_jit_trace_calls != NULL)
4834 max_epilog_size += 50;
4836 if (cfg->prof_options & MONO_PROFILE_ENTER_LEAVE)
4837 max_epilog_size += 50;
// Grow the code buffer until the worst-case epilog fits.
4839 while (cfg->code_len + max_epilog_size > (cfg->code_size - 16)) {
4840 cfg->code_size *= 2;
4841 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
4842 mono_jit_stats.code_reallocs++;
4846 * Keep in sync with OP_JMP
4848 code = cfg->native_code + cfg->code_len;
4850 if (mono_jit_trace_calls != NULL && mono_trace_eval (method)) {
4851 code = mono_arch_instrument_epilog (cfg, mono_trace_leave_method, code, TRUE);
// LMF path: unlink this frame's MonoLMF (*lmf_addr = previous_lmf) and
// restore the callee-saved registers from the LMF iregs array.
4855 if (method->save_lmf) {
4857 pos += sizeof (MonoLMF);
4859 /* save the frame reg in r8 */
4860 ppc_mr (code, ppc_r8, cfg->frame_reg);
4861 ppc_addi (code, ppc_r11, cfg->frame_reg, cfg->stack_usage - lmf_offset);
4862 /* r5 = previous_lmf */
4863 ppc_load_reg (code, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
4865 ppc_load_reg (code, ppc_r6, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
4866 /* *(lmf_addr) = previous_lmf */
4867 ppc_store_reg (code, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r6);
4868 /* FIXME: speedup: there is no actual need to restore the registers if
4869 * we didn't actually change them (idea from Zoltan).
4872 ppc_load_multiple_regs (code, ppc_r13, G_STRUCT_OFFSET(MonoLMF, iregs), ppc_r11);
4874 /*for (i = 14; i < 32; i++) {
4875 ppc_lfd (code, i, G_STRUCT_OFFSET(MonoLMF, fregs) + ((i-14) * sizeof (gdouble)), ppc_r11);
4877 g_assert (ppc_is_imm16 (cfg->stack_usage + PPC_RET_ADDR_OFFSET));
4878 /* use the saved copy of the frame reg in r8 */
4879 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
4880 ppc_load_reg (code, ppc_r0, cfg->stack_usage + PPC_RET_ADDR_OFFSET, ppc_r8);
4881 ppc_mtlr (code, ppc_r0);
4883 ppc_addic (code, ppc_sp, ppc_r8, cfg->stack_usage);
// Non-LMF path: reload LR from the caller's save slot.
4885 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
4886 long return_offset = cfg->stack_usage + PPC_RET_ADDR_OFFSET;
4887 if (ppc_is_imm16 (return_offset)) {
4888 ppc_load_reg (code, ppc_r0, return_offset, cfg->frame_reg);
4890 ppc_load (code, ppc_r11, return_offset);
4891 ppc_load_reg_indexed (code, ppc_r0, cfg->frame_reg, ppc_r11);
4893 ppc_mtlr (code, ppc_r0);
// Small frame: restore the used callee-saved registers from the top of the
// frame (offset walked back first, then restored in ascending order so r31
// — possibly the frame register — is restored last), then pop the frame.
4895 if (ppc_is_imm16 (cfg->stack_usage)) {
4896 int offset = cfg->stack_usage;
4897 for (i = 13; i <= 31; i++) {
4898 if (cfg->used_int_regs & (1 << i))
4899 offset -= sizeof (gulong);
4901 if (cfg->frame_reg != ppc_sp)
4902 ppc_mr (code, ppc_r11, cfg->frame_reg);
4903 /* note r31 (possibly the frame register) is restored last */
4904 for (i = 13; i <= 31; i++) {
4905 if (cfg->used_int_regs & (1 << i)) {
4906 ppc_load_reg (code, i, offset, cfg->frame_reg);
4907 offset += sizeof (gulong);
4910 if (cfg->frame_reg != ppc_sp)
4911 ppc_addi (code, ppc_sp, ppc_r11, cfg->stack_usage);
4913 ppc_addi (code, ppc_sp, ppc_sp, cfg->stack_usage);
// Large frame: compute the frame top in r11 and restore downwards.
4915 ppc_load (code, ppc_r11, cfg->stack_usage);
4916 if (cfg->used_int_regs) {
4917 ppc_add (code, ppc_r11, cfg->frame_reg, ppc_r11);
4918 for (i = 31; i >= 13; --i) {
4919 if (cfg->used_int_regs & (1 << i)) {
4920 pos += sizeof (gulong);
4921 ppc_load_reg (code, i, -pos, ppc_r11);
4924 ppc_mr (code, ppc_sp, ppc_r11);
4926 ppc_add (code, ppc_sp, cfg->frame_reg, ppc_r11);
4933 cfg->code_len = code - cfg->native_code;
4935 g_assert (cfg->code_len < cfg->code_size);
4939 /* remove once throw_exception_by_name is eliminated */
// Maps an intrinsic exception class name to its MONO_EXC_* id; aborts on
// an unknown name (only the corlib exceptions thrown by JITted code are
// expected here).
4941 exception_id_by_name (const char *name)
4943 if (strcmp (name, "IndexOutOfRangeException") == 0)
4944 return MONO_EXC_INDEX_OUT_OF_RANGE;
4945 if (strcmp (name, "OverflowException") == 0)
4946 return MONO_EXC_OVERFLOW;
4947 if (strcmp (name, "ArithmeticException") == 0)
4948 return MONO_EXC_ARITHMETIC;
4949 if (strcmp (name, "DivideByZeroException") == 0)
4950 return MONO_EXC_DIVIDE_BY_ZERO;
4951 if (strcmp (name, "InvalidCastException") == 0)
4952 return MONO_EXC_INVALID_CAST;
4953 if (strcmp (name, "NullReferenceException") == 0)
4954 return MONO_EXC_NULL_REF;
4955 if (strcmp (name, "ArrayTypeMismatchException") == 0)
4956 return MONO_EXC_ARRAY_TYPE_MISMATCH;
4957 g_error ("Unknown intrinsic exception %s\n", name);
/*
 * Emit the out-of-line exception-raising stubs at the end of the
 * method's native code. Walks cfg->patch_info twice: first to size
 * the extra epilog space (growing cfg->native_code if needed), then
 * to emit/patch the stubs for EXC, BB_OVF and EXC_OVF patch entries.
 * Identical EXC throws share one stub per exception id.
 */
4962 mono_arch_emit_exceptions (MonoCompile *cfg)
4964 MonoJumpInfo *patch_info;
/* one cached stub position per intrinsic exception id */
4967 const guint8* exc_throw_pos [MONO_EXC_INTRINS_NUM] = {NULL};
4968 guint8 exc_throw_found [MONO_EXC_INTRINS_NUM] = {0};
4969 int max_epilog_size = 50;
4971 /* count the number of exception infos */
4974 * make sure we have enough space for exceptions
4975 * 28 is the simulated call to throw_corlib_exception
4977 for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
4978 if (patch_info->type == MONO_PATCH_INFO_EXC) {
4979 i = exception_id_by_name (patch_info->data.target);
4980 if (!exc_throw_found [i]) {
4981 max_epilog_size += 28;
4982 exc_throw_found [i] = TRUE;
4984 } else if (patch_info->type == MONO_PATCH_INFO_BB_OVF)
4985 max_epilog_size += 12;
4986 else if (patch_info->type == MONO_PATCH_INFO_EXC_OVF) {
4987 MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
4988 i = exception_id_by_name (ovfj->data.exception);
4989 if (!exc_throw_found [i]) {
4990 max_epilog_size += 28;
4991 exc_throw_found [i] = TRUE;
4993 max_epilog_size += 8;
/* grow the code buffer until the worst-case epilog fits (16-byte slack) */
4997 while (cfg->code_len + max_epilog_size > (cfg->code_size - 16)) {
4998 cfg->code_size *= 2;
4999 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
5000 mono_jit_stats.code_reallocs++;
5003 code = cfg->native_code + cfg->code_len;
5005 /* add code to raise exceptions */
5006 for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
5007 switch (patch_info->type) {
5008 case MONO_PATCH_INFO_BB_OVF: {
5009 MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
5010 unsigned char *ip = patch_info->ip.i + cfg->native_code;
5011 /* patch the initial jump */
5012 ppc_patch (ip, code);
5013 ppc_bc (code, ovfj->b0_cond, ovfj->b1_cond, 2);
5015 ppc_patch (code - 4, ip + 4); /* jump back after the initial branch */
5016 /* jump back to the true target */
5018 ip = ovfj->data.bb->native_offset + cfg->native_code;
5019 ppc_patch (code - 4, ip);
/* handled: make sure generic patching skips this entry */
5020 patch_info->type = MONO_PATCH_INFO_NONE;
5023 case MONO_PATCH_INFO_EXC_OVF: {
5024 MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
5025 MonoJumpInfo *newji;
5026 unsigned char *ip = patch_info->ip.i + cfg->native_code;
5027 unsigned char *bcl = code;
5028 /* patch the initial jump: we arrived here with a call */
5029 ppc_patch (ip, code);
5030 ppc_bc (code, ovfj->b0_cond, ovfj->b1_cond, 0);
5032 ppc_patch (code - 4, ip + 4); /* jump back after the initial branch */
5033 /* patch the conditional jump to the right handler */
5034 /* make it processed next */
/* turn this overflow jump into a plain EXC patch at the bc we just
 * emitted, and splice it right after the current entry so the
 * MONO_PATCH_INFO_EXC case below resolves it on the same pass */
5035 newji = mono_mempool_alloc (cfg->mempool, sizeof (MonoJumpInfo));
5036 newji->type = MONO_PATCH_INFO_EXC;
5037 newji->ip.i = bcl - cfg->native_code;
5038 newji->data.target = ovfj->data.exception;
5039 newji->next = patch_info->next;
5040 patch_info->next = newji;
5041 patch_info->type = MONO_PATCH_INFO_NONE;
5044 case MONO_PATCH_INFO_EXC: {
5045 MonoClass *exc_class;
5047 unsigned char *ip = patch_info->ip.i + cfg->native_code;
5048 i = exception_id_by_name (patch_info->data.target);
/* reuse an already-emitted stub for the same exception id */
5049 if (exc_throw_pos [i]) {
5050 ppc_patch (ip, exc_throw_pos [i]);
5051 patch_info->type = MONO_PATCH_INFO_NONE;
5054 exc_throw_pos [i] = code;
5057 exc_class = mono_class_from_name (mono_defaults.corlib, "System", patch_info->data.name);
5058 g_assert (exc_class);
5060 ppc_patch (ip, code);
5061 /*mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC_NAME, patch_info->data.target);*/
/* r3 = exception type token, r4 = throwing IP (taken from lr) — the
 * argument registers for mono_arch_throw_corlib_exception */
5062 ppc_load (code, ppc_r3, exc_class->type_token);
5063 /* we got here from a conditional call, so the calling ip is set in lr */
5064 ppc_mflr (code, ppc_r4);
/* retarget this patch entry at the call we are about to emit */
5065 patch_info->type = MONO_PATCH_INFO_INTERNAL_METHOD;
5066 patch_info->data.name = "mono_arch_throw_corlib_exception";
5067 patch_info->ip.i = code - cfg->native_code;
5068 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
/* indirect call through ctr; the 0 address is fixed up later via the
 * patch entry recorded just above */
5069 ppc_load_func (code, ppc_r0, 0);
5070 ppc_mtctr (code, ppc_r0);
5071 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
5083 cfg->code_len = code - cfg->native_code;
5085 g_assert (cfg->code_len <= cfg->code_size);
/*
 * Debug helper: verify that a pthread-key TLS value can be reached via
 * the linuxthreads-style fixed offset (284) from the thread register r2.
 * Compares the value found through the two-level table at r2+284 against
 * the expected 'value' for slot 'idx'.
 * NOTE(review): only linuxthreads layout is checked here — the 284 offset
 * matches offsets_from_pthread_key() at the top of this file.
 */
5090 try_offset_access (void *value, guint32 idx)
5092 register void* me __asm__ ("r2");
5093 void ***p = (void***)((char*)me + 284);
5094 int idx1 = idx / 32;
5095 int idx2 = idx % 32;
5098 if (value != p[idx1][idx2])
/*
 * Detect at runtime which TLS access scheme the platform uses and cache
 * the TLS offsets/keys for the JIT's fast TLS-load sequences.
 *
 * Detection strategy: on Linux, consult the libpthread implementation
 * string (NPTL vs linuxthreads); otherwise disassemble the first
 * instructions of pthread_getspecific() and pattern-match them against
 * known linuxthreads and Darwin (G4/G5) prologues. On success tls_mode
 * is set to one of TLS_MODE_NPTL / TLS_MODE_LTHREADS / TLS_MODE_DARWIN_G5;
 * on any mismatch it falls back to TLS_MODE_FAILED (keyed access only).
 * Honors the MONO_NO_TLS environment variable to force failure mode.
 * Afterwards, resolves the domain / lmf / thread TLS offsets or keys.
 */
5105 setup_tls_access (void)
5109 #if defined(__linux__) && defined(_CS_GNU_LIBPTHREAD_VERSION)
5110 size_t conf_size = 0;
5113 /* FIXME for darwin */
5114 guint32 *ins, *code;
/* template opcodes assembled below, used as match patterns */
5115 guint32 cmplwi_1023, li_0x48, blr_ins;
5118 if (tls_mode == TLS_MODE_FAILED)
5120 if (g_getenv ("MONO_NO_TLS")) {
5121 tls_mode = TLS_MODE_FAILED;
5125 if (tls_mode == TLS_MODE_DETECT) {
5126 #if defined(__linux__) && defined(_CS_GNU_LIBPTHREAD_VERSION)
/* "NPTL x.y" prefix in the libpthread version string means NPTL */
5127 conf_size = confstr ( _CS_GNU_LIBPTHREAD_VERSION, confbuf, sizeof(confbuf));
5128 if ((conf_size > 4) && (strncmp (confbuf, "NPTL", 4) == 0))
5129 tls_mode = TLS_MODE_NPTL;
5131 tls_mode = TLS_MODE_LTHREADS;
/* no confstr info: disassemble pthread_getspecific instead */
5133 ins = (guint32*)pthread_getspecific;
5134 /* uncond branch to the real method */
/* opcode 18 == PowerPC 'b'/'bl'; extract the LI displacement field */
5135 if ((*ins >> 26) == 18) {
5137 val = (*ins & ~3) << 6;
5141 ins = (guint32*)(long)val;
5143 ins = (guint32*) ((char*)ins + val);
/* assemble the opcodes we expect in the known prologues */
5146 code = &cmplwi_1023;
5147 ppc_cmpli (code, 0, 0, ppc_r3, 1023);
5149 ppc_li (code, ppc_r4, 0x48);
/* linuxthreads prologue starts with 'cmplwi r3, 1023'; confirm by
 * finding the 'lwz rX, 284(rY)' load within the next 20 insns */
5152 if (*ins == cmplwi_1023) {
5153 int found_lwz_284 = 0;
5154 for (ptk = 0; ptk < 20; ++ptk) {
5156 if (!*ins || *ins == blr_ins)
5158 if ((guint16)*ins == 284 && (*ins >> 26) == 32) {
5163 if (!found_lwz_284) {
5164 tls_mode = TLS_MODE_FAILED;
5167 tls_mode = TLS_MODE_LTHREADS;
5168 } else if (*ins == li_0x48) {
5170 /* uncond branch to the real method */
5171 if ((*ins >> 26) == 18) {
5173 val = (*ins & ~3) << 6;
5177 ins = (guint32*)(long)val;
5179 ins = (guint32*) ((char*)ins + val);
/* Darwin G4 uses the 0x7FF2 syscall; G5 reads sprg3 (spr 104) */
5181 code = (guint32*)&val;
5182 ppc_li (code, ppc_r0, 0x7FF2);
5183 if (ins [1] == val) {
5184 /* Darwin on G4, implement */
5185 tls_mode = TLS_MODE_FAILED;
5188 code = (guint32*)&val;
5189 ppc_mfspr (code, ppc_r3, 104);
5190 if (ins [1] != val) {
5191 tls_mode = TLS_MODE_FAILED;
5194 tls_mode = TLS_MODE_DARWIN_G5;
5197 tls_mode = TLS_MODE_FAILED;
5201 tls_mode = TLS_MODE_FAILED;
/* resolve the MonoDomain TLS slot: direct offset under NPTL,
 * otherwise fall back to a pthread key */
5206 if ((monodomain_key == -1) && (tls_mode == TLS_MODE_NPTL)) {
5207 monodomain_key = mono_domain_get_tls_offset();
5209 /* if not TLS_MODE_NPTL or local dynamic (as indicated by
5210 mono_domain_get_tls_offset returning -1) then use keyed access. */
5211 if (monodomain_key == -1) {
5212 ptk = mono_domain_get_tls_key ();
5214 ptk = mono_pthread_key_for_tls (ptk);
5216 monodomain_key = ptk;
/* resolve the LMF-address TLS slot the same way */
5221 if ((lmf_pthread_key == -1) && (tls_mode == TLS_MODE_NPTL)) {
5222 lmf_pthread_key = mono_get_lmf_addr_tls_offset();
5224 /* if not TLS_MODE_NPTL or local dynamic (as indicated by
5225 mono_get_lmf_addr_tls_offset returning -1) then use keyed access. */
5226 if (lmf_pthread_key == -1) {
5227 ptk = mono_pthread_key_for_tls (mono_jit_tls_id);
5229 /*g_print ("MonoLMF at: %d\n", ptk);*/
5230 /*if (!try_offset_access (mono_get_lmf_addr (), ptk)) {
5231 init_tls_failed = 1;
5234 lmf_pthread_key = ptk;
/* resolve the MonoThread TLS slot the same way */
5238 if ((monothread_key == -1) && (tls_mode == TLS_MODE_NPTL)) {
5239 monothread_key = mono_thread_get_tls_offset();
5241 /* if not TLS_MODE_NPTL or local dynamic (as indicated by
5242 mono_get_lmf_addr_tls_offset returning -1) then use keyed access. */
5243 if (monothread_key == -1) {
5244 ptk = mono_thread_get_tls_key ();
5246 ptk = mono_pthread_key_for_tls (ptk);
5248 monothread_key = ptk;
5249 /*g_print ("thread inited: %d\n", ptk);*/
5252 /*g_print ("thread not inited yet %d\n", ptk);*/
/*
 * Per-thread JIT TLS initialization hook: on PPC this only (re)runs the
 * TLS-mode detection so the fast TLS sequences are available.
 */
5258 mono_arch_setup_jit_tls_data (MonoJitTlsData *tls)
5260 setup_tls_access ();
/* Per-thread JIT TLS teardown hook; nothing to free on this backend
 * (body not visible in this chunk — presumably empty; verify). */
5264 mono_arch_free_jit_tls_data (MonoJitTlsData *tls)
5268 #ifdef MONO_ARCH_HAVE_IMT
5270 #define CMP_SIZE (PPC_LOAD_SEQUENCE_LENGTH + 4)
5272 #define LOADSTORE_SIZE 4
5273 #define JUMP_IMM_SIZE 12
5274 #define JUMP_IMM32_SIZE (PPC_LOAD_SEQUENCE_LENGTH + 8)
5275 #define ENABLE_WRONG_METHOD_CHECK 0
/*
 * Build the native IMT thunk: a compare-and-branch decision chain over
 * the IMT entries. Each "equals" item compares MONO_ARCH_IMT_REG against
 * the item key and jumps to the target method (loaded from the vtable,
 * or item->value.target_code); non-equals items emit a less-than branch
 * used by the later patch-up loop to wire the binary-search structure.
 * First pass sizes every item's chunk, second pass emits the code.
 */
5278 * LOCKING: called with the domain lock held
5281 mono_arch_build_imt_thunk (MonoVTable *vtable, MonoDomain *domain, MonoIMTCheckItem **imt_entries, int count,
5282 gpointer fail_tramp)
5286 guint8 *code, *start;
/* pass 1: compute per-item chunk sizes and the total thunk size */
5288 for (i = 0; i < count; ++i) {
5289 MonoIMTCheckItem *item = imt_entries [i];
5290 if (item->is_equals) {
5291 if (item->check_target_idx) {
5292 if (!item->compare_done)
5293 item->chunk_size += CMP_SIZE;
5294 if (item->has_target_code)
5295 item->chunk_size += BR_SIZE + JUMP_IMM32_SIZE;
5297 item->chunk_size += LOADSTORE_SIZE + BR_SIZE + JUMP_IMM_SIZE;
/* fail_tramp case: both the hit and the miss path are emitted */
5300 item->chunk_size += CMP_SIZE + BR_SIZE + JUMP_IMM32_SIZE * 2;
5301 if (!item->has_target_code)
5302 item->chunk_size += LOADSTORE_SIZE;
5304 item->chunk_size += LOADSTORE_SIZE + JUMP_IMM_SIZE;
5305 #if ENABLE_WRONG_METHOD_CHECK
5306 item->chunk_size += CMP_SIZE + BR_SIZE + 4;
5311 item->chunk_size += CMP_SIZE + BR_SIZE;
5312 imt_entries [item->check_target_idx]->compare_done = TRUE;
5314 size += item->chunk_size;
/* thunks for generic virtuals live in a separate, reclaimable area */
5317 code = mono_method_alloc_generic_virtual_thunk (domain, size);
5319 /* the initial load of the vtable address */
5320 size += PPC_LOAD_SEQUENCE_LENGTH + LOADSTORE_SIZE;
5321 code = mono_domain_code_reserve (domain, size);
5326 * We need to save and restore r11 because it might be
5327 * used by the caller as the vtable register, so
5328 * clobbering it will trip up the magic trampoline.
5330 * FIXME: Get rid of this by making sure that r11 is
5331 * not used as the vtable register in interface calls.
5333 ppc_store_reg (code, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_sp);
5334 ppc_load (code, ppc_r11, (gulong)(& (vtable->vtable [0])));
/* pass 2: emit the compare/branch chain */
5336 for (i = 0; i < count; ++i) {
5337 MonoIMTCheckItem *item = imt_entries [i];
5338 item->code_target = code;
5339 if (item->is_equals) {
5340 if (item->check_target_idx) {
5341 if (!item->compare_done) {
5342 ppc_load (code, ppc_r0, (gulong)item->key);
5343 ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
/* branch target (miss path) fixed up in the patch loop below */
5345 item->jmp_code = code;
5346 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
5347 if (item->has_target_code) {
5348 ppc_load (code, ppc_r0, item->value.target_code);
5350 ppc_load_reg (code, ppc_r0, (sizeof (gpointer) * item->value.vtable_slot), ppc_r11);
5351 ppc_load_reg (code, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_sp);
5353 ppc_mtctr (code, ppc_r0);
5354 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
/* fail_tramp variant: inline miss path jumps to the fail trampoline */
5357 ppc_load (code, ppc_r0, (gulong)item->key);
5358 ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
5359 item->jmp_code = code;
5360 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
5361 if (item->has_target_code) {
5362 ppc_load (code, ppc_r0, item->value.target_code);
5365 ppc_load (code, ppc_r0, & (vtable->vtable [item->value.vtable_slot]));
5366 ppc_load_reg_indexed (code, ppc_r0, 0, ppc_r0);
5368 ppc_mtctr (code, ppc_r0);
5369 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
5370 ppc_patch (item->jmp_code, code);
5371 ppc_load (code, ppc_r0, fail_tramp);
5372 ppc_mtctr (code, ppc_r0);
5373 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
5374 item->jmp_code = NULL;
5376 /* enable the commented code to assert on wrong method */
5377 #if ENABLE_WRONG_METHOD_CHECK
5378 ppc_load (code, ppc_r0, (guint32)item->key);
5379 ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
5380 item->jmp_code = code;
5381 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
5383 ppc_load_reg (code, ppc_r0, (sizeof (gpointer) * item->value.vtable_slot), ppc_r11);
5384 ppc_load_reg (code, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_sp);
5385 ppc_mtctr (code, ppc_r0);
5386 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
5387 #if ENABLE_WRONG_METHOD_CHECK
5388 ppc_patch (item->jmp_code, code);
5390 item->jmp_code = NULL;
/* non-equals item: unsigned less-than branch for the search tree */
5395 ppc_load (code, ppc_r0, (gulong)item->key);
5396 ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
5397 item->jmp_code = code;
5398 ppc_bc (code, PPC_BR_FALSE, PPC_BR_LT, 0);
5401 /* patch the branches to get to the target items */
5402 for (i = 0; i < count; ++i) {
5403 MonoIMTCheckItem *item = imt_entries [i];
5404 if (item->jmp_code) {
5405 if (item->check_target_idx) {
5406 ppc_patch (item->jmp_code, imt_entries [item->check_target_idx]->code_target);
5412 mono_stats.imt_thunks_size += code - start;
5413 g_assert (code - start <= size);
5414 mono_arch_flush_icache (start, size);
/*
 * Return the MonoMethod the caller stashed in the IMT register;
 * 'regs' is the register file saved by the trampoline.
 */
5419 mono_arch_find_imt_method (gpointer *regs, guint8 *code)
5421 return (MonoMethod*) regs [MONO_ARCH_IMT_REG];
/*
 * Recover the 'this' argument of a call from the saved register file
 * by delegating to the generic calling-convention helper.
 */
5425 mono_arch_find_this_argument (gpointer *regs, MonoMethod *method, MonoGenericSharingContext *gsctx)
5427 return mono_arch_get_this_arg_from_call (gsctx, mono_method_signature (method), (gssize*)regs, NULL);
/*
 * Return the vtable passed in the RGCTX register for a generic-sharing
 * static call; 'regs' is the register file saved by the trampoline.
 */
5432 mono_arch_find_static_call_vtable (gpointer *regs, guint8 *code)
5434 return (MonoVTable*) regs [MONO_ARCH_RGCTX_REG];
/* Arch hook to replace a call to cmethod with an intrinsic MonoInst;
 * body not visible in this chunk — presumably returns NULL (no PPC
 * intrinsics here); verify against the full file. */
5438 mono_arch_emit_inst_for_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
/* Arch hook for pretty-printing backend-specific instruction trees;
 * body not visible in this chunk — presumably prints nothing; verify. */
5445 mono_arch_print_tree (MonoInst *tree, int arity)
/*
 * Build an OP_TLS_GET instruction that loads the current MonoDomain
 * from TLS, or fail (when monodomain_key could not be resolved) so the
 * caller falls back to the generic path.
 */
5450 MonoInst* mono_arch_get_domain_intrinsic (MonoCompile* cfg)
5454 setup_tls_access ();
5455 if (monodomain_key == -1)
5458 MONO_INST_NEW (cfg, ins, OP_TLS_GET);
5459 ins->inst_offset = monodomain_key;
/*
 * Build an OP_TLS_GET instruction that loads the current MonoThread
 * from TLS, or fail (when monothread_key could not be resolved) so the
 * caller falls back to the generic path.
 */
5464 mono_arch_get_thread_intrinsic (MonoCompile* cfg)
5468 setup_tls_access ();
5469 if (monothread_key == -1)
5472 MONO_INST_NEW (cfg, ins, OP_TLS_GET);
5473 ins->inst_offset = monothread_key;
5478 mono_arch_context_get_int_reg (MonoContext *ctx, int reg)
5481 return MONO_CONTEXT_GET_SP (ctx);
5483 g_assert (reg >= ppc_r13);
5485 return (gpointer)ctx->regs [reg - ppc_r13];