2 * mini-ppc.c: PowerPC backend for the Mono code generator
5 * Paolo Molaro (lupus@ximian.com)
6 * Dietmar Maurer (dietmar@ximian.com)
7 * Andreas Faerber <andreas.faerber@web.de>
9 * (C) 2003 Ximian, Inc.
10 * (C) 2007-2008 Andreas Faerber
15 #include <mono/metadata/appdomain.h>
16 #include <mono/metadata/debug-helpers.h>
19 #ifdef TARGET_POWERPC64
20 #include "cpu-ppc64.h"
27 #include <sys/sysctl.h>
33 #define FORCE_INDIR_CALL 1
44 /* This mutex protects architecture specific caches */
45 #define mono_mini_arch_lock() EnterCriticalSection (&mini_arch_mutex)
46 #define mono_mini_arch_unlock() LeaveCriticalSection (&mini_arch_mutex)
47 static CRITICAL_SECTION mini_arch_mutex;
49 int mono_exc_esp_offset = 0;
50 static int tls_mode = TLS_MODE_DETECT;
51 static int lmf_pthread_key = -1;
52 static int monothread_key = -1;
53 static int monodomain_key = -1;
/*
 * offsets_from_pthread_key:
 * Splits a pthread TLS key into the two byte offsets needed for the
 * LinuxThreads two-level specific-data lookup: *offset2 receives the
 * second-level offset, the return value is the first-level offset.
 * NOTE(review): the 284 base presumably matches the pthread descriptor
 * layout of the targeted libc — confirm; the body is only partially
 * visible in this chunk (idx1/idx2 are computed on elided lines).
 */
56 offsets_from_pthread_key (guint32 key, int *offset2)
60 *offset2 = idx2 * sizeof (gpointer);
61 return 284 + idx1 * sizeof (gpointer);
/*
 * TLS access emitters. Each macro emits PPC instructions that load the TLS
 * slot for `key` into `dreg`, one per threading implementation:
 *   - emit_linuxthreads_tls: two-level indirect load starting from ppc_r2,
 *     using offsets_from_pthread_key () (LinuxThreads).
 *   - emit_darwing5_tls: reads the thread pointer from SPR 104, then loads
 *     at 0x48 + key*sizeof(gpointer) (Darwin on G5).
 *   - emit_darwing4_tls: issues syscall 0x7FF2 to obtain pthread self in r3
 *     (the FIXME line notes the sc call must preserve all but r3), saving and
 *     restoring r3 via r11 when dreg != r3 (Darwin on G4).
 *   - emit_nptl_tls: loads relative to PPC_THREAD_PTR_REG; when the key's
 *     high part (key >> 15) does not collapse to 0/-1, an addis fixup into
 *     r11 extends the 16-bit displacement. Without PPC_THREAD_PTR_REG the
 *     fallback definition asserts.
 *   - emit_tls_access: dispatches on the runtime-detected tls_mode.
 * NOTE(review): the macro bodies are only partially visible in this chunk
 * (closing `} while (0)` lines and some statements are elided).
 */
64 #define emit_linuxthreads_tls(code,dreg,key) do {\
66 off1 = offsets_from_pthread_key ((key), &off2); \
67 ppc_ldptr ((code), (dreg), off1, ppc_r2); \
68 ppc_ldptr ((code), (dreg), off2, (dreg)); \
71 #define emit_darwing5_tls(code,dreg,key) do {\
72 int off1 = 0x48 + key * sizeof (gpointer); \
73 ppc_mfspr ((code), (dreg), 104); \
74 ppc_ldptr ((code), (dreg), off1, (dreg)); \
77 /* FIXME: ensure the sc call preserves all but r3 */
78 #define emit_darwing4_tls(code,dreg,key) do {\
79 int off1 = 0x48 + key * sizeof (gpointer); \
80 if ((dreg) != ppc_r3) ppc_mr ((code), ppc_r11, ppc_r3); \
81 ppc_li ((code), ppc_r0, 0x7FF2); \
83 ppc_lwz ((code), (dreg), off1, ppc_r3); \
84 if ((dreg) != ppc_r3) ppc_mr ((code), ppc_r3, ppc_r11); \
87 #ifdef PPC_THREAD_PTR_REG
88 #define emit_nptl_tls(code,dreg,key) do { \
90 int off2 = key >> 15; \
91 if ((off2 == 0) || (off2 == -1)) { \
92 ppc_ldptr ((code), (dreg), off1, PPC_THREAD_PTR_REG); \
94 int off3 = (off2 + 1) > 1; \
95 ppc_addis ((code), ppc_r11, PPC_THREAD_PTR_REG, off3); \
96 ppc_ldptr ((code), (dreg), off1, ppc_r11); \
100 #define emit_nptl_tls(code,dreg,key) do { \
101 g_assert_not_reached (); \
105 #define emit_tls_access(code,dreg,key) do { \
106 switch (tls_mode) { \
107 case TLS_MODE_LTHREADS: emit_linuxthreads_tls(code,dreg,key); break; \
108 case TLS_MODE_NPTL: emit_nptl_tls(code,dreg,key); break; \
109 case TLS_MODE_DARWIN_G5: emit_darwing5_tls(code,dreg,key); break; \
110 case TLS_MODE_DARWIN_G4: emit_darwing4_tls(code,dreg,key); break; \
111 default: g_assert_not_reached (); \
/*
 * MONO_EMIT_NEW_LOAD_R8:
 * Appends an OP_R8CONST instruction to the current basic block that loads
 * the double stored at `addr` into `dr`. The address is stashed in inst_p0;
 * dreg assignment happens on an elided line. NOTE(review): only partially
 * visible in this chunk.
 */
115 #define MONO_EMIT_NEW_LOAD_R8(cfg,dr,addr) do { \
117 MONO_INST_NEW ((cfg), (inst), OP_R8CONST); \
118 inst->type = STACK_R8; \
120 inst->inst_p0 = (void*)(addr); \
121 mono_bblock_add_inst (cfg->cbb, inst); \
/*
 * mono_arch_regname:
 * Returns the printable name of integer register `reg` (r1 is shown as
 * "sp"); valid for 0 <= reg < 32. NOTE(review): the table's last row and
 * the out-of-range return are on elided lines.
 */
125 mono_arch_regname (int reg) {
126 static const char rnames[][4] = {
127 "r0", "sp", "r2", "r3", "r4",
128 "r5", "r6", "r7", "r8", "r9",
129 "r10", "r11", "r12", "r13", "r14",
130 "r15", "r16", "r17", "r18", "r19",
131 "r20", "r21", "r22", "r23", "r24",
132 "r25", "r26", "r27", "r28", "r29",
135 if (reg >= 0 && reg < 32)
/*
 * mono_arch_fregname:
 * Returns the printable name of floating-point register `reg`; valid for
 * 0 <= reg < 32. NOTE(review): the table's last row and the out-of-range
 * return are on elided lines.
 */
141 mono_arch_fregname (int reg) {
142 static const char rnames[][4] = {
143 "f0", "f1", "f2", "f3", "f4",
144 "f5", "f6", "f7", "f8", "f9",
145 "f10", "f11", "f12", "f13", "f14",
146 "f15", "f16", "f17", "f18", "f19",
147 "f20", "f21", "f22", "f23", "f24",
148 "f25", "f26", "f27", "f28", "f29",
151 if (reg >= 0 && reg < 32)
/*
 * emit_memcpy:
 * Emits PPC code copying `size` bytes from sreg+soffset to dreg+doffset.
 * For sizes above 5 machine words it emits a CTR-driven copy loop using
 * pre-incremented load/store-with-update through r11/r12 (hence the
 * assertion that sreg == r11), then falls through to copy the remainder
 * with progressively narrower loads/stores (word/half/byte; doubleword
 * first on ppc64). Clobbers r0, r11, r12 as the original comment states.
 * NOTE(review): loop-exit brace lines and some remainder branches are
 * elided in this chunk.
 */
156 /* this function overwrites r0, r11, r12 */
158 emit_memcpy (guint8 *code, int size, int dreg, int doffset, int sreg, int soffset)
160 /* unrolled, use the counter in big */
161 if (size > sizeof (gpointer) * 5) {
162 long shifted = size >> MONO_PPC_32_64_CASE (2, 3);
163 guint8 *copy_loop_start, *copy_loop_jump;
165 ppc_load (code, ppc_r0, shifted);
166 ppc_mtctr (code, ppc_r0);
167 g_assert (sreg == ppc_r11);
168 ppc_addi (code, ppc_r12, dreg, (doffset - sizeof (gpointer)));
169 ppc_addi (code, ppc_r11, sreg, (soffset - sizeof (gpointer)));
170 copy_loop_start = code;
171 ppc_ldptr_update (code, ppc_r0, (unsigned int)sizeof (gpointer), ppc_r11);
172 ppc_stptr_update (code, ppc_r0, (unsigned int)sizeof (gpointer), ppc_r12);
173 copy_loop_jump = code;
174 ppc_bc (code, PPC_BR_DEC_CTR_NONZERO, 0, 0);
175 ppc_patch (copy_loop_jump, copy_loop_start);
176 size -= shifted * sizeof (gpointer);
177 doffset = soffset = 0;
180 #ifdef __mono_ppc64__
182 ppc_ldptr (code, ppc_r0, soffset, sreg);
183 ppc_stptr (code, ppc_r0, doffset, dreg);
190 ppc_lwz (code, ppc_r0, soffset, sreg);
191 ppc_stw (code, ppc_r0, doffset, dreg);
197 ppc_lhz (code, ppc_r0, soffset, sreg);
198 ppc_sth (code, ppc_r0, doffset, dreg);
204 ppc_lbz (code, ppc_r0, soffset, sreg);
205 ppc_stb (code, ppc_r0, doffset, dreg);
/*
 * NOTE(review): this chunk is elided — several lines of the function body
 * (offset updates, the return) are missing; the comments below describe
 * only what is visible.
 */
214 * mono_arch_get_argument_info:
215 * @csig: a method signature
216 * @param_count: the number of parameters to consider
217 * @arg_info: an array to store the result infos
219 * Gathers information on parameters such as size, alignment and
220 * padding. arg_info should be large enought to hold param_count + 1 entries.
222 * Returns the size of the activation frame.
225 mono_arch_get_argument_info (MonoMethodSignature *csig, int param_count, MonoJitArgumentInfo *arg_info)
227 #ifdef __mono_ppc64__
231 int k, frame_size = 0;
232 int size, align, pad;
/* a struct return adds a hidden pointer slot to the frame */
235 if (MONO_TYPE_ISSTRUCT (csig->ret)) {
236 frame_size += sizeof (gpointer);
240 arg_info [0].offset = offset;
243 frame_size += sizeof (gpointer);
247 arg_info [0].size = frame_size;
/* per-parameter: size from native or managed stack layout, then pad the
 * frame to the parameter's alignment (alignment ignored per the comment) */
249 for (k = 0; k < param_count; k++) {
252 size = mono_type_native_stack_size (csig->params [k], (guint32*)&align);
254 size = mini_type_stack_size (NULL, csig->params [k], &align);
256 /* ignore alignment for now */
259 frame_size += pad = (align - (frame_size & (align - 1))) & (align - 1);
260 arg_info [k].pad = pad;
262 arg_info [k + 1].pad = 0;
263 arg_info [k + 1].size = size;
265 arg_info [k + 1].offset = offset;
/* final frame padding up to MONO_ARCH_FRAME_ALIGNMENT */
269 align = MONO_ARCH_FRAME_ALIGNMENT;
270 frame_size += pad = (align - (frame_size & (align - 1))) & (align - 1);
271 arg_info [k].pad = pad;
277 #ifdef __mono_ppc64__
/*
 * is_load_sequence:
 * Recognizes the 5-instruction ppc64 64-bit constant-load idiom
 * lis/ori/sldi/oris/ori by checking the primary opcode of each word.
 */
279 is_load_sequence (guint32 *seq)
281 return ppc_opcode (seq [0]) == 15 && /* lis */
282 ppc_opcode (seq [1]) == 24 && /* ori */
283 ppc_opcode (seq [2]) == 30 && /* sldi */
284 ppc_opcode (seq [3]) == 25 && /* oris */
285 ppc_opcode (seq [4]) == 24; /* ori */
/* extract the destination register / 16-bit displacement from a load word */
288 #define ppc_load_get_dest(l) (((l)>>21) & 0x1f)
289 #define ppc_load_get_off(l) ((gint16)((l) & 0xffff))
/*
 * mono_ppc_is_direct_call_sequence:
 * Returns whether the instructions ending at `code` (which must point at
 * the blrl — asserted) form a thunk-less direct call: on ppc64,
 * lis/ori/sldi/oris/ori possibly followed by TOC-save ld/ld or mr/nop
 * before the mtlr; on ppc32 just lis/ori/mtlr/blrl. Opcode numbers:
 * 31 = mtlr (XO-form), 58 = ld, 24 = ori, 15 = lis.
 * NOTE(review): some early-return lines are elided in this chunk.
 */
292 /* code must point to the blrl */
294 mono_ppc_is_direct_call_sequence (guint32 *code)
296 #ifdef __mono_ppc64__
297 g_assert(*code == 0x4e800021 || *code == 0x4e800020 || *code == 0x4e800420);
299 /* the thunk-less direct call sequence: lis/ori/sldi/oris/ori/mtlr/blrl */
300 if (ppc_opcode (code [-1]) == 31) { /* mtlr */
301 if (ppc_opcode (code [-2]) == 58 && ppc_opcode (code [-3]) == 58) { /* ld/ld */
302 if (!is_load_sequence (&code [-8]))
304 /* one of the loads must be "ld r2,8(rX)" */
305 return (ppc_load_get_dest (code [-2]) == ppc_r2 && ppc_load_get_off (code [-2]) == 8) ||
306 (ppc_load_get_dest (code [-3]) == ppc_r2 && ppc_load_get_off (code [-3]) == 8);
308 if (ppc_opcode (code [-2]) == 24 && ppc_opcode (code [-3]) == 31) /* mr/nop */
309 return is_load_sequence (&code [-8]);
311 return is_load_sequence (&code [-6]);
315 g_assert(*code == 0x4e800021);
317 /* the thunk-less direct call sequence: lis/ori/mtlr/blrl */
318 return ppc_opcode (code [-1]) == 31 &&
319 ppc_opcode (code [-2]) == 24 &&
320 ppc_opcode (code [-3]) == 15;
/*
 * mono_arch_get_vcall_slot:
 * Given a return address (`code_ptr`, just past a blrl) and the saved
 * register state, determines the vtable slot a virtual call was loaded
 * from: walks backwards to the `mtlr rA`, decodes the preceding lwz to get
 * the base register and 16-bit displacement, and for r13..r31 reads the
 * base register's value out of the MonoLMF saved on the stack (the magic
 * offset 14 doubles + 13 pointers presumably skips the fp/int save area
 * preceding lmf->iregs — confirm against the LMF layout).
 * NOTE(review): several control-flow lines are elided in this chunk.
 */
325 mono_arch_get_vcall_slot (guint8 *code_ptr, mgreg_t *regs, int *displacement)
329 guint32* code = (guint32*)code_ptr;
333 /* This is the 'blrl' instruction */
336 /* Sanity check: instruction must be 'blrl' */
337 if (*code != 0x4e800021)
340 if (mono_ppc_is_direct_call_sequence (code))
343 /* FIXME: more sanity checks here */
344 /* OK, we're now at the 'blrl' instruction. Now walk backwards
345 till we get to a 'mtlr rA' */
347 if((*code & 0x7c0803a6) == 0x7c0803a6) {
349 /* Here we are: we reached the 'mtlr rA'.
350 Extract the register from the instruction */
351 reg = (*code & 0x03e00000) >> 21;
353 /* ok, this is a lwz reg, offset (vtreg)
354 * it is emitted with:
355 * ppc_emit32 (c, (32 << 26) | ((D) << 21) | ((a) << 16) | (guint16)(d))
357 soff = (*code & 0xffff);
359 reg = (*code >> 16) & 0x1f;
360 g_assert (reg != ppc_r1);
361 /*g_print ("patching reg is %d\n", reg);*/
363 MonoLMF *lmf = (MonoLMF*)((char*)regs + (14 * sizeof (double)) + (13 * sizeof (gpointer)));
364 /* saved in the MonoLMF structure */
365 o = (gpointer)lmf->iregs [reg - 13];
372 *displacement = offset;
376 #define MAX_ARCH_DELEGATE_PARAMS 7
/*
 * get_delegate_invoke_impl:
 * Emits a small trampoline for Delegate.Invoke. With a target: load
 * method_ptr from the delegate (dereferencing the function descriptor on
 * ABIs that use them), move it to CTR, replace the `this` argument (r3)
 * with delegate->target, and bcctr. Without a target: same dispatch but
 * slide each of the `param_count` register arguments down one register
 * (r3+i <- r3+i+1) to drop the delegate argument.
 * NOTE(review): the aot branch and return lines are partly elided.
 */
379 get_delegate_invoke_impl (gboolean has_target, guint32 param_count, guint32 *code_len, gboolean aot)
381 guint8 *code, *start;
384 int size = MONO_PPC_32_64_CASE (32, 32) + PPC_FTNPTR_SIZE;
386 start = code = mono_global_codeman_reserve (size);
388 code = mono_ppc_create_pre_code_ftnptr (code);
390 /* Replace the this argument with the target */
391 ppc_ldptr (code, ppc_r0, G_STRUCT_OFFSET (MonoDelegate, method_ptr), ppc_r3);
392 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
393 /* it's a function descriptor */
394 ppc_ldx (code, ppc_r0, 0, ppc_r0);
396 ppc_mtctr (code, ppc_r0);
397 ppc_ldptr (code, ppc_r3, G_STRUCT_OFFSET (MonoDelegate, target), ppc_r3);
398 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
400 g_assert ((code - start) <= size);
402 mono_arch_flush_icache (start, size);
406 size = MONO_PPC_32_64_CASE (32, 32) + param_count * 4 + PPC_FTNPTR_SIZE;
407 start = code = mono_global_codeman_reserve (size);
409 code = mono_ppc_create_pre_code_ftnptr (code);
411 ppc_ldptr (code, ppc_r0, G_STRUCT_OFFSET (MonoDelegate, method_ptr), ppc_r3);
412 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
413 /* it's a function descriptor */
414 ppc_ldx (code, ppc_r0, 0, ppc_r0);
416 ppc_mtctr (code, ppc_r0);
417 /* slide down the arguments */
418 for (i = 0; i < param_count; ++i) {
419 ppc_mr (code, (ppc_r3 + i), (ppc_r3 + i + 1));
421 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
423 g_assert ((code - start) <= size);
425 mono_arch_flush_icache (start, size);
429 *code_len = code - start;
/*
 * mono_arch_get_delegate_invoke_impls:
 * Builds the list of named delegate-invoke trampolines for AOT: one
 * has-target variant plus one target_%d variant per supported parameter
 * count (0 .. MAX_ARCH_DELEGATE_PARAMS-1).
 */
435 mono_arch_get_delegate_invoke_impls (void)
442 code = get_delegate_invoke_impl (TRUE, 0, &code_len, TRUE);
443 res = g_slist_prepend (res, mono_aot_tramp_info_create (g_strdup ("delegate_invoke_impl_has_target"), code, code_len));
445 for (i = 0; i < MAX_ARCH_DELEGATE_PARAMS; ++i) {
446 code = get_delegate_invoke_impl (FALSE, i, &code_len, TRUE);
447 res = g_slist_prepend (res, mono_aot_tramp_info_create (g_strdup_printf ("delegate_invoke_impl_target_%d", i), code, code_len));
/*
 * mono_arch_get_delegate_invoke_impl:
 * Returns (and caches) the delegate-invoke trampoline matching `sig`.
 * Struct returns are unsupported; the no-target path additionally requires
 * at most MAX_ARCH_DELEGATE_PARAMS register-sized parameters. In AOT mode
 * the named trampoline is fetched instead of emitted. The memory barrier
 * before publishing into the static cache orders the code write against
 * the cache-pointer write. NOTE(review): cache hit/return lines elided.
 */
454 mono_arch_get_delegate_invoke_impl (MonoMethodSignature *sig, gboolean has_target)
456 guint8 *code, *start;
458 /* FIXME: Support more cases */
459 if (MONO_TYPE_ISSTRUCT (sig->ret))
463 static guint8* cached = NULL;
469 start = mono_aot_get_named_code ("delegate_invoke_impl_has_target");
471 start = get_delegate_invoke_impl (TRUE, 0, NULL, FALSE);
473 mono_memory_barrier ();
477 static guint8* cache [MAX_ARCH_DELEGATE_PARAMS + 1] = {NULL};
480 if (sig->param_count > MAX_ARCH_DELEGATE_PARAMS)
482 for (i = 0; i < sig->param_count; ++i)
483 if (!mono_is_regsize_var (sig->params [i]))
487 code = cache [sig->param_count];
492 char *name = g_strdup_printf ("delegate_invoke_impl_target_%d", sig->param_count);
493 start = mono_aot_get_named_code (name);
496 start = get_delegate_invoke_impl (FALSE, sig->param_count, NULL, FALSE);
499 mono_memory_barrier ();
501 cache [sig->param_count] = start;
/*
 * mono_arch_get_this_arg_from_call:
 * Recovers the `this` pointer from the saved argument registers: normally
 * r3, but r4 when a struct return occupies r3 with the hidden return
 * buffer pointer.
 */
507 mono_arch_get_this_arg_from_call (MonoGenericSharingContext *gsctx, MonoMethodSignature *sig, mgreg_t *regs, guint8 *code)
509 /* FIXME: handle returning a struct */
510 if (MONO_TYPE_ISSTRUCT (sig->ret))
511 return (gpointer)regs [ppc_r4];
512 return (gpointer)regs [ppc_r3];
516 * Initialize the cpu to execute managed code.
519 mono_arch_cpu_init (void)
/* mono_arch_init / mono_arch_cleanup: create and destroy the critical
 * section guarding architecture-specific caches (see mini_arch_mutex). */
524 * Initialize architecture specific code.
527 mono_arch_init (void)
529 InitializeCriticalSection (&mini_arch_mutex);
533 * Cleanup architecture specific code.
536 mono_arch_cleanup (void)
538 DeleteCriticalSection (&mini_arch_mutex);
/* (sic: historical Mono spelling "optimizazions" — part of the backend
 * interface, do not rename) reports CPU-specific optimization masks;
 * none are enabled for ppc per the comment below. */
542 * This function returns the optimizations supported on this cpu.
545 mono_arch_cpu_optimizazions (guint32 *exclude_mask)
549 /* no ppc-specific optimizations yet */
554 #ifdef __mono_ppc64__
555 #define CASE_PPC32(c)
556 #define CASE_PPC64(c) case c:
558 #define CASE_PPC32(c) case c:
559 #define CASE_PPC64(c)
/*
 * is_regsize_var:
 * Returns whether a variable of type `t` fits in one integer register on
 * this word size: integral types up to the native width (I8/U8 only on
 * ppc64 via the CASE_PPC64 macro), pointers and reference types, and
 * generic instances that are not valuetypes. NOTE(review): several case
 * labels and the return statements are elided in this chunk.
 */
563 is_regsize_var (MonoType *t) {
566 t = mini_type_get_underlying_type (NULL, t);
570 CASE_PPC64 (MONO_TYPE_I8)
571 CASE_PPC64 (MONO_TYPE_U8)
575 case MONO_TYPE_FNPTR:
577 case MONO_TYPE_OBJECT:
578 case MONO_TYPE_STRING:
579 case MONO_TYPE_CLASS:
580 case MONO_TYPE_SZARRAY:
581 case MONO_TYPE_ARRAY:
583 case MONO_TYPE_GENERICINST:
584 if (!mono_type_generic_inst_is_valuetype (t))
587 case MONO_TYPE_VALUETYPE:
/*
 * mono_arch_get_allocatable_int_vars:
 * Collects the method variables eligible for global register allocation:
 * skips dead ranges, volatile/indirect variables and anything that is not
 * a local or argument, keeps only register-sized values, and returns them
 * sorted by mono_varlist_insert_sorted.
 */
594 mono_arch_get_allocatable_int_vars (MonoCompile *cfg)
599 for (i = 0; i < cfg->num_varinfo; i++) {
600 MonoInst *ins = cfg->varinfo [i];
601 MonoMethodVar *vmv = MONO_VARINFO (cfg, i);
604 if (vmv->range.first_use.abs_pos >= vmv->range.last_use.abs_pos)
607 if (ins->flags & (MONO_INST_VOLATILE|MONO_INST_INDIRECT) || (ins->opcode != OP_LOCAL && ins->opcode != OP_ARG))
610 /* we can only allocate 32 bit values */
611 if (is_regsize_var (ins->inst_vtype)) {
612 g_assert (MONO_VARINFO (cfg, i)->reg == -1);
613 g_assert (i == vmv->idx);
614 vars = mono_varlist_insert_sorted (cfg, vars, vmv, FALSE);
/*
 * mono_arch_get_global_int_regs:
 * Returns the callee-saved integer registers available for global
 * allocation (r14 and up; r13 is reserved for the system on PPC EABI, and
 * the frame register is excluded when it is not sp).
 * NOTE(review): the computation of `top` is on an elided line.
 */
622 mono_arch_get_global_int_regs (MonoCompile *cfg)
626 if (cfg->frame_reg != ppc_sp)
628 /* ppc_r13 is used by the system on PPC EABI */
629 for (i = 14; i < top; ++i)
630 regs = g_list_prepend (regs, GUINT_TO_POINTER (i));
/*
 * mono_arch_regalloc_cost:
 * Cost (in memory references) of keeping `vmv` in a register; body elided
 * in this chunk.
 */
636 * mono_arch_regalloc_cost:
638 * Return the cost, in number of memory references, of the action of
639 * allocating the variable VMV into a register during global register
643 mono_arch_regalloc_cost (MonoCompile *cfg, MonoMethodVar *vmv)
/*
 * mono_arch_flush_icache:
 * Makes freshly written code visible to the instruction stream: detects
 * the cache line size once (sysctl HW_CACHELINE on BSD/Darwin, the auxv
 * AT_DCACHEBSIZE entry — type 19 — on Linux 2.6+), aligns `code` down to a
 * line boundary, then walks the range issuing dcbf (or dcbst) to push data
 * cache lines to memory and icbi+sync to invalidate the instruction cache,
 * per the PowerPC self-modifying-code protocol. No-op when cross-compiling.
 * NOTE(review): the #ifdef ladder selecting between the loop variants is
 * partly elided in this chunk.
 */
655 mono_arch_flush_icache (guint8 *code, gint size)
657 #ifdef MONO_CROSS_COMPILE
660 guint8 *endp, *start;
661 static int cachelinesize = 0;
662 static int cachelineinc = 16;
664 if (!cachelinesize) {
669 mib [1] = HW_CACHELINE;
670 len = sizeof (cachelinesize);
671 if (sysctl(mib, 2, &cachelinesize, (size_t*)&len, NULL, 0) == -1) {
675 cachelineinc = cachelinesize;
676 /*g_print ("setting cl size to %d\n", cachelinesize);*/
678 #elif defined(__linux__)
679 /* sadly this will work only with 2.6 kernels... */
680 FILE* f = fopen ("/proc/self/auxv", "rb");
683 while (fread (&vec, sizeof (vec), 1, f) == 1) {
684 if (vec.type == 19) {
685 cachelinesize = vec.value;
693 #elif defined(G_COMPILER_CODEWARRIOR)
697 #warning Need a way to get cache line size
703 start = (guint8*)((gsize)start & ~(cachelinesize - 1));
704 /* use dcbf for smp support, later optimize for UP, see pem._64bit.d20030611.pdf page 211 */
705 #if defined(G_COMPILER_CODEWARRIOR)
707 for (p = start; p < endp; p += cachelineinc) {
711 for (p = start; p < endp; p += cachelineinc) {
717 for (p = start; p < endp; p += cachelineinc) {
729 for (p = start; p < endp; p += cachelineinc) {
730 asm ("dcbf 0,%0;" : : "r"(p) : "memory");
733 for (p = start; p < endp; p += cachelineinc) {
734 asm ("dcbst 0,%0;" : : "r"(p) : "memory");
739 for (p = start; p < endp; p += cachelineinc) {
740 asm ("icbi 0,%0; sync;" : : "r"(p) : "memory");
/* no register windows on PPC: nothing to flush */
749 mono_arch_flush_register_windows (void)
/*
 * ALWAYS_ON_STACK / FP_ALSO_IN_REG: calling-convention knobs — on ABIs
 * where they expand to their argument, register args also reserve stack
 * space / FP args also consume a GP register slot; otherwise they expand
 * to nothing. ALIGN_DOUBLES requests 8-byte alignment of doubles.
 * NOTE(review): the #ifdef ladder choosing the variants is partly elided.
 */
754 #define ALWAYS_ON_STACK(s) s
755 #define FP_ALSO_IN_REG(s) s
757 #ifdef __mono_ppc64__
758 #define ALWAYS_ON_STACK(s) s
759 #define FP_ALSO_IN_REG(s) s
761 #define ALWAYS_ON_STACK(s)
762 #define FP_ALSO_IN_REG(s)
764 #define ALIGN_DOUBLES
/* ArgInfo fields (struct header on elided lines): how one argument is
 * passed — vtsize counts words spilled to the param area for by-value
 * structs; regtype selects general/base/FP/struct passing; size is the
 * byte size or the register count for RegTypeStructByVal. */
777 guint32 vtsize; /* in param area */
779 guint8 regtype : 4; /* 0 general, 1 basereg, 2 floating point register, see RegType* */
780 guint8 size : 4; /* 1, 2, 4, 8, or regs used by RegTypeStructByVal */
781 guint8 bytes : 4; /* size in bytes - only valid for
782 RegTypeStructByVal if the struct fits
783 in one word, otherwise it's 0*/
/*
 * add_general:
 * Assigns the next position for a general (integer/pointer) argument:
 * either the next GP argument register (*gr) or, when registers r3..r3+N
 * are exhausted, a stack slot at PPC_STACK_PARAM_OFFSET + *stack_size
 * (RegTypeBase, relative to the caller's sp). `simple` distinguishes
 * register-sized values from 8-byte values needing an even/aligned pair
 * on 32-bit (note the ALIGN_DOUBLES-era commented-out padding).
 * NOTE(review): gr/stack advance lines are partly elided in this chunk.
 */
798 add_general (guint *gr, guint *stack_size, ArgInfo *ainfo, gboolean simple)
800 #ifdef __mono_ppc64__
805 if (*gr >= 3 + PPC_NUM_REG_ARGS) {
806 ainfo->offset = PPC_STACK_PARAM_OFFSET + *stack_size;
807 ainfo->reg = ppc_sp; /* in the caller */
808 ainfo->regtype = RegTypeBase;
809 *stack_size += sizeof (gpointer);
811 ALWAYS_ON_STACK (*stack_size += sizeof (gpointer));
815 if (*gr >= 3 + PPC_NUM_REG_ARGS - 1) {
817 //*stack_size += (*stack_size % 8);
819 ainfo->offset = PPC_STACK_PARAM_OFFSET + *stack_size;
820 ainfo->reg = ppc_sp; /* in the caller */
821 ainfo->regtype = RegTypeBase;
828 ALWAYS_ON_STACK (*stack_size += 8);
836 #if defined(__APPLE__) || defined(__mono_ppc64__)
/*
 * has_only_a_r48_field:
 * Returns whether `klass` consists of exactly one non-static field that is
 * an R4 or R8 — such structs are passed in an FP register on these ABIs.
 * NOTE(review): the single-field check and return are on elided lines.
 */
838 has_only_a_r48_field (MonoClass *klass)
842 gboolean have_field = FALSE;
844 while ((f = mono_class_get_fields (klass, &iter))) {
845 if (!(f->type->attrs & FIELD_ATTRIBUTE_STATIC)) {
848 if (!f->type->byref && (f->type->type == MONO_TYPE_R4 || f->type->type == MONO_TYPE_R8))
/*
 * calculate_sizes:
 * Builds the CallInfo for `sig`: walks every parameter (plus `this` and,
 * for varargs, the signature cookie) assigning each an ArgInfo — GP
 * register via add_general (), FP register when fr <= PPC_LAST_FPARG_REG,
 * by-value struct split across registers and param-area words
 * (RegTypeStructByVal) or passed by reference (RegTypeStructByAddr)
 * depending on PPC_PASS_STRUCTS_BY_VALUE — then classifies the return
 * value and records the 16-byte-aligned total stack usage.
 * NOTE(review): this chunk is heavily elided (many case labels, braces and
 * the return statement are missing); comments describe visible code only.
 */
859 calculate_sizes (MonoMethodSignature *sig, gboolean is_pinvoke)
862 int n = sig->hasthis + sig->param_count;
864 guint32 stack_size = 0;
865 CallInfo *cinfo = g_malloc0 (sizeof (CallInfo) + sizeof (ArgInfo) * n);
867 fr = PPC_FIRST_FPARG_REG;
868 gr = PPC_FIRST_ARG_REG;
870 /* FIXME: handle returning a struct */
871 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
872 add_general (&gr, &stack_size, &cinfo->ret, TRUE);
873 cinfo->struct_ret = PPC_FIRST_ARG_REG;
878 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
881 DEBUG(printf("params: %d\n", sig->param_count));
/* per-parameter classification */
882 for (i = 0; i < sig->param_count; ++i) {
883 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos)) {
884 /* Prevent implicit arguments and sig_cookie from
885 being passed in registers */
886 gr = PPC_LAST_ARG_REG + 1;
887 /* FIXME: don't we have to set fr, too? */
888 /* Emit the signature cookie just before the implicit arguments */
889 add_general (&gr, &stack_size, &cinfo->sig_cookie, TRUE);
891 DEBUG(printf("param %d: ", i));
892 if (sig->params [i]->byref) {
893 DEBUG(printf("byref\n"));
894 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
898 simpletype = mini_type_get_underlying_type (NULL, sig->params [i])->type;
899 switch (simpletype) {
900 case MONO_TYPE_BOOLEAN:
903 cinfo->args [n].size = 1;
904 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
910 cinfo->args [n].size = 2;
911 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
916 cinfo->args [n].size = 4;
917 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
923 case MONO_TYPE_FNPTR:
924 case MONO_TYPE_CLASS:
925 case MONO_TYPE_OBJECT:
926 case MONO_TYPE_STRING:
927 case MONO_TYPE_SZARRAY:
928 case MONO_TYPE_ARRAY:
929 cinfo->args [n].size = sizeof (gpointer);
930 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
933 case MONO_TYPE_GENERICINST:
934 if (!mono_type_generic_inst_is_valuetype (sig->params [i])) {
935 cinfo->args [n].size = sizeof (gpointer);
936 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
941 case MONO_TYPE_VALUETYPE: {
944 klass = mono_class_from_mono_type (sig->params [i]);
946 size = mono_class_native_size (klass, NULL);
948 size = mono_class_value_size (klass, NULL);
949 #if defined(__APPLE__) || defined(__mono_ppc64__)
/* single-float/double structs ride in an FP register on these ABIs */
950 if ((size == 4 || size == 8) && has_only_a_r48_field (klass)) {
951 cinfo->args [n].size = size;
953 /* It was 7, now it is 8 in LinuxPPC */
954 if (fr <= PPC_LAST_FPARG_REG) {
955 cinfo->args [n].regtype = RegTypeFP;
956 cinfo->args [n].reg = fr;
958 FP_ALSO_IN_REG (gr ++);
960 FP_ALSO_IN_REG (gr ++);
961 ALWAYS_ON_STACK (stack_size += size);
963 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
964 cinfo->args [n].regtype = RegTypeBase;
965 cinfo->args [n].reg = ppc_sp; /* in the caller*/
972 DEBUG(printf ("load %d bytes struct\n",
973 mono_class_native_size (sig->params [i]->data.klass, NULL)));
974 #if PPC_PASS_STRUCTS_BY_VALUE
976 int align_size = size;
978 int rest = PPC_LAST_ARG_REG - gr + 1;
980 align_size += (sizeof (gpointer) - 1);
981 align_size &= ~(sizeof (gpointer) - 1);
982 nwords = (align_size + sizeof (gpointer) -1 ) / sizeof (gpointer);
983 n_in_regs = MIN (rest, nwords);
984 cinfo->args [n].regtype = RegTypeStructByVal;
985 if (gr > PPC_LAST_ARG_REG
987 /* FIXME: check this */
988 || (size >= 3 && size % 4 != 0)
991 cinfo->args [n].size = 0;
992 cinfo->args [n].vtsize = nwords;
994 cinfo->args [n].size = n_in_regs;
995 cinfo->args [n].vtsize = nwords - n_in_regs;
996 cinfo->args [n].reg = gr;
998 #ifdef __mono_ppc64__
999 if (nwords == 1 && is_pinvoke)
1000 cinfo->args [n].bytes = size;
1003 cinfo->args [n].bytes = 0;
1005 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
1006 /*g_print ("offset for arg %d at %d\n", n, PPC_STACK_PARAM_OFFSET + stack_size);*/
1007 stack_size += nwords * sizeof (gpointer);
1010 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
1011 cinfo->args [n].regtype = RegTypeStructByAddr;
1012 cinfo->args [n].vtsize = size;
1017 case MONO_TYPE_TYPEDBYREF: {
1018 int size = sizeof (MonoTypedRef);
1019 /* keep in sync or merge with the valuetype case */
1020 #if PPC_PASS_STRUCTS_BY_VALUE
1022 int nwords = (size + sizeof (gpointer) -1 ) / sizeof (gpointer);
1023 cinfo->args [n].regtype = RegTypeStructByVal;
1024 if (gr <= PPC_LAST_ARG_REG) {
1025 int rest = PPC_LAST_ARG_REG - gr + 1;
1026 int n_in_regs = rest >= nwords? nwords: rest;
1027 cinfo->args [n].size = n_in_regs;
1028 cinfo->args [n].vtsize = nwords - n_in_regs;
1029 cinfo->args [n].reg = gr;
1032 cinfo->args [n].size = 0;
1033 cinfo->args [n].vtsize = nwords;
1035 #ifdef __mono_ppc64__
1036 if (nwords == 1 && is_pinvoke)
1037 cinfo->args [n].bytes = size;
1040 cinfo->args [n].bytes = 0;
1041 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
1042 /*g_print ("offset for arg %d at %d\n", n, PPC_STACK_PARAM_OFFSET + stack_size);*/
1043 stack_size += nwords * sizeof (gpointer);
1046 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
1047 cinfo->args [n].regtype = RegTypeStructByAddr;
1048 cinfo->args [n].vtsize = size;
1055 cinfo->args [n].size = 8;
1056 add_general (&gr, &stack_size, cinfo->args + n, SIZEOF_REGISTER == 8);
1060 cinfo->args [n].size = 4;
1062 /* It was 7, now it is 8 in LinuxPPC */
1063 if (fr <= PPC_LAST_FPARG_REG) {
1064 cinfo->args [n].regtype = RegTypeFP;
1065 cinfo->args [n].reg = fr;
1067 FP_ALSO_IN_REG (gr ++);
1068 ALWAYS_ON_STACK (stack_size += SIZEOF_REGISTER);
1070 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size + MONO_PPC_32_64_CASE (0, 4);
1071 cinfo->args [n].regtype = RegTypeBase;
1072 cinfo->args [n].reg = ppc_sp; /* in the caller*/
1073 stack_size += SIZEOF_REGISTER;
1078 cinfo->args [n].size = 8;
1079 /* It was 7, now it is 8 in LinuxPPC */
1080 if (fr <= PPC_LAST_FPARG_REG) {
1081 cinfo->args [n].regtype = RegTypeFP;
1082 cinfo->args [n].reg = fr;
1084 FP_ALSO_IN_REG (gr += sizeof (double) / SIZEOF_REGISTER);
1085 ALWAYS_ON_STACK (stack_size += 8);
1087 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
1088 cinfo->args [n].regtype = RegTypeBase;
1089 cinfo->args [n].reg = ppc_sp; /* in the caller*/
1095 g_error ("Can't trampoline 0x%x", sig->params [i]->type);
1099 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos)) {
1100 /* Prevent implicit arguments and sig_cookie from
1101 being passed in registers */
1102 gr = PPC_LAST_ARG_REG + 1;
1103 /* Emit the signature cookie just before the implicit arguments */
1104 add_general (&gr, &stack_size, &cinfo->sig_cookie, TRUE);
/* classify the return value: r3 for integrals/pointers, f1 for floats */
1108 simpletype = mini_type_get_underlying_type (NULL, sig->ret)->type;
1109 switch (simpletype) {
1110 case MONO_TYPE_BOOLEAN:
1115 case MONO_TYPE_CHAR:
1121 case MONO_TYPE_FNPTR:
1122 case MONO_TYPE_CLASS:
1123 case MONO_TYPE_OBJECT:
1124 case MONO_TYPE_SZARRAY:
1125 case MONO_TYPE_ARRAY:
1126 case MONO_TYPE_STRING:
1127 cinfo->ret.reg = ppc_r3;
1131 cinfo->ret.reg = ppc_r3;
1135 cinfo->ret.reg = ppc_f1;
1136 cinfo->ret.regtype = RegTypeFP;
1138 case MONO_TYPE_GENERICINST:
1139 if (!mono_type_generic_inst_is_valuetype (sig->ret)) {
1140 cinfo->ret.reg = ppc_r3;
1144 case MONO_TYPE_VALUETYPE:
1146 case MONO_TYPE_TYPEDBYREF:
1147 case MONO_TYPE_VOID:
1150 g_error ("Can't handle as return value 0x%x", sig->ret->type);
1154 /* align stack size to 16 */
1155 DEBUG (printf (" stack size: %d (%d)\n", (stack_size + 15) & ~15, stack_size));
1156 stack_size = (stack_size + 15) & ~15;
1158 cinfo->stack_usage = stack_size;
/*
 * allocate_tailcall_valuetype_addrs:
 * On ABIs that pass structs by reference (!PPC_PASS_STRUCTS_BY_VALUE),
 * methods with tail calls get one indirect int-sized local per valuetype
 * parameter so the callee-owned struct storage can be reused on the tail
 * call. No-op when the method has no MONO_CFG_HAS_TAIL flag.
 */
1163 allocate_tailcall_valuetype_addrs (MonoCompile *cfg)
1165 #if !PPC_PASS_STRUCTS_BY_VALUE
1166 MonoMethodSignature *sig = mono_method_signature (cfg->method);
1167 int num_structs = 0;
1170 if (!(cfg->flags & MONO_CFG_HAS_TAIL))
1173 for (i = 0; i < sig->param_count; ++i) {
1174 MonoType *type = mono_type_get_underlying_type (sig->params [i]);
1175 if (type->type == MONO_TYPE_VALUETYPE)
1180 cfg->tailcall_valuetype_addrs =
1181 mono_mempool_alloc0 (cfg->mempool, sizeof (MonoInst*) * num_structs);
1182 for (i = 0; i < num_structs; ++i) {
1183 cfg->tailcall_valuetype_addrs [i] =
1184 mono_compile_create_var (cfg, &mono_defaults.int_class->byval_arg, OP_LOCAL);
1185 cfg->tailcall_valuetype_addrs [i]->flags |= MONO_INST_INDIRECT;
/*
 * NOTE(review): this chunk is elided — braces, several assignments and the
 * loop over `this` are missing; comments below annotate visible code only.
 */
1192 * Set var information according to the calling convention. ppc version.
1193 * The locals var stuff should most likely be split in another method.
1196 mono_arch_allocate_vars (MonoCompile *m)
1198 MonoMethodSignature *sig;
1199 MonoMethodHeader *header;
1201 int i, offset, size, align, curinst;
1202 int frame_reg = ppc_sp;
1204 guint32 locals_stack_size, locals_stack_align;
1206 allocate_tailcall_valuetype_addrs (m);
1208 m->flags |= MONO_CFG_HAS_SPILLUP;
1210 /* allow room for the vararg method args: void* and long/double */
1211 if (mono_jit_trace_calls != NULL && mono_trace_eval (m->method))
1212 m->param_area = MAX (m->param_area, sizeof (gpointer)*8);
1213 /* this is bug #60332: remove when #59509 is fixed, so no weird vararg
1214 * call convs needs to be handled this way.
1216 if (m->flags & MONO_CFG_HAS_VARARGS)
1217 m->param_area = MAX (m->param_area, sizeof (gpointer)*8);
1218 /* gtk-sharp and other broken code will dllimport vararg functions even with
1219 * non-varargs signatures. Since there is little hope people will get this right
1220 * we assume they won't.
1222 if (m->method->wrapper_type == MONO_WRAPPER_MANAGED_TO_NATIVE)
1223 m->param_area = MAX (m->param_area, sizeof (gpointer)*8);
1225 header = mono_method_get_header (m->method);
1228 * We use the frame register also for any method that has
1229 * exception clauses. This way, when the handlers are called,
1230 * the code will reference local variables using the frame reg instead of
1231 * the stack pointer: if we had to restore the stack pointer, we'd
1232 * corrupt the method frames that are already on the stack (since
1233 * filters get called before stack unwinding happens) when the filter
1234 * code would call any method (this also applies to finally etc.).
1236 if ((m->flags & MONO_CFG_HAS_ALLOCA) || header->num_clauses)
1237 frame_reg = ppc_r31;
1238 m->frame_reg = frame_reg;
1239 if (frame_reg != ppc_sp) {
1240 m->used_int_regs |= 1 << frame_reg;
1243 sig = mono_method_signature (m->method);
/* pick the return-value home: r3 (or f1 for floats); struct returns use r3
 * for the hidden buffer pointer */
1247 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
1248 m->ret->opcode = OP_REGVAR;
1249 m->ret->inst_c0 = m->ret->dreg = ppc_r3;
1251 /* FIXME: handle long values? */
1252 switch (mini_type_get_underlying_type (m->generic_sharing_context, sig->ret)->type) {
1253 case MONO_TYPE_VOID:
1257 m->ret->opcode = OP_REGVAR;
1258 m->ret->inst_c0 = m->ret->dreg = ppc_f1;
1261 m->ret->opcode = OP_REGVAR;
1262 m->ret->inst_c0 = m->ret->dreg = ppc_r3;
1266 /* local vars are at a positive offset from the stack pointer */
1268 * also note that if the function uses alloca, we use ppc_r31
1269 * to point at the local variables.
1271 offset = PPC_MINIMAL_STACK_SIZE; /* linkage area */
1272 /* align the offset to 16 bytes: not sure this is needed here */
1274 //offset &= ~(16 - 1);
1276 /* add parameter area size for called functions */
1277 offset += m->param_area;
1279 offset &= ~(16 - 1);
1281 /* allow room to save the return value */
1282 if (mono_jit_trace_calls != NULL && mono_trace_eval (m->method))
1285 /* the MonoLMF structure is stored just below the stack pointer */
1288 /* this stuff should not be needed on ppc and the new jit,
1289 * because a call on ppc to the handlers doesn't change the
1290 * stack pointer and the jist doesn't manipulate the stack pointer
1291 * for operations involving valuetypes.
1293 /* reserve space to store the esp */
1294 offset += sizeof (gpointer);
1296 /* this is a global constant */
1297 mono_exc_esp_offset = offset;
1300 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
1301 offset += sizeof(gpointer) - 1;
1302 offset &= ~(sizeof(gpointer) - 1);
1304 m->vret_addr->opcode = OP_REGOFFSET;
1305 m->vret_addr->inst_basereg = frame_reg;
1306 m->vret_addr->inst_offset = offset;
1308 if (G_UNLIKELY (m->verbose_level > 1)) {
1309 printf ("vret_addr =");
1310 mono_print_ins (m->vret_addr);
1313 offset += sizeof(gpointer);
/* lay out spilled locals at frame_reg-relative offsets */
1316 offsets = mono_allocate_stack_slots_full (m, FALSE, &locals_stack_size, &locals_stack_align);
1317 if (locals_stack_align) {
1318 offset += (locals_stack_align - 1);
1319 offset &= ~(locals_stack_align - 1);
1321 for (i = m->locals_start; i < m->num_varinfo; i++) {
1322 if (offsets [i] != -1) {
1323 MonoInst *inst = m->varinfo [i];
1324 inst->opcode = OP_REGOFFSET;
1325 inst->inst_basereg = frame_reg;
1326 inst->inst_offset = offset + offsets [i];
1328 g_print ("allocating local %d (%s) to %d\n",
1329 i, mono_type_get_name (inst->inst_vtype), inst->inst_offset);
1333 offset += locals_stack_size;
1337 inst = m->args [curinst];
1338 if (inst->opcode != OP_REGVAR) {
1339 inst->opcode = OP_REGOFFSET;
1340 inst->inst_basereg = frame_reg;
1341 offset += sizeof (gpointer) - 1;
1342 offset &= ~(sizeof (gpointer) - 1);
1343 inst->inst_offset = offset;
1344 offset += sizeof (gpointer);
/* assign stack homes to incoming arguments not kept in registers */
1349 for (i = 0; i < sig->param_count; ++i) {
1350 inst = m->args [curinst];
1351 if (inst->opcode != OP_REGVAR) {
1352 inst->opcode = OP_REGOFFSET;
1353 inst->inst_basereg = frame_reg;
1355 size = mono_type_native_stack_size (sig->params [i], (guint32*)&align);
1356 inst->backend.is_pinvoke = 1;
1358 size = mono_type_size (sig->params [i], &align);
1360 if (MONO_TYPE_ISSTRUCT (sig->params [i]) && size < sizeof (gpointer))
1361 size = align = sizeof (gpointer);
1362 offset += align - 1;
1363 offset &= ~(align - 1);
1364 inst->inst_offset = offset;
1370 /* some storage for fp conversions */
1373 m->arch.fp_conv_var_offset = offset;
1376 /* align the offset to 16 bytes */
1378 offset &= ~(16 - 1);
1381 m->stack_offset = offset;
1383 if (sig->call_convention == MONO_CALL_VARARG) {
1384 CallInfo *cinfo = calculate_sizes (m->method->signature, m->method->signature->pinvoke);
1386 m->sig_cookie = cinfo->sig_cookie.offset;
/*
 * mono_arch_create_vars:
 * Creates the hidden vret_addr argument variable when the method returns
 * a struct, so the return-buffer pointer has a home.
 */
1393 mono_arch_create_vars (MonoCompile *cfg)
1395 MonoMethodSignature *sig = mono_method_signature (cfg->method);
1397 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
1398 cfg->vret_addr = mono_compile_create_var (cfg, &mono_defaults.int_class->byval_arg, OP_ARG);
1402 /* Fixme: we need an alignment solution for enter_method and mono_arch_call_opcode,
1403 * currently alignment in mono_arch_call_opcode is computed without arch_get_argument_info
/*
 * emit_sig_cookie:
 * Stores the call's MonoMethodSignature pointer at the sig_cookie stack
 * slot (relative to r1/sp) for vararg calls; AOT is disabled because
 * signature tokens are not yet supported there (see FIXME).
 */
1407 emit_sig_cookie (MonoCompile *cfg, MonoCallInst *call, CallInfo *cinfo)
1409 int sig_reg = mono_alloc_ireg (cfg);
1411 /* FIXME: Add support for signature tokens to AOT */
1412 cfg->disable_aot = TRUE;
1414 MONO_EMIT_NEW_ICONST (cfg, sig_reg, (gulong)call->signature);
1415 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG,
1416 ppc_r1, cinfo->sig_cookie.offset, sig_reg);
/*
 * mono_arch_emit_call:
 * Lower a managed call: for each argument, move the value into the
 * register or stack slot assigned by calculate_sizes (), emit the vararg
 * signature cookie at the sentinel position, and wire up the hidden
 * valuetype-return address register.  Struct arguments are deferred to
 * mono_arch_emit_outarg_vt () via OP_OUTARG_VT pseudo-instructions.
 * NOTE(review): several else-branches/braces are missing from this
 * extracted view.
 */
1420 mono_arch_emit_call (MonoCompile *cfg, MonoCallInst *call)
1423 MonoMethodSignature *sig;
1427 sig = call->signature;
1428 n = sig->param_count + sig->hasthis;
1430 cinfo = calculate_sizes (sig, sig->pinvoke);
1432 for (i = 0; i < n; ++i) {
1433 ArgInfo *ainfo = cinfo->args + i;
1436 if (i >= sig->hasthis)
1437 t = sig->params [i - sig->hasthis];
	/* implicit 'this' argument: treat as a plain pointer */
1439 t = &mono_defaults.int_class->byval_arg;
1440 t = mini_type_get_underlying_type (cfg->generic_sharing_context, t);
	/* the cookie must be stored just before the first vararg argument */
1442 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos))
1443 emit_sig_cookie (cfg, call, cinfo);
1445 in = call->args [i];
1447 if (ainfo->regtype == RegTypeGeneral) {
1448 #ifndef __mono_ppc64__
	/* on 32-bit PPC a 64-bit value occupies a register pair (dreg+1/dreg+2) */
1449 if (!t->byref && ((t->type == MONO_TYPE_I8) || (t->type == MONO_TYPE_U8))) {
1450 MONO_INST_NEW (cfg, ins, OP_MOVE);
1451 ins->dreg = mono_alloc_ireg (cfg);
1452 ins->sreg1 = in->dreg + 1;
1453 MONO_ADD_INS (cfg->cbb, ins);
1454 mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, ainfo->reg + 1, FALSE);
1456 MONO_INST_NEW (cfg, ins, OP_MOVE);
1457 ins->dreg = mono_alloc_ireg (cfg);
1458 ins->sreg1 = in->dreg + 2;
1459 MONO_ADD_INS (cfg->cbb, ins);
1460 mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, ainfo->reg, FALSE);
	/* plain pointer-sized argument in a general-purpose register */
1464 MONO_INST_NEW (cfg, ins, OP_MOVE);
1465 ins->dreg = mono_alloc_ireg (cfg);
1466 ins->sreg1 = in->dreg;
1467 MONO_ADD_INS (cfg->cbb, ins);
1469 mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, ainfo->reg, FALSE);
1471 } else if (ainfo->regtype == RegTypeStructByAddr) {
	/* struct passed by reference: handled later in mono_arch_emit_outarg_vt () */
1472 MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1473 ins->opcode = OP_OUTARG_VT;
1474 ins->sreg1 = in->dreg;
1475 ins->klass = in->klass;
1476 ins->inst_p0 = call;
	/* ainfo is mempool-copied because cinfo is freed before lowering runs */
1477 ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1478 memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1479 MONO_ADD_INS (cfg->cbb, ins);
1480 } else if (ainfo->regtype == RegTypeStructByVal) {
1481 /* this is further handled in mono_arch_emit_outarg_vt () */
1482 MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1483 ins->opcode = OP_OUTARG_VT;
1484 ins->sreg1 = in->dreg;
1485 ins->klass = in->klass;
1486 ins->inst_p0 = call;
1487 ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1488 memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1489 MONO_ADD_INS (cfg->cbb, ins);
1490 } else if (ainfo->regtype == RegTypeBase) {
	/* argument goes on the stack, relative to ppc_r1 */
1491 if (!t->byref && ((t->type == MONO_TYPE_I8) || (t->type == MONO_TYPE_U8))) {
1492 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI8_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1493 } else if (!t->byref && ((t->type == MONO_TYPE_R4) || (t->type == MONO_TYPE_R8))) {
1494 if (t->type == MONO_TYPE_R8)
1495 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER8_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1497 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER4_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1499 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1501 } else if (ainfo->regtype == RegTypeFP) {
1502 if (t->type == MONO_TYPE_VALUETYPE) {
1503 /* this is further handled in mono_arch_emit_outarg_vt () */
1504 MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1505 ins->opcode = OP_OUTARG_VT;
1506 ins->sreg1 = in->dreg;
1507 ins->klass = in->klass;
1508 ins->inst_p0 = call;
1509 ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1510 memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1511 MONO_ADD_INS (cfg->cbb, ins);
1513 cfg->flags |= MONO_CFG_HAS_FPOUT;
1515 int dreg = mono_alloc_freg (cfg);
	/* a 4-byte FP slot needs a double -> single conversion before the move */
1517 if (ainfo->size == 4) {
1518 MONO_EMIT_NEW_UNALU (cfg, OP_FCONV_TO_R4, dreg, in->dreg);
1520 MONO_INST_NEW (cfg, ins, OP_FMOVE);
1522 ins->sreg1 = in->dreg;
1523 MONO_ADD_INS (cfg->cbb, ins);
1526 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg, TRUE);
1527 cfg->flags |= MONO_CFG_HAS_FPOUT;
	/* unknown regtype: calculate_sizes () must only produce the cases above */
1530 g_assert_not_reached ();
1534 /* Emit the signature cookie in the case that there is no
1535 additional argument */
1536 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (n == sig->sentinelpos))
1537 emit_sig_cookie (cfg, call, cinfo);
1539 if (cinfo->struct_ret) {
	/* pass the valuetype return buffer address in the designated register */
1542 MONO_INST_NEW (cfg, vtarg, OP_MOVE);
1543 vtarg->sreg1 = call->vret_var->dreg;
1544 vtarg->dreg = mono_alloc_preg (cfg);
1545 MONO_ADD_INS (cfg->cbb, vtarg);
1547 mono_call_inst_add_outarg_reg (cfg, call, vtarg->dreg, cinfo->struct_ret, FALSE);
1550 call->stack_usage = cinfo->stack_usage;
	/* reserve at least the ABI-minimal parameter area in the frame */
1551 cfg->param_area = MAX (PPC_MINIMAL_PARAM_AREA_SIZE, MAX (cfg->param_area, cinfo->stack_usage));
1552 cfg->flags |= MONO_CFG_HAS_CALLS;
/*
 * mono_arch_emit_outarg_vt:
 * Lower an OP_OUTARG_VT pseudo-instruction created by mono_arch_emit_call ():
 * copy a valuetype argument into the registers and/or stack area chosen by
 * calculate_sizes ().  @ins->inst_p0 is the call, @ins->inst_p1 the
 * mempool-copied ArgInfo, @src the address of the valuetype.
 * NOTE(review): several else-branches/braces are missing from this
 * extracted view.
 */
1558 mono_arch_emit_outarg_vt (MonoCompile *cfg, MonoInst *ins, MonoInst *src)
1560 MonoCallInst *call = (MonoCallInst*)ins->inst_p0;
1561 ArgInfo *ainfo = ins->inst_p1;
1562 int ovf_size = ainfo->vtsize;
1563 int doffset = ainfo->offset;
1564 int i, soffset, dreg;
1566 if (ainfo->regtype == RegTypeStructByVal) {
1573 * Darwin pinvokes needs some special handling for 1
1574 * and 2 byte arguments
1576 g_assert (ins->klass);
1577 if (call->signature->pinvoke)
1578 size = mono_class_native_size (ins->klass, NULL);
1579 if (size == 2 || size == 1) {
	/* load the small struct as a single (sign-extended) 1- or 2-byte value */
1580 int tmpr = mono_alloc_ireg (cfg);
1582 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI1_MEMBASE, tmpr, src->dreg, soffset);
1584 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI2_MEMBASE, tmpr, src->dreg, soffset);
1585 dreg = mono_alloc_ireg (cfg);
1586 MONO_EMIT_NEW_UNALU (cfg, OP_MOVE, dreg, tmpr);
1587 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg, FALSE);
	/* copy the struct into ainfo->size consecutive argument registers */
1590 for (i = 0; i < ainfo->size; ++i) {
1591 int antipadding = 0;
	/* the last, partial word must be right-justified in the register */
1594 antipadding = sizeof (gpointer) - ainfo->bytes;
1596 dreg = mono_alloc_ireg (cfg);
1597 MONO_EMIT_NEW_LOAD_MEMBASE (cfg, dreg, src->dreg, soffset);
1599 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHR_UN_IMM, dreg, dreg, antipadding * 8);
1600 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg + i, FALSE);
1601 soffset += sizeof (gpointer);
	/* whatever did not fit in registers overflows onto the stack */
1604 mini_emit_memcpy (cfg, ppc_r1, doffset + soffset, src->dreg, soffset, ovf_size * sizeof (gpointer), 0);
1605 } else if (ainfo->regtype == RegTypeFP) {
1606 int tmpr = mono_alloc_freg (cfg);
1607 if (ainfo->size == 4)
1608 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR4_MEMBASE, tmpr, src->dreg, 0);
1610 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR8_MEMBASE, tmpr, src->dreg, 0);
1611 dreg = mono_alloc_freg (cfg);
1612 MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, dreg, tmpr);
1613 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg, TRUE);
	/* pass-by-address: make a local copy so the callee cannot mutate the original */
1615 MonoInst *vtcopy = mono_compile_create_var (cfg, &src->klass->byval_arg, OP_LOCAL);
1619 /* FIXME: alignment? */
1620 if (call->signature->pinvoke) {
1621 size = mono_type_native_stack_size (&src->klass->byval_arg, NULL);
1622 vtcopy->backend.is_pinvoke = 1;
1624 size = mini_type_stack_size (cfg->generic_sharing_context, &src->klass->byval_arg, NULL);
1627 g_assert (ovf_size > 0);
1629 EMIT_NEW_VARLOADA (cfg, load, vtcopy, vtcopy->inst_vtype);
1630 mini_emit_memcpy (cfg, load->dreg, 0, src->dreg, 0, size, 0);
	/* pass the copy's address either on the stack or in a register */
1633 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ppc_r1, ainfo->offset, load->dreg);
1635 mono_call_inst_add_outarg_reg (cfg, call, load->dreg, ainfo->reg, FALSE);
/*
 * mono_arch_emit_setret:
 * Move the method's return value @val into the conventional return
 * location (cfg->ret) just before the epilogue.
 */
1640 mono_arch_emit_setret (MonoCompile *cfg, MonoMethod *method, MonoInst *val)
1642 MonoType *ret = mini_type_get_underlying_type (cfg->generic_sharing_context,
1643 mono_method_signature (method)->ret);
1646 #ifndef __mono_ppc64__
	/* 32-bit PPC: a 64-bit return lives in a register pair -> OP_SETLRET */
1647 if (ret->type == MONO_TYPE_I8 || ret->type == MONO_TYPE_U8) {
1650 MONO_INST_NEW (cfg, ins, OP_SETLRET);
1651 ins->sreg1 = val->dreg + 1;
1652 ins->sreg2 = val->dreg + 2;
1653 MONO_ADD_INS (cfg->cbb, ins);
1657 if (ret->type == MONO_TYPE_R8 || ret->type == MONO_TYPE_R4) {
1658 MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, cfg->ret->dreg, val->dreg);
1662 MONO_EMIT_NEW_UNALU (cfg, OP_MOVE, cfg->ret->dreg, val->dreg);
1665 /* FIXME: this is just a useless hint: fix the interface to include the opcode */
/*
 * mono_arch_is_inst_imm:
 * Whether @imm is usable as an instruction immediate.  The body is not
 * visible in this extracted view; presumably it answers TRUE and relies on
 * mono_arch_lowering_pass () to rewrite out-of-range immediates — TODO confirm.
 */
1667 mono_arch_is_inst_imm (gint64 imm)
1673 * Allow tracing to work with this interface (with an optional argument)
/*
 * mono_arch_instrument_prolog:
 * Emit a call to the trace function @func at method entry, passing the
 * MonoMethod in r3 and a placeholder NULL frame pointer in r4.
 * NOTE(review): the function's remaining lines are missing from this
 * extracted view.
 */
1677 mono_arch_instrument_prolog (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments)
1681 ppc_load (code, ppc_r3, cfg->method);
1682 ppc_li (code, ppc_r4, 0); /* NULL ebp for now */
	/* indirect call through LR to the tracing helper */
1683 ppc_load_func (code, ppc_r0, func);
1684 ppc_mtlr (code, ppc_r0);
/*
 * mono_arch_instrument_epilog_full:
 * Emit a call to the trace function @func just before the epilogue.  The
 * return value (which lives in r3/r4 or f1 at this point) is spilled to the
 * stack around the call and reloaded afterwards; save_mode selects how many
 * words / which register file must be preserved.
 * NOTE(review): many case labels and else-branches are missing from this
 * extracted view.
 */
1698 mono_arch_instrument_epilog_full (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments, gboolean preserve_argument_registers)
1701 int save_mode = SAVE_NONE;
1703 MonoMethod *method = cfg->method;
1704 int rtype = mini_type_get_underlying_type (cfg->generic_sharing_context,
1705 mono_method_signature (method)->ret)->type;
1706 int save_offset = PPC_STACK_PARAM_OFFSET + cfg->param_area;
1710 offset = code - cfg->native_code;
1711 /* we need about 16 instructions */
1712 if (offset > (cfg->code_size - 16 * 4)) {
	/* grow the native code buffer and re-derive the emit pointer */
1713 cfg->code_size *= 2;
1714 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
1715 code = cfg->native_code + offset;
1719 case MONO_TYPE_VOID:
1720 /* special case string .ctor icall */
	/* FIXME(review): strcmp () returns 0 on a match, so this condition is
	 * true only when the name is NOT ".ctor" — the opposite of the stated
	 * intent.  It should read !strcmp (".ctor", method->name). */
1721 if (strcmp (".ctor", method->name) && method->klass == mono_defaults.string_class)
1722 save_mode = SAVE_ONE;
1724 save_mode = SAVE_NONE;
1726 #ifndef __mono_ppc64__
	/* 64-bit result in a register pair on 32-bit PPC */
1729 save_mode = SAVE_TWO;
1734 save_mode = SAVE_FP;
1736 case MONO_TYPE_VALUETYPE:
1737 save_mode = SAVE_STRUCT;
1740 save_mode = SAVE_ONE;
	/* spill the live return value before clobbering r3/r4 with trace args */
1744 switch (save_mode) {
1746 ppc_stw (code, ppc_r3, save_offset, cfg->frame_reg);
1747 ppc_stw (code, ppc_r4, save_offset + 4, cfg->frame_reg);
1748 if (enable_arguments) {
	/* shift the r3/r4 pair into r4/r5 so they become trace arguments */
1749 ppc_mr (code, ppc_r5, ppc_r4);
1750 ppc_mr (code, ppc_r4, ppc_r3);
1754 ppc_stptr (code, ppc_r3, save_offset, cfg->frame_reg);
1755 if (enable_arguments) {
1756 ppc_mr (code, ppc_r4, ppc_r3);
1760 ppc_stfd (code, ppc_f1, save_offset, cfg->frame_reg);
1761 if (enable_arguments) {
1762 /* FIXME: what reg? */
1763 ppc_fmr (code, ppc_f3, ppc_f1);
1764 /* FIXME: use 8 byte load on PPC64 */
1765 ppc_lwz (code, ppc_r4, save_offset, cfg->frame_reg);
1766 ppc_lwz (code, ppc_r5, save_offset + 4, cfg->frame_reg);
1770 if (enable_arguments) {
1771 /* FIXME: get the actual address */
1772 ppc_mr (code, ppc_r4, ppc_r3);
	/* call the tracing helper: r3 = method, LR = func */
1780 ppc_load (code, ppc_r3, cfg->method);
1781 ppc_load_func (code, ppc_r0, func);
1782 ppc_mtlr (code, ppc_r0);
	/* restore the spilled return value */
1785 switch (save_mode) {
1787 ppc_lwz (code, ppc_r3, save_offset, cfg->frame_reg);
1788 ppc_lwz (code, ppc_r4, save_offset + 4, cfg->frame_reg);
1791 ppc_ldptr (code, ppc_r3, save_offset, cfg->frame_reg);
1794 ppc_lfd (code, ppc_f1, save_offset, cfg->frame_reg);
1804 * Conditional branches have a small offset, so if it is likely overflowed,
1805 * we do a branch to the end of the method (uncond branches have much larger
1806 * offsets) where we perform the conditional and jump back unconditionally.
1807 * It's slightly slower, since we add two uncond branches, but it's very simple
1808 * with the current patch implementation and such large methods are likely not
1809 * going to be perf critical anyway.
1814 const char *exception;
/*
 * EMIT_COND_BRANCH_FLAGS:
 * Emit a conditional branch to ins->inst_true_bb with condition bits
 * @b0/@b1.  PPC bc only has a 16-bit byte displacement, so when the target
 * may be out of range we register a MONO_PATCH_INFO_BB_OVF patch: the
 * conditional test is re-emitted at the end of the method (see the comment
 * above) and reached/left through long unconditional branches.
 *
 * FIX(review): the lower-bound test used to read
 *   ! ppc_is_imm16 (ppc_is_imm16 (br_disp - 1024))
 * which applies the predicate to its own boolean result (0 or 1, always a
 * valid imm16), so that disjunct could never fire and large negative
 * displacements silently skipped the overflow path.  Check the
 * displacement itself instead.  The else-arms dropped by the extraction
 * are restored here.
 */
#define EMIT_COND_BRANCH_FLAGS(ins,b0,b1) \
if (0 && ins->inst_true_bb->native_offset) { \
	/* target already emitted: branch directly (disabled fast path) */ \
	ppc_bc (code, (b0), (b1), (code - cfg->native_code + ins->inst_true_bb->native_offset) & 0xffff); \
} else { \
	int br_disp = ins->inst_true_bb->max_offset - offset; \
	if (!ppc_is_imm16 (br_disp + 1024) || !ppc_is_imm16 (br_disp - 1024)) { \
		MonoOvfJump *ovfj = mono_mempool_alloc (cfg->mempool, sizeof (MonoOvfJump)); \
		ovfj->data.bb = ins->inst_true_bb; \
		ovfj->ip_offset = 0; \
		ovfj->b0_cond = (b0); \
		ovfj->b1_cond = (b1); \
		mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB_OVF, ovfj); \
		ppc_b (code, 0); \
	} else { \
		mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_true_bb); \
		ppc_bc (code, (b0), (b1), 0); \
	} \
}

#define EMIT_COND_BRANCH(ins,cond) EMIT_COND_BRANCH_FLAGS(ins, branch_b0_table [(cond)], branch_b1_table [(cond)])
1842 /* emit an exception if condition is fail
1844 * We assign the extra code used to throw the implicit exceptions
1845 * to cfg->bb_exit as far as the big branch handling is concerned
/*
 * EMIT_COND_SYSTEM_EXCEPTION_FLAGS:
 * Emit a conditional branch-and-link to the implicit-exception thrower for
 * @exc_name.  The extra code is attributed to cfg->bb_exit for branch-range
 * accounting; when bb_exit may be out of bc's 16-bit range, a
 * MONO_PATCH_INFO_EXC_OVF trampoline is used instead.
 *
 * FIX(review): same defect as EMIT_COND_BRANCH_FLAGS — the lower-bound test
 * was ! ppc_is_imm16 (ppc_is_imm16 (br_disp - 1024)), which is always false;
 * test the displacement itself.  The do/else lines dropped by the
 * extraction are restored here.
 */
#define EMIT_COND_SYSTEM_EXCEPTION_FLAGS(b0,b1,exc_name) \
do { \
	int br_disp = cfg->bb_exit->max_offset - offset; \
	if (!ppc_is_imm16 (br_disp + 1024) || !ppc_is_imm16 (br_disp - 1024)) { \
		MonoOvfJump *ovfj = mono_mempool_alloc (cfg->mempool, sizeof (MonoOvfJump)); \
		ovfj->data.exception = (exc_name); \
		ovfj->ip_offset = code - cfg->native_code; \
		ovfj->b0_cond = (b0); \
		ovfj->b1_cond = (b1); \
		mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC_OVF, ovfj); \
		ppc_bl (code, 0); \
		/* account for the out-of-line conditional + jump-back sequence */ \
		cfg->bb_exit->max_offset += 24; \
	} else { \
		mono_add_patch_info (cfg, code - cfg->native_code, \
				    MONO_PATCH_INFO_EXC, exc_name); \
		ppc_bcl (code, (b0), (b1), 0); \
	} \
} while (0)

#define EMIT_COND_SYSTEM_EXCEPTION(cond,exc_name) EMIT_COND_SYSTEM_EXCEPTION_FLAGS(branch_b0_table [(cond)], branch_b1_table [(cond)], (exc_name))
/*
 * mono_arch_peephole_pass_1:
 * First architecture peephole pass.  The body is not visible in this
 * extracted view (appears to be a no-op on this backend — TODO confirm).
 */
1869 mono_arch_peephole_pass_1 (MonoCompile *cfg, MonoBasicBlock *bb)
/*
 * normalize_opcode:
 * Collapse the 32-bit and native-width variants of load/store/shift
 * opcodes into a single canonical opcode so the peephole pass can match
 * them uniformly on both PPC32 and PPC64.  Unlisted opcodes are returned
 * unchanged (default case not visible in this extracted view).
 */
1874 normalize_opcode (int opcode)
1877 case MONO_PPC_32_64_CASE (OP_LOADI4_MEMBASE, OP_LOADI8_MEMBASE):
1878 return OP_LOAD_MEMBASE;
1879 case MONO_PPC_32_64_CASE (OP_LOADI4_MEMINDEX, OP_LOADI8_MEMINDEX):
1880 return OP_LOAD_MEMINDEX;
1881 case MONO_PPC_32_64_CASE (OP_STOREI4_MEMBASE_REG, OP_STOREI8_MEMBASE_REG):
1882 return OP_STORE_MEMBASE_REG;
1883 case MONO_PPC_32_64_CASE (OP_STOREI4_MEMBASE_IMM, OP_STOREI8_MEMBASE_IMM):
1884 return OP_STORE_MEMBASE_IMM;
1885 case MONO_PPC_32_64_CASE (OP_STOREI4_MEMINDEX, OP_STOREI8_MEMINDEX):
1886 return OP_STORE_MEMINDEX;
1887 case MONO_PPC_32_64_CASE (OP_ISHR_IMM, OP_LSHR_IMM):
1889 case MONO_PPC_32_64_CASE (OP_ISHR_UN_IMM, OP_LSHR_UN_IMM):
1890 return OP_SHR_UN_IMM;
/*
 * mono_arch_peephole_pass_2:
 * Second peephole pass over @bb: strength-reduce multiplications, forward
 * stored values into subsequent loads of the same [basereg+offset] slot,
 * and remove redundant moves.  Opcodes are matched through
 * normalize_opcode () so one rule covers the 32- and 64-bit variants.
 * NOTE(review): several case labels/braces are missing from this extracted
 * view.
 */
1897 mono_arch_peephole_pass_2 (MonoCompile *cfg, MonoBasicBlock *bb)
1899 MonoInst *ins, *n, *last_ins = NULL;
1901 MONO_BB_FOR_EACH_INS_SAFE (bb, n, ins) {
1902 switch (normalize_opcode (ins->opcode)) {
1904 /* remove unnecessary multiplication with 1 */
1905 if (ins->inst_imm == 1) {
1906 if (ins->dreg != ins->sreg1) {
1907 ins->opcode = OP_MOVE;
1909 MONO_DELETE_INS (bb, ins);
	/* mul by power of two -> shift left */
1913 int power2 = mono_is_power_of_two (ins->inst_imm);
1915 ins->opcode = OP_SHL_IMM;
1916 ins->inst_imm = power2;
1920 case OP_LOAD_MEMBASE:
1922 * OP_STORE_MEMBASE_REG reg, offset(basereg)
1923 * OP_LOAD_MEMBASE offset(basereg), reg
1925 if (last_ins && normalize_opcode (last_ins->opcode) == OP_STORE_MEMBASE_REG &&
1926 ins->inst_basereg == last_ins->inst_destbasereg &&
1927 ins->inst_offset == last_ins->inst_offset) {
	/* reloading the value just stored: drop the load or turn it into a move */
1928 if (ins->dreg == last_ins->sreg1) {
1929 MONO_DELETE_INS (bb, ins);
1932 //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
1933 ins->opcode = OP_MOVE;
1934 ins->sreg1 = last_ins->sreg1;
1938 * Note: reg1 must be different from the basereg in the second load
1939 * OP_LOAD_MEMBASE offset(basereg), reg1
1940 * OP_LOAD_MEMBASE offset(basereg), reg2
1942 * OP_LOAD_MEMBASE offset(basereg), reg1
1943 * OP_MOVE reg1, reg2
1945 } else if (last_ins && normalize_opcode (last_ins->opcode) == OP_LOAD_MEMBASE &&
1946 ins->inst_basereg != last_ins->dreg &&
1947 ins->inst_basereg == last_ins->inst_basereg &&
1948 ins->inst_offset == last_ins->inst_offset) {
1950 if (ins->dreg == last_ins->dreg) {
1951 MONO_DELETE_INS (bb, ins);
1954 ins->opcode = OP_MOVE;
1955 ins->sreg1 = last_ins->dreg;
1958 //g_assert_not_reached ();
1962 * OP_STORE_MEMBASE_IMM imm, offset(basereg)
1963 * OP_LOAD_MEMBASE offset(basereg), reg
1965 * OP_STORE_MEMBASE_IMM imm, offset(basereg)
1966 * OP_ICONST reg, imm
1968 } else if (last_ins && normalize_opcode (last_ins->opcode) == OP_STORE_MEMBASE_IMM &&
1969 ins->inst_basereg == last_ins->inst_destbasereg &&
1970 ins->inst_offset == last_ins->inst_offset) {
1971 //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
1972 ins->opcode = OP_ICONST;
1973 ins->inst_c0 = last_ins->inst_imm;
	/* deliberately left in until this rewrite rule has been exercised */
1974 g_assert_not_reached (); // check this rule
	/* narrow load after narrow store of the same slot -> sign/zero extend */
1978 case OP_LOADU1_MEMBASE:
1979 case OP_LOADI1_MEMBASE:
1980 if (last_ins && (last_ins->opcode == OP_STOREI1_MEMBASE_REG) &&
1981 ins->inst_basereg == last_ins->inst_destbasereg &&
1982 ins->inst_offset == last_ins->inst_offset) {
1983 ins->opcode = (ins->opcode == OP_LOADI1_MEMBASE) ? OP_ICONV_TO_I1 : OP_ICONV_TO_U1;
1984 ins->sreg1 = last_ins->sreg1;
1987 case OP_LOADU2_MEMBASE:
1988 case OP_LOADI2_MEMBASE:
1989 if (last_ins && (last_ins->opcode == OP_STOREI2_MEMBASE_REG) &&
1990 ins->inst_basereg == last_ins->inst_destbasereg &&
1991 ins->inst_offset == last_ins->inst_offset) {
1992 ins->opcode = (ins->opcode == OP_LOADI2_MEMBASE) ? OP_ICONV_TO_I2 : OP_ICONV_TO_U2;
1993 ins->sreg1 = last_ins->sreg1;
1996 #ifdef __mono_ppc64__
1997 case OP_LOADU4_MEMBASE:
1998 case OP_LOADI4_MEMBASE:
1999 if (last_ins && (last_ins->opcode == OP_STOREI4_MEMBASE_REG) &&
2000 ins->inst_basereg == last_ins->inst_destbasereg &&
2001 ins->inst_offset == last_ins->inst_offset) {
2002 ins->opcode = (ins->opcode == OP_LOADI4_MEMBASE) ? OP_ICONV_TO_I4 : OP_ICONV_TO_U4;
2003 ins->sreg1 = last_ins->sreg1;
2008 ins->opcode = OP_MOVE;
	/* self-move is dead */
2012 if (ins->dreg == ins->sreg1) {
2013 MONO_DELETE_INS (bb, ins);
2017 * OP_MOVE sreg, dreg
2018 * OP_MOVE dreg, sreg
2020 if (last_ins && last_ins->opcode == OP_MOVE &&
2021 ins->sreg1 == last_ins->dreg &&
2022 ins->dreg == last_ins->sreg1) {
2023 MONO_DELETE_INS (bb, ins);
2031 bb->last_ins = last_ins;
/*
 * mono_arch_decompose_opts:
 * Decompose opcodes that PPC has no direct instruction for.  Int->float
 * conversions use the classic bias trick: build the IEEE-754 double
 * 0x43300000:<int bits> in a stack slot, load it, and subtract the bias
 * constant.  The stack slot is cfg->arch.fp_conv_var_offset (reserved in
 * mono_arch_allocate_vars); NOTE(review): the lines assigning 'offset'
 * from it are missing from this extracted view.
 */
2035 mono_arch_decompose_opts (MonoCompile *cfg, MonoInst *ins)
2037 switch (ins->opcode) {
2038 case OP_ICONV_TO_R_UN: {
	/* bias 2^52: low 32 bits of the double are the unsigned int value */
2039 static const guint64 adjust_val = 0x4330000000000000ULL;
2040 int msw_reg = mono_alloc_ireg (cfg);
2041 int adj_reg = mono_alloc_freg (cfg);
2042 int tmp_reg = mono_alloc_freg (cfg);
2043 int basereg = ppc_sp;
2045 MONO_EMIT_NEW_ICONST (cfg, msw_reg, 0x43300000);
	/* slot out of 16-bit displacement range: materialize the address */
2046 if (!ppc_is_imm16 (offset + 4)) {
2047 basereg = mono_alloc_ireg (cfg);
2048 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IADD_IMM, basereg, cfg->frame_reg, offset);
2050 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset, msw_reg);
2051 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset + 4, ins->sreg1);
2052 MONO_EMIT_NEW_LOAD_R8 (cfg, adj_reg, &adjust_val);
2053 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR8_MEMBASE, tmp_reg, basereg, offset);
2054 MONO_EMIT_NEW_BIALU (cfg, OP_FSUB, ins->dreg, tmp_reg, adj_reg);
2055 ins->opcode = OP_NOP;
2058 #ifndef __mono_ppc64__
2059 case OP_ICONV_TO_R4:
2060 case OP_ICONV_TO_R8: {
2061 /* FIXME: change precision for CEE_CONV_R4 */
	/* signed variant: xor with 0x80000000 and use the matching biased constant */
2062 static const guint64 adjust_val = 0x4330000080000000ULL;
2063 int msw_reg = mono_alloc_ireg (cfg);
2064 int xored = mono_alloc_ireg (cfg);
2065 int adj_reg = mono_alloc_freg (cfg);
2066 int tmp_reg = mono_alloc_freg (cfg);
2067 int basereg = ppc_sp;
2069 if (!ppc_is_imm16 (offset + 4)) {
2070 basereg = mono_alloc_ireg (cfg);
2071 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IADD_IMM, basereg, cfg->frame_reg, offset);
2073 MONO_EMIT_NEW_ICONST (cfg, msw_reg, 0x43300000);
2074 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset, msw_reg);
2075 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_XOR_IMM, xored, ins->sreg1, 0x80000000);
2076 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset + 4, xored);
2077 MONO_EMIT_NEW_LOAD_R8 (cfg, adj_reg, (gpointer)&adjust_val);
2078 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR8_MEMBASE, tmp_reg, basereg, offset);
2079 MONO_EMIT_NEW_BIALU (cfg, OP_FSUB, ins->dreg, tmp_reg, adj_reg);
2080 if (ins->opcode == OP_ICONV_TO_R4)
2081 MONO_EMIT_NEW_UNALU (cfg, OP_FCONV_TO_R4, ins->dreg, ins->dreg);
2082 ins->opcode = OP_NOP;
	/* finiteness check: spill the double and inspect its most significant word */
2087 int msw_reg = mono_alloc_ireg (cfg);
2088 int basereg = ppc_sp;
2090 if (!ppc_is_imm16 (offset + 4)) {
2091 basereg = mono_alloc_ireg (cfg);
2092 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IADD_IMM, basereg, cfg->frame_reg, offset);
2094 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER8_MEMBASE_REG, basereg, offset, ins->sreg1);
2095 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI4_MEMBASE, msw_reg, basereg, offset);
2096 MONO_EMIT_NEW_UNALU (cfg, OP_CHECK_FINITE, -1, msw_reg);
2097 MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, ins->dreg, ins->sreg1);
2098 ins->opcode = OP_NOP;
2101 #ifdef __mono_ppc64__
2103 case OP_IADD_OVF_UN:
	/* do 32-bit overflow ops in the top half of 64-bit registers */
2105 int shifted1_reg = mono_alloc_ireg (cfg);
2106 int shifted2_reg = mono_alloc_ireg (cfg);
2107 int result_shifted_reg = mono_alloc_ireg (cfg);
2109 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHL_IMM, shifted1_reg, ins->sreg1, 32);
2110 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHL_IMM, shifted2_reg, ins->sreg2, 32);
2111 MONO_EMIT_NEW_BIALU (cfg, ins->opcode, result_shifted_reg, shifted1_reg, shifted2_reg);
2112 if (ins->opcode == OP_IADD_OVF_UN)
2113 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHR_UN_IMM, ins->dreg, result_shifted_reg, 32);
2115 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHR_IMM, ins->dreg, result_shifted_reg, 32);
2116 ins->opcode = OP_NOP;
/*
 * mono_arch_decompose_long_opts:
 * 32-bit PPC: decompose 64-bit checked arithmetic into operations on the
 * low (dreg+1) / high (dreg+2) register-pair halves, with the carry
 * propagated through the dedicated *_CARRY opcodes.
 * NOTE(review): case labels/breaks are missing from this extracted view.
 */
2123 mono_arch_decompose_long_opts (MonoCompile *cfg, MonoInst *ins)
2125 switch (ins->opcode) {
2127 /* ADC sets the condition code */
2128 MONO_EMIT_NEW_BIALU (cfg, OP_ADDCC, ins->dreg + 1, ins->sreg1 + 1, ins->sreg2 + 1);
2129 MONO_EMIT_NEW_BIALU (cfg, OP_ADD_OVF_CARRY, ins->dreg + 2, ins->sreg1 + 2, ins->sreg2 + 2);
2132 case OP_LADD_OVF_UN:
2133 /* ADC sets the condition code */
2134 MONO_EMIT_NEW_BIALU (cfg, OP_ADDCC, ins->dreg + 1, ins->sreg1 + 1, ins->sreg2 + 1);
2135 MONO_EMIT_NEW_BIALU (cfg, OP_ADD_OVF_UN_CARRY, ins->dreg + 2, ins->sreg1 + 2, ins->sreg2 + 2);
2139 /* SBB sets the condition code */
2140 MONO_EMIT_NEW_BIALU (cfg, OP_SUBCC, ins->dreg + 1, ins->sreg1 + 1, ins->sreg2 + 1);
2141 MONO_EMIT_NEW_BIALU (cfg, OP_SUB_OVF_CARRY, ins->dreg + 2, ins->sreg1 + 2, ins->sreg2 + 2);
2144 case OP_LSUB_OVF_UN:
2145 /* SBB sets the condition code */
2146 MONO_EMIT_NEW_BIALU (cfg, OP_SUBCC, ins->dreg + 1, ins->sreg1 + 1, ins->sreg2 + 1);
2147 MONO_EMIT_NEW_BIALU (cfg, OP_SUB_OVF_UN_CARRY, ins->dreg + 2, ins->sreg1 + 2, ins->sreg2 + 2);
2151 /* This is the old version from inssel-long32.brg */
	/* 64-bit negate: two's complement = (~x) + 1 across the pair */
2152 MONO_EMIT_NEW_UNALU (cfg, OP_INOT, ins->dreg + 1, ins->sreg1 + 1);
2153 MONO_EMIT_NEW_UNALU (cfg, OP_INOT, ins->dreg + 2, ins->sreg1 + 2);
2154 /* ADC sets the condition codes */
2155 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_ADC_IMM, ins->dreg + 1, ins->dreg + 1, 1);
2156 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_ADC_IMM, ins->dreg + 2, ins->dreg + 2, 0);
2165 * the branch_b0_table should maintain the order of these
2179 branch_b0_table [] = {
2194 branch_b1_table [] = {
/*
 * NEW_INS:
 * Allocate a new instruction @dest with opcode @op and insert it after
 * 'last_ins' in basic block 'bb' (both taken from the enclosing scope of
 * the lowering pass).  NOTE(review): the macro's closing line(s) are
 * missing from this extracted view.
 */
2208 #define NEW_INS(cfg,dest,op) do { \
2209 MONO_INST_NEW((cfg), (dest), (op)); \
2210 mono_bblock_insert_after_ins (bb, last_ins, (dest)); \
/*
 * map_to_reg_reg_op:
 * Map an opcode with an immediate or membase operand to the equivalent
 * register-register / memindex form, used by the lowering pass after it
 * has loaded the out-of-range immediate into a register.  *_MEMBASE_IMM
 * stores map to *_MEMBASE_REG first (the immediate is registerized, the
 * offset may still need a second pass).  Anything unlisted falls through
 * to the generic mono_op_imm_to_op ().
 * NOTE(review): several case labels are missing from this extracted view.
 */
2214 map_to_reg_reg_op (int op)
2223 case OP_COMPARE_IMM:
2225 case OP_ICOMPARE_IMM:
2227 case OP_LCOMPARE_IMM:
2243 case OP_LOAD_MEMBASE:
2244 return OP_LOAD_MEMINDEX;
2245 case OP_LOADI4_MEMBASE:
2246 return OP_LOADI4_MEMINDEX;
2247 case OP_LOADU4_MEMBASE:
2248 return OP_LOADU4_MEMINDEX;
2249 case OP_LOADI8_MEMBASE:
2250 return OP_LOADI8_MEMINDEX;
2251 case OP_LOADU1_MEMBASE:
2252 return OP_LOADU1_MEMINDEX;
2253 case OP_LOADI2_MEMBASE:
2254 return OP_LOADI2_MEMINDEX;
2255 case OP_LOADU2_MEMBASE:
2256 return OP_LOADU2_MEMINDEX;
2257 case OP_LOADI1_MEMBASE:
2258 return OP_LOADI1_MEMINDEX;
2259 case OP_LOADR4_MEMBASE:
2260 return OP_LOADR4_MEMINDEX;
2261 case OP_LOADR8_MEMBASE:
2262 return OP_LOADR8_MEMINDEX;
2263 case OP_STOREI1_MEMBASE_REG:
2264 return OP_STOREI1_MEMINDEX;
2265 case OP_STOREI2_MEMBASE_REG:
2266 return OP_STOREI2_MEMINDEX;
2267 case OP_STOREI4_MEMBASE_REG:
2268 return OP_STOREI4_MEMINDEX;
2269 case OP_STOREI8_MEMBASE_REG:
2270 return OP_STOREI8_MEMINDEX;
2271 case OP_STORE_MEMBASE_REG:
2272 return OP_STORE_MEMINDEX;
2273 case OP_STORER4_MEMBASE_REG:
2274 return OP_STORER4_MEMINDEX;
2275 case OP_STORER8_MEMBASE_REG:
2276 return OP_STORER8_MEMINDEX;
	/* immediate stores: registerize the immediate first */
2277 case OP_STORE_MEMBASE_IMM:
2278 return OP_STORE_MEMBASE_REG;
2279 case OP_STOREI1_MEMBASE_IMM:
2280 return OP_STOREI1_MEMBASE_REG;
2281 case OP_STOREI2_MEMBASE_IMM:
2282 return OP_STOREI2_MEMBASE_REG;
2283 case OP_STOREI4_MEMBASE_IMM:
2284 return OP_STOREI4_MEMBASE_REG;
2285 case OP_STOREI8_MEMBASE_IMM:
2286 return OP_STOREI8_MEMBASE_REG;
2288 return mono_op_imm_to_op (op);
2291 //#define map_to_reg_reg_op(op) (cfg->new_ir? mono_op_imm_to_op (op): map_to_reg_reg_op (op))
/*
 * compare_opcode_is_unsigned:
 * TRUE when @opcode is one of the unsigned branch / conditional-exception /
 * set-condition opcodes, i.e. the compare feeding it must be emitted as an
 * unsigned (logical) comparison.
 */
#define compare_opcode_is_unsigned(opcode) \
	(/* unsigned branch ranges: IL-level, int and long variants */ \
	 ((opcode) >= CEE_BNE_UN && (opcode) <= CEE_BLT_UN) || \
	 ((opcode) >= OP_IBNE_UN && (opcode) <= OP_IBLT_UN) || \
	 ((opcode) >= OP_LBNE_UN && (opcode) <= OP_LBLT_UN) || \
	 /* unsigned conditional-exception ranges */ \
	 ((opcode) >= OP_COND_EXC_NE_UN && (opcode) <= OP_COND_EXC_LT_UN) || \
	 ((opcode) >= OP_COND_EXC_INE_UN && (opcode) <= OP_COND_EXC_ILT_UN) || \
	 /* unsigned set-condition opcodes */ \
	 ((opcode) == OP_CLT_UN || (opcode) == OP_CGT_UN || \
	  (opcode) == OP_ICLT_UN || (opcode) == OP_ICGT_UN || \
	  (opcode) == OP_LCLT_UN || (opcode) == OP_LCGT_UN))
2304 * Remove from the instruction list the instructions that can't be
2305 * represented with very simple instructions with no register
/*
 * mono_arch_lowering_pass:
 * Rewrite instructions in @bb that PPC cannot encode directly: immediates
 * that do not fit the 16-bit instruction fields are loaded into a scratch
 * vreg (OP_ICONST) and the opcode is switched to its register-register
 * form via map_to_reg_reg_op (); div/rem-by-immediate, rem, localloc and
 * float constants are similarly decomposed.
 * NOTE(review): many case labels, 'break's and 'goto loop_start' targets
 * are missing from this extracted view.
 */
2309 mono_arch_lowering_pass (MonoCompile *cfg, MonoBasicBlock *bb)
2311 MonoInst *ins, *next, *temp, *last_ins = NULL;
2314 MONO_BB_FOR_EACH_INS (bb, ins) {
2316 switch (ins->opcode) {
2317 case OP_IDIV_UN_IMM:
2320 case OP_IREM_UN_IMM:
	/* PPC has no divide-by-immediate: registerize the divisor */
2321 NEW_INS (cfg, temp, OP_ICONST);
2322 temp->inst_c0 = ins->inst_imm;
2323 temp->dreg = mono_alloc_ireg (cfg);
2324 ins->sreg2 = temp->dreg;
2325 if (ins->opcode == OP_IDIV_IMM)
2326 ins->opcode = OP_IDIV;
2327 else if (ins->opcode == OP_IREM_IMM)
2328 ins->opcode = OP_IREM;
2329 else if (ins->opcode == OP_IDIV_UN_IMM)
2330 ins->opcode = OP_IDIV_UN;
2331 else if (ins->opcode == OP_IREM_UN_IMM)
2332 ins->opcode = OP_IREM_UN;
2334 /* handle rem separately */
2338 CASE_PPC64 (OP_LREM)
2339 CASE_PPC64 (OP_LREM_UN) {
2341 /* we change a rem dest, src1, src2 to
2342 * div temp1, src1, src2
2343 * mul temp2, temp1, src2
2344 * sub dest, src1, temp2
2346 if (ins->opcode == OP_IREM || ins->opcode == OP_IREM_UN) {
2347 NEW_INS (cfg, mul, OP_IMUL);
2348 NEW_INS (cfg, temp, ins->opcode == OP_IREM? OP_IDIV: OP_IDIV_UN);
2349 ins->opcode = OP_ISUB;
2351 NEW_INS (cfg, mul, OP_LMUL);
2352 NEW_INS (cfg, temp, ins->opcode == OP_LREM? OP_LDIV: OP_LDIV_UN);
2353 ins->opcode = OP_LSUB;
2355 temp->sreg1 = ins->sreg1;
2356 temp->sreg2 = ins->sreg2;
2357 temp->dreg = mono_alloc_ireg (cfg);
2358 mul->sreg1 = temp->dreg;
2359 mul->sreg2 = ins->sreg2;
2360 mul->dreg = mono_alloc_ireg (cfg);
2361 ins->sreg2 = mul->dreg;
2365 CASE_PPC64 (OP_LADD_IMM)
	/* addi takes a signed 16-bit immediate */
2368 if (!ppc_is_imm16 (ins->inst_imm)) {
2369 NEW_INS (cfg, temp, OP_ICONST);
2370 temp->inst_c0 = ins->inst_imm;
2371 temp->dreg = mono_alloc_ireg (cfg);
2372 ins->sreg2 = temp->dreg;
2373 ins->opcode = map_to_reg_reg_op (ins->opcode);
2377 CASE_PPC64 (OP_LSUB_IMM)
	/* sub is emitted as addi with the negated immediate */
2379 if (!ppc_is_imm16 (-ins->inst_imm)) {
2380 NEW_INS (cfg, temp, OP_ICONST);
2381 temp->inst_c0 = ins->inst_imm;
2382 temp->dreg = mono_alloc_ireg (cfg);
2383 ins->sreg2 = temp->dreg;
2384 ins->opcode = map_to_reg_reg_op (ins->opcode);
	/* logical immediates: andi./ori/xori only cover one 16-bit half */
2396 gboolean is_imm = ((ins->inst_imm & 0xffff0000) && (ins->inst_imm & 0xffff));
2397 #ifdef __mono_ppc64__
2398 if (ins->inst_imm & 0xffffffff00000000UL)
2402 NEW_INS (cfg, temp, OP_ICONST);
2403 temp->inst_c0 = ins->inst_imm;
2404 temp->dreg = mono_alloc_ireg (cfg);
2405 ins->sreg2 = temp->dreg;
2406 ins->opcode = map_to_reg_reg_op (ins->opcode);
2415 NEW_INS (cfg, temp, OP_ICONST);
2416 temp->inst_c0 = ins->inst_imm;
2417 temp->dreg = mono_alloc_ireg (cfg);
2418 ins->sreg2 = temp->dreg;
2419 ins->opcode = map_to_reg_reg_op (ins->opcode);
2421 case OP_COMPARE_IMM:
2422 case OP_ICOMPARE_IMM:
2423 CASE_PPC64 (OP_LCOMPARE_IMM)
2425 /* Branch opts can eliminate the branch */
2426 if (!next || (!(MONO_IS_COND_BRANCH_OP (next) || MONO_IS_COND_EXC (next) || MONO_IS_SETCC (next)))) {
2427 ins->opcode = OP_NOP;
	/* cmpli takes an unsigned, cmpi a signed 16-bit immediate */
2431 if (compare_opcode_is_unsigned (next->opcode)) {
2432 if (!ppc_is_uimm16 (ins->inst_imm)) {
2433 NEW_INS (cfg, temp, OP_ICONST);
2434 temp->inst_c0 = ins->inst_imm;
2435 temp->dreg = mono_alloc_ireg (cfg);
2436 ins->sreg2 = temp->dreg;
2437 ins->opcode = map_to_reg_reg_op (ins->opcode);
2440 if (!ppc_is_imm16 (ins->inst_imm)) {
2441 NEW_INS (cfg, temp, OP_ICONST);
2442 temp->inst_c0 = ins->inst_imm;
2443 temp->dreg = mono_alloc_ireg (cfg);
2444 ins->sreg2 = temp->dreg;
2445 ins->opcode = map_to_reg_reg_op (ins->opcode);
	/* multiply-by-immediate strength reduction */
2451 if (ins->inst_imm == 1) {
2452 ins->opcode = OP_MOVE;
2455 if (ins->inst_imm == 0) {
2456 ins->opcode = OP_ICONST;
2460 imm = mono_is_power_of_two (ins->inst_imm);
2462 ins->opcode = OP_SHL_IMM;
2463 ins->inst_imm = imm;
2466 if (!ppc_is_imm16 (ins->inst_imm)) {
2467 NEW_INS (cfg, temp, OP_ICONST);
2468 temp->inst_c0 = ins->inst_imm;
2469 temp->dreg = mono_alloc_ireg (cfg);
2470 ins->sreg2 = temp->dreg;
2471 ins->opcode = map_to_reg_reg_op (ins->opcode);
2474 case OP_LOCALLOC_IMM:
2475 NEW_INS (cfg, temp, OP_ICONST);
2476 temp->inst_c0 = ins->inst_imm;
2477 temp->dreg = mono_alloc_ireg (cfg);
2478 ins->sreg1 = temp->dreg;
2479 ins->opcode = OP_LOCALLOC;
2481 case OP_LOAD_MEMBASE:
2482 case OP_LOADI4_MEMBASE:
2483 CASE_PPC64 (OP_LOADI8_MEMBASE)
2484 case OP_LOADU4_MEMBASE:
2485 case OP_LOADI2_MEMBASE:
2486 case OP_LOADU2_MEMBASE:
2487 case OP_LOADI1_MEMBASE:
2488 case OP_LOADU1_MEMBASE:
2489 case OP_LOADR4_MEMBASE:
2490 case OP_LOADR8_MEMBASE:
2491 case OP_STORE_MEMBASE_REG:
2492 CASE_PPC64 (OP_STOREI8_MEMBASE_REG)
2493 case OP_STOREI4_MEMBASE_REG:
2494 case OP_STOREI2_MEMBASE_REG:
2495 case OP_STOREI1_MEMBASE_REG:
2496 case OP_STORER4_MEMBASE_REG:
2497 case OP_STORER8_MEMBASE_REG:
2498 /* we can do two things: load the immed in a register
2499 * and use an indexed load, or see if the immed can be
2500 * represented as an ad_imm + a load with a smaller offset
2501 * that fits. We just do the first for now, optimize later.
2503 if (ppc_is_imm16 (ins->inst_offset))
2505 NEW_INS (cfg, temp, OP_ICONST);
2506 temp->inst_c0 = ins->inst_offset;
2507 temp->dreg = mono_alloc_ireg (cfg);
2508 ins->sreg2 = temp->dreg;
2509 ins->opcode = map_to_reg_reg_op (ins->opcode);
2511 case OP_STORE_MEMBASE_IMM:
2512 case OP_STOREI1_MEMBASE_IMM:
2513 case OP_STOREI2_MEMBASE_IMM:
2514 case OP_STOREI4_MEMBASE_IMM:
2515 CASE_PPC64 (OP_STOREI8_MEMBASE_IMM)
	/* registerize the stored immediate, then re-run for the offset */
2516 NEW_INS (cfg, temp, OP_ICONST);
2517 temp->inst_c0 = ins->inst_imm;
2518 temp->dreg = mono_alloc_ireg (cfg);
2519 ins->sreg1 = temp->dreg;
2520 ins->opcode = map_to_reg_reg_op (ins->opcode);
2522 goto loop_start; /* make it handle the possibly big ins->inst_offset */
2525 if (cfg->compile_aot) {
2526 /* Keep these in the aot case */
	/* JIT case: turn the float constant into a load from its address */
2529 NEW_INS (cfg, temp, OP_ICONST);
2530 temp->inst_c0 = (gulong)ins->inst_p0;
2531 temp->dreg = mono_alloc_ireg (cfg);
2532 ins->inst_basereg = temp->dreg;
2533 ins->inst_offset = 0;
2534 ins->opcode = ins->opcode == OP_R4CONST? OP_LOADR4_MEMBASE: OP_LOADR8_MEMBASE;
2536 /* make it handle the possibly big ins->inst_offset
2537 * later optimize to use lis + load_membase
2543 bb->last_ins = last_ins;
2544 bb->max_vreg = cfg->next_vreg;
/*
 * emit_float_to_int:
 * Convert FP register @sreg to a @size-byte integer in @dreg: fctiwz/fctidz
 * into scratch ppc_f0, spill it to the fp_conv_var_offset stack slot, and
 * reload the integer part; then truncate/sign-extend to @size according to
 * @is_signed.  NOTE(review): the lines computing 'sub_offset' and several
 * if/else lines are missing from this extracted view.
 */
2548 emit_float_to_int (MonoCompile *cfg, guchar *code, int dreg, int sreg, int size, gboolean is_signed)
2550 long offset = cfg->arch.fp_conv_var_offset;
2552 /* sreg is a float, dreg is an integer reg. ppc_f0 is used a scratch */
2553 #ifdef __mono_ppc64__
2555 ppc_fctidz (code, ppc_f0, sreg);
2560 ppc_fctiwz (code, ppc_f0, sreg);
	/* spill slot reachable with a 16-bit displacement: store/load directly */
2563 if (ppc_is_imm16 (offset + sub_offset)) {
2564 ppc_stfd (code, ppc_f0, offset, cfg->frame_reg);
2566 ppc_ldptr (code, dreg, offset + sub_offset, cfg->frame_reg);
2568 ppc_lwz (code, dreg, offset + sub_offset, cfg->frame_reg);
	/* otherwise materialize the slot address in dreg first */
2570 ppc_load (code, dreg, offset);
2571 ppc_add (code, dreg, dreg, cfg->frame_reg);
2572 ppc_stfd (code, ppc_f0, 0, dreg);
2574 ppc_ldptr (code, dreg, sub_offset, dreg);
2576 ppc_lwz (code, dreg, sub_offset, dreg);
	/* unsigned narrow results: mask to the requested width */
2580 ppc_andid (code, dreg, dreg, 0xff);
2582 ppc_andid (code, dreg, dreg, 0xffff);
2583 #ifdef __mono_ppc64__
2585 ppc_clrldi (code, dreg, dreg, 32);
	/* signed narrow results: sign-extend to register width */
2589 ppc_extsb (code, dreg, dreg);
2591 ppc_extsh (code, dreg, dreg);
2592 #ifdef __mono_ppc64__
2594 ppc_extsw (code, dreg, dreg);
2602 const guchar *target;
/* An I-form (bl) branch has a 26-bit signed byte displacement: +/- 32 MB. */
2607 #define is_call_imm(diff) ((glong)(diff) >= -33554432 && (glong)(diff) <= 33554431)
/*
 * search_thunk_slot:
 * mono_domain_code_foreach () callback (32-bit only): scan one code chunk's
 * thunk area for either an existing thunk that already jumps to
 * pdata->target (reuse it) or a free 16-byte slot (emit a new
 * lis/ori/mtctr/bcctr thunk), then patch pdata->code to branch to it.
 * Sets pdata->found to 1 on success.
 * NOTE(review): else/closing lines and the 'sub_offset'-style locals are
 * missing from this extracted view.
 */
2610 search_thunk_slot (void *data, int csize, int bsize, void *user_data) {
2611 #ifdef __mono_ppc64__
	/* thunks are not needed/implemented on ppc64 in this code path */
2612 g_assert_not_reached ();
2614 PatchData *pdata = (PatchData*)user_data;
2615 guchar *code = data;
2616 guint32 *thunks = data;
2617 guint32 *endthunks = (guint32*)(code + bsize);
2621 int difflow, diffhigh;
2623 /* always ensure a call from pdata->code can reach to the thunks without further thunks */
2624 difflow = (char*)pdata->code - (char*)thunks;
2625 diffhigh = (char*)pdata->code - (char*)endthunks;
	/* reachable either absolutely (address fits the 26-bit field) or relatively */
2626 if (!((is_call_imm (thunks) && is_call_imm (endthunks)) || (is_call_imm (difflow) && is_call_imm (diffhigh))))
	/* build the load sequence we expect an existing thunk to start with */
2629 templ = (guchar*)load;
2630 ppc_load_sequence (templ, ppc_r0, pdata->target);
2632 //g_print ("thunk nentries: %d\n", ((char*)endthunks - (char*)thunks)/16);
2633 if ((pdata->found == 2) || (pdata->code >= code && pdata->code <= code + csize)) {
2634 while (thunks < endthunks) {
2635 //g_print ("looking for target: %p at %p (%08x-%08x)\n", pdata->target, thunks, thunks [0], thunks [1]);
2636 if ((thunks [0] == load [0]) && (thunks [1] == load [1])) {
	/* matching thunk found: just redirect the call site to it */
2637 ppc_patch (pdata->code, (guchar*)thunks);
2640 static int num_thunks = 0;
2642 if ((num_thunks % 20) == 0)
2643 g_print ("num_thunks lookup: %d\n", num_thunks);
2646 } else if ((thunks [0] == 0) && (thunks [1] == 0)) {
2647 /* found a free slot instead: emit thunk */
2648 code = (guchar*)thunks;
2649 ppc_lis (code, ppc_r0, (gulong)(pdata->target) >> 16);
2650 ppc_ori (code, ppc_r0, ppc_r0, (gulong)(pdata->target) & 0xffff);
2651 ppc_mtctr (code, ppc_r0);
2652 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
2653 mono_arch_flush_icache ((guchar*)thunks, 16);
2655 ppc_patch (pdata->code, (guchar*)thunks);
2658 static int num_thunks = 0;
2660 if ((num_thunks % 20) == 0)
2661 g_print ("num_thunks: %d\n", num_thunks);
2665 /* skip 16 bytes, the size of the thunk */
2669 //g_print ("failed thunk lookup for %p from %p at %p (%d entries)\n", pdata->target, pdata->code, data, count);
/*
 * handle_thunk:
 * Redirect the branch at CODE to TARGET through a thunk because TARGET
 * is out of direct-branch range.  Runs search_thunk_slot () over the
 * domain's code chunks under the domain lock; presumably the first pass
 * only considers the chunk containing CODE and the second pass
 * (pdata.found == 2, per the comment below) takes any free slot —
 * the intermediate pdata.found updates are not visible in this chunk.
 * Asserts that a thunk was ultimately found (pdata.found == 1).
 */
2676 handle_thunk (int absolute, guchar *code, const guchar *target) {
2677 MonoDomain *domain = mono_domain_get ();
2681 pdata.target = target;
2682 pdata.absolute = absolute;
2685 mono_domain_lock (domain);
2686 mono_domain_code_foreach (domain, search_thunk_slot, &pdata);
2689 /* this uses the first available slot */
2691 mono_domain_code_foreach (domain, search_thunk_slot, &pdata);
2693 mono_domain_unlock (domain);
2695 if (pdata.found != 1)
2696 g_print ("thunk failed for %p from %p\n", target, code);
2697 g_assert (pdata.found == 1);
/*
 * patch_ins:
 * Overwrite the 4-byte instruction at CODE with INS and flush the
 * icache for that word so the modified instruction becomes visible
 * to execution.
 */
2701 patch_ins (guint8 *code, guint32 ins)
2703 *(guint32*)code = ins;
2704 mono_arch_flush_icache (code, 4);
/*
 * ppc_patch_full:
 * Patch the branch instruction (or address-load sequence) at CODE so
 * that control transfers to TARGET.  Visible cases:
 *   - unconditional branch (primary opcode 18, `b`): rewritten with a
 *     26-bit relative displacement when in range, else as an absolute
 *     branch (AA bit, the `| 2`), else routed through a thunk;
 *   - conditional branch: 16-bit displacement, asserts on overflow;
 *   - lis (opcode 15) / blrl / blr / bcctr endings: the whole address
 *     load sequence before the branch is rewritten to load TARGET.
 * IS_FD flags TARGET as a function descriptor (ppc64 ABI).
 * NOTE(review): many lines (the if/else structure, returns, the low-bit
 * insertion for conditional branches) are missing from this chunk;
 * comments describe only the visible code.
 */
2708 ppc_patch_full (guchar *code, const guchar *target, gboolean is_fd)
2710 guint32 ins = *(guint32*)code;
/* top 6 bits = primary opcode */
2711 guint32 prim = ins >> 26;
2714 //g_print ("patching 0x%08x (0x%08x) to point to 0x%08x\n", code, ins, target);
2716 // prefer relative branches, they are more position independent (e.g. for AOT compilation).
2717 gint diff = target - code;
/* forward branch: fits the signed 26-bit LI field; keep the LK bit */
2720 if (diff <= 33554431){
2721 ins = (18 << 26) | (diff) | (ins & 1);
2722 patch_ins (code, ins);
2726 /* diff between 0 and -33554432 */
/* backward branch: mask diff into the LI field */
2727 if (diff >= -33554432){
2728 ins = (18 << 26) | (diff & ~0xfc000000) | (ins & 1);
2729 patch_ins (code, ins);
/* out of relative range: try an absolute branch (the `| 2` sets AA) */
2734 if ((glong)target >= 0){
2735 if ((glong)target <= 33554431){
2736 ins = (18 << 26) | ((gulong) target) | (ins & 1) | 2;
2737 patch_ins (code, ins);
2741 if ((glong)target >= -33554432){
2742 ins = (18 << 26) | (((gulong)target) & ~0xfc000000) | (ins & 1) | 2;
2743 patch_ins (code, ins);
/* neither relative nor absolute reaches: go through a branch thunk */
2748 handle_thunk (TRUE, code, target);
2751 g_assert_not_reached ();
/* conditional branch, absolute form: target must fit in 16 bits */
2759 guint32 li = (gulong)target;
2760 ins = (ins & 0xffff0000) | (ins & 3);
2761 ovf = li & 0xffff0000;
2762 if (ovf != 0 && ovf != 0xffff0000)
2763 g_assert_not_reached ();
2766 // FIXME: assert the top bits of li are 0
/* conditional branch, relative form: diff must fit in 16 bits */
2768 gint diff = target - code;
2769 ins = (ins & 0xffff0000) | (ins & 3);
2770 ovf = diff & 0xffff0000;
2771 if (ovf != 0 && ovf != 0xffff0000)
2772 g_assert_not_reached ();
2776 patch_ins (code, ins);
/* lis (15) or blrl/blr/bcctr: patch the preceding load sequence */
2780 if (prim == 15 || ins == 0x4e800021 || ins == 0x4e800020 || ins == 0x4e800420) {
2781 #ifdef __mono_ppc64__
2782 guint32 *seq = (guint32*)code;
2783 guint32 *branch_ins;
2785 /* the trampoline code will try to patch the blrl, blr, bcctr */
2786 if (ins == 0x4e800021 || ins == 0x4e800020 || ins == 0x4e800420) {
2788 if (ppc_opcode (seq [-3]) == 58 || ppc_opcode (seq [-3]) == 31) /* ld || mr */
2793 if (ppc_opcode (seq [5]) == 58 || ppc_opcode (seq [5]) == 31) /* ld || mr */
2794 branch_ins = seq + 8;
2796 branch_ins = seq + 6;
2799 seq = (guint32*)code;
2800 /* this is the lis/ori/sldi/oris/ori/(ld/ld|mr/nop)/mtlr/blrl sequence */
2801 g_assert (mono_ppc_is_direct_call_sequence (branch_ins));
2803 if (ppc_opcode (seq [5]) == 58) { /* ld */
2804 g_assert (ppc_opcode (seq [6]) == 58); /* ld */
2807 guint8 *buf = (guint8*)&seq [5];
2808 ppc_mr (buf, ppc_r0, ppc_r11);
/* resolve the function descriptor to its entry point */
2813 target = mono_get_addr_from_ftnptr ((gpointer)target);
2816 /* FIXME: make this thread safe */
2817 /* FIXME: we're assuming we're using r11 here */
2818 ppc_load_sequence (code, ppc_r11, target);
2819 mono_arch_flush_icache ((guint8*)seq, 28);
2822 /* the trampoline code will try to patch the blrl, blr, bcctr */
2823 if (ins == 0x4e800021 || ins == 0x4e800020 || ins == 0x4e800420) {
2826 /* this is the lis/ori/mtlr/blrl sequence */
2827 seq = (guint32*)code;
2828 g_assert ((seq [0] >> 26) == 15);
2829 g_assert ((seq [1] >> 26) == 24);
2830 g_assert ((seq [2] >> 26) == 31);
2831 g_assert (seq [3] == 0x4e800021 || seq [3] == 0x4e800020 || seq [3] == 0x4e800420);
2832 /* FIXME: make this thread safe */
/* ppc32: rewrite just the lis/ori pair with the new target */
2833 ppc_lis (code, ppc_r0, (guint32)(target) >> 16);
2834 ppc_ori (code, ppc_r0, ppc_r0, (guint32)(target) & 0xffff);
2835 mono_arch_flush_icache (code - 8, 8);
2838 g_assert_not_reached ();
2840 // g_print ("patched with 0x%08x\n", ins);
/*
 * ppc_patch:
 * Patch CODE to transfer control to TARGET; TARGET is a plain code
 * address, not a function descriptor (is_fd == FALSE).
 */
2844 ppc_patch (guchar *code, const guchar *target)
2846 ppc_patch_full (code, target, FALSE);
/*
 * mono_ppc_patch:
 * Exported wrapper around ppc_patch () for use outside this file.
 */
2850 mono_ppc_patch (guchar *code, const guchar *target)
2852 ppc_patch (code, target);
/*
 * emit_move_return_value:
 * After a call instruction, move the ABI return value into the
 * destination vreg.  Only the FP-call case is visible in this chunk:
 * float results arrive in ppc_f1 and are copied to ins->dreg unless
 * they already live there.  Other cases of the switch are not shown.
 */
2856 emit_move_return_value (MonoCompile *cfg, MonoInst *ins, guint8 *code)
2858 switch (ins->opcode) {
2861 case OP_FCALL_MEMBASE:
2862 if (ins->dreg != ppc_f1)
2863 ppc_fmr (code, ins->dreg, ppc_f1);
2871 * emit_load_volatile_arguments:
2873 * Load volatile arguments from the stack to the original input registers.
2874 * Required before a tail call.
/*
 * Reload each incoming argument from its stack home back into the
 * register assigned by the calling convention (per calculate_sizes ()),
 * so a tail call sees the registers exactly as on method entry.
 * NOTE(review): various lines (locals i/pos/cinfo, case labels, breaks,
 * cinfo freeing, the return) are missing from this chunk; comments
 * describe only the visible code.
 */
2877 emit_load_volatile_arguments (MonoCompile *cfg, guint8 *code)
2879 MonoMethod *method = cfg->method;
2880 MonoMethodSignature *sig;
2884 int struct_index = 0;
2886 sig = mono_method_signature (method);
2888 /* This is the opposite of the code in emit_prolog */
2892 cinfo = calculate_sizes (sig, sig->pinvoke);
/* valuetype return: reload the hidden return-buffer pointer */
2894 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
2895 ArgInfo *ainfo = &cinfo->ret;
2896 inst = cfg->vret_addr;
2897 g_assert (ppc_is_imm16 (inst->inst_offset));
2898 ppc_ldptr (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2900 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
2901 ArgInfo *ainfo = cinfo->args + i;
2902 inst = cfg->args [pos];
2904 g_assert (inst->opcode != OP_REGVAR);
2905 g_assert (ppc_is_imm16 (inst->inst_offset));
2907 switch (ainfo->regtype) {
/* integer-register argument: reload at its declared size */
2908 case RegTypeGeneral:
2909 switch (ainfo->size) {
2911 ppc_lbz (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2914 ppc_lhz (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2916 #ifdef __mono_ppc64__
2918 ppc_lwz (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2922 ppc_ldptr (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
/* FP-register argument: single or double precision reload */
2928 switch (ainfo->size) {
2930 ppc_lfs (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2933 ppc_lfd (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
2936 g_assert_not_reached ();
/* stack-passed argument: copy it back to the caller's out area
 * through r0, at the width implied by its underlying type */
2941 MonoType *type = mini_type_get_underlying_type (cfg->generic_sharing_context,
2942 &inst->klass->byval_arg);
2944 #ifndef __mono_ppc64__
2945 if (type->type == MONO_TYPE_I8)
2949 if (MONO_TYPE_IS_REFERENCE (type) || type->type == MONO_TYPE_I8) {
2950 ppc_ldptr (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
2951 ppc_stptr (code, ppc_r0, ainfo->offset, ainfo->reg);
2952 } else if (type->type == MONO_TYPE_I4) {
2953 ppc_lwz (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
2954 ppc_stw (code, ppc_r0, ainfo->offset, ainfo->reg);
/* struct passed by value in consecutive registers */
2962 case RegTypeStructByVal: {
2973 * Darwin pinvokes needs some special handling
2974 * for 1 and 2 byte arguments
2976 if (method->signature->pinvoke)
2977 size = mono_class_native_size (inst->klass, NULL);
2978 if (size == 1 || size == 2) {
2983 for (j = 0; j < ainfo->size; ++j) {
2984 ppc_ldptr (code, ainfo->reg + j,
2985 inst->inst_offset + j * sizeof (gpointer),
2986 inst->inst_basereg);
2987 /* FIXME: shift to the right */
/* struct passed by reference: reload the address saved for tail calls */
2994 case RegTypeStructByAddr: {
2995 MonoInst *addr = cfg->tailcall_valuetype_addrs [struct_index];
2997 g_assert (ppc_is_imm16 (addr->inst_offset));
2998 g_assert (!ainfo->offset);
2999 ppc_ldptr (code, ainfo->reg, addr->inst_offset, addr->inst_basereg);
3006 g_assert_not_reached ();
3017 /* This must be kept in sync with emit_load_volatile_arguments(). */
/*
 * ins_native_length:
 * Return an upper bound, in bytes, for the native code emitted for INS.
 * Starts from the machine-description table entry; for OP_JMP (tail
 * call) it additionally accounts for the argument-reload code emitted
 * by emit_load_volatile_arguments (), per argument kind.
 * NOTE(review): some lines (early return, per-case `len +=` amounts,
 * the final return) are missing from this chunk.
 */
3019 ins_native_length (MonoCompile *cfg, MonoInst *ins)
3021 int len = ((guint8 *)ins_get_spec (ins->opcode))[MONO_INST_LEN];
3022 MonoMethodSignature *sig;
/* only OP_JMP needs the extra per-argument accounting below */
3027 if (ins->opcode != OP_JMP)
3030 call = (MonoCallInst*)ins;
3031 sig = mono_method_signature (cfg->method);
3032 cinfo = calculate_sizes (sig, sig->pinvoke);
3034 if (MONO_TYPE_ISSTRUCT (sig->ret))
3036 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
3037 ArgInfo *ainfo = cinfo->args + i;
3039 switch (ainfo->regtype) {
3040 case RegTypeGeneral:
/* one 4-byte load per register of a by-value struct */
3049 case RegTypeStructByVal:
3050 len += 4 * ainfo->size;
3053 case RegTypeStructByAddr:
3058 g_assert_not_reached ();
/*
 * emit_reserve_param_area:
 * Emit code growing the stack by the method's outgoing-parameter area
 * (rounded up to MONO_ARCH_FRAME_ALIGNMENT), preserving the ABI
 * back-chain: the saved SP word is reloaded and re-stored with update
 * at the new stack pointer.  Counterpart of
 * emit_unreserve_param_area ().
 */
3068 emit_reserve_param_area (MonoCompile *cfg, guint8 *code)
3070 long size = cfg->param_area;
/* round up to the frame alignment */
3072 size += MONO_ARCH_FRAME_ALIGNMENT - 1;
3073 size &= -MONO_ARCH_FRAME_ALIGNMENT;
/* reload the back-chain word so stptr_update can re-link it */
3078 ppc_ldptr (code, ppc_r0, 0, ppc_sp);
3079 if (ppc_is_imm16 (-size)) {
3080 ppc_stptr_update (code, ppc_r0, -size, ppc_sp);
/* displacement too large: use the indexed update form via r11 */
3082 ppc_load (code, ppc_r11, -size);
3083 ppc_stptr_update_indexed (code, ppc_r0, ppc_sp, ppc_r11);
/*
 * emit_unreserve_param_area:
 * Emit code shrinking the stack by the same (frame-aligned) amount that
 * emit_reserve_param_area () grew it, re-establishing the ABI
 * back-chain at the restored stack pointer.
 */
3090 emit_unreserve_param_area (MonoCompile *cfg, guint8 *code)
3092 long size = cfg->param_area;
/* must round exactly as emit_reserve_param_area () did */
3094 size += MONO_ARCH_FRAME_ALIGNMENT - 1;
3095 size &= -MONO_ARCH_FRAME_ALIGNMENT;
/* reload the back-chain word so stptr_update can re-link it */
3100 ppc_ldptr (code, ppc_r0, 0, ppc_sp);
3101 if (ppc_is_imm16 (size)) {
3102 ppc_stptr_update (code, ppc_r0, size, ppc_sp);
/* displacement too large: use the indexed update form via r11 */
3104 ppc_load (code, ppc_r11, size);
3105 ppc_stptr_update_indexed (code, ppc_r0, ppc_sp, ppc_r11);
3111 #define MASK_SHIFT_IMM(i) ((i) & MONO_PPC_32_64_CASE (0x1f, 0x3f))
3114 mono_arch_output_basic_block (MonoCompile *cfg, MonoBasicBlock *bb)
3116 MonoInst *ins, *next;
3119 guint8 *code = cfg->native_code + cfg->code_len;
3120 MonoInst *last_ins = NULL;
3121 guint last_offset = 0;
3125 /* we don't align basic blocks of loops on ppc */
3127 if (cfg->verbose_level > 2)
3128 g_print ("Basic block %d starting at offset 0x%x\n", bb->block_num, bb->native_offset);
3130 cpos = bb->max_offset;
3132 if (cfg->prof_options & MONO_PROFILE_COVERAGE) {
3133 //MonoCoverageInfo *cov = mono_get_coverage_info (cfg->method);
3134 //g_assert (!mono_compile_aot);
3137 // cov->data [bb->dfn].iloffset = bb->cil_code - cfg->cil_code;
3138 /* this is not thread save, but good enough */
3139 /* fixme: howto handle overflows? */
3140 //x86_inc_mem (code, &cov->data [bb->dfn].count);
3143 MONO_BB_FOR_EACH_INS (bb, ins) {
3144 offset = code - cfg->native_code;
3146 max_len = ins_native_length (cfg, ins);
3148 if (offset > (cfg->code_size - max_len - 16)) {
3149 cfg->code_size *= 2;
3150 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
3151 code = cfg->native_code + offset;
3153 // if (ins->cil_code)
3154 // g_print ("cil code\n");
3155 mono_debug_record_line_number (cfg, ins, offset);
3157 switch (normalize_opcode (ins->opcode)) {
3158 case OP_RELAXED_NOP:
3161 case OP_DUMMY_STORE:
3162 case OP_NOT_REACHED:
3166 emit_tls_access (code, ins->dreg, ins->inst_offset);
3169 ppc_mullw (code, ppc_r0, ins->sreg1, ins->sreg2);
3170 ppc_mulhw (code, ppc_r3, ins->sreg1, ins->sreg2);
3171 ppc_mr (code, ppc_r4, ppc_r0);
3174 ppc_mullw (code, ppc_r0, ins->sreg1, ins->sreg2);
3175 ppc_mulhwu (code, ppc_r3, ins->sreg1, ins->sreg2);
3176 ppc_mr (code, ppc_r4, ppc_r0);
3178 case OP_MEMORY_BARRIER:
3181 case OP_STOREI1_MEMBASE_REG:
3182 if (ppc_is_imm16 (ins->inst_offset)) {
3183 ppc_stb (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3185 ppc_load (code, ppc_r0, ins->inst_offset);
3186 ppc_stbx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3189 case OP_STOREI2_MEMBASE_REG:
3190 if (ppc_is_imm16 (ins->inst_offset)) {
3191 ppc_sth (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3193 ppc_load (code, ppc_r0, ins->inst_offset);
3194 ppc_sthx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3197 case OP_STORE_MEMBASE_REG:
3198 if (ppc_is_imm16 (ins->inst_offset)) {
3199 ppc_stptr (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3201 ppc_load (code, ppc_r0, ins->inst_offset);
3202 ppc_stptr_indexed (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3205 case OP_STOREI1_MEMINDEX:
3206 ppc_stbx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
3208 case OP_STOREI2_MEMINDEX:
3209 ppc_sthx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
3211 case OP_STORE_MEMINDEX:
3212 ppc_stptr_indexed (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
3215 g_assert_not_reached ();
3217 case OP_LOAD_MEMBASE:
3218 if (ppc_is_imm16 (ins->inst_offset)) {
3219 ppc_ldptr (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3221 ppc_load (code, ppc_r0, ins->inst_offset);
3222 ppc_ldptr_indexed (code, ins->dreg, ins->inst_basereg, ppc_r0);
3225 case OP_LOADI4_MEMBASE:
3226 #ifdef __mono_ppc64__
3227 if (ppc_is_imm16 (ins->inst_offset)) {
3228 ppc_lwa (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3230 ppc_load (code, ppc_r0, ins->inst_offset);
3231 ppc_lwax (code, ins->dreg, ins->inst_basereg, ppc_r0);
3235 case OP_LOADU4_MEMBASE:
3236 if (ppc_is_imm16 (ins->inst_offset)) {
3237 ppc_lwz (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3239 ppc_load (code, ppc_r0, ins->inst_offset);
3240 ppc_lwzx (code, ins->dreg, ins->inst_basereg, ppc_r0);
3243 case OP_LOADI1_MEMBASE:
3244 case OP_LOADU1_MEMBASE:
3245 if (ppc_is_imm16 (ins->inst_offset)) {
3246 ppc_lbz (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3248 ppc_load (code, ppc_r0, ins->inst_offset);
3249 ppc_lbzx (code, ins->dreg, ins->inst_basereg, ppc_r0);
3251 if (ins->opcode == OP_LOADI1_MEMBASE)
3252 ppc_extsb (code, ins->dreg, ins->dreg);
3254 case OP_LOADU2_MEMBASE:
3255 if (ppc_is_imm16 (ins->inst_offset)) {
3256 ppc_lhz (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3258 ppc_load (code, ppc_r0, ins->inst_offset);
3259 ppc_lhzx (code, ins->dreg, ins->inst_basereg, ppc_r0);
3262 case OP_LOADI2_MEMBASE:
3263 if (ppc_is_imm16 (ins->inst_offset)) {
3264 ppc_lha (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3266 ppc_load (code, ppc_r0, ins->inst_offset);
3267 ppc_lhax (code, ins->dreg, ins->inst_basereg, ppc_r0);
3270 case OP_LOAD_MEMINDEX:
3271 ppc_ldptr_indexed (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3273 case OP_LOADI4_MEMINDEX:
3274 #ifdef __mono_ppc64__
3275 ppc_lwax (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3278 case OP_LOADU4_MEMINDEX:
3279 ppc_lwzx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3281 case OP_LOADU2_MEMINDEX:
3282 ppc_lhzx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3284 case OP_LOADI2_MEMINDEX:
3285 ppc_lhax (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3287 case OP_LOADU1_MEMINDEX:
3288 ppc_lbzx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3290 case OP_LOADI1_MEMINDEX:
3291 ppc_lbzx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
3292 ppc_extsb (code, ins->dreg, ins->dreg);
3294 case OP_ICONV_TO_I1:
3295 CASE_PPC64 (OP_LCONV_TO_I1)
3296 ppc_extsb (code, ins->dreg, ins->sreg1);
3298 case OP_ICONV_TO_I2:
3299 CASE_PPC64 (OP_LCONV_TO_I2)
3300 ppc_extsh (code, ins->dreg, ins->sreg1);
3302 case OP_ICONV_TO_U1:
3303 CASE_PPC64 (OP_LCONV_TO_U1)
3304 ppc_clrlwi (code, ins->dreg, ins->sreg1, 24);
3306 case OP_ICONV_TO_U2:
3307 CASE_PPC64 (OP_LCONV_TO_U2)
3308 ppc_clrlwi (code, ins->dreg, ins->sreg1, 16);
3312 CASE_PPC64 (OP_LCOMPARE)
3313 L = (sizeof (gpointer) == 4 || ins->opcode == OP_ICOMPARE) ? 0 : 1;
3315 if (next && compare_opcode_is_unsigned (next->opcode))
3316 ppc_cmpl (code, 0, L, ins->sreg1, ins->sreg2);
3318 ppc_cmp (code, 0, L, ins->sreg1, ins->sreg2);
3320 case OP_COMPARE_IMM:
3321 case OP_ICOMPARE_IMM:
3322 CASE_PPC64 (OP_LCOMPARE_IMM)
3323 L = (sizeof (gpointer) == 4 || ins->opcode == OP_ICOMPARE_IMM) ? 0 : 1;
3325 if (next && compare_opcode_is_unsigned (next->opcode)) {
3326 if (ppc_is_uimm16 (ins->inst_imm)) {
3327 ppc_cmpli (code, 0, L, ins->sreg1, (ins->inst_imm & 0xffff));
3329 g_assert_not_reached ();
3332 if (ppc_is_imm16 (ins->inst_imm)) {
3333 ppc_cmpi (code, 0, L, ins->sreg1, (ins->inst_imm & 0xffff));
3335 g_assert_not_reached ();
3344 ppc_addco (code, ins->dreg, ins->sreg1, ins->sreg2);
3347 CASE_PPC64 (OP_LADD)
3348 ppc_add (code, ins->dreg, ins->sreg1, ins->sreg2);
3352 ppc_adde (code, ins->dreg, ins->sreg1, ins->sreg2);
3355 if (ppc_is_imm16 (ins->inst_imm)) {
3356 ppc_addic (code, ins->dreg, ins->sreg1, ins->inst_imm);
3358 g_assert_not_reached ();
3363 CASE_PPC64 (OP_LADD_IMM)
3364 if (ppc_is_imm16 (ins->inst_imm)) {
3365 ppc_addi (code, ins->dreg, ins->sreg1, ins->inst_imm);
3367 g_assert_not_reached ();
3371 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3373 ppc_addo (code, ins->dreg, ins->sreg1, ins->sreg2);
3374 ppc_mfspr (code, ppc_r0, ppc_xer);
3375 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3376 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3378 case OP_IADD_OVF_UN:
3379 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3381 ppc_addco (code, ins->dreg, ins->sreg1, ins->sreg2);
3382 ppc_mfspr (code, ppc_r0, ppc_xer);
3383 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3384 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3387 CASE_PPC64 (OP_LSUB_OVF)
3388 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3390 ppc_subfo (code, ins->dreg, ins->sreg2, ins->sreg1);
3391 ppc_mfspr (code, ppc_r0, ppc_xer);
3392 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3393 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3395 case OP_ISUB_OVF_UN:
3396 CASE_PPC64 (OP_LSUB_OVF_UN)
3397 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3399 ppc_subfc (code, ins->dreg, ins->sreg2, ins->sreg1);
3400 ppc_mfspr (code, ppc_r0, ppc_xer);
3401 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3402 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE, PPC_BR_EQ, "OverflowException");
3404 case OP_ADD_OVF_CARRY:
3405 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3407 ppc_addeo (code, ins->dreg, ins->sreg1, ins->sreg2);
3408 ppc_mfspr (code, ppc_r0, ppc_xer);
3409 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3410 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3412 case OP_ADD_OVF_UN_CARRY:
3413 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3415 ppc_addeo (code, ins->dreg, ins->sreg1, ins->sreg2);
3416 ppc_mfspr (code, ppc_r0, ppc_xer);
3417 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3418 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3420 case OP_SUB_OVF_CARRY:
3421 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3423 ppc_subfeo (code, ins->dreg, ins->sreg2, ins->sreg1);
3424 ppc_mfspr (code, ppc_r0, ppc_xer);
3425 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3426 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3428 case OP_SUB_OVF_UN_CARRY:
3429 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3431 ppc_subfeo (code, ins->dreg, ins->sreg2, ins->sreg1);
3432 ppc_mfspr (code, ppc_r0, ppc_xer);
3433 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3434 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE, PPC_BR_EQ, "OverflowException");
3438 ppc_subfco (code, ins->dreg, ins->sreg2, ins->sreg1);
3441 CASE_PPC64 (OP_LSUB)
3442 ppc_subf (code, ins->dreg, ins->sreg2, ins->sreg1);
3446 ppc_subfe (code, ins->dreg, ins->sreg2, ins->sreg1);
3450 CASE_PPC64 (OP_LSUB_IMM)
3451 // we add the negated value
3452 if (ppc_is_imm16 (-ins->inst_imm))
3453 ppc_addi (code, ins->dreg, ins->sreg1, -ins->inst_imm);
3455 g_assert_not_reached ();
3459 g_assert (ppc_is_imm16 (ins->inst_imm));
3460 ppc_subfic (code, ins->dreg, ins->sreg1, ins->inst_imm);
3463 ppc_subfze (code, ins->dreg, ins->sreg1);
3466 CASE_PPC64 (OP_LAND)
3467 /* FIXME: the ppc macros as inconsistent here: put dest as the first arg! */
3468 ppc_and (code, ins->sreg1, ins->dreg, ins->sreg2);
3472 CASE_PPC64 (OP_LAND_IMM)
3473 if (!(ins->inst_imm & 0xffff0000)) {
3474 ppc_andid (code, ins->sreg1, ins->dreg, ins->inst_imm);
3475 } else if (!(ins->inst_imm & 0xffff)) {
3476 ppc_andisd (code, ins->sreg1, ins->dreg, ((guint32)ins->inst_imm >> 16));
3478 g_assert_not_reached ();
3482 CASE_PPC64 (OP_LDIV) {
3483 guint8 *divisor_is_m1;
3484 /* XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3486 ppc_compare_reg_imm (code, 0, ins->sreg2, -1);
3487 divisor_is_m1 = code;
3488 ppc_bc (code, PPC_BR_FALSE | PPC_BR_LIKELY, PPC_BR_EQ, 0);
3489 ppc_lis (code, ppc_r0, 0x8000);
3490 #ifdef __mono_ppc64__
3491 if (ins->opcode == OP_LDIV)
3492 ppc_sldi (code, ppc_r0, ppc_r0, 32);
3494 ppc_compare (code, 0, ins->sreg1, ppc_r0);
3495 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE, PPC_BR_EQ, "ArithmeticException");
3496 ppc_patch (divisor_is_m1, code);
3497 /* XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3499 if (ins->opcode == OP_IDIV)
3500 ppc_divwod (code, ins->dreg, ins->sreg1, ins->sreg2);
3501 #ifdef __mono_ppc64__
3503 ppc_divdod (code, ins->dreg, ins->sreg1, ins->sreg2);
3505 ppc_mfspr (code, ppc_r0, ppc_xer);
3506 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3507 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "DivideByZeroException");
3511 CASE_PPC64 (OP_LDIV_UN)
3512 if (ins->opcode == OP_IDIV_UN)
3513 ppc_divwuod (code, ins->dreg, ins->sreg1, ins->sreg2);
3514 #ifdef __mono_ppc64__
3516 ppc_divduod (code, ins->dreg, ins->sreg1, ins->sreg2);
3518 ppc_mfspr (code, ppc_r0, ppc_xer);
3519 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3520 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "DivideByZeroException");
3526 g_assert_not_reached ();
3529 ppc_or (code, ins->dreg, ins->sreg1, ins->sreg2);
3533 CASE_PPC64 (OP_LOR_IMM)
3534 if (!(ins->inst_imm & 0xffff0000)) {
3535 ppc_ori (code, ins->sreg1, ins->dreg, ins->inst_imm);
3536 } else if (!(ins->inst_imm & 0xffff)) {
3537 ppc_oris (code, ins->dreg, ins->sreg1, ((guint32)(ins->inst_imm) >> 16));
3539 g_assert_not_reached ();
3543 CASE_PPC64 (OP_LXOR)
3544 ppc_xor (code, ins->dreg, ins->sreg1, ins->sreg2);
3548 CASE_PPC64 (OP_LXOR_IMM)
3549 if (!(ins->inst_imm & 0xffff0000)) {
3550 ppc_xori (code, ins->sreg1, ins->dreg, ins->inst_imm);
3551 } else if (!(ins->inst_imm & 0xffff)) {
3552 ppc_xoris (code, ins->sreg1, ins->dreg, ((guint32)(ins->inst_imm) >> 16));
3554 g_assert_not_reached ();
3558 CASE_PPC64 (OP_LSHL)
3559 ppc_shift_left (code, ins->dreg, ins->sreg1, ins->sreg2);
3563 CASE_PPC64 (OP_LSHL_IMM)
3564 ppc_shift_left_imm (code, ins->dreg, ins->sreg1, MASK_SHIFT_IMM (ins->inst_imm));
3567 ppc_sraw (code, ins->dreg, ins->sreg1, ins->sreg2);
3570 ppc_shift_right_arith_imm (code, ins->dreg, ins->sreg1, MASK_SHIFT_IMM (ins->inst_imm));
3573 if (MASK_SHIFT_IMM (ins->inst_imm))
3574 ppc_shift_right_imm (code, ins->dreg, ins->sreg1, MASK_SHIFT_IMM (ins->inst_imm));
3576 ppc_mr (code, ins->dreg, ins->sreg1);
3579 ppc_srw (code, ins->dreg, ins->sreg1, ins->sreg2);
3582 CASE_PPC64 (OP_LNOT)
3583 ppc_not (code, ins->dreg, ins->sreg1);
3586 CASE_PPC64 (OP_LNEG)
3587 ppc_neg (code, ins->dreg, ins->sreg1);
3590 CASE_PPC64 (OP_LMUL)
3591 ppc_multiply (code, ins->dreg, ins->sreg1, ins->sreg2);
3595 CASE_PPC64 (OP_LMUL_IMM)
3596 if (ppc_is_imm16 (ins->inst_imm)) {
3597 ppc_mulli (code, ins->dreg, ins->sreg1, ins->inst_imm);
3599 g_assert_not_reached ();
3603 CASE_PPC64 (OP_LMUL_OVF)
3604 /* we annot use mcrxr, since it's not implemented on some processors
3605 * XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3607 if (ins->opcode == OP_IMUL_OVF)
3608 ppc_mullwo (code, ins->dreg, ins->sreg1, ins->sreg2);
3609 #ifdef __mono_ppc64__
3611 ppc_mulldo (code, ins->dreg, ins->sreg1, ins->sreg2);
3613 ppc_mfspr (code, ppc_r0, ppc_xer);
3614 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3615 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3617 case OP_IMUL_OVF_UN:
3618 CASE_PPC64 (OP_LMUL_OVF_UN)
3619 /* we first multiply to get the high word and compare to 0
3620 * to set the flags, then the result is discarded and then
3621 * we multiply to get the lower * bits result
3623 if (ins->opcode == OP_IMUL_OVF_UN)
3624 ppc_mulhwu (code, ppc_r0, ins->sreg1, ins->sreg2);
3625 #ifdef __mono_ppc64__
3627 ppc_mulhdu (code, ppc_r0, ins->sreg1, ins->sreg2);
3629 ppc_cmpi (code, 0, 0, ppc_r0, 0);
3630 EMIT_COND_SYSTEM_EXCEPTION (CEE_BNE_UN - CEE_BEQ, "OverflowException");
3631 ppc_multiply (code, ins->dreg, ins->sreg1, ins->sreg2);
3634 CASE_PPC64 (OP_I8CONST)
3635 ppc_load (code, ins->dreg, ins->inst_c0);
3637 case OP_LOAD_GOTADDR:
3638 /* The PLT implementation depends on this */
3639 g_assert (ins->dreg == ppc_r30);
3641 code = mono_arch_emit_load_got_addr (cfg->native_code, code, cfg, NULL);
3644 // FIXME: Fix max instruction length
3645 mono_add_patch_info (cfg, offset, (MonoJumpInfoType)ins->inst_right->inst_i1, ins->inst_right->inst_p0);
3646 /* arch_emit_got_access () patches this */
3647 ppc_load32 (code, ppc_r0, 0);
3648 ppc_ldptr_indexed (code, ins->dreg, ins->inst_basereg, ppc_r0);
3651 mono_add_patch_info (cfg, offset, (MonoJumpInfoType)ins->inst_i1, ins->inst_p0);
3652 ppc_load_sequence (code, ins->dreg, 0);
3654 CASE_PPC32 (OP_ICONV_TO_I4)
3655 CASE_PPC32 (OP_ICONV_TO_U4)
3657 ppc_mr (code, ins->dreg, ins->sreg1);
3660 int saved = ins->sreg1;
3661 if (ins->sreg1 == ppc_r3) {
3662 ppc_mr (code, ppc_r0, ins->sreg1);
3665 if (ins->sreg2 != ppc_r3)
3666 ppc_mr (code, ppc_r3, ins->sreg2);
3667 if (saved != ppc_r4)
3668 ppc_mr (code, ppc_r4, saved);
3672 ppc_fmr (code, ins->dreg, ins->sreg1);
3674 case OP_FCONV_TO_R4:
3675 ppc_frsp (code, ins->dreg, ins->sreg1);
3681 * Keep in sync with mono_arch_emit_epilog
3683 g_assert (!cfg->method->save_lmf);
3685 * Note: we can use ppc_r11 here because it is dead anyway:
3686 * we're leaving the method.
3688 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
3689 long ret_offset = cfg->stack_usage + PPC_RET_ADDR_OFFSET;
3690 if (ppc_is_imm16 (ret_offset)) {
3691 ppc_ldptr (code, ppc_r0, ret_offset, cfg->frame_reg);
3693 ppc_load (code, ppc_r11, ret_offset);
3694 ppc_ldptr_indexed (code, ppc_r0, cfg->frame_reg, ppc_r11);
3696 ppc_mtlr (code, ppc_r0);
3699 code = emit_load_volatile_arguments (cfg, code);
3701 if (ppc_is_imm16 (cfg->stack_usage)) {
3702 ppc_addi (code, ppc_r11, cfg->frame_reg, cfg->stack_usage);
3704 ppc_load (code, ppc_r11, cfg->stack_usage);
3705 ppc_add (code, ppc_r11, cfg->frame_reg, ppc_r11);
3707 if (!cfg->method->save_lmf) {
3708 /*for (i = 31; i >= 14; --i) {
3709 if (cfg->used_float_regs & (1 << i)) {
3710 pos += sizeof (double);
3711 ppc_lfd (code, i, -pos, cfg->frame_reg);
3715 for (i = 31; i >= 13; --i) {
3716 if (cfg->used_int_regs & (1 << i)) {
3717 pos += sizeof (gpointer);
3718 ppc_ldptr (code, i, -pos, ppc_r11);
3722 /* FIXME restore from MonoLMF: though this can't happen yet */
3724 ppc_mr (code, ppc_sp, ppc_r11);
3725 mono_add_patch_info (cfg, (guint8*) code - cfg->native_code, MONO_PATCH_INFO_METHOD_JUMP, ins->inst_p0);
3726 if (cfg->compile_aot) {
3727 /* arch_emit_got_access () patches this */
3728 ppc_load32 (code, ppc_r0, 0);
3729 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
3730 ppc_ldptr_indexed (code, ppc_r11, ppc_r30, ppc_r0);
3731 ppc_ldptr (code, ppc_r0, 0, ppc_r11);
3733 ppc_ldptr_indexed (code, ppc_r0, ppc_r30, ppc_r0);
3735 ppc_mtctr (code, ppc_r0);
3736 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
3743 /* ensure ins->sreg1 is not NULL */
3744 ppc_ldptr (code, ppc_r0, 0, ins->sreg1);
3747 long cookie_offset = cfg->sig_cookie + cfg->stack_usage;
3748 if (ppc_is_imm16 (cookie_offset)) {
3749 ppc_addi (code, ppc_r0, cfg->frame_reg, cookie_offset);
3751 ppc_load (code, ppc_r0, cookie_offset);
3752 ppc_add (code, ppc_r0, cfg->frame_reg, ppc_r0);
3754 ppc_stptr (code, ppc_r0, 0, ins->sreg1);
3763 call = (MonoCallInst*)ins;
3764 if (ins->flags & MONO_INST_HAS_METHOD)
3765 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_METHOD, call->method);
3767 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_ABS, call->fptr);
3768 if ((FORCE_INDIR_CALL || cfg->method->dynamic) && !cfg->compile_aot) {
3769 ppc_load_func (code, ppc_r0, 0);
3770 ppc_mtlr (code, ppc_r0);
3775 /* FIXME: this should be handled somewhere else in the new jit */
3776 code = emit_move_return_value (cfg, ins, code);
3782 case OP_VOIDCALL_REG:
3784 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
3785 ppc_ldptr (code, ppc_r0, 0, ins->sreg1);
3786 /* FIXME: if we know that this is a method, we
3787 can omit this load */
3788 ppc_ldptr (code, ppc_r2, 8, ins->sreg1);
3789 ppc_mtlr (code, ppc_r0);
3791 ppc_mtlr (code, ins->sreg1);
3794 /* FIXME: this should be handled somewhere else in the new jit */
3795 code = emit_move_return_value (cfg, ins, code);
3797 case OP_FCALL_MEMBASE:
3798 case OP_LCALL_MEMBASE:
3799 case OP_VCALL_MEMBASE:
3800 case OP_VCALL2_MEMBASE:
3801 case OP_VOIDCALL_MEMBASE:
3802 case OP_CALL_MEMBASE:
3803 ppc_ldptr (code, ppc_r0, ins->inst_offset, ins->sreg1);
3804 ppc_mtlr (code, ppc_r0);
3806 /* FIXME: this should be handled somewhere else in the new jit */
3807 code = emit_move_return_value (cfg, ins, code);
3810 guint8 * zero_loop_jump, * zero_loop_start;
3811 /* keep alignment */
3812 int alloca_waste = PPC_STACK_PARAM_OFFSET + cfg->param_area + 31;
3813 int area_offset = alloca_waste;
3815 ppc_addi (code, ppc_r11, ins->sreg1, alloca_waste + 31);
3816 /* FIXME: should be calculated from MONO_ARCH_FRAME_ALIGNMENT */
3817 ppc_clear_right_imm (code, ppc_r11, ppc_r11, 4);
3818 /* use ctr to store the number of words to 0 if needed */
3819 if (ins->flags & MONO_INST_INIT) {
3820 /* we zero 4 bytes at a time:
3821 * we add 7 instead of 3 so that we set the counter to
3822 * at least 1, otherwise the bdnz instruction will make
3823 * it negative and iterate billions of times.
3825 ppc_addi (code, ppc_r0, ins->sreg1, 7);
3826 ppc_shift_right_arith_imm (code, ppc_r0, ppc_r0, 2);
3827 ppc_mtctr (code, ppc_r0);
3829 ppc_ldptr (code, ppc_r0, 0, ppc_sp);
3830 ppc_neg (code, ppc_r11, ppc_r11);
3831 ppc_stptr_update_indexed (code, ppc_r0, ppc_sp, ppc_r11);
3833 /* FIXME: make this loop work in 8 byte
3834 increments on PPC64 */
3835 if (ins->flags & MONO_INST_INIT) {
3836 /* adjust the dest reg by -4 so we can use stwu */
3837 /* we actually adjust -8 because we let the loop
3840 ppc_addi (code, ins->dreg, ppc_sp, (area_offset - 8));
3841 ppc_li (code, ppc_r11, 0);
3842 zero_loop_start = code;
3843 ppc_stwu (code, ppc_r11, 4, ins->dreg);
3844 zero_loop_jump = code;
3845 ppc_bc (code, PPC_BR_DEC_CTR_NONZERO, 0, 0);
3846 ppc_patch (zero_loop_jump, zero_loop_start);
3848 ppc_addi (code, ins->dreg, ppc_sp, area_offset);
3853 ppc_mr (code, ppc_r3, ins->sreg1);
3854 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
3855 (gpointer)"mono_arch_throw_exception");
3856 if ((FORCE_INDIR_CALL || cfg->method->dynamic) && !cfg->compile_aot) {
3857 ppc_load_func (code, ppc_r0, 0);
3858 ppc_mtlr (code, ppc_r0);
3867 ppc_mr (code, ppc_r3, ins->sreg1);
3868 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
3869 (gpointer)"mono_arch_rethrow_exception");
3870 if ((FORCE_INDIR_CALL || cfg->method->dynamic) && !cfg->compile_aot) {
3871 ppc_load_func (code, ppc_r0, 0);
3872 ppc_mtlr (code, ppc_r0);
3879 case OP_START_HANDLER: {
3880 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
3881 g_assert (spvar->inst_basereg != ppc_sp);
3882 code = emit_reserve_param_area (cfg, code);
3883 ppc_mflr (code, ppc_r0);
3884 if (ppc_is_imm16 (spvar->inst_offset)) {
3885 ppc_stptr (code, ppc_r0, spvar->inst_offset, spvar->inst_basereg);
3887 ppc_load (code, ppc_r11, spvar->inst_offset);
3888 ppc_stptr_indexed (code, ppc_r0, ppc_r11, spvar->inst_basereg);
3892 case OP_ENDFILTER: {
3893 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
3894 g_assert (spvar->inst_basereg != ppc_sp);
3895 code = emit_unreserve_param_area (cfg, code);
3896 if (ins->sreg1 != ppc_r3)
3897 ppc_mr (code, ppc_r3, ins->sreg1);
3898 if (ppc_is_imm16 (spvar->inst_offset)) {
3899 ppc_ldptr (code, ppc_r0, spvar->inst_offset, spvar->inst_basereg);
3901 ppc_load (code, ppc_r11, spvar->inst_offset);
3902 ppc_ldptr_indexed (code, ppc_r0, spvar->inst_basereg, ppc_r11);
3904 ppc_mtlr (code, ppc_r0);
3908 case OP_ENDFINALLY: {
3909 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
3910 g_assert (spvar->inst_basereg != ppc_sp);
3911 code = emit_unreserve_param_area (cfg, code);
3912 ppc_ldptr (code, ppc_r0, spvar->inst_offset, spvar->inst_basereg);
3913 ppc_mtlr (code, ppc_r0);
3917 case OP_CALL_HANDLER:
3918 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_target_bb);
3922 ins->inst_c0 = code - cfg->native_code;
3925 /*if (ins->inst_target_bb->native_offset) {
3927 //x86_jump_code (code, cfg->native_code + ins->inst_target_bb->native_offset);
3929 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_target_bb);
3934 ppc_mtctr (code, ins->sreg1);
3935 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
3939 CASE_PPC64 (OP_LCEQ)
3940 ppc_li (code, ins->dreg, 0);
3941 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 2);
3942 ppc_li (code, ins->dreg, 1);
3948 CASE_PPC64 (OP_LCLT)
3949 CASE_PPC64 (OP_LCLT_UN)
3950 ppc_li (code, ins->dreg, 1);
3951 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 2);
3952 ppc_li (code, ins->dreg, 0);
3958 CASE_PPC64 (OP_LCGT)
3959 CASE_PPC64 (OP_LCGT_UN)
3960 ppc_li (code, ins->dreg, 1);
3961 ppc_bc (code, PPC_BR_TRUE, PPC_BR_GT, 2);
3962 ppc_li (code, ins->dreg, 0);
3964 case OP_COND_EXC_EQ:
3965 case OP_COND_EXC_NE_UN:
3966 case OP_COND_EXC_LT:
3967 case OP_COND_EXC_LT_UN:
3968 case OP_COND_EXC_GT:
3969 case OP_COND_EXC_GT_UN:
3970 case OP_COND_EXC_GE:
3971 case OP_COND_EXC_GE_UN:
3972 case OP_COND_EXC_LE:
3973 case OP_COND_EXC_LE_UN:
3974 EMIT_COND_SYSTEM_EXCEPTION (ins->opcode - OP_COND_EXC_EQ, ins->inst_p1);
3976 case OP_COND_EXC_IEQ:
3977 case OP_COND_EXC_INE_UN:
3978 case OP_COND_EXC_ILT:
3979 case OP_COND_EXC_ILT_UN:
3980 case OP_COND_EXC_IGT:
3981 case OP_COND_EXC_IGT_UN:
3982 case OP_COND_EXC_IGE:
3983 case OP_COND_EXC_IGE_UN:
3984 case OP_COND_EXC_ILE:
3985 case OP_COND_EXC_ILE_UN:
3986 EMIT_COND_SYSTEM_EXCEPTION (ins->opcode - OP_COND_EXC_IEQ, ins->inst_p1);
3998 EMIT_COND_BRANCH (ins, ins->opcode - OP_IBEQ);
4001 /* floating point opcodes */
4003 g_assert (cfg->compile_aot);
4005 /* FIXME: Optimize this */
4007 ppc_mflr (code, ppc_r11);
4009 *(double*)code = *(double*)ins->inst_p0;
4011 ppc_lfd (code, ins->dreg, 8, ppc_r11);
4014 g_assert_not_reached ();
4016 case OP_STORER8_MEMBASE_REG:
4017 if (ppc_is_imm16 (ins->inst_offset)) {
4018 ppc_stfd (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
4020 ppc_load (code, ppc_r0, ins->inst_offset);
4021 ppc_stfdx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
4024 case OP_LOADR8_MEMBASE:
4025 if (ppc_is_imm16 (ins->inst_offset)) {
4026 ppc_lfd (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
4028 ppc_load (code, ppc_r0, ins->inst_offset);
4029 ppc_lfdx (code, ins->dreg, ins->inst_destbasereg, ppc_r0);
4032 case OP_STORER4_MEMBASE_REG:
4033 ppc_frsp (code, ins->sreg1, ins->sreg1);
4034 if (ppc_is_imm16 (ins->inst_offset)) {
4035 ppc_stfs (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
4037 ppc_load (code, ppc_r0, ins->inst_offset);
4038 ppc_stfsx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
4041 case OP_LOADR4_MEMBASE:
4042 if (ppc_is_imm16 (ins->inst_offset)) {
4043 ppc_lfs (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
4045 ppc_load (code, ppc_r0, ins->inst_offset);
4046 ppc_lfsx (code, ins->dreg, ins->inst_destbasereg, ppc_r0);
4049 case OP_LOADR4_MEMINDEX:
4050 ppc_lfsx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
4052 case OP_LOADR8_MEMINDEX:
4053 ppc_lfdx (code, ins->dreg, ins->sreg2, ins->inst_basereg);
4055 case OP_STORER4_MEMINDEX:
4056 ppc_frsp (code, ins->sreg1, ins->sreg1);
4057 ppc_stfsx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
4059 case OP_STORER8_MEMINDEX:
4060 ppc_stfdx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
4063 case CEE_CONV_R4: /* FIXME: change precision */
4065 g_assert_not_reached ();
4066 case OP_FCONV_TO_I1:
4067 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 1, TRUE);
4069 case OP_FCONV_TO_U1:
4070 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 1, FALSE);
4072 case OP_FCONV_TO_I2:
4073 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 2, TRUE);
4075 case OP_FCONV_TO_U2:
4076 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 2, FALSE);
4078 case OP_FCONV_TO_I4:
4080 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 4, TRUE);
4082 case OP_FCONV_TO_U4:
4084 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 4, FALSE);
4086 case OP_LCONV_TO_R_UN:
4087 g_assert_not_reached ();
4088 /* Implemented as helper calls */
4090 case OP_LCONV_TO_OVF_I4_2:
4091 case OP_LCONV_TO_OVF_I: {
4092 #ifdef __mono_ppc64__
4095 guint8 *negative_branch, *msword_positive_branch, *msword_negative_branch, *ovf_ex_target;
4096 // Check if its negative
4097 ppc_cmpi (code, 0, 0, ins->sreg1, 0);
4098 negative_branch = code;
4099 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 0);
4100 // Its positive msword == 0
4101 ppc_cmpi (code, 0, 0, ins->sreg2, 0);
4102 msword_positive_branch = code;
4103 ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
4105 ovf_ex_target = code;
4106 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_ALWAYS, 0, "OverflowException");
4108 ppc_patch (negative_branch, code);
4109 ppc_cmpi (code, 0, 0, ins->sreg2, -1);
4110 msword_negative_branch = code;
4111 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4112 ppc_patch (msword_negative_branch, ovf_ex_target);
4114 ppc_patch (msword_positive_branch, code);
4115 if (ins->dreg != ins->sreg1)
4116 ppc_mr (code, ins->dreg, ins->sreg1);
4121 ppc_fsqrtd (code, ins->dreg, ins->sreg1);
4124 ppc_fadd (code, ins->dreg, ins->sreg1, ins->sreg2);
4127 ppc_fsub (code, ins->dreg, ins->sreg1, ins->sreg2);
4130 ppc_fmul (code, ins->dreg, ins->sreg1, ins->sreg2);
4133 ppc_fdiv (code, ins->dreg, ins->sreg1, ins->sreg2);
4136 ppc_fneg (code, ins->dreg, ins->sreg1);
4140 g_assert_not_reached ();
4143 ppc_fcmpu (code, 0, ins->sreg1, ins->sreg2);
4146 ppc_fcmpo (code, 0, ins->sreg1, ins->sreg2);
4147 ppc_li (code, ins->dreg, 0);
4148 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 2);
4149 ppc_li (code, ins->dreg, 1);
4152 ppc_fcmpo (code, 0, ins->sreg1, ins->sreg2);
4153 ppc_li (code, ins->dreg, 1);
4154 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 2);
4155 ppc_li (code, ins->dreg, 0);
4158 ppc_fcmpu (code, 0, ins->sreg1, ins->sreg2);
4159 ppc_li (code, ins->dreg, 1);
4160 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 3);
4161 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 2);
4162 ppc_li (code, ins->dreg, 0);
4165 ppc_fcmpo (code, 0, ins->sreg1, ins->sreg2);
4166 ppc_li (code, ins->dreg, 1);
4167 ppc_bc (code, PPC_BR_TRUE, PPC_BR_GT, 2);
4168 ppc_li (code, ins->dreg, 0);
4171 ppc_fcmpu (code, 0, ins->sreg1, ins->sreg2);
4172 ppc_li (code, ins->dreg, 1);
4173 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 3);
4174 ppc_bc (code, PPC_BR_TRUE, PPC_BR_GT, 2);
4175 ppc_li (code, ins->dreg, 0);
4178 EMIT_COND_BRANCH (ins, CEE_BEQ - CEE_BEQ);
4181 EMIT_COND_BRANCH (ins, CEE_BNE_UN - CEE_BEQ);
4184 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4185 EMIT_COND_BRANCH (ins, CEE_BLT - CEE_BEQ);
4188 EMIT_COND_BRANCH_FLAGS (ins, PPC_BR_TRUE, PPC_BR_SO);
4189 EMIT_COND_BRANCH (ins, CEE_BLT_UN - CEE_BEQ);
4192 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4193 EMIT_COND_BRANCH (ins, CEE_BGT - CEE_BEQ);
4196 EMIT_COND_BRANCH_FLAGS (ins, PPC_BR_TRUE, PPC_BR_SO);
4197 EMIT_COND_BRANCH (ins, CEE_BGT_UN - CEE_BEQ);
4200 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4201 EMIT_COND_BRANCH (ins, CEE_BGE - CEE_BEQ);
4204 EMIT_COND_BRANCH (ins, CEE_BGE_UN - CEE_BEQ);
4207 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4208 EMIT_COND_BRANCH (ins, CEE_BLE - CEE_BEQ);
4211 EMIT_COND_BRANCH (ins, CEE_BLE_UN - CEE_BEQ);
4214 g_assert_not_reached ();
4215 case OP_CHECK_FINITE: {
4216 ppc_rlwinm (code, ins->sreg1, ins->sreg1, 0, 1, 31);
4217 ppc_addis (code, ins->sreg1, ins->sreg1, -32752);
4218 ppc_rlwinmd (code, ins->sreg1, ins->sreg1, 1, 31, 31);
4219 EMIT_COND_SYSTEM_EXCEPTION (CEE_BEQ - CEE_BEQ, "ArithmeticException");
4222 mono_add_patch_info (cfg, offset, (MonoJumpInfoType)ins->inst_i1, ins->inst_p0);
4223 #ifdef __mono_ppc64__
4224 ppc_load_sequence (code, ins->dreg, (gulong)0x0f0f0f0f0f0f0f0fL);
4226 ppc_load_sequence (code, ins->dreg, (gulong)0x0f0f0f0fL);
4231 #ifdef __mono_ppc64__
4232 case OP_ICONV_TO_I4:
4234 ppc_extsw (code, ins->dreg, ins->sreg1);
4236 case OP_ICONV_TO_U4:
4238 ppc_clrldi (code, ins->dreg, ins->sreg1, 32);
4240 case OP_ICONV_TO_R4:
4241 case OP_ICONV_TO_R8:
4242 case OP_LCONV_TO_R4:
4243 case OP_LCONV_TO_R8: {
4245 if (ins->opcode == OP_ICONV_TO_R4 || ins->opcode == OP_ICONV_TO_R8) {
4246 ppc_extsw (code, ppc_r0, ins->sreg1);
4251 ppc_stptr (code, tmp, -8, ppc_r1);
4252 ppc_lfd (code, ins->dreg, -8, ppc_r1);
4253 ppc_fcfid (code, ins->dreg, ins->dreg);
4254 if (ins->opcode == OP_ICONV_TO_R4 || ins->opcode == OP_LCONV_TO_R4)
4255 ppc_frsp (code, ins->dreg, ins->dreg);
4259 ppc_srad (code, ins->dreg, ins->sreg1, ins->sreg2);
4262 ppc_srd (code, ins->dreg, ins->sreg1, ins->sreg2);
4265 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
4267 ppc_mfspr (code, ppc_r0, ppc_xer);
4268 ppc_andisd (code, ppc_r0, ppc_r0, (1 << 13)); /* CA */
4269 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, ins->inst_p1);
4271 case OP_COND_EXC_OV:
4272 ppc_mfspr (code, ppc_r0, ppc_xer);
4273 ppc_andisd (code, ppc_r0, ppc_r0, (1 << 14)); /* OV */
4274 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, ins->inst_p1);
4286 EMIT_COND_BRANCH (ins, ins->opcode - OP_LBEQ);
4288 case OP_FCONV_TO_I8:
4289 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 8, TRUE);
4291 case OP_FCONV_TO_U8:
4292 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 8, FALSE);
4294 case OP_STOREI4_MEMBASE_REG:
4295 if (ppc_is_imm16 (ins->inst_offset)) {
4296 ppc_stw (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
4298 ppc_load (code, ppc_r0, ins->inst_offset);
4299 ppc_stwx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
4302 case OP_STOREI4_MEMINDEX:
4303 ppc_stwx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
4306 ppc_srawi (code, ins->dreg, ins->sreg1, (ins->inst_imm & 0x1f));
4308 case OP_ISHR_UN_IMM:
4309 if (ins->inst_imm & 0x1f)
4310 ppc_srwi (code, ins->dreg, ins->sreg1, (ins->inst_imm & 0x1f));
4312 ppc_mr (code, ins->dreg, ins->sreg1);
4314 case OP_ATOMIC_ADD_NEW_I4:
4315 case OP_ATOMIC_ADD_NEW_I8: {
4316 guint8 *loop = code, *branch;
4317 g_assert (ins->inst_offset == 0);
4318 if (ins->opcode == OP_ATOMIC_ADD_NEW_I4)
4319 ppc_lwarx (code, ppc_r0, 0, ins->inst_basereg);
4321 ppc_ldarx (code, ppc_r0, 0, ins->inst_basereg);
4322 ppc_add (code, ppc_r0, ppc_r0, ins->sreg2);
4323 if (ins->opcode == OP_ATOMIC_ADD_NEW_I4)
4324 ppc_stwcxd (code, ppc_r0, 0, ins->inst_basereg);
4326 ppc_stdcxd (code, ppc_r0, 0, ins->inst_basereg);
4328 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4329 ppc_patch (branch, loop);
4330 ppc_mr (code, ins->dreg, ppc_r0);
4334 case OP_ATOMIC_CAS_I4:
4335 CASE_PPC64 (OP_ATOMIC_CAS_I8) {
4336 int location = ins->sreg1;
4337 int value = ins->sreg2;
4338 int comparand = ins->sreg3;
4339 guint8 *start, *not_equal, *lost_reservation;
4342 if (ins->opcode == OP_ATOMIC_CAS_I4)
4343 ppc_lwarx (code, ppc_r0, 0, location);
4344 #ifdef __mono_ppc64__
4346 ppc_ldarx (code, ppc_r0, 0, location);
4348 ppc_cmp (code, 0, ins->opcode == OP_ATOMIC_CAS_I4 ? 0 : 1, ppc_r0, comparand);
4351 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4352 if (ins->opcode == OP_ATOMIC_CAS_I4)
4353 ppc_stwcxd (code, value, 0, location);
4354 #ifdef __mono_ppc64__
4356 ppc_stdcxd (code, value, 0, location);
4359 lost_reservation = code;
4360 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4361 ppc_patch (lost_reservation, start);
4363 ppc_patch (not_equal, code);
4364 ppc_mr (code, ins->dreg, ppc_r0);
4369 g_warning ("unknown opcode %s in %s()\n", mono_inst_name (ins->opcode), __FUNCTION__);
4370 g_assert_not_reached ();
4373 if ((cfg->opt & MONO_OPT_BRANCH) && ((code - cfg->native_code - offset) > max_len)) {
4374 g_warning ("wrong maximal instruction length of instruction %s (expected %d, got %ld)",
4375 mono_inst_name (ins->opcode), max_len, (glong)(code - cfg->native_code - offset));
4376 g_assert_not_reached ();
4382 last_offset = offset;
4385 cfg->code_len = code - cfg->native_code;
/*
 * mono_arch_register_lowlevel_calls:
 *
 * Register the arch-specific low-level icalls with the JIT so that
 * patch targets such as "mono_ppc_throw_exception" can later be
 * resolved by name.  A dummy "void" signature is registered because
 * these are only ever reached from generated code, never invoked
 * through the managed-call machinery.
 */
4389 mono_arch_register_lowlevel_calls (void)
4391 /* The signature doesn't matter */
4392 mono_register_jit_icall (mono_ppc_throw_exception, "mono_ppc_throw_exception", mono_create_icall_signature ("void"), TRUE);
/*
 * patch_load_sequence(ip, val):
 *
 * Rewrite the 16-bit immediates of an already-emitted "load constant
 * into register" instruction sequence at 'ip' so that it loads 'val'
 * instead.  On ppc64 four halfwords of the multi-instruction load
 * sequence are patched (guint16 indices 1, 3, 7, 9); on 32-bit ppc
 * the immediates of the lis/ori pair are patched (indices 1, 3).
 * NOTE(review): the halfword indices assume big-endian instruction
 * layout — confirm against the emitter in the full file.  The `#else`
 * separating the two definitions is not visible in this extract.
 */
4395 #ifdef __mono_ppc64__
4396 #define patch_load_sequence(ip,val) do {\
4397 guint16 *__load = (guint16*)(ip); \
4398 __load [1] = (((guint64)(val)) >> 48) & 0xffff; \
4399 __load [3] = (((guint64)(val)) >> 32) & 0xffff; \
4400 __load [7] = (((guint64)(val)) >> 16) & 0xffff; \
4401 __load [9] = ((guint64)(val)) & 0xffff; \
4404 #define patch_load_sequence(ip,val) do {\
4405 guint16 *__lis_ori = (guint16*)(ip); \
4406 __lis_ori [1] = (((gulong)(val)) >> 16) & 0xffff; \
4407 __lis_ori [3] = ((gulong)(val)) & 0xffff; \
/*
 * mono_arch_patch_code:
 *
 * Walk the MonoJumpInfo list 'ji' and rewrite the already-emitted
 * instructions at each patch site inside 'code' with the resolved
 * target address.  Branch-style patches (BB/LABEL) are skipped in a
 * first dispatch; value-style patches overwrite the immediates of a
 * register load sequence; everything else falls through to
 * ppc_patch_full at the bottom.
 * NOTE(review): this extract omits several lines (breaks, braces,
 * and the code that sets is_fd for function-descriptor calls) —
 * comments below describe only the visible code.
 */
4412 mono_arch_patch_code (MonoMethod *method, MonoDomain *domain, guint8 *code, MonoJumpInfo *ji, gboolean run_cctors)
4414 MonoJumpInfo *patch_info;
4415 gboolean compile_aot = !run_cctors;
4417 for (patch_info = ji; patch_info; patch_info = patch_info->next) {
4418 unsigned char *ip = patch_info->ip.i + code;
4419 unsigned char *target;
4420 gboolean is_fd = FALSE;
4422 target = mono_resolve_patch_target (method, domain, code, patch_info, run_cctors);
4425 switch (patch_info->type) {
4426 case MONO_PATCH_INFO_BB:
4427 case MONO_PATCH_INFO_LABEL:
4430 /* No need to patch these */
4435 switch (patch_info->type) {
4436 case MONO_PATCH_INFO_IP:
/* Store the patch site's own address through the load sequence. */
4437 patch_load_sequence (ip, ip);
4439 case MONO_PATCH_INFO_METHOD_REL:
4440 g_assert_not_reached ();
4441 *((gpointer *)(ip)) = code + patch_info->data.offset;
/* Jump table: point the load sequence at the table, then convert the
 * table's code-relative entries into absolute addresses in place. */
4443 case MONO_PATCH_INFO_SWITCH: {
4444 gpointer *table = (gpointer *)patch_info->data.table->table;
4447 patch_load_sequence (ip, table);
4449 for (i = 0; i < patch_info->data.table->table_size; i++) {
4450 table [i] = (glong)patch_info->data.table->table [i] + code;
4452 /* we put into the table the absolute address, no need for ppc_patch in this case */
4455 case MONO_PATCH_INFO_METHODCONST:
4456 case MONO_PATCH_INFO_CLASS:
4457 case MONO_PATCH_INFO_IMAGE:
4458 case MONO_PATCH_INFO_FIELD:
4459 case MONO_PATCH_INFO_VTABLE:
4460 case MONO_PATCH_INFO_IID:
4461 case MONO_PATCH_INFO_SFLDA:
4462 case MONO_PATCH_INFO_LDSTR:
4463 case MONO_PATCH_INFO_TYPE_FROM_HANDLE:
4464 case MONO_PATCH_INFO_LDTOKEN:
4465 /* from OP_AOTCONST : lis + ori */
4466 patch_load_sequence (ip, target);
4468 case MONO_PATCH_INFO_R4:
4469 case MONO_PATCH_INFO_R8:
4470 g_assert_not_reached ();
4471 *((gconstpointer *)(ip + 2)) = patch_info->data.target;
4473 case MONO_PATCH_INFO_EXC_NAME:
4474 g_assert_not_reached ();
4475 *((gconstpointer *)(ip + 1)) = patch_info->data.name;
4477 case MONO_PATCH_INFO_NONE:
4478 case MONO_PATCH_INFO_BB_OVF:
4479 case MONO_PATCH_INFO_EXC_OVF:
4480 /* everything is dealt with at epilog output time */
/* NOTE(review): the body that sets is_fd = TRUE for these
 * descriptor-based call targets is not visible in this extract —
 * confirm against the full file. */
4482 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
4483 case MONO_PATCH_INFO_INTERNAL_METHOD:
4484 case MONO_PATCH_INFO_ABS:
4485 case MONO_PATCH_INFO_CLASS_INIT:
4486 case MONO_PATCH_INFO_RGCTX_FETCH:
/* Default: patch the branch/call instruction at the site itself;
 * is_fd selects function-descriptor handling. */
4493 ppc_patch_full (ip, target, is_fd);
4498 * Emit code to save the registers in used_int_regs or the registers in the MonoLMF
4499 * structure at positive offset pos from register base_reg. pos is guaranteed to fit into
4500 * the instruction offset immediate for all the registers.
4503 save_registers (MonoCompile *cfg, guint8* code, int pos, int base_reg, gboolean save_lmf, guint32 used_int_regs, int cfa_offset)
/* Non-LMF case: store only the callee-saved GPRs (r13..r31) that this
 * method actually uses, at consecutive slots starting at pos, emitting
 * unwind info for each (the guarding `if (!save_lmf)` line is not
 * visible in this extract). */
4507 for (i = 13; i <= 31; i++) {
4508 if (used_int_regs & (1 << i)) {
4509 ppc_str (code, i, pos, base_reg);
4510 mono_emit_unwind_op_offset (cfg, code, i, pos - cfa_offset);
4511 pos += sizeof (mgreg_t);
4515 /* pos is the start of the MonoLMF structure */
/* LMF case: store all of r13..r31 into MonoLMF.iregs ... */
4516 int offset = pos + G_STRUCT_OFFSET (MonoLMF, iregs);
4517 for (i = 13; i <= 31; i++) {
4518 ppc_str (code, i, offset, base_reg);
4519 mono_emit_unwind_op_offset (cfg, code, i, offset - cfa_offset);
4520 offset += sizeof (mgreg_t);
/* ... then the callee-saved FP registers f14..f31 into MonoLMF.fregs. */
4522 offset = pos + G_STRUCT_OFFSET (MonoLMF, fregs);
4523 for (i = 14; i < 32; i++) {
4524 ppc_stfd (code, i, offset, base_reg);
4525 offset += sizeof (gdouble);
4532 * Stack frame layout:
4534 * ------------------- sp
4535 * MonoLMF structure or saved registers
4536 * -------------------
4537 * spilled regs
4538 * -------------------
4539 * locals
4540 * -------------------
4541 * optional 8 bytes for tracing
4542 * -------------------
4543 * param area size is cfg->param_area
4544 * -------------------
4545 * linkage area size is PPC_STACK_PARAM_OFFSET
4546 * ------------------- sp
/*
 * mono_arch_emit_prolog:
 *
 * Emit the per-method entry sequence: allocate the native code buffer,
 * save the link register and the callee-saved registers, set up the
 * stack frame (and, for method->save_lmf, build and link a MonoLMF),
 * then move every incoming argument from its calling-convention
 * location (register or caller frame) into the slot or register the
 * method body expects.  Unwind info is emitted alongside each
 * frame-changing instruction.  See the "Stack frame layout" comment
 * above this function.
 * NOTE(review): this extract omits several lines (braces, else
 * branches); comments below describe only the visible code.
 */
4550 mono_arch_emit_prolog (MonoCompile *cfg)
4552 MonoMethod *method = cfg->method;
4554 MonoMethodSignature *sig;
4556 long alloc_size, pos, max_offset, cfa_offset;
4562 int tailcall_struct_index;
4564 if (mono_jit_trace_calls != NULL && mono_trace_eval (method))
4567 /* We currently emit unwind info for aot, but don't use it */
4568 mono_emit_unwind_op_def_cfa (cfg, code, ppc_r1, 0);
4570 sig = mono_method_signature (method);
4571 cfg->code_size = MONO_PPC_32_64_CASE (260, 384) + sig->param_count * 20;
4572 code = cfg->native_code = g_malloc (cfg->code_size);
/* Save the return address into the caller's frame; the "1 ||" makes
 * this unconditional regardless of MONO_CFG_HAS_CALLS. */
4576 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
4577 ppc_mflr (code, ppc_r0);
4578 ppc_str (code, ppc_r0, PPC_RET_ADDR_OFFSET, ppc_sp);
4580 mono_emit_unwind_op_offset (cfg, code, ppc_lr, PPC_RET_ADDR_OFFSET);
/* pos accumulates the bytes reserved at the top of the frame: one slot
 * per used callee-saved GPR, or a whole MonoLMF when save_lmf is set. */
4583 alloc_size = cfg->stack_offset;
4586 if (!method->save_lmf) {
4587 for (i = 31; i >= 13; --i) {
4588 if (cfg->used_int_regs & (1 << i)) {
4589 pos += sizeof (gulong);
4593 pos += sizeof (MonoLMF);
4597 // align to MONO_ARCH_FRAME_ALIGNMENT bytes
4598 if (alloc_size & (MONO_ARCH_FRAME_ALIGNMENT - 1)) {
4599 alloc_size += MONO_ARCH_FRAME_ALIGNMENT - 1;
4600 alloc_size &= ~(MONO_ARCH_FRAME_ALIGNMENT - 1);
4603 cfg->stack_usage = alloc_size;
4604 g_assert ((alloc_size & (MONO_ARCH_FRAME_ALIGNMENT-1)) == 0);
/* Create the frame with a single store-with-update when -alloc_size
 * fits in a signed 16-bit immediate; otherwise load the displacement
 * into r0 and use the indexed update form, keeping a pointer to the
 * register-save area in r11 for save_registers. */
4606 if (ppc_is_imm16 (-alloc_size)) {
4607 ppc_str_update (code, ppc_sp, -alloc_size, ppc_sp);
4608 cfa_offset = alloc_size;
4609 mono_emit_unwind_op_def_cfa_offset (cfg, code, alloc_size);
4610 code = save_registers (cfg, code, alloc_size - pos, ppc_sp, method->save_lmf, cfg->used_int_regs, cfa_offset);
4613 ppc_addi (code, ppc_r11, ppc_sp, -pos);
4614 ppc_load (code, ppc_r0, -alloc_size);
4615 ppc_str_update_indexed (code, ppc_sp, ppc_sp, ppc_r0);
4616 cfa_offset = alloc_size;
4617 mono_emit_unwind_op_def_cfa_offset (cfg, code, alloc_size);
4618 code = save_registers (cfg, code, 0, ppc_r11, method->save_lmf, cfg->used_int_regs, cfa_offset);
/* Switch the CFA to the dedicated frame register when one is used. */
4621 if (cfg->frame_reg != ppc_sp) {
4622 ppc_mr (code, cfg->frame_reg, ppc_sp);
4623 mono_emit_unwind_op_def_cfa_reg (cfg, code, cfg->frame_reg);
4626 /* store runtime generic context */
4627 if (cfg->rgctx_var) {
4628 g_assert (cfg->rgctx_var->opcode == OP_REGOFFSET &&
4629 (cfg->rgctx_var->inst_basereg == ppc_r1 || cfg->rgctx_var->inst_basereg == ppc_r31));
4631 ppc_stptr (code, MONO_ARCH_RGCTX_REG, cfg->rgctx_var->inst_offset, cfg->rgctx_var->inst_basereg);
4634 /* compute max_offset in order to use short forward jumps
4635 * we always do it on ppc because the immediate displacement
4636 * for jumps is too small
4639 for (bb = cfg->bb_entry; bb; bb = bb->next_bb) {
4641 bb->max_offset = max_offset;
4643 if (cfg->prof_options & MONO_PROFILE_COVERAGE)
4646 MONO_BB_FOR_EACH_INS (bb, ins)
4647 max_offset += ins_native_length (cfg, ins);
4650 /* load arguments allocated to register from the stack */
4653 cinfo = calculate_sizes (sig, sig->pinvoke);
/* Valuetype return: spill the vret-address argument into its slot. */
4655 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
4656 ArgInfo *ainfo = &cinfo->ret;
4658 inst = cfg->vret_addr;
4661 if (ppc_is_imm16 (inst->inst_offset)) {
4662 ppc_stptr (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4664 ppc_load (code, ppc_r11, inst->inst_offset);
4665 ppc_stptr_indexed (code, ainfo->reg, ppc_r11, inst->inst_basereg);
/* Move each incoming argument (including the implicit 'this') to the
 * location the register allocator assigned it. */
4669 tailcall_struct_index = 0;
4670 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
4671 ArgInfo *ainfo = cinfo->args + i;
4672 inst = cfg->args [pos];
4674 if (cfg->verbose_level > 2)
4675 g_print ("Saving argument %d (type: %d)\n", i, ainfo->regtype);
4676 if (inst->opcode == OP_REGVAR) {
4677 if (ainfo->regtype == RegTypeGeneral)
4678 ppc_mr (code, inst->dreg, ainfo->reg);
4679 else if (ainfo->regtype == RegTypeFP)
4680 ppc_fmr (code, inst->dreg, ainfo->reg);
4681 else if (ainfo->regtype == RegTypeBase) {
4682 ppc_ldptr (code, ppc_r11, 0, ppc_sp);
4683 ppc_ldptr (code, inst->dreg, ainfo->offset, ppc_r11);
4685 g_assert_not_reached ();
4687 if (cfg->verbose_level > 2)
4688 g_print ("Argument %ld assigned to register %s\n", pos, mono_arch_regname (inst->dreg));
4690 /* the argument should be put on the stack: FIXME handle size != word */
4691 if (ainfo->regtype == RegTypeGeneral) {
4692 switch (ainfo->size) {
4694 if (ppc_is_imm16 (inst->inst_offset)) {
4695 ppc_stb (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4697 ppc_load (code, ppc_r11, inst->inst_offset);
4698 ppc_stbx (code, ainfo->reg, ppc_r11, inst->inst_basereg);
4702 if (ppc_is_imm16 (inst->inst_offset)) {
4703 ppc_sth (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4705 ppc_load (code, ppc_r11, inst->inst_offset);
4706 ppc_sthx (code, ainfo->reg, ppc_r11, inst->inst_basereg);
4709 #ifdef __mono_ppc64__
4711 if (ppc_is_imm16 (inst->inst_offset)) {
4712 ppc_stw (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4714 ppc_load (code, ppc_r11, inst->inst_offset);
4715 ppc_stwx (code, ainfo->reg, ppc_r11, inst->inst_basereg);
/* 32-bit: a 64-bit argument occupies a register pair (reg, reg+1). */
4720 if (ppc_is_imm16 (inst->inst_offset + 4)) {
4721 ppc_stw (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4722 ppc_stw (code, ainfo->reg + 1, inst->inst_offset + 4, inst->inst_basereg);
4724 ppc_load (code, ppc_r11, inst->inst_offset);
4725 ppc_add (code, ppc_r11, ppc_r11, inst->inst_basereg);
4726 ppc_stw (code, ainfo->reg, 0, ppc_r11);
4727 ppc_stw (code, ainfo->reg + 1, 4, ppc_r11);
4732 if (ppc_is_imm16 (inst->inst_offset)) {
4733 ppc_stptr (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4735 ppc_load (code, ppc_r11, inst->inst_offset);
4736 ppc_stptr_indexed (code, ainfo->reg, ppc_r11, inst->inst_basereg);
/* Argument arrived on the caller's stack: fetch it through the saved
 * previous stack pointer and store it into the local slot. */
4740 } else if (ainfo->regtype == RegTypeBase) {
4741 /* load the previous stack pointer in r11 */
4742 ppc_ldptr (code, ppc_r11, 0, ppc_sp);
4743 ppc_ldptr (code, ppc_r0, ainfo->offset, ppc_r11);
4744 switch (ainfo->size) {
4746 if (ppc_is_imm16 (inst->inst_offset)) {
4747 ppc_stb (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4749 ppc_load (code, ppc_r11, inst->inst_offset);
4750 ppc_stbx (code, ppc_r0, ppc_r11, inst->inst_basereg);
4754 if (ppc_is_imm16 (inst->inst_offset)) {
4755 ppc_sth (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4757 ppc_load (code, ppc_r11, inst->inst_offset);
4758 ppc_sthx (code, ppc_r0, ppc_r11, inst->inst_basereg);
4761 #ifdef __mono_ppc64__
4763 if (ppc_is_imm16 (inst->inst_offset)) {
4764 ppc_stw (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4766 ppc_load (code, ppc_r11, inst->inst_offset);
4767 ppc_stwx (code, ppc_r0, ppc_r11, inst->inst_basereg);
4772 if (ppc_is_imm16 (inst->inst_offset + 4)) {
4773 ppc_stw (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4774 ppc_lwz (code, ppc_r0, ainfo->offset + 4, ppc_r11);
4775 ppc_stw (code, ppc_r0, inst->inst_offset + 4, inst->inst_basereg);
4778 g_assert_not_reached ();
4783 if (ppc_is_imm16 (inst->inst_offset)) {
4784 ppc_stptr (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
4786 ppc_load (code, ppc_r11, inst->inst_offset);
4787 ppc_stptr_indexed (code, ppc_r0, ppc_r11, inst->inst_basereg);
4791 } else if (ainfo->regtype == RegTypeFP) {
4792 g_assert (ppc_is_imm16 (inst->inst_offset));
4793 if (ainfo->size == 8)
4794 ppc_stfd (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4795 else if (ainfo->size == 4)
4796 ppc_stfs (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4798 g_assert_not_reached ();
/* Struct passed by value in registers: spill each register-sized piece
 * to the local copy, then memcpy any stack-resident remainder. */
4799 } else if (ainfo->regtype == RegTypeStructByVal) {
4800 int doffset = inst->inst_offset;
4804 g_assert (ppc_is_imm16 (inst->inst_offset));
4805 g_assert (ppc_is_imm16 (inst->inst_offset + ainfo->size * sizeof (gpointer)));
4806 /* FIXME: what if there is no class? */
4807 if (sig->pinvoke && mono_class_from_mono_type (inst->inst_vtype))
4808 size = mono_class_native_size (mono_class_from_mono_type (inst->inst_vtype), NULL);
4809 for (cur_reg = 0; cur_reg < ainfo->size; ++cur_reg) {
4812 * Darwin handles 1 and 2 byte
4813 * structs specially by
4814 * loading h/b into the arg
4815 * register. Only done for
4819 ppc_sth (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
4821 ppc_stb (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
4825 #ifdef __mono_ppc64__
4827 g_assert (cur_reg == 0);
4828 ppc_sldi (code, ppc_r0, ainfo->reg,
4829 (sizeof (gpointer) - ainfo->bytes) * 8);
4830 ppc_stptr (code, ppc_r0, doffset, inst->inst_basereg);
4834 ppc_stptr (code, ainfo->reg + cur_reg, doffset,
4835 inst->inst_basereg);
4838 soffset += sizeof (gpointer);
4839 doffset += sizeof (gpointer);
4841 if (ainfo->vtsize) {
4842 /* FIXME: we need to do the shifting here, too */
4845 /* load the previous stack pointer in r11 (r0 gets overwritten by the memcpy) */
4846 ppc_ldptr (code, ppc_r11, 0, ppc_sp);
4847 if ((size & MONO_PPC_32_64_CASE (3, 7)) != 0) {
4848 code = emit_memcpy (code, size - soffset,
4849 inst->inst_basereg, doffset,
4850 ppc_r11, ainfo->offset + soffset);
4852 code = emit_memcpy (code, ainfo->vtsize * sizeof (gpointer),
4853 inst->inst_basereg, doffset,
4854 ppc_r11, ainfo->offset + soffset);
4857 } else if (ainfo->regtype == RegTypeStructByAddr) {
4858 /* if it was originally a RegTypeBase */
4859 if (ainfo->offset) {
4860 /* load the previous stack pointer in r11 */
4861 ppc_ldptr (code, ppc_r11, 0, ppc_sp);
4862 ppc_ldptr (code, ppc_r11, ainfo->offset, ppc_r11);
4864 ppc_mr (code, ppc_r11, ainfo->reg);
4867 if (cfg->tailcall_valuetype_addrs) {
4868 MonoInst *addr = cfg->tailcall_valuetype_addrs [tailcall_struct_index];
4870 g_assert (ppc_is_imm16 (addr->inst_offset));
4871 ppc_stptr (code, ppc_r11, addr->inst_offset, addr->inst_basereg);
4873 tailcall_struct_index++;
4876 g_assert (ppc_is_imm16 (inst->inst_offset));
4877 code = emit_memcpy (code, ainfo->vtsize, inst->inst_basereg, inst->inst_offset, ppc_r11, 0);
4878 /*g_print ("copy in %s: %d bytes from %d to offset: %d\n", method->name, ainfo->vtsize, ainfo->reg, inst->inst_offset);*/
4880 g_assert_not_reached ();
/* Native-to-managed wrappers attach the thread to the runtime first. */
4885 if (method->wrapper_type == MONO_WRAPPER_NATIVE_TO_MANAGED) {
4886 ppc_load (code, ppc_r3, cfg->domain);
4887 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD, (gpointer)"mono_jit_thread_attach");
4888 if ((FORCE_INDIR_CALL || cfg->method->dynamic) && !cfg->compile_aot) {
4889 ppc_load_func (code, ppc_r0, 0);
4890 ppc_mtlr (code, ppc_r0);
/* LMF setup: obtain the per-thread lmf_addr into r3, either directly
 * via TLS (fast path) or by calling mono_get_lmf_addr. */
4897 if (method->save_lmf) {
4898 if (lmf_pthread_key != -1) {
4899 emit_tls_access (code, ppc_r3, lmf_pthread_key);
4900 if (tls_mode != TLS_MODE_NPTL && G_STRUCT_OFFSET (MonoJitTlsData, lmf))
4901 ppc_addi (code, ppc_r3, ppc_r3, G_STRUCT_OFFSET (MonoJitTlsData, lmf));
4903 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
4904 (gpointer)"mono_get_lmf_addr");
4905 if ((FORCE_INDIR_CALL || cfg->method->dynamic) && !cfg->compile_aot) {
4906 ppc_load_func (code, ppc_r0, 0);
4907 ppc_mtlr (code, ppc_r0);
4913 /* we build the MonoLMF structure on the stack - see mini-ppc.h */
4914 /* lmf_offset is the offset from the previous stack pointer,
4915 * alloc_size is the total stack space allocated, so the offset
4916 * of MonoLMF from the current stack ptr is alloc_size - lmf_offset.
4917 * The pointer to the struct is put in ppc_r11 (new_lmf).
4918 * The callee-saved registers are already in the MonoLMF structure
4920 ppc_addi (code, ppc_r11, ppc_sp, alloc_size - lmf_offset);
4921 /* ppc_r3 is the result from mono_get_lmf_addr () */
4922 ppc_stptr (code, ppc_r3, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
4923 /* new_lmf->previous_lmf = *lmf_addr */
4924 ppc_ldptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
4925 ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
4926 /* *(lmf_addr) = r11 */
4927 ppc_stptr (code, ppc_r11, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
4928 /* save method info */
4929 if (cfg->compile_aot)
4931 ppc_load (code, ppc_r0, 0);
4933 ppc_load (code, ppc_r0, method);
4934 ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, method), ppc_r11);
4935 ppc_stptr (code, ppc_sp, G_STRUCT_OFFSET(MonoLMF, ebp), ppc_r11);
4936 /* save the current IP */
4937 if (cfg->compile_aot) {
4939 ppc_mflr (code, ppc_r0);
4941 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_IP, NULL);
4942 #ifdef __mono_ppc64__
4943 ppc_load_sequence (code, ppc_r0, (gulong)0x0101010101010101L);
4945 ppc_load_sequence (code, ppc_r0, (gulong)0x01010101L);
4948 ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, eip), ppc_r11);
4952 code = mono_arch_instrument_prolog (cfg, mono_trace_enter_method, code, TRUE);
4954 cfg->code_len = code - cfg->native_code;
4955 g_assert (cfg->code_len <= cfg->code_size);
/*
 * mono_arch_emit_epilog:
 *
 * Emit the method exit sequence: grow the code buffer if the worst-case
 * epilog would not fit, pop/unlink the MonoLMF (when method->save_lmf),
 * restore the link register and the callee-saved registers, and tear
 * down the stack frame before returning.
 * NOTE(review): this extract omits some lines (braces, else branches);
 * comments below describe only the visible code.
 */
4962 mono_arch_emit_epilog (MonoCompile *cfg)
4964 MonoMethod *method = cfg->method;
4966 int max_epilog_size = 16 + 20*4;
4969 if (cfg->method->save_lmf)
4970 max_epilog_size += 128;
4972 if (mono_jit_trace_calls != NULL)
4973 max_epilog_size += 50;
4975 if (cfg->prof_options & MONO_PROFILE_ENTER_LEAVE)
4976 max_epilog_size += 50;
/* Double the buffer until the worst-case epilog fits. */
4978 while (cfg->code_len + max_epilog_size > (cfg->code_size - 16)) {
4979 cfg->code_size *= 2;
4980 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
4981 mono_jit_stats.code_reallocs++;
4985 * Keep in sync with OP_JMP
4987 code = cfg->native_code + cfg->code_len;
4989 if (mono_jit_trace_calls != NULL && mono_trace_eval (method)) {
4990 code = mono_arch_instrument_epilog (cfg, mono_trace_leave_method, code, TRUE);
/* LMF path: unlink this frame's MonoLMF from the per-thread list
 * (*lmf_addr = previous_lmf) and reload the callee-saved GPRs from
 * MonoLMF.iregs. */
4994 if (method->save_lmf) {
4996 pos += sizeof (MonoLMF);
4998 /* save the frame reg in r8 */
4999 ppc_mr (code, ppc_r8, cfg->frame_reg);
5000 ppc_addi (code, ppc_r11, cfg->frame_reg, cfg->stack_usage - lmf_offset);
5001 /* r5 = previous_lmf */
5002 ppc_ldptr (code, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
5004 ppc_ldptr (code, ppc_r6, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
5005 /* *(lmf_addr) = previous_lmf */
5006 ppc_stptr (code, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r6);
5007 /* FIXME: speedup: there is no actual need to restore the registers if
5008 * we didn't actually change them (idea from Zoltan).
5011 ppc_load_multiple_regs (code, ppc_r13, G_STRUCT_OFFSET(MonoLMF, iregs), ppc_r11);
5013 /*for (i = 14; i < 32; i++) {
5014 ppc_lfd (code, i, G_STRUCT_OFFSET(MonoLMF, fregs) + ((i-14) * sizeof (gdouble)), ppc_r11);
5016 g_assert (ppc_is_imm16 (cfg->stack_usage + PPC_RET_ADDR_OFFSET));
5017 /* use the saved copy of the frame reg in r8 */
5018 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
5019 ppc_ldptr (code, ppc_r0, cfg->stack_usage + PPC_RET_ADDR_OFFSET, ppc_r8);
5020 ppc_mtlr (code, ppc_r0);
5022 ppc_addic (code, ppc_sp, ppc_r8, cfg->stack_usage);
/* Non-LMF path: reload the saved return address into LR ("1 ||" makes
 * it unconditional), then restore registers and pop the frame. */
5024 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
5025 long return_offset = cfg->stack_usage + PPC_RET_ADDR_OFFSET;
5026 if (ppc_is_imm16 (return_offset)) {
5027 ppc_ldptr (code, ppc_r0, return_offset, cfg->frame_reg);
5029 ppc_load (code, ppc_r11, return_offset);
5030 ppc_ldptr_indexed (code, ppc_r0, cfg->frame_reg, ppc_r11);
5032 ppc_mtlr (code, ppc_r0);
/* Small frame: callee-saved regs are addressable with 16-bit offsets
 * from the frame register. */
5034 if (ppc_is_imm16 (cfg->stack_usage)) {
5035 int offset = cfg->stack_usage;
5036 for (i = 13; i <= 31; i++) {
5037 if (cfg->used_int_regs & (1 << i))
5038 offset -= sizeof (gulong);
5040 if (cfg->frame_reg != ppc_sp)
5041 ppc_mr (code, ppc_r11, cfg->frame_reg);
5042 /* note r31 (possibly the frame register) is restored last */
5043 for (i = 13; i <= 31; i++) {
5044 if (cfg->used_int_regs & (1 << i)) {
5045 ppc_ldptr (code, i, offset, cfg->frame_reg);
5046 offset += sizeof (gulong);
5049 if (cfg->frame_reg != ppc_sp)
5050 ppc_addi (code, ppc_sp, ppc_r11, cfg->stack_usage);
5052 ppc_addi (code, ppc_sp, ppc_sp, cfg->stack_usage);
/* Large frame: compute the old stack pointer in r11 first, then
 * restore the regs at negative offsets from it. */
5054 ppc_load (code, ppc_r11, cfg->stack_usage);
5055 if (cfg->used_int_regs) {
5056 ppc_add (code, ppc_r11, cfg->frame_reg, ppc_r11);
5057 for (i = 31; i >= 13; --i) {
5058 if (cfg->used_int_regs & (1 << i)) {
5059 pos += sizeof (gulong);
5060 ppc_ldptr (code, i, -pos, ppc_r11);
5063 ppc_mr (code, ppc_sp, ppc_r11);
5065 ppc_add (code, ppc_sp, cfg->frame_reg, ppc_r11);
5072 cfg->code_len = code - cfg->native_code;
5074 g_assert (cfg->code_len < cfg->code_size);
/* remove once throw_exception_by_name is eliminated */
/*
 * exception_id_by_name:
 * Maps a corlib exception class name to its MONO_EXC_* intrinsic id.
 * Aborts via g_error () for any name outside the intrinsic set, so callers
 * must only pass names the JIT itself generates.
 * NOTE(review): the leading integer on each line appears to be an original
 * file line number fused into the text; the return-type line and braces
 * seem to be missing from this dump.
 */
5080 exception_id_by_name (const char *name)
5082 if (strcmp (name, "IndexOutOfRangeException") == 0)
5083 return MONO_EXC_INDEX_OUT_OF_RANGE;
5084 if (strcmp (name, "OverflowException") == 0)
5085 return MONO_EXC_OVERFLOW;
5086 if (strcmp (name, "ArithmeticException") == 0)
5087 return MONO_EXC_ARITHMETIC;
5088 if (strcmp (name, "DivideByZeroException") == 0)
5089 return MONO_EXC_DIVIDE_BY_ZERO;
5090 if (strcmp (name, "InvalidCastException") == 0)
5091 return MONO_EXC_INVALID_CAST;
5092 if (strcmp (name, "NullReferenceException") == 0)
5093 return MONO_EXC_NULL_REF;
5094 if (strcmp (name, "ArrayTypeMismatchException") == 0)
5095 return MONO_EXC_ARRAY_TYPE_MISMATCH;
/* unknown names are a JIT bug, not user error: fail hard */
5096 g_error ("Unknown intrinsic exception %s\n", name);
/*
 * mono_arch_emit_exceptions:
 * Appends the out-of-line exception-raising stubs after the method body.
 * First sizes the worst case (28 bytes per distinct intrinsic exception,
 * 12 per basic-block overflow jump, 8 per overflow-exception jump), grows
 * the native code buffer by doubling until that fits, then walks the patch
 * list emitting the stubs and back-patching the in-body branches.
 */
5101 mono_arch_emit_exceptions (MonoCompile *cfg)
5103 MonoJumpInfo *patch_info;
5106 const guint8* exc_throw_pos [MONO_EXC_INTRINS_NUM] = {NULL};
5107 guint8 exc_throw_found [MONO_EXC_INTRINS_NUM] = {0};
5108 int max_epilog_size = 50;
5110 /* count the number of exception infos */
5113 * make sure we have enough space for exceptions
5114 * 28 is the simulated call to throw_corlib_exception
/* each distinct exception class gets one shared throw stub (28 bytes) */
5116 for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
5117 if (patch_info->type == MONO_PATCH_INFO_EXC) {
5118 i = exception_id_by_name (patch_info->data.target);
5119 if (!exc_throw_found [i]) {
5120 max_epilog_size += 28;
5121 exc_throw_found [i] = TRUE;
5123 } else if (patch_info->type == MONO_PATCH_INFO_BB_OVF)
5124 max_epilog_size += 12;
5125 else if (patch_info->type == MONO_PATCH_INFO_EXC_OVF) {
5126 MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
5127 i = exception_id_by_name (ovfj->data.exception);
5128 if (!exc_throw_found [i]) {
5129 max_epilog_size += 28;
5130 exc_throw_found [i] = TRUE;
5132 max_epilog_size += 8;
/* double the buffer until body + stubs fit with 16 bytes of slack */
5136 while (cfg->code_len + max_epilog_size > (cfg->code_size - 16)) {
5137 cfg->code_size *= 2;
5138 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
5139 mono_jit_stats.code_reallocs++;
5142 code = cfg->native_code + cfg->code_len;
5144 /* add code to raise exceptions */
5145 for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
5146 switch (patch_info->type) {
5147 case MONO_PATCH_INFO_BB_OVF: {
/* out-of-range conditional branch to a basic block: re-emit the
 * condition here, then an unconditional jump to the real target */
5148 MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
5149 unsigned char *ip = patch_info->ip.i + cfg->native_code;
5150 /* patch the initial jump */
5151 ppc_patch (ip, code);
5152 ppc_bc (code, ovfj->b0_cond, ovfj->b1_cond, 2);
5154 ppc_patch (code - 4, ip + 4); /* jump back after the initial branch */
5155 /* jump back to the true target */
5157 ip = ovfj->data.bb->native_offset + cfg->native_code;
5158 ppc_patch (code - 4, ip);
5159 patch_info->type = MONO_PATCH_INFO_NONE;
5162 case MONO_PATCH_INFO_EXC_OVF: {
5163 MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
5164 MonoJumpInfo *newji;
5165 unsigned char *ip = patch_info->ip.i + cfg->native_code;
5166 unsigned char *bcl = code;
5167 /* patch the initial jump: we arrived here with a call */
5168 ppc_patch (ip, code);
5169 ppc_bc (code, ovfj->b0_cond, ovfj->b1_cond, 0);
5171 ppc_patch (code - 4, ip + 4); /* jump back after the initial branch */
5172 /* patch the conditional jump to the right handler */
5173 /* make it processed next */
/* queue a synthetic MONO_PATCH_INFO_EXC right after this entry so the
 * EXC case below resolves the conditional branch we just emitted */
5174 newji = mono_mempool_alloc (cfg->mempool, sizeof (MonoJumpInfo));
5175 newji->type = MONO_PATCH_INFO_EXC;
5176 newji->ip.i = bcl - cfg->native_code;
5177 newji->data.target = ovfj->data.exception;
5178 newji->next = patch_info->next;
5179 patch_info->next = newji;
5180 patch_info->type = MONO_PATCH_INFO_NONE;
5183 case MONO_PATCH_INFO_EXC: {
5184 MonoClass *exc_class;
5186 unsigned char *ip = patch_info->ip.i + cfg->native_code;
5187 i = exception_id_by_name (patch_info->data.target);
/* reuse an already-emitted stub for the same exception class */
5188 if (exc_throw_pos [i]) {
5189 ppc_patch (ip, exc_throw_pos [i]);
5190 patch_info->type = MONO_PATCH_INFO_NONE;
5193 exc_throw_pos [i] = code;
5196 exc_class = mono_class_from_name (mono_defaults.corlib, "System", patch_info->data.name);
5197 g_assert (exc_class);
5199 ppc_patch (ip, code);
5200 /*mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC_NAME, patch_info->data.target);*/
5201 ppc_load (code, ppc_r3, exc_class->type_token);
5202 /* we got here from a conditional call, so the calling ip is set in lr */
5203 ppc_mflr (code, ppc_r4);
/* repurpose this patch entry into the call to the throw helper */
5204 patch_info->type = MONO_PATCH_INFO_INTERNAL_METHOD;
5205 patch_info->data.name = "mono_arch_throw_corlib_exception";
5206 patch_info->ip.i = code - cfg->native_code;
5207 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
/* indirect call through CTR; ppc_load_func operand is patched later */
5208 ppc_load_func (code, ppc_r0, 0);
5209 ppc_mtctr (code, ppc_r0);
5210 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
5222 cfg->code_len = code - cfg->native_code;
5224 g_assert (cfg->code_len <= cfg->code_size);
/*
 * try_offset_access:
 * Debug helper: verifies that a pthread-key value can be reached through
 * the linuxthreads-style layout (r2 + 284 indexes a 32-entry-per-row
 * table) by comparing the table slot against the expected VALUE.
 * NOTE(review): body lines appear to be missing from this dump (no return
 * statements are visible) — confirm against the full source.
 */
5229 try_offset_access (void *value, guint32 idx)
5231 register void* me __asm__ ("r2");
5232 void ***p = (void***)((char*)me + 284);
5233 int idx1 = idx / 32;
5234 int idx2 = idx % 32;
5237 if (value != p[idx1][idx2])
/*
 * setup_tls_access:
 * One-time detection of how to emit fast TLS access on this platform,
 * recorded in the file-static tls_mode. Strategies probed, in order:
 * NPTL offsets (glibc confstr), Darwin G4/G5 sequences, and linuxthreads,
 * by disassembling the first instructions of pthread_getspecific.
 * On success also caches the pthread keys / TLS offsets for the domain,
 * LMF and thread objects in the file-static *_key variables.
 * Setting MONO_NO_TLS in the environment forces TLS_MODE_FAILED.
 */
5244 setup_tls_access (void)
5248 #if defined(__linux__) && defined(_CS_GNU_LIBPTHREAD_VERSION)
5249 size_t conf_size = 0;
5252 /* FIXME for darwin */
5253 guint32 *ins, *code;
5254 guint32 cmplwi_1023, li_0x48, blr_ins;
5257 if (tls_mode == TLS_MODE_FAILED)
5259 if (g_getenv ("MONO_NO_TLS")) {
5260 tls_mode = TLS_MODE_FAILED;
5264 if (tls_mode == TLS_MODE_DETECT) {
5265 #if defined(__APPLE__) && defined(__mono_ppc__) && !defined(__mono_ppc64__)
5266 tls_mode = TLS_MODE_DARWIN_G4;
5267 #elif defined(__linux__) && defined(_CS_GNU_LIBPTHREAD_VERSION)
/* glibc reports "NPTL" here when the NPTL threading library is in use */
5268 conf_size = confstr ( _CS_GNU_LIBPTHREAD_VERSION, confbuf, sizeof(confbuf));
5269 if ((conf_size > 4) && (strncmp (confbuf, "NPTL", 4) == 0))
5270 tls_mode = TLS_MODE_NPTL;
5272 ins = (guint32*)pthread_getspecific;
5273 /* uncond branch to the real method */
/* primary opcode 18 = PowerPC "b"/"ba": follow the PLT-style stub */
5274 if ((*ins >> 26) == 18) {
5276 val = (*ins & ~3) << 6;
5280 ins = (guint32*)(long)val;
5282 ins = (guint32*) ((char*)ins + val);
/* build reference encodings to compare against the real prologue */
5285 code = &cmplwi_1023;
5286 ppc_cmpli (code, 0, 0, ppc_r3, 1023);
5288 ppc_li (code, ppc_r4, 0x48);
/* linuxthreads pthread_getspecific starts with cmplwi r3, 1023 */
5291 if (*ins == cmplwi_1023) {
5292 int found_lwz_284 = 0;
/* scan up to 20 instructions for a lwz with displacement 284 */
5293 for (ptk = 0; ptk < 20; ++ptk) {
5295 if (!*ins || *ins == blr_ins)
5297 if ((guint16)*ins == 284 && (*ins >> 26) == 32) {
5302 if (!found_lwz_284) {
5303 tls_mode = TLS_MODE_FAILED;
5306 tls_mode = TLS_MODE_LTHREADS;
5307 } else if (*ins == li_0x48) {
5309 /* uncond branch to the real method */
5310 if ((*ins >> 26) == 18) {
5312 val = (*ins & ~3) << 6;
5316 ins = (guint32*)(long)val;
5318 ins = (guint32*) ((char*)ins + val);
/* Darwin G4 path uses the 0x7FF2 fast system call; not implemented */
5320 code = (guint32*)&val;
5321 ppc_li (code, ppc_r0, 0x7FF2);
5322 if (ins [1] == val) {
5323 /* Darwin on G4, implement */
5324 tls_mode = TLS_MODE_FAILED;
/* Darwin G5 reads the thread pointer via mfspr r3, 104 */
5327 code = (guint32*)&val;
5328 ppc_mfspr (code, ppc_r3, 104);
5329 if (ins [1] != val) {
5330 tls_mode = TLS_MODE_FAILED;
5333 tls_mode = TLS_MODE_DARWIN_G5;
5336 tls_mode = TLS_MODE_FAILED;
5340 tls_mode = TLS_MODE_FAILED;
5345 if (tls_mode == TLS_MODE_DETECT)
5346 tls_mode = TLS_MODE_FAILED;
5347 if (tls_mode == TLS_MODE_FAILED)
/* cache the domain TLS location: NPTL offset if available, else key */
5349 if ((monodomain_key == -1) && (tls_mode == TLS_MODE_NPTL)) {
5350 monodomain_key = mono_domain_get_tls_offset();
5352 /* if not TLS_MODE_NPTL or local dynamic (as indicated by
5353 mono_domain_get_tls_offset returning -1) then use keyed access. */
5354 if (monodomain_key == -1) {
5355 ptk = mono_domain_get_tls_key ();
5357 ptk = mono_pthread_key_for_tls (ptk);
5359 monodomain_key = ptk;
/* same strategy for the LMF address */
5364 if ((lmf_pthread_key == -1) && (tls_mode == TLS_MODE_NPTL)) {
5365 lmf_pthread_key = mono_get_lmf_addr_tls_offset();
5367 /* if not TLS_MODE_NPTL or local dynamic (as indicated by
5368 mono_get_lmf_addr_tls_offset returning -1) then use keyed access. */
5369 if (lmf_pthread_key == -1) {
5370 ptk = mono_pthread_key_for_tls (mono_jit_tls_id);
5372 /*g_print ("MonoLMF at: %d\n", ptk);*/
5373 /*if (!try_offset_access (mono_get_lmf_addr (), ptk)) {
5374 init_tls_failed = 1;
5377 lmf_pthread_key = ptk;
/* same strategy for the current MonoThread */
5381 if ((monothread_key == -1) && (tls_mode == TLS_MODE_NPTL)) {
5382 monothread_key = mono_thread_get_tls_offset();
5384 /* if not TLS_MODE_NPTL or local dynamic (as indicated by
5385 mono_get_lmf_addr_tls_offset returning -1) then use keyed access. */
5386 if (monothread_key == -1) {
5387 ptk = mono_thread_get_tls_key ();
5389 ptk = mono_pthread_key_for_tls (ptk);
5391 monothread_key = ptk;
5392 /*g_print ("thread inited: %d\n", ptk);*/
5395 /*g_print ("thread not inited yet %d\n", ptk);*/
/*
 * mono_arch_setup_jit_tls_data:
 * Per-thread JIT TLS initialization hook; on PPC the only arch-specific
 * work visible here is running the one-time TLS access detection.
 */
5401 mono_arch_setup_jit_tls_data (MonoJitTlsData *tls)
5403 setup_tls_access ();
/* mono_arch_free_jit_tls_data: per-thread teardown hook; body appears
 * empty/missing in this dump — nothing arch-specific to release on PPC. */
5407 mono_arch_free_jit_tls_data (MonoJitTlsData *tls)
#ifdef MONO_ARCH_HAVE_IMT
/* Per-chunk instruction-size constants used by mono_arch_build_imt_thunk
 * to pre-compute the thunk buffer size (bytes of emitted PPC code). */
5413 #define CMP_SIZE (PPC_LOAD_SEQUENCE_LENGTH + 4)
5415 #define LOADSTORE_SIZE 4
5416 #define JUMP_IMM_SIZE 12
5417 #define JUMP_IMM32_SIZE (PPC_LOAD_SEQUENCE_LENGTH + 8)
/* set to 1 to emit an assert-like check against wrong-method dispatch */
5418 #define ENABLE_WRONG_METHOD_CHECK 0
* LOCKING: called with the domain lock held
/*
 * mono_arch_build_imt_thunk:
 * Builds the interface-method-table dispatch thunk: a compare/branch
 * chain over the sorted IMT_ENTRIES that loads the target from the
 * vtable (or jumps to precomputed target code / FAIL_TRAMP) and branches
 * through CTR. Two passes: size each item, then emit and back-patch.
 */
5424 mono_arch_build_imt_thunk (MonoVTable *vtable, MonoDomain *domain, MonoIMTCheckItem **imt_entries, int count,
5425 gpointer fail_tramp)
5429 guint8 *code, *start;
/* pass 1: accumulate the byte size of each item's code chunk */
5431 for (i = 0; i < count; ++i) {
5432 MonoIMTCheckItem *item = imt_entries [i];
5433 if (item->is_equals) {
5434 if (item->check_target_idx) {
5435 if (!item->compare_done)
5436 item->chunk_size += CMP_SIZE;
5437 if (item->has_target_code)
5438 item->chunk_size += BR_SIZE + JUMP_IMM32_SIZE;
5440 item->chunk_size += LOADSTORE_SIZE + BR_SIZE + JUMP_IMM_SIZE;
5443 item->chunk_size += CMP_SIZE + BR_SIZE + JUMP_IMM32_SIZE * 2;
5444 if (!item->has_target_code)
5445 item->chunk_size += LOADSTORE_SIZE;
5447 item->chunk_size += LOADSTORE_SIZE + JUMP_IMM_SIZE;
5448 #if ENABLE_WRONG_METHOD_CHECK
5449 item->chunk_size += CMP_SIZE + BR_SIZE + 4;
/* a non-equals item is just a compare plus a less-than branch */
5454 item->chunk_size += CMP_SIZE + BR_SIZE;
5455 imt_entries [item->check_target_idx]->compare_done = TRUE;
5457 size += item->chunk_size;
/* allocation path differs for generic-virtual thunks vs. plain IMT
 * thunks; only the latter needs the initial vtable-address load */
5460 code = mono_method_alloc_generic_virtual_thunk (domain, size);
5462 /* the initial load of the vtable address */
5463 size += PPC_LOAD_SEQUENCE_LENGTH + LOADSTORE_SIZE;
5464 code = mono_domain_code_reserve (domain, size);
5469 * We need to save and restore r11 because it might be
5470 * used by the caller as the vtable register, so
5471 * clobbering it will trip up the magic trampoline.
5473 * FIXME: Get rid of this by making sure that r11 is
5474 * not used as the vtable register in interface calls.
5476 ppc_stptr (code, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_sp);
5477 ppc_load (code, ppc_r11, (gsize)(& (vtable->vtable [0])));
/* pass 2: emit the compare/branch chain */
5479 for (i = 0; i < count; ++i) {
5480 MonoIMTCheckItem *item = imt_entries [i];
5481 item->code_target = code;
5482 if (item->is_equals) {
5483 if (item->check_target_idx) {
5484 if (!item->compare_done) {
5485 ppc_load (code, ppc_r0, (gsize)item->key);
5486 ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
/* branch target unknown yet; recorded for the patch loop below */
5488 item->jmp_code = code;
5489 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
5490 if (item->has_target_code) {
5491 ppc_load (code, ppc_r0, item->value.target_code);
5493 ppc_ldptr (code, ppc_r0, (sizeof (gpointer) * item->value.vtable_slot), ppc_r11);
5494 ppc_ldptr (code, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_sp);
5496 ppc_mtctr (code, ppc_r0);
5497 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
/* last-equals case with a fail trampoline (fail_tramp path) */
5500 ppc_load (code, ppc_r0, (gulong)item->key);
5501 ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
5502 item->jmp_code = code;
5503 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
5504 if (item->has_target_code) {
5505 ppc_load (code, ppc_r0, item->value.target_code);
5508 ppc_load (code, ppc_r0, & (vtable->vtable [item->value.vtable_slot]));
5509 ppc_ldptr_indexed (code, ppc_r0, 0, ppc_r0);
5511 ppc_mtctr (code, ppc_r0);
5512 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
/* mismatch falls through to the fail trampoline */
5513 ppc_patch (item->jmp_code, code);
5514 ppc_load (code, ppc_r0, fail_tramp);
5515 ppc_mtctr (code, ppc_r0);
5516 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
5517 item->jmp_code = NULL;
5519 /* enable the commented code to assert on wrong method */
5520 #if ENABLE_WRONG_METHOD_CHECK
5521 ppc_load (code, ppc_r0, (guint32)item->key);
5522 ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
5523 item->jmp_code = code;
5524 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
5526 ppc_ldptr (code, ppc_r0, (sizeof (gpointer) * item->value.vtable_slot), ppc_r11);
5527 ppc_ldptr (code, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_sp);
5528 ppc_mtctr (code, ppc_r0);
5529 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
5530 #if ENABLE_WRONG_METHOD_CHECK
5531 ppc_patch (item->jmp_code, code);
5533 item->jmp_code = NULL;
/* non-equals item: binary-search style "key < item->key" branch */
5538 ppc_load (code, ppc_r0, (gulong)item->key);
5539 ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
5540 item->jmp_code = code;
5541 ppc_bc (code, PPC_BR_FALSE, PPC_BR_LT, 0);
5544 /* patch the branches to get to the target items */
5545 for (i = 0; i < count; ++i) {
5546 MonoIMTCheckItem *item = imt_entries [i];
5547 if (item->jmp_code) {
5548 if (item->check_target_idx) {
5549 ppc_patch (item->jmp_code, imt_entries [item->check_target_idx]->code_target);
5555 mono_stats.imt_thunks_size += code - start;
5556 g_assert (code - start <= size);
5557 mono_arch_flush_icache (start, size);
/* mono_arch_find_imt_method: the IMT method is passed in the dedicated
 * MONO_ARCH_IMT_REG; recover it from the saved register state. */
5562 mono_arch_find_imt_method (mgreg_t *regs, guint8 *code)
5564 return (MonoMethod*) regs [MONO_ARCH_IMT_REG];
/* mono_arch_find_this_argument: recover the managed 'this' pointer for
 * METHOD from the saved register state via the calling convention. */
5568 mono_arch_find_this_argument (mgreg_t *regs, MonoMethod *method, MonoGenericSharingContext *gsctx)
5570 return mono_arch_get_this_arg_from_call (gsctx, mono_method_signature (method), regs, NULL);
/* mono_arch_find_static_call_vtable: the vtable for a static rgctx call
 * travels in MONO_ARCH_RGCTX_REG; read it back from the saved registers. */
5575 mono_arch_find_static_call_vtable (mgreg_t *regs, guint8 *code)
5577 return (MonoVTable*) regs [MONO_ARCH_RGCTX_REG];
/* mono_arch_emit_inst_for_method: arch intrinsic hook; no PPC intrinsics
 * are visible here (body lines appear missing from this dump). */
5581 mono_arch_emit_inst_for_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
/* mono_arch_print_tree: debug pretty-printer hook; body not visible. */
5588 mono_arch_print_tree (MonoInst *tree, int arity)
/*
 * mono_arch_get_domain_intrinsic:
 * Returns an OP_TLS_GET instruction reading the current MonoDomain from
 * TLS, or (presumably) NULL when TLS access is unavailable
 * (monodomain_key == -1). Runs the TLS detection lazily first.
 */
MonoInst* mono_arch_get_domain_intrinsic (MonoCompile* cfg)
5597 setup_tls_access ();
5598 if (monodomain_key == -1)
5601 MONO_INST_NEW (cfg, ins, OP_TLS_GET);
5602 ins->inst_offset = monodomain_key;
/*
 * mono_arch_get_thread_intrinsic:
 * Same pattern as mono_arch_get_domain_intrinsic, but for the current
 * MonoThread: emits OP_TLS_GET at monothread_key when TLS is usable.
 */
5607 mono_arch_get_thread_intrinsic (MonoCompile* cfg)
5611 setup_tls_access ();
5612 if (monothread_key == -1)
5615 MONO_INST_NEW (cfg, ins, OP_TLS_GET);
5616 ins->inst_offset = monothread_key;
/*
 * mono_arch_context_get_int_reg:
 * Reads integer register REG out of a saved MonoContext. The stack
 * pointer is special-cased; only the callee-saved range (r13 and up)
 * is stored in ctx->regs, hence the assert and the r13-based index.
 */
5621 mono_arch_context_get_int_reg (MonoContext *ctx, int reg)
5624 return MONO_CONTEXT_GET_SP (ctx);
5626 g_assert (reg >= ppc_r13);
5628 return (gpointer)ctx->regs [reg - ppc_r13];
/* mono_arch_get_patch_offset: offset of the patchable instruction at
 * CODE; body not visible in this dump (PPC patches in place, so 0 is
 * the conventional answer — TODO confirm against full source). */
5632 mono_arch_get_patch_offset (guint8 *code)
5638 * mono_aot_emit_load_got_addr:
5640 * Emit code to load the got address.
5641 * On PPC, the result is placed into r30.
5644 mono_arch_emit_load_got_addr (guint8 *start, guint8 *code, MonoCompile *cfg, MonoJumpInfo **ji)
/* LR holds the address of the following instruction (set by the caller's
 * call sequence); capture it as the PC-relative base in r30 */
5647 ppc_mflr (code, ppc_r30);
/* record a GOT_OFFSET patch either on the compile unit or on the
 * explicit patch list, depending on which the caller provided */
5649 mono_add_patch_info (cfg, code - start, MONO_PATCH_INFO_GOT_OFFSET, NULL);
5651 *ji = mono_patch_info_list_prepend (*ji, code - start, MONO_PATCH_INFO_GOT_OFFSET, NULL);
5652 /* arch_emit_got_address () patches this */
5653 #if defined(TARGET_POWERPC64)
/* placeholder displacement; the AOT compiler rewrites the load32 */
5659 ppc_load32 (code, ppc_r0, 0);
5660 ppc_add (code, ppc_r30, ppc_r30, ppc_r0);
5667 * mono_ppc_emit_load_aotconst:
5669 * Emit code to load the contents of the GOT slot identified by TRAMP_TYPE and
5670 * TARGET from the mscorlib GOT in full-aot code.
5671 * On PPC, the GOT address is assumed to be in r30, and the result is placed into
5675 mono_arch_emit_load_aotconst (guint8 *start, guint8 *code, MonoJumpInfo **ji, int tramp_type, gconstpointer target)
5677 /* Load the mscorlib got address */
5678 ppc_ldptr (code, ppc_r11, sizeof (gpointer), ppc_r30);
5679 *ji = mono_patch_info_list_prepend (*ji, code - start, tramp_type, target);
5680 /* arch_emit_got_access () patches this */
5681 ppc_load32 (code, ppc_r0, 0);
5682 ppc_ldptr_indexed (code, ppc_r11, ppc_r11, ppc_r0);