2 * mini-ppc.c: PowerPC backend for the Mono code generator
5 * Paolo Molaro (lupus@ximian.com)
6 * Dietmar Maurer (dietmar@ximian.com)
7 * Andreas Faerber <andreas.faerber@web.de>
9 * (C) 2003 Ximian, Inc.
10 * (C) 2007-2008 Andreas Faerber
15 #include <mono/metadata/appdomain.h>
16 #include <mono/metadata/debug-helpers.h>
17 #include <mono/utils/mono-proclib.h>
18 #include <mono/utils/mono-mmap.h>
21 #ifdef TARGET_POWERPC64
22 #include "cpu-ppc64.h"
29 #include <sys/sysctl.h>
/* When set, calls are always emitted as indirect (ctr-based) branches. */
#define FORCE_INDIR_CALL 1
/* cpu_hw_caps contains the flags defined below */
static int cpu_hw_caps = 0;
/* Data cache line size, detected at startup in mono_arch_cpu_init (). */
static int cachelinesize = 0;
/* Increment used when walking an address range line-by-line for cache flushes. */
static int cachelineinc = 0;
/* Hardware capability bits stored in cpu_hw_caps. */
PPC_ICACHE_SNOOP = 1 << 0,
PPC_MULTIPLE_LS_UNITS = 1 << 1,
PPC_SMP_CAPABLE = 1 << 2,
PPC_MOVE_FPR_GPR = 1 << 5,
/* Byte size of a breakpoint site: an address-load sequence plus one instruction. */
#define BREAKPOINT_SIZE (PPC_LOAD_SEQUENCE_LENGTH + 4)
/* This mutex protects architecture specific caches */
#define mono_mini_arch_lock() EnterCriticalSection (&mini_arch_mutex)
#define mono_mini_arch_unlock() LeaveCriticalSection (&mini_arch_mutex)
static CRITICAL_SECTION mini_arch_mutex;
/* Frame offset where the sp is saved for exception handling; set in
 * mono_arch_allocate_vars () and read by the exception code. */
int mono_exc_esp_offset = 0;
/* Which TLS access sequence to emit (linuxthreads/NPTL/Darwin); detected lazily. */
static int tls_mode = TLS_MODE_DETECT;
/* pthread key of the per-thread LMF; -1 until discovered. */
static int lmf_pthread_key = -1;
/* TLS key holding the current MonoDomain; -1 until discovered. */
static int monodomain_key = -1;
/*
* The code generated for sequence points reads from this location, which is
* made read-only when single stepping is enabled.
*/
static gpointer ss_trigger_page;
/* Enabled breakpoints read from this trigger page */
static gpointer bp_trigger_page;
/*
 * offsets_from_pthread_key:
 * Split KEY into the two load offsets needed to reach a pthread-specific
 * slot from the thread descriptor; the second-level offset is stored in
 * *offset2 and the first-level offset is returned.
 * NOTE(review): the constant 284 looks like the offset of the specific-data
 * area inside the glibc linuxthreads descriptor — confirm against the libc
 * this was written for.
 */
offsets_from_pthread_key (guint32 key, int *offset2)
*offset2 = idx2 * sizeof (gpointer);
return 284 + idx1 * sizeof (gpointer);
/* Load the TLS slot for KEY into DREG assuming the linuxthreads layout:
 * the thread descriptor is reachable through r2 via a two-level load. */
#define emit_linuxthreads_tls(code,dreg,key) do {\
off1 = offsets_from_pthread_key ((key), &off2); \
ppc_ldptr ((code), (dreg), off1, ppc_r2); \
ppc_ldptr ((code), (dreg), off2, (dreg)); \
/* Darwin/G5 TLS: the thread pointer lives in SPR 104; slots start at 0x48. */
#define emit_darwing5_tls(code,dreg,key) do {\
int off1 = 0x48 + key * sizeof (gpointer); \
ppc_mfspr ((code), (dreg), 104); \
ppc_ldptr ((code), (dreg), off1, (dreg)); \
/* FIXME: ensure the sc call preserves all but r3 */
/* Darwin/G4 TLS: fetch the thread pointer via the 0x7FF2 fast trap,
 * preserving the caller's r3 in r11 around the call. */
#define emit_darwing4_tls(code,dreg,key) do {\
int off1 = 0x48 + key * sizeof (gpointer); \
if ((dreg) != ppc_r3) ppc_mr ((code), ppc_r11, ppc_r3); \
ppc_li ((code), ppc_r0, 0x7FF2); \
ppc_lwz ((code), (dreg), off1, ppc_r3); \
if ((dreg) != ppc_r3) ppc_mr ((code), ppc_r3, ppc_r11); \
#ifdef PPC_THREAD_PTR_REG
/* NPTL TLS: the slot is at a signed offset from the dedicated thread-pointer
 * register; offsets that don't fit in 16 bits need an addis to build the
 * high part first. */
#define emit_nptl_tls(code,dreg,key) do { \
int off2 = key >> 15; \
if ((off2 == 0) || (off2 == -1)) { \
ppc_ldptr ((code), (dreg), off1, PPC_THREAD_PTR_REG); \
int off3 = (off2 + 1) > 1; \
ppc_addis ((code), ppc_r11, PPC_THREAD_PTR_REG, off3); \
ppc_ldptr ((code), (dreg), off1, ppc_r11); \
/* No thread-pointer register on this target: NPTL access is unsupported. */
#define emit_nptl_tls(code,dreg,key) do { \
g_assert_not_reached (); \
/* Emit the TLS load for KEY into DREG using whichever access sequence
 * was detected at runtime (see tls_mode above). */
#define emit_tls_access(code,dreg,key) do { \
switch (tls_mode) { \
case TLS_MODE_LTHREADS: emit_linuxthreads_tls(code,dreg,key); break; \
case TLS_MODE_NPTL: emit_nptl_tls(code,dreg,key); break; \
case TLS_MODE_DARWIN_G5: emit_darwing5_tls(code,dreg,key); break; \
case TLS_MODE_DARWIN_G4: emit_darwing4_tls(code,dreg,key); break; \
default: g_assert_not_reached (); \
/* Append an OP_R8CONST instruction to the current bblock that loads the
 * double stored at ADDR into DR. */
#define MONO_EMIT_NEW_LOAD_R8(cfg,dr,addr) do { \
MONO_INST_NEW ((cfg), (inst), OP_R8CONST); \
inst->type = STACK_R8; \
inst->inst_p0 = (void*)(addr); \
mono_bblock_add_inst (cfg->cbb, inst); \
/*
 * mono_arch_regname:
 * Return a printable name for integer register REG (r1 is shown as "sp"),
 * for use in debugging/tracing output.
 */
mono_arch_regname (int reg) {
static const char rnames[][4] = {
"r0", "sp", "r2", "r3", "r4",
"r5", "r6", "r7", "r8", "r9",
"r10", "r11", "r12", "r13", "r14",
"r15", "r16", "r17", "r18", "r19",
"r20", "r21", "r22", "r23", "r24",
"r25", "r26", "r27", "r28", "r29",
if (reg >= 0 && reg < 32)
/*
 * mono_arch_fregname:
 * Return a printable name for floating point register REG, for use in
 * debugging/tracing output.
 */
mono_arch_fregname (int reg) {
static const char rnames[][4] = {
"f0", "f1", "f2", "f3", "f4",
"f5", "f6", "f7", "f8", "f9",
"f10", "f11", "f12", "f13", "f14",
"f15", "f16", "f17", "f18", "f19",
"f20", "f21", "f22", "f23", "f24",
"f25", "f26", "f27", "f28", "f29",
if (reg >= 0 && reg < 32)
/* this function overwrites r0, r11, r12 */
/*
 * emit_memcpy:
 * Emit inline code that copies SIZE bytes from sreg+soffset to
 * dreg+doffset. Big copies use a ctr-counted load/store-with-update loop;
 * the remainder is copied with progressively narrower loads and stores
 * (pointer-sized, word, halfword, byte).
 */
emit_memcpy (guint8 *code, int size, int dreg, int doffset, int sreg, int soffset)
/* for large copies use the count register instead of fully unrolling */
if (size > sizeof (gpointer) * 5) {
long shifted = size / SIZEOF_VOID_P;
guint8 *copy_loop_start, *copy_loop_jump;
ppc_load (code, ppc_r0, shifted);
ppc_mtctr (code, ppc_r0);
g_assert (sreg == ppc_r11);
/* pre-decrement the pointers: the loop uses update-form loads/stores */
ppc_addi (code, ppc_r12, dreg, (doffset - sizeof (gpointer)));
ppc_addi (code, ppc_r11, sreg, (soffset - sizeof (gpointer)));
copy_loop_start = code;
ppc_ldptr_update (code, ppc_r0, (unsigned int)sizeof (gpointer), ppc_r11);
ppc_stptr_update (code, ppc_r0, (unsigned int)sizeof (gpointer), ppc_r12);
copy_loop_jump = code;
ppc_bc (code, PPC_BR_DEC_CTR_NONZERO, 0, 0);
ppc_patch (copy_loop_jump, copy_loop_start);
size -= shifted * sizeof (gpointer);
doffset = soffset = 0;
#ifdef __mono_ppc64__
/* the hardware has multiple load/store units and the move is long
enough to use more than one register, then use load/load/store/store
to execute 2 instructions per cycle. */
if ((cpu_hw_caps & PPC_MULTIPLE_LS_UNITS) && (dreg != ppc_r12) && (sreg != ppc_r12)) {
ppc_ldptr (code, ppc_r0, soffset, sreg);
ppc_ldptr (code, ppc_r12, soffset+8, sreg);
ppc_stptr (code, ppc_r0, doffset, dreg);
ppc_stptr (code, ppc_r12, doffset+8, dreg);
ppc_ldr (code, ppc_r0, soffset, sreg);
ppc_str (code, ppc_r0, doffset, dreg);
/* same paired load/store trick for the 32-bit tail */
if ((cpu_hw_caps & PPC_MULTIPLE_LS_UNITS) && (dreg != ppc_r12) && (sreg != ppc_r12)) {
ppc_lwz (code, ppc_r0, soffset, sreg);
ppc_lwz (code, ppc_r12, soffset+4, sreg);
ppc_stw (code, ppc_r0, doffset, dreg);
ppc_stw (code, ppc_r12, doffset+4, dreg);
ppc_lwz (code, ppc_r0, soffset, sreg);
ppc_stw (code, ppc_r0, doffset, dreg);
ppc_lhz (code, ppc_r0, soffset, sreg);
ppc_sth (code, ppc_r0, doffset, dreg);
ppc_lbz (code, ppc_r0, soffset, sreg);
ppc_stb (code, ppc_r0, doffset, dreg);
/*
* mono_arch_get_argument_info:
* @csig: a method signature
* @param_count: the number of parameters to consider
* @arg_info: an array to store the result infos
* Gathers information on parameters such as size, alignment and
* padding. arg_info should be large enough to hold param_count + 1 entries.
* Returns the size of the activation frame.
*/
mono_arch_get_argument_info (MonoMethodSignature *csig, int param_count, MonoJitArgumentInfo *arg_info)
#ifdef __mono_ppc64__
int k, frame_size = 0;
int size, align, pad;
/* a struct return value needs a hidden pointer slot in the frame */
if (MONO_TYPE_ISSTRUCT (csig->ret)) {
frame_size += sizeof (gpointer);
arg_info [0].offset = offset;
frame_size += sizeof (gpointer);
arg_info [0].size = frame_size;
for (k = 0; k < param_count; k++) {
size = mono_type_native_stack_size (csig->params [k], (guint32*)&align);
size = mini_type_stack_size (NULL, csig->params [k], &align);
/* ignore alignment for now */
/* pad the frame up to this argument's alignment */
frame_size += pad = (align - (frame_size & (align - 1))) & (align - 1);
arg_info [k].pad = pad;
arg_info [k + 1].pad = 0;
arg_info [k + 1].size = size;
arg_info [k + 1].offset = offset;
/* final padding rounds the frame to the ABI frame alignment */
align = MONO_ARCH_FRAME_ALIGNMENT;
frame_size += pad = (align - (frame_size & (align - 1))) & (align - 1);
arg_info [k].pad = pad;
#ifdef __mono_ppc64__
/* Return TRUE if the five instructions at SEQ form the 64-bit constant-load
 * sequence lis/ori/sldi/oris/ori emitted for full-width addresses. */
is_load_sequence (guint32 *seq)
return ppc_opcode (seq [0]) == 15 && /* lis */
ppc_opcode (seq [1]) == 24 && /* ori */
ppc_opcode (seq [2]) == 30 && /* sldi */
ppc_opcode (seq [3]) == 25 && /* oris */
ppc_opcode (seq [4]) == 24; /* ori */
/* Extract the destination register / 16-bit displacement from a D-form load. */
#define ppc_load_get_dest(l) (((l)>>21) & 0x1f)
#define ppc_load_get_off(l) ((gint16)((l) & 0xffff))
/* code must point to the blrl */
/*
 * mono_ppc_is_direct_call_sequence:
 * Return TRUE if the instructions preceding CODE (which must point at the
 * blrl/blr/bctrl) are an immediate-address call sequence rather than an
 * indirect call through a register or function descriptor.
 */
mono_ppc_is_direct_call_sequence (guint32 *code)
#ifdef __mono_ppc64__
g_assert(*code == 0x4e800021 || *code == 0x4e800020 || *code == 0x4e800420);
/* the thunk-less direct call sequence: lis/ori/sldi/oris/ori/mtlr/blrl */
if (ppc_opcode (code [-1]) == 31) { /* mtlr */
if (ppc_opcode (code [-2]) == 58 && ppc_opcode (code [-3]) == 58) { /* ld/ld */
if (!is_load_sequence (&code [-8]))
/* one of the loads must be "ld r2,8(rX)" */
return (ppc_load_get_dest (code [-2]) == ppc_r2 && ppc_load_get_off (code [-2]) == 8) ||
(ppc_load_get_dest (code [-3]) == ppc_r2 && ppc_load_get_off (code [-3]) == 8);
if (ppc_opcode (code [-2]) == 24 && ppc_opcode (code [-3]) == 31) /* mr/nop */
return is_load_sequence (&code [-8]);
return is_load_sequence (&code [-6]);
g_assert(*code == 0x4e800021);
/* the thunk-less direct call sequence: lis/ori/mtlr/blrl */
return ppc_opcode (code [-1]) == 31 &&
ppc_opcode (code [-2]) == 24 &&
ppc_opcode (code [-3]) == 15;
/*
 * mono_arch_get_vcall_slot:
 * Given CODE_PTR pointing at the blrl of an indirect (vtable) call and the
 * saved register state REGS, walk backwards to find which register and
 * displacement the call target was loaded from; returns the base object
 * and stores the displacement in *displacement. Returns early for direct
 * call sequences, which have no vtable slot.
 */
mono_arch_get_vcall_slot (guint8 *code_ptr, mgreg_t *regs, int *displacement)
guint32* code = (guint32*)code_ptr;
mgreg_t *r = (mgreg_t*)regs;
/* This is the 'blrl' instruction */
/* Sanity check: instruction must be 'blrl' */
if (*code != 0x4e800021)
if (mono_ppc_is_direct_call_sequence (code))
/* FIXME: more sanity checks here */
/* OK, we're now at the 'blrl' instruction. Now walk backwards
till we get to a 'mtlr rA' */
if((*code & 0x7c0803a6) == 0x7c0803a6) {
/* Here we are: we reached the 'mtlr rA'.
Extract the register from the instruction */
reg = (*code & 0x03e00000) >> 21;
/* ok, this is a lwz reg, offset (vtreg)
* it is emitted with:
* ppc_emit32 (c, (32 << 26) | ((D) << 21) | ((a) << 16) | (guint16)(d))
*/
soff = (*code & 0xffff);
reg = (*code >> 16) & 0x1f;
g_assert (reg != ppc_r1);
/*g_print ("patching reg is %d\n", reg);*/
o = (gpointer)(gsize)r [reg];
*displacement = offset;
#define MAX_ARCH_DELEGATE_PARAMS 7
/*
 * get_delegate_invoke_impl:
 * Generate the small trampoline used for delegate Invoke. With HAS_TARGET
 * the 'this' argument is replaced by delegate->target before jumping to
 * method_ptr; otherwise the PARAM_COUNT register arguments are shifted
 * down by one to drop the delegate argument. The generated length is
 * returned through CODE_LEN when non-NULL.
 */
get_delegate_invoke_impl (gboolean has_target, guint32 param_count, guint32 *code_len, gboolean aot)
guint8 *code, *start;
int size = MONO_PPC_32_64_CASE (32, 32) + PPC_FTNPTR_SIZE;
start = code = mono_global_codeman_reserve (size);
code = mono_ppc_create_pre_code_ftnptr (code);
/* Replace the this argument with the target */
ppc_ldptr (code, ppc_r0, G_STRUCT_OFFSET (MonoDelegate, method_ptr), ppc_r3);
#ifdef PPC_USES_FUNCTION_DESCRIPTOR
/* it's a function descriptor */
/* Can't use ldptr as it doesn't work with r0 */
ppc_ldptr_indexed (code, ppc_r0, 0, ppc_r0);
ppc_mtctr (code, ppc_r0);
ppc_ldptr (code, ppc_r3, G_STRUCT_OFFSET (MonoDelegate, target), ppc_r3);
ppc_bcctr (code, PPC_BR_ALWAYS, 0);
g_assert ((code - start) <= size);
mono_arch_flush_icache (start, size);
/* no-target case: one extra mr per parameter to slide the args down */
size = MONO_PPC_32_64_CASE (32, 32) + param_count * 4 + PPC_FTNPTR_SIZE;
start = code = mono_global_codeman_reserve (size);
code = mono_ppc_create_pre_code_ftnptr (code);
ppc_ldptr (code, ppc_r0, G_STRUCT_OFFSET (MonoDelegate, method_ptr), ppc_r3);
#ifdef PPC_USES_FUNCTION_DESCRIPTOR
/* it's a function descriptor */
ppc_ldptr_indexed (code, ppc_r0, 0, ppc_r0);
ppc_mtctr (code, ppc_r0);
/* slide down the arguments */
for (i = 0; i < param_count; ++i) {
ppc_mr (code, (ppc_r3 + i), (ppc_r3 + i + 1));
ppc_bcctr (code, PPC_BR_ALWAYS, 0);
g_assert ((code - start) <= size);
mono_arch_flush_icache (start, size);
*code_len = code - start;
/*
 * mono_arch_get_delegate_invoke_impls:
 * For AOT compilation: build the has-target trampoline plus one
 * no-target trampoline per supported parameter count, and return them as
 * a list of named trampoline infos.
 */
mono_arch_get_delegate_invoke_impls (void)
code = get_delegate_invoke_impl (TRUE, 0, &code_len, TRUE);
res = g_slist_prepend (res, mono_aot_tramp_info_create (g_strdup ("delegate_invoke_impl_has_target"), code, code_len));
for (i = 0; i < MAX_ARCH_DELEGATE_PARAMS; ++i) {
code = get_delegate_invoke_impl (FALSE, i, &code_len, TRUE);
res = g_slist_prepend (res, mono_aot_tramp_info_create (g_strdup_printf ("delegate_invoke_impl_target_%d", i), code, code_len));
/*
 * mono_arch_get_delegate_invoke_impl:
 * Return (and cache) the delegate-invoke trampoline matching SIG and
 * HAS_TARGET, taking the AOT-compiled version when available. Returns
 * NULL for signatures the backend cannot handle (struct returns, too many
 * or non register-sized parameters).
 */
mono_arch_get_delegate_invoke_impl (MonoMethodSignature *sig, gboolean has_target)
guint8 *code, *start;
/* FIXME: Support more cases */
if (MONO_TYPE_ISSTRUCT (sig->ret))
static guint8* cached = NULL;
start = mono_aot_get_named_code ("delegate_invoke_impl_has_target");
start = get_delegate_invoke_impl (TRUE, 0, NULL, FALSE);
/* publish the code before publishing the cache entry */
mono_memory_barrier ();
static guint8* cache [MAX_ARCH_DELEGATE_PARAMS + 1] = {NULL};
if (sig->param_count > MAX_ARCH_DELEGATE_PARAMS)
for (i = 0; i < sig->param_count; ++i)
if (!mono_is_regsize_var (sig->params [i]))
code = cache [sig->param_count];
char *name = g_strdup_printf ("delegate_invoke_impl_target_%d", sig->param_count);
start = mono_aot_get_named_code (name);
start = get_delegate_invoke_impl (FALSE, sig->param_count, NULL, FALSE);
mono_memory_barrier ();
cache [sig->param_count] = start;
/*
 * mono_arch_get_this_arg_from_call:
 * Recover the 'this' pointer from the saved register state of a call:
 * normally r3, but r4 when a hidden struct-return pointer occupies r3.
 */
mono_arch_get_this_arg_from_call (MonoGenericSharingContext *gsctx, MonoMethodSignature *sig, mgreg_t *regs, guint8 *code)
mgreg_t *r = (mgreg_t*)regs;
/* FIXME: handle returning a struct */
if (MONO_TYPE_ISSTRUCT (sig->ret))
return (gpointer)(gsize)r [ppc_r4];
return (gpointer)(gsize)r [ppc_r3];
#ifdef USE_ENVIRON_HACK
/*
 * linux_find_auxv:
 * Locate the ELF auxiliary vector by walking past the end of __environ
 * (the auxv follows the environment on the initial stack). Stores the
 * number of entries in *count and returns a pointer to the first entry.
 */
linux_find_auxv (int *count)
char **result = __environ;
/* Scan over the env vector looking for the ending NULL */
for (; *result != NULL; ++result) {
/* Bump the pointer one more step, which should be the auxv. */
vec = (AuxVec *)result;
if (vec->type != 22 /*AT_IGNOREPPC*/) {
while (vec->type != 0 /*AT_NULL*/) {
return (AuxVec *)result;
/* Upper bound on auxv entries read from /proc/self/auxv. */
#define MAX_AUX_ENTRIES 128
/*
* PPC_FEATURE_POWER4, PPC_FEATURE_POWER5, PPC_FEATURE_POWER5_PLUS, PPC_FEATURE_CELL,
* PPC_FEATURE_PA6T, PPC_FEATURE_ARCH_2_05 are considered supporting 2X ISA features
*/
#define ISA_2X (0x00080000 | 0x00040000 | 0x00020000 | 0x00010000 | 0x00000800 | 0x00001000)
/* define PPC_FEATURE_64 HWCAP for 64-bit category. */
#define ISA_64 0x40000000
/* define PPC_FEATURE_POWER6_EXT HWCAP for power6x mffgpr/mftgpr instructions. */
#define ISA_MOVE_FPR_GPR 0x00000200
/*
* Initialize the cpu to execute managed code.
* Detects the cache line size and hardware capabilities (icache snooping,
* ISA level, multiple load/store units, SMP) and stores them in the
* cachelinesize/cachelineinc/cpu_hw_caps globals used by the code emitters.
*/
mono_arch_cpu_init (void)
mib [1] = HW_CACHELINE;
len = sizeof (cachelinesize);
if (sysctl (mib, 2, &cachelinesize, (size_t*)&len, NULL, 0) == -1) {
cachelineinc = cachelinesize;
#elif defined(__linux__)
AuxVec vec [MAX_AUX_ENTRIES];
int i, vec_entries = 0;
/* sadly this will work only with 2.6 kernels... */
FILE* f = fopen ("/proc/self/auxv", "rb");
vec_entries = fread (&vec, sizeof (AuxVec), MAX_AUX_ENTRIES, f);
#ifdef USE_ENVIRON_HACK
/* fall back to locating the auxv past the environment block */
AuxVec *evec = linux_find_auxv (&vec_entries);
memcpy (&vec, evec, sizeof (AuxVec) * MIN (vec_entries, MAX_AUX_ENTRIES));
for (i = 0; i < vec_entries; i++) {
int type = vec [i].type;
if (type == 19) { /* AT_DCACHEBSIZE */
cachelinesize = vec [i].value;
} else if (type == 16) { /* AT_HWCAP */
if (vec [i].value & 0x00002000 /*PPC_FEATURE_ICACHE_SNOOP*/)
cpu_hw_caps |= PPC_ICACHE_SNOOP;
if (vec [i].value & ISA_2X)
cpu_hw_caps |= PPC_ISA_2X;
if (vec [i].value & ISA_64)
cpu_hw_caps |= PPC_ISA_64;
if (vec [i].value & ISA_MOVE_FPR_GPR)
cpu_hw_caps |= PPC_MOVE_FPR_GPR;
} else if (type == 15) { /* AT_PLATFORM */
const char *arch = (char*)vec [i].value;
if (strcmp (arch, "ppc970") == 0 ||
(strncmp (arch, "power", 5) == 0 && arch [5] >= '4' && arch [5] <= '7'))
cpu_hw_caps |= PPC_MULTIPLE_LS_UNITS;
/*printf ("cpu: %s\n", (char*)vec [i].value);*/
#elif defined(G_COMPILER_CODEWARRIOR)
#elif defined(MONO_CROSS_COMPILE)
//#error Need a way to get cache line size
cachelineinc = cachelinesize;
if (mono_cpu_count () > 1)
cpu_hw_caps |= PPC_SMP_CAPABLE;
/*
* Initialize architecture specific code.
* Sets up the arch mutex and allocates the single-step and breakpoint
* trigger pages; the breakpoint page is made inaccessible so reads fault.
*/
mono_arch_init (void)
InitializeCriticalSection (&mini_arch_mutex);
ss_trigger_page = mono_valloc (NULL, mono_pagesize (), MONO_MMAP_READ|MONO_MMAP_32BIT);
bp_trigger_page = mono_valloc (NULL, mono_pagesize (), MONO_MMAP_READ|MONO_MMAP_32BIT);
mono_mprotect (bp_trigger_page, mono_pagesize (), 0);
/*
* Cleanup architecture specific code.
*/
mono_arch_cleanup (void)
DeleteCriticalSection (&mini_arch_mutex);
/*
* This function returns the optimizations supported on this cpu.
* (Note: the misspelled name is part of the public mini interface.)
*/
mono_arch_cpu_optimizazions (guint32 *exclude_mask)
/* no ppc-specific optimizations yet */
/* Helpers to include a switch case only on the matching word size. */
#ifdef __mono_ppc64__
#define CASE_PPC32(c)
#define CASE_PPC64(c) case c:
#define CASE_PPC32(c) case c:
#define CASE_PPC64(c)
/* Return TRUE if values of type T fit in a single integer register and can
 * therefore take part in global register allocation. */
is_regsize_var (MonoType *t) {
t = mini_type_get_underlying_type (NULL, t);
CASE_PPC64 (MONO_TYPE_I8)
CASE_PPC64 (MONO_TYPE_U8)
case MONO_TYPE_FNPTR:
case MONO_TYPE_OBJECT:
case MONO_TYPE_STRING:
case MONO_TYPE_CLASS:
case MONO_TYPE_SZARRAY:
case MONO_TYPE_ARRAY:
case MONO_TYPE_GENERICINST:
if (!mono_type_generic_inst_is_valuetype (t))
case MONO_TYPE_VALUETYPE:
/*
 * mono_arch_get_allocatable_int_vars:
 * Collect the local variables eligible for global register allocation:
 * register-sized, non-volatile, non-indirect locals/args with a non-empty
 * live range, sorted for the allocator.
 */
mono_arch_get_allocatable_int_vars (MonoCompile *cfg)
for (i = 0; i < cfg->num_varinfo; i++) {
MonoInst *ins = cfg->varinfo [i];
MonoMethodVar *vmv = MONO_VARINFO (cfg, i);
/* unused vars */
if (vmv->range.first_use.abs_pos >= vmv->range.last_use.abs_pos)
if (ins->flags & (MONO_INST_VOLATILE|MONO_INST_INDIRECT) || (ins->opcode != OP_LOCAL && ins->opcode != OP_ARG))
/* we can only allocate register-sized values */
if (is_regsize_var (ins->inst_vtype)) {
g_assert (MONO_VARINFO (cfg, i)->reg == -1);
g_assert (i == vmv->idx);
vars = mono_varlist_insert_sorted (cfg, vars, vmv, FALSE);
/*
 * mono_arch_get_global_int_regs:
 * Return the list of callee-saved integer registers (r14 and up) usable
 * for global register allocation; the frame register is excluded.
 */
mono_arch_get_global_int_regs (MonoCompile *cfg)
if (cfg->frame_reg != ppc_sp)
/* ppc_r13 is used by the system on PPC EABI */
for (i = 14; i < top; ++i)
regs = g_list_prepend (regs, GUINT_TO_POINTER (i));
/*
* mono_arch_regalloc_cost:
* Return the cost, in number of memory references, of the action of
* allocating the variable VMV into a register during global register
* allocation.
*/
mono_arch_regalloc_cost (MonoCompile *cfg, MonoMethodVar *vmv)
/*
 * mono_arch_flush_icache:
 * Make SIZE bytes of freshly generated code at CODE visible to the
 * instruction fetch unit: flush/store the data cache lines, then
 * invalidate the corresponding icache lines, choosing the cheapest
 * sequence the detected hardware allows.
 */
mono_arch_flush_icache (guint8 *code, gint size)
#ifdef MONO_CROSS_COMPILE
guint8 *endp, *start;
/* round down to the start of the cache line */
start = (guint8*)((gsize)start & ~(cachelinesize - 1));
/* use dcbf for smp support, later optimize for UP, see pem._64bit.d20030611.pdf page 211 */
#if defined(G_COMPILER_CODEWARRIOR)
if (cpu_hw_caps & PPC_SMP_CAPABLE) {
for (p = start; p < endp; p += cachelineinc) {
for (p = start; p < endp; p += cachelineinc) {
for (p = start; p < endp; p += cachelineinc) {
/* For POWER5/6 with ICACHE_SNOOPing only one icbi in the range is required.
* The sync is required to ensure that the store queue is completely empty.
* While the icbi performs no cache operations, icbi/isync is required to
* kill local prefetch.
*/
if (cpu_hw_caps & PPC_ICACHE_SNOOP) {
asm ("icbi 0,%0;" : : "r"(code) : "memory");
/* use dcbf for smp support, see pem._64bit.d20030611.pdf page 211 */
if (cpu_hw_caps & PPC_SMP_CAPABLE) {
for (p = start; p < endp; p += cachelineinc) {
asm ("dcbf 0,%0;" : : "r"(p) : "memory");
for (p = start; p < endp; p += cachelineinc) {
asm ("dcbst 0,%0;" : : "r"(p) : "memory");
for (p = start; p < endp; p += cachelineinc) {
/* for ISA2.0+ implementations we should not need any extra sync between the
* icbi instructions. Both the 2.0 PEM and the PowerISA-2.05 say this.
* So I am not sure which chip had this problem but it's not an issue on any
* of the ISA V2 chips.
*/
if (cpu_hw_caps & PPC_ISA_2X)
asm ("icbi 0,%0;" : : "r"(p) : "memory");
asm ("icbi 0,%0; sync;" : : "r"(p) : "memory");
if (!(cpu_hw_caps & PPC_ISA_2X))
/* No register windows on PPC: nothing to flush. */
mono_arch_flush_register_windows (void)
/* Calling-convention knobs: whether register args also get a stack slot,
 * and whether FP args additionally consume integer arg registers. */
#define ALWAYS_ON_STACK(s) s
#define FP_ALSO_IN_REG(s) s
#ifdef __mono_ppc64__
#define ALWAYS_ON_STACK(s) s
#define FP_ALSO_IN_REG(s) s
#define ALWAYS_ON_STACK(s)
#define FP_ALSO_IN_REG(s)
#define ALIGN_DOUBLES
/* Per-argument marshalling info (ArgInfo fields). */
guint32 vtsize; /* in param area */
guint8 vtregs; /* number of registers used to pass a RegTypeStructByVal */
guint8 regtype : 4; /* 0 general, 1 basereg, 2 floating point register, see RegType* */
guint8 size : 4; /* 1, 2, 4, 8, or regs used by RegTypeStructByVal */
guint8 bytes : 4; /* size in bytes - only valid for
RegTypeStructByVal if the struct fits
in one word, otherwise it's 0*/
/*
 * add_general:
 * Assign the next integer argument either to the next free general register
 * (*gr) or to a caller-frame stack slot, filling AINFO and advancing *gr /
 * *stack_size. SIMPLE is false for values needing a register pair on 32-bit.
 */
add_general (guint *gr, guint *stack_size, ArgInfo *ainfo, gboolean simple)
#ifdef __mono_ppc64__
if (*gr >= 3 + PPC_NUM_REG_ARGS) {
ainfo->offset = PPC_STACK_PARAM_OFFSET + *stack_size;
ainfo->reg = ppc_sp; /* in the caller */
ainfo->regtype = RegTypeBase;
*stack_size += sizeof (gpointer);
ALWAYS_ON_STACK (*stack_size += sizeof (gpointer));
/* value needs a register pair: one register left is not enough */
if (*gr >= 3 + PPC_NUM_REG_ARGS - 1) {
//*stack_size += (*stack_size % 8);
ainfo->offset = PPC_STACK_PARAM_OFFSET + *stack_size;
ainfo->reg = ppc_sp; /* in the caller */
ainfo->regtype = RegTypeBase;
ALWAYS_ON_STACK (*stack_size += 8);
#if defined(__APPLE__) || defined(__mono_ppc64__)
/* Return TRUE if KLASS has exactly one instance field and it is a float or
 * double: such structs are passed in an FP register on these ABIs. */
has_only_a_r48_field (MonoClass *klass)
gboolean have_field = FALSE;
while ((f = mono_class_get_fields (klass, &iter))) {
if (!(f->type->attrs & FIELD_ATTRIBUTE_STATIC)) {
if (!f->type->byref && (f->type->type == MONO_TYPE_R4 || f->type->type == MONO_TYPE_R8))
/*
 * calculate_sizes:
 * Compute the PPC calling-convention layout for SIG: for each argument
 * (and the return value) decide between general registers, FP registers,
 * by-value register blocks and caller-frame stack slots, and record the
 * result in a freshly allocated CallInfo. IS_PINVOKE selects native
 * struct sizing. The caller owns (frees) the returned CallInfo.
 */
calculate_sizes (MonoMethodSignature *sig, gboolean is_pinvoke)
int n = sig->hasthis + sig->param_count;
MonoType *simpletype;
guint32 stack_size = 0;
CallInfo *cinfo = g_malloc0 (sizeof (CallInfo) + sizeof (ArgInfo) * n);
fr = PPC_FIRST_FPARG_REG;
gr = PPC_FIRST_ARG_REG;
/* FIXME: handle returning a struct */
if (MONO_TYPE_ISSTRUCT (sig->ret)) {
add_general (&gr, &stack_size, &cinfo->ret, TRUE);
cinfo->struct_ret = PPC_FIRST_ARG_REG;
add_general (&gr, &stack_size, cinfo->args + n, TRUE);
DEBUG(printf("params: %d\n", sig->param_count));
for (i = 0; i < sig->param_count; ++i) {
if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos)) {
/* Prevent implicit arguments and sig_cookie from
being passed in registers */
gr = PPC_LAST_ARG_REG + 1;
/* FIXME: don't we have to set fr, too? */
/* Emit the signature cookie just before the implicit arguments */
add_general (&gr, &stack_size, &cinfo->sig_cookie, TRUE);
DEBUG(printf("param %d: ", i));
if (sig->params [i]->byref) {
DEBUG(printf("byref\n"));
add_general (&gr, &stack_size, cinfo->args + n, TRUE);
simpletype = mini_type_get_underlying_type (NULL, sig->params [i]);
switch (simpletype->type) {
case MONO_TYPE_BOOLEAN:
cinfo->args [n].size = 1;
add_general (&gr, &stack_size, cinfo->args + n, TRUE);
case MONO_TYPE_CHAR:
cinfo->args [n].size = 2;
add_general (&gr, &stack_size, cinfo->args + n, TRUE);
cinfo->args [n].size = 4;
add_general (&gr, &stack_size, cinfo->args + n, TRUE);
case MONO_TYPE_FNPTR:
case MONO_TYPE_CLASS:
case MONO_TYPE_OBJECT:
case MONO_TYPE_STRING:
case MONO_TYPE_SZARRAY:
case MONO_TYPE_ARRAY:
cinfo->args [n].size = sizeof (gpointer);
add_general (&gr, &stack_size, cinfo->args + n, TRUE);
case MONO_TYPE_GENERICINST:
if (!mono_type_generic_inst_is_valuetype (simpletype)) {
cinfo->args [n].size = sizeof (gpointer);
add_general (&gr, &stack_size, cinfo->args + n, TRUE);
case MONO_TYPE_VALUETYPE:
case MONO_TYPE_TYPEDBYREF: {
klass = mono_class_from_mono_type (sig->params [i]);
if (simpletype->type == MONO_TYPE_TYPEDBYREF)
size = sizeof (MonoTypedRef);
else if (is_pinvoke)
size = mono_class_native_size (klass, NULL);
size = mono_class_value_size (klass, NULL);
#if defined(__APPLE__) || defined(__mono_ppc64__)
/* single-float/double structs travel in an FP register on these ABIs */
if ((size == 4 || size == 8) && has_only_a_r48_field (klass)) {
cinfo->args [n].size = size;
/* It was 7, now it is 8 in LinuxPPC */
if (fr <= PPC_LAST_FPARG_REG) {
cinfo->args [n].regtype = RegTypeFP;
cinfo->args [n].reg = fr;
FP_ALSO_IN_REG (gr ++);
FP_ALSO_IN_REG (gr ++);
ALWAYS_ON_STACK (stack_size += size);
cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
cinfo->args [n].regtype = RegTypeBase;
cinfo->args [n].reg = ppc_sp; /* in the caller*/
DEBUG(printf ("load %d bytes struct\n",
mono_class_native_size (sig->params [i]->data.klass, NULL)));
#if PPC_PASS_STRUCTS_BY_VALUE
/* split the struct between remaining arg registers and the stack */
int align_size = size;
int rest = PPC_LAST_ARG_REG - gr + 1;
align_size += (sizeof (gpointer) - 1);
align_size &= ~(sizeof (gpointer) - 1);
nregs = (align_size + sizeof (gpointer) -1 ) / sizeof (gpointer);
n_in_regs = MIN (rest, nregs);
/* FIXME: check this */
if (size >= 3 && size % 4 != 0)
cinfo->args [n].regtype = RegTypeStructByVal;
cinfo->args [n].vtregs = n_in_regs;
cinfo->args [n].size = n_in_regs;
cinfo->args [n].vtsize = nregs - n_in_regs;
cinfo->args [n].reg = gr;
#ifdef __mono_ppc64__
if (nregs == 1 && is_pinvoke)
cinfo->args [n].bytes = size;
cinfo->args [n].bytes = 0;
cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
/*g_print ("offset for arg %d at %d\n", n, PPC_STACK_PARAM_OFFSET + stack_size);*/
stack_size += nregs * sizeof (gpointer);
add_general (&gr, &stack_size, cinfo->args + n, TRUE);
cinfo->args [n].regtype = RegTypeStructByAddr;
cinfo->args [n].vtsize = size;
cinfo->args [n].size = 8;
add_general (&gr, &stack_size, cinfo->args + n, SIZEOF_REGISTER == 8);
cinfo->args [n].size = 4;
/* It was 7, now it is 8 in LinuxPPC */
if (fr <= PPC_LAST_FPARG_REG) {
cinfo->args [n].regtype = RegTypeFP;
cinfo->args [n].reg = fr;
FP_ALSO_IN_REG (gr ++);
ALWAYS_ON_STACK (stack_size += SIZEOF_REGISTER);
cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size + MONO_PPC_32_64_CASE (0, 4);
cinfo->args [n].regtype = RegTypeBase;
cinfo->args [n].reg = ppc_sp; /* in the caller*/
stack_size += SIZEOF_REGISTER;
cinfo->args [n].size = 8;
/* It was 7, now it is 8 in LinuxPPC */
if (fr <= PPC_LAST_FPARG_REG) {
cinfo->args [n].regtype = RegTypeFP;
cinfo->args [n].reg = fr;
FP_ALSO_IN_REG (gr += sizeof (double) / SIZEOF_REGISTER);
ALWAYS_ON_STACK (stack_size += 8);
cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
cinfo->args [n].regtype = RegTypeBase;
cinfo->args [n].reg = ppc_sp; /* in the caller*/
g_error ("Can't trampoline 0x%x", sig->params [i]->type);
if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos)) {
/* Prevent implicit arguments and sig_cookie from
being passed in registers */
gr = PPC_LAST_ARG_REG + 1;
/* Emit the signature cookie just before the implicit arguments */
add_general (&gr, &stack_size, &cinfo->sig_cookie, TRUE);
/* now classify the return value */
simpletype = mini_type_get_underlying_type (NULL, sig->ret);
switch (simpletype->type) {
case MONO_TYPE_BOOLEAN:
case MONO_TYPE_CHAR:
case MONO_TYPE_FNPTR:
case MONO_TYPE_CLASS:
case MONO_TYPE_OBJECT:
case MONO_TYPE_SZARRAY:
case MONO_TYPE_ARRAY:
case MONO_TYPE_STRING:
cinfo->ret.reg = ppc_r3;
cinfo->ret.reg = ppc_r3;
cinfo->ret.reg = ppc_f1;
cinfo->ret.regtype = RegTypeFP;
case MONO_TYPE_GENERICINST:
if (!mono_type_generic_inst_is_valuetype (simpletype)) {
cinfo->ret.reg = ppc_r3;
case MONO_TYPE_VALUETYPE:
case MONO_TYPE_TYPEDBYREF:
case MONO_TYPE_VOID:
g_error ("Can't handle as return value 0x%x", sig->ret->type);
/* align stack size to 16 */
DEBUG (printf (" stack size: %d (%d)\n", (stack_size + 15) & ~15, stack_size));
stack_size = (stack_size + 15) & ~15;
cinfo->stack_usage = stack_size;
/*
 * allocate_tailcall_valuetype_addrs:
 * When structs are passed by address and the method contains tail calls,
 * pre-allocate one indirect local per valuetype parameter so tail-call
 * code has somewhere to stash the struct addresses.
 */
allocate_tailcall_valuetype_addrs (MonoCompile *cfg)
#if !PPC_PASS_STRUCTS_BY_VALUE
MonoMethodSignature *sig = mono_method_signature (cfg->method);
int num_structs = 0;
if (!(cfg->flags & MONO_CFG_HAS_TAIL))
for (i = 0; i < sig->param_count; ++i) {
MonoType *type = mono_type_get_underlying_type (sig->params [i]);
if (type->type == MONO_TYPE_VALUETYPE)
cfg->tailcall_valuetype_addrs =
mono_mempool_alloc0 (cfg->mempool, sizeof (MonoInst*) * num_structs);
for (i = 0; i < num_structs; ++i) {
cfg->tailcall_valuetype_addrs [i] =
mono_compile_create_var (cfg, &mono_defaults.int_class->byval_arg, OP_LOCAL);
cfg->tailcall_valuetype_addrs [i]->flags |= MONO_INST_INDIRECT;
/*
* Set var information according to the calling convention. ppc version.
* The locals var stuff should most likely be split in another method.
* Lays out the stack frame: linkage area, param area, trace/LMF slots,
* vret address, locals and incoming arguments, assigning OP_REGOFFSET /
* OP_REGVAR to each variable. (Continues past this view.)
*/
mono_arch_allocate_vars (MonoCompile *m)
MonoMethodSignature *sig;
MonoMethodHeader *header;
int i, offset, size, align, curinst;
int frame_reg = ppc_sp;
guint32 locals_stack_size, locals_stack_align;
allocate_tailcall_valuetype_addrs (m);
/* on ppc the frame grows up: spill slots are above the locals */
m->flags |= MONO_CFG_HAS_SPILLUP;
/* allow room for the vararg method args: void* and long/double */
if (mono_jit_trace_calls != NULL && mono_trace_eval (m->method))
m->param_area = MAX (m->param_area, sizeof (gpointer)*8);
/* this is bug #60332: remove when #59509 is fixed, so no weird vararg
* call convs needs to be handled this way.
*/
if (m->flags & MONO_CFG_HAS_VARARGS)
m->param_area = MAX (m->param_area, sizeof (gpointer)*8);
/* gtk-sharp and other broken code will dllimport vararg functions even with
* non-varargs signatures. Since there is little hope people will get this right
* we assume they won't.
*/
if (m->method->wrapper_type == MONO_WRAPPER_MANAGED_TO_NATIVE)
m->param_area = MAX (m->param_area, sizeof (gpointer)*8);
header = mono_method_get_header (m->method);
/*
* We use the frame register also for any method that has
* exception clauses. This way, when the handlers are called,
* the code will reference local variables using the frame reg instead of
* the stack pointer: if we had to restore the stack pointer, we'd
* corrupt the method frames that are already on the stack (since
* filters get called before stack unwinding happens) when the filter
* code would call any method (this also applies to finally etc.).
*/
if ((m->flags & MONO_CFG_HAS_ALLOCA) || header->num_clauses)
frame_reg = ppc_r31;
m->frame_reg = frame_reg;
if (frame_reg != ppc_sp) {
m->used_int_regs |= 1 << frame_reg;
sig = mono_method_signature (m->method);
if (MONO_TYPE_ISSTRUCT (sig->ret)) {
m->ret->opcode = OP_REGVAR;
m->ret->inst_c0 = m->ret->dreg = ppc_r3;
/* FIXME: handle long values? */
switch (mini_type_get_underlying_type (m->generic_sharing_context, sig->ret)->type) {
case MONO_TYPE_VOID:
m->ret->opcode = OP_REGVAR;
m->ret->inst_c0 = m->ret->dreg = ppc_f1;
m->ret->opcode = OP_REGVAR;
m->ret->inst_c0 = m->ret->dreg = ppc_r3;
/* local vars are at a positive offset from the stack pointer */
/*
* also note that if the function uses alloca, we use ppc_r31
* to point at the local variables.
*/
offset = PPC_MINIMAL_STACK_SIZE; /* linkage area */
/* align the offset to 16 bytes: not sure this is needed here */
//offset &= ~(16 - 1);
/* add parameter area size for called functions */
offset += m->param_area;
offset &= ~(16 - 1);
/* allow room to save the return value */
if (mono_jit_trace_calls != NULL && mono_trace_eval (m->method))
/* the MonoLMF structure is stored just below the stack pointer */
/* this stuff should not be needed on ppc and the new jit,
* because a call on ppc to the handlers doesn't change the
* stack pointer and the jit doesn't manipulate the stack pointer
* for operations involving valuetypes.
*/
/* reserve space to store the esp */
offset += sizeof (gpointer);
/* this is a global constant */
mono_exc_esp_offset = offset;
if (MONO_TYPE_ISSTRUCT (sig->ret)) {
offset += sizeof(gpointer) - 1;
offset &= ~(sizeof(gpointer) - 1);
m->vret_addr->opcode = OP_REGOFFSET;
m->vret_addr->inst_basereg = frame_reg;
m->vret_addr->inst_offset = offset;
if (G_UNLIKELY (m->verbose_level > 1)) {
printf ("vret_addr =");
mono_print_ins (m->vret_addr);
offset += sizeof(gpointer);
offsets = mono_allocate_stack_slots_full (m, FALSE, &locals_stack_size, &locals_stack_align);
if (locals_stack_align) {
offset += (locals_stack_align - 1);
offset &= ~(locals_stack_align - 1);
for (i = m->locals_start; i < m->num_varinfo; i++) {
if (offsets [i] != -1) {
MonoInst *inst = m->varinfo [i];
inst->opcode = OP_REGOFFSET;
inst->inst_basereg = frame_reg;
inst->inst_offset = offset + offsets [i];
g_print ("allocating local %d (%s) to %d\n",
i, mono_type_get_name (inst->inst_vtype), inst->inst_offset);
offset += locals_stack_size;
/* place the 'this' argument */
inst = m->args [curinst];
if (inst->opcode != OP_REGVAR) {
inst->opcode = OP_REGOFFSET;
inst->inst_basereg = frame_reg;
offset += sizeof (gpointer) - 1;
offset &= ~(sizeof (gpointer) - 1);
inst->inst_offset = offset;
offset += sizeof (gpointer);
/* place the remaining incoming arguments */
for (i = 0; i < sig->param_count; ++i) {
inst = m->args [curinst];
if (inst->opcode != OP_REGVAR) {
inst->opcode = OP_REGOFFSET;
inst->inst_basereg = frame_reg;
size = mono_type_native_stack_size (sig->params [i], (guint32*)&align);
inst->backend.is_pinvoke = 1;
size = mono_type_size (sig->params [i], &align);
if (MONO_TYPE_ISSTRUCT (sig->params [i]) && size < sizeof (gpointer))
size = align = sizeof (gpointer);
offset += align - 1;
offset &= ~(align - 1);
inst->inst_offset = offset;
1480 /* some storage for fp conversions */
1483 m->arch.fp_conv_var_offset = offset;
1486 /* align the offset to 16 bytes */
1488 offset &= ~(16 - 1);
1491 m->stack_offset = offset;
1493 if (sig->call_convention == MONO_CALL_VARARG) {
1494 CallInfo *cinfo = calculate_sizes (m->method->signature, m->method->signature->pinvoke);
1496 m->sig_cookie = cinfo->sig_cookie.offset;
/*
 * mono_arch_create_vars:
 *   Arch hook run before register allocation to create backend-specific
 *   variables.  Here: when the method returns a value type, allocate an
 *   OP_ARG variable (cfg->vret_addr) to carry the hidden valuetype-return
 *   address passed by the caller.
 *   NOTE(review): this excerpt is elided — the return-type line and the
 *   closing brace of the function are not visible here.
 */
1503 mono_arch_create_vars (MonoCompile *cfg)
1505 	MonoMethodSignature *sig = mono_method_signature (cfg->method);
1507 	if (MONO_TYPE_ISSTRUCT (sig->ret)) {
1508 		cfg->vret_addr = mono_compile_create_var (cfg, &mono_defaults.int_class->byval_arg, OP_ARG);
1512 /* Fixme: we need an alignment solution for enter_method and mono_arch_call_opcode,
1513 * currently alignment in mono_arch_call_opcode is computed without arch_get_argument_info
/*
 * emit_sig_cookie:
 *   For MONO_CALL_VARARG calls, materialize the call signature pointer (the
 *   "sig cookie") in a vreg and store it on the outgoing stack frame
 *   (ppc_r1 = PPC stack pointer) at the offset computed by calculate_sizes(),
 *   so the callee can locate its variadic arguments.
 *   AOT is disabled because the signature is embedded as a raw pointer.
 */
1517 emit_sig_cookie (MonoCompile *cfg, MonoCallInst *call, CallInfo *cinfo)
1519 	int sig_reg = mono_alloc_ireg (cfg);
1521 	/* FIXME: Add support for signature tokens to AOT */
1522 	cfg->disable_aot = TRUE;
1524 	MONO_EMIT_NEW_ICONST (cfg, sig_reg, (gulong)call->signature);
1525 	MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG,
1526 			ppc_r1, cinfo->sig_cookie.offset, sig_reg);
/*
 * mono_arch_emit_call:
 *   Emit the IR that marshals the arguments of a call according to the
 *   per-argument ArgInfo computed by calculate_sizes(): register args become
 *   moves + mono_call_inst_add_outarg_reg() bindings, stack args become
 *   stores relative to ppc_r1 (the PPC stack pointer), and valuetype args
 *   are deferred to mono_arch_emit_outarg_vt() via OP_OUTARG_VT.
 *   NOTE(review): this excerpt is elided — several `else` branches, local
 *   declarations and closing braces fall in the gaps between the numbered
 *   lines below.
 */
1530 mono_arch_emit_call (MonoCompile *cfg, MonoCallInst *call)
1533 	MonoMethodSignature *sig;
1537 	sig = call->signature;
1538 	n = sig->param_count + sig->hasthis;
1540 	cinfo = calculate_sizes (sig, sig->pinvoke);
1542 	for (i = 0; i < n; ++i) {
1543 		ArgInfo *ainfo = cinfo->args + i;
		/* args before sig->hasthis is the implicit 'this'; it is typed as a
		 * plain pointer (int_class byval_arg) */
1546 		if (i >= sig->hasthis)
1547 			t = sig->params [i - sig->hasthis];
1549 			t = &mono_defaults.int_class->byval_arg;
1550 		t = mini_type_get_underlying_type (cfg->generic_sharing_context, t);
		/* managed varargs: the signature cookie is emitted right before the
		 * first vararg (the sentinel position) */
1552 		if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos))
1553 			emit_sig_cookie (cfg, call, cinfo);
1555 		in = call->args [i];
1557 		if (ainfo->regtype == RegTypeGeneral) {
1558 #ifndef __mono_ppc64__
			/* on 32-bit PPC a 64-bit integer occupies two consecutive
			 * registers: the vreg pair (dreg+1 = low word, dreg+2 = high
			 * word on this convention) is bound to ainfo->reg/reg+1 */
1559 			if (!t->byref && ((t->type == MONO_TYPE_I8) || (t->type == MONO_TYPE_U8))) {
1560 				MONO_INST_NEW (cfg, ins, OP_MOVE);
1561 				ins->dreg = mono_alloc_ireg (cfg);
1562 				ins->sreg1 = in->dreg + 1;
1563 				MONO_ADD_INS (cfg->cbb, ins);
1564 				mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, ainfo->reg + 1, FALSE);
1566 				MONO_INST_NEW (cfg, ins, OP_MOVE);
1567 				ins->dreg = mono_alloc_ireg (cfg);
1568 				ins->sreg1 = in->dreg + 2;
1569 				MONO_ADD_INS (cfg->cbb, ins);
1570 				mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, ainfo->reg, FALSE);
			/* ordinary register-sized argument: copy into a fresh vreg and
			 * bind it to the outgoing hard register */
1574 			MONO_INST_NEW (cfg, ins, OP_MOVE);
1575 			ins->dreg = mono_alloc_ireg (cfg);
1576 			ins->sreg1 = in->dreg;
1577 			MONO_ADD_INS (cfg->cbb, ins);
1579 			mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, ainfo->reg, FALSE);
1581 		} else if (ainfo->regtype == RegTypeStructByAddr) {
			/* valuetype passed by reference: defer the copy/address setup to
			 * mono_arch_emit_outarg_vt (), which receives a private copy of
			 * the ArgInfo through inst_p1 */
1582 			MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1583 			ins->opcode = OP_OUTARG_VT;
1584 			ins->sreg1 = in->dreg;
1585 			ins->klass = in->klass;
1586 			ins->inst_p0 = call;
1587 			ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1588 			memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1589 			MONO_ADD_INS (cfg->cbb, ins);
1590 		} else if (ainfo->regtype == RegTypeStructByVal) {
1591 			/* this is further handled in mono_arch_emit_outarg_vt () */
1592 			MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1593 			ins->opcode = OP_OUTARG_VT;
1594 			ins->sreg1 = in->dreg;
1595 			ins->klass = in->klass;
1596 			ins->inst_p0 = call;
1597 			ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1598 			memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1599 			MONO_ADD_INS (cfg->cbb, ins);
1600 		} else if (ainfo->regtype == RegTypeBase) {
			/* argument passed on the stack: store with a width matching the
			 * type (i8 / r8 / r4 / pointer-sized) at ppc_r1 + offset */
1601 			if (!t->byref && ((t->type == MONO_TYPE_I8) || (t->type == MONO_TYPE_U8))) {
1602 				MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI8_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1603 			} else if (!t->byref && ((t->type == MONO_TYPE_R4) || (t->type == MONO_TYPE_R8))) {
1604 				if (t->type == MONO_TYPE_R8)
1605 					MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER8_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1607 					MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER4_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1609 				MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1611 		} else if (ainfo->regtype == RegTypeFP) {
1612 			if (t->type == MONO_TYPE_VALUETYPE) {
1613 				/* this is further handled in mono_arch_emit_outarg_vt () */
1614 				MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1615 				ins->opcode = OP_OUTARG_VT;
1616 				ins->sreg1 = in->dreg;
1617 				ins->klass = in->klass;
1618 				ins->inst_p0 = call;
1619 				ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1620 				memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1621 				MONO_ADD_INS (cfg->cbb, ins);
1623 				cfg->flags |= MONO_CFG_HAS_FPOUT;
				/* scalar float arg: narrow to r4 when the slot is 4 bytes,
				 * otherwise a plain fp move, then bind to the FP register */
1625 				int dreg = mono_alloc_freg (cfg);
1627 				if (ainfo->size == 4) {
1628 					MONO_EMIT_NEW_UNALU (cfg, OP_FCONV_TO_R4, dreg, in->dreg);
1630 					MONO_INST_NEW (cfg, ins, OP_FMOVE);
1632 					ins->sreg1 = in->dreg;
1633 					MONO_ADD_INS (cfg->cbb, ins);
1636 				mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg, TRUE);
1637 				cfg->flags |= MONO_CFG_HAS_FPOUT;
1640 			g_assert_not_reached ();
1644 	/* Emit the signature cookie in the case that there is no
1645 	   additional argument */
1646 	if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (n == sig->sentinelpos))
1647 		emit_sig_cookie (cfg, call, cinfo);
1649 	if (cinfo->struct_ret) {
		/* hidden valuetype-return: pass the address of the vret variable in
		 * the struct-return register */
1652 		MONO_INST_NEW (cfg, vtarg, OP_MOVE);
1653 		vtarg->sreg1 = call->vret_var->dreg;
1654 		vtarg->dreg = mono_alloc_preg (cfg);
1655 		MONO_ADD_INS (cfg->cbb, vtarg);
1657 		mono_call_inst_add_outarg_reg (cfg, call, vtarg->dreg, cinfo->struct_ret, FALSE);
1660 	call->stack_usage = cinfo->stack_usage;
1661 	cfg->param_area = MAX (PPC_MINIMAL_PARAM_AREA_SIZE, MAX (cfg->param_area, cinfo->stack_usage));
1662 	cfg->flags |= MONO_CFG_HAS_CALLS;
/*
 * mono_arch_emit_outarg_vt:
 *   Expand an OP_OUTARG_VT placeholder (created by mono_arch_emit_call) into
 *   the loads/stores that actually pass a valuetype argument.  The ArgInfo
 *   snapshot travels in ins->inst_p1; src->dreg holds the address of the
 *   valuetype.
 *   NOTE(review): elided excerpt — `soffset` initialization, some braces and
 *   `else` keywords are missing between the numbered lines.
 */
1668 mono_arch_emit_outarg_vt (MonoCompile *cfg, MonoInst *ins, MonoInst *src)
1670 	MonoCallInst *call = (MonoCallInst*)ins->inst_p0;
1671 	ArgInfo *ainfo = ins->inst_p1;
1672 	int ovf_size = ainfo->vtsize;
1673 	int doffset = ainfo->offset;
1674 	int i, soffset, dreg;
1676 	if (ainfo->regtype == RegTypeStructByVal) {
		/*
1683 		 * Darwin pinvokes needs some special handling for 1
1684 		 * and 2 byte arguments
		 */
1686 		g_assert (ins->klass);
1687 		if (call->signature->pinvoke)
1688 			size = mono_class_native_size (ins->klass, NULL);
		/* 1/2-byte structs: load the (sign- or zero-extended) scalar and
		 * pass it in a single register */
1689 		if (size == 2 || size == 1) {
1690 			int tmpr = mono_alloc_ireg (cfg);
1692 				MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI1_MEMBASE, tmpr, src->dreg, soffset);
1694 				MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI2_MEMBASE, tmpr, src->dreg, soffset);
1695 			dreg = mono_alloc_ireg (cfg);
1696 			MONO_EMIT_NEW_UNALU (cfg, OP_MOVE, dreg, tmpr);
1697 			mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg, FALSE);
		/* general case: copy the struct word by word into consecutive
		 * registers; the final partial word is right-shifted so the bytes
		 * land at the low end ("antipadding") */
1700 		for (i = 0; i < ainfo->vtregs; ++i) {
1701 			int antipadding = 0;
1704 				antipadding = sizeof (gpointer) - ainfo->bytes;
1706 			dreg = mono_alloc_ireg (cfg);
1707 			MONO_EMIT_NEW_LOAD_MEMBASE (cfg, dreg, src->dreg, soffset);
1709 				MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHR_UN_IMM, dreg, dreg, antipadding * 8);
1710 			mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg + i, FALSE);
1711 			soffset += sizeof (gpointer);
		/* whatever did not fit in registers overflows onto the stack */
1714 			mini_emit_memcpy (cfg, ppc_r1, doffset + soffset, src->dreg, soffset, ovf_size * sizeof (gpointer), 0);
1715 	} else if (ainfo->regtype == RegTypeFP) {
		/* a valuetype wrapping a single float: load it with the right
		 * precision and pass it in an FP register */
1716 		int tmpr = mono_alloc_freg (cfg);
1717 		if (ainfo->size == 4)
1718 			MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR4_MEMBASE, tmpr, src->dreg, 0);
1720 			MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR8_MEMBASE, tmpr, src->dreg, 0);
1721 		dreg = mono_alloc_freg (cfg);
1722 		MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, dreg, tmpr);
1723 		mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg, TRUE);
		/* pass-by-address: make a local copy of the valuetype and hand out
		 * the copy's address (in a register or on the stack) */
1725 		MonoInst *vtcopy = mono_compile_create_var (cfg, &src->klass->byval_arg, OP_LOCAL);
1729 		/* FIXME: alignment? */
1730 		if (call->signature->pinvoke) {
1731 			size = mono_type_native_stack_size (&src->klass->byval_arg, NULL);
1732 			vtcopy->backend.is_pinvoke = 1;
1734 			size = mini_type_stack_size (cfg->generic_sharing_context, &src->klass->byval_arg, NULL);
1737 		g_assert (ovf_size > 0);
1739 		EMIT_NEW_VARLOADA (cfg, load, vtcopy, vtcopy->inst_vtype);
1740 		mini_emit_memcpy (cfg, load->dreg, 0, src->dreg, 0, size, 0);
1743 			MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ppc_r1, ainfo->offset, load->dreg);
1745 			mono_call_inst_add_outarg_reg (cfg, call, load->dreg, ainfo->reg, FALSE);
/*
 * mono_arch_emit_setret:
 *   Emit the move of the computed return value into the method's return
 *   vreg (cfg->ret->dreg).  On 32-bit PPC a 64-bit return uses OP_SETLRET
 *   with the low/high vreg pair; floats use OP_FMOVE; everything else an
 *   integer OP_MOVE.
 */
1750 mono_arch_emit_setret (MonoCompile *cfg, MonoMethod *method, MonoInst *val)
1752 	MonoType *ret = mini_type_get_underlying_type (cfg->generic_sharing_context,
1753 			mono_method_signature (method)->ret);
1756 #ifndef __mono_ppc64__
1757 	if (ret->type == MONO_TYPE_I8 || ret->type == MONO_TYPE_U8) {
		/* 64-bit value on 32-bit PPC lives in a vreg pair (+1 / +2) */
1760 		MONO_INST_NEW (cfg, ins, OP_SETLRET);
1761 		ins->sreg1 = val->dreg + 1;
1762 		ins->sreg2 = val->dreg + 2;
1763 		MONO_ADD_INS (cfg->cbb, ins);
1767 	if (ret->type == MONO_TYPE_R8 || ret->type == MONO_TYPE_R4) {
1768 		MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, cfg->ret->dreg, val->dreg);
1772 	MONO_EMIT_NEW_UNALU (cfg, OP_MOVE, cfg->ret->dreg, val->dreg);
1775 /* FIXME: this is just a useless hint: fix the interface to include the opcode */
1777 mono_arch_is_inst_imm (gint64 imm)
/*
1783  * Allow tracing to work with this interface (with an optional argument)
 *
 * mono_arch_instrument_prolog:
 *   Emit the call to the trace `func` at method entry: method pointer in
 *   ppc_r3, a NULL placeholder in ppc_r4, target address via ppc_r0 -> LR.
 *   NOTE(review): the tail of this function (the actual branch and the
 *   return of `code`) is elided from this excerpt.
 */
1787 mono_arch_instrument_prolog (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments)
1791 	ppc_load_ptr (code, ppc_r3, cfg->method);
1792 	ppc_li (code, ppc_r4, 0); /* NULL ebp for now */
1793 	ppc_load_func (code, ppc_r0, func);
1794 	ppc_mtlr (code, ppc_r0);
/*
 * mono_arch_instrument_epilog_full:
 *   Emit the call to the trace `func` at method exit.  The return value
 *   (which lives in r3/r3:r4/f1 depending on type) must survive the call,
 *   so it is spilled to the frame at save_offset before and reloaded after.
 *   save_mode is chosen from the method's (underlying) return type.
 *   NOTE(review): elided excerpt — the switch header on rtype, several case
 *   labels, `break`s and the function tail are missing between the numbered
 *   lines.
 */
1808 mono_arch_instrument_epilog_full (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments, gboolean preserve_argument_registers)
1811 	int save_mode = SAVE_NONE;
1813 	MonoMethod *method = cfg->method;
1814 	int rtype = mini_type_get_underlying_type (cfg->generic_sharing_context,
1815 			mono_method_signature (method)->ret)->type;
1816 	int save_offset = PPC_STACK_PARAM_OFFSET + cfg->param_area;
	/* grow the native code buffer if fewer than ~16 instructions fit */
1820 	offset = code - cfg->native_code;
1821 	/* we need about 16 instructions */
1822 	if (offset > (cfg->code_size - 16 * 4)) {
1823 		cfg->code_size *= 2;
1824 		cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
1825 		code = cfg->native_code + offset;
1829 	case MONO_TYPE_VOID:
1830 		/* special case string .ctor icall */
		/* NOTE(review): `strcmp (...) && ...` is true when the name is NOT
		 * ".ctor"; the analogous code on other backends uses !strcmp —
		 * confirm against upstream whether the `!` was lost here. */
1831 		if (strcmp (".ctor", method->name) && method->klass == mono_defaults.string_class)
1832 			save_mode = SAVE_ONE;
1834 			save_mode = SAVE_NONE;
1836 #ifndef __mono_ppc64__
1839 		save_mode = SAVE_TWO;
1844 		save_mode = SAVE_FP;
1846 	case MONO_TYPE_VALUETYPE:
1847 		save_mode = SAVE_STRUCT;
1850 		save_mode = SAVE_ONE;
	/* spill the return value; when tracing wants arguments, shift it into
	 * the argument registers (r4/r5, or f3 for floats) as well */
1854 	switch (save_mode) {
1856 		ppc_stw (code, ppc_r3, save_offset, cfg->frame_reg);
1857 		ppc_stw (code, ppc_r4, save_offset + 4, cfg->frame_reg);
1858 		if (enable_arguments) {
1859 			ppc_mr (code, ppc_r5, ppc_r4);
1860 			ppc_mr (code, ppc_r4, ppc_r3);
1864 		ppc_stptr (code, ppc_r3, save_offset, cfg->frame_reg);
1865 		if (enable_arguments) {
1866 			ppc_mr (code, ppc_r4, ppc_r3);
1870 		ppc_stfd (code, ppc_f1, save_offset, cfg->frame_reg);
1871 		if (enable_arguments) {
1872 			/* FIXME: what reg? */
1873 			ppc_fmr (code, ppc_f3, ppc_f1);
1874 			/* FIXME: use 8 byte load on PPC64 */
1875 			ppc_lwz (code, ppc_r4, save_offset, cfg->frame_reg);
1876 			ppc_lwz (code, ppc_r5, save_offset + 4, cfg->frame_reg);
1880 		if (enable_arguments) {
1881 			/* FIXME: get the actual address */
1882 			ppc_mr (code, ppc_r4, ppc_r3);
	/* call the trace function: method in r3, target via r0 -> LR */
1890 	ppc_load_ptr (code, ppc_r3, cfg->method);
1891 	ppc_load_func (code, ppc_r0, func);
1892 	ppc_mtlr (code, ppc_r0);
	/* reload the saved return value */
1895 	switch (save_mode) {
1897 		ppc_lwz (code, ppc_r3, save_offset, cfg->frame_reg);
1898 		ppc_lwz (code, ppc_r4, save_offset + 4, cfg->frame_reg);
1901 		ppc_ldptr (code, ppc_r3, save_offset, cfg->frame_reg);
1904 		ppc_lfd (code, ppc_f1, save_offset, cfg->frame_reg);
/*
1914  * Conditional branches have a small offset, so if it is likely overflowed,
1915  * we do a branch to the end of the method (uncond branches have much larger
1916  * offsets) where we perform the conditional and jump back unconditionally.
1917  * It's slightly slower, since we add two uncond branches, but it's very simple
1918  * with the current patch implementation and such large methods are likely not
1919  * going to be perf critical anyway.
 */
/* NOTE(review): the MonoOvfJump struct definition is elided here; only the
 * `exception` member line is visible. */
1924 	const char *exception;
/*
 * EMIT_COND_BRANCH_FLAGS(ins, b0, b1):
 *   Emit a PPC conditional branch (bc b0,b1,disp) to ins->inst_true_bb.
 *   When the estimated displacement may not fit in bc's 16-bit field, record
 *   a MONO_PATCH_INFO_BB_OVF trampoline instead; otherwise patch as a plain
 *   MONO_PATCH_INFO_BB.
 * NOTE(review): `ppc_is_imm16 (ppc_is_imm16 (br_disp - 1024))` below looks
 * like an accidental double application — the intended condition is
 * presumably `!ppc_is_imm16 (br_disp - 1024)`; confirm against upstream.
 */
1931 #define EMIT_COND_BRANCH_FLAGS(ins,b0,b1) \
1932 if (0 && ins->inst_true_bb->native_offset) { \
1933 	ppc_bc (code, (b0), (b1), (code - cfg->native_code + ins->inst_true_bb->native_offset) & 0xffff); \
1935 	int br_disp = ins->inst_true_bb->max_offset - offset;	\
1936 	if (!ppc_is_imm16 (br_disp + 1024) || ! ppc_is_imm16 (ppc_is_imm16 (br_disp - 1024))) {	\
1937 		MonoOvfJump *ovfj = mono_mempool_alloc (cfg->mempool, sizeof (MonoOvfJump));	\
1938 		ovfj->data.bb = ins->inst_true_bb;	\
1939 		ovfj->ip_offset = 0;	\
1940 		ovfj->b0_cond = (b0);	\
1941 		ovfj->b1_cond = (b1);	\
1942 		mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB_OVF, ovfj);	\
1945 		mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_true_bb);	\
1946 		ppc_bc (code, (b0), (b1), 0);	\
1950 #define EMIT_COND_BRANCH(ins,cond) EMIT_COND_BRANCH_FLAGS(ins, branch_b0_table [(cond)], branch_b1_table [(cond)])
/*
1952  * emit an exception if condition is fail
 *
1954  * We assign the extra code used to throw the implicit exceptions
1955  * to cfg->bb_exit as far as the big branch handling is concerned
 */
/* NOTE(review): same suspicious nested ppc_is_imm16 as above — confirm
 * against upstream. */
1957 #define EMIT_COND_SYSTEM_EXCEPTION_FLAGS(b0,b1,exc_name)            \
1959 		int br_disp = cfg->bb_exit->max_offset - offset;	\
1960 		if (!ppc_is_imm16 (br_disp + 1024) || ! ppc_is_imm16 (ppc_is_imm16 (br_disp - 1024))) {	\
1961 			MonoOvfJump *ovfj = mono_mempool_alloc (cfg->mempool, sizeof (MonoOvfJump));	\
1962 			ovfj->data.exception = (exc_name);	\
1963 			ovfj->ip_offset = code - cfg->native_code;	\
1964 			ovfj->b0_cond = (b0);	\
1965 			ovfj->b1_cond = (b1);	\
1966 			mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC_OVF, ovfj);	\
1968 			cfg->bb_exit->max_offset += 24;	\
1970 			mono_add_patch_info (cfg, code - cfg->native_code,	\
1971 				    MONO_PATCH_INFO_EXC, exc_name);	\
1972 			ppc_bcl (code, (b0), (b1), 0);	\
1976 #define EMIT_COND_SYSTEM_EXCEPTION(cond,exc_name) EMIT_COND_SYSTEM_EXCEPTION_FLAGS(branch_b0_table [(cond)], branch_b1_table [(cond)], (exc_name))
1979 mono_arch_peephole_pass_1 (MonoCompile *cfg, MonoBasicBlock *bb)
/*
 * normalize_opcode:
 *   Canonicalize pointer-width-specific opcodes so the peephole pass can
 *   match 32-bit and 64-bit variants with a single case (e.g. both
 *   OP_LOADI4_MEMBASE and OP_LOADI8_MEMBASE become OP_LOAD_MEMBASE on the
 *   matching word size).
 *   NOTE(review): the `switch (opcode)` header, the default return and the
 *   closing brace are elided from this excerpt.
 */
1984 normalize_opcode (int opcode)
1987 #ifndef __mono_ilp32__
1988 	case MONO_PPC_32_64_CASE (OP_LOADI4_MEMBASE, OP_LOADI8_MEMBASE):
1989 		return OP_LOAD_MEMBASE;
1990 	case MONO_PPC_32_64_CASE (OP_LOADI4_MEMINDEX, OP_LOADI8_MEMINDEX):
1991 		return OP_LOAD_MEMINDEX;
1992 	case MONO_PPC_32_64_CASE (OP_STOREI4_MEMBASE_REG, OP_STOREI8_MEMBASE_REG):
1993 		return OP_STORE_MEMBASE_REG;
1994 	case MONO_PPC_32_64_CASE (OP_STOREI4_MEMBASE_IMM, OP_STOREI8_MEMBASE_IMM):
1995 		return OP_STORE_MEMBASE_IMM;
1996 	case MONO_PPC_32_64_CASE (OP_STOREI4_MEMINDEX, OP_STOREI8_MEMINDEX):
1997 		return OP_STORE_MEMINDEX;
1999 	case MONO_PPC_32_64_CASE (OP_ISHR_IMM, OP_LSHR_IMM):
2001 	case MONO_PPC_32_64_CASE (OP_ISHR_UN_IMM, OP_LSHR_UN_IMM):
2002 		return OP_SHR_UN_IMM;
/*
 * mono_arch_peephole_pass_2:
 *   Per-basic-block peephole optimizations run after lowering:
 *     - multiply by 1 -> move / delete; multiply by power of two -> shift
 *     - store/load forwarding: a load from the address just stored to is
 *       replaced by a move (or deleted when src == dst)
 *     - redundant load elimination for back-to-back identical loads
 *     - narrow loads after matching narrow stores become sign/zero extends
 *     - self-moves and move/move-back pairs are deleted
 *   NOTE(review): elided excerpt — some case labels (e.g. the MUL_IMM case
 *   header), `break`s and closing braces fall in the numbering gaps.
 */
2009 mono_arch_peephole_pass_2 (MonoCompile *cfg, MonoBasicBlock *bb)
2011 	MonoInst *ins, *n, *last_ins = NULL;
2013 	MONO_BB_FOR_EACH_INS_SAFE (bb, n, ins) {
2014 		switch (normalize_opcode (ins->opcode)) {
2016 			/* remove unnecessary multiplication with 1 */
2017 			if (ins->inst_imm == 1) {
2018 				if (ins->dreg != ins->sreg1) {
2019 					ins->opcode = OP_MOVE;
2021 					MONO_DELETE_INS (bb, ins);
			/* multiplication by a power of two becomes a left shift */
2025 				int power2 = mono_is_power_of_two (ins->inst_imm);
2027 					ins->opcode = OP_SHL_IMM;
2028 					ins->inst_imm = power2;
2032 		case OP_LOAD_MEMBASE:
			/*
2034 			 * OP_STORE_MEMBASE_REG reg, offset(basereg)
2035 			 * OP_LOAD_MEMBASE offset(basereg), reg
			 */
2037 			if (last_ins && normalize_opcode (last_ins->opcode) == OP_STORE_MEMBASE_REG &&
2038 			    ins->inst_basereg == last_ins->inst_destbasereg &&
2039 			    ins->inst_offset == last_ins->inst_offset) {
2040 				if (ins->dreg == last_ins->sreg1) {
2041 					MONO_DELETE_INS (bb, ins);
2044 					//static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
2045 					ins->opcode = OP_MOVE;
2046 					ins->sreg1 = last_ins->sreg1;
			/*
2050 			 * Note: reg1 must be different from the basereg in the second load
2051 			 * OP_LOAD_MEMBASE offset(basereg), reg1
2052 			 * OP_LOAD_MEMBASE offset(basereg), reg2
			 *  -->
2054 			 * OP_LOAD_MEMBASE offset(basereg), reg1
2055 			 * OP_MOVE reg1, reg2
			 */
2057 			} else if (last_ins && normalize_opcode (last_ins->opcode) == OP_LOAD_MEMBASE &&
2058 			      ins->inst_basereg != last_ins->dreg &&
2059 			      ins->inst_basereg == last_ins->inst_basereg &&
2060 			      ins->inst_offset == last_ins->inst_offset) {
2062 				if (ins->dreg == last_ins->dreg) {
2063 					MONO_DELETE_INS (bb, ins);
2066 					ins->opcode = OP_MOVE;
2067 					ins->sreg1 = last_ins->dreg;
2070 				//g_assert_not_reached ();
			/*
2074 			 * OP_STORE_MEMBASE_IMM imm, offset(basereg)
2075 			 * OP_LOAD_MEMBASE offset(basereg), reg
			 *  -->
2077 			 * OP_STORE_MEMBASE_IMM imm, offset(basereg)
2078 			 * OP_ICONST reg, imm
			 */
2080 			} else if (last_ins && normalize_opcode (last_ins->opcode) == OP_STORE_MEMBASE_IMM &&
2081 			           ins->inst_basereg == last_ins->inst_destbasereg &&
2082 			           ins->inst_offset == last_ins->inst_offset) {
2083 				//static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
2084 				ins->opcode = OP_ICONST;
2085 				ins->inst_c0 = last_ins->inst_imm;
2086 				g_assert_not_reached (); // check this rule
2090 		case OP_LOADU1_MEMBASE:
2091 		case OP_LOADI1_MEMBASE:
2092 			if (last_ins && (last_ins->opcode == OP_STOREI1_MEMBASE_REG) &&
2093 			    ins->inst_basereg == last_ins->inst_destbasereg &&
2094 			    ins->inst_offset == last_ins->inst_offset) {
2095 				ins->opcode = (ins->opcode == OP_LOADI1_MEMBASE) ? OP_ICONV_TO_I1 : OP_ICONV_TO_U1;
2096 				ins->sreg1 = last_ins->sreg1;
2099 		case OP_LOADU2_MEMBASE:
2100 		case OP_LOADI2_MEMBASE:
2101 			if (last_ins && (last_ins->opcode == OP_STOREI2_MEMBASE_REG) &&
2102 			    ins->inst_basereg == last_ins->inst_destbasereg &&
2103 			    ins->inst_offset == last_ins->inst_offset) {
2104 				ins->opcode = (ins->opcode == OP_LOADI2_MEMBASE) ? OP_ICONV_TO_I2 : OP_ICONV_TO_U2;
2105 				ins->sreg1 = last_ins->sreg1;
2108 #ifdef __mono_ppc64__
2109 		case OP_LOADU4_MEMBASE:
2110 		case OP_LOADI4_MEMBASE:
2111 			if (last_ins && (last_ins->opcode == OP_STOREI4_MEMBASE_REG) &&
2112 			    ins->inst_basereg == last_ins->inst_destbasereg &&
2113 			    ins->inst_offset == last_ins->inst_offset) {
2114 				ins->opcode = (ins->opcode == OP_LOADI4_MEMBASE) ? OP_ICONV_TO_I4 : OP_ICONV_TO_U4;
2115 				ins->sreg1 = last_ins->sreg1;
2120 			ins->opcode = OP_MOVE;
			/* remove moves with the same source and destination */
2124 			if (ins->dreg == ins->sreg1) {
2125 				MONO_DELETE_INS (bb, ins);
			/*
2129 			 * OP_MOVE sreg, dreg
2130 			 * OP_MOVE dreg, sreg
			 */
2132 			if (last_ins && last_ins->opcode == OP_MOVE &&
2133 			    ins->sreg1 == last_ins->dreg &&
2134 			    ins->dreg == last_ins->sreg1) {
2135 				MONO_DELETE_INS (bb, ins);
2143 	bb->last_ins = last_ins;
/*
 * mono_arch_decompose_opts:
 *   Decompose opcodes that have no direct PPC encoding into simpler IR:
 *     - OP_ICONV_TO_R_UN / OP_ICONV_TO_R4/R8 (pre-ISA-64 chips): the classic
 *       integer->double trick of building the IEEE bits 0x43300000:<int> on
 *       the stack, loading it as a double and subtracting the bias constant
 *     - a check-finite expansion that inspects the high word of the double
 *     - 64-bit-only: 32-bit overflow add detected by shifting both operands
 *       into the upper word and doing the 64-bit op
 *   NOTE(review): elided excerpt — the enclosing switch header appears at
 *   the top, but several case labels (e.g. the check-finite case around
 *   line 2205 and OP_IADD_OVF) and `offset` declarations fall in the gaps.
 */
2147 mono_arch_decompose_opts (MonoCompile *cfg, MonoInst *ins)
2149 	switch (ins->opcode) {
2150 	case OP_ICONV_TO_R_UN: {
2151 		static const guint64 adjust_val = 0x4330000000000000ULL;
2152 		int msw_reg = mono_alloc_ireg (cfg);
2153 		int adj_reg = mono_alloc_freg (cfg);
2154 		int tmp_reg = mono_alloc_freg (cfg);
2155 		int basereg = ppc_sp;
2157 		MONO_EMIT_NEW_ICONST (cfg, msw_reg, 0x43300000);
		/* if the scratch slot is out of signed-16-bit reach, compute the
		 * address into a register first */
2158 		if (!ppc_is_imm16 (offset + 4)) {
2159 			basereg = mono_alloc_ireg (cfg);
2160 			MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IADD_IMM, basereg, cfg->frame_reg, offset);
2162 		MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset, msw_reg);
2163 		MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset + 4, ins->sreg1);
2164 		MONO_EMIT_NEW_LOAD_R8 (cfg, adj_reg, &adjust_val);
2165 		MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR8_MEMBASE, tmp_reg, basereg, offset);
2166 		MONO_EMIT_NEW_BIALU (cfg, OP_FSUB, ins->dreg, tmp_reg, adj_reg);
2167 		ins->opcode = OP_NOP;
2170 #ifndef __mono_ppc64__
2171 	case OP_ICONV_TO_R4:
2172 	case OP_ICONV_TO_R8: {
		/*
2173 		 * If we have a PPC_FEATURE_64 machine we can avoid
2174 		 * this and use the fcfid instruction.  Otherwise
2175 		 * on an old 32-bit chip and we have to do this the
		 * hard way.
		 */
2177 		if (!(cpu_hw_caps & PPC_ISA_64)) {
2178 			/* FIXME: change precision for CEE_CONV_R4 */
2179 			static const guint64 adjust_val = 0x4330000080000000ULL;
2180 			int msw_reg = mono_alloc_ireg (cfg);
2181 			int xored = mono_alloc_ireg (cfg);
2182 			int adj_reg = mono_alloc_freg (cfg);
2183 			int tmp_reg = mono_alloc_freg (cfg);
2184 			int basereg = ppc_sp;
2186 			if (!ppc_is_imm16 (offset + 4)) {
2187 				basereg = mono_alloc_ireg (cfg);
2188 				MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IADD_IMM, basereg, cfg->frame_reg, offset);
			/* signed variant: flip the sign bit so the value can be
			 * treated as biased-unsigned, matching the 0x...80000000 bias */
2190 			MONO_EMIT_NEW_ICONST (cfg, msw_reg, 0x43300000);
2191 			MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset, msw_reg);
2192 			MONO_EMIT_NEW_BIALU_IMM (cfg, OP_XOR_IMM, xored, ins->sreg1, 0x80000000);
2193 			MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset + 4, xored);
2194 			MONO_EMIT_NEW_LOAD_R8 (cfg, adj_reg, (gpointer)&adjust_val);
2195 			MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR8_MEMBASE, tmp_reg, basereg, offset);
2196 			MONO_EMIT_NEW_BIALU (cfg, OP_FSUB, ins->dreg, tmp_reg, adj_reg);
2197 			if (ins->opcode == OP_ICONV_TO_R4)
2198 				MONO_EMIT_NEW_UNALU (cfg, OP_FCONV_TO_R4, ins->dreg, ins->dreg);
2199 			ins->opcode = OP_NOP;
		/* spill the double and inspect its most-significant word to detect
		 * NaN/Inf (the elided case label presumably is a check-finite op) */
2205 		int msw_reg = mono_alloc_ireg (cfg);
2206 		int basereg = ppc_sp;
2208 		if (!ppc_is_imm16 (offset + 4)) {
2209 			basereg = mono_alloc_ireg (cfg);
2210 			MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IADD_IMM, basereg, cfg->frame_reg, offset);
2212 		MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER8_MEMBASE_REG, basereg, offset, ins->sreg1);
2213 		MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI4_MEMBASE, msw_reg, basereg, offset);
2214 		MONO_EMIT_NEW_UNALU (cfg, OP_CHECK_FINITE, -1, msw_reg);
2215 		MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, ins->dreg, ins->sreg1);
2216 		ins->opcode = OP_NOP;
2219 #ifdef __mono_ppc64__
2221 	case OP_IADD_OVF_UN:
		/* do the 32-bit overflow-checked add as a 64-bit op on values
		 * shifted into the upper word; shift the result back with an
		 * arithmetic (signed) or logical (unsigned) right shift */
2223 		int shifted1_reg = mono_alloc_ireg (cfg);
2224 		int shifted2_reg = mono_alloc_ireg (cfg);
2225 		int result_shifted_reg = mono_alloc_ireg (cfg);
2227 		MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHL_IMM, shifted1_reg, ins->sreg1, 32);
2228 		MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHL_IMM, shifted2_reg, ins->sreg2, 32);
2229 		MONO_EMIT_NEW_BIALU (cfg, ins->opcode, result_shifted_reg, shifted1_reg, shifted2_reg);
2230 		if (ins->opcode == OP_IADD_OVF_UN)
2231 			MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHR_UN_IMM, ins->dreg, result_shifted_reg, 32);
2233 			MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHR_IMM, ins->dreg, result_shifted_reg, 32);
2234 		ins->opcode = OP_NOP;
/*
 * mono_arch_decompose_long_opts:
 *   On 32-bit PPC, decompose 64-bit arithmetic on vreg pairs (+1 = low,
 *   +2 = high word on this convention) into carry-propagating pairs:
 *   add-with-carry, subtract-with-borrow, and negate via bitwise-not plus
 *   add-carry of 1/0 (two's complement).
 *   NOTE(review): elided excerpt — the case labels for OP_LADD_OVF, OP_LSUB_OVF
 *   and OP_LNEG plus the `break`s are missing between the numbered lines.
 */
2241 mono_arch_decompose_long_opts (MonoCompile *cfg, MonoInst *ins)
2243 	switch (ins->opcode) {
2245 		/* ADC sets the condition code */
2246 		MONO_EMIT_NEW_BIALU (cfg, OP_ADDCC, ins->dreg + 1, ins->sreg1 + 1, ins->sreg2 + 1);
2247 		MONO_EMIT_NEW_BIALU (cfg, OP_ADD_OVF_CARRY, ins->dreg + 2, ins->sreg1 + 2, ins->sreg2 + 2);
2250 	case OP_LADD_OVF_UN:
2251 		/* ADC sets the condition code */
2252 		MONO_EMIT_NEW_BIALU (cfg, OP_ADDCC, ins->dreg + 1, ins->sreg1 + 1, ins->sreg2 + 1);
2253 		MONO_EMIT_NEW_BIALU (cfg, OP_ADD_OVF_UN_CARRY, ins->dreg + 2, ins->sreg1 + 2, ins->sreg2 + 2);
2257 		/* SBB sets the condition code */
2258 		MONO_EMIT_NEW_BIALU (cfg, OP_SUBCC, ins->dreg + 1, ins->sreg1 + 1, ins->sreg2 + 1);
2259 		MONO_EMIT_NEW_BIALU (cfg, OP_SUB_OVF_CARRY, ins->dreg + 2, ins->sreg1 + 2, ins->sreg2 + 2);
2262 	case OP_LSUB_OVF_UN:
2263 		/* SBB sets the condition code */
2264 		MONO_EMIT_NEW_BIALU (cfg, OP_SUBCC, ins->dreg + 1, ins->sreg1 + 1, ins->sreg2 + 1);
2265 		MONO_EMIT_NEW_BIALU (cfg, OP_SUB_OVF_UN_CARRY, ins->dreg + 2, ins->sreg1 + 2, ins->sreg2 + 2);
		/* negate: ~x + 1, with the carry rippling into the high word */
2269 		/* This is the old version from inssel-long32.brg */
2270 		MONO_EMIT_NEW_UNALU (cfg, OP_INOT, ins->dreg + 1, ins->sreg1 + 1);
2271 		MONO_EMIT_NEW_UNALU (cfg, OP_INOT, ins->dreg + 2, ins->sreg1 + 2);
2272 		/* ADC sets the condition codes */
2273 		MONO_EMIT_NEW_BIALU_IMM (cfg, OP_ADC_IMM, ins->dreg + 1, ins->dreg + 1, 1);
2274 		MONO_EMIT_NEW_BIALU_IMM (cfg, OP_ADC_IMM, ins->dreg + 2, ins->dreg + 2, 0);
2283 * the branch_b0_table should maintain the order of these
2297 branch_b0_table [] = {
2312 branch_b1_table [] = {
/*
 * NEW_INS(cfg, dest, op):
 *   Allocate a fresh MonoInst with opcode `op` and insert it after the
 *   lowering pass's current `last_ins` in basic block `bb` (both captured
 *   from the enclosing scope).
 */
2326 #define NEW_INS(cfg,dest,op) do {					\
2327 		MONO_INST_NEW((cfg), (dest), (op));			\
2328 		mono_bblock_insert_after_ins (bb, last_ins, (dest));	\
/*
 * map_to_reg_reg_op:
 *   Map an opcode that takes an immediate or membase operand to its
 *   register/register (or memindex) counterpart, used by the lowering pass
 *   once the immediate has been loaded into a register.  Opcodes without an
 *   explicit case fall through to mono_op_imm_to_op ().
 *   NOTE(review): elided excerpt — the switch header, several return lines
 *   (e.g. for the COMPARE_IMM cases) and the closing brace are missing.
 */
2332 map_to_reg_reg_op (int op)
2341 	case OP_COMPARE_IMM:
2343 	case OP_ICOMPARE_IMM:
2345 	case OP_LCOMPARE_IMM:
2361 	case OP_LOAD_MEMBASE:
2362 		return OP_LOAD_MEMINDEX;
2363 	case OP_LOADI4_MEMBASE:
2364 		return OP_LOADI4_MEMINDEX;
2365 	case OP_LOADU4_MEMBASE:
2366 		return OP_LOADU4_MEMINDEX;
2367 	case OP_LOADI8_MEMBASE:
2368 		return OP_LOADI8_MEMINDEX;
2369 	case OP_LOADU1_MEMBASE:
2370 		return OP_LOADU1_MEMINDEX;
2371 	case OP_LOADI2_MEMBASE:
2372 		return OP_LOADI2_MEMINDEX;
2373 	case OP_LOADU2_MEMBASE:
2374 		return OP_LOADU2_MEMINDEX;
2375 	case OP_LOADI1_MEMBASE:
2376 		return OP_LOADI1_MEMINDEX;
2377 	case OP_LOADR4_MEMBASE:
2378 		return OP_LOADR4_MEMINDEX;
2379 	case OP_LOADR8_MEMBASE:
2380 		return OP_LOADR8_MEMINDEX;
2381 	case OP_STOREI1_MEMBASE_REG:
2382 		return OP_STOREI1_MEMINDEX;
2383 	case OP_STOREI2_MEMBASE_REG:
2384 		return OP_STOREI2_MEMINDEX;
2385 	case OP_STOREI4_MEMBASE_REG:
2386 		return OP_STOREI4_MEMINDEX;
2387 	case OP_STOREI8_MEMBASE_REG:
2388 		return OP_STOREI8_MEMINDEX;
2389 	case OP_STORE_MEMBASE_REG:
2390 		return OP_STORE_MEMINDEX;
2391 	case OP_STORER4_MEMBASE_REG:
2392 		return OP_STORER4_MEMINDEX;
2393 	case OP_STORER8_MEMBASE_REG:
2394 		return OP_STORER8_MEMINDEX;
	/* *_MEMBASE_IMM -> *_MEMBASE_REG: the immediate moves to a register
	 * but the addressing mode stays membase */
2395 	case OP_STORE_MEMBASE_IMM:
2396 		return OP_STORE_MEMBASE_REG;
2397 	case OP_STOREI1_MEMBASE_IMM:
2398 		return OP_STOREI1_MEMBASE_REG;
2399 	case OP_STOREI2_MEMBASE_IMM:
2400 		return OP_STOREI2_MEMBASE_REG;
2401 	case OP_STOREI4_MEMBASE_IMM:
2402 		return OP_STOREI4_MEMBASE_REG;
2403 	case OP_STOREI8_MEMBASE_IMM:
2404 		return OP_STOREI8_MEMBASE_REG;
2406 	return mono_op_imm_to_op (op);
2409 //#define map_to_reg_reg_op(op) (cfg->new_ir? mono_op_imm_to_op (op): map_to_reg_reg_op (op))
/*
 * compare_opcode_is_unsigned(opcode):
 *   True when `opcode` is an unsigned comparison/branch/set/exc variant, so
 *   the lowering pass can pick the unsigned immediate-range check
 *   (ppc_is_uimm16) over the signed one for the preceding compare.
 */
2411 #define compare_opcode_is_unsigned(opcode) \
2412 		(((opcode) >= CEE_BNE_UN && (opcode) <= CEE_BLT_UN) ||	\
2413 		((opcode) >= OP_IBNE_UN && (opcode) <= OP_IBLT_UN) ||	\
2414 		((opcode) >= OP_LBNE_UN && (opcode) <= OP_LBLT_UN) ||	\
2415 		((opcode) >= OP_COND_EXC_NE_UN && (opcode) <= OP_COND_EXC_LT_UN) ||	\
2416 		((opcode) >= OP_COND_EXC_INE_UN && (opcode) <= OP_COND_EXC_ILT_UN) ||	\
2417 		((opcode) == OP_CLT_UN || (opcode) == OP_CGT_UN || \
2418 		(opcode) == OP_ICLT_UN || (opcode) == OP_ICGT_UN || \
2419 		(opcode) == OP_LCLT_UN || (opcode) == OP_LCGT_UN))
2422 * Remove from the instruction list the instructions that can't be
2423 * represented with very simple instructions with no register
2427 mono_arch_lowering_pass (MonoCompile *cfg, MonoBasicBlock *bb)
2429 MonoInst *ins, *next, *temp, *last_ins = NULL;
2432 MONO_BB_FOR_EACH_INS (bb, ins) {
2434 switch (ins->opcode) {
2435 case OP_IDIV_UN_IMM:
2438 case OP_IREM_UN_IMM:
2439 NEW_INS (cfg, temp, OP_ICONST);
2440 temp->inst_c0 = ins->inst_imm;
2441 temp->dreg = mono_alloc_ireg (cfg);
2442 ins->sreg2 = temp->dreg;
2443 if (ins->opcode == OP_IDIV_IMM)
2444 ins->opcode = OP_IDIV;
2445 else if (ins->opcode == OP_IREM_IMM)
2446 ins->opcode = OP_IREM;
2447 else if (ins->opcode == OP_IDIV_UN_IMM)
2448 ins->opcode = OP_IDIV_UN;
2449 else if (ins->opcode == OP_IREM_UN_IMM)
2450 ins->opcode = OP_IREM_UN;
2452 /* handle rem separately */
2456 CASE_PPC64 (OP_LREM)
2457 CASE_PPC64 (OP_LREM_UN) {
2459 /* we change a rem dest, src1, src2 to
2460 * div temp1, src1, src2
2461 * mul temp2, temp1, src2
2462 * sub dest, src1, temp2
2464 if (ins->opcode == OP_IREM || ins->opcode == OP_IREM_UN) {
2465 NEW_INS (cfg, mul, OP_IMUL);
2466 NEW_INS (cfg, temp, ins->opcode == OP_IREM? OP_IDIV: OP_IDIV_UN);
2467 ins->opcode = OP_ISUB;
2469 NEW_INS (cfg, mul, OP_LMUL);
2470 NEW_INS (cfg, temp, ins->opcode == OP_LREM? OP_LDIV: OP_LDIV_UN);
2471 ins->opcode = OP_LSUB;
2473 temp->sreg1 = ins->sreg1;
2474 temp->sreg2 = ins->sreg2;
2475 temp->dreg = mono_alloc_ireg (cfg);
2476 mul->sreg1 = temp->dreg;
2477 mul->sreg2 = ins->sreg2;
2478 mul->dreg = mono_alloc_ireg (cfg);
2479 ins->sreg2 = mul->dreg;
2483 CASE_PPC64 (OP_LADD_IMM)
2486 if (!ppc_is_imm16 (ins->inst_imm)) {
2487 NEW_INS (cfg, temp, OP_ICONST);
2488 temp->inst_c0 = ins->inst_imm;
2489 temp->dreg = mono_alloc_ireg (cfg);
2490 ins->sreg2 = temp->dreg;
2491 ins->opcode = map_to_reg_reg_op (ins->opcode);
2495 CASE_PPC64 (OP_LSUB_IMM)
2497 if (!ppc_is_imm16 (-ins->inst_imm)) {
2498 NEW_INS (cfg, temp, OP_ICONST);
2499 temp->inst_c0 = ins->inst_imm;
2500 temp->dreg = mono_alloc_ireg (cfg);
2501 ins->sreg2 = temp->dreg;
2502 ins->opcode = map_to_reg_reg_op (ins->opcode);
2514 gboolean is_imm = ((ins->inst_imm & 0xffff0000) && (ins->inst_imm & 0xffff));
2515 #ifdef __mono_ppc64__
2516 if (ins->inst_imm & 0xffffffff00000000ULL)
2520 NEW_INS (cfg, temp, OP_ICONST);
2521 temp->inst_c0 = ins->inst_imm;
2522 temp->dreg = mono_alloc_ireg (cfg);
2523 ins->sreg2 = temp->dreg;
2524 ins->opcode = map_to_reg_reg_op (ins->opcode);
2533 NEW_INS (cfg, temp, OP_ICONST);
2534 temp->inst_c0 = ins->inst_imm;
2535 temp->dreg = mono_alloc_ireg (cfg);
2536 ins->sreg2 = temp->dreg;
2537 ins->opcode = map_to_reg_reg_op (ins->opcode);
2539 case OP_COMPARE_IMM:
2540 case OP_ICOMPARE_IMM:
2541 CASE_PPC64 (OP_LCOMPARE_IMM)
2543 /* Branch opts can eliminate the branch */
2544 if (!next || (!(MONO_IS_COND_BRANCH_OP (next) || MONO_IS_COND_EXC (next) || MONO_IS_SETCC (next)))) {
2545 ins->opcode = OP_NOP;
2549 if (compare_opcode_is_unsigned (next->opcode)) {
2550 if (!ppc_is_uimm16 (ins->inst_imm)) {
2551 NEW_INS (cfg, temp, OP_ICONST);
2552 temp->inst_c0 = ins->inst_imm;
2553 temp->dreg = mono_alloc_ireg (cfg);
2554 ins->sreg2 = temp->dreg;
2555 ins->opcode = map_to_reg_reg_op (ins->opcode);
2558 if (!ppc_is_imm16 (ins->inst_imm)) {
2559 NEW_INS (cfg, temp, OP_ICONST);
2560 temp->inst_c0 = ins->inst_imm;
2561 temp->dreg = mono_alloc_ireg (cfg);
2562 ins->sreg2 = temp->dreg;
2563 ins->opcode = map_to_reg_reg_op (ins->opcode);
2569 if (ins->inst_imm == 1) {
2570 ins->opcode = OP_MOVE;
2573 if (ins->inst_imm == 0) {
2574 ins->opcode = OP_ICONST;
2578 imm = mono_is_power_of_two (ins->inst_imm);
2580 ins->opcode = OP_SHL_IMM;
2581 ins->inst_imm = imm;
2584 if (!ppc_is_imm16 (ins->inst_imm)) {
2585 NEW_INS (cfg, temp, OP_ICONST);
2586 temp->inst_c0 = ins->inst_imm;
2587 temp->dreg = mono_alloc_ireg (cfg);
2588 ins->sreg2 = temp->dreg;
2589 ins->opcode = map_to_reg_reg_op (ins->opcode);
2592 case OP_LOCALLOC_IMM:
2593 NEW_INS (cfg, temp, OP_ICONST);
2594 temp->inst_c0 = ins->inst_imm;
2595 temp->dreg = mono_alloc_ireg (cfg);
2596 ins->sreg1 = temp->dreg;
2597 ins->opcode = OP_LOCALLOC;
2599 case OP_LOAD_MEMBASE:
2600 case OP_LOADI4_MEMBASE:
2601 CASE_PPC64 (OP_LOADI8_MEMBASE)
2602 case OP_LOADU4_MEMBASE:
2603 case OP_LOADI2_MEMBASE:
2604 case OP_LOADU2_MEMBASE:
2605 case OP_LOADI1_MEMBASE:
2606 case OP_LOADU1_MEMBASE:
2607 case OP_LOADR4_MEMBASE:
2608 case OP_LOADR8_MEMBASE:
2609 case OP_STORE_MEMBASE_REG:
2610 CASE_PPC64 (OP_STOREI8_MEMBASE_REG)
2611 case OP_STOREI4_MEMBASE_REG:
2612 case OP_STOREI2_MEMBASE_REG:
2613 case OP_STOREI1_MEMBASE_REG:
2614 case OP_STORER4_MEMBASE_REG:
2615 case OP_STORER8_MEMBASE_REG:
2616 /* we can do two things: load the immed in a register
2617 * and use an indexed load, or see if the immed can be
2618 * represented as an ad_imm + a load with a smaller offset
2619 * that fits. We just do the first for now, optimize later.
2621 if (ppc_is_imm16 (ins->inst_offset))
2623 NEW_INS (cfg, temp, OP_ICONST);
2624 temp->inst_c0 = ins->inst_offset;
2625 temp->dreg = mono_alloc_ireg (cfg);
2626 ins->sreg2 = temp->dreg;
2627 ins->opcode = map_to_reg_reg_op (ins->opcode);
2629 case OP_STORE_MEMBASE_IMM:
2630 case OP_STOREI1_MEMBASE_IMM:
2631 case OP_STOREI2_MEMBASE_IMM:
2632 case OP_STOREI4_MEMBASE_IMM:
2633 CASE_PPC64 (OP_STOREI8_MEMBASE_IMM)
2634 NEW_INS (cfg, temp, OP_ICONST);
2635 temp->inst_c0 = ins->inst_imm;
2636 temp->dreg = mono_alloc_ireg (cfg);
2637 ins->sreg1 = temp->dreg;
2638 ins->opcode = map_to_reg_reg_op (ins->opcode);
2640 goto loop_start; /* make it handle the possibly big ins->inst_offset */
2643 if (cfg->compile_aot) {
2644 /* Keep these in the aot case */
2647 NEW_INS (cfg, temp, OP_ICONST);
2648 temp->inst_c0 = (gulong)ins->inst_p0;
2649 temp->dreg = mono_alloc_ireg (cfg);
2650 ins->inst_basereg = temp->dreg;
2651 ins->inst_offset = 0;
2652 ins->opcode = ins->opcode == OP_R4CONST? OP_LOADR4_MEMBASE: OP_LOADR8_MEMBASE;
2654 /* make it handle the possibly big ins->inst_offset
2655 * later optimize to use lis + load_membase
2661 bb->last_ins = last_ins;
2662 bb->max_vreg = cfg->next_vreg;
/*
 * emit_float_to_int:
 *   Emit code converting the FP register @sreg to an integer of @size bytes
 *   in the integer register @dreg.  The conversion goes through the
 *   per-method stack slot at cfg->arch.fp_conv_var_offset: the truncated
 *   result is produced in ppc_f0 (clobbered as scratch), stored to the slot,
 *   then reloaded into @dreg.  Sub-word sizes are then masked (@is_signed
 *   false) or sign-extended (@is_signed true).
 *   NOTE(review): some original lines are not visible in this excerpt.
 */
2666 emit_float_to_int (MonoCompile *cfg, guchar *code, int dreg, int sreg, int size, gboolean is_signed)
2668 long offset = cfg->arch.fp_conv_var_offset;
2670 /* sreg is a float, dreg is an integer reg. ppc_f0 is used a scratch */
2671 #ifdef __mono_ppc64__
/* 64-bit target: truncate toward zero to a 64-bit integer in f0 */
2673 ppc_fctidz (code, ppc_f0, sreg);
/* 32-bit (or 32-bit conversion): truncate toward zero to a 32-bit integer */
2678 ppc_fctiwz (code, ppc_f0, sreg);
/* Fast path: the slot offset fits in a signed 16-bit displacement */
2681 if (ppc_is_imm16 (offset + sub_offset)) {
2682 ppc_stfd (code, ppc_f0, offset, cfg->frame_reg);
2684 ppc_ldr (code, dreg, offset + sub_offset, cfg->frame_reg);
2686 ppc_lwz (code, dreg, offset + sub_offset, cfg->frame_reg);
/* Slow path: materialize the big offset in dreg and address indirectly */
2688 ppc_load (code, dreg, offset);
2689 ppc_add (code, dreg, dreg, cfg->frame_reg);
2690 ppc_stfd (code, ppc_f0, 0, dreg);
2692 ppc_ldr (code, dreg, sub_offset, dreg);
2694 ppc_lwz (code, dreg, sub_offset, dreg);
/* Unsigned narrowing: mask down to the requested width */
2698 ppc_andid (code, dreg, dreg, 0xff);
2700 ppc_andid (code, dreg, dreg, 0xffff);
2701 #ifdef __mono_ppc64__
/* unsigned 32-bit result on ppc64: clear the upper 32 bits */
2703 ppc_clrldi (code, dreg, dreg, 32);
/* Signed narrowing: sign-extend from the requested width */
2707 ppc_extsb (code, dreg, dreg);
2709 ppc_extsh (code, dreg, dreg);
2710 #ifdef __mono_ppc64__
2712 ppc_extsw (code, dreg, dreg);
2720 const guchar *target;
2725 #define is_call_imm(diff) ((glong)(diff) >= -33554432 && (glong)(diff) <= 33554431)
/*
 * search_thunk_slot:
 *   mono_domain_code_foreach() callback.  Scans one code chunk (@data,
 *   @bsize bytes of thunk area after @csize bytes of code) for either an
 *   existing 16-byte thunk that already branches to pdata->target, or a
 *   free slot in which to emit a new one; on success patches pdata->code
 *   to branch to the thunk and marks pdata->found.
 *   Not implemented on ppc64 (asserts).
 */
2728 search_thunk_slot (void *data, int csize, int bsize, void *user_data) {
2729 #ifdef __mono_ppc64__
2730 g_assert_not_reached ();
2732 PatchData *pdata = (PatchData*)user_data;
2733 guchar *code = data;
2734 guint32 *thunks = data;
2735 guint32 *endthunks = (guint32*)(code + bsize);
2739 int difflow, diffhigh;
2741 /* always ensure a call from pdata->code can reach to the thunks without further thunks */
2742 difflow = (char*)pdata->code - (char*)thunks;
2743 diffhigh = (char*)pdata->code - (char*)endthunks;
/* Reject chunks the 26-bit branch displacement cannot reach */
2744 if (!((is_call_imm (thunks) && is_call_imm (endthunks)) || (is_call_imm (difflow) && is_call_imm (diffhigh))))
/* Build the expected first two words (lis/ori of the target) to compare
 * against existing thunks below */
2747 templ = (guchar*)load;
2748 ppc_load_sequence (templ, ppc_r0, pdata->target);
2750 //g_print ("thunk nentries: %d\n", ((char*)endthunks - (char*)thunks)/16);
/* found == 2 means "take the first usable chunk"; otherwise only use the
 * chunk that actually contains pdata->code */
2751 if ((pdata->found == 2) || (pdata->code >= code && pdata->code <= code + csize)) {
2752 while (thunks < endthunks) {
2753 //g_print ("looking for target: %p at %p (%08x-%08x)\n", pdata->target, thunks, thunks [0], thunks [1]);
/* Case 1: an identical thunk already exists — reuse it */
2754 if ((thunks [0] == load [0]) && (thunks [1] == load [1])) {
2755 ppc_patch (pdata->code, (guchar*)thunks);
2758 static int num_thunks = 0;
2760 if ((num_thunks % 20) == 0)
2761 g_print ("num_thunks lookup: %d\n", num_thunks);
/* Case 2: empty slot — emit a fresh lis/ori/mtctr/bcctr thunk */
2764 } else if ((thunks [0] == 0) && (thunks [1] == 0)) {
2765 /* found a free slot instead: emit thunk */
2766 code = (guchar*)thunks;
2767 ppc_lis (code, ppc_r0, (gulong)(pdata->target) >> 16);
2768 ppc_ori (code, ppc_r0, ppc_r0, (gulong)(pdata->target) & 0xffff);
2769 ppc_mtctr (code, ppc_r0);
2770 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
2771 mono_arch_flush_icache ((guchar*)thunks, 16);
2773 ppc_patch (pdata->code, (guchar*)thunks);
2776 static int num_thunks = 0;
2778 if ((num_thunks % 20) == 0)
2779 g_print ("num_thunks: %d\n", num_thunks);
2783 /* skip 16 bytes, the size of the thunk */
2787 //g_print ("failed thunk lookup for %p from %p at %p (%d entries)\n", pdata->target, pdata->code, data, count);
/*
 * handle_thunk:
 *   Route the branch at @code through a 16-byte thunk when @target is out
 *   of direct-branch range.  Walks the current domain's code chunks via
 *   search_thunk_slot(): the first pass prefers a thunk in the chunk that
 *   contains @code; the second pass takes the first available slot
 *   anywhere.  Asserts if no thunk could be found or created.
 *   Holds the domain lock for the whole search.
 */
2794 handle_thunk (int absolute, guchar *code, const guchar *target) {
2795 MonoDomain *domain = mono_domain_get ();
2799 pdata.target = target;
2800 pdata.absolute = absolute;
2803 mono_domain_lock (domain);
2804 mono_domain_code_foreach (domain, search_thunk_slot, &pdata);
2807 /* this uses the first available slot */
2809 mono_domain_code_foreach (domain, search_thunk_slot, &pdata);
2811 mono_domain_unlock (domain);
2813 if (pdata.found != 1)
2814 g_print ("thunk failed for %p from %p\n", target, code);
2815 g_assert (pdata.found == 1);
/*
 * patch_ins:
 *   Overwrite the 4-byte instruction word at @code with @ins.  PPC
 *   instructions are big-endian, hence the GUINT32_TO_BE; the icache is
 *   flushed for that word so the CPU fetches the new instruction.
 */
2819 patch_ins (guint8 *code, guint32 ins)
2821 *(guint32*)code = GUINT32_TO_BE (ins);
2822 mono_arch_flush_icache (code, 4);
/*
 * ppc_patch_full:
 *   Patch the branch or address-load sequence at @code so control flows to
 *   @target.  Decodes the primary opcode of the existing instruction and
 *   handles, in order: unconditional branches (opcode 18) rewritten as
 *   relative or absolute I-form branches when the 26-bit displacement
 *   fits, falling back to handle_thunk(); 16-bit-displacement branches;
 *   and the indirect-call sequences (lis/ori/... + mtlr/blrl etc.), which
 *   are rewritten in place.  @is_fd indicates @target is a function
 *   descriptor (ppc64 ABI) rather than a raw code address.
 *   NOTE(review): several original lines are missing from this excerpt;
 *   the control flow between the numbered lines is not fully visible.
 */
2826 ppc_patch_full (guchar *code, const guchar *target, gboolean is_fd)
2828 guint32 ins = GUINT32_FROM_BE (*(guint32*)code);
/* primary opcode lives in the top 6 bits of the instruction word */
2829 guint32 prim = ins >> 26;
2832 //g_print ("patching 0x%08x (0x%08x) to point to 0x%08x\n", code, ins, target);
2834 // prefer relative branches, they are more position independent (e.g. for AOT compilation).
2835 gint diff = target - code;
/* Relative I-form branch: 26-bit signed displacement, LK bit preserved */
2838 if (diff <= 33554431){
2839 ins = (18 << 26) | (diff) | (ins & 1);
2840 patch_ins (code, ins);
2844 /* diff between 0 and -33554432 */
2845 if (diff >= -33554432){
2846 ins = (18 << 26) | (diff & ~0xfc000000) | (ins & 1);
2847 patch_ins (code, ins);
/* Otherwise try an absolute branch (AA bit = 2) if the target address
 * itself fits in the 26-bit field */
2852 if ((glong)target >= 0){
2853 if ((glong)target <= 33554431){
2854 ins = (18 << 26) | ((gulong) target) | (ins & 1) | 2;
2855 patch_ins (code, ins);
2859 if ((glong)target >= -33554432){
2860 ins = (18 << 26) | (((gulong)target) & ~0xfc000000) | (ins & 1) | 2;
2861 patch_ins (code, ins);
/* Out of direct range: go through a thunk */
2866 handle_thunk (TRUE, code, target);
2869 g_assert_not_reached ();
/* B-form branch with a 16-bit displacement: keep the BO/BI fields
 * (upper half) and the AA/LK bits, replace the displacement; assert it
 * does not overflow 16 bits */
2877 guint32 li = (gulong)target;
2878 ins = (ins & 0xffff0000) | (ins & 3);
2879 ovf = li & 0xffff0000;
2880 if (ovf != 0 && ovf != 0xffff0000)
2881 g_assert_not_reached ();
2884 // FIXME: assert the top bits of li are 0
2886 gint diff = target - code;
2887 ins = (ins & 0xffff0000) | (ins & 3);
2888 ovf = diff & 0xffff0000;
2889 if (ovf != 0 && ovf != 0xffff0000)
2890 g_assert_not_reached ();
2894 patch_ins (code, ins);
/* addis/lis (opcode 15) or blrl/blr/bcctr: an address-load sequence that
 * must be rewritten rather than a single branch */
2898 if (prim == 15 || ins == 0x4e800021 || ins == 0x4e800020 || ins == 0x4e800420) {
2899 #ifdef __mono_ppc64__
2900 guint32 *seq = (guint32*)code;
2901 guint32 *branch_ins;
2903 /* the trampoline code will try to patch the blrl, blr, bcctr */
2904 if (ins == 0x4e800021 || ins == 0x4e800020 || ins == 0x4e800420) {
/* Locate the start of the load sequence by walking back over the
 * optional ld/mr instructions before the branch */
2906 if (ppc_opcode (seq [-3]) == 58 || ppc_opcode (seq [-3]) == 31) /* ld || mr */
2911 if (ppc_opcode (seq [5]) == 58 || ppc_opcode (seq [5]) == 31) /* ld || mr */
2912 branch_ins = seq + 8;
2914 branch_ins = seq + 6;
2917 seq = (guint32*)code;
2918 /* this is the lis/ori/sldi/oris/ori/(ld/ld|mr/nop)/mtlr/blrl sequence */
2919 g_assert (mono_ppc_is_direct_call_sequence (branch_ins));
/* Function-descriptor call: the two ld's load entry point + TOC.
 * When patching to a non-descriptor target, turn the first ld into mr */
2921 if (ppc_opcode (seq [5]) == 58) { /* ld */
2922 g_assert (ppc_opcode (seq [6]) == 58); /* ld */
2925 guint8 *buf = (guint8*)&seq [5];
2926 ppc_mr (buf, ppc_r0, ppc_r11);
2931 target = mono_get_addr_from_ftnptr ((gpointer)target);
2934 /* FIXME: make this thread safe */
2935 /* FIXME: we're assuming we're using r11 here */
2936 ppc_load_ptr_sequence (code, ppc_r11, target);
2937 mono_arch_flush_icache ((guint8*)seq, 28);
/* 32-bit path: the load sequence is just lis/ori/mtlr/blrl */
2940 /* the trampoline code will try to patch the blrl, blr, bcctr */
2941 if (ins == 0x4e800021 || ins == 0x4e800020 || ins == 0x4e800420) {
2944 /* this is the lis/ori/mtlr/blrl sequence */
2945 seq = (guint32*)code;
2946 g_assert ((seq [0] >> 26) == 15);
2947 g_assert ((seq [1] >> 26) == 24);
2948 g_assert ((seq [2] >> 26) == 31);
2949 g_assert (seq [3] == 0x4e800021 || seq [3] == 0x4e800020 || seq [3] == 0x4e800420);
2950 /* FIXME: make this thread safe */
2951 ppc_lis (code, ppc_r0, (guint32)(target) >> 16);
2952 ppc_ori (code, ppc_r0, ppc_r0, (guint32)(target) & 0xffff);
2953 mono_arch_flush_icache (code - 8, 8);
2956 g_assert_not_reached ();
2958 // g_print ("patched with 0x%08x\n", ins);
/*
 * ppc_patch:
 *   Patch the code at @code to transfer to @target.  Thin wrapper over
 *   ppc_patch_full() with is_fd == FALSE, i.e. @target is a plain code
 *   address, not a ppc64 function descriptor.
 */
2962 ppc_patch (guchar *code, const guchar *target)
2964 ppc_patch_full (code, target, FALSE);
/*
 * mono_ppc_patch:
 *   Exported entry point used outside this file; forwards to ppc_patch().
 */
2968 mono_ppc_patch (guchar *code, const guchar *target)
2970 ppc_patch (code, target);
/*
 * emit_move_return_value:
 *   After a call instruction, move the ABI return value into the
 *   instruction's dreg.  Visible case: FP calls return in ppc_f1, which is
 *   copied with fmr only when dreg differs.
 *   NOTE(review): the other switch cases are not visible in this excerpt.
 */
2974 emit_move_return_value (MonoCompile *cfg, MonoInst *ins, guint8 *code)
2976 switch (ins->opcode) {
2979 case OP_FCALL_MEMBASE:
2980 if (ins->dreg != ppc_f1)
2981 ppc_fmr (code, ins->dreg, ppc_f1);
 * emit_load_volatile_arguments:
 *
 * Load volatile arguments from the stack to the original input registers.
 * Required before a tail call.
 *
 * Walks the same ArgInfo layout produced by calculate_sizes() that
 * emit_prolog used to spill the arguments, and reverses each store.
emit_load_volatile_arguments (MonoCompile *cfg, guint8 *code)
2997 MonoMethod *method = cfg->method;
2998 MonoMethodSignature *sig;
3002 int struct_index = 0;
3004 sig = mono_method_signature (method);
3006 /* This is the opposite of the code in emit_prolog */
3010 cinfo = calculate_sizes (sig, sig->pinvoke);
/* Valuetype return: reload the hidden return-buffer pointer */
3012 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
3013 ArgInfo *ainfo = &cinfo->ret;
3014 inst = cfg->vret_addr;
3015 g_assert (ppc_is_imm16 (inst->inst_offset));
3016 ppc_ldptr (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
3018 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
3019 ArgInfo *ainfo = cinfo->args + i;
3020 inst = cfg->args [pos];
3022 g_assert (inst->opcode != OP_REGVAR);
3023 g_assert (ppc_is_imm16 (inst->inst_offset));
3025 switch (ainfo->regtype) {
/* Integer-register argument: reload at its declared size */
3026 case RegTypeGeneral:
3027 switch (ainfo->size) {
3029 ppc_lbz (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
3032 ppc_lhz (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
3034 #ifdef __mono_ppc64__
3036 ppc_lwz (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
3040 ppc_ldptr (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
/* FP-register argument: single vs. double precision reload */
3046 switch (ainfo->size) {
3048 ppc_lfs (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
3051 ppc_lfd (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
3054 g_assert_not_reached ();
/* Stack-passed argument: copy it back to the caller's outgoing area
 * (ainfo->offset off ainfo->reg) via r0 */
3059 MonoType *type = mini_type_get_underlying_type (cfg->generic_sharing_context,
3060 &inst->klass->byval_arg);
3062 #ifndef __mono_ppc64__
3063 if (type->type == MONO_TYPE_I8)
3067 if (MONO_TYPE_IS_REFERENCE (type) || type->type == MONO_TYPE_I8) {
3068 ppc_ldptr (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
3069 ppc_stptr (code, ppc_r0, ainfo->offset, ainfo->reg);
3070 } else if (type->type == MONO_TYPE_I4) {
3071 ppc_lwz (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
3072 ppc_stw (code, ppc_r0, ainfo->offset, ainfo->reg);
/* Valuetype passed by value in consecutive registers */
3080 case RegTypeStructByVal: {
3091 * Darwin pinvokes needs some special handling
3092 * for 1 and 2 byte arguments
3094 if (method->signature->pinvoke)
3095 size = mono_class_native_size (inst->klass, NULL);
3096 if (size == 1 || size == 2) {
3101 for (j = 0; j < ainfo->vtregs; ++j) {
3102 ppc_ldptr (code, ainfo->reg + j,
3103 inst->inst_offset + j * sizeof (gpointer),
3104 inst->inst_basereg);
3105 /* FIXME: shift to the right */
/* Valuetype passed by reference: reload the saved address */
3112 case RegTypeStructByAddr: {
3113 MonoInst *addr = cfg->tailcall_valuetype_addrs [struct_index];
3115 g_assert (ppc_is_imm16 (addr->inst_offset));
3116 g_assert (!ainfo->offset);
3117 ppc_ldptr (code, ainfo->reg, addr->inst_offset, addr->inst_basereg);
3124 g_assert_not_reached ();
/* This must be kept in sync with emit_load_volatile_arguments(). */
/*
 * ins_native_length:
 *   Return the maximum native-code length of @ins.  Starts from the
 *   per-opcode table entry; for OP_JMP (tail call) it additionally
 *   accounts for the argument-reload code emitted by
 *   emit_load_volatile_arguments(), sized per argument regtype.
 */
3137 ins_native_length (MonoCompile *cfg, MonoInst *ins)
3139 int len = ((guint8 *)ins_get_spec (ins->opcode))[MONO_INST_LEN];
3140 MonoMethodSignature *sig;
/* The table entry is enough for everything except OP_JMP */
3145 if (ins->opcode != OP_JMP)
3148 call = (MonoCallInst*)ins;
3149 sig = mono_method_signature (cfg->method);
3150 cinfo = calculate_sizes (sig, sig->pinvoke);
3152 if (MONO_TYPE_ISSTRUCT (sig->ret))
3154 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
3155 ArgInfo *ainfo = cinfo->args + i;
3157 switch (ainfo->regtype) {
3158 case RegTypeGeneral:
/* by-value structs reload one instruction per word */
3167 case RegTypeStructByVal:
3168 len += 4 * ainfo->size;
3171 case RegTypeStructByAddr:
3176 g_assert_not_reached ();
/*
 * emit_reserve_param_area:
 *   Grow the stack by cfg->param_area bytes (rounded up to
 *   MONO_ARCH_FRAME_ALIGNMENT), preserving the ABI back-chain: the saved
 *   SP word at 0(sp) is reloaded into r0 and re-stored with an update
 *   form, so ppc_sp ends up lowered by @size with a valid back-chain.
 *   Counterpart of emit_unreserve_param_area().
 */
3186 emit_reserve_param_area (MonoCompile *cfg, guint8 *code)
3188 long size = cfg->param_area;
/* round up to the frame alignment */
3190 size += MONO_ARCH_FRAME_ALIGNMENT - 1;
3191 size &= -MONO_ARCH_FRAME_ALIGNMENT;
3196 ppc_ldptr (code, ppc_r0, 0, ppc_sp);
3197 if (ppc_is_imm16 (-size)) {
3198 ppc_stptr_update (code, ppc_r0, -size, ppc_sp);
/* size too large for a 16-bit displacement: use the indexed form */
3200 ppc_load (code, ppc_r11, -size);
3201 ppc_stptr_update_indexed (code, ppc_r0, ppc_sp, ppc_r11);
/*
 * emit_unreserve_param_area:
 *   Undo emit_reserve_param_area(): shrink the stack by the same aligned
 *   cfg->param_area size, again keeping the back-chain intact by
 *   reloading 0(sp) into r0 and storing it with an update form at the
 *   raised SP.
 */
3208 emit_unreserve_param_area (MonoCompile *cfg, guint8 *code)
3210 long size = cfg->param_area;
/* must match the rounding done when reserving */
3212 size += MONO_ARCH_FRAME_ALIGNMENT - 1;
3213 size &= -MONO_ARCH_FRAME_ALIGNMENT;
3218 ppc_ldptr (code, ppc_r0, 0, ppc_sp);
3219 if (ppc_is_imm16 (size)) {
3220 ppc_stptr_update (code, ppc_r0, size, ppc_sp);
/* size too large for a 16-bit displacement: use the indexed form */
3222 ppc_load (code, ppc_r11, size);
3223 ppc_stptr_update_indexed (code, ppc_r0, ppc_sp, ppc_r11);
3229 #define MASK_SHIFT_IMM(i) ((i) & MONO_PPC_32_64_CASE (0x1f, 0x3f))
3232 mono_arch_output_basic_block (MonoCompile *cfg, MonoBasicBlock *bb)
3234 MonoInst *ins, *next;
3237 guint8 *code = cfg->native_code + cfg->code_len;
3238 MonoInst *last_ins = NULL;
3239 guint last_offset = 0;
3243 /* we don't align basic blocks of loops on ppc */
3245 if (cfg->verbose_level > 2)
3246 g_print ("Basic block %d starting at offset 0x%x\n", bb->block_num, bb->native_offset);
3248 cpos = bb->max_offset;
3250 if (cfg->prof_options & MONO_PROFILE_COVERAGE) {
3251 //MonoCoverageInfo *cov = mono_get_coverage_info (cfg->method);
3252 //g_assert (!mono_compile_aot);
3255 // cov->data [bb->dfn].iloffset = bb->cil_code - cfg->cil_code;
3256 /* this is not thread save, but good enough */
3257 /* fixme: howto handle overflows? */
3258 //x86_inc_mem (code, &cov->data [bb->dfn].count);
3261 MONO_BB_FOR_EACH_INS (bb, ins) {
3262 offset = code - cfg->native_code;
3264 max_len = ins_native_length (cfg, ins);
3266 if (offset > (cfg->code_size - max_len - 16)) {
3267 cfg->code_size *= 2;
3268 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
3269 code = cfg->native_code + offset;
3271 // if (ins->cil_code)
3272 // g_print ("cil code\n");
3273 mono_debug_record_line_number (cfg, ins, offset);
3275 switch (normalize_opcode (ins->opcode)) {
3276 case OP_RELAXED_NOP:
3279 case OP_DUMMY_STORE:
3280 case OP_NOT_REACHED:
3283 case OP_SEQ_POINT: {
3286 if (cfg->compile_aot)
3290 * Read from the single stepping trigger page. This will cause a
3291 * SIGSEGV when single stepping is enabled.
3292 * We do this _before_ the breakpoint, so single stepping after
3293 * a breakpoint is hit will step to the next IL offset.
3295 if (ins->flags & MONO_INST_SINGLE_STEP_LOC) {
3296 ppc_load (code, ppc_r11, (gsize)ss_trigger_page);
3297 ppc_ldptr (code, ppc_r11, 0, ppc_r11);
3300 mono_add_seq_point (cfg, bb, ins, code - cfg->native_code);
3303 * A placeholder for a possible breakpoint inserted by
3304 * mono_arch_set_breakpoint ().
3306 for (i = 0; i < BREAKPOINT_SIZE / 4; ++i)
3311 emit_tls_access (code, ins->dreg, ins->inst_offset);
3314 ppc_mullw (code, ppc_r0, ins->sreg1, ins->sreg2);
3315 ppc_mulhw (code, ppc_r3, ins->sreg1, ins->sreg2);
3316 ppc_mr (code, ppc_r4, ppc_r0);
3319 ppc_mullw (code, ppc_r0, ins->sreg1, ins->sreg2);
3320 ppc_mulhwu (code, ppc_r3, ins->sreg1, ins->sreg2);
3321 ppc_mr (code, ppc_r4, ppc_r0);
3323 case OP_MEMORY_BARRIER:
3326 case OP_STOREI1_MEMBASE_REG:
3327 if (ppc_is_imm16 (ins->inst_offset)) {
3328 ppc_stb (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3330 if (ppc_is_imm32 (ins->inst_offset)) {
3331 ppc_addis (code, ppc_r12, ins->inst_destbasereg, ppc_ha(ins->inst_offset));
3332 ppc_stb (code, ins->sreg1, ins->inst_offset, ppc_r12);
3334 ppc_load (code, ppc_r0, ins->inst_offset);
3335 ppc_stbx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3339 case OP_STOREI2_MEMBASE_REG:
3340 if (ppc_is_imm16 (ins->inst_offset)) {
3341 ppc_sth (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3343 if (ppc_is_imm32 (ins->inst_offset)) {
3344 ppc_addis (code, ppc_r12, ins->inst_destbasereg, ppc_ha(ins->inst_offset));
3345 ppc_sth (code, ins->sreg1, ins->inst_offset, ppc_r12);
3347 ppc_load (code, ppc_r0, ins->inst_offset);
3348 ppc_sthx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3352 case OP_STORE_MEMBASE_REG:
3353 if (ppc_is_imm16 (ins->inst_offset)) {
3354 ppc_stptr (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3356 if (ppc_is_imm32 (ins->inst_offset)) {
3357 ppc_addis (code, ppc_r12, ins->inst_destbasereg, ppc_ha(ins->inst_offset));
3358 ppc_stptr (code, ins->sreg1, ins->inst_offset, ppc_r12);
3360 ppc_load (code, ppc_r0, ins->inst_offset);
3361 ppc_stptr_indexed (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3365 #ifdef __mono_ilp32__
3366 case OP_STOREI8_MEMBASE_REG:
3367 if (ppc_is_imm16 (ins->inst_offset)) {
3368 ppc_str (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3370 ppc_load (code, ppc_r0, ins->inst_offset);
3371 ppc_str_indexed (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3375 case OP_STOREI1_MEMINDEX:
3376 ppc_stbx (code, ins->sreg1, ins->inst_destbasereg, ins->sreg2);
3378 case OP_STOREI2_MEMINDEX:
3379 ppc_sthx (code, ins->sreg1, ins->inst_destbasereg, ins->sreg2);
3381 case OP_STORE_MEMINDEX:
3382 ppc_stptr_indexed (code, ins->sreg1, ins->inst_destbasereg, ins->sreg2);
3385 g_assert_not_reached ();
3387 case OP_LOAD_MEMBASE:
3388 if (ppc_is_imm16 (ins->inst_offset)) {
3389 ppc_ldptr (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3391 if (ppc_is_imm32 (ins->inst_offset) && (ins->dreg > 0)) {
3392 ppc_addis (code, ins->dreg, ins->inst_basereg, ppc_ha(ins->inst_offset));
3393 ppc_ldptr (code, ins->dreg, ins->inst_offset, ins->dreg);
3395 ppc_load (code, ppc_r0, ins->inst_offset);
3396 ppc_ldptr_indexed (code, ins->dreg, ins->inst_basereg, ppc_r0);
3400 case OP_LOADI4_MEMBASE:
3401 #ifdef __mono_ppc64__
3402 if (ppc_is_imm16 (ins->inst_offset)) {
3403 ppc_lwa (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3405 if (ppc_is_imm32 (ins->inst_offset) && (ins->dreg > 0)) {
3406 ppc_addis (code, ins->dreg, ins->inst_basereg, ppc_ha(ins->inst_offset));
3407 ppc_lwa (code, ins->dreg, ins->inst_offset, ins->dreg);
3409 ppc_load (code, ppc_r0, ins->inst_offset);
3410 ppc_lwax (code, ins->dreg, ins->inst_basereg, ppc_r0);
3415 case OP_LOADU4_MEMBASE:
3416 if (ppc_is_imm16 (ins->inst_offset)) {
3417 ppc_lwz (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3419 if (ppc_is_imm32 (ins->inst_offset) && (ins->dreg > 0)) {
3420 ppc_addis (code, ins->dreg, ins->inst_basereg, ppc_ha(ins->inst_offset));
3421 ppc_lwz (code, ins->dreg, ins->inst_offset, ins->dreg);
3423 ppc_load (code, ppc_r0, ins->inst_offset);
3424 ppc_lwzx (code, ins->dreg, ins->inst_basereg, ppc_r0);
3428 case OP_LOADI1_MEMBASE:
3429 case OP_LOADU1_MEMBASE:
3430 if (ppc_is_imm16 (ins->inst_offset)) {
3431 ppc_lbz (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3433 if (ppc_is_imm32 (ins->inst_offset) && (ins->dreg > 0)) {
3434 ppc_addis (code, ins->dreg, ins->inst_basereg, ppc_ha(ins->inst_offset));
3435 ppc_lbz (code, ins->dreg, ins->inst_offset, ins->dreg);
3437 ppc_load (code, ppc_r0, ins->inst_offset);
3438 ppc_lbzx (code, ins->dreg, ins->inst_basereg, ppc_r0);
3441 if (ins->opcode == OP_LOADI1_MEMBASE)
3442 ppc_extsb (code, ins->dreg, ins->dreg);
3444 case OP_LOADU2_MEMBASE:
3445 if (ppc_is_imm16 (ins->inst_offset)) {
3446 ppc_lhz (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3448 if (ppc_is_imm32 (ins->inst_offset) && (ins->dreg > 0)) {
3449 ppc_addis (code, ins->dreg, ins->inst_basereg, ppc_ha(ins->inst_offset));
3450 ppc_lhz (code, ins->dreg, ins->inst_offset, ins->dreg);
3452 ppc_load (code, ppc_r0, ins->inst_offset);
3453 ppc_lhzx (code, ins->dreg, ins->inst_basereg, ppc_r0);
3457 case OP_LOADI2_MEMBASE:
3458 if (ppc_is_imm16 (ins->inst_offset)) {
3459 ppc_lha (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3461 if (ppc_is_imm32 (ins->inst_offset) && (ins->dreg > 0)) {
3462 ppc_addis (code, ins->dreg, ins->inst_basereg, ppc_ha(ins->inst_offset));
3463 ppc_lha (code, ins->dreg, ins->inst_offset, ins->dreg);
3465 ppc_load (code, ppc_r0, ins->inst_offset);
3466 ppc_lhax (code, ins->dreg, ins->inst_basereg, ppc_r0);
3470 #ifdef __mono_ilp32__
3471 case OP_LOADI8_MEMBASE:
3472 if (ppc_is_imm16 (ins->inst_offset)) {
3473 ppc_ldr (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3475 ppc_load (code, ppc_r0, ins->inst_offset);
3476 ppc_ldr_indexed (code, ins->dreg, ins->inst_basereg, ppc_r0);
3480 case OP_LOAD_MEMINDEX:
3481 ppc_ldptr_indexed (code, ins->dreg, ins->inst_basereg, ins->sreg2);
3483 case OP_LOADI4_MEMINDEX:
3484 #ifdef __mono_ppc64__
3485 ppc_lwax (code, ins->dreg, ins->inst_basereg, ins->sreg2);
3488 case OP_LOADU4_MEMINDEX:
3489 ppc_lwzx (code, ins->dreg, ins->inst_basereg, ins->sreg2);
3491 case OP_LOADU2_MEMINDEX:
3492 ppc_lhzx (code, ins->dreg, ins->inst_basereg, ins->sreg2);
3494 case OP_LOADI2_MEMINDEX:
3495 ppc_lhax (code, ins->dreg, ins->inst_basereg, ins->sreg2);
3497 case OP_LOADU1_MEMINDEX:
3498 ppc_lbzx (code, ins->dreg, ins->inst_basereg, ins->sreg2);
3500 case OP_LOADI1_MEMINDEX:
3501 ppc_lbzx (code, ins->dreg, ins->inst_basereg, ins->sreg2);
3502 ppc_extsb (code, ins->dreg, ins->dreg);
3504 case OP_ICONV_TO_I1:
3505 CASE_PPC64 (OP_LCONV_TO_I1)
3506 ppc_extsb (code, ins->dreg, ins->sreg1);
3508 case OP_ICONV_TO_I2:
3509 CASE_PPC64 (OP_LCONV_TO_I2)
3510 ppc_extsh (code, ins->dreg, ins->sreg1);
3512 case OP_ICONV_TO_U1:
3513 CASE_PPC64 (OP_LCONV_TO_U1)
3514 ppc_clrlwi (code, ins->dreg, ins->sreg1, 24);
3516 case OP_ICONV_TO_U2:
3517 CASE_PPC64 (OP_LCONV_TO_U2)
3518 ppc_clrlwi (code, ins->dreg, ins->sreg1, 16);
3522 CASE_PPC64 (OP_LCOMPARE)
3523 L = (sizeof (mgreg_t) == 4 || ins->opcode == OP_ICOMPARE) ? 0 : 1;
3525 if (next && compare_opcode_is_unsigned (next->opcode))
3526 ppc_cmpl (code, 0, L, ins->sreg1, ins->sreg2);
3528 ppc_cmp (code, 0, L, ins->sreg1, ins->sreg2);
3530 case OP_COMPARE_IMM:
3531 case OP_ICOMPARE_IMM:
3532 CASE_PPC64 (OP_LCOMPARE_IMM)
3533 L = (sizeof (mgreg_t) == 4 || ins->opcode == OP_ICOMPARE_IMM) ? 0 : 1;
3535 if (next && compare_opcode_is_unsigned (next->opcode)) {
3536 if (ppc_is_uimm16 (ins->inst_imm)) {
3537 ppc_cmpli (code, 0, L, ins->sreg1, (ins->inst_imm & 0xffff));
3539 g_assert_not_reached ();
3542 if (ppc_is_imm16 (ins->inst_imm)) {
3543 ppc_cmpi (code, 0, L, ins->sreg1, (ins->inst_imm & 0xffff));
3545 g_assert_not_reached ();
3554 ppc_addco (code, ins->dreg, ins->sreg1, ins->sreg2);
3557 CASE_PPC64 (OP_LADD)
3558 ppc_add (code, ins->dreg, ins->sreg1, ins->sreg2);
3562 ppc_adde (code, ins->dreg, ins->sreg1, ins->sreg2);
3565 if (ppc_is_imm16 (ins->inst_imm)) {
3566 ppc_addic (code, ins->dreg, ins->sreg1, ins->inst_imm);
3568 g_assert_not_reached ();
3573 CASE_PPC64 (OP_LADD_IMM)
3574 if (ppc_is_imm16 (ins->inst_imm)) {
3575 ppc_addi (code, ins->dreg, ins->sreg1, ins->inst_imm);
3577 g_assert_not_reached ();
3581 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3583 ppc_addo (code, ins->dreg, ins->sreg1, ins->sreg2);
3584 ppc_mfspr (code, ppc_r0, ppc_xer);
3585 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3586 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3588 case OP_IADD_OVF_UN:
3589 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3591 ppc_addco (code, ins->dreg, ins->sreg1, ins->sreg2);
3592 ppc_mfspr (code, ppc_r0, ppc_xer);
3593 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3594 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3597 CASE_PPC64 (OP_LSUB_OVF)
3598 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3600 ppc_subfo (code, ins->dreg, ins->sreg2, ins->sreg1);
3601 ppc_mfspr (code, ppc_r0, ppc_xer);
3602 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3603 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3605 case OP_ISUB_OVF_UN:
3606 CASE_PPC64 (OP_LSUB_OVF_UN)
3607 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3609 ppc_subfc (code, ins->dreg, ins->sreg2, ins->sreg1);
3610 ppc_mfspr (code, ppc_r0, ppc_xer);
3611 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3612 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE, PPC_BR_EQ, "OverflowException");
3614 case OP_ADD_OVF_CARRY:
3615 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3617 ppc_addeo (code, ins->dreg, ins->sreg1, ins->sreg2);
3618 ppc_mfspr (code, ppc_r0, ppc_xer);
3619 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3620 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3622 case OP_ADD_OVF_UN_CARRY:
3623 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3625 ppc_addeo (code, ins->dreg, ins->sreg1, ins->sreg2);
3626 ppc_mfspr (code, ppc_r0, ppc_xer);
3627 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3628 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3630 case OP_SUB_OVF_CARRY:
3631 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3633 ppc_subfeo (code, ins->dreg, ins->sreg2, ins->sreg1);
3634 ppc_mfspr (code, ppc_r0, ppc_xer);
3635 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3636 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3638 case OP_SUB_OVF_UN_CARRY:
3639 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3641 ppc_subfeo (code, ins->dreg, ins->sreg2, ins->sreg1);
3642 ppc_mfspr (code, ppc_r0, ppc_xer);
3643 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3644 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE, PPC_BR_EQ, "OverflowException");
3648 ppc_subfco (code, ins->dreg, ins->sreg2, ins->sreg1);
3651 CASE_PPC64 (OP_LSUB)
3652 ppc_subf (code, ins->dreg, ins->sreg2, ins->sreg1);
3656 ppc_subfe (code, ins->dreg, ins->sreg2, ins->sreg1);
3660 CASE_PPC64 (OP_LSUB_IMM)
3661 // we add the negated value
3662 if (ppc_is_imm16 (-ins->inst_imm))
3663 ppc_addi (code, ins->dreg, ins->sreg1, -ins->inst_imm);
3665 g_assert_not_reached ();
3669 g_assert (ppc_is_imm16 (ins->inst_imm));
3670 ppc_subfic (code, ins->dreg, ins->sreg1, ins->inst_imm);
3673 ppc_subfze (code, ins->dreg, ins->sreg1);
3676 CASE_PPC64 (OP_LAND)
3677 /* FIXME: the ppc macros as inconsistent here: put dest as the first arg! */
3678 ppc_and (code, ins->sreg1, ins->dreg, ins->sreg2);
3682 CASE_PPC64 (OP_LAND_IMM)
3683 if (!(ins->inst_imm & 0xffff0000)) {
3684 ppc_andid (code, ins->sreg1, ins->dreg, ins->inst_imm);
3685 } else if (!(ins->inst_imm & 0xffff)) {
3686 ppc_andisd (code, ins->sreg1, ins->dreg, ((guint32)ins->inst_imm >> 16));
3688 g_assert_not_reached ();
3692 CASE_PPC64 (OP_LDIV) {
3693 guint8 *divisor_is_m1;
3694 /* XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3696 ppc_compare_reg_imm (code, 0, ins->sreg2, -1);
3697 divisor_is_m1 = code;
3698 ppc_bc (code, PPC_BR_FALSE | PPC_BR_LIKELY, PPC_BR_EQ, 0);
3699 ppc_lis (code, ppc_r0, 0x8000);
3700 #ifdef __mono_ppc64__
3701 if (ins->opcode == OP_LDIV)
3702 ppc_sldi (code, ppc_r0, ppc_r0, 32);
3704 ppc_compare (code, 0, ins->sreg1, ppc_r0);
3705 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE, PPC_BR_EQ, "ArithmeticException");
3706 ppc_patch (divisor_is_m1, code);
3707 /* XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3709 if (ins->opcode == OP_IDIV)
3710 ppc_divwod (code, ins->dreg, ins->sreg1, ins->sreg2);
3711 #ifdef __mono_ppc64__
3713 ppc_divdod (code, ins->dreg, ins->sreg1, ins->sreg2);
3715 ppc_mfspr (code, ppc_r0, ppc_xer);
3716 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3717 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "DivideByZeroException");
3721 CASE_PPC64 (OP_LDIV_UN)
3722 if (ins->opcode == OP_IDIV_UN)
3723 ppc_divwuod (code, ins->dreg, ins->sreg1, ins->sreg2);
3724 #ifdef __mono_ppc64__
3726 ppc_divduod (code, ins->dreg, ins->sreg1, ins->sreg2);
3728 ppc_mfspr (code, ppc_r0, ppc_xer);
3729 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3730 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "DivideByZeroException");
3736 g_assert_not_reached ();
3739 ppc_or (code, ins->dreg, ins->sreg1, ins->sreg2);
3743 CASE_PPC64 (OP_LOR_IMM)
3744 if (!(ins->inst_imm & 0xffff0000)) {
3745 ppc_ori (code, ins->sreg1, ins->dreg, ins->inst_imm);
3746 } else if (!(ins->inst_imm & 0xffff)) {
3747 ppc_oris (code, ins->dreg, ins->sreg1, ((guint32)(ins->inst_imm) >> 16));
3749 g_assert_not_reached ();
3753 CASE_PPC64 (OP_LXOR)
3754 ppc_xor (code, ins->dreg, ins->sreg1, ins->sreg2);
3758 CASE_PPC64 (OP_LXOR_IMM)
3759 if (!(ins->inst_imm & 0xffff0000)) {
3760 ppc_xori (code, ins->sreg1, ins->dreg, ins->inst_imm);
3761 } else if (!(ins->inst_imm & 0xffff)) {
3762 ppc_xoris (code, ins->sreg1, ins->dreg, ((guint32)(ins->inst_imm) >> 16));
3764 g_assert_not_reached ();
3768 CASE_PPC64 (OP_LSHL)
3769 ppc_shift_left (code, ins->dreg, ins->sreg1, ins->sreg2);
3773 CASE_PPC64 (OP_LSHL_IMM)
3774 ppc_shift_left_imm (code, ins->dreg, ins->sreg1, MASK_SHIFT_IMM (ins->inst_imm));
3777 ppc_sraw (code, ins->dreg, ins->sreg1, ins->sreg2);
3780 ppc_shift_right_arith_imm (code, ins->dreg, ins->sreg1, MASK_SHIFT_IMM (ins->inst_imm));
3783 if (MASK_SHIFT_IMM (ins->inst_imm))
3784 ppc_shift_right_imm (code, ins->dreg, ins->sreg1, MASK_SHIFT_IMM (ins->inst_imm));
3786 ppc_mr (code, ins->dreg, ins->sreg1);
3789 ppc_srw (code, ins->dreg, ins->sreg1, ins->sreg2);
3792 CASE_PPC64 (OP_LNOT)
3793 ppc_not (code, ins->dreg, ins->sreg1);
3796 CASE_PPC64 (OP_LNEG)
3797 ppc_neg (code, ins->dreg, ins->sreg1);
3800 CASE_PPC64 (OP_LMUL)
3801 ppc_multiply (code, ins->dreg, ins->sreg1, ins->sreg2);
3805 CASE_PPC64 (OP_LMUL_IMM)
3806 if (ppc_is_imm16 (ins->inst_imm)) {
3807 ppc_mulli (code, ins->dreg, ins->sreg1, ins->inst_imm);
3809 g_assert_not_reached ();
3813 CASE_PPC64 (OP_LMUL_OVF)
3814 /* we annot use mcrxr, since it's not implemented on some processors
3815 * XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3817 if (ins->opcode == OP_IMUL_OVF)
3818 ppc_mullwo (code, ins->dreg, ins->sreg1, ins->sreg2);
3819 #ifdef __mono_ppc64__
3821 ppc_mulldo (code, ins->dreg, ins->sreg1, ins->sreg2);
3823 ppc_mfspr (code, ppc_r0, ppc_xer);
3824 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3825 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3827 case OP_IMUL_OVF_UN:
3828 CASE_PPC64 (OP_LMUL_OVF_UN)
3829 /* we first multiply to get the high word and compare to 0
3830 * to set the flags, then the result is discarded and then
3831 * we multiply to get the lower * bits result
3833 if (ins->opcode == OP_IMUL_OVF_UN)
3834 ppc_mulhwu (code, ppc_r0, ins->sreg1, ins->sreg2);
3835 #ifdef __mono_ppc64__
3837 ppc_mulhdu (code, ppc_r0, ins->sreg1, ins->sreg2);
3839 ppc_cmpi (code, 0, 0, ppc_r0, 0);
3840 EMIT_COND_SYSTEM_EXCEPTION (CEE_BNE_UN - CEE_BEQ, "OverflowException");
3841 ppc_multiply (code, ins->dreg, ins->sreg1, ins->sreg2);
3844 ppc_load (code, ins->dreg, ins->inst_c0);
3847 ppc_load (code, ins->dreg, ins->inst_l);
3850 case OP_LOAD_GOTADDR:
3851 /* The PLT implementation depends on this */
3852 g_assert (ins->dreg == ppc_r30);
3854 code = mono_arch_emit_load_got_addr (cfg->native_code, code, cfg, NULL);
3857 // FIXME: Fix max instruction length
3858 mono_add_patch_info (cfg, offset, (MonoJumpInfoType)ins->inst_right->inst_i1, ins->inst_right->inst_p0);
3859 /* arch_emit_got_access () patches this */
3860 ppc_load32 (code, ppc_r0, 0);
3861 ppc_ldptr_indexed (code, ins->dreg, ins->inst_basereg, ppc_r0);
3864 mono_add_patch_info (cfg, offset, (MonoJumpInfoType)ins->inst_i1, ins->inst_p0);
3865 ppc_load_sequence (code, ins->dreg, 0);
3867 CASE_PPC32 (OP_ICONV_TO_I4)
3868 CASE_PPC32 (OP_ICONV_TO_U4)
3870 ppc_mr (code, ins->dreg, ins->sreg1);
3873 int saved = ins->sreg1;
3874 if (ins->sreg1 == ppc_r3) {
3875 ppc_mr (code, ppc_r0, ins->sreg1);
3878 if (ins->sreg2 != ppc_r3)
3879 ppc_mr (code, ppc_r3, ins->sreg2);
3880 if (saved != ppc_r4)
3881 ppc_mr (code, ppc_r4, saved);
3885 ppc_fmr (code, ins->dreg, ins->sreg1);
3887 case OP_FCONV_TO_R4:
3888 ppc_frsp (code, ins->dreg, ins->sreg1);
3894 * Keep in sync with mono_arch_emit_epilog
3896 g_assert (!cfg->method->save_lmf);
3898 * Note: we can use ppc_r11 here because it is dead anyway:
3899 * we're leaving the method.
3901 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
3902 long ret_offset = cfg->stack_usage + PPC_RET_ADDR_OFFSET;
3903 if (ppc_is_imm16 (ret_offset)) {
3904 ppc_ldptr (code, ppc_r0, ret_offset, cfg->frame_reg);
3906 ppc_load (code, ppc_r11, ret_offset);
3907 ppc_ldptr_indexed (code, ppc_r0, cfg->frame_reg, ppc_r11);
3909 ppc_mtlr (code, ppc_r0);
3912 code = emit_load_volatile_arguments (cfg, code);
3914 if (ppc_is_imm16 (cfg->stack_usage)) {
3915 ppc_addi (code, ppc_r11, cfg->frame_reg, cfg->stack_usage);
3917 /* cfg->stack_usage is an int, so we can use
3918 * an addis/addi sequence here even in 64-bit. */
3919 ppc_addis (code, ppc_r11, cfg->frame_reg, ppc_ha(cfg->stack_usage));
3920 ppc_addi (code, ppc_r11, ppc_r11, cfg->stack_usage);
3922 if (!cfg->method->save_lmf) {
3923 /*for (i = 31; i >= 14; --i) {
3924 if (cfg->used_float_regs & (1 << i)) {
3925 pos += sizeof (double);
3926 ppc_lfd (code, i, -pos, cfg->frame_reg);
3930 for (i = 31; i >= 13; --i) {
3931 if (cfg->used_int_regs & (1 << i)) {
3932 pos += sizeof (gpointer);
3933 ppc_ldptr (code, i, -pos, ppc_r11);
3937 /* FIXME restore from MonoLMF: though this can't happen yet */
3939 ppc_mr (code, ppc_sp, ppc_r11);
3940 mono_add_patch_info (cfg, (guint8*) code - cfg->native_code, MONO_PATCH_INFO_METHOD_JUMP, ins->inst_p0);
3941 if (cfg->compile_aot) {
3942 /* arch_emit_got_access () patches this */
3943 ppc_load32 (code, ppc_r0, 0);
3944 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
3945 ppc_ldptr_indexed (code, ppc_r11, ppc_r30, ppc_r0);
3946 ppc_ldptr (code, ppc_r0, 0, ppc_r11);
3948 ppc_ldptr_indexed (code, ppc_r0, ppc_r30, ppc_r0);
3950 ppc_mtctr (code, ppc_r0);
3951 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
3958 /* ensure ins->sreg1 is not NULL */
3959 ppc_ldptr (code, ppc_r0, 0, ins->sreg1);
3962 long cookie_offset = cfg->sig_cookie + cfg->stack_usage;
3963 if (ppc_is_imm16 (cookie_offset)) {
3964 ppc_addi (code, ppc_r0, cfg->frame_reg, cookie_offset);
3966 ppc_load (code, ppc_r0, cookie_offset);
3967 ppc_add (code, ppc_r0, cfg->frame_reg, ppc_r0);
3969 ppc_stptr (code, ppc_r0, 0, ins->sreg1);
3978 call = (MonoCallInst*)ins;
3979 if (ins->flags & MONO_INST_HAS_METHOD)
3980 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_METHOD, call->method);
3982 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_ABS, call->fptr);
3983 if ((FORCE_INDIR_CALL || cfg->method->dynamic) && !cfg->compile_aot) {
3984 ppc_load_func (code, ppc_r0, 0);
3985 ppc_mtlr (code, ppc_r0);
3990 /* FIXME: this should be handled somewhere else in the new jit */
3991 code = emit_move_return_value (cfg, ins, code);
3997 case OP_VOIDCALL_REG:
3999 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
4000 ppc_ldptr (code, ppc_r0, 0, ins->sreg1);
4001 /* FIXME: if we know that this is a method, we
4002 can omit this load */
4003 ppc_ldptr (code, ppc_r2, 8, ins->sreg1);
4004 ppc_mtlr (code, ppc_r0);
4006 ppc_mtlr (code, ins->sreg1);
4009 /* FIXME: this should be handled somewhere else in the new jit */
4010 code = emit_move_return_value (cfg, ins, code);
4012 case OP_FCALL_MEMBASE:
4013 case OP_LCALL_MEMBASE:
4014 case OP_VCALL_MEMBASE:
4015 case OP_VCALL2_MEMBASE:
4016 case OP_VOIDCALL_MEMBASE:
4017 case OP_CALL_MEMBASE:
4018 ppc_ldptr (code, ppc_r0, ins->inst_offset, ins->sreg1);
4019 ppc_mtlr (code, ppc_r0);
4021 /* FIXME: this should be handled somewhere else in the new jit */
4022 code = emit_move_return_value (cfg, ins, code);
4025 guint8 * zero_loop_jump, * zero_loop_start;
4026 /* keep alignment */
4027 int alloca_waste = PPC_STACK_PARAM_OFFSET + cfg->param_area + 31;
4028 int area_offset = alloca_waste;
4030 ppc_addi (code, ppc_r11, ins->sreg1, alloca_waste + 31);
4031 /* FIXME: should be calculated from MONO_ARCH_FRAME_ALIGNMENT */
4032 ppc_clear_right_imm (code, ppc_r11, ppc_r11, 4);
4033 /* use ctr to store the number of words to 0 if needed */
4034 if (ins->flags & MONO_INST_INIT) {
4035 /* we zero 4 bytes at a time:
4036 * we add 7 instead of 3 so that we set the counter to
4037 * at least 1, otherwise the bdnz instruction will make
4038 * it negative and iterate billions of times.
4040 ppc_addi (code, ppc_r0, ins->sreg1, 7);
4041 ppc_shift_right_arith_imm (code, ppc_r0, ppc_r0, 2);
4042 ppc_mtctr (code, ppc_r0);
4044 ppc_ldptr (code, ppc_r0, 0, ppc_sp);
4045 ppc_neg (code, ppc_r11, ppc_r11);
4046 ppc_stptr_update_indexed (code, ppc_r0, ppc_sp, ppc_r11);
4048 /* FIXME: make this loop work in 8 byte
4049 increments on PPC64 */
4050 if (ins->flags & MONO_INST_INIT) {
4051 /* adjust the dest reg by -4 so we can use stwu */
4052 /* we actually adjust -8 because we let the loop
4055 ppc_addi (code, ins->dreg, ppc_sp, (area_offset - 8));
4056 ppc_li (code, ppc_r11, 0);
4057 zero_loop_start = code;
4058 ppc_stwu (code, ppc_r11, 4, ins->dreg);
4059 zero_loop_jump = code;
4060 ppc_bc (code, PPC_BR_DEC_CTR_NONZERO, 0, 0);
4061 ppc_patch (zero_loop_jump, zero_loop_start);
4063 ppc_addi (code, ins->dreg, ppc_sp, area_offset);
4068 ppc_mr (code, ppc_r3, ins->sreg1);
4069 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
4070 (gpointer)"mono_arch_throw_exception");
4071 if ((FORCE_INDIR_CALL || cfg->method->dynamic) && !cfg->compile_aot) {
4072 ppc_load_func (code, ppc_r0, 0);
4073 ppc_mtlr (code, ppc_r0);
4082 ppc_mr (code, ppc_r3, ins->sreg1);
4083 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
4084 (gpointer)"mono_arch_rethrow_exception");
4085 if ((FORCE_INDIR_CALL || cfg->method->dynamic) && !cfg->compile_aot) {
4086 ppc_load_func (code, ppc_r0, 0);
4087 ppc_mtlr (code, ppc_r0);
4094 case OP_START_HANDLER: {
4095 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
4096 g_assert (spvar->inst_basereg != ppc_sp);
4097 code = emit_reserve_param_area (cfg, code);
4098 ppc_mflr (code, ppc_r0);
4099 if (ppc_is_imm16 (spvar->inst_offset)) {
4100 ppc_stptr (code, ppc_r0, spvar->inst_offset, spvar->inst_basereg);
4102 ppc_load (code, ppc_r11, spvar->inst_offset);
4103 ppc_stptr_indexed (code, ppc_r0, ppc_r11, spvar->inst_basereg);
4107 case OP_ENDFILTER: {
4108 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
4109 g_assert (spvar->inst_basereg != ppc_sp);
4110 code = emit_unreserve_param_area (cfg, code);
4111 if (ins->sreg1 != ppc_r3)
4112 ppc_mr (code, ppc_r3, ins->sreg1);
4113 if (ppc_is_imm16 (spvar->inst_offset)) {
4114 ppc_ldptr (code, ppc_r0, spvar->inst_offset, spvar->inst_basereg);
4116 ppc_load (code, ppc_r11, spvar->inst_offset);
4117 ppc_ldptr_indexed (code, ppc_r0, spvar->inst_basereg, ppc_r11);
4119 ppc_mtlr (code, ppc_r0);
4123 case OP_ENDFINALLY: {
4124 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
4125 g_assert (spvar->inst_basereg != ppc_sp);
4126 code = emit_unreserve_param_area (cfg, code);
4127 ppc_ldptr (code, ppc_r0, spvar->inst_offset, spvar->inst_basereg);
4128 ppc_mtlr (code, ppc_r0);
4132 case OP_CALL_HANDLER:
4133 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_target_bb);
4137 ins->inst_c0 = code - cfg->native_code;
4140 /*if (ins->inst_target_bb->native_offset) {
4142 //x86_jump_code (code, cfg->native_code + ins->inst_target_bb->native_offset);
4144 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_target_bb);
4149 ppc_mtctr (code, ins->sreg1);
4150 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
4154 CASE_PPC64 (OP_LCEQ)
4155 ppc_li (code, ins->dreg, 0);
4156 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 2);
4157 ppc_li (code, ins->dreg, 1);
4163 CASE_PPC64 (OP_LCLT)
4164 CASE_PPC64 (OP_LCLT_UN)
4165 ppc_li (code, ins->dreg, 1);
4166 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 2);
4167 ppc_li (code, ins->dreg, 0);
4173 CASE_PPC64 (OP_LCGT)
4174 CASE_PPC64 (OP_LCGT_UN)
4175 ppc_li (code, ins->dreg, 1);
4176 ppc_bc (code, PPC_BR_TRUE, PPC_BR_GT, 2);
4177 ppc_li (code, ins->dreg, 0);
4179 case OP_COND_EXC_EQ:
4180 case OP_COND_EXC_NE_UN:
4181 case OP_COND_EXC_LT:
4182 case OP_COND_EXC_LT_UN:
4183 case OP_COND_EXC_GT:
4184 case OP_COND_EXC_GT_UN:
4185 case OP_COND_EXC_GE:
4186 case OP_COND_EXC_GE_UN:
4187 case OP_COND_EXC_LE:
4188 case OP_COND_EXC_LE_UN:
4189 EMIT_COND_SYSTEM_EXCEPTION (ins->opcode - OP_COND_EXC_EQ, ins->inst_p1);
4191 case OP_COND_EXC_IEQ:
4192 case OP_COND_EXC_INE_UN:
4193 case OP_COND_EXC_ILT:
4194 case OP_COND_EXC_ILT_UN:
4195 case OP_COND_EXC_IGT:
4196 case OP_COND_EXC_IGT_UN:
4197 case OP_COND_EXC_IGE:
4198 case OP_COND_EXC_IGE_UN:
4199 case OP_COND_EXC_ILE:
4200 case OP_COND_EXC_ILE_UN:
4201 EMIT_COND_SYSTEM_EXCEPTION (ins->opcode - OP_COND_EXC_IEQ, ins->inst_p1);
4213 EMIT_COND_BRANCH (ins, ins->opcode - OP_IBEQ);
4216 /* floating point opcodes */
4218 g_assert (cfg->compile_aot);
4220 /* FIXME: Optimize this */
4222 ppc_mflr (code, ppc_r11);
4224 *(double*)code = *(double*)ins->inst_p0;
4226 ppc_lfd (code, ins->dreg, 8, ppc_r11);
4229 g_assert_not_reached ();
4231 case OP_STORER8_MEMBASE_REG:
4232 if (ppc_is_imm16 (ins->inst_offset)) {
4233 ppc_stfd (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
4235 if (ppc_is_imm32 (ins->inst_offset)) {
4236 ppc_addis (code, ppc_r12, ins->inst_destbasereg, ppc_ha(ins->inst_offset));
4237 ppc_stfd (code, ins->sreg1, ins->inst_offset, ppc_r12);
4239 ppc_load (code, ppc_r0, ins->inst_offset);
4240 ppc_stfdx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
4244 case OP_LOADR8_MEMBASE:
4245 if (ppc_is_imm16 (ins->inst_offset)) {
4246 ppc_lfd (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
4248 if (ppc_is_imm32 (ins->inst_offset)) {
4249 ppc_addis (code, ppc_r12, ins->inst_destbasereg, ppc_ha(ins->inst_offset));
4250 ppc_lfd (code, ins->dreg, ins->inst_offset, ppc_r12);
4252 ppc_load (code, ppc_r0, ins->inst_offset);
4253 ppc_lfdx (code, ins->dreg, ins->inst_destbasereg, ppc_r0);
4257 case OP_STORER4_MEMBASE_REG:
4258 ppc_frsp (code, ins->sreg1, ins->sreg1);
4259 if (ppc_is_imm16 (ins->inst_offset)) {
4260 ppc_stfs (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
4262 if (ppc_is_imm32 (ins->inst_offset)) {
4263 ppc_addis (code, ppc_r12, ins->inst_destbasereg, ppc_ha(ins->inst_offset));
4264 ppc_stfs (code, ins->sreg1, ins->inst_offset, ppc_r12);
4266 ppc_load (code, ppc_r0, ins->inst_offset);
4267 ppc_stfsx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
4271 case OP_LOADR4_MEMBASE:
4272 if (ppc_is_imm16 (ins->inst_offset)) {
4273 ppc_lfs (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
4275 if (ppc_is_imm32 (ins->inst_offset)) {
4276 ppc_addis (code, ppc_r12, ins->inst_destbasereg, ppc_ha(ins->inst_offset));
4277 ppc_lfs (code, ins->dreg, ins->inst_offset, ppc_r12);
4279 ppc_load (code, ppc_r0, ins->inst_offset);
4280 ppc_lfsx (code, ins->dreg, ins->inst_destbasereg, ppc_r0);
4284 case OP_LOADR4_MEMINDEX:
4285 ppc_lfsx (code, ins->dreg, ins->inst_basereg, ins->sreg2);
4287 case OP_LOADR8_MEMINDEX:
4288 ppc_lfdx (code, ins->dreg, ins->inst_basereg, ins->sreg2);
4290 case OP_STORER4_MEMINDEX:
4291 ppc_frsp (code, ins->sreg1, ins->sreg1);
4292 ppc_stfsx (code, ins->sreg1, ins->inst_destbasereg, ins->sreg2);
4294 case OP_STORER8_MEMINDEX:
4295 ppc_stfdx (code, ins->sreg1, ins->inst_destbasereg, ins->sreg2);
4298 case CEE_CONV_R4: /* FIXME: change precision */
4300 g_assert_not_reached ();
4301 case OP_FCONV_TO_I1:
4302 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 1, TRUE);
4304 case OP_FCONV_TO_U1:
4305 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 1, FALSE);
4307 case OP_FCONV_TO_I2:
4308 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 2, TRUE);
4310 case OP_FCONV_TO_U2:
4311 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 2, FALSE);
4313 case OP_FCONV_TO_I4:
4315 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 4, TRUE);
4317 case OP_FCONV_TO_U4:
4319 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 4, FALSE);
4321 case OP_LCONV_TO_R_UN:
4322 g_assert_not_reached ();
4323 /* Implemented as helper calls */
4325 case OP_LCONV_TO_OVF_I4_2:
4326 case OP_LCONV_TO_OVF_I: {
4327 #ifdef __mono_ppc64__
4330 guint8 *negative_branch, *msword_positive_branch, *msword_negative_branch, *ovf_ex_target;
4331 // Check if it's negative
4332 ppc_cmpi (code, 0, 0, ins->sreg1, 0);
4333 negative_branch = code;
4334 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 0);
4335 // It's positive: msword == 0
4336 ppc_cmpi (code, 0, 0, ins->sreg2, 0);
4337 msword_positive_branch = code;
4338 ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
4340 ovf_ex_target = code;
4341 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_ALWAYS, 0, "OverflowException");
4343 ppc_patch (negative_branch, code);
4344 ppc_cmpi (code, 0, 0, ins->sreg2, -1);
4345 msword_negative_branch = code;
4346 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4347 ppc_patch (msword_negative_branch, ovf_ex_target);
4349 ppc_patch (msword_positive_branch, code);
4350 if (ins->dreg != ins->sreg1)
4351 ppc_mr (code, ins->dreg, ins->sreg1);
4356 ppc_fsqrtd (code, ins->dreg, ins->sreg1);
4359 ppc_fadd (code, ins->dreg, ins->sreg1, ins->sreg2);
4362 ppc_fsub (code, ins->dreg, ins->sreg1, ins->sreg2);
4365 ppc_fmul (code, ins->dreg, ins->sreg1, ins->sreg2);
4368 ppc_fdiv (code, ins->dreg, ins->sreg1, ins->sreg2);
4371 ppc_fneg (code, ins->dreg, ins->sreg1);
4375 g_assert_not_reached ();
4378 ppc_fcmpu (code, 0, ins->sreg1, ins->sreg2);
4381 ppc_fcmpo (code, 0, ins->sreg1, ins->sreg2);
4382 ppc_li (code, ins->dreg, 0);
4383 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 2);
4384 ppc_li (code, ins->dreg, 1);
4387 ppc_fcmpo (code, 0, ins->sreg1, ins->sreg2);
4388 ppc_li (code, ins->dreg, 1);
4389 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 2);
4390 ppc_li (code, ins->dreg, 0);
4393 ppc_fcmpu (code, 0, ins->sreg1, ins->sreg2);
4394 ppc_li (code, ins->dreg, 1);
4395 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 3);
4396 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 2);
4397 ppc_li (code, ins->dreg, 0);
4400 ppc_fcmpo (code, 0, ins->sreg1, ins->sreg2);
4401 ppc_li (code, ins->dreg, 1);
4402 ppc_bc (code, PPC_BR_TRUE, PPC_BR_GT, 2);
4403 ppc_li (code, ins->dreg, 0);
4406 ppc_fcmpu (code, 0, ins->sreg1, ins->sreg2);
4407 ppc_li (code, ins->dreg, 1);
4408 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 3);
4409 ppc_bc (code, PPC_BR_TRUE, PPC_BR_GT, 2);
4410 ppc_li (code, ins->dreg, 0);
4413 EMIT_COND_BRANCH (ins, CEE_BEQ - CEE_BEQ);
4416 EMIT_COND_BRANCH (ins, CEE_BNE_UN - CEE_BEQ);
4419 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4420 EMIT_COND_BRANCH (ins, CEE_BLT - CEE_BEQ);
4423 EMIT_COND_BRANCH_FLAGS (ins, PPC_BR_TRUE, PPC_BR_SO);
4424 EMIT_COND_BRANCH (ins, CEE_BLT_UN - CEE_BEQ);
4427 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4428 EMIT_COND_BRANCH (ins, CEE_BGT - CEE_BEQ);
4431 EMIT_COND_BRANCH_FLAGS (ins, PPC_BR_TRUE, PPC_BR_SO);
4432 EMIT_COND_BRANCH (ins, CEE_BGT_UN - CEE_BEQ);
4435 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4436 EMIT_COND_BRANCH (ins, CEE_BGE - CEE_BEQ);
4439 EMIT_COND_BRANCH (ins, CEE_BGE_UN - CEE_BEQ);
4442 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4443 EMIT_COND_BRANCH (ins, CEE_BLE - CEE_BEQ);
4446 EMIT_COND_BRANCH (ins, CEE_BLE_UN - CEE_BEQ);
4449 g_assert_not_reached ();
4450 case OP_CHECK_FINITE: {
4451 ppc_rlwinm (code, ins->sreg1, ins->sreg1, 0, 1, 31);
4452 ppc_addis (code, ins->sreg1, ins->sreg1, -32752);
4453 ppc_rlwinmd (code, ins->sreg1, ins->sreg1, 1, 31, 31);
4454 EMIT_COND_SYSTEM_EXCEPTION (CEE_BEQ - CEE_BEQ, "ArithmeticException");
4457 mono_add_patch_info (cfg, offset, (MonoJumpInfoType)ins->inst_i1, ins->inst_p0);
4458 #ifdef __mono_ppc64__
4459 ppc_load_sequence (code, ins->dreg, (guint64)0x0f0f0f0f0f0f0f0fLL);
4461 ppc_load_sequence (code, ins->dreg, (gulong)0x0f0f0f0fL);
4466 #ifdef __mono_ppc64__
4467 case OP_ICONV_TO_I4:
4469 ppc_extsw (code, ins->dreg, ins->sreg1);
4471 case OP_ICONV_TO_U4:
4473 ppc_clrldi (code, ins->dreg, ins->sreg1, 32);
4475 case OP_ICONV_TO_R4:
4476 case OP_ICONV_TO_R8:
4477 case OP_LCONV_TO_R4:
4478 case OP_LCONV_TO_R8: {
4480 if (ins->opcode == OP_ICONV_TO_R4 || ins->opcode == OP_ICONV_TO_R8) {
4481 ppc_extsw (code, ppc_r0, ins->sreg1);
4486 if (cpu_hw_caps & PPC_MOVE_FPR_GPR) {
4487 ppc_mffgpr (code, ins->dreg, tmp);
4489 ppc_str (code, tmp, -8, ppc_r1);
4490 ppc_lfd (code, ins->dreg, -8, ppc_r1);
4492 ppc_fcfid (code, ins->dreg, ins->dreg);
4493 if (ins->opcode == OP_ICONV_TO_R4 || ins->opcode == OP_LCONV_TO_R4)
4494 ppc_frsp (code, ins->dreg, ins->dreg);
4498 ppc_srad (code, ins->dreg, ins->sreg1, ins->sreg2);
4501 ppc_srd (code, ins->dreg, ins->sreg1, ins->sreg2);
4504 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
4506 ppc_mfspr (code, ppc_r0, ppc_xer);
4507 ppc_andisd (code, ppc_r0, ppc_r0, (1 << 13)); /* CA */
4508 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, ins->inst_p1);
4510 case OP_COND_EXC_OV:
4511 ppc_mfspr (code, ppc_r0, ppc_xer);
4512 ppc_andisd (code, ppc_r0, ppc_r0, (1 << 14)); /* OV */
4513 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, ins->inst_p1);
4525 EMIT_COND_BRANCH (ins, ins->opcode - OP_LBEQ);
4527 case OP_FCONV_TO_I8:
4528 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 8, TRUE);
4530 case OP_FCONV_TO_U8:
4531 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 8, FALSE);
4533 case OP_STOREI4_MEMBASE_REG:
4534 if (ppc_is_imm16 (ins->inst_offset)) {
4535 ppc_stw (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
4537 ppc_load (code, ppc_r0, ins->inst_offset);
4538 ppc_stwx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
4541 case OP_STOREI4_MEMINDEX:
4542 ppc_stwx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
4545 ppc_srawi (code, ins->dreg, ins->sreg1, (ins->inst_imm & 0x1f));
4547 case OP_ISHR_UN_IMM:
4548 if (ins->inst_imm & 0x1f)
4549 ppc_srwi (code, ins->dreg, ins->sreg1, (ins->inst_imm & 0x1f));
4551 ppc_mr (code, ins->dreg, ins->sreg1);
4553 case OP_ATOMIC_ADD_NEW_I4:
4554 case OP_ATOMIC_ADD_NEW_I8: {
4555 guint8 *loop = code, *branch;
4556 g_assert (ins->inst_offset == 0);
4557 if (ins->opcode == OP_ATOMIC_ADD_NEW_I4)
4558 ppc_lwarx (code, ppc_r0, 0, ins->inst_basereg);
4560 ppc_ldarx (code, ppc_r0, 0, ins->inst_basereg);
4561 ppc_add (code, ppc_r0, ppc_r0, ins->sreg2);
4562 if (ins->opcode == OP_ATOMIC_ADD_NEW_I4)
4563 ppc_stwcxd (code, ppc_r0, 0, ins->inst_basereg);
4565 ppc_stdcxd (code, ppc_r0, 0, ins->inst_basereg);
4567 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4568 ppc_patch (branch, loop);
4569 ppc_mr (code, ins->dreg, ppc_r0);
4573 case OP_ICONV_TO_R4:
4574 case OP_ICONV_TO_R8: {
4575 if (cpu_hw_caps & PPC_ISA_64) {
4576 ppc_srawi(code, ppc_r0, ins->sreg1, 31);
4577 ppc_stw (code, ppc_r0, -8, ppc_r1);
4578 ppc_stw (code, ins->sreg1, -4, ppc_r1);
4579 ppc_lfd (code, ins->dreg, -8, ppc_r1);
4580 ppc_fcfid (code, ins->dreg, ins->dreg);
4581 if (ins->opcode == OP_ICONV_TO_R4)
4582 ppc_frsp (code, ins->dreg, ins->dreg);
4587 case OP_ATOMIC_CAS_I4:
4588 CASE_PPC64 (OP_ATOMIC_CAS_I8) {
4589 int location = ins->sreg1;
4590 int value = ins->sreg2;
4591 int comparand = ins->sreg3;
4592 guint8 *start, *not_equal, *lost_reservation;
4595 if (ins->opcode == OP_ATOMIC_CAS_I4)
4596 ppc_lwarx (code, ppc_r0, 0, location);
4597 #ifdef __mono_ppc64__
4599 ppc_ldarx (code, ppc_r0, 0, location);
4601 ppc_cmp (code, 0, ins->opcode == OP_ATOMIC_CAS_I4 ? 0 : 1, ppc_r0, comparand);
4604 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4605 if (ins->opcode == OP_ATOMIC_CAS_I4)
4606 ppc_stwcxd (code, value, 0, location);
4607 #ifdef __mono_ppc64__
4609 ppc_stdcxd (code, value, 0, location);
4612 lost_reservation = code;
4613 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4614 ppc_patch (lost_reservation, start);
4616 ppc_patch (not_equal, code);
4617 ppc_mr (code, ins->dreg, ppc_r0);
4622 g_warning ("unknown opcode %s in %s()\n", mono_inst_name (ins->opcode), __FUNCTION__);
4623 g_assert_not_reached ();
4626 if ((cfg->opt & MONO_OPT_BRANCH) && ((code - cfg->native_code - offset) > max_len)) {
4627 g_warning ("wrong maximal instruction length of instruction %s (expected %d, got %ld)",
4628 mono_inst_name (ins->opcode), max_len, (glong)(code - cfg->native_code - offset));
4629 g_assert_not_reached ();
4635 last_offset = offset;
4638 cfg->code_len = code - cfg->native_code;
4642 mono_arch_register_lowlevel_calls (void)
4644 /* The signature doesn't matter */
4645 mono_register_jit_icall (mono_ppc_throw_exception, "mono_ppc_throw_exception", mono_create_icall_signature ("void"), TRUE);
4648 #ifdef __mono_ppc64__
4649 #define patch_load_sequence(ip,val) do {\
4650 guint16 *__load = (guint16*)(ip); \
4651 g_assert (sizeof (val) == sizeof (gsize)); \
4652 __load [1] = (((guint64)(gsize)(val)) >> 48) & 0xffff; \
4653 __load [3] = (((guint64)(gsize)(val)) >> 32) & 0xffff; \
4654 __load [7] = (((guint64)(gsize)(val)) >> 16) & 0xffff; \
4655 __load [9] = ((guint64)(gsize)(val)) & 0xffff; \
4658 #define patch_load_sequence(ip,val) do {\
4659 guint16 *__lis_ori = (guint16*)(ip); \
4660 __lis_ori [1] = (((gulong)(val)) >> 16) & 0xffff; \
4661 __lis_ori [3] = ((gulong)(val)) & 0xffff; \
4666 mono_arch_patch_code (MonoMethod *method, MonoDomain *domain, guint8 *code, MonoJumpInfo *ji, gboolean run_cctors)
4668 MonoJumpInfo *patch_info;
4669 gboolean compile_aot = !run_cctors;
4671 for (patch_info = ji; patch_info; patch_info = patch_info->next) {
4672 unsigned char *ip = patch_info->ip.i + code;
4673 unsigned char *target;
4674 gboolean is_fd = FALSE;
4676 target = mono_resolve_patch_target (method, domain, code, patch_info, run_cctors);
4679 switch (patch_info->type) {
4680 case MONO_PATCH_INFO_BB:
4681 case MONO_PATCH_INFO_LABEL:
4684 /* No need to patch these */
4689 switch (patch_info->type) {
4690 case MONO_PATCH_INFO_IP:
4691 patch_load_sequence (ip, ip);
4693 case MONO_PATCH_INFO_METHOD_REL:
4694 g_assert_not_reached ();
4695 *((gpointer *)(ip)) = code + patch_info->data.offset;
4697 case MONO_PATCH_INFO_SWITCH: {
4698 gpointer *table = (gpointer *)patch_info->data.table->table;
4701 patch_load_sequence (ip, table);
4703 for (i = 0; i < patch_info->data.table->table_size; i++) {
4704 table [i] = (glong)patch_info->data.table->table [i] + code;
4706 /* we put into the table the absolute address, no need for ppc_patch in this case */
4709 case MONO_PATCH_INFO_METHODCONST:
4710 case MONO_PATCH_INFO_CLASS:
4711 case MONO_PATCH_INFO_IMAGE:
4712 case MONO_PATCH_INFO_FIELD:
4713 case MONO_PATCH_INFO_VTABLE:
4714 case MONO_PATCH_INFO_IID:
4715 case MONO_PATCH_INFO_SFLDA:
4716 case MONO_PATCH_INFO_LDSTR:
4717 case MONO_PATCH_INFO_TYPE_FROM_HANDLE:
4718 case MONO_PATCH_INFO_LDTOKEN:
4719 /* from OP_AOTCONST : lis + ori */
4720 patch_load_sequence (ip, target);
4722 case MONO_PATCH_INFO_R4:
4723 case MONO_PATCH_INFO_R8:
4724 g_assert_not_reached ();
4725 *((gconstpointer *)(ip + 2)) = patch_info->data.target;
4727 case MONO_PATCH_INFO_EXC_NAME:
4728 g_assert_not_reached ();
4729 *((gconstpointer *)(ip + 1)) = patch_info->data.name;
4731 case MONO_PATCH_INFO_NONE:
4732 case MONO_PATCH_INFO_BB_OVF:
4733 case MONO_PATCH_INFO_EXC_OVF:
4734 /* everything is dealt with at epilog output time */
4736 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
4737 case MONO_PATCH_INFO_INTERNAL_METHOD:
4738 case MONO_PATCH_INFO_ABS:
4739 case MONO_PATCH_INFO_CLASS_INIT:
4740 case MONO_PATCH_INFO_RGCTX_FETCH:
4747 ppc_patch_full (ip, target, is_fd);
4752 * Emit code to save the registers in used_int_regs or the registers in the MonoLMF
4753 * structure at positive offset pos from register base_reg. pos is guaranteed to fit into
4754 * the instruction offset immediate for all the registers.
4757 save_registers (MonoCompile *cfg, guint8* code, int pos, int base_reg, gboolean save_lmf, guint32 used_int_regs, int cfa_offset)
4761 for (i = 13; i <= 31; i++) {
4762 if (used_int_regs & (1 << i)) {
4763 ppc_str (code, i, pos, base_reg);
4764 mono_emit_unwind_op_offset (cfg, code, i, pos - cfa_offset);
4765 pos += sizeof (mgreg_t);
4769 /* pos is the start of the MonoLMF structure */
4770 int offset = pos + G_STRUCT_OFFSET (MonoLMF, iregs);
4771 for (i = 13; i <= 31; i++) {
4772 ppc_str (code, i, offset, base_reg);
4773 mono_emit_unwind_op_offset (cfg, code, i, offset - cfa_offset);
4774 offset += sizeof (mgreg_t);
4776 offset = pos + G_STRUCT_OFFSET (MonoLMF, fregs);
4777 for (i = 14; i < 32; i++) {
4778 ppc_stfd (code, i, offset, base_reg);
4779 offset += sizeof (gdouble);
4786 * Stack frame layout:
4788 * ------------------- sp
4789 * MonoLMF structure or saved registers
4790 * -------------------
4792 * -------------------
4794 * -------------------
4795 * optional 8 bytes for tracing
4796 * -------------------
4797 * param area size is cfg->param_area
4798 * -------------------
4799 * linkage area size is PPC_STACK_PARAM_OFFSET
4800 * ------------------- sp
4804 mono_arch_emit_prolog (MonoCompile *cfg)
4806 MonoMethod *method = cfg->method;
4808 MonoMethodSignature *sig;
4810 long alloc_size, pos, max_offset, cfa_offset;
4816 int tailcall_struct_index;
4818 if (mono_jit_trace_calls != NULL && mono_trace_eval (method))
4821 sig = mono_method_signature (method);
4822 cfg->code_size = MONO_PPC_32_64_CASE (260, 384) + sig->param_count * 20;
4823 code = cfg->native_code = g_malloc (cfg->code_size);
4827 /* We currently emit unwind info for aot, but don't use it */
4828 mono_emit_unwind_op_def_cfa (cfg, code, ppc_r1, 0);
4830 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
4831 ppc_mflr (code, ppc_r0);
4832 ppc_str (code, ppc_r0, PPC_RET_ADDR_OFFSET, ppc_sp);
4833 mono_emit_unwind_op_offset (cfg, code, ppc_lr, PPC_RET_ADDR_OFFSET);
4836 alloc_size = cfg->stack_offset;
4839 if (!method->save_lmf) {
4840 for (i = 31; i >= 13; --i) {
4841 if (cfg->used_int_regs & (1 << i)) {
4842 pos += sizeof (mgreg_t);
4846 pos += sizeof (MonoLMF);
4850 // align to MONO_ARCH_FRAME_ALIGNMENT bytes
4851 if (alloc_size & (MONO_ARCH_FRAME_ALIGNMENT - 1)) {
4852 alloc_size += MONO_ARCH_FRAME_ALIGNMENT - 1;
4853 alloc_size &= ~(MONO_ARCH_FRAME_ALIGNMENT - 1);
4856 cfg->stack_usage = alloc_size;
4857 g_assert ((alloc_size & (MONO_ARCH_FRAME_ALIGNMENT-1)) == 0);
4859 if (ppc_is_imm16 (-alloc_size)) {
4860 ppc_str_update (code, ppc_sp, -alloc_size, ppc_sp);
4861 cfa_offset = alloc_size;
4862 mono_emit_unwind_op_def_cfa_offset (cfg, code, alloc_size);
4863 code = save_registers (cfg, code, alloc_size - pos, ppc_sp, method->save_lmf, cfg->used_int_regs, cfa_offset);
4866 ppc_addi (code, ppc_r11, ppc_sp, -pos);
4867 ppc_load (code, ppc_r0, -alloc_size);
4868 ppc_str_update_indexed (code, ppc_sp, ppc_sp, ppc_r0);
4869 cfa_offset = alloc_size;
4870 mono_emit_unwind_op_def_cfa_offset (cfg, code, alloc_size);
4871 code = save_registers (cfg, code, 0, ppc_r11, method->save_lmf, cfg->used_int_regs, cfa_offset);
4874 if (cfg->frame_reg != ppc_sp) {
4875 ppc_mr (code, cfg->frame_reg, ppc_sp);
4876 mono_emit_unwind_op_def_cfa_reg (cfg, code, cfg->frame_reg);
4879 /* store runtime generic context */
4880 if (cfg->rgctx_var) {
4881 g_assert (cfg->rgctx_var->opcode == OP_REGOFFSET &&
4882 (cfg->rgctx_var->inst_basereg == ppc_r1 || cfg->rgctx_var->inst_basereg == ppc_r31));
4884 ppc_stptr (code, MONO_ARCH_RGCTX_REG, cfg->rgctx_var->inst_offset, cfg->rgctx_var->inst_basereg);
4887 /* compute max_offset in order to use short forward jumps
4888 * we always do it on ppc because the immediate displacement
4889 * for jumps is too small
4892 for (bb = cfg->bb_entry; bb; bb = bb->next_bb) {
4894 bb->max_offset = max_offset;
4896 if (cfg->prof_options & MONO_PROFILE_COVERAGE)
4899 MONO_BB_FOR_EACH_INS (bb, ins)
4900 max_offset += ins_native_length (cfg, ins);
4903 /* load arguments allocated to register from the stack */
4906 cinfo = calculate_sizes (sig, sig->pinvoke);
4908 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
4909 ArgInfo *ainfo = &cinfo->ret;
4911 inst = cfg->vret_addr;
4914 if (ppc_is_imm16 (inst->inst_offset)) {
4915 ppc_stptr (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4917 ppc_load (code, ppc_r11, inst->inst_offset);
4918 ppc_stptr_indexed (code, ainfo->reg, ppc_r11, inst->inst_basereg);
4922 tailcall_struct_index = 0;
4923 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
4924 ArgInfo *ainfo = cinfo->args + i;
4925 inst = cfg->args [pos];
4927 if (cfg->verbose_level > 2)
4928 g_print ("Saving argument %d (type: %d)\n", i, ainfo->regtype);
4929 if (inst->opcode == OP_REGVAR) {
4930 if (ainfo->regtype == RegTypeGeneral)
4931 ppc_mr (code, inst->dreg, ainfo->reg);
4932 else if (ainfo->regtype == RegTypeFP)
4933 ppc_fmr (code, inst->dreg, ainfo->reg);
4934 else if (ainfo->regtype == RegTypeBase) {
4935 ppc_ldr (code, ppc_r11, 0, ppc_sp);
4936 ppc_ldptr (code, inst->dreg, ainfo->offset, ppc_r11);
4938 g_assert_not_reached ();
4940 if (cfg->verbose_level > 2)
4941 g_print ("Argument %ld assigned to register %s\n", pos, mono_arch_regname (inst->dreg));
4943 /* the argument should be put on the stack: FIXME handle size != word */
4944 if (ainfo->regtype == RegTypeGeneral) {
4945 switch (ainfo->size) {
4947 if (ppc_is_imm16 (inst->inst_offset)) {
4948 ppc_stb (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4950 if (ppc_is_imm32 (inst->inst_offset)) {
4951 ppc_addis (code, ppc_r11, inst->inst_basereg, ppc_ha(inst->inst_offset));
4952 ppc_stb (code, ainfo->reg, ppc_r11, inst->inst_offset);
4954 ppc_load (code, ppc_r11, inst->inst_offset);
4955 ppc_stbx (code, ainfo->reg, inst->inst_basereg, ppc_r11);
4960 if (ppc_is_imm16 (inst->inst_offset)) {
4961 ppc_sth (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4963 if (ppc_is_imm32 (inst->inst_offset)) {
4964 ppc_addis (code, ppc_r11, inst->inst_basereg, ppc_ha(inst->inst_offset));
4965 ppc_sth (code, ainfo->reg, ppc_r11, inst->inst_offset);
4967 ppc_load (code, ppc_r11, inst->inst_offset);
4968 ppc_sthx (code, ainfo->reg, inst->inst_basereg, ppc_r11);
4972 #ifdef __mono_ppc64__
4974 if (ppc_is_imm16 (inst->inst_offset)) {
4975 ppc_stw (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4977 if (ppc_is_imm32 (inst->inst_offset)) {
4978 ppc_addis (code, ppc_r11, inst->inst_basereg, ppc_ha(inst->inst_offset));
4979 ppc_stw (code, ainfo->reg, ppc_r11, inst->inst_offset);
4981 ppc_load (code, ppc_r11, inst->inst_offset);
4982 ppc_stwx (code, ainfo->reg, inst->inst_basereg, ppc_r11);
4987 if (ppc_is_imm16 (inst->inst_offset)) {
4988 ppc_str (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4990 ppc_load (code, ppc_r11, inst->inst_offset);
4991 ppc_str_indexed (code, ainfo->reg, ppc_r11, inst->inst_basereg);
4996 if (ppc_is_imm16 (inst->inst_offset + 4)) {
4997 ppc_stw (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4998 ppc_stw (code, ainfo->reg + 1, inst->inst_offset + 4, inst->inst_basereg);
5000 ppc_addis (code, ppc_r11, inst->inst_basereg, ppc_ha(inst->inst_offset));
5001 ppc_addi (code, ppc_r11, ppc_r11, inst->inst_offset);
5002 ppc_stw (code, ainfo->reg, 0, ppc_r11);
5003 ppc_stw (code, ainfo->reg + 1, 4, ppc_r11);
5008 if (ppc_is_imm16 (inst->inst_offset)) {
5009 ppc_stptr (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
5011 if (ppc_is_imm32 (inst->inst_offset)) {
5012 ppc_addis (code, ppc_r11, inst->inst_basereg, ppc_ha(inst->inst_offset));
5013 ppc_stptr (code, ainfo->reg, ppc_r11, inst->inst_offset);
5015 ppc_load (code, ppc_r11, inst->inst_offset);
5016 ppc_stptr_indexed (code, ainfo->reg, inst->inst_basereg, ppc_r11);
5021 } else if (ainfo->regtype == RegTypeBase) {
5022 g_assert (ppc_is_imm16 (ainfo->offset));
5023 /* load the previous stack pointer in r11 */
5024 ppc_ldr (code, ppc_r11, 0, ppc_sp);
5025 ppc_ldptr (code, ppc_r0, ainfo->offset, ppc_r11);
5026 switch (ainfo->size) {
5028 if (ppc_is_imm16 (inst->inst_offset)) {
5029 ppc_stb (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
5031 if (ppc_is_imm32 (inst->inst_offset)) {
5032 ppc_addis (code, ppc_r11, inst->inst_basereg, ppc_ha(inst->inst_offset));
5033 ppc_stb (code, ppc_r0, ppc_r11, inst->inst_offset);
5035 ppc_load (code, ppc_r11, inst->inst_offset);
5036 ppc_stbx (code, ppc_r0, inst->inst_basereg, ppc_r11);
5041 if (ppc_is_imm16 (inst->inst_offset)) {
5042 ppc_sth (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
5044 if (ppc_is_imm32 (inst->inst_offset)) {
5045 ppc_addis (code, ppc_r11, inst->inst_basereg, ppc_ha(inst->inst_offset));
5046 ppc_sth (code, ppc_r0, ppc_r11, inst->inst_offset);
5048 ppc_load (code, ppc_r11, inst->inst_offset);
5049 ppc_sthx (code, ppc_r0, inst->inst_basereg, ppc_r11);
5053 #ifdef __mono_ppc64__
5055 if (ppc_is_imm16 (inst->inst_offset)) {
5056 ppc_stw (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
5058 if (ppc_is_imm32 (inst->inst_offset)) {
5059 ppc_addis (code, ppc_r11, inst->inst_basereg, ppc_ha(inst->inst_offset));
5060 ppc_stw (code, ppc_r0, ppc_r11, inst->inst_offset);
5062 ppc_load (code, ppc_r11, inst->inst_offset);
5063 ppc_stwx (code, ppc_r0, inst->inst_basereg, ppc_r11);
5068 if (ppc_is_imm16 (inst->inst_offset)) {
5069 ppc_str (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
5071 ppc_load (code, ppc_r11, inst->inst_offset);
5072 ppc_str_indexed (code, ppc_r0, ppc_r11, inst->inst_basereg);
5077 g_assert (ppc_is_imm16 (ainfo->offset + 4));
5078 if (ppc_is_imm16 (inst->inst_offset + 4)) {
5079 ppc_stw (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
5080 ppc_lwz (code, ppc_r0, ainfo->offset + 4, ppc_r11);
5081 ppc_stw (code, ppc_r0, inst->inst_offset + 4, inst->inst_basereg);
5083 /* use r12 to load the 2nd half of the long before we clobber r11. */
5084 ppc_lwz (code, ppc_r12, ainfo->offset + 4, ppc_r11);
5085 ppc_addis (code, ppc_r11, inst->inst_basereg, ppc_ha(inst->inst_offset));
5086 ppc_addi (code, ppc_r11, ppc_r11, inst->inst_offset);
5087 ppc_stw (code, ppc_r0, 0, ppc_r11);
5088 ppc_stw (code, ppc_r12, 4, ppc_r11);
5093 if (ppc_is_imm16 (inst->inst_offset)) {
5094 ppc_stptr (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
5096 if (ppc_is_imm32 (inst->inst_offset)) {
5097 ppc_addis (code, ppc_r11, inst->inst_basereg, ppc_ha(inst->inst_offset));
5098 ppc_stptr (code, ppc_r0, ppc_r11, inst->inst_offset);
5100 ppc_load (code, ppc_r11, inst->inst_offset);
5101 ppc_stptr_indexed (code, ppc_r0, inst->inst_basereg, ppc_r11);
5106 } else if (ainfo->regtype == RegTypeFP) {
5107 g_assert (ppc_is_imm16 (inst->inst_offset));
5108 if (ainfo->size == 8)
5109 ppc_stfd (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
5110 else if (ainfo->size == 4)
5111 ppc_stfs (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
5113 g_assert_not_reached ();
5114 } else if (ainfo->regtype == RegTypeStructByVal) {
5115 int doffset = inst->inst_offset;
5119 g_assert (ppc_is_imm16 (inst->inst_offset));
5120 g_assert (ppc_is_imm16 (inst->inst_offset + ainfo->vtregs * sizeof (gpointer)));
5121 /* FIXME: what if there is no class? */
5122 if (sig->pinvoke && mono_class_from_mono_type (inst->inst_vtype))
5123 size = mono_class_native_size (mono_class_from_mono_type (inst->inst_vtype), NULL);
5124 for (cur_reg = 0; cur_reg < ainfo->vtregs; ++cur_reg) {
5127 * Darwin handles 1 and 2 byte
5128 * structs specially by
5129 * loading h/b into the arg
5130 * register. Only done for
5134 ppc_sth (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
5136 ppc_stb (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
5140 #ifdef __mono_ppc64__
5142 g_assert (cur_reg == 0);
5143 ppc_sldi (code, ppc_r0, ainfo->reg,
5144 (sizeof (gpointer) - ainfo->bytes) * 8);
5145 ppc_stptr (code, ppc_r0, doffset, inst->inst_basereg);
5149 ppc_stptr (code, ainfo->reg + cur_reg, doffset,
5150 inst->inst_basereg);
5153 soffset += sizeof (gpointer);
5154 doffset += sizeof (gpointer);
5156 if (ainfo->vtsize) {
5157 /* FIXME: we need to do the shifting here, too */
5160 /* load the previous stack pointer in r11 (r0 gets overwritten by the memcpy) */
5161 ppc_ldr (code, ppc_r11, 0, ppc_sp);
5162 if ((size & MONO_PPC_32_64_CASE (3, 7)) != 0) {
5163 code = emit_memcpy (code, size - soffset,
5164 inst->inst_basereg, doffset,
5165 ppc_r11, ainfo->offset + soffset);
5167 code = emit_memcpy (code, ainfo->vtsize * sizeof (gpointer),
5168 inst->inst_basereg, doffset,
5169 ppc_r11, ainfo->offset + soffset);
5172 } else if (ainfo->regtype == RegTypeStructByAddr) {
5173 /* if it was originally a RegTypeBase */
5174 if (ainfo->offset) {
5175 /* load the previous stack pointer in r11 */
5176 ppc_ldr (code, ppc_r11, 0, ppc_sp);
5177 ppc_ldptr (code, ppc_r11, ainfo->offset, ppc_r11);
5179 ppc_mr (code, ppc_r11, ainfo->reg);
5182 if (cfg->tailcall_valuetype_addrs) {
5183 MonoInst *addr = cfg->tailcall_valuetype_addrs [tailcall_struct_index];
5185 g_assert (ppc_is_imm16 (addr->inst_offset));
5186 ppc_stptr (code, ppc_r11, addr->inst_offset, addr->inst_basereg);
5188 tailcall_struct_index++;
5191 g_assert (ppc_is_imm16 (inst->inst_offset));
5192 code = emit_memcpy (code, ainfo->vtsize, inst->inst_basereg, inst->inst_offset, ppc_r11, 0);
5193 /*g_print ("copy in %s: %d bytes from %d to offset: %d\n", method->name, ainfo->vtsize, ainfo->reg, inst->inst_offset);*/
5195 g_assert_not_reached ();
5200 if (method->wrapper_type == MONO_WRAPPER_NATIVE_TO_MANAGED) {
5201 ppc_load_ptr (code, ppc_r3, cfg->domain);
5202 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD, (gpointer)"mono_jit_thread_attach");
5203 if ((FORCE_INDIR_CALL || cfg->method->dynamic) && !cfg->compile_aot) {
5204 ppc_load_func (code, ppc_r0, 0);
5205 ppc_mtlr (code, ppc_r0);
5213 if (method->save_lmf) {
5214 if (lmf_pthread_key != -1) {
5215 emit_tls_access (code, ppc_r3, lmf_pthread_key);
5216 if (tls_mode != TLS_MODE_NPTL && G_STRUCT_OFFSET (MonoJitTlsData, lmf))
5217 ppc_addi (code, ppc_r3, ppc_r3, G_STRUCT_OFFSET (MonoJitTlsData, lmf));
5219 if (cfg->compile_aot) {
5220 /* Compute the got address which is needed by the PLT entry */
5221 code = mono_arch_emit_load_got_addr (cfg->native_code, code, cfg, NULL);
5223 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
5224 (gpointer)"mono_get_lmf_addr");
5225 if ((FORCE_INDIR_CALL || cfg->method->dynamic) && !cfg->compile_aot) {
5226 ppc_load_func (code, ppc_r0, 0);
5227 ppc_mtlr (code, ppc_r0);
5233 /* we build the MonoLMF structure on the stack - see mini-ppc.h */
5234 /* lmf_offset is the offset from the previous stack pointer,
5235 * alloc_size is the total stack space allocated, so the offset
5236 * of MonoLMF from the current stack ptr is alloc_size - lmf_offset.
5237 * The pointer to the struct is put in ppc_r11 (new_lmf).
5238 * The callee-saved registers are already in the MonoLMF structure
5240 ppc_addi (code, ppc_r11, ppc_sp, alloc_size - lmf_offset);
5241 /* ppc_r3 is the result from mono_get_lmf_addr () */
5242 ppc_stptr (code, ppc_r3, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
5243 /* new_lmf->previous_lmf = *lmf_addr */
5244 ppc_ldptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
5245 ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
5246 /* *(lmf_addr) = r11 */
5247 ppc_stptr (code, ppc_r11, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
5248 /* save method info */
5249 if (cfg->compile_aot)
5251 ppc_load (code, ppc_r0, 0);
5253 ppc_load_ptr (code, ppc_r0, method);
5254 ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, method), ppc_r11);
5255 ppc_stptr (code, ppc_sp, G_STRUCT_OFFSET(MonoLMF, ebp), ppc_r11);
5256 /* save the current IP */
5257 if (cfg->compile_aot) {
5259 ppc_mflr (code, ppc_r0);
5261 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_IP, NULL);
5262 #ifdef __mono_ppc64__
5263 ppc_load_sequence (code, ppc_r0, (guint64)0x0101010101010101LL);
5265 ppc_load_sequence (code, ppc_r0, (gulong)0x01010101L);
5268 ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, eip), ppc_r11);
5272 code = mono_arch_instrument_prolog (cfg, mono_trace_enter_method, code, TRUE);
5274 cfg->code_len = code - cfg->native_code;
5275 g_assert (cfg->code_len <= cfg->code_size);
/*
 * mono_arch_emit_epilog:
 *
 * Emits the method epilog: optional trace-leave instrumentation, MonoLMF
 * unlinking/restoring when method->save_lmf is set, restoration of the
 * callee-saved integer registers (r13-r31) and the return address, and the
 * final stack-pointer adjustment.  The native code buffer is doubled until
 * max_epilog_size bytes are guaranteed to fit.
 * NOTE(review): this extraction is missing lines (braces, some statements);
 * comments below only describe what is visible.
 */
5282 mono_arch_emit_epilog (MonoCompile *cfg)
5284 MonoMethod *method = cfg->method;
/* worst-case size estimate; enlarged below for LMF / tracing / profiling */
5286 int max_epilog_size = 16 + 20*4;
5289 if (cfg->method->save_lmf)
5290 max_epilog_size += 128;
5292 if (mono_jit_trace_calls != NULL)
5293 max_epilog_size += 50;
5295 if (cfg->prof_options & MONO_PROFILE_ENTER_LEAVE)
5296 max_epilog_size += 50;
/* grow the buffer until the epilog is guaranteed to fit (16 bytes slack) */
5298 while (cfg->code_len + max_epilog_size > (cfg->code_size - 16)) {
5299 cfg->code_size *= 2;
5300 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
5301 mono_jit_stats.code_reallocs++;
5305 * Keep in sync with OP_JMP
5307 code = cfg->native_code + cfg->code_len;
5309 if (mono_jit_trace_calls != NULL && mono_trace_eval (method)) {
5310 code = mono_arch_instrument_epilog (cfg, mono_trace_leave_method, code, TRUE);
/* LMF path: unlink this frame's MonoLMF and restore the saved registers */
5314 if (method->save_lmf) {
5316 pos += sizeof (MonoLMF);
5318 /* save the frame reg in r8 */
5319 ppc_mr (code, ppc_r8, cfg->frame_reg);
/* r11 = address of the MonoLMF structure on the stack */
5320 ppc_addi (code, ppc_r11, cfg->frame_reg, cfg->stack_usage - lmf_offset);
5321 /* r5 = previous_lmf */
5322 ppc_ldptr (code, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r11);
/* r6 = lmf_addr */
5324 ppc_ldptr (code, ppc_r6, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r11);
5325 /* *(lmf_addr) = previous_lmf */
5326 ppc_stptr (code, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r6);
5327 /* FIXME: speedup: there is no actual need to restore the registers if
5328 * we didn't actually change them (idea from Zoltan).
/* restore r13-r31 from the LMF's saved iregs in one multi-register load */
5331 ppc_ldr_multiple (code, ppc_r13, G_STRUCT_OFFSET(MonoLMF, iregs), ppc_r11);
5333 /*for (i = 14; i < 32; i++) {
5334 ppc_lfd (code, i, G_STRUCT_OFFSET(MonoLMF, fregs) + ((i-14) * sizeof (gdouble)), ppc_r11);
5336 g_assert (ppc_is_imm16 (cfg->stack_usage + PPC_RET_ADDR_OFFSET));
5337 /* use the saved copy of the frame reg in r8 */
5338 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
5339 ppc_ldr (code, ppc_r0, cfg->stack_usage + PPC_RET_ADDR_OFFSET, ppc_r8);
5340 ppc_mtlr (code, ppc_r0);
5342 ppc_addic (code, ppc_sp, ppc_r8, cfg->stack_usage);
/* non-LMF path: reload the return address into lr, then restore registers */
5344 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
5345 long return_offset = cfg->stack_usage + PPC_RET_ADDR_OFFSET;
5346 if (ppc_is_imm16 (return_offset)) {
5347 ppc_ldr (code, ppc_r0, return_offset, cfg->frame_reg);
5349 ppc_load (code, ppc_r11, return_offset);
5350 ppc_ldr_indexed (code, ppc_r0, cfg->frame_reg, ppc_r11);
5352 ppc_mtlr (code, ppc_r0);
5354 if (ppc_is_imm16 (cfg->stack_usage)) {
5355 int offset = cfg->stack_usage;
/* first pass: compute the offset of the lowest-numbered saved register */
5356 for (i = 13; i <= 31; i++) {
5357 if (cfg->used_int_regs & (1 << i))
5358 offset -= sizeof (mgreg_t);
5360 if (cfg->frame_reg != ppc_sp)
5361 ppc_mr (code, ppc_r11, cfg->frame_reg);
5362 /* note r31 (possibly the frame register) is restored last */
5363 for (i = 13; i <= 31; i++) {
5364 if (cfg->used_int_regs & (1 << i)) {
5365 ppc_ldr (code, i, offset, cfg->frame_reg);
5366 offset += sizeof (mgreg_t);
5369 if (cfg->frame_reg != ppc_sp)
5370 ppc_addi (code, ppc_sp, ppc_r11, cfg->stack_usage);
5372 ppc_addi (code, ppc_sp, ppc_sp, cfg->stack_usage);
/* stack_usage does not fit in imm16: materialize it in r11 instead */
5374 ppc_load32 (code, ppc_r11, cfg->stack_usage);
5375 if (cfg->used_int_regs) {
5376 ppc_add (code, ppc_r11, cfg->frame_reg, ppc_r11);
/* restore downwards from the top of the register save area */
5377 for (i = 31; i >= 13; --i) {
5378 if (cfg->used_int_regs & (1 << i)) {
5379 pos += sizeof (mgreg_t);
5380 ppc_ldr (code, i, -pos, ppc_r11);
5383 ppc_mr (code, ppc_sp, ppc_r11);
5385 ppc_add (code, ppc_sp, cfg->frame_reg, ppc_r11);
5392 cfg->code_len = code - cfg->native_code;
5394 g_assert (cfg->code_len < cfg->code_size);
5398 /* remove once throw_exception_by_name is eliminated */
/*
 * exception_id_by_name:
 *
 * Maps a System.* corlib exception class name to its MONO_EXC_* intrinsic
 * id; aborts via g_error () on an unknown name.
 */
5400 exception_id_by_name (const char *name)
5402 if (strcmp (name, "IndexOutOfRangeException") == 0)
5403 return MONO_EXC_INDEX_OUT_OF_RANGE;
5404 if (strcmp (name, "OverflowException") == 0)
5405 return MONO_EXC_OVERFLOW;
5406 if (strcmp (name, "ArithmeticException") == 0)
5407 return MONO_EXC_ARITHMETIC;
5408 if (strcmp (name, "DivideByZeroException") == 0)
5409 return MONO_EXC_DIVIDE_BY_ZERO;
5410 if (strcmp (name, "InvalidCastException") == 0)
5411 return MONO_EXC_INVALID_CAST;
5412 if (strcmp (name, "NullReferenceException") == 0)
5413 return MONO_EXC_NULL_REF;
5414 if (strcmp (name, "ArrayTypeMismatchException") == 0)
5415 return MONO_EXC_ARRAY_TYPE_MISMATCH;
5416 g_error ("Unknown intrinsic exception %s\n", name);
/*
 * mono_arch_emit_exceptions:
 *
 * Emits the out-of-line exception-raising stubs at the end of the method:
 * overflow-check branch targets (BB_OVF / EXC_OVF) and per-exception-class
 * throw sequences (EXC), which load the exception type token into r3, the
 * faulting ip into r4 and call mono_arch_throw_corlib_exception.  Identical
 * exception classes share one stub (exc_throw_pos cache).
 * NOTE(review): this extraction is missing lines (braces, case labels);
 * comments below only describe what is visible.
 */
5421 mono_arch_emit_exceptions (MonoCompile *cfg)
5423 MonoJumpInfo *patch_info;
5426 const guint8* exc_throw_pos [MONO_EXC_INTRINS_NUM] = {NULL};
5427 guint8 exc_throw_found [MONO_EXC_INTRINS_NUM] = {0};
5428 int max_epilog_size = 50;
5430 /* count the number of exception infos */
5433 * make sure we have enough space for exceptions
/* first pass: estimate the space needed for all stubs */
5435 for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
5436 if (patch_info->type == MONO_PATCH_INFO_EXC) {
5437 i = exception_id_by_name (patch_info->data.target);
5438 if (!exc_throw_found [i]) {
5439 max_epilog_size += (2 * PPC_LOAD_SEQUENCE_LENGTH) + 5 * 4;
5440 exc_throw_found [i] = TRUE;
5442 } else if (patch_info->type == MONO_PATCH_INFO_BB_OVF)
5443 max_epilog_size += 12;
5444 else if (patch_info->type == MONO_PATCH_INFO_EXC_OVF) {
5445 MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
5446 i = exception_id_by_name (ovfj->data.exception);
5447 if (!exc_throw_found [i]) {
5448 max_epilog_size += (2 * PPC_LOAD_SEQUENCE_LENGTH) + 5 * 4;
5449 exc_throw_found [i] = TRUE;
5451 max_epilog_size += 8;
/* grow the buffer until all stubs are guaranteed to fit */
5455 while (cfg->code_len + max_epilog_size > (cfg->code_size - 16)) {
5456 cfg->code_size *= 2;
5457 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
5458 mono_jit_stats.code_reallocs++;
5461 code = cfg->native_code + cfg->code_len;
5463 /* add code to raise exceptions */
5464 for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
5465 switch (patch_info->type) {
5466 case MONO_PATCH_INFO_BB_OVF: {
5467 MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
5468 unsigned char *ip = patch_info->ip.i + cfg->native_code;
5469 /* patch the initial jump */
5470 ppc_patch (ip, code);
5471 ppc_bc (code, ovfj->b0_cond, ovfj->b1_cond, 2);
5473 ppc_patch (code - 4, ip + 4); /* jump back after the initiali branch */
5474 /* jump back to the true target */
5476 ip = ovfj->data.bb->native_offset + cfg->native_code;
5477 ppc_patch (code - 4, ip);
/* mark as handled so the patch loop skips it */
5478 patch_info->type = MONO_PATCH_INFO_NONE;
5481 case MONO_PATCH_INFO_EXC_OVF: {
5482 MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
5483 MonoJumpInfo *newji;
5484 unsigned char *ip = patch_info->ip.i + cfg->native_code;
5485 unsigned char *bcl = code;
5486 /* patch the initial jump: we arrived here with a call */
5487 ppc_patch (ip, code);
5488 ppc_bc (code, ovfj->b0_cond, ovfj->b1_cond, 0);
5490 ppc_patch (code - 4, ip + 4); /* jump back after the initiali branch */
5491 /* patch the conditional jump to the right handler */
5492 /* make it processed next */
/* queue a synthetic EXC patch right after this one so the stub below
 * handles the actual throw */
5493 newji = mono_mempool_alloc (cfg->mempool, sizeof (MonoJumpInfo));
5494 newji->type = MONO_PATCH_INFO_EXC;
5495 newji->ip.i = bcl - cfg->native_code;
5496 newji->data.target = ovfj->data.exception;
5497 newji->next = patch_info->next;
5498 patch_info->next = newji;
5499 patch_info->type = MONO_PATCH_INFO_NONE;
5502 case MONO_PATCH_INFO_EXC: {
5503 MonoClass *exc_class;
5505 unsigned char *ip = patch_info->ip.i + cfg->native_code;
5506 i = exception_id_by_name (patch_info->data.target);
/* reuse an already-emitted stub for the same exception class */
5507 if (exc_throw_pos [i]) {
5508 ppc_patch (ip, exc_throw_pos [i]);
5509 patch_info->type = MONO_PATCH_INFO_NONE;
5512 exc_throw_pos [i] = code;
5515 exc_class = mono_class_from_name (mono_defaults.corlib, "System", patch_info->data.name);
5516 g_assert (exc_class);
5518 ppc_patch (ip, code);
5519 /*mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC_NAME, patch_info->data.target);*/
5520 ppc_load (code, ppc_r3, exc_class->type_token);
5521 /* we got here from a conditional call, so the calling ip is set in lr */
5522 ppc_mflr (code, ppc_r4);
/* repurpose this patch entry as the call to the throw helper */
5523 patch_info->type = MONO_PATCH_INFO_INTERNAL_METHOD;
5524 patch_info->data.name = "mono_arch_throw_corlib_exception";
5525 patch_info->ip.i = code - cfg->native_code;
5526 if (FORCE_INDIR_CALL || cfg->method->dynamic) {
5527 ppc_load_func (code, ppc_r0, 0);
5528 ppc_mtctr (code, ppc_r0);
5529 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
5541 cfg->code_len = code - cfg->native_code;
5543 g_assert (cfg->code_len <= cfg->code_size);
/*
 * try_offset_access:
 *
 * Sanity check for linuxthreads-style keyed TLS: verifies that VALUE can be
 * reached from the thread register (r2) at offset 284 via the two-level
 * idx/32, idx%32 table layout (cf. offsets_from_pthread_key).
 * NOTE(review): the return statements are missing from this extraction.
 */
5548 try_offset_access (void *value, guint32 idx)
5550 register void* me __asm__ ("r2");
5551 void ***p = (void***)((char*)me + 284);
5552 int idx1 = idx / 32;
5553 int idx2 = idx % 32;
5556 if (value != p[idx1][idx2])
/*
 * setup_tls_access:
 *
 * Detects at runtime how thread-local storage can be accessed on this
 * platform and records the result in the file-static tls_mode:
 * NPTL (direct offsets), linuxthreads (keyed, via r2 + 284), Darwin G4/G5
 * variants, or TLS_MODE_FAILED.  On the generic path it disassembles
 * pthread_getspecific to recognize the implementation.  Finally it
 * resolves monodomain_key and lmf_pthread_key for the chosen mode.
 * NOTE(review): this extraction is missing lines (braces, #else/#endif,
 * some statements); comments below only describe what is visible.
 */
5563 setup_tls_access (void)
5567 #if defined(__linux__) && defined(_CS_GNU_LIBPTHREAD_VERSION)
5568 size_t conf_size = 0;
5571 /* FIXME for darwin */
5572 guint32 *ins, *code;
5573 guint32 cmplwi_1023, li_0x48, blr_ins;
5577 tls_mode = TLS_MODE_FAILED;
5580 if (tls_mode == TLS_MODE_FAILED)
/* allow disabling TLS fast paths via the environment */
5582 if (g_getenv ("MONO_NO_TLS")) {
5583 tls_mode = TLS_MODE_FAILED;
5587 if (tls_mode == TLS_MODE_DETECT) {
5588 #if defined(__APPLE__) && defined(__mono_ppc__) && !defined(__mono_ppc64__)
5589 tls_mode = TLS_MODE_DARWIN_G4;
5590 #elif defined(__linux__) && defined(_CS_GNU_LIBPTHREAD_VERSION)
/* glibc reports the threading implementation name; "NPTL..." => NPTL */
5591 conf_size = confstr ( _CS_GNU_LIBPTHREAD_VERSION, confbuf, sizeof(confbuf));
5592 if ((conf_size > 4) && (strncmp (confbuf, "NPTL", 4) == 0))
5593 tls_mode = TLS_MODE_NPTL;
5594 #elif !defined(TARGET_PS3)
/* generic path: disassemble pthread_getspecific to identify the libc */
5595 ins = (guint32*)pthread_getspecific;
5596 /* uncond branch to the real method */
5597 if ((*ins >> 26) == 18) {
/* decode the 24-bit branch displacement (sign handling not visible here) */
5599 val = (*ins & ~3) << 6;
5603 ins = (guint32*)(long)val;
5605 ins = (guint32*) ((char*)ins + val);
/* assemble reference instructions to compare against */
5608 code = &cmplwi_1023;
5609 ppc_cmpli (code, 0, 0, ppc_r3, 1023);
5611 ppc_li (code, ppc_r4, 0x48);
/* linuxthreads' pthread_getspecific starts with cmplwi r3, 1023 */
5614 if (*ins == cmplwi_1023) {
5615 int found_lwz_284 = 0;
/* scan the next instructions for a lwz with displacement 284 */
5616 for (ptk = 0; ptk < 20; ++ptk) {
5618 if (!*ins || *ins == blr_ins)
5620 if ((guint16)*ins == 284 && (*ins >> 26) == 32) {
5625 if (!found_lwz_284) {
5626 tls_mode = TLS_MODE_FAILED;
5629 tls_mode = TLS_MODE_LTHREADS;
5630 } else if (*ins == li_0x48) {
5632 /* uncond branch to the real method */
5633 if ((*ins >> 26) == 18) {
5635 val = (*ins & ~3) << 6;
5639 ins = (guint32*)(long)val;
5641 ins = (guint32*) ((char*)ins + val);
5643 code = (guint32*)&val;
5644 ppc_li (code, ppc_r0, 0x7FF2);
5645 if (ins [1] == val) {
5646 /* Darwin on G4, implement */
5647 tls_mode = TLS_MODE_FAILED;
5650 code = (guint32*)&val;
/* Darwin G5 reads the thread pointer with mfspr from SPR 104 */
5651 ppc_mfspr (code, ppc_r3, 104);
5652 if (ins [1] != val) {
5653 tls_mode = TLS_MODE_FAILED;
5656 tls_mode = TLS_MODE_DARWIN_G5;
5659 tls_mode = TLS_MODE_FAILED;
5663 tls_mode = TLS_MODE_FAILED;
5669 if (tls_mode == TLS_MODE_DETECT)
5670 tls_mode = TLS_MODE_FAILED;
5671 if (tls_mode == TLS_MODE_FAILED)
/* resolve the TLS key/offset for the current MonoDomain */
5673 if ((monodomain_key == -1) && (tls_mode == TLS_MODE_NPTL)) {
5674 monodomain_key = mono_domain_get_tls_offset();
5676 /* if not TLS_MODE_NPTL or local dynamic (as indicated by
5677 mono_domain_get_tls_offset returning -1) then use keyed access. */
5678 if (monodomain_key == -1) {
5679 ptk = mono_domain_get_tls_key ();
5681 ptk = mono_pthread_key_for_tls (ptk);
5683 monodomain_key = ptk;
/* resolve the TLS key/offset used to locate the LMF address */
5688 if ((lmf_pthread_key == -1) && (tls_mode == TLS_MODE_NPTL)) {
5689 lmf_pthread_key = mono_get_lmf_addr_tls_offset();
5691 /* if not TLS_MODE_NPTL or local dynamic (as indicated by
5692 mono_get_lmf_addr_tls_offset returning -1) then use keyed access. */
5693 if (lmf_pthread_key == -1) {
5694 ptk = mono_pthread_key_for_tls (mono_jit_tls_id);
5696 /*g_print ("MonoLMF at: %d\n", ptk);*/
5697 /*if (!try_offset_access (mono_get_lmf_addr (), ptk)) {
5698 init_tls_failed = 1;
5701 lmf_pthread_key = ptk;
/* Arch-specific per-thread JIT TLS setup: just runs TLS-mode detection. */
5708 mono_arch_setup_jit_tls_data (MonoJitTlsData *tls)
5710 setup_tls_access ();
/* Arch-specific per-thread JIT TLS teardown; no body is visible in this
 * extraction (presumably nothing to free on PPC — TODO confirm). */
5714 mono_arch_free_jit_tls_data (MonoJitTlsData *tls)
5718 #ifdef MONO_ARCH_HAVE_IMT
5720 #define CMP_SIZE (PPC_LOAD_SEQUENCE_LENGTH + 4)
5722 #define LOADSTORE_SIZE 4
5723 #define JUMP_IMM_SIZE 12
5724 #define JUMP_IMM32_SIZE (PPC_LOAD_SEQUENCE_LENGTH + 8)
5725 #define ENABLE_WRONG_METHOD_CHECK 0
5728 * LOCKING: called with the domain lock held
/*
 * mono_arch_build_imt_thunk:
 *
 * Builds an IMT/interface dispatch thunk: a compare-and-branch sequence over
 * IMT_ENTRIES that matches MONO_ARCH_IMT_REG against each item's key and
 * jumps to the corresponding vtable slot or target code; FAIL_TRAMP (when
 * non-NULL, the generic-virtual case) is the fallthrough target.  Sizes are
 * computed in a first pass, code is emitted in a second, branches patched in
 * a third.  r11 is saved/restored around the thunk because callers may use
 * it as the vtable register (see comment below).
 * NOTE(review): this extraction is missing lines (braces, else branches);
 * comments below only describe what is visible.
 */
5731 mono_arch_build_imt_thunk (MonoVTable *vtable, MonoDomain *domain, MonoIMTCheckItem **imt_entries, int count,
5732 gpointer fail_tramp)
5736 guint8 *code, *start;
/* first pass: compute each item's chunk size and the total thunk size */
5738 for (i = 0; i < count; ++i) {
5739 MonoIMTCheckItem *item = imt_entries [i];
5740 if (item->is_equals) {
5741 if (item->check_target_idx) {
5742 if (!item->compare_done)
5743 item->chunk_size += CMP_SIZE;
5744 if (item->has_target_code)
5745 item->chunk_size += BR_SIZE + JUMP_IMM32_SIZE;
5747 item->chunk_size += LOADSTORE_SIZE + BR_SIZE + JUMP_IMM_SIZE;
5750 item->chunk_size += CMP_SIZE + BR_SIZE + JUMP_IMM32_SIZE * 2;
5751 if (!item->has_target_code)
5752 item->chunk_size += LOADSTORE_SIZE;
5754 item->chunk_size += LOADSTORE_SIZE + JUMP_IMM_SIZE;
5755 #if ENABLE_WRONG_METHOD_CHECK
5756 item->chunk_size += CMP_SIZE + BR_SIZE + 4;
/* non-equals item: a less-than check that bisects the search */
5761 item->chunk_size += CMP_SIZE + BR_SIZE;
5762 imt_entries [item->check_target_idx]->compare_done = TRUE;
5764 size += item->chunk_size;
/* generic-virtual thunks come from a dedicated allocator */
5767 code = mono_method_alloc_generic_virtual_thunk (domain, size);
5769 /* the initial load of the vtable address */
5770 size += PPC_LOAD_SEQUENCE_LENGTH + LOADSTORE_SIZE;
5771 code = mono_domain_code_reserve (domain, size);
5776 * We need to save and restore r11 because it might be
5777 * used by the caller as the vtable register, so
5778 * clobbering it will trip up the magic trampoline.
5780 * FIXME: Get rid of this by making sure that r11 is
5781 * not used as the vtable register in interface calls.
5783 ppc_stptr (code, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_sp);
5784 ppc_load (code, ppc_r11, (gsize)(& (vtable->vtable [0])));
/* second pass: emit the compare/branch/jump sequence for each item */
5786 for (i = 0; i < count; ++i) {
5787 MonoIMTCheckItem *item = imt_entries [i];
5788 item->code_target = code;
5789 if (item->is_equals) {
5790 if (item->check_target_idx) {
5791 if (!item->compare_done) {
5792 ppc_load (code, ppc_r0, (gsize)item->key);
5793 ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
5795 item->jmp_code = code;
5796 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
5797 if (item->has_target_code) {
5798 ppc_load_ptr (code, ppc_r0, item->value.target_code);
/* load the target from the vtable slot, restore r11, jump via ctr */
5800 ppc_ldptr (code, ppc_r0, (sizeof (gpointer) * item->value.vtable_slot), ppc_r11);
5801 ppc_ldptr (code, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_sp);
5803 ppc_mtctr (code, ppc_r0);
5804 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
/* fail_tramp case: equality check with an explicit failure branch */
5807 ppc_load (code, ppc_r0, (gulong)item->key);
5808 ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
5809 item->jmp_code = code;
5810 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
5811 if (item->has_target_code) {
5812 ppc_load_ptr (code, ppc_r0, item->value.target_code);
5815 ppc_load_ptr (code, ppc_r0, & (vtable->vtable [item->value.vtable_slot]));
5816 ppc_ldptr_indexed (code, ppc_r0, 0, ppc_r0);
5818 ppc_mtctr (code, ppc_r0);
5819 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
/* on mismatch fall through to the failure trampoline */
5820 ppc_patch (item->jmp_code, code);
5821 ppc_load_ptr (code, ppc_r0, fail_tramp);
5822 ppc_mtctr (code, ppc_r0);
5823 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
5824 item->jmp_code = NULL;
5826 /* enable the commented code to assert on wrong method */
5827 #if ENABLE_WRONG_METHOD_CHECK
5828 ppc_load (code, ppc_r0, (guint32)item->key);
5829 ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
5830 item->jmp_code = code;
5831 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
5833 ppc_ldptr (code, ppc_r0, (sizeof (gpointer) * item->value.vtable_slot), ppc_r11);
5834 ppc_ldptr (code, ppc_r11, PPC_RET_ADDR_OFFSET, ppc_sp);
5835 ppc_mtctr (code, ppc_r0);
5836 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
5837 #if ENABLE_WRONG_METHOD_CHECK
5838 ppc_patch (item->jmp_code, code);
5840 item->jmp_code = NULL;
/* non-equals item: branch if IMT register < key to bisect the table */
5845 ppc_load (code, ppc_r0, (gulong)item->key);
5846 ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
5847 item->jmp_code = code;
5848 ppc_bc (code, PPC_BR_FALSE, PPC_BR_LT, 0);
5851 /* patch the branches to get to the target items */
5852 for (i = 0; i < count; ++i) {
5853 MonoIMTCheckItem *item = imt_entries [i];
5854 if (item->jmp_code) {
5855 if (item->check_target_idx) {
5856 ppc_patch (item->jmp_code, imt_entries [item->check_target_idx]->code_target);
5862 mono_stats.imt_thunks_size += code - start;
5863 g_assert (code - start <= size);
5864 mono_arch_flush_icache (start, size);
/* Recovers the MonoMethod for an IMT call from the saved register state
 * (reads MONO_ARCH_IMT_REG from REGS). */
5869 mono_arch_find_imt_method (mgreg_t *regs, guint8 *code)
5871 mgreg_t *r = (mgreg_t*)regs;
5873 return (MonoMethod*)(gsize) r [MONO_ARCH_IMT_REG];
/* Recovers the MonoVTable for a static rgctx call from the saved register
 * state (reads MONO_ARCH_RGCTX_REG from REGS). */
5878 mono_arch_find_static_call_vtable (mgreg_t *regs, guint8 *code)
5880 mgreg_t *r = (mgreg_t*)regs;
5882 return (MonoVTable*)(gsize) r [MONO_ARCH_RGCTX_REG];
/* Arch hook for method intrinsics; its body is not visible in this
 * extraction (presumably returns NULL, i.e. no PPC intrinsic — TODO confirm). */
5886 mono_arch_emit_inst_for_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
/* Arch hook for printing instruction trees; its body is not visible in this
 * extraction. */
5893 mono_arch_print_tree (MonoInst *tree, int arity)
/*
 * mono_arch_get_domain_intrinsic:
 *
 * Returns an OP_TLS_GET instruction that loads the current MonoDomain from
 * TLS using monodomain_key, after ensuring TLS-mode detection has run.
 * The monodomain_key == -1 branch (detection failed) is cut short in this
 * extraction — presumably it returns NULL.
 */
5898 MonoInst* mono_arch_get_domain_intrinsic (MonoCompile* cfg)
5902 setup_tls_access ();
5903 if (monodomain_key == -1)
5906 MONO_INST_NEW (cfg, ins, OP_TLS_GET);
5907 ins->inst_offset = monodomain_key;
/*
 * mono_arch_context_get_int_reg:
 *
 * Returns the value of integer register REG from CTX.  The stack pointer is
 * special-cased; only the callee-saved registers r13-r31 are stored in
 * ctx->regs, hence the assert and the r13-relative index.
 */
5912 mono_arch_context_get_int_reg (MonoContext *ctx, int reg)
5915 return MONO_CONTEXT_GET_SP (ctx);
5917 g_assert (reg >= ppc_r13);
5919 return (gpointer)(gsize)ctx->regs [reg - ppc_r13];
/* Returns the offset of the patchable part of CODE; its body is not visible
 * in this extraction (presumably 0 on PPC — TODO confirm). */
5923 mono_arch_get_patch_offset (guint8 *code)
5929 * mono_aot_emit_load_got_addr:
5931 * Emit code to load the got address.
5932 * On PPC, the result is placed into r30.
/* The offset added to lr is resolved later via a GOT_OFFSET patch (AOT) or
 * a prepended patch-info entry (JIT); arch_emit_got_address () fills in the
 * ppc_load32 immediate. */
5935 mono_arch_emit_load_got_addr (guint8 *start, guint8 *code, MonoCompile *cfg, MonoJumpInfo **ji)
5938 ppc_mflr (code, ppc_r30);
5940 mono_add_patch_info (cfg, code - start, MONO_PATCH_INFO_GOT_OFFSET, NULL);
5942 *ji = mono_patch_info_list_prepend (*ji, code - start, MONO_PATCH_INFO_GOT_OFFSET, NULL);
5943 /* arch_emit_got_address () patches this */
5944 #if defined(TARGET_POWERPC64)
5950 ppc_load32 (code, ppc_r0, 0);
5951 ppc_add (code, ppc_r30, ppc_r30, ppc_r0);
5958 * mono_ppc_emit_load_aotconst:
5960 * Emit code to load the contents of the GOT slot identified by TRAMP_TYPE and
5961 * TARGET from the mscorlib GOT in full-aot code.
5962 * On PPC, the GOT address is assumed to be in r30, and the result is placed into
/* The slot offset is emitted as a placeholder (ppc_load32 of 0) and recorded
 * as a patch-info entry; arch_emit_got_access () fills it in. */
5966 mono_arch_emit_load_aotconst (guint8 *start, guint8 *code, MonoJumpInfo **ji, int tramp_type, gconstpointer target)
5968 /* Load the mscorlib got address */
5969 ppc_ldptr (code, ppc_r11, sizeof (gpointer), ppc_r30);
5970 *ji = mono_patch_info_list_prepend (*ji, code - start, tramp_type, target);
5971 /* arch_emit_got_access () patches this */
5972 ppc_load32 (code, ppc_r0, 0);
5973 ppc_ldptr_indexed (code, ppc_r11, ppc_r11, ppc_r0);
5978 /* Soft Debug support */
5979 #ifdef MONO_ARCH_SOFT_DEBUG_SUPPORTED
5986 * mono_arch_set_breakpoint:
5988 * See mini-amd64.c for docs.
/* Overwrites the sequence-point site at IP with a load of the breakpoint
 * trigger page followed by a dereference of it; when breakpoints are armed
 * the load faults and the debugger agent catches the signal. */
5991 mono_arch_set_breakpoint (MonoJitInfo *ji, guint8 *ip)
5994 guint8 *orig_code = code;
5996 ppc_load_sequence (code, ppc_r11, (gsize)bp_trigger_page);
5997 ppc_ldptr (code, ppc_r11, 0, ppc_r11);
5999 g_assert (code - orig_code == BREAKPOINT_SIZE);
6001 mono_arch_flush_icache (orig_code, code - orig_code);
6005 * mono_arch_clear_breakpoint:
6007 * See mini-amd64.c for docs.
/* Replaces the BREAKPOINT_SIZE bytes at IP (presumably with nops — the loop
 * body is missing from this extraction) and flushes the icache. */
6010 mono_arch_clear_breakpoint (MonoJitInfo *ji, guint8 *ip)
6015 for (i = 0; i < BREAKPOINT_SIZE / 4; ++i)
6018 mono_arch_flush_icache (ip, code - ip);
6022 * mono_arch_is_breakpoint_event:
6024 * See mini-amd64.c for docs.
/* A SIGSEGV is a breakpoint event iff the faulting address falls inside the
 * breakpoint trigger page (with 128 bytes of slack for reported-address
 * skew). */
6027 mono_arch_is_breakpoint_event (void *info, void *sigctx)
6029 siginfo_t* sinfo = (siginfo_t*) info;
6030 /* Sometimes the address is off by 4 */
6031 if (sinfo->si_addr >= bp_trigger_page && (guint8*)sinfo->si_addr <= (guint8*)bp_trigger_page + 128)
6038 * mono_arch_get_ip_for_breakpoint:
6040 * See mini-amd64.c for docs.
/* Rewinds the context ip past the trigger-page load sequence so it points
 * at the start of the breakpoint site. */
6043 mono_arch_get_ip_for_breakpoint (MonoJitInfo *ji, MonoContext *ctx)
6045 guint8 *ip = MONO_CONTEXT_GET_IP (ctx);
6047 /* ip points at the ldptr instruction */
6048 ip -= PPC_LOAD_SEQUENCE_LENGTH;
6054 * mono_arch_skip_breakpoint:
6056 * See mini-amd64.c for docs.
/* Resumes past the faulting 4-byte ldptr so execution continues after the
 * breakpoint. */
6059 mono_arch_skip_breakpoint (MonoContext *ctx)
6061 /* skip the ldptr */
6062 MONO_CONTEXT_SET_IP (ctx, (guint8*)MONO_CONTEXT_GET_IP (ctx) + 4);
6070 * mono_arch_start_single_stepping:
6072 * See mini-amd64.c for docs.
/* Arms single stepping by removing all access to the ss trigger page, so
 * every sequence-point read of it faults. */
6075 mono_arch_start_single_stepping (void)
6077 mono_mprotect (ss_trigger_page, mono_pagesize (), 0);
6081 * mono_arch_stop_single_stepping:
6083 * See mini-amd64.c for docs.
/* Disarms single stepping by making the ss trigger page readable again. */
6086 mono_arch_stop_single_stepping (void)
6088 mono_mprotect (ss_trigger_page, mono_pagesize (), MONO_MMAP_READ);
6092 * mono_arch_is_single_step_event:
6094 * See mini-amd64.c for docs.
/* A SIGSEGV is a single-step event iff the faulting address falls inside the
 * single-step trigger page (with 128 bytes of slack for reported-address
 * skew). */
6097 mono_arch_is_single_step_event (void *info, void *sigctx)
6099 siginfo_t* sinfo = (siginfo_t*) info;
6100 /* Sometimes the address is off by 4 */
6101 if (sinfo->si_addr >= ss_trigger_page && (guint8*)sinfo->si_addr <= (guint8*)ss_trigger_page + 128)
6108 * mono_arch_get_ip_for_single_step:
6110 * See mini-amd64.c for docs.
/* Returns the ip of the single-step site; the adjustment (if any) applied to
 * ip before returning is missing from this extraction. */
6113 mono_arch_get_ip_for_single_step (MonoJitInfo *ji, MonoContext *ctx)
6115 guint8 *ip = MONO_CONTEXT_GET_IP (ctx);
6117 /* ip points after the ldptr instruction */
6122 * mono_arch_skip_single_step:
6124 * See mini-amd64.c for docs.
/* Resumes past the faulting 4-byte ldptr of the ss trigger page. */
6127 mono_arch_skip_single_step (MonoContext *ctx)
6129 /* skip the ldptr */
6130 MONO_CONTEXT_SET_IP (ctx, (guint8*)MONO_CONTEXT_GET_IP (ctx) + 4);
6134 * mono_arch_create_seq_point_info:
6136 * See mini-amd64.c for docs.
6139 mono_arch_get_seq_point_info (MonoDomain *domain, guint8 *code)