2 * mini-ppc.c: PowerPC backend for the Mono code generator
5 * Paolo Molaro (lupus@ximian.com)
6 * Dietmar Maurer (dietmar@ximian.com)
7 * Andreas Faerber <andreas.faerber@web.de>
9 * (C) 2003 Ximian, Inc.
10 * (C) 2007-2008 Andreas Faerber
15 #include <mono/metadata/abi-details.h>
16 #include <mono/metadata/appdomain.h>
17 #include <mono/metadata/debug-helpers.h>
18 #include <mono/utils/mono-proclib.h>
19 #include <mono/utils/mono-mmap.h>
20 #include <mono/utils/mono-hwcap.h>
23 #ifdef TARGET_POWERPC64
24 #include "cpu-ppc64.h"
31 #include <sys/sysctl.h>
37 #define FORCE_INDIR_CALL 1
48 /* cpu_hw_caps contains the flags defined below */
49 static int cpu_hw_caps = 0;
50 static int cachelinesize = 0;
51 static int cachelineinc = 0;
53 PPC_ICACHE_SNOOP = 1 << 0,
54 PPC_MULTIPLE_LS_UNITS = 1 << 1,
55 PPC_SMP_CAPABLE = 1 << 2,
58 PPC_MOVE_FPR_GPR = 1 << 5,
62 #define BREAKPOINT_SIZE (PPC_LOAD_SEQUENCE_LENGTH + 4)
64 /* This mutex protects architecture specific caches */
65 #define mono_mini_arch_lock() mono_os_mutex_lock (&mini_arch_mutex)
66 #define mono_mini_arch_unlock() mono_os_mutex_unlock (&mini_arch_mutex)
67 static mono_mutex_t mini_arch_mutex;
69 int mono_exc_esp_offset = 0;
72 * The code generated for sequence points reads from this location, which is
73 * made read-only when single stepping is enabled.
75 static gpointer ss_trigger_page;
77 /* Enabled breakpoints read from this trigger page */
78 static gpointer bp_trigger_page;
80 #define MONO_EMIT_NEW_LOAD_R8(cfg,dr,addr) do { \
82 MONO_INST_NEW ((cfg), (inst), OP_R8CONST); \
83 inst->type = STACK_R8; \
85 inst->inst_p0 = (void*)(addr); \
86 mono_bblock_add_inst (cfg->cbb, inst); \
/*
 * mono_arch_regname:
 *
 * Map an integer register index to its printable name; r1 is reported
 * as "sp" (the stack pointer) per PPC convention.
 * NOTE(review): the rows for r30/r31, the bounds-checked return and the
 * fallback return are not visible in this chunk — confirm against the
 * full file.
 */
90 mono_arch_regname (int reg) {
91 static const char rnames[][4] = {
92 "r0", "sp", "r2", "r3", "r4",
93 "r5", "r6", "r7", "r8", "r9",
94 "r10", "r11", "r12", "r13", "r14",
95 "r15", "r16", "r17", "r18", "r19",
96 "r20", "r21", "r22", "r23", "r24",
97 "r25", "r26", "r27", "r28", "r29",
/* only indices 0..31 name a valid GPR */
100 if (reg >= 0 && reg < 32)
/*
 * mono_arch_fregname:
 *
 * Map a floating-point register index to its printable name ("f0".."f31").
 * NOTE(review): the rows for f30/f31 and both return statements are not
 * visible in this chunk — confirm against the full file.
 */
106 mono_arch_fregname (int reg) {
107 static const char rnames[][4] = {
108 "f0", "f1", "f2", "f3", "f4",
109 "f5", "f6", "f7", "f8", "f9",
110 "f10", "f11", "f12", "f13", "f14",
111 "f15", "f16", "f17", "f18", "f19",
112 "f20", "f21", "f22", "f23", "f24",
113 "f25", "f26", "f27", "f28", "f29",
/* only indices 0..31 name a valid FPR */
116 if (reg >= 0 && reg < 32)
/*
 * emit_memcpy:
 *
 * Emit native PPC code that copies SIZE bytes from SREG+SOFFSET to
 * DREG+DOFFSET, returning the advanced code pointer. Clobbers r0, r11
 * and r12 (see note below). Large copies use a CTR-counted
 * load-with-update/store-with-update loop; the remainder is copied with
 * progressively smaller unrolled load/store pairs (pointer-size, word,
 * halfword, byte).
 * NOTE(review): this chunk is missing several lines (loop-size updates,
 * else branches, closing braces) — do not modify without the full file.
 */
121 /* this function overwrites r0, r11, r12 */
123 emit_memcpy (guint8 *code, int size, int dreg, int doffset, int sreg, int soffset)
125 /* unrolled, use the counter in big */
126 if (size > sizeof (gpointer) * 5) {
127 long shifted = size / SIZEOF_VOID_P;
128 guint8 *copy_loop_start, *copy_loop_jump;
/* CTR holds the number of pointer-sized words to move */
130 ppc_load (code, ppc_r0, shifted);
131 ppc_mtctr (code, ppc_r0);
132 //g_assert (sreg == ppc_r12);
/* bias the base registers so the update forms can pre-increment */
133 ppc_addi (code, ppc_r11, dreg, (doffset - sizeof (gpointer)));
134 ppc_addi (code, ppc_r12, sreg, (soffset - sizeof (gpointer)));
135 copy_loop_start = code;
136 ppc_ldptr_update (code, ppc_r0, (unsigned int)sizeof (gpointer), ppc_r12);
137 ppc_stptr_update (code, ppc_r0, (unsigned int)sizeof (gpointer), ppc_r11);
138 copy_loop_jump = code;
/* branch back while CTR (decremented by the bc) is non-zero */
139 ppc_bc (code, PPC_BR_DEC_CTR_NONZERO, 0, 0);
140 ppc_patch (copy_loop_jump, copy_loop_start);
141 size -= shifted * sizeof (gpointer);
142 doffset = soffset = 0;
145 #ifdef __mono_ppc64__
146 /* the hardware has multiple load/store units and the move is long
147 enough to use more then one register, then use load/load/store/store
148 to execute 2 instructions per cycle. */
149 if ((cpu_hw_caps & PPC_MULTIPLE_LS_UNITS) && (dreg != ppc_r11) && (sreg != ppc_r11)) {
151 ppc_ldptr (code, ppc_r0, soffset, sreg);
152 ppc_ldptr (code, ppc_r11, soffset+8, sreg);
153 ppc_stptr (code, ppc_r0, doffset, dreg);
154 ppc_stptr (code, ppc_r11, doffset+8, dreg);
/* single doubleword at a time when pairing is not possible */
161 ppc_ldr (code, ppc_r0, soffset, sreg);
162 ppc_str (code, ppc_r0, doffset, dreg);
/* 32-bit word copies, paired when the LS units allow it */
168 if ((cpu_hw_caps & PPC_MULTIPLE_LS_UNITS) && (dreg != ppc_r11) && (sreg != ppc_r11)) {
170 ppc_lwz (code, ppc_r0, soffset, sreg);
171 ppc_lwz (code, ppc_r11, soffset+4, sreg);
172 ppc_stw (code, ppc_r0, doffset, dreg);
173 ppc_stw (code, ppc_r11, doffset+4, dreg);
181 ppc_lwz (code, ppc_r0, soffset, sreg);
182 ppc_stw (code, ppc_r0, doffset, dreg);
/* halfword tail */
188 ppc_lhz (code, ppc_r0, soffset, sreg);
189 ppc_sth (code, ppc_r0, doffset, dreg);
/* byte tail */
195 ppc_lbz (code, ppc_r0, soffset, sreg);
196 ppc_stb (code, ppc_r0, doffset, dreg);
205 * mono_arch_get_argument_info:
206 * @csig: a method signature
207 * @param_count: the number of parameters to consider
208 * @arg_info: an array to store the result infos
210 * Gathers information on parameters such as size, alignment and
211 * padding. arg_info should be large enought to hold param_count + 1 entries.
213 * Returns the size of the activation frame.
/*
 * mono_arch_get_argument_info:
 *
 * Fill arg_info[0..param_count] with size/offset/padding for each
 * parameter of CSIG (index 0 describes the return/frame prologue) and
 * return the activation-frame size, aligned to MONO_ARCH_FRAME_ALIGNMENT.
 * NOTE(review): the offset bookkeeping lines and the final return are
 * missing from this chunk — confirm against the full file.
 */
216 mono_arch_get_argument_info (MonoMethodSignature *csig, int param_count, MonoJitArgumentInfo *arg_info)
218 #ifdef __mono_ppc64__
222 int k, frame_size = 0;
223 int size, align, pad;
/* struct returns consume one hidden pointer slot */
226 if (MONO_TYPE_ISSTRUCT (csig->ret)) {
227 frame_size += sizeof (gpointer);
231 arg_info [0].offset = offset;
/* 'this' pointer, when present, also takes a slot */
234 frame_size += sizeof (gpointer);
238 arg_info [0].size = frame_size;
240 for (k = 0; k < param_count; k++) {
/* pinvoke signatures use native sizes, managed ones use JIT sizes */
243 size = mono_type_native_stack_size (csig->params [k], (guint32*)&align);
245 size = mini_type_stack_size (csig->params [k], &align);
247 /* ignore alignment for now */
/* pad frame_size up to 'align'; 'align' is assumed to be a power of two */
250 frame_size += pad = (align - (frame_size & (align - 1))) & (align - 1);
251 arg_info [k].pad = pad;
253 arg_info [k + 1].pad = 0;
254 arg_info [k + 1].size = size;
256 arg_info [k + 1].offset = offset;
/* final frame alignment, recorded as trailing pad of the last arg */
260 align = MONO_ARCH_FRAME_ALIGNMENT;
261 frame_size += pad = (align - (frame_size & (align - 1))) & (align - 1);
262 arg_info [k].pad = pad;
268 #ifdef __mono_ppc64__
/*
 * is_load_sequence:
 *
 * Return whether the five instructions at SEQ form the ppc64 full
 * 64-bit constant-load idiom lis/ori/sldi/oris/ori (checked only by
 * primary opcode; sldi is an rldicr form under opcode 30).
 */
270 is_load_sequence (guint32 *seq)
272 return ppc_opcode (seq [0]) == 15 && /* lis */
273 ppc_opcode (seq [1]) == 24 && /* ori */
274 ppc_opcode (seq [2]) == 30 && /* sldi */
275 ppc_opcode (seq [3]) == 25 && /* oris */
276 ppc_opcode (seq [4]) == 24; /* ori */
279 #define ppc_load_get_dest(l) (((l)>>21) & 0x1f)
280 #define ppc_load_get_off(l) ((gint16)((l) & 0xffff))
284 #define ppc_is_load_op(opcode) (ppc_opcode ((opcode)) == 58 || ppc_opcode ((opcode)) == 32)
286 /* code must point to the blrl */
/*
 * mono_ppc_is_direct_call_sequence:
 *
 * CODE must point at the branch instruction (blrl/blr/bctr — asserted
 * below). Return whether the preceding instructions form a thunk-less
 * direct call sequence, i.e. an immediate-constant load of the target
 * address rather than an indirect load through a thunk/descriptor.
 * On ppc64 it also recognizes function-descriptor calls where r2 (TOC)
 * is reloaded next to the target load.
 */
288 mono_ppc_is_direct_call_sequence (guint32 *code)
290 #ifdef __mono_ppc64__
/* blrl / blr / bctr encodings */
291 g_assert(*code == 0x4e800021 || *code == 0x4e800020 || *code == 0x4e800420);
293 /* the thunk-less direct call sequence: lis/ori/sldi/oris/ori/mtlr/blrl */
294 if (ppc_opcode (code [-1]) == 31) { /* mtlr */
295 if (ppc_is_load_op (code [-2]) && ppc_is_load_op (code [-3])) { /* ld/ld */
296 if (!is_load_sequence (&code [-8]))
298 /* one of the loads must be "ld r2,8(rX)" or "ld r2,4(rX) for ilp32 */
299 return (ppc_load_get_dest (code [-2]) == ppc_r2 && ppc_load_get_off (code [-2]) == sizeof (gpointer)) ||
300 (ppc_load_get_dest (code [-3]) == ppc_r2 && ppc_load_get_off (code [-3]) == sizeof (gpointer));
302 if (ppc_opcode (code [-2]) == 24 && ppc_opcode (code [-3]) == 31) /* mr/nop */
303 return is_load_sequence (&code [-8]);
/* plain 5-instruction constant load directly before mtlr */
305 return is_load_sequence (&code [-6]);
/* 32-bit: only blrl is expected here */
309 g_assert(*code == 0x4e800021);
311 /* the thunk-less direct call sequence: lis/ori/mtlr/blrl */
312 return ppc_opcode (code [-1]) == 31 &&
313 ppc_opcode (code [-2]) == 24 &&
314 ppc_opcode (code [-3]) == 15;
318 #define MAX_ARCH_DELEGATE_PARAMS 7
/*
 * get_delegate_invoke_impl:
 *
 * Emit a delegate-invoke trampoline. With HAS_TARGET, the 'this'
 * argument (r3) is replaced by the delegate's target object before
 * tail-branching to method_ptr via CTR; without a target, the
 * arguments are slid down one register each and control branches to
 * method_ptr. Registers a MonoTrampInfo in *INFO.
 * NOTE(review): the return statement and some branch structure are not
 * visible in this chunk; AOT handling is presumably elsewhere in the
 * missing lines — confirm against the full file.
 */
321 get_delegate_invoke_impl (MonoTrampInfo **info, gboolean has_target, guint32 param_count, gboolean aot)
323 guint8 *code, *start;
326 int size = MONO_PPC_32_64_CASE (32, 32) + PPC_FTNPTR_SIZE;
328 start = code = mono_global_codeman_reserve (size);
330 code = mono_ppc_create_pre_code_ftnptr (code);
332 /* Replace the this argument with the target */
333 ppc_ldptr (code, ppc_r0, MONO_STRUCT_OFFSET (MonoDelegate, method_ptr), ppc_r3);
334 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
335 /* it's a function descriptor */
336 /* Can't use ldptr as it doesn't work with r0 */
337 ppc_ldptr_indexed (code, ppc_r0, 0, ppc_r0);
339 ppc_mtctr (code, ppc_r0);
340 ppc_ldptr (code, ppc_r3, MONO_STRUCT_OFFSET (MonoDelegate, target), ppc_r3);
341 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
343 g_assert ((code - start) <= size);
345 mono_arch_flush_icache (start, size);
/* no-target case: room for one mr per parameter to slide args down */
349 size = MONO_PPC_32_64_CASE (32, 32) + param_count * 4 + PPC_FTNPTR_SIZE;
350 start = code = mono_global_codeman_reserve (size);
352 code = mono_ppc_create_pre_code_ftnptr (code);
354 ppc_ldptr (code, ppc_r0, MONO_STRUCT_OFFSET (MonoDelegate, method_ptr), ppc_r3);
355 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
356 /* it's a function descriptor */
357 ppc_ldptr_indexed (code, ppc_r0, 0, ppc_r0);
359 ppc_mtctr (code, ppc_r0);
360 /* slide down the arguments */
361 for (i = 0; i < param_count; ++i) {
362 ppc_mr (code, (ppc_r3 + i), (ppc_r3 + i + 1));
364 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
366 g_assert ((code - start) <= size);
368 mono_arch_flush_icache (start, size);
372 *info = mono_tramp_info_create ("delegate_invoke_impl_has_target", start, code - start, NULL, NULL);
374 char *name = g_strdup_printf ("delegate_invoke_impl_target_%d", param_count);
375 *info = mono_tramp_info_create (name, start, code - start, NULL, NULL);
/*
 * mono_arch_get_delegate_invoke_impls:
 *
 * Build the full set of delegate-invoke trampolines for AOT: one
 * has-target variant plus one no-target variant per supported
 * parameter count (0..MAX_ARCH_DELEGATE_PARAMS). Returns the list of
 * MonoTrampInfo (prepend order; the return is outside this chunk).
 */
383 mono_arch_get_delegate_invoke_impls (void)
389 get_delegate_invoke_impl (&info, TRUE, 0, TRUE);
390 res = g_slist_prepend (res, info);
392 for (i = 0; i <= MAX_ARCH_DELEGATE_PARAMS; ++i) {
393 get_delegate_invoke_impl (&info, FALSE, i, TRUE);
394 res = g_slist_prepend (res, info);
/*
 * mono_arch_get_delegate_invoke_impl:
 *
 * Return (and cache) the delegate-invoke trampoline matching SIG.
 * Uses the AOT-compiled trampoline when available, else JITs one via
 * get_delegate_invoke_impl. Struct returns and more than
 * MAX_ARCH_DELEGATE_PARAMS regsize parameters are unsupported
 * (presumably returning NULL — the early-return lines are not visible
 * in this chunk).
 */
401 mono_arch_get_delegate_invoke_impl (MonoMethodSignature *sig, gboolean has_target)
403 guint8 *code, *start;
405 /* FIXME: Support more cases */
406 if (MONO_TYPE_ISSTRUCT (sig->ret))
/* per-process cache for the has-target variant */
410 static guint8* cached = NULL;
416 start = mono_aot_get_trampoline ("delegate_invoke_impl_has_target");
419 start = get_delegate_invoke_impl (&info, TRUE, 0, FALSE);
420 mono_tramp_info_register (info, NULL);
/* publish before other threads can read 'cached' */
422 mono_memory_barrier ();
/* per-arity cache for the no-target variants */
426 static guint8* cache [MAX_ARCH_DELEGATE_PARAMS + 1] = {NULL};
429 if (sig->param_count > MAX_ARCH_DELEGATE_PARAMS)
431 for (i = 0; i < sig->param_count; ++i)
432 if (!mono_is_regsize_var (sig->params [i]))
436 code = cache [sig->param_count];
441 char *name = g_strdup_printf ("delegate_invoke_impl_target_%d", sig->param_count);
442 start = mono_aot_get_trampoline (name);
446 start = get_delegate_invoke_impl (&info, FALSE, sig->param_count, FALSE);
447 mono_tramp_info_register (info, NULL);
450 mono_memory_barrier ();
452 cache [sig->param_count] = start;
458 mono_arch_get_delegate_virtual_invoke_impl (MonoMethodSignature *sig, MonoMethod *method, int offset, gboolean load_imt_reg)
/*
 * mono_arch_get_this_arg_from_call:
 *
 * Return the 'this' argument of the call at CODE: on PPC it is always
 * passed in r3, read out of the saved register array REGS.
 */
464 mono_arch_get_this_arg_from_call (mgreg_t *regs, guint8 *code)
466 mgreg_t *r = (mgreg_t*)regs;
468 return (gpointer)(gsize)r [ppc_r3];
476 #define MAX_AUX_ENTRIES 128
478 * PPC_FEATURE_POWER4, PPC_FEATURE_POWER5, PPC_FEATURE_POWER5_PLUS, PPC_FEATURE_CELL,
479 * PPC_FEATURE_PA6T, PPC_FEATURE_ARCH_2_05 are considered supporting 2X ISA features
481 #define ISA_2X (0x00080000 | 0x00040000 | 0x00020000 | 0x00010000 | 0x00000800 | 0x00001000)
483 /* define PPC_FEATURE_64 HWCAP for 64-bit category. */
484 #define ISA_64 0x40000000
486 /* define PPC_FEATURE_POWER6_EXT HWCAP for power6x mffgpr/mftgpr instructions. */
487 #define ISA_MOVE_FPR_GPR 0x00000200
489 * Initialize the cpu to execute managed code.
492 mono_arch_cpu_init (void)
497 * Initialize architecture specific code.
/*
 * mono_arch_init:
 *
 * One-time backend initialization: detect the data-cache line size
 * (sysctl on macOS, /proc/self/auxv AT_DCACHEBSIZE on Linux 2.6+),
 * populate cpu_hw_caps from mono-hwcap probes, create the arch mutex,
 * and allocate the single-step / breakpoint trigger pages (the
 * breakpoint page is made inaccessible so reads fault).
 * NOTE(review): several error-handling and cleanup lines (fclose,
 * fallback cache size, etc.) are missing from this chunk.
 */
500 mono_arch_init (void)
502 #if defined(MONO_CROSS_COMPILE)
503 #elif defined(__APPLE__)
505 size_t len = sizeof (cachelinesize);
508 mib [1] = HW_CACHELINE;
510 if (sysctl (mib, 2, &cachelinesize, &len, NULL, 0) == -1) {
514 cachelineinc = cachelinesize;
516 #elif defined(__linux__)
517 AuxVec vec [MAX_AUX_ENTRIES];
518 int i, vec_entries = 0;
519 /* sadly this will work only with 2.6 kernels... */
520 FILE* f = fopen ("/proc/self/auxv", "rb");
523 vec_entries = fread (&vec, sizeof (AuxVec), MAX_AUX_ENTRIES, f);
527 for (i = 0; i < vec_entries; i++) {
528 int type = vec [i].type;
530 if (type == 19) { /* AT_DCACHEBSIZE */
531 cachelinesize = vec [i].value;
535 #elif defined(G_COMPILER_CODEWARRIOR)
539 //#error Need a way to get cache line size
/* translate hwcap probe results into the cpu_hw_caps bitmask */
542 if (mono_hwcap_ppc_has_icache_snoop)
543 cpu_hw_caps |= PPC_ICACHE_SNOOP;
545 if (mono_hwcap_ppc_is_isa_2x)
546 cpu_hw_caps |= PPC_ISA_2X;
548 if (mono_hwcap_ppc_is_isa_64)
549 cpu_hw_caps |= PPC_ISA_64;
551 if (mono_hwcap_ppc_has_move_fpr_gpr)
552 cpu_hw_caps |= PPC_MOVE_FPR_GPR;
554 if (mono_hwcap_ppc_has_multiple_ls_units)
555 cpu_hw_caps |= PPC_MULTIPLE_LS_UNITS;
561 cachelineinc = cachelinesize;
563 if (mono_cpu_count () > 1)
564 cpu_hw_caps |= PPC_SMP_CAPABLE;
566 mono_os_mutex_init_recursive (&mini_arch_mutex);
/* sequence points read ss_trigger_page; bp page faults when read */
568 ss_trigger_page = mono_valloc (NULL, mono_pagesize (), MONO_MMAP_READ|MONO_MMAP_32BIT, MONO_MEM_ACCOUNT_OTHER);
569 bp_trigger_page = mono_valloc (NULL, mono_pagesize (), MONO_MMAP_READ|MONO_MMAP_32BIT, MONO_MEM_ACCOUNT_OTHER);
570 mono_mprotect (bp_trigger_page, mono_pagesize (), 0);
572 mono_aot_register_jit_icall ("mono_ppc_throw_exception", mono_ppc_throw_exception);
574 // FIXME: Fix partial sharing for power and remove this
575 mono_set_partial_sharing_supported (FALSE);
579 * Cleanup architecture specific code.
/*
 * mono_arch_cleanup:
 *
 * Tear down backend state created in mono_arch_init (the arch mutex;
 * any further teardown is outside this chunk).
 */
582 mono_arch_cleanup (void)
584 mono_os_mutex_destroy (&mini_arch_mutex);
588 mono_arch_have_fast_tls (void)
594 * This function returns the optimizations supported on this cpu.
597 mono_arch_cpu_optimizations (guint32 *exclude_mask)
601 /* no ppc-specific optimizations yet */
607 * This function test for all SIMD functions supported.
609 * Returns a bitmask corresponding to all supported versions.
613 mono_arch_cpu_enumerate_simd_versions (void)
615 /* SIMD is currently unimplemented */
619 #ifdef __mono_ppc64__
620 #define CASE_PPC32(c)
621 #define CASE_PPC64(c) case c:
623 #define CASE_PPC32(c) case c:
624 #define CASE_PPC64(c)
/*
 * is_regsize_var:
 *
 * Return whether T (after unwrapping enums/generic sharing via
 * mini_get_underlying_type) fits in a single integer register and is
 * therefore eligible for global register allocation. 64-bit integer
 * cases apply only on ppc64 (CASE_PPC64). The TRUE/FALSE returns and
 * remaining cases are outside this chunk.
 */
628 is_regsize_var (MonoType *t) {
631 t = mini_get_underlying_type (t);
635 CASE_PPC64 (MONO_TYPE_I8)
636 CASE_PPC64 (MONO_TYPE_U8)
640 case MONO_TYPE_FNPTR:
642 case MONO_TYPE_OBJECT:
643 case MONO_TYPE_STRING:
644 case MONO_TYPE_CLASS:
645 case MONO_TYPE_SZARRAY:
646 case MONO_TYPE_ARRAY:
648 case MONO_TYPE_GENERICINST:
/* reference-typed generic instances are regsize; valuetypes fall through */
649 if (!mono_type_generic_inst_is_valuetype (t))
652 case MONO_TYPE_VALUETYPE:
/*
 * mono_arch_get_allocatable_int_vars:
 *
 * Return the list of local/argument variables of CFG that are
 * candidates for global integer register allocation: dead or
 * volatile/indirect variables and non-regsize types are filtered out.
 */
660 mono_arch_get_allocatable_int_vars (MonoCompile *cfg)
665 for (i = 0; i < cfg->num_varinfo; i++) {
666 MonoInst *ins = cfg->varinfo [i];
667 MonoMethodVar *vmv = MONO_VARINFO (cfg, i);
/* unused vars have an empty (or inverted) live range */
670 if (vmv->range.first_use.abs_pos >= vmv->range.last_use.abs_pos)
673 if (ins->flags & (MONO_INST_VOLATILE|MONO_INST_INDIRECT) || (ins->opcode != OP_LOCAL && ins->opcode != OP_ARG))
676 /* we can only allocate 32 bit values */
677 if (is_regsize_var (ins->inst_vtype)) {
678 g_assert (MONO_VARINFO (cfg, i)->reg == -1);
679 g_assert (i == vmv->idx);
680 vars = mono_varlist_insert_sorted (cfg, vars, vmv, FALSE);
686 #endif /* ifndef DISABLE_JIT */
/*
 * mono_arch_get_global_int_regs:
 *
 * Return the list of callee-saved integer registers usable for global
 * allocation: r14..(top-1), skipping r13 (system-reserved on EABI) and
 * skipping r29 under AOT (reserved for the vtable address).
 * NOTE(review): the declarations of 'regs'/'top'/'i' and the frame_reg
 * handling are in lines missing from this chunk.
 */
689 mono_arch_get_global_int_regs (MonoCompile *cfg)
693 if (cfg->frame_reg != ppc_sp)
695 /* ppc_r13 is used by the system on PPC EABI */
696 for (i = 14; i < top; ++i) {
698 * Reserve r29 for holding the vtable address for virtual calls in AOT mode,
699 * since the trampolines can clobber r12.
701 if (!(cfg->compile_aot && i == 29))
702 regs = g_list_prepend (regs, GUINT_TO_POINTER (i));
709 * mono_arch_regalloc_cost:
711 * Return the cost, in number of memory references, of the action of
712 * allocating the variable VMV into a register during global register
716 mono_arch_regalloc_cost (MonoCompile *cfg, MonoMethodVar *vmv)
/*
 * mono_arch_flush_icache:
 *
 * Make SIZE bytes of freshly written code at CODE visible to the
 * instruction fetcher: flush/store the data-cache lines (dcbf on SMP,
 * dcbst otherwise), then invalidate the corresponding icache lines
 * (icbi). On POWER5/6 with icache snooping a single icbi suffices.
 * No-op when cross-compiling.
 * NOTE(review): the endp computation, sync/isync instructions and the
 * CodeWarrior asm bodies are in lines missing from this chunk.
 */
723 mono_arch_flush_icache (guint8 *code, gint size)
725 #ifdef MONO_CROSS_COMPILE
728 guint8 *endp, *start;
/* round start down to a cache-line boundary */
732 start = (guint8*)((gsize)start & ~(cachelinesize - 1));
733 /* use dcbf for smp support, later optimize for UP, see pem._64bit.d20030611.pdf page 211 */
734 #if defined(G_COMPILER_CODEWARRIOR)
735 if (cpu_hw_caps & PPC_SMP_CAPABLE) {
736 for (p = start; p < endp; p += cachelineinc) {
740 for (p = start; p < endp; p += cachelineinc) {
746 for (p = start; p < endp; p += cachelineinc) {
757 /* For POWER5/6 with ICACHE_SNOOPing only one icbi in the range is required.
758 * The sync is required to insure that the store queue is completely empty.
759 * While the icbi performs no cache operations, icbi/isync is required to
760 * kill local prefetch.
762 if (cpu_hw_caps & PPC_ICACHE_SNOOP) {
764 asm ("icbi 0,%0;" : : "r"(code) : "memory");
768 /* use dcbf for smp support, see pem._64bit.d20030611.pdf page 211 */
769 if (cpu_hw_caps & PPC_SMP_CAPABLE) {
770 for (p = start; p < endp; p += cachelineinc) {
771 asm ("dcbf 0,%0;" : : "r"(p) : "memory");
774 for (p = start; p < endp; p += cachelineinc) {
775 asm ("dcbst 0,%0;" : : "r"(p) : "memory");
780 for (p = start; p < endp; p += cachelineinc) {
781 /* for ISA2.0+ implementations we should not need any extra sync between the
782 * icbi instructions. Both the 2.0 PEM and the PowerISA-2.05 say this.
783 * So I am not sure which chip had this problem but its not an issue on
784 * of the ISA V2 chips.
786 if (cpu_hw_caps & PPC_ISA_2X)
787 asm ("icbi 0,%0;" : : "r"(p) : "memory");
789 asm ("icbi 0,%0; sync;" : : "r"(p) : "memory");
791 if (!(cpu_hw_caps & PPC_ISA_2X))
799 mono_arch_flush_register_windows (void)
804 #define ALWAYS_ON_STACK(s) s
805 #define FP_ALSO_IN_REG(s) s
807 #ifdef __mono_ppc64__
808 #define ALWAYS_ON_STACK(s) s
809 #define FP_ALSO_IN_REG(s) s
811 #define ALWAYS_ON_STACK(s)
812 #define FP_ALSO_IN_REG(s)
814 #define ALIGN_DOUBLES
823 RegTypeFPStructByVal, // For the v2 ABI, floats should be passed in FRs instead of GRs. Only valid for ABI v2!
828 guint32 vtsize; /* in param area */
830 guint8 vtregs; /* number of registers used to pass a RegTypeStructByVal/RegTypeFPStructByVal */
831 guint8 regtype : 4; /* 0 general, 1 basereg, 2 floating point register, see RegType* */
832 guint8 size : 4; /* 1, 2, 4, 8, or regs used by RegTypeStructByVal/RegTypeFPStructByVal */
833 guint8 bytes : 4; /* size in bytes - only valid for
834 RegTypeStructByVal/RegTypeFPStructByVal if the struct fits
835 in one word, otherwise it's 0*/
844 gboolean vtype_retaddr;
852 #if PPC_RETURN_SMALL_FLOAT_STRUCTS_IN_FR_REGS
854 // Test if a structure is completely composed of either float XOR double fields and has fewer than
855 // PPC_MOST_FLOAT_STRUCT_MEMBERS_TO_RETURN_VIA_REGISTER members.
856 // If this is true the structure can be returned directly via float registers instead of by a hidden parameter
857 // pointing to where the return value should be stored.
858 // This is as per the ELF ABI v2.
/*
 * is_float_struct_returnable_via_regs:
 *
 * ELF ABI v2 HFA check: return whether TYPE is a homogeneous
 * float-or-double aggregate small enough (member count within
 * PPC_MOST_FLOAT_STRUCT_MEMBERS_TO_RETURN_VIA_REGISTERS) to travel in
 * FP registers. member_cnt/member_size are optional out-parameters;
 * when NULL (the missing guard lines presumably check this) locals are
 * substituted so the hfa helper always has somewhere to write.
 */
861 is_float_struct_returnable_via_regs (MonoType *type, int* member_cnt, int* member_size)
863 int local_member_cnt, local_member_size;
865 member_cnt = &local_member_cnt;
868 member_size = &local_member_size;
871 gboolean is_all_floats = mini_type_is_hfa(type, member_cnt, member_size);
872 return is_all_floats && (*member_cnt <= PPC_MOST_FLOAT_STRUCT_MEMBERS_TO_RETURN_VIA_REGISTERS);
876 #define is_float_struct_returnable_via_regs(a,b,c) (FALSE)
880 #if PPC_RETURN_SMALL_STRUCTS_IN_REGS
882 // Test if a structure is smaller in size than 2 doublewords (PPC_LARGEST_STRUCT_SIZE_TO_RETURN_VIA_REGISTERS) and is
883 // completely composed of fields all of basic types.
884 // If this is true the structure can be returned directly via registers r3/r4 instead of by a hidden parameter
885 // pointing to where the return value should be stored.
886 // This is as per the ELF ABI v2.
/*
 * is_struct_returnable_via_regs:
 *
 * ELF ABI v2 small-struct check: return whether KLASS is no larger
 * than PPC_LARGEST_STRUCT_SIZE_TO_RETURN_VIA_REGISTERS and is composed
 * entirely of basic-typed (or recursively qualifying struct) instance
 * fields, so it can be returned in r3/r4 instead of via a hidden
 * return buffer. Recursion handles nested structs; byref fields
 * disqualify (the actual TRUE/FALSE returns are in lines missing from
 * this chunk).
 */
889 is_struct_returnable_via_regs (MonoClass *klass, gboolean is_pinvoke)
891 gboolean has_a_field = FALSE;
894 gpointer iter = NULL;
/* pinvoke structs are measured with native layout, managed with JIT layout */
897 size = mono_type_native_stack_size (&klass->byval_arg, 0);
899 size = mini_type_stack_size (&klass->byval_arg, 0);
902 if (size > PPC_LARGEST_STRUCT_SIZE_TO_RETURN_VIA_REGISTERS)
904 while ((f = mono_class_get_fields (klass, &iter))) {
905 if (!(f->type->attrs & FIELD_ATTRIBUTE_STATIC)) {
906 // TBD: Is there a better way to check for the basic types?
907 if (f->type->byref) {
909 } else if ((f->type->type >= MONO_TYPE_BOOLEAN) && (f->type->type <= MONO_TYPE_R8)) {
911 } else if (MONO_TYPE_ISSTRUCT (f->type)) {
912 MonoClass *klass = mono_class_from_mono_type (f->type);
913 if (is_struct_returnable_via_regs(klass, is_pinvoke)) {
928 #define is_struct_returnable_via_regs(a,b) (FALSE)
/*
 * add_general:
 *
 * Assign the next integer-register or stack slot to AINFO, advancing
 * the caller's register counter *GR and/or *STACK_SIZE. The first
 * branch is the ppc64 / simple-value path (one GPR or one
 * pointer-sized stack slot); the second handles 64-bit values on
 * 32-bit PPC, which need a register pair (hence the -1 check and the
 * 8-byte stack slot). NOTE(review): the register-assignment else
 * branches and the gr increments are in lines missing from this chunk.
 */
933 add_general (guint *gr, guint *stack_size, ArgInfo *ainfo, gboolean simple)
935 #ifdef __mono_ppc64__
/* out of argument registers (r3..r3+PPC_NUM_REG_ARGS-1): spill to caller frame */
940 if (*gr >= 3 + PPC_NUM_REG_ARGS) {
941 ainfo->offset = PPC_STACK_PARAM_OFFSET + *stack_size;
942 ainfo->reg = ppc_sp; /* in the caller */
943 ainfo->regtype = RegTypeBase;
944 *stack_size += sizeof (gpointer);
946 ALWAYS_ON_STACK (*stack_size += sizeof (gpointer));
/* 64-bit value on ppc32: needs two consecutive registers */
950 if (*gr >= 3 + PPC_NUM_REG_ARGS - 1) {
952 //*stack_size += (*stack_size % 8);
954 ainfo->offset = PPC_STACK_PARAM_OFFSET + *stack_size;
955 ainfo->reg = ppc_sp; /* in the caller */
956 ainfo->regtype = RegTypeBase;
963 ALWAYS_ON_STACK (*stack_size += 8);
971 #if defined(__APPLE__) || (defined(__mono_ppc64__) && !PPC_PASS_SMALL_FLOAT_STRUCTS_IN_FR_REGS)
/*
 * has_only_a_r48_field:
 *
 * Return whether KLASS has exactly one instance field and it is an
 * R4 or R8 (the single-field bookkeeping via have_field and the
 * returns are in lines missing from this chunk). Used to pass/return
 * such single-float wrappers directly in an FP register.
 */
973 has_only_a_r48_field (MonoClass *klass)
977 gboolean have_field = FALSE;
979 while ((f = mono_class_get_fields (klass, &iter))) {
980 if (!(f->type->attrs & FIELD_ATTRIBUTE_STATIC)) {
983 if (!f->type->byref && (f->type->type == MONO_TYPE_R4 || f->type->type == MONO_TYPE_R8))
/*
 * get_call_info:
 *
 * Compute the PPC calling-convention layout for SIG: for each argument
 * and for the return value, decide register vs. stack placement
 * (general register, FP register, by-value struct in GPRs/FPRs, or
 * struct-by-address) and accumulate the 16-byte-aligned stack usage in
 * cinfo->stack_usage. The caller owns the returned g_malloc0'd CallInfo.
 * NOTE(review): this chunk is heavily decimated — many case labels,
 * else branches, break statements and closing braces are missing.
 * Treat the structure below as indicative only.
 */
994 get_call_info (MonoMethodSignature *sig)
996 guint i, fr, gr, pstart;
997 int n = sig->hasthis + sig->param_count;
998 MonoType *simpletype;
999 guint32 stack_size = 0;
1000 CallInfo *cinfo = g_malloc0 (sizeof (CallInfo) + sizeof (ArgInfo) * n);
1001 gboolean is_pinvoke = sig->pinvoke;
1003 fr = PPC_FIRST_FPARG_REG;
1004 gr = PPC_FIRST_ARG_REG;
1006 /* FIXME: handle returning a struct */
1007 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
1008 cinfo->vtype_retaddr = TRUE;
1014 * To simplify get_this_arg_reg () and LLVM integration, emit the vret arg after
1015 * the first argument, allowing 'this' to be always passed in the first arg reg.
1016 * Also do this if the first argument is a reference type, since virtual calls
1017 * are sometimes made using calli without sig->hasthis set, like in the delegate
1020 if (cinfo->vtype_retaddr && !is_pinvoke && (sig->hasthis || (sig->param_count > 0 && MONO_TYPE_IS_REFERENCE (mini_get_underlying_type (sig->params [0]))))) {
1022 add_general (&gr, &stack_size, cinfo->args + 0, TRUE);
1025 add_general (&gr, &stack_size, &cinfo->args [sig->hasthis + 0], TRUE);
/* hidden return-address argument goes after the first real arg */
1029 add_general (&gr, &stack_size, &cinfo->ret, TRUE);
1030 cinfo->struct_ret = cinfo->ret.reg;
1031 cinfo->vret_arg_index = 1;
1035 add_general (&gr, &stack_size, cinfo->args + 0, TRUE);
1039 if (cinfo->vtype_retaddr) {
1040 add_general (&gr, &stack_size, &cinfo->ret, TRUE);
1041 cinfo->struct_ret = cinfo->ret.reg;
1045 DEBUG(printf("params: %d\n", sig->param_count));
1046 for (i = pstart; i < sig->param_count; ++i) {
1047 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos)) {
1048 /* Prevent implicit arguments and sig_cookie from
1049 being passed in registers */
1050 gr = PPC_LAST_ARG_REG + 1;
1051 /* FIXME: don't we have to set fr, too? */
1052 /* Emit the signature cookie just before the implicit arguments */
1053 add_general (&gr, &stack_size, &cinfo->sig_cookie, TRUE);
1055 DEBUG(printf("param %d: ", i));
1056 if (sig->params [i]->byref) {
1057 DEBUG(printf("byref\n"));
1058 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
1062 simpletype = mini_get_underlying_type (sig->params [i]);
1063 switch (simpletype->type) {
1064 case MONO_TYPE_BOOLEAN:
1067 cinfo->args [n].size = 1;
1068 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
1071 case MONO_TYPE_CHAR:
1074 cinfo->args [n].size = 2;
1075 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
1080 cinfo->args [n].size = 4;
1081 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
1087 case MONO_TYPE_FNPTR:
1088 case MONO_TYPE_CLASS:
1089 case MONO_TYPE_OBJECT:
1090 case MONO_TYPE_STRING:
1091 case MONO_TYPE_SZARRAY:
1092 case MONO_TYPE_ARRAY:
1093 cinfo->args [n].size = sizeof (gpointer);
1094 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
1097 case MONO_TYPE_GENERICINST:
1098 if (!mono_type_generic_inst_is_valuetype (simpletype)) {
1099 cinfo->args [n].size = sizeof (gpointer);
1100 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
1105 case MONO_TYPE_VALUETYPE:
1106 case MONO_TYPE_TYPEDBYREF: {
1108 MonoClass *klass = mono_class_from_mono_type (sig->params [i]);
1109 if (simpletype->type == MONO_TYPE_TYPEDBYREF)
1110 size = sizeof (MonoTypedRef);
1111 else if (is_pinvoke)
1112 size = mono_class_native_size (klass, NULL);
1114 size = mono_class_value_size (klass, NULL);
1116 #if defined(__APPLE__) || (defined(__mono_ppc64__) && !PPC_PASS_SMALL_FLOAT_STRUCTS_IN_FR_REGS)
/* single-float wrapper structs travel in one FP register */
1117 if ((size == 4 || size == 8) && has_only_a_r48_field (klass)) {
1118 cinfo->args [n].size = size;
1120 /* It was 7, now it is 8 in LinuxPPC */
1121 if (fr <= PPC_LAST_FPARG_REG) {
1122 cinfo->args [n].regtype = RegTypeFP;
1123 cinfo->args [n].reg = fr;
1125 FP_ALSO_IN_REG (gr ++);
1126 #if !defined(__mono_ppc64__)
1128 FP_ALSO_IN_REG (gr ++);
1130 ALWAYS_ON_STACK (stack_size += size);
1132 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
1133 cinfo->args [n].regtype = RegTypeBase;
1134 cinfo->args [n].reg = ppc_sp; /* in the caller*/
1141 DEBUG(printf ("load %d bytes struct\n",
1142 mono_class_native_size (sig->params [i]->data.klass, NULL)));
1144 #if PPC_PASS_STRUCTS_BY_VALUE
1146 int align_size = size;
1148 int rest = PPC_LAST_ARG_REG - gr + 1;
1151 #if PPC_PASS_SMALL_FLOAT_STRUCTS_IN_FR_REGS
1154 gboolean is_all_floats = is_float_struct_returnable_via_regs (sig->params [i], &mbr_cnt, &mbr_size);
1156 if (is_all_floats) {
1157 rest = PPC_LAST_FPARG_REG - fr + 1;
1159 // Pass small (<= 8 member) structures entirely made up of either float or double members
1160 // in FR registers. There have to be at least mbr_cnt registers left.
1161 if (is_all_floats &&
1162 (rest >= mbr_cnt)) {
1164 n_in_regs = MIN (rest, nregs);
1165 cinfo->args [n].regtype = RegTypeFPStructByVal;
1166 cinfo->args [n].vtregs = n_in_regs;
1167 cinfo->args [n].size = mbr_size;
1168 cinfo->args [n].vtsize = nregs - n_in_regs;
1169 cinfo->args [n].reg = fr;
1171 if (mbr_size == 4) {
/* two packed floats share one GPR shadow slot */
1173 FP_ALSO_IN_REG (gr += (n_in_regs+1)/2);
1176 FP_ALSO_IN_REG (gr += (n_in_regs));
/* general by-value path: round size up to whole registers */
1181 align_size += (sizeof (gpointer) - 1);
1182 align_size &= ~(sizeof (gpointer) - 1);
1183 nregs = (align_size + sizeof (gpointer) -1 ) / sizeof (gpointer);
1184 n_in_regs = MIN (rest, nregs);
1188 /* FIXME: check this */
1189 if (size >= 3 && size % 4 != 0)
1192 cinfo->args [n].regtype = RegTypeStructByVal;
1193 cinfo->args [n].vtregs = n_in_regs;
1194 cinfo->args [n].size = n_in_regs;
1195 cinfo->args [n].vtsize = nregs - n_in_regs;
1196 cinfo->args [n].reg = gr;
1200 #ifdef __mono_ppc64__
1201 if (nregs == 1 && is_pinvoke)
1202 cinfo->args [n].bytes = size;
1205 cinfo->args [n].bytes = 0;
1206 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
1207 /*g_print ("offset for arg %d at %d\n", n, PPC_STACK_PARAM_OFFSET + stack_size);*/
1208 stack_size += nregs * sizeof (gpointer);
/* platforms without by-value passing send the struct's address */
1211 add_general (&gr, &stack_size, cinfo->args + n, TRUE);
1212 cinfo->args [n].regtype = RegTypeStructByAddr;
1213 cinfo->args [n].vtsize = size;
/* 64-bit integers: need a register pair on ppc32 */
1220 cinfo->args [n].size = 8;
1221 add_general (&gr, &stack_size, cinfo->args + n, SIZEOF_REGISTER == 8);
/* R4 */
1225 cinfo->args [n].size = 4;
1227 /* It was 7, now it is 8 in LinuxPPC */
1228 if (fr <= PPC_LAST_FPARG_REG
1229 // For non-native vararg calls the parms must go in storage
1230 && !(!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG))
1232 cinfo->args [n].regtype = RegTypeFP;
1233 cinfo->args [n].reg = fr;
1235 FP_ALSO_IN_REG (gr ++);
1236 ALWAYS_ON_STACK (stack_size += SIZEOF_REGISTER);
1238 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size + MONO_PPC_32_64_CASE (0, 4);
1239 cinfo->args [n].regtype = RegTypeBase;
1240 cinfo->args [n].reg = ppc_sp; /* in the caller*/
1241 stack_size += SIZEOF_REGISTER;
/* R8 */
1246 cinfo->args [n].size = 8;
1247 /* It was 7, now it is 8 in LinuxPPC */
1248 if (fr <= PPC_LAST_FPARG_REG
1249 // For non-native vararg calls the parms must go in storage
1250 && !(!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG))
1252 cinfo->args [n].regtype = RegTypeFP;
1253 cinfo->args [n].reg = fr;
1255 FP_ALSO_IN_REG (gr += sizeof (double) / SIZEOF_REGISTER);
1256 ALWAYS_ON_STACK (stack_size += 8);
1258 cinfo->args [n].offset = PPC_STACK_PARAM_OFFSET + stack_size;
1259 cinfo->args [n].regtype = RegTypeBase;
1260 cinfo->args [n].reg = ppc_sp; /* in the caller*/
1266 g_error ("Can't trampoline 0x%x", sig->params [i]->type);
/* handle the sentinel when it sits after the last explicit param */
1271 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos)) {
1272 /* Prevent implicit arguments and sig_cookie from
1273 being passed in registers */
1274 gr = PPC_LAST_ARG_REG + 1;
1275 /* Emit the signature cookie just before the implicit arguments */
1276 add_general (&gr, &stack_size, &cinfo->sig_cookie, TRUE);
/* return-value placement */
1280 simpletype = mini_get_underlying_type (sig->ret);
1281 switch (simpletype->type) {
1282 case MONO_TYPE_BOOLEAN:
1287 case MONO_TYPE_CHAR:
1293 case MONO_TYPE_FNPTR:
1294 case MONO_TYPE_CLASS:
1295 case MONO_TYPE_OBJECT:
1296 case MONO_TYPE_SZARRAY:
1297 case MONO_TYPE_ARRAY:
1298 case MONO_TYPE_STRING:
1299 cinfo->ret.reg = ppc_r3;
1303 cinfo->ret.reg = ppc_r3;
/* FP results come back in f1 */
1307 cinfo->ret.reg = ppc_f1;
1308 cinfo->ret.regtype = RegTypeFP;
1310 case MONO_TYPE_GENERICINST:
1311 if (!mono_type_generic_inst_is_valuetype (simpletype)) {
1312 cinfo->ret.reg = ppc_r3;
1316 case MONO_TYPE_VALUETYPE:
1318 case MONO_TYPE_TYPEDBYREF:
1319 case MONO_TYPE_VOID:
1322 g_error ("Can't handle as return value 0x%x", sig->ret->type);
1326 /* align stack size to 16 */
1327 DEBUG (printf (" stack size: %d (%d)\n", (stack_size + 15) & ~15, stack_size));
1328 stack_size = (stack_size + 15) & ~15;
1330 cinfo->stack_usage = stack_size;
/*
 * mono_arch_tail_call_supported:
 *
 * Return whether a tail call from CALLER_SIG to CALLEE_SIG is safe on
 * this backend: the callee's stack usage must fit in the caller's
 * frame, and struct returns / struct-by-address arguments (which pass
 * an address inside the callee's own frame) disqualify it.
 * NOTE(review): the g_free of c1/c2 and the final return are in lines
 * missing from this chunk — a leak cannot be ruled in or out from here.
 */
1335 mono_arch_tail_call_supported (MonoCompile *cfg, MonoMethodSignature *caller_sig, MonoMethodSignature *callee_sig)
1341 c1 = get_call_info (caller_sig);
1342 c2 = get_call_info (callee_sig);
1343 res = c1->stack_usage >= c2->stack_usage;
1344 if (callee_sig->ret && MONO_TYPE_ISSTRUCT (callee_sig->ret))
1345 /* An address on the callee's stack is passed as the first argument */
1347 for (i = 0; i < c2->nargs; ++i) {
1348 if (c2->args [i].regtype == RegTypeStructByAddr)
1349 /* An address on the callee's stack is passed as the argument */
1354 if (!mono_debug_count ())
/*
 * mono_arch_allocate_vars:
 *
 *   Decide where every variable of the method lives: a fixed register for
 * the return value (r3 for integers/structs-by-addr, f1 for floats) and
 * frame-relative OP_REGOFFSET slots for locals, spilled incoming arguments
 * and the vtype return-address var.  Offsets grow upwards from the ABI
 * linkage area (MONO_CFG_HAS_SPILLUP).
 *
 * NOTE(review): the embedded original line numbers jump in places, so some
 * statements of this function are not visible here; the comments below only
 * describe the code that is present.
 */
1365 * Set var information according to the calling convention. ppc version.
1366 * The locals var stuff should most likely be split in another method.
1369 mono_arch_allocate_vars (MonoCompile *m)
1371 MonoMethodSignature *sig;
1372 MonoMethodHeader *header;
1374 int i, offset, size, align, curinst;
1375 int frame_reg = ppc_sp;
1377 guint32 locals_stack_size, locals_stack_align;
1379 m->flags |= MONO_CFG_HAS_SPILLUP;
1381 /* allow room for the vararg method args: void* and long/double */
1382 if (mono_jit_trace_calls != NULL && mono_trace_eval (m->method))
1383 m->param_area = MAX (m->param_area, sizeof (gpointer)*8);
1384 /* this is bug #60332: remove when #59509 is fixed, so no weird vararg
1385 * call convs needs to be handled this way.
1387 if (m->flags & MONO_CFG_HAS_VARARGS)
1388 m->param_area = MAX (m->param_area, sizeof (gpointer)*8);
1389 /* gtk-sharp and other broken code will dllimport vararg functions even with
1390 * non-varargs signatures. Since there is little hope people will get this right
1391 * we assume they won't.
1393 if (m->method->wrapper_type == MONO_WRAPPER_MANAGED_TO_NATIVE)
1394 m->param_area = MAX (m->param_area, sizeof (gpointer)*8);
1399 * We use the frame register also for any method that has
1400 * exception clauses. This way, when the handlers are called,
1401 * the code will reference local variables using the frame reg instead of
1402 * the stack pointer: if we had to restore the stack pointer, we'd
1403 * corrupt the method frames that are already on the stack (since
1404 * filters get called before stack unwinding happens) when the filter
1405 * code would call any method (this also applies to finally etc.).
1407 if ((m->flags & MONO_CFG_HAS_ALLOCA) || header->num_clauses)
1408 frame_reg = ppc_r31;
1409 m->frame_reg = frame_reg;
1410 if (frame_reg != ppc_sp) {
/* r31 becomes callee-saved frame pointer; make sure the prolog saves it */
1411 m->used_int_regs |= 1 << frame_reg;
1414 sig = mono_method_signature (m->method);
/* Return value location: structs are returned via a hidden address in r3 */
1418 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
1419 m->ret->opcode = OP_REGVAR;
1420 m->ret->inst_c0 = m->ret->dreg = ppc_r3;
1422 /* FIXME: handle long values? */
1423 switch (mini_get_underlying_type (sig->ret)->type) {
1424 case MONO_TYPE_VOID:
/* floating point results come back in f1 */
1428 m->ret->opcode = OP_REGVAR;
1429 m->ret->inst_c0 = m->ret->dreg = ppc_f1;
/* everything else comes back in r3 */
1432 m->ret->opcode = OP_REGVAR;
1433 m->ret->inst_c0 = m->ret->dreg = ppc_r3;
1437 /* local vars are at a positive offset from the stack pointer */
1439 * also note that if the function uses alloca, we use ppc_r31
1440 * to point at the local variables.
1442 offset = PPC_MINIMAL_STACK_SIZE; /* linkage area */
1443 /* align the offset to 16 bytes: not sure this is needed here */
1445 //offset &= ~(16 - 1);
1447 /* add parameter area size for called functions */
1448 offset += m->param_area;
1450 offset &= ~(16 - 1);
1452 /* allow room to save the return value */
1453 if (mono_jit_trace_calls != NULL && mono_trace_eval (m->method))
1456 /* the MonoLMF structure is stored just below the stack pointer */
1459 /* this stuff should not be needed on ppc and the new jit,
1460 * because a call on ppc to the handlers doesn't change the
1461 * stack pointer and the jist doesn't manipulate the stack pointer
1462 * for operations involving valuetypes.
1464 /* reserve space to store the esp */
1465 offset += sizeof (gpointer);
1467 /* this is a global constant */
1468 mono_exc_esp_offset = offset;
/* Reserve a pointer-aligned slot holding the vtype return-buffer address */
1471 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
1472 offset += sizeof(gpointer) - 1;
1473 offset &= ~(sizeof(gpointer) - 1);
1475 m->vret_addr->opcode = OP_REGOFFSET;
1476 m->vret_addr->inst_basereg = frame_reg;
1477 m->vret_addr->inst_offset = offset;
1479 if (G_UNLIKELY (m->verbose_level > 1)) {
1480 printf ("vret_addr =");
1481 mono_print_ins (m->vret_addr);
1484 offset += sizeof(gpointer);
/* Lay out the locals using the generic stack-slot allocator */
1487 offsets = mono_allocate_stack_slots (m, FALSE, &locals_stack_size, &locals_stack_align);
1488 if (locals_stack_align) {
1489 offset += (locals_stack_align - 1);
1490 offset &= ~(locals_stack_align - 1);
1492 for (i = m->locals_start; i < m->num_varinfo; i++) {
1493 if (offsets [i] != -1) {
1494 MonoInst *inst = m->varinfo [i];
1495 inst->opcode = OP_REGOFFSET;
1496 inst->inst_basereg = frame_reg;
1497 inst->inst_offset = offset + offsets [i];
1499 g_print ("allocating local %d (%s) to %d\n",
1500 i, mono_type_get_name (inst->inst_vtype), inst->inst_offset);
1504 offset += locals_stack_size;
/* Spill slot for the 'this' argument when it was not register-allocated */
1508 inst = m->args [curinst];
1509 if (inst->opcode != OP_REGVAR) {
1510 inst->opcode = OP_REGOFFSET;
1511 inst->inst_basereg = frame_reg;
1512 offset += sizeof (gpointer) - 1;
1513 offset &= ~(sizeof (gpointer) - 1);
1514 inst->inst_offset = offset;
1515 offset += sizeof (gpointer);
/* Assign frame offsets to the remaining incoming arguments */
1520 for (i = 0; i < sig->param_count; ++i) {
1521 inst = m->args [curinst];
1522 if (inst->opcode != OP_REGVAR) {
1523 inst->opcode = OP_REGOFFSET;
1524 inst->inst_basereg = frame_reg;
/* pinvoke wrappers use the native (marshalled) size of the parameter */
1526 size = mono_type_native_stack_size (sig->params [i], (guint32*)&align);
1527 inst->backend.is_pinvoke = 1;
1529 size = mono_type_size (sig->params [i], &align);
1531 if (MONO_TYPE_ISSTRUCT (sig->params [i]) && size < sizeof (gpointer))
1532 size = align = sizeof (gpointer);
1534 * Use at least 4/8 byte alignment, since these might be passed in registers, and
1535 * they are saved using std in the prolog.
1537 align = sizeof (gpointer);
1538 offset += align - 1;
1539 offset &= ~(align - 1);
1540 inst->inst_offset = offset;
1546 /* some storage for fp conversions */
1549 m->arch.fp_conv_var_offset = offset;
1552 /* align the offset to 16 bytes */
1554 offset &= ~(16 - 1);
1557 m->stack_offset = offset;
/* Varargs: remember where the signature cookie lives on the stack */
1559 if (sig->call_convention == MONO_CALL_VARARG) {
1560 CallInfo *cinfo = get_call_info (m->method->signature);
1562 m->sig_cookie = cinfo->sig_cookie.offset;
/*
 * mono_arch_create_vars:
 *
 *   Create arch-specific variables before register allocation.  The only
 * one needed here is vret_addr, an OP_ARG holding the hidden return-buffer
 * address for value-type returns.
 */
1569 mono_arch_create_vars (MonoCompile *cfg)
1571 MonoMethodSignature *sig = mono_method_signature (cfg->method);
1573 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
1574 cfg->vret_addr = mono_compile_create_var (cfg, &mono_defaults.int_class->byval_arg, OP_ARG);
1578 /* Fixme: we need an alignment solution for enter_method and mono_arch_call_opcode,
1579 * currently alignment in mono_arch_call_opcode is computed without arch_get_argument_info
/*
 * emit_sig_cookie:
 *
 *   For vararg calls, store the MonoMethodSignature pointer (the "signature
 * cookie") at its assigned offset in the outgoing parameter area (relative
 * to r1/sp) so the callee can decode the variable arguments.
 */
1583 emit_sig_cookie (MonoCompile *cfg, MonoCallInst *call, CallInfo *cinfo)
1585 int sig_reg = mono_alloc_ireg (cfg);
1587 /* FIXME: Add support for signature tokens to AOT */
1588 cfg->disable_aot = TRUE;
1590 MONO_EMIT_NEW_ICONST (cfg, sig_reg, (gulong)call->signature);
1591 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG,
1592 ppc_r1, cinfo->sig_cookie.offset, sig_reg);
/*
 * mono_arch_emit_call:
 *
 *   Lower a managed call: move each argument into the location assigned by
 * get_call_info () — general register (RegTypeGeneral), FP register
 * (RegTypeFP), outgoing stack slot relative to r1 (RegTypeBase), or an
 * OP_OUTARG_VT marker for value types, which mono_arch_emit_outarg_vt ()
 * expands later.  Also emits the vararg signature cookie, passes the vtype
 * return-buffer address, and records the call's stack usage on the cfg.
 *
 * NOTE(review): several statements are missing from this extraction
 * (original line numbers jump); comments describe only the visible code.
 */
1596 mono_arch_emit_call (MonoCompile *cfg, MonoCallInst *call)
1599 MonoMethodSignature *sig;
1603 sig = call->signature;
1604 n = sig->param_count + sig->hasthis;
1606 cinfo = get_call_info (sig);
1608 for (i = 0; i < n; ++i) {
1609 ArgInfo *ainfo = cinfo->args + i;
/* argument 0 is the implicit 'this' when sig->hasthis is set */
1612 if (i >= sig->hasthis)
1613 t = sig->params [i - sig->hasthis];
1615 t = &mono_defaults.int_class->byval_arg;
1616 t = mini_get_underlying_type (t);
/* the cookie goes right before the first vararg */
1618 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos))
1619 emit_sig_cookie (cfg, call, cinfo);
1621 in = call->args [i];
1623 if (ainfo->regtype == RegTypeGeneral) {
1624 #ifndef __mono_ppc64__
/* on 32-bit ppc a 64-bit argument occupies a register pair */
1625 if (!t->byref && ((t->type == MONO_TYPE_I8) || (t->type == MONO_TYPE_U8))) {
1626 MONO_INST_NEW (cfg, ins, OP_MOVE);
1627 ins->dreg = mono_alloc_ireg (cfg);
1628 ins->sreg1 = MONO_LVREG_LS (in->dreg);
1629 MONO_ADD_INS (cfg->cbb, ins);
1630 mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, ainfo->reg + 1, FALSE);
1632 MONO_INST_NEW (cfg, ins, OP_MOVE);
1633 ins->dreg = mono_alloc_ireg (cfg);
1634 ins->sreg1 = MONO_LVREG_MS (in->dreg);
1635 MONO_ADD_INS (cfg->cbb, ins);
1636 mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, ainfo->reg, FALSE);
1640 MONO_INST_NEW (cfg, ins, OP_MOVE);
1641 ins->dreg = mono_alloc_ireg (cfg);
1642 ins->sreg1 = in->dreg;
1643 MONO_ADD_INS (cfg->cbb, ins);
1645 mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, ainfo->reg, FALSE);
1647 } else if (ainfo->regtype == RegTypeStructByAddr) {
/* value type passed by reference: defer to mono_arch_emit_outarg_vt () */
1648 MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1649 ins->opcode = OP_OUTARG_VT;
1650 ins->sreg1 = in->dreg;
1651 ins->klass = in->klass;
1652 ins->inst_p0 = call;
1653 ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1654 memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1655 MONO_ADD_INS (cfg->cbb, ins);
1656 } else if (ainfo->regtype == RegTypeStructByVal) {
1657 /* this is further handled in mono_arch_emit_outarg_vt () */
1658 MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1659 ins->opcode = OP_OUTARG_VT;
1660 ins->sreg1 = in->dreg;
1661 ins->klass = in->klass;
1662 ins->inst_p0 = call;
1663 ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1664 memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1665 MONO_ADD_INS (cfg->cbb, ins);
1666 } else if (ainfo->regtype == RegTypeFPStructByVal) {
1667 /* this is further handled in mono_arch_emit_outarg_vt () */
1668 MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1669 ins->opcode = OP_OUTARG_VT;
1670 ins->sreg1 = in->dreg;
1671 ins->klass = in->klass;
1672 ins->inst_p0 = call;
1673 ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1674 memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1675 MONO_ADD_INS (cfg->cbb, ins);
1676 cfg->flags |= MONO_CFG_HAS_FPOUT;
1677 } else if (ainfo->regtype == RegTypeBase) {
/* argument passed in the outgoing stack area, relative to r1/sp */
1678 if (!t->byref && ((t->type == MONO_TYPE_I8) || (t->type == MONO_TYPE_U8))) {
1679 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI8_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1680 } else if (!t->byref && ((t->type == MONO_TYPE_R4) || (t->type == MONO_TYPE_R8))) {
1681 if (t->type == MONO_TYPE_R8)
1682 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER8_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1684 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER4_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1686 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ppc_r1, ainfo->offset, in->dreg);
1688 } else if (ainfo->regtype == RegTypeFP) {
1689 if (t->type == MONO_TYPE_VALUETYPE) {
1690 /* this is further handled in mono_arch_emit_outarg_vt () */
1691 MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
1692 ins->opcode = OP_OUTARG_VT;
1693 ins->sreg1 = in->dreg;
1694 ins->klass = in->klass;
1695 ins->inst_p0 = call;
1696 ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
1697 memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
1698 MONO_ADD_INS (cfg->cbb, ins);
1700 cfg->flags |= MONO_CFG_HAS_FPOUT;
1702 int dreg = mono_alloc_freg (cfg);
/* narrow to single precision when the parameter slot is 4 bytes */
1704 if (ainfo->size == 4) {
1705 MONO_EMIT_NEW_UNALU (cfg, OP_FCONV_TO_R4, dreg, in->dreg);
1707 MONO_INST_NEW (cfg, ins, OP_FMOVE);
1709 ins->sreg1 = in->dreg;
1710 MONO_ADD_INS (cfg->cbb, ins);
1713 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg, TRUE);
1714 cfg->flags |= MONO_CFG_HAS_FPOUT;
1717 g_assert_not_reached ();
1721 /* Emit the signature cookie in the case that there is no
1722 additional argument */
1723 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (n == sig->sentinelpos))
1724 emit_sig_cookie (cfg, call, cinfo);
/* vtype return: pass the return buffer address in the designated register */
1726 if (cinfo->struct_ret) {
1729 MONO_INST_NEW (cfg, vtarg, OP_MOVE);
1730 vtarg->sreg1 = call->vret_var->dreg;
1731 vtarg->dreg = mono_alloc_preg (cfg);
1732 MONO_ADD_INS (cfg->cbb, vtarg);
1734 mono_call_inst_add_outarg_reg (cfg, call, vtarg->dreg, cinfo->struct_ret, FALSE);
1737 call->stack_usage = cinfo->stack_usage;
1738 cfg->param_area = MAX (PPC_MINIMAL_PARAM_AREA_SIZE, MAX (cfg->param_area, cinfo->stack_usage));
1739 cfg->flags |= MONO_CFG_HAS_CALLS;
/*
 * mono_arch_emit_outarg_vt:
 *
 *   Expand an OP_OUTARG_VT marker (created by mono_arch_emit_call ()) into
 * the loads/stores that place a value-type argument into integer registers,
 * FP registers and/or the stack parameter area, driven by the ArgInfo
 * stashed in ins->inst_p1.  For RegTypeStructByAddr a local copy is made
 * and its address is passed instead.
 *
 * NOTE(review): some statements are missing from this extraction (original
 * line numbers jump); comments describe only the visible code.
 */
1747 mono_arch_emit_outarg_vt (MonoCompile *cfg, MonoInst *ins, MonoInst *src)
1749 MonoCallInst *call = (MonoCallInst*)ins->inst_p0;
1750 ArgInfo *ainfo = ins->inst_p1;
1751 int ovf_size = ainfo->vtsize;
1752 int doffset = ainfo->offset;
1753 int i, soffset, dreg;
1755 if (ainfo->regtype == RegTypeStructByVal) {
1762 * Darwin pinvokes needs some special handling for 1
1763 * and 2 byte arguments
1765 g_assert (ins->klass);
1766 if (call->signature->pinvoke)
1767 size = mono_class_native_size (ins->klass, NULL);
1768 if (size == 2 || size == 1) {
/* small struct: load it sign-extended into a single register */
1769 int tmpr = mono_alloc_ireg (cfg);
1771 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI1_MEMBASE, tmpr, src->dreg, soffset);
1773 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI2_MEMBASE, tmpr, src->dreg, soffset);
1774 dreg = mono_alloc_ireg (cfg);
1775 MONO_EMIT_NEW_UNALU (cfg, OP_MOVE, dreg, tmpr);
1776 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg, FALSE);
/* copy the struct word by word into the argument registers */
1779 for (i = 0; i < ainfo->vtregs; ++i) {
1780 dreg = mono_alloc_ireg (cfg);
1781 #if G_BYTE_ORDER == G_BIG_ENDIAN
/* big endian: shift out the padding so the data sits in the low bytes */
1782 int antipadding = 0;
1785 antipadding = sizeof (gpointer) - ainfo->bytes;
1787 MONO_EMIT_NEW_LOAD_MEMBASE (cfg, dreg, src->dreg, soffset);
1789 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHR_UN_IMM, dreg, dreg, antipadding * 8);
1791 MONO_EMIT_NEW_LOAD_MEMBASE (cfg, dreg, src->dreg, soffset);
1793 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg + i, FALSE);
1794 soffset += sizeof (gpointer);
/* overflow part of the struct goes to the stack parameter area */
1797 mini_emit_memcpy (cfg, ppc_r1, doffset + soffset, src->dreg, soffset, ovf_size * sizeof (gpointer), 0);
1798 } else if (ainfo->regtype == RegTypeFPStructByVal) {
/* homogeneous float struct: one FP register per member */
1800 for (i = 0; i < ainfo->vtregs; ++i) {
1801 int tmpr = mono_alloc_freg (cfg);
1802 if (ainfo->size == 4)
1803 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR4_MEMBASE, tmpr, src->dreg, soffset);
1805 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR8_MEMBASE, tmpr, src->dreg, soffset);
1806 dreg = mono_alloc_freg (cfg);
1807 MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, dreg, tmpr);
1808 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg+i, TRUE);
1809 soffset += ainfo->size;
1812 mini_emit_memcpy (cfg, ppc_r1, doffset + soffset, src->dreg, soffset, ovf_size * sizeof (gpointer), 0);
1813 } else if (ainfo->regtype == RegTypeFP) {
/* struct passed entirely in one FP register */
1814 int tmpr = mono_alloc_freg (cfg);
1815 if (ainfo->size == 4)
1816 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR4_MEMBASE, tmpr, src->dreg, 0);
1818 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR8_MEMBASE, tmpr, src->dreg, 0);
1819 dreg = mono_alloc_freg (cfg);
1820 MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, dreg, tmpr);
1821 mono_call_inst_add_outarg_reg (cfg, call, dreg, ainfo->reg, TRUE);
/* by-address: copy into a fresh local and pass the copy's address */
1823 MonoInst *vtcopy = mono_compile_create_var (cfg, &src->klass->byval_arg, OP_LOCAL);
1827 /* FIXME: alignment? */
1828 if (call->signature->pinvoke) {
1829 size = mono_type_native_stack_size (&src->klass->byval_arg, NULL);
1830 vtcopy->backend.is_pinvoke = 1;
1832 size = mini_type_stack_size (&src->klass->byval_arg, NULL);
1835 g_assert (ovf_size > 0);
1837 EMIT_NEW_VARLOADA (cfg, load, vtcopy, vtcopy->inst_vtype);
1838 mini_emit_memcpy (cfg, load->dreg, 0, src->dreg, 0, size, 0);
1841 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ppc_r1, ainfo->offset, load->dreg);
1843 mono_call_inst_add_outarg_reg (cfg, call, load->dreg, ainfo->reg, FALSE);
/*
 * mono_arch_emit_setret:
 *
 *   Move the computed return value into the method's return location:
 * a register pair via OP_SETLRET for 64-bit values on 32-bit ppc, an
 * FMOVE for floating point, or a plain MOVE otherwise.
 */
1848 mono_arch_emit_setret (MonoCompile *cfg, MonoMethod *method, MonoInst *val)
1850 MonoType *ret = mini_get_underlying_type (mono_method_signature (method)->ret);
1852 #ifndef __mono_ppc64__
1853 if (ret->type == MONO_TYPE_I8 || ret->type == MONO_TYPE_U8) {
/* 64-bit result lives in a vreg pair; SETLRET places both halves */
1856 MONO_INST_NEW (cfg, ins, OP_SETLRET);
1857 ins->sreg1 = MONO_LVREG_LS (val->dreg);
1858 ins->sreg2 = MONO_LVREG_MS (val->dreg);
1859 MONO_ADD_INS (cfg->cbb, ins);
1863 if (ret->type == MONO_TYPE_R8 || ret->type == MONO_TYPE_R4) {
1864 MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, cfg->ret->dreg, val->dreg);
1868 MONO_EMIT_NEW_UNALU (cfg, OP_MOVE, cfg->ret->dreg, val->dreg);
1871 /* FIXME: this is just a useless hint: fix the interface to include the opcode */
1873 mono_arch_is_inst_imm (gint64 imm)
1878 #endif /* DISABLE_JIT */
1881 * Allow tracing to work with this interface (with an optional argument)
/*
 * mono_arch_instrument_prolog:
 *
 *   Emit a call to the trace function 'func' at method entry, passing the
 * MonoMethod in r3.  Only the argument setup and the indirect-call setup
 * (load target into PPC_CALL_REG, move to LR) are visible here; the rest
 * of the function is missing from this extraction.
 */
1885 mono_arch_instrument_prolog (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments)
1889 ppc_load_ptr (code, ppc_r3, cfg->method);
1890 ppc_li (code, ppc_r4, 0); /* NULL ebp for now */
1891 ppc_load_func (code, PPC_CALL_REG, func);
1892 ppc_mtlr (code, PPC_CALL_REG);
/*
 * mono_arch_instrument_epilog_full:
 *
 *   Emit a call to the trace function 'func' at method exit.  The return
 * value (r3/r4 pair, r3, or f1 depending on the signature) is spilled to
 * the frame before the call and reloaded afterwards so tracing does not
 * clobber it.  save_mode selects what to save based on the return type.
 *
 * NOTE(review): several case labels and statements are missing from this
 * extraction; comments describe only the visible code.
 */
1906 mono_arch_instrument_epilog_full (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments, gboolean preserve_argument_registers)
1909 int save_mode = SAVE_NONE;
1911 MonoMethod *method = cfg->method;
1912 int rtype = mini_get_underlying_type (mono_method_signature (method)->ret)->type;
1913 int save_offset = PPC_STACK_PARAM_OFFSET + cfg->param_area;
/* make sure the code buffer has room for the instrumentation stub */
1917 offset = code - cfg->native_code;
1918 /* we need about 16 instructions */
1919 if (offset > (cfg->code_size - 16 * 4)) {
1920 cfg->code_size *= 2;
1921 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
1922 code = cfg->native_code + offset;
/* pick what to save across the trace call, from the return type */
1926 case MONO_TYPE_VOID:
1927 /* special case string .ctor icall */
1928 if (strcmp (".ctor", method->name) && method->klass == mono_defaults.string_class)
1929 save_mode = SAVE_ONE;
1931 save_mode = SAVE_NONE;
1933 #ifndef __mono_ppc64__
1936 save_mode = SAVE_TWO;
1941 save_mode = SAVE_FP;
1943 case MONO_TYPE_VALUETYPE:
1944 save_mode = SAVE_STRUCT;
1947 save_mode = SAVE_ONE;
/* spill the return value and set up the trace call arguments */
1951 switch (save_mode) {
1953 ppc_stw (code, ppc_r3, save_offset, cfg->frame_reg);
1954 ppc_stw (code, ppc_r4, save_offset + 4, cfg->frame_reg);
1955 if (enable_arguments) {
1956 ppc_mr (code, ppc_r5, ppc_r4);
1957 ppc_mr (code, ppc_r4, ppc_r3);
1961 ppc_stptr (code, ppc_r3, save_offset, cfg->frame_reg);
1962 if (enable_arguments) {
1963 ppc_mr (code, ppc_r4, ppc_r3);
1967 ppc_stfd (code, ppc_f1, save_offset, cfg->frame_reg);
1968 if (enable_arguments) {
1969 /* FIXME: what reg? */
1970 ppc_fmr (code, ppc_f3, ppc_f1);
1971 /* FIXME: use 8 byte load on PPC64 */
1972 ppc_lwz (code, ppc_r4, save_offset, cfg->frame_reg);
1973 ppc_lwz (code, ppc_r5, save_offset + 4, cfg->frame_reg);
1977 if (enable_arguments) {
1978 /* FIXME: get the actual address */
1979 ppc_mr (code, ppc_r4, ppc_r3);
1980 // FIXME: Support the new v2 ABI!
/* call the trace function through LR */
1988 ppc_load_ptr (code, ppc_r3, cfg->method);
1989 ppc_load_func (code, PPC_CALL_REG, func);
1990 ppc_mtlr (code, PPC_CALL_REG);
/* restore the saved return value */
1993 switch (save_mode) {
1995 ppc_lwz (code, ppc_r3, save_offset, cfg->frame_reg);
1996 ppc_lwz (code, ppc_r4, save_offset + 4, cfg->frame_reg);
1999 ppc_ldptr (code, ppc_r3, save_offset, cfg->frame_reg);
2002 ppc_lfd (code, ppc_f1, save_offset, cfg->frame_reg);
2012 * Conditional branches have a small offset, so if it is likely overflowed,
2013 * we do a branch to the end of the method (uncond branches have much larger
2014 * offsets) where we perform the conditional and jump back unconditionally.
2015 * It's slightly slower, since we add two uncond branches, but it's very simple
2016 * with the current patch implementation and such large methods are likely not
2017 * going to be perf critical anyway.
2022 const char *exception;
/*
 * EMIT_COND_BRANCH_FLAGS:
 *
 *   Emit a conditional branch (bc with BO/BI fields b0/b1) to the target
 * basic block.  If the estimated displacement may not fit in the bc
 * instruction's 16-bit field, record a MONO_PATCH_INFO_BB_OVF patch so a
 * trampoline-style long branch is generated instead; otherwise record a
 * plain MONO_PATCH_INFO_BB patch.
 *
 * NOTE(review): the range test
 *   `! ppc_is_imm16 (ppc_is_imm16 (br_disp - 1024))`
 * applies ppc_is_imm16 to its own boolean result, so this clause can never
 * be true — it looks like an accidental nested call; presumably
 * `!ppc_is_imm16 (br_disp - 1024)` was intended.  Verify against upstream
 * mono before changing (no lines are modified here).
 */
2029 #define EMIT_COND_BRANCH_FLAGS(ins,b0,b1) \
2030 if (0 && ins->inst_true_bb->native_offset) { \
2031 ppc_bc (code, (b0), (b1), (code - cfg->native_code + ins->inst_true_bb->native_offset) & 0xffff); \
2033 int br_disp = ins->inst_true_bb->max_offset - offset; \
2034 if (!ppc_is_imm16 (br_disp + 1024) || ! ppc_is_imm16 (ppc_is_imm16 (br_disp - 1024))) { \
2035 MonoOvfJump *ovfj = mono_mempool_alloc (cfg->mempool, sizeof (MonoOvfJump)); \
2036 ovfj->data.bb = ins->inst_true_bb; \
2037 ovfj->ip_offset = 0; \
2038 ovfj->b0_cond = (b0); \
2039 ovfj->b1_cond = (b1); \
2040 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB_OVF, ovfj); \
2043 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_true_bb); \
2044 ppc_bc (code, (b0), (b1), 0); \
2048 #define EMIT_COND_BRANCH(ins,cond) EMIT_COND_BRANCH_FLAGS(ins, branch_b0_table [(cond)], branch_b1_table [(cond)])
/*
 * EMIT_COND_SYSTEM_EXCEPTION_FLAGS:
 *
 *   Emit a conditional branch-and-link (bcl) to the exception-throwing
 * code for exc_name, recorded as a MONO_PATCH_INFO_EXC patch.  When the
 * displacement to cfg->bb_exit may overflow the bc 16-bit field, a
 * MONO_PATCH_INFO_EXC_OVF patch is recorded instead and bb_exit's
 * max_offset is grown for the extra trampoline code.
 *
 * NOTE(review): same suspicious nested call as in EMIT_COND_BRANCH_FLAGS:
 * `! ppc_is_imm16 (ppc_is_imm16 (br_disp - 1024))` tests a boolean and can
 * never be true; presumably `!ppc_is_imm16 (br_disp - 1024)` was intended.
 * Verify against upstream mono before changing (no lines modified here).
 */
2050 /* emit an exception if condition is fail
2052 * We assign the extra code used to throw the implicit exceptions
2053 * to cfg->bb_exit as far as the big branch handling is concerned
2055 #define EMIT_COND_SYSTEM_EXCEPTION_FLAGS(b0,b1,exc_name) \
2057 int br_disp = cfg->bb_exit->max_offset - offset; \
2058 if (!ppc_is_imm16 (br_disp + 1024) || ! ppc_is_imm16 (ppc_is_imm16 (br_disp - 1024))) { \
2059 MonoOvfJump *ovfj = mono_mempool_alloc (cfg->mempool, sizeof (MonoOvfJump)); \
2060 ovfj->data.exception = (exc_name); \
2061 ovfj->ip_offset = code - cfg->native_code; \
2062 ovfj->b0_cond = (b0); \
2063 ovfj->b1_cond = (b1); \
2064 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC_OVF, ovfj); \
2066 cfg->bb_exit->max_offset += 24; \
2068 mono_add_patch_info (cfg, code - cfg->native_code, \
2069 MONO_PATCH_INFO_EXC, exc_name); \
2070 ppc_bcl (code, (b0), (b1), 0); \
2074 #define EMIT_COND_SYSTEM_EXCEPTION(cond,exc_name) EMIT_COND_SYSTEM_EXCEPTION_FLAGS(branch_b0_table [(cond)], branch_b1_table [(cond)], (exc_name))
2077 mono_arch_peephole_pass_1 (MonoCompile *cfg, MonoBasicBlock *bb)
/*
 * normalize_opcode:
 *
 *   Collapse size-specific load/store/shift opcodes into their generic
 * counterparts so the peephole pass can match 32-bit and 64-bit variants
 * with a single rule.  MONO_PPC_32_64_CASE expands to the 32-bit or 64-bit
 * opcode depending on the target word size.
 */
2082 normalize_opcode (int opcode)
2085 #ifndef __mono_ilp32__
2086 case MONO_PPC_32_64_CASE (OP_LOADI4_MEMBASE, OP_LOADI8_MEMBASE):
2087 return OP_LOAD_MEMBASE;
2088 case MONO_PPC_32_64_CASE (OP_LOADI4_MEMINDEX, OP_LOADI8_MEMINDEX):
2089 return OP_LOAD_MEMINDEX;
2090 case MONO_PPC_32_64_CASE (OP_STOREI4_MEMBASE_REG, OP_STOREI8_MEMBASE_REG):
2091 return OP_STORE_MEMBASE_REG;
2092 case MONO_PPC_32_64_CASE (OP_STOREI4_MEMBASE_IMM, OP_STOREI8_MEMBASE_IMM):
2093 return OP_STORE_MEMBASE_IMM;
2094 case MONO_PPC_32_64_CASE (OP_STOREI4_MEMINDEX, OP_STOREI8_MEMINDEX):
2095 return OP_STORE_MEMINDEX;
2097 case MONO_PPC_32_64_CASE (OP_ISHR_IMM, OP_LSHR_IMM):
2099 case MONO_PPC_32_64_CASE (OP_ISHR_UN_IMM, OP_LSHR_UN_IMM):
2100 return OP_SHR_UN_IMM;
/*
 * mono_arch_peephole_pass_2:
 *
 *   Local (per basic block) peephole optimizations: strength-reduce
 * multiplications by constants, and eliminate redundant load/store and
 * move pairs by looking at the previous instruction (last_ins).
 */
2107 mono_arch_peephole_pass_2 (MonoCompile *cfg, MonoBasicBlock *bb)
2109 MonoInst *ins, *n, *last_ins = NULL;
2111 MONO_BB_FOR_EACH_INS_SAFE (bb, n, ins) {
2112 switch (normalize_opcode (ins->opcode)) {
2114 /* remove unnecessary multiplication with 1 */
2115 if (ins->inst_imm == 1) {
2116 if (ins->dreg != ins->sreg1) {
2117 ins->opcode = OP_MOVE;
2119 MONO_DELETE_INS (bb, ins);
/* multiplication by a power of two becomes a left shift */
2123 int power2 = mono_is_power_of_two (ins->inst_imm);
2125 ins->opcode = OP_SHL_IMM;
2126 ins->inst_imm = power2;
2130 case OP_LOAD_MEMBASE:
2132 * OP_STORE_MEMBASE_REG reg, offset(basereg)
2133 * OP_LOAD_MEMBASE offset(basereg), reg
2135 if (last_ins && normalize_opcode (last_ins->opcode) == OP_STORE_MEMBASE_REG &&
2136 ins->inst_basereg == last_ins->inst_destbasereg &&
2137 ins->inst_offset == last_ins->inst_offset) {
2138 if (ins->dreg == last_ins->sreg1) {
2139 MONO_DELETE_INS (bb, ins);
2142 //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
2143 ins->opcode = OP_MOVE;
2144 ins->sreg1 = last_ins->sreg1;
2148 * Note: reg1 must be different from the basereg in the second load
2149 * OP_LOAD_MEMBASE offset(basereg), reg1
2150 * OP_LOAD_MEMBASE offset(basereg), reg2
2152 * OP_LOAD_MEMBASE offset(basereg), reg1
2153 * OP_MOVE reg1, reg2
2155 } else if (last_ins && normalize_opcode (last_ins->opcode) == OP_LOAD_MEMBASE &&
2156 ins->inst_basereg != last_ins->dreg &&
2157 ins->inst_basereg == last_ins->inst_basereg &&
2158 ins->inst_offset == last_ins->inst_offset) {
2160 if (ins->dreg == last_ins->dreg) {
2161 MONO_DELETE_INS (bb, ins);
2164 ins->opcode = OP_MOVE;
2165 ins->sreg1 = last_ins->dreg;
2168 //g_assert_not_reached ();
2172 * OP_STORE_MEMBASE_IMM imm, offset(basereg)
2173 * OP_LOAD_MEMBASE offset(basereg), reg
2175 * OP_STORE_MEMBASE_IMM imm, offset(basereg)
2176 * OP_ICONST reg, imm
2178 } else if (last_ins && normalize_opcode (last_ins->opcode) == OP_STORE_MEMBASE_IMM &&
2179 ins->inst_basereg == last_ins->inst_destbasereg &&
2180 ins->inst_offset == last_ins->inst_offset) {
2181 //static int c = 0; printf ("MATCHX %s %d\n", cfg->method->name,c++);
2182 ins->opcode = OP_ICONST;
2183 ins->inst_c0 = last_ins->inst_imm;
2184 g_assert_not_reached (); // check this rule
/* narrow load after a same-location narrow store becomes a conversion */
2188 case OP_LOADU1_MEMBASE:
2189 case OP_LOADI1_MEMBASE:
2190 if (last_ins && (last_ins->opcode == OP_STOREI1_MEMBASE_REG) &&
2191 ins->inst_basereg == last_ins->inst_destbasereg &&
2192 ins->inst_offset == last_ins->inst_offset) {
2193 ins->opcode = (ins->opcode == OP_LOADI1_MEMBASE) ? OP_ICONV_TO_I1 : OP_ICONV_TO_U1;
2194 ins->sreg1 = last_ins->sreg1;
2197 case OP_LOADU2_MEMBASE:
2198 case OP_LOADI2_MEMBASE:
2199 if (last_ins && (last_ins->opcode == OP_STOREI2_MEMBASE_REG) &&
2200 ins->inst_basereg == last_ins->inst_destbasereg &&
2201 ins->inst_offset == last_ins->inst_offset) {
2202 ins->opcode = (ins->opcode == OP_LOADI2_MEMBASE) ? OP_ICONV_TO_I2 : OP_ICONV_TO_U2;
2203 ins->sreg1 = last_ins->sreg1;
2206 #ifdef __mono_ppc64__
2207 case OP_LOADU4_MEMBASE:
2208 case OP_LOADI4_MEMBASE:
2209 if (last_ins && (last_ins->opcode == OP_STOREI4_MEMBASE_REG) &&
2210 ins->inst_basereg == last_ins->inst_destbasereg &&
2211 ins->inst_offset == last_ins->inst_offset) {
2212 ins->opcode = (ins->opcode == OP_LOADI4_MEMBASE) ? OP_ICONV_TO_I4 : OP_ICONV_TO_U4;
2213 ins->sreg1 = last_ins->sreg1;
2218 ins->opcode = OP_MOVE;
/* drop self-moves and move/counter-move pairs */
2222 if (ins->dreg == ins->sreg1) {
2223 MONO_DELETE_INS (bb, ins);
2227 * OP_MOVE sreg, dreg
2228 * OP_MOVE dreg, sreg
2230 if (last_ins && last_ins->opcode == OP_MOVE &&
2231 ins->sreg1 == last_ins->dreg &&
2232 ins->dreg == last_ins->sreg1) {
2233 MONO_DELETE_INS (bb, ins);
2241 bb->last_ins = last_ins;
/*
 * mono_arch_decompose_opts:
 *
 *   Decompose opcodes the ppc backend cannot emit directly into simpler
 * instruction sequences, e.g. integer-to-double conversion via the
 * classic 0x43300000 magic-double trick through a stack slot (the slot at
 * cfg->arch.fp_conv_var_offset, reserved by mono_arch_allocate_vars).
 *
 * NOTE(review): some statements are missing from this extraction (original
 * line numbers jump); comments describe only the visible code.
 */
2245 mono_arch_decompose_opts (MonoCompile *cfg, MonoInst *ins)
2247 switch (ins->opcode) {
2248 case OP_ICONV_TO_R_UN: {
2249 // This value is OK as-is for both big and little endian because of how it is stored
2250 static const guint64 adjust_val = 0x4330000000000000ULL;
2251 int msw_reg = mono_alloc_ireg (cfg);
2252 int adj_reg = mono_alloc_freg (cfg);
2253 int tmp_reg = mono_alloc_freg (cfg);
2254 int basereg = ppc_sp;
2256 MONO_EMIT_NEW_ICONST (cfg, msw_reg, 0x43300000);
/* use a temp base register when the slot offset doesn't fit in imm16 */
2257 if (!ppc_is_imm16 (offset + 4)) {
2258 basereg = mono_alloc_ireg (cfg);
2259 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IADD_IMM, basereg, cfg->frame_reg, offset);
2261 #if G_BYTE_ORDER == G_BIG_ENDIAN
2262 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset, msw_reg);
2263 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset + 4, ins->sreg1);
2265 // For little endian the words are reversed
2266 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset + 4, msw_reg);
2267 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset, ins->sreg1);
/* reload the composed double and subtract the bias to get the value */
2269 MONO_EMIT_NEW_LOAD_R8 (cfg, adj_reg, &adjust_val);
2270 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR8_MEMBASE, tmp_reg, basereg, offset);
2271 MONO_EMIT_NEW_BIALU (cfg, OP_FSUB, ins->dreg, tmp_reg, adj_reg);
2272 ins->opcode = OP_NOP;
2275 #ifndef __mono_ppc64__
2276 case OP_ICONV_TO_R4:
2277 case OP_ICONV_TO_R8: {
2278 /* If we have a PPC_FEATURE_64 machine we can avoid
2279 this and use the fcfid instruction. Otherwise
2280 on an old 32-bit chip and we have to do this the
2282 if (!(cpu_hw_caps & PPC_ISA_64)) {
2283 /* FIXME: change precision for CEE_CONV_R4 */
2284 static const guint64 adjust_val = 0x4330000080000000ULL;
2285 int msw_reg = mono_alloc_ireg (cfg);
2286 int xored = mono_alloc_ireg (cfg);
2287 int adj_reg = mono_alloc_freg (cfg);
2288 int tmp_reg = mono_alloc_freg (cfg);
2289 int basereg = ppc_sp;
2291 if (!ppc_is_imm16 (offset + 4)) {
2292 basereg = mono_alloc_ireg (cfg);
2293 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IADD_IMM, basereg, cfg->frame_reg, offset);
/* xor with 0x80000000 turns the signed int into a biased unsigned one */
2295 MONO_EMIT_NEW_ICONST (cfg, msw_reg, 0x43300000);
2296 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset, msw_reg);
2297 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_XOR_IMM, xored, ins->sreg1, 0x80000000);
2298 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, basereg, offset + 4, xored);
2299 MONO_EMIT_NEW_LOAD_R8 (cfg, adj_reg, (gpointer)&adjust_val);
2300 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADR8_MEMBASE, tmp_reg, basereg, offset);
2301 MONO_EMIT_NEW_BIALU (cfg, OP_FSUB, ins->dreg, tmp_reg, adj_reg);
2302 if (ins->opcode == OP_ICONV_TO_R4)
2303 MONO_EMIT_NEW_UNALU (cfg, OP_FCONV_TO_R4, ins->dreg, ins->dreg);
2304 ins->opcode = OP_NOP;
/* check-finite: inspect the high word of the double through memory */
2310 int msw_reg = mono_alloc_ireg (cfg);
2311 int basereg = ppc_sp;
2313 if (!ppc_is_imm16 (offset + 4)) {
2314 basereg = mono_alloc_ireg (cfg);
2315 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_IADD_IMM, basereg, cfg->frame_reg, offset);
2317 MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER8_MEMBASE_REG, basereg, offset, ins->sreg1);
2318 #if G_BYTE_ORDER == G_BIG_ENDIAN
2319 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI4_MEMBASE, msw_reg, basereg, offset);
2321 MONO_EMIT_NEW_LOAD_MEMBASE_OP (cfg, OP_LOADI4_MEMBASE, msw_reg, basereg, offset+4);
2323 MONO_EMIT_NEW_UNALU (cfg, OP_CHECK_FINITE, -1, msw_reg);
2324 MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, ins->dreg, ins->sreg1);
2325 ins->opcode = OP_NOP;
2328 #ifdef __mono_ppc64__
/* 32-bit overflow ops on ppc64: do the op in the upper word so the
 * 64-bit overflow detection applies, then shift the result back down */
2330 case OP_IADD_OVF_UN:
2332 int shifted1_reg = mono_alloc_ireg (cfg);
2333 int shifted2_reg = mono_alloc_ireg (cfg);
2334 int result_shifted_reg = mono_alloc_ireg (cfg);
2336 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHL_IMM, shifted1_reg, ins->sreg1, 32);
2337 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHL_IMM, shifted2_reg, ins->sreg2, 32);
2338 MONO_EMIT_NEW_BIALU (cfg, ins->opcode, result_shifted_reg, shifted1_reg, shifted2_reg);
2339 if (ins->opcode == OP_IADD_OVF_UN)
2340 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHR_UN_IMM, ins->dreg, result_shifted_reg, 32);
2342 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_SHR_IMM, ins->dreg, result_shifted_reg, 32);
2343 ins->opcode = OP_NOP;
/*
 * mono_arch_decompose_long_opts:
 *
 *   On 32-bit ppc, decompose 64-bit operations on vreg pairs into
 * low-word/high-word instruction pairs: the low-word op sets the carry
 * (ADDCC/SUBCC) and the high-word op consumes it and performs the
 * overflow check (*_CARRY variants).  LS/MS select the low/high halves
 * of the long vreg.
 */
2353 mono_arch_decompose_long_opts (MonoCompile *cfg, MonoInst *ins)
2355 switch (ins->opcode) {
2357 /* ADC sets the condition code */
2358 MONO_EMIT_NEW_BIALU (cfg, OP_ADDCC, MONO_LVREG_LS (ins->dreg), MONO_LVREG_LS (ins->sreg1), MONO_LVREG_LS (ins->sreg2));
2359 MONO_EMIT_NEW_BIALU (cfg, OP_ADD_OVF_CARRY, MONO_LVREG_MS (ins->dreg), MONO_LVREG_MS (ins->sreg1), MONO_LVREG_MS (ins->sreg2));
2362 case OP_LADD_OVF_UN:
2363 /* ADC sets the condition code */
2364 MONO_EMIT_NEW_BIALU (cfg, OP_ADDCC, MONO_LVREG_LS (ins->dreg), MONO_LVREG_LS (ins->sreg1), MONO_LVREG_LS (ins->sreg2));
2365 MONO_EMIT_NEW_BIALU (cfg, OP_ADD_OVF_UN_CARRY, MONO_LVREG_MS (ins->dreg), MONO_LVREG_MS (ins->sreg1), MONO_LVREG_MS (ins->sreg2));
2369 /* SBB sets the condition code */
2370 MONO_EMIT_NEW_BIALU (cfg, OP_SUBCC, MONO_LVREG_LS (ins->dreg), MONO_LVREG_LS (ins->sreg1), MONO_LVREG_LS (ins->sreg2));
2371 MONO_EMIT_NEW_BIALU (cfg, OP_SUB_OVF_CARRY, MONO_LVREG_MS (ins->dreg), MONO_LVREG_MS (ins->sreg1), MONO_LVREG_MS (ins->sreg2));
2374 case OP_LSUB_OVF_UN:
2375 /* SBB sets the condition code */
2376 MONO_EMIT_NEW_BIALU (cfg, OP_SUBCC, MONO_LVREG_LS (ins->dreg), MONO_LVREG_LS (ins->sreg1), MONO_LVREG_LS (ins->sreg2));
2377 MONO_EMIT_NEW_BIALU (cfg, OP_SUB_OVF_UN_CARRY, MONO_LVREG_MS (ins->dreg), MONO_LVREG_MS (ins->sreg1), MONO_LVREG_MS (ins->sreg2));
/* 64-bit negation: subfic low word from 0, subfze the high word */
2381 /* From gcc generated code */
2382 MONO_EMIT_NEW_BIALU_IMM (cfg, OP_PPC_SUBFIC, MONO_LVREG_LS (ins->dreg), MONO_LVREG_LS (ins->sreg1), 0);
2383 MONO_EMIT_NEW_UNALU (cfg, OP_PPC_SUBFZE, MONO_LVREG_MS (ins->dreg), MONO_LVREG_MS (ins->sreg1));
2392 * the branch_b0_table should maintain the order of these
2406 branch_b0_table [] = {
2421 branch_b1_table [] = {
/*
 * NEW_INS:
 *
 *   Allocate a fresh MonoInst with opcode 'op' into 'dest' and insert it
 * into the current basic block right after 'last_ins' (both captured from
 * the enclosing lowering-pass scope).
 */
2435 #define NEW_INS(cfg,dest,op) do { \
2436 MONO_INST_NEW((cfg), (dest), (op)); \
2437 mono_bblock_insert_after_ins (bb, last_ins, (dest)); \
/*
 * map_to_reg_reg_op:
 *
 *   Map an opcode that takes an immediate or a base+displacement operand
 * to its register-register counterpart: *_MEMBASE loads/stores become
 * *_MEMINDEX, *_MEMBASE_IMM stores become *_MEMBASE_REG, and remaining
 * immediate ALU opcodes go through mono_op_imm_to_op ().  Used by the
 * lowering pass when an immediate/displacement does not fit the
 * instruction encoding and must be materialized in a register.
 */
2441 map_to_reg_reg_op (int op)
2450 case OP_COMPARE_IMM:
2452 case OP_ICOMPARE_IMM:
2454 case OP_LCOMPARE_IMM:
2470 case OP_LOAD_MEMBASE:
2471 return OP_LOAD_MEMINDEX;
2472 case OP_LOADI4_MEMBASE:
2473 return OP_LOADI4_MEMINDEX;
2474 case OP_LOADU4_MEMBASE:
2475 return OP_LOADU4_MEMINDEX;
2476 case OP_LOADI8_MEMBASE:
2477 return OP_LOADI8_MEMINDEX;
2478 case OP_LOADU1_MEMBASE:
2479 return OP_LOADU1_MEMINDEX;
2480 case OP_LOADI2_MEMBASE:
2481 return OP_LOADI2_MEMINDEX;
2482 case OP_LOADU2_MEMBASE:
2483 return OP_LOADU2_MEMINDEX;
2484 case OP_LOADI1_MEMBASE:
2485 return OP_LOADI1_MEMINDEX;
2486 case OP_LOADR4_MEMBASE:
2487 return OP_LOADR4_MEMINDEX;
2488 case OP_LOADR8_MEMBASE:
2489 return OP_LOADR8_MEMINDEX;
2490 case OP_STOREI1_MEMBASE_REG:
2491 return OP_STOREI1_MEMINDEX;
2492 case OP_STOREI2_MEMBASE_REG:
2493 return OP_STOREI2_MEMINDEX;
2494 case OP_STOREI4_MEMBASE_REG:
2495 return OP_STOREI4_MEMINDEX;
2496 case OP_STOREI8_MEMBASE_REG:
2497 return OP_STOREI8_MEMINDEX;
2498 case OP_STORE_MEMBASE_REG:
2499 return OP_STORE_MEMINDEX;
2500 case OP_STORER4_MEMBASE_REG:
2501 return OP_STORER4_MEMINDEX;
2502 case OP_STORER8_MEMBASE_REG:
2503 return OP_STORER8_MEMINDEX;
/* immediate stores: keep the base addressing, put the value in a reg */
2504 case OP_STORE_MEMBASE_IMM:
2505 return OP_STORE_MEMBASE_REG;
2506 case OP_STOREI1_MEMBASE_IMM:
2507 return OP_STOREI1_MEMBASE_REG;
2508 case OP_STOREI2_MEMBASE_IMM:
2509 return OP_STOREI2_MEMBASE_REG;
2510 case OP_STOREI4_MEMBASE_IMM:
2511 return OP_STOREI4_MEMBASE_REG;
2512 case OP_STOREI8_MEMBASE_IMM:
2513 return OP_STOREI8_MEMBASE_REG;
/* fall back to the generic imm->reg opcode mapping */
2515 if (mono_op_imm_to_op (op) == -1)
2516 g_error ("mono_op_imm_to_op failed for %s\n", mono_inst_name (op));
2517 return mono_op_imm_to_op (op);
2520 //#define map_to_reg_reg_op(op) (cfg->new_ir? mono_op_imm_to_op (op): map_to_reg_reg_op (op))
/*
 * compare_opcode_is_unsigned:
 *
 *   True when the branch/compare/cond-exception opcode uses unsigned
 * comparison semantics, so the lowering/emission code selects cmpl
 * (logical compare) instead of cmp.
 */
2522 #define compare_opcode_is_unsigned(opcode) \
2523 (((opcode) >= CEE_BNE_UN && (opcode) <= CEE_BLT_UN) || \
2524 ((opcode) >= OP_IBNE_UN && (opcode) <= OP_IBLT_UN) || \
2525 ((opcode) >= OP_LBNE_UN && (opcode) <= OP_LBLT_UN) || \
2526 ((opcode) >= OP_COND_EXC_NE_UN && (opcode) <= OP_COND_EXC_LT_UN) || \
2527 ((opcode) >= OP_COND_EXC_INE_UN && (opcode) <= OP_COND_EXC_ILT_UN) || \
2528 ((opcode) == OP_CLT_UN || (opcode) == OP_CGT_UN || \
2529 (opcode) == OP_ICLT_UN || (opcode) == OP_ICGT_UN || \
2530 (opcode) == OP_LCLT_UN || (opcode) == OP_LCGT_UN))
2533 * Remove from the instruction list the instructions that can't be
2534 * represented with very simple instructions with no register
/*
 * mono_arch_lowering_pass:
 *
 *   Rewrite instructions in BB that the PPC code emitter cannot encode
 * directly — mostly immediate-operand forms whose constant does not fit
 * in a 16-bit immediate field.  The general pattern throughout is:
 * materialize the constant into a fresh vreg with OP_ICONST, point
 * sreg2 (or sreg1 for stores) at it, and switch the opcode to the
 * register-register form via map_to_reg_reg_op ().
 *
 * NOTE(review): this view of the file is missing lines (breaks, closing
 * braces, some case labels); the code is annotated as-is.  NEW_INS
 * presumably allocates a MonoInst and inserts it before `ins` — defined
 * elsewhere; TODO confirm.
 */
2538 mono_arch_lowering_pass (MonoCompile *cfg, MonoBasicBlock *bb)
2540 MonoInst *ins, *next, *temp, *last_ins = NULL;
2543 MONO_BB_FOR_EACH_INS (bb, ins) {
2545 switch (ins->opcode) {
/* div/rem with an immediate divisor: PPC has no such encoding, so the
 * divisor is always loaded into a register and the opcode switched to
 * the corresponding register form. */
2546 case OP_IDIV_UN_IMM:
2549 case OP_IREM_UN_IMM:
2550 CASE_PPC64 (OP_LREM_IMM) {
2551 NEW_INS (cfg, temp, OP_ICONST);
2552 temp->inst_c0 = ins->inst_imm;
2553 temp->dreg = mono_alloc_ireg (cfg);
2554 ins->sreg2 = temp->dreg;
2555 if (ins->opcode == OP_IDIV_IMM)
2556 ins->opcode = OP_IDIV;
2557 else if (ins->opcode == OP_IREM_IMM)
2558 ins->opcode = OP_IREM;
2559 else if (ins->opcode == OP_IDIV_UN_IMM)
2560 ins->opcode = OP_IDIV_UN;
2561 else if (ins->opcode == OP_IREM_UN_IMM)
2562 ins->opcode = OP_IREM_UN;
2563 else if (ins->opcode == OP_LREM_IMM)
2564 ins->opcode = OP_LREM;
2566 /* handle rem separately */
2571 CASE_PPC64 (OP_LREM)
2572 CASE_PPC64 (OP_LREM_UN) {
2574 /* we change a rem dest, src1, src2 to
2575 * div temp1, src1, src2
2576 * mul temp2, temp1, src2
2577 * sub dest, src1, temp2
2579 if (ins->opcode == OP_IREM || ins->opcode == OP_IREM_UN) {
2580 NEW_INS (cfg, mul, OP_IMUL);
2581 NEW_INS (cfg, temp, ins->opcode == OP_IREM? OP_IDIV: OP_IDIV_UN);
2582 ins->opcode = OP_ISUB;
2584 NEW_INS (cfg, mul, OP_LMUL);
2585 NEW_INS (cfg, temp, ins->opcode == OP_LREM? OP_LDIV: OP_LDIV_UN);
2586 ins->opcode = OP_LSUB;
/* wire up: temp = src1 / src2;  mul = temp * src2;  ins = src1 - mul */
2588 temp->sreg1 = ins->sreg1;
2589 temp->sreg2 = ins->sreg2;
2590 temp->dreg = mono_alloc_ireg (cfg);
2591 mul->sreg1 = temp->dreg;
2592 mul->sreg2 = ins->sreg2;
2593 mul->dreg = mono_alloc_ireg (cfg);
2594 ins->sreg2 = mul->dreg;
/* add/sub with immediate: keep the immediate form only if it fits in
 * 16 bits (negated for sub, since sub is emitted as addi of -imm). */
2598 CASE_PPC64 (OP_LADD_IMM)
2601 if (!ppc_is_imm16 (ins->inst_imm)) {
2602 NEW_INS (cfg, temp, OP_ICONST);
2603 temp->inst_c0 = ins->inst_imm;
2604 temp->dreg = mono_alloc_ireg (cfg);
2605 ins->sreg2 = temp->dreg;
2606 ins->opcode = map_to_reg_reg_op (ins->opcode);
2610 CASE_PPC64 (OP_LSUB_IMM)
2612 if (!ppc_is_imm16 (-ins->inst_imm)) {
2613 NEW_INS (cfg, temp, OP_ICONST);
2614 temp->inst_c0 = ins->inst_imm;
2615 temp->dreg = mono_alloc_ireg (cfg);
2616 ins->sreg2 = temp->dreg;
2617 ins->opcode = map_to_reg_reg_op (ins->opcode);
/* logical-immediate forms: usable only when the constant occupies just
 * the low or just the high 16 bits (andi./andis. style split). */
2629 gboolean is_imm = ((ins->inst_imm & 0xffff0000) && (ins->inst_imm & 0xffff));
2630 #ifdef __mono_ppc64__
2631 if (ins->inst_imm & 0xffffffff00000000ULL)
2635 NEW_INS (cfg, temp, OP_ICONST);
2636 temp->inst_c0 = ins->inst_imm;
2637 temp->dreg = mono_alloc_ireg (cfg);
2638 ins->sreg2 = temp->dreg;
2639 ins->opcode = map_to_reg_reg_op (ins->opcode);
2648 NEW_INS (cfg, temp, OP_ICONST);
2649 temp->inst_c0 = ins->inst_imm;
2650 temp->dreg = mono_alloc_ireg (cfg);
2651 ins->sreg2 = temp->dreg;
2652 ins->opcode = map_to_reg_reg_op (ins->opcode);
2654 case OP_COMPARE_IMM:
2655 case OP_ICOMPARE_IMM:
2656 CASE_PPC64 (OP_LCOMPARE_IMM)
2658 /* Branch opts can eliminate the branch */
2659 if (!next || (!(MONO_IS_COND_BRANCH_OP (next) || MONO_IS_COND_EXC (next) || MONO_IS_SETCC (next)))) {
2660 ins->opcode = OP_NOP;
/* signedness of the *consumer* decides the range check: cmpli takes an
 * unsigned 16-bit imm, cmpi a signed one. */
2664 if (compare_opcode_is_unsigned (next->opcode)) {
2665 if (!ppc_is_uimm16 (ins->inst_imm)) {
2666 NEW_INS (cfg, temp, OP_ICONST);
2667 temp->inst_c0 = ins->inst_imm;
2668 temp->dreg = mono_alloc_ireg (cfg);
2669 ins->sreg2 = temp->dreg;
2670 ins->opcode = map_to_reg_reg_op (ins->opcode);
2673 if (!ppc_is_imm16 (ins->inst_imm)) {
2674 NEW_INS (cfg, temp, OP_ICONST);
2675 temp->inst_c0 = ins->inst_imm;
2676 temp->dreg = mono_alloc_ireg (cfg);
2677 ins->sreg2 = temp->dreg;
2678 ins->opcode = map_to_reg_reg_op (ins->opcode);
/* multiply-by-immediate strength reduction: *1 -> move, *0 -> const 0,
 * power of two -> shift left. */
2684 if (ins->inst_imm == 1) {
2685 ins->opcode = OP_MOVE;
2688 if (ins->inst_imm == 0) {
2689 ins->opcode = OP_ICONST;
2693 imm = mono_is_power_of_two (ins->inst_imm);
2695 ins->opcode = OP_SHL_IMM;
2696 ins->inst_imm = imm;
2699 if (!ppc_is_imm16 (ins->inst_imm)) {
2700 NEW_INS (cfg, temp, OP_ICONST);
2701 temp->inst_c0 = ins->inst_imm;
2702 temp->dreg = mono_alloc_ireg (cfg);
2703 ins->sreg2 = temp->dreg;
2704 ins->opcode = map_to_reg_reg_op (ins->opcode);
2707 case OP_LOCALLOC_IMM:
/* localloc size goes through sreg1, not sreg2 */
2708 NEW_INS (cfg, temp, OP_ICONST);
2709 temp->inst_c0 = ins->inst_imm;
2710 temp->dreg = mono_alloc_ireg (cfg);
2711 ins->sreg1 = temp->dreg;
2712 ins->opcode = OP_LOCALLOC;
2714 case OP_LOAD_MEMBASE:
2715 case OP_LOADI4_MEMBASE:
2716 CASE_PPC64 (OP_LOADI8_MEMBASE)
2717 case OP_LOADU4_MEMBASE:
2718 case OP_LOADI2_MEMBASE:
2719 case OP_LOADU2_MEMBASE:
2720 case OP_LOADI1_MEMBASE:
2721 case OP_LOADU1_MEMBASE:
2722 case OP_LOADR4_MEMBASE:
2723 case OP_LOADR8_MEMBASE:
2724 case OP_STORE_MEMBASE_REG:
2725 CASE_PPC64 (OP_STOREI8_MEMBASE_REG)
2726 case OP_STOREI4_MEMBASE_REG:
2727 case OP_STOREI2_MEMBASE_REG:
2728 case OP_STOREI1_MEMBASE_REG:
2729 case OP_STORER4_MEMBASE_REG:
2730 case OP_STORER8_MEMBASE_REG:
2731 /* we can do two things: load the immed in a register
2732 * and use an indexed load, or see if the immed can be
2733 * represented as an ad_imm + a load with a smaller offset
2734 * that fits. We just do the first for now, optimize later.
2736 if (ppc_is_imm16 (ins->inst_offset))
2738 NEW_INS (cfg, temp, OP_ICONST);
2739 temp->inst_c0 = ins->inst_offset;
2740 temp->dreg = mono_alloc_ireg (cfg);
2741 ins->sreg2 = temp->dreg;
2742 ins->opcode = map_to_reg_reg_op (ins->opcode);
2744 case OP_STORE_MEMBASE_IMM:
2745 case OP_STOREI1_MEMBASE_IMM:
2746 case OP_STOREI2_MEMBASE_IMM:
2747 case OP_STOREI4_MEMBASE_IMM:
2748 CASE_PPC64 (OP_STOREI8_MEMBASE_IMM)
/* store-immediate: first turn the stored value into a register, then
 * re-run the switch so the (possibly big) offset is handled too. */
2749 NEW_INS (cfg, temp, OP_ICONST);
2750 temp->inst_c0 = ins->inst_imm;
2751 temp->dreg = mono_alloc_ireg (cfg);
2752 ins->sreg1 = temp->dreg;
2753 ins->opcode = map_to_reg_reg_op (ins->opcode);
2755 goto loop_start; /* make it handle the possibly big ins->inst_offset */
2758 if (cfg->compile_aot) {
2759 /* Keep these in the aot case */
/* R4CONST/R8CONST (presumably — case labels hidden in this view):
 * load the FP constant through a pointer in a GPR. */
2762 NEW_INS (cfg, temp, OP_ICONST);
2763 temp->inst_c0 = (gulong)ins->inst_p0;
2764 temp->dreg = mono_alloc_ireg (cfg);
2765 ins->inst_basereg = temp->dreg;
2766 ins->inst_offset = 0;
2767 ins->opcode = ins->opcode == OP_R4CONST? OP_LOADR4_MEMBASE: OP_LOADR8_MEMBASE;
2769 /* make it handle the possibly big ins->inst_offset
2770 * later optimize to use lis + load_membase
2776 bb->last_ins = last_ins;
2777 bb->max_vreg = cfg->next_vreg;
/*
 * emit_float_to_int:
 *
 *   Emit code converting FP register SREG to integer register DREG,
 * truncating toward zero (fctidz/fctiwz), then narrowing/extending to
 * SIZE bytes with the signedness given by IS_SIGNED.  The conversion
 * goes through a stack slot (cfg->arch.fp_conv_var_offset): store the
 * converted value with stfd, reload it into a GPR.  ppc_f0 is used as
 * a scratch FP register.  NOTE(review): lines are missing from this
 * view (e.g. the #else branches); annotated as-is.
 */
2781 emit_float_to_int (MonoCompile *cfg, guchar *code, int dreg, int sreg, int size, gboolean is_signed)
2783 long offset = cfg->arch.fp_conv_var_offset;
2785 /* sreg is a float, dreg is an integer reg. ppc_f0 is used a scratch */
2786 #ifdef __mono_ppc64__
2788 ppc_fctidz (code, ppc_f0, sreg);
2793 ppc_fctiwz (code, ppc_f0, sreg);
/* fast path: the stack slot offset fits in a 16-bit displacement */
2796 if (ppc_is_imm16 (offset + sub_offset)) {
2797 ppc_stfd (code, ppc_f0, offset, cfg->frame_reg);
2799 ppc_ldr (code, dreg, offset + sub_offset, cfg->frame_reg);
2801 ppc_lwz (code, dreg, offset + sub_offset, cfg->frame_reg);
/* slow path: compute the slot address in dreg first */
2803 ppc_load (code, dreg, offset);
2804 ppc_add (code, dreg, dreg, cfg->frame_reg);
2805 ppc_stfd (code, ppc_f0, 0, dreg);
2807 ppc_ldr (code, dreg, sub_offset, dreg);
2809 ppc_lwz (code, dreg, sub_offset, dreg);
/* unsigned results: mask down to SIZE bytes */
2813 ppc_andid (code, dreg, dreg, 0xff);
2815 ppc_andid (code, dreg, dreg, 0xffff);
2816 #ifdef __mono_ppc64__
2818 ppc_clrldi (code, dreg, dreg, 32);
/* signed results: sign-extend from SIZE bytes */
2822 ppc_extsb (code, dreg, dreg);
2824 ppc_extsh (code, dreg, dreg);
2825 #ifdef __mono_ppc64__
2827 ppc_extsw (code, dreg, dreg);
2835 const guchar *target;
2840 #define is_call_imm(diff) ((glong)(diff) >= -33554432 && (glong)(diff) <= 33554431)
/*
 * search_thunk_slot:
 *
 *   mono_domain_code_foreach () callback (32-bit PPC only; asserts on
 * ppc64).  Scans one code chunk for either an existing 16-byte thunk
 * (lis/ori/mtctr/bcctr) that already jumps to pdata->target, or a free
 * slot where a new one can be emitted, then patches pdata->code to
 * branch to that thunk.  A chunk is only usable if the patch site can
 * reach it with a 26-bit relative branch (is_call_imm).
 * NOTE(review): lines are missing from this view; annotated as-is.
 */
2843 search_thunk_slot (void *data, int csize, int bsize, void *user_data) {
2844 #ifdef __mono_ppc64__
2845 g_assert_not_reached ();
2847 PatchData *pdata = (PatchData*)user_data;
2848 guchar *code = data;
2849 guint32 *thunks = data;
2850 guint32 *endthunks = (guint32*)(code + bsize);
2854 int difflow, diffhigh;
2856 /* always ensure a call from pdata->code can reach to the thunks without further thunks */
2857 difflow = (char*)pdata->code - (char*)thunks;
2858 diffhigh = (char*)pdata->code - (char*)endthunks;
2859 if (!((is_call_imm (thunks) && is_call_imm (endthunks)) || (is_call_imm (difflow) && is_call_imm (diffhigh))))
/* build the expected thunk prefix for pdata->target so existing thunks
 * can be matched by comparing their first two instruction words */
2862 templ = (guchar*)load;
2863 ppc_load_sequence (templ, ppc_r0, pdata->target);
2865 //g_print ("thunk nentries: %d\n", ((char*)endthunks - (char*)thunks)/16);
2866 if ((pdata->found == 2) || (pdata->code >= code && pdata->code <= code + csize)) {
2867 while (thunks < endthunks) {
2868 //g_print ("looking for target: %p at %p (%08x-%08x)\n", pdata->target, thunks, thunks [0], thunks [1]);
2869 if ((thunks [0] == load [0]) && (thunks [1] == load [1])) {
/* reuse: an identical thunk already exists */
2870 ppc_patch (pdata->code, (guchar*)thunks);
2873 static int num_thunks = 0;
2875 if ((num_thunks % 20) == 0)
2876 g_print ("num_thunks lookup: %d\n", num_thunks);
2879 } else if ((thunks [0] == 0) && (thunks [1] == 0)) {
2880 /* found a free slot instead: emit thunk */
2881 code = (guchar*)thunks;
2882 ppc_lis (code, ppc_r0, (gulong)(pdata->target) >> 16);
2883 ppc_ori (code, ppc_r0, ppc_r0, (gulong)(pdata->target) & 0xffff);
2884 ppc_mtctr (code, ppc_r0);
2885 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
2886 mono_arch_flush_icache ((guchar*)thunks, 16);
2888 ppc_patch (pdata->code, (guchar*)thunks);
2891 static int num_thunks = 0;
2893 if ((num_thunks % 20) == 0)
2894 g_print ("num_thunks: %d\n", num_thunks);
2898 /* skip 16 bytes, the size of the thunk */
2902 //g_print ("failed thunk lookup for %p from %p at %p (%d entries)\n", pdata->target, pdata->code, data, count);
/*
 * handle_thunk:
 *
 *   Patch CODE to reach TARGET through a branch thunk when a direct
 * branch cannot span the distance.  Walks the domain's code chunks
 * twice under the domain lock: the first pass prefers a slot near the
 * patch site, the second (pdata.found == 2 path set between the two
 * calls — hidden in this view) takes the first available slot.
 * Aborts via g_assert if no slot could be found or created.
 */
2909 handle_thunk (int absolute, guchar *code, const guchar *target) {
2910 MonoDomain *domain = mono_domain_get ();
2914 pdata.target = target;
2915 pdata.absolute = absolute;
2918 mono_domain_lock (domain);
2919 mono_domain_code_foreach (domain, search_thunk_slot, &pdata);
2922 /* this uses the first available slot */
2924 mono_domain_code_foreach (domain, search_thunk_slot, &pdata);
2926 mono_domain_unlock (domain);
2928 if (pdata.found != 1)
2929 g_print ("thunk failed for %p from %p\n", target, code);
2930 g_assert (pdata.found == 1);
/*
 * patch_ins:
 *
 *   Overwrite the 4-byte instruction at CODE with INS and flush the
 * icache for that word so the CPU sees the new instruction.
 */
2934 patch_ins (guint8 *code, guint32 ins)
2936 *(guint32*)code = ins;
2937 mono_arch_flush_icache (code, 4);
/*
 * ppc_patch_full:
 *
 *   Retarget the branch/call at CODE to TARGET.  Decodes the primary
 * opcode (top 6 bits) and handles, in order:
 *   - I-form branches (primary opcode 18, `b`): rewrite as a relative
 *     branch if the +/-32MB displacement fits, else as an absolute
 *     branch (AA bit, `| 2`) if the target address fits, else fall back
 *     to handle_thunk ().  `ins & 1` preserves the LK (link) bit.
 *   - short-displacement forms (the li/diff 16-bit sections below —
 *     presumably B-form conditional branches; case labels are hidden
 *     in this view): assert the displacement fits in 16 bits.
 *   - indirect call sequences (primary opcode 15 = addis/lis, or the
 *     literal words 0x4e800021 blrl / 0x4e800020 blr / 0x4e800420
 *     bcctr): rewrite the lis/ori (...) load-address sequence in place.
 * IS_FD relates to ppc64 function descriptors (used with
 * mono_get_addr_from_ftnptr below).
 * NOTE(review): lines are missing from this view; annotated as-is.
 */
2941 ppc_patch_full (guchar *code, const guchar *target, gboolean is_fd)
2943 guint32 ins = *(guint32*)code;
2944 guint32 prim = ins >> 26;
2947 //g_print ("patching 0x%08x (0x%08x) to point to 0x%08x\n", code, ins, target);
2949 // prefer relative branches, they are more position independent (e.g. for AOT compilation).
2950 gint diff = target - code;
/* forward branch, fits in 26-bit signed displacement */
2953 if (diff <= 33554431){
2954 ins = (18 << 26) | (diff) | (ins & 1);
2955 patch_ins (code, ins);
2959 /* diff between 0 and -33554432 */
2960 if (diff >= -33554432){
2961 ins = (18 << 26) | (diff & ~0xfc000000) | (ins & 1);
2962 patch_ins (code, ins);
/* relative branch out of range: try an absolute branch (AA bit set) */
2967 if ((glong)target >= 0){
2968 if ((glong)target <= 33554431){
2969 ins = (18 << 26) | ((gulong) target) | (ins & 1) | 2;
2970 patch_ins (code, ins);
2974 if ((glong)target >= -33554432){
2975 ins = (18 << 26) | (((gulong)target) & ~0xfc000000) | (ins & 1) | 2;
2976 patch_ins (code, ins);
/* neither relative nor absolute reaches: go through a thunk */
2981 handle_thunk (TRUE, code, target);
2984 g_assert_not_reached ();
/* 16-bit-displacement branch, absolute form: keep opcode + AA/LK bits,
 * assert the address fits in the signed 16-bit field */
2992 guint32 li = (gulong)target;
2993 ins = (ins & 0xffff0000) | (ins & 3);
2994 ovf = li & 0xffff0000;
2995 if (ovf != 0 && ovf != 0xffff0000)
2996 g_assert_not_reached ();
2999 // FIXME: assert the top bits of li are 0
/* same, relative form */
3001 gint diff = target - code;
3002 ins = (ins & 0xffff0000) | (ins & 3);
3003 ovf = diff & 0xffff0000;
3004 if (ovf != 0 && ovf != 0xffff0000)
3005 g_assert_not_reached ();
3009 patch_ins (code, ins);
3013 if (prim == 15 || ins == 0x4e800021 || ins == 0x4e800020 || ins == 0x4e800420) {
3014 #ifdef __mono_ppc64__
3015 guint32 *seq = (guint32*)code;
3016 guint32 *branch_ins;
3018 /* the trampoline code will try to patch the blrl, blr, bcctr */
3019 if (ins == 0x4e800021 || ins == 0x4e800020 || ins == 0x4e800420) {
3021 if (ppc_is_load_op (seq [-3]) || ppc_opcode (seq [-3]) == 31) /* ld || lwz || mr */
3026 if (ppc_is_load_op (seq [5])
3027 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
3028 /* With function descs we need to do more careful
3030 || ppc_opcode (seq [5]) == 31 /* ld || lwz || mr */
3033 branch_ins = seq + 8;
3035 branch_ins = seq + 6;
3038 seq = (guint32*)code;
3039 /* this is the lis/ori/sldi/oris/ori/(ld/ld|mr/nop)/mtlr/blrl sequence */
3040 g_assert (mono_ppc_is_direct_call_sequence (branch_ins));
3042 if (ppc_is_load_op (seq [5])) {
3043 g_assert (ppc_is_load_op (seq [6]));
/* not a function-descriptor load: turn the slot into mr CALL_REG,r12 */
3046 guint8 *buf = (guint8*)&seq [5];
3047 ppc_mr (buf, PPC_CALL_REG, ppc_r12);
3052 target = mono_get_addr_from_ftnptr ((gpointer)target);
3055 /* FIXME: make this thread safe */
3056 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
3057 /* FIXME: we're assuming we're using r12 here */
3058 ppc_load_ptr_sequence (code, ppc_r12, target);
3060 ppc_load_ptr_sequence (code, PPC_CALL_REG, target);
3062 mono_arch_flush_icache ((guint8*)seq, 28);
/* 32-bit path */
3065 /* the trampoline code will try to patch the blrl, blr, bcctr */
3066 if (ins == 0x4e800021 || ins == 0x4e800020 || ins == 0x4e800420) {
3069 /* this is the lis/ori/mtlr/blrl sequence */
3070 seq = (guint32*)code;
3071 g_assert ((seq [0] >> 26) == 15);
3072 g_assert ((seq [1] >> 26) == 24);
3073 g_assert ((seq [2] >> 26) == 31);
3074 g_assert (seq [3] == 0x4e800021 || seq [3] == 0x4e800020 || seq [3] == 0x4e800420);
3075 /* FIXME: make this thread safe */
3076 ppc_lis (code, PPC_CALL_REG, (guint32)(target) >> 16);
3077 ppc_ori (code, PPC_CALL_REG, PPC_CALL_REG, (guint32)(target) & 0xffff);
3078 mono_arch_flush_icache (code - 8, 8);
3081 g_assert_not_reached ();
3083 // g_print ("patched with 0x%08x\n", ins);
/* ppc_patch: retarget the branch at CODE to TARGET; convenience
 * wrapper around ppc_patch_full () with is_fd == FALSE (no function
 * descriptor handling). */
3087 ppc_patch (guchar *code, const guchar *target)
3089 ppc_patch_full (code, target, FALSE);
/* mono_ppc_patch: exported entry point for patching a branch at CODE
 * to TARGET; delegates to the static ppc_patch (). */
3093 mono_ppc_patch (guchar *code, const guchar *target)
3095 ppc_patch (code, target);
/*
 * emit_move_return_value:
 *
 *   After a call instruction, move the ABI return value into the
 * instruction's dreg.  Only the FCALL case is visible in this view:
 * FP results arrive in ppc_f1 and are copied with fmr unless dreg is
 * already ppc_f1.  Other call opcodes are handled in hidden lines.
 */
3099 emit_move_return_value (MonoCompile *cfg, MonoInst *ins, guint8 *code)
3101 switch (ins->opcode) {
3104 case OP_FCALL_MEMBASE:
3105 if (ins->dreg != ppc_f1)
3106 ppc_fmr (code, ins->dreg, ppc_f1);
/* ins_native_length: return the maximum native code length (in bytes)
 * for INS, read from the opcode spec table's MONO_INST_LEN slot.  Used
 * by the emitter to size the code buffer. */
3114 ins_native_length (MonoCompile *cfg, MonoInst *ins)
3116 return ((guint8 *)ins_get_spec (ins->opcode))[MONO_INST_LEN];
/*
 * emit_reserve_param_area:
 *
 *   Grow the stack by cfg->param_area (rounded up to
 * MONO_ARCH_FRAME_ALIGNMENT) to make room for outgoing call
 * parameters.  Keeps the back chain valid: the saved SP at 0(r1) is
 * reloaded into r0 and re-stored with a store-with-update, which moves
 * SP down by `size` atomically with writing the back-chain word.  The
 * indexed form via r12 covers sizes that don't fit a 16-bit
 * displacement.
 */
3120 emit_reserve_param_area (MonoCompile *cfg, guint8 *code)
3122 long size = cfg->param_area;
3124 size += MONO_ARCH_FRAME_ALIGNMENT - 1;
3125 size &= -MONO_ARCH_FRAME_ALIGNMENT;
3130 ppc_ldptr (code, ppc_r0, 0, ppc_sp);
3131 if (ppc_is_imm16 (-size)) {
3132 ppc_stptr_update (code, ppc_r0, -size, ppc_sp);
3134 ppc_load (code, ppc_r12, -size);
3135 ppc_stptr_update_indexed (code, ppc_r0, ppc_sp, ppc_r12);
/*
 * emit_unreserve_param_area:
 *
 *   Inverse of emit_reserve_param_area (): shrink the stack by the
 * same aligned cfg->param_area amount, again using a back-chain reload
 * plus store-with-update so SP and the back-chain word stay consistent.
 */
3142 emit_unreserve_param_area (MonoCompile *cfg, guint8 *code)
3144 long size = cfg->param_area;
3146 size += MONO_ARCH_FRAME_ALIGNMENT - 1;
3147 size &= -MONO_ARCH_FRAME_ALIGNMENT;
3152 ppc_ldptr (code, ppc_r0, 0, ppc_sp);
3153 if (ppc_is_imm16 (size)) {
3154 ppc_stptr_update (code, ppc_r0, size, ppc_sp);
3156 ppc_load (code, ppc_r12, size);
3157 ppc_stptr_update_indexed (code, ppc_r0, ppc_sp, ppc_r12);
3163 #define MASK_SHIFT_IMM(i) ((i) & MONO_PPC_32_64_CASE (0x1f, 0x3f))
3167 mono_arch_output_basic_block (MonoCompile *cfg, MonoBasicBlock *bb)
3169 MonoInst *ins, *next;
3172 guint8 *code = cfg->native_code + cfg->code_len;
3173 MonoInst *last_ins = NULL;
3174 guint last_offset = 0;
3178 /* we don't align basic blocks of loops on ppc */
3180 if (cfg->verbose_level > 2)
3181 g_print ("Basic block %d starting at offset 0x%x\n", bb->block_num, bb->native_offset);
3183 cpos = bb->max_offset;
3185 if (cfg->prof_options & MONO_PROFILE_COVERAGE) {
3186 //MonoCoverageInfo *cov = mono_get_coverage_info (cfg->method);
3187 //g_assert (!mono_compile_aot);
3190 // cov->data [bb->dfn].iloffset = bb->cil_code - cfg->cil_code;
3191 /* this is not thread save, but good enough */
3192 /* fixme: howto handle overflows? */
3193 //x86_inc_mem (code, &cov->data [bb->dfn].count);
3196 MONO_BB_FOR_EACH_INS (bb, ins) {
3197 offset = code - cfg->native_code;
3199 max_len = ins_native_length (cfg, ins);
3201 if (offset > (cfg->code_size - max_len - 16)) {
3202 cfg->code_size *= 2;
3203 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
3204 code = cfg->native_code + offset;
3206 // if (ins->cil_code)
3207 // g_print ("cil code\n");
3208 mono_debug_record_line_number (cfg, ins, offset);
3210 switch (normalize_opcode (ins->opcode)) {
3211 case OP_RELAXED_NOP:
3214 case OP_DUMMY_STORE:
3215 case OP_NOT_REACHED:
3218 case OP_IL_SEQ_POINT:
3219 mono_add_seq_point (cfg, bb, ins, code - cfg->native_code);
3221 case OP_SEQ_POINT: {
3224 if (cfg->compile_aot)
3228 * Read from the single stepping trigger page. This will cause a
3229 * SIGSEGV when single stepping is enabled.
3230 * We do this _before_ the breakpoint, so single stepping after
3231 * a breakpoint is hit will step to the next IL offset.
3233 if (ins->flags & MONO_INST_SINGLE_STEP_LOC) {
3234 ppc_load (code, ppc_r12, (gsize)ss_trigger_page);
3235 ppc_ldptr (code, ppc_r12, 0, ppc_r12);
3238 mono_add_seq_point (cfg, bb, ins, code - cfg->native_code);
3241 * A placeholder for a possible breakpoint inserted by
3242 * mono_arch_set_breakpoint ().
3244 for (i = 0; i < BREAKPOINT_SIZE / 4; ++i)
3249 ppc_mullw (code, ppc_r0, ins->sreg1, ins->sreg2);
3250 ppc_mulhw (code, ppc_r3, ins->sreg1, ins->sreg2);
3251 ppc_mr (code, ppc_r4, ppc_r0);
3254 ppc_mullw (code, ppc_r0, ins->sreg1, ins->sreg2);
3255 ppc_mulhwu (code, ppc_r3, ins->sreg1, ins->sreg2);
3256 ppc_mr (code, ppc_r4, ppc_r0);
3258 case OP_MEMORY_BARRIER:
3261 case OP_STOREI1_MEMBASE_REG:
3262 if (ppc_is_imm16 (ins->inst_offset)) {
3263 ppc_stb (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3265 if (ppc_is_imm32 (ins->inst_offset)) {
3266 ppc_addis (code, ppc_r11, ins->inst_destbasereg, ppc_ha(ins->inst_offset));
3267 ppc_stb (code, ins->sreg1, ins->inst_offset, ppc_r11);
3269 ppc_load (code, ppc_r0, ins->inst_offset);
3270 ppc_stbx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3274 case OP_STOREI2_MEMBASE_REG:
3275 if (ppc_is_imm16 (ins->inst_offset)) {
3276 ppc_sth (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3278 if (ppc_is_imm32 (ins->inst_offset)) {
3279 ppc_addis (code, ppc_r11, ins->inst_destbasereg, ppc_ha(ins->inst_offset));
3280 ppc_sth (code, ins->sreg1, ins->inst_offset, ppc_r11);
3282 ppc_load (code, ppc_r0, ins->inst_offset);
3283 ppc_sthx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3287 case OP_STORE_MEMBASE_REG:
3288 if (ppc_is_imm16 (ins->inst_offset)) {
3289 ppc_stptr (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3291 if (ppc_is_imm32 (ins->inst_offset)) {
3292 ppc_addis (code, ppc_r11, ins->inst_destbasereg, ppc_ha(ins->inst_offset));
3293 ppc_stptr (code, ins->sreg1, ins->inst_offset, ppc_r11);
3295 ppc_load (code, ppc_r0, ins->inst_offset);
3296 ppc_stptr_indexed (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3300 #ifdef __mono_ilp32__
3301 case OP_STOREI8_MEMBASE_REG:
3302 if (ppc_is_imm16 (ins->inst_offset)) {
3303 ppc_str (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
3305 ppc_load (code, ppc_r0, ins->inst_offset);
3306 ppc_str_indexed (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
3310 case OP_STOREI1_MEMINDEX:
3311 ppc_stbx (code, ins->sreg1, ins->inst_destbasereg, ins->sreg2);
3313 case OP_STOREI2_MEMINDEX:
3314 ppc_sthx (code, ins->sreg1, ins->inst_destbasereg, ins->sreg2);
3316 case OP_STORE_MEMINDEX:
3317 ppc_stptr_indexed (code, ins->sreg1, ins->inst_destbasereg, ins->sreg2);
3320 g_assert_not_reached ();
3322 case OP_LOAD_MEMBASE:
3323 if (ppc_is_imm16 (ins->inst_offset)) {
3324 ppc_ldptr (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3326 if (ppc_is_imm32 (ins->inst_offset) && (ins->dreg > 0)) {
3327 ppc_addis (code, ins->dreg, ins->inst_basereg, ppc_ha(ins->inst_offset));
3328 ppc_ldptr (code, ins->dreg, ins->inst_offset, ins->dreg);
3330 ppc_load (code, ppc_r0, ins->inst_offset);
3331 ppc_ldptr_indexed (code, ins->dreg, ins->inst_basereg, ppc_r0);
3335 case OP_LOADI4_MEMBASE:
3336 #ifdef __mono_ppc64__
3337 if (ppc_is_imm16 (ins->inst_offset)) {
3338 ppc_lwa (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3340 if (ppc_is_imm32 (ins->inst_offset) && (ins->dreg > 0)) {
3341 ppc_addis (code, ins->dreg, ins->inst_basereg, ppc_ha(ins->inst_offset));
3342 ppc_lwa (code, ins->dreg, ins->inst_offset, ins->dreg);
3344 ppc_load (code, ppc_r0, ins->inst_offset);
3345 ppc_lwax (code, ins->dreg, ins->inst_basereg, ppc_r0);
3350 case OP_LOADU4_MEMBASE:
3351 if (ppc_is_imm16 (ins->inst_offset)) {
3352 ppc_lwz (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3354 if (ppc_is_imm32 (ins->inst_offset) && (ins->dreg > 0)) {
3355 ppc_addis (code, ins->dreg, ins->inst_basereg, ppc_ha(ins->inst_offset));
3356 ppc_lwz (code, ins->dreg, ins->inst_offset, ins->dreg);
3358 ppc_load (code, ppc_r0, ins->inst_offset);
3359 ppc_lwzx (code, ins->dreg, ins->inst_basereg, ppc_r0);
3363 case OP_LOADI1_MEMBASE:
3364 case OP_LOADU1_MEMBASE:
3365 if (ppc_is_imm16 (ins->inst_offset)) {
3366 ppc_lbz (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3368 if (ppc_is_imm32 (ins->inst_offset) && (ins->dreg > 0)) {
3369 ppc_addis (code, ins->dreg, ins->inst_basereg, ppc_ha(ins->inst_offset));
3370 ppc_lbz (code, ins->dreg, ins->inst_offset, ins->dreg);
3372 ppc_load (code, ppc_r0, ins->inst_offset);
3373 ppc_lbzx (code, ins->dreg, ins->inst_basereg, ppc_r0);
3376 if (ins->opcode == OP_LOADI1_MEMBASE)
3377 ppc_extsb (code, ins->dreg, ins->dreg);
3379 case OP_LOADU2_MEMBASE:
3380 if (ppc_is_imm16 (ins->inst_offset)) {
3381 ppc_lhz (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3383 if (ppc_is_imm32 (ins->inst_offset) && (ins->dreg > 0)) {
3384 ppc_addis (code, ins->dreg, ins->inst_basereg, ppc_ha(ins->inst_offset));
3385 ppc_lhz (code, ins->dreg, ins->inst_offset, ins->dreg);
3387 ppc_load (code, ppc_r0, ins->inst_offset);
3388 ppc_lhzx (code, ins->dreg, ins->inst_basereg, ppc_r0);
3392 case OP_LOADI2_MEMBASE:
3393 if (ppc_is_imm16 (ins->inst_offset)) {
3394 ppc_lha (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3396 if (ppc_is_imm32 (ins->inst_offset) && (ins->dreg > 0)) {
3397 ppc_addis (code, ins->dreg, ins->inst_basereg, ppc_ha(ins->inst_offset));
3398 ppc_lha (code, ins->dreg, ins->inst_offset, ins->dreg);
3400 ppc_load (code, ppc_r0, ins->inst_offset);
3401 ppc_lhax (code, ins->dreg, ins->inst_basereg, ppc_r0);
3405 #ifdef __mono_ilp32__
3406 case OP_LOADI8_MEMBASE:
3407 if (ppc_is_imm16 (ins->inst_offset)) {
3408 ppc_ldr (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
3410 ppc_load (code, ppc_r0, ins->inst_offset);
3411 ppc_ldr_indexed (code, ins->dreg, ins->inst_basereg, ppc_r0);
3415 case OP_LOAD_MEMINDEX:
3416 ppc_ldptr_indexed (code, ins->dreg, ins->inst_basereg, ins->sreg2);
3418 case OP_LOADI4_MEMINDEX:
3419 #ifdef __mono_ppc64__
3420 ppc_lwax (code, ins->dreg, ins->inst_basereg, ins->sreg2);
3423 case OP_LOADU4_MEMINDEX:
3424 ppc_lwzx (code, ins->dreg, ins->inst_basereg, ins->sreg2);
3426 case OP_LOADU2_MEMINDEX:
3427 ppc_lhzx (code, ins->dreg, ins->inst_basereg, ins->sreg2);
3429 case OP_LOADI2_MEMINDEX:
3430 ppc_lhax (code, ins->dreg, ins->inst_basereg, ins->sreg2);
3432 case OP_LOADU1_MEMINDEX:
3433 ppc_lbzx (code, ins->dreg, ins->inst_basereg, ins->sreg2);
3435 case OP_LOADI1_MEMINDEX:
3436 ppc_lbzx (code, ins->dreg, ins->inst_basereg, ins->sreg2);
3437 ppc_extsb (code, ins->dreg, ins->dreg);
3439 case OP_ICONV_TO_I1:
3440 CASE_PPC64 (OP_LCONV_TO_I1)
3441 ppc_extsb (code, ins->dreg, ins->sreg1);
3443 case OP_ICONV_TO_I2:
3444 CASE_PPC64 (OP_LCONV_TO_I2)
3445 ppc_extsh (code, ins->dreg, ins->sreg1);
3447 case OP_ICONV_TO_U1:
3448 CASE_PPC64 (OP_LCONV_TO_U1)
3449 ppc_clrlwi (code, ins->dreg, ins->sreg1, 24);
3451 case OP_ICONV_TO_U2:
3452 CASE_PPC64 (OP_LCONV_TO_U2)
3453 ppc_clrlwi (code, ins->dreg, ins->sreg1, 16);
3457 CASE_PPC64 (OP_LCOMPARE)
3458 L = (sizeof (mgreg_t) == 4 || ins->opcode == OP_ICOMPARE) ? 0 : 1;
3460 if (next && compare_opcode_is_unsigned (next->opcode))
3461 ppc_cmpl (code, 0, L, ins->sreg1, ins->sreg2);
3463 ppc_cmp (code, 0, L, ins->sreg1, ins->sreg2);
3465 case OP_COMPARE_IMM:
3466 case OP_ICOMPARE_IMM:
3467 CASE_PPC64 (OP_LCOMPARE_IMM)
3468 L = (sizeof (mgreg_t) == 4 || ins->opcode == OP_ICOMPARE_IMM) ? 0 : 1;
3470 if (next && compare_opcode_is_unsigned (next->opcode)) {
3471 if (ppc_is_uimm16 (ins->inst_imm)) {
3472 ppc_cmpli (code, 0, L, ins->sreg1, (ins->inst_imm & 0xffff));
3474 g_assert_not_reached ();
3477 if (ppc_is_imm16 (ins->inst_imm)) {
3478 ppc_cmpi (code, 0, L, ins->sreg1, (ins->inst_imm & 0xffff));
3480 g_assert_not_reached ();
3486 * gdb does not like encountering a trap in the debugged code. So
3487 * instead of emitting a trap, we emit a call a C function and place a
3491 ppc_mr (code, ppc_r3, ins->sreg1);
3492 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
3493 (gpointer)"mono_break");
3494 if ((FORCE_INDIR_CALL || cfg->method->dynamic) && !cfg->compile_aot) {
3495 ppc_load_func (code, PPC_CALL_REG, 0);
3496 ppc_mtlr (code, PPC_CALL_REG);
3504 ppc_addco (code, ins->dreg, ins->sreg1, ins->sreg2);
3507 CASE_PPC64 (OP_LADD)
3508 ppc_add (code, ins->dreg, ins->sreg1, ins->sreg2);
3512 ppc_adde (code, ins->dreg, ins->sreg1, ins->sreg2);
3515 if (ppc_is_imm16 (ins->inst_imm)) {
3516 ppc_addic (code, ins->dreg, ins->sreg1, ins->inst_imm);
3518 g_assert_not_reached ();
3523 CASE_PPC64 (OP_LADD_IMM)
3524 if (ppc_is_imm16 (ins->inst_imm)) {
3525 ppc_addi (code, ins->dreg, ins->sreg1, ins->inst_imm);
3527 g_assert_not_reached ();
3531 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3533 ppc_addo (code, ins->dreg, ins->sreg1, ins->sreg2);
3534 ppc_mfspr (code, ppc_r0, ppc_xer);
3535 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3536 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3538 case OP_IADD_OVF_UN:
3539 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3541 ppc_addco (code, ins->dreg, ins->sreg1, ins->sreg2);
3542 ppc_mfspr (code, ppc_r0, ppc_xer);
3543 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3544 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3547 CASE_PPC64 (OP_LSUB_OVF)
3548 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3550 ppc_subfo (code, ins->dreg, ins->sreg2, ins->sreg1);
3551 ppc_mfspr (code, ppc_r0, ppc_xer);
3552 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3553 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3555 case OP_ISUB_OVF_UN:
3556 CASE_PPC64 (OP_LSUB_OVF_UN)
3557 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3559 ppc_subfc (code, ins->dreg, ins->sreg2, ins->sreg1);
3560 ppc_mfspr (code, ppc_r0, ppc_xer);
3561 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3562 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE, PPC_BR_EQ, "OverflowException");
3564 case OP_ADD_OVF_CARRY:
3565 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3567 ppc_addeo (code, ins->dreg, ins->sreg1, ins->sreg2);
3568 ppc_mfspr (code, ppc_r0, ppc_xer);
3569 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3570 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3572 case OP_ADD_OVF_UN_CARRY:
3573 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3575 ppc_addeo (code, ins->dreg, ins->sreg1, ins->sreg2);
3576 ppc_mfspr (code, ppc_r0, ppc_xer);
3577 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3578 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3580 case OP_SUB_OVF_CARRY:
3581 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3583 ppc_subfeo (code, ins->dreg, ins->sreg2, ins->sreg1);
3584 ppc_mfspr (code, ppc_r0, ppc_xer);
3585 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3586 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3588 case OP_SUB_OVF_UN_CARRY:
3589 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
3591 ppc_subfeo (code, ins->dreg, ins->sreg2, ins->sreg1);
3592 ppc_mfspr (code, ppc_r0, ppc_xer);
3593 ppc_andisd (code, ppc_r0, ppc_r0, (1<<13));
3594 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE, PPC_BR_EQ, "OverflowException");
3598 ppc_subfco (code, ins->dreg, ins->sreg2, ins->sreg1);
3601 CASE_PPC64 (OP_LSUB)
3602 ppc_subf (code, ins->dreg, ins->sreg2, ins->sreg1);
3606 ppc_subfe (code, ins->dreg, ins->sreg2, ins->sreg1);
3610 CASE_PPC64 (OP_LSUB_IMM)
3611 // we add the negated value
3612 if (ppc_is_imm16 (-ins->inst_imm))
3613 ppc_addi (code, ins->dreg, ins->sreg1, -ins->inst_imm);
3615 g_assert_not_reached ();
3619 g_assert (ppc_is_imm16 (ins->inst_imm));
3620 ppc_subfic (code, ins->dreg, ins->sreg1, ins->inst_imm);
3623 ppc_subfze (code, ins->dreg, ins->sreg1);
3626 CASE_PPC64 (OP_LAND)
3627 /* FIXME: the ppc macros as inconsistent here: put dest as the first arg! */
3628 ppc_and (code, ins->sreg1, ins->dreg, ins->sreg2);
3632 CASE_PPC64 (OP_LAND_IMM)
3633 if (!(ins->inst_imm & 0xffff0000)) {
3634 ppc_andid (code, ins->sreg1, ins->dreg, ins->inst_imm);
3635 } else if (!(ins->inst_imm & 0xffff)) {
3636 ppc_andisd (code, ins->sreg1, ins->dreg, ((guint32)ins->inst_imm >> 16));
3638 g_assert_not_reached ();
3642 CASE_PPC64 (OP_LDIV) {
3643 guint8 *divisor_is_m1;
3644 /* XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3646 ppc_compare_reg_imm (code, 0, ins->sreg2, -1);
3647 divisor_is_m1 = code;
3648 ppc_bc (code, PPC_BR_FALSE | PPC_BR_LIKELY, PPC_BR_EQ, 0);
3649 ppc_lis (code, ppc_r0, 0x8000);
3650 #ifdef __mono_ppc64__
3651 if (ins->opcode == OP_LDIV)
3652 ppc_sldi (code, ppc_r0, ppc_r0, 32);
3654 ppc_compare (code, 0, ins->sreg1, ppc_r0);
3655 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_TRUE, PPC_BR_EQ, "OverflowException");
3656 ppc_patch (divisor_is_m1, code);
3657 /* XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3659 if (ins->opcode == OP_IDIV)
3660 ppc_divwod (code, ins->dreg, ins->sreg1, ins->sreg2);
3661 #ifdef __mono_ppc64__
3663 ppc_divdod (code, ins->dreg, ins->sreg1, ins->sreg2);
3665 ppc_mfspr (code, ppc_r0, ppc_xer);
3666 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3667 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "DivideByZeroException");
3671 CASE_PPC64 (OP_LDIV_UN)
3672 if (ins->opcode == OP_IDIV_UN)
3673 ppc_divwuod (code, ins->dreg, ins->sreg1, ins->sreg2);
3674 #ifdef __mono_ppc64__
3676 ppc_divduod (code, ins->dreg, ins->sreg1, ins->sreg2);
3678 ppc_mfspr (code, ppc_r0, ppc_xer);
3679 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3680 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "DivideByZeroException");
3686 g_assert_not_reached ();
3689 ppc_or (code, ins->dreg, ins->sreg1, ins->sreg2);
3693 CASE_PPC64 (OP_LOR_IMM)
3694 if (!(ins->inst_imm & 0xffff0000)) {
3695 ppc_ori (code, ins->sreg1, ins->dreg, ins->inst_imm);
3696 } else if (!(ins->inst_imm & 0xffff)) {
3697 ppc_oris (code, ins->dreg, ins->sreg1, ((guint32)(ins->inst_imm) >> 16));
3699 g_assert_not_reached ();
3703 CASE_PPC64 (OP_LXOR)
3704 ppc_xor (code, ins->dreg, ins->sreg1, ins->sreg2);
3708 CASE_PPC64 (OP_LXOR_IMM)
3709 if (!(ins->inst_imm & 0xffff0000)) {
3710 ppc_xori (code, ins->sreg1, ins->dreg, ins->inst_imm);
3711 } else if (!(ins->inst_imm & 0xffff)) {
3712 ppc_xoris (code, ins->sreg1, ins->dreg, ((guint32)(ins->inst_imm) >> 16));
3714 g_assert_not_reached ();
3718 CASE_PPC64 (OP_LSHL)
3719 ppc_shift_left (code, ins->dreg, ins->sreg1, ins->sreg2);
3723 CASE_PPC64 (OP_LSHL_IMM)
3724 ppc_shift_left_imm (code, ins->dreg, ins->sreg1, MASK_SHIFT_IMM (ins->inst_imm));
3727 ppc_sraw (code, ins->dreg, ins->sreg1, ins->sreg2);
3730 ppc_shift_right_arith_imm (code, ins->dreg, ins->sreg1, MASK_SHIFT_IMM (ins->inst_imm));
3733 if (MASK_SHIFT_IMM (ins->inst_imm))
3734 ppc_shift_right_imm (code, ins->dreg, ins->sreg1, MASK_SHIFT_IMM (ins->inst_imm));
3736 ppc_mr (code, ins->dreg, ins->sreg1);
3739 ppc_srw (code, ins->dreg, ins->sreg1, ins->sreg2);
3742 CASE_PPC64 (OP_LNOT)
3743 ppc_not (code, ins->dreg, ins->sreg1);
3746 CASE_PPC64 (OP_LNEG)
3747 ppc_neg (code, ins->dreg, ins->sreg1);
3750 CASE_PPC64 (OP_LMUL)
3751 ppc_multiply (code, ins->dreg, ins->sreg1, ins->sreg2);
3755 CASE_PPC64 (OP_LMUL_IMM)
3756 if (ppc_is_imm16 (ins->inst_imm)) {
3757 ppc_mulli (code, ins->dreg, ins->sreg1, ins->inst_imm);
3759 g_assert_not_reached ();
3763 CASE_PPC64 (OP_LMUL_OVF)
3764 /* we annot use mcrxr, since it's not implemented on some processors
3765 * XER format: SO, OV, CA, reserved [21 bits], count [8 bits]
3767 if (ins->opcode == OP_IMUL_OVF)
3768 ppc_mullwo (code, ins->dreg, ins->sreg1, ins->sreg2);
3769 #ifdef __mono_ppc64__
3771 ppc_mulldo (code, ins->dreg, ins->sreg1, ins->sreg2);
3773 ppc_mfspr (code, ppc_r0, ppc_xer);
3774 ppc_andisd (code, ppc_r0, ppc_r0, (1<<14));
3775 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, "OverflowException");
3777 case OP_IMUL_OVF_UN:
3778 CASE_PPC64 (OP_LMUL_OVF_UN)
3779 /* we first multiply to get the high word and compare to 0
3780 * to set the flags, then the result is discarded and then
3781 * we multiply to get the lower * bits result
3783 if (ins->opcode == OP_IMUL_OVF_UN)
3784 ppc_mulhwu (code, ppc_r0, ins->sreg1, ins->sreg2);
3785 #ifdef __mono_ppc64__
3787 ppc_mulhdu (code, ppc_r0, ins->sreg1, ins->sreg2);
3789 ppc_cmpi (code, 0, 0, ppc_r0, 0);
3790 EMIT_COND_SYSTEM_EXCEPTION (CEE_BNE_UN - CEE_BEQ, "OverflowException");
3791 ppc_multiply (code, ins->dreg, ins->sreg1, ins->sreg2);
3794 ppc_load (code, ins->dreg, ins->inst_c0);
3797 ppc_load (code, ins->dreg, ins->inst_l);
3800 case OP_LOAD_GOTADDR:
3801 /* The PLT implementation depends on this */
3802 g_assert (ins->dreg == ppc_r30);
3804 code = mono_arch_emit_load_got_addr (cfg->native_code, code, cfg, NULL);
3807 // FIXME: Fix max instruction length
3808 mono_add_patch_info (cfg, offset, (MonoJumpInfoType)ins->inst_right->inst_i1, ins->inst_right->inst_p0);
3809 /* arch_emit_got_access () patches this */
3810 ppc_load32 (code, ppc_r0, 0);
3811 ppc_ldptr_indexed (code, ins->dreg, ins->inst_basereg, ppc_r0);
3814 mono_add_patch_info (cfg, offset, (MonoJumpInfoType)ins->inst_i1, ins->inst_p0);
3815 ppc_load_sequence (code, ins->dreg, 0);
3817 CASE_PPC32 (OP_ICONV_TO_I4)
3818 CASE_PPC32 (OP_ICONV_TO_U4)
3820 if (ins->dreg != ins->sreg1)
3821 ppc_mr (code, ins->dreg, ins->sreg1);
3824 int saved = ins->sreg1;
3825 if (ins->sreg1 == ppc_r3) {
3826 ppc_mr (code, ppc_r0, ins->sreg1);
3829 if (ins->sreg2 != ppc_r3)
3830 ppc_mr (code, ppc_r3, ins->sreg2);
3831 if (saved != ppc_r4)
3832 ppc_mr (code, ppc_r4, saved);
3836 if (ins->dreg != ins->sreg1)
3837 ppc_fmr (code, ins->dreg, ins->sreg1);
3839 case OP_MOVE_F_TO_I4:
3840 ppc_stfs (code, ins->sreg1, -4, ppc_r1);
3841 ppc_ldptr (code, ins->dreg, -4, ppc_r1);
3843 case OP_MOVE_I4_TO_F:
3844 ppc_stw (code, ins->sreg1, -4, ppc_r1);
3845 ppc_lfs (code, ins->dreg, -4, ppc_r1);
3847 case OP_FCONV_TO_R4:
3848 ppc_frsp (code, ins->dreg, ins->sreg1);
3852 MonoCallInst *call = (MonoCallInst*)ins;
3855 * Keep in sync with mono_arch_emit_epilog
3857 g_assert (!cfg->method->save_lmf);
3859 * Note: we can use ppc_r12 here because it is dead anyway:
3860 * we're leaving the method.
3862 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
3863 long ret_offset = cfg->stack_usage + PPC_RET_ADDR_OFFSET;
3864 if (ppc_is_imm16 (ret_offset)) {
3865 ppc_ldptr (code, ppc_r0, ret_offset, cfg->frame_reg);
3867 ppc_load (code, ppc_r12, ret_offset);
3868 ppc_ldptr_indexed (code, ppc_r0, cfg->frame_reg, ppc_r12);
3870 ppc_mtlr (code, ppc_r0);
3873 if (ppc_is_imm16 (cfg->stack_usage)) {
3874 ppc_addi (code, ppc_r12, cfg->frame_reg, cfg->stack_usage);
3876 /* cfg->stack_usage is an int, so we can use
3877 * an addis/addi sequence here even in 64-bit. */
3878 ppc_addis (code, ppc_r12, cfg->frame_reg, ppc_ha(cfg->stack_usage));
3879 ppc_addi (code, ppc_r12, ppc_r12, cfg->stack_usage);
3881 if (!cfg->method->save_lmf) {
3883 for (i = 31; i >= 13; --i) {
3884 if (cfg->used_int_regs & (1 << i)) {
3885 pos += sizeof (gpointer);
3886 ppc_ldptr (code, i, -pos, ppc_r12);
3890 /* FIXME restore from MonoLMF: though this can't happen yet */
3893 /* Copy arguments on the stack to our argument area */
3894 if (call->stack_usage) {
3895 code = emit_memcpy (code, call->stack_usage, ppc_r12, PPC_STACK_PARAM_OFFSET, ppc_sp, PPC_STACK_PARAM_OFFSET);
3896 /* r12 was clobbered */
3897 g_assert (cfg->frame_reg == ppc_sp);
3898 if (ppc_is_imm16 (cfg->stack_usage)) {
3899 ppc_addi (code, ppc_r12, cfg->frame_reg, cfg->stack_usage);
3901 /* cfg->stack_usage is an int, so we can use
3902 * an addis/addi sequence here even in 64-bit. */
3903 ppc_addis (code, ppc_r12, cfg->frame_reg, ppc_ha(cfg->stack_usage));
3904 ppc_addi (code, ppc_r12, ppc_r12, cfg->stack_usage);
3908 ppc_mr (code, ppc_sp, ppc_r12);
3909 mono_add_patch_info (cfg, (guint8*) code - cfg->native_code, MONO_PATCH_INFO_METHOD_JUMP, call->method);
3910 if (cfg->compile_aot) {
3911 /* arch_emit_got_access () patches this */
3912 ppc_load32 (code, ppc_r0, 0);
3913 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
3914 ppc_ldptr_indexed (code, ppc_r12, ppc_r30, ppc_r0);
3915 ppc_ldptr (code, ppc_r0, 0, ppc_r12);
3917 ppc_ldptr_indexed (code, ppc_r0, ppc_r30, ppc_r0);
3919 ppc_mtctr (code, ppc_r0);
3920 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
3927 /* ensure ins->sreg1 is not NULL */
3928 ppc_ldptr (code, ppc_r0, 0, ins->sreg1);
3931 long cookie_offset = cfg->sig_cookie + cfg->stack_usage;
3932 if (ppc_is_imm16 (cookie_offset)) {
3933 ppc_addi (code, ppc_r0, cfg->frame_reg, cookie_offset);
3935 ppc_load (code, ppc_r0, cookie_offset);
3936 ppc_add (code, ppc_r0, cfg->frame_reg, ppc_r0);
3938 ppc_stptr (code, ppc_r0, 0, ins->sreg1);
3947 call = (MonoCallInst*)ins;
3948 if (ins->flags & MONO_INST_HAS_METHOD)
3949 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_METHOD, call->method);
3951 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_ABS, call->fptr);
3952 if ((FORCE_INDIR_CALL || cfg->method->dynamic) && !cfg->compile_aot) {
3953 ppc_load_func (code, PPC_CALL_REG, 0);
3954 ppc_mtlr (code, PPC_CALL_REG);
3959 /* FIXME: this should be handled somewhere else in the new jit */
3960 code = emit_move_return_value (cfg, ins, code);
3966 case OP_VOIDCALL_REG:
3968 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
3969 ppc_ldptr (code, ppc_r0, 0, ins->sreg1);
3970 /* FIXME: if we know that this is a method, we
3971 can omit this load */
3972 ppc_ldptr (code, ppc_r2, 8, ins->sreg1);
3973 ppc_mtlr (code, ppc_r0);
3975 #if (_CALL_ELF == 2)
3976 if (ins->flags & MONO_INST_HAS_METHOD) {
3977 // Not a global entry point
3979 // Need to set up r12 with function entry address for global entry point
3980 if (ppc_r12 != ins->sreg1) {
3981 ppc_mr(code,ppc_r12,ins->sreg1);
3985 ppc_mtlr (code, ins->sreg1);
3988 /* FIXME: this should be handled somewhere else in the new jit */
3989 code = emit_move_return_value (cfg, ins, code);
3991 case OP_FCALL_MEMBASE:
3992 case OP_LCALL_MEMBASE:
3993 case OP_VCALL_MEMBASE:
3994 case OP_VCALL2_MEMBASE:
3995 case OP_VOIDCALL_MEMBASE:
3996 case OP_CALL_MEMBASE:
3997 if (cfg->compile_aot && ins->sreg1 == ppc_r12) {
3998 /* The trampolines clobber this */
3999 ppc_mr (code, ppc_r29, ins->sreg1);
4000 ppc_ldptr (code, ppc_r0, ins->inst_offset, ppc_r29);
4002 ppc_ldptr (code, ppc_r0, ins->inst_offset, ins->sreg1);
4004 ppc_mtlr (code, ppc_r0);
4006 /* FIXME: this should be handled somewhere else in the new jit */
4007 code = emit_move_return_value (cfg, ins, code);
4010 guint8 * zero_loop_jump, * zero_loop_start;
4011 /* keep alignment */
4012 int alloca_waste = PPC_STACK_PARAM_OFFSET + cfg->param_area + 31;
4013 int area_offset = alloca_waste;
4015 ppc_addi (code, ppc_r12, ins->sreg1, alloca_waste + 31);
4016 /* FIXME: should be calculated from MONO_ARCH_FRAME_ALIGNMENT */
4017 ppc_clear_right_imm (code, ppc_r12, ppc_r12, 4);
4018 /* use ctr to store the number of words to 0 if needed */
4019 if (ins->flags & MONO_INST_INIT) {
4020 /* we zero 4 bytes at a time:
4021 * we add 7 instead of 3 so that we set the counter to
4022 * at least 1, otherwise the bdnz instruction will make
4023 * it negative and iterate billions of times.
4025 ppc_addi (code, ppc_r0, ins->sreg1, 7);
4026 ppc_shift_right_arith_imm (code, ppc_r0, ppc_r0, 2);
4027 ppc_mtctr (code, ppc_r0);
4029 ppc_ldptr (code, ppc_r0, 0, ppc_sp);
4030 ppc_neg (code, ppc_r12, ppc_r12);
4031 ppc_stptr_update_indexed (code, ppc_r0, ppc_sp, ppc_r12);
4033 /* FIXME: make this loop work in 8 byte
4034 increments on PPC64 */
4035 if (ins->flags & MONO_INST_INIT) {
4036 /* adjust the dest reg by -4 so we can use stwu */
4037 /* we actually adjust -8 because we let the loop
4040 ppc_addi (code, ins->dreg, ppc_sp, (area_offset - 8));
4041 ppc_li (code, ppc_r12, 0);
4042 zero_loop_start = code;
4043 ppc_stwu (code, ppc_r12, 4, ins->dreg);
4044 zero_loop_jump = code;
4045 ppc_bc (code, PPC_BR_DEC_CTR_NONZERO, 0, 0);
4046 ppc_patch (zero_loop_jump, zero_loop_start);
4048 ppc_addi (code, ins->dreg, ppc_sp, area_offset);
4053 ppc_mr (code, ppc_r3, ins->sreg1);
4054 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
4055 (gpointer)"mono_arch_throw_exception");
4056 if ((FORCE_INDIR_CALL || cfg->method->dynamic) && !cfg->compile_aot) {
4057 ppc_load_func (code, PPC_CALL_REG, 0);
4058 ppc_mtlr (code, PPC_CALL_REG);
4067 ppc_mr (code, ppc_r3, ins->sreg1);
4068 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
4069 (gpointer)"mono_arch_rethrow_exception");
4070 if ((FORCE_INDIR_CALL || cfg->method->dynamic) && !cfg->compile_aot) {
4071 ppc_load_func (code, PPC_CALL_REG, 0);
4072 ppc_mtlr (code, PPC_CALL_REG);
4079 case OP_START_HANDLER: {
4080 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
4081 g_assert (spvar->inst_basereg != ppc_sp);
4082 code = emit_reserve_param_area (cfg, code);
4083 ppc_mflr (code, ppc_r0);
4084 if (ppc_is_imm16 (spvar->inst_offset)) {
4085 ppc_stptr (code, ppc_r0, spvar->inst_offset, spvar->inst_basereg);
4087 ppc_load (code, ppc_r12, spvar->inst_offset);
4088 ppc_stptr_indexed (code, ppc_r0, ppc_r12, spvar->inst_basereg);
4092 case OP_ENDFILTER: {
4093 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
4094 g_assert (spvar->inst_basereg != ppc_sp);
4095 code = emit_unreserve_param_area (cfg, code);
4096 if (ins->sreg1 != ppc_r3)
4097 ppc_mr (code, ppc_r3, ins->sreg1);
4098 if (ppc_is_imm16 (spvar->inst_offset)) {
4099 ppc_ldptr (code, ppc_r0, spvar->inst_offset, spvar->inst_basereg);
4101 ppc_load (code, ppc_r12, spvar->inst_offset);
4102 ppc_ldptr_indexed (code, ppc_r0, spvar->inst_basereg, ppc_r12);
4104 ppc_mtlr (code, ppc_r0);
4108 case OP_ENDFINALLY: {
4109 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
4110 g_assert (spvar->inst_basereg != ppc_sp);
4111 code = emit_unreserve_param_area (cfg, code);
4112 ppc_ldptr (code, ppc_r0, spvar->inst_offset, spvar->inst_basereg);
4113 ppc_mtlr (code, ppc_r0);
4117 case OP_CALL_HANDLER:
4118 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_target_bb);
4120 mono_cfg_add_try_hole (cfg, ins->inst_eh_block, code, bb);
4123 ins->inst_c0 = code - cfg->native_code;
4126 /*if (ins->inst_target_bb->native_offset) {
4128 //x86_jump_code (code, cfg->native_code + ins->inst_target_bb->native_offset);
4130 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_target_bb);
4135 ppc_mtctr (code, ins->sreg1);
4136 ppc_bcctr (code, PPC_BR_ALWAYS, 0);
4140 CASE_PPC64 (OP_LCEQ)
4141 ppc_li (code, ins->dreg, 0);
4142 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 2);
4143 ppc_li (code, ins->dreg, 1);
4149 CASE_PPC64 (OP_LCLT)
4150 CASE_PPC64 (OP_LCLT_UN)
4151 ppc_li (code, ins->dreg, 1);
4152 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 2);
4153 ppc_li (code, ins->dreg, 0);
4159 CASE_PPC64 (OP_LCGT)
4160 CASE_PPC64 (OP_LCGT_UN)
4161 ppc_li (code, ins->dreg, 1);
4162 ppc_bc (code, PPC_BR_TRUE, PPC_BR_GT, 2);
4163 ppc_li (code, ins->dreg, 0);
4165 case OP_COND_EXC_EQ:
4166 case OP_COND_EXC_NE_UN:
4167 case OP_COND_EXC_LT:
4168 case OP_COND_EXC_LT_UN:
4169 case OP_COND_EXC_GT:
4170 case OP_COND_EXC_GT_UN:
4171 case OP_COND_EXC_GE:
4172 case OP_COND_EXC_GE_UN:
4173 case OP_COND_EXC_LE:
4174 case OP_COND_EXC_LE_UN:
4175 EMIT_COND_SYSTEM_EXCEPTION (ins->opcode - OP_COND_EXC_EQ, ins->inst_p1);
4177 case OP_COND_EXC_IEQ:
4178 case OP_COND_EXC_INE_UN:
4179 case OP_COND_EXC_ILT:
4180 case OP_COND_EXC_ILT_UN:
4181 case OP_COND_EXC_IGT:
4182 case OP_COND_EXC_IGT_UN:
4183 case OP_COND_EXC_IGE:
4184 case OP_COND_EXC_IGE_UN:
4185 case OP_COND_EXC_ILE:
4186 case OP_COND_EXC_ILE_UN:
4187 EMIT_COND_SYSTEM_EXCEPTION (ins->opcode - OP_COND_EXC_IEQ, ins->inst_p1);
4199 EMIT_COND_BRANCH (ins, ins->opcode - OP_IBEQ);
4202 /* floating point opcodes */
4204 g_assert (cfg->compile_aot);
4206 /* FIXME: Optimize this */
4208 ppc_mflr (code, ppc_r12);
4210 *(double*)code = *(double*)ins->inst_p0;
4212 ppc_lfd (code, ins->dreg, 8, ppc_r12);
4215 g_assert_not_reached ();
4217 case OP_STORER8_MEMBASE_REG:
4218 if (ppc_is_imm16 (ins->inst_offset)) {
4219 ppc_stfd (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
4221 if (ppc_is_imm32 (ins->inst_offset)) {
4222 ppc_addis (code, ppc_r11, ins->inst_destbasereg, ppc_ha(ins->inst_offset));
4223 ppc_stfd (code, ins->sreg1, ins->inst_offset, ppc_r11);
4225 ppc_load (code, ppc_r0, ins->inst_offset);
4226 ppc_stfdx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
4230 case OP_LOADR8_MEMBASE:
4231 if (ppc_is_imm16 (ins->inst_offset)) {
4232 ppc_lfd (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
4234 if (ppc_is_imm32 (ins->inst_offset)) {
4235 ppc_addis (code, ppc_r11, ins->inst_destbasereg, ppc_ha(ins->inst_offset));
4236 ppc_lfd (code, ins->dreg, ins->inst_offset, ppc_r11);
4238 ppc_load (code, ppc_r0, ins->inst_offset);
4239 ppc_lfdx (code, ins->dreg, ins->inst_destbasereg, ppc_r0);
4243 case OP_STORER4_MEMBASE_REG:
4244 ppc_frsp (code, ins->sreg1, ins->sreg1);
4245 if (ppc_is_imm16 (ins->inst_offset)) {
4246 ppc_stfs (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
4248 if (ppc_is_imm32 (ins->inst_offset)) {
4249 ppc_addis (code, ppc_r11, ins->inst_destbasereg, ppc_ha(ins->inst_offset));
4250 ppc_stfs (code, ins->sreg1, ins->inst_offset, ppc_r11);
4252 ppc_load (code, ppc_r0, ins->inst_offset);
4253 ppc_stfsx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
4257 case OP_LOADR4_MEMBASE:
4258 if (ppc_is_imm16 (ins->inst_offset)) {
4259 ppc_lfs (code, ins->dreg, ins->inst_offset, ins->inst_basereg);
4261 if (ppc_is_imm32 (ins->inst_offset)) {
4262 ppc_addis (code, ppc_r11, ins->inst_destbasereg, ppc_ha(ins->inst_offset));
4263 ppc_lfs (code, ins->dreg, ins->inst_offset, ppc_r11);
4265 ppc_load (code, ppc_r0, ins->inst_offset);
4266 ppc_lfsx (code, ins->dreg, ins->inst_destbasereg, ppc_r0);
4270 case OP_LOADR4_MEMINDEX:
4271 ppc_lfsx (code, ins->dreg, ins->inst_basereg, ins->sreg2);
4273 case OP_LOADR8_MEMINDEX:
4274 ppc_lfdx (code, ins->dreg, ins->inst_basereg, ins->sreg2);
4276 case OP_STORER4_MEMINDEX:
4277 ppc_frsp (code, ins->sreg1, ins->sreg1);
4278 ppc_stfsx (code, ins->sreg1, ins->inst_destbasereg, ins->sreg2);
4280 case OP_STORER8_MEMINDEX:
4281 ppc_stfdx (code, ins->sreg1, ins->inst_destbasereg, ins->sreg2);
4284 case CEE_CONV_R4: /* FIXME: change precision */
4286 g_assert_not_reached ();
4287 case OP_FCONV_TO_I1:
4288 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 1, TRUE);
4290 case OP_FCONV_TO_U1:
4291 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 1, FALSE);
4293 case OP_FCONV_TO_I2:
4294 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 2, TRUE);
4296 case OP_FCONV_TO_U2:
4297 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 2, FALSE);
4299 case OP_FCONV_TO_I4:
4301 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 4, TRUE);
4303 case OP_FCONV_TO_U4:
4305 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 4, FALSE);
4307 case OP_LCONV_TO_R_UN:
4308 g_assert_not_reached ();
4309 /* Implemented as helper calls */
4311 case OP_LCONV_TO_OVF_I4_2:
4312 case OP_LCONV_TO_OVF_I: {
4313 #ifdef __mono_ppc64__
4316 guint8 *negative_branch, *msword_positive_branch, *msword_negative_branch, *ovf_ex_target;
4317 // Check if it's negative
4318 ppc_cmpi (code, 0, 0, ins->sreg1, 0);
4319 negative_branch = code;
4320 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 0);
4321 // It's positive: msword == 0
4322 ppc_cmpi (code, 0, 0, ins->sreg2, 0);
4323 msword_positive_branch = code;
4324 ppc_bc (code, PPC_BR_TRUE, PPC_BR_EQ, 0);
4326 ovf_ex_target = code;
4327 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_ALWAYS, 0, "OverflowException");
4329 ppc_patch (negative_branch, code);
4330 ppc_cmpi (code, 0, 0, ins->sreg2, -1);
4331 msword_negative_branch = code;
4332 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4333 ppc_patch (msword_negative_branch, ovf_ex_target);
4335 ppc_patch (msword_positive_branch, code);
4336 if (ins->dreg != ins->sreg1)
4337 ppc_mr (code, ins->dreg, ins->sreg1);
4342 ppc_fsqrtd (code, ins->dreg, ins->sreg1);
4345 ppc_fadd (code, ins->dreg, ins->sreg1, ins->sreg2);
4348 ppc_fsub (code, ins->dreg, ins->sreg1, ins->sreg2);
4351 ppc_fmul (code, ins->dreg, ins->sreg1, ins->sreg2);
4354 ppc_fdiv (code, ins->dreg, ins->sreg1, ins->sreg2);
4357 ppc_fneg (code, ins->dreg, ins->sreg1);
4361 g_assert_not_reached ();
4364 ppc_fcmpu (code, 0, ins->sreg1, ins->sreg2);
4367 ppc_fcmpo (code, 0, ins->sreg1, ins->sreg2);
4368 ppc_li (code, ins->dreg, 0);
4369 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 2);
4370 ppc_li (code, ins->dreg, 1);
4373 ppc_fcmpo (code, 0, ins->sreg1, ins->sreg2);
4374 ppc_li (code, ins->dreg, 1);
4375 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 2);
4376 ppc_li (code, ins->dreg, 0);
4379 ppc_fcmpu (code, 0, ins->sreg1, ins->sreg2);
4380 ppc_li (code, ins->dreg, 1);
4381 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 3);
4382 ppc_bc (code, PPC_BR_TRUE, PPC_BR_LT, 2);
4383 ppc_li (code, ins->dreg, 0);
4386 ppc_fcmpo (code, 0, ins->sreg1, ins->sreg2);
4387 ppc_li (code, ins->dreg, 1);
4388 ppc_bc (code, PPC_BR_TRUE, PPC_BR_GT, 2);
4389 ppc_li (code, ins->dreg, 0);
4392 ppc_fcmpu (code, 0, ins->sreg1, ins->sreg2);
4393 ppc_li (code, ins->dreg, 1);
4394 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 3);
4395 ppc_bc (code, PPC_BR_TRUE, PPC_BR_GT, 2);
4396 ppc_li (code, ins->dreg, 0);
4399 EMIT_COND_BRANCH (ins, CEE_BEQ - CEE_BEQ);
4402 EMIT_COND_BRANCH (ins, CEE_BNE_UN - CEE_BEQ);
4405 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4406 EMIT_COND_BRANCH (ins, CEE_BLT - CEE_BEQ);
4409 EMIT_COND_BRANCH_FLAGS (ins, PPC_BR_TRUE, PPC_BR_SO);
4410 EMIT_COND_BRANCH (ins, CEE_BLT_UN - CEE_BEQ);
4413 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4414 EMIT_COND_BRANCH (ins, CEE_BGT - CEE_BEQ);
4417 EMIT_COND_BRANCH_FLAGS (ins, PPC_BR_TRUE, PPC_BR_SO);
4418 EMIT_COND_BRANCH (ins, CEE_BGT_UN - CEE_BEQ);
4421 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4422 EMIT_COND_BRANCH (ins, CEE_BGE - CEE_BEQ);
4425 EMIT_COND_BRANCH (ins, CEE_BGE_UN - CEE_BEQ);
4428 ppc_bc (code, PPC_BR_TRUE, PPC_BR_SO, 2);
4429 EMIT_COND_BRANCH (ins, CEE_BLE - CEE_BEQ);
4432 EMIT_COND_BRANCH (ins, CEE_BLE_UN - CEE_BEQ);
4435 g_assert_not_reached ();
4436 case OP_CHECK_FINITE: {
4437 ppc_rlwinm (code, ins->sreg1, ins->sreg1, 0, 1, 31);
4438 ppc_addis (code, ins->sreg1, ins->sreg1, -32752);
4439 ppc_rlwinmd (code, ins->sreg1, ins->sreg1, 1, 31, 31);
4440 EMIT_COND_SYSTEM_EXCEPTION (CEE_BEQ - CEE_BEQ, "ArithmeticException");
4443 mono_add_patch_info (cfg, offset, (MonoJumpInfoType)ins->inst_c1, ins->inst_p0);
4444 #ifdef __mono_ppc64__
4445 ppc_load_sequence (code, ins->dreg, (guint64)0x0f0f0f0f0f0f0f0fLL);
4447 ppc_load_sequence (code, ins->dreg, (gulong)0x0f0f0f0fL);
4452 #ifdef __mono_ppc64__
4453 case OP_ICONV_TO_I4:
4455 ppc_extsw (code, ins->dreg, ins->sreg1);
4457 case OP_ICONV_TO_U4:
4459 ppc_clrldi (code, ins->dreg, ins->sreg1, 32);
4461 case OP_ICONV_TO_R4:
4462 case OP_ICONV_TO_R8:
4463 case OP_LCONV_TO_R4:
4464 case OP_LCONV_TO_R8: {
4466 if (ins->opcode == OP_ICONV_TO_R4 || ins->opcode == OP_ICONV_TO_R8) {
4467 ppc_extsw (code, ppc_r0, ins->sreg1);
4472 if (cpu_hw_caps & PPC_MOVE_FPR_GPR) {
4473 ppc_mffgpr (code, ins->dreg, tmp);
4475 ppc_str (code, tmp, -8, ppc_r1);
4476 ppc_lfd (code, ins->dreg, -8, ppc_r1);
4478 ppc_fcfid (code, ins->dreg, ins->dreg);
4479 if (ins->opcode == OP_ICONV_TO_R4 || ins->opcode == OP_LCONV_TO_R4)
4480 ppc_frsp (code, ins->dreg, ins->dreg);
4484 ppc_srad (code, ins->dreg, ins->sreg1, ins->sreg2);
4487 ppc_srd (code, ins->dreg, ins->sreg1, ins->sreg2);
4490 /* check XER [0-3] (SO, OV, CA): we can't use mcrxr
4492 ppc_mfspr (code, ppc_r0, ppc_xer);
4493 ppc_andisd (code, ppc_r0, ppc_r0, (1 << 13)); /* CA */
4494 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, ins->inst_p1);
4496 case OP_COND_EXC_OV:
4497 ppc_mfspr (code, ppc_r0, ppc_xer);
4498 ppc_andisd (code, ppc_r0, ppc_r0, (1 << 14)); /* OV */
4499 EMIT_COND_SYSTEM_EXCEPTION_FLAGS (PPC_BR_FALSE, PPC_BR_EQ, ins->inst_p1);
4511 EMIT_COND_BRANCH (ins, ins->opcode - OP_LBEQ);
4513 case OP_FCONV_TO_I8:
4514 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 8, TRUE);
4516 case OP_FCONV_TO_U8:
4517 code = emit_float_to_int (cfg, code, ins->dreg, ins->sreg1, 8, FALSE);
4519 case OP_STOREI4_MEMBASE_REG:
4520 if (ppc_is_imm16 (ins->inst_offset)) {
4521 ppc_stw (code, ins->sreg1, ins->inst_offset, ins->inst_destbasereg);
4523 ppc_load (code, ppc_r0, ins->inst_offset);
4524 ppc_stwx (code, ins->sreg1, ins->inst_destbasereg, ppc_r0);
4527 case OP_STOREI4_MEMINDEX:
4528 ppc_stwx (code, ins->sreg1, ins->sreg2, ins->inst_destbasereg);
4531 ppc_srawi (code, ins->dreg, ins->sreg1, (ins->inst_imm & 0x1f));
4533 case OP_ISHR_UN_IMM:
4534 if (ins->inst_imm & 0x1f)
4535 ppc_srwi (code, ins->dreg, ins->sreg1, (ins->inst_imm & 0x1f));
4537 ppc_mr (code, ins->dreg, ins->sreg1);
4540 case OP_ICONV_TO_R4:
4541 case OP_ICONV_TO_R8: {
4542 if (cpu_hw_caps & PPC_ISA_64) {
4543 ppc_srawi(code, ppc_r0, ins->sreg1, 31);
4544 ppc_stw (code, ppc_r0, -8, ppc_r1);
4545 ppc_stw (code, ins->sreg1, -4, ppc_r1);
4546 ppc_lfd (code, ins->dreg, -8, ppc_r1);
4547 ppc_fcfid (code, ins->dreg, ins->dreg);
4548 if (ins->opcode == OP_ICONV_TO_R4)
4549 ppc_frsp (code, ins->dreg, ins->dreg);
4555 case OP_ATOMIC_ADD_I4:
4556 CASE_PPC64 (OP_ATOMIC_ADD_I8) {
4557 int location = ins->inst_basereg;
4558 int addend = ins->sreg2;
4559 guint8 *loop, *branch;
4560 g_assert (ins->inst_offset == 0);
4564 if (ins->opcode == OP_ATOMIC_ADD_I4)
4565 ppc_lwarx (code, ppc_r0, 0, location);
4566 #ifdef __mono_ppc64__
4568 ppc_ldarx (code, ppc_r0, 0, location);
4571 ppc_add (code, ppc_r0, ppc_r0, addend);
4573 if (ins->opcode == OP_ATOMIC_ADD_I4)
4574 ppc_stwcxd (code, ppc_r0, 0, location);
4575 #ifdef __mono_ppc64__
4577 ppc_stdcxd (code, ppc_r0, 0, location);
4581 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4582 ppc_patch (branch, loop);
4585 ppc_mr (code, ins->dreg, ppc_r0);
4588 case OP_ATOMIC_CAS_I4:
4589 CASE_PPC64 (OP_ATOMIC_CAS_I8) {
4590 int location = ins->sreg1;
4591 int value = ins->sreg2;
4592 int comparand = ins->sreg3;
4593 guint8 *start, *not_equal, *lost_reservation;
4597 if (ins->opcode == OP_ATOMIC_CAS_I4)
4598 ppc_lwarx (code, ppc_r0, 0, location);
4599 #ifdef __mono_ppc64__
4601 ppc_ldarx (code, ppc_r0, 0, location);
4604 ppc_cmp (code, 0, ins->opcode == OP_ATOMIC_CAS_I4 ? 0 : 1, ppc_r0, comparand);
4606 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4608 if (ins->opcode == OP_ATOMIC_CAS_I4)
4609 ppc_stwcxd (code, value, 0, location);
4610 #ifdef __mono_ppc64__
4612 ppc_stdcxd (code, value, 0, location);
4615 lost_reservation = code;
4616 ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
4617 ppc_patch (lost_reservation, start);
4618 ppc_patch (not_equal, code);
4621 ppc_mr (code, ins->dreg, ppc_r0);
4624 case OP_GC_SAFE_POINT:
4628 g_warning ("unknown opcode %s in %s()\n", mono_inst_name (ins->opcode), __FUNCTION__);
4629 g_assert_not_reached ();
4632 if ((cfg->opt & MONO_OPT_BRANCH) && ((code - cfg->native_code - offset) > max_len)) {
4633 g_warning ("wrong maximal instruction length of instruction %s (expected %d, got %ld)",
4634 mono_inst_name (ins->opcode), max_len, (glong)(code - cfg->native_code - offset));
4635 g_assert_not_reached ();
4641 last_offset = offset;
4644 cfg->code_len = code - cfg->native_code;
4646 #endif /* !DISABLE_JIT */
/*
 * mono_arch_register_lowlevel_calls:
 * Register the arch-specific low-level exception-throw helper with the
 * JIT icall machinery so generated code can reference it by name.
 */
4649 mono_arch_register_lowlevel_calls (void)
4651 /* The signature doesn't matter -- the helper is not invoked through a
managed signature, so a dummy "void" signature is used. */
4652 mono_register_jit_icall (mono_ppc_throw_exception, "mono_ppc_throw_exception", mono_create_icall_signature ("void"), TRUE);
4655 #ifdef __mono_ppc64__
/*
 * patch_load_sequence(ip,val):
 * Rewrite the 16-bit immediates of an already-emitted constant-load
 * instruction sequence at @ip so that it loads @val.  The halfword
 * indices differ between little- and big-endian because the immediate
 * field sits in the opposite half of each 32-bit instruction word.
 */
4656 #ifdef _LITTLE_ENDIAN
4657 #define patch_load_sequence(ip,val) do {\
4658 guint16 *__load = (guint16*)(ip); \
4659 g_assert (sizeof (val) == sizeof (gsize)); \
4660 __load [0] = (((guint64)(gsize)(val)) >> 48) & 0xffff; \
4661 __load [2] = (((guint64)(gsize)(val)) >> 32) & 0xffff; \
4662 __load [6] = (((guint64)(gsize)(val)) >> 16) & 0xffff; \
4663 __load [8] = ((guint64)(gsize)(val)) & 0xffff; \
4665 #elif defined _BIG_ENDIAN
4666 #define patch_load_sequence(ip,val) do {\
4667 guint16 *__load = (guint16*)(ip); \
4668 g_assert (sizeof (val) == sizeof (gsize)); \
4669 __load [1] = (((guint64)(gsize)(val)) >> 48) & 0xffff; \
4670 __load [3] = (((guint64)(gsize)(val)) >> 32) & 0xffff; \
4671 __load [7] = (((guint64)(gsize)(val)) >> 16) & 0xffff; \
4672 __load [9] = ((guint64)(gsize)(val)) & 0xffff; \
4675 #error huh? No endianess defined by compiler
/* 32-bit: the load sequence is a lis/ori pair; patch its two immediates. */
4678 #define patch_load_sequence(ip,val) do {\
4679 guint16 *__lis_ori = (guint16*)(ip); \
4680 __lis_ori [1] = (((gulong)(val)) >> 16) & 0xffff; \
4681 __lis_ori [3] = ((gulong)(val)) & 0xffff; \
/*
 * mono_arch_patch_code:
 * Apply the jump-info list @ji to the native code at @code: resolve each
 * patch's target and rewrite the corresponding instruction/immediate
 * sequence in place.  Sets @error on resolution failure and returns early.
 */
4687 mono_arch_patch_code (MonoCompile *cfg, MonoMethod *method, MonoDomain *domain, guint8 *code, MonoJumpInfo *ji, gboolean run_cctors, MonoError *error)
4689 MonoJumpInfo *patch_info;
4690 gboolean compile_aot = !run_cctors;
4692 mono_error_init (error);
/* Walk the whole patch list; each entry records its site via ip.i (an
 * offset from the start of the method's native code). */
4694 for (patch_info = ji; patch_info; patch_info = patch_info->next) {
4695 unsigned char *ip = patch_info->ip.i + code;
4696 unsigned char *target;
/* is_fd: whether the resolved target is a function descriptor rather
 * than a direct code address (relevant on PPC_USES_FUNCTION_DESCRIPTOR ABIs). */
4697 gboolean is_fd = FALSE;
4699 target = mono_resolve_patch_target (method, domain, code, patch_info, run_cctors, error);
4700 return_if_nok (error);
4703 switch (patch_info->type) {
4704 case MONO_PATCH_INFO_BB:
4705 case MONO_PATCH_INFO_LABEL:
4708 /* No need to patch these */
/* Dispatch on the patch type to pick the in-place rewrite strategy. */
4713 switch (patch_info->type) {
4714 case MONO_PATCH_INFO_IP:
/* Store the (absolute) patch-site address itself into the load sequence. */
4715 patch_load_sequence (ip, ip);
4717 case MONO_PATCH_INFO_METHOD_REL:
4718 g_assert_not_reached ();
4719 *((gpointer *)(ip)) = code + patch_info->data.offset;
4721 case MONO_PATCH_INFO_SWITCH: {
4722 gpointer *table = (gpointer *)patch_info->data.table->table;
/* Point the emitted load at the jump table, then convert each table
 * entry from a code-relative value to an absolute address. */
4725 patch_load_sequence (ip, table);
4727 for (i = 0; i < patch_info->data.table->table_size; i++) {
4728 table [i] = (glong)patch_info->data.table->table [i] + code;
4730 /* we put into the table the absolute address, no need for ppc_patch in this case */
4733 case MONO_PATCH_INFO_METHODCONST:
4734 case MONO_PATCH_INFO_CLASS:
4735 case MONO_PATCH_INFO_IMAGE:
4736 case MONO_PATCH_INFO_FIELD:
4737 case MONO_PATCH_INFO_VTABLE:
4738 case MONO_PATCH_INFO_IID:
4739 case MONO_PATCH_INFO_SFLDA:
4740 case MONO_PATCH_INFO_LDSTR:
4741 case MONO_PATCH_INFO_TYPE_FROM_HANDLE:
4742 case MONO_PATCH_INFO_LDTOKEN:
4743 /* from OP_AOTCONST : lis + ori */
4744 patch_load_sequence (ip, target);
4746 case MONO_PATCH_INFO_R4:
4747 case MONO_PATCH_INFO_R8:
4748 g_assert_not_reached ();
4749 *((gconstpointer *)(ip + 2)) = patch_info->data.target;
4751 case MONO_PATCH_INFO_EXC_NAME:
4752 g_assert_not_reached ();
4753 *((gconstpointer *)(ip + 1)) = patch_info->data.name;
4755 case MONO_PATCH_INFO_NONE:
4756 case MONO_PATCH_INFO_BB_OVF:
4757 case MONO_PATCH_INFO_EXC_OVF:
4758 /* everything is dealt with at epilog output time */
4760 #ifdef PPC_USES_FUNCTION_DESCRIPTOR
/* NOTE(review): on descriptor ABIs these target kinds presumably resolve
 * to function descriptors (hence the is_fd flag) -- confirm against
 * ppc_patch_full's handling. */
4761 case MONO_PATCH_INFO_INTERNAL_METHOD:
4762 case MONO_PATCH_INFO_ABS:
4763 case MONO_PATCH_INFO_RGCTX_FETCH:
4764 case MONO_PATCH_INFO_JIT_ICALL_ADDR:
/* Common path: patch the branch/call at @ip to reach @target. */
4771 ppc_patch_full (ip, target, is_fd);
4776 * Emit code to save the registers in used_int_regs or the registers in the MonoLMF
4777 * structure at positive offset pos from register base_reg. pos is guaranteed to fit into
4778 * the instruction offset immediate for all the registers.
4781 save_registers (MonoCompile *cfg, guint8* code, int pos, int base_reg, gboolean save_lmf, guint32 used_int_regs, int cfa_offset)
/* Non-LMF case: store only the callee-saved GPRs (r13-r31) present in
 * used_int_regs, emitting matching unwind info for each save slot. */
4785 for (i = 13; i <= 31; i++) {
4786 if (used_int_regs & (1 << i)) {
4787 ppc_str (code, i, pos, base_reg);
/* record the register's save location relative to the CFA for unwinding */
4788 mono_emit_unwind_op_offset (cfg, code, i, pos - cfa_offset);
4789 pos += sizeof (mgreg_t);
4793 /* pos is the start of the MonoLMF structure */
/* LMF case: store all of r13-r31 into MonoLMF.iregs... */
4794 int offset = pos + G_STRUCT_OFFSET (MonoLMF, iregs);
4795 for (i = 13; i <= 31; i++) {
4796 ppc_str (code, i, offset, base_reg);
4797 mono_emit_unwind_op_offset (cfg, code, i, offset - cfa_offset);
4798 offset += sizeof (mgreg_t);
/* ...and the callee-saved FPRs (f14-f31) into MonoLMF.fregs; no unwind
 * info is emitted for the FP saves here. */
4800 offset = pos + G_STRUCT_OFFSET (MonoLMF, fregs);
4801 for (i = 14; i < 32; i++) {
4802 ppc_stfd (code, i, offset, base_reg);
4803 offset += sizeof (gdouble);
4810 * Stack frame layout:
4812 * ------------------- sp
4813 * MonoLMF structure or saved registers
4814 * -------------------
4816 * -------------------
4818 * -------------------
4819 * optional 8 bytes for tracing
4820 * -------------------
4821 * param area size is cfg->param_area
4822 * -------------------
4823 * linkage area size is PPC_STACK_PARAM_OFFSET
4824 * ------------------- sp
4828 mono_arch_emit_prolog (MonoCompile *cfg)
4830 MonoMethod *method = cfg->method;
4832 MonoMethodSignature *sig;
4834 long alloc_size, pos, max_offset, cfa_offset;
4840 int tailcall_struct_index;
4842 if (mono_jit_trace_calls != NULL && mono_trace_eval (method))
4845 sig = mono_method_signature (method);
4846 cfg->code_size = 512 + sig->param_count * 32;
4847 code = cfg->native_code = g_malloc (cfg->code_size);
4851 /* We currently emit unwind info for aot, but don't use it */
4852 mono_emit_unwind_op_def_cfa (cfg, code, ppc_r1, 0);
4854 if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
4855 ppc_mflr (code, ppc_r0);
4856 ppc_str (code, ppc_r0, PPC_RET_ADDR_OFFSET, ppc_sp);
4857 mono_emit_unwind_op_offset (cfg, code, ppc_lr, PPC_RET_ADDR_OFFSET);
4860 alloc_size = cfg->stack_offset;
4863 if (!method->save_lmf) {
4864 for (i = 31; i >= 13; --i) {
4865 if (cfg->used_int_regs & (1 << i)) {
4866 pos += sizeof (mgreg_t);
4870 pos += sizeof (MonoLMF);
4874 // align to MONO_ARCH_FRAME_ALIGNMENT bytes
4875 if (alloc_size & (MONO_ARCH_FRAME_ALIGNMENT - 1)) {
4876 alloc_size += MONO_ARCH_FRAME_ALIGNMENT - 1;
4877 alloc_size &= ~(MONO_ARCH_FRAME_ALIGNMENT - 1);
4880 cfg->stack_usage = alloc_size;
4881 g_assert ((alloc_size & (MONO_ARCH_FRAME_ALIGNMENT-1)) == 0);
4883 if (ppc_is_imm16 (-alloc_size)) {
4884 ppc_str_update (code, ppc_sp, -alloc_size, ppc_sp);
4885 cfa_offset = alloc_size;
4886 mono_emit_unwind_op_def_cfa_offset (cfg, code, alloc_size);
4887 code = save_registers (cfg, code, alloc_size - pos, ppc_sp, method->save_lmf, cfg->used_int_regs, cfa_offset);
4890 ppc_addi (code, ppc_r12, ppc_sp, -pos);
4891 ppc_load (code, ppc_r0, -alloc_size);
4892 ppc_str_update_indexed (code, ppc_sp, ppc_sp, ppc_r0);
4893 cfa_offset = alloc_size;
4894 mono_emit_unwind_op_def_cfa_offset (cfg, code, alloc_size);
4895 code = save_registers (cfg, code, 0, ppc_r12, method->save_lmf, cfg->used_int_regs, cfa_offset);
4898 if (cfg->frame_reg != ppc_sp) {
4899 ppc_mr (code, cfg->frame_reg, ppc_sp);
4900 mono_emit_unwind_op_def_cfa_reg (cfg, code, cfg->frame_reg);
4903 /* store runtime generic context */
4904 if (cfg->rgctx_var) {
4905 g_assert (cfg->rgctx_var->opcode == OP_REGOFFSET &&
4906 (cfg->rgctx_var->inst_basereg == ppc_r1 || cfg->rgctx_var->inst_basereg == ppc_r31));
4908 ppc_stptr (code, MONO_ARCH_RGCTX_REG, cfg->rgctx_var->inst_offset, cfg->rgctx_var->inst_basereg);
4911 /* compute max_offset in order to use short forward jumps
4912 * we always do it on ppc because the immediate displacement
4913 * for jumps is too small
4916 for (bb = cfg->bb_entry; bb; bb = bb->next_bb) {
4918 bb->max_offset = max_offset;
4920 if (cfg->prof_options & MONO_PROFILE_COVERAGE)
4923 MONO_BB_FOR_EACH_INS (bb, ins)
4924 max_offset += ins_native_length (cfg, ins);
4927 /* load arguments allocated to register from the stack */
4930 cinfo = get_call_info (sig);
4932 if (MONO_TYPE_ISSTRUCT (sig->ret)) {
4933 ArgInfo *ainfo = &cinfo->ret;
4935 inst = cfg->vret_addr;
4938 if (ppc_is_imm16 (inst->inst_offset)) {
4939 ppc_stptr (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4941 ppc_load (code, ppc_r12, inst->inst_offset);
4942 ppc_stptr_indexed (code, ainfo->reg, ppc_r12, inst->inst_basereg);
4946 tailcall_struct_index = 0;
4947 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
4948 ArgInfo *ainfo = cinfo->args + i;
4949 inst = cfg->args [pos];
4951 if (cfg->verbose_level > 2)
4952 g_print ("Saving argument %d (type: %d)\n", i, ainfo->regtype);
4953 if (inst->opcode == OP_REGVAR) {
4954 if (ainfo->regtype == RegTypeGeneral)
4955 ppc_mr (code, inst->dreg, ainfo->reg);
4956 else if (ainfo->regtype == RegTypeFP)
4957 ppc_fmr (code, inst->dreg, ainfo->reg);
4958 else if (ainfo->regtype == RegTypeBase) {
4959 ppc_ldr (code, ppc_r12, 0, ppc_sp);
4960 ppc_ldptr (code, inst->dreg, ainfo->offset, ppc_r12);
4962 g_assert_not_reached ();
4964 if (cfg->verbose_level > 2)
4965 g_print ("Argument %ld assigned to register %s\n", pos, mono_arch_regname (inst->dreg));
4967 /* the argument should be put on the stack: FIXME handle size != word */
4968 if (ainfo->regtype == RegTypeGeneral) {
4969 switch (ainfo->size) {
4971 if (ppc_is_imm16 (inst->inst_offset)) {
4972 ppc_stb (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4974 if (ppc_is_imm32 (inst->inst_offset)) {
4975 ppc_addis (code, ppc_r12, inst->inst_basereg, ppc_ha(inst->inst_offset));
4976 ppc_stb (code, ainfo->reg, inst->inst_offset, ppc_r12);
4978 ppc_load (code, ppc_r12, inst->inst_offset);
4979 ppc_stbx (code, ainfo->reg, inst->inst_basereg, ppc_r12);
4984 if (ppc_is_imm16 (inst->inst_offset)) {
4985 ppc_sth (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
4987 if (ppc_is_imm32 (inst->inst_offset)) {
4988 ppc_addis (code, ppc_r12, inst->inst_basereg, ppc_ha(inst->inst_offset));
4989 ppc_sth (code, ainfo->reg, inst->inst_offset, ppc_r12);
4991 ppc_load (code, ppc_r12, inst->inst_offset);
4992 ppc_sthx (code, ainfo->reg, inst->inst_basereg, ppc_r12);
4996 #ifdef __mono_ppc64__
4998 if (ppc_is_imm16 (inst->inst_offset)) {
4999 ppc_stw (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
5001 if (ppc_is_imm32 (inst->inst_offset)) {
5002 ppc_addis (code, ppc_r12, inst->inst_basereg, ppc_ha(inst->inst_offset));
5003 ppc_stw (code, ainfo->reg, inst->inst_offset, ppc_r12);
5005 ppc_load (code, ppc_r12, inst->inst_offset);
5006 ppc_stwx (code, ainfo->reg, inst->inst_basereg, ppc_r12);
5011 if (ppc_is_imm16 (inst->inst_offset)) {
5012 ppc_str (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
5014 ppc_load (code, ppc_r12, inst->inst_offset);
5015 ppc_str_indexed (code, ainfo->reg, ppc_r12, inst->inst_basereg);
5020 if (ppc_is_imm16 (inst->inst_offset + 4)) {
5021 ppc_stw (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
5022 ppc_stw (code, ainfo->reg + 1, inst->inst_offset + 4, inst->inst_basereg);
5024 ppc_addis (code, ppc_r12, inst->inst_basereg, ppc_ha(inst->inst_offset));
5025 ppc_addi (code, ppc_r12, ppc_r12, inst->inst_offset);
5026 ppc_stw (code, ainfo->reg, 0, ppc_r12);
5027 ppc_stw (code, ainfo->reg + 1, 4, ppc_r12);
5032 if (ppc_is_imm16 (inst->inst_offset)) {
5033 ppc_stptr (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
5035 if (ppc_is_imm32 (inst->inst_offset)) {
5036 ppc_addis (code, ppc_r12, inst->inst_basereg, ppc_ha(inst->inst_offset));
5037 ppc_stptr (code, ainfo->reg, inst->inst_offset, ppc_r12);
5039 ppc_load (code, ppc_r12, inst->inst_offset);
5040 ppc_stptr_indexed (code, ainfo->reg, inst->inst_basereg, ppc_r12);
5045 } else if (ainfo->regtype == RegTypeBase) {
5046 g_assert (ppc_is_imm16 (ainfo->offset));
5047 /* load the previous stack pointer in r12 */
5048 ppc_ldr (code, ppc_r12, 0, ppc_sp);
5049 ppc_ldptr (code, ppc_r0, ainfo->offset, ppc_r12);
5050 switch (ainfo->size) {
5052 if (ppc_is_imm16 (inst->inst_offset)) {
5053 ppc_stb (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
5055 if (ppc_is_imm32 (inst->inst_offset)) {
5056 ppc_addis (code, ppc_r12, inst->inst_basereg, ppc_ha(inst->inst_offset));
5057 ppc_stb (code, ppc_r0, inst->inst_offset, ppc_r12);
5059 ppc_load (code, ppc_r12, inst->inst_offset);
5060 ppc_stbx (code, ppc_r0, inst->inst_basereg, ppc_r12);
5065 if (ppc_is_imm16 (inst->inst_offset)) {
5066 ppc_sth (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
5068 if (ppc_is_imm32 (inst->inst_offset)) {
5069 ppc_addis (code, ppc_r12, inst->inst_basereg, ppc_ha(inst->inst_offset));
5070 ppc_sth (code, ppc_r0, inst->inst_offset, ppc_r12);
5072 ppc_load (code, ppc_r12, inst->inst_offset);
5073 ppc_sthx (code, ppc_r0, inst->inst_basereg, ppc_r12);
5077 #ifdef __mono_ppc64__
5079 if (ppc_is_imm16 (inst->inst_offset)) {
5080 ppc_stw (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
5082 if (ppc_is_imm32 (inst->inst_offset)) {
5083 ppc_addis (code, ppc_r12, inst->inst_basereg, ppc_ha(inst->inst_offset));
5084 ppc_stw (code, ppc_r0, inst->inst_offset, ppc_r12);
5086 ppc_load (code, ppc_r12, inst->inst_offset);
5087 ppc_stwx (code, ppc_r0, inst->inst_basereg, ppc_r12);
5092 if (ppc_is_imm16 (inst->inst_offset)) {
5093 ppc_str (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
5095 ppc_load (code, ppc_r12, inst->inst_offset);
5096 ppc_str_indexed (code, ppc_r0, ppc_r12, inst->inst_basereg);
5101 g_assert (ppc_is_imm16 (ainfo->offset + 4));
5102 if (ppc_is_imm16 (inst->inst_offset + 4)) {
5103 ppc_stw (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
5104 ppc_lwz (code, ppc_r0, ainfo->offset + 4, ppc_r12);
5105 ppc_stw (code, ppc_r0, inst->inst_offset + 4, inst->inst_basereg);
5107 /* use r11 to load the 2nd half of the long before we clobber r12. */
5108 ppc_lwz (code, ppc_r11, ainfo->offset + 4, ppc_r12);
5109 ppc_addis (code, ppc_r12, inst->inst_basereg, ppc_ha(inst->inst_offset));
5110 ppc_addi (code, ppc_r12, ppc_r12, inst->inst_offset);
5111 ppc_stw (code, ppc_r0, 0, ppc_r12);
5112 ppc_stw (code, ppc_r11, 4, ppc_r12);
5117 if (ppc_is_imm16 (inst->inst_offset)) {
5118 ppc_stptr (code, ppc_r0, inst->inst_offset, inst->inst_basereg);
5120 if (ppc_is_imm32 (inst->inst_offset)) {
5121 ppc_addis (code, ppc_r12, inst->inst_basereg, ppc_ha(inst->inst_offset));
5122 ppc_stptr (code, ppc_r0, inst->inst_offset, ppc_r12);
5124 ppc_load (code, ppc_r12, inst->inst_offset);
5125 ppc_stptr_indexed (code, ppc_r0, inst->inst_basereg, ppc_r12);
5130 } else if (ainfo->regtype == RegTypeFP) {
5131 g_assert (ppc_is_imm16 (inst->inst_offset));
5132 if (ainfo->size == 8)
5133 ppc_stfd (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
5134 else if (ainfo->size == 4)
5135 ppc_stfs (code, ainfo->reg, inst->inst_offset, inst->inst_basereg);
5137 g_assert_not_reached ();
5138 } else if (ainfo->regtype == RegTypeFPStructByVal) {
5139 int doffset = inst->inst_offset;
5143 g_assert (ppc_is_imm16 (inst->inst_offset));
5144 g_assert (ppc_is_imm16 (inst->inst_offset + ainfo->vtregs * sizeof (gpointer)));
5145 /* FIXME: what if there is no class? */
5146 if (sig->pinvoke && mono_class_from_mono_type (inst->inst_vtype))
5147 size = mono_class_native_size (mono_class_from_mono_type (inst->inst_vtype), NULL);
5148 for (cur_reg = 0; cur_reg < ainfo->vtregs; ++cur_reg) {
5149 if (ainfo->size == 4) {
5150 ppc_stfs (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
5152 ppc_stfd (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
5154 soffset += ainfo->size;
5155 doffset += ainfo->size;
5157 } else if (ainfo->regtype == RegTypeStructByVal) {
5158 int doffset = inst->inst_offset;
5162 g_assert (ppc_is_imm16 (inst->inst_offset));
5163 g_assert (ppc_is_imm16 (inst->inst_offset + ainfo->vtregs * sizeof (gpointer)));
5164 /* FIXME: what if there is no class? */
5165 if (sig->pinvoke && mono_class_from_mono_type (inst->inst_vtype))
5166 size = mono_class_native_size (mono_class_from_mono_type (inst->inst_vtype), NULL);
5167 for (cur_reg = 0; cur_reg < ainfo->vtregs; ++cur_reg) {
5170 * Darwin handles 1 and 2 byte
5171 * structs specially by
5172 * loading h/b into the arg
5173 * register. Only done for
5177 ppc_sth (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
5179 ppc_stb (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
5183 #ifdef __mono_ppc64__
5185 g_assert (cur_reg == 0);
5186 #if G_BYTE_ORDER == G_BIG_ENDIAN
5187 ppc_sldi (code, ppc_r0, ainfo->reg,
5188 (sizeof (gpointer) - ainfo->bytes) * 8);
5189 ppc_stptr (code, ppc_r0, doffset, inst->inst_basereg);
5191 if (mono_class_native_size (inst->klass, NULL) == 1) {
5192 ppc_stb (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
5193 } else if (mono_class_native_size (inst->klass, NULL) == 2) {
5194 ppc_sth (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
5195 } else if (mono_class_native_size (inst->klass, NULL) == 4) { // WDS -- maybe <=4?
5196 ppc_stw (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg);
5198 ppc_stptr (code, ainfo->reg + cur_reg, doffset, inst->inst_basereg); // WDS -- Better way?
5204 ppc_stptr (code, ainfo->reg + cur_reg, doffset,
5205 inst->inst_basereg);
5208 soffset += sizeof (gpointer);
5209 doffset += sizeof (gpointer);
5211 if (ainfo->vtsize) {
5212 /* FIXME: we need to do the shifting here, too */
5215 /* load the previous stack pointer in r12 (r0 gets overwritten by the memcpy) */
5216 ppc_ldr (code, ppc_r12, 0, ppc_sp);
5217 if ((size & MONO_PPC_32_64_CASE (3, 7)) != 0) {
5218 code = emit_memcpy (code, size - soffset,
5219 inst->inst_basereg, doffset,
5220 ppc_r12, ainfo->offset + soffset);
5222 code = emit_memcpy (code, ainfo->vtsize * sizeof (gpointer),
5223 inst->inst_basereg, doffset,
5224 ppc_r12, ainfo->offset + soffset);
5227 } else if (ainfo->regtype == RegTypeStructByAddr) {
5228 /* if it was originally a RegTypeBase */
5229 if (ainfo->offset) {
5230 /* load the previous stack pointer in r12 */
5231 ppc_ldr (code, ppc_r12, 0, ppc_sp);
5232 ppc_ldptr (code, ppc_r12, ainfo->offset, ppc_r12);
5234 ppc_mr (code, ppc_r12, ainfo->reg);
5237 if (cfg->tailcall_valuetype_addrs) {
5238 MonoInst *addr = cfg->tailcall_valuetype_addrs [tailcall_struct_index];
5240 g_assert (ppc_is_imm16 (addr->inst_offset));
5241 ppc_stptr (code, ppc_r12, addr->inst_offset, addr->inst_basereg);
5243 tailcall_struct_index++;
5246 g_assert (ppc_is_imm16 (inst->inst_offset));
5247 code = emit_memcpy (code, ainfo->vtsize, inst->inst_basereg, inst->inst_offset, ppc_r12, 0);
5248 /*g_print ("copy in %s: %d bytes from %d to offset: %d\n", method->name, ainfo->vtsize, ainfo->reg, inst->inst_offset);*/
5250 g_assert_not_reached ();
5255 if (method->save_lmf) {
5256 if (cfg->compile_aot) {
5257 /* Compute the got address which is needed by the PLT entry */
5258 code = mono_arch_emit_load_got_addr (cfg->native_code, code, cfg, NULL);
5260 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_INTERNAL_METHOD,
5261 (gpointer)"mono_tls_get_lmf_addr");
5262 if ((FORCE_INDIR_CALL || cfg->method->dynamic) && !cfg->compile_aot) {
5263 ppc_load_func (code, PPC_CALL_REG, 0);
5264 ppc_mtlr (code, PPC_CALL_REG);
5269 /* we build the MonoLMF structure on the stack - see mini-ppc.h */
5270 /* lmf_offset is the offset from the previous stack pointer,
5271 * alloc_size is the total stack space allocated, so the offset
5272 * of MonoLMF from the current stack ptr is alloc_size - lmf_offset.
5273 * The pointer to the struct is put in ppc_r12 (new_lmf).
5274 * The callee-saved registers are already in the MonoLMF structure
5276 ppc_addi (code, ppc_r12, ppc_sp, alloc_size - lmf_offset);
5277 /* ppc_r3 is the result from mono_get_lmf_addr () */
5278 ppc_stptr (code, ppc_r3, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r12);
5279 /* new_lmf->previous_lmf = *lmf_addr */
5280 ppc_ldptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
5281 ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r12);
5282 /* *(lmf_addr) = r12 */
5283 ppc_stptr (code, ppc_r12, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r3);
5284 /* save method info */
5285 if (cfg->compile_aot)
5287 ppc_load (code, ppc_r0, 0);
5289 ppc_load_ptr (code, ppc_r0, method);
5290 ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, method), ppc_r12);
5291 ppc_stptr (code, ppc_sp, G_STRUCT_OFFSET(MonoLMF, ebp), ppc_r12);
5292 /* save the current IP */
5293 if (cfg->compile_aot) {
5295 ppc_mflr (code, ppc_r0);
5297 mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_IP, NULL);
5298 #ifdef __mono_ppc64__
5299 ppc_load_sequence (code, ppc_r0, (guint64)0x0101010101010101LL);
5301 ppc_load_sequence (code, ppc_r0, (gulong)0x01010101L);
5304 ppc_stptr (code, ppc_r0, G_STRUCT_OFFSET(MonoLMF, eip), ppc_r12);
5308 code = mono_arch_instrument_prolog (cfg, mono_trace_enter_method, code, TRUE);
5310 cfg->code_len = code - cfg->native_code;
5311 g_assert (cfg->code_len <= cfg->code_size);
/*
 * mono_arch_emit_epilog:
 *
 *   Emit the method epilog: optionally instrument the method leave for
 * tracing, unlink the MonoLMF frame when method->save_lmf is set, restore
 * the callee-saved integer registers, reload the saved return address
 * into LR and pop the stack frame.
 *   NOTE(review): this listing is elided (brace/blank lines were dropped
 * from the dump); the comments below describe only the visible lines.
 */
5318 mono_arch_emit_epilog (MonoCompile *cfg)
5320 	MonoMethod *method = cfg->method;
5322 	int max_epilog_size = 16 + 20*4;
/* Grow the size estimate for the optional LMF restore / tracing code. */
5325 	if (cfg->method->save_lmf)
5326 		max_epilog_size += 128;
5328 	if (mono_jit_trace_calls != NULL)
5329 		max_epilog_size += 50;
5331 	if (cfg->prof_options & MONO_PROFILE_ENTER_LEAVE)
5332 		max_epilog_size += 50;
/* Make sure the code buffer can hold the epilog (keep a 16-byte margin). */
5334 	while (cfg->code_len + max_epilog_size > (cfg->code_size - 16)) {
5335 		cfg->code_size *= 2;
5336 		cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
5337 		cfg->stat_code_reallocs++;
5341 	 * Keep in sync with OP_JMP
5343 	code = cfg->native_code + cfg->code_len;
5345 	if (mono_jit_trace_calls != NULL && mono_trace_eval (method)) {
5346 		code = mono_arch_instrument_epilog (cfg, mono_trace_leave_method, code, TRUE);
5350 	if (method->save_lmf) {
5352 		pos += sizeof (MonoLMF);
5354 		/* save the frame reg in r8 */
5355 		ppc_mr (code, ppc_r8, cfg->frame_reg);
/* r12 = address of the MonoLMF struct inside this frame. */
5356 		ppc_addi (code, ppc_r12, cfg->frame_reg, cfg->stack_usage - lmf_offset);
5357 		/* r5 = previous_lmf */
5358 		ppc_ldptr (code, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r12);
/* r6 = lmf_addr, i.e. the per-thread LMF slot to restore. */
5360 		ppc_ldptr (code, ppc_r6, G_STRUCT_OFFSET(MonoLMF, lmf_addr), ppc_r12);
5361 		/* *(lmf_addr) = previous_lmf */
5362 		ppc_stptr (code, ppc_r5, G_STRUCT_OFFSET(MonoLMF, previous_lmf), ppc_r6);
5363 		/* FIXME: speedup: there is no actual need to restore the registers if
5364 		 * we didn't actually change them (idea from Zoltan).
/* Restore r13..r31 in one go from the saved LMF iregs area. */
5367 		ppc_ldr_multiple (code, ppc_r13, G_STRUCT_OFFSET(MonoLMF, iregs), ppc_r12);
5369 		/*for (i = 14; i < 32; i++) {
5370 			ppc_lfd (code, i, G_STRUCT_OFFSET(MonoLMF, fregs) + ((i-14) * sizeof (gdouble)), ppc_r12);
5372 		g_assert (ppc_is_imm16 (cfg->stack_usage + PPC_RET_ADDR_OFFSET));
5373 		/* use the saved copy of the frame reg in r8 */
5374 		if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
5375 			ppc_ldr (code, ppc_r0, cfg->stack_usage + PPC_RET_ADDR_OFFSET, ppc_r8);
5376 			ppc_mtlr (code, ppc_r0);
/* Pop the frame: sp = saved frame reg + stack_usage. */
5378 		ppc_addic (code, ppc_sp, ppc_r8, cfg->stack_usage);
/* Non-LMF path: restore LR, then the used callee-saved registers. */
5380 		if (1 || cfg->flags & MONO_CFG_HAS_CALLS) {
5381 			long return_offset = cfg->stack_usage + PPC_RET_ADDR_OFFSET;
5382 			if (ppc_is_imm16 (return_offset)) {
5383 				ppc_ldr (code, ppc_r0, return_offset, cfg->frame_reg);
5385 				ppc_load (code, ppc_r12, return_offset);
5386 				ppc_ldr_indexed (code, ppc_r0, cfg->frame_reg, ppc_r12);
5388 			ppc_mtlr (code, ppc_r0);
5390 		if (ppc_is_imm16 (cfg->stack_usage)) {
5391 			int offset = cfg->stack_usage;
/* First pass: compute the offset of the register save area. */
5392 			for (i = 13; i <= 31; i++) {
5393 				if (cfg->used_int_regs & (1 << i))
5394 					offset -= sizeof (mgreg_t);
/* Keep the frame pointer reachable in r12 while we clobber r13..r31. */
5396 			if (cfg->frame_reg != ppc_sp)
5397 				ppc_mr (code, ppc_r12, cfg->frame_reg);
5398 			/* note r31 (possibly the frame register) is restored last */
5399 			for (i = 13; i <= 31; i++) {
5400 				if (cfg->used_int_regs & (1 << i)) {
5401 					ppc_ldr (code, i, offset, cfg->frame_reg);
5402 					offset += sizeof (mgreg_t);
5405 			if (cfg->frame_reg != ppc_sp)
5406 				ppc_addi (code, ppc_sp, ppc_r12, cfg->stack_usage);
5408 				ppc_addi (code, ppc_sp, ppc_sp, cfg->stack_usage);
/* Large frame: stack_usage does not fit in a 16-bit immediate. */
5410 			ppc_load32 (code, ppc_r12, cfg->stack_usage);
5411 			if (cfg->used_int_regs) {
5412 				ppc_add (code, ppc_r12, cfg->frame_reg, ppc_r12);
/* Restore downwards from the caller sp; registers were saved below it. */
5413 				for (i = 31; i >= 13; --i) {
5414 					if (cfg->used_int_regs & (1 << i)) {
5415 						pos += sizeof (mgreg_t);
5416 						ppc_ldr (code, i, -pos, ppc_r12);
5419 				ppc_mr (code, ppc_sp, ppc_r12);
5421 				ppc_add (code, ppc_sp, cfg->frame_reg, ppc_r12);
5428 	cfg->code_len = code - cfg->native_code;
5430 	g_assert (cfg->code_len < cfg->code_size);
5433 #endif /* ifndef DISABLE_JIT */
5435 /* remove once throw_exception_by_name is eliminated */
5437 exception_id_by_name (const char *name)
5439 if (strcmp (name, "IndexOutOfRangeException") == 0)
5440 return MONO_EXC_INDEX_OUT_OF_RANGE;
5441 if (strcmp (name, "OverflowException") == 0)
5442 return MONO_EXC_OVERFLOW;
5443 if (strcmp (name, "ArithmeticException") == 0)
5444 return MONO_EXC_ARITHMETIC;
5445 if (strcmp (name, "DivideByZeroException") == 0)
5446 return MONO_EXC_DIVIDE_BY_ZERO;
5447 if (strcmp (name, "InvalidCastException") == 0)
5448 return MONO_EXC_INVALID_CAST;
5449 if (strcmp (name, "NullReferenceException") == 0)
5450 return MONO_EXC_NULL_REF;
5451 if (strcmp (name, "ArrayTypeMismatchException") == 0)
5452 return MONO_EXC_ARRAY_TYPE_MISMATCH;
5453 if (strcmp (name, "ArgumentException") == 0)
5454 return MONO_EXC_ARGUMENT;
5455 g_error ("Unknown intrinsic exception %s\n", name);
/*
 * mono_arch_emit_exceptions:
 *
 *   Emit the out-of-line exception-raising stubs for the method and patch
 * the in-body branches that target them. Works in two passes: first size
 * the extra code needed per pending patch, then emit it.
 *   NOTE(review): this listing is elided (brace/blank lines dropped from
 * the dump); comments describe only the visible lines.
 */
5461 mono_arch_emit_exceptions (MonoCompile *cfg)
5463 	MonoJumpInfo *patch_info;
/* Per-intrinsic-exception bookkeeping: position of an already-emitted
 * throw stub, and whether its size was already accounted for. */
5466 	guint8* exc_throw_pos [MONO_EXC_INTRINS_NUM];
5467 	guint8 exc_throw_found [MONO_EXC_INTRINS_NUM];
5468 	int max_epilog_size = 50;
5470 	for (i = 0; i < MONO_EXC_INTRINS_NUM; i++) {
5471 		exc_throw_pos [i] = NULL;
5472 		exc_throw_found [i] = 0;
5475 	/* count the number of exception infos */
5478 	 * make sure we have enough space for exceptions
5480 	for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
5481 		if (patch_info->type == MONO_PATCH_INFO_EXC) {
5482 			i = exception_id_by_name (patch_info->data.target);
/* Each distinct exception gets one shared throw stub; count it once. */
5483 			if (!exc_throw_found [i]) {
5484 				max_epilog_size += (2 * PPC_LOAD_SEQUENCE_LENGTH) + 5 * 4;
5485 				exc_throw_found [i] = TRUE;
5487 		} else if (patch_info->type == MONO_PATCH_INFO_BB_OVF)
5488 			max_epilog_size += 12;
5489 		else if (patch_info->type == MONO_PATCH_INFO_EXC_OVF) {
5490 			MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
5491 			i = exception_id_by_name (ovfj->data.exception);
5492 			if (!exc_throw_found [i]) {
5493 				max_epilog_size += (2 * PPC_LOAD_SEQUENCE_LENGTH) + 5 * 4;
5494 				exc_throw_found [i] = TRUE;
5496 			max_epilog_size += 8;
/* Grow the code buffer if needed (keep a 16-byte margin). */
5500 	while (cfg->code_len + max_epilog_size > (cfg->code_size - 16)) {
5501 		cfg->code_size *= 2;
5502 		cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
5503 		cfg->stat_code_reallocs++;
5506 	code = cfg->native_code + cfg->code_len;
5508 	/* add code to raise exceptions */
5509 	for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
5510 		switch (patch_info->type) {
5511 		case MONO_PATCH_INFO_BB_OVF: {
5512 			MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
5513 			unsigned char *ip = patch_info->ip.i + cfg->native_code;
5514 			/* patch the initial jump */
5515 			ppc_patch (ip, code);
5516 			ppc_bc (code, ovfj->b0_cond, ovfj->b1_cond, 2);
5518 			ppc_patch (code - 4, ip + 4); /* jump back after the initial branch */
5519 			/* jump back to the true target */
5521 			ip = ovfj->data.bb->native_offset + cfg->native_code;
5522 			ppc_patch (code - 4, ip);
5523 			patch_info->type = MONO_PATCH_INFO_NONE;
5526 		case MONO_PATCH_INFO_EXC_OVF: {
5527 			MonoOvfJump *ovfj = (MonoOvfJump*)patch_info->data.target;
5528 			MonoJumpInfo *newji;
5529 			unsigned char *ip = patch_info->ip.i + cfg->native_code;
5530 			unsigned char *bcl = code;
5531 			/* patch the initial jump: we arrived here with a call */
5532 			ppc_patch (ip, code);
5533 			ppc_bc (code, ovfj->b0_cond, ovfj->b1_cond, 0);
5535 			ppc_patch (code - 4, ip + 4); /* jump back after the initial branch */
5536 			/* patch the conditional jump to the right handler */
5537 			/* make it processed next */
/* Queue a synthetic MONO_PATCH_INFO_EXC right after this entry so the
 * conditional branch emitted above gets wired to the throw stub. */
5538 			newji = mono_mempool_alloc (cfg->mempool, sizeof (MonoJumpInfo));
5539 			newji->type = MONO_PATCH_INFO_EXC;
5540 			newji->ip.i = bcl - cfg->native_code;
5541 			newji->data.target = ovfj->data.exception;
5542 			newji->next = patch_info->next;
5543 			patch_info->next = newji;
5544 			patch_info->type = MONO_PATCH_INFO_NONE;
5547 		case MONO_PATCH_INFO_EXC: {
5548 			MonoClass *exc_class;
5550 			unsigned char *ip = patch_info->ip.i + cfg->native_code;
5551 			i = exception_id_by_name (patch_info->data.target);
/* Reuse an existing stub for the same exception if it is still within
 * conditional-branch reach (~50000 bytes); otherwise emit a new one. */
5552 			if (exc_throw_pos [i] && !(ip > exc_throw_pos [i] && ip - exc_throw_pos [i] > 50000)) {
5553 				ppc_patch (ip, exc_throw_pos [i]);
5554 				patch_info->type = MONO_PATCH_INFO_NONE;
5557 			exc_throw_pos [i] = code;
5560 			exc_class = mono_class_load_from_name (mono_defaults.corlib, "System", patch_info->data.name);
5562 			ppc_patch (ip, code);
5563 			/*mono_add_patch_info (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC_NAME, patch_info->data.target);*/
/* r3 = exception type token (first argument of the throw helper). */
5564 			ppc_load (code, ppc_r3, exc_class->type_token);
5565 			/* we got here from a conditional call, so the calling ip is set in lr */
5566 			ppc_mflr (code, ppc_r4);
/* Re-purpose this patch entry as a call to the throw helper. */
5567 			patch_info->type = MONO_PATCH_INFO_INTERNAL_METHOD;
5568 			patch_info->data.name = "mono_arch_throw_corlib_exception";
5569 			patch_info->ip.i = code - cfg->native_code;
5570 			if (FORCE_INDIR_CALL || cfg->method->dynamic) {
5571 				ppc_load_func (code, PPC_CALL_REG, 0);
5572 				ppc_mtctr (code, PPC_CALL_REG);
5573 				ppc_bcctr (code, PPC_BR_ALWAYS, 0);
5585 	cfg->code_len = code - cfg->native_code;
5587 	g_assert (cfg->code_len <= cfg->code_size);
/*
 * try_offset_access:
 *
 *   Debug/probe helper: reads a doubly-indexed slot at a fixed offset
 * (284) from whatever r2 points at and compares it against VALUE.
 *   NOTE(review): presumably probing a TLS/TOC layout — the elided lines
 * (return statements) are not visible in this listing; confirm against
 * the full source before relying on its result.
 */
5593 try_offset_access (void *value, guint32 idx)
5595 	register void* me __asm__ ("r2");
5596 	void ***p = (void***)((char*)me + 284);
/* idx selects a 32-entry page (idx1) and a slot within it (idx2). */
5597 	int idx1 = idx / 32;
5598 	int idx2 = idx % 32;
5601 	if (value != p[idx1][idx2])
/* Arch-init hook called once the runtime is up; body elided here. */
5608 mono_arch_finish_init (void)
/* Per-thread JIT TLS teardown hook; body elided here. */
5613 mono_arch_free_jit_tls_data (MonoJitTlsData *tls)
/* Size constants (in bytes) used to pre-compute IMT trampoline chunks:
 * compare, load/store, short jump and 32-bit-immediate jump sequences. */
5617 #define CMP_SIZE (PPC_LOAD_SEQUENCE_LENGTH + 4)
5619 #define LOADSTORE_SIZE 4
5620 #define JUMP_IMM_SIZE 12
5621 #define JUMP_IMM32_SIZE (PPC_LOAD_SEQUENCE_LENGTH + 8)
5622 #define ENABLE_WRONG_METHOD_CHECK 0
/*
 * mono_arch_build_imt_trampoline:
 *
 *   Build the native IMT/interface dispatch thunk for VTABLE: a chain of
 * compare-and-branch chunks over IMT_ENTRIES, each loading the target
 * from the vtable slot (or jumping to precomputed target code), with an
 * optional FAIL_TRAMP fallback. Three passes: size each chunk, emit the
 * code, then back-patch the inter-chunk branches.
 *   NOTE(review): this listing is elided (brace/blank lines dropped);
 * comments describe only the visible lines.
 *
 * LOCKING: called with the domain lock held
 */
5628 mono_arch_build_imt_trampoline (MonoVTable *vtable, MonoDomain *domain, MonoIMTCheckItem **imt_entries, int count,
5629 	gpointer fail_tramp)
5633 	guint8 *code, *start;
/* Pass 1: compute the chunk size for every entry so we can reserve the
 * exact amount of executable memory up front. */
5635 	for (i = 0; i < count; ++i) {
5636 		MonoIMTCheckItem *item = imt_entries [i];
5637 		if (item->is_equals) {
5638 			if (item->check_target_idx) {
5639 				if (!item->compare_done)
5640 					item->chunk_size += CMP_SIZE;
5641 				if (item->has_target_code)
5642 					item->chunk_size += BR_SIZE + JUMP_IMM32_SIZE;
5644 					item->chunk_size += LOADSTORE_SIZE + BR_SIZE + JUMP_IMM_SIZE;
/* Fallback-capable entry: needs both the hit path and the fail path. */
5647 				item->chunk_size += CMP_SIZE + BR_SIZE + JUMP_IMM32_SIZE * 2;
5648 				if (!item->has_target_code)
5649 					item->chunk_size += LOADSTORE_SIZE;
5651 				item->chunk_size += LOADSTORE_SIZE + JUMP_IMM_SIZE;
5652 #if ENABLE_WRONG_METHOD_CHECK
5653 				item->chunk_size += CMP_SIZE + BR_SIZE + 4;
/* Range-check node of the binary search tree. */
5658 			item->chunk_size += CMP_SIZE + BR_SIZE;
5659 			imt_entries [item->check_target_idx]->compare_done = TRUE;
5661 		size += item->chunk_size;
5663 	/* the initial load of the vtable address */
5664 	size += PPC_LOAD_SEQUENCE_LENGTH + LOADSTORE_SIZE;
5666 		code = mono_method_alloc_generic_virtual_trampoline (domain, size);
5668 		code = mono_domain_code_reserve (domain, size);
5673 	 * We need to save and restore r12 because it might be
5674 	 * used by the caller as the vtable register, so
5675 	 * clobbering it will trip up the magic trampoline.
5677 	 * FIXME: Get rid of this by making sure that r12 is
5678 	 * not used as the vtable register in interface calls.
5680 	ppc_stptr (code, ppc_r12, PPC_RET_ADDR_OFFSET, ppc_sp);
/* r12 = &vtable->vtable[0]; slots are indexed off it below. */
5681 	ppc_load (code, ppc_r12, (gsize)(& (vtable->vtable [0])));
/* Pass 2: emit the chunks. */
5683 	for (i = 0; i < count; ++i) {
5684 		MonoIMTCheckItem *item = imt_entries [i];
5685 		item->code_target = code;
5686 		if (item->is_equals) {
5687 			if (item->check_target_idx) {
5688 				if (!item->compare_done) {
5689 					ppc_load (code, ppc_r0, (gsize)item->key);
5690 					ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
5692 				item->jmp_code = code;
5693 				ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
5694 				if (item->has_target_code) {
5695 					ppc_load_ptr (code, ppc_r0, item->value.target_code);
5697 					ppc_ldptr (code, ppc_r0, (sizeof (gpointer) * item->value.vtable_slot), ppc_r12);
/* Restore the caller's r12 before jumping to the target. */
5698 					ppc_ldptr (code, ppc_r12, PPC_RET_ADDR_OFFSET, ppc_sp);
5700 				ppc_mtctr (code, ppc_r0);
5701 				ppc_bcctr (code, PPC_BR_ALWAYS, 0);
/* Entry with a fail trampoline: compare, hit path, then fail path. */
5704 				ppc_load (code, ppc_r0, (gulong)item->key);
5705 				ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
5706 				item->jmp_code = code;
5707 				ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
5708 				if (item->has_target_code) {
5709 					ppc_load_ptr (code, ppc_r0, item->value.target_code);
5712 					ppc_load_ptr (code, ppc_r0, & (vtable->vtable [item->value.vtable_slot]));
5713 					ppc_ldptr_indexed (code, ppc_r0, 0, ppc_r0);
5715 				ppc_mtctr (code, ppc_r0);
5716 				ppc_bcctr (code, PPC_BR_ALWAYS, 0);
/* Key mismatch lands here: jump to the fail trampoline. */
5717 				ppc_patch (item->jmp_code, code);
5718 				ppc_load_ptr (code, ppc_r0, fail_tramp);
5719 				ppc_mtctr (code, ppc_r0);
5720 				ppc_bcctr (code, PPC_BR_ALWAYS, 0);
5721 				item->jmp_code = NULL;
5723 				/* enable the commented code to assert on wrong method */
5724 #if ENABLE_WRONG_METHOD_CHECK
5725 				ppc_load (code, ppc_r0, (guint32)item->key);
5726 				ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
5727 				item->jmp_code = code;
5728 				ppc_bc (code, PPC_BR_FALSE, PPC_BR_EQ, 0);
5730 				ppc_ldptr (code, ppc_r0, (sizeof (gpointer) * item->value.vtable_slot), ppc_r12);
5731 				ppc_ldptr (code, ppc_r12, PPC_RET_ADDR_OFFSET, ppc_sp);
5732 				ppc_mtctr (code, ppc_r0);
5733 				ppc_bcctr (code, PPC_BR_ALWAYS, 0);
5734 #if ENABLE_WRONG_METHOD_CHECK
5735 				ppc_patch (item->jmp_code, code);
5737 				item->jmp_code = NULL;
/* Comparison node: branch to a later chunk when key is less-than. */
5742 			ppc_load (code, ppc_r0, (gulong)item->key);
5743 			ppc_compare_log (code, 0, MONO_ARCH_IMT_REG, ppc_r0);
5744 			item->jmp_code = code;
5745 			ppc_bc (code, PPC_BR_FALSE, PPC_BR_LT, 0);
5748 	/* patch the branches to get to the target items */
5749 	for (i = 0; i < count; ++i) {
5750 		MonoIMTCheckItem *item = imt_entries [i];
5751 		if (item->jmp_code) {
5752 			if (item->check_target_idx) {
5753 				ppc_patch (item->jmp_code, imt_entries [item->check_target_idx]->code_target);
5759 	mono_stats.imt_trampolines_size += code - start;
5760 	g_assert (code - start <= size);
5761 	mono_arch_flush_icache (start, size);
5763 	mono_tramp_info_register (mono_tramp_info_create (NULL, start, code - start, NULL, NULL), domain);
5769 mono_arch_find_imt_method (mgreg_t *regs, guint8 *code)
5771 mgreg_t *r = (mgreg_t*)regs;
5773 return (MonoMethod*)(gsize) r [MONO_ARCH_IMT_REG];
5777 mono_arch_find_static_call_vtable (mgreg_t *regs, guint8 *code)
5779 mgreg_t *r = (mgreg_t*)regs;
5781 return (MonoVTable*)(gsize) r [MONO_ARCH_RGCTX_REG];
/*
 * mono_arch_get_cie_program:
 *
 *   Return the unwind ops describing the initial frame state: the CFA is
 * r1 (the stack pointer) with offset 0. Body partially elided here.
 */
5785 mono_arch_get_cie_program (void)
5789 	mono_add_unwind_op_def_cfa (l, (guint8*)NULL, (guint8*)NULL, ppc_r1, 0);
/* Arch hook for method-to-instruction intrinsics; body elided here. */
5795 mono_arch_emit_inst_for_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
/* Debug helper for dumping instruction trees; body elided here. */
5802 mono_arch_print_tree (MonoInst *tree, int arity)
/*
 * mono_arch_context_get_int_reg:
 *   Fetch an integer register value from a saved MonoContext; the stack
 * pointer is special-cased (the selecting condition is elided here).
 */
5808 mono_arch_context_get_int_reg (MonoContext *ctx, int reg)
5811 		return (mgreg_t)MONO_CONTEXT_GET_SP (ctx);
5813 	return ctx->regs [reg];
/* Offset of the patchable instruction within a call site; body elided. */
5817 mono_arch_get_patch_offset (guint8 *code)
5823  * mono_arch_emit_load_got_addr:
5825  * Emit code to load the got address.
5826  * On PPC, the result is placed into r30.
5829 mono_arch_emit_load_got_addr (guint8 *start, guint8 *code, MonoCompile *cfg, MonoJumpInfo **ji)
/* r30 = current ip (LR was set up by the surrounding call sequence). */
5832 	ppc_mflr (code, ppc_r30);
/* Record the GOT-offset patch either on the cfg or on the ji list. */
5834 		mono_add_patch_info (cfg, code - start, MONO_PATCH_INFO_GOT_OFFSET, NULL);
5836 		*ji = mono_patch_info_list_prepend (*ji, code - start, MONO_PATCH_INFO_GOT_OFFSET, NULL);
5837 	/* arch_emit_got_address () patches this */
5838 #if defined(TARGET_POWERPC64)
/* Placeholder offset, patched by the AOT compiler; r30 += got offset. */
5844 	ppc_load32 (code, ppc_r0, 0);
5845 	ppc_add (code, ppc_r30, ppc_r30, ppc_r0);
5852  * mono_arch_emit_load_aotconst:
5854  * Emit code to load the contents of the GOT slot identified by TRAMP_TYPE and
5855  * TARGET from the mscorlib GOT in full-aot code.
5856  * On PPC, the GOT address is assumed to be in r30, and the result is placed into
5860 mono_arch_emit_load_aotconst (guint8 *start, guint8 *code, MonoJumpInfo **ji, MonoJumpInfoType tramp_type, gconstpointer target)
5862 	/* Load the mscorlib got address */
5863 	ppc_ldptr (code, ppc_r12, sizeof (gpointer), ppc_r30);
/* Register the patch so the AOT compiler can fix the slot offset below. */
5864 	*ji = mono_patch_info_list_prepend (*ji, code - start, tramp_type, target);
5865 	/* arch_emit_got_access () patches this */
/* Placeholder slot offset; load the GOT entry: r12 = *(r12 + r0). */
5866 	ppc_load32 (code, ppc_r0, 0);
5867 	ppc_ldptr_indexed (code, ppc_r12, ppc_r12, ppc_r0);
5872 /* Soft Debug support */
5873 #ifdef MONO_ARCH_SOFT_DEBUG_SUPPORTED
5880  * mono_arch_set_breakpoint:
5882  * See mini-amd64.c for docs.
5885 mono_arch_set_breakpoint (MonoJitInfo *ji, guint8 *ip)
5888 	guint8 *orig_code = code;
/* Overwrite the sequence-point site with a load from the breakpoint
 * trigger page; the load faults when breakpoints are armed. */
5890 	ppc_load_sequence (code, ppc_r12, (gsize)bp_trigger_page);
5891 	ppc_ldptr (code, ppc_r12, 0, ppc_r12);
5893 	g_assert (code - orig_code == BREAKPOINT_SIZE);
5895 	mono_arch_flush_icache (orig_code, code - orig_code);
5899  * mono_arch_clear_breakpoint:
5901  * See mini-amd64.c for docs.
5904 mono_arch_clear_breakpoint (MonoJitInfo *ji, guint8 *ip)
/* Replace each 4-byte instruction of the breakpoint sequence
 * (elided here — presumably with nops; confirm in full source). */
5909 	for (i = 0; i < BREAKPOINT_SIZE / 4; ++i)
5912 	mono_arch_flush_icache (ip, code - ip);
5916  * mono_arch_is_breakpoint_event:
5918  * See mini-amd64.c for docs.
5921 mono_arch_is_breakpoint_event (void *info, void *sigctx)
5923 	siginfo_t* sinfo = (siginfo_t*) info;
5924 	/* Sometimes the address is off by 4 */
/* SIGSEGV within the trigger page (plus slack) means a breakpoint hit. */
5925 	if (sinfo->si_addr >= bp_trigger_page && (guint8*)sinfo->si_addr <= (guint8*)bp_trigger_page + 128)
5932  * mono_arch_skip_breakpoint:
5934  * See mini-amd64.c for docs.
5937 mono_arch_skip_breakpoint (MonoContext *ctx, MonoJitInfo *ji)
5939 	/* skip the ldptr */
5940 	MONO_CONTEXT_SET_IP (ctx, (guint8*)MONO_CONTEXT_GET_IP (ctx) + 4);
5948  * mono_arch_start_single_stepping:
5950  * See mini-amd64.c for docs.
5953 mono_arch_start_single_stepping (void)
/* Revoke all access: the sequence-point reads of ss_trigger_page fault. */
5955 	mono_mprotect (ss_trigger_page, mono_pagesize (), 0);
5959  * mono_arch_stop_single_stepping:
5961  * See mini-amd64.c for docs.
5964 mono_arch_stop_single_stepping (void)
/* Make the page readable again so sequence-point reads are harmless. */
5966 	mono_mprotect (ss_trigger_page, mono_pagesize (), MONO_MMAP_READ);
5970  * mono_arch_is_single_step_event:
5972  * See mini-amd64.c for docs.
5975 mono_arch_is_single_step_event (void *info, void *sigctx)
5977 	siginfo_t* sinfo = (siginfo_t*) info;
5978 	/* Sometimes the address is off by 4 */
5979 	if (sinfo->si_addr >= ss_trigger_page && (guint8*)sinfo->si_addr <= (guint8*)ss_trigger_page + 128)
5986  * mono_arch_skip_single_step:
5988  * See mini-amd64.c for docs.
5991 mono_arch_skip_single_step (MonoContext *ctx)
5993 	/* skip the ldptr */
5994 	MONO_CONTEXT_SET_IP (ctx, (guint8*)MONO_CONTEXT_GET_IP (ctx) + 4);
5998  * mono_arch_get_seq_point_info:
6000  * See mini-amd64.c for docs.
6003 mono_arch_get_seq_point_info (MonoDomain *domain, guint8 *code)
/*
 * mono_arch_init_lmf_ext:
 *   Initialize an extended LMF frame and link it into the LMF chain.
 */
6010 mono_arch_init_lmf_ext (MonoLMFExt *ext, gpointer prev_lmf)
6012 	ext->lmf.previous_lmf = prev_lmf;
6013 	/* Mark that this is a MonoLMFExt */
/* Bit 1 of previous_lmf tags the entry as a MonoLMFExt for the unwinder. */
6014 	ext->lmf.previous_lmf = (gpointer)(((gssize)ext->lmf.previous_lmf) | 2);
6015 	ext->lmf.ebp = (gssize)ext;
/*
 * mono_arch_opcode_supported:
 *
 *   Report whether the backend implements OPCODE natively. 32-bit atomic
 * add/CAS are always available; the 64-bit variants only on ppc64.
 * (Return statements are elided in this listing.)
 */
6021 mono_arch_opcode_supported (int opcode)
6024 	case OP_ATOMIC_ADD_I4:
6025 	case OP_ATOMIC_CAS_I4:
6026 #ifdef TARGET_POWERPC64
6027 	case OP_ATOMIC_ADD_I8:
6028 	case OP_ATOMIC_CAS_I8:
6038 // FIXME: To get the test case finally_block_ending_in_dead_bb to work properly we need to define the following
6039 // (in mini-ppc.h) and then implement the fuction mono_arch_create_handler_block_trampoline.
6040 // #define MONO_ARCH_HAVE_HANDLER_BLOCK_GUARD 1
6043 mono_arch_create_handler_block_trampoline (void)