 * ARM64 backend for the Mono code generator
 *
 * Copyright 2013 Xamarin, Inc (http://www.xamarin.com)
 *
 *   Paolo Molaro (lupus@ximian.com)
 *   Dietmar Maurer (dietmar@ximian.com)
 *
 * (C) 2003 Ximian, Inc.
 * Copyright 2003-2011 Novell, Inc (http://www.novell.com)
 * Copyright 2011 Xamarin, Inc (http://www.xamarin.com)
 * Licensed under the MIT license. See LICENSE file in the project root for full license information.

#include "cpu-arm64.h"

#include <mono/arch/arm64/arm64-codegen.h>
#include <mono/utils/mono-mmap.h>
#include <mono/utils/mono-memory-model.h>
#include <mono/metadata/abi-details.h>

 * Documentation:
 * - ARM(R) Architecture Reference Manual, ARMv8, for ARMv8-A architecture profile (DDI0487A_a_armv8_arm.pdf)
 * - Procedure Call Standard for the ARM 64-bit Architecture (AArch64) (IHI0055B_aapcs64.pdf)
 * - ELF for the ARM 64-bit Architecture (IHI0056B_aaelf64.pdf)
 *
 * Register usage:
 * - ip0/ip1/lr are used as temporary registers
 * - r27 is used as the rgctx/imt register
 * - r28 is used to access arguments passed on the stack
 * - d16/d17 are used as fp temporary registers (see FP_TEMP_REG/FP_TEMP_REG2 below)

#define ALIGN_TO(val,align) ((((guint64)val) + ((align) - 1)) & ~((align) - 1))

#define FP_TEMP_REG ARMREG_D16
#define FP_TEMP_REG2 ARMREG_D17

#define THUNK_SIZE (4 * 4)
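
/*
 * Each thunk is two instructions (ldr ip0, <pc+8>; br ip0) followed by the
 * 8-byte target address, 16 bytes in total; see emit_thunk () below.
 */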

/* The single step trampoline */
static gpointer ss_trampoline;

/* The breakpoint trampoline */
static gpointer bp_trampoline;

static gboolean ios_abi;

static __attribute__ ((__warn_unused_result__)) guint8* emit_load_regset (guint8 *code, guint64 regs, int basereg, int offset);

mono_arch_regname (int reg)
	static const char * rnames[] = {
		"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9",
		"r10", "r11", "r12", "r13", "r14", "r15", "r16", "r17", "r18", "r19",
		"r20", "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "fp",
	if (reg >= 0 && reg < 32)

mono_arch_fregname (int reg)
	static const char * rnames[] = {
		"d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7", "d8", "d9",
		"d10", "d11", "d12", "d13", "d14", "d15", "d16", "d17", "d18", "d19",
		"d20", "d21", "d22", "d23", "d24", "d25", "d26", "d27", "d28", "d29",
	if (reg >= 0 && reg < 32)

mono_arch_get_argument_info (MonoMethodSignature *csig, int param_count, MonoJitArgumentInfo *arg_info)

#define MAX_ARCH_DELEGATE_PARAMS 7

get_delegate_invoke_impl (gboolean has_target, int param_count, guint32 *code_size)
	start = code = mono_global_codeman_reserve (12);

	/* Replace the this argument with the target */
	arm_ldrx (code, ARMREG_IP0, ARMREG_R0, MONO_STRUCT_OFFSET (MonoDelegate, method_ptr));
	arm_ldrx (code, ARMREG_R0, ARMREG_R0, MONO_STRUCT_OFFSET (MonoDelegate, target));
	arm_brx (code, ARMREG_IP0);

	g_assert ((code - start) <= 12);

	mono_arch_flush_icache (start, 12);

	size = 8 + param_count * 4;
	start = code = mono_global_codeman_reserve (size);

	arm_ldrx (code, ARMREG_IP0, ARMREG_R0, MONO_STRUCT_OFFSET (MonoDelegate, method_ptr));
	/* slide down the arguments */
	for (i = 0; i < param_count; ++i)
		arm_movx (code, i, i + 1);
	arm_brx (code, ARMREG_IP0);

	g_assert ((code - start) <= size);

	mono_arch_flush_icache (start, size);

	*code_size = code - start;

 * mono_arch_get_delegate_invoke_impls:
 *
 *   Return a list of MonoAotTrampInfo structures for the delegate invoke impl
mono_arch_get_delegate_invoke_impls (void)
	code = get_delegate_invoke_impl (TRUE, 0, &code_len);
	res = g_slist_prepend (res, mono_tramp_info_create ("delegate_invoke_impl_has_target", code, code_len, NULL, NULL));

	for (i = 0; i <= MAX_ARCH_DELEGATE_PARAMS; ++i) {
		code = get_delegate_invoke_impl (FALSE, i, &code_len);
		tramp_name = g_strdup_printf ("delegate_invoke_impl_target_%d", i);
		res = g_slist_prepend (res, mono_tramp_info_create (tramp_name, code, code_len, NULL, NULL));

mono_arch_get_delegate_invoke_impl (MonoMethodSignature *sig, gboolean has_target)
	guint8 *code, *start;

	/*
	 * vtypes are returned in registers, or using the dedicated r8 register, so
	 * they can be supported by delegate invokes.
	 */

		static guint8* cached = NULL;

		start = mono_aot_get_trampoline ("delegate_invoke_impl_has_target");
		start = get_delegate_invoke_impl (TRUE, 0, NULL);
		mono_memory_barrier ();

		static guint8* cache [MAX_ARCH_DELEGATE_PARAMS + 1] = {NULL};

		if (sig->param_count > MAX_ARCH_DELEGATE_PARAMS)
		for (i = 0; i < sig->param_count; ++i)
			if (!mono_is_regsize_var (sig->params [i]))

		code = cache [sig->param_count];

		char *name = g_strdup_printf ("delegate_invoke_impl_target_%d", sig->param_count);
		start = mono_aot_get_trampoline (name);
		start = get_delegate_invoke_impl (FALSE, sig->param_count, NULL);
		mono_memory_barrier ();
		cache [sig->param_count] = start;

mono_arch_get_delegate_virtual_invoke_impl (MonoMethodSignature *sig, MonoMethod *method, int offset, gboolean load_imt_reg)

mono_arch_get_this_arg_from_call (mgreg_t *regs, guint8 *code)
	return (gpointer)regs [ARMREG_R0];

mono_arch_cpu_init (void)

mono_arch_init (void)
	mono_aot_register_jit_icall ("mono_arm_throw_exception", mono_arm_throw_exception);
	mono_aot_register_jit_icall ("mono_arm_resume_unwind", mono_arm_resume_unwind);
	mono_aot_register_jit_icall ("mono_arm_handler_block_trampoline_helper", mono_arm_handler_block_trampoline_helper);

	bp_trampoline = mini_get_breakpoint_trampoline ();

	mono_arm_gsharedvt_init ();

#if defined(TARGET_IOS)

mono_arch_cleanup (void)

mono_arch_cpu_optimizations (guint32 *exclude_mask)

mono_arch_cpu_enumerate_simd_versions (void)

mono_arch_register_lowlevel_calls (void)

mono_arch_finish_init (void)

/* The maximum length is 2 instructions */
emit_imm (guint8 *code, int dreg, int imm)
	// FIXME: Optimize this
		arm_movnx (code, dreg, (~limm) & 0xffff, 0);
		arm_movkx (code, dreg, (limm >> 16) & 0xffff, 16);
		arm_movzx (code, dreg, imm & 0xffff, 0);
		arm_movkx (code, dreg, (imm >> 16) & 0xffff, 16);
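
	/*
	 * For example, emit_imm (code, dreg, 0x12345) expands to:
	 *   movz dreg, #0x2345
	 *   movk dreg, #0x1, lsl #16
	 */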

/* The maximum length is 4 instructions */
emit_imm64 (guint8 *code, int dreg, guint64 imm)
	// FIXME: Optimize this
	arm_movzx (code, dreg, imm & 0xffff, 0);
	if ((imm >> 16) & 0xffff)
		arm_movkx (code, dreg, (imm >> 16) & 0xffff, 16);
	if ((imm >> 32) & 0xffff)
		arm_movkx (code, dreg, (imm >> 32) & 0xffff, 32);
	if ((imm >> 48) & 0xffff)
		arm_movkx (code, dreg, (imm >> 48) & 0xffff, 48);
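
	/*
	 * For example, imm == 0x0000000100002345 needs only two instructions,
	 * movz dreg, #0x2345 and movk dreg, #0x1, lsl #32; the all-zero
	 * halfwords are skipped.
	 */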

mono_arm_emit_imm64 (guint8 *code, int dreg, gint64 imm)
	return emit_imm64 (code, dreg, imm);

 * Emit a patchable code sequence for constructing a 64 bit immediate.
emit_imm64_template (guint8 *code, int dreg)
	arm_movzx (code, dreg, 0, 0);
	arm_movkx (code, dreg, 0, 16);
	arm_movkx (code, dreg, 0, 32);
	arm_movkx (code, dreg, 0, 48);
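
	/*
	 * All four instructions are always emitted so the sequence has a fixed
	 * size; the zero immediates are filled in later by the MONO_R_ARM64_IMM
	 * case in arm_patch_full ().
	 */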

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_addw_imm (guint8 *code, int dreg, int sreg, int imm)
	if (!arm_is_arith_imm (imm)) {
		code = emit_imm (code, ARMREG_LR, imm);
		arm_addw (code, dreg, sreg, ARMREG_LR);
		arm_addw_imm (code, dreg, sreg, imm);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_addx_imm (guint8 *code, int dreg, int sreg, int imm)
	if (!arm_is_arith_imm (imm)) {
		code = emit_imm (code, ARMREG_LR, imm);
		arm_addx (code, dreg, sreg, ARMREG_LR);
		arm_addx_imm (code, dreg, sreg, imm);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_subw_imm (guint8 *code, int dreg, int sreg, int imm)
	if (!arm_is_arith_imm (imm)) {
		code = emit_imm (code, ARMREG_LR, imm);
		arm_subw (code, dreg, sreg, ARMREG_LR);
		arm_subw_imm (code, dreg, sreg, imm);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_subx_imm (guint8 *code, int dreg, int sreg, int imm)
	if (!arm_is_arith_imm (imm)) {
		code = emit_imm (code, ARMREG_LR, imm);
		arm_subx (code, dreg, sreg, ARMREG_LR);
		arm_subx_imm (code, dreg, sreg, imm);

/* Emit sp+=imm. Clobbers ip0/ip1 */
static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_addx_sp_imm (guint8 *code, int imm)
	code = emit_imm (code, ARMREG_IP0, imm);
	arm_movspx (code, ARMREG_IP1, ARMREG_SP);
	arm_addx (code, ARMREG_IP1, ARMREG_IP1, ARMREG_IP0);
	arm_movspx (code, ARMREG_SP, ARMREG_IP1);

/* Emit sp-=imm. Clobbers ip0/ip1 */
static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_subx_sp_imm (guint8 *code, int imm)
	code = emit_imm (code, ARMREG_IP0, imm);
	arm_movspx (code, ARMREG_IP1, ARMREG_SP);
	arm_subx (code, ARMREG_IP1, ARMREG_IP1, ARMREG_IP0);
	arm_movspx (code, ARMREG_SP, ARMREG_IP1);
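
/*
 * The detour through ip1 above is needed because the shifted-register
 * add/sub encodings treat register 31 as xzr rather than sp; only the
 * immediate forms (and mov, their alias) can address sp directly.
 */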

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_andw_imm (guint8 *code, int dreg, int sreg, int imm)
	code = emit_imm (code, ARMREG_LR, imm);
	arm_andw (code, dreg, sreg, ARMREG_LR);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_andx_imm (guint8 *code, int dreg, int sreg, int imm)
	code = emit_imm (code, ARMREG_LR, imm);
	arm_andx (code, dreg, sreg, ARMREG_LR);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_orrw_imm (guint8 *code, int dreg, int sreg, int imm)
	code = emit_imm (code, ARMREG_LR, imm);
	arm_orrw (code, dreg, sreg, ARMREG_LR);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_orrx_imm (guint8 *code, int dreg, int sreg, int imm)
	code = emit_imm (code, ARMREG_LR, imm);
	arm_orrx (code, dreg, sreg, ARMREG_LR);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_eorw_imm (guint8 *code, int dreg, int sreg, int imm)
	code = emit_imm (code, ARMREG_LR, imm);
	arm_eorw (code, dreg, sreg, ARMREG_LR);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_eorx_imm (guint8 *code, int dreg, int sreg, int imm)
	code = emit_imm (code, ARMREG_LR, imm);
	arm_eorx (code, dreg, sreg, ARMREG_LR);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_cmpw_imm (guint8 *code, int sreg, int imm)
		arm_cmpw (code, sreg, ARMREG_RZR);
		code = emit_imm (code, ARMREG_LR, imm);
		arm_cmpw (code, sreg, ARMREG_LR);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_cmpx_imm (guint8 *code, int sreg, int imm)
		arm_cmpx (code, sreg, ARMREG_RZR);
		code = emit_imm (code, ARMREG_LR, imm);
		arm_cmpx (code, sreg, ARMREG_LR);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_strb (guint8 *code, int rt, int rn, int imm)
	if (arm_is_strb_imm (imm)) {
		arm_strb (code, rt, rn, imm);
		g_assert (rt != ARMREG_IP0);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_strb_reg (code, rt, rn, ARMREG_IP0);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_strh (guint8 *code, int rt, int rn, int imm)
	if (arm_is_strh_imm (imm)) {
		arm_strh (code, rt, rn, imm);
		g_assert (rt != ARMREG_IP0);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_strh_reg (code, rt, rn, ARMREG_IP0);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_strw (guint8 *code, int rt, int rn, int imm)
	if (arm_is_strw_imm (imm)) {
		arm_strw (code, rt, rn, imm);
		g_assert (rt != ARMREG_IP0);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_strw_reg (code, rt, rn, ARMREG_IP0);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_strfpw (guint8 *code, int rt, int rn, int imm)
	if (arm_is_strw_imm (imm)) {
		arm_strfpw (code, rt, rn, imm);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_addx (code, ARMREG_IP0, rn, ARMREG_IP0);
		arm_strfpw (code, rt, ARMREG_IP0, 0);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_strfpx (guint8 *code, int rt, int rn, int imm)
	if (arm_is_strx_imm (imm)) {
		arm_strfpx (code, rt, rn, imm);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_addx (code, ARMREG_IP0, rn, ARMREG_IP0);
		arm_strfpx (code, rt, ARMREG_IP0, 0);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_strx (guint8 *code, int rt, int rn, int imm)
	if (arm_is_strx_imm (imm)) {
		arm_strx (code, rt, rn, imm);
		g_assert (rt != ARMREG_IP0);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_strx_reg (code, rt, rn, ARMREG_IP0);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_ldrb (guint8 *code, int rt, int rn, int imm)
	if (arm_is_pimm12_scaled (imm, 1)) {
		arm_ldrb (code, rt, rn, imm);
		g_assert (rt != ARMREG_IP0);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_ldrb_reg (code, rt, rn, ARMREG_IP0);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_ldrsbx (guint8 *code, int rt, int rn, int imm)
	if (arm_is_pimm12_scaled (imm, 1)) {
		arm_ldrsbx (code, rt, rn, imm);
		g_assert (rt != ARMREG_IP0);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_ldrsbx_reg (code, rt, rn, ARMREG_IP0);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_ldrh (guint8 *code, int rt, int rn, int imm)
	if (arm_is_pimm12_scaled (imm, 2)) {
		arm_ldrh (code, rt, rn, imm);
		g_assert (rt != ARMREG_IP0);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_ldrh_reg (code, rt, rn, ARMREG_IP0);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_ldrshx (guint8 *code, int rt, int rn, int imm)
	if (arm_is_pimm12_scaled (imm, 2)) {
		arm_ldrshx (code, rt, rn, imm);
		g_assert (rt != ARMREG_IP0);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_ldrshx_reg (code, rt, rn, ARMREG_IP0);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_ldrswx (guint8 *code, int rt, int rn, int imm)
	if (arm_is_pimm12_scaled (imm, 4)) {
		arm_ldrswx (code, rt, rn, imm);
		g_assert (rt != ARMREG_IP0);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_ldrswx_reg (code, rt, rn, ARMREG_IP0);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_ldrw (guint8 *code, int rt, int rn, int imm)
	if (arm_is_pimm12_scaled (imm, 4)) {
		arm_ldrw (code, rt, rn, imm);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_ldrw_reg (code, rt, rn, ARMREG_IP0);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_ldrx (guint8 *code, int rt, int rn, int imm)
	if (arm_is_pimm12_scaled (imm, 8)) {
		arm_ldrx (code, rt, rn, imm);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_ldrx_reg (code, rt, rn, ARMREG_IP0);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_ldrfpw (guint8 *code, int rt, int rn, int imm)
	if (arm_is_pimm12_scaled (imm, 4)) {
		arm_ldrfpw (code, rt, rn, imm);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_addx (code, ARMREG_IP0, rn, ARMREG_IP0);
		arm_ldrfpw (code, rt, ARMREG_IP0, 0);

static inline __attribute__ ((__warn_unused_result__)) guint8*
emit_ldrfpx (guint8 *code, int rt, int rn, int imm)
	if (arm_is_pimm12_scaled (imm, 8)) {
		arm_ldrfpx (code, rt, rn, imm);
		g_assert (rn != ARMREG_IP0);
		code = emit_imm (code, ARMREG_IP0, imm);
		arm_addx (code, ARMREG_IP0, rn, ARMREG_IP0);
		arm_ldrfpx (code, rt, ARMREG_IP0, 0);

mono_arm_emit_ldrx (guint8 *code, int rt, int rn, int imm)
	return emit_ldrx (code, rt, rn, imm);

emit_call (MonoCompile *cfg, guint8* code, guint32 patch_type, gconstpointer data)
		mono_add_patch_info_rel (cfg, code - cfg->native_code, patch_type, data, MONO_R_ARM64_IMM);
		code = emit_imm64_template (code, ARMREG_LR);
		arm_blrx (code, ARMREG_LR);
		mono_add_patch_info_rel (cfg, code - cfg->native_code, patch_type, data, MONO_R_ARM64_BL);
		cfg->thunk_area += THUNK_SIZE;

emit_aotconst_full (MonoCompile *cfg, MonoJumpInfo **ji, guint8 *code, guint8 *start, int dreg, guint32 patch_type, gconstpointer data)
		mono_add_patch_info (cfg, code - cfg->native_code, patch_type, data);
		*ji = mono_patch_info_list_prepend (*ji, code - start, patch_type, data);
	/* See arch_emit_got_access () in aot-compiler.c */
	arm_ldrx_lit (code, dreg, 0);

emit_aotconst (MonoCompile *cfg, guint8 *code, int dreg, guint32 patch_type, gconstpointer data)
	return emit_aotconst_full (cfg, NULL, code, NULL, dreg, patch_type, data);

 * mono_arm_emit_aotconst:
 *
 *   Emit code to load an AOT constant into DREG. Usable from trampolines.
mono_arm_emit_aotconst (gpointer ji, guint8 *code, guint8 *code_start, int dreg, guint32 patch_type, gconstpointer data)
	return emit_aotconst_full (NULL, (MonoJumpInfo**)ji, code, code_start, dreg, patch_type, data);

mono_arch_have_fast_tls (void)

emit_tls_get (guint8 *code, int dreg, int tls_offset)
	arm_mrs (code, dreg, ARM_MRS_REG_TPIDR_EL0);
	if (tls_offset < 256) {
		arm_ldrx (code, dreg, dreg, tls_offset);
		code = emit_addx_imm (code, dreg, dreg, tls_offset);
		arm_ldrx (code, dreg, dreg, 0);

emit_tls_set (guint8 *code, int sreg, int tls_offset)
	int tmpreg = ARMREG_IP0;

	g_assert (sreg != tmpreg);
	arm_mrs (code, tmpreg, ARM_MRS_REG_TPIDR_EL0);
	if (tls_offset < 256) {
		arm_strx (code, sreg, tmpreg, tls_offset);
		code = emit_addx_imm (code, tmpreg, tmpreg, tls_offset);
		arm_strx (code, sreg, tmpreg, 0);

 * - ldp fp, lr, [sp], #stack_offset
 * Clobbers TEMP_REGS.
__attribute__ ((__warn_unused_result__)) guint8*
mono_arm_emit_destroy_frame (guint8 *code, int stack_offset, guint64 temp_regs)
	arm_movspx (code, ARMREG_SP, ARMREG_FP);

	if (arm_is_ldpx_imm (stack_offset)) {
		arm_ldpx_post (code, ARMREG_FP, ARMREG_LR, ARMREG_SP, stack_offset);
		arm_ldpx (code, ARMREG_FP, ARMREG_LR, ARMREG_SP, 0);
		/* sp += stack_offset */
		g_assert (temp_regs & (1 << ARMREG_IP0));
		if (temp_regs & (1 << ARMREG_IP1)) {
			code = emit_addx_sp_imm (code, stack_offset);
			int imm = stack_offset;
			/* Can't use emit_addx_sp_imm () here since we are not allowed to clobber both ip0 and ip1 */
			arm_addx_imm (code, ARMREG_IP0, ARMREG_SP, 0);
			arm_addx_imm (code, ARMREG_IP0, ARMREG_IP0, 256);
			arm_addx_imm (code, ARMREG_SP, ARMREG_IP0, imm);

#define is_call_imm(diff) ((gint)(diff) >= -33554432 && (gint)(diff) <= 33554431)
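
/*
 * +-32 MB, a conservative bound: AArch64 b/bl can actually reach +-128 MB
 * (26-bit immediate scaled by 4), which is what arm_is_bl_disp () tests.
 */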

emit_thunk (guint8 *code, gconstpointer target)
	arm_ldrx_lit (code, ARMREG_IP0, code + 8);
	arm_brx (code, ARMREG_IP0);
	*(guint64*)code = (guint64)target;
	code += sizeof (guint64);

	mono_arch_flush_icache (p, code - p);

create_thunk (MonoCompile *cfg, MonoDomain *domain, guchar *code, const guchar *target)
	MonoThunkJitInfo *info;
	guint8 *target_thunk;

		domain = mono_domain_get ();
		 * This can be called multiple times during JITting; save the
		 * current position in cfg->arch to avoid doing an O(n^2) search.
		if (!cfg->arch.thunks) {
			cfg->arch.thunks = cfg->thunks;
			cfg->arch.thunks_size = cfg->thunk_area;
		thunks = cfg->arch.thunks;
		thunks_size = cfg->arch.thunks_size;
			g_print ("thunk failed %p->%p, thunk space=%d method %s", code, target, thunks_size, mono_method_full_name (cfg->method, TRUE));
			g_assert_not_reached ();

		g_assert (*(guint32*)thunks == 0);
		emit_thunk (thunks, target);

		cfg->arch.thunks += THUNK_SIZE;
		cfg->arch.thunks_size -= THUNK_SIZE;
		ji = mini_jit_info_table_find (domain, (char*)code, NULL);
		info = mono_jit_info_get_thunk_info (ji);

		thunks = (guint8*)ji->code_start + info->thunks_offset;
		thunks_size = info->thunks_size;

		orig_target = mono_arch_get_call_target (code + 4);

		mono_domain_lock (domain);

		if (orig_target >= thunks && orig_target < thunks + thunks_size) {
			/* The call already points to a thunk, because of trampolines etc. */
			target_thunk = orig_target;
			for (p = thunks; p < thunks + thunks_size; p += THUNK_SIZE) {
				if (((guint32*)p) [0] == 0) {
				} else if (((guint64*)p) [1] == (guint64)target) {
					/* Thunk already points to target */

		//printf ("THUNK: %p %p %p\n", code, target, target_thunk);

			mono_domain_unlock (domain);
			g_print ("thunk failed %p->%p, thunk space=%d method %s", code, target, thunks_size, cfg ? mono_method_full_name (cfg->method, TRUE) : mono_method_full_name (jinfo_get_method (ji), TRUE));
			g_assert_not_reached ();

		emit_thunk (target_thunk, target);

		mono_domain_unlock (domain);

arm_patch_full (MonoCompile *cfg, MonoDomain *domain, guint8 *code, guint8 *target, int relocation)
	switch (relocation) {
		if (arm_is_bl_disp (code, target)) {
			arm_b (code, target);
			thunk = create_thunk (cfg, domain, code, target);
			g_assert (arm_is_bl_disp (code, thunk));
	case MONO_R_ARM64_BCC: {
		cond = arm_get_bcc_cond (code);
		arm_bcc (code, cond, target);
	case MONO_R_ARM64_CBZ:
		arm_set_cbz_target (code, target);
	case MONO_R_ARM64_IMM: {
		guint64 imm = (guint64)target;

		/* emit_imm64_template () */
		dreg = arm_get_movzx_rd (code);
		arm_movzx (code, dreg, imm & 0xffff, 0);
		arm_movkx (code, dreg, (imm >> 16) & 0xffff, 16);
		arm_movkx (code, dreg, (imm >> 32) & 0xffff, 32);
		arm_movkx (code, dreg, (imm >> 48) & 0xffff, 48);
	case MONO_R_ARM64_BL:
		if (arm_is_bl_disp (code, target)) {
			arm_bl (code, target);
			thunk = create_thunk (cfg, domain, code, target);
			g_assert (arm_is_bl_disp (code, thunk));
			arm_bl (code, thunk);
		g_assert_not_reached ();

arm_patch_rel (guint8 *code, guint8 *target, int relocation)
	arm_patch_full (NULL, NULL, code, target, relocation);

mono_arm_patch (guint8 *code, guint8 *target, int relocation)
	arm_patch_rel (code, target, relocation);

mono_arch_patch_code_new (MonoCompile *cfg, MonoDomain *domain, guint8 *code, MonoJumpInfo *ji, gpointer target)
	ip = ji->ip.i + code;

	case MONO_PATCH_INFO_METHOD_JUMP:
		/* ji->relocation is not set by the caller */
		arm_patch_rel (ip, (guint8*)target, MONO_R_ARM64_B);
		arm_patch_full (cfg, domain, ip, (guint8*)target, ji->relocation);

mono_arch_free_jit_tls_data (MonoJitTlsData *tls)

mono_arch_flush_register_windows (void)

mono_arch_find_imt_method (mgreg_t *regs, guint8 *code)
	return (gpointer)regs [MONO_ARCH_RGCTX_REG];

mono_arch_find_static_call_vtable (mgreg_t *regs, guint8 *code)
	return (gpointer)regs [MONO_ARCH_RGCTX_REG];

mono_arch_context_get_int_reg (MonoContext *ctx, int reg)
	return ctx->regs [reg];

mono_arch_context_set_int_reg (MonoContext *ctx, int reg, mgreg_t val)
	ctx->regs [reg] = val;

 * mono_arch_set_target:
 *
 *   Set the target architecture the JIT backend should generate code for, in the form
 *   of a GNU target triplet. Only used in AOT mode.
mono_arch_set_target (char *mtriple)
	if (strstr (mtriple, "darwin") || strstr (mtriple, "ios")) {

add_general (CallInfo *cinfo, ArgInfo *ainfo, int size, gboolean sign)
	if (cinfo->gr >= PARAM_REGS) {
		ainfo->storage = ArgOnStack;
			/* Assume size == align */
			cinfo->stack_usage = ALIGN_TO (cinfo->stack_usage, size);
			ainfo->offset = cinfo->stack_usage;
			ainfo->slot_size = size;
			cinfo->stack_usage += size;
			ainfo->offset = cinfo->stack_usage;
			ainfo->slot_size = 8;
			ainfo->sign = FALSE;
			/* Put arguments into 8 byte aligned stack slots */
			cinfo->stack_usage += 8;
		ainfo->storage = ArgInIReg;
		ainfo->reg = cinfo->gr;

add_fp (CallInfo *cinfo, ArgInfo *ainfo, gboolean single)
	int size = single ? 4 : 8;

	if (cinfo->fr >= FP_PARAM_REGS) {
		ainfo->storage = single ? ArgOnStackR4 : ArgOnStackR8;
			cinfo->stack_usage = ALIGN_TO (cinfo->stack_usage, size);
			ainfo->offset = cinfo->stack_usage;
			ainfo->slot_size = size;
			cinfo->stack_usage += size;
			ainfo->offset = cinfo->stack_usage;
			ainfo->slot_size = 8;
			/* Put arguments into 8 byte aligned stack slots */
			cinfo->stack_usage += 8;
		ainfo->storage = ArgInFRegR4;
		ainfo->storage = ArgInFReg;
		ainfo->reg = cinfo->fr;

is_hfa (MonoType *t, int *out_nfields, int *out_esize, int *field_offsets)
	MonoClassField *field;
	MonoType *ftype, *prev_ftype = NULL;

	klass = mono_class_from_mono_type (t);
	while ((field = mono_class_get_fields (klass, &iter))) {
		if (field->type->attrs & FIELD_ATTRIBUTE_STATIC)
		ftype = mono_field_get_type (field);
		ftype = mini_get_underlying_type (ftype);

		if (MONO_TYPE_ISSTRUCT (ftype)) {
			int nested_nfields, nested_esize;
			int nested_field_offsets [16];

			if (!is_hfa (ftype, &nested_nfields, &nested_esize, nested_field_offsets))
			if (nested_esize == 4)
				ftype = &mono_defaults.single_class->byval_arg;
				ftype = &mono_defaults.double_class->byval_arg;
			if (prev_ftype && prev_ftype->type != ftype->type)
			for (i = 0; i < nested_nfields; ++i) {
				if (nfields + i < 4)
					field_offsets [nfields + i] = field->offset - sizeof (MonoObject) + nested_field_offsets [i];
			nfields += nested_nfields;
			if (!(!ftype->byref && (ftype->type == MONO_TYPE_R4 || ftype->type == MONO_TYPE_R8)))
			if (prev_ftype && prev_ftype->type != ftype->type)
			field_offsets [nfields] = field->offset - sizeof (MonoObject);

	if (nfields == 0 || nfields > 4)
	*out_nfields = nfields;
	*out_esize = prev_ftype->type == MONO_TYPE_R4 ? 4 : 8;
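
/*
 * For example, struct { float x, y; } is an HFA with nfields == 2 and
 * esize == 4; a struct mixing float and double fields is rejected by the
 * prev_ftype checks above.
 */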

add_valuetype (CallInfo *cinfo, ArgInfo *ainfo, MonoType *t)
	int i, size, align_size, nregs, nfields, esize;
	int field_offsets [16];

	size = mini_type_stack_size_full (t, &align, cinfo->pinvoke);
	align_size = ALIGN_TO (size, 8);

	nregs = align_size / 8;
	if (is_hfa (t, &nfields, &esize, field_offsets)) {
		/*
		 * The struct might include nested float structs aligned at 8,
		 * so need to keep track of the offsets of the individual fields.
		 */
		if (cinfo->fr + nfields <= FP_PARAM_REGS) {
			ainfo->storage = ArgHFA;
			ainfo->reg = cinfo->fr;
			ainfo->nregs = nfields;
			ainfo->esize = esize;
			for (i = 0; i < nfields; ++i)
				ainfo->foffsets [i] = field_offsets [i];
			cinfo->fr += ainfo->nregs;
			ainfo->nfregs_to_skip = FP_PARAM_REGS > cinfo->fr ? FP_PARAM_REGS - cinfo->fr : 0;
			cinfo->fr = FP_PARAM_REGS;
			size = ALIGN_TO (size, 8);
			ainfo->storage = ArgVtypeOnStack;
			ainfo->offset = cinfo->stack_usage;
			ainfo->nregs = nfields;
			ainfo->esize = esize;
			cinfo->stack_usage += size;

	if (align_size > 16) {
		ainfo->storage = ArgVtypeByRef;

	if (cinfo->gr + nregs > PARAM_REGS) {
		size = ALIGN_TO (size, 8);
		ainfo->storage = ArgVtypeOnStack;
		ainfo->offset = cinfo->stack_usage;
		cinfo->stack_usage += size;
		cinfo->gr = PARAM_REGS;
		ainfo->storage = ArgVtypeInIRegs;
		ainfo->reg = cinfo->gr;
		ainfo->nregs = nregs;

add_param (CallInfo *cinfo, ArgInfo *ainfo, MonoType *t)
	ptype = mini_get_underlying_type (t);
	switch (ptype->type) {
		add_general (cinfo, ainfo, 1, TRUE);
		add_general (cinfo, ainfo, 1, FALSE);
		add_general (cinfo, ainfo, 2, TRUE);
		add_general (cinfo, ainfo, 2, FALSE);
		add_general (cinfo, ainfo, 4, TRUE);
		add_general (cinfo, ainfo, 4, FALSE);
	case MONO_TYPE_FNPTR:
	case MONO_TYPE_OBJECT:
		add_general (cinfo, ainfo, 8, FALSE);
		add_fp (cinfo, ainfo, FALSE);
		add_fp (cinfo, ainfo, TRUE);
	case MONO_TYPE_VALUETYPE:
	case MONO_TYPE_TYPEDBYREF:
		add_valuetype (cinfo, ainfo, ptype);
	case MONO_TYPE_VOID:
		ainfo->storage = ArgNone;
	case MONO_TYPE_GENERICINST:
		if (!mono_type_generic_inst_is_valuetype (ptype)) {
			add_general (cinfo, ainfo, 8, FALSE);
		} else if (mini_is_gsharedvt_variable_type (ptype)) {
			/*
			 * Treat gsharedvt arguments as large vtypes
			 */
			ainfo->storage = ArgVtypeByRef;
			ainfo->gsharedvt = TRUE;
			add_valuetype (cinfo, ainfo, ptype);
	case MONO_TYPE_MVAR:
		g_assert (mini_is_gsharedvt_type (ptype));
		ainfo->storage = ArgVtypeByRef;
		ainfo->gsharedvt = TRUE;
		g_assert_not_reached ();

 * Obtain information about a call according to the calling convention.
get_call_info (MonoMemPool *mp, MonoMethodSignature *sig)
	int n, pstart, pindex;

	n = sig->hasthis + sig->param_count;

		cinfo = mono_mempool_alloc0 (mp, sizeof (CallInfo) + (sizeof (ArgInfo) * n));
		cinfo = g_malloc0 (sizeof (CallInfo) + (sizeof (ArgInfo) * n));

	cinfo->pinvoke = sig->pinvoke;

	add_param (cinfo, &cinfo->ret, sig->ret);
	if (cinfo->ret.storage == ArgVtypeByRef)
		cinfo->ret.reg = ARMREG_R8;

	cinfo->stack_usage = 0;

		add_general (cinfo, cinfo->args + 0, 8, FALSE);

	for (pindex = pstart; pindex < sig->param_count; ++pindex) {
		ainfo = cinfo->args + sig->hasthis + pindex;

		if ((sig->call_convention == MONO_CALL_VARARG) && (pindex == sig->sentinelpos)) {
			/* Prevent implicit arguments and sig_cookie from
			   being passed in registers */
			cinfo->gr = PARAM_REGS;
			cinfo->fr = FP_PARAM_REGS;
			/* Emit the signature cookie just before the implicit arguments */
			add_param (cinfo, &cinfo->sig_cookie, &mono_defaults.int_class->byval_arg);

		add_param (cinfo, ainfo, sig->params [pindex]);
		if (ainfo->storage == ArgVtypeByRef) {
			/* Pass the argument address in the next register */
			if (cinfo->gr >= PARAM_REGS) {
				ainfo->storage = ArgVtypeByRefOnStack;
				cinfo->stack_usage = ALIGN_TO (cinfo->stack_usage, 8);
				ainfo->offset = cinfo->stack_usage;
				cinfo->stack_usage += 8;
				ainfo->reg = cinfo->gr;

	/* Handle the case where there are no implicit arguments */
	if ((sig->call_convention == MONO_CALL_VARARG) && (pindex == sig->sentinelpos)) {
		/* Prevent implicit arguments and sig_cookie from
		   being passed in registers */
		cinfo->gr = PARAM_REGS;
		cinfo->fr = FP_PARAM_REGS;
		/* Emit the signature cookie just before the implicit arguments */
		add_param (cinfo, &cinfo->sig_cookie, &mono_defaults.int_class->byval_arg);

	cinfo->stack_usage = ALIGN_TO (cinfo->stack_usage, MONO_ARCH_FRAME_ALIGNMENT);

	MonoMethodSignature *sig;
	MonoType **param_types;
	int n_fpargs, n_fpret;

dyn_call_supported (CallInfo *cinfo, MonoMethodSignature *sig)
	if (sig->hasthis + sig->param_count > PARAM_REGS + DYN_CALL_STACK_ARGS)

	// FIXME: Add more cases
	switch (cinfo->ret.storage) {
	case ArgVtypeInIRegs:
		if (cinfo->ret.nregs > 2)

	for (i = 0; i < cinfo->nargs; ++i) {
		ArgInfo *ainfo = &cinfo->args [i];

		switch (ainfo->storage) {
		case ArgVtypeInIRegs:
			if (ainfo->offset >= DYN_CALL_STACK_ARGS * sizeof (mgreg_t))

mono_arch_dyn_call_prepare (MonoMethodSignature *sig)
	ArchDynCallInfo *info;

	cinfo = get_call_info (NULL, sig);

	if (!dyn_call_supported (cinfo, sig)) {

	info = g_new0 (ArchDynCallInfo, 1);
	// FIXME: Preprocess the info to speed up start_dyn_call ()
	info->cinfo = cinfo;
	info->rtype = mini_get_underlying_type (sig->ret);
	info->param_types = g_new0 (MonoType*, sig->param_count);
	for (i = 0; i < sig->param_count; ++i)
		info->param_types [i] = mini_get_underlying_type (sig->params [i]);

	switch (cinfo->ret.storage) {
		info->n_fpret = cinfo->ret.nregs;

	return (MonoDynCallInfo*)info;

mono_arch_dyn_call_free (MonoDynCallInfo *info)
	ArchDynCallInfo *ainfo = (ArchDynCallInfo*)info;

	g_free (ainfo->cinfo);
	g_free (ainfo->param_types);

bitcast_r4_to_r8 (float f)

bitcast_r8_to_r4 (double f)
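
/*
 * The bitcasts reinterpret a float's raw bits as the low 32 bits of an
 * 8-byte fp-register slot and back; DynCallArgs stores every fp argument
 * in a double-sized fpregs entry, so r4 values must round-trip without
 * numeric conversion.
 */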

mono_arch_start_dyn_call (MonoDynCallInfo *info, gpointer **args, guint8 *ret, guint8 *buf, int buf_len)
	ArchDynCallInfo *dinfo = (ArchDynCallInfo*)info;
	DynCallArgs *p = (DynCallArgs*)buf;
	int aindex, arg_index, greg, i, pindex;
	MonoMethodSignature *sig = dinfo->sig;
	CallInfo *cinfo = dinfo->cinfo;
	int buffer_offset = 0;

	g_assert (buf_len >= sizeof (DynCallArgs));

	p->n_fpargs = dinfo->n_fpargs;
	p->n_fpret = dinfo->n_fpret;

		p->regs [greg ++] = (mgreg_t)*(args [arg_index ++]);

	if (cinfo->ret.storage == ArgVtypeByRef)
		p->regs [ARMREG_R8] = (mgreg_t)ret;

	for (aindex = pindex; aindex < sig->param_count; aindex++) {
		MonoType *t = dinfo->param_types [aindex];
		gpointer *arg = args [arg_index ++];
		ArgInfo *ainfo = &cinfo->args [aindex + sig->hasthis];

		if (ainfo->storage == ArgOnStack) {
			slot = PARAM_REGS + 1 + (ainfo->offset / sizeof (mgreg_t));
			p->regs [slot] = (mgreg_t)*arg;

		if (ios_abi && ainfo->storage == ArgOnStack) {
			guint8 *stack_arg = (guint8*)&(p->regs [PARAM_REGS + 1]) + ainfo->offset;
			gboolean handled = TRUE;

			/* Special case arguments smaller than 1 machine word */
				*(guint8*)stack_arg = *(guint8*)arg;
				*(gint8*)stack_arg = *(gint8*)arg;
				*(guint16*)stack_arg = *(guint16*)arg;
				*(gint16*)stack_arg = *(gint16*)arg;
				*(gint32*)stack_arg = *(gint32*)arg;
				*(guint32*)stack_arg = *(guint32*)arg;

		case MONO_TYPE_OBJECT:
			p->regs [slot] = (mgreg_t)*arg;
			p->regs [slot] = *(guint8*)arg;
			p->regs [slot] = *(gint8*)arg;
			p->regs [slot] = *(gint16*)arg;
			p->regs [slot] = *(guint16*)arg;
			p->regs [slot] = *(gint32*)arg;
			p->regs [slot] = *(guint32*)arg;
			p->fpregs [ainfo->reg] = bitcast_r4_to_r8 (*(float*)arg);
			p->fpregs [ainfo->reg] = *(double*)arg;
		case MONO_TYPE_GENERICINST:
			if (MONO_TYPE_IS_REFERENCE (t)) {
				p->regs [slot] = (mgreg_t)*arg;
				if (t->type == MONO_TYPE_GENERICINST && mono_class_is_nullable (mono_class_from_mono_type (t))) {
					MonoClass *klass = mono_class_from_mono_type (t);
					guint8 *nullable_buf;

					/*
					 * Use p->buffer as a temporary buffer since the data needs to be available after this call
					 * if the nullable param is passed by ref.
					 */
					size = mono_class_value_size (klass, NULL);
					nullable_buf = p->buffer + buffer_offset;
					buffer_offset += size;
					g_assert (buffer_offset <= 256);

					/* The argument pointed to by arg is either a boxed vtype or null */
					mono_nullable_init (nullable_buf, (MonoObject*)arg, klass);

					arg = (gpointer*)nullable_buf;

		case MONO_TYPE_VALUETYPE:
			switch (ainfo->storage) {
			case ArgVtypeInIRegs:
				for (i = 0; i < ainfo->nregs; ++i)
					p->regs [slot ++] = ((mgreg_t*)arg) [i];
				if (ainfo->esize == 4) {
					for (i = 0; i < ainfo->nregs; ++i)
						p->fpregs [ainfo->reg + i] = bitcast_r4_to_r8 (((float*)arg) [ainfo->foffsets [i] / 4]);
					for (i = 0; i < ainfo->nregs; ++i)
						p->fpregs [ainfo->reg + i] = ((double*)arg) [ainfo->foffsets [i] / 8];
				p->n_fpargs += ainfo->nregs;
				p->regs [slot] = (mgreg_t)arg;
				g_assert_not_reached ();

			g_assert_not_reached ();

mono_arch_finish_dyn_call (MonoDynCallInfo *info, guint8 *buf)
	ArchDynCallInfo *ainfo = (ArchDynCallInfo*)info;
	CallInfo *cinfo = ainfo->cinfo;
	DynCallArgs *args = (DynCallArgs*)buf;
	MonoType *ptype = ainfo->rtype;
	guint8 *ret = args->ret;
	mgreg_t res = args->res;
	mgreg_t res2 = args->res2;

	if (cinfo->ret.storage == ArgVtypeByRef)

	switch (ptype->type) {
	case MONO_TYPE_VOID:
		*(gpointer*)ret = NULL;
	case MONO_TYPE_OBJECT:
		*(gpointer*)ret = (gpointer)res;
		*(guint8*)ret = res;
		*(gint16*)ret = res;
		*(guint16*)ret = res;
		*(gint32*)ret = res;
		*(guint32*)ret = res;
		*(guint64*)ret = res;
		*(float*)ret = bitcast_r8_to_r4 (args->fpregs [0]);
		*(double*)ret = args->fpregs [0];
	case MONO_TYPE_GENERICINST:
		if (MONO_TYPE_IS_REFERENCE (ptype)) {
			*(gpointer*)ret = (gpointer)res;
	case MONO_TYPE_VALUETYPE:
		switch (ainfo->cinfo->ret.storage) {
		case ArgVtypeInIRegs:
			*(mgreg_t*)ret = res;
			if (ainfo->cinfo->ret.nregs > 1)
				((mgreg_t*)ret) [1] = res2;
			/* Use the same area for returning fp values */
			if (cinfo->ret.esize == 4) {
				for (i = 0; i < cinfo->ret.nregs; ++i)
					((float*)ret) [cinfo->ret.foffsets [i] / 4] = bitcast_r8_to_r4 (args->fpregs [i]);
				for (i = 0; i < cinfo->ret.nregs; ++i)
					((double*)ret) [cinfo->ret.foffsets [i] / 8] = args->fpregs [i];
			g_assert_not_reached ();

	g_assert_not_reached ();

void sys_icache_invalidate (void *start, size_t len);

mono_arch_flush_icache (guint8 *code, gint size)
#ifndef MONO_CROSS_COMPILE
	sys_icache_invalidate (code, size);
	/* Don't rely on GCC's __clear_cache implementation, as it caches
	 * icache/dcache cache line sizes, which can vary between cores on
	 * big.LITTLE architectures. */
	guint64 end = (guint64) (code + size);

	/* always go with cacheline size of 4 bytes as this code isn't perf critical
	 * anyway. Reading the cache line size from a machine register can be racy
	 * on a big.LITTLE architecture if the cores don't have the same cache line
	 * size. */
	const size_t icache_line_size = 4;
	const size_t dcache_line_size = 4;

	addr = (guint64) code & ~(guint64) (dcache_line_size - 1);
	for (; addr < end; addr += dcache_line_size)
		asm volatile("dc civac, %0" : : "r" (addr) : "memory");
	asm volatile("dsb ish" : : : "memory");

	addr = (guint64) code & ~(guint64) (icache_line_size - 1);
	for (; addr < end; addr += icache_line_size)
		asm volatile("ic ivau, %0" : : "r" (addr) : "memory");

	asm volatile ("dsb ish" : : : "memory");
	asm volatile ("isb" : : : "memory");
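
	/*
	 * This is the standard ARMv8 sequence for making newly written code
	 * visible: clean each d-cache line to the point of unification
	 * (dc civac), invalidate the corresponding i-cache lines (ic ivau),
	 * and order everything with dsb/isb barriers.
	 */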

mono_arch_opcode_needs_emulation (MonoCompile *cfg, int opcode)

mono_arch_get_allocatable_int_vars (MonoCompile *cfg)
	for (i = 0; i < cfg->num_varinfo; i++) {
		MonoInst *ins = cfg->varinfo [i];
		MonoMethodVar *vmv = MONO_VARINFO (cfg, i);

		if (vmv->range.first_use.abs_pos >= vmv->range.last_use.abs_pos)

		if ((ins->flags & (MONO_INST_IS_DEAD|MONO_INST_VOLATILE|MONO_INST_INDIRECT)) ||
		    (ins->opcode != OP_LOCAL && ins->opcode != OP_ARG))

		if (mono_is_regsize_var (ins->inst_vtype)) {
			g_assert (MONO_VARINFO (cfg, i)->reg == -1);
			g_assert (i == vmv->idx);
			vars = g_list_prepend (vars, vmv);

	vars = mono_varlist_sort (cfg, vars, 0);

mono_arch_get_global_int_regs (MonoCompile *cfg)
	/* r28 is reserved for cfg->arch.args_reg */
	/* r27 is reserved for the imt argument */
	for (i = ARMREG_R19; i <= ARMREG_R26; ++i)
		regs = g_list_prepend (regs, GUINT_TO_POINTER (i));

mono_arch_regalloc_cost (MonoCompile *cfg, MonoMethodVar *vmv)
	MonoInst *ins = cfg->varinfo [vmv->idx];

	if (ins->opcode == OP_ARG)

mono_arch_create_vars (MonoCompile *cfg)
	MonoMethodSignature *sig;

	sig = mono_method_signature (cfg->method);
	if (!cfg->arch.cinfo)
		cfg->arch.cinfo = get_call_info (cfg->mempool, sig);
	cinfo = cfg->arch.cinfo;

	if (cinfo->ret.storage == ArgVtypeByRef) {
		cfg->vret_addr = mono_compile_create_var (cfg, &mono_defaults.int_class->byval_arg, OP_LOCAL);
		cfg->vret_addr->flags |= MONO_INST_VOLATILE;

	if (cfg->gen_sdb_seq_points) {
		if (cfg->compile_aot) {
			ins = mono_compile_create_var (cfg, &mono_defaults.int_class->byval_arg, OP_LOCAL);
			ins->flags |= MONO_INST_VOLATILE;
			cfg->arch.seq_point_info_var = ins;

		ins = mono_compile_create_var (cfg, &mono_defaults.int_class->byval_arg, OP_LOCAL);
		ins->flags |= MONO_INST_VOLATILE;
		cfg->arch.ss_tramp_var = ins;

		ins = mono_compile_create_var (cfg, &mono_defaults.int_class->byval_arg, OP_LOCAL);
		ins->flags |= MONO_INST_VOLATILE;
		cfg->arch.bp_tramp_var = ins;

	if (cfg->method->save_lmf) {
		cfg->create_lmf_var = TRUE;

mono_arch_allocate_vars (MonoCompile *cfg)
	MonoMethodSignature *sig;
	int i, offset, size, align;
	guint32 locals_stack_size, locals_stack_align;

	/*
	 * Allocate arguments and locals to either register (OP_REGVAR) or to a stack slot (OP_REGOFFSET).
	 * Compute cfg->stack_offset and update cfg->used_int_regs.
	 */

	sig = mono_method_signature (cfg->method);

	if (!cfg->arch.cinfo)
		cfg->arch.cinfo = get_call_info (cfg->mempool, sig);
	cinfo = cfg->arch.cinfo;

	/*
	 * The ARM64 ABI always uses a frame pointer.
	 * The instruction set prefers positive offsets, so fp points to the bottom of the
	 * frame, and stack slots are at positive offsets.
	 * If some arguments are received on the stack, their offsets relative to fp can
	 * not be computed right now because the stack frame might grow due to spilling
	 * done by the local register allocator. To solve this, we reserve a register
	 * which points to them.
	 * The stack frame looks like this:
	 * args_reg -> <bottom of parent frame>
	 * fp -> <saved fp+lr>
	 * sp -> <localloc/params area>
	 */
	cfg->frame_reg = ARMREG_FP;
	cfg->flags |= MONO_CFG_HAS_SPILLUP;

	if (cinfo->stack_usage) {
		g_assert (!(cfg->used_int_regs & (1 << ARMREG_R28)));
		cfg->arch.args_reg = ARMREG_R28;
		cfg->used_int_regs |= 1 << ARMREG_R28;

	if (cfg->method->save_lmf) {
		/* The LMF var is allocated normally */
	/* Callee saved regs */
	cfg->arch.saved_gregs_offset = offset;
	for (i = 0; i < 32; ++i)
		if ((MONO_ARCH_CALLEE_SAVED_REGS & (1 << i)) && (cfg->used_int_regs & (1 << i)))

	switch (cinfo->ret.storage) {
		cfg->ret->opcode = OP_REGVAR;
		cfg->ret->dreg = cinfo->ret.reg;
	case ArgVtypeInIRegs:
		/* Allocate a local to hold the result, the epilog will copy it to the correct place */
		cfg->ret->opcode = OP_REGOFFSET;
		cfg->ret->inst_basereg = cfg->frame_reg;
		cfg->ret->inst_offset = offset;
		if (cinfo->ret.storage == ArgHFA)
		/* This variable will be initialized in the prolog from R8 */
		cfg->vret_addr->opcode = OP_REGOFFSET;
		cfg->vret_addr->inst_basereg = cfg->frame_reg;
		cfg->vret_addr->inst_offset = offset;
		if (G_UNLIKELY (cfg->verbose_level > 1)) {
			printf ("vret_addr =");
			mono_print_ins (cfg->vret_addr);
		g_assert_not_reached ();

	for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
		ainfo = cinfo->args + i;

		ins = cfg->args [i];
		if (ins->opcode == OP_REGVAR)

		ins->opcode = OP_REGOFFSET;
		ins->inst_basereg = cfg->frame_reg;

		switch (ainfo->storage) {
			// FIXME: Use nregs/size
			/* These will be copied to the stack in the prolog */
			ins->inst_offset = offset;
		case ArgVtypeOnStack:
			/* These are in the parent frame */
			g_assert (cfg->arch.args_reg);
			ins->inst_basereg = cfg->arch.args_reg;
			ins->inst_offset = ainfo->offset;
		case ArgVtypeInIRegs:
			ins->opcode = OP_REGOFFSET;
			ins->inst_basereg = cfg->frame_reg;
			/* These arguments are saved to the stack in the prolog */
			ins->inst_offset = offset;
			if (cfg->verbose_level >= 2)
				printf ("arg %d allocated to %s+0x%0x.\n", i, mono_arch_regname (ins->inst_basereg), (int)ins->inst_offset);
			if (ainfo->storage == ArgHFA)
		case ArgVtypeByRefOnStack: {
			if (ainfo->gsharedvt) {
				ins->opcode = OP_REGOFFSET;
				ins->inst_basereg = cfg->arch.args_reg;
				ins->inst_offset = ainfo->offset;

			/* The vtype address is in the parent frame */
			g_assert (cfg->arch.args_reg);
			MONO_INST_NEW (cfg, vtaddr, 0);
			vtaddr->opcode = OP_REGOFFSET;
			vtaddr->inst_basereg = cfg->arch.args_reg;
			vtaddr->inst_offset = ainfo->offset;

			/* Need an indirection */
			ins->opcode = OP_VTARG_ADDR;
			ins->inst_left = vtaddr;
		case ArgVtypeByRef: {
			if (ainfo->gsharedvt) {
				ins->opcode = OP_REGOFFSET;
				ins->inst_basereg = cfg->frame_reg;
				ins->inst_offset = offset;

			/* The vtype address is in a register, will be copied to the stack in the prolog */
			MONO_INST_NEW (cfg, vtaddr, 0);
			vtaddr->opcode = OP_REGOFFSET;
			vtaddr->inst_basereg = cfg->frame_reg;
			vtaddr->inst_offset = offset;

			/* Need an indirection */
			ins->opcode = OP_VTARG_ADDR;
			ins->inst_left = vtaddr;
			g_assert_not_reached ();

	/* Allocate these first so they have a small offset, OP_SEQ_POINT depends on this */
	// FIXME: Allocate these to registers
	ins = cfg->arch.seq_point_info_var;
		offset += align - 1;
		offset &= ~(align - 1);
		ins->opcode = OP_REGOFFSET;
		ins->inst_basereg = cfg->frame_reg;
		ins->inst_offset = offset;

	ins = cfg->arch.ss_tramp_var;
		offset += align - 1;
		offset &= ~(align - 1);
		ins->opcode = OP_REGOFFSET;
		ins->inst_basereg = cfg->frame_reg;
		ins->inst_offset = offset;

	ins = cfg->arch.bp_tramp_var;
		offset += align - 1;
		offset &= ~(align - 1);
		ins->opcode = OP_REGOFFSET;
		ins->inst_basereg = cfg->frame_reg;
		ins->inst_offset = offset;

	offsets = mono_allocate_stack_slots (cfg, FALSE, &locals_stack_size, &locals_stack_align);
	if (locals_stack_align)
		offset = ALIGN_TO (offset, locals_stack_align);

	for (i = cfg->locals_start; i < cfg->num_varinfo; i++) {
		if (offsets [i] != -1) {
			ins = cfg->varinfo [i];
			ins->opcode = OP_REGOFFSET;
			ins->inst_basereg = cfg->frame_reg;
			ins->inst_offset = offset + offsets [i];
			//printf ("allocated local %d to ", i); mono_print_tree_nl (ins);
	offset += locals_stack_size;

	offset = ALIGN_TO (offset, MONO_ARCH_FRAME_ALIGNMENT);

	cfg->stack_offset = offset;

mono_arch_get_llvm_call_info (MonoCompile *cfg, MonoMethodSignature *sig)
	LLVMCallInfo *linfo;

	n = sig->param_count + sig->hasthis;

	cinfo = get_call_info (cfg->mempool, sig);

	linfo = mono_mempool_alloc0 (cfg->mempool, sizeof (LLVMCallInfo) + (sizeof (LLVMArgInfo) * n));

	switch (cinfo->ret.storage) {
		linfo->ret.storage = LLVMArgVtypeByRef;
		// FIXME: This doesn't work yet since the llvm backend represents these types as an i8
		// array which is returned in int regs
		linfo->ret.storage = LLVMArgFpStruct;
		linfo->ret.nslots = cinfo->ret.nregs;
		linfo->ret.esize = cinfo->ret.esize;
	case ArgVtypeInIRegs:
		/* LLVM models this by returning an int */
		linfo->ret.storage = LLVMArgVtypeAsScalar;
		linfo->ret.nslots = cinfo->ret.nregs;
		linfo->ret.esize = cinfo->ret.esize;
		g_assert_not_reached ();

	for (i = 0; i < n; ++i) {
		LLVMArgInfo *lainfo = &linfo->args [i];

		ainfo = cinfo->args + i;

		lainfo->storage = LLVMArgNone;

		switch (ainfo->storage) {
			lainfo->storage = LLVMArgNormal;
		case ArgVtypeByRefOnStack:
			lainfo->storage = LLVMArgVtypeByRef;
			lainfo->storage = LLVMArgAsFpArgs;
			lainfo->nslots = ainfo->nregs;
			lainfo->esize = ainfo->esize;
			for (j = 0; j < ainfo->nregs; ++j)
				lainfo->pair_storage [j] = LLVMArgInFPReg;
		case ArgVtypeInIRegs:
			lainfo->storage = LLVMArgAsIArgs;
			lainfo->nslots = ainfo->nregs;
		case ArgVtypeOnStack:
				lainfo->storage = LLVMArgAsFpArgs;
				lainfo->nslots = ainfo->nregs;
				lainfo->esize = ainfo->esize;
				lainfo->ndummy_fpargs = ainfo->nfregs_to_skip;
				for (j = 0; j < ainfo->nregs; ++j)
					lainfo->pair_storage [j] = LLVMArgInFPReg;
				lainfo->storage = LLVMArgAsIArgs;
				lainfo->nslots = ainfo->size / 8;
			g_assert_not_reached ();

add_outarg_reg (MonoCompile *cfg, MonoCallInst *call, ArgStorage storage, int reg, MonoInst *arg)
		MONO_INST_NEW (cfg, ins, OP_MOVE);
		ins->dreg = mono_alloc_ireg_copy (cfg, arg->dreg);
		ins->sreg1 = arg->dreg;
		MONO_ADD_INS (cfg->cbb, ins);
		mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, reg, FALSE);
		MONO_INST_NEW (cfg, ins, OP_FMOVE);
		ins->dreg = mono_alloc_freg (cfg);
		ins->sreg1 = arg->dreg;
		MONO_ADD_INS (cfg->cbb, ins);
		mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, reg, TRUE);
		if (COMPILE_LLVM (cfg))
			MONO_INST_NEW (cfg, ins, OP_FMOVE);
			MONO_INST_NEW (cfg, ins, OP_RMOVE);
			MONO_INST_NEW (cfg, ins, OP_ARM_SETFREG_R4);
		ins->dreg = mono_alloc_freg (cfg);
		ins->sreg1 = arg->dreg;
		MONO_ADD_INS (cfg->cbb, ins);
		mono_call_inst_add_outarg_reg (cfg, call, ins->dreg, reg, TRUE);
		g_assert_not_reached ();

emit_sig_cookie (MonoCompile *cfg, MonoCallInst *call, CallInfo *cinfo)
	MonoMethodSignature *tmp_sig;

	if (call->tail_call)

	g_assert (cinfo->sig_cookie.storage == ArgOnStack);

	 * mono_ArgIterator_Setup assumes the signature cookie is
	 * passed first and all the arguments which were before it are
	 * passed on the stack after the signature. So compensate by
	 * passing a different signature.
	tmp_sig = mono_metadata_signature_dup (call->signature);
	tmp_sig->param_count -= call->signature->sentinelpos;
	tmp_sig->sentinelpos = 0;
	memcpy (tmp_sig->params, call->signature->params + call->signature->sentinelpos, tmp_sig->param_count * sizeof (MonoType*));

	sig_reg = mono_alloc_ireg (cfg);
	MONO_EMIT_NEW_SIGNATURECONST (cfg, sig_reg, tmp_sig);

	MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ARMREG_SP, cinfo->sig_cookie.offset, sig_reg);

mono_arch_emit_call (MonoCompile *cfg, MonoCallInst *call)
	MonoMethodSignature *sig;
	MonoInst *arg, *vtarg;

	sig = call->signature;

	cinfo = get_call_info (cfg->mempool, sig);

	switch (cinfo->ret.storage) {
	case ArgVtypeInIRegs:
		 * The vtype is returned in registers, save the return area address in a local, and save the vtype into
		 * the location pointed to by it after call in emit_move_return_value ().
		if (!cfg->arch.vret_addr_loc) {
			cfg->arch.vret_addr_loc = mono_compile_create_var (cfg, &mono_defaults.int_class->byval_arg, OP_LOCAL);
			/* Prevent it from being register allocated or optimized away */
			((MonoInst*)cfg->arch.vret_addr_loc)->flags |= MONO_INST_VOLATILE;

		MONO_EMIT_NEW_UNALU (cfg, OP_MOVE, ((MonoInst*)cfg->arch.vret_addr_loc)->dreg, call->vret_var->dreg);

		/* Pass the vtype return address in R8 */
		MONO_INST_NEW (cfg, vtarg, OP_MOVE);
		vtarg->sreg1 = call->vret_var->dreg;
		vtarg->dreg = mono_alloc_preg (cfg);
		MONO_ADD_INS (cfg->cbb, vtarg);

		mono_call_inst_add_outarg_reg (cfg, call, vtarg->dreg, cinfo->ret.reg, FALSE);

	for (i = 0; i < cinfo->nargs; ++i) {
		ainfo = cinfo->args + i;
		arg = call->args [i];

		if ((sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos)) {
			/* Emit the signature cookie just before the implicit arguments */
			emit_sig_cookie (cfg, call, cinfo);

		switch (ainfo->storage) {
			add_outarg_reg (cfg, call, ainfo->storage, ainfo->reg, arg);
			switch (ainfo->slot_size) {
				MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ARMREG_SP, ainfo->offset, arg->dreg);
				MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI4_MEMBASE_REG, ARMREG_SP, ainfo->offset, arg->dreg);
				MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI2_MEMBASE_REG, ARMREG_SP, ainfo->offset, arg->dreg);
				MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI1_MEMBASE_REG, ARMREG_SP, ainfo->offset, arg->dreg);
				g_assert_not_reached ();
			MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER8_MEMBASE_REG, ARMREG_SP, ainfo->offset, arg->dreg);
			MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORER4_MEMBASE_REG, ARMREG_SP, ainfo->offset, arg->dreg);
		case ArgVtypeInIRegs:
		case ArgVtypeByRefOnStack:
		case ArgVtypeOnStack:
			size = mono_class_value_size (arg->klass, &align);

			MONO_INST_NEW (cfg, ins, OP_OUTARG_VT);
			ins->sreg1 = arg->dreg;
			ins->klass = arg->klass;
			ins->backend.size = size;
			ins->inst_p0 = call;
			ins->inst_p1 = mono_mempool_alloc (cfg->mempool, sizeof (ArgInfo));
			memcpy (ins->inst_p1, ainfo, sizeof (ArgInfo));
			MONO_ADD_INS (cfg->cbb, ins);
			g_assert_not_reached ();

	/* Handle the case where there are no implicit arguments */
	if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (cinfo->nargs == sig->sentinelpos))
		emit_sig_cookie (cfg, call, cinfo);

	call->call_info = cinfo;
	call->stack_usage = cinfo->stack_usage;

mono_arch_emit_outarg_vt (MonoCompile *cfg, MonoInst *ins, MonoInst *src)
	MonoCallInst *call = (MonoCallInst*)ins->inst_p0;
	ArgInfo *ainfo = ins->inst_p1;

	if (ins->backend.size == 0 && !ainfo->gsharedvt)

	switch (ainfo->storage) {
	case ArgVtypeInIRegs:
		for (i = 0; i < ainfo->nregs; ++i) {
			// FIXME: Smaller sizes
			MONO_INST_NEW (cfg, load, OP_LOADI8_MEMBASE);
			load->dreg = mono_alloc_ireg (cfg);
			load->inst_basereg = src->dreg;
			load->inst_offset = i * sizeof (mgreg_t);
			MONO_ADD_INS (cfg->cbb, load);
			add_outarg_reg (cfg, call, ArgInIReg, ainfo->reg + i, load);
		for (i = 0; i < ainfo->nregs; ++i) {
			if (ainfo->esize == 4)
				MONO_INST_NEW (cfg, load, OP_LOADR4_MEMBASE);
				MONO_INST_NEW (cfg, load, OP_LOADR8_MEMBASE);
			load->dreg = mono_alloc_freg (cfg);
			load->inst_basereg = src->dreg;
			load->inst_offset = ainfo->foffsets [i];
			MONO_ADD_INS (cfg->cbb, load);
			add_outarg_reg (cfg, call, ainfo->esize == 4 ? ArgInFRegR4 : ArgInFReg, ainfo->reg + i, load);
	case ArgVtypeByRefOnStack: {
		MonoInst *vtaddr, *load, *arg;

		/* Pass the vtype address in a reg/on the stack */
		if (ainfo->gsharedvt) {
			/* Make a copy of the argument */
			vtaddr = mono_compile_create_var (cfg, &ins->klass->byval_arg, OP_LOCAL);

			MONO_INST_NEW (cfg, load, OP_LDADDR);
			load->inst_p0 = vtaddr;
			vtaddr->flags |= MONO_INST_INDIRECT;
			load->type = STACK_MP;
			load->klass = vtaddr->klass;
			load->dreg = mono_alloc_ireg (cfg);
			MONO_ADD_INS (cfg->cbb, load);
			mini_emit_memcpy (cfg, load->dreg, 0, src->dreg, 0, ainfo->size, 8);

		if (ainfo->storage == ArgVtypeByRef) {
			MONO_INST_NEW (cfg, arg, OP_MOVE);
			arg->dreg = mono_alloc_preg (cfg);
			arg->sreg1 = load->dreg;
			MONO_ADD_INS (cfg->cbb, arg);
			add_outarg_reg (cfg, call, ArgInIReg, ainfo->reg, arg);
			MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STORE_MEMBASE_REG, ARMREG_SP, ainfo->offset, load->dreg);
	case ArgVtypeOnStack:
		for (i = 0; i < ainfo->size / 8; ++i) {
			MONO_INST_NEW (cfg, load, OP_LOADI8_MEMBASE);
			load->dreg = mono_alloc_ireg (cfg);
			load->inst_basereg = src->dreg;
			load->inst_offset = i * 8;
			MONO_ADD_INS (cfg->cbb, load);
			MONO_EMIT_NEW_STORE_MEMBASE (cfg, OP_STOREI8_MEMBASE_REG, ARMREG_SP, ainfo->offset + (i * 8), load->dreg);
		g_assert_not_reached ();

mono_arch_emit_setret (MonoCompile *cfg, MonoMethod *method, MonoInst *val)
	MonoMethodSignature *sig;

	sig = mono_method_signature (cfg->method);
	if (!cfg->arch.cinfo)
		cfg->arch.cinfo = get_call_info (cfg->mempool, sig);
	cinfo = cfg->arch.cinfo;

	switch (cinfo->ret.storage) {
		MONO_EMIT_NEW_UNALU (cfg, OP_MOVE, cfg->ret->dreg, val->dreg);
		MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, cfg->ret->dreg, val->dreg);
		if (COMPILE_LLVM (cfg))
			MONO_EMIT_NEW_UNALU (cfg, OP_FMOVE, cfg->ret->dreg, val->dreg);
			MONO_EMIT_NEW_UNALU (cfg, OP_RMOVE, cfg->ret->dreg, val->dreg);
			MONO_EMIT_NEW_UNALU (cfg, OP_ARM_SETFREG_R4, cfg->ret->dreg, val->dreg);
		g_assert_not_reached ();

mono_arch_tail_call_supported (MonoCompile *cfg, MonoMethodSignature *caller_sig, MonoMethodSignature *callee_sig)
	if (cfg->compile_aot && !cfg->full_aot)
		/* OP_TAILCALL doesn't work with AOT */

	c1 = get_call_info (NULL, caller_sig);
	c2 = get_call_info (NULL, callee_sig);

	// FIXME: Relax these restrictions
	if (c1->stack_usage != 0)
	if (c1->stack_usage != c2->stack_usage)
	if ((c1->ret.storage != ArgNone && c1->ret.storage != ArgInIReg) || c1->ret.storage != c2->ret.storage)

mono_arch_is_inst_imm (gint64 imm)
	return (imm >= -((gint64)1<<31) && imm <= (((gint64)1<<31)-1));
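
/*
 * Any signed 32-bit immediate is accepted here; whether it can be encoded
 * directly or has to be materialized into a temporary register first is
 * decided later by the emit_*_imm helpers.
 */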
2576 mono_arch_instrument_prolog (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments)
2583 mono_arch_instrument_epilog_full (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments, gboolean preserve_argument_registers)
2590 mono_arch_peephole_pass_1 (MonoCompile *cfg, MonoBasicBlock *bb)
2596 mono_arch_peephole_pass_2 (MonoCompile *cfg, MonoBasicBlock *bb)
2601 #define ADD_NEW_INS(cfg,dest,op) do { \
2602 MONO_INST_NEW ((cfg), (dest), (op)); \
2603 mono_bblock_insert_before_ins (bb, ins, (dest)); \
2607 mono_arch_lowering_pass (MonoCompile *cfg, MonoBasicBlock *bb)
2609 MonoInst *ins, *temp, *last_ins = NULL;
2611 MONO_BB_FOR_EACH_INS (bb, ins) {
2612 switch (ins->opcode) {
2617 if (ins->next && (ins->next->opcode == OP_COND_EXC_C || ins->next->opcode == OP_COND_EXC_IC))
2618 /* ARM sets the C flag to 1 if there was _no_ overflow */
2619 ins->next->opcode = OP_COND_EXC_NC;
2623 case OP_IDIV_UN_IMM:
2624 case OP_IREM_UN_IMM:
2626 mono_decompose_op_imm (cfg, bb, ins);
2628 case OP_LOCALLOC_IMM:
2629 if (ins->inst_imm > 32) {
2630 ADD_NEW_INS (cfg, temp, OP_ICONST);
2631 temp->inst_c0 = ins->inst_imm;
2632 temp->dreg = mono_alloc_ireg (cfg);
2633 ins->sreg1 = temp->dreg;
2634 ins->opcode = mono_op_imm_to_op (ins->opcode);
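/*
 * The cases below fuse a compare against zero followed by a conditional
 * branch into a single compare-and-branch opcode, e.g.
 *   cmp w0, #0; b.eq target   =>   cbz w0, target
 */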
2637 case OP_ICOMPARE_IMM:
2638 if (ins->inst_imm == 0 && ins->next && ins->next->opcode == OP_IBEQ) {
2639 ins->next->opcode = OP_ARM64_CBZW;
2640 ins->next->sreg1 = ins->sreg1;
2642 } else if (ins->inst_imm == 0 && ins->next && ins->next->opcode == OP_IBNE_UN) {
2643 ins->next->opcode = OP_ARM64_CBNZW;
2644 ins->next->sreg1 = ins->sreg1;
2648 case OP_LCOMPARE_IMM:
2649 case OP_COMPARE_IMM:
2650 if (ins->inst_imm == 0 && ins->next && ins->next->opcode == OP_LBEQ) {
2651 ins->next->opcode = OP_ARM64_CBZX;
2652 ins->next->sreg1 = ins->sreg1;
2654 } else if (ins->inst_imm == 0 && ins->next && ins->next->opcode == OP_LBNE_UN) {
2655 ins->next->opcode = OP_ARM64_CBNZX;
2656 ins->next->sreg1 = ins->sreg1;
2661 gboolean swap = FALSE;
2665 /* Optimized away */
2671 * FP compares with unordered operands set the flags
2672 * to NZCV=0011, which matches some non-unordered compares
2673 * as well, like LE, so have to swap the operands.
2675 switch (ins->next->opcode) {
2677 ins->next->opcode = OP_FBGT;
2681 ins->next->opcode = OP_FBGE;
2689 ins->sreg1 = ins->sreg2;
2700 bb->last_ins = last_ins;
2701 bb->max_vreg = cfg->next_vreg;
2705 mono_arch_decompose_long_opts (MonoCompile *cfg, MonoInst *long_ins)
2710 opcode_to_armcond (int opcode)
2721 case OP_COND_EXC_IEQ:
2722 case OP_COND_EXC_EQ:
2739 case OP_COND_EXC_IGT:
2740 case OP_COND_EXC_GT:
2755 case OP_COND_EXC_ILT:
2756 case OP_COND_EXC_LT:
2764 case OP_COND_EXC_INE_UN:
2765 case OP_COND_EXC_NE_UN:
2771 case OP_COND_EXC_IGE_UN:
2772 case OP_COND_EXC_GE_UN:
2782 case OP_COND_EXC_IGT_UN:
2783 case OP_COND_EXC_GT_UN:
2789 case OP_COND_EXC_ILE_UN:
2790 case OP_COND_EXC_LE_UN:
2798 case OP_COND_EXC_ILT_UN:
2799 case OP_COND_EXC_LT_UN:
2802 * FCMP sets the NZCV condition bits as follows:
2807 * ARMCOND_LT is N!=V, so it matches unordered too, so
2808 * fclt and fclt_un need to be special cased.
2818 case OP_COND_EXC_IC:
2820 case OP_COND_EXC_OV:
2821 case OP_COND_EXC_IOV:
2823 case OP_COND_EXC_NC:
2824 case OP_COND_EXC_INC:
2826 case OP_COND_EXC_NO:
2827 case OP_COND_EXC_INO:
2830 printf ("%s\n", mono_inst_name (opcode));
2831 g_assert_not_reached ();
2836 /* This clobbers LR */
2837 static inline __attribute__ ((__warn_unused_result__)) guint8*
2838 emit_cond_exc (MonoCompile *cfg, guint8 *code, int opcode, const char *exc_name)
2842 cond = opcode_to_armcond (opcode);
2844 arm_adrx (code, ARMREG_IP1, code);
2845 mono_add_patch_info_rel (cfg, code - cfg->native_code, MONO_PATCH_INFO_EXC, exc_name, MONO_R_ARM64_BCC);
2846 arm_bcc (code, cond, 0);
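/*
 * The adr above captures the address of this throw site in ip1, and the
 * bcc is emitted with a 0 displacement which is patched later to point
 * to the exception throwing code (see mono_arch_emit_exceptions ()).
 */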
2851 emit_move_return_value (MonoCompile *cfg, guint8 * code, MonoInst *ins)
2856 call = (MonoCallInst*)ins;
2857 cinfo = call->call_info;
2859 switch (cinfo->ret.storage) {
2863 /* LLVM compiled code might only set the bottom bits */
2864 if (call->signature && mini_get_underlying_type (call->signature->ret)->type == MONO_TYPE_I4)
2865 arm_sxtwx (code, call->inst.dreg, cinfo->ret.reg);
2866 else if (call->inst.dreg != cinfo->ret.reg)
2867 arm_movx (code, call->inst.dreg, cinfo->ret.reg);
2870 if (call->inst.dreg != cinfo->ret.reg)
2871 arm_fmovd (code, call->inst.dreg, cinfo->ret.reg);
2875 arm_fmovs (code, call->inst.dreg, cinfo->ret.reg);
2877 arm_fcvt_sd (code, call->inst.dreg, cinfo->ret.reg);
2879 case ArgVtypeInIRegs: {
2880 MonoInst *loc = cfg->arch.vret_addr_loc;
2883 /* Load the destination address */
2884 g_assert (loc && loc->opcode == OP_REGOFFSET);
2885 code = emit_ldrx (code, ARMREG_LR, loc->inst_basereg, loc->inst_offset);
2886 for (i = 0; i < cinfo->ret.nregs; ++i)
2887 arm_strx (code, cinfo->ret.reg + i, ARMREG_LR, i * 8);
2891 MonoInst *loc = cfg->arch.vret_addr_loc;
2894 /* Load the destination address */
2895 g_assert (loc && loc->opcode == OP_REGOFFSET);
2896 code = emit_ldrx (code, ARMREG_LR, loc->inst_basereg, loc->inst_offset);
2897 for (i = 0; i < cinfo->ret.nregs; ++i) {
2898 if (cinfo->ret.esize == 4)
2899 arm_strfpw (code, cinfo->ret.reg + i, ARMREG_LR, cinfo->ret.foffsets [i]);
2901 arm_strfpx (code, cinfo->ret.reg + i, ARMREG_LR, cinfo->ret.foffsets [i]);
2908 g_assert_not_reached ();
2915 * emit_branch_island:
2917 * Emit a branch island for the conditional branches from cfg->native_code + start_offset to code.
2920 emit_branch_island (MonoCompile *cfg, guint8 *code, int start_offset)
2923 int offset, island_size;
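/*
 * Conditional branches (bcc/cbz) only have a 19 bit signed displacement
 * (+-1 MiB), while an unconditional b has 26 bits (+-128 MiB), so
 * out-of-range conditional branches are redirected through an
 * unconditional branch placed in the island.
 */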
2925 /* Iterate over the patch infos added so far by this bb */
2927 for (ji = cfg->patch_info; ji; ji = ji->next) {
2928 if (ji->ip.i < start_offset)
2929 /* The patch infos are in reverse order, so this means the end */
2931 if (ji->relocation == MONO_R_ARM64_BCC || ji->relocation == MONO_R_ARM64_CBZ)
2936 offset = code - cfg->native_code;
2937 if (offset > (cfg->code_size - island_size - 16)) {
2938 cfg->code_size *= 2;
2939 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
2940 code = cfg->native_code + offset;
2943 /* Branch over the island */
2944 arm_b (code, code + 4 + island_size);
2946 for (ji = cfg->patch_info; ji; ji = ji->next) {
2947 if (ji->ip.i < start_offset)
2949 if (ji->relocation == MONO_R_ARM64_BCC || ji->relocation == MONO_R_ARM64_CBZ) {
/* Rewrite the cond branch so it branches to an unconditional branch in the branch island */
2951 arm_patch_rel (cfg->native_code + ji->ip.i, code, ji->relocation);
2952 /* Rewrite the patch so it points to the unconditional branch */
2953 ji->ip.i = code - cfg->native_code;
2954 ji->relocation = MONO_R_ARM64_B;
2963 mono_arch_output_basic_block (MonoCompile *cfg, MonoBasicBlock *bb)
2968 guint8 *code = cfg->native_code + cfg->code_len;
2969 int start_offset, max_len, dreg, sreg1, sreg2;
2972 if (cfg->verbose_level > 2)
2973 g_print ("Basic block %d starting at offset 0x%x\n", bb->block_num, bb->native_offset);
2975 start_offset = code - cfg->native_code;
2977 MONO_BB_FOR_EACH_INS (bb, ins) {
2978 offset = code - cfg->native_code;
2980 max_len = ((guint8 *)ins_get_spec (ins->opcode))[MONO_INST_LEN];
2982 if (offset > (cfg->code_size - max_len - 16)) {
2983 cfg->code_size *= 2;
2984 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
2985 code = cfg->native_code + offset;
2988 if (G_UNLIKELY (cfg->arch.cond_branch_islands && offset - start_offset > 4 * 0x1ffff)) {
2989 /* Emit a branch island for large basic blocks */
2990 code = emit_branch_island (cfg, code, start_offset);
2991 offset = code - cfg->native_code;
2992 start_offset = offset;
2995 mono_debug_record_line_number (cfg, ins, offset);
3000 imm = ins->inst_imm;
3002 switch (ins->opcode) {
3004 code = emit_imm (code, dreg, ins->inst_c0);
3007 code = emit_imm64 (code, dreg, ins->inst_c0);
3011 arm_movx (code, dreg, sreg1);
3014 case OP_RELAXED_NOP:
3017 mono_add_patch_info_rel (cfg, offset, (MonoJumpInfoType)ins->inst_i1, ins->inst_p0, MONO_R_ARM64_IMM);
3018 code = emit_imm64_template (code, dreg);
3022 * gdb does not like encountering the hw breakpoint ins in the debugged code.
* So instead of emitting a trap, we emit a call to a C function and place a
3026 code = emit_call (cfg, code, MONO_PATCH_INFO_INTERNAL_METHOD, (gpointer)"mono_break");
3031 arm_addx_imm (code, ARMREG_IP0, sreg1, (MONO_ARCH_FRAME_ALIGNMENT - 1));
3032 // FIXME: andx_imm doesn't work yet
3033 code = emit_imm (code, ARMREG_IP1, -MONO_ARCH_FRAME_ALIGNMENT);
3034 arm_andx (code, ARMREG_IP0, ARMREG_IP0, ARMREG_IP1);
3035 //arm_andx_imm (code, ARMREG_IP0, sreg1, - MONO_ARCH_FRAME_ALIGNMENT);
3036 arm_movspx (code, ARMREG_IP1, ARMREG_SP);
3037 arm_subx (code, ARMREG_IP1, ARMREG_IP1, ARMREG_IP0);
3038 arm_movspx (code, ARMREG_SP, ARMREG_IP1);
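/*
 * Zero initialize the allocated area 16 bytes at a time using
 * stp xzr, xzr, looping until the pointer in ip1 reaches the end in ip0.
 */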
3041 /* ip1 = pointer, ip0 = end */
3042 arm_addx (code, ARMREG_IP0, ARMREG_IP1, ARMREG_IP0);
3044 arm_cmpx (code, ARMREG_IP1, ARMREG_IP0);
3046 arm_bcc (code, ARMCOND_EQ, 0);
3047 arm_stpx (code, ARMREG_RZR, ARMREG_RZR, ARMREG_IP1, 0);
3048 arm_addx_imm (code, ARMREG_IP1, ARMREG_IP1, 16);
3049 arm_b (code, buf [0]);
3050 arm_patch_rel (buf [1], code, MONO_R_ARM64_BCC);
3052 arm_movspx (code, dreg, ARMREG_SP);
3053 if (cfg->param_area)
3054 code = emit_subx_sp_imm (code, cfg->param_area);
3057 case OP_LOCALLOC_IMM: {
3060 imm = ALIGN_TO (ins->inst_imm, MONO_ARCH_FRAME_ALIGNMENT);
3061 g_assert (arm_is_arith_imm (imm));
3062 arm_subx_imm (code, ARMREG_SP, ARMREG_SP, imm);
3065 g_assert (MONO_ARCH_FRAME_ALIGNMENT == 16);
3067 while (offset < imm) {
3068 arm_stpx (code, ARMREG_RZR, ARMREG_RZR, ARMREG_SP, offset);
3071 arm_movspx (code, dreg, ARMREG_SP);
3072 if (cfg->param_area)
3073 code = emit_subx_sp_imm (code, cfg->param_area);
3077 code = emit_aotconst (cfg, code, dreg, (MonoJumpInfoType)ins->inst_i1, ins->inst_p0);
3079 case OP_OBJC_GET_SELECTOR:
3080 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_OBJC_SELECTOR_REF, ins->inst_p0);
3081 /* See arch_emit_objc_selector_ref () in aot-compiler.c */
3082 arm_ldrx_lit (code, ins->dreg, 0);
3086 case OP_SEQ_POINT: {
3087 MonoInst *info_var = cfg->arch.seq_point_info_var;
* For AOT, we use one GOT slot per method, which will point to a
* SeqPointInfo structure containing all the information required
* by the code below.
3094 if (cfg->compile_aot) {
3095 g_assert (info_var);
3096 g_assert (info_var->opcode == OP_REGOFFSET);
3099 if (ins->flags & MONO_INST_SINGLE_STEP_LOC) {
3100 MonoInst *var = cfg->arch.ss_tramp_var;
3103 g_assert (var->opcode == OP_REGOFFSET);
3104 /* Load ss_tramp_var */
3105 /* This is equal to &ss_trampoline */
3106 arm_ldrx (code, ARMREG_IP1, var->inst_basereg, var->inst_offset);
3107 /* Load the trampoline address */
3108 arm_ldrx (code, ARMREG_IP1, ARMREG_IP1, 0);
3109 /* Call it if it is non-null */
3110 arm_cbzx (code, ARMREG_IP1, code + 8);
3111 arm_blrx (code, ARMREG_IP1);
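/* The cbz above targets code + 8, i.e. the instruction after the blr, so a null trampoline pointer skips the call */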
3114 mono_add_seq_point (cfg, bb, ins, code - cfg->native_code);
3116 if (cfg->compile_aot) {
3117 guint32 offset = code - cfg->native_code;
3120 arm_ldrx (code, ARMREG_IP1, info_var->inst_basereg, info_var->inst_offset);
3121 /* Add the offset */
3122 val = ((offset / 4) * sizeof (guint8*)) + MONO_STRUCT_OFFSET (SeqPointInfo, bp_addrs);
3123 /* Load the info->bp_addrs [offset], which is either 0 or the address of the bp trampoline */
3124 code = emit_ldrx (code, ARMREG_IP1, ARMREG_IP1, val);
/* Skip the call if the loaded value is 0 */
3126 arm_cbzx (code, ARMREG_IP1, code + 8);
3127 /* Call the breakpoint trampoline */
3128 arm_blrx (code, ARMREG_IP1);
3130 MonoInst *var = cfg->arch.bp_tramp_var;
3133 g_assert (var->opcode == OP_REGOFFSET);
3134 /* Load the address of the bp trampoline into IP0 */
3135 arm_ldrx (code, ARMREG_IP0, var->inst_basereg, var->inst_offset);
3137 * A placeholder for a possible breakpoint inserted by
3138 * mono_arch_set_breakpoint ().
3147 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_target_bb, MONO_R_ARM64_B);
3151 arm_brx (code, sreg1);
3183 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_BCC);
3184 cond = opcode_to_armcond (ins->opcode);
3185 arm_bcc (code, cond, 0);
3189 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_BCC);
3190 /* For fp compares, ARMCOND_LT is lt or unordered */
3191 arm_bcc (code, ARMCOND_LT, 0);
3194 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_BCC);
3195 arm_bcc (code, ARMCOND_EQ, 0);
3196 offset = code - cfg->native_code;
3197 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_BCC);
3198 /* For fp compares, ARMCOND_LT is lt or unordered */
3199 arm_bcc (code, ARMCOND_LT, 0);
3202 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_CBZ);
3203 arm_cbzw (code, sreg1, 0);
3206 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_CBZ);
3207 arm_cbzx (code, sreg1, 0);
3209 case OP_ARM64_CBNZW:
3210 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_CBZ);
3211 arm_cbnzw (code, sreg1, 0);
3213 case OP_ARM64_CBNZX:
3214 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_true_bb, MONO_R_ARM64_CBZ);
3215 arm_cbnzx (code, sreg1, 0);
3219 arm_addw (code, dreg, sreg1, sreg2);
3222 arm_addx (code, dreg, sreg1, sreg2);
3225 arm_subw (code, dreg, sreg1, sreg2);
3228 arm_subx (code, dreg, sreg1, sreg2);
3231 arm_andw (code, dreg, sreg1, sreg2);
3234 arm_andx (code, dreg, sreg1, sreg2);
3237 arm_orrw (code, dreg, sreg1, sreg2);
3240 arm_orrx (code, dreg, sreg1, sreg2);
3243 arm_eorw (code, dreg, sreg1, sreg2);
3246 arm_eorx (code, dreg, sreg1, sreg2);
3249 arm_negw (code, dreg, sreg1);
3252 arm_negx (code, dreg, sreg1);
3255 arm_mvnw (code, dreg, sreg1);
3258 arm_mvnx (code, dreg, sreg1);
3261 arm_addsw (code, dreg, sreg1, sreg2);
3265 arm_addsx (code, dreg, sreg1, sreg2);
3268 arm_subsw (code, dreg, sreg1, sreg2);
3272 arm_subsx (code, dreg, sreg1, sreg2);
3275 arm_cmpw (code, sreg1, sreg2);
3279 arm_cmpx (code, sreg1, sreg2);
3282 code = emit_addw_imm (code, dreg, sreg1, imm);
3286 code = emit_addx_imm (code, dreg, sreg1, imm);
3289 code = emit_subw_imm (code, dreg, sreg1, imm);
3292 code = emit_subx_imm (code, dreg, sreg1, imm);
3295 code = emit_andw_imm (code, dreg, sreg1, imm);
3299 code = emit_andx_imm (code, dreg, sreg1, imm);
3302 code = emit_orrw_imm (code, dreg, sreg1, imm);
3305 code = emit_orrx_imm (code, dreg, sreg1, imm);
3308 code = emit_eorw_imm (code, dreg, sreg1, imm);
3311 code = emit_eorx_imm (code, dreg, sreg1, imm);
3313 case OP_ICOMPARE_IMM:
3314 code = emit_cmpw_imm (code, sreg1, imm);
3316 case OP_LCOMPARE_IMM:
3317 case OP_COMPARE_IMM:
3319 arm_cmpx (code, sreg1, ARMREG_RZR);
3321 // FIXME: 32 vs 64 bit issues for 0xffffffff
3322 code = emit_imm64 (code, ARMREG_LR, imm);
3323 arm_cmpx (code, sreg1, ARMREG_LR);
3327 arm_lslvw (code, dreg, sreg1, sreg2);
3330 arm_lslvx (code, dreg, sreg1, sreg2);
3333 arm_asrvw (code, dreg, sreg1, sreg2);
3336 arm_asrvx (code, dreg, sreg1, sreg2);
3339 arm_lsrvw (code, dreg, sreg1, sreg2);
3342 arm_lsrvx (code, dreg, sreg1, sreg2);
3346 arm_movx (code, dreg, sreg1);
3348 arm_lslw (code, dreg, sreg1, imm);
3352 arm_movx (code, dreg, sreg1);
3354 arm_lslx (code, dreg, sreg1, imm);
3358 arm_movx (code, dreg, sreg1);
3360 arm_asrw (code, dreg, sreg1, imm);
3365 arm_movx (code, dreg, sreg1);
3367 arm_asrx (code, dreg, sreg1, imm);
3369 case OP_ISHR_UN_IMM:
3371 arm_movx (code, dreg, sreg1);
3373 arm_lsrw (code, dreg, sreg1, imm);
3376 case OP_LSHR_UN_IMM:
3378 arm_movx (code, dreg, sreg1);
3380 arm_lsrx (code, dreg, sreg1, imm);
3385 arm_sxtwx (code, dreg, sreg1);
3388 /* Clean out the upper word */
3389 arm_movw (code, dreg, sreg1);
3392 arm_lslx (code, dreg, sreg1, imm);
3395 /* MULTIPLY/DIVISION */
3398 // FIXME: Optimize this
3399 /* Check for zero */
3400 arm_cmpx_imm (code, sreg2, 0);
3401 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "DivideByZeroException");
3402 /* Check for INT_MIN/-1 */
3403 code = emit_imm (code, ARMREG_IP0, 0x80000000);
3404 arm_cmpx (code, sreg1, ARMREG_IP0);
3405 arm_cset (code, ARMCOND_EQ, ARMREG_IP1);
3406 code = emit_imm (code, ARMREG_IP0, 0xffffffff);
3407 arm_cmpx (code, sreg2, ARMREG_IP0);
3408 arm_cset (code, ARMCOND_EQ, ARMREG_IP0);
3409 arm_andx (code, ARMREG_IP0, ARMREG_IP0, ARMREG_IP1);
3410 arm_cmpx_imm (code, ARMREG_IP0, 1);
3411 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "OverflowException");
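/*
 * AArch64 sdiv never traps (division by zero simply yields 0), so both
 * the divide-by-zero and the INT_MIN / -1 overflow checks above have to
 * be done in software.
 */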
3412 if (ins->opcode == OP_IREM) {
3413 arm_sdivw (code, ARMREG_LR, sreg1, sreg2);
3414 arm_msubw (code, dreg, ARMREG_LR, sreg2, sreg1);
3416 arm_sdivw (code, dreg, sreg1, sreg2);
3420 arm_cmpx_imm (code, sreg2, 0);
3421 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "DivideByZeroException");
3422 arm_udivw (code, dreg, sreg1, sreg2);
3425 arm_cmpx_imm (code, sreg2, 0);
3426 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "DivideByZeroException");
3427 arm_udivw (code, ARMREG_LR, sreg1, sreg2);
3428 arm_msubw (code, dreg, ARMREG_LR, sreg2, sreg1);
3432 // FIXME: Optimize this
3433 /* Check for zero */
3434 arm_cmpx_imm (code, sreg2, 0);
3435 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "DivideByZeroException");
3436 /* Check for INT64_MIN/-1 */
3437 code = emit_imm64 (code, ARMREG_IP0, 0x8000000000000000);
3438 arm_cmpx (code, sreg1, ARMREG_IP0);
3439 arm_cset (code, ARMCOND_EQ, ARMREG_IP1);
3440 code = emit_imm64 (code, ARMREG_IP0, 0xffffffffffffffff);
3441 arm_cmpx (code, sreg2, ARMREG_IP0);
3442 arm_cset (code, ARMCOND_EQ, ARMREG_IP0);
3443 arm_andx (code, ARMREG_IP0, ARMREG_IP0, ARMREG_IP1);
3444 arm_cmpx_imm (code, ARMREG_IP0, 1);
3445 /* 64 bit uses ArithmeticException */
3446 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "ArithmeticException");
3447 if (ins->opcode == OP_LREM) {
3448 arm_sdivx (code, ARMREG_LR, sreg1, sreg2);
3449 arm_msubx (code, dreg, ARMREG_LR, sreg2, sreg1);
3451 arm_sdivx (code, dreg, sreg1, sreg2);
3455 arm_cmpx_imm (code, sreg2, 0);
3456 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "DivideByZeroException");
3457 arm_udivx (code, dreg, sreg1, sreg2);
3460 arm_cmpx_imm (code, sreg2, 0);
3461 code = emit_cond_exc (cfg, code, OP_COND_EXC_IEQ, "DivideByZeroException");
3462 arm_udivx (code, ARMREG_LR, sreg1, sreg2);
3463 arm_msubx (code, dreg, ARMREG_LR, sreg2, sreg1);
3466 arm_mulw (code, dreg, sreg1, sreg2);
3469 arm_mulx (code, dreg, sreg1, sreg2);
3472 code = emit_imm (code, ARMREG_LR, imm);
3473 arm_mulw (code, dreg, sreg1, ARMREG_LR);
3477 code = emit_imm (code, ARMREG_LR, imm);
3478 arm_mulx (code, dreg, sreg1, ARMREG_LR);
3482 case OP_ICONV_TO_I1:
3483 case OP_LCONV_TO_I1:
3484 arm_sxtbx (code, dreg, sreg1);
3486 case OP_ICONV_TO_I2:
3487 case OP_LCONV_TO_I2:
3488 arm_sxthx (code, dreg, sreg1);
3490 case OP_ICONV_TO_U1:
3491 case OP_LCONV_TO_U1:
3492 arm_uxtbw (code, dreg, sreg1);
3494 case OP_ICONV_TO_U2:
3495 case OP_LCONV_TO_U2:
3496 arm_uxthw (code, dreg, sreg1);
3522 cond = opcode_to_armcond (ins->opcode);
3523 arm_cset (code, cond, dreg);
3536 cond = opcode_to_armcond (ins->opcode);
3537 arm_fcmpd (code, sreg1, sreg2);
3538 arm_cset (code, cond, dreg);
3543 case OP_LOADI1_MEMBASE:
3544 code = emit_ldrsbx (code, dreg, ins->inst_basereg, ins->inst_offset);
3546 case OP_LOADU1_MEMBASE:
3547 code = emit_ldrb (code, dreg, ins->inst_basereg, ins->inst_offset);
3549 case OP_LOADI2_MEMBASE:
3550 code = emit_ldrshx (code, dreg, ins->inst_basereg, ins->inst_offset);
3552 case OP_LOADU2_MEMBASE:
3553 code = emit_ldrh (code, dreg, ins->inst_basereg, ins->inst_offset);
3555 case OP_LOADI4_MEMBASE:
3556 code = emit_ldrswx (code, dreg, ins->inst_basereg, ins->inst_offset);
3558 case OP_LOADU4_MEMBASE:
3559 code = emit_ldrw (code, dreg, ins->inst_basereg, ins->inst_offset);
3561 case OP_LOAD_MEMBASE:
3562 case OP_LOADI8_MEMBASE:
3563 code = emit_ldrx (code, dreg, ins->inst_basereg, ins->inst_offset);
3565 case OP_STOREI1_MEMBASE_IMM:
3566 case OP_STOREI2_MEMBASE_IMM:
3567 case OP_STOREI4_MEMBASE_IMM:
3568 case OP_STORE_MEMBASE_IMM:
3569 case OP_STOREI8_MEMBASE_IMM: {
3573 code = emit_imm (code, ARMREG_LR, imm);
3576 immreg = ARMREG_RZR;
3579 switch (ins->opcode) {
3580 case OP_STOREI1_MEMBASE_IMM:
3581 code = emit_strb (code, immreg, ins->inst_destbasereg, ins->inst_offset);
3583 case OP_STOREI2_MEMBASE_IMM:
3584 code = emit_strh (code, immreg, ins->inst_destbasereg, ins->inst_offset);
3586 case OP_STOREI4_MEMBASE_IMM:
3587 code = emit_strw (code, immreg, ins->inst_destbasereg, ins->inst_offset);
3589 case OP_STORE_MEMBASE_IMM:
3590 case OP_STOREI8_MEMBASE_IMM:
3591 code = emit_strx (code, immreg, ins->inst_destbasereg, ins->inst_offset);
3594 g_assert_not_reached ();
3599 case OP_STOREI1_MEMBASE_REG:
3600 code = emit_strb (code, sreg1, ins->inst_destbasereg, ins->inst_offset);
3602 case OP_STOREI2_MEMBASE_REG:
3603 code = emit_strh (code, sreg1, ins->inst_destbasereg, ins->inst_offset);
3605 case OP_STOREI4_MEMBASE_REG:
3606 code = emit_strw (code, sreg1, ins->inst_destbasereg, ins->inst_offset);
3608 case OP_STORE_MEMBASE_REG:
3609 case OP_STOREI8_MEMBASE_REG:
3610 code = emit_strx (code, sreg1, ins->inst_destbasereg, ins->inst_offset);
3613 code = emit_tls_get (code, dreg, ins->inst_offset);
3616 code = emit_tls_set (code, sreg1, ins->inst_offset);
3619 case OP_MEMORY_BARRIER:
3622 case OP_ATOMIC_ADD_I4: {
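/*
 * Load-exclusive/store-release-exclusive retry loop: stlxr writes 0 to
 * ip1 on success and a non-zero value if the exclusive monitor was
 * lost, in which case the cbnz retries the loop. The other atomic
 * opcodes below use the same pattern.
 */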
3626 arm_ldxrw (code, ARMREG_IP0, sreg1);
3627 arm_addx (code, ARMREG_IP0, ARMREG_IP0, sreg2);
3628 arm_stlxrw (code, ARMREG_IP1, ARMREG_IP0, sreg1);
3629 arm_cbnzw (code, ARMREG_IP1, buf [0]);
3632 arm_movx (code, dreg, ARMREG_IP0);
3635 case OP_ATOMIC_ADD_I8: {
3639 arm_ldxrx (code, ARMREG_IP0, sreg1);
3640 arm_addx (code, ARMREG_IP0, ARMREG_IP0, sreg2);
3641 arm_stlxrx (code, ARMREG_IP1, ARMREG_IP0, sreg1);
3642 arm_cbnzx (code, ARMREG_IP1, buf [0]);
3645 arm_movx (code, dreg, ARMREG_IP0);
3648 case OP_ATOMIC_EXCHANGE_I4: {
3652 arm_ldxrw (code, ARMREG_IP0, sreg1);
3653 arm_stlxrw (code, ARMREG_IP1, sreg2, sreg1);
3654 arm_cbnzw (code, ARMREG_IP1, buf [0]);
3657 arm_movx (code, dreg, ARMREG_IP0);
3660 case OP_ATOMIC_EXCHANGE_I8: {
3664 arm_ldxrx (code, ARMREG_IP0, sreg1);
3665 arm_stlxrx (code, ARMREG_IP1, sreg2, sreg1);
3666 arm_cbnzw (code, ARMREG_IP1, buf [0]);
3669 arm_movx (code, dreg, ARMREG_IP0);
3672 case OP_ATOMIC_CAS_I4: {
3675 /* sreg2 is the value, sreg3 is the comparand */
3677 arm_ldxrw (code, ARMREG_IP0, sreg1);
3678 arm_cmpw (code, ARMREG_IP0, ins->sreg3);
3680 arm_bcc (code, ARMCOND_NE, 0);
3681 arm_stlxrw (code, ARMREG_IP1, sreg2, sreg1);
3682 arm_cbnzw (code, ARMREG_IP1, buf [0]);
3683 arm_patch_rel (buf [1], code, MONO_R_ARM64_BCC);
3686 arm_movx (code, dreg, ARMREG_IP0);
3689 case OP_ATOMIC_CAS_I8: {
3693 arm_ldxrx (code, ARMREG_IP0, sreg1);
3694 arm_cmpx (code, ARMREG_IP0, ins->sreg3);
3696 arm_bcc (code, ARMCOND_NE, 0);
3697 arm_stlxrx (code, ARMREG_IP1, sreg2, sreg1);
3698 arm_cbnzw (code, ARMREG_IP1, buf [0]);
3699 arm_patch_rel (buf [1], code, MONO_R_ARM64_BCC);
3702 arm_movx (code, dreg, ARMREG_IP0);
3705 case OP_ATOMIC_LOAD_I1: {
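/*
 * The atomic loads below use ldar (load-acquire); for SEQ ordering a
 * full barrier is emitted before the load, and the loaded value is then
 * sign- or zero-extended as needed.
 */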
3706 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3707 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3709 arm_ldarb (code, ins->dreg, ARMREG_LR);
3710 arm_sxtbx (code, ins->dreg, ins->dreg);
3713 case OP_ATOMIC_LOAD_U1: {
3714 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3715 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3717 arm_ldarb (code, ins->dreg, ARMREG_LR);
3718 arm_uxtbx (code, ins->dreg, ins->dreg);
3721 case OP_ATOMIC_LOAD_I2: {
3722 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3723 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3725 arm_ldarh (code, ins->dreg, ARMREG_LR);
3726 arm_sxthx (code, ins->dreg, ins->dreg);
3729 case OP_ATOMIC_LOAD_U2: {
3730 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3731 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3733 arm_ldarh (code, ins->dreg, ARMREG_LR);
3734 arm_uxthx (code, ins->dreg, ins->dreg);
3737 case OP_ATOMIC_LOAD_I4: {
3738 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3739 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3741 arm_ldarw (code, ins->dreg, ARMREG_LR);
3742 arm_sxtwx (code, ins->dreg, ins->dreg);
3745 case OP_ATOMIC_LOAD_U4: {
3746 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3747 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3749 arm_ldarw (code, ins->dreg, ARMREG_LR);
3750 arm_movw (code, ins->dreg, ins->dreg); /* Clear upper half of the register. */
3753 case OP_ATOMIC_LOAD_I8:
3754 case OP_ATOMIC_LOAD_U8: {
3755 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3756 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3758 arm_ldarx (code, ins->dreg, ARMREG_LR);
3761 case OP_ATOMIC_LOAD_R4: {
3762 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3763 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3766 arm_ldarw (code, ARMREG_LR, ARMREG_LR);
3767 arm_fmov_rx_to_double (code, ins->dreg, ARMREG_LR);
3769 arm_ldarw (code, ARMREG_LR, ARMREG_LR);
3770 arm_fmov_rx_to_double (code, FP_TEMP_REG, ARMREG_LR);
3771 arm_fcvt_sd (code, ins->dreg, FP_TEMP_REG);
3775 case OP_ATOMIC_LOAD_R8: {
3776 code = emit_addx_imm (code, ARMREG_LR, ins->inst_basereg, ins->inst_offset);
3777 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3779 arm_ldarx (code, ARMREG_LR, ARMREG_LR);
3780 arm_fmov_rx_to_double (code, ins->dreg, ARMREG_LR);
3783 case OP_ATOMIC_STORE_I1:
3784 case OP_ATOMIC_STORE_U1: {
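/*
 * The atomic stores below use stlr (store-release); for SEQ ordering a
 * full barrier is emitted after the store.
 */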
3785 code = emit_addx_imm (code, ARMREG_LR, ins->inst_destbasereg, ins->inst_offset);
3786 arm_stlrb (code, ARMREG_LR, ins->sreg1);
3787 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3791 case OP_ATOMIC_STORE_I2:
3792 case OP_ATOMIC_STORE_U2: {
3793 code = emit_addx_imm (code, ARMREG_LR, ins->inst_destbasereg, ins->inst_offset);
3794 arm_stlrh (code, ARMREG_LR, ins->sreg1);
3795 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3799 case OP_ATOMIC_STORE_I4:
3800 case OP_ATOMIC_STORE_U4: {
3801 code = emit_addx_imm (code, ARMREG_LR, ins->inst_destbasereg, ins->inst_offset);
3802 arm_stlrw (code, ARMREG_LR, ins->sreg1);
3803 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3807 case OP_ATOMIC_STORE_I8:
3808 case OP_ATOMIC_STORE_U8: {
3809 code = emit_addx_imm (code, ARMREG_LR, ins->inst_destbasereg, ins->inst_offset);
3810 arm_stlrx (code, ARMREG_LR, ins->sreg1);
3811 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3815 case OP_ATOMIC_STORE_R4: {
3816 code = emit_addx_imm (code, ARMREG_LR, ins->inst_destbasereg, ins->inst_offset);
3818 arm_fmov_double_to_rx (code, ARMREG_IP0, ins->sreg1);
3819 arm_stlrw (code, ARMREG_LR, ARMREG_IP0);
3821 arm_fcvt_ds (code, FP_TEMP_REG, ins->sreg1);
3822 arm_fmov_double_to_rx (code, ARMREG_IP0, FP_TEMP_REG);
3823 arm_stlrw (code, ARMREG_LR, ARMREG_IP0);
3825 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3829 case OP_ATOMIC_STORE_R8: {
3830 code = emit_addx_imm (code, ARMREG_LR, ins->inst_destbasereg, ins->inst_offset);
3831 arm_fmov_double_to_rx (code, ARMREG_IP0, ins->sreg1);
3832 arm_stlrx (code, ARMREG_LR, ARMREG_IP0);
3833 if (ins->backend.memory_barrier_kind == MONO_MEMORY_BARRIER_SEQ)
3840 guint64 imm = *(guint64*)ins->inst_p0;
3843 arm_fmov_rx_to_double (code, dreg, ARMREG_RZR);
3845 code = emit_imm64 (code, ARMREG_LR, imm);
3846 arm_fmov_rx_to_double (code, ins->dreg, ARMREG_LR);
3851 guint64 imm = *(guint32*)ins->inst_p0;
3853 code = emit_imm64 (code, ARMREG_LR, imm);
3855 arm_fmov_rx_to_double (code, dreg, ARMREG_LR);
3857 arm_fmov_rx_to_double (code, FP_TEMP_REG, ARMREG_LR);
3858 arm_fcvt_sd (code, dreg, FP_TEMP_REG);
3862 case OP_LOADR8_MEMBASE:
3863 code = emit_ldrfpx (code, dreg, ins->inst_basereg, ins->inst_offset);
3865 case OP_LOADR4_MEMBASE:
3867 code = emit_ldrfpw (code, dreg, ins->inst_basereg, ins->inst_offset);
3869 code = emit_ldrfpw (code, FP_TEMP_REG, ins->inst_basereg, ins->inst_offset);
3870 arm_fcvt_sd (code, dreg, FP_TEMP_REG);
3873 case OP_STORER8_MEMBASE_REG:
3874 code = emit_strfpx (code, sreg1, ins->inst_destbasereg, ins->inst_offset);
3876 case OP_STORER4_MEMBASE_REG:
3878 code = emit_strfpw (code, sreg1, ins->inst_destbasereg, ins->inst_offset);
3880 arm_fcvt_ds (code, FP_TEMP_REG, sreg1);
3881 code = emit_strfpw (code, FP_TEMP_REG, ins->inst_destbasereg, ins->inst_offset);
3886 arm_fmovd (code, dreg, sreg1);
3890 arm_fmovs (code, dreg, sreg1);
3892 case OP_MOVE_F_TO_I4:
3894 arm_fmov_double_to_rx (code, ins->dreg, ins->sreg1);
3896 arm_fcvt_ds (code, ins->dreg, ins->sreg1);
3897 arm_fmov_double_to_rx (code, ins->dreg, ins->dreg);
3900 case OP_MOVE_I4_TO_F:
3902 arm_fmov_rx_to_double (code, ins->dreg, ins->sreg1);
3904 arm_fmov_rx_to_double (code, ins->dreg, ins->sreg1);
3905 arm_fcvt_sd (code, ins->dreg, ins->dreg);
3908 case OP_MOVE_F_TO_I8:
3909 arm_fmov_double_to_rx (code, ins->dreg, ins->sreg1);
3911 case OP_MOVE_I8_TO_F:
3912 arm_fmov_rx_to_double (code, ins->dreg, ins->sreg1);
3915 arm_fcmpd (code, sreg1, sreg2);
3918 arm_fcmps (code, sreg1, sreg2);
3920 case OP_FCONV_TO_I1:
3921 arm_fcvtzs_dx (code, dreg, sreg1);
3922 arm_sxtbx (code, dreg, dreg);
3924 case OP_FCONV_TO_U1:
3925 arm_fcvtzu_dx (code, dreg, sreg1);
3926 arm_uxtbw (code, dreg, dreg);
3928 case OP_FCONV_TO_I2:
3929 arm_fcvtzs_dx (code, dreg, sreg1);
3930 arm_sxthx (code, dreg, dreg);
3932 case OP_FCONV_TO_U2:
3933 arm_fcvtzu_dx (code, dreg, sreg1);
3934 arm_uxthw (code, dreg, dreg);
3936 case OP_FCONV_TO_I4:
3937 arm_fcvtzs_dx (code, dreg, sreg1);
3938 arm_sxtwx (code, dreg, dreg);
3940 case OP_FCONV_TO_U4:
3941 arm_fcvtzu_dx (code, dreg, sreg1);
3943 case OP_FCONV_TO_I8:
3944 arm_fcvtzs_dx (code, dreg, sreg1);
3946 case OP_FCONV_TO_U8:
3947 arm_fcvtzu_dx (code, dreg, sreg1);
3949 case OP_FCONV_TO_R4:
3951 arm_fcvt_ds (code, dreg, sreg1);
3953 arm_fcvt_ds (code, FP_TEMP_REG, sreg1);
3954 arm_fcvt_sd (code, dreg, FP_TEMP_REG);
3957 case OP_ICONV_TO_R4:
3959 arm_scvtf_rw_to_s (code, dreg, sreg1);
3961 arm_scvtf_rw_to_s (code, FP_TEMP_REG, sreg1);
3962 arm_fcvt_sd (code, dreg, FP_TEMP_REG);
3965 case OP_LCONV_TO_R4:
3967 arm_scvtf_rx_to_s (code, dreg, sreg1);
3969 arm_scvtf_rx_to_s (code, FP_TEMP_REG, sreg1);
3970 arm_fcvt_sd (code, dreg, FP_TEMP_REG);
3973 case OP_ICONV_TO_R8:
3974 arm_scvtf_rw_to_d (code, dreg, sreg1);
3976 case OP_LCONV_TO_R8:
3977 arm_scvtf_rx_to_d (code, dreg, sreg1);
3979 case OP_ICONV_TO_R_UN:
3980 arm_ucvtf_rw_to_d (code, dreg, sreg1);
3982 case OP_LCONV_TO_R_UN:
3983 arm_ucvtf_rx_to_d (code, dreg, sreg1);
3986 arm_fadd_d (code, dreg, sreg1, sreg2);
3989 arm_fsub_d (code, dreg, sreg1, sreg2);
3992 arm_fmul_d (code, dreg, sreg1, sreg2);
3995 arm_fdiv_d (code, dreg, sreg1, sreg2);
3999 g_assert_not_reached ();
4002 arm_fneg_d (code, dreg, sreg1);
4004 case OP_ARM_SETFREG_R4:
4005 arm_fcvt_ds (code, dreg, sreg1);
4008 /* Check for infinity */
4009 code = emit_imm64 (code, ARMREG_LR, 0x7fefffffffffffffLL);
4010 arm_fmov_rx_to_double (code, FP_TEMP_REG, ARMREG_LR);
4011 arm_fabs_d (code, FP_TEMP_REG2, sreg1);
4012 arm_fcmpd (code, FP_TEMP_REG2, FP_TEMP_REG);
4013 code = emit_cond_exc (cfg, code, OP_COND_EXC_GT, "ArithmeticException");
4014 /* Check for nans */
4015 arm_fcmpd (code, FP_TEMP_REG2, FP_TEMP_REG2);
4016 code = emit_cond_exc (cfg, code, OP_COND_EXC_OV, "ArithmeticException");
4017 arm_fmovd (code, dreg, sreg1);
4022 arm_fadd_s (code, dreg, sreg1, sreg2);
4025 arm_fsub_s (code, dreg, sreg1, sreg2);
4028 arm_fmul_s (code, dreg, sreg1, sreg2);
4031 arm_fdiv_s (code, dreg, sreg1, sreg2);
4034 arm_fneg_s (code, dreg, sreg1);
4036 case OP_RCONV_TO_I1:
4037 arm_fcvtzs_sx (code, dreg, sreg1);
4038 arm_sxtbx (code, dreg, dreg);
4040 case OP_RCONV_TO_U1:
4041 arm_fcvtzu_sx (code, dreg, sreg1);
4042 arm_uxtbw (code, dreg, dreg);
4044 case OP_RCONV_TO_I2:
4045 arm_fcvtzs_sx (code, dreg, sreg1);
4046 arm_sxthx (code, dreg, dreg);
4048 case OP_RCONV_TO_U2:
4049 arm_fcvtzu_sx (code, dreg, sreg1);
4050 arm_uxthw (code, dreg, dreg);
4052 case OP_RCONV_TO_I4:
4053 arm_fcvtzs_sx (code, dreg, sreg1);
4054 arm_sxtwx (code, dreg, dreg);
4056 case OP_RCONV_TO_U4:
4057 arm_fcvtzu_sx (code, dreg, sreg1);
4059 case OP_RCONV_TO_I8:
4060 arm_fcvtzs_sx (code, dreg, sreg1);
4062 case OP_RCONV_TO_U8:
4063 arm_fcvtzu_sx (code, dreg, sreg1);
4065 case OP_RCONV_TO_R8:
4066 arm_fcvt_sd (code, dreg, sreg1);
4068 case OP_RCONV_TO_R4:
4070 arm_fmovs (code, dreg, sreg1);
4082 cond = opcode_to_armcond (ins->opcode);
4083 arm_fcmps (code, sreg1, sreg2);
4084 arm_cset (code, cond, dreg);
4095 call = (MonoCallInst*)ins;
4096 if (ins->flags & MONO_INST_HAS_METHOD)
4097 code = emit_call (cfg, code, MONO_PATCH_INFO_METHOD, call->method);
4099 code = emit_call (cfg, code, MONO_PATCH_INFO_ABS, call->fptr);
4100 code = emit_move_return_value (cfg, code, ins);
4102 case OP_VOIDCALL_REG:
4108 arm_blrx (code, sreg1);
4109 code = emit_move_return_value (cfg, code, ins);
4111 case OP_VOIDCALL_MEMBASE:
4112 case OP_CALL_MEMBASE:
4113 case OP_LCALL_MEMBASE:
4114 case OP_FCALL_MEMBASE:
4115 case OP_RCALL_MEMBASE:
4116 case OP_VCALL2_MEMBASE:
4117 code = emit_ldrx (code, ARMREG_IP0, ins->inst_basereg, ins->inst_offset);
4118 arm_blrx (code, ARMREG_IP0);
4119 code = emit_move_return_value (cfg, code, ins);
4122 MonoCallInst *call = (MonoCallInst*)ins;
4124 g_assert (!cfg->method->save_lmf);
4126 // FIXME: Copy stack arguments
4128 /* Restore registers */
4129 code = emit_load_regset (code, MONO_ARCH_CALLEE_SAVED_REGS & cfg->used_int_regs, ARMREG_FP, cfg->arch.saved_gregs_offset);
4132 code = mono_arm_emit_destroy_frame (code, cfg->stack_offset, ((1 << ARMREG_IP0) | (1 << ARMREG_IP1)));
4134 if (cfg->compile_aot) {
4135 /* This is not a PLT patch */
4136 code = emit_aotconst (cfg, code, ARMREG_IP0, MONO_PATCH_INFO_METHOD_JUMP, call->method);
4137 arm_brx (code, ARMREG_IP0);
4139 mono_add_patch_info_rel (cfg, code - cfg->native_code, MONO_PATCH_INFO_METHOD_JUMP, call->method, MONO_R_ARM64_B);
4142 ins->flags |= MONO_INST_GC_CALLSITE;
4143 ins->backend.pc_offset = code - cfg->native_code;
4147 g_assert (cfg->arch.cinfo);
4148 code = emit_addx_imm (code, ARMREG_IP0, cfg->arch.args_reg, ((CallInfo*)cfg->arch.cinfo)->sig_cookie.offset);
4149 arm_strx (code, ARMREG_IP0, sreg1, 0);
4152 MonoInst *var = cfg->dyn_call_var;
4153 guint8 *labels [16];
4157 * sreg1 points to a DynCallArgs structure initialized by mono_arch_start_dyn_call ().
4158 * sreg2 is the function to call.
4161 g_assert (var->opcode == OP_REGOFFSET);
4163 arm_movx (code, ARMREG_LR, sreg1);
4164 arm_movx (code, ARMREG_IP1, sreg2);
4166 /* Save args buffer */
4167 code = emit_strx (code, ARMREG_LR, var->inst_basereg, var->inst_offset);
4169 /* Set fp argument regs */
4170 code = emit_ldrw (code, ARMREG_R0, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, n_fpargs));
4171 arm_cmpw (code, ARMREG_R0, ARMREG_RZR);
4173 arm_bcc (code, ARMCOND_EQ, 0);
4174 for (i = 0; i < 8; ++i)
4175 code = emit_ldrfpx (code, ARMREG_D0 + i, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, fpregs) + (i * 8));
4176 arm_patch_rel (labels [0], code, MONO_R_ARM64_BCC);
4178 /* Set stack args */
4179 for (i = 0; i < DYN_CALL_STACK_ARGS; ++i) {
4180 code = emit_ldrx (code, ARMREG_R0, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, regs) + ((PARAM_REGS + 1 + i) * sizeof (mgreg_t)));
4181 code = emit_strx (code, ARMREG_R0, ARMREG_SP, i * sizeof (mgreg_t));
4184 /* Set argument registers + r8 */
4185 code = mono_arm_emit_load_regarray (code, 0x1ff, ARMREG_LR, 0);
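/* 0x1ff selects r0-r8: the eight argument registers plus r8, which holds the valuetype return buffer address */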
4188 arm_blrx (code, ARMREG_IP1);
4191 code = emit_ldrx (code, ARMREG_LR, var->inst_basereg, var->inst_offset);
4192 arm_strx (code, ARMREG_R0, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, res));
4193 arm_strx (code, ARMREG_R1, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, res2));
4194 /* Save fp result */
4195 code = emit_ldrw (code, ARMREG_R0, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, n_fpret));
4196 arm_cmpw (code, ARMREG_R0, ARMREG_RZR);
4198 arm_bcc (code, ARMCOND_EQ, 0);
4199 for (i = 0; i < 8; ++i)
4200 code = emit_strfpx (code, ARMREG_D0 + i, ARMREG_LR, MONO_STRUCT_OFFSET (DynCallArgs, fpregs) + (i * 8));
4201 arm_patch_rel (labels [1], code, MONO_R_ARM64_BCC);
4205 case OP_GENERIC_CLASS_INIT: {
4209 byte_offset = MONO_STRUCT_OFFSET (MonoVTable, initialized);
4211 /* Load vtable->initialized */
4212 arm_ldrsbx (code, ARMREG_IP0, sreg1, byte_offset);
4214 arm_cbnzx (code, ARMREG_IP0, 0);
4217 g_assert (sreg1 == ARMREG_R0);
4218 code = emit_call (cfg, code, MONO_PATCH_INFO_INTERNAL_METHOD,
4219 (gpointer)"mono_generic_class_init");
4221 mono_arm_patch (jump, code, MONO_R_ARM64_CBZ);
4226 arm_ldrx (code, ARMREG_LR, sreg1, 0);
4229 case OP_NOT_REACHED:
4232 case OP_IL_SEQ_POINT:
4233 mono_add_seq_point (cfg, bb, ins, code - cfg->native_code);
4238 case OP_COND_EXC_IC:
4239 case OP_COND_EXC_OV:
4240 case OP_COND_EXC_IOV:
4241 case OP_COND_EXC_NC:
4242 case OP_COND_EXC_INC:
4243 case OP_COND_EXC_NO:
4244 case OP_COND_EXC_INO:
4245 case OP_COND_EXC_EQ:
4246 case OP_COND_EXC_IEQ:
4247 case OP_COND_EXC_NE_UN:
4248 case OP_COND_EXC_INE_UN:
4249 case OP_COND_EXC_ILT:
4250 case OP_COND_EXC_LT:
4251 case OP_COND_EXC_ILT_UN:
4252 case OP_COND_EXC_LT_UN:
4253 case OP_COND_EXC_IGT:
4254 case OP_COND_EXC_GT:
4255 case OP_COND_EXC_IGT_UN:
4256 case OP_COND_EXC_GT_UN:
4257 case OP_COND_EXC_IGE:
4258 case OP_COND_EXC_GE:
4259 case OP_COND_EXC_IGE_UN:
4260 case OP_COND_EXC_GE_UN:
4261 case OP_COND_EXC_ILE:
4262 case OP_COND_EXC_LE:
4263 case OP_COND_EXC_ILE_UN:
4264 case OP_COND_EXC_LE_UN:
4265 code = emit_cond_exc (cfg, code, ins->opcode, ins->inst_p1);
4268 if (sreg1 != ARMREG_R0)
4269 arm_movx (code, ARMREG_R0, sreg1);
4270 code = emit_call (cfg, code, MONO_PATCH_INFO_INTERNAL_METHOD,
4271 (gpointer)"mono_arch_throw_exception");
4274 if (sreg1 != ARMREG_R0)
4275 arm_movx (code, ARMREG_R0, sreg1);
4276 code = emit_call (cfg, code, MONO_PATCH_INFO_INTERNAL_METHOD,
4277 (gpointer)"mono_arch_rethrow_exception");
4279 case OP_CALL_HANDLER:
4280 mono_add_patch_info_rel (cfg, offset, MONO_PATCH_INFO_BB, ins->inst_target_bb, MONO_R_ARM64_BL);
4282 cfg->thunk_area += THUNK_SIZE;
4283 mono_cfg_add_try_hole (cfg, ins->inst_eh_block, code, bb);
4285 case OP_START_HANDLER: {
4286 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
4288 /* Save caller address */
4289 code = emit_strx (code, ARMREG_LR, spvar->inst_basereg, spvar->inst_offset);
4292 * Reserve a param area, see test_0_finally_param_area ().
4293 * This is needed because the param area is not set up when
4294 * we are called from EH code.
4296 if (cfg->param_area)
4297 code = emit_subx_sp_imm (code, cfg->param_area);
4301 case OP_ENDFILTER: {
4302 MonoInst *spvar = mono_find_spvar_for_region (cfg, bb->region);
4304 if (cfg->param_area)
4305 code = emit_addx_sp_imm (code, cfg->param_area);
4307 if (ins->opcode == OP_ENDFILTER && sreg1 != ARMREG_R0)
4308 arm_movx (code, ARMREG_R0, sreg1);
/* Return either to the point after the branch in OP_CALL_HANDLER, or to the EH code */
4311 code = emit_ldrx (code, ARMREG_LR, spvar->inst_basereg, spvar->inst_offset);
4312 arm_brx (code, ARMREG_LR);
4316 if (ins->dreg != ARMREG_R0)
4317 arm_movx (code, ins->dreg, ARMREG_R0);
4319 case OP_GC_SAFE_POINT: {
4320 #if defined (USE_COOP_GC)
4323 arm_ldrx (code, ARMREG_IP1, ins->sreg1, 0);
4324 /* Call it if it is non-null */
4326 arm_cbzx (code, ARMREG_IP1, 0);
4327 code = emit_call (cfg, code, MONO_PATCH_INFO_INTERNAL_METHOD, "mono_threads_state_poll");
4328 mono_arm_patch (buf [0], code, MONO_R_ARM64_CBZ);
4334 g_warning ("unknown opcode %s in %s()\n", mono_inst_name (ins->opcode), __FUNCTION__);
4335 g_assert_not_reached ();
4338 if ((cfg->opt & MONO_OPT_BRANCH) && ((code - cfg->native_code - offset) > max_len)) {
4339 g_warning ("wrong maximal instruction length of instruction %s (expected %d, got %d)",
mono_inst_name (ins->opcode), max_len, (int)(code - cfg->native_code - offset));
4341 g_assert_not_reached ();
4346 * If the compiled code size is larger than the bcc displacement (19 bits signed),
4347 * insert branch islands between/inside basic blocks.
4349 if (cfg->arch.cond_branch_islands)
4350 code = emit_branch_island (cfg, code, start_offset);
4352 cfg->code_len = code - cfg->native_code;
4356 emit_move_args (MonoCompile *cfg, guint8 *code)
4363 cinfo = cfg->arch.cinfo;
4365 for (i = 0; i < cinfo->nargs; ++i) {
4366 ainfo = cinfo->args + i;
4367 ins = cfg->args [i];
4369 if (ins->opcode == OP_REGVAR) {
4370 switch (ainfo->storage) {
4372 arm_movx (code, ins->dreg, ainfo->reg);
4375 switch (ainfo->slot_size) {
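/*
 * Stack argument slots can be smaller than 8 bytes (e.g. on the ios
 * ABI), so load exactly slot_size bytes with the right extension.
 */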
4378 code = emit_ldrsbx (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
4380 code = emit_ldrb (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
4384 code = emit_ldrshx (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
4386 code = emit_ldrh (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
4390 code = emit_ldrswx (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
4392 code = emit_ldrw (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
4395 code = emit_ldrx (code, ins->dreg, cfg->arch.args_reg, ainfo->offset);
4400 g_assert_not_reached ();
4404 if (ainfo->storage != ArgVtypeByRef && ainfo->storage != ArgVtypeByRefOnStack)
4405 g_assert (ins->opcode == OP_REGOFFSET);
4407 switch (ainfo->storage) {
4409 /* Stack slots for arguments have size 8 */
4410 code = emit_strx (code, ainfo->reg, ins->inst_basereg, ins->inst_offset);
4413 code = emit_strfpx (code, ainfo->reg, ins->inst_basereg, ins->inst_offset);
4416 code = emit_strfpw (code, ainfo->reg, ins->inst_basereg, ins->inst_offset);
4421 case ArgVtypeByRefOnStack:
4422 case ArgVtypeOnStack:
4424 case ArgVtypeByRef: {
4425 MonoInst *addr_arg = ins->inst_left;
4427 if (ainfo->gsharedvt) {
4428 g_assert (ins->opcode == OP_GSHAREDVT_ARG_REGOFFSET);
4429 arm_strx (code, ainfo->reg, ins->inst_basereg, ins->inst_offset);
4431 g_assert (ins->opcode == OP_VTARG_ADDR);
4432 g_assert (addr_arg->opcode == OP_REGOFFSET);
4433 arm_strx (code, ainfo->reg, addr_arg->inst_basereg, addr_arg->inst_offset);
4437 case ArgVtypeInIRegs:
4438 for (part = 0; part < ainfo->nregs; part ++) {
4439 code = emit_strx (code, ainfo->reg + part, ins->inst_basereg, ins->inst_offset + (part * 8));
4443 for (part = 0; part < ainfo->nregs; part ++) {
4444 if (ainfo->esize == 4)
4445 code = emit_strfpw (code, ainfo->reg + part, ins->inst_basereg, ins->inst_offset + ainfo->foffsets [part]);
4447 code = emit_strfpx (code, ainfo->reg + part, ins->inst_basereg, ins->inst_offset + ainfo->foffsets [part]);
4451 g_assert_not_reached ();
4461 * emit_store_regarray:
4463 * Emit code to store the registers in REGS into the appropriate elements of
4464 * the register array at BASEREG+OFFSET.
4466 static __attribute__ ((__warn_unused_result__)) guint8*
4467 emit_store_regarray (guint8 *code, guint64 regs, int basereg, int offset)
4471 for (i = 0; i < 32; ++i) {
4472 if (regs & (1 << i)) {
4473 if (i + 1 < 32 && (regs & (1 << (i + 1))) && (i + 1 != ARMREG_SP)) {
4474 arm_stpx (code, i, i + 1, basereg, offset + (i * 8));
4476 } else if (i == ARMREG_SP) {
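/* Encoding 31 in the Rt field of str means xzr, not sp, so sp has to be moved through a scratch register first */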
4477 arm_movspx (code, ARMREG_IP1, ARMREG_SP);
4478 arm_strx (code, ARMREG_IP1, basereg, offset + (i * 8));
4480 arm_strx (code, i, basereg, offset + (i * 8));
4488 * emit_load_regarray:
4490 * Emit code to load the registers in REGS from the appropriate elements of
4491 * the register array at BASEREG+OFFSET.
4493 static __attribute__ ((__warn_unused_result__)) guint8*
4494 emit_load_regarray (guint8 *code, guint64 regs, int basereg, int offset)
4498 for (i = 0; i < 32; ++i) {
4499 if (regs & (1 << i)) {
if (i + 1 < 32 && (regs & (1 << (i + 1))) && (i + 1 != ARMREG_SP)) {
4501 if (offset + (i * 8) < 500)
4502 arm_ldpx (code, i, i + 1, basereg, offset + (i * 8));
4504 code = emit_ldrx (code, i, basereg, offset + (i * 8));
4505 code = emit_ldrx (code, i + 1, basereg, offset + ((i + 1) * 8));
4508 } else if (i == ARMREG_SP) {
4509 g_assert_not_reached ();
4511 code = emit_ldrx (code, i, basereg, offset + (i * 8));
4519 * emit_store_regset:
4521 * Emit code to store the registers in REGS into consecutive memory locations starting
4522 * at BASEREG+OFFSET.
4524 static __attribute__ ((__warn_unused_result__)) guint8*
4525 emit_store_regset (guint8 *code, guint64 regs, int basereg, int offset)
4530 for (i = 0; i < 32; ++i) {
4531 if (regs & (1 << i)) {
if (i + 1 < 32 && (regs & (1 << (i + 1))) && (i + 1 != ARMREG_SP)) {
4533 arm_stpx (code, i, i + 1, basereg, offset + (pos * 8));
4536 } else if (i == ARMREG_SP) {
4537 arm_movspx (code, ARMREG_IP1, ARMREG_SP);
4538 arm_strx (code, ARMREG_IP1, basereg, offset + (pos * 8));
4540 arm_strx (code, i, basereg, offset + (pos * 8));
4551 * Emit code to load the registers in REGS from consecutive memory locations starting
4552 * at BASEREG+OFFSET.
4554 static __attribute__ ((__warn_unused_result__)) guint8*
4555 emit_load_regset (guint8 *code, guint64 regs, int basereg, int offset)
4560 for (i = 0; i < 32; ++i) {
4561 if (regs & (1 << i)) {
if (i + 1 < 32 && (regs & (1 << (i + 1))) && (i + 1 != ARMREG_SP)) {
4563 arm_ldpx (code, i, i + 1, basereg, offset + (pos * 8));
4566 } else if (i == ARMREG_SP) {
4567 g_assert_not_reached ();
4569 arm_ldrx (code, i, basereg, offset + (pos * 8));
4577 __attribute__ ((__warn_unused_result__)) guint8*
4578 mono_arm_emit_load_regarray (guint8 *code, guint64 regs, int basereg, int offset)
4580 return emit_load_regarray (code, regs, basereg, offset);
4583 __attribute__ ((__warn_unused_result__)) guint8*
4584 mono_arm_emit_store_regarray (guint8 *code, guint64 regs, int basereg, int offset)
4586 return emit_store_regarray (code, regs, basereg, offset);
4589 __attribute__ ((__warn_unused_result__)) guint8*
4590 mono_arm_emit_store_regset (guint8 *code, guint64 regs, int basereg, int offset)
4592 return emit_store_regset (code, regs, basereg, offset);
4595 /* Same as emit_store_regset, but emit unwind info too */
4596 /* CFA_OFFSET is the offset between the CFA and basereg */
4597 static __attribute__ ((__warn_unused_result__)) guint8*
4598 emit_store_regset_cfa (MonoCompile *cfg, guint8 *code, guint64 regs, int basereg, int offset, int cfa_offset, guint64 no_cfa_regset)
4600 int i, j, pos, nregs;
4601 guint32 cfa_regset = regs & ~no_cfa_regset;
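/* Registers in no_cfa_regset are still stored, but no unwind info is emitted for them */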
4604 for (i = 0; i < 32; ++i) {
4606 if (regs & (1 << i)) {
if (i + 1 < 32 && (regs & (1 << (i + 1))) && (i + 1 != ARMREG_SP)) {
4609 arm_stpx (code, i, i + 1, basereg, offset + (pos * 8));
4611 code = emit_strx (code, i, basereg, offset + (pos * 8));
4612 code = emit_strx (code, i + 1, basereg, offset + (pos * 8) + 8);
4615 } else if (i == ARMREG_SP) {
4616 arm_movspx (code, ARMREG_IP1, ARMREG_SP);
4617 code = emit_strx (code, ARMREG_IP1, basereg, offset + (pos * 8));
4619 code = emit_strx (code, i, basereg, offset + (pos * 8));
4622 for (j = 0; j < nregs; ++j) {
4623 if (cfa_regset & (1 << (i + j)))
4624 mono_emit_unwind_op_offset (cfg, code, i + j, (- cfa_offset) + offset + ((pos + j) * 8));
4637 * Emit code to initialize an LMF structure at LMF_OFFSET.
4641 emit_setup_lmf (MonoCompile *cfg, guint8 *code, gint32 lmf_offset, int cfa_offset)
* The LMF should contain all the state required to reconstruct the machine state
* at the current point of execution. Since the LMF is only read during EH, only
* callee-saved registers etc. need to be saved.
4647 * FIXME: Save callee saved fp regs, JITted code doesn't use them, but native code does, and they
4648 * need to be restored during EH.
4652 arm_adrx (code, ARMREG_LR, code);
4653 code = emit_strx (code, ARMREG_LR, ARMREG_FP, lmf_offset + MONO_STRUCT_OFFSET (MonoLMF, pc));
4654 /* gregs + fp + sp */
4655 /* Don't emit unwind info for sp/fp, they are already handled in the prolog */
4656 code = emit_store_regset_cfa (cfg, code, MONO_ARCH_LMF_REGS, ARMREG_FP, lmf_offset + MONO_STRUCT_OFFSET (MonoLMF, gregs), cfa_offset, (1 << ARMREG_FP) | (1 << ARMREG_SP));
4662 mono_arch_emit_prolog (MonoCompile *cfg)
4664 MonoMethod *method = cfg->method;
4665 MonoMethodSignature *sig;
4668 int cfa_offset, max_offset;
4670 sig = mono_method_signature (method);
4671 cfg->code_size = 256 + sig->param_count * 64;
4672 code = cfg->native_code = g_malloc (cfg->code_size);
4674 /* This can be unaligned */
4675 cfg->stack_offset = ALIGN_TO (cfg->stack_offset, MONO_ARCH_FRAME_ALIGNMENT);
4681 mono_emit_unwind_op_def_cfa (cfg, code, ARMREG_SP, 0);
4684 if (arm_is_ldpx_imm (-cfg->stack_offset)) {
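/* The frame size fits into the stp pre-index immediate, so the frame can be allocated and fp/lr saved in one instruction */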
4685 arm_stpx_pre (code, ARMREG_FP, ARMREG_LR, ARMREG_SP, -cfg->stack_offset);
4687 /* sp -= cfg->stack_offset */
4688 /* This clobbers ip0/ip1 */
4689 code = emit_subx_sp_imm (code, cfg->stack_offset);
4690 arm_stpx (code, ARMREG_FP, ARMREG_LR, ARMREG_SP, 0);
4692 cfa_offset += cfg->stack_offset;
4693 mono_emit_unwind_op_def_cfa_offset (cfg, code, cfa_offset);
4694 mono_emit_unwind_op_offset (cfg, code, ARMREG_FP, (- cfa_offset) + 0);
4695 mono_emit_unwind_op_offset (cfg, code, ARMREG_LR, (- cfa_offset) + 8);
4696 arm_movspx (code, ARMREG_FP, ARMREG_SP);
4697 mono_emit_unwind_op_def_cfa_reg (cfg, code, ARMREG_FP);
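/*
 * From this point on the CFA is computed from fp, so sp adjustments
 * below (param area, localloc) need no additional unwind info.
 */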
4698 if (cfg->param_area) {
4699 /* The param area is below the frame pointer */
4700 code = emit_subx_sp_imm (code, cfg->param_area);
4703 if (cfg->method->save_lmf) {
4704 code = emit_setup_lmf (cfg, code, cfg->lmf_var->inst_offset, cfa_offset);
4707 code = emit_store_regset_cfa (cfg, code, MONO_ARCH_CALLEE_SAVED_REGS & cfg->used_int_regs, ARMREG_FP, cfg->arch.saved_gregs_offset, cfa_offset, 0);
4710 /* Setup args reg */
4711 if (cfg->arch.args_reg) {
4712 /* The register was already saved above */
4713 code = emit_addx_imm (code, cfg->arch.args_reg, ARMREG_FP, cfg->stack_offset);
4716 /* Save return area addr received in R8 */
4717 if (cfg->vret_addr) {
4718 MonoInst *ins = cfg->vret_addr;
4720 g_assert (ins->opcode == OP_REGOFFSET);
4721 code = emit_strx (code, ARMREG_R8, ins->inst_basereg, ins->inst_offset);
4724 /* Save mrgctx received in MONO_ARCH_RGCTX_REG */
4725 if (cfg->rgctx_var) {
4726 MonoInst *ins = cfg->rgctx_var;
4728 g_assert (ins->opcode == OP_REGOFFSET);
4730 code = emit_strx (code, MONO_ARCH_RGCTX_REG, ins->inst_basereg, ins->inst_offset);
4734 * Move arguments to their registers/stack locations.
4736 code = emit_move_args (cfg, code);
4738 /* Initialize seq_point_info_var */
4739 if (cfg->arch.seq_point_info_var) {
4740 MonoInst *ins = cfg->arch.seq_point_info_var;
4742 /* Initialize the variable from a GOT slot */
4743 code = emit_aotconst (cfg, code, ARMREG_IP0, MONO_PATCH_INFO_SEQ_POINT_INFO, cfg->method);
4744 g_assert (ins->opcode == OP_REGOFFSET);
4745 code = emit_strx (code, ARMREG_IP0, ins->inst_basereg, ins->inst_offset);
4747 /* Initialize ss_tramp_var */
4748 ins = cfg->arch.ss_tramp_var;
4749 g_assert (ins->opcode == OP_REGOFFSET);
4751 code = emit_ldrx (code, ARMREG_IP1, ARMREG_IP0, MONO_STRUCT_OFFSET (SeqPointInfo, ss_tramp_addr));
4752 code = emit_strx (code, ARMREG_IP1, ins->inst_basereg, ins->inst_offset);
4756 if (cfg->arch.ss_tramp_var) {
4757 /* Initialize ss_tramp_var */
4758 ins = cfg->arch.ss_tramp_var;
4759 g_assert (ins->opcode == OP_REGOFFSET);
4761 code = emit_imm64 (code, ARMREG_IP0, (guint64)&ss_trampoline);
4762 code = emit_strx (code, ARMREG_IP0, ins->inst_basereg, ins->inst_offset);
4765 if (cfg->arch.bp_tramp_var) {
4766 /* Initialize bp_tramp_var */
4767 ins = cfg->arch.bp_tramp_var;
4768 g_assert (ins->opcode == OP_REGOFFSET);
4770 code = emit_imm64 (code, ARMREG_IP0, (guint64)bp_trampoline);
4771 code = emit_strx (code, ARMREG_IP0, ins->inst_basereg, ins->inst_offset);
4776 if (cfg->opt & MONO_OPT_BRANCH) {
4777 for (bb = cfg->bb_entry; bb; bb = bb->next_bb) {
4779 bb->max_offset = max_offset;
4781 MONO_BB_FOR_EACH_INS (bb, ins) {
4782 max_offset += ((guint8 *)ins_get_spec (ins->opcode))[MONO_INST_LEN];
4786 if (max_offset > 0x3ffff * 4)
4787 cfg->arch.cond_branch_islands = TRUE;
4793 realloc_code (MonoCompile *cfg, int size)
4795 while (cfg->code_len + size > (cfg->code_size - 16)) {
4796 cfg->code_size *= 2;
4797 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
4798 cfg->stat_code_reallocs++;
4800 return cfg->native_code + cfg->code_len;
4804 mono_arch_emit_epilog (MonoCompile *cfg)
4807 int max_epilog_size;
4811 max_epilog_size = 16 + 20*4;
4812 code = realloc_code (cfg, max_epilog_size);
4814 if (cfg->method->save_lmf) {
4815 code = mono_arm_emit_load_regarray (code, MONO_ARCH_CALLEE_SAVED_REGS & cfg->used_int_regs, ARMREG_FP, cfg->lmf_var->inst_offset + MONO_STRUCT_OFFSET (MonoLMF, gregs) - (MONO_ARCH_FIRST_LMF_REG * 8));
4818 code = emit_load_regset (code, MONO_ARCH_CALLEE_SAVED_REGS & cfg->used_int_regs, ARMREG_FP, cfg->arch.saved_gregs_offset);
4821 /* Load returned vtypes into registers if needed */
4822 cinfo = cfg->arch.cinfo;
4823 switch (cinfo->ret.storage) {
4824 case ArgVtypeInIRegs: {
4825 MonoInst *ins = cfg->ret;
4827 for (i = 0; i < cinfo->ret.nregs; ++i)
4828 code = emit_ldrx (code, cinfo->ret.reg + i, ins->inst_basereg, ins->inst_offset + (i * 8));
4832 MonoInst *ins = cfg->ret;
4834 for (i = 0; i < cinfo->ret.nregs; ++i) {
4835 if (cinfo->ret.esize == 4)
4836 code = emit_ldrfpw (code, cinfo->ret.reg + i, ins->inst_basereg, ins->inst_offset + cinfo->ret.foffsets [i]);
4838 code = emit_ldrfpx (code, cinfo->ret.reg + i, ins->inst_basereg, ins->inst_offset + cinfo->ret.foffsets [i]);
4847 code = mono_arm_emit_destroy_frame (code, cfg->stack_offset, ((1 << ARMREG_IP0) | (1 << ARMREG_IP1)));
4849 arm_retx (code, ARMREG_LR);
4851 g_assert (code - (cfg->native_code + cfg->code_len) < max_epilog_size);
4853 cfg->code_len = code - cfg->native_code;
4857 mono_arch_emit_exceptions (MonoCompile *cfg)
4860 MonoClass *exc_class;
4862 guint8* exc_throw_pos [MONO_EXC_INTRINS_NUM];
4863 guint8 exc_throw_found [MONO_EXC_INTRINS_NUM];
4864 int i, id, size = 0;
4866 for (i = 0; i < MONO_EXC_INTRINS_NUM; i++) {
4867 exc_throw_pos [i] = NULL;
4868 exc_throw_found [i] = 0;
4871 for (ji = cfg->patch_info; ji; ji = ji->next) {
4872 if (ji->type == MONO_PATCH_INFO_EXC) {
4873 i = mini_exception_id_by_name (ji->data.target);
4874 if (!exc_throw_found [i]) {
4876 exc_throw_found [i] = TRUE;
4881 code = realloc_code (cfg, size);
4883 /* Emit code to raise corlib exceptions */
4884 for (ji = cfg->patch_info; ji; ji = ji->next) {
4885 if (ji->type != MONO_PATCH_INFO_EXC)
4888 ip = cfg->native_code + ji->ip.i;
4890 id = mini_exception_id_by_name (ji->data.target);
4892 if (exc_throw_pos [id]) {
4893 /* ip points to the bcc () in OP_COND_EXC_... */
4894 arm_patch_rel (ip, exc_throw_pos [id], ji->relocation);
4895 ji->type = MONO_PATCH_INFO_NONE;
4899 exc_throw_pos [id] = code;
4900 arm_patch_rel (ip, code, ji->relocation);
/* We are branched to from the code generated by emit_cond_exc (); the pc is in ip1 */
4904 /* r0 = type token */
4905 exc_class = mono_class_load_from_name (mono_defaults.corlib, "System", ji->data.name);
4906 code = emit_imm (code, ARMREG_R0, exc_class->type_token - MONO_TOKEN_TYPE_DEF);
4908 arm_movx (code, ARMREG_R1, ARMREG_IP1);
4909 /* Branch to the corlib exception throwing trampoline */
4910 ji->ip.i = code - cfg->native_code;
4911 ji->type = MONO_PATCH_INFO_INTERNAL_METHOD;
4912 ji->data.name = "mono_arch_throw_corlib_exception";
4913 ji->relocation = MONO_R_ARM64_BL;
4915 cfg->thunk_area += THUNK_SIZE;
4918 cfg->code_len = code - cfg->native_code;
4920 g_assert (cfg->code_len < cfg->code_size);
4924 mono_arch_emit_inst_for_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
4930 mono_arch_print_tree (MonoInst *tree, int arity)
4936 mono_arch_get_patch_offset (guint8 *code)
4942 mono_arch_build_imt_trampoline (MonoVTable *vtable, MonoDomain *domain, MonoIMTCheckItem **imt_entries, int count,
4943 gpointer fail_tramp)
4945 int i, buf_len, imt_reg;
4949 printf ("building IMT trampoline for class %s %s entries %d code size %d code at %p end %p vtable %p\n", vtable->klass->name_space, vtable->klass->name, count, size, start, ((guint8*)start) + size, vtable);
4950 for (i = 0; i < count; ++i) {
4951 MonoIMTCheckItem *item = imt_entries [i];
4952 printf ("method %d (%p) %s vtable slot %p is_equals %d chunk size %d\n", i, item->key, item->key->name, &vtable->vtable [item->value.vtable_slot], item->is_equals, item->chunk_size);
4957 for (i = 0; i < count; ++i) {
4958 MonoIMTCheckItem *item = imt_entries [i];
4959 if (item->is_equals) {
4960 gboolean fail_case = !item->check_target_idx && fail_tramp;
4962 if (item->check_target_idx || fail_case) {
4963 if (!item->compare_done || fail_case) {
4964 buf_len += 4 * 4 + 4;
4967 if (item->has_target_code) {
4984 buf = mono_method_alloc_generic_virtual_trampoline (domain, buf_len);
4986 buf = mono_domain_code_reserve (domain, buf_len);
4990 * We are called by JITted code, which passes in the IMT argument in
4991 * MONO_ARCH_RGCTX_REG (r27). We need to preserve all caller saved regs
4994 imt_reg = MONO_ARCH_RGCTX_REG;
4995 for (i = 0; i < count; ++i) {
4996 MonoIMTCheckItem *item = imt_entries [i];
4998 item->code_target = code;
5000 if (item->is_equals) {
5002 * Check the imt argument against item->key, if equals, jump to either
5003 * item->value.target_code or to vtable [item->value.vtable_slot].
5004 * If fail_tramp is set, jump to it if not-equals.
			gboolean fail_case = !item->check_target_idx && fail_tramp;

			if (item->check_target_idx || fail_case) {
				/* Compare imt_reg with item->key */
				if (!item->compare_done || fail_case) {
					// FIXME: Optimize this
					code = emit_imm64 (code, ARMREG_IP0, (guint64)item->key);
					arm_cmpx (code, imt_reg, ARMREG_IP0);
				}
				item->jmp_code = code;
				arm_bcc (code, ARMCOND_NE, 0);
				/* Jump to target if equals */
				if (item->has_target_code) {
					code = emit_imm64 (code, ARMREG_IP0, (guint64)item->value.target_code);
					arm_brx (code, ARMREG_IP0);
				} else {
					guint64 imm = (guint64)&(vtable->vtable [item->value.vtable_slot]);

					code = emit_imm64 (code, ARMREG_IP0, imm);
					arm_ldrx (code, ARMREG_IP0, ARMREG_IP0, 0);
					arm_brx (code, ARMREG_IP0);
				}

				if (fail_case) {
					arm_patch_rel (item->jmp_code, code, MONO_R_ARM64_BCC);
					item->jmp_code = NULL;
					code = emit_imm64 (code, ARMREG_IP0, (guint64)fail_tramp);
					arm_brx (code, ARMREG_IP0);
				}
			} else {
				guint64 imm = (guint64)&(vtable->vtable [item->value.vtable_slot]);

				code = emit_imm64 (code, ARMREG_IP0, imm);
				arm_ldrx (code, ARMREG_IP0, ARMREG_IP0, 0);
				arm_brx (code, ARMREG_IP0);
			}
		} else {
			code = emit_imm64 (code, ARMREG_IP0, (guint64)item->key);
			arm_cmpx (code, imt_reg, ARMREG_IP0);
			item->jmp_code = code;
			arm_bcc (code, ARMCOND_HS, 0);
		}
	}

	/* Patch the branches */
	for (i = 0; i < count; ++i) {
		MonoIMTCheckItem *item = imt_entries [i];
		if (item->jmp_code && item->check_target_idx)
			arm_patch_rel (item->jmp_code, imt_entries [item->check_target_idx]->code_target, MONO_R_ARM64_BCC);
	}

	g_assert ((code - buf) < buf_len);

	mono_arch_flush_icache (buf, code - buf);

	return buf;
}

GSList*
mono_arch_get_trampolines (gboolean aot)
{
	return mono_arm_get_exception_trampolines (aot);
}

#else /* DISABLE_JIT */

gpointer
mono_arch_build_imt_trampoline (MonoVTable *vtable, MonoDomain *domain, MonoIMTCheckItem **imt_entries, int count,
								gpointer fail_tramp)
{
	g_assert_not_reached ();
	return NULL;
}

#endif /* !DISABLE_JIT */

#ifdef MONO_ARCH_SOFT_DEBUG_SUPPORTED

void
mono_arch_set_breakpoint (MonoJitInfo *ji, guint8 *ip)
{
	guint8 *code = ip;
	guint32 native_offset = ip - (guint8*)ji->code_start;

	if (ji->from_aot) {
		SeqPointInfo *info = mono_arch_get_seq_point_info (mono_domain_get (), ji->code_start);

		g_assert (native_offset % 4 == 0);
		g_assert (info->bp_addrs [native_offset / 4] == 0);
		info->bp_addrs [native_offset / 4] = mini_get_breakpoint_trampoline ();
	} else {
		/* ip points to an ldrx */
		code += 4;
		arm_blrx (code, ARMREG_IP0);
		mono_arch_flush_icache (ip, code - ip);
	}
}

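/*
 * A sketch of the JITted-code case above: the seq point site is assumed to
 * have the form
 *
 *   ldr ip0, <slot holding the breakpoint trampoline address>
 *   nop                                 <- ip + 4
 *
 * Arming the breakpoint rewrites the nop into "blr ip0", so execution falls
 * into the trampoline loaded by the preceding ldrx; clearing it (below)
 * restores the nop.
 */
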
void
mono_arch_clear_breakpoint (MonoJitInfo *ji, guint8 *ip)
{
	guint8 *code = ip;

	if (ji->from_aot) {
		guint32 native_offset = ip - (guint8*)ji->code_start;
		SeqPointInfo *info = mono_arch_get_seq_point_info (mono_domain_get (), ji->code_start);

		g_assert (native_offset % 4 == 0);
		info->bp_addrs [native_offset / 4] = NULL;
	} else {
		/* ip points to an ldrx */
		code += 4;
		arm_nop (code);
		mono_arch_flush_icache (ip, code - ip);
	}
}

void
mono_arch_start_single_stepping (void)
{
	ss_trampoline = mini_get_single_step_trampoline ();
}

void
mono_arch_stop_single_stepping (void)
{
	ss_trampoline = NULL;
}

gboolean
mono_arch_is_single_step_event (void *info, void *sigctx)
{
	/* We use soft breakpoints on arm64 */
	return FALSE;
}

gboolean
mono_arch_is_breakpoint_event (void *info, void *sigctx)
{
	/* We use soft breakpoints on arm64 */
	return FALSE;
}

void
mono_arch_skip_breakpoint (MonoContext *ctx, MonoJitInfo *ji)
{
	g_assert_not_reached ();
}

void
mono_arch_skip_single_step (MonoContext *ctx)
{
	g_assert_not_reached ();
}

SeqPointInfo*
mono_arch_get_seq_point_info (MonoDomain *domain, guint8 *code)
{
	SeqPointInfo *info;
	MonoJitInfo *ji;

	// FIXME: Add a free function

	mono_domain_lock (domain);
	info = g_hash_table_lookup (domain_jit_info (domain)->arch_seq_points,
								code);
	mono_domain_unlock (domain);

	if (!info) {
		ji = mono_jit_info_table_find (domain, (char*)code);
		g_assert (ji);

		info = g_malloc0 (sizeof (SeqPointInfo) + (ji->code_size / 4) * sizeof(guint8*));

		info->ss_tramp_addr = &ss_trampoline;

		mono_domain_lock (domain);
		g_hash_table_insert (domain_jit_info (domain)->arch_seq_points,
							 code, info);
		mono_domain_unlock (domain);
	}

	return info;
}

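/*
 * Note on the layout above: bp_addrs has one slot per 4-byte instruction
 * word of the method (hence the code_size / 4 sizing), and
 * mono_arch_set_breakpoint ()/mono_arch_clear_breakpoint () index it by
 * native_offset / 4 in the AOT case.
 */
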
void
mono_arch_init_lmf_ext (MonoLMFExt *ext, gpointer prev_lmf)
{
	ext->lmf.previous_lmf = prev_lmf;
	/* Mark that this is a MonoLMFExt */
	ext->lmf.previous_lmf = (gpointer)(((gssize)ext->lmf.previous_lmf) | 2);
	ext->lmf.gregs [MONO_ARCH_LMF_REG_SP] = (gssize)ext;
}

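/*
 * Example of the tagging above (a sketch): with prev_lmf == 0x7f0000001000,
 * previous_lmf is stored as 0x7f0000001002. LMF pointers are word aligned,
 * so the low bits are free to act as a tag; the unwind code uses them to
 * distinguish a MonoLMFExt from a plain MonoLMF.
 */
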
#endif /* MONO_ARCH_SOFT_DEBUG_SUPPORTED */

gboolean
mono_arch_opcode_supported (int opcode)
{
	switch (opcode) {
	case OP_ATOMIC_ADD_I4:
	case OP_ATOMIC_ADD_I8:
	case OP_ATOMIC_EXCHANGE_I4:
	case OP_ATOMIC_EXCHANGE_I8:
	case OP_ATOMIC_CAS_I4:
	case OP_ATOMIC_CAS_I8:
	case OP_ATOMIC_LOAD_I1:
	case OP_ATOMIC_LOAD_I2:
	case OP_ATOMIC_LOAD_I4:
	case OP_ATOMIC_LOAD_I8:
	case OP_ATOMIC_LOAD_U1:
	case OP_ATOMIC_LOAD_U2:
	case OP_ATOMIC_LOAD_U4:
	case OP_ATOMIC_LOAD_U8:
	case OP_ATOMIC_LOAD_R4:
	case OP_ATOMIC_LOAD_R8:
	case OP_ATOMIC_STORE_I1:
	case OP_ATOMIC_STORE_I2:
	case OP_ATOMIC_STORE_I4:
	case OP_ATOMIC_STORE_I8:
	case OP_ATOMIC_STORE_U1:
	case OP_ATOMIC_STORE_U2:
	case OP_ATOMIC_STORE_U4:
	case OP_ATOMIC_STORE_U8:
	case OP_ATOMIC_STORE_R4:
	case OP_ATOMIC_STORE_R8:
		return TRUE;
	default:
		return FALSE;
	}
}

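/*
 * The atomic opcodes accepted above are lowered elsewhere in this backend
 * to the ARMv8 acquire/release loads and stores (ldar*/stlr*) and, for the
 * read-modify-write operations, to ldxr/stxr retry loops.
 */
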
CallInfo*
mono_arch_get_call_info (MonoMemPool *mp, MonoMethodSignature *sig)
{
	return get_call_info (mp, sig);
}

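/*
 * Install a guard for a handler block: clause->exvar_offset is assumed to be
 * the frame slot holding the saved return address for the handler, so
 * overwriting it redirects execution to new_value when the handler returns.
 * The range check below leaves the slot untouched unless it currently points
 * into this method's code.
 */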
gpointer
mono_arch_install_handler_block_guard (MonoJitInfo *ji, MonoJitExceptionInfo *clause, MonoContext *ctx, gpointer new_value)
{
	gpointer *lr_loc;
	gpointer old_value;
	char *bp;

	bp = MONO_CONTEXT_GET_BP (ctx);
	lr_loc = (gpointer*)(bp + clause->exvar_offset);

	old_value = *lr_loc;
	if ((char*)old_value < (char*)ji->code_start || (char*)old_value > ((char*)ji->code_start + ji->code_size))
		return old_value;

	*lr_loc = new_value;

	return old_value;
}