2 * mini-ia64.c: IA64 backend for the Mono code generator
5 * Zoltan Varga (vargaz@gmail.com)
7 * (C) 2003 Ximian, Inc.
15 #include <mono/metadata/appdomain.h>
16 #include <mono/metadata/debug-helpers.h>
17 #include <mono/metadata/threads.h>
18 #include <mono/metadata/profiler-private.h>
19 #include <mono/utils/mono-math.h>
22 #include "mini-ia64.h"
/*
 * Cached TLS offsets; -1 means "not initialized".
 * NOTE(review): the code that fills these in is not visible in this chunk —
 * presumably done during arch setup; confirm against the full file.
 */
26 static gint lmf_tls_offset = -1;
27 static gint appdomain_tls_offset = -1;
28 static gint thread_tls_offset = -1;
/* Per-opcode descriptor table (indexed by opcode), aliased as ins_spec. */
30 const char * const ia64_desc [OP_LAST];
31 static const char*const * ins_spec = ia64_desc;
/*
 * Round VAL up to a multiple of ALIGN (ALIGN must be a power of two).
 * Fixed: VAL is now parenthesized before the cast; previously
 * ALIGN_TO(a << b, 8) expanded to ((guint64)a << b) + ... because the cast
 * binds tighter than most operators (macro-hygiene bug, CERT PRE01-C).
 */
33 #define ALIGN_TO(val,align) ((((guint64)(val)) + ((align) - 1)) & ~((align) - 1))
/* TRUE if VAL fits in 32 bits when zero-extended to 64 (same hygiene fix). */
35 #define IS_IMM32(val) ((((guint64)(val)) >> 32) == 0)
38 * IA64 register usage:
39 * - local registers are used for global register allocation
40 * - r8..r11, r14..r30 is used for local register allocation
41 * - r31 is a scratch register used within opcode implementations
42 * - FIXME: Use out registers as well
43 * - the first three locals are used for saving ar.pfst, b0, and sp
44 * compare instructions always set p6 and p7
/* Size of the alternate signal stack. */
47 #define SIGNAL_STACK_SIZE (64 * 1024)
/* Offset of the first stack argument from sp (the 16 byte scratch area). */
49 #define ARGS_OFFSET 16
/* Scratch registers reserved for opcode implementations: r31/r30 for
 * integers, f32 for floats (matches the register-usage comment above). */
51 #define GP_SCRATCH_REG 31
52 #define GP_SCRATCH_REG2 30
53 #define FP_SCRATCH_REG 32
/* Code alignment applied to basic blocks which start a loop. */
55 #define LOOP_ALIGNMENT 8
56 #define bb_is_loop_start(bb) ((bb)->loop_body_start && (bb)->nesting)
58 #define NOT_IMPLEMENTED g_assert_not_reached ()
/*
 * Printable names for the 128 IA64 general registers, indexed by register
 * number; used by mono_arch_regname below.
 * NOTE(review): this chunk is an elided extraction — the array's closing
 * brace and the body of mono_arch_regname are not visible here.
 */
60 static const char* gregs [] = {
61 "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9",
62 "r10", "r11", "r12", "r13", "r14", "r15", "r16", "r17", "r18", "r19",
63 "r20", "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29",
64 "r30", "r31", "r32", "r33", "r34", "r35", "r36", "r37", "r38", "r39",
65 "r40", "r41", "r42", "r43", "r44", "r45", "r46", "r47", "r48", "r49",
66 "r50", "r51", "r52", "r53", "r54", "r55", "r56", "r57", "r58", "r59",
67 "r60", "r61", "r62", "r63", "r64", "r65", "r66", "r67", "r68", "r69",
68 "r70", "r71", "r72", "r73", "r74", "r75", "r76", "r77", "r78", "r79",
69 "r80", "r81", "r82", "r83", "r84", "r85", "r86", "r87", "r88", "r89",
70 "r90", "r91", "r92", "r93", "r94", "r95", "r96", "r97", "r98", "r99",
71 "r100", "r101", "r102", "r103", "r104", "r105", "r106", "r107", "r108", "r109",
72 "r110", "r111", "r112", "r113", "r114", "r115", "r116", "r117", "r118", "r119",
73 "r120", "r121", "r122", "r123", "r124", "r125", "r126", "r127"
/* Return the printable name of integer register REG (body elided here). */
77 mono_arch_regname (int reg)
/*
 * Printable names for the 128 IA64 floating point registers, indexed by
 * register number; used by mono_arch_fregname below.
 * NOTE(review): the closing brace and the body of mono_arch_fregname are
 * elided in this extraction.
 */
85 static const char* fregs [] = {
86 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", "f8", "f9",
87 "f10", "f11", "f12", "f13", "f14", "f15", "f16", "f17", "f18", "f19",
88 "f20", "f21", "f22", "f23", "f24", "f25", "f26", "f27", "f28", "f29",
89 "f30", "f31", "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
90 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47", "f48", "f49",
91 "f50", "f51", "f52", "f53", "f54", "f55", "f56", "f57", "f58", "f59",
92 "f60", "f61", "f62", "f63", "f64", "f65", "f66", "f67", "f68", "f69",
93 "f70", "f71", "f72", "f73", "f74", "f75", "f76", "f77", "f78", "f79",
94 "f80", "f81", "f82", "f83", "f84", "f85", "f86", "f87", "f88", "f89",
95 "f90", "f91", "f92", "f93", "f94", "f95", "f96", "f97", "f98", "f99",
96 "f100", "f101", "f102", "f103", "f104", "f105", "f106", "f107", "f108", "f109",
97 "f110", "f111", "f112", "f113", "f114", "f115", "f116", "f117", "f118", "f119",
98 "f120", "f121", "f122", "f123", "f124", "f125", "f126", "f127"
/* Return the printable name of float register REG (body elided here). */
102 mono_arch_fregname (int reg)
/* Fragments of the ArgStorage enum / ArgInfo / CallInfo declarations used by
 * get_call_info (surrounding members elided in this extraction). */
115 ArgNone /* only in pair_storage */
123 /* Only if storage == ArgValuetypeInReg */
124 ArgStorage pair_storage [2];
133 gboolean need_stack_align;
/* Run A only when verbose output is enabled. */
139 #define DEBUG(a) if (cfg->verbose_level > 1) a
/* Allocate a fresh OP_ICONST MonoInst carrying VAL from the cfg mempool
 * (the macro's closing "} while (0)" is elided here). */
141 #define NEW_ICONST(cfg,dest,val) do { \
142 (dest) = mono_mempool_alloc0 ((cfg)->mempool, sizeof (MonoInst)); \
143 (dest)->opcode = OP_ICONST; \
144 (dest)->inst_c0 = (val); \
145 (dest)->type = STACK_I4; \
/*
 * Assign the next integer argument either to a parameter register or to the
 * stack, recording the decision in AINFO. GR counts general registers used,
 * STACK_SIZE the stack bytes used so far. (Lines elided in this extraction —
 * the branch that records the register number and bumps *gr is not visible.)
 */
151 add_general (guint32 *gr, guint32 *stack_size, ArgInfo *ainfo)
153 ainfo->offset = *stack_size;
155 if (*gr >= PARAM_REGS) {
156 ainfo->storage = ArgOnStack;
157 (*stack_size) += sizeof (gpointer);
160 ainfo->storage = ArgInIReg;
166 #define FLOAT_PARAM_REGS 8
/*
 * Same as add_general for a float argument. Float args land in f8+*fr.
 * NOTE(review): the overflow test uses *gr, not *fr — on IA64 float
 * parameters also consume general argument slots, which presumably is why;
 * confirm against the full file / the runtime architecture guide.
 */
169 add_float (guint32 *gr, guint32 *fr, guint32 *stack_size, ArgInfo *ainfo, gboolean is_double)
171 ainfo->offset = *stack_size;
173 if (*gr >= PARAM_REGS) {
174 ainfo->storage = ArgOnStack;
175 (*stack_size) += sizeof (gpointer);
178 ainfo->storage = ArgInFloatReg;
179 ainfo->reg = 8 + *fr;
188 * Obtain information about a call according to the calling convention.
189 * For IA64, see the "Itanium Software Conventions and Runtime Architecture
190 * Guide" document for more information.
/*
 * Builds a CallInfo (freed by the caller) describing where the return value
 * and each argument of SIG live. Many lines are elided in this extraction,
 * including the case labels between the ones shown.
 */
193 get_call_info (MonoMethodSignature *sig, gboolean is_pinvoke)
197 int n = sig->hasthis + sig->param_count;
198 guint32 stack_size = 0;
/* One trailing ArgInfo per argument (flexible-array-style allocation). */
201 cinfo = g_malloc0 (sizeof (CallInfo) + (sizeof (ArgInfo) * n));
/* Return value placement: integer-like types go in r8, floats in an FP reg. */
208 ret_type = mono_type_get_underlying_type (sig->ret);
209 switch (ret_type->type) {
210 case MONO_TYPE_BOOLEAN:
221 case MONO_TYPE_FNPTR:
222 case MONO_TYPE_CLASS:
223 case MONO_TYPE_OBJECT:
224 case MONO_TYPE_SZARRAY:
225 case MONO_TYPE_ARRAY:
226 case MONO_TYPE_STRING:
227 cinfo->ret.storage = ArgInIReg;
228 cinfo->ret.reg = IA64_R8;
232 cinfo->ret.storage = ArgInIReg;
233 cinfo->ret.reg = IA64_R8;
237 cinfo->ret.storage = ArgInFloatReg;
243 g_error ("Can't handle as return value 0x%x", sig->ret->type);
/* The implicit 'this' argument, if any, takes the first slot. */
249 add_general (&gr, &stack_size, cinfo->args + 0);
251 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (n == 0)) {
253 fr = FLOAT_PARAM_REGS;
255 /* Emit the signature cookie just before the implicit arguments */
256 add_general (&gr, &stack_size, &cinfo->sig_cookie);
259 for (i = 0; i < sig->param_count; ++i) {
260 ArgInfo *ainfo = &cinfo->args [sig->hasthis + i];
263 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos)) {
264 /* We always pass the sig cookie on the stack for simplicity */
266 * Prevent implicit arguments + the sig cookie from being passed
270 fr = FLOAT_PARAM_REGS;
272 /* Emit the signature cookie just before the implicit arguments */
273 add_general (&gr, &stack_size, &cinfo->sig_cookie);
276 if (sig->params [i]->byref) {
277 add_general (&gr, &stack_size, ainfo);
280 ptype = mono_type_get_underlying_type (sig->params [i]);
281 switch (ptype->type) {
282 case MONO_TYPE_BOOLEAN:
285 add_general (&gr, &stack_size, ainfo);
290 add_general (&gr, &stack_size, ainfo);
294 add_general (&gr, &stack_size, ainfo);
299 case MONO_TYPE_FNPTR:
300 case MONO_TYPE_CLASS:
301 case MONO_TYPE_OBJECT:
302 case MONO_TYPE_STRING:
303 case MONO_TYPE_SZARRAY:
304 case MONO_TYPE_ARRAY:
305 add_general (&gr, &stack_size, ainfo);
/* Valuetype support is not implemented yet (call is commented out). */
307 case MONO_TYPE_VALUETYPE:
309 //add_valuetype (sig, ainfo, sig->params [i], FALSE, &gr, &fr, &stack_size);
311 case MONO_TYPE_TYPEDBYREF:
312 stack_size += sizeof (MonoTypedRef);
313 ainfo->storage = ArgOnStack;
317 add_general (&gr, &stack_size, ainfo);
320 add_float (&gr, &fr, &stack_size, ainfo, FALSE);
323 add_float (&gr, &fr, &stack_size, ainfo, TRUE);
326 g_assert_not_reached ();
/* Sentinel at the very end of the parameter list: still emit the cookie. */
330 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (n > 0) && (sig->sentinelpos == sig->param_count)) {
332 fr = FLOAT_PARAM_REGS;
334 /* Emit the signature cookie just before the implicit arguments */
335 add_general (&gr, &stack_size, &cinfo->sig_cookie);
338 cinfo->stack_usage = stack_size;
339 cinfo->reg_usage = gr;
340 cinfo->freg_usage = fr;
345 * mono_arch_get_argument_info:
346 * @csig: a method signature
347 * @param_count: the number of parameters to consider
348 * @arg_info: an array to store the result infos
350 * Gathers information on parameters such as size, alignment and
351 * padding. arg_info should be large enough to hold param_count + 1 entries.
353 * Returns the size of the argument area on the stack.
/* Not implemented for IA64 yet: unconditionally asserts. */
356 mono_arch_get_argument_info (MonoMethodSignature *csig, int param_count, MonoJitArgumentInfo *arg_info)
358 g_assert_not_reached ();
364 * Initialize the cpu to execute managed code.
/* Body elided in this extraction. */
367 mono_arch_cpu_init (void)
372 * This function returns the optimizations supported on this cpu.
/* Note: "optimizazions" is the historical (misspelled) public name — do not
 * rename, other files reference it. Body elided here. */
375 mono_arch_cpu_optimizazions (guint32 *exclude_mask)
/*
 * TRUE if a variable of type T fits in one general register and may be
 * globally register-allocated (case labels between the ones shown are
 * elided in this extraction).
 */
383 is_regsize_var (MonoType *t) {
386 t = mono_type_get_underlying_type (t);
397 case MONO_TYPE_FNPTR:
399 case MONO_TYPE_OBJECT:
400 case MONO_TYPE_STRING:
401 case MONO_TYPE_CLASS:
402 case MONO_TYPE_SZARRAY:
403 case MONO_TYPE_ARRAY:
405 case MONO_TYPE_VALUETYPE:
/*
 * Collect the variables of CFG eligible for global register allocation:
 * live, register-sized OP_LOCAL/OP_ARG vars that are neither volatile nor
 * address-taken. Returns them sorted by mono_varlist_sort.
 */
412 mono_arch_get_allocatable_int_vars (MonoCompile *cfg)
417 for (i = 0; i < cfg->num_varinfo; i++) {
418 MonoInst *ins = cfg->varinfo [i];
419 MonoMethodVar *vmv = MONO_VARINFO (cfg, i);
/* Skip variables with an empty (or inverted) live range. */
422 if (vmv->range.first_use.abs_pos >= vmv->range.last_use.abs_pos)
425 if ((ins->flags & (MONO_INST_IS_DEAD|MONO_INST_VOLATILE|MONO_INST_INDIRECT)) ||
426 (ins->opcode != OP_LOCAL && ins->opcode != OP_ARG))
429 if (is_regsize_var (ins->inst_vtype)) {
430 g_assert (MONO_VARINFO (cfg, i)->reg == -1);
431 g_assert (i == vmv->idx);
432 vars = g_list_prepend (vars, vmv);
436 vars = mono_varlist_sort (cfg, vars, 0);
/*
 * Lay out the stacked register file for CFG: incoming args start at r32,
 * then (after the argument registers) three reserved prolog/epilog slots
 * (saved ar.pfs, b0, sp), then 8 local registers, then the out area.
 * Idempotent: returns early once reg_local0 has been assigned.
 */
442 mono_ia64_alloc_stacked_registers (MonoCompile *cfg)
446 if (cfg->arch.reg_local0 > 0)
450 cinfo = get_call_info (mono_method_signature (cfg->method), FALSE);
452 /* Three registers are reserved for use by the prolog/epilog */
453 cfg->arch.reg_in0 = 32;
454 cfg->arch.reg_local0 = cfg->arch.reg_in0 + cinfo->reg_usage + 3;
455 cfg->arch.reg_out0 = cfg->arch.reg_local0 + 8;
457 cfg->arch.reg_saved_ar_pfs = cfg->arch.reg_local0 - 1;
458 cfg->arch.reg_saved_b0 = cfg->arch.reg_local0 - 2;
459 cfg->arch.reg_saved_sp = cfg->arch.reg_local0 - 3;
/*
 * Return the list of hard registers usable for global register allocation:
 * the local stacked registers [reg_local0, reg_out0).
 */
465 mono_arch_get_global_int_regs (MonoCompile *cfg)
470 mono_ia64_alloc_stacked_registers (cfg);
472 for (i = cfg->arch.reg_local0; i < cfg->arch.reg_out0; ++i) {
475 regs = g_list_prepend (regs, (gpointer)(gssize)(i));
482 * mono_arch_regalloc_cost:
484 * Return the cost, in number of memory references, of the action of
485 * allocating the variable VMV into a register during global register
/* Body mostly elided in this extraction. */
489 mono_arch_regalloc_cost (MonoCompile *cfg, MonoMethodVar *vmv)
491 /* FIXME: Increase costs linearly to avoid using all local registers */
/*
 * Decide, for every variable of the method, whether it lives in a register
 * (OP_REGVAR) or in a frame slot (OP_REGOFFSET), and compute the total
 * frame size (m->stack_offset). Locals grow downwards from the saved sp.
 * NOTE(review): many lines (else arms, breaks, closing braces) are elided
 * in this extraction.
 */
497 mono_arch_allocate_vars (MonoCompile *m)
499 MonoMethodSignature *sig;
500 MonoMethodHeader *header;
503 guint32 locals_stack_size, locals_stack_align;
507 mono_ia64_alloc_stacked_registers (m);
509 header = mono_method_get_header (m->method);
511 sig = mono_method_signature (m->method);
513 cinfo = get_call_info (sig, FALSE);
516 * We use the ABI calling conventions for managed code as well.
517 * Exception: valuetypes are never passed or returned in registers.
520 /* Locals are allocated backwards from %fp */
521 m->frame_reg = m->arch.reg_saved_sp;
524 if (m->method->save_lmf) {
527 /* Reserve stack space for saving LMF + argument regs */
528 offset += sizeof (MonoLMF);
529 m->arch.lmf_offset = offset;
/* Place the return value: struct/typedbyref returns get a frame slot
 * (the return register is volatile), everything else stays in its reg. */
533 if (sig->ret->type != MONO_TYPE_VOID) {
534 switch (cinfo->ret.storage) {
536 if ((MONO_TYPE_ISSTRUCT (sig->ret) && !mono_class_from_mono_type (sig->ret)->enumtype) || (sig->ret->type == MONO_TYPE_TYPEDBYREF)) {
537 /* The register is volatile */
538 m->ret->opcode = OP_REGOFFSET;
539 m->ret->inst_basereg = m->frame_reg;
541 m->ret->inst_offset = - offset;
544 m->ret->opcode = OP_REGVAR;
545 m->ret->inst_c0 = cinfo->ret.reg;
549 m->ret->opcode = OP_REGVAR;
550 m->ret->inst_c0 = cinfo->ret.reg;
553 g_assert_not_reached ();
555 m->ret->dreg = m->ret->inst_c0;
558 /* Allocate locals */
559 offsets = mono_allocate_stack_slots (m, &locals_stack_size, &locals_stack_align);
560 if (locals_stack_align) {
561 offset += (locals_stack_align - 1);
562 offset &= ~(locals_stack_align - 1);
564 for (i = m->locals_start; i < m->num_varinfo; i++) {
565 if (offsets [i] != -1) {
566 MonoInst *inst = m->varinfo [i];
567 inst->opcode = OP_REGOFFSET;
568 inst->inst_basereg = m->frame_reg;
569 inst->inst_offset = - (offset + offsets [i]);
570 //printf ("allocated local %d to ", i); mono_print_tree_nl (inst);
574 offset += locals_stack_size;
576 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG)) {
577 g_assert (cinfo->sig_cookie.storage == ArgOnStack);
578 m->sig_cookie = cinfo->sig_cookie.offset + ARGS_OFFSET;
/* Place the incoming arguments according to the CallInfo computed above. */
581 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
582 inst = m->varinfo [i];
583 if (inst->opcode != OP_REGVAR) {
584 ArgInfo *ainfo = &cinfo->args [i];
585 gboolean inreg = TRUE;
588 if (sig->hasthis && (i == 0))
589 arg_type = &mono_defaults.object_class->byval_arg;
591 arg_type = sig->params [i - sig->hasthis];
593 /* FIXME: Allocate volatile arguments to registers */
594 if (inst->flags & (MONO_INST_VOLATILE|MONO_INST_INDIRECT))
597 inst->opcode = OP_REGOFFSET;
599 switch (ainfo->storage) {
601 inst->opcode = OP_REGVAR;
602 inst->dreg = m->arch.reg_in0 + ainfo->reg;
606 * Since float regs are volatile, we save the arguments to
607 * the stack in the prolog.
608 * FIXME: Avoid this if the method contains no calls.
613 inst->opcode = OP_REGOFFSET;
614 inst->inst_basereg = m->frame_reg;
615 inst->inst_offset = ARGS_OFFSET + ainfo->offset;
617 case ArgValuetypeInReg:
/* Arguments forced off their register get a fresh frame slot. */
623 if (!inreg && (ainfo->storage != ArgOnStack)) {
624 inst->opcode = OP_REGOFFSET;
625 inst->inst_basereg = m->frame_reg;
626 /* These arguments are saved to the stack in the prolog */
627 if (ainfo->storage == ArgValuetypeInReg) {
629 offset += 2 * sizeof (gpointer);
632 offset += sizeof (gpointer);
633 inst->inst_offset = - offset;
638 m->stack_offset = offset;
/* Not implemented for IA64 yet: unconditionally asserts. */
644 mono_arch_create_vars (MonoCompile *cfg)
646 g_assert_not_reached ();
/*
 * Turn ARG into an OP_OUTARG_REG / OP_OUTARG_FREG node placing TREE into
 * hard register REG for CALL, and mark the register used on the call.
 * (The switch on STORAGE and the lines assigning the register number are
 * partially elided in this extraction.)
 */
650 add_outarg_reg (MonoCompile *cfg, MonoCallInst *call, MonoInst *arg, ArgStorage storage, int reg, MonoInst *tree)
654 arg->opcode = OP_OUTARG_REG;
655 arg->inst_left = tree;
656 arg->inst_right = (MonoInst*)call;
658 call->used_iregs |= 1 << reg;
661 arg->opcode = OP_OUTARG_FREG;
662 arg->inst_left = tree;
663 arg->inst_right = (MonoInst*)call;
665 call->used_fregs |= 1 << reg;
668 g_assert_not_reached ();
673 * take the arguments and generate the arch-specific
674 * instructions to properly call the function in call.
675 * This includes pushing, moving arguments to the right register
677 * Issue: who does the spilling if needed, and when?
/*
 * Lower the argument list of CALL into OP_OUTARG* nodes according to the
 * CallInfo for its signature. Out-args are prepended, so they end up in
 * reverse order. NOTE(review): elided extraction — else arms, breaks and
 * closing braces between the visible lines are missing.
 */
680 mono_arch_call_opcode (MonoCompile *cfg, MonoBasicBlock* bb, MonoCallInst *call, int is_virtual)
683 MonoMethodSignature *sig;
684 int i, n, stack_size;
690 mono_ia64_alloc_stacked_registers (cfg);
692 sig = call->signature;
693 n = sig->param_count + sig->hasthis;
695 cinfo = get_call_info (sig, sig->pinvoke);
697 for (i = 0; i < n; ++i) {
698 ainfo = cinfo->args + i;
700 if (!sig->pinvoke && (sig->call_convention == MONO_CALL_VARARG) && (i == sig->sentinelpos)) {
701 MonoMethodSignature *tmp_sig;
705 /* Emit the signature cookie just before the implicit arguments */
707 /* FIXME: Add support for signature tokens to AOT */
708 cfg->disable_aot = TRUE;
710 g_assert (cinfo->sig_cookie.storage == ArgOnStack);
713 * mono_ArgIterator_Setup assumes the signature cookie is
714 * passed first and all the arguments which were before it are
715 * passed on the stack after the signature. So compensate by
716 * passing a different signature.
718 tmp_sig = mono_metadata_signature_dup (call->signature);
719 tmp_sig->param_count -= call->signature->sentinelpos;
720 tmp_sig->sentinelpos = 0;
721 memcpy (tmp_sig->params, call->signature->params + call->signature->sentinelpos, tmp_sig->param_count * sizeof (MonoType*));
723 MONO_INST_NEW (cfg, sig_arg, OP_ICONST);
724 sig_arg->inst_p0 = tmp_sig;
726 MONO_INST_NEW (cfg, arg, OP_OUTARG);
727 arg->inst_left = sig_arg;
728 arg->type = STACK_PTR;
730 /* prepend, so they get reversed */
731 arg->next = call->out_args;
732 call->out_args = arg;
735 if (is_virtual && i == 0) {
736 /* the argument will be attached to the call instruction */
741 MONO_INST_NEW (cfg, arg, OP_OUTARG);
743 arg->cil_code = in->cil_code;
745 arg->type = in->type;
746 /* prepend, so they get reversed */
747 arg->next = call->out_args;
748 call->out_args = arg;
750 if (sig->hasthis && (i == 0))
751 arg_type = &mono_defaults.object_class->byval_arg;
753 arg_type = sig->params [i - sig->hasthis];
/* Valuetype arguments: compute native or managed stack size/alignment. */
755 if ((i >= sig->hasthis) && (MONO_TYPE_ISSTRUCT(arg_type))) {
759 if (arg_type->type == MONO_TYPE_TYPEDBYREF) {
760 size = sizeof (MonoTypedRef);
761 align = sizeof (gpointer);
765 size = mono_type_native_stack_size (&in->klass->byval_arg, &align);
767 size = mono_type_stack_size (&in->klass->byval_arg, &align);
768 if (ainfo->storage == ArgValuetypeInReg) {
773 switch (ainfo->storage) {
775 add_outarg_reg (cfg, call, arg, ainfo->storage, cfg->arch.reg_out0 + ainfo->reg, in);
778 add_outarg_reg (cfg, call, arg, ainfo->storage, ainfo->reg, in);
781 if (arg_type->type == MONO_TYPE_R4 && !arg_type->byref) {
782 arg->opcode = OP_OUTARG_R4;
785 arg->opcode = OP_OUTARG;
786 arg->inst_imm = 16 + ainfo->offset;
789 g_assert_not_reached ();
795 call->stack_usage = cinfo->stack_usage;
796 cfg->param_area = MAX (cfg->param_area, call->stack_usage);
797 cfg->arch.n_out_regs = MAX (cfg->arch.n_out_regs, cinfo->reg_usage);
798 cfg->flags |= MONO_CFG_HAS_CALLS;
/*
 * Local peephole optimization over BB's instruction list: the visible
 * fragments remove no-op moves (dreg == sreg1) and reciprocal move pairs
 * (mov a<-b after mov b<-a) by unlinking them from the singly-linked list.
 * Most of the switch body is elided in this extraction.
 */
806 peephole_pass (MonoCompile *cfg, MonoBasicBlock *bb)
808 MonoInst *ins, *last_ins = NULL;
812 switch (ins->opcode) {
823 if (ins->dreg == ins->sreg1) {
825 last_ins->next = ins->next;
835 if (last_ins && last_ins->opcode == OP_MOVE &&
836 ins->sreg1 == last_ins->dreg &&
837 ins->dreg == last_ins->sreg1) {
838 last_ins->next = ins->next;
848 bb->last_ins = last_ins;
/*
 * Map a generic branch/set/exception opcode to the IA64 compare opcode
 * (register form) that computes its condition into p6/p7.
 * CMP = 64 bit, CMP4 = 32 bit, FCMP = float. The case labels for most
 * entries are elided in this extraction; only the return values remain.
 */
852 opcode_to_ia64_cmp (int opcode)
856 return OP_IA64_CMP_EQ;
858 return OP_IA64_CMP_NE;
860 return OP_IA64_CMP_LE;
862 return OP_IA64_CMP_GE;
864 return OP_IA64_CMP_LT;
866 return OP_IA64_CMP_GT;
868 return OP_IA64_CMP_LE_UN;
870 return OP_IA64_CMP_GE_UN;
872 return OP_IA64_CMP_LT_UN;
874 return OP_IA64_CMP_GT_UN;
875 case OP_COND_EXC_GT_UN:
876 return OP_IA64_CMP_GT_UN;
877 case OP_COND_EXC_LE_UN:
878 return OP_IA64_CMP_LE_UN;
880 return OP_IA64_CMP_LT;
882 return OP_IA64_CMP_GT;
884 return OP_IA64_CMP_EQ;
886 return OP_IA64_CMP_LT;
888 return OP_IA64_CMP_GT;
890 return OP_IA64_CMP_LT_UN;
892 return OP_IA64_CMP_GT_UN;
894 return OP_IA64_CMP4_EQ;
896 return OP_IA64_CMP4_NE;
898 return OP_IA64_CMP4_LE;
900 return OP_IA64_CMP4_LT;
902 return OP_IA64_CMP4_GT;
904 return OP_IA64_CMP4_GE;
906 return OP_IA64_CMP4_LE_UN;
908 return OP_IA64_CMP4_LT_UN;
910 return OP_IA64_CMP4_GE_UN;
912 return OP_IA64_CMP4_GT_UN;
914 return OP_IA64_CMP4_EQ;
916 return OP_IA64_CMP4_LT;
918 return OP_IA64_CMP4_GT;
920 return OP_IA64_CMP4_LT_UN;
922 return OP_IA64_CMP4_GT_UN;
925 return OP_IA64_FCMP_EQ;
927 return OP_IA64_FCMP_NE;
930 return OP_IA64_FCMP_LT;
932 return OP_IA64_FCMP_LE;
935 return OP_IA64_FCMP_GT;
937 return OP_IA64_FCMP_GE;
940 return OP_IA64_FCMP_LT_UN;
943 return OP_IA64_FCMP_GT_UN;
945 return OP_IA64_FCMP_GE_UN;
947 return OP_IA64_FCMP_LE_UN;
/* Unhandled opcode: print its name (and presumably assert, elided). */
949 printf ("%s\n", mono_inst_name (opcode));
/*
 * Same mapping as opcode_to_ia64_cmp, but for the immediate compare forms.
 * The conditions come out reversed relative to the register form (e.g.
 * COND_EXC_GT_UN -> CMP_LT_UN_IMM) because the _IMM compares take their
 * operands in swapped order — see the "switched up arguments" comment in
 * mono_arch_lowering_pass. Most case labels are elided in this extraction.
 */
956 opcode_to_ia64_cmp_imm (int opcode)
960 return OP_IA64_CMP_EQ_IMM;
963 return OP_IA64_CMP_NE_IMM;
966 return OP_IA64_CMP_GE_IMM;
969 return OP_IA64_CMP_LE_IMM;
972 return OP_IA64_CMP_GT_IMM;
975 return OP_IA64_CMP_LT_IMM;
978 return OP_IA64_CMP_GE_UN_IMM;
981 return OP_IA64_CMP_LE_UN_IMM;
984 return OP_IA64_CMP_GT_UN_IMM;
987 return OP_IA64_CMP_LT_UN_IMM;
990 return OP_IA64_CMP_EQ_IMM;
993 return OP_IA64_CMP_GT_IMM;
996 return OP_IA64_CMP_LT_IMM;
999 return OP_IA64_CMP_GT_UN_IMM;
1002 return OP_IA64_CMP_LT_UN_IMM;
1004 case OP_COND_EXC_GT_UN:
1005 return OP_IA64_CMP_LT_UN_IMM;
1007 case OP_COND_EXC_LT:
1008 return OP_IA64_CMP_GT_IMM;
1010 case OP_COND_EXC_GT:
1011 return OP_IA64_CMP_LT_IMM;
1014 return OP_IA64_CMP4_EQ_IMM;
1017 return OP_IA64_CMP4_NE_IMM;
1020 return OP_IA64_CMP4_GE_IMM;
1023 return OP_IA64_CMP4_LE_IMM;
1026 return OP_IA64_CMP4_GT_IMM;
1029 return OP_IA64_CMP4_LT_IMM;
1032 return OP_IA64_CMP4_GE_UN_IMM;
1035 return OP_IA64_CMP4_LE_UN_IMM;
1038 return OP_IA64_CMP4_GT_UN_IMM;
1041 return OP_IA64_CMP4_LT_UN_IMM;
1044 return OP_IA64_CMP4_EQ_IMM;
1047 return OP_IA64_CMP4_GT_IMM;
1050 return OP_IA64_CMP4_LT_IMM;
1053 return OP_IA64_CMP4_GT_UN_IMM;
1056 return OP_IA64_CMP4_LT_UN_IMM;
/* Unhandled opcode: print its name (and presumably assert, elided). */
1059 printf ("%s\n", mono_inst_name (opcode));
/*
 * Insert TO_INSERT into BB's singly-linked instruction list immediately
 * after INS; when INS is NULL (elided branch), TO_INSERT becomes the new
 * head of bb->code.
 */
1066 insert_after_ins (MonoBasicBlock *bb, MonoInst *ins, MonoInst *to_insert)
1070 bb->code = to_insert;
1071 to_insert->next = ins;
1074 to_insert->next = ins->next;
1075 ins->next = to_insert;
/* Allocate a zeroed MonoInst with opcode OP and splice it in after
 * last_ins (captured from the enclosing scope); last_ins advances so
 * consecutive NEW_INS calls append in order. Closing "} while (0)" elided. */
1079 #define NEW_INS(cfg,dest,op) do { \
1080 (dest) = mono_mempool_alloc0 ((cfg)->mempool, sizeof (MonoInst)); \
1081 (dest)->opcode = (op); \
1082 insert_after_ins (bb, last_ins, (dest)); \
1083 last_ins = (dest); \
1087 * mono_arch_lowering_pass:
1089 * Converts complex opcodes into simpler ones so that each IR instruction
1090 * corresponds to one machine instruction.
/*
 * IA64 has no membase addressing and limited immediates, so membase
 * loads/stores/calls are rewritten to an explicit address computation plus
 * a register form, immediates are materialized into registers (r0 for 0),
 * and compare+branch pairs become ia64 compare<cond>+br sequences.
 * NOTE(review): elided extraction — breaks, else arms and many case labels
 * between the visible lines are missing.
 */
1093 mono_arch_lowering_pass (MonoCompile *cfg, MonoBasicBlock *bb)
1095 MonoInst *ins, *next, *temp, *temp2, *temp3, *last_ins = NULL;
1098 if (bb->max_ireg > cfg->rs->next_vireg)
1099 cfg->rs->next_vireg = bb->max_ireg;
1100 if (bb->max_freg > cfg->rs->next_vfreg)
1101 cfg->rs->next_vfreg = bb->max_freg;
1104 switch (ins->opcode) {
1105 case OP_STOREI1_MEMBASE_IMM:
1106 case OP_STOREI2_MEMBASE_IMM:
1107 case OP_STOREI4_MEMBASE_IMM:
1108 case OP_STOREI8_MEMBASE_IMM:
1109 case OP_STORE_MEMBASE_IMM:
1110 /* There are no store_membase instructions on ia64 */
1111 NEW_INS (cfg, temp, OP_I8CONST);
1112 temp->inst_c0 = ins->inst_offset;
1113 temp->dreg = mono_regstate_next_int (cfg->rs);
1114 NEW_INS (cfg, temp2, CEE_ADD);
1115 temp2->sreg1 = ins->inst_destbasereg;
1116 temp2->sreg2 = temp->dreg;
1117 temp2->dreg = mono_regstate_next_int (cfg->rs);
1119 switch (ins->opcode) {
1120 case OP_STOREI1_MEMBASE_IMM:
1121 ins->opcode = OP_STOREI1_MEMBASE_REG;
1123 case OP_STOREI2_MEMBASE_IMM:
1124 ins->opcode = OP_STOREI2_MEMBASE_REG;
1126 case OP_STOREI4_MEMBASE_IMM:
1127 ins->opcode = OP_STOREI4_MEMBASE_REG;
1129 case OP_STOREI8_MEMBASE_IMM:
1130 case OP_STORE_MEMBASE_IMM:
1131 ins->opcode = OP_STOREI8_MEMBASE_REG;
1134 g_assert_not_reached ();
/* A zero immediate can use hard-wired r0 instead of a constant load. */
1137 if (ins->inst_imm == 0)
1138 ins->sreg1 = IA64_R0;
1140 NEW_INS (cfg, temp3, OP_I8CONST);
1141 temp3->inst_c0 = ins->inst_imm;
1142 temp3->dreg = mono_regstate_next_int (cfg->rs);
1143 ins->sreg1 = temp3->dreg;
1146 ins->inst_offset = 0;
1147 ins->inst_destbasereg = temp2->dreg;
1149 case OP_STOREI1_MEMBASE_REG:
1150 case OP_STOREI2_MEMBASE_REG:
1151 case OP_STOREI4_MEMBASE_REG:
1152 case OP_STOREI8_MEMBASE_REG:
1153 case OP_STORER4_MEMBASE_REG:
1154 case OP_STORER8_MEMBASE_REG:
1155 case OP_STORE_MEMBASE_REG:
1156 /* There are no store_membase instructions on ia64 */
/* Small offsets fit the 14 bit adds immediate; large ones need movl+add. */
1157 if (ia64_is_imm14 (ins->inst_offset)) {
1158 NEW_INS (cfg, temp2, OP_ADD_IMM);
1159 temp2->sreg1 = ins->inst_destbasereg;
1160 temp2->inst_imm = ins->inst_offset;
1161 temp2->dreg = mono_regstate_next_int (cfg->rs);
1164 NEW_INS (cfg, temp, OP_I8CONST);
1165 temp->inst_c0 = ins->inst_offset;
1166 temp->dreg = mono_regstate_next_int (cfg->rs);
1167 NEW_INS (cfg, temp2, CEE_ADD);
1168 temp2->sreg1 = ins->inst_destbasereg;
1169 temp2->sreg2 = temp->dreg;
1170 temp2->dreg = mono_regstate_next_int (cfg->rs);
1173 ins->inst_offset = 0;
1174 ins->inst_destbasereg = temp2->dreg;
1176 case OP_LOADI1_MEMBASE:
1177 case OP_LOADU1_MEMBASE:
1178 case OP_LOADI2_MEMBASE:
1179 case OP_LOADU2_MEMBASE:
1180 case OP_LOADI4_MEMBASE:
1181 case OP_LOADU4_MEMBASE:
1182 case OP_LOADI8_MEMBASE:
1183 case OP_LOAD_MEMBASE:
1184 case OP_LOADR4_MEMBASE:
1185 case OP_LOADR8_MEMBASE:
1186 /* There are no load_membase instructions on ia64 */
1187 if (ia64_is_imm14 (ins->inst_offset)) {
1188 NEW_INS (cfg, temp2, OP_ADD_IMM);
1189 temp2->sreg1 = ins->inst_basereg;
1190 temp2->inst_imm = ins->inst_offset;
1191 temp2->dreg = mono_regstate_next_int (cfg->rs);
1194 NEW_INS (cfg, temp, OP_I8CONST);
1195 temp->inst_c0 = ins->inst_offset;
1196 temp->dreg = mono_regstate_next_int (cfg->rs);
1197 NEW_INS (cfg, temp2, CEE_ADD);
1198 temp2->sreg1 = ins->inst_basereg;
1199 temp2->sreg2 = temp->dreg;
1200 temp2->dreg = mono_regstate_next_int (cfg->rs);
1203 ins->inst_offset = 0;
1204 ins->inst_basereg = temp2->dreg;
1206 case OP_FCALL_MEMBASE:
1207 case OP_LCALL_MEMBASE:
1208 case OP_VCALL_MEMBASE:
1209 case OP_VOIDCALL_MEMBASE:
1210 case OP_CALL_MEMBASE:
1211 /* There are no membase instructions on ia64 */
1212 if (ia64_is_imm14 (ins->inst_offset)) {
1213 NEW_INS (cfg, temp2, OP_ADD_IMM);
1214 temp2->sreg1 = ins->sreg1;
1215 temp2->inst_imm = ins->inst_offset;
1216 temp2->dreg = mono_regstate_next_int (cfg->rs);
1219 NEW_INS (cfg, temp, OP_I8CONST);
1220 temp->inst_c0 = ins->inst_offset;
1221 temp->dreg = mono_regstate_next_int (cfg->rs);
1222 NEW_INS (cfg, temp2, CEE_ADD);
1223 temp2->sreg1 = ins->sreg1;
1224 temp2->sreg2 = temp->dreg;
1225 temp2->dreg = mono_regstate_next_int (cfg->rs);
1228 switch (ins->opcode) {
1229 case OP_FCALL_MEMBASE:
1230 ins->opcode = OP_FCALL_REG;
1232 case OP_LCALL_MEMBASE:
1233 ins->opcode = OP_LCALL_REG;
1235 case OP_VCALL_MEMBASE:
1236 ins->opcode = OP_VCALL_REG;
1238 case OP_VOIDCALL_MEMBASE:
1239 ins->opcode = OP_VOIDCALL_REG;
1241 case OP_CALL_MEMBASE:
1242 ins->opcode = OP_CALL_REG;
1245 g_assert_not_reached ();
/* ALU-with-immediate opcodes: rewrite to the register form, loading the
 * immediate into a vreg (or using r0 for zero). */
1255 case OP_ISHR_UN_IMM:
1259 case OP_LSHR_UN_IMM:
1260 /* FIXME: There is an alu imm instruction */
1261 switch (ins->opcode) {
1263 ins->opcode = OP_IADD;
1266 ins->opcode = OP_ISUB;
1269 ins->opcode = OP_IAND;
1272 ins->opcode = OP_IOR;
1275 ins->opcode = OP_IXOR;
1278 ins->opcode = OP_ISHL;
1281 ins->opcode = OP_ISHR;
1283 case OP_ISHR_UN_IMM:
1284 ins->opcode = OP_ISHR_UN;
1287 ins->opcode = CEE_AND;
1290 ins->opcode = OP_LSHL;
/* NOTE(review): this branch also assigns OP_LSHL — given the elided case
 * labels it looks like one of these should be OP_LSHR (OP_LSHR_IMM);
 * confirm against the full file before relying on long shift-right-imm. */
1293 ins->opcode = OP_LSHL;
1295 case OP_LSHR_UN_IMM:
1296 ins->opcode = OP_LSHR_UN;
1299 g_assert_not_reached ();
1302 if (ins->inst_imm == 0)
1303 ins->sreg2 = IA64_R0;
1305 NEW_INS (cfg, temp, OP_I8CONST);
1306 temp->inst_c0 = ins->inst_imm;
1307 temp->dreg = mono_regstate_next_int (cfg->rs);
1308 ins->sreg2 = temp->dreg;
1311 case OP_COMPARE_IMM:
1312 case OP_ICOMPARE_IMM: {
1313 /* Instead of compare+b<cond>, ia64 has compare<cond>+br */
1317 * The compare_imm instructions have switched up arguments, and
1318 * some of them take an imm between -127 and 128.
1321 switch (next->opcode) {
1326 case OP_COND_EXC_LT:
1331 imm = ia64_is_imm8 (ins->inst_imm - 1);
1334 imm = ia64_is_imm8 (ins->inst_imm);
/* Immediate fits: use the (operand-swapped) _IMM compare form. */
1339 ins->opcode = opcode_to_ia64_cmp_imm (next->opcode);
1340 ins->sreg2 = ins->sreg1;
1343 ins->opcode = opcode_to_ia64_cmp (next->opcode);
1345 if (ins->inst_imm == 0)
1346 ins->sreg2 = IA64_R0;
1348 NEW_INS (cfg, temp, OP_I8CONST);
1349 temp->inst_c0 = ins->inst_imm;
1350 temp->dreg = mono_regstate_next_int (cfg->rs);
1351 ins->sreg2 = temp->dreg;
/* Rewrite the consumer of the compare (branch / cond-exc / set). */
1355 switch (next->opcode) {
1376 next->opcode = OP_IA64_BR_COND;
1377 if (! (next->flags & MONO_INST_BRLABEL))
1378 next->inst_target_bb = next->inst_true_bb;
1380 case OP_COND_EXC_GT_UN:
1381 case OP_COND_EXC_GT:
1382 case OP_COND_EXC_LT:
1383 next->opcode = OP_IA64_COND_EXC;
1395 next->opcode = OP_IA64_CSET;
1398 printf ("%s\n", mono_inst_name (next->opcode));
1408 /* Instead of compare+b<cond>, ia64 has compare<cond>+br */
1411 ins->opcode = opcode_to_ia64_cmp (next->opcode);
1412 switch (next->opcode) {
1443 next->opcode = OP_IA64_BR_COND;
1444 if (! (next->flags & MONO_INST_BRLABEL))
1445 next->inst_target_bb = next->inst_true_bb;
1447 case OP_COND_EXC_LT:
1448 case OP_COND_EXC_GT:
1449 case OP_COND_EXC_GT_UN:
1450 case OP_COND_EXC_LE_UN:
1451 next->opcode = OP_IA64_COND_EXC;
1468 next->opcode = OP_IA64_CSET;
1471 printf ("%s\n", mono_inst_name (next->opcode));
1482 bb->last_ins = last_ins;
1484 bb->max_ireg = cfg->rs->next_vireg;
1485 bb->max_freg = cfg->rs->next_vfreg;
/* Run the lowering pass, then the generic local register allocator, on BB. */
1489 mono_arch_local_regalloc (MonoCompile *cfg, MonoBasicBlock *bb)
1494 mono_arch_lowering_pass (cfg, bb);
1496 mono_local_regalloc (cfg, bb);
/*
 * After a call instruction INS, copy the hardware return value (r8 for
 * integers, f8 for floats — see the asserts) into ins->dreg, returning the
 * updated codegen state. Most case bodies are elided in this extraction.
 */
1499 static Ia64CodegenState
1500 emit_move_return_value (MonoCompile *cfg, MonoInst *ins, Ia64CodegenState code)
1505 /* Move return value to the target register */
1506 /* FIXME: do this in the local reg allocator */
1507 switch (ins->opcode) {
1509 case OP_VOIDCALL_REG:
1510 case OP_VOIDCALL_MEMBASE:
1514 case OP_CALL_MEMBASE:
1517 case OP_LCALL_MEMBASE:
1518 g_assert (ins->dreg == IA64_R8);
1522 case OP_FCALL_MEMBASE:
/* NOTE(review): bare 8 here vs IA64_R8 above — presumably the float reg
 * f8; confirm the constant against the full file. */
1523 g_assert (ins->dreg == 8);
1527 case OP_VCALL_MEMBASE:
1531 g_assert_not_reached ();
/*
 * Emit a call carrying patch info PATCH_TYPE/DATA. For absolute targets the
 * address is a function descriptor: load the entry point into b6 and the
 * descriptor's gp into r1 before the indirect call; otherwise emit a plain
 * patchable br.call. Returns the updated codegen state.
 */
1537 static Ia64CodegenState
1538 emit_call (MonoCompile *cfg, Ia64CodegenState code, guint32 patch_type, gconstpointer data)
1540 mono_add_patch_info (cfg, code.buf - cfg->native_code, patch_type, data);
1542 if (patch_type == MONO_PATCH_INFO_ABS) {
/* movl target is 0 here; the patcher fills in the descriptor address. */
1544 ia64_movl (code, GP_SCRATCH_REG, 0);
1545 ia64_ld8_inc_imm_hint (code, GP_SCRATCH_REG2, GP_SCRATCH_REG, 8, 0);
1546 ia64_mov_to_br (code, IA64_B6, GP_SCRATCH_REG2, 0, 0, 0);
1547 ia64_ld8 (code, IA64_GP, GP_SCRATCH_REG);
1548 ia64_br_call_reg (code, IA64_B0, IA64_B6);
1551 ia64_br_call_hint (code, IA64_B0, 0, 0, 0, 0);
/* NOTE(review): duplicate of the identical bb_is_loop_start definition near
 * the top of the file (harmless since the bodies match, but one copy
 * should probably be removed). */
1556 #define bb_is_loop_start(bb) ((bb)->loop_body_start && (bb)->nesting)
/*
 * Emit native IA64 code for every instruction of BB into cfg->native_code,
 * growing the buffer as needed. Only a fragment of the opcode switch is
 * visible in this extraction; breaks and many case labels are elided.
 */
1559 mono_arch_output_basic_block (MonoCompile *cfg, MonoBasicBlock *bb)
1564 Ia64CodegenState code;
1565 guint8 *code_start = cfg->native_code + cfg->code_len;
1566 MonoInst *last_ins = NULL;
1567 guint last_offset = 0;
1570 if (cfg->opt & MONO_OPT_PEEPHOLE)
1571 peephole_pass (cfg, bb);
1573 if (cfg->opt & MONO_OPT_LOOP) {
1577 if (cfg->verbose_level > 2)
1578 g_print ("Basic block %d starting at offset 0x%x\n", bb->block_num, bb->native_offset);
1580 cpos = bb->max_offset;
1582 if (cfg->prof_options & MONO_PROFILE_COVERAGE) {
1586 offset = code_start - cfg->native_code;
1588 ia64_codegen_init (code, code_start);
1592 offset = code.buf - cfg->native_code;
/* Worst-case native length of this opcode, from the descriptor table. */
1594 max_len = ((guint8 *)ins_spec [ins->opcode])[MONO_INST_LEN];
/* Grow the code buffer when the next instruction might not fit. */
1596 if (offset > (cfg->code_size - max_len - 16)) {
1597 ia64_codegen_close (code);
1599 offset = code.buf - cfg->native_code;
1601 cfg->code_size *= 2;
1602 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
1603 code_start = cfg->native_code + offset;
1604 mono_jit_stats.code_reallocs++;
1606 ia64_codegen_init (code, code_start);
1609 mono_debug_record_line_number (cfg, ins, offset);
1611 switch (ins->opcode) {
/* Constants: small ones via adds r0+imm14, large ones via movl. */
1614 if (ia64_is_imm14 (ins->inst_c0))
1615 ia64_adds_imm (code, ins->dreg, ins->inst_c0, IA64_R0);
1617 ia64_movl (code, ins->dreg, ins->inst_c0);
1620 ia64_mov (code, ins->dreg, ins->sreg1);
1623 case OP_IA64_BR_COND: {
1625 if (ins->opcode == OP_IA64_BR_COND)
/* Branch targets: resolved displacement when known, patch info otherwise. */
1627 if (ins->flags & MONO_INST_BRLABEL) {
1628 if (ins->inst_i0->inst_c0) {
1631 ia64_begin_bundle (code);
1632 mono_add_patch_info (cfg, code.buf - cfg->native_code, MONO_PATCH_INFO_LABEL, ins->inst_i0);
1633 ia64_br_cond_hint_pred (code, pred, 0, 0, 0, 0);
1636 if (ins->inst_target_bb->native_offset) {
1637 gint64 disp = ((gint64)ins->inst_target_bb->native_offset - offset) >> 4;
1638 ia64_br_cond_hint_pred (code, pred, disp, 0, 0, 0);
1640 ia64_begin_bundle (code);
1641 mono_add_patch_info (cfg, code.buf - cfg->native_code, MONO_PATCH_INFO_BB, ins->inst_target_bb);
1642 ia64_br_cond_hint_pred (code, pred, 0, 0, 0, 0);
1648 ia64_begin_bundle (code);
1649 ins->inst_c0 = code.buf - cfg->native_code;
1652 ia64_mov_to_br (code, IA64_B6, ins->sreg1, 0, 0, 0);
1653 ia64_br_cond_reg_hint (code, IA64_B6, 0, 0, 0);
/* Straightforward ALU opcodes. */
1657 ia64_add (code, ins->dreg, ins->sreg1, ins->sreg2);
1661 ia64_and (code, ins->dreg, ins->sreg1, ins->sreg2);
1665 ia64_or (code, ins->dreg, ins->sreg1, ins->sreg2);
1669 ia64_xor (code, ins->dreg, ins->sreg1, ins->sreg2);
1673 ia64_sub (code, ins->dreg, IA64_R0, ins->sreg1);
/* Bitwise NOT via and-complement with all-ones. */
1677 ia64_andcm_imm (code, ins->dreg, -1, ins->sreg1);
1680 ia64_shl (code, ins->dreg, ins->sreg1, ins->sreg2);
1684 ia64_shr (code, ins->dreg, ins->sreg1, ins->sreg2);
/* 32 bit unsigned shift right: zero-extend first, then 64 bit shr.u. */
1687 ia64_zxt4 (code, GP_SCRATCH_REG, ins->sreg1);
1688 ia64_shr_u (code, ins->dreg, GP_SCRATCH_REG, ins->sreg2);
1692 ia64_shl (code, ins->dreg, ins->sreg1, ins->sreg2);
1695 ia64_shr_u (code, ins->dreg, ins->sreg1, ins->sreg2);
1699 ia64_sub (code, ins->dreg, ins->sreg1, ins->sreg2);
/* Overflow-checked add: derive signed overflow from operand/result order
 * using predicates p8/p9, setting p6 on overflow. */
1702 /* p6 and p7 is set if there is signed/unsigned overflow */
1704 /* Set p8-p9 == (sreg2 > 0) */
1705 ia64_cmp4_lt (code, 8, 9, IA64_R0, ins->sreg2);
1707 ia64_add (code, GP_SCRATCH_REG, ins->sreg1, ins->sreg2);
1709 /* (sreg2 > 0) && (res < ins->sreg1) => signed overflow */
1710 ia64_cmp4_lt_pred (code, 8, 6, 10, GP_SCRATCH_REG, ins->sreg1);
1711 /* (sreg2 <= 0) && (res > ins->sreg1) => signed overflow */
1712 ia64_cmp4_lt_pred (code, 9, 6, 10, ins->sreg1, GP_SCRATCH_REG);
1714 ia64_mov (code, ins->dreg, GP_SCRATCH_REG);
1716 /* FIXME: Set p7 as well */
1719 ia64_adds_imm (code, ins->dreg, ins->inst_imm, ins->sreg1);
/* Stores/loads: base register already holds the full address (the
 * lowering pass zeroed the offsets). */
1721 case OP_STOREI1_MEMBASE_REG:
1722 ia64_st1_hint (code, ins->inst_destbasereg, ins->sreg1, 0);
1724 case OP_STOREI2_MEMBASE_REG:
1725 ia64_st2_hint (code, ins->inst_destbasereg, ins->sreg1, 0);
1727 case OP_STOREI4_MEMBASE_REG:
1728 ia64_st4_hint (code, ins->inst_destbasereg, ins->sreg1, 0);
1730 case OP_STOREI8_MEMBASE_REG:
1731 case OP_STORE_MEMBASE_REG:
1732 ia64_st8_hint (code, ins->inst_destbasereg, ins->sreg1, 0);
1734 case OP_LOADU1_MEMBASE:
1735 ia64_ld1_hint (code, ins->dreg, ins->inst_basereg, 0);
1737 case OP_LOADU2_MEMBASE:
1738 ia64_ld2_hint (code, ins->dreg, ins->inst_basereg, 0);
1740 case OP_LOADU4_MEMBASE:
1741 ia64_ld4_hint (code, ins->dreg, ins->inst_basereg, 0);
/* Signed loads: load then sign-extend in place. */
1743 case OP_LOADI1_MEMBASE:
1744 ia64_ld1_hint (code, ins->dreg, ins->inst_basereg, 0);
1745 ia64_sxt1 (code, ins->dreg, ins->dreg);
1747 case OP_LOADI2_MEMBASE:
1748 ia64_ld2_hint (code, ins->dreg, ins->inst_basereg, 0);
1749 ia64_sxt2 (code, ins->dreg, ins->dreg);
1751 case OP_LOADI4_MEMBASE:
1752 ia64_ld4_hint (code, ins->dreg, ins->inst_basereg, 0);
1753 ia64_sxt4 (code, ins->dreg, ins->dreg);
1755 case OP_LOAD_MEMBASE:
1756 case OP_LOADI8_MEMBASE:
1757 ia64_ld8_hint (code, ins->dreg, ins->inst_basereg, 0);
1760 ia64_sxt1 (code, ins->dreg, ins->sreg1);
1763 ia64_sxt2 (code, ins->dreg, ins->sreg1);
1767 /* This should be emulated, but rules in inssel.brg generate it */
1770 /* Compare opcodes */
/* Compares set predicate pair p6/p7 per the file-top convention. */
1771 case OP_IA64_CMP4_EQ:
1772 ia64_cmp4_eq (code, 6, 7, ins->sreg1, ins->sreg2);
1774 case OP_IA64_CMP4_NE:
1775 ia64_cmp4_ne (code, 6, 7, ins->sreg1, ins->sreg2);
1777 case OP_IA64_CMP4_LE:
1778 ia64_cmp4_le (code, 6, 7, ins->sreg1, ins->sreg2);
1780 case OP_IA64_CMP4_LT:
1781 ia64_cmp4_lt (code, 6, 7, ins->sreg1, ins->sreg2);
1783 case OP_IA64_CMP4_GE:
1784 ia64_cmp4_ge (code, 6, 7, ins->sreg1, ins->sreg2);
1786 case OP_IA64_CMP4_GT:
1787 ia64_cmp4_gt (code, 6, 7, ins->sreg1, ins->sreg2);
1789 case OP_IA64_CMP4_LT_UN:
1790 ia64_cmp4_ltu (code, 6, 7, ins->sreg1, ins->sreg2);
1792 case OP_IA64_CMP4_LE_UN:
1793 ia64_cmp4_leu (code, 6, 7, ins->sreg1, ins->sreg2);
1795 case OP_IA64_CMP4_GT_UN:
1796 ia64_cmp4_gtu (code, 6, 7, ins->sreg1, ins->sreg2);
1798 case OP_IA64_CMP4_GE_UN:
1799 ia64_cmp4_geu (code, 6, 7, ins->sreg1, ins->sreg2);
1801 case OP_IA64_CMP_EQ:
1802 ia64_cmp_eq (code, 6, 7, ins->sreg1, ins->sreg2);
1804 case OP_IA64_CMP_NE:
1805 ia64_cmp_ne (code, 6, 7, ins->sreg1, ins->sreg2);
1807 case OP_IA64_CMP_LE:
1808 ia64_cmp_le (code, 6, 7, ins->sreg1, ins->sreg2);
1810 case OP_IA64_CMP_LT:
1811 ia64_cmp_lt (code, 6, 7, ins->sreg1, ins->sreg2);
1813 case OP_IA64_CMP_GE:
1814 ia64_cmp_ge (code, 6, 7, ins->sreg1, ins->sreg2);
1816 case OP_IA64_CMP_GT:
1817 ia64_cmp_gt (code, 6, 7, ins->sreg1, ins->sreg2);
1819 case OP_IA64_CMP_GT_UN:
1820 ia64_cmp_gtu (code, 6, 7, ins->sreg1, ins->sreg2);
1822 case OP_IA64_CMP_LT_UN:
1823 ia64_cmp_ltu (code, 6, 7, ins->sreg1, ins->sreg2);
1825 case OP_IA64_CMP_GE_UN:
1826 ia64_cmp_geu (code, 6, 7, ins->sreg1, ins->sreg2);
1828 case OP_IA64_CMP_LE_UN:
1829 ia64_cmp_leu (code, 6, 7, ins->sreg1, ins->sreg2);
1831 case OP_IA64_CMP4_EQ_IMM:
1832 ia64_cmp4_eq_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1834 case OP_IA64_CMP4_NE_IMM:
1835 ia64_cmp4_ne_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1837 case OP_IA64_CMP4_LE_IMM:
1838 ia64_cmp4_le_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1840 case OP_IA64_CMP4_LT_IMM:
1841 ia64_cmp4_lt_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1843 case OP_IA64_CMP4_GE_IMM:
1844 ia64_cmp4_ge_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1846 case OP_IA64_CMP4_GT_IMM:
1847 ia64_cmp4_gt_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1849 case OP_IA64_CMP4_LT_UN_IMM:
1850 ia64_cmp4_ltu_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1852 case OP_IA64_CMP4_LE_UN_IMM:
1853 ia64_cmp4_leu_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1855 case OP_IA64_CMP4_GT_UN_IMM:
1856 ia64_cmp4_gtu_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1858 case OP_IA64_CMP4_GE_UN_IMM:
1859 ia64_cmp4_geu_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1861 case OP_IA64_CMP_EQ_IMM:
1862 ia64_cmp_eq_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1864 case OP_IA64_CMP_NE_IMM:
1865 ia64_cmp_ne_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1867 case OP_IA64_CMP_LE_IMM:
1868 ia64_cmp_le_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1870 case OP_IA64_CMP_LT_IMM:
1871 ia64_cmp_lt_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1873 case OP_IA64_CMP_GE_IMM:
1874 ia64_cmp_ge_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1876 case OP_IA64_CMP_GT_IMM:
1877 ia64_cmp_gt_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1879 case OP_IA64_CMP_GT_UN_IMM:
1880 ia64_cmp_gtu_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1882 case OP_IA64_CMP_LT_UN_IMM:
1883 ia64_cmp_ltu_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1885 case OP_IA64_CMP_GE_UN_IMM:
1886 ia64_cmp_geu_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1888 case OP_IA64_CMP_LE_UN_IMM:
1889 ia64_cmp_leu_imm (code, 6, 7, ins->inst_imm, ins->sreg2);
1891 case OP_IA64_FCMP_EQ:
1892 ia64_fcmp_eq_sf (code, 6, 7, ins->sreg1, ins->sreg2, 0);
1894 case OP_IA64_FCMP_NE:
1895 ia64_fcmp_ne_sf (code, 6, 7, ins->sreg1, ins->sreg2, 0);
1897 case OP_IA64_FCMP_LT:
1898 ia64_fcmp_lt_sf (code, 6, 7, ins->sreg1, ins->sreg2, 0);
1900 case OP_IA64_FCMP_GT:
1901 ia64_fcmp_gt_sf (code, 6, 7, ins->sreg1, ins->sreg2, 0);
1903 case OP_IA64_FCMP_LE:
1904 ia64_fcmp_le_sf (code, 6, 7, ins->sreg1, ins->sreg2, 0);
1906 case OP_IA64_FCMP_GE:
1907 ia64_fcmp_ge_sf (code, 6, 7, ins->sreg1, ins->sreg2, 0);
1909 case OP_IA64_FCMP_GT_UN:
1910 ia64_fcmp_gt_sf (code, 6, 7, ins->sreg1, ins->sreg2, 0);
1911 ia64_fcmp_unord_sf_pred (code, 7, 6, 7, ins->sreg1, ins->sreg2, 0);
1913 case OP_IA64_FCMP_LT_UN:
1914 ia64_fcmp_lt_sf (code, 6, 7, ins->sreg1, ins->sreg2, 0);
1915 ia64_fcmp_unord_sf_pred (code, 7, 6, 7, ins->sreg1, ins->sreg2, 0);
1917 case OP_IA64_FCMP_GE_UN:
1918 ia64_fcmp_ge_sf (code, 6, 7, ins->sreg1, ins->sreg2, 0);
1919 ia64_fcmp_unord_sf_pred (code, 7, 6, 7, ins->sreg1, ins->sreg2, 0);
1921 case OP_IA64_FCMP_LE_UN:
1922 ia64_fcmp_le_sf (code, 6, 7, ins->sreg1, ins->sreg2, 0);
1923 ia64_fcmp_unord_sf_pred (code, 7, 6, 7, ins->sreg1, ins->sreg2, 0);
1926 case OP_COND_EXC_IOV:
1928 ia64_break_i_pred (code, 6, 0);
1930 case OP_COND_EXC_IC:
1932 ia64_break_i_pred (code, 7, 0);
1934 case OP_IA64_COND_EXC:
1936 ia64_break_i_pred (code, 6, 0);
1939 /* FIXME: Do this with one instruction ? */
1940 ia64_mov (code, ins->dreg, IA64_R0);
1941 ia64_add1_pred (code, 6, ins->dreg, IA64_R0, IA64_R0);
1944 /* FIXME: Is this needed ? */
1945 ia64_sxt1 (code, ins->dreg, ins->sreg1);
1948 /* FIXME: Is this needed ? */
1949 ia64_sxt2 (code, ins->dreg, ins->sreg1);
1952 /* FIXME: Is this needed ? */
1953 ia64_sxt4 (code, ins->dreg, ins->sreg1);
1956 /* FIXME: Is this needed */
1957 ia64_zxt1 (code, ins->dreg, ins->sreg1);
1960 /* FIXME: Is this needed */
1961 ia64_zxt2 (code, ins->dreg, ins->sreg1);
1965 ia64_mov (code, ins->dreg, ins->sreg1);
1968 ia64_zxt4 (code, ins->dreg, ins->sreg1);
1970 case CEE_CONV_OVF_U4:
1972 ia64_mov (code, ins->dreg, ins->sreg1);
1979 double d = *(double *)ins->inst_p0;
1981 if ((d == 0.0) && (mono_signbit (d) == 0))
1982 ia64_fmov (code, ins->dreg, 0);
1984 ia64_fmov (code, ins->dreg, 1);
1986 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_R8, ins->inst_p0);
1987 ia64_movl (code, GP_SCRATCH_REG, 0);
1988 ia64_ldfd_hint (code, ins->dreg, GP_SCRATCH_REG, 0);
1993 float f = *(float *)ins->inst_p0;
1995 if ((f == 0.0) && (mono_signbit (f) == 0))
1996 ia64_fmov (code, ins->dreg, 0);
1998 ia64_fmov (code, ins->dreg, 1);
2000 mono_add_patch_info (cfg, offset, MONO_PATCH_INFO_R4, ins->inst_p0);
2001 ia64_movl (code, GP_SCRATCH_REG, 0);
2002 ia64_ldfs_hint (code, ins->dreg, GP_SCRATCH_REG, 0);
2007 ia64_fmov (code, ins->dreg, ins->sreg1);
2009 case OP_STORER8_MEMBASE_REG:
2010 ia64_stfd_hint (code, ins->inst_destbasereg, ins->sreg1, 0);
2012 case OP_STORER4_MEMBASE_REG:
2013 ia64_stfs_hint (code, ins->inst_destbasereg, ins->sreg1, 0);
2015 case OP_LOADR8_MEMBASE:
2016 ia64_ldfd_hint (code, ins->dreg, ins->inst_basereg, 0);
2018 case OP_LOADR4_MEMBASE:
2019 ia64_ldfs_hint (code, ins->dreg, ins->inst_basereg, 0);
2022 ia64_setf_sig (code, ins->dreg, ins->sreg1);
2023 ia64_fcvt_xf (code, ins->dreg, ins->dreg);
2024 ia64_fnorm_s_sf (code, ins->dreg, ins->dreg, 0);
2027 ia64_setf_sig (code, ins->dreg, ins->sreg1);
2028 ia64_fcvt_xf (code, ins->dreg, ins->dreg);
2029 ia64_fnorm_d_sf (code, ins->dreg, ins->dreg, 0);
2031 case OP_LCONV_TO_R8:
2032 /* FIXME: Difference with CEE_CONV_R8 ? */
2033 ia64_setf_sig (code, ins->dreg, ins->sreg1);
2034 ia64_fcvt_xf (code, ins->dreg, ins->dreg);
2035 ia64_fnorm_d_sf (code, ins->dreg, ins->dreg, 0);
2037 case OP_FCONV_TO_R4:
2038 ia64_fnorm_s_sf (code, ins->dreg, ins->sreg1, 0);
2040 case OP_FCONV_TO_I4:
2041 case OP_FCONV_TO_I2:
2042 case OP_FCONV_TO_U2:
2043 case OP_FCONV_TO_U1:
2044 /* FIXME: sign/zero extend ? */
2045 ia64_fcvt_fx_trunc_sf (code, FP_SCRATCH_REG, ins->sreg1, 0);
2046 ia64_getf_sig (code, ins->dreg, FP_SCRATCH_REG);
2048 case OP_FCONV_TO_I8:
2049 /* FIXME: Difference with OP_FCONV_TO_I4 ? */
2050 ia64_fcvt_fx_trunc_sf (code, FP_SCRATCH_REG, ins->sreg1, 0);
2051 ia64_getf_sig (code, ins->dreg, FP_SCRATCH_REG);
2054 ia64_fma_d_sf (code, ins->dreg, ins->sreg1, 1, ins->sreg2, 0);
2057 ia64_fms_d_sf (code, ins->dreg, ins->sreg1, 1, ins->sreg2, 0);
2060 ia64_fma_d_sf (code, ins->dreg, ins->sreg1, ins->sreg2, 0, 0);
2063 ia64_fmerge_ns (code, ins->dreg, ins->sreg1, ins->sreg1);
2068 /* ensure ins->sreg1 is not NULL */
2069 ia64_ld8_hint (code, GP_SCRATCH_REG, ins->sreg1, 0);
2077 call = (MonoCallInst*)ins;
2079 if (ins->flags & MONO_INST_HAS_METHOD)
2080 code = emit_call (cfg, code, MONO_PATCH_INFO_METHOD, call->method);
2082 code = emit_call (cfg, code, MONO_PATCH_INFO_ABS, call->fptr);
2084 code = emit_move_return_value (cfg, ins, code);
2090 case OP_VOIDCALL_REG:
2091 call = (MonoCallInst*)ins;
2094 ia64_mov (code, GP_SCRATCH_REG, ins->sreg1);
2095 ia64_ld8_inc_imm_hint (code, GP_SCRATCH_REG2, GP_SCRATCH_REG, 8, 0);
2096 ia64_mov_to_br (code, IA64_B6, GP_SCRATCH_REG2, 0, 0, 0);
2097 ia64_ld8 (code, IA64_GP, GP_SCRATCH_REG);
2098 ia64_br_call_reg (code, IA64_B0, IA64_B6);
2100 code = emit_move_return_value (cfg, ins, code);
2103 /* Exception handling */
2104 case OP_CALL_HANDLER:
2109 g_warning ("unknown opcode %s in %s()\n", mono_inst_name (ins->opcode), __FUNCTION__);
2110 g_assert_not_reached ();
2113 if ((code.buf - cfg->native_code - offset) > max_len) {
2114 g_warning ("wrong maximal instruction length of instruction %s (expected %d, got %ld)",
2115 mono_inst_name (ins->opcode), max_len, code.buf - cfg->native_code - offset);
2116 g_assert_not_reached ();
2122 last_offset = offset;
2127 ia64_codegen_close (code);
2129 cfg->code_len = code.buf - cfg->native_code;
2133 mono_arch_register_lowlevel_calls (void)
/*
 * Maps each of the 32 IA64 bundle templates to the instruction type
 * (M/I/LX/F/B) of its three slots.  Indexed by [template][slot] in
 * ia64_patch () below to decide how each slot must be re-encoded.
 * NOTE(review): some rows of this table are not visible in this
 * extraction (gaps in the numbering); presumably the missing rows cover
 * the reserved/remaining templates — confirm against the full file.
 */
2137 static Ia64InsType ins_types_in_template [32][3] = {
2138 {IA64_INS_TYPE_M, IA64_INS_TYPE_I, IA64_INS_TYPE_I},
2139 {IA64_INS_TYPE_M, IA64_INS_TYPE_I, IA64_INS_TYPE_I},
2140 {IA64_INS_TYPE_M, IA64_INS_TYPE_I, IA64_INS_TYPE_I},
2141 {IA64_INS_TYPE_M, IA64_INS_TYPE_I, IA64_INS_TYPE_I},
2142 {IA64_INS_TYPE_M, IA64_INS_TYPE_LX, IA64_INS_TYPE_LX},
2143 {IA64_INS_TYPE_M, IA64_INS_TYPE_LX, IA64_INS_TYPE_LX},
2146 {IA64_INS_TYPE_M, IA64_INS_TYPE_M, IA64_INS_TYPE_I},
2147 {IA64_INS_TYPE_M, IA64_INS_TYPE_M, IA64_INS_TYPE_I},
2148 {IA64_INS_TYPE_M, IA64_INS_TYPE_M, IA64_INS_TYPE_I},
2149 {IA64_INS_TYPE_M, IA64_INS_TYPE_M, IA64_INS_TYPE_I},
2150 {IA64_INS_TYPE_M, IA64_INS_TYPE_F, IA64_INS_TYPE_I},
2151 {IA64_INS_TYPE_M, IA64_INS_TYPE_F, IA64_INS_TYPE_I},
2152 {IA64_INS_TYPE_M, IA64_INS_TYPE_M, IA64_INS_TYPE_F},
2153 {IA64_INS_TYPE_M, IA64_INS_TYPE_M, IA64_INS_TYPE_F},
2154 {IA64_INS_TYPE_M, IA64_INS_TYPE_I, IA64_INS_TYPE_B},
2155 {IA64_INS_TYPE_M, IA64_INS_TYPE_I, IA64_INS_TYPE_B},
2156 {IA64_INS_TYPE_M, IA64_INS_TYPE_B, IA64_INS_TYPE_B},
2157 {IA64_INS_TYPE_M, IA64_INS_TYPE_B, IA64_INS_TYPE_B},
2160 {IA64_INS_TYPE_B, IA64_INS_TYPE_B, IA64_INS_TYPE_B},
2161 {IA64_INS_TYPE_B, IA64_INS_TYPE_B, IA64_INS_TYPE_B},
2162 {IA64_INS_TYPE_M, IA64_INS_TYPE_M, IA64_INS_TYPE_B},
2163 {IA64_INS_TYPE_M, IA64_INS_TYPE_M, IA64_INS_TYPE_B},
2166 {IA64_INS_TYPE_M, IA64_INS_TYPE_F, IA64_INS_TYPE_B},
2167 {IA64_INS_TYPE_M, IA64_INS_TYPE_F, IA64_INS_TYPE_B},
/*
 * ia64_patch:
 *   Patch the IA64 bundle at CODE so that the branch/call/movl it
 *   contains refers to TARGET.  The bundle is decoded into its template
 *   and three instruction slots, the relevant slot is re-emitted with
 *   the new displacement or immediate, and the bundle is written back.
 *   NOTE(review): interior lines are missing from this extraction; the
 *   comments below describe only the visible code.
 */
2173 ia64_patch (unsigned char* code, gpointer target)
2176 guint64 instructions [3];
2178 Ia64CodegenState gen;
/* Decode the bundle: template selector plus the three 41-bit slots. */
2180 template = ia64_bundle_template (code);
2181 instructions [0] = ia64_bundle_ins1 (code);
2182 instructions [1] = ia64_bundle_ins2 (code);
2183 instructions [2] = ia64_bundle_ins3 (code);
/* Re-emit into a scratch buffer so we can pick out the encoded slot(s). */
2185 ia64_codegen_init (gen, gen_buf);
2187 for (i = 0; i < 3; ++i) {
2188 guint64 ins = instructions [i];
2189 int opcode = ia64_ins_opcode (ins);
/* First, detect nop slots so they are left untouched. */
2192 gboolean nop = FALSE;
2193 switch (ins_types_in_template [template][i]) {
2194 case IA64_INS_TYPE_I:
2195 nop = (ins == IA64_NOP_I);
2197 case IA64_INS_TYPE_M:
2198 nop = (ins == IA64_NOP_M);
2200 case IA64_INS_TYPE_LX:
2209 switch (ins_types_in_template [template][i]) {
2210 case IA64_INS_TYPE_B:
2211 if ((opcode == 4) && (ia64_ins_btype (ins) == 0)) {
/* br.cond: displacement is in 16-byte bundle units, hence the >> 4. */
2213 gint64 disp = ((guint8*)target - code) >> 4;
2216 ia64_br_cond_hint_pred (gen, ia64_ins_qp (ins), disp, 0, 0, 0);
2218 instructions [i] = gen.instructions [0];
2220 else if (opcode == 5) {
/* br.call: same bundle-relative displacement, preserving qp and b1. */
2222 gint64 disp = ((guint8*)target - code) >> 4;
2225 ia64_br_call_hint_pred (gen, ia64_ins_qp (ins), ia64_ins_b1 (ins), disp, 0, 0, 0);
2226 instructions [i] = gen.instructions [0];
2231 case IA64_INS_TYPE_LX:
2235 if ((opcode == 6) && (ia64_ins_vc (ins) == 0)) {
/* movl: the 64-bit immediate spans slots 1 and 2 of the bundle. */
2237 ia64_movl_pred (gen, ia64_ins_qp (ins), ia64_ins_r1 (ins), target);
2238 instructions [1] = gen.instructions [0];
2239 instructions [2] = gen.instructions [1];
/* Write the (possibly modified) slots back into the original bundle. */
2251 ia64_codegen_init (gen, code);
2252 ia64_emit_bundle_template (&gen, template, instructions [0], instructions [1], instructions [2]);
/*
 * mono_arch_patch_code:
 *   Apply every patch in the JI list to the native code of METHOD:
 *   resolve each patch target and rewrite the bundle at the recorded ip
 *   via ia64_patch ().  NOTE(review): the AOT branch body and closing
 *   lines are not visible in this extraction.
 */
2256 mono_arch_patch_code (MonoMethod *method, MonoDomain *domain, guint8 *code, MonoJumpInfo *ji, gboolean run_cctors)
2258 MonoJumpInfo *patch_info;
2260 for (patch_info = ji; patch_info; patch_info = patch_info->next) {
/* ip.i is an offset from the start of the method's native code. */
2261 unsigned char *ip = patch_info->ip.i + code;
2262 const unsigned char *target;
2264 target = mono_resolve_patch_target (method, domain, code, patch_info, run_cctors);
2266 if (mono_compile_aot) {
2270 ia64_patch (ip, (gpointer)target);
/*
 * mono_arch_emit_prolog:
 *   Emit the IA64 method prolog: allocate the native code buffer and the
 *   register stack frame (ia64_alloc), save b0 and sp, allocate the
 *   memory stack frame, compute max_offset for short forward branches,
 *   and store/move incoming arguments to their home locations.
 *   NOTE(review): interior lines are missing from this extraction; the
 *   comments describe only the visible code.
 *
 *   Fix: the argument-spill assertion used `=` (assignment) instead of
 *   `==`, which both made the assert a no-op (always true, since
 *   OP_REGOFFSET is non-zero) and silently overwrote inst->opcode.
 */
2275 mono_arch_emit_prolog (MonoCompile *cfg)
2277 MonoMethod *method = cfg->method;
2279 MonoMethodSignature *sig;
2281 int alloc_size, pos, max_offset, i, quad;
2282 Ia64CodegenState code;
2285 sig = mono_method_signature (method);
2288 cinfo = get_call_info (sig, FALSE);
/* Initial estimate; grown elsewhere if the method needs more. */
2290 cfg->code_size = MAX (((MonoMethodNormal *)method)->header->code_size * 4, 512);
2291 cfg->native_code = g_malloc (cfg->code_size);
2293 ia64_codegen_init (code, cfg->native_code);
/* alloc: set up input/local/output register frame; saves ar.pfs. */
2295 ia64_alloc (code, cfg->arch.reg_saved_ar_pfs, cfg->arch.reg_local0 - cfg->arch.reg_in0, cfg->arch.reg_out0 - cfg->arch.reg_local0, cfg->arch.n_out_regs, 0);
2296 ia64_mov_from_br (code, cfg->arch.reg_saved_b0, IA64_B0);
2298 alloc_size = ALIGN_TO (cfg->stack_offset, MONO_ARCH_FRAME_ALIGNMENT);
2299 if (cfg->param_area)
2300 alloc_size += cfg->param_area;
2304 alloc_size = ALIGN_TO (alloc_size, MONO_ARCH_FRAME_ALIGNMENT);
2308 if (method->save_lmf) {
2314 if (alloc_size || cinfo->stack_usage)
2315 ia64_mov (code, cfg->frame_reg, IA64_SP);
2318 /* See mono_emit_stack_alloc */
2319 #if defined(MONO_ARCH_SIGSEGV_ON_ALTSTACK)
/* Save sp, then drop it by alloc_size to create the memory frame. */
2322 ia64_mov (code, cfg->arch.reg_saved_sp, IA64_SP);
2323 ia64_adds_imm (code, IA64_SP, (-alloc_size), IA64_SP);
2327 /* compute max_offset in order to use short forward jumps */
2329 if (cfg->opt & MONO_OPT_BRANCH) {
2330 for (bb = cfg->bb_entry; bb; bb = bb->next_bb) {
2331 MonoInst *ins = bb->code;
2332 bb->max_offset = max_offset;
2334 if (cfg->prof_options & MONO_PROFILE_COVERAGE)
2336 /* max alignment for loops */
2337 if ((cfg->opt & MONO_OPT_LOOP) && bb_is_loop_start (bb))
2338 max_offset += LOOP_ALIGNMENT;
2341 if (ins->opcode == OP_LABEL)
2342 ins->inst_c1 = max_offset;
/* Conservative per-instruction length estimate from the ins spec table. */
2344 max_offset += ((guint8 *)ins_spec [ins->opcode])[MONO_INST_LEN];
2350 if (sig->ret->type != MONO_TYPE_VOID) {
2351 if ((cinfo->ret.storage == ArgInIReg) && (cfg->ret->opcode != OP_REGVAR)) {
2352 /* Save volatile arguments to the stack */
2357 /* Keep this in sync with emit_load_volatile_arguments */
2358 for (i = 0; i < sig->param_count + sig->hasthis; ++i) {
2359 ArgInfo *ainfo = cinfo->args + i;
2360 gint32 stack_offset;
2362 inst = cfg->varinfo [i];
2364 if (sig->hasthis && (i == 0))
2365 arg_type = &mono_defaults.object_class->byval_arg;
2367 arg_type = sig->params [i - sig->hasthis];
2369 arg_type = mono_type_get_underlying_type (arg_type);
2371 stack_offset = ainfo->offset + ARGS_OFFSET;
2373 /* Save volatile arguments to the stack */
2374 if (inst->opcode != OP_REGVAR) {
2375 switch (ainfo->storage) {
2378 /* FIXME: big offsets */
/* Was `=` (assignment): disabled the assert and clobbered the opcode. */
2379 g_assert (inst->opcode == OP_REGOFFSET);
2380 ia64_adds_imm (code, GP_SCRATCH_REG, inst->inst_offset, inst->inst_basereg);
2381 if (arg_type->byref)
2382 ia64_st8_hint (code, GP_SCRATCH_REG, cfg->arch.reg_in0 + ainfo->reg, 0);
2384 switch (arg_type->type) {
2386 ia64_stfs_hint (code, GP_SCRATCH_REG, ainfo->reg, 0);
2389 ia64_stfd_hint (code, GP_SCRATCH_REG, ainfo->reg, 0);
2392 ia64_st8_hint (code, GP_SCRATCH_REG, cfg->arch.reg_in0 + ainfo->reg, 0);
2404 if (inst->opcode == OP_REGVAR) {
2405 /* Argument allocated to (non-volatile) register */
2406 switch (ainfo->storage) {
2408 if (inst->dreg != cfg->arch.reg_in0 + ainfo->reg)
2409 ia64_mov (code, inst->dreg, cfg->arch.reg_in0 + ainfo->reg);
2417 if (method->save_lmf) {
2421 ia64_codegen_close (code);
2425 if (mono_jit_trace_calls != NULL && mono_trace_eval (method))
2426 code.buf = mono_arch_instrument_prolog (cfg, mono_trace_enter_method, code.buf, TRUE);
2428 cfg->code_len = code.buf - cfg->native_code;
2430 g_assert (cfg->code_len < cfg->code_size);
/*
 * mono_arch_emit_epilog:
 *   Emit the IA64 method epilog: restore sp, ar.pfs and b0 from the
 *   registers saved by the prolog and return.  Grows the code buffer
 *   first if the epilog might not fit.
 *   NOTE(review): interior lines are missing; the save_lmf and
 *   ArgValuetypeInReg branch bodies are not visible.  It is also not
 *   visible whether the cinfo from get_call_info () is freed — verify
 *   against the full file.
 */
2436 mono_arch_emit_epilog (MonoCompile *cfg)
2438 MonoMethod *method = cfg->method;
2439 int quad, pos, i, alloc_size;
2440 int max_epilog_size = 16 * 4;
2441 Ia64CodegenState code;
/* Grow the buffer until the worst-case epilog fits (16-byte slack). */
2445 while (cfg->code_len + max_epilog_size > (cfg->code_size - 16)) {
2446 cfg->code_size *= 2;
2447 cfg->native_code = g_realloc (cfg->native_code, cfg->code_size);
2448 mono_jit_stats.code_reallocs++;
2451 buf = cfg->native_code + cfg->code_len;
2453 if (mono_jit_trace_calls != NULL && mono_trace_eval (method))
2454 buf = mono_arch_instrument_epilog (cfg, mono_trace_leave_method, buf, TRUE);
2456 ia64_codegen_init (code, buf);
2458 /* the code restoring the registers must be kept in sync with CEE_JMP */
2461 if (method->save_lmf) {
2465 /* Load returned vtypes into registers if needed */
2466 cinfo = get_call_info (mono_method_signature (method), FALSE);
2467 if (cinfo->ret.storage == ArgValuetypeInReg) {
/* Undo the prolog: restore sp, then ar.pfs and b0, then return. */
2472 if (cfg->stack_offset)
2473 ia64_mov (code, IA64_SP, cfg->arch.reg_saved_sp);
2475 ia64_mov_to_ar_i (code, IA64_PFS, cfg->arch.reg_saved_ar_pfs);
2476 ia64_mov_ret_to_br (code, IA64_B0, cfg->arch.reg_saved_b0, 0, 0, 0);
2477 ia64_br_ret_reg_hint (code, IA64_B0, 0, 0, 0);
2479 ia64_codegen_close (code);
2481 cfg->code_len = code.buf - cfg->native_code;
2483 g_assert (cfg->code_len < cfg->code_size);
/*
 * mono_arch_emit_exceptions:
 *   Append out-of-line exception-throwing code and R4/R8 constants to
 *   the end of the method's native code, sized by a first pass over the
 *   patch list.  NOTE(review): the EXC size estimate (line 2499) and
 *   the body of the emission loop are not visible in this extraction.
 */
2487 mono_arch_emit_exceptions (MonoCompile *cfg)
2489 MonoJumpInfo *patch_info;
2491 Ia64CodegenState code;
/* Small caches so identical exception throws can share code. */
2492 MonoClass *exc_classes [16];
2493 guint8 *exc_throw_start [16], *exc_throw_end [16];
2494 guint32 code_size = 0;
2496 /* Compute needed space */
2497 for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
2498 if (patch_info->type == MONO_PATCH_INFO_EXC)
2500 if (patch_info->type == MONO_PATCH_INFO_R8)
2501 code_size += 8 + 7; /* sizeof (double) + alignment */
2502 if (patch_info->type == MONO_PATCH_INFO_R4)
2503 code_size += 4 + 7; /* sizeof (float) + alignment */
2506 ia64_codegen_init (code, cfg->native_code + cfg->code_len);
2508 /* add code to raise exceptions */
2510 for (patch_info = cfg->patch_info; patch_info; patch_info = patch_info->next) {
2511 switch (patch_info->type) {
2512 case MONO_PATCH_INFO_EXC: {
2520 ia64_codegen_close (code);
2522 cfg->code_len = code.buf - cfg->native_code;
2524 g_assert (cfg->code_len < cfg->code_size);
2528 mono_arch_instrument_prolog (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments)
2536 mono_arch_instrument_epilog (MonoCompile *cfg, void *func, void *p, gboolean enable_arguments)
/*
 * mono_arch_flush_icache:
 *   Flush the instruction cache for the [code, code+size) range using
 *   the IA64 fc.i instruction, starting from the enclosing 64-byte
 *   aligned address.  NOTE(review): the loop advancing p to end is not
 *   visible in this extraction.
 */
2544 mono_arch_flush_icache (guint8 *code, gint size)
2546 guint8* p = (guint8*)((guint64)code & ~(0x3f));
2547 guint8* end = (guint8*)((guint64)code + size);
2550 __asm__ __volatile__ ("fc.i %0"::"r"(p));
2556 mono_arch_flush_register_windows (void)
2562 mono_arch_is_inst_imm (gint64 imm)
2564 /* The lowering pass will take care of it */
2570 * Determine whether the trap whose info is in SIGINFO is caused by
2574 mono_arch_is_int_overflow (void *sigctx, void *info)
2582 mono_arch_get_patch_offset (guint8 *code)
2590 mono_arch_get_vcall_slot_addr (guint8* code, gpointer *regs)
2598 mono_arch_get_delegate_method_ptr_addr (guint8* code, gpointer *regs)
2605 static gboolean tls_offset_inited = FALSE;
2607 /* code should be simply return <tls var>; */
2609 read_tls_offset_from_method (void* method)
2616 #ifdef MONO_ARCH_SIGSEGV_ON_ALTSTACK
2619 setup_stack (MonoJitTlsData *tls)
/*
 * mono_arch_setup_jit_tls_data:
 *   One-time discovery of the TLS offsets for the LMF, appdomain and
 *   current-thread variables by inspecting the JITted accessor methods
 *   (see read_tls_offset_from_method above), then per-thread setup.
 *   NOTE(review): tls_offset_inited is written without synchronization;
 *   presumably callers serialize this — confirm against the full file.
 */
2627 mono_arch_setup_jit_tls_data (MonoJitTlsData *tls)
2629 if (!tls_offset_inited) {
2630 tls_offset_inited = TRUE;
2634 lmf_tls_offset = read_tls_offset_from_method (mono_get_lmf_addr);
2635 appdomain_tls_offset = read_tls_offset_from_method (mono_domain_get);
2636 thread_tls_offset = read_tls_offset_from_method (mono_thread_current);
2640 #ifdef MONO_ARCH_SIGSEGV_ON_ALTSTACK
/*
 * mono_arch_free_jit_tls_data:
 *   Tear down the per-thread signal stack installed by setup_stack ():
 *   disable the alternate signal stack, then unmap it.
 */
2646 mono_arch_free_jit_tls_data (MonoJitTlsData *tls)
2648 #ifdef MONO_ARCH_SIGSEGV_ON_ALTSTACK
2649 struct sigaltstack sa;
/* SS_DISABLE before munmap so the kernel stops using the stack first. */
2651 sa.ss_sp = tls->signal_stack;
2652 sa.ss_size = SIGNAL_STACK_SIZE;
2653 sa.ss_flags = SS_DISABLE;
2654 sigaltstack (&sa, NULL);
2656 if (tls->signal_stack)
2657 munmap (tls->signal_stack, SIGNAL_STACK_SIZE);
/*
 * mono_arch_emit_this_vret_args:
 *   Add the implicit `this` (and, in the non-visible part, presumably
 *   the vtype-return buffer VT_REG) arguments to the outgoing register
 *   arguments of CALL, starting at the first out register.
 *   NOTE(review): interior lines are missing from this extraction.
 */
2662 mono_arch_emit_this_vret_args (MonoCompile *cfg, MonoCallInst *inst, int this_reg, int this_type, int vt_reg)
2664 MonoCallInst *call = (MonoCallInst*)inst;
2665 int out_reg = cfg->arch.reg_out0;
2671 /* add the this argument */
2672 if (this_reg != -1) {
/* Copy `this` into a fresh vreg and bind that vreg to the out register. */
2674 MONO_INST_NEW (cfg, this, OP_MOVE);
2675 this->type = this_type;
2676 this->sreg1 = this_reg;
2677 this->dreg = mono_regstate_next_int (cfg->rs);
2678 mono_bblock_add_inst (cfg->cbb, this);
2680 mono_call_inst_add_outarg_reg (call, this->dreg, out_reg, FALSE);
/*
 * mono_arch_get_inst_for_method:
 *   Return an architecture-specific intrinsic MonoInst for CMETHOD, or
 *   NULL (via the initial value of ins) when the call should go through
 *   the normal path.  Recognizes System.Math trig/sqrt/abs methods and
 *   System.Threading.Interlocked atomics.
 *   NOTE(review): interior lines are missing from this extraction
 *   (e.g. the #if/#endif presumably guarding Tan/Atan/IEEERemainder,
 *   and the opcode validity checks) — confirm against the full file.
 */
2685 mono_arch_get_inst_for_method (MonoCompile *cfg, MonoMethod *cmethod, MonoMethodSignature *fsig, MonoInst **args)
2687 MonoInst *ins = NULL;
2689 if (cmethod->klass == mono_defaults.math_class) {
2690 if (strcmp (cmethod->name, "Sin") == 0) {
2691 MONO_INST_NEW (cfg, ins, OP_SIN);
2692 ins->inst_i0 = args [0];
2693 } else if (strcmp (cmethod->name, "Cos") == 0) {
2694 MONO_INST_NEW (cfg, ins, OP_COS);
2695 ins->inst_i0 = args [0];
2696 } else if (strcmp (cmethod->name, "Tan") == 0) {
2698 MONO_INST_NEW (cfg, ins, OP_TAN);
2699 ins->inst_i0 = args [0];
2700 } else if (strcmp (cmethod->name, "Atan") == 0) {
2702 MONO_INST_NEW (cfg, ins, OP_ATAN);
2703 ins->inst_i0 = args [0];
2704 } else if (strcmp (cmethod->name, "Sqrt") == 0) {
2705 MONO_INST_NEW (cfg, ins, OP_SQRT);
2706 ins->inst_i0 = args [0];
/* Abs is only intrinsified for doubles; other overloads fall through. */
2707 } else if (strcmp (cmethod->name, "Abs") == 0 && fsig->params [0]->type == MONO_TYPE_R8) {
2708 MONO_INST_NEW (cfg, ins, OP_ABS);
2709 ins->inst_i0 = args [0];
2712 /* OP_FREM is not IEEE compatible */
2713 else if (strcmp (cmethod->name, "IEEERemainder") == 0) {
2714 MONO_INST_NEW (cfg, ins, OP_FREM);
2715 ins->inst_i0 = args [0];
2716 ins->inst_i1 = args [1];
2719 } else if(cmethod->klass->image == mono_defaults.corlib &&
2720 (strcmp (cmethod->klass->name_space, "System.Threading") == 0) &&
2721 (strcmp (cmethod->klass->name, "Interlocked") == 0)) {
/* Increment/Decrement become atomic add of +1/-1 returning the new value. */
2723 if (strcmp (cmethod->name, "Increment") == 0) {
2724 MonoInst *ins_iconst;
2727 if (fsig->params [0]->type == MONO_TYPE_I4)
2728 opcode = OP_ATOMIC_ADD_NEW_I4;
2729 else if (fsig->params [0]->type == MONO_TYPE_I8)
2730 opcode = OP_ATOMIC_ADD_NEW_I8;
2732 g_assert_not_reached ();
2733 MONO_INST_NEW (cfg, ins, opcode);
2734 MONO_INST_NEW (cfg, ins_iconst, OP_ICONST);
2735 ins_iconst->inst_c0 = 1;
2737 ins->inst_i0 = args [0];
2738 ins->inst_i1 = ins_iconst;
2739 } else if (strcmp (cmethod->name, "Decrement") == 0) {
2740 MonoInst *ins_iconst;
2743 if (fsig->params [0]->type == MONO_TYPE_I4)
2744 opcode = OP_ATOMIC_ADD_NEW_I4;
2745 else if (fsig->params [0]->type == MONO_TYPE_I8)
2746 opcode = OP_ATOMIC_ADD_NEW_I8;
2748 g_assert_not_reached ();
2749 MONO_INST_NEW (cfg, ins, opcode);
2750 MONO_INST_NEW (cfg, ins_iconst, OP_ICONST);
2751 ins_iconst->inst_c0 = -1;
2753 ins->inst_i0 = args [0];
2754 ins->inst_i1 = ins_iconst;
2755 } else if (strcmp (cmethod->name, "Add") == 0) {
2758 if (fsig->params [0]->type == MONO_TYPE_I4)
2759 opcode = OP_ATOMIC_ADD_I4;
2760 else if (fsig->params [0]->type == MONO_TYPE_I8)
2761 opcode = OP_ATOMIC_ADD_I8;
2763 g_assert_not_reached ();
2765 MONO_INST_NEW (cfg, ins, opcode);
2767 ins->inst_i0 = args [0];
2768 ins->inst_i1 = args [1];
2769 } else if (strcmp (cmethod->name, "Exchange") == 0) {
2772 if (fsig->params [0]->type == MONO_TYPE_I4)
2773 opcode = OP_ATOMIC_EXCHANGE_I4;
/* Native int and object references are 8 bytes on IA64. */
2774 else if ((fsig->params [0]->type == MONO_TYPE_I8) ||
2775 (fsig->params [0]->type == MONO_TYPE_I) ||
2776 (fsig->params [0]->type == MONO_TYPE_OBJECT))
2777 opcode = OP_ATOMIC_EXCHANGE_I8;
2781 MONO_INST_NEW (cfg, ins, opcode);
2783 ins->inst_i0 = args [0];
2784 ins->inst_i1 = args [1];
2785 } else if (strcmp (cmethod->name, "Read") == 0 && (fsig->params [0]->type == MONO_TYPE_I8)) {
2786 /* 64 bit reads are already atomic */
2787 MONO_INST_NEW (cfg, ins, CEE_LDIND_I8);
2788 ins->inst_i0 = args [0];
2792 * Can't implement CompareExchange methods this way since they have
2801 mono_arch_print_tree (MonoInst *tree, int arity)
/*
 * mono_arch_get_domain_intrinsic:
 *   Return an OP_TLS_GET instruction reading the current appdomain
 *   directly from TLS, or (presumably) NULL when the TLS offset was not
 *   discovered — the early-return body and final return are not visible
 *   in this extraction.
 */
2806 MonoInst* mono_arch_get_domain_intrinsic (MonoCompile* cfg)
2810 if (appdomain_tls_offset == -1)
2813 MONO_INST_NEW (cfg, ins, OP_TLS_GET);
2814 ins->inst_offset = appdomain_tls_offset;
2818 MonoInst* mono_arch_get_thread_intrinsic (MonoCompile* cfg)
2822 if (thread_tls_offset == -1)
2825 MONO_INST_NEW (cfg, ins, OP_TLS_GET);
2826 ins->inst_offset = thread_tls_offset;