};
#undef OPDEF
+/* required alignment (in bytes) of activation frames */
+#define MONO_FRAME_ALIGNMENT 4
+
void print_lmf (void);
#define MBTREE_TYPE MBTree
X86AddressInfo ainfo;
MonoJitFieldInfo fi;
MonoJitBranchInfo bi;
- struct {
- gint32 size;
- gint32 align;
- } size_info;
+ MonoJitCallInfo call_info;
+ MonoJitArgumentInfo arg_info;
} data;
};
x86_call_code (s->code, 0); \
} while (0);
-#define X86_CALL_END \
- if (s->frame_size) { \
- x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, s->frame_size); \
- s->frame_size = 0; \
- }
-
-#define X86_CALL_BEGIN \
- if (tree->left->op != MB_TERM_NOP) { \
- s->frame_size += 4; \
- mono_assert (lreg >= 0); \
- x86_push_reg (s->code, lreg); \
- x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0); \
+#define X86_ARG_PAD(pad) do { \
+ if (pad) { \
+ if (pad == 4) \
+ x86_push_reg (s->code, X86_EAX); \
+ else \
+ x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, pad); \
} \
- if (tree->data.i) { \
- int offset = VARINFO (s, tree->data.i).offset; \
- s->frame_size += 4; \
- x86_lea_membase (s->code, treg, X86_EBP, offset); \
- x86_push_reg (s->code, treg); \
- }
+} while (0)
+
+#define X86_CALL_END do { \
+ int size = tree->data.call_info.frame_size; \
+ if (size) \
+ x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, size); \
+} while (0)
+
+#define X86_CALL_BEGIN do { \
+ int pad = tree->data.call_info.pad; \
+ X86_ARG_PAD (pad); \
+ if (tree->left->op != MB_TERM_NOP) { \
+ mono_assert (lreg >= 0); \
+ x86_push_reg (s->code, lreg); \
+ x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0); \
+ } \
+ if (tree->data.call_info.vtype_num) { \
+ int offset = VARINFO (s, tree->data.call_info.vtype_num).offset; \
+ x86_lea_membase (s->code, treg, X86_EBP, offset); \
+ x86_push_reg (s->code, treg); \
+ } \
+} while (0)
/* we use this macro to move one lreg to another - source and
destination may overlap, but the register allocator has to
%term REMOTE_STIND_I8 REMOTE_STIND_R4 REMOTE_STIND_R8 REMOTE_STIND_OBJ
%term SIN COS SQRT
-%term FUNC1 PROC3 FREE OBJADDR VTADDR
+%term FUNC1 PROC2 PROC3 FREE OBJADDR VTADDR
#
# we start at stmt
# we pass exception in ECX to catch handler
reg: EXCEPTION {
int offset = VARINFO (s, tree->data.i).offset;
- int reg = VARINFO (s, tree->data.i).reg;
if (tree->reg1 != X86_ECX)
x86_mov_reg_reg (s->code, tree->reg1, X86_ECX, 4);
/* store it so that we can RETHROW it later */
- if (reg < 0)
- x86_mov_membase_reg (s->code, X86_EBP, offset, tree->reg1, 4);
- else
- x86_mov_reg_reg (s->code, reg, tree->reg1, 4);
+ x86_mov_membase_reg (s->code, X86_EBP, offset, tree->reg1, 4);
}
stmt: THROW (reg) {
}
stmt: RETHROW {
- int off = VARINFO (s, tree->data.i).offset;
- int reg = VARINFO (s, tree->data.i).reg;
+ int offset = VARINFO (s, tree->data.i).offset;
gpointer target;
- if (reg < 0)
- x86_push_membase (s->code, X86_EBP, off);
- else
- x86_push_reg (s->code, reg);
+ x86_push_membase (s->code, X86_EBP, offset);
target = arch_get_throw_exception ();
mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, target);
x86_push_imm (s->code, tree->data.p);
mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_array_new_wrapper);
x86_call_code (s->code, 0);
- x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer) + 4);
+ x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
x86_pop_reg (s->code, X86_EDX);
x86_pop_reg (s->code, X86_ECX);
x86_push_imm (s->code, tree->data.p);
mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_array_new_specific);
x86_call_code (s->code, 0);
- x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer) + 4);
+ x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
x86_pop_reg (s->code, X86_EDX);
x86_pop_reg (s->code, X86_ECX);
x86_push_imm (s->code, tree->data.klass);
mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_object_new_wrapper);
x86_call_code (s->code, 0);
- x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer));
+ x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
x86_pop_reg (s->code, X86_EDX);
x86_pop_reg (s->code, X86_ECX);
x86_push_imm (s->code, tree->data.p);
mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_object_new_specific);
x86_call_code (s->code, 0);
- x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer));
+ x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
x86_pop_reg (s->code, X86_EDX);
x86_pop_reg (s->code, X86_ECX);
}
stmt: FREE (reg) {
- x86_push_reg (s->code, X86_EAX);
- x86_push_reg (s->code, X86_ECX);
- x86_push_reg (s->code, X86_EDX);
-
x86_push_reg (s->code, tree->left->reg1);
-
mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, g_free);
x86_call_code (s->code, 0);
x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
-
- x86_pop_reg (s->code, X86_EDX);
- x86_pop_reg (s->code, X86_ECX);
- x86_pop_reg (s->code, X86_EAX);
}
-stmt: PROC3 (reg, CPSRC (reg, reg)) {
- int dest_reg = tree->left->reg1;
- int source_reg = tree->right->left->reg1;
- int size_reg = tree->right->right->reg1;
-
- x86_push_reg (s->code, X86_EAX);
- x86_push_reg (s->code, X86_ECX);
- x86_push_reg (s->code, X86_EDX);
-
- x86_push_reg (s->code, size_reg);
- x86_push_reg (s->code, source_reg);
- x86_push_reg (s->code, dest_reg);
-
- switch (tree->data.i) {
- case MONO_MARSHAL_CONV_STR_BYVALSTR:
- mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_string_to_byvalstr);
- break;
- case MONO_MARSHAL_CONV_STR_BYVALWSTR:
- mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_string_to_byvalwstr);
- break;
- default:
- g_assert_not_reached ();
- }
+stmt: PROC2 (reg, reg) {
+ x86_push_reg (s->code, tree->right->reg1);
+ x86_push_reg (s->code, tree->left->reg1);
+ mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->data.p);
+ x86_call_code (s->code, 0);
+ x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
+}
+stmt: PROC3 (reg, CPSRC (reg, reg)) {
+ x86_push_reg (s->code, tree->right->right->reg1);
+ x86_push_reg (s->code, tree->right->left->reg1);
+ x86_push_reg (s->code, tree->left->reg1);
+ mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->data.p);
x86_call_code (s->code, 0);
x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
-
- x86_pop_reg (s->code, X86_EDX);
- x86_pop_reg (s->code, X86_ECX);
- x86_pop_reg (s->code, X86_EAX);
}
reg: FUNC1 (reg) {
reg: LOCALLOC (reg) {
int offset = 0;
- /* size must be aligned to 4 bytes */
- x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, 3);
- x86_alu_reg_imm (s->code, X86_AND, tree->left->reg1, ~3);
+ /* size must be aligned to MONO_FRAME_ALIGNMENT bytes */
+ x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, MONO_FRAME_ALIGNMENT - 1);
+ x86_alu_reg_imm (s->code, X86_AND, tree->left->reg1, ~(MONO_FRAME_ALIGNMENT - 1));
/* allocate space on stack */
x86_alu_reg_reg (s->code, X86_SUB, X86_ESP, tree->left->reg1);
stmt: INITOBJ (reg) {
int i, j;
- i = tree->data.i;
+ if (!(i = tree->data.i))
+ return;
if (i == 1 || i == 2 || i == 4) {
x86_mov_membase_imm (s->code, tree->left->reg1, 0, 0, i);
}
}
-
stmt: ARG_I4 (LDIND_I4 (addr)) {
MBTree *at = tree->left->left;
+ int pad = tree->data.arg_info.pad;
- s->frame_size += 4;
+ X86_ARG_PAD (pad);
switch (at->data.ainfo.amode) {
stmt: ARG_I4 (LDIND_I4 (ADDR_L)) {
int treg = VARINFO (s, tree->left->left->data.i).reg;
- s->frame_size += 4;
+ int pad = tree->data.arg_info.pad;
+
+ X86_ARG_PAD (pad);
x86_push_reg (s->code, treg);
} cost {
MBCOND ((VARINFO (data, tree->left->left->data.i).reg >= 0));
}
stmt: ARG_I4 (reg) {
- s->frame_size += 4;
+ int pad = tree->data.arg_info.pad;
+
+ X86_ARG_PAD (pad);
x86_push_reg (s->code, tree->left->reg1);
- PRINT_REG ("ARG_I4", tree->left->reg1);
}
stmt: ARG_I4 (ADDR_G) {
- s->frame_size += 4;
+ int pad = tree->data.arg_info.pad;
+
+ X86_ARG_PAD (pad);
x86_push_imm (s->code, tree->left->data.p);
}
stmt: ARG_I4 (CONST_I4) "MB_USE_OPT1(0)" {
- s->frame_size += 4;
+ int pad = tree->data.arg_info.pad;
+
+ X86_ARG_PAD (pad);
x86_push_imm (s->code, tree->left->data.i);
}
if (lreg == treg || rreg == treg)
mono_assert_not_reached ();
- X86_CALL_BEGIN
+ X86_CALL_BEGIN;
x86_call_reg (s->code, rreg);
- X86_CALL_END
+ X86_CALL_END;
mono_assert (tree->reg1 == X86_EAX);
}
if (lreg == treg)
treg = X86_EDX;
- X86_CALL_BEGIN
+ X86_CALL_BEGIN;
mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
x86_call_code (s->code, 0);
- X86_CALL_END
+ X86_CALL_END;
mono_assert (tree->reg1 == X86_EAX);
}
if (lreg == treg)
treg = X86_EDX;
- X86_CALL_BEGIN
+ X86_CALL_BEGIN;
x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
x86_mov_reg_membase (s->code, lreg, lreg,
x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
- X86_CALL_END
+ X86_CALL_END;
mono_assert (tree->reg1 == X86_EAX);
}
if (lreg == treg)
treg = X86_EDX;
- X86_CALL_BEGIN
+ X86_CALL_BEGIN;
x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
x86_call_virtual (s->code, lreg,
G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
- X86_CALL_END
+ X86_CALL_END;
mono_assert (tree->reg1 == X86_EAX);
}
if (lreg == treg)
treg = X86_EDX;
- X86_CALL_BEGIN
+ X86_CALL_BEGIN;
mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
x86_call_code (s->code, 0);
- X86_CALL_END
+ X86_CALL_END;
}
stmt: CALL_VOID (this, reg) {
if (lreg == treg || rreg == treg)
mono_assert_not_reached ();
- X86_CALL_BEGIN
+ X86_CALL_BEGIN;
x86_call_reg (s->code, tree->right->reg1);
- X86_CALL_END
+ X86_CALL_END;
}
stmt: CALL_VOID (this, INTF_ADDR) {
if (lreg == treg)
treg = X86_EDX;
- X86_CALL_BEGIN
+ X86_CALL_BEGIN;
x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
x86_mov_reg_membase (s->code, lreg, lreg,
x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
- X86_CALL_END
+ X86_CALL_END;
}
stmt: CALL_VOID (this, VFUNC_ADDR) {
if (lreg == treg)
treg = X86_EDX;
- X86_CALL_BEGIN
+ X86_CALL_BEGIN;
x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
x86_call_virtual (s->code, lreg,
G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
- X86_CALL_END
+ X86_CALL_END;
}
stmt: SWITCH (reg) {
if (lreg == treg || rreg == treg)
mono_assert_not_reached ();
- X86_CALL_BEGIN
+ X86_CALL_BEGIN;
x86_call_reg (s->code, rreg);
- X86_CALL_END
+ X86_CALL_END;
mono_assert (tree->reg1 == X86_EAX);
mono_assert (tree->reg2 == X86_EDX);
if (lreg == treg)
treg = X86_EDX;
- X86_CALL_BEGIN
+ X86_CALL_BEGIN;
mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
x86_call_code (s->code, 0);
- X86_CALL_END
+ X86_CALL_END;
mono_assert (tree->reg1 == X86_EAX);
mono_assert (tree->reg2 == X86_EDX);
if (lreg == treg)
treg = X86_EDX;
- X86_CALL_BEGIN
+ X86_CALL_BEGIN;
x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
x86_call_virtual (s->code, lreg,
G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
- X86_CALL_END
+ X86_CALL_END;
mono_assert (tree->reg1 == X86_EAX);
mono_assert (tree->reg2 == X86_EDX);
if (lreg == treg)
treg = X86_EDX;
- X86_CALL_BEGIN
+ X86_CALL_BEGIN;
x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
x86_mov_reg_membase (s->code, lreg, lreg,
x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
- X86_CALL_END
+ X86_CALL_END;
mono_assert (tree->reg1 == X86_EAX);
mono_assert (tree->reg2 == X86_EDX);
stmt: ARG_I8 (lreg) {
- s->frame_size += 8;
+ int pad = tree->data.arg_info.pad;
+
+ X86_ARG_PAD (pad);
x86_push_reg (s->code, tree->left->reg2);
x86_push_reg (s->code, tree->left->reg1);
}
}
stmt: ARG_R4 (freg) {
- s->frame_size += 4;
- x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
+ int pad = tree->data.arg_info.pad;
+
+ x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4 + pad);
x86_fst_membase (s->code, X86_ESP, 0, FALSE, TRUE);
}
stmt: ARG_R8 (freg) {
- s->frame_size += 8;
- x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
+ int pad = tree->data.arg_info.pad;
+
+ x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8 + pad);
x86_fst_membase (s->code, X86_ESP, 0, TRUE, TRUE);
}
if (lreg == treg || rreg == treg)
mono_assert_not_reached ();
- X86_CALL_BEGIN
+ X86_CALL_BEGIN;
x86_call_reg (s->code, rreg);
- X86_CALL_END
+ X86_CALL_END;
}
freg: CALL_R8 (this, ADDR_G) {
if (lreg == treg)
treg = X86_EDX;
- X86_CALL_BEGIN
+ X86_CALL_BEGIN;
mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
x86_call_code (s->code, 0);
- X86_CALL_END
+ X86_CALL_END;
}
freg: CALL_R8 (this, INTF_ADDR) {
if (lreg == treg)
treg = X86_EDX;
- X86_CALL_BEGIN
+ X86_CALL_BEGIN;
x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
x86_mov_reg_membase (s->code, lreg, lreg,
x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
- X86_CALL_END
+ X86_CALL_END;
}
freg: CALL_R8 (this, VFUNC_ADDR) {
if (lreg == treg)
treg = X86_EDX;
- X86_CALL_BEGIN
+ X86_CALL_BEGIN;
x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
x86_call_virtual (s->code, lreg,
G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
- X86_CALL_END
+ X86_CALL_END;
}
stmt: RET (freg) {
}
stmt: ARG_OBJ (CONST_I4) {
- s->frame_size += 4;
+ int pad = tree->data.arg_info.pad;
+
+ X86_ARG_PAD (pad);
x86_push_imm (s->code, tree->left->data.i);
}
stmt: ARG_OBJ (reg) {
- int pad;
- int size = tree->data.size_info.size;
- int align = tree->data.size_info.align;
+ int size = tree->data.arg_info.size;
+ int pad = tree->data.arg_info.pad;
int sa;
- if (!size)
+ if (!size)
return;
- g_assert ((align & 3) == 0);
-
- pad = (align - ((s->frame_size + size) & (align -1))) & (align - 1);
-
sa = size + pad;
- s->frame_size += sa;
-
- g_assert ((s->frame_size & (align - 1)) == 0);
-
/* reserve space for the argument */
x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, sa);
x86_push_reg (s->code, tree->left->reg1);
x86_push_membase (s->code, X86_EBP, 8);
-
mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, MEMCOPY);
x86_call_code (s->code, 0);