/*
 * x86.brg: X86 code generator
 *
 * Author:
 *   Dietmar Maurer (dietmar@ximian.com)
 *
 * (C) 2001 Ximian, Inc.
 */

#include
#include
#include
#include

#ifndef PLATFORM_WIN32
#include
#include
#endif

#include
#include
#include
#include
#include
#include
#include

#include "regset.h"
#include "jit.h"

void print_lmf (void);

#define MBTREE_TYPE   MBTree
#define MBCGEN_TYPE   MonoFlowGraph
#define MBCOST_DATA   MonoFlowGraph
#define MBALLOC_STATE mono_mempool_alloc (data->mp, sizeof (MBState))

typedef enum {
        AMImmediate = 0,       // ptr
        AMBase = 1,            // V[REG]
        AMIndex = 2,           // V[REG*X]
        AMBaseIndex = 3,       // V[REG*X][REG]
} X86AddMode;

typedef struct {
        int offset;
        X86AddMode amode:2;
        unsigned int shift:2;
        gint8 basereg;
        gint8 indexreg;
} X86AddressInfo;

struct _MBTree {
        guint16 op;
        unsigned last_instr:1;
        MBTree *left, *right;
        gpointer state;
        gpointer emit;

        gint32 addr;
        gint32 cli_addr;

        guint8 exclude_mask;

        gint8 reg1;
        gint8 reg2;
        gint8 reg3;

        MonoValueType svt;

        union {
                gint32 i;
                gint64 l;
                gpointer p;
                MonoBBlock *bb;
                MonoMethod *m;
                MonoClass *klass;
                MonoClassField *field;
                X86AddressInfo ainfo;
                MonoJitCallInfo ci;
                MonoJitFieldInfo fi;
        } data;
};

gint64   mono_llmult        (gint64 a, gint64 b);
guint64  mono_llmult_ovf    (gpointer *exc, guint32 al, gint32 ah, guint32 bl, gint32 bh);
guint64  mono_llmult_ovf_un (gpointer *exc, guint32 al, guint32 ah, guint32 bl, guint32 bh);
gint64   mono_lldiv         (gint64 a, gint64 b);
gint64   mono_llrem         (gint64 a, gint64 b);
guint64  mono_lldiv_un      (guint64 a, guint64 b);
guint64  mono_llrem_un      (guint64 a, guint64 b);
gpointer mono_ldsflda       (MonoClass *klass, int offset);

gpointer arch_get_lmf_addr (void);

MonoArray*  mono_array_new_wrapper  (MonoClass *eclass, guint32 n);
MonoObject* mono_object_new_wrapper (MonoClass *klass);
MonoString* mono_ldstr_wrapper      (MonoImage *image, guint32 ind);

gpointer get_mono_object_isinst (void);

#define MB_OPT_LEVEL 1

#if MB_OPT_LEVEL == 0
#define MB_USE_OPT1(c) 65535
#define MB_USE_OPT2(c) 65535
#endif
#if MB_OPT_LEVEL == 1
#define MB_USE_OPT1(c) c
#define MB_USE_OPT2(c) 65535
#endif
#if MB_OPT_LEVEL >= 2
#define MB_USE_OPT1(c) c
#define MB_USE_OPT2(c) c
#endif

//#define DEBUG

#define REAL_PRINT_REG(text,reg) \
        mono_assert (reg >= 0); \
        x86_push_reg (s->code, X86_EAX); \
        x86_push_reg (s->code, X86_EDX); \
        x86_push_reg (s->code, X86_ECX); \
        x86_push_reg (s->code, reg); \
        x86_push_imm (s->code, reg); \
        x86_push_imm (s->code, text " %d %p\n"); \
        x86_mov_reg_imm (s->code, X86_EAX, printf); \
        x86_call_reg (s->code, X86_EAX); \
        x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 3*4); \
        x86_pop_reg (s->code, X86_ECX); \
        x86_pop_reg (s->code, X86_EDX); \
        x86_pop_reg (s->code, X86_EAX);

#ifdef DEBUG
#define MEMCOPY debug_memcpy
void *MEMCOPY (void *dest, const void *src, size_t n);
#define PRINT_REG(text,reg) REAL_PRINT_REG(text,reg)
#else
#define MEMCOPY memcpy
#define PRINT_REG(x,y)
#endif

/* The call instruction for virtual functions must have a known
 * size (used by x86_magic_trampoline)
 */
#define x86_call_virtual(inst,basereg,disp) \
        do { \
                *(inst)++ = (unsigned char)0xff; \
                x86_address_byte ((inst), 2, 2, (basereg)); \
                x86_imm_emit32 ((inst), (disp)); \
        } while (0)

/* emit an exception if the condition fails */
#define EMIT_COND_SYSTEM_EXCEPTION(cond,signed,exc_name) \
        do { \
                gpointer t; \
                x86_branch8 (s->code, cond, 10, signed); \
                x86_push_imm (s->code, exc_name); \
                t = arch_get_throw_exception_by_name (); \
                mono_add_jump_info (s, s->code + 1, \
                                    MONO_JUMP_INFO_ABS, t); \
                x86_call_code (s->code, 0); \
        } while (0);

%%

#
# terminal definitions
#

# constants
%term CONST_I4 CONST_I8
%term CONST_R4 CONST_R8
%term LDIND_I1 LDIND_U1 LDIND_I2 LDIND_U2 LDIND_I4 LDIND_REF LDIND_I8 LDIND_R4 LDIND_R8
%term LDIND_U4 LDIND_OBJ
%term STIND_I1 STIND_I2 STIND_I4 STIND_REF STIND_I8 STIND_R4 STIND_R8 STIND_OBJ
%term ADDR_L ADDR_G ARG_I4 ARG_I8 ARG_R4 ARG_R8 ARG_OBJ ARG_STRING CALL_I4 CALL_I8 CALL_R8 CALL_VOID
%term BREAK SWITCH BR RET_VOID RET RET_OBJ ENDFINALLY
%term ADD ADD_OVF ADD_OVF_UN SUB SUB_OVF SUB_OVF_UN MUL MUL_OVF MUL_OVF_UN
%term DIV DIV_UN REM REM_UN AND OR XOR SHL SHR SHR_UN NEG NOT
%term BLT BLT_UN BEQ BNE_UN BRTRUE BRFALSE BGE BGE_UN BLE BLE_UN BGT BGT_UN
%term CEQ CLT CLT_UN CGT CGT_UN
%term CONV_I4 CONV_I1 CONV_I2 CONV_I8 CONV_U8 CONV_R4 CONV_R8 CONV_R_UN
%term INTF_ADDR VFUNC_ADDR NOP NEWARR NEWARR_SPEC NEWOBJ NEWOBJ_SPEC NEWSTRUCT CPOBJ POP INITOBJ
%term ISINST CASTCLASS UNBOX
%term CONV_OVF_I1 CONV_OVF_U1 CONV_OVF_I2 CONV_OVF_U2 CONV_OVF_U4 CONV_OVF_U8 CONV_OVF_I4
%term CONV_OVF_I4_UN CONV_OVF_U1_UN CONV_OVF_U2_UN
%term CONV_OVF_I2_UN CONV_OVF_I8_UN CONV_OVF_I1_UN
%term EXCEPTION THROW RETHROW HANDLER CHECKTHIS
%term LDLEN LDELEMA LDFTN LDVIRTFTN LDSTR LDSFLDA
%term REMOTE_LDFLDA REMOTE_STIND_I1 REMOTE_STIND_I2 REMOTE_STIND_I4 REMOTE_STIND_REF
%term REMOTE_STIND_I8 REMOTE_STIND_R4 REMOTE_STIND_R8 REMOTE_STIND_OBJ

#
# we start at stmt
#
%start stmt

#
# tree definitions
#

# integer constant folding
coni4: AND (coni4, coni4) {
        tree->data.i = tree->left->data.i & tree->right->data.i;
}

coni4: OR (coni4, coni4) {
        tree->data.i = tree->left->data.i | tree->right->data.i;
}

coni4: XOR (coni4, coni4) {
        tree->data.i = tree->left->data.i ^ tree->right->data.i;
}

coni4: SHL (coni4, coni4) {
        tree->data.i = tree->left->data.i << tree->right->data.i;
}

coni4: SHR (coni4, coni4) {
        tree->data.i = tree->left->data.i >> tree->right->data.i;
}

coni4: NOT (coni4) {
        tree->data.i = ~tree->left->data.i;
}

coni4: ADD (coni4, coni4) {
        tree->data.i = tree->left->data.i + tree->right->data.i;
}

coni4: SUB (coni4, coni4) {
        tree->data.i = tree->left->data.i - tree->right->data.i;
}

coni4: MUL (coni4, coni4) {
        tree->data.i = tree->left->data.i * tree->right->data.i;
}

coni4: DIV (coni4, coni4) {
        tree->data.i = tree->left->data.i / tree->right->data.i;
} cost {
        MBCOND (tree->right->data.i)
        return 0;
}

coni4: REM (coni4, coni4) {
        tree->data.i = tree->left->data.i % tree->right->data.i;
} cost {
        MBCOND (tree->right->data.i)
        return 0;
}

coni4: CEQ (coni4, coni4) {
        if (tree->left->data.i == tree->right->data.i)
                tree->data.i = 1;
        else
                tree->data.i = 0;
}

coni4: CGT (coni4, coni4) {
        if (tree->left->data.i > tree->right->data.i)
                tree->data.i = 1;
        else
                tree->data.i = 0;
}

coni4: CLT (coni4, coni4) {
        if (tree->left->data.i < tree->right->data.i)
                tree->data.i = 1;
        else
                tree->data.i = 0;
}

coni4: CONST_I4 {
        /* do nothing */
}

#
# x86 addressing mode
#

acon: coni4 {
        tree->data.ainfo.offset = tree->data.i;
        tree->data.ainfo.amode = AMImmediate;
}

acon: ADDR_G {
        tree->data.ainfo.offset = tree->data.i;
        tree->data.ainfo.amode = AMImmediate;
}

acon: ADD (ADDR_G, coni4) {
        tree->data.ainfo.offset = (unsigned)tree->left->data.p + tree->right->data.i;
        tree->data.ainfo.amode = AMImmediate;
}

base: acon

base: reg {
        tree->data.ainfo.offset = 0;
        tree->data.ainfo.basereg = tree->reg1;
        tree->data.ainfo.amode = AMBase;
}

base: ADD (reg, coni4) {
        tree->data.ainfo.offset = tree->right->data.i;
        tree->data.ainfo.basereg = tree->left->reg1;
        tree->data.ainfo.amode = AMBase;
}

base: ADDR_L {
        tree->data.ainfo.offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.i).offset;
        tree->data.ainfo.basereg = X86_EBP;
        tree->data.ainfo.amode
= AMBase; } index: reg { tree->data.ainfo.offset = 0; tree->data.ainfo.indexreg = tree->reg1; tree->data.ainfo.shift = 0; tree->data.ainfo.amode = AMIndex; } index: SHL (reg, coni4) { tree->data.ainfo.offset = 0; tree->data.ainfo.amode = AMIndex; tree->data.ainfo.indexreg = tree->left->reg1; tree->data.ainfo.shift = tree->right->data.i; } cost { MBCOND (tree->right->data.i == 0 || tree->right->data.i == 1 || tree->right->data.i == 2 || tree->right->data.i == 3); return 0; } index: MUL (reg, coni4) { static int fast_log2 [] = { 1, 0, 1, -1, 2, -1, -1, -1, 3 }; tree->data.ainfo.offset = 0; tree->data.ainfo.amode = AMIndex; tree->data.ainfo.indexreg = tree->left->reg1; tree->data.ainfo.shift = fast_log2 [tree->right->data.i]; } cost { MBCOND (tree->right->data.i == 1 || tree->right->data.i == 2 || tree->right->data.i == 4 || tree->right->data.i == 8); return 0; } addr: base addr: index addr: ADD (index, base) { tree->data.ainfo.offset = tree->right->data.ainfo.offset; tree->data.ainfo.basereg = tree->right->data.ainfo.basereg; tree->data.ainfo.amode = tree->left->data.ainfo.amode | tree->right->data.ainfo.amode; tree->data.ainfo.shift = tree->left->data.ainfo.shift; tree->data.ainfo.indexreg = tree->left->data.ainfo.indexreg; } # we pass exception in ECX to catch handler reg: EXCEPTION { int offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.i).offset; if (tree->reg1 != X86_ECX) x86_mov_reg_reg (s->code, tree->reg1, X86_ECX, 4); /* store it so that we can RETHROW it later */ x86_mov_membase_reg (s->code, X86_EBP, offset, tree->reg1, 4); } stmt: THROW (reg) { gpointer target; x86_push_reg (s->code, tree->left->reg1); target = arch_get_throw_exception (); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, target); x86_call_code (s->code, target); } stmt: RETHROW { int off = g_array_index (s->varinfo, MonoVarInfo, tree->data.i).offset; gpointer target; x86_push_membase (s->code, X86_EBP, off); target = arch_get_throw_exception (); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, target); x86_call_code (s->code, target); } stmt: HANDLER { mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_BB, tree->data.bb); x86_call_imm (s->code, 0); } stmt: ENDFINALLY { x86_ret (s->code); } stmt: STIND_I4 (addr, coni4) { switch (tree->left->data.ainfo.amode) { case AMImmediate: x86_mov_mem_imm (s->code, tree->left->data.ainfo.offset, tree->right->data.i, 4); break; case AMBase: x86_mov_membase_imm (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->right->data.i, 4); break; case AMIndex: x86_mov_memindex_imm (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, tree->right->data.i, 4); break; case AMBaseIndex: x86_mov_memindex_imm (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, tree->right->data.i, 4); break; } } stmt: STIND_I4 (addr, reg) { PRINT_REG ("STIND_I4", tree->right->reg1); switch (tree->left->data.ainfo.amode) { case AMImmediate: x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 4); break; case AMBase: x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->right->reg1, 4); break; case AMIndex: x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, tree->right->reg1, 4); break; case AMBaseIndex: x86_mov_memindex_reg (s->code, 
tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, tree->right->reg1, 4); break; } } stmt: REMOTE_STIND_I4 (reg, reg) { guint8 *br[2]; int treg = X86_EAX; int lreg = tree->left->reg1; int rreg = tree->right->reg1; int offset; if (lreg == treg) treg = X86_EDX; if (rreg == treg) treg = X86_ECX; x86_mov_reg_membase (s->code, treg, lreg, 0, 4); x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class)); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); /* this is a transparent proxy - remote the call */ /* save value to stack */ x86_push_reg (s->code, rreg); x86_push_reg (s->code, X86_ESP); x86_push_imm (s->code, tree->data.fi.field); x86_push_imm (s->code, tree->data.fi.klass); x86_push_reg (s->code, lreg); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_store_remote_field); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20); br [1] = s->code; x86_jump8 (s->code, 0); x86_patch (br [0], s->code); offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) : tree->data.fi.field->offset; x86_mov_membase_reg (s->code, lreg, offset, rreg, 4); x86_patch (br [1], s->code); } stmt: STIND_REF (addr, reg) { PRINT_REG ("STIND_REF", tree->right->reg1); switch (tree->left->data.ainfo.amode) { case AMImmediate: x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 4); break; case AMBase: x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->right->reg1, 4); break; case AMIndex: x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, tree->right->reg1, 4); break; case AMBaseIndex: x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, tree->right->reg1, 4); break; } } stmt: REMOTE_STIND_REF (reg, reg) { guint8 *br[2]; int treg = X86_EAX; int lreg = tree->left->reg1; int rreg = tree->right->reg1; int offset; if (lreg == treg) treg = X86_EDX; if (rreg == treg) treg = X86_ECX; x86_mov_reg_membase (s->code, treg, lreg, 0, 4); x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class)); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); /* this is a transparent proxy - remote the call */ /* save value to stack */ x86_push_reg (s->code, rreg); x86_push_reg (s->code, X86_ESP); x86_push_imm (s->code, tree->data.fi.field); x86_push_imm (s->code, tree->data.fi.klass); x86_push_reg (s->code, lreg); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_store_remote_field); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20); br [1] = s->code; x86_jump8 (s->code, 0); x86_patch (br [0], s->code); offset = tree->data.fi.klass->valuetype ? 
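        /* note: field->offset is recorded for the boxed object layout, so for a
           valuetype the MonoObject header size is subtracted to obtain the
           offset into the unboxed data */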
tree->data.fi.field->offset - sizeof (MonoObject) : tree->data.fi.field->offset; x86_mov_membase_reg (s->code, lreg, offset, rreg, 4); x86_patch (br [1], s->code); } stmt: STIND_I1 (addr, reg) { PRINT_REG ("STIND_I1", tree->right->reg1); switch (tree->left->data.ainfo.amode) { case AMImmediate: x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 1); break; case AMBase: x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->right->reg1, 1); break; case AMIndex: x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, tree->right->reg1, 1); break; case AMBaseIndex: x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, tree->right->reg1, 1); break; } } stmt: REMOTE_STIND_I1 (reg, reg) { guint8 *br[2]; int treg = X86_EAX; int lreg = tree->left->reg1; int rreg = tree->right->reg1; int offset; if (lreg == treg) treg = X86_EDX; if (rreg == treg) treg = X86_ECX; x86_mov_reg_membase (s->code, treg, lreg, 0, 4); x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class)); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); /* this is a transparent proxy - remote the call */ /* save value to stack */ x86_push_reg (s->code, rreg); x86_push_reg (s->code, X86_ESP); x86_push_imm (s->code, tree->data.fi.field); x86_push_imm (s->code, tree->data.fi.klass); x86_push_reg (s->code, lreg); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_store_remote_field); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20); br [1] = s->code; x86_jump8 (s->code, 0); x86_patch (br [0], s->code); offset = tree->data.fi.klass->valuetype ? 
tree->data.fi.field->offset - sizeof (MonoObject) : tree->data.fi.field->offset; x86_mov_membase_reg (s->code, lreg, offset, rreg, 1); x86_patch (br [1], s->code); } stmt: STIND_I2 (addr, reg) { PRINT_REG ("STIND_I2", tree->right->reg1); switch (tree->left->data.ainfo.amode) { case AMImmediate: x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 2); break; case AMBase: x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->right->reg1, 2); break; case AMIndex: x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, tree->right->reg1, 2); break; case AMBaseIndex: x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, tree->right->reg1, 2); break; } } stmt: REMOTE_STIND_I2 (reg, reg) { guint8 *br[2]; int treg = X86_EAX; int lreg = tree->left->reg1; int rreg = tree->right->reg1; int offset; if (lreg == treg) treg = X86_EDX; if (rreg == treg) treg = X86_ECX; x86_mov_reg_membase (s->code, treg, lreg, 0, 4); x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class)); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); /* this is a transparent proxy - remote the call */ /* save value to stack */ x86_push_reg (s->code, rreg); x86_push_reg (s->code, X86_ESP); x86_push_imm (s->code, tree->data.fi.field); x86_push_imm (s->code, tree->data.fi.klass); x86_push_reg (s->code, lreg); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_store_remote_field); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20); br [1] = s->code; x86_jump8 (s->code, 0); x86_patch (br [0], s->code); offset = tree->data.fi.klass->valuetype ? 
tree->data.fi.field->offset - sizeof (MonoObject) : tree->data.fi.field->offset; x86_mov_membase_reg (s->code, lreg, offset, rreg, 2); x86_patch (br [1], s->code); } reg: LDIND_I4 (addr) { switch (tree->left->data.ainfo.amode) { case AMImmediate: x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4); break; case AMBase: x86_mov_reg_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, 4); break; case AMIndex: x86_mov_reg_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, 4); break; case AMBaseIndex: x86_mov_reg_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, 4); break; } PRINT_REG ("LDIND_I4", tree->reg1); } reg: LDIND_REF (addr) { switch (tree->left->data.ainfo.amode) { case AMImmediate: x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4); break; case AMBase: x86_mov_reg_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, 4); break; case AMIndex: x86_mov_reg_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, 4); break; case AMBaseIndex: x86_mov_reg_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, 4); break; } PRINT_REG ("LDIND_REF", tree->reg1); } reg: LDIND_I1 (addr) { switch (tree->left->data.ainfo.amode) { case AMImmediate: x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, TRUE, FALSE); break; case AMBase: x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, TRUE, FALSE); break; case AMIndex: x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, TRUE, FALSE); break; case AMBaseIndex: x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, TRUE, FALSE); break; } PRINT_REG ("LDIND_I1", tree->reg1); } reg: LDIND_U1 (addr) { switch (tree->left->data.ainfo.amode) { case AMImmediate: x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, FALSE, FALSE); break; case AMBase: x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, FALSE, FALSE); break; case AMIndex: x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, FALSE, FALSE); break; case AMBaseIndex: x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, FALSE, FALSE); break; } PRINT_REG ("LDIND_U1", tree->reg1); } reg: LDIND_I2 (addr) { switch (tree->left->data.ainfo.amode) { case AMImmediate: x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, TRUE, TRUE); break; case AMBase: x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, TRUE, TRUE); break; case AMIndex: x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, TRUE, TRUE); break; case 
AMBaseIndex: x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, TRUE, TRUE); break; } PRINT_REG ("LDIND_U2", tree->reg1); } reg: LDIND_U2 (addr) { switch (tree->left->data.ainfo.amode) { case AMImmediate: x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, FALSE, TRUE); break; case AMBase: x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, FALSE, TRUE); break; case AMIndex: x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, FALSE, TRUE); break; case AMBaseIndex: x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, FALSE, TRUE); break; } PRINT_REG ("LDIND_U2", tree->reg1); } reg: LDIND_U4 (addr) { switch (tree->left->data.ainfo.amode) { case AMImmediate: x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4); break; case AMBase: x86_mov_reg_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, 4); break; case AMIndex: x86_mov_reg_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, 4); break; case AMBaseIndex: x86_mov_reg_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, 4); break; } PRINT_REG ("LDIND_U4", tree->reg1); } reg: REMOTE_LDFLDA (reg) { guint8 *br[2]; int treg = X86_EAX; int lreg = tree->left->reg1; if (lreg == X86_EAX) treg = X86_EDX; if (tree->reg1 != treg) x86_push_reg (s->code, treg); x86_mov_reg_membase (s->code, treg, lreg, 0, 4); x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class)); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); /* this is a transparent proxy - remote the call */ if (treg != X86_EAX) x86_push_reg (s->code, X86_EAX); if (treg != X86_EDX) x86_push_reg (s->code, X86_EDX); x86_push_reg (s->code, X86_ECX); x86_push_reg (s->code, X86_ESP); x86_push_imm (s->code, tree->data.fi.field); x86_push_imm (s->code, tree->data.fi.klass); x86_push_reg (s->code, lreg); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_load_remote_field); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16); if (treg != X86_EAX) x86_mov_reg_reg (s->code, treg, X86_EAX, 4); x86_pop_reg (s->code, X86_ECX); if (treg != X86_EDX) x86_pop_reg (s->code, X86_EDX); if (treg != X86_EAX) x86_pop_reg (s->code, X86_EAX); x86_mov_reg_reg (s->code, tree->reg1, treg, 4); br [1] = s->code; x86_jump8 (s->code, 0); x86_patch (br [0], s->code); if (tree->data.fi.klass->valuetype) x86_lea_membase (s->code, tree->reg1, lreg, tree->data.fi.field->offset - sizeof (MonoObject)); else x86_lea_membase (s->code, tree->reg1, lreg, tree->data.fi.field->offset); x86_patch (br [1], s->code); if (tree->reg1 != treg) x86_pop_reg (s->code, treg); } reg: ADDR_L 5 { int offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.i).offset; x86_lea_membase (s->code, tree->reg1, X86_EBP, offset); PRINT_REG ("ADDR_L", tree->reg1); } reg: ADDR_G 5 { x86_mov_reg_imm (s->code, tree->reg1, tree->data.p); } reg: CONV_I1 (reg) { x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, FALSE); } reg: CONV_I2 
(reg) { x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, TRUE); } # warning: this chain rule requires a register reg: coni4 1 { x86_mov_reg_imm (s->code, tree->reg1, tree->data.i); } reg: CONV_I4 (reg) { if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); PRINT_REG ("CONV_I4", tree->left->reg1); } reg: CONV_OVF_I4 (reg) { if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); PRINT_REG ("CONV_OVF_I4", tree->left->reg1); } reg: CONV_OVF_U4 (reg) { /* Keep in sync with CONV_OVF_I4_UN below, they are the same on 32-bit machines */ x86_test_reg_imm (s->code, tree->left->reg1, 0x8000000); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException"); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: CONV_OVF_I4_UN (reg) { /* Keep in sync with CONV_OVF_U4 above, they are the same on 32-bit machines */ x86_test_reg_imm (s->code, tree->left->reg1, 0x8000000); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException"); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: CONV_OVF_I1 (reg) { /* probe value to be within -128 to 127 */ x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 127); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, TRUE, "OverflowException"); x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, -128); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GT, TRUE, "OverflowException"); x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, FALSE); } reg: CONV_OVF_I1_UN (reg) { /* probe values between 0 to 128 */ x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff80); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException"); x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE); } reg: CONV_OVF_U1 (reg) { /* Keep in sync with CONV_OVF_U1_UN routine below, they are the same on 32-bit machines */ /* probe value to be within 0 to 255 */ x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff00); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException"); x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE); } reg: CONV_OVF_U1_UN (reg) { /* Keep in sync with CONV_OVF_U1 routine above, they are the same on 32-bit machines */ /* probe value to be within 0 to 255 */ x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff00); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException"); x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE); } reg: CONV_OVF_I2 (reg) { /* Probe value to be within -32768 and 32767 */ x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 32767); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, TRUE, "OverflowException"); x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, -32768); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GE, TRUE, "OverflowException"); x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, TRUE); } reg: CONV_OVF_U2 (reg) { /* Keep in sync with CONV_OVF_U2_UN below, they are the same on 32-bit machines */ /* Probe value to be within 0 and 65535 */ x86_test_reg_imm (s->code, tree->left->reg1, 0xffff0000); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException"); x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE); } reg: CONV_OVF_U2_UN (reg) { /* Keep in sync with CONV_OVF_U2 above, they are the same on 32-bit machines */ /* Probe value to be within 0 and 65535 */ x86_test_reg_imm (s->code, tree->left->reg1, 0xffff0000); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, 
"OverflowException"); x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE); } reg: CONV_OVF_I2_UN (reg) { /* Convert uint value into short, value within 0 and 32767 */ x86_test_reg_imm (s->code, tree->left->reg1, 0xffff8000); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException"); x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE); } reg: MUL (reg, reg) { x86_imul_reg_reg (s->code, tree->left->reg1, tree->right->reg1); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: MUL_OVF (reg, reg) { x86_imul_reg_reg (s->code, tree->left->reg1, tree->right->reg1); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException"); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: MUL_OVF_UN (reg, reg) { mono_assert (tree->right->reg1 != X86_EAX); if (tree->left->reg1 != X86_EAX) x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4); x86_mul_reg (s->code, tree->right->reg1, FALSE); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException"); mono_assert (tree->reg1 == X86_EAX && tree->reg2 == X86_EDX); } reg: DIV (reg, reg) { mono_assert (tree->right->reg1 != X86_EAX); if (tree->left->reg1 != X86_EAX) x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4); x86_cdq (s->code); x86_div_reg (s->code, tree->right->reg1, TRUE); mono_assert (tree->reg1 == X86_EAX && tree->reg2 == X86_EDX); } reg: DIV_UN (reg, reg) { mono_assert (tree->right->reg1 != X86_EAX); if (tree->left->reg1 != X86_EAX) x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4); x86_mov_reg_imm (s->code, X86_EDX, 0); x86_div_reg (s->code, tree->right->reg1, FALSE); mono_assert (tree->reg1 == X86_EAX && tree->reg2 == X86_EDX); } reg: REM (reg, reg) { mono_assert (tree->right->reg1 != X86_EAX); mono_assert (tree->right->reg1 != X86_EDX); if (tree->left->reg1 != X86_EAX) x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4); /* sign extend to 64bit in EAX/EDX */ x86_cdq (s->code); x86_div_reg (s->code, tree->right->reg1, TRUE); x86_mov_reg_reg (s->code, X86_EAX, X86_EDX, 4); mono_assert (tree->reg1 == X86_EAX && tree->reg2 == X86_EDX); } reg: REM_UN (reg, reg) { mono_assert (tree->right->reg1 != X86_EAX); mono_assert (tree->right->reg1 != X86_EDX); if (tree->left->reg1 != X86_EAX) x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4); /* zero extend to 64bit in EAX/EDX */ x86_mov_reg_imm (s->code, X86_EDX, 0); x86_div_reg (s->code, tree->right->reg1, FALSE); x86_mov_reg_reg (s->code, X86_EAX, X86_EDX, 4); mono_assert (tree->reg1 == X86_EAX && tree->reg2 == X86_EDX); } reg: ADD (reg, coni4) "MB_USE_OPT1(0)" { if (tree->right->data.i == 1) x86_inc_reg (s->code, tree->left->reg1); else x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, tree->right->data.i); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: ADD (reg, reg) { x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: ADD_OVF (reg, reg) { x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException"); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: ADD_OVF_UN (reg, reg) { x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException"); if (tree->reg1 != 
tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: SUB (reg, coni4) "MB_USE_OPT1(0)" { if (tree->right->data.i == 1) x86_dec_reg (s->code, tree->left->reg1); else x86_alu_reg_imm (s->code, X86_SUB, tree->left->reg1, tree->right->data.i); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: SUB (reg, reg) { x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: SUB_OVF (reg, reg) { x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException"); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: SUB_OVF_UN (reg, reg) { x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException"); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: CEQ (reg, coni4) "MB_USE_OPT1(0)" { x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i); x86_set_reg (s->code, X86_CC_EQ, tree->reg1, TRUE); x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE); } reg: CEQ (reg, reg) { x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); x86_set_reg (s->code, X86_CC_EQ, tree->reg1, TRUE); x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE); } reg: CGT (reg, reg) { x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); x86_set_reg (s->code, X86_CC_GT, tree->reg1, TRUE); x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE); } reg: CGT_UN (reg, reg) { x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); x86_set_reg (s->code, X86_CC_GT, tree->reg1, FALSE); x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE); } reg: CLT (reg, reg) { x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); x86_set_reg (s->code, X86_CC_LT, tree->reg1, TRUE); x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE); } reg: CLT_UN (reg, reg) { x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); x86_set_reg (s->code, X86_CC_LT, tree->reg1, FALSE); x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE); } reg: AND (reg, reg) { x86_alu_reg_reg (s->code, X86_AND, tree->left->reg1, tree->right->reg1); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: OR (reg, reg) { x86_alu_reg_reg (s->code, X86_OR, tree->left->reg1, tree->right->reg1); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: XOR (reg, reg) { x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->right->reg1); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: NEG (reg) { x86_neg_reg (s->code, tree->left->reg1); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: NOT (reg) { x86_not_reg (s->code, tree->left->reg1); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: SHL (reg, coni4) { x86_shift_reg_imm (s->code, X86_SHL, tree->left->reg1, tree->right->data.i); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: SHL (reg, reg) { if (tree->right->reg1 != X86_ECX) 
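        /* variable shift counts must live in CL on x86, so the count is
           moved into ECX before the shift is emitted */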
x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4); x86_shift_reg (s->code, X86_SHL, tree->left->reg1); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); mono_assert (tree->reg1 != X86_ECX && tree->left->reg1 != X86_ECX); } reg: SHR (reg, coni4) { x86_shift_reg_imm (s->code, X86_SAR, tree->left->reg1, tree->right->data.i); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: SHR (reg, reg) { if (tree->right->reg1 != X86_ECX) x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4); x86_shift_reg (s->code, X86_SAR, tree->left->reg1); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); mono_assert (tree->reg1 != X86_ECX && tree->left->reg1 != X86_ECX); } reg: SHR_UN (reg, coni4) { x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, tree->right->data.i); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: SHR_UN (reg, reg) { if (tree->right->reg1 != X86_ECX) x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4); x86_shift_reg (s->code, X86_SHR, tree->left->reg1); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); mono_assert (tree->reg1 != X86_ECX && tree->left->reg1 != X86_ECX); } reg: LDSFLDA (coni4) { if (tree->reg1 != X86_EAX) x86_push_reg (s->code, X86_EAX); x86_push_reg (s->code, X86_ECX); x86_push_reg (s->code, X86_EDX); x86_push_imm (s->code, tree->left->data.i); x86_push_imm (s->code, tree->data.klass); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_ldsflda); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8); x86_pop_reg (s->code, X86_EDX); x86_pop_reg (s->code, X86_ECX); if (tree->reg1 != X86_EAX) { x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4); x86_pop_reg (s->code, X86_EAX); } } # array support reg: LDLEN (reg) { x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NE, TRUE, "NullReferenceException"); x86_mov_reg_membase (s->code, tree->reg1, tree->left->reg1, G_STRUCT_OFFSET (MonoArray, max_length), 4); } reg: LDELEMA (reg, reg) { x86_alu_reg_membase (s->code, X86_CMP, tree->right->reg1, tree->left->reg1, G_STRUCT_OFFSET (MonoArray, max_length)); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LT, FALSE, "IndexOutOfRangeException"); if (tree->data.i == 1 || tree->data.i == 2 || tree->data.i == 4 || tree->data.i == 8) { static int fast_log2 [] = { 1, 0, 1, -1, 2, -1, -1, -1, 3 }; x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, G_STRUCT_OFFSET (MonoArray, vector), tree->right->reg1, fast_log2 [tree->data.i]); } else { x86_imul_reg_reg_imm (s->code, tree->right->reg1, tree->right->reg1, tree->data.i); x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->right->reg1); x86_alu_reg_imm (s->code, X86_ADD, tree->reg1, G_STRUCT_OFFSET (MonoArray, vector)); } } reg: LDSTR { if (tree->reg1 != X86_EAX) x86_push_reg (s->code, X86_EAX); x86_push_reg (s->code, X86_ECX); x86_push_reg (s->code, X86_EDX); x86_push_imm (s->code, tree->data.p); x86_push_imm (s->code, s->method->klass->image); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_ldstr_wrapper); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8); x86_pop_reg (s->code, X86_EDX); x86_pop_reg (s->code, X86_ECX); if (tree->reg1 != X86_EAX) { x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4); x86_pop_reg (s->code, X86_EAX); } PRINT_REG ("LDSTR", tree->reg1); } reg: NEWARR (reg) { 
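        /* EAX, ECX and EDX are caller-saved: they are preserved around the call
           to the array allocation helper below, and the result (returned in EAX)
           is then copied into the destination register if needed */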
if (tree->reg1 != X86_EAX) x86_push_reg (s->code, X86_EAX); x86_push_reg (s->code, X86_ECX); x86_push_reg (s->code, X86_EDX); x86_push_reg (s->code, tree->left->reg1); x86_push_imm (s->code, tree->data.p); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_array_new_wrapper); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer) + 4); x86_pop_reg (s->code, X86_EDX); x86_pop_reg (s->code, X86_ECX); if (tree->reg1 != X86_EAX) { x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4); x86_pop_reg (s->code, X86_EAX); } PRINT_REG ("NEWARR", tree->reg1); } reg: NEWARR_SPEC (reg) { if (tree->reg1 != X86_EAX) x86_push_reg (s->code, X86_EAX); x86_push_reg (s->code, X86_ECX); x86_push_reg (s->code, X86_EDX); x86_push_reg (s->code, tree->left->reg1); x86_push_imm (s->code, tree->data.p); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_array_new_specific); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer) + 4); x86_pop_reg (s->code, X86_EDX); x86_pop_reg (s->code, X86_ECX); if (tree->reg1 != X86_EAX) { x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4); x86_pop_reg (s->code, X86_EAX); } PRINT_REG ("NEWARR_SPEC", tree->reg1); } reg: NEWOBJ { if (tree->reg1 != X86_EAX) x86_push_reg (s->code, X86_EAX); x86_push_reg (s->code, X86_ECX); x86_push_reg (s->code, X86_EDX); x86_push_imm (s->code, tree->data.klass); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_object_new_wrapper); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer)); x86_pop_reg (s->code, X86_EDX); x86_pop_reg (s->code, X86_ECX); if (tree->reg1 != X86_EAX) { x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4); x86_pop_reg (s->code, X86_EAX); } PRINT_REG ("NEWOBJ", tree->reg1); } reg: NEWOBJ_SPEC { if (tree->reg1 != X86_EAX) x86_push_reg (s->code, X86_EAX); x86_push_reg (s->code, X86_ECX); x86_push_reg (s->code, X86_EDX); x86_push_imm (s->code, tree->data.p); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_object_new_specific); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer)); x86_pop_reg (s->code, X86_EDX); x86_pop_reg (s->code, X86_ECX); if (tree->reg1 != X86_EAX) { x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4); x86_pop_reg (s->code, X86_EAX); } PRINT_REG ("NEWOBJ_SPEC", tree->reg1); } reg: NEWSTRUCT { int size = tree->data.i; int sa; mono_assert (size > 0); sa = size + 3; sa &= ~3; x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, sa); x86_mov_reg_reg (s->code, tree->reg1, X86_ESP, 4); } reg: UNBOX (reg) { if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); x86_test_reg_reg (s->code, tree->reg1, tree->reg1); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NE, TRUE, "NullReferenceException"); x86_push_reg (s->code, tree->reg1); x86_mov_reg_membase (s->code, tree->reg1, tree->reg1, 0, 4); x86_mov_reg_membase (s->code, tree->reg1, tree->reg1, 0, 4); x86_alu_membase_imm (s->code, X86_CMP, tree->reg1, G_STRUCT_OFFSET (MonoClass, element_class), ((int)(tree->data.klass->element_class))); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "InvalidCastException"); x86_pop_reg (s->code, tree->reg1); x86_alu_reg_imm (s->code, X86_ADD, tree->reg1, sizeof (MonoObject)); } reg: CASTCLASS (reg) { MonoClass *klass = tree->data.klass; guint8 *br [2]; int lreg = tree->left->reg1; x86_push_reg (s->code, lreg); x86_test_reg_reg (s->code, lreg, lreg); br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE); if (klass->flags & 
TYPE_ATTRIBUTE_INTERFACE) { /* lreg = obj->vtable */ x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoVTable, max_interface_id), klass->interface_id); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GE, FALSE, "InvalidCastException"); /* lreg = obj->vtable->interface_offsets */ x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4); x86_alu_membase_imm (s->code, X86_CMP, lreg, klass->interface_id << 2, 0); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NE, FALSE, "InvalidCastException"); } else { /* lreg = obj->vtable */ x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); /* lreg = obj->vtable->klass */ x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); if (klass->rank) { x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoClass, rank), klass->rank); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "InvalidCastException"); x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, element_class), 4); x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4); x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->element_class->baseval); x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->element_class->diffval); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, FALSE, "InvalidCastException"); } else { if (klass->marshalbyref) { /* check for transparent_proxy */ x86_alu_reg_imm (s->code, X86_CMP, lreg, (int)mono_defaults.transparent_proxy_class); br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); /* lreg = obj */ x86_mov_reg_membase (s->code, lreg, X86_ESP, 0, 4); x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoTransparentProxy, klass), 4); x86_patch (br [1], s->code); } x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4); x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->baseval); x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->diffval); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, FALSE, "InvalidCastException"); } } x86_patch (br [0], s->code); x86_pop_reg (s->code, tree->reg1); } reg: ISINST (reg) { MonoClass *klass = tree->data.klass; guint8 *br [3]; int lreg = tree->left->reg1; x86_push_reg (s->code, lreg); x86_test_reg_reg (s->code, lreg, lreg); br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE); if (klass->flags & TYPE_ATTRIBUTE_INTERFACE) { /* lreg = obj->vtable */ x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoVTable, max_interface_id), klass->interface_id); br [1] = s->code; x86_branch8 (s->code, X86_CC_LT, 0, FALSE); /* lreg = obj->vtable->interface_offsets */ x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4); x86_alu_membase_imm (s->code, X86_CMP, lreg, klass->interface_id << 2, 0); br [2] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); x86_patch (br [1], s->code); x86_mov_membase_imm (s->code, X86_ESP, 0, 0, 4); x86_patch (br [2], s->code); } else { /* lreg = obj->vtable */ x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); /* lreg = obj->vtable->klass */ x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); if (klass->rank) { x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoClass, rank), klass->rank); br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, element_class), 4); x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4); x86_alu_reg_mem (s->code, X86_SUB, lreg, 
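        /* subtype test via relative class numbering: the object matches iff
           (obj_baseval - klass_baseval) <= klass_diffval as an unsigned compare */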
&klass->element_class->baseval); x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->element_class->diffval); br [2] = s->code; x86_branch8 (s->code, X86_CC_LE, 0, FALSE); x86_patch (br [1], s->code); x86_mov_membase_imm (s->code, X86_ESP, 0, 0, 4); x86_patch (br [2], s->code); } else { if (klass->marshalbyref) { /* check for transparent_proxy */ x86_alu_reg_imm (s->code, X86_CMP, lreg, (int)mono_defaults.transparent_proxy_class); br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); /* lreg = obj */ x86_mov_reg_membase (s->code, lreg, X86_ESP, 0, 4); x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoTransparentProxy, klass), 4); x86_patch (br [1], s->code); } x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4); x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->baseval); x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->diffval); br [2] = s->code; x86_branch8 (s->code, X86_CC_LE, 0, FALSE); x86_mov_membase_imm (s->code, X86_ESP, 0, 0, 4); x86_patch (br [2], s->code); } } x86_patch (br [0], s->code); x86_pop_reg (s->code, tree->reg1); } stmt: INITOBJ (reg) { int i, j; i = tree->data.i; if (i == 1 || i == 2 || i == 4) { x86_mov_membase_imm (s->code, tree->left->reg1, 0, 0, i); return; } i = tree->data.i / 4; j = tree->data.i % 4; x86_push_reg (s->code, X86_EAX); if (tree->left->reg1 != X86_EDI) { x86_push_reg (s->code, X86_EDI); x86_mov_reg_reg (s->code, X86_EDI, tree->left->reg1, 4); } if (i) { x86_push_reg (s->code, X86_ECX); x86_alu_reg_reg (s->code, X86_XOR, X86_EAX, X86_EAX); x86_mov_reg_imm (s->code, X86_ECX, i); x86_cld (s->code); x86_prefix (s->code, X86_REP_PREFIX); x86_stosl (s->code); x86_pop_reg (s->code, X86_ECX); for (i = 0; i < j; i++) x86_stosb (s->code); } else { g_assert (j == 3); x86_mov_membase_imm (s->code, X86_EDI, 0, 0, 2); x86_mov_membase_imm (s->code, X86_EDI, 2, 0, 1); } if (tree->left->reg1 != X86_EDI) x86_pop_reg (s->code, X86_EDI); x86_pop_reg (s->code, X86_EAX); } stmt: NOP stmt: POP (reg) stmt: BR { mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_BB, tree->data.bb); x86_jump32 (s->code, 0); } stmt: BLT (reg, reg) 1 { x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_LT, 0, TRUE); } stmt: BLT (reg, coni4) "MB_USE_OPT1(0)" { x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_LT, 0, TRUE); } stmt: BLT_UN (reg, reg) 1 { x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_LT, 0, FALSE); } stmt: BLT_UN (reg, coni4) "MB_USE_OPT1(0)" { x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_LT, 0, FALSE); } stmt: BGT (reg, reg) 1 { x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_GT, 0, TRUE); } stmt: BGT (reg, coni4) "MB_USE_OPT1(0)" { x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_GT, 0, TRUE); } stmt: BGT_UN (reg, reg) 1 { x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, 
tree->right->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_GT, 0, FALSE); } stmt: BGT_UN (reg, coni4) "MB_USE_OPT1(0)" { x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_GT, 0, FALSE); } stmt: BEQ (reg, coni4) "MB_USE_OPT1(0)" { x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_EQ, 0, TRUE); } stmt: BEQ (reg, reg) 1 { x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_EQ, 0, TRUE); } stmt: BNE_UN (reg, reg) 1 { x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_NE, 0, FALSE); } stmt: BNE_UN (reg, coni4) "MB_USE_OPT1(0)" { x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_NE, 0, FALSE); } stmt: BGE (reg, reg) 1 { x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_GE, 0, TRUE); } stmt: BGE (reg, coni4) "MB_USE_OPT1(0)" { x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_GE, 0, TRUE); } stmt: BGE_UN (reg, reg) 1 { x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_GE, 0, FALSE); } stmt: BGE_UN (reg, coni4) "MB_USE_OPT1(0)" { x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_GE, 0, FALSE); } stmt: BLE (reg, reg) 1 { x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_LE, 0, TRUE); } stmt: BLE (reg, coni4) "MB_USE_OPT1(0)" { x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_LE, 0, TRUE); } stmt: BLE_UN (reg, reg) 1 { x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_LE, 0, FALSE); } stmt: BLE_UN (reg, coni4) "MB_USE_OPT1(0)" { x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_LE, 0, FALSE); } stmt: BRTRUE (reg) { x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_NE, 0, TRUE); } stmt: BRFALSE (reg) { x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_EQ, 0, TRUE); } stmt: BREAK { x86_breakpoint (s->code); } stmt: RET 
(reg) { if (tree->left->reg1 != X86_EAX) x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4); if (!tree->last_instr) { mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_EPILOG, NULL); x86_jump32 (s->code, 0); } } stmt: RET_VOID { if (!tree->last_instr) { mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_EPILOG, NULL); x86_jump32 (s->code, 0); } } stmt: ARG_I4 (LDIND_I4 (addr)) { MBTree *at = tree->left->left; switch (at->data.ainfo.amode) { case AMImmediate: x86_push_mem (s->code, at->data.ainfo.offset); break; case AMBase: x86_push_membase (s->code, at->data.ainfo.basereg, at->data.ainfo.offset); break; case AMIndex: x86_push_memindex (s->code, X86_NOBASEREG, at->data.ainfo.offset, at->data.ainfo.indexreg, at->data.ainfo.shift); break; case AMBaseIndex: x86_push_memindex (s->code, at->data.ainfo.basereg, at->data.ainfo.offset, at->data.ainfo.indexreg, at->data.ainfo.shift); break; } } stmt: ARG_I4 (LDIND_U4 (addr)) { MBTree *at = tree->left->left; switch (at->data.ainfo.amode) { case AMImmediate: x86_push_mem (s->code, at->data.ainfo.offset); break; case AMBase: x86_push_membase (s->code, at->data.ainfo.basereg, at->data.ainfo.offset); break; case AMIndex: x86_push_memindex (s->code, X86_NOBASEREG, at->data.ainfo.offset, at->data.ainfo.indexreg, at->data.ainfo.shift); break; case AMBaseIndex: x86_push_memindex (s->code, at->data.ainfo.basereg, at->data.ainfo.offset, at->data.ainfo.indexreg, at->data.ainfo.shift); break; } } stmt: ARG_I4 (reg) { x86_push_reg (s->code, tree->left->reg1); PRINT_REG ("ARG_I4", tree->left->reg1); } # fixme: we must free the allocated strings somewhere stmt: ARG_STRING (reg) { x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4); x86_push_reg (s->code, X86_EAX); x86_push_reg (s->code, X86_ECX); x86_push_reg (s->code, X86_EDX); x86_push_reg (s->code, tree->left->reg1); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_string_to_utf8); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4); x86_mov_membase_reg (s->code, X86_ESP, 12, X86_EAX, 4); x86_pop_reg (s->code, X86_EDX); x86_pop_reg (s->code, X86_ECX); x86_pop_reg (s->code, X86_EAX); } stmt: ARG_I4 (ADDR_G) { x86_push_imm (s->code, tree->left->data.p); } stmt: ARG_I4 (coni4) "MB_USE_OPT1(0)" { x86_push_imm (s->code, tree->left->data.i); } this: reg { PRINT_REG ("THIS", tree->reg1); } reg: CHECKTHIS (reg) { /* try to access the vtable - this will raise an exception * if the object is NULL */ x86_alu_membase_imm (s->code, X86_CMP, tree->left->reg1, 0, 0); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } stmt: CHECKTHIS (reg) { x86_alu_membase_imm (s->code, X86_CMP, tree->left->reg1, 0, 0); } this: NOP reg: CALL_I4 (this, reg) { int treg = X86_EAX; int lreg = tree->left->reg1; int rreg = tree->right->reg1; if (lreg == treg || rreg == treg) treg = X86_EDX; if (lreg == treg || rreg == treg) treg = X86_ECX; if (lreg == treg || rreg == treg) mono_assert_not_reached (); if (tree->left->op != MB_TERM_NOP) { mono_assert (lreg >= 0); x86_push_reg (s->code, lreg); } if (tree->data.ci.vtype_num) { int offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.ci.vtype_num).offset; x86_lea_membase (s->code, treg, X86_EBP, offset); x86_push_reg (s->code, treg); } x86_call_reg (s->code, rreg); if (tree->data.ci.args_size) x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size); PRINT_REG ("CALL_I4", tree->reg1); mono_assert (tree->reg1 == X86_EAX); } reg: CALL_I4 (this, ADDR_G) { int lreg = tree->left->reg1; int treg = X86_EAX; if 
(lreg == treg) treg = X86_EDX; if (tree->left->op != MB_TERM_NOP) { mono_assert (lreg >= 0); x86_push_reg (s->code, lreg); x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0); } if (tree->data.ci.vtype_num) { int offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.ci.vtype_num).offset; x86_lea_membase (s->code, treg, X86_EBP, offset); x86_push_reg (s->code, treg); } mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, tree->right->data.p); x86_call_code (s->code, 0); if (tree->data.ci.args_size) x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size); PRINT_REG ("CALL_I4", tree->reg1); mono_assert (tree->reg1 == X86_EAX); } reg: LDVIRTFTN (reg, INTF_ADDR) { int lreg = tree->left->reg1; x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4); x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4); x86_mov_reg_membase (s->code, tree->reg1, lreg, tree->right->data.m->slot << 2, 4); } reg: CALL_I4 (this, INTF_ADDR) { int lreg = tree->left->reg1; int treg = X86_EAX; if (lreg == treg) treg = X86_EDX; if (tree->left->op != MB_TERM_NOP) { mono_assert (lreg >= 0); x86_push_reg (s->code, lreg); } if (tree->data.ci.vtype_num) { int offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.ci.vtype_num).offset; x86_lea_membase (s->code, treg, X86_EBP, offset); x86_push_reg (s->code, treg); } x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4); x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4); x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2); if (tree->data.ci.args_size) x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size); PRINT_REG ("CALL_I4(INTERFACE)", tree->reg1); mono_assert (tree->reg1 == X86_EAX); } reg: LDVIRTFTN (reg, VFUNC_ADDR) { int lreg = tree->left->reg1; x86_mov_reg_membase (s->code, tree->reg1, lreg, 0, 4); x86_mov_reg_membase (s->code, tree->reg1, tree->reg1, G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2), 4); } reg: LDFTN { if (tree->reg1 != X86_EAX) x86_push_reg (s->code, X86_EAX); x86_push_reg (s->code, X86_ECX); x86_push_reg (s->code, X86_EDX); x86_push_imm (s->code, tree->data.m); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, arch_compile_method); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer)); x86_pop_reg (s->code, X86_EDX); x86_pop_reg (s->code, X86_ECX); if (tree->reg1 != X86_EAX) { x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4); x86_pop_reg (s->code, X86_EAX); } PRINT_REG ("LDFTN", tree->reg1); } reg: CALL_I4 (this, VFUNC_ADDR) { int lreg = tree->left->reg1; int treg = X86_EAX; if (lreg == treg) treg = X86_EDX; if (tree->left->op != MB_TERM_NOP) { mono_assert (lreg >= 0); x86_push_reg (s->code, lreg); } if (tree->data.ci.vtype_num) { int offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.ci.vtype_num).offset; x86_lea_membase (s->code, treg, X86_EBP, offset); x86_push_reg (s->code, treg); } x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); x86_call_virtual (s->code, lreg, G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2)); if (tree->data.ci.args_size) x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size); PRINT_REG ("CALL_I4(VIRTUAL)", tree->reg1); mono_assert (tree->reg1 == X86_EAX); } stmt: CALL_VOID (this, ADDR_G) { int lreg = 
tree->left->reg1; int treg = X86_EAX; if (lreg == treg) treg = X86_EDX; if (tree->left->op != MB_TERM_NOP) { mono_assert (lreg >= 0); x86_push_reg (s->code, lreg); x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0); } if (tree->data.ci.vtype_num) { int offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.ci.vtype_num).offset; x86_lea_membase (s->code, treg, X86_EBP, offset); x86_push_reg (s->code, treg); } mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, tree->right->data.p); x86_call_code (s->code, 0); if (tree->data.ci.args_size) x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size); } stmt: CALL_VOID (this, INTF_ADDR) { int lreg = tree->left->reg1; int treg = X86_EAX; if (lreg == treg) treg = X86_EDX; if (tree->left->op != MB_TERM_NOP) { mono_assert (lreg >= 0); x86_push_reg (s->code, lreg); } if (tree->data.ci.vtype_num) { int offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.ci.vtype_num).offset; x86_lea_membase (s->code, treg, X86_EBP, offset); x86_push_reg (s->code, treg); } x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4); x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4); x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2); if (tree->data.ci.args_size) x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size); } stmt: CALL_VOID (this, VFUNC_ADDR) { int lreg = tree->left->reg1; int treg = X86_EAX; if (lreg == treg) treg = X86_EDX; if (tree->left->op != MB_TERM_NOP) { mono_assert (lreg >= 0); x86_push_reg (s->code, lreg); } if (tree->data.ci.vtype_num) { int offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.ci.vtype_num).offset; x86_lea_membase (s->code, treg, X86_EBP, offset); x86_push_reg (s->code, treg); } x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); x86_call_virtual (s->code, lreg, G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2)); if (tree->data.ci.args_size) x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size); } stmt: SWITCH (reg) { guint32 offset; guint32 *jt = (guint32 *)tree->data.p; x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, jt [0]); offset = 6 + (guint32)s->code; x86_branch32 (s->code, X86_CC_GE, jt [jt [0] + 1] - offset, FALSE); x86_mov_reg_memindex (s->code, X86_EAX, X86_NOBASEREG, tree->data.i + 4, tree->left->reg1, 2, 4); x86_jump_reg (s->code, X86_EAX); } # # 64 bit integers # reg: CONV_I1 (lreg) { x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, FALSE); } reg: CONV_I2 (lreg) { x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, TRUE); } reg: CONV_I4 (lreg) { if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: CONV_OVF_I4 (lreg){ guint8 *start = s->code; guchar* o1, *o2, *o3, *o4, *o5; int i; /* * Valid ints: 0xffffffff:8000000 to 00000000:0x7f000000 */ for (i = 0; i < 2; i++) { s->code = start; x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1); /* If the low word top bit is set, see if we are negative */ x86_branch8 (s->code, X86_CC_LT, o3 - o1, TRUE); o1 = s->code; /* We are not negative (no top bit set, check for our top word to be zero */ x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2); x86_branch8 (s->code, X86_CC_EQ, o4 - o2, TRUE); o2 = s->code; /* throw exception */ x86_push_imm (s->code, "OverflowException"); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, arch_get_throw_exception_by_name ()); 
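/* the jump info registered above points one byte into the call that follows,
	 * i.e. at the 32 bit displacement after the 0xe8 opcode; x86_call_code
	 * emits only a placeholder displacement of 0, which is fixed up later to
	 * reach the exception-throwing helper */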
x86_call_code (s->code, 0); o3 = s->code; /* our top bit is set, check that top word is 0xfffffff */ x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg2, 0xffffffff); o4 = s->code; /* nope, emit exception */ x86_branch8 (s->code, X86_CC_NE, o2 - o5, TRUE); o5 = s->code; if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } } reg: CONV_OVF_I4 (lreg){ guint8 *br [3], *label [1]; /* * Valid ints: 0xffffffff:8000000 to 00000000:0x7f000000 */ x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1); /* If the low word top bit is set, see if we are negative */ br [0] = s->code; x86_branch8 (s->code, X86_CC_LT, 0, TRUE); /* We are not negative (no top bit set, check for our top word to be zero */ x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2); br [1] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, TRUE); label [0] = s->code; /* throw exception */ x86_push_imm (s->code, "OverflowException"); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, arch_get_throw_exception_by_name ()); x86_call_code (s->code, 0); x86_patch (br [0], s->code); /* our top bit is set, check that top word is 0xfffffff */ x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg2, 0xffffffff); x86_patch (br [1], s->code); /* nope, emit exception */ br [2] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE); x86_patch (br [2], label [0]); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: CONV_OVF_U4 (lreg) { /* Keep in sync with CONV_OVF_I4_UN below, they are the same on 32-bit machines */ /* top word must be 0 */ x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException"); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } reg: CONV_OVF_I4_UN (lreg) { /* Keep in sync with CONV_OVF_U4 above, they are the same on 32-bit machines */ /* top word must be 0 */ x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException"); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } stmt: POP (lreg) lreg: CONST_I8 1 { x86_mov_reg_imm (s->code, tree->reg1, *((gint32 *)&tree->data.p)); x86_mov_reg_imm (s->code, tree->reg2, *((gint32 *)&tree->data.p + 1)); } lreg: CONV_I8 (coni4) { x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i); if (tree->left->data.i >= 0) x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2); else x86_mov_reg_imm (s->code, tree->reg2, -1); } lreg: CONV_I8 (reg) { guint8 *i1; if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2); x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 0); x86_branch8 (s->code, X86_CC_GE, 5, TRUE); i1 = s->code; x86_mov_reg_imm (s->code, tree->reg2, -1); mono_assert ((s->code - i1) == 5); } lreg: CONV_U8 (coni4) 1 { x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i); x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2); } lreg: CONV_OVF_U8 (coni4) { if (tree->left->data.i < 0){ x86_push_imm (s->code, "OverflowException"); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, arch_get_throw_exception_by_name ()); x86_call_code (s->code, 0); } else { x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i); x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2); } } lreg: CONV_OVF_I8_UN (coni4) { x86_mov_reg_imm (s->code, 
tree->reg1, tree->left->data.i); x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2); } lreg: CONV_OVF_U8 (reg) { x86_test_reg_imm (s->code, tree->left->reg1, 0x8000000); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException"); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2); } lreg: CONV_OVF_I8_UN (reg) { /* Convert uint value into int64, we pass everything */ if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2); } stmt: STIND_I8 (addr, lreg) { switch (tree->left->data.ainfo.amode) { case AMImmediate: x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 4); x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset + 4, tree->right->reg2, 4); break; case AMBase: x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->right->reg1, 4); x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset + 4, tree->right->reg2, 4); break; case AMIndex: x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, tree->right->reg1, 4); x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset + 4, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, tree->right->reg2, 4); break; case AMBaseIndex: x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, tree->right->reg1, 4); x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset + 4, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, tree->right->reg2, 4); break; } } stmt: REMOTE_STIND_I8 (reg, lreg) { guint8 *br[2]; int offset; x86_push_reg (s->code, tree->right->reg1); x86_mov_reg_membase (s->code, tree->right->reg1, tree->left->reg1, 0, 4); x86_alu_membase_imm (s->code, X86_CMP, tree->right->reg1, 0, ((int)mono_defaults.transparent_proxy_class)); x86_pop_reg (s->code, tree->right->reg1); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); /* this is a transparent proxy - remote the call */ /* save value to stack */ x86_push_reg (s->code, tree->right->reg2); x86_push_reg (s->code, tree->right->reg1); x86_push_reg (s->code, X86_ESP); x86_push_imm (s->code, tree->data.fi.field); x86_push_imm (s->code, tree->data.fi.klass); x86_push_reg (s->code, tree->left->reg1); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_store_remote_field); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 24); br [1] = s->code; x86_jump8 (s->code, 0); x86_patch (br [0], s->code); offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) : tree->data.fi.field->offset; x86_mov_membase_reg (s->code, tree->left->reg1, offset, tree->right->reg1, 4); x86_mov_membase_reg (s->code, tree->left->reg1, offset + 4, tree->right->reg2, 4); x86_patch (br [1], s->code); } # an addr can use two address register (base and index register). 
The must take care # that we do not override them (thus the use of x86_lea) lreg: LDIND_I8 (addr) { switch (tree->left->data.ainfo.amode) { case AMImmediate: x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4); x86_mov_reg_mem (s->code, tree->reg2, tree->left->data.ainfo.offset + 4, 4); break; case AMBase: x86_lea_membase (s->code, tree->reg2, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset); x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4); x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4); break; case AMIndex: x86_lea_memindex (s->code, tree->reg2, X86_NOBASEREG, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift); x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4); x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4); break; case AMBaseIndex: x86_lea_memindex (s->code, tree->reg2, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift); x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4); x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4); break; } PRINT_REG ("LDIND_I8_0", tree->reg1); PRINT_REG ("LDIND_I8_1", tree->reg2); } lreg: SHR (lreg, coni4) { if (tree->right->data.i < 32) { x86_shrd_reg_imm (s->code, tree->left->reg1, tree->left->reg2, tree->right->data.i); x86_shift_reg_imm (s->code, X86_SAR, tree->left->reg2, tree->right->data.i); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); if (tree->reg2 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4); } else if (tree->right->data.i < 64) { if (tree->reg1 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg2, 4); if (tree->reg2 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4); x86_shift_reg_imm (s->code, X86_SAR, tree->reg2, 31); x86_shift_reg_imm (s->code, X86_SAR, tree->reg1, (tree->right->data.i - 32)); } /* else unspecified result */ } lreg: SHR_UN (lreg, coni4) { if (tree->right->data.i < 32) { x86_shrd_reg_imm (s->code, tree->left->reg1, tree->left->reg2, tree->right->data.i); x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg2, tree->right->data.i); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); if (tree->reg2 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4); } else if (tree->right->data.i < 64) { x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg2, 4); x86_shift_reg_imm (s->code, X86_SHR, tree->reg1, (tree->right->data.i - 32)); x86_mov_reg_imm (s->code, tree->reg2, 0); } /* else unspecified result */ } lreg: SHR (lreg, reg) { guint8 *br [1]; if (tree->right->reg1 != X86_ECX) x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4); x86_shrd_reg (s->code, tree->left->reg1, tree->left->reg2); x86_shift_reg (s->code, X86_SAR, tree->left->reg2); x86_test_reg_imm (s->code, X86_ECX, 32); br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE); x86_mov_reg_reg (s->code, tree->left->reg1, tree->left->reg2, 4); x86_shift_reg_imm (s->code, X86_SAR, tree->reg2, 31); x86_patch (br [0], s->code); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); if (tree->reg2 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4); } lreg: SHR_UN (lreg, reg) { guint8 *br [1]; if (tree->right->reg1 != X86_ECX) x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4); 
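/* SHRD/SHR only look at the low five bits of the shift count in CL, so the
	 * code below handles counts of 0-31 directly and then tests bit five of
	 * ECX to detect counts of 32 and above, which need an extra fix-up of the
	 * high/low word pair */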
x86_shrd_reg (s->code, tree->left->reg1, tree->left->reg2); x86_shift_reg (s->code, X86_SHR, tree->left->reg2); x86_test_reg_imm (s->code, X86_ECX, 32); br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE); x86_mov_reg_reg (s->code, tree->left->reg1, tree->left->reg2, 4); x86_shift_reg_imm (s->code, X86_SHR, tree->reg2, 31); x86_patch (br [0], s->code); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); if (tree->reg2 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4); } lreg: SHL (lreg, coni4) { if (tree->right->data.i < 32) { x86_shld_reg_imm (s->code, tree->left->reg2, tree->left->reg1, tree->right->data.i); x86_shift_reg_imm (s->code, X86_SHL, tree->left->reg1, tree->right->data.i); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); if (tree->reg2 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4); } else if (tree->right->data.i < 64) { x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg1, 4); x86_shift_reg_imm (s->code, X86_SHL, tree->reg2, (tree->right->data.i - 32)); x86_alu_reg_reg (s->code, X86_XOR, tree->reg1, tree->reg1); } /* else unspecified result */ } lreg: SHL (lreg, reg) { guint8 *br [1]; if (tree->right->reg1 != X86_ECX) x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4); x86_shld_reg (s->code, tree->left->reg2, tree->left->reg1); x86_shift_reg (s->code, X86_SHL, tree->left->reg1); x86_test_reg_imm (s->code, X86_ECX, 32); br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE); x86_mov_reg_reg (s->code, tree->left->reg2, tree->left->reg1, 4); x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->left->reg1); x86_patch (br [0], s->code); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); if (tree->reg2 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4); } lreg: ADD (lreg, lreg) { x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1); x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); if (tree->reg2 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4); } lreg: ADD_OVF (lreg, lreg) { x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1); x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException"); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); if (tree->reg2 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4); } lreg: ADD_OVF_UN (lreg, lreg) { x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1); x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException"); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); if (tree->reg2 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4); } lreg: SUB (lreg, lreg) { x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1); x86_alu_reg_reg (s->code, X86_SBB, tree->left->reg2, tree->right->reg2); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); if (tree->reg2 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4); } 
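#
# The 64 bit ADD/SUB rules above (and the checked variants below) operate on
# register pairs: the low words are combined first and the resulting
# carry/borrow is folded into the high words with ADC/SBB.  A rough C sketch
# of the unchecked add, for illustration only (pair64/add64 are not part of
# this file):
#
#	typedef struct { guint32 lo, hi; } pair64;
#
#	static pair64
#	add64 (pair64 a, pair64 b)
#	{
#		pair64 r;
#		r.lo = a.lo + b.lo;                     /* ADD                 */
#		r.hi = a.hi + b.hi + (r.lo < a.lo);     /* ADC adds the carry  */
#		return r;
#	}
#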
lreg: SUB_OVF (lreg, lreg) { x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1); x86_alu_reg_reg (s->code, X86_SBB, tree->left->reg2, tree->right->reg2); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException"); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); if (tree->reg2 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4); } lreg: SUB_OVF_UN (lreg, lreg) { x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1); x86_alu_reg_reg (s->code, X86_SBB, tree->left->reg2, tree->right->reg2); EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException"); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); if (tree->reg2 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4); } lreg: AND (lreg, lreg) { x86_alu_reg_reg (s->code, X86_AND, tree->left->reg1, tree->right->reg1); x86_alu_reg_reg (s->code, X86_AND, tree->left->reg2, tree->right->reg2); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); if (tree->reg2 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4); } lreg: OR (lreg, lreg) { x86_alu_reg_reg (s->code, X86_OR, tree->left->reg1, tree->right->reg1); x86_alu_reg_reg (s->code, X86_OR, tree->left->reg2, tree->right->reg2); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); if (tree->reg2 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4); } lreg: XOR (lreg, lreg) { x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->right->reg1); x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg2, tree->right->reg2); if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); if (tree->reg2 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4); } lreg: NEG (lreg) { if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); if (tree->reg2 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4); x86_neg_reg (s->code, tree->reg1); x86_alu_reg_imm (s->code, X86_ADC, tree->reg2, 0); x86_neg_reg (s->code, tree->reg2); } lreg: NOT (lreg) { if (tree->reg1 != tree->left->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); if (tree->reg2 != tree->left->reg2) x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4); x86_not_reg (s->code, tree->reg1); x86_not_reg (s->code, tree->reg2); } lreg: MUL (lreg, lreg) { if (mono_regset_reg_used (s->rs, X86_ECX)) x86_push_reg (s->code, X86_ECX); x86_push_reg (s->code, tree->right->reg2); x86_push_reg (s->code, tree->right->reg1); x86_push_reg (s->code, tree->left->reg2); x86_push_reg (s->code, tree->left->reg1); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_llmult); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16); if (mono_regset_reg_used (s->rs, X86_ECX)) x86_pop_reg (s->code, X86_ECX); mono_assert (tree->reg1 == X86_EAX && tree->reg2 == X86_EDX); } lreg: MUL_OVF (lreg, lreg) { if (mono_regset_reg_used (s->rs, X86_ECX)) x86_push_reg (s->code, X86_ECX); x86_push_reg (s->code, tree->right->reg2); x86_push_reg (s->code, tree->right->reg1); x86_push_reg (s->code, tree->left->reg2); x86_push_reg (s->code, tree->left->reg1); /* pass a pointer to store the resulting exception - * ugly, but it works */ x86_push_reg (s->code, X86_ESP); mono_add_jump_info (s, 
s->code + 1, MONO_JUMP_INFO_ABS, mono_llmult_ovf); x86_call_code (s->code, 0); x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, 4, 4); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20); x86_alu_reg_imm (s->code, X86_CMP, X86_ECX, 0); /* cond. emit exception */ x86_branch8 (s->code, X86_CC_EQ, 7, FALSE); x86_push_reg (s->code, X86_ECX); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, arch_get_throw_exception ()); x86_call_code (s->code, 0); if (mono_regset_reg_used (s->rs, X86_ECX)) x86_pop_reg (s->code, X86_ECX); mono_assert (tree->reg1 == X86_EAX && tree->reg2 == X86_EDX); } lreg: MUL_OVF_UN (lreg, lreg) { if (mono_regset_reg_used (s->rs, X86_ECX)) x86_push_reg (s->code, X86_ECX); x86_push_reg (s->code, tree->right->reg2); x86_push_reg (s->code, tree->right->reg1); x86_push_reg (s->code, tree->left->reg2); x86_push_reg (s->code, tree->left->reg1); /* pass a pointer to store the resulting exception - * ugly, but it works */ x86_push_reg (s->code, X86_ESP); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_llmult_ovf_un); x86_call_code (s->code, 0); x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, 4, 4); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20); x86_alu_reg_imm (s->code, X86_CMP, X86_ECX, 0); /* cond. emit exception */ x86_branch8 (s->code, X86_CC_EQ, 7, FALSE); x86_push_reg (s->code, X86_ECX); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, arch_get_throw_exception ()); x86_call_code (s->code, 0); if (mono_regset_reg_used (s->rs, X86_ECX)) x86_pop_reg (s->code, X86_ECX); mono_assert (tree->reg1 == X86_EAX && tree->reg2 == X86_EDX); } lreg: DIV (lreg, lreg) { if (mono_regset_reg_used (s->rs, X86_ECX)) x86_push_reg (s->code, X86_ECX); x86_push_reg (s->code, tree->right->reg2); x86_push_reg (s->code, tree->right->reg1); x86_push_reg (s->code, tree->left->reg2); x86_push_reg (s->code, tree->left->reg1); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_lldiv); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16); if (mono_regset_reg_used (s->rs, X86_ECX)) x86_pop_reg (s->code, X86_ECX); mono_assert (tree->reg1 == X86_EAX && tree->reg2 == X86_EDX); } lreg: REM (lreg, lreg) { if (mono_regset_reg_used (s->rs, X86_ECX)) x86_push_reg (s->code, X86_ECX); x86_push_reg (s->code, tree->right->reg2); x86_push_reg (s->code, tree->right->reg1); x86_push_reg (s->code, tree->left->reg2); x86_push_reg (s->code, tree->left->reg1); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_llrem); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16); if (mono_regset_reg_used (s->rs, X86_ECX)) x86_pop_reg (s->code, X86_ECX); mono_assert (tree->reg1 == X86_EAX && tree->reg2 == X86_EDX); } lreg: DIV_UN (lreg, lreg) { if (mono_regset_reg_used (s->rs, X86_ECX)) x86_push_reg (s->code, X86_ECX); x86_push_reg (s->code, tree->right->reg2); x86_push_reg (s->code, tree->right->reg1); x86_push_reg (s->code, tree->left->reg2); x86_push_reg (s->code, tree->left->reg1); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_lldiv_un); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16); if (mono_regset_reg_used (s->rs, X86_ECX)) x86_pop_reg (s->code, X86_ECX); mono_assert (tree->reg1 == X86_EAX && tree->reg2 == X86_EDX); } lreg: REM_UN (lreg, lreg) { if (mono_regset_reg_used (s->rs, X86_ECX)) x86_push_reg (s->code, X86_ECX); x86_push_reg (s->code, tree->right->reg2); x86_push_reg (s->code, tree->right->reg1); x86_push_reg (s->code, tree->left->reg2); x86_push_reg (s->code, tree->left->reg1); 
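/* as with the other 64 bit div/rem rules, no inline code is emitted: the
	 * four 32 bit halves just pushed are handed to the C helper (here
	 * mono_llrem_un, defined after the grammar) and the 64 bit result comes
	 * back in EDX:EAX, which is what the assert on reg1/reg2 checks */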
mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_llrem_un); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16); if (mono_regset_reg_used (s->rs, X86_ECX)) x86_pop_reg (s->code, X86_ECX); mono_assert (tree->reg1 == X86_EAX && tree->reg2 == X86_EDX); } lreg: CALL_I8 (this, ADDR_G) { int lreg = tree->left->reg1; int treg = X86_EAX; if (lreg == treg) treg = X86_EDX; if (tree->left->op != MB_TERM_NOP) { mono_assert (lreg >= 0); x86_push_reg (s->code, lreg); x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0); } if (tree->data.ci.vtype_num) { int offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.ci.vtype_num).offset; x86_lea_membase (s->code, treg, X86_EBP, offset); x86_push_reg (s->code, treg); } mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, tree->right->data.p); x86_call_code (s->code, 0); if (tree->data.ci.args_size) x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size); mono_assert (tree->reg1 == X86_EAX); mono_assert (tree->reg2 == X86_EDX); } lreg: CALL_I8 (this, VFUNC_ADDR) { int lreg = tree->left->reg1; int treg = X86_EAX; if (lreg == treg) treg = X86_EDX; if (tree->left->op != MB_TERM_NOP) { mono_assert (lreg >= 0); x86_push_reg (s->code, lreg); } if (tree->data.ci.vtype_num) { int offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.ci.vtype_num).offset; x86_lea_membase (s->code, treg, X86_EBP, offset); x86_push_reg (s->code, treg); } x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); x86_call_virtual (s->code, lreg, G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2)); if (tree->data.ci.args_size) x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size); PRINT_REG ("CALL0_I8(VIRTUAL)", tree->reg1); PRINT_REG ("CALL1_I8(VIRTUAL)", tree->reg2); mono_assert (tree->reg1 == X86_EAX); mono_assert (tree->reg2 == X86_EDX); } lreg: CALL_I8 (this, INTF_ADDR) { int lreg = tree->left->reg1; int treg = X86_EAX; if (lreg == treg) treg = X86_EDX; if (tree->left->op != MB_TERM_NOP) { mono_assert (lreg >= 0); x86_push_reg (s->code, lreg); } if (tree->data.ci.vtype_num) { int offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.ci.vtype_num).offset; x86_lea_membase (s->code, treg, X86_EBP, offset); x86_push_reg (s->code, treg); } x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4); x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4); x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2); if (tree->data.ci.args_size) x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size); PRINT_REG ("CALL_I8(INTERFACE)", tree->reg1); mono_assert (tree->reg1 == X86_EAX); mono_assert (tree->reg2 == X86_EDX); } stmt: RET (lreg) { if (tree->left->reg1 != X86_EAX) { if (tree->left->reg2 != X86_EAX) { x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4); if (tree->left->reg2 != X86_EDX) x86_mov_reg_reg (s->code, X86_EDX, tree->left->reg2, 4); } else { x86_mov_reg_reg (s->code, X86_ECX, tree->left->reg2, 4); x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4); x86_mov_reg_reg (s->code, X86_EDX, X86_ECX, 4); } } else if (tree->left->reg2 != X86_EDX) { x86_mov_reg_reg (s->code, X86_EDX, tree->left->reg2, 4); } if (!tree->last_instr) { mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_EPILOG, NULL); x86_jump32 (s->code, 0); } } stmt: ARG_I8 (lreg) { x86_push_reg (s->code, tree->left->reg2); x86_push_reg (s->code, tree->left->reg1); } reg: CEQ (lreg, lreg) 
{ guint8 *br [1]; x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); x86_patch (br [0], s->code); x86_set_reg (s->code, X86_CC_EQ, tree->reg1, FALSE); x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE); } reg: CLT (lreg, lreg) { guint8 *br [4]; x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, TRUE); br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE); /* set result to 1 */ x86_patch (br [1], s->code); x86_mov_reg_imm (s->code, tree->reg1, 1); br [3] = s->code; x86_jump8 (s->code, 0); /* set result to 0 */ x86_patch (br [0], s->code); x86_patch (br [2], s->code); x86_mov_reg_imm (s->code, tree->reg1, 0); x86_patch (br [3], s->code); } reg: CLT_UN (lreg, lreg) { guint8 *br [4]; x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, FALSE); br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE); /* set result to 1 */ x86_patch (br [1], s->code); x86_mov_reg_imm (s->code, tree->reg1, 1); br [3] = s->code; x86_jump8 (s->code, 0); /* set result to 0 */ x86_patch (br [0], s->code); x86_patch (br [2], s->code); x86_mov_reg_imm (s->code, tree->reg1, 0); x86_patch (br [3], s->code); } reg: CGT (lreg, lreg) { guint8 *br [4]; x86_alu_reg_reg (s->code, X86_CMP, tree->right->reg2, tree->left->reg2); br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, TRUE); br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE); x86_alu_reg_reg (s->code, X86_CMP, tree->right->reg1, tree->left->reg1); br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE); /* set result to 1 */ x86_patch (br [1], s->code); x86_mov_reg_imm (s->code, tree->reg1, 1); br [3] = s->code; x86_jump8 (s->code, 0); /* set result to 0 */ x86_patch (br [0], s->code); x86_patch (br [2], s->code); x86_mov_reg_imm (s->code, tree->reg1, 0); x86_patch (br [3], s->code); } reg: CGT_UN (lreg, lreg) { guint8 *br [4]; x86_alu_reg_reg (s->code, X86_CMP, tree->right->reg2, tree->left->reg2); br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, FALSE); br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); x86_alu_reg_reg (s->code, X86_CMP, tree->right->reg1, tree->left->reg1); br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE); /* set result to 1 */ x86_patch (br [1], s->code); x86_mov_reg_imm (s->code, tree->reg1, 1); br [3] = s->code; x86_jump8 (s->code, 0); /* set result to 0 */ x86_patch (br [0], s->code); x86_patch (br [2], s->code); x86_mov_reg_imm (s->code, tree->reg1, 0); x86_patch (br [3], s->code); } stmt: BEQ (lreg, lreg) { guint8 *br [1]; x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_EQ, 0, TRUE); x86_patch (br [0], s->code); } stmt: BNE_UN (lreg, lreg) { x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); mono_add_jump_info (s, s->code + 2, 
MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_NE, 0, FALSE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_NE, 0, FALSE); } stmt: BGE (lreg, lreg) { guint8 *br [1]; x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_GT, 0, TRUE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_GE, 0, FALSE); x86_patch (br [0], s->code); } stmt: BGE_UN (lreg, lreg) { guint8 *br [1]; x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_GT, 0, FALSE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_GE, 0, FALSE); x86_patch (br [0], s->code); } stmt: BGT (lreg, lreg) { guint8 *br [1]; x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_GT, 0, TRUE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_GT, 0, FALSE); x86_patch (br [0], s->code); } stmt: BGT_UN (lreg, lreg) { guint8 *br [1]; x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_GT, 0, FALSE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_GT, 0, FALSE); x86_patch (br [0], s->code); } stmt: BLT (lreg, lreg) { guint8 *br [1]; x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_LT, 0, TRUE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_LT, 0, FALSE); x86_patch (br [0], s->code); } stmt: BLT_UN (lreg, lreg) { guint8 *br [1]; x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_LT, 0, FALSE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 
0, FALSE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_LT, 0, FALSE); x86_patch (br [0], s->code); } stmt: BLE (lreg, lreg) { guint8 *br [1]; x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_LT, 0, TRUE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_LE, 0, FALSE); x86_patch (br [0], s->code); } stmt: BLE_UN (lreg, lreg) { guint8 *br [1]; x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_LT, 0, FALSE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_LE, 0, FALSE); x86_patch (br [0], s->code); } # # floating point #stmt: STLOC (CONV_I4 (freg)) { # // fixme: set CW # x86_fist_pop_membase (s->code, X86_EBP, tree->data.i, FALSE); #} reg: CONV_I1 (freg) { x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4); x86_fnstcw_membase(s->code, X86_ESP, 0); x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2); x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00); x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2); x86_fldcw_membase (s->code, X86_ESP, 2); x86_push_reg (s->code, X86_EAX); // SP = SP - 4 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE); x86_pop_reg (s->code, tree->reg1); x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, FALSE); x86_fldcw_membase (s->code, X86_ESP, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4); } reg: CONV_I2 (freg) { x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4); x86_fnstcw_membase(s->code, X86_ESP, 0); x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2); x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00); x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2); x86_fldcw_membase (s->code, X86_ESP, 2); x86_push_reg (s->code, X86_EAX); // SP = SP - 4 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE); x86_pop_reg (s->code, tree->reg1); x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, TRUE); x86_fldcw_membase (s->code, X86_ESP, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4); } reg: CONV_I4 (freg) { x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4); x86_fnstcw_membase(s->code, X86_ESP, 0); x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2); x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00); x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2); x86_fldcw_membase (s->code, X86_ESP, 2); x86_push_reg (s->code, X86_EAX); // SP = SP - 4 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE); x86_pop_reg (s->code, tree->reg1); x86_fldcw_membase (s->code, X86_ESP, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4); } lreg: CONV_I8 (freg) { x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4); x86_fnstcw_membase(s->code, X86_ESP, 0); x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2); x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00); x86_mov_membase_reg 
(s->code, X86_ESP, 2, tree->reg1, 2); x86_fldcw_membase (s->code, X86_ESP, 2); x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8); x86_fist_pop_membase (s->code, X86_ESP, 0, TRUE); x86_pop_reg (s->code, tree->reg1); x86_pop_reg (s->code, tree->reg2); x86_fldcw_membase (s->code, X86_ESP, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8); } reg: CEQ (freg, freg) { int treg = tree->reg1; if (treg != X86_EAX) x86_push_reg (s->code, X86_EAX); // save EAX x86_fcompp (s->code); x86_fnstsw (s->code); x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500); x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000); x86_set_reg (s->code, X86_CC_EQ, tree->reg1, TRUE); x86_widen_reg (s->code, treg, treg, FALSE, FALSE); if (treg != X86_EAX) x86_pop_reg (s->code, X86_EAX); // save EAX } reg: CGT (freg, freg) { int treg = tree->reg1; if (treg != X86_EAX) x86_push_reg (s->code, X86_EAX); // save EAX x86_fcompp (s->code); x86_fnstsw (s->code); x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500); x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100); x86_set_reg (s->code, X86_CC_EQ, tree->reg1, TRUE); x86_widen_reg (s->code, treg, treg, FALSE, FALSE); if (treg != X86_EAX) x86_pop_reg (s->code, X86_EAX); // save EAX } reg: CGT_UN (freg, freg) { int treg = tree->reg1; if (treg != X86_EAX) x86_push_reg (s->code, X86_EAX); // save EAX x86_fcompp (s->code); x86_fnstsw (s->code); x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500); x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100); x86_set_reg (s->code, X86_CC_EQ, tree->reg1, TRUE); x86_widen_reg (s->code, treg, treg, FALSE, FALSE); if (treg != X86_EAX) x86_pop_reg (s->code, X86_EAX); // save EAX } reg: CLT (freg, freg) { int treg = tree->reg1; if (treg != X86_EAX) x86_push_reg (s->code, X86_EAX); // save EAX x86_fcompp (s->code); x86_fnstsw (s->code); x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500); x86_set_reg (s->code, X86_CC_EQ, tree->reg1, TRUE); x86_widen_reg (s->code, treg, treg, FALSE, FALSE); if (treg != X86_EAX) x86_pop_reg (s->code, X86_EAX); // save EAX } reg: CLT_UN (freg, freg) { int treg = tree->reg1; if (treg != X86_EAX) x86_push_reg (s->code, X86_EAX); // save EAX x86_fcompp (s->code); x86_fnstsw (s->code); x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500); x86_set_reg (s->code, X86_CC_EQ, tree->reg1, TRUE); x86_widen_reg (s->code, treg, treg, FALSE, FALSE); if (treg != X86_EAX) x86_pop_reg (s->code, X86_EAX); // save EAX } freg: CONV_R8 (freg) { /* nothing to do */ } freg: CONV_R4 (freg) { /* fixme: nothing to do ??*/ } freg: CONV_R8 (LDIND_I4 (ADDR_G)) { x86_fild (s->code, tree->left->left->data.p, FALSE); } freg: CONV_R4 (reg) { x86_push_reg (s->code, tree->left->reg1); x86_fild_membase (s->code, X86_ESP, 0, FALSE); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4); } freg: CONV_R8 (reg) { x86_push_reg (s->code, tree->left->reg1); x86_fild_membase (s->code, X86_ESP, 0, FALSE); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4); } freg: CONV_R_UN (reg) { x86_push_imm (s->code, 0); x86_push_reg (s->code, tree->left->reg1); x86_fild_membase (s->code, X86_ESP, 0, TRUE); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8); } freg: CONV_R_UN (lreg) { static guint8 mn[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x3f, 0x40 }; guint8 *br [1]; /* load 64bit integer to FP stack */ x86_push_imm (s->code, 0); x86_push_reg (s->code, tree->left->reg2); x86_push_reg (s->code, tree->left->reg1); x86_fild_membase (s->code, X86_ESP, 0, TRUE); /* store as 80bit FP value */ x86_fst80_membase (s->code, X86_ESP, 0); /* test if lreg is negative */ x86_test_reg_reg (s->code, 
tree->left->reg1, tree->left->reg1); br [0] = s->code; x86_branch8 (s->code, X86_CC_GEZ, 0, TRUE); /* add correction constant mn */ x86_fld80_mem (s->code, mn); x86_fld80_membase (s->code, X86_ESP, 0); x86_fp_op_reg (s->code, X86_FADD, 1, TRUE); x86_fst80_membase (s->code, X86_ESP, 0); x86_patch (br [0], s->code); x86_fld80_membase (s->code, X86_ESP, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12); } freg: CONV_R4 (lreg) { x86_push_reg (s->code, tree->left->reg2); x86_push_reg (s->code, tree->left->reg1); x86_fild_membase (s->code, X86_ESP, 0, TRUE); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8); } freg: CONV_R8 (lreg) { x86_push_reg (s->code, tree->left->reg2); x86_push_reg (s->code, tree->left->reg1); x86_fild_membase (s->code, X86_ESP, 0, TRUE); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8); } freg: CONST_R4 { float f = *(float *)tree->data.p; if (f == 0.0) x86_fldz (s->code); else if (f == 1.0) x86_fld1(s->code); else x86_fld (s->code, tree->data.p, FALSE); } freg: CONST_R8 { double d = *(double *)tree->data.p; if (d == 0.0) x86_fldz (s->code); else if (d == 1.0) x86_fld1(s->code); else x86_fld (s->code, tree->data.p, TRUE); } freg: LDIND_R4 (reg) { x86_fld_membase (s->code, tree->left->reg1, 0, FALSE); } freg: LDIND_R8 (reg) { x86_fld_membase (s->code, tree->left->reg1, 0, TRUE); } freg: ADD (freg, freg) { x86_fp_op_reg (s->code, X86_FADD, 1, TRUE); } freg: SUB (freg, freg) { x86_fp_op_reg (s->code, X86_FSUB, 1, TRUE); } freg: MUL (freg, freg) { x86_fp_op_reg (s->code, X86_FMUL, 1, TRUE); } freg: DIV (freg, freg) { x86_fp_op_reg (s->code, X86_FDIV, 1, TRUE); } freg: REM (freg, freg) { guint8 *l1, *l2; /* we need to exchange ST(0) with ST(1) */ x86_fxch (s->code, 1); /* this requires a loop, because fprem1 somtimes * returns a partial remainder */ l1 = s->code; x86_fprem1 (s->code); x86_fnstsw (s->code); x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x0400); l2 = s->code + 2; x86_branch8 (s->code, X86_CC_NE, l1 - l2, FALSE); /* pop result */ x86_fstp (s->code, 1); } freg: NEG (freg) { x86_fchs (s->code); } stmt: POP (freg) stmt: STIND_R4 (ADDR_L, freg) { int offset = g_array_index (s->varinfo, MonoVarInfo, tree->left->data.i).offset; x86_fst_membase (s->code, X86_EBP, offset, FALSE, TRUE); } stmt: STIND_R4 (reg, freg) { x86_fst_membase (s->code, tree->left->reg1, 0, FALSE, TRUE); } stmt: REMOTE_STIND_R4 (reg, freg) { guint8 *br[2]; int treg = X86_EAX; int lreg = tree->left->reg1; int offset; if (lreg == treg) treg = X86_EDX; x86_mov_reg_membase (s->code, treg, lreg, 0, 4); x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class)); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); /* this is a transparent proxy - remote the call */ /* save value to stack */ x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4); x86_fst_membase (s->code, X86_ESP, 0, FALSE, TRUE); x86_push_reg (s->code, X86_ESP); x86_push_imm (s->code, tree->data.fi.field); x86_push_imm (s->code, tree->data.fi.klass); x86_push_reg (s->code, lreg); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_store_remote_field); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20); br [1] = s->code; x86_jump8 (s->code, 0); x86_patch (br [0], s->code); offset = tree->data.fi.klass->valuetype ? 
tree->data.fi.field->offset - sizeof (MonoObject) : tree->data.fi.field->offset; x86_fst_membase (s->code, lreg, offset, FALSE, TRUE); x86_patch (br [1], s->code); } stmt: STIND_R8 (ADDR_L, freg) { int offset = g_array_index (s->varinfo, MonoVarInfo, tree->left->data.i).offset; x86_fst_membase (s->code, X86_EBP, offset, TRUE, TRUE); } stmt: STIND_R8 (reg, freg) { x86_fst_membase (s->code, tree->left->reg1, 0, TRUE, TRUE); } stmt: REMOTE_STIND_R8 (reg, freg) { guint8 *br[2]; int treg = X86_EAX; int lreg = tree->left->reg1; int offset; if (lreg == treg) treg = X86_EDX; x86_mov_reg_membase (s->code, treg, lreg, 0, 4); x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class)); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); /* this is a transparent proxy - remote the call */ /* save value to stack */ x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8); x86_fst_membase (s->code, X86_ESP, 0, TRUE, TRUE); x86_push_reg (s->code, X86_ESP); x86_push_imm (s->code, tree->data.fi.field); x86_push_imm (s->code, tree->data.fi.klass); x86_push_reg (s->code, lreg); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_store_remote_field); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 24); br [1] = s->code; x86_jump8 (s->code, 0); x86_patch (br [0], s->code); offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) : tree->data.fi.field->offset; x86_fst_membase (s->code, lreg, offset, TRUE, TRUE); x86_patch (br [1], s->code); } stmt: ARG_R4 (freg) { x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4); x86_fst_membase (s->code, X86_ESP, 0, FALSE, TRUE); } stmt: ARG_R8 (freg) { x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8); x86_fst_membase (s->code, X86_ESP, 0, TRUE, TRUE); } # fixme: we need to implement unordered and ordered compares stmt: BEQ (freg, freg) { x86_fcompp (s->code); x86_fnstsw (s->code); x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500); x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_EQ, 0, TRUE); } stmt: BNE_UN (freg, freg) { x86_fcompp (s->code); x86_fnstsw (s->code); x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500); x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_NE, 0, FALSE); } stmt: BLT (freg, freg) { x86_fcompp (s->code); x86_fnstsw (s->code); x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_EQ, 0, FALSE); } # fixme: unordered ?? 
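# All of the floating point compare/branch rules in this section use the same
# pattern: fcompp compares ST(0) (the right operand) with ST(1) (the left
# operand) and pops both, fnstsw copies the FPU status word into AX, and the
# mask 0x4500 keeps the condition bits C0 (0x0100), C2 (0x0400) and C3
# (0x4000).  C3 set means the operands were equal, C0 set means the right
# operand was smaller, all three clear means it was larger, and C2 signals an
# unordered (NaN) compare - the case the fixme notes refer to.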
stmt: BLT_UN (freg, freg) { x86_fcompp (s->code); x86_fnstsw (s->code); x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_EQ, 0, FALSE); } stmt: BGE (freg, freg) { x86_fcompp (s->code); x86_fnstsw (s->code); x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_NE, 0, FALSE); } stmt: BGE_UN (freg, freg) { x86_fcompp (s->code); x86_fnstsw (s->code); x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_NE, 0, FALSE); } stmt: BGT (freg, freg) { x86_fcompp (s->code); x86_fnstsw (s->code); x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500); x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_EQ, 0, FALSE); } stmt: BGT_UN (freg, freg) { x86_fcompp (s->code); x86_fnstsw (s->code); x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500); x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_EQ, 0, FALSE); } stmt: BLE (freg, freg) { x86_fcompp (s->code); x86_fnstsw (s->code); x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500); x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_NE, 0, FALSE); } stmt: BLE_UN (freg, freg) { x86_fcompp (s->code); x86_fnstsw (s->code); x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500); x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100); mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb); x86_branch32 (s->code, X86_CC_NE, 0, FALSE); } freg: CALL_R8 (this, ADDR_G) { int lreg = tree->left->reg1; int treg = X86_EAX; if (lreg == treg) treg = X86_EDX; if (tree->left->op != MB_TERM_NOP) { mono_assert (lreg >= 0); x86_push_reg (s->code, lreg); x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0); } if (tree->data.ci.vtype_num) { int offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.ci.vtype_num).offset; x86_lea_membase (s->code, treg, X86_EBP, offset); x86_push_reg (s->code, treg); } mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, tree->right->data.p); x86_call_code (s->code, 0); if (tree->data.ci.args_size) x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size); } freg: CALL_R8 (this, INTF_ADDR) { int lreg = tree->left->reg1; int treg = X86_EAX; if (lreg == treg) treg = X86_EDX; if (tree->left->op != MB_TERM_NOP) { mono_assert (lreg >= 0); x86_push_reg (s->code, lreg); } if (tree->data.ci.vtype_num) { int offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.ci.vtype_num).offset; x86_lea_membase (s->code, treg, X86_EBP, offset); x86_push_reg (s->code, treg); } x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4); x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4); x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2); if (tree->data.ci.args_size) x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size); } freg: CALL_R8 (this, VFUNC_ADDR) { int lreg = tree->left->reg1; int treg = X86_EAX; if (lreg == treg) treg = X86_EDX; if (tree->left->op != MB_TERM_NOP) { mono_assert 
(lreg >= 0); x86_push_reg (s->code, lreg); } if (tree->data.ci.vtype_num) { int offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.ci.vtype_num).offset; x86_lea_membase (s->code, treg, X86_EBP, offset); x86_push_reg (s->code, treg); } x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); x86_call_virtual (s->code, lreg, G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2)); if (tree->data.ci.args_size) x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size); } stmt: RET (freg) { if (!tree->last_instr) { mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_EPILOG, NULL); x86_jump32 (s->code, 0); } } # support for value types reg: LDIND_OBJ (reg) { if (tree->left->reg1 != tree->reg1) x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4); } stmt: STIND_OBJ (reg, reg) { mono_assert (tree->data.i > 0); x86_push_imm (s->code, tree->data.i); x86_push_reg (s->code, tree->right->reg1); x86_push_reg (s->code, tree->left->reg1); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, MEMCOPY); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12); } stmt: REMOTE_STIND_OBJ (reg, reg) { guint8 *br[2]; int treg = X86_EAX; int lreg = tree->left->reg1; int rreg = tree->right->reg1; int offset; if (lreg == treg) treg = X86_EDX; if (rreg == treg) treg = X86_ECX; x86_mov_reg_membase (s->code, treg, lreg, 0, 4); x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class)); br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); /* this is a transparent proxy - remote the call */ x86_push_reg (s->code, rreg); x86_push_imm (s->code, tree->data.fi.field); x86_push_imm (s->code, tree->data.fi.klass); x86_push_reg (s->code, lreg); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_store_remote_field); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16); br [1] = s->code; x86_jump8 (s->code, 0); x86_patch (br [0], s->code); offset = tree->data.fi.klass->valuetype ? 
tree->data.fi.field->offset - sizeof (MonoObject) : tree->data.fi.field->offset; x86_push_imm (s->code, mono_class_value_size (tree->data.fi.klass, NULL)); x86_push_reg (s->code, tree->right->reg1); x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, sizeof (MonoObject)); x86_push_reg (s->code, tree->left->reg1); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, MEMCOPY); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12); x86_patch (br [1], s->code); /* please test this first */ g_assert_not_reached (); } stmt: ARG_OBJ (coni4) { x86_push_imm (s->code, tree->left->data.i); } stmt: ARG_OBJ (reg) { int size = tree->data.i; int sa; mono_assert (size > 0); sa = size + 3; sa &= ~3; /* reserve space for the argument */ x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, sa); x86_push_reg (s->code, X86_EAX); x86_push_reg (s->code, X86_EDX); x86_push_reg (s->code, X86_ECX); x86_push_imm (s->code, size); x86_push_reg (s->code, tree->left->reg1); x86_lea_membase (s->code, X86_EAX, X86_ESP, 5*4); x86_push_reg (s->code, X86_EAX); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, MEMCOPY); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12); x86_pop_reg (s->code, X86_ECX); x86_pop_reg (s->code, X86_EDX); x86_pop_reg (s->code, X86_EAX); } stmt: RET_OBJ (reg) { int size = tree->data.i; x86_push_imm (s->code, size); x86_push_reg (s->code, tree->left->reg1); x86_push_membase (s->code, X86_EBP, 8); mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, MEMCOPY); x86_call_code (s->code, 0); x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12); if (!tree->last_instr) { mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_EPILOG, NULL); x86_jump32 (s->code, 0); } } %% #include "jit.h" gint64 mono_llmult (gint64 a, gint64 b) { return a * b; } guint64 mono_llmult_ovf_un (gpointer *exc, guint32 al, guint32 ah, guint32 bl, guint32 bh) { guint64 res, t1; // fixme: this is incredible slow if (ah && bh) goto raise_exception; res = (guint64)al * (guint64)bl; t1 = (guint64)ah * (guint64)bl + (guint64)al * (guint64)bh; if (t1 > 0xffffffff) goto raise_exception; res += ((guint64)t1) << 32; *exc = NULL; return res; raise_exception: *exc = mono_get_exception_overflow (); return 0; } guint64 mono_llmult_ovf (gpointer *exc, guint32 al, gint32 ah, guint32 bl, gint32 bh) { gint64 res, t1; // fixme: check for overflow res = (gint64)al * (gint64)bl; t1 = (gint64)ah * bl + al * (gint64)bh; res += ((gint64)t1) << 32; *exc = NULL; return res; /* raise_exception: *exc = mono_get_exception_overflow (); return 0; */ } gint64 mono_lldiv (gint64 a, gint64 b) { return a / b; } gint64 mono_llrem (gint64 a, gint64 b) { return a % b; } guint64 mono_lldiv_un (guint64 a, guint64 b) { return a / b; } guint64 mono_llrem_un (guint64 a, guint64 b) { return a % b; } MBTree * mono_ctree_new (MonoMemPool *mp, int op, MBTree *left, MBTree *right) { MBTree *t = mono_mempool_alloc0 (mp, sizeof (MBTree)); t->op = op; t->left = left; t->right = right; t->reg1 = -1; t->reg2 = -1; t->reg3 = -1; t->svt = VAL_UNKNOWN; t->cli_addr = -1; return t; } MBTree * mono_ctree_new_leaf (MonoMemPool *mp, int op) { return mono_ctree_new (mp, op, NULL, NULL); } gpointer arch_get_lmf_addr (void) { gpointer *lmf; if ((lmf = TlsGetValue (lmf_thread_id))) return lmf; lmf = g_malloc (sizeof (gpointer)); *lmf = NULL; TlsSetValue (lmf_thread_id, lmf); return lmf; } MonoArray* mono_array_new_wrapper (MonoClass *eclass, guint32 n) { MonoDomain *domain = mono_domain_get (); return mono_array_new (domain, eclass, n); } MonoObject * 
mono_object_new_wrapper (MonoClass *klass)
{
	MonoDomain *domain = mono_domain_get ();

	return mono_object_new (domain, klass);
}

MonoString*
mono_ldstr_wrapper (MonoImage *image, guint32 ind)
{
	MonoDomain *domain = mono_domain_get ();

	return mono_ldstr (domain, image, ind);
}

gpointer
mono_ldsflda (MonoClass *klass, int offset)
{
	MonoDomain *domain = mono_domain_get ();
	MonoVTable *vt;
	gpointer addr;

	vt = mono_class_vtable (domain, klass);

	addr = (char*)(vt->data) + offset;

	return addr;
}

#ifdef DEBUG
void *
MEMCOPY (void *dest, const void *src, size_t n)
{
	int i, l = n;

	printf ("MEMCPY(%p to %p [%d]) ", src, dest, n);

	for (i = 0; i < l; i++)
		printf ("%02x ", *((guint8 *)src + i));

	printf ("\n");

	return memcpy (dest, src, n);
}
#endif
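/*
 * Note on mono_llmult_ovf_un above: it works on 32 bit halves, using
 *
 *	a * b = ah*bh*2^64 + (ah*bl + al*bh)*2^32 + al*bl
 *
 * so any non-zero ah*bh term overflows immediately, and the middle term has
 * to fit into 32 bits before it is shifted up.  On overflow the helper
 * stores an OverflowException through the exc out-parameter and returns 0;
 * the MUL_OVF/MUL_OVF_UN rules test that slot after the call and raise the
 * exception.
 */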