2 * x86.brg: X86 code generator
5 * Dietmar Maurer (dietmar@ximian.com)
7 * (C) 2001 Ximian, Inc.
16 #ifndef PLATFORM_WIN32
18 #include <sys/syscall.h>
21 #include <mono/metadata/blob.h>
22 #include <mono/metadata/metadata.h>
23 #include <mono/metadata/loader.h>
24 #include <mono/metadata/object.h>
25 #include <mono/metadata/tabledefs.h>
26 #include <mono/metadata/appdomain.h>
27 #include <mono/arch/x86/x86-codegen.h>
33 * Pull the list of opcodes
35 #define OPDEF(a,b,c,d,e,f,g,h,i,j) \
39 #include "mono/cil/opcode.def"
44 void print_lmf (void);
46 #define MBTREE_TYPE MBTree
47 #define MBCGEN_TYPE MonoFlowGraph
48 #define MBCOST_DATA MonoFlowGraph
49 #define MBALLOC_STATE mono_mempool_alloc (data->mp, sizeof (MBState))
52 AMImmediate = 0, // ptr
54 AMIndex = 2, // V[REG*X]
55 AMBaseIndex = 3, // V[REG*X][REG]
68 unsigned last_instr:1;
91 MonoClassField *field;
99 gint64 mono_llmult (gint64 a, gint64 b);
100 guint64 mono_llmult_ovf (gpointer *exc, guint32 al, gint32 ah, guint32 bl, gint32 bh);
101 guint64 mono_llmult_ovf_un (gpointer *exc, guint32 al, guint32 ah, guint32 bl, guint32 bh);
102 gint64 mono_lldiv (gint64 a, gint64 b);
103 gint64 mono_llrem (gint64 a, gint64 b);
104 guint64 mono_lldiv_un (guint64 a, guint64 b);
105 guint64 mono_llrem_un (guint64 a, guint64 b);
106 gpointer mono_ldsflda (MonoClass *klass, int offset);
108 gpointer arch_get_lmf_addr (void);
111 mono_array_new_wrapper (MonoClass *eclass, guint32 n);
113 mono_object_new_wrapper (MonoClass *klass);
115 mono_ldstr_wrapper (MonoImage *image, guint32 ind);
118 get_mono_object_isinst (void);
120 #define MB_OPT_LEVEL 1
122 #if MB_OPT_LEVEL == 0
123 #define MB_USE_OPT1(c) 65535
124 #define MB_USE_OPT2(c) 65535
126 #if MB_OPT_LEVEL == 1
127 #define MB_USE_OPT1(c) c
128 #define MB_USE_OPT2(c) 65535
130 #if MB_OPT_LEVEL >= 2
131 #define MB_USE_OPT1(c) c
132 #define MB_USE_OPT2(c) c
137 #define REAL_PRINT_REG(text,reg) \
138 mono_assert (reg >= 0); \
139 x86_push_reg (s->code, X86_EAX); \
140 x86_push_reg (s->code, X86_EDX); \
141 x86_push_reg (s->code, X86_ECX); \
142 x86_push_reg (s->code, reg); \
143 x86_push_imm (s->code, reg); \
144 x86_push_imm (s->code, text " %d %p\n"); \
145 x86_mov_reg_imm (s->code, X86_EAX, printf); \
146 x86_call_reg (s->code, X86_EAX); \
147 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 3*4); \
148 x86_pop_reg (s->code, X86_ECX); \
149 x86_pop_reg (s->code, X86_EDX); \
150 x86_pop_reg (s->code, X86_EAX);
153 #define MEMCOPY debug_memcpy
154 void *MEMCOPY (void *dest, const void *src, size_t n);
156 #define PRINT_REG(text,reg) REAL_PRINT_REG(text,reg)
159 #define MEMCOPY memcpy
161 #define PRINT_REG(x,y)
165 /* The call instruction for virtual functions must have a known
166 * size (used by x86_magic_trampoline)
168 #define x86_call_virtual(inst,basereg,disp) \
170 *(inst)++ = (unsigned char)0xff; \
171 x86_address_byte ((inst), 2, 2, (basereg)); \
172 x86_imm_emit32 ((inst), (disp)); \
175 /* emit an exception if condition is fail */
176 #define EMIT_COND_SYSTEM_EXCEPTION(cond,signed,exc_name) \
179 x86_branch8 (s->code, cond, 10, signed); \
180 x86_push_imm (s->code, exc_name); \
181 t = arch_get_throw_exception_by_name (); \
182 mono_add_jump_info (s, s->code + 1, \
183 MONO_JUMP_INFO_ABS, t); \
184 x86_call_code (s->code, 0); \
187 /* we use this macro to move one lreg to another - source and
188 destination may overlap, but the register allocator has to
189 make sure that ((d1 < d2) && (s1 < s2))
191 #define MOVE_LREG(d1,d2,s1,s2) \
193 g_assert ((d1 < d2) && (s1 < s2)); \
194 if ((d1) <= (s1)) { \
196 x86_mov_reg_reg (s->code, d1, s1, 4); \
198 x86_mov_reg_reg (s->code, d2, s2, 4); \
201 x86_mov_reg_reg (s->code, d2, s2, 4); \
203 x86_mov_reg_reg (s->code, d1, s1, 4); \
211 # terminal definitions
215 %term CONST_I4 CONST_I8 CONST_R4 CONST_R8
216 %term LDIND_I1 LDIND_U1 LDIND_I2 LDIND_U2 LDIND_I4 LDIND_REF LDIND_I8 LDIND_R4 LDIND_R8
217 %term LDIND_U4 LDIND_OBJ
218 %term STIND_I1 STIND_I2 STIND_I4 STIND_REF STIND_I8 STIND_R4 STIND_R8 STIND_OBJ
219 %term ADDR_L ADDR_G ARG_I4 ARG_I8 ARG_R4 ARG_R8 ARG_OBJ ARG_STRING CALL_I4 CALL_I8 CALL_R8 CALL_VOID
220 %term BREAK SWITCH BR RET_VOID RET RET_OBJ ENDFINALLY
221 %term ADD ADD_OVF ADD_OVF_UN SUB SUB_OVF SUB_OVF_UN MUL MUL_OVF MUL_OVF_UN
222 %term DIV DIV_UN REM REM_UN AND OR XOR SHL SHR SHR_UN NEG NOT
223 %term COMPARE CBRANCH BRTRUE BRFALSE CSET
224 %term CONV_I4 CONV_I1 CONV_I2 CONV_I8 CONV_U1 CONV_U2 CONV_U4 CONV_U8 CONV_R4 CONV_R8 CONV_R_UN
225 %term INTF_ADDR VFUNC_ADDR NOP NEWARR NEWARR_SPEC NEWOBJ NEWOBJ_SPEC NEWSTRUCT CPOBJ POP INITOBJ
226 %term ISINST CASTCLASS UNBOX
227 %term CONV_OVF_I1 CONV_OVF_U1 CONV_OVF_I2 CONV_OVF_U2 CONV_OVF_U4 CONV_OVF_U8 CONV_OVF_I4
228 %term CONV_OVF_I4_UN CONV_OVF_U1_UN CONV_OVF_U2_UN
229 %term CONV_OVF_I2_UN CONV_OVF_I8_UN CONV_OVF_I1_UN
230 %term EXCEPTION THROW RETHROW HANDLER CHECKTHIS
231 %term LDLEN LDELEMA LDFTN LDVIRTFTN LDSTR LDSFLDA
232 %term REMOTE_LDFLDA REMOTE_STIND_I1 REMOTE_STIND_I2 REMOTE_STIND_I4 REMOTE_STIND_REF
233 %term REMOTE_STIND_I8 REMOTE_STIND_R4 REMOTE_STIND_R8 REMOTE_STIND_OBJ
243 # integer constant folding
244 coni4: AND (coni4, coni4) {
245 tree->data.i = tree->left->data.i & tree->right->data.i;
248 coni4: OR (coni4, coni4) {
249 tree->data.i = tree->left->data.i | tree->right->data.i;
252 coni4: XOR (coni4, coni4) {
253 tree->data.i = tree->left->data.i ^ tree->right->data.i;
256 coni4: SHL (coni4, coni4) {
257 tree->data.i = tree->left->data.i << tree->right->data.i;
260 coni4: SHR (coni4, coni4) {
261 tree->data.i = tree->left->data.i >> tree->right->data.i;
265 tree->data.i = ~tree->left->data.i;
268 coni4: ADD (coni4, coni4) {
269 tree->data.i = tree->left->data.i + tree->right->data.i;
272 coni4: SUB (coni4, coni4) {
273 tree->data.i = tree->left->data.i - tree->right->data.i;
276 coni4: MUL (coni4, coni4) {
277 tree->data.i = tree->left->data.i * tree->right->data.i;
280 coni4: DIV (coni4, coni4) {
281 tree->data.i = tree->left->data.i / tree->right->data.i;
283 MBCOND (tree->right->data.i)
287 coni4: REM (coni4, coni4) {
288 tree->data.i = tree->left->data.i % tree->right->data.i;
290 MBCOND (tree->right->data.i)
303 tree->data.ainfo.offset = tree->data.i;
304 tree->data.ainfo.amode = AMImmediate;
308 tree->data.ainfo.offset = tree->data.i;
309 tree->data.ainfo.amode = AMImmediate;
312 acon: ADD (ADDR_G, coni4) {
313 tree->data.ainfo.offset = (unsigned)tree->left->data.p + tree->right->data.i;
314 tree->data.ainfo.amode = AMImmediate;
320 tree->data.ainfo.offset = 0;
321 tree->data.ainfo.basereg = tree->reg1;
322 tree->data.ainfo.amode = AMBase;
325 base: ADD (reg, coni4) {
326 tree->data.ainfo.offset = tree->right->data.i;
327 tree->data.ainfo.basereg = tree->left->reg1;
328 tree->data.ainfo.amode = AMBase;
332 tree->data.ainfo.offset = VARINFO (s, tree->data.i).offset;
333 tree->data.ainfo.basereg = X86_EBP;
334 tree->data.ainfo.amode = AMBase;
336 MBCOND (VARINFO (data, tree->data.i).reg < 0);
341 tree->data.ainfo.offset = 0;
342 tree->data.ainfo.indexreg = tree->reg1;
343 tree->data.ainfo.shift = 0;
344 tree->data.ainfo.amode = AMIndex;
347 index: SHL (reg, coni4) {
348 tree->data.ainfo.offset = 0;
349 tree->data.ainfo.amode = AMIndex;
350 tree->data.ainfo.indexreg = tree->left->reg1;
351 tree->data.ainfo.shift = tree->right->data.i;
353 MBCOND (tree->right->data.i == 0 ||
354 tree->right->data.i == 1 ||
355 tree->right->data.i == 2 ||
356 tree->right->data.i == 3);
361 index: MUL (reg, coni4) {
362 static int fast_log2 [] = { 1, 0, 1, -1, 2, -1, -1, -1, 3 };
364 tree->data.ainfo.offset = 0;
365 tree->data.ainfo.amode = AMIndex;
366 tree->data.ainfo.indexreg = tree->left->reg1;
367 tree->data.ainfo.shift = fast_log2 [tree->right->data.i];
369 MBCOND (tree->right->data.i == 1 ||
370 tree->right->data.i == 2 ||
371 tree->right->data.i == 4 ||
372 tree->right->data.i == 8);
381 addr: ADD (index, base) {
382 tree->data.ainfo.offset = tree->right->data.ainfo.offset;
383 tree->data.ainfo.basereg = tree->right->data.ainfo.basereg;
384 tree->data.ainfo.amode = tree->left->data.ainfo.amode |
385 tree->right->data.ainfo.amode;
386 tree->data.ainfo.shift = tree->left->data.ainfo.shift;
387 tree->data.ainfo.indexreg = tree->left->data.ainfo.indexreg;
390 # we pass exception in ECX to catch handler
392 int offset = VARINFO (s, tree->data.i).offset;
394 if (tree->reg1 != X86_ECX)
395 x86_mov_reg_reg (s->code, tree->reg1, X86_ECX, 4);
397 /* store it so that we can RETHROW it later */
398 x86_mov_membase_reg (s->code, X86_EBP, offset, tree->reg1, 4);
404 x86_push_reg (s->code, tree->left->reg1);
405 target = arch_get_throw_exception ();
406 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, target);
407 x86_call_code (s->code, target);
411 int off = VARINFO (s, tree->data.i).offset;
414 x86_push_membase (s->code, X86_EBP, off);
415 target = arch_get_throw_exception ();
416 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, target);
417 x86_call_code (s->code, target);
421 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_BB, tree->data.bb);
422 x86_call_imm (s->code, 0);
429 stmt: STIND_I4 (addr, coni4) {
430 switch (tree->left->data.ainfo.amode) {
433 x86_mov_mem_imm (s->code, tree->left->data.ainfo.offset, tree->right->data.i, 4);
437 x86_mov_membase_imm (s->code, tree->left->data.ainfo.basereg,
438 tree->left->data.ainfo.offset, tree->right->data.i, 4);
441 x86_mov_memindex_imm (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
442 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
443 tree->right->data.i, 4);
446 x86_mov_memindex_imm (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
447 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
448 tree->right->data.i, 4);
453 stmt: STIND_I4 (addr, reg) {
454 PRINT_REG ("STIND_I4", tree->right->reg1);
456 switch (tree->left->data.ainfo.amode) {
459 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 4);
463 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
464 tree->left->data.ainfo.offset, tree->right->reg1, 4);
467 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
468 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
469 tree->right->reg1, 4);
472 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
473 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
474 tree->right->reg1, 4);
479 stmt: REMOTE_STIND_I4 (reg, reg) {
482 int lreg = tree->left->reg1;
483 int rreg = tree->right->reg1;
492 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
493 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
494 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
496 /* this is a transparent proxy - remote the call */
498 /* save value to stack */
499 x86_push_reg (s->code, rreg);
501 x86_push_reg (s->code, X86_ESP);
502 x86_push_imm (s->code, tree->data.fi.field);
503 x86_push_imm (s->code, tree->data.fi.klass);
504 x86_push_reg (s->code, lreg);
505 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_store_remote_field);
506 x86_call_code (s->code, 0);
507 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
509 br [1] = s->code; x86_jump8 (s->code, 0);
511 x86_patch (br [0], s->code);
512 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
513 tree->data.fi.field->offset;
514 x86_mov_membase_reg (s->code, lreg, offset, rreg, 4);
516 x86_patch (br [1], s->code);
519 stmt: STIND_REF (addr, reg) {
520 PRINT_REG ("STIND_REF", tree->right->reg1);
522 switch (tree->left->data.ainfo.amode) {
525 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 4);
529 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
530 tree->left->data.ainfo.offset, tree->right->reg1, 4);
533 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
534 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
535 tree->right->reg1, 4);
538 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
539 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
540 tree->right->reg1, 4);
545 stmt: REMOTE_STIND_REF (reg, reg) {
548 int lreg = tree->left->reg1;
549 int rreg = tree->right->reg1;
558 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
559 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
560 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
562 /* this is a transparent proxy - remote the call */
564 /* save value to stack */
565 x86_push_reg (s->code, rreg);
567 x86_push_reg (s->code, X86_ESP);
568 x86_push_imm (s->code, tree->data.fi.field);
569 x86_push_imm (s->code, tree->data.fi.klass);
570 x86_push_reg (s->code, lreg);
571 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_store_remote_field);
572 x86_call_code (s->code, 0);
573 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
575 br [1] = s->code; x86_jump8 (s->code, 0);
577 x86_patch (br [0], s->code);
578 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
579 tree->data.fi.field->offset;
580 x86_mov_membase_reg (s->code, lreg, offset, rreg, 4);
582 x86_patch (br [1], s->code);
585 stmt: STIND_I1 (addr, reg) {
586 PRINT_REG ("STIND_I1", tree->right->reg1);
588 switch (tree->left->data.ainfo.amode) {
591 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 1);
595 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
596 tree->left->data.ainfo.offset, tree->right->reg1, 1);
599 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
600 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
601 tree->right->reg1, 1);
604 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
605 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
606 tree->right->reg1, 1);
611 stmt: REMOTE_STIND_I1 (reg, reg) {
614 int lreg = tree->left->reg1;
615 int rreg = tree->right->reg1;
624 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
625 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
626 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
628 /* this is a transparent proxy - remote the call */
630 /* save value to stack */
631 x86_push_reg (s->code, rreg);
633 x86_push_reg (s->code, X86_ESP);
634 x86_push_imm (s->code, tree->data.fi.field);
635 x86_push_imm (s->code, tree->data.fi.klass);
636 x86_push_reg (s->code, lreg);
637 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_store_remote_field);
638 x86_call_code (s->code, 0);
639 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
641 br [1] = s->code; x86_jump8 (s->code, 0);
643 x86_patch (br [0], s->code);
644 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
645 tree->data.fi.field->offset;
646 x86_mov_membase_reg (s->code, lreg, offset, rreg, 1);
648 x86_patch (br [1], s->code);
651 stmt: STIND_I2 (addr, reg) {
652 PRINT_REG ("STIND_I2", tree->right->reg1);
654 switch (tree->left->data.ainfo.amode) {
657 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 2);
661 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
662 tree->left->data.ainfo.offset, tree->right->reg1, 2);
665 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
666 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
667 tree->right->reg1, 2);
670 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
671 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
672 tree->right->reg1, 2);
677 stmt: REMOTE_STIND_I2 (reg, reg) {
680 int lreg = tree->left->reg1;
681 int rreg = tree->right->reg1;
690 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
691 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
692 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
694 /* this is a transparent proxy - remote the call */
696 /* save value to stack */
697 x86_push_reg (s->code, rreg);
699 x86_push_reg (s->code, X86_ESP);
700 x86_push_imm (s->code, tree->data.fi.field);
701 x86_push_imm (s->code, tree->data.fi.klass);
702 x86_push_reg (s->code, lreg);
703 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_store_remote_field);
704 x86_call_code (s->code, 0);
705 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
707 br [1] = s->code; x86_jump8 (s->code, 0);
709 x86_patch (br [0], s->code);
710 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
711 tree->data.fi.field->offset;
712 x86_mov_membase_reg (s->code, lreg, offset, rreg, 2);
714 x86_patch (br [1], s->code);
717 stmt: STIND_I4 (ADDR_L, reg) {
718 int treg = VARINFO (s, tree->left->data.i).reg;
720 if (treg != tree->right->reg1)
721 x86_mov_reg_reg (s->code, treg, tree->right->reg1, 4);
724 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
728 stmt: STIND_I4 (ADDR_L, coni4) {
729 int treg = VARINFO (s, tree->left->data.i).reg;
731 x86_mov_reg_imm (s->code, treg, tree->right->data.i);
734 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
738 reg: LDIND_I4 (ADDR_L) {
739 int treg = VARINFO (s, tree->left->data.i).reg;
741 if (treg != tree->reg1)
742 x86_mov_reg_reg (s->code, tree->reg1, treg, 4);
745 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
749 reg: LDIND_U4 (ADDR_L) {
750 int treg = VARINFO (s, tree->left->data.i).reg;
752 if (treg != tree->reg1)
753 x86_mov_reg_reg (s->code, tree->reg1, treg, 4);
756 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
760 stmt: STIND_REF (ADDR_L, reg) {
761 int treg = VARINFO (s, tree->left->data.i).reg;
763 if (treg != tree->right->reg1)
764 x86_mov_reg_reg (s->code, treg, tree->right->reg1, 4);
767 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
771 stmt: STIND_REF (ADDR_L, coni4) {
772 int treg = VARINFO (s, tree->left->data.i).reg;
774 if (treg != tree->right->reg1)
775 x86_mov_reg_imm (s->code, treg, tree->right->data.i);
778 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
782 reg: LDIND_REF (ADDR_L) {
783 int treg = VARINFO (s, tree->left->data.i).reg;
785 if (treg != tree->reg1)
786 x86_mov_reg_reg (s->code, tree->reg1, treg, 4);
789 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
793 reg: LDIND_I4 (addr) {
795 switch (tree->left->data.ainfo.amode) {
798 x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4);
802 x86_mov_reg_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
803 tree->left->data.ainfo.offset, 4);
806 x86_mov_reg_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
807 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, 4);
810 x86_mov_reg_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
811 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
812 tree->left->data.ainfo.shift, 4);
817 PRINT_REG ("LDIND_I4", tree->reg1);
820 reg: LDIND_REF (addr) {
822 switch (tree->left->data.ainfo.amode) {
825 x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4);
829 x86_mov_reg_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
830 tree->left->data.ainfo.offset, 4);
833 x86_mov_reg_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
834 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, 4);
837 x86_mov_reg_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
838 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
839 tree->left->data.ainfo.shift, 4);
844 PRINT_REG ("LDIND_REF", tree->reg1);
847 reg: LDIND_I1 (addr) {
848 switch (tree->left->data.ainfo.amode) {
851 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, TRUE, FALSE);
855 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
856 tree->left->data.ainfo.offset, TRUE, FALSE);
859 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
860 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, TRUE, FALSE);
863 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
864 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
865 tree->left->data.ainfo.shift, TRUE, FALSE);
869 PRINT_REG ("LDIND_I1", tree->reg1);
872 reg: LDIND_U1 (addr) {
873 switch (tree->left->data.ainfo.amode) {
876 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, FALSE, FALSE);
880 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
881 tree->left->data.ainfo.offset, FALSE, FALSE);
884 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
885 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, FALSE, FALSE);
888 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
889 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
890 tree->left->data.ainfo.shift, FALSE, FALSE);
894 PRINT_REG ("LDIND_U1", tree->reg1);
897 reg: LDIND_I2 (addr) {
898 switch (tree->left->data.ainfo.amode) {
901 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, TRUE, TRUE);
905 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
906 tree->left->data.ainfo.offset, TRUE, TRUE);
909 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
910 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, TRUE, TRUE);
913 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
914 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
915 tree->left->data.ainfo.shift, TRUE, TRUE);
919 PRINT_REG ("LDIND_U2", tree->reg1);
922 reg: LDIND_U2 (addr) {
923 switch (tree->left->data.ainfo.amode) {
926 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, FALSE, TRUE);
930 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
931 tree->left->data.ainfo.offset, FALSE, TRUE);
934 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
935 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, FALSE, TRUE);
938 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
939 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
940 tree->left->data.ainfo.shift, FALSE, TRUE);
944 PRINT_REG ("LDIND_U2", tree->reg1);
947 reg: LDIND_U4 (addr) {
948 switch (tree->left->data.ainfo.amode) {
951 x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4);
955 x86_mov_reg_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
956 tree->left->data.ainfo.offset, 4);
959 x86_mov_reg_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
960 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, 4);
963 x86_mov_reg_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
964 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
965 tree->left->data.ainfo.shift, 4);
969 PRINT_REG ("LDIND_U4", tree->reg1);
972 reg: REMOTE_LDFLDA (reg) {
975 int lreg = tree->left->reg1;
980 if (tree->reg1 != treg)
981 x86_push_reg (s->code, treg);
983 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
984 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
985 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
987 /* this is a transparent proxy - remote the call */
989 x86_push_reg (s->code, X86_EAX);
991 x86_push_reg (s->code, X86_EDX);
992 x86_push_reg (s->code, X86_ECX);
994 x86_push_reg (s->code, X86_ESP);
995 x86_push_imm (s->code, tree->data.fi.field);
996 x86_push_imm (s->code, tree->data.fi.klass);
997 x86_push_reg (s->code, lreg);
998 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_load_remote_field);
999 x86_call_code (s->code, 0);
1000 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
1002 if (treg != X86_EAX)
1003 x86_mov_reg_reg (s->code, treg, X86_EAX, 4);
1005 x86_pop_reg (s->code, X86_ECX);
1006 if (treg != X86_EDX)
1007 x86_pop_reg (s->code, X86_EDX);
1008 if (treg != X86_EAX)
1009 x86_pop_reg (s->code, X86_EAX);
1011 x86_mov_reg_reg (s->code, tree->reg1, treg, 4);
1013 br [1] = s->code; x86_jump8 (s->code, 0);
1015 x86_patch (br [0], s->code);
1016 if (tree->data.fi.klass->valuetype)
1017 x86_lea_membase (s->code, tree->reg1, lreg,
1018 tree->data.fi.field->offset - sizeof (MonoObject));
1020 x86_lea_membase (s->code, tree->reg1, lreg, tree->data.fi.field->offset);
1022 x86_patch (br [1], s->code);
1024 if (tree->reg1 != treg)
1025 x86_pop_reg (s->code, treg);
1029 int offset = VARINFO (s, tree->data.i).offset;
1031 x86_lea_membase (s->code, tree->reg1, X86_EBP, offset);
1033 PRINT_REG ("ADDR_L", tree->reg1);
1035 MBCOND (VARINFO (data, tree->data.i).reg < 0);
1041 x86_mov_reg_imm (s->code, tree->reg1, tree->data.p);
1044 reg: CONV_I1 (reg) {
1045 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, FALSE);
1048 reg: CONV_U1 (reg) {
1049 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
1052 reg: CONV_I2 (reg) {
1053 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, TRUE);
1056 reg: CONV_U2 (reg) {
1057 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
1060 # warning: this chain rule requires a register
1062 x86_mov_reg_imm (s->code, tree->reg1, tree->data.i);
1065 reg: CONV_I4 (reg) {
/* i4 -> i4 conversion: value is already the right width, so this is just a
 * register-to-register move, elided when source and destination coincide. */
1066 if (tree->reg1 != tree->left->reg1)
1067 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1068 PRINT_REG ("CONV_I4", tree->left->reg1);
1071 reg: CONV_U4 (reg) {
/* i4 -> u4 conversion: same 32-bit representation, so only a register move
 * is emitted (and only when the allocator picked different registers). */
1072 if (tree->reg1 != tree->left->reg1)
1073 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1074 PRINT_REG ("CONV_U4", tree->left->reg1);
1077 reg: CONV_OVF_I4 (reg) {
/* conv.ovf.i4 from an i4 source can never overflow, so no range check is
 * emitted -- this degenerates to a plain register move. */
1078 if (tree->reg1 != tree->left->reg1)
1079 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1080 PRINT_REG ("CONV_OVF_I4", tree->left->reg1);
1083 reg: CONV_OVF_U4 (reg) {
1084 /* Keep in sync with CONV_OVF_I4_UN below, they are the same on 32-bit machines */
/* Probe the sign bit (bit 31): a negative i4 value does not fit in a u4.
 * FIX: the mask was 0x8000000 (bit 27 only), which let negative values with
 * bit 27 clear pass without raising OverflowException. TEST sets ZF when the
 * masked bits are all zero; EMIT_COND_SYSTEM_EXCEPTION throws when the
 * condition (EQ, i.e. ZF set) does NOT hold. */
1085 x86_test_reg_imm (s->code, tree->left->reg1, 0x80000000);
1086 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1087 if (tree->reg1 != tree->left->reg1)
1088 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1091 reg: CONV_OVF_I4_UN (reg) {
1092 /* Keep in sync with CONV_OVF_U4 above, they are the same on 32-bit machines */
/* Probe the top bit (bit 31): a u4 value with it set exceeds INT32_MAX and
 * does not fit in an i4. FIX: the mask was 0x8000000 (bit 27 only), which
 * missed the actual sign bit and let out-of-range values pass unchecked. */
1093 x86_test_reg_imm (s->code, tree->left->reg1, 0x80000000);
1094 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1095 if (tree->reg1 != tree->left->reg1)
1096 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1099 reg: CONV_OVF_I1 (reg) {
1100 /* probe value to be within -128 to 127 */
1101 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 127);
1102 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, TRUE, "OverflowException");
1103 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, -128);
/* FIX: use GE, not GT -- -128 is itself a valid sbyte value, and GT made it
 * wrongly raise OverflowException. Mirrors the GE lower-bound check used by
 * the CONV_OVF_I2 rule below. */
1104 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GE, TRUE, "OverflowException");
1105 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, FALSE);
1108 reg: CONV_OVF_I1_UN (reg) {
1109 /* probe value to be within 0 to 127: any bit of 0xffffff80 set means the unsigned source exceeds the sbyte range */
1110 x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff80);
/* throws OverflowException unless TEST left ZF set (all high bits clear) */
1111 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1112 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
1115 reg: CONV_OVF_U1 (reg) {
1116 /* Keep in sync with CONV_OVF_U1_UN routine below, they are the same on 32-bit machines */
1117 /* probe value to be within 0 to 255 */
/* TEST sets ZF when the high 24 bits are all clear (value in 0..255);
 * the exception is raised when ZF is not set. */
1118 x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff00);
1119 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1120 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
1123 reg: CONV_OVF_U1_UN (reg) {
1124 /* Keep in sync with CONV_OVF_U1 routine above, they are the same on 32-bit machines */
1125 /* probe value to be within 0 to 255 */
/* identical check to CONV_OVF_U1: ZF clear (some high bit set) => overflow */
1126 x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff00);
1127 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1128 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
1131 reg: CONV_OVF_I2 (reg) {
1132 /* Probe value to be within -32768 and 32767 */
/* EMIT_COND_SYSTEM_EXCEPTION throws when its condition fails: LE skips the
 * throw for value <= 32767, GE skips it for value >= -32768, so only
 * out-of-range values reach the OverflowException path. */
1133 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 32767);
1134 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, TRUE, "OverflowException");
1135 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, -32768);
1136 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GE, TRUE, "OverflowException");
1137 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, TRUE);
1140 reg: CONV_OVF_U2 (reg) {
1141 /* Keep in sync with CONV_OVF_U2_UN below, they are the same on 32-bit machines */
1142 /* Probe value to be within 0 and 65535 */
1143 x86_test_reg_imm (s->code, tree->left->reg1, 0xffff0000);
/* CONSISTENCY FIX: signed flag was TRUE here but FALSE in CONV_OVF_U2_UN,
 * despite the keep-in-sync note. For X86_CC_EQ the emitted jump is the same
 * either way, so this only aligns the two twins. */
1144 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1145 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
1148 reg: CONV_OVF_U2_UN (reg) {
1149 /* Keep in sync with CONV_OVF_U2 above, they are the same on 32-bit machines */
1150 /* Probe value to be within 0 and 65535 */
/* TEST sets ZF when the high 16 bits are all clear; otherwise overflow */
1151 x86_test_reg_imm (s->code, tree->left->reg1, 0xffff0000);
1152 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1153 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
1156 reg: CONV_OVF_I2_UN (reg) {
1157 /* Convert uint value into short, value within 0 and 32767 */
/* mask 0xffff8000 includes bit 15: an unsigned source with bit 15 set would
 * become negative as a short, so it must overflow too */
1158 x86_test_reg_imm (s->code, tree->left->reg1, 0xffff8000);
1159 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1160 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
1163 reg: MUL (reg, coni4) "MB_USE_OPT1(0)" {
1164 unsigned int i, j, k, v;
1166 v = tree->right->data.i;
1167 for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
1172 if (v < 0 || i == 32 || v & k) {
1175 /* LEA r1, [r2 + r2*2] */
1176 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 1);
1179 /* LEA r1, [r2 + r2*4] */
1180 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1183 /* LEA r1, [r2 + r2*2] */
1185 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 1);
1186 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
1189 /* LEA r1, [r2 + r2*8] */
1190 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 3);
1193 /* LEA r1, [r2 + r2*4] */
1195 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1196 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
1199 /* LEA r1, [r2 + r2*2] */
1201 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 1);
1202 x86_shift_reg_imm (s->code, X86_SHL, tree->reg1, 2);
1205 /* LEA r1, [r2 + r2*4] */
1206 /* LEA r1, [r1 + r1*4] */
1207 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1208 x86_lea_memindex (s->code, tree->reg1, tree->reg1, 0, tree->reg1, 2);
1211 /* LEA r1, [r2 + r2*4] */
1213 /* LEA r1, [r1 + r1*4] */
1214 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1215 x86_shift_reg_imm (s->code, X86_SHL, tree->reg1, 2);
1216 x86_lea_memindex (s->code, tree->reg1, tree->reg1, 0, tree->reg1, 2);
1219 x86_imul_reg_reg_imm (s->code, tree->reg1, tree->left->reg1, tree->right->data.i);
1223 x86_shift_reg_imm (s->code, X86_SHL, tree->left->reg1, i);
1224 if (tree->reg1 != tree->left->reg1)
1225 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* --- 32-bit integer multiply rules ---
 * Convention used throughout this file: the operation is performed in the
 * left operand's register, then moved to tree->reg1 only if the allocator
 * assigned a different destination register. */
1229 reg: MUL (reg, reg) {
1230 x86_imul_reg_reg (s->code, tree->left->reg1, tree->right->reg1);
1232 if (tree->reg1 != tree->left->reg1)
1233 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Signed multiply with overflow check: IMUL sets OF/CF on signed overflow,
 * so branch-if-no-overflow skips the exception throw. */
1236 reg: MUL_OVF (reg, reg) {
1237 x86_imul_reg_reg (s->code, tree->left->reg1, tree->right->reg1);
1238 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
1240 if (tree->reg1 != tree->left->reg1)
1241 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Unsigned multiply with overflow check: uses the one-operand MUL, which
 * computes EDX:EAX = EAX * reg and so clobbers EAX/EDX. */
1244 reg: MUL_OVF_UN (reg, reg) {
/* right operand must not live in EAX, since EAX is overwritten below */
1245 mono_assert (tree->right->reg1 != X86_EAX);
1247 if (tree->left->reg1 != X86_EAX)
1248 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1250 x86_mul_reg (s->code, tree->right->reg1, FALSE);
1251 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
/* result contract: low word in EAX, high word in EDX */
1253 mono_assert (tree->reg1 == X86_EAX &&
1254 tree->reg2 == X86_EDX);
/* Signed divide by constant: scan for a power-of-two divisor (j walks the
 * single set bit, k masks the bits above it). */
1257 reg: DIV (reg, coni4) {
1258 unsigned int i, j, k, v;
1260 v = tree->right->data.i;
1261 for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
/* NOTE(review): SAR rounds toward negative infinity, while IL division
 * truncates toward zero — for negative dividends these differ. The lines
 * hidden in this excerpt may guard against that; confirm before relying on
 * this fast path. */
1266 x86_shift_reg_imm (s->code, X86_SAR, tree->left->reg1, i);
1267 if (tree->reg1 != tree->left->reg1)
1268 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* NOTE(review): the rule header for the following body is not visible in
 * this excerpt; from the identical power-of-two scan it is presumably
 * REM (reg, coni4) — verify against the full file. */
1271 unsigned int i, j, k, v;
1276 v = tree->right->data.i;
1277 for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
1282 if (i == 32 || v & k)
/* General signed divide: one-operand IDIV computes EDX:EAX / reg. */
1289 reg: DIV (reg, reg) {
/* right operand must not be EAX — EAX is loaded with the dividend */
1290 mono_assert (tree->right->reg1 != X86_EAX);
1292 if (tree->left->reg1 != X86_EAX)
1293 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
/* TRUE = signed: the helper sign-extends EAX into EDX (CDQ) first */
1296 x86_div_reg (s->code, tree->right->reg1, TRUE);
/* quotient in EAX, remainder in EDX */
1298 mono_assert (tree->reg1 == X86_EAX &&
1299 tree->reg2 == X86_EDX);
/* Unsigned divide by constant. Three strategies:
 *  1. power-of-two divisor  -> single SHR;
 *  2. otherwise             -> multiply by a 2^k scaled reciprocal
 *     (floor/ceil variants chosen by the rounding error r), taking the
 *     high half of the 64-bit product and shifting right by k-32;
 *  3. fallthrough           -> plain shift (tail visible at orig 1352).
 * Several lines of this rule are not visible in this excerpt. */
1302 reg: DIV_UN (reg, coni4) {
1303 unsigned int i, j, k, v;
1306 v = tree->right->data.i;
1307 for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
1312 if (i == 32 || v & k) {
/* NOTE(review): i and j are unsigned, so "--i >= 0" is always true and
 * "j" relies on shifting 0x80000000 — the loop presumably exits via a
 * break on a hidden line; confirm in the full file. */
1313 for (i = 32, j = 0x80000000; --i >= 0; j >>= 1) {
1317 /* k = 32 + number of significant bits in v - 1 */
/* f becomes 2^k / v computed in floating point */
1321 for (i = 0; i < k; i++) f *= 2.0f;
1327 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, k - 32);
1328 if (tree->reg1 != tree->left->reg1)
1329 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1330 } else if (r < 0.5f) {
1331 if (tree->left->reg1 != X86_EAX)
1332 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
/* floor(2^k/v) as the reciprocal; compensate by adding 1 to the dividend */
1333 x86_mov_reg_imm (s->code, X86_EDX, (guint32) floor(f));
1334 /* x86_inc_reg (s->code, X86_EAX); */
1335 /* INC is faster but we have to check for overflow. */
1336 x86_alu_reg_imm (s->code, X86_ADD, X86_EAX, 1);
/* on carry, skip the 2-byte MUL encoding that follows */
1337 x86_branch8(s->code, X86_CC_C, 2, FALSE);
1338 x86_mul_reg (s->code, X86_EDX, FALSE);
/* high half of the product (EDX), shifted down, is the quotient */
1339 x86_shift_reg_imm (s->code, X86_SHR, X86_EDX, k - 32);
1340 if (tree->reg1 != X86_EDX)
1341 x86_mov_reg_reg (s->code, tree->reg1, X86_EDX, 4);
/* ceil-reciprocal variant: no dividend adjustment needed */
1343 if (tree->left->reg1 != X86_EAX)
1344 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1345 x86_mov_reg_imm (s->code, X86_EDX, (guint32) ceil(f));
1346 x86_mul_reg (s->code, X86_EDX, FALSE);
1347 x86_shift_reg_imm (s->code, X86_SHR, X86_EDX, k - 32);
1348 if (tree->reg1 != X86_EDX)
1349 x86_mov_reg_reg (s->code, tree->reg1, X86_EDX, 4);
/* power-of-two path: unsigned shift right by log2(v) */
1352 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, i);
1353 if (tree->reg1 != tree->left->reg1)
1354 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Unsigned divide: zero EDX (zero-extend dividend to 64 bits), then DIV. */
1359 reg: DIV_UN (reg, reg) {
1360 mono_assert (tree->right->reg1 != X86_EAX);
1362 if (tree->left->reg1 != X86_EAX)
1363 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1365 x86_mov_reg_imm (s->code, X86_EDX, 0);
1366 x86_div_reg (s->code, tree->right->reg1, FALSE);
/* quotient in EAX, remainder in EDX */
1368 mono_assert (tree->reg1 == X86_EAX &&
1369 tree->reg2 == X86_EDX);
/* Signed remainder: IDIV leaves the remainder in EDX; copy it to EAX,
 * which is the contracted result register (tree->reg1). */
1372 reg: REM (reg, reg) {
1373 mono_assert (tree->right->reg1 != X86_EAX);
1374 mono_assert (tree->right->reg1 != X86_EDX);
1376 if (tree->left->reg1 != X86_EAX)
1377 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1379 /* sign extend to 64bit in EAX/EDX */
1381 x86_div_reg (s->code, tree->right->reg1, TRUE);
1382 x86_mov_reg_reg (s->code, X86_EAX, X86_EDX, 4);
1384 mono_assert (tree->reg1 == X86_EAX &&
1385 tree->reg2 == X86_EDX);
/* Unsigned remainder: same shape as REM but with DIV and a zeroed EDX. */
1388 reg: REM_UN (reg, reg) {
1389 mono_assert (tree->right->reg1 != X86_EAX);
1390 mono_assert (tree->right->reg1 != X86_EDX);
1392 if (tree->left->reg1 != X86_EAX)
1393 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1395 /* zero extend to 64bit in EAX/EDX */
1396 x86_mov_reg_imm (s->code, X86_EDX, 0);
1397 x86_div_reg (s->code, tree->right->reg1, FALSE);
1398 x86_mov_reg_reg (s->code, X86_EAX, X86_EDX, 4);
1400 mono_assert (tree->reg1 == X86_EAX &&
1401 tree->reg2 == X86_EDX);
/* --- add/subtract rules ---
 * The "MB_USE_OPT1(0)" cost makes the immediate forms free at opt level
 * >= 1 (see MB_USE_OPT1 in the file header). INC/DEC are used for +/- 1. */
1404 reg: ADD (reg, coni4) "MB_USE_OPT1(0)" {
1405 if (tree->right->data.i == 1)
1406 x86_inc_reg (s->code, tree->left->reg1);
1408 x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, tree->right->data.i);
1410 if (tree->reg1 != tree->left->reg1)
1411 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1415 reg: ADD (reg, reg) {
1416 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
1418 if (tree->reg1 != tree->left->reg1)
1419 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* signed overflow check: OF flag */
1422 reg: ADD_OVF (reg, reg) {
1423 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
1424 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
1426 if (tree->reg1 != tree->left->reg1)
1427 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* unsigned overflow check: CF flag */
1430 reg: ADD_OVF_UN (reg, reg) {
1431 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
1432 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
1434 if (tree->reg1 != tree->left->reg1)
1435 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1438 reg: SUB (reg, coni4) "MB_USE_OPT1(0)" {
1439 if (tree->right->data.i == 1)
1440 x86_dec_reg (s->code, tree->left->reg1);
1442 x86_alu_reg_imm (s->code, X86_SUB, tree->left->reg1, tree->right->data.i);
1444 if (tree->reg1 != tree->left->reg1)
1445 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1448 reg: SUB (reg, reg) {
1449 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
1451 if (tree->reg1 != tree->left->reg1)
1452 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1455 reg: SUB_OVF (reg, reg) {
1456 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
1457 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
1459 if (tree->reg1 != tree->left->reg1)
1460 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1463 reg: SUB_OVF_UN (reg, reg) {
1464 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
1465 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
1467 if (tree->reg1 != tree->left->reg1)
1468 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Materialize a comparison result as 0/1: SETcc on the byte register
 * (signed/unsigned variant chosen per condition code in tree->data.i),
 * then zero-extend the byte to 32 bits. Case labels are hidden in this
 * excerpt. */
1471 reg: CSET (cflags) {
1473 switch (tree->data.i) {
1475 x86_set_reg (s->code, X86_CC_EQ, tree->reg1, TRUE);
1478 x86_set_reg (s->code, X86_CC_GT, tree->reg1, TRUE);
1481 x86_set_reg (s->code, X86_CC_GT, tree->reg1, FALSE);
1484 x86_set_reg (s->code, X86_CC_LT, tree->reg1, TRUE);
1487 x86_set_reg (s->code, X86_CC_LT, tree->reg1, FALSE);
1490 g_assert_not_reached ();
1493 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
/* --- bitwise rules: AND/OR/XOR with immediate or register operand --- */
1496 reg: AND (reg, coni4) "MB_USE_OPT1(0)" {
1497 x86_alu_reg_imm (s->code, X86_AND, tree->left->reg1, tree->right->data.i);
1499 if (tree->reg1 != tree->left->reg1)
1500 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1503 reg: AND (reg, reg) {
1504 x86_alu_reg_reg (s->code, X86_AND, tree->left->reg1, tree->right->reg1);
1506 if (tree->reg1 != tree->left->reg1)
1507 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1510 reg: OR (reg, coni4) "MB_USE_OPT1(0)" {
1511 x86_alu_reg_imm (s->code, X86_OR, tree->left->reg1, tree->right->data.i);
1513 if (tree->reg1 != tree->left->reg1)
1514 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1517 reg: OR (reg, reg) {
1518 x86_alu_reg_reg (s->code, X86_OR, tree->left->reg1, tree->right->reg1);
1520 if (tree->reg1 != tree->left->reg1)
1521 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1524 reg: XOR (reg, coni4) "MB_USE_OPT1(0)" {
1525 x86_alu_reg_imm (s->code, X86_XOR, tree->left->reg1, tree->right->data.i);
1527 if (tree->reg1 != tree->left->reg1)
1528 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1531 reg: XOR (reg, reg) {
1532 x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->right->reg1);
1534 if (tree->reg1 != tree->left->reg1)
1535 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* NOTE(review): the next two bodies (NEG at orig 1539, NOT at orig 1546)
 * have their rule headers hidden in this excerpt. */
1539 x86_neg_reg (s->code, tree->left->reg1);
1541 if (tree->reg1 != tree->left->reg1)
1542 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1546 x86_not_reg (s->code, tree->left->reg1);
1548 if (tree->reg1 != tree->left->reg1)
1549 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* --- shift rules ---
 * Variable-count shifts require the count in CL, so the right operand is
 * copied into ECX; the asserts verify the allocator kept ECX free.
 * SHL = left shift, SAR = arithmetic (signed) right, SHR = logical
 * (unsigned) right. */
1552 reg: SHL (reg, coni4) {
1553 x86_shift_reg_imm (s->code, X86_SHL, tree->left->reg1, tree->right->data.i);
1555 if (tree->reg1 != tree->left->reg1)
1556 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1559 reg: SHL (reg, reg) {
1560 if (tree->right->reg1 != X86_ECX)
1561 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
1562 x86_shift_reg (s->code, X86_SHL, tree->left->reg1);
1564 if (tree->reg1 != tree->left->reg1)
1565 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1567 mono_assert (tree->reg1 != X86_ECX &&
1568 tree->left->reg1 != X86_ECX);
1571 reg: SHR (reg, coni4) {
1572 x86_shift_reg_imm (s->code, X86_SAR, tree->left->reg1, tree->right->data.i);
1574 if (tree->reg1 != tree->left->reg1)
1575 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1578 reg: SHR (reg, reg) {
1579 if (tree->right->reg1 != X86_ECX)
1580 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
1581 x86_shift_reg (s->code, X86_SAR, tree->left->reg1);
1583 if (tree->reg1 != tree->left->reg1)
1584 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1586 mono_assert (tree->reg1 != X86_ECX &&
1587 tree->left->reg1 != X86_ECX);
1590 reg: SHR_UN (reg, coni4) {
1591 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, tree->right->data.i);
1593 if (tree->reg1 != tree->left->reg1)
1594 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1597 reg: SHR_UN (reg, reg) {
1598 if (tree->right->reg1 != X86_ECX)
1599 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
1600 x86_shift_reg (s->code, X86_SHR, tree->left->reg1);
1602 if (tree->reg1 != tree->left->reg1)
1603 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1605 mono_assert (tree->reg1 != X86_ECX &&
1606 tree->left->reg1 != X86_ECX);
/* Static-field address: C-call to mono_ldsflda(klass, offset), with the
 * caller-saved EAX/ECX/EDX spilled around the call (EAX only if it is not
 * the destination). This save/call/restore pattern repeats for all the
 * runtime-helper rules below. */
1609 reg: LDSFLDA (coni4) {
1610 if (tree->reg1 != X86_EAX)
1611 x86_push_reg (s->code, X86_EAX);
1612 x86_push_reg (s->code, X86_ECX);
1613 x86_push_reg (s->code, X86_EDX);
1615 x86_push_imm (s->code, tree->left->data.i);
1616 x86_push_imm (s->code, tree->data.klass);
/* s->code + 1 skips the CALL opcode byte so the patch targets the disp32 */
1617 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_ldsflda);
1618 x86_call_code (s->code, 0);
1619 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
1621 x86_pop_reg (s->code, X86_EDX);
1622 x86_pop_reg (s->code, X86_ECX);
1623 if (tree->reg1 != X86_EAX) {
1624 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1625 x86_pop_reg (s->code, X86_EAX);
/* NOTE(review): body below (orig 1631) belongs to a rule — presumably
 * LDLEN — whose header is hidden in this excerpt. */
1631 x86_mov_reg_membase (s->code, tree->reg1, tree->left->reg1,
1632 G_STRUCT_OFFSET (MonoArray, max_length), 4);
/* Array element address, constant index: bounds-check against max_length,
 * then add the precomputed byte offset (elem_size * index + vector). */
1635 reg: LDELEMA (reg, coni4) {
1638 x86_alu_membase_imm (s->code, X86_CMP, tree->left->reg1, G_STRUCT_OFFSET (MonoArray, max_length), tree->right->data.i);
1639 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GE, FALSE, "IndexOutOfRangeException");
1641 ind = tree->data.i * tree->right->data.i + G_STRUCT_OFFSET (MonoArray, vector);
1643 x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, ind);
1645 if (tree->reg1 != tree->left->reg1)
1646 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Array element address, variable index: bounds check, then LEA with a
 * scaled index for element sizes 1/2/4/8, or IMUL + ADD otherwise. */
1650 reg: LDELEMA (reg, reg) {
1652 x86_alu_reg_membase (s->code, X86_CMP, tree->right->reg1, tree->left->reg1, G_STRUCT_OFFSET (MonoArray, max_length));
1653 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LT, FALSE, "IndexOutOfRangeException");
1655 if (tree->data.i == 1 || tree->data.i == 2 ||
1656 tree->data.i == 4 || tree->data.i == 8) {
1657 static int fast_log2 [] = { 1, 0, 1, -1, 2, -1, -1, -1, 3 };
1658 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1,
1659 G_STRUCT_OFFSET (MonoArray, vector), tree->right->reg1,
1660 fast_log2 [tree->data.i]);
1662 x86_imul_reg_reg_imm (s->code, tree->right->reg1, tree->right->reg1, tree->data.i);
1663 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->right->reg1);
1664 x86_alu_reg_imm (s->code, X86_ADD, tree->reg1, G_STRUCT_OFFSET (MonoArray, vector));
/* --- object/array allocation helper-call rules ---
 * All follow the same pattern: spill EAX/ECX/EDX, push arguments, call a
 * runtime wrapper through a patched absolute CALL, pop arguments, restore
 * registers, and move the EAX result into tree->reg1 if needed.
 * NOTE(review): this first body (orig 1669) belongs to a rule —
 * presumably LDSTR — whose header is hidden in this excerpt. */
1669 if (tree->reg1 != X86_EAX)
1670 x86_push_reg (s->code, X86_EAX);
1671 x86_push_reg (s->code, X86_ECX);
1672 x86_push_reg (s->code, X86_EDX);
1674 x86_push_imm (s->code, tree->data.p);
1675 x86_push_imm (s->code, s->method->klass->image);
1676 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_ldstr_wrapper);
1677 x86_call_code (s->code, 0);
1678 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
1680 x86_pop_reg (s->code, X86_EDX);
1681 x86_pop_reg (s->code, X86_ECX);
1682 if (tree->reg1 != X86_EAX) {
1683 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1684 x86_pop_reg (s->code, X86_EAX);
1687 PRINT_REG ("LDSTR", tree->reg1);
/* NOTE(review): header hidden; this is the NEWARR body (calls
 * mono_array_new_wrapper with (eclass, length)). */
1691 if (tree->reg1 != X86_EAX)
1692 x86_push_reg (s->code, X86_EAX);
1693 x86_push_reg (s->code, X86_ECX);
1694 x86_push_reg (s->code, X86_EDX);
1696 x86_push_reg (s->code, tree->left->reg1);
1697 x86_push_imm (s->code, tree->data.p);
1698 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_array_new_wrapper);
1699 x86_call_code (s->code, 0);
1700 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer) + 4);
1702 x86_pop_reg (s->code, X86_EDX);
1703 x86_pop_reg (s->code, X86_ECX);
1704 if (tree->reg1 != X86_EAX) {
1705 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1706 x86_pop_reg (s->code, X86_EAX);
1709 PRINT_REG ("NEWARR", tree->reg1);
/* Array allocation when the vtable is already known. */
1712 reg: NEWARR_SPEC (reg) {
1713 if (tree->reg1 != X86_EAX)
1714 x86_push_reg (s->code, X86_EAX);
1715 x86_push_reg (s->code, X86_ECX);
1716 x86_push_reg (s->code, X86_EDX);
1718 x86_push_reg (s->code, tree->left->reg1);
1719 x86_push_imm (s->code, tree->data.p);
1720 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_array_new_specific);
1721 x86_call_code (s->code, 0);
1722 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer) + 4);
1724 x86_pop_reg (s->code, X86_EDX);
1725 x86_pop_reg (s->code, X86_ECX);
1726 if (tree->reg1 != X86_EAX) {
1727 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1728 x86_pop_reg (s->code, X86_EAX);
1731 PRINT_REG ("NEWARR_SPEC", tree->reg1);
/* NOTE(review): header hidden; NEWOBJ body (mono_object_new_wrapper). */
1735 if (tree->reg1 != X86_EAX)
1736 x86_push_reg (s->code, X86_EAX);
1737 x86_push_reg (s->code, X86_ECX);
1738 x86_push_reg (s->code, X86_EDX);
1740 x86_push_imm (s->code, tree->data.klass);
1741 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_object_new_wrapper);
1742 x86_call_code (s->code, 0);
1743 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer));
1745 x86_pop_reg (s->code, X86_EDX);
1746 x86_pop_reg (s->code, X86_ECX);
1747 if (tree->reg1 != X86_EAX) {
1748 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1749 x86_pop_reg (s->code, X86_EAX);
1751 PRINT_REG ("NEWOBJ", tree->reg1);
/* NOTE(review): header hidden; NEWOBJ_SPEC body
 * (mono_object_new_specific with a known vtable). */
1755 if (tree->reg1 != X86_EAX)
1756 x86_push_reg (s->code, X86_EAX);
1757 x86_push_reg (s->code, X86_ECX);
1758 x86_push_reg (s->code, X86_EDX);
1760 x86_push_imm (s->code, tree->data.p);
1761 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_object_new_specific);
1762 x86_call_code (s->code, 0);
1763 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer));
1765 x86_pop_reg (s->code, X86_EDX);
1766 x86_pop_reg (s->code, X86_ECX);
1767 if (tree->reg1 != X86_EAX) {
1768 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1769 x86_pop_reg (s->code, X86_EAX);
1771 PRINT_REG ("NEWOBJ_SPEC", tree->reg1);
/* NOTE(review): header hidden; LOCALLOC body — carves `sa` (the size,
 * presumably stack-aligned on a hidden line) off ESP and returns ESP. */
1775 int size = tree->data.i;
1778 mono_assert (size > 0);
1783 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, sa);
1784 x86_mov_reg_reg (s->code, tree->reg1, X86_ESP, 4);
/* NOTE(review): header hidden; UNBOX body — checks obj->vtable->klass->
 * element_class against the expected class, throws InvalidCastException
 * on mismatch, then returns the address past the MonoObject header. */
1788 if (tree->reg1 != tree->left->reg1)
1789 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1791 x86_push_reg (s->code, tree->reg1);
1792 x86_mov_reg_membase (s->code, tree->reg1, tree->reg1, 0, 4);
1793 x86_mov_reg_membase (s->code, tree->reg1, tree->reg1, 0, 4);
1794 x86_alu_membase_imm (s->code, X86_CMP, tree->reg1,
1795 G_STRUCT_OFFSET (MonoClass, element_class), ((int)(tree->data.klass->element_class)));
1796 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "InvalidCastException");
1797 x86_pop_reg (s->code, tree->reg1);
1798 x86_alu_reg_imm (s->code, X86_ADD, tree->reg1, sizeof (MonoObject));
/* Cast check: null passes; interfaces are checked via the vtable's
 * interface_offsets table; arrays via rank + element-class baseval/diffval
 * range test; plain classes via the class baseval/diffval range test
 * (with a transparent-proxy detour for marshal-by-ref types).
 * On failure an InvalidCastException is thrown; on success the original
 * object pointer (saved on the stack) is popped into tree->reg1. */
1801 reg: CASTCLASS (reg) {
1802 MonoClass *klass = tree->data.klass;
1804 int lreg = tree->left->reg1;
/* save obj; lreg is clobbered by the vtable walks below */
1806 x86_push_reg (s->code, lreg);
1807 x86_test_reg_reg (s->code, lreg, lreg);
/* null objects always cast successfully */
1808 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
1810 if (klass->flags & TYPE_ATTRIBUTE_INTERFACE) {
1811 /* lreg = obj->vtable */
1812 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
1814 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoVTable, max_interface_id),
1815 klass->interface_id);
1816 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GE, FALSE, "InvalidCastException");
1817 /* lreg = obj->vtable->interface_offsets */
1818 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
/* a zero slot means the interface is not implemented */
1819 x86_alu_membase_imm (s->code, X86_CMP, lreg, klass->interface_id << 2, 0);
1820 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NE, FALSE, "InvalidCastException");
1823 /* lreg = obj->vtable */
1824 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
1825 /* lreg = obj->vtable->klass */
1826 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
/* array case: rank must match, then range-check the element class */
1830 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoClass, rank), klass->rank);
1831 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "InvalidCastException");
1832 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, element_class), 4);
1833 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
/* subtype test: (baseval - klass->baseval) <= klass->diffval */
1834 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->element_class->baseval);
1835 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->element_class->diffval);
1836 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, FALSE, "InvalidCastException");
1840 if (klass->marshalbyref) {
1841 /* check for transparent_proxy */
1842 x86_alu_reg_imm (s->code, X86_CMP, lreg, (int)mono_defaults.transparent_proxy_class);
1843 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
/* reload obj from the stack and use the proxied class instead */
1846 x86_mov_reg_membase (s->code, lreg, X86_ESP, 0, 4);
1847 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoTransparentProxy,
1850 x86_patch (br [1], s->code);
1853 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
1854 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->baseval);
1855 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->diffval);
1856 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, FALSE, "InvalidCastException");
1860 x86_patch (br [0], s->code);
1861 x86_pop_reg (s->code, tree->reg1);
/* NOTE(review): header hidden; this is the ISINST body — same structure
 * as CASTCLASS but, instead of throwing, it overwrites the saved object
 * pointer on the stack with 0 (NULL result) when a check fails. */
1865 MonoClass *klass = tree->data.klass;
1867 int lreg = tree->left->reg1;
1869 x86_push_reg (s->code, lreg);
1870 x86_test_reg_reg (s->code, lreg, lreg);
1871 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
1873 if (klass->flags & TYPE_ATTRIBUTE_INTERFACE) {
1874 /* lreg = obj->vtable */
1875 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
1877 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoVTable, max_interface_id),
1878 klass->interface_id);
1879 br [1] = s->code; x86_branch8 (s->code, X86_CC_LT, 0, FALSE);
1880 /* lreg = obj->vtable->interface_offsets */
1881 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
1882 x86_alu_membase_imm (s->code, X86_CMP, lreg, klass->interface_id << 2, 0);
1883 br [2] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
1884 x86_patch (br [1], s->code);
/* failed: replace saved obj with NULL */
1885 x86_mov_membase_imm (s->code, X86_ESP, 0, 0, 4);
1886 x86_patch (br [2], s->code);
1890 /* lreg = obj->vtable */
1891 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
1892 /* lreg = obj->vtable->klass */
1893 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
1897 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoClass, rank), klass->rank);
1898 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
1899 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, element_class), 4);
1900 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
1901 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->element_class->baseval);
1902 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->element_class->diffval);
1903 br [2] = s->code; x86_branch8 (s->code, X86_CC_LE, 0, FALSE);
1904 x86_patch (br [1], s->code);
1905 x86_mov_membase_imm (s->code, X86_ESP, 0, 0, 4);
1906 x86_patch (br [2], s->code);
1910 if (klass->marshalbyref) {
1911 /* check for transparent_proxy */
1912 x86_alu_reg_imm (s->code, X86_CMP, lreg, (int)mono_defaults.transparent_proxy_class);
1913 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
1916 x86_mov_reg_membase (s->code, lreg, X86_ESP, 0, 4);
1917 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoTransparentProxy,
1919 x86_patch (br [1], s->code);
1922 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
1923 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->baseval);
1924 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->diffval);
1925 br [2] = s->code; x86_branch8 (s->code, X86_CC_LE, 0, FALSE);
1926 x86_mov_membase_imm (s->code, X86_ESP, 0, 0, 4);
1927 x86_patch (br [2], s->code);
1931 x86_patch (br [0], s->code);
1932 x86_pop_reg (s->code, tree->reg1);
/* Zero-initialize a value type at the address in the left register.
 * Small sizes (1/2/4 bytes) use a single immediate store; larger sizes
 * use REP STOSD for the dword part and STOSB for the 1-3 trailing bytes,
 * saving/restoring EAX, ECX and (if needed) EDI around the string ops. */
1935 stmt: INITOBJ (reg) {
1940 if (i == 1 || i == 2 || i == 4) {
1941 x86_mov_membase_imm (s->code, tree->left->reg1, 0, 0, i);
/* i = dword count, j = leftover byte count */
1945 i = tree->data.i / 4;
1946 j = tree->data.i % 4;
1948 x86_push_reg (s->code, X86_EAX);
1950 if (tree->left->reg1 != X86_EDI) {
1951 x86_push_reg (s->code, X86_EDI);
1952 x86_mov_reg_reg (s->code, X86_EDI, tree->left->reg1, 4);
1956 x86_push_reg (s->code, X86_ECX);
/* EAX = 0 (the fill value), ECX = dword count for REP STOSD */
1957 x86_alu_reg_reg (s->code, X86_XOR, X86_EAX, X86_EAX);
1958 x86_mov_reg_imm (s->code, X86_ECX, i);
1960 x86_prefix (s->code, X86_REP_PREFIX);
1961 x86_stosl (s->code);
1962 x86_pop_reg (s->code, X86_ECX);
1964 for (i = 0; i < j; i++)
1965 x86_stosb (s->code);
/* alternate small-tail path: explicit 2-byte + 1-byte stores */
1969 x86_mov_membase_imm (s->code, X86_EDI, 0, 0, 2);
1970 x86_mov_membase_imm (s->code, X86_EDI, 2, 0, 1);
1975 if (tree->left->reg1 != X86_EDI)
1976 x86_pop_reg (s->code, X86_EDI);
1978 x86_pop_reg (s->code, X86_EAX);
/* NOTE(review): header hidden; this is the BR body — an unconditional
 * jump patched to the target basic block. */
1986 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_BB, tree->data.bb);
1987 x86_jump32 (s->code, 0);
/* Comparisons only set EFLAGS; a consuming CSET or CBRANCH rule reads
 * them via the cflags nonterminal. */
1990 cflags: COMPARE (reg, coni4) {
1991 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
1994 cflags: COMPARE (reg, reg) {
1995 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
/* Conditional branch on previously-set flags. The TRUE/FALSE argument
 * selects the signed vs unsigned condition-code variant; case labels are
 * hidden in this excerpt. s->code + 2 points at the Jcc rel32 field. */
1999 stmt: CBRANCH (cflags) {
2000 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
2002 switch (tree->data.bi.cond) {
2004 x86_branch32 (s->code, X86_CC_LT, 0, TRUE);
2007 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
2010 x86_branch32 (s->code, X86_CC_GT, 0, TRUE);
2013 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
2016 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
2019 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
2022 x86_branch32 (s->code, X86_CC_GE, 0, TRUE);
2025 x86_branch32 (s->code, X86_CC_GE, 0, FALSE);
2028 x86_branch32 (s->code, X86_CC_LE, 0, TRUE);
2031 x86_branch32 (s->code, X86_CC_LE, 0, FALSE);
2034 g_assert_not_reached ();
/* branch if register is non-zero / zero: TEST reg,reg then Jcc */
2038 stmt: BRTRUE (reg) {
2039 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
2040 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb);
2041 x86_branch32 (s->code, X86_CC_NE, 0, TRUE);
2044 stmt: BRFALSE (reg) {
2045 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
2046 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bb);
2047 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
/* NOTE(review): headers hidden for the next three bodies — presumably
 * BREAK (int3), RET (value in EAX, jump to epilog) and RET_VOID. */
2051 x86_breakpoint (s->code);
2055 if (tree->left->reg1 != X86_EAX)
2056 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
/* the final RET in a method falls through to the epilog instead */
2058 if (!tree->last_instr) {
2059 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_EPILOG, NULL);
2060 x86_jump32 (s->code, 0);
2065 if (!tree->last_instr) {
2066 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_EPILOG, NULL);
2067 x86_jump32 (s->code, 0);
/* Argument-push rules: each pushes one call argument onto the x86 stack.
 * The LDIND forms push directly from memory, dispatching on the address
 * mode computed by the addr nonterminal (immediate / base / index /
 * base+index); case labels are hidden in this excerpt. */
2072 stmt: ARG_I4 (LDIND_I4 (addr)) {
2073 MBTree *at = tree->left->left;
2075 switch (at->data.ainfo.amode) {
2078 x86_push_mem (s->code, at->data.ainfo.offset);
2082 x86_push_membase (s->code, at->data.ainfo.basereg, at->data.ainfo.offset);
2085 x86_push_memindex (s->code, X86_NOBASEREG, at->data.ainfo.offset,
2086 at->data.ainfo.indexreg, at->data.ainfo.shift);
2089 x86_push_memindex (s->code, at->data.ainfo.basereg,
2090 at->data.ainfo.offset, at->data.ainfo.indexreg,
2091 at->data.ainfo.shift);
/* identical to the LDIND_I4 form — a 32-bit push is sign-agnostic */
2096 stmt: ARG_I4 (LDIND_U4 (addr)) {
2097 MBTree *at = tree->left->left;
2099 switch (at->data.ainfo.amode) {
2102 x86_push_mem (s->code, at->data.ainfo.offset);
2106 x86_push_membase (s->code, at->data.ainfo.basereg, at->data.ainfo.offset);
2109 x86_push_memindex (s->code, X86_NOBASEREG, at->data.ainfo.offset,
2110 at->data.ainfo.indexreg, at->data.ainfo.shift);
2113 x86_push_memindex (s->code, at->data.ainfo.basereg,
2114 at->data.ainfo.offset, at->data.ainfo.indexreg,
2115 at->data.ainfo.shift);
2120 stmt: ARG_I4 (reg) {
2121 x86_push_reg (s->code, tree->left->reg1);
2122 PRINT_REG ("ARG_I4", tree->left->reg1);
2125 # fixme: we must free the allocated strings somewhere
/* Marshal a MonoString argument: reserve the arg slot, call
 * mono_string_to_utf8 (saving EAX/ECX/EDX), and store the returned
 * UTF-8 pointer into the reserved slot (ESP+12 after the three pushes). */
2126 stmt: ARG_STRING (reg) {
2127 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
2128 x86_push_reg (s->code, X86_EAX);
2129 x86_push_reg (s->code, X86_ECX);
2130 x86_push_reg (s->code, X86_EDX);
2132 x86_push_reg (s->code, tree->left->reg1);
2133 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_string_to_utf8);
2134 x86_call_code (s->code, 0);
2135 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
2137 x86_mov_membase_reg (s->code, X86_ESP, 12, X86_EAX, 4);
2139 x86_pop_reg (s->code, X86_EDX);
2140 x86_pop_reg (s->code, X86_ECX);
2141 x86_pop_reg (s->code, X86_EAX);
2144 stmt: ARG_I4 (ADDR_G) {
2145 x86_push_imm (s->code, tree->left->data.p);
2148 stmt: ARG_I4 (coni4) "MB_USE_OPT1(0)" {
2149 x86_push_imm (s->code, tree->left->data.i);
/* NOTE(review): header hidden; presumably the `this` nonterminal rule. */
2153 PRINT_REG ("THIS", tree->reg1);
/* Null-check `this`: a CMP through the pointer faults (raising the
 * NullReferenceException via the fault handler) when the object is NULL. */
2156 reg: CHECKTHIS (reg) {
2157 /* try to access the vtable - this will raise an exception
2158 * if the object is NULL */
2159 x86_alu_membase_imm (s->code, X86_CMP, tree->left->reg1, 0, 0);
2160 if (tree->reg1 != tree->left->reg1)
2161 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
2164 stmt: CHECKTHIS (reg) {
2165 x86_alu_membase_imm (s->code, X86_CMP, tree->left->reg1, 0, 0);
/* --- CALL_I4 rules: calls returning a 32-bit value in EAX ---
 * Shared shape: push `this` if present, push a hidden valuetype-return
 * address (EBP + vtype_num offset) if the callee returns a struct, emit
 * the call (direct / virtual / interface), then pop the argument area. */
2170 reg: CALL_I4 (this, reg) {
2172 int lreg = tree->left->reg1;
2173 int rreg = tree->right->reg1;
/* find a scratch register (treg) distinct from both operands; the
 * candidate-stepping lines are hidden in this excerpt */
2175 if (lreg == treg || rreg == treg)
2177 if (lreg == treg || rreg == treg)
2179 if (lreg == treg || rreg == treg)
2180 mono_assert_not_reached ();
2182 if (tree->left->op != MB_TERM_NOP) {
2183 mono_assert (lreg >= 0);
2184 x86_push_reg (s->code, lreg);
2187 if (tree->data.ci.vtype_num) {
2188 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
2189 x86_lea_membase (s->code, treg, X86_EBP, offset);
2190 x86_push_reg (s->code, treg);
/* indirect call through the function pointer in rreg */
2193 x86_call_reg (s->code, rreg);
2195 if (tree->data.ci.args_size)
2196 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
2198 PRINT_REG ("CALL_I4", tree->reg1);
2200 mono_assert (tree->reg1 == X86_EAX);
/* direct call to a known address (patched via MONO_JUMP_INFO_ABS) */
2203 reg: CALL_I4 (this, ADDR_G) {
2204 int lreg = tree->left->reg1;
2210 if (tree->left->op != MB_TERM_NOP) {
2211 mono_assert (lreg >= 0);
2212 x86_push_reg (s->code, lreg);
/* faulting CMP null-checks `this` before the call */
2213 x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0);
2216 if (tree->data.ci.vtype_num) {
2217 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
2218 x86_lea_membase (s->code, treg, X86_EBP, offset);
2219 x86_push_reg (s->code, treg);
2222 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, tree->right->data.p);
2223 x86_call_code (s->code, 0);
2225 if (tree->data.ci.args_size)
2226 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
2228 PRINT_REG ("CALL_I4", tree->reg1);
2230 mono_assert (tree->reg1 == X86_EAX);
/* Load an interface method pointer:
 * obj -> vtable -> interface_offsets[iface_id] -> slot */
2233 reg: LDVIRTFTN (reg, INTF_ADDR) {
2234 int lreg = tree->left->reg1;
2236 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2237 x86_mov_reg_membase (s->code, lreg, lreg,
2238 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
2239 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
2240 x86_mov_reg_membase (s->code, tree->reg1, lreg, tree->right->data.m->slot << 2, 4);
/* interface call: same vtable walk, then an indirect call on the slot */
2243 reg: CALL_I4 (this, INTF_ADDR) {
2244 int lreg = tree->left->reg1;
2250 if (tree->left->op != MB_TERM_NOP) {
2251 mono_assert (lreg >= 0);
2252 x86_push_reg (s->code, lreg);
2255 if (tree->data.ci.vtype_num) {
2256 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
2257 x86_lea_membase (s->code, treg, X86_EBP, offset);
2258 x86_push_reg (s->code, treg);
2261 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2262 x86_mov_reg_membase (s->code, lreg, lreg,
2263 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
2264 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
2265 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
2267 if (tree->data.ci.args_size)
2268 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
2270 PRINT_REG ("CALL_I4(INTERFACE)", tree->reg1);
2272 mono_assert (tree->reg1 == X86_EAX);
/* load a virtual method pointer straight from the vtable slot */
2275 reg: LDVIRTFTN (reg, VFUNC_ADDR) {
2276 int lreg = tree->left->reg1;
2278 x86_mov_reg_membase (s->code, tree->reg1, lreg, 0, 4);
2280 x86_mov_reg_membase (s->code, tree->reg1, tree->reg1, G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2), 4);
/* NOTE(review): header hidden; this is the LDFTN body — compiles the
 * method via arch_compile_method and returns the native entry point. */
2284 if (tree->reg1 != X86_EAX)
2285 x86_push_reg (s->code, X86_EAX);
2286 x86_push_reg (s->code, X86_ECX);
2287 x86_push_reg (s->code, X86_EDX);
2289 x86_push_imm (s->code, tree->data.m);
2290 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, arch_compile_method);
2291 x86_call_code (s->code, 0);
2292 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer));
2294 x86_pop_reg (s->code, X86_EDX);
2295 x86_pop_reg (s->code, X86_ECX);
2296 if (tree->reg1 != X86_EAX) {
2297 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
2298 x86_pop_reg (s->code, X86_EAX);
2300 PRINT_REG ("LDFTN", tree->reg1);
/* virtual call through the vtable slot of `this` */
2304 reg: CALL_I4 (this, VFUNC_ADDR) {
2305 int lreg = tree->left->reg1;
2311 if (tree->left->op != MB_TERM_NOP) {
2312 mono_assert (lreg >= 0);
2313 x86_push_reg (s->code, lreg);
2316 if (tree->data.ci.vtype_num) {
2317 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
2318 x86_lea_membase (s->code, treg, X86_EBP, offset);
2319 x86_push_reg (s->code, treg);
2322 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2323 x86_call_virtual (s->code, lreg,
2324 G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
2326 if (tree->data.ci.args_size)
2327 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
2329 PRINT_REG ("CALL_I4(VIRTUAL)", tree->reg1);
2331 mono_assert (tree->reg1 == X86_EAX);
/* Void call through an absolute address (patched in via jump info). */
2334 stmt: CALL_VOID (this, ADDR_G) {
2335 int lreg = tree->left->reg1;
2341 if (tree->left->op != MB_TERM_NOP) {
2342 mono_assert (lreg >= 0);
2343 x86_push_reg (s->code, lreg);
/* Dereferences [this + 0] — presumably an explicit null check that faults
 * before the call; NOTE(review): confirm against upstream. */
2344 x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0);
2347 if (tree->data.ci.vtype_num) {
2348 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
2349 x86_lea_membase (s->code, treg, X86_EBP, offset);
2350 x86_push_reg (s->code, treg);
/* Record a relocation at the call's 32-bit displacement (s->code + 1). */
2353 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, tree->right->data.p);
2354 x86_call_code (s->code, 0);
2356 if (tree->data.ci.args_size)
2357 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
/* Void call through an interface: vtable -> interface_offsets ->
 * per-interface method table (indexed by interface_id) -> slot. */
2360 stmt: CALL_VOID (this, INTF_ADDR) {
2361 int lreg = tree->left->reg1;
2367 if (tree->left->op != MB_TERM_NOP) {
2368 mono_assert (lreg >= 0);
2369 x86_push_reg (s->code, lreg);
2372 if (tree->data.ci.vtype_num) {
2373 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
2374 x86_lea_membase (s->code, treg, X86_EBP, offset);
2375 x86_push_reg (s->code, treg);
2378 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2379 x86_mov_reg_membase (s->code, lreg, lreg,
2380 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
2381 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
2382 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
2384 if (tree->data.ci.args_size)
2385 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
/* Void virtual call: same vtable dispatch as CALL_I4(VFUNC_ADDR), no result. */
2388 stmt: CALL_VOID (this, VFUNC_ADDR) {
2389 int lreg = tree->left->reg1;
2395 if (tree->left->op != MB_TERM_NOP) {
2396 mono_assert (lreg >= 0);
2397 x86_push_reg (s->code, lreg);
2400 if (tree->data.ci.vtype_num) {
2401 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
2402 x86_lea_membase (s->code, treg, X86_EBP, offset);
2403 x86_push_reg (s->code, treg);
2406 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2407 x86_call_virtual (s->code, lreg,
2408 G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
2410 if (tree->data.ci.args_size)
2411 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
/* Switch dispatch: jt[0] is the case count; out-of-range indices branch to the
 * default target stored at jt[jt[0] + 1], otherwise jump indirectly through
 * the table. The (guint32) cast of the code pointer is 32-bit-only. */
2414 stmt: SWITCH (reg) {
2416 guint32 *jt = (guint32 *)tree->data.p;
2418 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, jt [0]);
/* 6 = length of the branch32 instruction about to be emitted. */
2419 offset = 6 + (guint32)s->code;
2420 x86_branch32 (s->code, X86_CC_GE, jt [jt [0] + 1] - offset, FALSE);
/* Scaled indexed load: EAX = table[index], table base at data.i + 4. */
2422 x86_mov_reg_memindex (s->code, X86_EAX, X86_NOBASEREG,
2423 tree->data.i + 4, tree->left->reg1, 2, 4);
2424 x86_jump_reg (s->code, X86_EAX);
/* Narrowing/widening conversions to small ints: movsx/movzx on byte or word.
 * x86_widen_reg flags: (is_signed, is_half). */
2431 reg: CONV_I1 (lreg) {
/* sign-extend low byte */
2432 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, FALSE);
2435 reg: CONV_U1 (lreg) {
/* zero-extend low byte */
2436 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
2439 reg: CONV_I2 (lreg) {
/* sign-extend low word */
2440 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, TRUE);
2443 reg: CONV_U2 (lreg) {
/* zero-extend low word */
2444 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
/* I4/U4 conversions from a 64-bit pair keep only the low word. */
2447 reg: CONV_I4 (lreg) {
2448 if (tree->reg1 != tree->left->reg1)
2449 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
2452 reg: CONV_U4 (lreg) {
2453 if (tree->reg1 != tree->left->reg1)
2454 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* I8 -> I4 with overflow check, two-pass variant: the loop runs twice so that
 * the forward-branch displacements (o1..o5, assigned in lines elided from this
 * extract) are known on the second pass. */
2458 reg: CONV_OVF_I4 (lreg){
2459 guint8 *start = s->code;
2460 guchar* o1, *o2, *o3, *o4, *o5;
2464 * Valid ints: 0xffffffff:0x80000000 to 0x00000000:0x7fffffff
2466 for (i = 0; i < 2; i++) {
2469 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
2471 /* If the low word top bit is set, see if we are negative */
2472 x86_branch8 (s->code, X86_CC_LT, o3 - o1, TRUE);
2475 /* We are not negative (no top bit set); check for our top word to be zero */
2476 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
2477 x86_branch8 (s->code, X86_CC_EQ, o4 - o2, TRUE);
2480 /* throw exception */
2481 x86_push_imm (s->code, "OverflowException");
2482 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS,
2483 arch_get_throw_exception_by_name ());
2484 x86_call_code (s->code, 0);
2487 /* our top bit is set, check that top word is 0xffffffff (all sign bits) */
2488 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg2, 0xffffffff);
2491 /* nope, emit exception */
2492 x86_branch8 (s->code, X86_CC_NE, o2 - o5, TRUE);
/* In range: result is simply the low word. */
2495 if (tree->reg1 != tree->left->reg1)
2496 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* I8 -> I4 with overflow check, patch-based variant: emit branches with zero
 * displacement, remember their addresses in br[], then x86_patch them once
 * the targets are known. */
2499 reg: CONV_OVF_I4 (lreg){
2500 guint8 *br [3], *label [1];
2503 * Valid ints: 0xffffffff:0x80000000 to 0x00000000:0x7fffffff
2505 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
2507 /* If the low word top bit is set, see if we are negative */
2508 br [0] = s->code; x86_branch8 (s->code, X86_CC_LT, 0, TRUE);
2510 /* We are not negative (no top bit set); check for our top word to be zero */
2511 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
2512 br [1] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, TRUE);
/* label[0] marks the throw sequence so the NE branch below can reach it. */
2513 label [0] = s->code;
2515 /* throw exception */
2516 x86_push_imm (s->code, "OverflowException");
2517 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS,
2518 arch_get_throw_exception_by_name ());
2519 x86_call_code (s->code, 0);
2521 x86_patch (br [0], s->code);
2522 /* our top bit is set, check that top word is 0xffffffff (all sign bits) */
2523 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg2, 0xffffffff);
2525 x86_patch (br [1], s->code);
2526 /* nope, emit exception */
2527 br [2] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
2528 x86_patch (br [2], label [0]);
/* In range: result is the low word. */
2530 if (tree->reg1 != tree->left->reg1)
2531 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* I8/U8 -> U4 and U8 -> I4 overflow-checked conversions: on 32-bit x86 both
 * reduce to "high word must be zero", then take the low word. */
2534 reg: CONV_OVF_U4 (lreg) {
2535 /* Keep in sync with CONV_OVF_I4_UN below, they are the same on 32-bit machines */
2536 /* top word must be 0 */
2537 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
/* X86_CC_EQ is the OK condition: the exception fires when it is false. */
2538 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException");
2539 if (tree->reg1 != tree->left->reg1)
2540 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
2543 reg: CONV_OVF_I4_UN (lreg) {
2544 /* Keep in sync with CONV_OVF_U4 above, they are the same on 32-bit machines */
2545 /* top word must be 0 */
2546 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
2547 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException");
2548 if (tree->reg1 != tree->left->reg1)
2549 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* NOTE(review): the rule header for the next two lines is elided in this
 * extract — they load a 64-bit immediate (tree->data.p reinterpreted as two
 * gint32 words) into the reg1:reg2 pair; presumably the coni8 rule. */
2555 x86_mov_reg_imm (s->code, tree->reg1, *((gint32 *)&tree->data.p));
2556 x86_mov_reg_imm (s->code, tree->reg2, *((gint32 *)&tree->data.p + 1));
/* Constant I4 -> I8: sign-extend at compile time (high word 0 or -1). */
2559 lreg: CONV_I8 (coni4) {
2560 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
2562 if (tree->left->data.i >= 0)
2563 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
2565 x86_mov_reg_imm (s->code, tree->reg2, -1);
/* I4 -> I8 at runtime: zero the high word, then overwrite it with -1 when the
 * value is negative. The branch skips exactly the 5-byte mov reg,imm32. */
2568 lreg: CONV_I8 (reg) {
2571 if (tree->reg1 != tree->left->reg1)
2572 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
2574 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
2575 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 0);
2576 x86_branch8 (s->code, X86_CC_GE, 5, TRUE);
2578 x86_mov_reg_imm (s->code, tree->reg2, -1);
/* i1 (assigned in an elided line) checks the skipped instruction is 5 bytes. */
2579 mono_assert ((s->code - i1) == 5);
/* Constant U4 -> U8: high word is always zero. */
2582 lreg: CONV_U8 (coni4) 1 {
2583 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
2584 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
2587 lreg: CONV_U8 (reg) {
2588 if (tree->reg1 != tree->left->reg1)
2589 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
2590 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
/* Constant I4 -> U8 with overflow check: negative constants always overflow,
 * so the throw can be emitted unconditionally at compile time. */
2593 lreg: CONV_OVF_U8 (coni4) {
2594 if (tree->left->data.i < 0){
2595 x86_push_imm (s->code, "OverflowException");
2596 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS,
2597 arch_get_throw_exception_by_name ());
2598 x86_call_code (s->code, 0);
2600 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
2601 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
/* Constant U4 -> I8: never overflows; high word zero. */
2605 lreg: CONV_OVF_I8_UN (coni4) {
2606 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
2607 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
/* I4 -> U8 with overflow check: the source must be non-negative, i.e. its
 * sign bit must be clear; then low word = value, high word = 0.
 * BUGFIX: the test mask was 0x8000000 (bit 27) — a typo for the 32-bit sign
 * bit 0x80000000, which let negative values with bit 27 clear escape the
 * overflow check. */
2610 lreg: CONV_OVF_U8 (reg) {
2611 x86_test_reg_imm (s->code, tree->left->reg1, 0x80000000);
/* X86_CC_EQ (sign bit clear) is the OK condition; otherwise throw. */
2612 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException");
2614 if (tree->reg1 != tree->left->reg1)
2615 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
2616 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
/* U4 -> I8 with overflow check: every uint fits in int64, so no check is
 * needed — just zero-extend into the register pair. */
2619 lreg: CONV_OVF_I8_UN (reg) {
2620 /* Convert uint value into int64, we pass everything */
2621 if (tree->reg1 != tree->left->reg1)
2622 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
2623 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
/* Store a 64-bit value: two 4-byte stores (low word first, high word at
 * offset + 4), one arm per addressing mode. Case labels/breaks are among the
 * lines elided from this extract. */
2626 stmt: STIND_I8 (addr, lreg) {
2628 switch (tree->left->data.ainfo.amode) {
/* absolute address */
2631 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 4);
2632 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset + 4, tree->right->reg2, 4);
/* base register + displacement */
2636 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
2637 tree->left->data.ainfo.offset, tree->right->reg1, 4);
2638 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
2639 tree->left->data.ainfo.offset + 4, tree->right->reg2, 4);
/* scaled index, no base */
2642 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
2643 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
2644 tree->right->reg1, 4);
2645 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset + 4,
2646 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
2647 tree->right->reg2, 4);
/* base + scaled index */
2650 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
2651 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
2652 tree->right->reg1, 4);
2653 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset + 4,
2654 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
2655 tree->right->reg2, 4);
/* Store a 64-bit field through a possibly-remote object: if the target is a
 * transparent proxy, route the store through mono_store_remote_field;
 * otherwise do a direct two-word store at the field offset. */
2660 stmt: REMOTE_STIND_I8 (reg, lreg) {
/* Borrow right->reg1 as scratch to load the object's class pointer and
 * compare it against the transparent proxy class; restore it afterwards. */
2664 x86_push_reg (s->code, tree->right->reg1);
2665 x86_mov_reg_membase (s->code, tree->right->reg1, tree->left->reg1, 0, 4);
2666 x86_alu_membase_imm (s->code, X86_CMP, tree->right->reg1, 0, ((int)mono_defaults.transparent_proxy_class));
2667 x86_pop_reg (s->code, tree->right->reg1);
2669 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
2671 /* this is a transparent proxy - remote the call */
2673 /* save value to stack */
2674 x86_push_reg (s->code, tree->right->reg2);
2675 x86_push_reg (s->code, tree->right->reg1);
/* Args: (object, klass, field, &value) — ESP points at the saved value. */
2677 x86_push_reg (s->code, X86_ESP);
2678 x86_push_imm (s->code, tree->data.fi.field);
2679 x86_push_imm (s->code, tree->data.fi.klass);
2680 x86_push_reg (s->code, tree->left->reg1);
2681 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_store_remote_field);
2682 x86_call_code (s->code, 0);
/* 24 = 4 args + the 8-byte value saved above. */
2683 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 24);
2685 br [1] = s->code; x86_jump8 (s->code, 0);
/* Non-proxy path: direct store; valuetypes are unboxed, so subtract the
 * MonoObject header from the field offset. */
2687 x86_patch (br [0], s->code);
2688 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
2689 tree->data.fi.field->offset;
2690 x86_mov_membase_reg (s->code, tree->left->reg1, offset, tree->right->reg1, 4);
2691 x86_mov_membase_reg (s->code, tree->left->reg1, offset + 4, tree->right->reg2, 4);
2693 x86_patch (br [1], s->code);
2697 # an addr can use two address registers (base and index register). We must take care
2698 # not to overwrite them before both words are loaded (thus the use of x86_lea)
/* Load a 64-bit value: resolve the effective address into reg2 first with lea,
 * then load the low word into reg1 and the high word into reg2 — this way the
 * original base/index registers are never clobbered mid-sequence. */
2699 lreg: LDIND_I8 (addr) {
2701 switch (tree->left->data.ainfo.amode) {
/* absolute address: no register to clobber, load both words directly */
2704 x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4);
2705 x86_mov_reg_mem (s->code, tree->reg2, tree->left->data.ainfo.offset + 4, 4);
/* base + displacement */
2709 x86_lea_membase (s->code, tree->reg2, tree->left->data.ainfo.basereg,
2710 tree->left->data.ainfo.offset);
2711 x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4);
2712 x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4);
/* scaled index, no base */
2715 x86_lea_memindex (s->code, tree->reg2, X86_NOBASEREG, tree->left->data.ainfo.offset,
2716 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift);
2717 x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4);
2718 x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4);
/* base + scaled index */
2721 x86_lea_memindex (s->code, tree->reg2, tree->left->data.ainfo.basereg,
2722 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
2723 tree->left->data.ainfo.shift);
2724 x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4);
2725 x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4);
2728 PRINT_REG ("LDIND_I8_0", tree->reg1);
2729 PRINT_REG ("LDIND_I8_1", tree->reg2);
/* 64-bit arithmetic shift right by a constant.
 * count < 32: shrd moves bits from the high word into the low word, then
 * SAR shifts the high word (preserving the sign).
 * 32 <= count < 64: low word takes the high word shifted by (count - 32),
 * high word is filled with sign bits (SAR by 31). */
2732 lreg: SHR (lreg, coni4) {
2733 if (tree->right->data.i < 32) {
2734 x86_shrd_reg_imm (s->code, tree->left->reg1, tree->left->reg2, tree->right->data.i);
2735 x86_shift_reg_imm (s->code, X86_SAR, tree->left->reg2, tree->right->data.i);
2736 if (tree->reg1 != tree->left->reg1)
2737 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
2738 if (tree->reg2 != tree->left->reg2)
2739 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
2740 } else if (tree->right->data.i < 64) {
2741 if (tree->reg1 != tree->left->reg2)
2742 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg2, 4);
2743 if (tree->reg2 != tree->left->reg2)
2744 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
2745 x86_shift_reg_imm (s->code, X86_SAR, tree->reg2, 31);
2746 x86_shift_reg_imm (s->code, X86_SAR, tree->reg1, (tree->right->data.i - 32));
2747 } /* else unspecified result */
/* 64-bit logical shift right by a constant: same structure, but the high
 * word is zero-filled (SHR / literal 0) instead of sign-filled. */
2750 lreg: SHR_UN (lreg, coni4) {
2751 if (tree->right->data.i < 32) {
2752 x86_shrd_reg_imm (s->code, tree->left->reg1, tree->left->reg2, tree->right->data.i);
2753 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg2, tree->right->data.i);
2754 if (tree->reg1 != tree->left->reg1)
2755 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
2756 if (tree->reg2 != tree->left->reg2)
2757 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
2758 } else if (tree->right->data.i < 64) {
2759 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg2, 4);
2760 x86_shift_reg_imm (s->code, X86_SHR, tree->reg1, (tree->right->data.i - 32));
2761 x86_mov_reg_imm (s->code, tree->reg2, 0);
2762 } /* else unspecified result */
/* 64-bit arithmetic shift right by a register count. The count goes into CL;
 * shrd/sar shift by (count mod 32), then bit 5 of the count (TEST ECX, 32)
 * distinguishes counts 32..63, which need the high word moved down and the
 * new high word filled with sign bits. */
2765 lreg: SHR (lreg, reg) {
2768 if (tree->right->reg1 != X86_ECX)
2769 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
2771 x86_shrd_reg (s->code, tree->left->reg1, tree->left->reg2);
2772 x86_shift_reg (s->code, X86_SAR, tree->left->reg2);
2773 x86_test_reg_imm (s->code, X86_ECX, 32);
2774 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
2775 x86_mov_reg_reg (s->code, tree->left->reg1, tree->left->reg2, 4);
/* NOTE(review): this shifts tree->reg2 while the rest of the sequence works
 * on tree->left->reg2 (moved by MOVE_LREG only afterwards) — verify the
 * register choice against upstream. */
2776 x86_shift_reg_imm (s->code, X86_SAR, tree->reg2, 31);
2777 x86_patch (br [0], s->code);
2779 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* 64-bit logical shift right by a register count: same shape, zero-fill. */
2782 lreg: SHR_UN (lreg, reg) {
2785 if (tree->right->reg1 != X86_ECX)
2786 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
2788 x86_shrd_reg (s->code, tree->left->reg1, tree->left->reg2);
2789 x86_shift_reg (s->code, X86_SHR, tree->left->reg2);
2790 x86_test_reg_imm (s->code, X86_ECX, 32);
2791 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
2792 x86_mov_reg_reg (s->code, tree->left->reg1, tree->left->reg2, 4);
/* NOTE(review): SHR by 31 does not fully zero the high word when
 * (count & 31) == 0 (e.g. count == 32) — confirm intended behavior. */
2793 x86_shift_reg_imm (s->code, X86_SHR, tree->reg2, 31);
2794 x86_patch (br [0], s->code);
2796 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* 64-bit shift left by a constant.
 * count < 32: shld moves low-word bits into the high word, then SHL shifts
 * the low word.
 * 32 <= count < 64: high word takes the low word shifted by (count - 32),
 * low word becomes zero. */
2799 lreg: SHL (lreg, coni4) {
2800 if (tree->right->data.i < 32) {
2801 x86_shld_reg_imm (s->code, tree->left->reg2, tree->left->reg1, tree->right->data.i);
2802 x86_shift_reg_imm (s->code, X86_SHL, tree->left->reg1, tree->right->data.i);
2803 if (tree->reg1 != tree->left->reg1)
2804 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
2805 if (tree->reg2 != tree->left->reg2)
2806 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
2807 } else if (tree->right->data.i < 64) {
2808 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg1, 4);
2809 x86_shift_reg_imm (s->code, X86_SHL, tree->reg2, (tree->right->data.i - 32));
2810 x86_alu_reg_reg (s->code, X86_XOR, tree->reg1, tree->reg1);
2811 } /* else unspecified result */
/* 64-bit shift left by a register count: count in CL; shld/shl shift by
 * (count mod 32); bit 5 of the count selects the 32..63 fixup (high word =
 * low word, low word = 0). */
2814 lreg: SHL (lreg, reg) {
2817 if (tree->right->reg1 != X86_ECX)
2818 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
2820 x86_shld_reg (s->code, tree->left->reg2, tree->left->reg1);
2821 x86_shift_reg (s->code, X86_SHL, tree->left->reg1);
2822 x86_test_reg_imm (s->code, X86_ECX, 32);
2823 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
2824 x86_mov_reg_reg (s->code, tree->left->reg2, tree->left->reg1, 4);
2825 x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->left->reg1);
2826 x86_patch (br [0], s->code);
2828 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* 64-bit add: low words with ADD, high words with ADC to propagate the carry. */
2831 lreg: ADD (lreg, lreg) {
2832 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
2833 x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2);
2835 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* Signed overflow check: throw unless OF is clear after the high-word ADC. */
2838 lreg: ADD_OVF (lreg, lreg) {
2839 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
2840 x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2);
2841 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
2843 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* Unsigned overflow check: throw unless CF is clear. */
2846 lreg: ADD_OVF_UN (lreg, lreg) {
2847 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
2848 x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2);
2849 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
2851 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* 64-bit subtract: SUB then SBB to propagate the borrow. */
2854 lreg: SUB (lreg, lreg) {
2855 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
2856 x86_alu_reg_reg (s->code, X86_SBB, tree->left->reg2, tree->right->reg2);
2858 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
2861 lreg: SUB_OVF (lreg, lreg) {
2862 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
2863 x86_alu_reg_reg (s->code, X86_SBB, tree->left->reg2, tree->right->reg2);
2864 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
2866 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
2869 lreg: SUB_OVF_UN (lreg, lreg) {
2870 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
2871 x86_alu_reg_reg (s->code, X86_SBB, tree->left->reg2, tree->right->reg2);
2872 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
2874 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* Bitwise ops: independent per word, no carries. */
2877 lreg: AND (lreg, lreg) {
2878 x86_alu_reg_reg (s->code, X86_AND, tree->left->reg1, tree->right->reg1);
2879 x86_alu_reg_reg (s->code, X86_AND, tree->left->reg2, tree->right->reg2);
2881 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
2884 lreg: OR (lreg, lreg) {
2885 x86_alu_reg_reg (s->code, X86_OR, tree->left->reg1, tree->right->reg1);
2886 x86_alu_reg_reg (s->code, X86_OR, tree->left->reg2, tree->right->reg2);
2888 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
2891 lreg: XOR (lreg, lreg) {
2892 x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->right->reg1);
2893 x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg2, tree->right->reg2);
2895 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* NOTE(review): rule header elided — 64-bit negation: neg low word, ADC the
 * borrow into the high word, then neg the high word (two's complement). */
2899 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
2901 x86_neg_reg (s->code, tree->reg1);
2902 x86_alu_reg_imm (s->code, X86_ADC, tree->reg2, 0);
2903 x86_neg_reg (s->code, tree->reg2);
/* NOTE(review): rule header elided — 64-bit bitwise NOT, word by word. */
2907 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
2909 x86_not_reg (s->code, tree->reg1);
2910 x86_not_reg (s->code, tree->reg2);
/* 64-bit multiply: done out of line by the C helper mono_llmult(a, b).
 * Arguments are pushed as two register pairs (low word last so it sits at the
 * lowest address); result comes back in EAX:EDX per the cdecl 64-bit return
 * convention. ECX is caller-save around the call if in use. */
2913 lreg: MUL (lreg, lreg) {
2914 if (mono_regset_reg_used (s->rs, X86_ECX))
2915 x86_push_reg (s->code, X86_ECX);
2917 x86_push_reg (s->code, tree->right->reg2);
2918 x86_push_reg (s->code, tree->right->reg1);
2919 x86_push_reg (s->code, tree->left->reg2);
2920 x86_push_reg (s->code, tree->left->reg1);
2921 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_llmult);
2922 x86_call_code (s->code, 0);
/* 16 = four 4-byte argument words. */
2923 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
2925 if (mono_regset_reg_used (s->rs, X86_ECX))
2926 x86_pop_reg (s->code, X86_ECX);
2928 mono_assert (tree->reg1 == X86_EAX &&
2929 tree->reg2 == X86_EDX);
/* Overflow-checked multiply: mono_llmult_ovf takes an extra leading pointer
 * (the pushed ESP) through which it stores an exception object on overflow;
 * a non-NULL value is rethrown here via arch_get_throw_exception. */
2932 lreg: MUL_OVF (lreg, lreg) {
2933 if (mono_regset_reg_used (s->rs, X86_ECX))
2934 x86_push_reg (s->code, X86_ECX);
2936 x86_push_reg (s->code, tree->right->reg2);
2937 x86_push_reg (s->code, tree->right->reg1);
2938 x86_push_reg (s->code, tree->left->reg2);
2939 x86_push_reg (s->code, tree->left->reg1);
2940 /* pass a pointer to store the resulting exception -
2941 * ugly, but it works */
2942 x86_push_reg (s->code, X86_ESP);
2943 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_llmult_ovf);
2944 x86_call_code (s->code, 0);
/* Fetch the stored exception before popping the 20-byte argument area. */
2945 x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, 4, 4);
2946 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
2947 x86_alu_reg_imm (s->code, X86_CMP, X86_ECX, 0);
2949 /* cond. emit exception */
2950 x86_branch8 (s->code, X86_CC_EQ, 7, FALSE);
2951 x86_push_reg (s->code, X86_ECX);
2952 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, arch_get_throw_exception ());
2953 x86_call_code (s->code, 0);
2955 if (mono_regset_reg_used (s->rs, X86_ECX))
2956 x86_pop_reg (s->code, X86_ECX);
2958 mono_assert (tree->reg1 == X86_EAX &&
2959 tree->reg2 == X86_EDX);
/* Unsigned overflow-checked multiply — identical shape, unsigned helper. */
2962 lreg: MUL_OVF_UN (lreg, lreg) {
2963 if (mono_regset_reg_used (s->rs, X86_ECX))
2964 x86_push_reg (s->code, X86_ECX);
2966 x86_push_reg (s->code, tree->right->reg2);
2967 x86_push_reg (s->code, tree->right->reg1);
2968 x86_push_reg (s->code, tree->left->reg2);
2969 x86_push_reg (s->code, tree->left->reg1);
2970 /* pass a pointer to store the resulting exception -
2971 * ugly, but it works */
2972 x86_push_reg (s->code, X86_ESP);
2973 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_llmult_ovf_un);
2974 x86_call_code (s->code, 0);
2975 x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, 4, 4);
2976 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
2977 x86_alu_reg_imm (s->code, X86_CMP, X86_ECX, 0);
2979 /* cond. emit exception */
2980 x86_branch8 (s->code, X86_CC_EQ, 7, FALSE);
2981 x86_push_reg (s->code, X86_ECX);
2982 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, arch_get_throw_exception ());
2983 x86_call_code (s->code, 0);
2985 if (mono_regset_reg_used (s->rs, X86_ECX))
2986 x86_pop_reg (s->code, X86_ECX);
2988 mono_assert (tree->reg1 == X86_EAX &&
2989 tree->reg2 == X86_EDX);
/* 64-bit division/remainder, signed and unsigned: all four delegate to the C
 * helpers (mono_lldiv / mono_llrem / mono_lldiv_un / mono_llrem_un) with the
 * same calling sequence as MUL above — two register pairs pushed, 16 bytes of
 * args popped, result in EAX:EDX, ECX saved around the call when in use. */
2992 lreg: DIV (lreg, lreg) {
2993 if (mono_regset_reg_used (s->rs, X86_ECX))
2994 x86_push_reg (s->code, X86_ECX);
2996 x86_push_reg (s->code, tree->right->reg2);
2997 x86_push_reg (s->code, tree->right->reg1);
2998 x86_push_reg (s->code, tree->left->reg2);
2999 x86_push_reg (s->code, tree->left->reg1);
3000 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_lldiv);
3001 x86_call_code (s->code, 0);
3002 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3004 if (mono_regset_reg_used (s->rs, X86_ECX))
3005 x86_pop_reg (s->code, X86_ECX);
3007 mono_assert (tree->reg1 == X86_EAX &&
3008 tree->reg2 == X86_EDX);
3011 lreg: REM (lreg, lreg) {
3012 if (mono_regset_reg_used (s->rs, X86_ECX))
3013 x86_push_reg (s->code, X86_ECX);
3015 x86_push_reg (s->code, tree->right->reg2);
3016 x86_push_reg (s->code, tree->right->reg1);
3017 x86_push_reg (s->code, tree->left->reg2);
3018 x86_push_reg (s->code, tree->left->reg1);
3019 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_llrem);
3020 x86_call_code (s->code, 0);
3021 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3023 if (mono_regset_reg_used (s->rs, X86_ECX))
3024 x86_pop_reg (s->code, X86_ECX);
3026 mono_assert (tree->reg1 == X86_EAX &&
3027 tree->reg2 == X86_EDX);
3030 lreg: DIV_UN (lreg, lreg) {
3031 if (mono_regset_reg_used (s->rs, X86_ECX))
3032 x86_push_reg (s->code, X86_ECX);
3034 x86_push_reg (s->code, tree->right->reg2);
3035 x86_push_reg (s->code, tree->right->reg1);
3036 x86_push_reg (s->code, tree->left->reg2);
3037 x86_push_reg (s->code, tree->left->reg1);
3038 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_lldiv_un);
3039 x86_call_code (s->code, 0);
3040 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3042 if (mono_regset_reg_used (s->rs, X86_ECX))
3043 x86_pop_reg (s->code, X86_ECX);
3045 mono_assert (tree->reg1 == X86_EAX &&
3046 tree->reg2 == X86_EDX);
3049 lreg: REM_UN (lreg, lreg) {
3050 if (mono_regset_reg_used (s->rs, X86_ECX))
3051 x86_push_reg (s->code, X86_ECX);
3053 x86_push_reg (s->code, tree->right->reg2);
3054 x86_push_reg (s->code, tree->right->reg1);
3055 x86_push_reg (s->code, tree->left->reg2);
3056 x86_push_reg (s->code, tree->left->reg1);
3057 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_llrem_un);
3058 x86_call_code (s->code, 0);
3059 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3061 if (mono_regset_reg_used (s->rs, X86_ECX))
3062 x86_pop_reg (s->code, X86_ECX);
3064 mono_assert (tree->reg1 == X86_EAX &&
3065 tree->reg2 == X86_EDX);
/* Calls returning a 64-bit value: same push/dispatch/cleanup shape as the
 * CALL_I4/CALL_VOID rules, but the result pair must land in EAX:EDX. */
3068 lreg: CALL_I8 (this, ADDR_G) {
3069 int lreg = tree->left->reg1;
3075 if (tree->left->op != MB_TERM_NOP) {
3076 mono_assert (lreg >= 0);
3077 x86_push_reg (s->code, lreg);
/* Dereferences [this + 0] — presumably a faulting null check; verify. */
3078 x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0);
3081 if (tree->data.ci.vtype_num) {
3082 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
3083 x86_lea_membase (s->code, treg, X86_EBP, offset);
3084 x86_push_reg (s->code, treg);
3087 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, tree->right->data.p);
3088 x86_call_code (s->code, 0);
3090 if (tree->data.ci.args_size)
3091 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
3093 mono_assert (tree->reg1 == X86_EAX);
3094 mono_assert (tree->reg2 == X86_EDX);
/* Virtual 64-bit call: vtable pointer from [this + 0], then the method slot. */
3097 lreg: CALL_I8 (this, VFUNC_ADDR) {
3098 int lreg = tree->left->reg1;
3104 if (tree->left->op != MB_TERM_NOP) {
3105 mono_assert (lreg >= 0);
3106 x86_push_reg (s->code, lreg);
3109 if (tree->data.ci.vtype_num) {
3110 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
3111 x86_lea_membase (s->code, treg, X86_EBP, offset);
3112 x86_push_reg (s->code, treg);
3115 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3116 x86_call_virtual (s->code, lreg,
3117 G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
3119 if (tree->data.ci.args_size)
3120 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
3122 PRINT_REG ("CALL0_I8(VIRTUAL)", tree->reg1);
3123 PRINT_REG ("CALL1_I8(VIRTUAL)", tree->reg2);
3125 mono_assert (tree->reg1 == X86_EAX);
3126 mono_assert (tree->reg2 == X86_EDX);
/* Interface 64-bit call: vtable -> interface_offsets -> per-interface method
 * table (indexed by interface_id) -> slot. */
3129 lreg: CALL_I8 (this, INTF_ADDR) {
3130 int lreg = tree->left->reg1;
3136 if (tree->left->op != MB_TERM_NOP) {
3137 mono_assert (lreg >= 0);
3138 x86_push_reg (s->code, lreg);
3141 if (tree->data.ci.vtype_num) {
3142 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
3143 x86_lea_membase (s->code, treg, X86_EBP, offset);
3144 x86_push_reg (s->code, treg);
3147 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3148 x86_mov_reg_membase (s->code, lreg, lreg,
3149 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
3150 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
3151 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
3153 if (tree->data.ci.args_size)
3154 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
3156 PRINT_REG ("CALL_I8(INTERFACE)", tree->reg1);
3158 mono_assert (tree->reg1 == X86_EAX);
3159 mono_assert (tree->reg2 == X86_EDX);
/* NOTE(review): rule header elided — this is the body of a 64-bit return:
 * move the value pair into the EAX:EDX return registers without clobbering
 * either source, then jump to the method epilogue. */
3163 if (tree->left->reg1 != X86_EAX) {
3164 if (tree->left->reg2 != X86_EAX) {
/* Safe order: reg2 is not EAX, so EAX can be written first. */
3165 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
3166 if (tree->left->reg2 != X86_EDX)
3167 x86_mov_reg_reg (s->code, X86_EDX, tree->left->reg2, 4);
/* reg2 == EAX: park it in ECX so writing EAX doesn't destroy it. */
3169 x86_mov_reg_reg (s->code, X86_ECX, tree->left->reg2, 4);
3170 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
3171 x86_mov_reg_reg (s->code, X86_EDX, X86_ECX, 4);
3173 } else if (tree->left->reg2 != X86_EDX) {
/* reg1 already in EAX; only the high word needs moving. */
3174 x86_mov_reg_reg (s->code, X86_EDX, tree->left->reg2, 4);
/* Jump to the shared epilogue unless this is the function's last tree. */
3177 if (!tree->last_instr) {
3178 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_EPILOG, NULL);
3179 x86_jump32 (s->code, 0);
/* Push a 64-bit argument: high word first so the low word ends up lower. */
3184 stmt: ARG_I8 (lreg) {
3185 x86_push_reg (s->code, tree->left->reg2);
3186 x86_push_reg (s->code, tree->left->reg1);
/* Materialize the boolean result of a 64-bit comparison (CEE_CEQ / CLT /
 * CGT and their unsigned variants). The switch case labels are among the
 * lines elided from this extract. */
3189 reg: CSET (COMPARE (lreg, lreg)) {
3191 int lreg1, lreg2, rreg1, rreg2;
3193 lreg1 = tree->left->left->reg1;
3194 lreg2 = tree->left->left->reg2;
3195 rreg1 = tree->left->right->reg1;
3196 rreg2 = tree->left->right->reg2;
/* Equality: if the low words differ, skip the second cmp (ZF already clear);
 * otherwise compare the high words, then set the result from ZF. */
3199 if (tree->data.i == CEE_CEQ) {
3200 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3201 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3202 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3203 x86_patch (br [0], s->code);
3204 x86_set_reg (s->code, X86_CC_EQ, tree->reg1, FALSE);
3205 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
/* Ordered compares: decide on the high words first (signed variants use
 * signed branches, unsigned use unsigned); only on equal high words fall back
 * to an unsigned compare of the low words. br[0]/br[2] -> result 0,
 * br[1]/fall-through -> result 1.
 * NOTE(review): the first two arms compare right against left (presumably
 * CGT / CGT_UN) and the last two left against right (CLT / CLT_UN) — the
 * elided case labels would confirm. */
3209 switch (tree->data.i) {
3211 x86_alu_reg_reg (s->code, X86_CMP, rreg2, lreg2);
3212 br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, TRUE);
3213 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3214 x86_alu_reg_reg (s->code, X86_CMP, rreg1, lreg1);
3215 br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
3218 x86_alu_reg_reg (s->code, X86_CMP, rreg2, lreg2);
3219 br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, FALSE);
3220 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3221 x86_alu_reg_reg (s->code, X86_CMP, rreg1, lreg1);
3222 br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
3225 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3226 br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, TRUE);
3227 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3228 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3229 br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
3232 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3233 br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, FALSE);
3234 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3235 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3236 br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
3239 g_assert_not_reached ();
3242 /* set result to 1 */
3243 x86_patch (br [1], s->code);
3244 x86_mov_reg_imm (s->code, tree->reg1, 1);
3245 br [3] = s->code; x86_jump8 (s->code, 0);
3247 /* set result to 0 */
3248 x86_patch (br [0], s->code);
3249 x86_patch (br [2], s->code);
3250 x86_mov_reg_imm (s->code, tree->reg1, 0);
3252 x86_patch (br [3], s->code);
3255 stmt: CBRANCH (COMPARE (lreg, lreg)) {
3257 int lreg1, lreg2, rreg1, rreg2;
3259 lreg1 = tree->left->left->reg1;
3260 lreg2 = tree->left->left->reg2;
3261 rreg1 = tree->left->right->reg1;
3262 rreg2 = tree->left->right->reg2;
3264 switch (tree->data.bi.cond) {
3266 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3267 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3268 x86_branch32 (s->code, X86_CC_LT, 0, TRUE);
3269 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3270 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3271 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3272 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3273 x86_patch (br [0], s->code);
3276 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3277 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3278 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3279 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3280 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3281 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3282 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3283 x86_patch (br [0], s->code);
3286 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3287 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3288 x86_branch32 (s->code, X86_CC_GT, 0, TRUE);
3289 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3290 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3291 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3292 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3293 x86_patch (br [0], s->code);
3296 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3297 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3298 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3299 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3300 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3301 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3302 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3303 x86_patch (br [0], s->code);
3306 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3307 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3308 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3309 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3310 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
3311 x86_patch (br [0], s->code);
3314 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3315 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3316 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
3317 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3318 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3319 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
3322 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3323 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3324 x86_branch32 (s->code, X86_CC_GT, 0, TRUE);
3325 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3326 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3327 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3328 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3329 x86_branch32 (s->code, X86_CC_GE, 0, FALSE);
3330 x86_patch (br [0], s->code);
3333 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3334 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3335 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3336 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3337 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3338 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3339 x86_branch32 (s->code, X86_CC_GE, 0, FALSE);
3340 x86_patch (br [0], s->code);
3343 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3344 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3345 x86_branch32 (s->code, X86_CC_LT, 0, TRUE);
3346 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3347 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3348 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3349 x86_branch32 (s->code, X86_CC_LE, 0, FALSE);
3350 x86_patch (br [0], s->code);
3353 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3354 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3355 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3356 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3357 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3358 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3359 x86_branch32 (s->code, X86_CC_LE, 0, FALSE);
3360 x86_patch (br [0], s->code);
3363 g_assert_not_reached ();
3370 #stmt: STLOC (CONV_I4 (freg)) {
3372 # x86_fist_pop_membase (s->code, X86_EBP, tree->data.i, FALSE);
/* Rule: truncate the x87 value on top of the FP stack to a signed 8-bit
 * integer in reg1.  The FPU control word is saved, its rounding-control
 * bits are forced to 11b (truncate toward zero, mask 0xc00) as CIL conv.i1
 * requires, the value is stored with fistp, and the original control word
 * is restored.  NOTE(review): widen_reg flag meanings inferred from the
 * signed/byte variants used by the sibling CONV_* rules — confirm against
 * x86-codegen.h. */
3375 reg: CONV_I1 (freg) {
3376 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
/* save the current FPU control word at [ESP+0] */
3377 x86_fnstcw_membase(s->code, X86_ESP, 0);
3378 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
/* set RC bits to 11b = round toward zero */
3379 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
3380 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
3381 x86_fldcw_membase (s->code, X86_ESP, 2);
3382 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
3383 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
3384 x86_pop_reg (s->code, tree->reg1);
/* sign-extend the low byte into the full register */
3385 x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, FALSE);
/* restore the saved control word and release the scratch slot */
3386 x86_fldcw_membase (s->code, X86_ESP, 0);
3387 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* The following CONV_* rules all share the same pattern as CONV_I1 above:
 * save the FPU control word, force truncation rounding (OR 0xc00), fistp
 * the top-of-stack value to a stack slot, reload it into reg1 (and reg2
 * for 64-bit results), then restore the control word.  Only the widening
 * step differs per target type. */

/* FP -> unsigned 8-bit: zero-extend the stored byte. */
3390 reg: CONV_U1 (freg) {
3391 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
3392 x86_fnstcw_membase(s->code, X86_ESP, 0);
3393 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
3394 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
3395 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
3396 x86_fldcw_membase (s->code, X86_ESP, 2);
3397 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
3398 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
3399 x86_pop_reg (s->code, tree->reg1);
3400 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
3401 x86_fldcw_membase (s->code, X86_ESP, 0);
3402 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);

/* FP -> signed 16-bit: sign-extend the stored word. */
3405 reg: CONV_I2 (freg) {
3406 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
3407 x86_fnstcw_membase(s->code, X86_ESP, 0);
3408 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
3409 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
3410 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
3411 x86_fldcw_membase (s->code, X86_ESP, 2);
3412 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
3413 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
3414 x86_pop_reg (s->code, tree->reg1);
3415 x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, TRUE);
3416 x86_fldcw_membase (s->code, X86_ESP, 0);
3417 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);

/* FP -> unsigned 16-bit: zero-extend the stored word. */
3420 reg: CONV_U2 (freg) {
3421 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
3422 x86_fnstcw_membase(s->code, X86_ESP, 0);
3423 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
3424 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
3425 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
3426 x86_fldcw_membase (s->code, X86_ESP, 2);
3427 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
3428 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
3429 x86_pop_reg (s->code, tree->reg1);
3430 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, TRUE);
3431 x86_fldcw_membase (s->code, X86_ESP, 0);
3432 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);

/* FP -> signed 32-bit: the 32-bit store needs no widening. */
3435 reg: CONV_I4 (freg) {
3436 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
3437 x86_fnstcw_membase(s->code, X86_ESP, 0);
3438 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
3439 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
3440 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
3441 x86_fldcw_membase (s->code, X86_ESP, 2);
3442 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
3443 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
3444 x86_pop_reg (s->code, tree->reg1);
3445 x86_fldcw_membase (s->code, X86_ESP, 0);
3446 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);

/* FP -> unsigned 32-bit: identical code to CONV_I4; the 32-bit bit
 * pattern is the same, only the interpretation differs.
 * NOTE(review): values outside the i4 range are not range-checked here. */
3449 reg: CONV_U4 (freg) {
3450 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
3451 x86_fnstcw_membase(s->code, X86_ESP, 0);
3452 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
3453 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
3454 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
3455 x86_fldcw_membase (s->code, X86_ESP, 2);
3456 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
3457 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
3458 x86_pop_reg (s->code, tree->reg1);
3459 x86_fldcw_membase (s->code, X86_ESP, 0);
3460 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);

/* FP -> signed 64-bit: 8-byte fistp, low word popped into reg1,
 * high word into reg2. */
3463 lreg: CONV_I8 (freg) {
3464 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
3465 x86_fnstcw_membase(s->code, X86_ESP, 0);
3466 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
3467 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
3468 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
3469 x86_fldcw_membase (s->code, X86_ESP, 2);
3470 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
3471 x86_fist_pop_membase (s->code, X86_ESP, 0, TRUE);
3472 x86_pop_reg (s->code, tree->reg1);
3473 x86_pop_reg (s->code, tree->reg2);
3474 x86_fldcw_membase (s->code, X86_ESP, 0);
3475 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);

/* FP -> unsigned 64-bit: same code as CONV_I8 (bit pattern identical). */
3478 lreg: CONV_U8 (freg) {
3479 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
3480 x86_fnstcw_membase(s->code, X86_ESP, 0);
3481 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
3482 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
3483 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
3484 x86_fldcw_membase (s->code, X86_ESP, 2);
3485 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
3486 x86_fist_pop_membase (s->code, X86_ESP, 0, TRUE);
3487 x86_pop_reg (s->code, tree->reg1);
3488 x86_pop_reg (s->code, tree->reg2);
3489 x86_fldcw_membase (s->code, X86_ESP, 0);
3490 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* Rule: materialize the boolean result of an FP comparison into reg1.
 * fcompp pops both operands and sets the FPU status word; fnstsw copies
 * it into AX, and masking with 0x4500 isolates the C0/C2/C3 condition
 * bits.  Each (elided) case of the switch on tree->data.i then compares
 * against the bit pattern for its condition (0x4000 = equal, 0x0100 =
 * below), sets the low byte of treg and zero-extends it.  EAX is
 * preserved around the sequence when it is not the target register.
 * Consistency fix: the case at line 3524 used tree->reg1 directly where
 * every sibling case uses the local alias treg — same value, now uniform. */
3493 reg: CSET (COMPARE (freg, freg)) {
3494 int treg = tree->reg1;
3496 if (treg != X86_EAX)
3497 x86_push_reg (s->code, X86_EAX);
3499 x86_fcompp (s->code);
3500 x86_fnstsw (s->code);
/* keep only C0/C2/C3 of the FPU status word */
3501 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
3503 switch (tree->data.i) {
3505 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
3506 x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
3507 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
3510 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
3511 x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
3512 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
3515 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
3516 x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
3517 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
3520 x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
3521 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
3524 x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
3525 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
3528 g_assert_not_reached ();
3531 if (treg != X86_EAX)
3532 x86_pop_reg (s->code, X86_EAX);
/* R8 -> R8 conversion: the value already sits on the x87 stack, so the
 * rule body (elided here) emits nothing. */
3535 freg: CONV_R8 (freg) {

/* R8 -> R4: also a no-op on the x87 stack, which always computes in
 * extended precision; precision narrowing only happens on store. */
3539 freg: CONV_R4 (freg) {
3540 /* fixme: nothing to do ?? */
/* Integer -> FP conversions.  All variants push the integer onto the
 * machine stack and use fild to load it onto the x87 stack, then pop the
 * scratch slot.  fild always produces an extended-precision value, so R4
 * and R8 targets share the same code. */

/* Special case: load a 32-bit int directly from a global address. */
3543 freg: CONV_R8 (LDIND_I4 (ADDR_G)) {
3544 x86_fild (s->code, tree->left->left->data.p, FALSE);

/* 32-bit int in a register -> R4. */
3547 freg: CONV_R4 (reg) {
3548 x86_push_reg (s->code, tree->left->reg1);
3549 x86_fild_membase (s->code, X86_ESP, 0, FALSE);
3550 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);

/* 32-bit int in a register -> R8 (same code as the R4 case). */
3553 freg: CONV_R8 (reg) {
3554 x86_push_reg (s->code, tree->left->reg1);
3555 x86_fild_membase (s->code, X86_ESP, 0, FALSE);
3556 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);

/* Unsigned 32-bit -> FP: widen to 64 bits by pushing a zero high word,
 * then use the 64-bit (TRUE) form of fild so the value stays unsigned. */
3559 freg: CONV_R_UN (reg) {
3560 x86_push_imm (s->code, 0);
3561 x86_push_reg (s->code, tree->left->reg1);
3562 x86_fild_membase (s->code, X86_ESP, 0, TRUE);
3563 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
/* Unsigned 64-bit -> FP.  fild only understands signed 64-bit values, so
 * a value with the top bit set would load as negative.  The fix: load the
 * value as signed, spill it as an 80-bit temporary, and if the original
 * high word was negative add the correction constant mn (an 80-bit
 * little-endian encoding of 2^64) before reloading the result.
 * NOTE(review): mn's exact encoded value is asserted from the byte
 * pattern — confirm against the x87 extended-precision format. */
3566 freg: CONV_R_UN (lreg) {
3567 static guint8 mn[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x3f, 0x40 };
3570 /* load 64bit integer to FP stack */
3571 x86_push_imm (s->code, 0);
3572 x86_push_reg (s->code, tree->left->reg2);
3573 x86_push_reg (s->code, tree->left->reg1);
3574 x86_fild_membase (s->code, X86_ESP, 0, TRUE);
3575 /* store as 80bit FP value */
3576 x86_fst80_membase (s->code, X86_ESP, 0);
3578 /* test if lreg is negative */
3579 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
3580 br [0] = s->code; x86_branch8 (s->code, X86_CC_GEZ, 0, TRUE);
3582 /* add correction constant mn */
3583 x86_fld80_mem (s->code, mn);
3584 x86_fld80_membase (s->code, X86_ESP, 0);
3585 x86_fp_op_reg (s->code, X86_FADD, 1, TRUE);
3586 x86_fst80_membase (s->code, X86_ESP, 0);
3588 x86_patch (br [0], s->code);
/* reload the (possibly corrected) value and drop the 12-byte scratch area */
3590 x86_fld80_membase (s->code, X86_ESP, 0);
3591 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
/* Signed 64-bit -> FP: push high then low word and load with the 64-bit
 * form of fild; R4 and R8 targets share the same code since the x87
 * stack holds extended precision. */
3594 freg: CONV_R4 (lreg) {
3595 x86_push_reg (s->code, tree->left->reg2);
3596 x86_push_reg (s->code, tree->left->reg1);
3597 x86_fild_membase (s->code, X86_ESP, 0, TRUE);
3598 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);

3601 freg: CONV_R8 (lreg) {
3602 x86_push_reg (s->code, tree->left->reg2);
3603 x86_push_reg (s->code, tree->left->reg1);
3604 x86_fild_membase (s->code, X86_ESP, 0, TRUE);
3605 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
3609 float f = *(float *)tree->data.p;
3616 x86_fld (s->code, tree->data.p, FALSE);
3620 double d = *(double *)tree->data.p;
3627 x86_fld (s->code, tree->data.p, TRUE);
/* Indirect FP loads: dereference the address in reg1; the last flag
 * selects single (FALSE) vs double (TRUE) precision. */
3630 freg: LDIND_R4 (reg) {
3631 x86_fld_membase (s->code, tree->left->reg1, 0, FALSE);

3634 freg: LDIND_R8 (reg) {
3635 x86_fld_membase (s->code, tree->left->reg1, 0, TRUE);

/* FP arithmetic: both operands are already on the x87 stack; each rule
 * combines ST(0) with ST(1) and pops (the TRUE flag), leaving the
 * result on top. */
3638 freg: ADD (freg, freg) {
3639 x86_fp_op_reg (s->code, X86_FADD, 1, TRUE);

3642 freg: SUB (freg, freg) {
3643 x86_fp_op_reg (s->code, X86_FSUB, 1, TRUE);

3646 freg: MUL (freg, freg) {
3647 x86_fp_op_reg (s->code, X86_FMUL, 1, TRUE);

3650 freg: DIV (freg, freg) {
3651 x86_fp_op_reg (s->code, X86_FDIV, 1, TRUE);
/* FP remainder.  fprem1 computes ST(0) mod ST(1) but may return only a
 * partial remainder, signalled by the C2 bit (0x0400) of the status
 * word, so the operation is repeated in a loop until C2 clears.
 * The loop labels (l1/l2) are captured on lines elided from this view. */
3654 freg: REM (freg, freg) {
3657 /* we need to exchange ST(0) with ST(1) */
3658 x86_fxch (s->code, 1);
3660 /* this requires a loop, because fprem1 sometimes
3661 * returns a partial remainder */
3663 x86_fprem1 (s->code);
3664 x86_fnstsw (s->code);
/* test the C2 (incomplete-reduction) bit */
3665 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x0400);
3667 x86_branch8 (s->code, X86_CC_NE, l1 - l2, FALSE);
/* pop the divisor, leaving the remainder in ST(0) */
3670 x86_fstp (s->code, 1);
/* Store ST(0) as a 32-bit float to a stack-frame local (ADDR_L) ... */
3679 stmt: STIND_R4 (ADDR_L, freg) {
3680 int offset = VARINFO (s, tree->left->data.i).offset;
3681 x86_fst_membase (s->code, X86_EBP, offset, FALSE, TRUE);

/* ... or through an arbitrary pointer held in a register. */
3684 stmt: STIND_R4 (reg, freg) {
3685 x86_fst_membase (s->code, tree->left->reg1, 0, FALSE, TRUE);
/* Store an R4 into an object field when the object may be a transparent
 * proxy.  At runtime the object's class pointer is compared against
 * mono_defaults.transparent_proxy_class: proxies go through the
 * mono_store_remote_field helper (value spilled to the stack and passed
 * by address), ordinary objects get a direct fst to the field offset.
 * For valuetype-holding classes the MonoObject header size is
 * subtracted from the field offset. */
3688 stmt: REMOTE_STIND_R4 (reg, freg) {
3691 int lreg = tree->left->reg1;
3697 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
3698 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
3699 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3701 /* this is a transparent proxy - remote the call */
3703 /* save value to stack */
3704 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
3705 x86_fst_membase (s->code, X86_ESP, 0, FALSE, TRUE);
/* args: object, klass, field, &value */
3707 x86_push_reg (s->code, X86_ESP);
3708 x86_push_imm (s->code, tree->data.fi.field);
3709 x86_push_imm (s->code, tree->data.fi.klass);
3710 x86_push_reg (s->code, lreg);
3711 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_store_remote_field);
3712 x86_call_code (s->code, 0);
/* pop 4 args + the 4-byte value slot */
3713 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
3715 br [1] = s->code; x86_jump8 (s->code, 0);
/* non-proxy path: direct store to the field */
3717 x86_patch (br [0], s->code);
3718 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
3719 tree->data.fi.field->offset;
3720 x86_fst_membase (s->code, lreg, offset, FALSE, TRUE);
3722 x86_patch (br [1], s->code);
/* Same as the STIND_R4 pair above, but storing a 64-bit double
 * (TRUE precision flag). */
3725 stmt: STIND_R8 (ADDR_L, freg) {
3726 int offset = VARINFO (s, tree->left->data.i).offset;
3727 x86_fst_membase (s->code, X86_EBP, offset, TRUE, TRUE);

3730 stmt: STIND_R8 (reg, freg) {
3731 x86_fst_membase (s->code, tree->left->reg1, 0, TRUE, TRUE);
/* R8 variant of REMOTE_STIND_R4: identical proxy check, but the value
 * spill is 8 bytes, so the post-call stack cleanup is 24 (4 args + 8-byte
 * value slot) and the stores use the double-precision flag. */
3734 stmt: REMOTE_STIND_R8 (reg, freg) {
3737 int lreg = tree->left->reg1;
3743 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
3744 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
3745 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3747 /* this is a transparent proxy - remote the call */
3749 /* save value to stack */
3750 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
3751 x86_fst_membase (s->code, X86_ESP, 0, TRUE, TRUE);
/* args: object, klass, field, &value */
3753 x86_push_reg (s->code, X86_ESP);
3754 x86_push_imm (s->code, tree->data.fi.field);
3755 x86_push_imm (s->code, tree->data.fi.klass);
3756 x86_push_reg (s->code, lreg);
3757 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_store_remote_field);
3758 x86_call_code (s->code, 0);
3759 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 24);
3761 br [1] = s->code; x86_jump8 (s->code, 0);
/* non-proxy path: direct store to the field */
3763 x86_patch (br [0], s->code);
3764 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
3765 tree->data.fi.field->offset;
3766 x86_fst_membase (s->code, lreg, offset, TRUE, TRUE);
3768 x86_patch (br [1], s->code);
/* Push an FP argument for a pending call: reserve 4 (R4) or 8 (R8)
 * bytes on the stack and pop ST(0) into the slot. */
3771 stmt: ARG_R4 (freg) {
3772 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
3773 x86_fst_membase (s->code, X86_ESP, 0, FALSE, TRUE);

3776 stmt: ARG_R8 (freg) {
3777 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
3778 x86_fst_membase (s->code, X86_ESP, 0, TRUE, TRUE);
3781 # fixme: we need to implement unordered and ordered compares
/* Conditional branch on an FP comparison.  As in the CSET rule, fcompp +
 * fnstsw + AND 0x4500 leaves the C0/C2/C3 condition bits in AX; each
 * (elided) case of the switch on tree->data.bi.cond then tests the bit
 * pattern for its CIL condition (0x4000 = equal, 0x0100 = below) and
 * emits a 32-bit branch to the target basic block, registered for
 * patching via mono_add_jump_info.  NOTE(review): the case labels were
 * elided from this view, so the mapping of each group to a specific
 * CEE_B* opcode is not visible here. */
3783 stmt: CBRANCH (COMPARE (freg, freg)) {
3785 x86_fcompp (s->code);
3786 x86_fnstsw (s->code);
3787 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
3789 switch (tree->data.bi.cond) {
3791 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3792 x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
3795 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3796 x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
3799 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
3800 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3801 x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
3804 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
3805 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3806 x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
3809 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
3810 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3811 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
3814 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
3815 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3816 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
3819 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3820 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
3823 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3824 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
3827 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
3828 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3829 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
3832 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
3833 mono_add_jump_info (s, s->code + 2, MONO_JUMP_INFO_BB, tree->data.bi.target);
3834 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
3837 g_assert_not_reached ();
/* Calls returning R8 (result left in ST(0) by the callee).  Three
 * dispatch forms share the same prologue: push `this` when present, push
 * a hidden pointer for a valuetype return slot (vtype_num), then call;
 * afterwards the caller pops args_size bytes of arguments. */

/* Direct call through an absolute address (ADDR_G).  The CMP against
 * [lreg] forces a null-check fault on the this-pointer before the call. */
3841 freg: CALL_R8 (this, ADDR_G) {
3842 int lreg = tree->left->reg1;
3848 if (tree->left->op != MB_TERM_NOP) {
3849 mono_assert (lreg >= 0);
3850 x86_push_reg (s->code, lreg);
3851 x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0);
3854 if (tree->data.ci.vtype_num) {
3855 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
3856 x86_lea_membase (s->code, treg, X86_EBP, offset);
3857 x86_push_reg (s->code, treg);
3860 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, tree->right->data.p);
3861 x86_call_code (s->code, 0);
3863 if (tree->data.ci.args_size)
3864 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);

/* Interface call: walk vtable -> interface_offsets -> slot for the
 * interface's id, then call indirectly through the method slot. */
3867 freg: CALL_R8 (this, INTF_ADDR) {
3868 int lreg = tree->left->reg1;
3874 if (tree->left->op != MB_TERM_NOP) {
3875 mono_assert (lreg >= 0);
3876 x86_push_reg (s->code, lreg);
3879 if (tree->data.ci.vtype_num) {
3880 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
3881 x86_lea_membase (s->code, treg, X86_EBP, offset);
3882 x86_push_reg (s->code, treg);
3885 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3886 x86_mov_reg_membase (s->code, lreg, lreg,
3887 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
3888 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
3889 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
3891 if (tree->data.ci.args_size)
3892 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);

/* Virtual call: load the vtable pointer and call through the method's
 * vtable slot. */
3895 freg: CALL_R8 (this, VFUNC_ADDR) {
3896 int lreg = tree->left->reg1;
3902 if (tree->left->op != MB_TERM_NOP) {
3903 mono_assert (lreg >= 0);
3904 x86_push_reg (s->code, lreg);
3907 if (tree->data.ci.vtype_num) {
3908 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
3909 x86_lea_membase (s->code, treg, X86_EBP, offset);
3910 x86_push_reg (s->code, treg);
3913 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3914 x86_call_virtual (s->code, lreg,
3915 G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
3917 if (tree->data.ci.args_size)
3918 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
3922 if (!tree->last_instr) {
3923 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_EPILOG, NULL);
3924 x86_jump32 (s->code, 0);
3928 # support for value types
/* Value types are handled by address: "loading" one just moves the
 * address into the result register if needed. */
3930 reg: LDIND_OBJ (reg) {
3931 if (tree->left->reg1 != tree->reg1)
3932 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Storing a value type copies data.i bytes (the valuetype size) from the
 * source address to the destination via the MEMCOPY helper;
 * 12 = 3 pushed arguments popped after the call. */
3935 stmt: STIND_OBJ (reg, reg) {
3936 mono_assert (tree->data.i > 0);
3938 x86_push_imm (s->code, tree->data.i);
3939 x86_push_reg (s->code, tree->right->reg1);
3940 x86_push_reg (s->code, tree->left->reg1);
3941 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, MEMCOPY);
3942 x86_call_code (s->code, 0);
3943 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
/* Valuetype field store with a transparent-proxy check, following the
 * same shape as REMOTE_STIND_R4/R8: proxies are remoted through
 * mono_store_remote_field, ordinary objects memcpy the valuetype into
 * the field.  The trailing g_assert_not_reached marks this path as
 * untested (see the in-code note). */
3946 stmt: REMOTE_STIND_OBJ (reg, reg) {
3949 int lreg = tree->left->reg1;
3950 int rreg = tree->right->reg1;
3959 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
3960 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
3961 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3963 /* this is a transparent proxy - remote the call */
/* args: object, klass, field, value address */
3965 x86_push_reg (s->code, rreg);
3966 x86_push_imm (s->code, tree->data.fi.field);
3967 x86_push_imm (s->code, tree->data.fi.klass);
3968 x86_push_reg (s->code, lreg);
3969 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, mono_store_remote_field);
3970 x86_call_code (s->code, 0);
3971 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3973 br [1] = s->code; x86_jump8 (s->code, 0);
/* non-proxy path: memcpy the valuetype into the field */
3975 x86_patch (br [0], s->code);
3976 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
3977 tree->data.fi.field->offset;
3979 x86_push_imm (s->code, mono_class_value_size (tree->data.fi.klass, NULL));
3980 x86_push_reg (s->code, tree->right->reg1);
/* NOTE(review): this ADD mutates left->reg1 and looks unrelated to the
 * field `offset` computed above — likely part of why the path is marked
 * untested below. */
3981 x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, sizeof (MonoObject));
3982 x86_push_reg (s->code, tree->left->reg1);
3983 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, MEMCOPY);
3984 x86_call_code (s->code, 0);
3985 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
3987 x86_patch (br [1], s->code);
3989 /* please test this first */
3990 g_assert_not_reached ();
/* Pass a small valuetype argument that fits in an immediate. */
3993 stmt: ARG_OBJ (coni4) {
3994 x86_push_imm (s->code, tree->left->data.i);

/* Pass a valuetype argument by copy: reserve `sa` (size, presumably
 * stack-aligned — the rounding lines are elided here) bytes on the
 * stack, then MEMCOPY the value into the reserved area.  EAX/EDX/ECX are
 * caller-saved around the helper call; the lea offset 5*4 skips the
 * 3 saved registers + 2 already-pushed MEMCOPY args to address the
 * reserved area. */
3997 stmt: ARG_OBJ (reg) {
3998 int size = tree->data.i;
4001 mono_assert (size > 0);
4006 /* reserve space for the argument */
4007 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, sa);
4009 x86_push_reg (s->code, X86_EAX);
4010 x86_push_reg (s->code, X86_EDX);
4011 x86_push_reg (s->code, X86_ECX);
4013 x86_push_imm (s->code, size);
4014 x86_push_reg (s->code, tree->left->reg1);
4015 x86_lea_membase (s->code, X86_EAX, X86_ESP, 5*4);
4016 x86_push_reg (s->code, X86_EAX);
4018 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, MEMCOPY);
4019 x86_call_code (s->code, 0);
4020 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
4022 x86_pop_reg (s->code, X86_ECX);
4023 x86_pop_reg (s->code, X86_EDX);
4024 x86_pop_reg (s->code, X86_EAX);

/* Return a valuetype: copy `size` bytes from the value's address into
 * the caller-provided return buffer at [EBP+8], then (unless this is the
 * method's last instruction) jump to the epilog. */
4027 stmt: RET_OBJ (reg) {
4028 int size = tree->data.i;
4030 x86_push_imm (s->code, size);
4031 x86_push_reg (s->code, tree->left->reg1);
4032 x86_push_membase (s->code, X86_EBP, 8);
4035 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_ABS, MEMCOPY);
4036 x86_call_code (s->code, 0);
4038 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
4040 if (!tree->last_instr) {
4041 mono_add_jump_info (s, s->code + 1, MONO_JUMP_INFO_EPILOG, NULL);
4042 x86_jump32 (s->code, 0);
4051 mono_llmult (gint64 a, gint64 b)
/* Unsigned 64x64 -> 64 multiply with overflow detection, built from
 * 32-bit halves (al/ah * bl/bh).  Overflow occurs when both high words
 * are nonzero (the elided check before the first goto) or when the sum
 * of the cross terms exceeds 32 bits; on overflow *exc is set to an
 * OverflowException.  The full product is al*bl plus the cross terms
 * shifted into the high half. */
4057 mono_llmult_ovf_un (gpointer *exc, guint32 al, guint32 ah, guint32 bl, guint32 bh)
4061 // fixme: this is incredibly slow
4064 goto raise_exception;
4066 res = (guint64)al * (guint64)bl;
4068 t1 = (guint64)ah * (guint64)bl + (guint64)al * (guint64)bh;
4070 if (t1 > 0xffffffff)
4071 goto raise_exception;
4073 res += ((guint64)t1) << 32;
4079 *exc = mono_get_exception_overflow ();

/* Signed variant: same schoolbook decomposition, but — as the fixme
 * notes — no overflow check is actually performed on the signed result;
 * the exception path below exists but the detection is missing. */
4084 mono_llmult_ovf (gpointer *exc, guint32 al, gint32 ah, guint32 bl, gint32 bh)
4088 // fixme: check for overflow
4090 res = (gint64)al * (gint64)bl;
4092 t1 = (gint64)ah * bl + al * (gint64)bh;
4094 res += ((gint64)t1) << 32;
4100 *exc = mono_get_exception_overflow ();
4106 mono_lldiv (gint64 a, gint64 b)
4112 mono_llrem (gint64 a, gint64 b)
4118 mono_lldiv_un (guint64 a, guint64 b)
4124 mono_llrem_un (guint64 a, guint64 b)
/* Allocate a new burg tree node from the mempool, zero-initialized, with
 * the given opcode and children; value type starts as VAL_UNKNOWN. */
4130 mono_ctree_new (MonoMemPool *mp, int op, MBTree *left, MBTree *right)
4132 MBTree *t = mono_mempool_alloc0 (mp, sizeof (MBTree));
4140 t->svt = VAL_UNKNOWN;

/* Convenience: a leaf node has no children. */
4146 mono_ctree_new_leaf (MonoMemPool *mp, int op)
4148 return mono_ctree_new (mp, op, NULL, NULL);

/* Return the per-thread LMF (last managed frame) pointer slot, lazily
 * allocating it in TLS on first use.  NOTE(review): the g_malloc'd slot
 * is never freed — presumably it lives for the thread's lifetime. */
4152 arch_get_lmf_addr (void)
4156 if ((lmf = TlsGetValue (lmf_thread_id)))
4159 lmf = g_malloc (sizeof (gpointer));
4162 TlsSetValue (lmf_thread_id, lmf);

/* JIT-callable wrappers that supply the current domain to the runtime
 * object-creation APIs. */
4168 mono_array_new_wrapper (MonoClass *eclass, guint32 n)
4170 MonoDomain *domain = mono_domain_get ();
4172 return mono_array_new (domain, eclass, n);

4176 mono_object_new_wrapper (MonoClass *klass)
4178 MonoDomain *domain = mono_domain_get ();
4180 return mono_object_new (domain, klass);

4184 mono_ldstr_wrapper (MonoImage *image, guint32 ind)
4186 MonoDomain *domain = mono_domain_get ();
4188 return mono_ldstr (domain, image, ind);

/* Compute the address of a static field: vtable static-data block plus
 * the field offset. */
4192 mono_ldsflda (MonoClass *klass, int offset)
4194 MonoDomain *domain = mono_domain_get ();
4198 vt = mono_class_vtable (domain, klass);
4199 addr = (char*)(vt->data) + offset;
4206 MEMCOPY (void *dest, const void *src, size_t n)
4210 printf ("MEMCPY(%p to %p [%d]) ", src, dest, n);
4212 for (i = 0; i < l; i++)
4213 printf ("%02x ", *((guint8 *)src + i));
4216 return memcpy (dest, src, n);