2 * x86.brg: X86 code generator
5 * Dietmar Maurer (dietmar@ximian.com)
7 * (C) 2001 Ximian, Inc.
16 #ifndef PLATFORM_WIN32
18 #include <sys/syscall.h>
21 #include <mono/metadata/blob.h>
22 #include <mono/metadata/metadata.h>
23 #include <mono/metadata/loader.h>
24 #include <mono/metadata/object.h>
25 #include <mono/metadata/tabledefs.h>
26 #include <mono/metadata/appdomain.h>
27 #include <mono/metadata/marshal.h>
28 #include <mono/arch/x86/x86-codegen.h>
34 * Pull the list of opcodes
36 #define OPDEF(a,b,c,d,e,f,g,h,i,j) \
40 #include "mono/cil/opcode.def"
/* Forward declarations and monoburg configuration.
 * NOTE(review): interior lines appear elided in this extraction; the AMode
 * enum and struct fragments below are incomplete as shown. */
45 void print_lmf (void);
/* MBTREE_TYPE/MBCGEN_TYPE/MBCOST_DATA tell monoburg which types the
 * generated matcher operates on; MBALLOC_STATE allocates matcher state
 * from the flow graph's memory pool. */
47 #define MBTREE_TYPE MBTree
48 #define MBCGEN_TYPE MonoFlowGraph
49 #define MBCOST_DATA MonoFlowGraph
50 #define MBALLOC_STATE mono_mempool_alloc (data->mp, sizeof (MBState))
/* x86 addressing-mode kinds used by the ainfo rules further below. */
53 AMImmediate = 0, // ptr
55 AMIndex = 2, // V[REG*X]
56 AMBaseIndex = 3, // V[REG*X][REG]
69 unsigned last_instr:1;
92 MonoClassField *field;
/* Runtime helpers implementing 64-bit arithmetic and metadata lookups;
 * the code generator emits calls to these. */
100 gint64 mono_llmult (gint64 a, gint64 b);
101 guint64 mono_llmult_ovf (gpointer *exc, guint32 al, gint32 ah, guint32 bl, gint32 bh);
102 guint64 mono_llmult_ovf_un (gpointer *exc, guint32 al, guint32 ah, guint32 bl, guint32 bh);
103 gint64 mono_lldiv (gint64 a, gint64 b);
104 gint64 mono_llrem (gint64 a, gint64 b);
105 guint64 mono_lldiv_un (guint64 a, guint64 b);
106 guint64 mono_llrem_un (guint64 a, guint64 b);
107 gpointer mono_ldsflda (MonoClass *klass, int offset);
109 gpointer mono_ldvirtftn (MonoObject *this, int slot);
110 gpointer mono_ldintftn (MonoObject *this, int slot);
111 gpointer mono_ldftn (MonoMethod *method);
113 void mono_emit_fast_iconv(MBCGEN_TYPE* s, MBTREE_TYPE* tree);
114 void mono_emit_fast_iconv_i8(MBCGEN_TYPE* s, MBTREE_TYPE* tree);
/* Return types of the following wrappers are on elided lines. */
117 mono_array_new_wrapper (MonoClass *eclass, guint32 n);
119 mono_object_new_wrapper (MonoClass *klass);
121 mono_ldstr_wrapper (MonoImage *image, guint32 ind);
124 get_mono_object_isinst (void);
/* Optimization-level gating for burg rule costs: a cost of 65535
 * effectively disables a rule.  MB_USE_OPT1 rules are enabled at
 * level >= 1, MB_USE_OPT2 rules at level >= 2.
 * NOTE(review): the matching #endif lines appear elided in this
 * extraction. */
126 #define MB_OPT_LEVEL 1
128 #if MB_OPT_LEVEL == 0
129 #define MB_USE_OPT1(c) 65535
130 #define MB_USE_OPT2(c) 65535
132 #if MB_OPT_LEVEL == 1
133 #define MB_USE_OPT1(c) c
134 #define MB_USE_OPT2(c) 65535
136 #if MB_OPT_LEVEL >= 2
137 #define MB_USE_OPT1(c) c
138 #define MB_USE_OPT2(c) c
143 #define REAL_PRINT_REG(text,reg) \
144 mono_assert (reg >= 0); \
145 x86_push_reg (s->code, X86_EAX); \
146 x86_push_reg (s->code, X86_EDX); \
147 x86_push_reg (s->code, X86_ECX); \
148 x86_push_reg (s->code, reg); \
149 x86_push_imm (s->code, reg); \
150 x86_push_imm (s->code, text " %d %p\n"); \
151 x86_mov_reg_imm (s->code, X86_EAX, printf); \
152 x86_call_reg (s->code, X86_EAX); \
153 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 3*4); \
154 x86_pop_reg (s->code, X86_ECX); \
155 x86_pop_reg (s->code, X86_EDX); \
156 x86_pop_reg (s->code, X86_EAX);
/* Debug vs. release selection of MEMCOPY/PRINT_REG, plus core emission
 * macros.  NOTE(review): the surrounding #ifdef/#endif and do{}while
 * scaffolding appears elided in this extraction. */
159 debug_memcopy (void *dest, const void *src, size_t n);
162 #define MEMCOPY debug_memcopy
163 #define PRINT_REG(text,reg) REAL_PRINT_REG(text,reg)
166 #define MEMCOPY memcpy
168 #define PRINT_REG(x,y)
172 /* The call instruction for virtual functions must have a known
173 * size (used by x86_magic_trampoline)
/* Emits "call [basereg+disp]" as a fixed 6-byte sequence: opcode 0xff,
 * a mod=2 address byte, and a 32-bit displacement. */
175 #define x86_call_virtual(inst,basereg,disp) \
177 *(inst)++ = (unsigned char)0xff; \
178 x86_address_byte ((inst), 2, 2, (basereg)); \
179 x86_imm_emit32 ((inst), (disp)); \
182 /* emit an exception if condition is fail */
/* Branches 10 bytes forward over the throw sequence when `cond` holds;
 * otherwise pushes the exception class name and calls the runtime's
 * throw-by-name trampoline (address patched via jump info). */
183 #define EMIT_COND_SYSTEM_EXCEPTION(cond,signed,exc_name) \
186 x86_branch8 (s->code, cond, 10, signed); \
187 x86_push_imm (s->code, exc_name); \
188 t = arch_get_throw_exception_by_name (); \
189 mono_add_jump_info (s, s->code, \
190 MONO_JUMP_INFO_ABS, t); \
191 x86_call_code (s->code, 0); \
194 /* we use this macro to move one lreg to another - source and
195 destination may overlap, but the register allocator has to
196 make sure that ((d1 < d2) && (s1 < s2))
/* Copy order depends on d1 vs s1 so an overlapping source register is
 * not clobbered before it is read. */
198 #define MOVE_LREG(d1,d2,s1,s2) \
200 g_assert ((d1 < d2) && (s1 < s2)); \
201 if ((d1) <= (s1)) { \
203 x86_mov_reg_reg (s->code, d1, s1, 4); \
205 x86_mov_reg_reg (s->code, d2, s2, 4); \
208 x86_mov_reg_reg (s->code, d2, s2, 4); \
210 x86_mov_reg_reg (s->code, d1, s1, 4); \
218 # terminal definitions
# One %term per intermediate-representation opcode the burg matcher can
# see at a tree node; the rules below pattern-match on these.
222 %term CONST_I4 CONST_I8 CONST_R4 CONST_R8
223 %term LDIND_I1 LDIND_U1 LDIND_I2 LDIND_U2 LDIND_I4 LDIND_I8 LDIND_R4 LDIND_R8 LDIND_OBJ
224 %term STIND_I1 STIND_I2 STIND_I4 STIND_I8 STIND_R4 STIND_R8 STIND_OBJ
225 %term ADDR_L ADDR_G ARG_I4 ARG_I8 ARG_R4 ARG_R8 ARG_OBJ CALL_I4 CALL_I8 CALL_R8 CALL_VOID
226 %term BREAK SWITCH BR RET_VOID RET RET_OBJ ENDFINALLY JMP
227 %term ADD ADD_OVF ADD_OVF_UN SUB SUB_OVF SUB_OVF_UN MUL MUL_OVF MUL_OVF_UN
228 %term DIV DIV_UN REM REM_UN AND OR XOR SHL SHR SHR_UN NEG NOT CKFINITE
229 %term COMPARE CBRANCH BRTRUE BRFALSE CSET
230 %term CONV_I4 CONV_I1 CONV_I2 CONV_I8 CONV_U1 CONV_U2 CONV_U4 CONV_U8 CONV_R4 CONV_R8 CONV_R_UN
231 %term INTF_ADDR VFUNC_ADDR NOP NEWARR NEWARR_SPEC NEWOBJ NEWOBJ_SPEC
232 %term INITBLK CPBLK CPSRC POP INITOBJ LOCALLOC
233 %term ISINST CASTCLASS UNBOX
234 %term CONV_OVF_I1 CONV_OVF_U1 CONV_OVF_I2 CONV_OVF_U2 CONV_OVF_U4 CONV_OVF_U8 CONV_OVF_I4
235 %term CONV_OVF_I4_UN CONV_OVF_U1_UN CONV_OVF_U2_UN
236 %term CONV_OVF_I2_UN CONV_OVF_I8_UN CONV_OVF_I1_UN
237 %term EXCEPTION THROW RETHROW HANDLER CHECKTHIS
238 %term LDLEN LDELEMA LDFTN LDVIRTFTN LDSTR LDSFLDA
239 %term REMOTE_LDFLDA REMOTE_STIND_I1 REMOTE_STIND_I2 REMOTE_STIND_I4
240 %term REMOTE_STIND_I8 REMOTE_STIND_R4 REMOTE_STIND_R8 REMOTE_STIND_OBJ
243 %term FUNC1 PROC3 FREE OBJADDR
/* Addressing-mode synthesis rules: acon/base/index/addr nonterminals
 * fold constants, registers and scaled indexes into an ainfo struct
 * (offset, basereg, indexreg, shift, amode) so later load/store rules
 * can emit a single x86 addressing form.
 * NOTE(review): rule headers, closing braces and cost annotations are
 * partially elided in this extraction. */
259 tree->data.ainfo.offset = tree->data.i;
260 tree->data.ainfo.amode = AMImmediate;
264 tree->data.ainfo.offset = tree->data.i;
265 tree->data.ainfo.amode = AMImmediate;
/* ADDR_G + CONST_I4 folds to an absolute address. */
268 acon: ADD (ADDR_G, CONST_I4) {
269 tree->data.ainfo.offset = (unsigned)tree->left->data.p + tree->right->data.i;
270 tree->data.ainfo.amode = AMImmediate;
276 tree->data.ainfo.offset = 0;
277 tree->data.ainfo.basereg = tree->reg1;
278 tree->data.ainfo.amode = AMBase;
/* reg + CONST_I4 becomes base+displacement. */
281 base: ADD (reg, CONST_I4) {
282 tree->data.ainfo.offset = tree->right->data.i;
283 tree->data.ainfo.basereg = tree->left->reg1;
284 tree->data.ainfo.amode = AMBase;
/* Stack-allocated local: EBP-relative.  MBCOND restricts this to
 * variables not held in a register. */
288 tree->data.ainfo.offset = VARINFO (s, tree->data.i).offset;
289 tree->data.ainfo.basereg = X86_EBP;
290 tree->data.ainfo.amode = AMBase;
292 MBCOND (VARINFO (data, tree->data.i).reg < 0);
297 tree->data.ainfo.offset = 0;
298 tree->data.ainfo.indexreg = tree->reg1;
299 tree->data.ainfo.shift = 0;
300 tree->data.ainfo.amode = AMIndex;
/* reg << {0..3} maps directly onto x86 scaled-index addressing. */
303 index: SHL (reg, CONST_I4) {
304 tree->data.ainfo.offset = 0;
305 tree->data.ainfo.amode = AMIndex;
306 tree->data.ainfo.indexreg = tree->left->reg1;
307 tree->data.ainfo.shift = tree->right->data.i;
309 MBCOND (tree->right->data.i == 0 ||
310 tree->right->data.i == 1 ||
311 tree->right->data.i == 2 ||
312 tree->right->data.i == 3);
/* reg * {1,2,4,8}: fast_log2 converts the multiplier to a shift count. */
317 index: MUL (reg, CONST_I4) {
318 static int fast_log2 [] = { 1, 0, 1, -1, 2, -1, -1, -1, 3 };
320 tree->data.ainfo.offset = 0;
321 tree->data.ainfo.amode = AMIndex;
322 tree->data.ainfo.indexreg = tree->left->reg1;
323 tree->data.ainfo.shift = fast_log2 [tree->right->data.i];
325 MBCOND (tree->right->data.i == 1 ||
326 tree->right->data.i == 2 ||
327 tree->right->data.i == 4 ||
328 tree->right->data.i == 8);
/* Combine an index subtree with a base subtree; amodes OR together to
 * AMBaseIndex. */
337 addr: ADD (index, base) {
338 tree->data.ainfo.offset = tree->right->data.ainfo.offset;
339 tree->data.ainfo.basereg = tree->right->data.ainfo.basereg;
340 tree->data.ainfo.amode = tree->left->data.ainfo.amode |
341 tree->right->data.ainfo.amode;
342 tree->data.ainfo.shift = tree->left->data.ainfo.shift;
343 tree->data.ainfo.indexreg = tree->left->data.ainfo.indexreg;
346 # we pass exception in ECX to catch handler
/* EXCEPTION: entry of a catch handler.  The thrown object arrives in
 * ECX; it is copied into the handler variable (register or EBP slot)
 * and also spilled so RETHROW can find it later.
 * NOTE(review): rule headers/braces are partially elided here. */
348 int offset = VARINFO (s, tree->data.i).offset;
349 int reg = VARINFO (s, tree->data.i).reg;
351 if (tree->reg1 != X86_ECX)
352 x86_mov_reg_reg (s->code, tree->reg1, X86_ECX, 4);
354 /* store it so that we can RETHROW it later */
356 x86_mov_membase_reg (s->code, X86_EBP, offset, tree->reg1, 4);
358 x86_mov_reg_reg (s->code, reg, tree->reg1, 4);
/* THROW: push the exception object and call the runtime throw helper
 * (absolute address recorded for patching). */
364 x86_push_reg (s->code, tree->left->reg1);
365 target = arch_get_throw_exception ();
366 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, target);
367 x86_call_code (s->code, target);
/* RETHROW: re-push the exception saved at handler entry (from its
 * register or EBP slot) and throw again. */
371 int off = VARINFO (s, tree->data.i).offset;
372 int reg = VARINFO (s, tree->data.i).reg;
376 x86_push_membase (s->code, X86_EBP, off);
378 x86_push_reg (s->code, reg);
380 target = arch_get_throw_exception ();
381 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, target);
382 x86_call_code (s->code, target);
/* HANDLER: call into a finally/filter block; ESP is saved first so
 * ENDFINALLY can restore it (value types may be allocated inside). */
386 /* save ESP (used by ENDFINALLY) */
387 x86_mov_membase_reg (s->code, X86_EBP, mono_exc_esp_offset, X86_ESP, 4);
388 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
389 x86_call_imm (s->code, 0);
393 /* restore ESP - which can be modified when we allocate value types
394 * in the finally handler */
395 x86_mov_reg_membase (s->code, X86_ESP, X86_EBP, mono_exc_esp_offset, 4);
396 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
/* Peephole rules for "local = local op x" where both ADDR_L nodes refer
 * to the same variable (enforced by the trailing MBCONDs): emit inc/dec,
 * add/sub-immediate, or a single mov instead of load/op/store.
 * NOTE(review): the if/else scaffolding between the emit calls is
 * partially elided in this extraction. */
400 stmt: STIND_I4 (ADDR_L, ADD (LDIND_I4 (ADDR_L), CONST_I4)) {
401 int vn = tree->left->data.i;
402 int treg = VARINFO (s, vn).reg;
403 int offset = VARINFO (s, vn).offset;
404 int data = tree->right->right->data.i;
/* data == 1 uses the shorter inc encoding; otherwise add-immediate. */
408 x86_inc_reg (s->code, treg);
410 x86_inc_membase (s->code, X86_EBP, offset);
413 x86_alu_reg_imm (s->code, X86_ADD, treg, data);
415 x86_alu_membase_imm (s->code, X86_ADD, X86_EBP, offset, data);
418 MBCOND (tree->right->left->left->data.i == tree->left->data.i);
/* Same pattern for subtraction: dec or sub-immediate. */
422 stmt: STIND_I4 (ADDR_L, SUB (LDIND_I4 (ADDR_L), CONST_I4)) {
423 int vn = tree->left->data.i;
424 int treg = VARINFO (s, vn).reg;
425 int offset = VARINFO (s, vn).offset;
426 int data = tree->right->right->data.i;
430 x86_dec_reg (s->code, treg);
432 x86_dec_membase (s->code, X86_EBP, offset);
435 x86_alu_reg_imm (s->code, X86_SUB, treg, data);
437 x86_alu_membase_imm (s->code, X86_SUB, X86_EBP, offset, data);
440 MBCOND (tree->right->left->left->data.i == tree->left->data.i);
/* local += reg: add directly into the variable's register or EBP slot. */
444 stmt: STIND_I4 (ADDR_L, ADD (LDIND_I4 (ADDR_L), reg)) {
445 int vn = tree->left->data.i;
446 int treg = VARINFO (s, vn).reg;
447 int sreg = tree->right->right->reg1;
448 int offset = VARINFO (s, vn).offset;
451 x86_alu_reg_reg (s->code, X86_ADD, treg, sreg);
453 x86_alu_membase_reg (s->code, X86_ADD, X86_EBP, offset, sreg);
456 MBCOND (tree->right->left->left->data.i == tree->left->data.i);
/* local1 = local2: single mov chosen by where each variable lives;
 * at least one side must be in a register (see MBCOND). */
460 stmt: STIND_I4 (ADDR_L, LDIND_I4 (ADDR_L)) {
461 int treg1 = VARINFO (s, tree->left->data.i).reg;
462 int treg2 = VARINFO (s, tree->right->left->data.i).reg;
463 int offset1 = VARINFO (s, tree->left->data.i).offset;
464 int offset2 = VARINFO (s, tree->right->left->data.i).offset;
466 //{static int cx= 0; printf ("CX %5d\n", cx++);}
468 if (treg1 >= 0 && treg2 >= 0) {
469 x86_mov_reg_reg (s->code, treg1, treg2, 4);
472 if (treg1 >= 0 && treg2 < 0) {
473 x86_mov_reg_membase (s->code, treg1, X86_EBP, offset2, 4);
476 if (treg1 < 0 && treg2 >= 0) {
477 x86_mov_membase_reg (s->code, X86_EBP, offset1, treg2, 4);
481 g_assert_not_reached ();
484 MBCOND (VARINFO (data, tree->left->data.i).reg >= 0 ||
485 VARINFO (data, tree->right->left->data.i).reg >= 0);
/* 32-bit stores through a synthesized addressing mode: one case per
 * amode (absolute, base+disp, scaled index, base+index).
 * NOTE(review): the case labels/breaks are elided in this extraction. */
489 stmt: STIND_I4 (addr, CONST_I4) {
490 switch (tree->left->data.ainfo.amode) {
493 x86_mov_mem_imm (s->code, tree->left->data.ainfo.offset, tree->right->data.i, 4);
497 x86_mov_membase_imm (s->code, tree->left->data.ainfo.basereg,
498 tree->left->data.ainfo.offset, tree->right->data.i, 4);
501 x86_mov_memindex_imm (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
502 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
503 tree->right->data.i, 4);
506 x86_mov_memindex_imm (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
507 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
508 tree->right->data.i, 4);
/* Same dispatch, storing a register instead of an immediate. */
513 stmt: STIND_I4 (addr, reg) {
515 switch (tree->left->data.ainfo.amode) {
518 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 4);
522 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
523 tree->left->data.ainfo.offset, tree->right->reg1, 4);
526 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
527 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
528 tree->right->reg1, 4);
531 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
532 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
533 tree->right->reg1, 4);
/* Store a 32-bit field through a possibly-remote object: if the target's
 * vtable class is TransparentProxy, marshal the store through
 * mono_store_remote_field; otherwise store directly at the field offset
 * (adjusted by sizeof(MonoObject) for boxed value types).
 * NOTE(review): declarations of treg/br and some lines are elided. */
538 stmt: REMOTE_STIND_I4 (reg, reg) {
541 int lreg = tree->left->reg1;
542 int rreg = tree->right->reg1;
551 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
552 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
553 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
555 /* this is a transparent proxy - remote the call */
557 /* save value to stack */
558 x86_push_reg (s->code, rreg);
/* args: object, klass, field, &value on stack (5 words -> ESP += 20). */
560 x86_push_reg (s->code, X86_ESP);
561 x86_push_imm (s->code, tree->data.fi.field);
562 x86_push_imm (s->code, tree->data.fi.klass);
563 x86_push_reg (s->code, lreg);
564 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
565 x86_call_code (s->code, 0);
566 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
568 br [1] = s->code; x86_jump8 (s->code, 0);
570 x86_patch (br [0], s->code);
571 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
572 tree->data.fi.field->offset;
573 x86_mov_membase_reg (s->code, lreg, offset, rreg, 4);
575 x86_patch (br [1], s->code);
/* 8-bit store through a synthesized addressing mode; same four-way
 * amode dispatch as STIND_I4 but with size 1.
 * NOTE(review): case labels/breaks elided in this extraction. */
578 stmt: STIND_I1 (addr, reg) {
579 PRINT_REG ("STIND_I1", tree->right->reg1);
581 switch (tree->left->data.ainfo.amode) {
584 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 1);
588 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
589 tree->left->data.ainfo.offset, tree->right->reg1, 1);
592 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
593 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
594 tree->right->reg1, 1);
597 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
598 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
599 tree->right->reg1, 1);
/* 8-bit variant of REMOTE_STIND_I4: remote the store through
 * mono_store_remote_field for transparent proxies, otherwise a direct
 * 1-byte store at the (value-type-adjusted) field offset. */
604 stmt: REMOTE_STIND_I1 (reg, reg) {
607 int lreg = tree->left->reg1;
608 int rreg = tree->right->reg1;
617 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
618 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
619 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
621 /* this is a transparent proxy - remote the call */
623 /* save value to stack */
624 x86_push_reg (s->code, rreg);
626 x86_push_reg (s->code, X86_ESP);
627 x86_push_imm (s->code, tree->data.fi.field);
628 x86_push_imm (s->code, tree->data.fi.klass);
629 x86_push_reg (s->code, lreg);
630 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
631 x86_call_code (s->code, 0);
632 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
634 br [1] = s->code; x86_jump8 (s->code, 0);
636 x86_patch (br [0], s->code);
637 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
638 tree->data.fi.field->offset;
639 x86_mov_membase_reg (s->code, lreg, offset, rreg, 1);
641 x86_patch (br [1], s->code);
/* 16-bit store through a synthesized addressing mode; same dispatch as
 * STIND_I4/I1 with size 2.
 * NOTE(review): case labels/breaks elided in this extraction. */
644 stmt: STIND_I2 (addr, reg) {
645 PRINT_REG ("STIND_I2", tree->right->reg1);
647 switch (tree->left->data.ainfo.amode) {
650 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 2);
654 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
655 tree->left->data.ainfo.offset, tree->right->reg1, 2);
658 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
659 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
660 tree->right->reg1, 2);
663 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
664 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
665 tree->right->reg1, 2);
/* 16-bit variant of REMOTE_STIND_I4: remote the store for transparent
 * proxies, otherwise a direct 2-byte store at the field offset. */
670 stmt: REMOTE_STIND_I2 (reg, reg) {
673 int lreg = tree->left->reg1;
674 int rreg = tree->right->reg1;
683 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
684 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
685 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
687 /* this is a transparent proxy - remote the call */
689 /* save value to stack */
690 x86_push_reg (s->code, rreg);
692 x86_push_reg (s->code, X86_ESP);
693 x86_push_imm (s->code, tree->data.fi.field);
694 x86_push_imm (s->code, tree->data.fi.klass);
695 x86_push_reg (s->code, lreg);
696 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
697 x86_call_code (s->code, 0);
698 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
700 br [1] = s->code; x86_jump8 (s->code, 0);
702 x86_patch (br [0], s->code);
703 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
704 tree->data.fi.field->offset;
705 x86_mov_membase_reg (s->code, lreg, offset, rreg, 2);
707 x86_patch (br [1], s->code);
/* Fast paths for locals that live in a register (MBCOND reg >= 0):
 * loads/stores become register moves or immediate loads. */
710 reg: LDIND_I4 (ADDR_L) {
711 int treg = VARINFO (s, tree->left->data.i).reg;
713 if (treg != tree->reg1)
714 x86_mov_reg_reg (s->code, tree->reg1, treg, 4);
717 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
/* Store a constant straight into the variable's register. */
721 stmt: STIND_I4 (ADDR_L, CONST_I4) {
722 int treg = VARINFO (s, tree->left->data.i).reg;
724 x86_mov_reg_imm (s->code, treg, tree->right->data.i);
727 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
/* reg-local = stack-local: single load from the EBP slot. */
731 stmt: STIND_I4 (ADDR_L, LDIND_I4 (ADDR_L)) {
732 int treg = VARINFO (s, tree->left->data.i).reg;
733 int offset = VARINFO (s, tree->right->left->data.i).offset;
735 x86_mov_reg_membase (s->code, treg, X86_EBP, offset, 4);
737 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
738 MBCOND ((VARINFO (data, tree->right->left->data.i).reg < 0));
/* reg-local = computed value: move only if registers differ. */
742 stmt: STIND_I4 (ADDR_L, reg) {
743 int treg = VARINFO (s, tree->left->data.i).reg;
745 if (treg != tree->right->reg1)
746 x86_mov_reg_reg (s->code, treg, tree->right->reg1, 4);
749 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
/* Loads through a synthesized addressing mode.  LDIND_I4 is a plain
 * 4-byte mov; I1/U1 use x86_widen_* with (is_signed, is_half) flags:
 * I1 sign-extends a byte, U1 zero-extends a byte.
 * NOTE(review): case labels/breaks elided in this extraction. */
754 reg: LDIND_I4 (addr) {
756 switch (tree->left->data.ainfo.amode) {
759 x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4);
763 x86_mov_reg_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
764 tree->left->data.ainfo.offset, 4);
767 x86_mov_reg_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
768 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, 4);
771 x86_mov_reg_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
772 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
773 tree->left->data.ainfo.shift, 4);
778 PRINT_REG ("LDIND_I4", tree->reg1);
/* Sign-extending byte load. */
781 reg: LDIND_I1 (addr) {
782 switch (tree->left->data.ainfo.amode) {
785 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, TRUE, FALSE);
789 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
790 tree->left->data.ainfo.offset, TRUE, FALSE);
793 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
794 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, TRUE, FALSE);
797 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
798 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
799 tree->left->data.ainfo.shift, TRUE, FALSE);
803 PRINT_REG ("LDIND_I1", tree->reg1);
/* Zero-extending byte load. */
806 reg: LDIND_U1 (addr) {
807 switch (tree->left->data.ainfo.amode) {
810 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, FALSE, FALSE);
814 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
815 tree->left->data.ainfo.offset, FALSE, FALSE);
818 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
819 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, FALSE, FALSE);
822 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
823 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
824 tree->left->data.ainfo.shift, FALSE, FALSE);
828 PRINT_REG ("LDIND_U1", tree->reg1);
/* Sign-extending 16-bit load through a synthesized addressing mode
 * (x86_widen_* with is_signed=TRUE, is_half=TRUE).
 * Fix: the debug PRINT_REG label said "LDIND_U2" — a copy/paste slip
 * from the unsigned rule below; corrected to "LDIND_I2".
 * NOTE(review): case labels/breaks elided in this extraction. */
831 reg: LDIND_I2 (addr) {
832 switch (tree->left->data.ainfo.amode) {
835 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, TRUE, TRUE);
839 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
840 tree->left->data.ainfo.offset, TRUE, TRUE);
843 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
844 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, TRUE, TRUE);
847 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
848 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
849 tree->left->data.ainfo.shift, TRUE, TRUE);
853 PRINT_REG ("LDIND_I2", tree->reg1);
/* Zero-extending 16-bit load (is_signed=FALSE, is_half=TRUE).
 * NOTE(review): case labels/breaks elided in this extraction. */
856 reg: LDIND_U2 (addr) {
857 switch (tree->left->data.ainfo.amode) {
860 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, FALSE, TRUE);
864 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
865 tree->left->data.ainfo.offset, FALSE, TRUE);
868 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
869 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, FALSE, TRUE);
872 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
873 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
874 tree->left->data.ainfo.shift, FALSE, TRUE);
878 PRINT_REG ("LDIND_U2", tree->reg1);
/* REMOTE_LDFLDA: compute a field address on a possibly-remote object.
 * For transparent proxies the field is fetched via mono_load_remote_field
 * (caller-saved regs preserved around the call); otherwise a simple lea
 * from the object pointer, with the MonoObject header subtracted for
 * boxed value types.
 * NOTE(review): declarations of treg/br and several lines are elided. */
881 reg: REMOTE_LDFLDA (reg) {
884 int lreg = tree->left->reg1;
/* If the scratch reg is not the result reg, preserve it on the stack. */
889 if (tree->reg1 != treg)
890 x86_push_reg (s->code, treg);
892 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
893 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
894 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
896 /* this is a transparent proxy - remote the call */
898 x86_push_reg (s->code, X86_EAX);
900 x86_push_reg (s->code, X86_EDX);
901 x86_push_reg (s->code, X86_ECX);
/* args: object, klass, field, &res (4 words -> ESP += 16). */
903 x86_push_reg (s->code, X86_ESP);
904 x86_push_imm (s->code, tree->data.fi.field);
905 x86_push_imm (s->code, tree->data.fi.klass);
906 x86_push_reg (s->code, lreg);
907 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_load_remote_field);
908 x86_call_code (s->code, 0);
909 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
912 x86_mov_reg_reg (s->code, treg, X86_EAX, 4);
914 x86_pop_reg (s->code, X86_ECX);
916 x86_pop_reg (s->code, X86_EDX);
918 x86_pop_reg (s->code, X86_EAX);
920 x86_mov_reg_reg (s->code, tree->reg1, treg, 4);
922 br [1] = s->code; x86_jump8 (s->code, 0);
924 x86_patch (br [0], s->code);
925 if (tree->data.fi.klass->valuetype)
926 x86_lea_membase (s->code, tree->reg1, lreg,
927 tree->data.fi.field->offset - sizeof (MonoObject));
929 x86_lea_membase (s->code, tree->reg1, lreg, tree->data.fi.field->offset);
931 x86_patch (br [1], s->code);
933 if (tree->reg1 != treg)
934 x86_pop_reg (s->code, treg);
/* ADDR_L: address of a stack-allocated local (EBP-relative lea);
 * only valid for variables not held in a register. */
938 int offset = VARINFO (s, tree->data.i).offset;
940 x86_lea_membase (s->code, tree->reg1, X86_EBP, offset);
942 PRINT_REG ("ADDR_L", tree->reg1);
944 MBCOND (VARINFO (data, tree->data.i).reg < 0);
/* ADDR_G loads a global address as an immediate; the CONV_I1/U1/I2/U2
 * rules use x86_widen_reg with (is_signed, is_half) flags; CONST_I4
 * is an immediate load; CONV_I4/U4/OVF_I4 on a 32-bit value are plain
 * register moves (no truncation needed).
 * NOTE(review): rule headers are elided for several of these bodies. */
950 x86_mov_reg_imm (s->code, tree->reg1, tree->data.p);
954 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, FALSE);
958 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
962 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, TRUE);
966 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
970 x86_mov_reg_imm (s->code, tree->reg1, tree->data.i);
974 if (tree->reg1 != tree->left->reg1)
975 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
976 PRINT_REG ("CONV_I4", tree->left->reg1);
980 if (tree->reg1 != tree->left->reg1)
981 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
982 PRINT_REG ("CONV_U4", tree->left->reg1);
985 reg: CONV_OVF_I4 (reg) {
986 if (tree->reg1 != tree->left->reg1)
987 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
988 PRINT_REG ("CONV_OVF_I4", tree->left->reg1);
991 reg: CONV_OVF_U4 (reg) {
992 	/* Keep in sync with CONV_OVF_I4_UN below, they are the same on 32-bit machines */
	/* Overflow iff the sign bit is set, so the test mask must be
	 * 0x80000000 (bit 31).  The original used 0x8000000 (bit 27),
	 * which both missed real overflows (negative values with bit 27
	 * clear) and threw spuriously on valid values with bit 27 set.
	 * EMIT_COND_SYSTEM_EXCEPTION skips the throw when ZF is set,
	 * i.e. when the sign bit is clear. */
993 	x86_test_reg_imm (s->code, tree->left->reg1, 0x80000000);
994 	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
995 	if (tree->reg1 != tree->left->reg1)
996 	x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
999 reg: CONV_OVF_I4_UN (reg) {
1000 	/* Keep in sync with CONV_OVF_U4 above, they are the same on 32-bit machines */
	/* A uint fits in int32 iff the top bit is clear: test against
	 * 0x80000000.  The original mask 0x8000000 only checked bit 27
	 * (see the matching fix in CONV_OVF_U4; this rule must stay in
	 * sync with it, as the comment above demands). */
1001 	x86_test_reg_imm (s->code, tree->left->reg1, 0x80000000);
1002 	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1003 	if (tree->reg1 != tree->left->reg1)
1004 	x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1007 reg: CONV_OVF_I1 (reg) {
1008 	/* probe value to be within -128 to 127 */
1009 	x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 127);
1010 	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, TRUE, "OverflowException");
1011 	x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, -128);
	/* EMIT_COND_SYSTEM_EXCEPTION skips the throw when the condition
	 * holds, so this must be GE (reg >= -128 is valid).  The original
	 * used X86_CC_GT, which wrongly threw OverflowException for the
	 * valid sbyte value -128; compare CONV_OVF_I2, which uses GE for
	 * its lower bound. */
1012 	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GE, TRUE, "OverflowException");
1013 	x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, FALSE);
/* Unsigned source -> sbyte: valid range is 0..127 (bit 7 and above must
 * be clear), hence the 0xffffff80 mask; throw skipped when ZF set. */
1016 reg: CONV_OVF_I1_UN (reg) {
1017 	/* probe values between 0 to 127 (any of bits 7..31 set overflows) */
1018 	x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff80);
1019 	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1020 	x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
/* -> byte: valid range 0..255, so bits 8..31 must be clear. */
1023 reg: CONV_OVF_U1 (reg) {
1024 	/* Keep in sync with CONV_OVF_U1_UN routine below, they are the same on 32-bit machines */
1025 	/* probe value to be within 0 to 255 */
1026 	x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff00);
1027 	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1028 	x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
1031 reg: CONV_OVF_U1_UN (reg) {
1032 	/* Keep in sync with CONV_OVF_U1 routine above, they are the same on 32-bit machines */
1033 	/* probe value to be within 0 to 255 */
1034 	x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff00);
1035 	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1036 	x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
/* -> short: two signed compares bound the value to -32768..32767;
 * LE/GE skip the throw when the value is in range. */
1039 reg: CONV_OVF_I2 (reg) {
1040 	/* Probe value to be within -32768 and 32767 */
1041 	x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 32767);
1042 	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, TRUE, "OverflowException");
1043 	x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, -32768);
1044 	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GE, TRUE, "OverflowException");
1045 	x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, TRUE);
/* -> ushort: valid range 0..65535, so bits 16..31 must be clear. */
1048 reg: CONV_OVF_U2 (reg) {
1049 	/* Keep in sync with CONV_OVF_U2_UN below, they are the same on 32-bit machines */
1050 	/* Probe value to be within 0 and 65535 */
1051 	x86_test_reg_imm (s->code, tree->left->reg1, 0xffff0000);
	/* signed flag FALSE to match CONV_OVF_U2_UN exactly, as the
	 * keep-in-sync comment requires (EQ is unaffected by signedness,
	 * so this is a consistency fix only, not a behavior change). */
1052 	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1053 	x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
1056 reg: CONV_OVF_U2_UN (reg) {
1057 	/* Keep in sync with CONV_OVF_U2 above, they are the same on 32-bit machines */
1058 	/* Probe value to be within 0 and 65535 */
1059 	x86_test_reg_imm (s->code, tree->left->reg1, 0xffff0000);
1060 	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1061 	x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
/* Unsigned source -> short: valid range 0..32767 (bit 15 and above must
 * be clear); zero-extend is safe since the value is non-negative. */
1064 reg: CONV_OVF_I2_UN (reg) {
1065 	/* Convert uint value into short, value within 0 and 32767 */
1066 	x86_test_reg_imm (s->code, tree->left->reg1, 0xffff8000);
1067 	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1068 	x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
/* Strength-reduced multiply by constant: the loop finds whether v is a
 * power of two (shift), otherwise special small multipliers use LEA
 * combinations, falling back to imul.
 * NOTE(review): the case labels between the LEA sequences are elided in
 * this extraction.  Also note `v` is unsigned, so the `v < 0` test below
 * is always false (dead condition) — left as-is pending verification of
 * the elided context. */
1071 reg: MUL (reg, CONST_I4) "MB_USE_OPT1(0)" {
1072 	unsigned int i, j, k, v;
1074 	v = tree->right->data.i;
1075 	for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
1080 	if (v < 0 || i == 32 || v & k) {
1083 		/* LEA r1, [r2 + r2*2] */
1084 		x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 1);
1087 		/* LEA r1, [r2 + r2*4] */
1088 		x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1091 		/* LEA r1, [r2 + r2*2] */
1093 		x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 1);
1094 		x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
1097 		/* LEA r1, [r2 + r2*8] */
1098 		x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 3);
1101 		/* LEA r1, [r2 + r2*4] */
1103 		x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1104 		x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
1107 		/* LEA r1, [r2 + r2*2] */
1109 		x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 1);
1110 		x86_shift_reg_imm (s->code, X86_SHL, tree->reg1, 2);
1113 		/* LEA r1, [r2 + r2*4] */
1114 		/* LEA r1, [r1 + r1*4] */
1115 		x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1116 		x86_lea_memindex (s->code, tree->reg1, tree->reg1, 0, tree->reg1, 2);
1119 		/* LEA r1, [r2 + r2*4] */
1121 		/* LEA r1, [r1 + r1*4] */
1122 		x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1123 		x86_shift_reg_imm (s->code, X86_SHL, tree->reg1, 2);
1124 		x86_lea_memindex (s->code, tree->reg1, tree->reg1, 0, tree->reg1, 2);
/* Fallback: hardware imul with the immediate. */
1127 		x86_imul_reg_reg_imm (s->code, tree->reg1, tree->left->reg1, tree->right->data.i);
/* Power-of-two multiplier: single shift. */
1131 	x86_shift_reg_imm (s->code, X86_SHL, tree->left->reg1, i);
1132 	if (tree->reg1 != tree->left->reg1)
1133 	x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Plain 32-bit multiply: imul into the left operand, then move to the
 * result register if they differ. */
1137 reg: MUL (reg, reg) {
1138 	x86_imul_reg_reg (s->code, tree->left->reg1, tree->right->reg1);
1140 	if (tree->reg1 != tree->left->reg1)
1141 	x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Signed overflow-checked multiply: imul sets OF on overflow;
 * X86_CC_NO skips the throw when no overflow occurred. */
1144 reg: MUL_OVF (reg, reg) {
1145 	x86_imul_reg_reg (s->code, tree->left->reg1, tree->right->reg1);
1146 	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
1148 	if (tree->reg1 != tree->left->reg1)
1149 	x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Unsigned overflow-checked multiply: one-operand mul requires the
 * multiplicand in EAX and clobbers EDX (high half); the register
 * allocator must have assigned reg1=EAX, reg2=EDX. */
1152 reg: MUL_OVF_UN (reg, reg) {
1153 	mono_assert (tree->right->reg1 != X86_EAX);
1155 	if (tree->left->reg1 != X86_EAX)
1156 	x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1158 	x86_mul_reg (s->code, tree->right->reg1, FALSE);
1159 	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
1161 	mono_assert (tree->reg1 == X86_EAX &&
1162 	tree->reg2 == X86_EDX);
/* Signed divide by a power-of-two constant: arithmetic shift right.
 * NOTE(review): the loop body (power-of-two detection/break) and the
 * surrounding guard are elided in this extraction. */
1165 reg: DIV (reg, CONST_I4) {
1166 	unsigned int i, j, k, v;
1168 	v = tree->right->data.i;
1169 	for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
1174 	x86_shift_reg_imm (s->code, X86_SAR, tree->left->reg1, i);
1175 	if (tree->reg1 != tree->left->reg1)
1176 	x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Fragment of the REM-by-constant rule; its header and cost condition
 * are elided — i == 32 || v & k rejects non-powers-of-two. */
1179 	unsigned int i, j, k, v;
1184 	v = tree->right->data.i;
1185 	for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
1190 	if (i == 32 || v & k)
/* Signed divide: idiv needs the dividend sign-extended into EDX:EAX
 * (the cdq appears to be on an elided line — verify); quotient lands
 * in EAX, remainder in EDX. */
1197 reg: DIV (reg, reg) {
1198 	mono_assert (tree->right->reg1 != X86_EAX);
1200 	if (tree->left->reg1 != X86_EAX)
1201 	x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1204 	x86_div_reg (s->code, tree->right->reg1, TRUE);
1206 	mono_assert (tree->reg1 == X86_EAX &&
1207 	tree->reg2 == X86_EDX);
/*
 * DIV_UN (reg, CONST_I4): unsigned 32-bit divide by a constant.
 * Power-of-two divisors become a logical shift; other divisors use the
 * classic reciprocal-multiplication trick (multiply by ceil/floor of
 * 2^k / v, take the high half via MUL, then shift) -- see Granlund &
 * Montgomery, "Division by Invariant Integers using Multiplication".
 * NOTE(review): heavily elided extract; the branches below are only
 * partially visible.
 */
1210 reg: DIV_UN (reg, CONST_I4) {
1211 unsigned int i, j, k, v;
1214 v = tree->right->data.i;
1215 for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
1220 if (i == 32 || v & k) {
/*
 * NOTE(review): i is unsigned, so "--i >= 0" is always true; the loop
 * must terminate via a break in the (elided) body when v & j hits the
 * top set bit.  Worth a defensive rewrite in the full source.
 */
1221 for (i = 32, j = 0x80000000; --i >= 0; j >>= 1) {
1225 /* k = 32 + number of significant bits in v - 1 */
/* build f = 2^k as a float by repeated doubling */
1229 for (i = 0; i < k; i++) f *= 2.0f;
/* shift the high half of the product down to get the quotient */
1235 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, k - 32);
1236 if (tree->reg1 != tree->left->reg1)
1237 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* fractional part < 0.5: use floor(f) as the magic multiplier and
 * pre-increment the dividend instead */
1238 } else if (r < 0.5f) {
1239 if (tree->left->reg1 != X86_EAX)
1240 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1241 x86_mov_reg_imm (s->code, X86_EDX, (guint32) floor(f));
1242 /* x86_inc_reg (s->code, X86_EAX); */
1243 /* INC is faster but we have to check for overflow. */
1244 x86_alu_reg_imm (s->code, X86_ADD, X86_EAX, 1);
/* if ADD carried, skip the 2-byte MUL encoding that follows */
1245 x86_branch8(s->code, X86_CC_C, 2, FALSE);
1246 x86_mul_reg (s->code, X86_EDX, FALSE);
1247 x86_shift_reg_imm (s->code, X86_SHR, X86_EDX, k - 32);
1248 if (tree->reg1 != X86_EDX)
1249 x86_mov_reg_reg (s->code, tree->reg1, X86_EDX, 4);
/* otherwise use ceil(f) as the magic multiplier (no increment) */
1251 if (tree->left->reg1 != X86_EAX)
1252 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1253 x86_mov_reg_imm (s->code, X86_EDX, (guint32) ceil(f));
1254 x86_mul_reg (s->code, X86_EDX, FALSE);
1255 x86_shift_reg_imm (s->code, X86_SHR, X86_EDX, k - 32);
1256 if (tree->reg1 != X86_EDX)
1257 x86_mov_reg_reg (s->code, tree->reg1, X86_EDX, 4);
/* power-of-two divisor: unsigned divide == logical shift right */
1260 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, i);
1261 if (tree->reg1 != tree->left->reg1)
1262 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/*
 * DIV_UN (reg, reg): unsigned 32-bit division.  EDX is zeroed to form
 * the 64-bit dividend EDX:EAX; FALSE selects unsigned DIV.  Quotient
 * in EAX, remainder in EDX.
 */
1267 reg: DIV_UN (reg, reg) {
1268 mono_assert (tree->right->reg1 != X86_EAX);
1270 if (tree->left->reg1 != X86_EAX)
1271 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
/* zero-extend the dividend into EDX:EAX */
1273 x86_mov_reg_imm (s->code, X86_EDX, 0);
1274 x86_div_reg (s->code, tree->right->reg1, FALSE);
1276 mono_assert (tree->reg1 == X86_EAX &&
1277 tree->reg2 == X86_EDX);
/*
 * REM (reg, reg): signed 32-bit remainder.  After IDIV the remainder
 * is in EDX; it is copied into EAX because tree->reg1 == EAX.
 * NOTE(review): the cdq that sign-extends EAX into EDX is on an elided
 * line (the comment at 1287 refers to it) -- confirm in the full file.
 */
1280 reg: REM (reg, reg) {
1281 mono_assert (tree->right->reg1 != X86_EAX);
1282 mono_assert (tree->right->reg1 != X86_EDX);
1284 if (tree->left->reg1 != X86_EAX)
1285 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1287 /* sign extend to 64bit in EAX/EDX */
1289 x86_div_reg (s->code, tree->right->reg1, TRUE);
/* result register is EAX, so move the remainder out of EDX */
1290 x86_mov_reg_reg (s->code, X86_EAX, X86_EDX, 4);
1292 mono_assert (tree->reg1 == X86_EAX &&
1293 tree->reg2 == X86_EDX);
/*
 * REM_UN (reg, reg): unsigned 32-bit remainder.  EDX is zeroed for the
 * 64-bit dividend, unsigned DIV executes, and the remainder (EDX) is
 * copied to the result register EAX.
 */
1296 reg: REM_UN (reg, reg) {
1297 mono_assert (tree->right->reg1 != X86_EAX);
1298 mono_assert (tree->right->reg1 != X86_EDX);
1300 if (tree->left->reg1 != X86_EAX)
1301 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1303 /* zero extend to 64bit in EAX/EDX */
1304 x86_mov_reg_imm (s->code, X86_EDX, 0);
1305 x86_div_reg (s->code, tree->right->reg1, FALSE);
1306 x86_mov_reg_reg (s->code, X86_EAX, X86_EDX, 4);
1308 mono_assert (tree->reg1 == X86_EAX &&
1309 tree->reg2 == X86_EDX);
/*
 * ADD (reg, CONST_I4): integer add of an immediate; +1 is peepholed
 * into the shorter INC encoding.
 */
1312 reg: ADD (reg, CONST_I4) "MB_USE_OPT1(0)" {
1313 if (tree->right->data.i == 1)
1314 x86_inc_reg (s->code, tree->left->reg1);
1316 x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, tree->right->data.i);
1318 if (tree->reg1 != tree->left->reg1)
1319 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/*
 * ADD (reg, LDIND_I4 (ADDR_L)): add directly from a register-allocated
 * local, avoiding a separate load.  The MBCOND guard below restricts
 * the rule to locals that actually live in a register.
 */
1323 reg: ADD (reg, LDIND_I4 (ADDR_L)) {
1324 int treg = VARINFO (s, tree->right->left->data.i).reg;
1326 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, treg);
1328 if (tree->reg1 != tree->left->reg1)
1329 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1331 MBCOND ((VARINFO (data, tree->right->left->data.i).reg >= 0));
/* ADD (reg, reg): plain register-register add. */
1335 reg: ADD (reg, reg) {
1336 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
1338 if (tree->reg1 != tree->left->reg1)
1339 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/*
 * ADD_OVF: signed add with overflow check -- throws if the OF flag is
 * set (continue on "no overflow", signed compare semantics).
 */
1342 reg: ADD_OVF (reg, reg) {
1343 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
1344 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
1346 if (tree->reg1 != tree->left->reg1)
1347 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/*
 * ADD_OVF_UN: unsigned add with overflow check -- throws on carry
 * (continue on "no carry").
 */
1350 reg: ADD_OVF_UN (reg, reg) {
1351 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
1352 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
1354 if (tree->reg1 != tree->left->reg1)
1355 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/*
 * SUB (reg, CONST_I4): subtract an immediate; -1 is peepholed into the
 * shorter DEC encoding.
 */
1358 reg: SUB (reg, CONST_I4) "MB_USE_OPT1(0)" {
1359 if (tree->right->data.i == 1)
1360 x86_dec_reg (s->code, tree->left->reg1);
1362 x86_alu_reg_imm (s->code, X86_SUB, tree->left->reg1, tree->right->data.i);
1364 if (tree->reg1 != tree->left->reg1)
1365 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/*
 * SUB (reg, LDIND_I4 (ADDR_L)): subtract a register-allocated local
 * directly; guarded by the MBCOND below.
 */
1368 reg: SUB (reg, LDIND_I4 (ADDR_L)) {
1369 int treg = VARINFO (s, tree->right->left->data.i).reg;
1371 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, treg);
1373 if (tree->reg1 != tree->left->reg1)
1374 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1376 MBCOND ((VARINFO (data, tree->right->left->data.i).reg >= 0));
/* SUB (reg, reg): plain register-register subtract. */
1380 reg: SUB (reg, reg) {
1381 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
1383 if (tree->reg1 != tree->left->reg1)
1384 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* SUB_OVF: signed subtract, throws on overflow (OF set). */
1387 reg: SUB_OVF (reg, reg) {
1388 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
1389 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
1391 if (tree->reg1 != tree->left->reg1)
1392 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* SUB_OVF_UN: unsigned subtract, throws on borrow (carry set). */
1395 reg: SUB_OVF_UN (reg, reg) {
1396 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
1397 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
1399 if (tree->reg1 != tree->left->reg1)
1400 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/*
 * CSET (cflags): materialize a comparison result as 0/1 using SETcc
 * then zero-extend the byte.  tree->data.i selects the CIL comparison
 * (the case labels are on elided lines); the final bool argument of
 * x86_set_reg selects signed vs unsigned condition codes.
 */
1403 reg: CSET (cflags) {
1405 switch (tree->data.i) {
1407 x86_set_reg (s->code, X86_CC_EQ, tree->reg1, TRUE);
1410 x86_set_reg (s->code, X86_CC_GT, tree->reg1, TRUE);
1413 x86_set_reg (s->code, X86_CC_GT, tree->reg1, FALSE);
1416 x86_set_reg (s->code, X86_CC_LT, tree->reg1, TRUE);
1419 x86_set_reg (s->code, X86_CC_LT, tree->reg1, FALSE);
1422 g_assert_not_reached ();
/* SETcc writes only the low byte: widen to a full 32-bit 0/1 */
1425 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
/*
 * Bitwise AND/OR/XOR rules: each comes in an immediate and a
 * register-register form; all share the same shape -- perform the ALU
 * op in the left operand's register, then move to the result register
 * if the allocator assigned a different one.
 */
1428 reg: AND (reg, CONST_I4) "MB_USE_OPT1(0)" {
1429 x86_alu_reg_imm (s->code, X86_AND, tree->left->reg1, tree->right->data.i);
1431 if (tree->reg1 != tree->left->reg1)
1432 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1435 reg: AND (reg, reg) {
1436 x86_alu_reg_reg (s->code, X86_AND, tree->left->reg1, tree->right->reg1);
1438 if (tree->reg1 != tree->left->reg1)
1439 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1442 reg: OR (reg, CONST_I4) "MB_USE_OPT1(0)" {
1443 x86_alu_reg_imm (s->code, X86_OR, tree->left->reg1, tree->right->data.i);
1445 if (tree->reg1 != tree->left->reg1)
1446 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1449 reg: OR (reg, reg) {
1450 x86_alu_reg_reg (s->code, X86_OR, tree->left->reg1, tree->right->reg1);
1452 if (tree->reg1 != tree->left->reg1)
1453 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1456 reg: XOR (reg, CONST_I4) "MB_USE_OPT1(0)" {
1457 x86_alu_reg_imm (s->code, X86_XOR, tree->left->reg1, tree->right->data.i);
1459 if (tree->reg1 != tree->left->reg1)
1460 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1463 reg: XOR (reg, reg) {
1464 x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->right->reg1);
1466 if (tree->reg1 != tree->left->reg1)
1467 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/*
 * NOTE(review): rule headers elided -- these are the bodies of the
 * unary NEG and NOT rules (two's-complement negate and bitwise
 * complement, in place, then move to the result register if needed).
 */
1471 x86_neg_reg (s->code, tree->left->reg1);
1473 if (tree->reg1 != tree->left->reg1)
1474 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1478 x86_not_reg (s->code, tree->left->reg1);
1480 if (tree->reg1 != tree->left->reg1)
1481 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* SHL (reg, CONST_I4): left shift by an immediate count. */
1484 reg: SHL (reg, CONST_I4) {
1485 x86_shift_reg_imm (s->code, X86_SHL, tree->left->reg1, tree->right->data.i);
1487 if (tree->reg1 != tree->left->reg1)
1488 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/*
 * SHL (reg, reg): variable shift.  x86 only shifts by CL, so if the
 * count is not already in ECX it is spilled/loaded around the shift;
 * the asserts require the allocator kept operands out of ECX.
 */
1491 reg: SHL (reg, reg) {
1492 if (tree->right->reg1 != X86_ECX) {
1493 x86_push_reg (s->code, X86_ECX);
1494 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
1496 x86_shift_reg (s->code, X86_SHL, tree->left->reg1);
1498 if (tree->reg1 != tree->left->reg1)
1499 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1501 if (tree->right->reg1 != X86_ECX)
1502 x86_pop_reg (s->code, X86_ECX);
1504 mono_assert (tree->reg1 != X86_ECX &&
1505 tree->left->reg1 != X86_ECX);
/* SHR (reg, CONST_I4): arithmetic (signed) right shift by immediate. */
1508 reg: SHR (reg, CONST_I4) {
1509 x86_shift_reg_imm (s->code, X86_SAR, tree->left->reg1, tree->right->data.i);
1511 if (tree->reg1 != tree->left->reg1)
1512 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/*
 * SHR (reg, reg): variable arithmetic right shift (SAR by CL), with
 * the same save/restore-ECX dance as SHL.
 */
1515 reg: SHR (reg, reg) {
1516 if (tree->right->reg1 != X86_ECX) {
1517 x86_push_reg (s->code, X86_ECX);
1518 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
1521 x86_shift_reg (s->code, X86_SAR, tree->left->reg1);
1523 if (tree->reg1 != tree->left->reg1)
1524 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1526 if (tree->right->reg1 != X86_ECX)
1527 x86_pop_reg (s->code, X86_ECX);
1529 mono_assert (tree->reg1 != X86_ECX &&
1530 tree->left->reg1 != X86_ECX);
/* SHR_UN (reg, CONST_I4): logical (unsigned) right shift by immediate. */
1533 reg: SHR_UN (reg, CONST_I4) {
1534 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, tree->right->data.i);
1536 if (tree->reg1 != tree->left->reg1)
1537 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* SHR_UN (reg, reg): variable logical right shift (SHR by CL). */
1540 reg: SHR_UN (reg, reg) {
1541 if (tree->right->reg1 != X86_ECX) {
1542 x86_push_reg (s->code, X86_ECX);
1543 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
1546 x86_shift_reg (s->code, X86_SHR, tree->left->reg1);
1548 if (tree->reg1 != tree->left->reg1)
1549 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1551 if (tree->right->reg1 != X86_ECX)
1552 x86_pop_reg (s->code, X86_ECX);
1554 mono_assert (tree->reg1 != X86_ECX &&
1555 tree->left->reg1 != X86_ECX);
/*
 * LDSFLDA (CONST_I4): load the address of a static field by calling
 * the mono_ldsflda() runtime helper.  Caller-saved EAX/ECX/EDX are
 * preserved around the call; EAX is spilled only when it is not the
 * result register (otherwise the return value stays in place).
 */
1558 reg: LDSFLDA (CONST_I4) {
1559 if (tree->reg1 != X86_EAX)
1560 x86_push_reg (s->code, X86_EAX);
1561 x86_push_reg (s->code, X86_ECX);
1562 x86_push_reg (s->code, X86_EDX);
/* cdecl args pushed right-to-left: (klass, offset) */
1564 x86_push_imm (s->code, tree->left->data.i);
1565 x86_push_imm (s->code, tree->data.klass);
1566 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldsflda);
1567 x86_call_code (s->code, 0);
/* pop the two 4-byte arguments */
1568 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
1570 x86_pop_reg (s->code, X86_EDX);
1571 x86_pop_reg (s->code, X86_ECX);
1572 if (tree->reg1 != X86_EAX) {
1573 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1574 x86_pop_reg (s->code, X86_EAX);
/*
 * NOTE(review): rule header elided -- this load of MonoArray.max_length
 * is presumably the LDLEN rule body.
 */
1580 x86_mov_reg_membase (s->code, tree->reg1, tree->left->reg1,
1581 G_STRUCT_OFFSET (MonoArray, max_length), 4);
/*
 * LDELEMA (reg, CONST_I4): address of array element at a constant
 * index.  Bounds check: compare max_length against the index and throw
 * IndexOutOfRangeException unless length > index (unsigned compare),
 * then add the precomputed byte offset into the vector.
 */
1584 reg: LDELEMA (reg, CONST_I4) {
1587 x86_alu_membase_imm (s->code, X86_CMP, tree->left->reg1, G_STRUCT_OFFSET (MonoArray, max_length), tree->right->data.i);
1588 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GT, FALSE, "IndexOutOfRangeException");
/* element_size * index + offsetof(MonoArray, vector) */
1590 ind = tree->data.i * tree->right->data.i + G_STRUCT_OFFSET (MonoArray, vector);
1592 x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, ind);
1594 if (tree->reg1 != tree->left->reg1)
1595 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/*
 * LDELEMA (reg, reg): address of array element at a runtime index.
 * Unsigned bounds check (index < max_length), then either a single LEA
 * with a scaled index for element sizes 1/2/4/8, or an explicit
 * multiply-and-add for other sizes.
 */
1599 reg: LDELEMA (reg, reg) {
1601 x86_alu_reg_membase (s->code, X86_CMP, tree->right->reg1, tree->left->reg1, G_STRUCT_OFFSET (MonoArray, max_length));
1602 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LT, FALSE, "IndexOutOfRangeException");
1604 if (tree->data.i == 1 || tree->data.i == 2 ||
1605 tree->data.i == 4 || tree->data.i == 8) {
/* maps element size -> LEA scale shift; only indices 1,2,4,8 are used */
1606 static int fast_log2 [] = { 1, 0, 1, -1, 2, -1, -1, -1, 3 };
1607 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1,
1608 G_STRUCT_OFFSET (MonoArray, vector), tree->right->reg1,
1609 fast_log2 [tree->data.i]);
/* general case: reg1 = base + index*size + offsetof(vector) */
1611 x86_imul_reg_reg_imm (s->code, tree->right->reg1, tree->right->reg1, tree->data.i);
1612 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->right->reg1);
1613 x86_alu_reg_imm (s->code, X86_ADD, tree->reg1, G_STRUCT_OFFSET (MonoArray, vector));
/*
 * NOTE(review): rule header elided -- judging by the helper and the
 * PRINT_REG tag this is the LDSTR rule: call mono_ldstr_wrapper(image,
 * token) with caller-saved registers preserved, result into tree->reg1.
 */
1618 if (tree->reg1 != X86_EAX)
1619 x86_push_reg (s->code, X86_EAX);
1620 x86_push_reg (s->code, X86_ECX);
1621 x86_push_reg (s->code, X86_EDX);
/* args right-to-left: (image, string token) */
1623 x86_push_imm (s->code, tree->data.p);
1624 x86_push_imm (s->code, s->method->klass->image);
1625 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldstr_wrapper);
1626 x86_call_code (s->code, 0);
1627 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
1629 x86_pop_reg (s->code, X86_EDX);
1630 x86_pop_reg (s->code, X86_ECX);
1631 if (tree->reg1 != X86_EAX) {
1632 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1633 x86_pop_reg (s->code, X86_EAX);
1636 PRINT_REG ("LDSTR", tree->reg1);
/*
 * NOTE(review): rule header elided -- NEWARR: allocate an array via
 * mono_array_new_wrapper(eclass, length), with caller-saved registers
 * preserved around the call.
 */
1640 if (tree->reg1 != X86_EAX)
1641 x86_push_reg (s->code, X86_EAX);
1642 x86_push_reg (s->code, X86_ECX);
1643 x86_push_reg (s->code, X86_EDX);
/* args right-to-left: (eclass, n) -- n comes from the child register */
1645 x86_push_reg (s->code, tree->left->reg1);
1646 x86_push_imm (s->code, tree->data.p);
1647 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_array_new_wrapper);
1648 x86_call_code (s->code, 0);
1649 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer) + 4);
1651 x86_pop_reg (s->code, X86_EDX);
1652 x86_pop_reg (s->code, X86_ECX);
1653 if (tree->reg1 != X86_EAX) {
1654 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1655 x86_pop_reg (s->code, X86_EAX);
1658 PRINT_REG ("NEWARR", tree->reg1);
/*
 * NEWARR_SPEC (reg): allocate an array when the exact vtable is known
 * at compile time, via mono_array_new_specific(vtable, n).  Same
 * caller-saved register protocol as NEWARR.
 */
1661 reg: NEWARR_SPEC (reg) {
1662 if (tree->reg1 != X86_EAX)
1663 x86_push_reg (s->code, X86_EAX);
1664 x86_push_reg (s->code, X86_ECX);
1665 x86_push_reg (s->code, X86_EDX);
1667 x86_push_reg (s->code, tree->left->reg1);
1668 x86_push_imm (s->code, tree->data.p);
1669 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_array_new_specific);
1670 x86_call_code (s->code, 0);
1671 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer) + 4);
1673 x86_pop_reg (s->code, X86_EDX);
1674 x86_pop_reg (s->code, X86_ECX);
1675 if (tree->reg1 != X86_EAX) {
1676 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1677 x86_pop_reg (s->code, X86_EAX);
1680 PRINT_REG ("NEWARR_SPEC", tree->reg1);
/*
 * NOTE(review): rule header elided -- NEWOBJ: allocate an object via
 * mono_object_new_wrapper(klass), preserving caller-saved registers.
 */
1684 if (tree->reg1 != X86_EAX)
1685 x86_push_reg (s->code, X86_EAX);
1686 x86_push_reg (s->code, X86_ECX);
1687 x86_push_reg (s->code, X86_EDX);
1689 x86_push_imm (s->code, tree->data.klass);
1690 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_object_new_wrapper);
1691 x86_call_code (s->code, 0);
1692 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer));
1694 x86_pop_reg (s->code, X86_EDX);
1695 x86_pop_reg (s->code, X86_ECX);
1696 if (tree->reg1 != X86_EAX) {
1697 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1698 x86_pop_reg (s->code, X86_EAX);
1700 PRINT_REG ("NEWOBJ", tree->reg1);
/*
 * NOTE(review): rule header elided -- NEWOBJ_SPEC: allocate with a
 * known vtable via mono_object_new_specific(vtable); same register
 * protocol as NEWOBJ.
 */
1704 if (tree->reg1 != X86_EAX)
1705 x86_push_reg (s->code, X86_EAX);
1706 x86_push_reg (s->code, X86_ECX);
1707 x86_push_reg (s->code, X86_EDX);
1709 x86_push_imm (s->code, tree->data.p);
1710 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_object_new_specific);
1711 x86_call_code (s->code, 0);
1712 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer));
1714 x86_pop_reg (s->code, X86_EDX);
1715 x86_pop_reg (s->code, X86_ECX);
1716 if (tree->reg1 != X86_EAX) {
1717 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1718 x86_pop_reg (s->code, X86_EAX);
1720 PRINT_REG ("NEWOBJ_SPEC", tree->reg1);
/* OBJADDR (reg): the address is already in the child register; just
 * forward it to the result register. */
1723 reg: OBJADDR (reg) {
1724 if (tree->left->reg1 != tree->reg1)
1725 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/*
 * NOTE(review): rule header elided -- this fragment calls g_free() on
 * the child value (likely a FREE/cleanup stmt rule), preserving all
 * three caller-saved registers since there is no result.
 */
1729 x86_push_reg (s->code, X86_EAX);
1730 x86_push_reg (s->code, X86_ECX);
1731 x86_push_reg (s->code, X86_EDX);
1733 x86_push_reg (s->code, tree->left->reg1);
1735 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, g_free);
1736 x86_call_code (s->code, 0);
1737 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
1739 x86_pop_reg (s->code, X86_EDX);
1740 x86_pop_reg (s->code, X86_ECX);
1741 x86_pop_reg (s->code, X86_EAX);
/*
 * PROC3 (reg, CPSRC (reg, reg)): marshalling string conversion.
 * tree->data.i selects the conversion kind and the matching runtime
 * helper (string->byvalstr or string->byvalwstr); args are
 * (dest, source, size), cdecl, with caller-saved registers preserved.
 */
1744 stmt: PROC3 (reg, CPSRC (reg, reg)) {
1745 int dest_reg = tree->left->reg1;
1746 int source_reg = tree->right->left->reg1;
1747 int size_reg = tree->right->right->reg1;
1749 x86_push_reg (s->code, X86_EAX);
1750 x86_push_reg (s->code, X86_ECX);
1751 x86_push_reg (s->code, X86_EDX);
1753 x86_push_reg (s->code, size_reg);
1754 x86_push_reg (s->code, source_reg);
1755 x86_push_reg (s->code, dest_reg);
1757 switch (tree->data.i) {
1758 case MONO_MARSHAL_CONV_STR_BYVALSTR:
1759 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_string_to_byvalstr);
1761 case MONO_MARSHAL_CONV_STR_BYVALWSTR:
1762 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_string_to_byvalwstr);
1765 g_assert_not_reached ();
1768 x86_call_code (s->code, 0);
/* pop the three 4-byte arguments */
1769 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
1771 x86_pop_reg (s->code, X86_EDX);
1772 x86_pop_reg (s->code, X86_ECX);
1773 x86_pop_reg (s->code, X86_EAX);
/*
 * NOTE(review): rule header elided -- this is a generic one-argument
 * runtime-helper call (helper address in tree->data.p, argument from
 * the child register, result into tree->reg1), with the standard
 * caller-saved register save/restore protocol.
 */
1777 if (tree->reg1 != X86_EAX)
1778 x86_push_reg (s->code, X86_EAX);
1779 x86_push_reg (s->code, X86_ECX);
1780 x86_push_reg (s->code, X86_EDX);
1782 x86_push_reg (s->code, tree->left->reg1);
1784 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->data.p);
1785 x86_call_code (s->code, 0);
1786 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer));
1788 x86_pop_reg (s->code, X86_EDX);
1789 x86_pop_reg (s->code, X86_ECX);
1790 if (tree->reg1 != X86_EAX) {
1791 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1792 x86_pop_reg (s->code, X86_EAX);
/*
 * LOCALLOC (reg): implement CIL `localloc` -- allocate tree->left->reg1
 * bytes on the stack (rounded up to a 4-byte multiple), zero the
 * allocated region with `rep stosl`, and return the new ESP in
 * tree->reg1.  EAX/ECX/EDI are saved around the fill loop unless one
 * of them is already the operand/result register.
 *
 * FIX: the dword count for the fill loop was loaded with
 * x86_mov_reg_imm(ECX, tree->left->reg1), which encodes the *register
 * number* (e.g. 3 for EBX) as an immediate instead of copying the
 * register's value.  It must be a register-to-register move.
 */
1797 reg: LOCALLOC (reg) {
1799 /* size must be aligned to 4 bytes */
1800 x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, 3);
1801 x86_alu_reg_imm (s->code, X86_AND, tree->left->reg1, ~3);
1803 /* allocate space on stack */
1804 x86_alu_reg_reg (s->code, X86_SUB, X86_ESP, tree->left->reg1);
1807 /* initialize with zero */
1808 if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX) {
1809 x86_push_reg (s->code, X86_EAX);
1812 if (tree->reg1 != X86_ECX && tree->left->reg1 != X86_ECX) {
1813 x86_push_reg (s->code, X86_ECX);
1816 if (tree->reg1 != X86_EDI && tree->left->reg1 != X86_EDI) {
1817 x86_push_reg (s->code, X86_EDI);
/* byte count -> dword count for rep stosl */
1821 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, 2);
1822 if (tree->left->reg1 != X86_ECX)
/* copy the dword count into ECX (was x86_mov_reg_imm: encoded the
 * register number as an immediate -- wrong count) */
1823 x86_mov_reg_reg (s->code, X86_ECX, tree->left->reg1, 4);
1824 x86_alu_reg_reg (s->code, X86_XOR, X86_EAX, X86_EAX);
/* EDI = start of the freshly allocated region (skip the saved regs) */
1826 x86_lea_membase (s->code, X86_EDI, X86_ESP, offset);
1828 x86_prefix (s->code, X86_REP_PREFIX);
1829 x86_stosl (s->code);
1831 if (tree->reg1 != X86_EDI && tree->left->reg1 != X86_EDI)
1832 x86_pop_reg (s->code, X86_EDI);
1833 if (tree->reg1 != X86_ECX && tree->left->reg1 != X86_ECX)
1834 x86_pop_reg (s->code, X86_ECX);
1835 if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX)
1836 x86_pop_reg (s->code, X86_EAX);
/* result: pointer to the allocated block (current ESP) */
1839 x86_mov_reg_reg (s->code, tree->reg1, X86_ESP, 4);
/*
 * NOTE(review): rule header elided -- this is an unbox-style rule:
 * dereference obj->vtable->klass, check its element_class against the
 * expected class (throwing InvalidCastException on mismatch), then
 * step the pointer past the MonoObject header to the value data.
 */
1843 if (tree->reg1 != tree->left->reg1)
1844 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* save the object pointer; reg1 is clobbered by the vtable walk */
1846 x86_push_reg (s->code, tree->reg1);
1847 x86_mov_reg_membase (s->code, tree->reg1, tree->reg1, 0, 4);
1848 x86_mov_reg_membase (s->code, tree->reg1, tree->reg1, 0, 4);
1849 x86_alu_membase_imm (s->code, X86_CMP, tree->reg1,
1850 G_STRUCT_OFFSET (MonoClass, element_class), ((int)(tree->data.klass->element_class)));
/* continue only when the classes match */
1851 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "InvalidCastException");
1852 x86_pop_reg (s->code, tree->reg1);
/* skip the object header: result points at the boxed value */
1853 x86_alu_reg_imm (s->code, X86_ADD, tree->reg1, sizeof (MonoObject));
/*
 * CASTCLASS (reg): CIL castclass.  NULL passes unchanged; otherwise
 * throw InvalidCastException unless the object is compatible with
 * tree->data.klass.  Three paths: interface (interface_offsets table),
 * array (rank + element-class baseval/diffval range check), and plain
 * class (baseval/diffval range check, with a transparent-proxy detour
 * when the class is MarshalByRef).
 * NOTE(review): several interior lines are elided (e.g. the branch
 * between the array and class paths); comments below are inferred.
 */
1856 reg: CASTCLASS (reg) {
1857 MonoClass *klass = tree->data.klass;
1859 int lreg = tree->left->reg1;
/* keep the original object pointer; popped into reg1 at the end */
1861 x86_push_reg (s->code, lreg);
1862 x86_test_reg_reg (s->code, lreg, lreg);
/* NULL casts to anything */
1863 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
1865 if (klass->flags & TYPE_ATTRIBUTE_INTERFACE) {
1866 /* lreg = obj->vtable */
1867 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
/* throw unless max_interface_id >= klass->interface_id */
1869 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoVTable, max_interface_id),
1870 klass->interface_id);
1871 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GE, FALSE, "InvalidCastException");
1872 /* lreg = obj->vtable->interface_offsets */
1873 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
/* a zero slot means the interface is not implemented */
1874 x86_alu_membase_imm (s->code, X86_CMP, lreg, klass->interface_id << 2, 0);
1875 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NE, FALSE, "InvalidCastException");
1878 /* lreg = obj->vtable */
1879 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
1880 /* lreg = obj->vtable->klass */
1881 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
/* array path: ranks must match exactly */
1885 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoClass, rank), klass->rank);
1886 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "InvalidCastException");
1887 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, element_class), 4);
/* subtype test: (baseval - target.baseval) <= target.diffval, unsigned */
1888 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
1889 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->element_class->baseval);
1890 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->element_class->diffval);
1891 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, FALSE, "InvalidCastException");
1895 if (klass->marshalbyref) {
1896 /* check for transparent_proxy */
1897 x86_alu_reg_imm (s->code, X86_CMP, lreg, (int)mono_defaults.transparent_proxy_class);
1898 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
/* proxy: reload the object from the stack and fetch the remote class */
1901 x86_mov_reg_membase (s->code, lreg, X86_ESP, 0, 4);
1902 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoTransparentProxy,
1905 x86_patch (br [1], s->code);
/* plain class path: same baseval/diffval unsigned range check */
1908 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
1909 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->baseval);
1910 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->diffval);
1911 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, FALSE, "InvalidCastException");
/* success (or NULL): pop the original object pointer as the result */
1915 x86_patch (br [0], s->code);
1916 x86_pop_reg (s->code, tree->reg1);
/*
 * NOTE(review): rule header elided -- this is the ISINST rule (CIL
 * `isinst`).  Same type tests as CASTCLASS, but on failure the pushed
 * object pointer on the stack is overwritten with NULL instead of
 * throwing; the (possibly nulled) pointer is popped as the result.
 */
1920 MonoClass *klass = tree->data.klass;
1922 int lreg = tree->left->reg1;
1924 x86_push_reg (s->code, lreg);
1925 x86_test_reg_reg (s->code, lreg, lreg);
/* NULL input -> NULL result, skip all checks */
1926 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
1928 if (klass->flags & TYPE_ATTRIBUTE_INTERFACE) {
1929 /* lreg = obj->vtable */
1930 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
1932 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoVTable, max_interface_id),
1933 klass->interface_id);
/* interface id out of range -> fail (store NULL) */
1934 br [1] = s->code; x86_branch8 (s->code, X86_CC_LT, 0, FALSE);
1935 /* lreg = obj->vtable->interface_offsets */
1936 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
1937 x86_alu_membase_imm (s->code, X86_CMP, lreg, klass->interface_id << 2, 0);
1938 br [2] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
/* failure: overwrite the saved object pointer with NULL */
1939 x86_patch (br [1], s->code);
1940 x86_mov_membase_imm (s->code, X86_ESP, 0, 0, 4);
1941 x86_patch (br [2], s->code);
1945 /* lreg = obj->vtable */
1946 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
1947 /* lreg = obj->vtable->klass */
1948 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
/* array path: rank mismatch or baseval/diffval range failure -> NULL */
1952 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoClass, rank), klass->rank);
1953 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
1954 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, element_class), 4);
1955 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
1956 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->element_class->baseval);
1957 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->element_class->diffval);
1958 br [2] = s->code; x86_branch8 (s->code, X86_CC_LE, 0, FALSE);
1959 x86_patch (br [1], s->code);
1960 x86_mov_membase_imm (s->code, X86_ESP, 0, 0, 4);
1961 x86_patch (br [2], s->code);
1965 if (klass->marshalbyref) {
1966 /* check for transparent_proxy */
1967 x86_alu_reg_imm (s->code, X86_CMP, lreg, (int)mono_defaults.transparent_proxy_class);
1968 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
/* proxy: reload object, fetch remote class through the proxy */
1971 x86_mov_reg_membase (s->code, lreg, X86_ESP, 0, 4);
1972 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoTransparentProxy,
1974 x86_patch (br [1], s->code);
/* plain class path: unsigned baseval/diffval range test, NULL on fail */
1977 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
1978 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->baseval);
1979 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->diffval);
1980 br [2] = s->code; x86_branch8 (s->code, X86_CC_LE, 0, FALSE);
1981 x86_mov_membase_imm (s->code, X86_ESP, 0, 0, 4);
1982 x86_patch (br [2], s->code);
/* result: original pointer, or NULL if any check stored over it */
1986 x86_patch (br [0], s->code);
1987 x86_pop_reg (s->code, tree->reg1);
/*
 * INITOBJ (reg): zero a value-type at the address in the child
 * register.  Sizes 1/2/4 use one immediate store; larger sizes use
 * `rep stosl` for whole dwords plus byte stores for the remainder.
 * NOTE(review): elided lines include the size computation prologue and
 * parts of the small-size special cases.
 */
1990 stmt: INITOBJ (reg) {
1995 if (i == 1 || i == 2 || i == 4) {
1996 x86_mov_membase_imm (s->code, tree->left->reg1, 0, 0, i);
/* i = whole dwords, j = trailing bytes */
2000 i = tree->data.i / 4;
2001 j = tree->data.i % 4;
2003 if (tree->left->reg1 != X86_EDI) {
2004 x86_push_reg (s->code, X86_EDI);
2005 x86_mov_reg_reg (s->code, X86_EDI, tree->left->reg1, 4);
2009 x86_alu_reg_reg (s->code, X86_XOR, X86_EAX, X86_EAX);
2010 x86_mov_reg_imm (s->code, X86_ECX, i);
2012 x86_prefix (s->code, X86_REP_PREFIX);
2013 x86_stosl (s->code);
/* zero the remaining 0..3 bytes */
2015 for (i = 0; i < j; i++)
2016 x86_stosb (s->code);
/* (elided branch): 3-byte tail as a 2-byte + 1-byte store */
2020 x86_mov_membase_imm (s->code, X86_EDI, 0, 0, 2);
2021 x86_mov_membase_imm (s->code, X86_EDI, 2, 0, 1);
2026 if (tree->left->reg1 != X86_EDI)
2027 x86_pop_reg (s->code, X86_EDI);
/*
 * CPBLK (reg, CPSRC (reg, CONST_I4)): memory copy with a compile-time
 * size.  Small sizes (the switch below, selected on `count` -- case
 * labels are on elided lines) are fully unrolled using a scratch
 * register, with 8/12/16-byte cases moved through the FPU (fild/fistp
 * does a 64-bit transfer without clobbering integer registers).
 * Larger sizes fall through to a `rep movsd` loop plus a 0-3 byte
 * unrolled tail.  ESI/EDI are saved when live, and operands that
 * collide with ESI/EDI/ECX are spilled to the stack.
 * NOTE(review): guarded by MBCOND(mono_inline_memcpy) at the end.
 */
2030 stmt: CPBLK (reg, CPSRC (reg, CONST_I4)) {
2031 int dest_reg = tree->left->reg1;
2032 int source_reg = tree->right->left->reg1;
2033 int count = tree->right->right->data.i;
/* any register distinct from dest works as the byte/word scratch */
2034 int sreg = dest_reg != X86_EAX ? X86_EAX : X86_EDX;
2035 int spill_pos = 0, dest_offset = 0, source_offset = 0;
2036 int save_esi = FALSE, save_edi = FALSE;
2038 // TODO: handle unaligned. prefix
/* unrolled cases: 1, 2, 3 bytes */
2044 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 1);
2045 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 1);
2048 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 2);
2049 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 2);
2052 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 2);
2053 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 2);
2054 x86_mov_reg_membase (s->code, sreg, source_reg, 2, 1);
2055 x86_mov_membase_reg (s->code, dest_reg, 2, sreg, 1);
/* 4, 5, 6, 7 bytes */
2058 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 4);
2059 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 4);
2062 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 4);
2063 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 4);
2064 x86_mov_reg_membase (s->code, sreg, source_reg, 4, 1);
2065 x86_mov_membase_reg (s->code, dest_reg, 4, sreg, 1);
2068 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 4);
2069 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 4);
2070 x86_mov_reg_membase (s->code, sreg, source_reg, 4, 2);
2071 x86_mov_membase_reg (s->code, dest_reg, 4, sreg, 2);
2074 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 4);
2075 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 4);
2076 x86_mov_reg_membase (s->code, sreg, source_reg, 4, 2);
2077 x86_mov_membase_reg (s->code, dest_reg, 4, sreg, 2);
2078 x86_mov_reg_membase (s->code, sreg, source_reg, 6, 1);
2079 x86_mov_membase_reg (s->code, dest_reg, 6, sreg, 1);
/* 8 bytes via one 64-bit FPU load/store */
2082 x86_fild_membase (s->code, source_reg, 0, TRUE);
2083 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
/* 9..15 bytes: FPU 8-byte move + integer tail */
2086 x86_fild_membase (s->code, source_reg, 0, TRUE);
2087 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2088 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 1);
2089 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 1);
2092 x86_fild_membase (s->code, source_reg, 0, TRUE);
2093 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2094 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 2);
2095 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 2);
2098 x86_fild_membase (s->code, source_reg, 0, TRUE);
2099 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2100 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 2);
2101 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 2);
2102 x86_mov_reg_membase (s->code, sreg, source_reg, 10, 1);
2103 x86_mov_membase_reg (s->code, dest_reg, 10, sreg, 1);
2106 x86_fild_membase (s->code, source_reg, 0, TRUE);
2107 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2108 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 4);
2109 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 4);
2112 x86_fild_membase (s->code, source_reg, 0, TRUE);
2113 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2114 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 4);
2115 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 4);
2116 x86_mov_reg_membase (s->code, sreg, source_reg, 12, 1);
2117 x86_mov_membase_reg (s->code, dest_reg, 12, sreg, 1);
2120 x86_fild_membase (s->code, source_reg, 0, TRUE);
2121 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2122 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 4);
2123 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 4);
2124 x86_mov_reg_membase (s->code, sreg, source_reg, 12, 2);
2125 x86_mov_membase_reg (s->code, dest_reg, 12, sreg, 2);
2128 x86_fild_membase (s->code, source_reg, 0, TRUE);
2129 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2130 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 4);
2131 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 4);
2132 x86_mov_reg_membase (s->code, sreg, source_reg, 12, 2);
2133 x86_mov_membase_reg (s->code, dest_reg, 12, sreg, 2);
2134 x86_mov_reg_membase (s->code, sreg, source_reg, 14, 1);
2135 x86_mov_membase_reg (s->code, dest_reg, 14, sreg, 1);
/* general case: rep movsd */
2138 g_assert (count > 15);
2140 if (dest_reg != X86_ESI && source_reg != X86_ESI &&
2141 mono_regset_reg_used (s->rs, X86_ESI))
2143 if (dest_reg != X86_EDI && source_reg != X86_EDI &&
2144 mono_regset_reg_used (s->rs, X86_EDI))
2148 x86_push_reg (s->code, X86_ESI);
2150 x86_push_reg (s->code, X86_EDI);
/* operands that clash with ESI/EDI are spilled; offsets are slots */
2152 if (dest_reg == X86_ESI) {
2153 dest_offset = ++spill_pos;
2155 if (source_reg == X86_EDI) {
2156 source_offset = ++spill_pos;
2160 x86_push_reg (s->code, source_reg);
2162 x86_push_reg (s->code, dest_reg);
2164 if (source_reg != X86_ESI) {
2166 x86_mov_reg_membase (s->code, X86_ESI, X86_ESP, (source_offset-1)<<2, 4);
2168 x86_mov_reg_reg (s->code, X86_ESI, source_reg, 4);
2170 if (dest_reg != X86_EDI) {
2172 x86_mov_reg_membase (s->code, X86_EDI, X86_ESP, (dest_offset-1)<<2, 4);
2174 x86_mov_reg_reg (s->code, X86_EDI, dest_reg, 4);
2177 x86_mov_reg_imm (s->code, X86_ECX, count >> 2);
2179 x86_prefix (s->code, X86_REP_PREFIX);
2180 x86_movsd (s->code);
/* copy the remaining count & 3 bytes, unrolled */
2182 switch (count & 3) {
2184 x86_mov_reg_membase (s->code, sreg, X86_ESI, 0, 1);
2185 x86_mov_membase_reg (s->code, X86_EDI, 0, sreg, 1);
2188 x86_mov_reg_membase (s->code, sreg, X86_ESI, 0, 2);
2189 x86_mov_membase_reg (s->code, X86_EDI, 0, sreg, 2);
2192 x86_mov_reg_membase (s->code, sreg, X86_ESI, 0, 2);
2193 x86_mov_membase_reg (s->code, X86_EDI, 0, sreg, 2);
2194 x86_mov_reg_membase (s->code, sreg, X86_ESI, 2, 1);
2195 x86_mov_membase_reg (s->code, X86_EDI, 2, sreg, 1);
/* drop spill slots, restore saved ESI/EDI */
2201 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, spill_pos<<2);
2204 x86_pop_reg (s->code, X86_EDI);
2206 x86_pop_reg (s->code, X86_ESI);
2211 MBCOND (mono_inline_memcpy);
/*
 * CPBLK (reg, CPSRC (reg, reg)): memory copy with a runtime size.
 * When inline memcpy is disabled the rule simply calls memmove(dest,
 * src, size); otherwise it sets up ESI/EDI/ECX (spilling colliding
 * operands to the stack) and copies dwords with `rep movsd` followed
 * by the remaining size&3 bytes with `rep movsb`.
 */
2215 stmt: CPBLK (reg, CPSRC (reg, reg)) {
2216 int dest_reg = tree->left->reg1;
2217 int source_reg = tree->right->left->reg1;
2218 int size_reg = tree->right->right->reg1;
2219 int spill_pos = 0, size_offset = 0, dest_offset = 0, source_offset = 0;
2220 int save_esi = FALSE, save_edi = FALSE;
/* out-of-line path: cdecl memmove(dest, src, size) */
2222 if (!mono_inline_memcpy) {
2223 x86_push_reg (s->code, size_reg);
2224 x86_push_reg (s->code, source_reg);
2225 x86_push_reg (s->code, dest_reg);
2226 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, memmove);
2227 x86_call_code (s->code, 0);
2228 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
/* inline path: save ESI/EDI if live and not already operands */
2230 if (dest_reg != X86_ESI && source_reg != X86_ESI && size_reg != X86_ESI &&
2231 mono_regset_reg_used (s->rs, X86_ESI))
2233 if (dest_reg != X86_EDI && source_reg != X86_EDI && size_reg != X86_EDI &&
2234 mono_regset_reg_used (s->rs, X86_EDI))
2238 x86_push_reg (s->code, X86_ESI);
2240 x86_push_reg (s->code, X86_EDI);
/* spill operands that collide with the string-op registers */
2242 if (size_reg == X86_EDI || size_reg == X86_ESI) {
2243 size_offset = ++spill_pos;
2245 if (dest_reg == X86_ECX || dest_reg == X86_ESI) {
2246 dest_offset = ++spill_pos;
2248 if (source_reg == X86_ECX || source_reg == X86_EDI) {
2249 source_offset = ++spill_pos;
2253 x86_push_reg (s->code, source_reg);
2255 x86_push_reg (s->code, dest_reg);
2257 x86_push_reg (s->code, size_reg);
2259 if (source_reg != X86_ESI) {
2261 x86_mov_reg_membase (s->code, X86_ESI, X86_ESP, (source_offset-1)<<2, 4);
2263 x86_mov_reg_reg (s->code, X86_ESI, source_reg, 4);
2265 if (dest_reg != X86_EDI) {
2267 x86_mov_reg_membase (s->code, X86_EDI, X86_ESP, (dest_offset-1)<<2, 4);
2269 x86_mov_reg_reg (s->code, X86_EDI, dest_reg, 4);
2271 if (size_reg != X86_ECX) {
2273 x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, (size_offset-1)<<2, 4);
2275 x86_mov_reg_reg (s->code, X86_ECX, size_reg, 4);
/* keep the raw byte count for the tail, use size>>2 for movsd */
2278 x86_push_reg (s->code, X86_ECX);
2279 x86_shift_reg_imm (s->code, X86_SHR, X86_ECX, 2);
2283 // move whole dwords first
2284 x86_prefix (s->code, X86_REP_PREFIX);
2285 x86_movsd (s->code);
2287 x86_pop_reg (s->code, X86_ECX);
2288 x86_alu_reg_imm (s->code, X86_AND, X86_ECX, 3);
2290 // move remaining bytes (if any)
2291 x86_prefix (s->code, X86_REP_PREFIX);
2292 x86_movsb (s->code);
/* discard spill slots, restore saved ESI/EDI */
2294 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, spill_pos<<2);
2297 x86_pop_reg (s->code, X86_EDI);
2299 x86_pop_reg (s->code, X86_ESI);
/* Initialize a memory block of constant size with a byte value:
 * dest = left, value = right->left, size = right->right (compile-time const).
 * Inline path broadcasts the byte into EAX and uses stosd/stosb;
 * otherwise falls back to memset(). */
2303 stmt: INITBLK (reg, CPSRC (reg, CONST_I4)) {
2304 int dest_reg = tree->left->reg1;
2305 int value_reg = tree->right->left->reg1;
2306 int size = tree->right->right->data.i;
2307 int spill_pos = 0, dest_offset = 0, value_offset = 0;
2308 int save_edi = FALSE;
2314 if (mono_inline_memcpy) {
/* preserve EDI if live and not already an operand */
2316 if (dest_reg != X86_EDI && value_reg != X86_EDI &&
2317 mono_regset_reg_used (s->rs, X86_EDI)) {
2319 x86_push_reg (s->code, X86_EDI);
/* spill operands that the EAX/EDI setup below would clobber */
2322 if (dest_reg == X86_ECX || dest_reg == X86_EAX) {
2323 dest_offset = ++spill_pos;
2325 if (value_reg == X86_ECX || value_reg == X86_EDI) {
2326 value_offset = ++spill_pos;
2330 x86_push_reg (s->code, value_reg);
2332 x86_push_reg (s->code, dest_reg);
/* load EAX=value, EDI=dest (from spill slots when clobbered) */
2334 if (value_reg != X86_EAX) {
2336 x86_mov_reg_membase (s->code, X86_EAX, X86_ESP, (value_offset-1)<<2, 4);
2338 x86_mov_reg_reg (s->code, X86_EAX, value_reg, 4);
2340 if (dest_reg != X86_EDI) {
2342 x86_mov_reg_membase (s->code, X86_EDI, X86_ESP, (dest_offset-1)<<2, 4);
2344 x86_mov_reg_reg (s->code, X86_EDI, dest_reg, 4);
/* replicate the low byte of EAX into all four bytes (0xXX -> 0xXXXXXXXX) */
2347 x86_widen_reg (s->code, X86_EAX, X86_EAX, FALSE, FALSE);
2348 x86_mov_reg_reg (s->code, X86_EDX, X86_EAX, 4);
2349 x86_shift_reg_imm (s->code, X86_SHL, X86_EAX, 8);
2350 x86_alu_reg_reg (s->code, X86_OR, X86_EAX, X86_EDX);
2351 x86_mov_reg_reg (s->code, X86_EDX, X86_EAX, 4);
2352 x86_shift_reg_imm (s->code, X86_SHL, X86_EAX, 16);
2353 x86_alu_reg_reg (s->code, X86_OR, X86_EAX, X86_EDX);
/* i = size/4 dwords (computed in an omitted line); store them with rep stosd */
2356 x86_mov_reg_imm (s->code, X86_ECX, i);
2358 x86_prefix (s->code, X86_REP_PREFIX);
2359 x86_stosd (s->code);
/* j = size%3 remainder bytes, unrolled since the count is a small constant */
2362 for (i = 0; i < j; i++)
2363 x86_stosb (s->code);
2365 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, spill_pos<<2);
2368 x86_pop_reg (s->code, X86_EDI);
/* non-inline path: memset (dest, value, size) */
2371 x86_push_imm (s->code, size);
2372 x86_push_reg (s->code, value_reg);
2373 x86_push_reg (s->code, dest_reg);
2374 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, memset);
2375 x86_call_code (s->code, 0);
2376 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
2379 MBCOND (mono_inline_memcpy);
/* Initialize a memory block with a run-time size: dest = left,
 * value = right->left, size = right->right.
 * BUG FIX: the EDI-save guard tested `size_reg != X86_EDI` twice and never
 * tested value_reg, so when the value lived in EDI the sequence could push
 * EDI and then clobber it before the value was read.  The middle test is now
 * `value_reg != X86_EDI`, matching the CONST_I4 variant (old line 2316) and
 * the CPBLK rule (old line 2230). */
2383 stmt: INITBLK (reg, CPSRC (reg, reg)) {
2384 int dest_reg = tree->left->reg1;
2385 int value_reg = tree->right->left->reg1;
2386 int size_reg = tree->right->right->reg1;
2387 int spill_pos = 0, size_offset = 0, dest_offset = 0, value_offset = 0;
2388 int save_edi = FALSE;
2390 if (mono_inline_memcpy) {
/* preserve EDI if live and not already an operand (fixed: check value_reg) */
2392 if (dest_reg != X86_EDI && value_reg != X86_EDI && size_reg != X86_EDI &&
2393 mono_regset_reg_used (s->rs, X86_EDI)) {
2395 x86_push_reg (s->code, X86_EDI);
/* spill operands that the EAX/EDI/ECX setup below would clobber */
2398 if (size_reg == X86_EDI || size_reg == X86_EAX) {
2399 size_offset = ++spill_pos;
2401 if (dest_reg == X86_ECX || dest_reg == X86_EAX) {
2402 dest_offset = ++spill_pos;
2404 if (value_reg == X86_ECX || value_reg == X86_EDI) {
2405 value_offset = ++spill_pos;
2409 x86_push_reg (s->code, value_reg);
2411 x86_push_reg (s->code, dest_reg);
2413 x86_push_reg (s->code, size_reg);
/* load EAX=value, EDI=dest, ECX=size (from spill slots when clobbered) */
2415 if (value_reg != X86_EAX) {
2417 x86_mov_reg_membase (s->code, X86_EAX, X86_ESP, (value_offset-1)<<2, 4);
2419 x86_mov_reg_reg (s->code, X86_EAX, value_reg, 4);
2421 if (dest_reg != X86_EDI) {
2423 x86_mov_reg_membase (s->code, X86_EDI, X86_ESP, (dest_offset-1)<<2, 4);
2425 x86_mov_reg_reg (s->code, X86_EDI, dest_reg, 4);
2427 if (size_reg != X86_ECX) {
2429 x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, (size_offset-1)<<2, 4);
2431 x86_mov_reg_reg (s->code, X86_ECX, size_reg, 4);
/* replicate the low byte of EAX into all four bytes */
2434 x86_widen_reg (s->code, X86_EAX, X86_EAX, FALSE, FALSE);
2435 x86_mov_reg_reg (s->code, X86_EDX, X86_EAX, 4);
2436 x86_shift_reg_imm (s->code, X86_SHL, X86_EAX, 8);
2437 x86_alu_reg_reg (s->code, X86_OR, X86_EAX, X86_EDX);
2438 x86_mov_reg_reg (s->code, X86_EDX, X86_EAX, 4);
2439 x86_shift_reg_imm (s->code, X86_SHL, X86_EAX, 16);
2440 x86_alu_reg_reg (s->code, X86_OR, X86_EAX, X86_EDX);
/* save byte count, then store size/4 dwords followed by size%4 bytes */
2442 x86_push_reg (s->code, X86_ECX);
2443 x86_shift_reg_imm (s->code, X86_SHR, X86_ECX, 2);
2446 // init whole dwords first
2447 x86_prefix (s->code, X86_REP_PREFIX);
2448 x86_stosd (s->code);
2450 x86_pop_reg (s->code, X86_ECX);
2451 x86_alu_reg_imm (s->code, X86_AND, X86_ECX, 3);
2453 // init remaining bytes (if any)
2454 x86_prefix (s->code, X86_REP_PREFIX);
2455 x86_stosb (s->code);
2457 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, spill_pos<<2);
2460 x86_pop_reg (s->code, X86_EDI);
/* non-inline path: memset (dest, value, size) */
2463 x86_push_reg (s->code, size_reg);
2464 x86_push_reg (s->code, value_reg);
2465 x86_push_reg (s->code, dest_reg);
2466 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, memset);
2467 x86_call_code (s->code, 0);
2468 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
/* Unconditional branch to a basic block (rule header omitted in this listing). */
2477 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2478 x86_jump32 (s->code, 0);
/* Compare rules produce EFLAGS for a following CBRANCH.  The MBCOND lines
 * restrict each variant to locals that are (or are not) register-allocated. */
2481 cflags: COMPARE (reg, LDIND_I4 (ADDR_L)) {
2482 int treg = VARINFO (s, tree->right->left->data.i).reg;
2483 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, treg);
2485 MBCOND ((VARINFO (data, tree->right->left->data.i).reg >= 0));
2489 cflags: COMPARE (LDIND_I4 (ADDR_L), CONST_I4) {
2490 int treg = VARINFO (s, tree->left->left->data.i).reg;
2491 x86_alu_reg_imm (s->code, X86_CMP, treg, tree->right->data.i);
2493 MBCOND ((VARINFO (data, tree->left->left->data.i).reg >= 0));
2497 cflags: COMPARE (LDIND_I4 (ADDR_L), reg) {
2498 int treg = VARINFO (s, tree->left->left->data.i).reg;
2499 x86_alu_reg_reg (s->code, X86_CMP, treg, tree->right->reg1);
2501 MBCOND ((VARINFO (data, tree->left->left->data.i).reg >= 0));
/* stack-resident local variant: compare [EBP+offset] against the constant */
2505 cflags: COMPARE (LDIND_I4 (ADDR_L), CONST_I4) {
2506 int offset = VARINFO (s, tree->left->left->data.i).offset;
2507 x86_alu_membase_imm (s->code, X86_CMP, X86_EBP, offset, tree->right->data.i);
2509 MBCOND ((VARINFO (data, tree->left->left->data.i).reg < 0));
2513 cflags: COMPARE (reg, CONST_I4) {
2514 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
2517 cflags: COMPARE (reg, reg) {
2518 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
/* Conditional branch on the flags set by a cflags rule; the last argument of
 * x86_branch32 selects signed (TRUE) vs unsigned (FALSE) condition codes.
 * The case labels for the switch are omitted in this listing. */
2522 stmt: CBRANCH (cflags) {
2523 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
2525 switch (tree->data.bi.cond) {
2527 x86_branch32 (s->code, X86_CC_LT, 0, TRUE);
2530 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
2533 x86_branch32 (s->code, X86_CC_GT, 0, TRUE);
2536 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
2539 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
2542 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
2545 x86_branch32 (s->code, X86_CC_GE, 0, TRUE);
2548 x86_branch32 (s->code, X86_CC_GE, 0, FALSE);
2551 x86_branch32 (s->code, X86_CC_LE, 0, TRUE);
2554 x86_branch32 (s->code, X86_CC_LE, 0, FALSE);
2557 g_assert_not_reached ();
/* Branch-if-true on a local: test the register copy when allocated,
 * otherwise compare the stack slot against 0. */
2561 stmt: BRTRUE (LDIND_I4 (ADDR_L)) {
2562 int treg = VARINFO (s, tree->left->left->data.i).reg;
2563 int offset = VARINFO (s, tree->left->left->data.i).offset;
2566 x86_test_reg_reg (s->code, treg, treg);
2568 x86_alu_membase_imm (s->code, X86_CMP, X86_EBP, offset, 0);
2570 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2571 x86_branch32 (s->code, X86_CC_NE, 0, TRUE);
2574 stmt: BRTRUE (reg) {
2575 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
2576 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2577 x86_branch32 (s->code, X86_CC_NE, 0, TRUE);
/* Branch-if-false: same shapes with inverted condition. */
2580 stmt: BRFALSE (LDIND_I4 (ADDR_L)) {
2581 int treg = VARINFO (s, tree->left->left->data.i).reg;
2582 int offset = VARINFO (s, tree->left->left->data.i).offset;
2585 x86_test_reg_reg (s->code, treg, treg);
2587 x86_alu_membase_imm (s->code, X86_CMP, X86_EBP, offset, 0);
2589 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2590 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
2592 //{static int cx= 0; printf ("CX1 %5d\n", cx++);}
2595 stmt: BRFALSE (reg) {
2596 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
2597 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2598 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
/* BREAK opcode: emit int3 (rule header omitted in this listing). */
2602 x86_breakpoint (s->code);
/* RET with a value: result goes in EAX; jump to the shared epilog unless this
 * is the method's final instruction. */
2606 if (tree->left->reg1 != X86_EAX)
2607 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
2609 if (!tree->last_instr) {
2610 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
2611 x86_jump32 (s->code, 0);
/* RET_VOID: same epilog jump, no value to place. */
2616 if (!tree->last_instr) {
2617 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
2618 x86_jump32 (s->code, 0);
/* Push a 32-bit call argument loaded through a composed address mode. */
2623 stmt: ARG_I4 (LDIND_I4 (addr)) {
2624 MBTree *at = tree->left->left;
2626 switch (at->data.ainfo.amode) {
2629 x86_push_mem (s->code, at->data.ainfo.offset);
2633 x86_push_membase (s->code, at->data.ainfo.basereg, at->data.ainfo.offset);
2636 x86_push_memindex (s->code, X86_NOBASEREG, at->data.ainfo.offset,
2637 at->data.ainfo.indexreg, at->data.ainfo.shift);
2640 x86_push_memindex (s->code, at->data.ainfo.basereg,
2641 at->data.ainfo.offset, at->data.ainfo.indexreg,
2642 at->data.ainfo.shift);
/* Register-allocated local pushed directly. */
2647 stmt: ARG_I4 (LDIND_I4 (ADDR_L)) {
2648 int treg = VARINFO (s, tree->left->left->data.i).reg;
2649 x86_push_reg (s->code, treg);
2651 MBCOND ((VARINFO (data, tree->left->left->data.i).reg >= 0));
2655 stmt: ARG_I4 (reg) {
2656 x86_push_reg (s->code, tree->left->reg1);
2657 PRINT_REG ("ARG_I4", tree->left->reg1);
2660 stmt: ARG_I4 (ADDR_G) {
2661 x86_push_imm (s->code, tree->left->data.p);
2664 stmt: ARG_I4 (CONST_I4) "MB_USE_OPT1(0)" {
2665 x86_push_imm (s->code, tree->left->data.i);
2669 PRINT_REG ("THIS", tree->reg1);
/* Null check on `this`: a CMP against [reg+0] faults if the object is NULL. */
2672 reg: CHECKTHIS (reg) {
2673 /* try to access the vtable - this will raise an exception
2674 * if the object is NULL */
2675 x86_alu_membase_imm (s->code, X86_CMP, tree->left->reg1, 0, 0);
2676 if (tree->reg1 != tree->left->reg1)
2677 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
2680 stmt: CHECKTHIS (reg) {
2681 x86_alu_membase_imm (s->code, X86_CMP, tree->left->reg1, 0, 0);
/* Tail-call (JMP): restore callee-saved registers and frame, then jump to
 * the target method instead of calling it. */
2688 /* restore callee saved registers */
2689 if (mono_regset_reg_used (s->rs, X86_EBX)) {
2690 x86_mov_reg_membase (s->code, X86_EBX, X86_EBP, pos, 4);
2693 if (mono_regset_reg_used (s->rs, X86_EDI)) {
2694 x86_mov_reg_membase (s->code, X86_EDI, X86_EBP, pos, 4);
2697 if (mono_regset_reg_used (s->rs, X86_ESI)) {
2698 x86_mov_reg_membase (s->code, X86_ESI, X86_EBP, pos, 4);
2701 /* restore ESP/EBP */
2702 x86_leave (s->code);
2704 /* jump to the method */
2705 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->data.p);
2706 x86_jump32 (s->code, 0);
/* Indirect call through a register.  treg is chosen (in omitted lines) to
 * avoid lreg/rreg; it holds the hidden valuetype-return pointer. */
2711 reg: CALL_I4 (this, reg) {
2713 int lreg = tree->left->reg1;
2714 int rreg = tree->right->reg1;
2716 if (lreg == treg || rreg == treg)
2718 if (lreg == treg || rreg == treg)
2720 if (lreg == treg || rreg == treg)
2721 mono_assert_not_reached ();
/* push `this` and null-check it via a faulting CMP */
2723 if (tree->left->op != MB_TERM_NOP) {
2724 mono_assert (lreg >= 0);
2725 x86_push_reg (s->code, lreg);
2726 x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0);
/* valuetype return: push the address of the local that receives the value */
2729 if (tree->data.ci.vtype_num) {
2730 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
2731 x86_lea_membase (s->code, treg, X86_EBP, offset);
2732 x86_push_reg (s->code, treg);
2735 x86_call_reg (s->code, rreg);
2737 if (tree->data.ci.args_size)
2738 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
2740 PRINT_REG ("CALL_I4", tree->reg1);
/* I4 results are returned in EAX by convention */
2742 mono_assert (tree->reg1 == X86_EAX);
/* Direct call to a known absolute address. */
2745 reg: CALL_I4 (this, ADDR_G) {
2746 int lreg = tree->left->reg1;
2752 if (tree->left->op != MB_TERM_NOP) {
2753 mono_assert (lreg >= 0);
2754 x86_push_reg (s->code, lreg);
2755 x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0);
2758 if (tree->data.ci.vtype_num) {
2759 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
2760 x86_lea_membase (s->code, treg, X86_EBP, offset);
2761 x86_push_reg (s->code, treg);
2764 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
2765 x86_call_code (s->code, 0);
2767 if (tree->data.ci.args_size)
2768 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
2770 PRINT_REG ("CALL_I4", tree->reg1);
2772 mono_assert (tree->reg1 == X86_EAX);
/* Load an interface-method function pointer via the mono_ldintftn helper,
 * preserving caller-saved registers around the call. */
2775 reg: LDVIRTFTN (reg, INTF_ADDR) {
2776 /* we cant return the value in the vtable, because it can be
2777 * a magic trampoline, and we cant pass that to the outside world */
2779 if (tree->reg1 != X86_EAX)
2780 x86_push_reg (s->code, X86_EAX);
2781 x86_push_reg (s->code, X86_ECX);
2782 x86_push_reg (s->code, X86_EDX);
2784 x86_push_imm (s->code, tree->right->data.m->klass->interface_id);
2785 x86_push_reg (s->code, tree->left->reg1);
2786 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldintftn);
2787 x86_call_code (s->code, 0);
2788 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
2790 x86_pop_reg (s->code, X86_EDX);
2791 x86_pop_reg (s->code, X86_ECX);
2792 if (tree->reg1 != X86_EAX) {
2793 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
2794 x86_pop_reg (s->code, X86_EAX);
/* Interface call: object -> vtable -> interface_offsets[id] -> slot. */
2798 reg: CALL_I4 (this, INTF_ADDR) {
2799 int lreg = tree->left->reg1;
2805 if (tree->left->op != MB_TERM_NOP) {
2806 mono_assert (lreg >= 0);
2807 x86_push_reg (s->code, lreg);
2810 if (tree->data.ci.vtype_num) {
2811 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
2812 x86_lea_membase (s->code, treg, X86_EBP, offset);
2813 x86_push_reg (s->code, treg);
2816 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2817 x86_mov_reg_membase (s->code, lreg, lreg,
2818 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
2819 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
2820 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
2822 if (tree->data.ci.args_size)
2823 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
2825 PRINT_REG ("CALL_I4(INTERFACE)", tree->reg1);
2827 mono_assert (tree->reg1 == X86_EAX);
/* Load a virtual-method function pointer via the mono_ldvirtftn helper,
 * preserving caller-saved registers around the call. */
2830 reg: LDVIRTFTN (reg, VFUNC_ADDR) {
2831 /* we cant return the value in the vtable, because it can be
2832 * a magic trampoline, and we cant pass that to the outside world */
2834 if (tree->reg1 != X86_EAX)
2835 x86_push_reg (s->code, X86_EAX);
2836 x86_push_reg (s->code, X86_ECX);
2837 x86_push_reg (s->code, X86_EDX);
2839 x86_push_imm (s->code, tree->right->data.m->slot);
2840 x86_push_reg (s->code, tree->left->reg1);
2841 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldvirtftn);
2842 x86_call_code (s->code, 0);
2843 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
2845 x86_pop_reg (s->code, X86_EDX);
2846 x86_pop_reg (s->code, X86_ECX);
2847 if (tree->reg1 != X86_EAX) {
2848 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
2849 x86_pop_reg (s->code, X86_EAX);
/* LDFTN: same save/call/restore pattern around the mono_ldftn helper
 * (rule header omitted in this listing). */
2854 if (tree->reg1 != X86_EAX)
2855 x86_push_reg (s->code, X86_EAX);
2856 x86_push_reg (s->code, X86_ECX);
2857 x86_push_reg (s->code, X86_EDX);
2859 x86_push_imm (s->code, tree->data.m);
2860 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldftn);
2861 x86_call_code (s->code, 0);
2862 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer));
2864 x86_pop_reg (s->code, X86_EDX);
2865 x86_pop_reg (s->code, X86_ECX);
2866 if (tree->reg1 != X86_EAX) {
2867 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
2868 x86_pop_reg (s->code, X86_EAX);
/* Virtual call: object -> vtable -> vtable[slot].  The vtable load itself
 * doubles as the null check for `this`. */
2873 reg: CALL_I4 (this, VFUNC_ADDR) {
2874 int lreg = tree->left->reg1;
2880 if (tree->left->op != MB_TERM_NOP) {
2881 mono_assert (lreg >= 0);
2882 x86_push_reg (s->code, lreg);
2885 if (tree->data.ci.vtype_num) {
2886 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
2887 x86_lea_membase (s->code, treg, X86_EBP, offset);
2888 x86_push_reg (s->code, treg);
2891 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2892 x86_call_virtual (s->code, lreg,
2893 G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
2895 if (tree->data.ci.args_size)
2896 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
2898 PRINT_REG ("CALL_I4(VIRTUAL)", tree->reg1);
2900 mono_assert (tree->reg1 == X86_EAX);
/* CALL_VOID variants mirror the CALL_I4 rules above, minus the EAX-result
 * assertion: direct address, register-indirect, interface, and virtual. */
2903 stmt: CALL_VOID (this, ADDR_G) {
2904 int lreg = tree->left->reg1;
2910 if (tree->left->op != MB_TERM_NOP) {
2911 mono_assert (lreg >= 0);
2912 x86_push_reg (s->code, lreg);
2913 x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0);
2916 if (tree->data.ci.vtype_num) {
2917 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
2918 x86_lea_membase (s->code, treg, X86_EBP, offset);
2919 x86_push_reg (s->code, treg);
2922 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
2923 x86_call_code (s->code, 0);
2925 if (tree->data.ci.args_size)
2926 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
2929 stmt: CALL_VOID (this, reg) {
2931 int lreg = tree->left->reg1;
2932 int rreg = tree->right->reg1;
/* treg fallback chain (assignments omitted in this listing) */
2934 if (lreg == treg || rreg == treg)
2936 if (lreg == treg || rreg == treg)
2938 if (lreg == treg || rreg == treg)
2939 mono_assert_not_reached ();
2941 if (tree->left->op != MB_TERM_NOP) {
2942 mono_assert (lreg >= 0);
2943 x86_push_reg (s->code, lreg);
2944 x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0);
2947 if (tree->data.ci.vtype_num) {
2948 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
2949 x86_lea_membase (s->code, treg, X86_EBP, offset);
2950 x86_push_reg (s->code, treg);
2953 x86_call_reg (s->code, tree->right->reg1);
2955 if (tree->data.ci.args_size)
2956 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
2959 stmt: CALL_VOID (this, INTF_ADDR) {
2960 int lreg = tree->left->reg1;
2966 if (tree->left->op != MB_TERM_NOP) {
2967 mono_assert (lreg >= 0);
2968 x86_push_reg (s->code, lreg);
2971 if (tree->data.ci.vtype_num) {
2972 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
2973 x86_lea_membase (s->code, treg, X86_EBP, offset);
2974 x86_push_reg (s->code, treg);
2977 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2978 x86_mov_reg_membase (s->code, lreg, lreg,
2979 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
2980 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
2981 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
2983 if (tree->data.ci.args_size)
2984 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
2987 stmt: CALL_VOID (this, VFUNC_ADDR) {
2988 int lreg = tree->left->reg1;
2994 if (tree->left->op != MB_TERM_NOP) {
2995 mono_assert (lreg >= 0);
2996 x86_push_reg (s->code, lreg);
2999 if (tree->data.ci.vtype_num) {
3000 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
3001 x86_lea_membase (s->code, treg, X86_EBP, offset);
3002 x86_push_reg (s->code, treg);
3005 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3006 x86_call_virtual (s->code, lreg,
3007 G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
3009 if (tree->data.ci.args_size)
3010 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
/* Switch: jump table with jt[0] = case count; an unsigned (FALSE) GE branch
 * covers both out-of-range directions at once.
 * NOTE(review): the bound check reads the table via tree->data.p but the
 * indexed load uses tree->data.i + 4 — presumably the same address stored in
 * a union; verify against the tree-node definition. */
3013 stmt: SWITCH (reg) {
3015 guint32 *jt = (guint32 *)tree->data.p;
3017 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, jt [0]);
3018 offset = 6 + (guint32)s->code;
3019 x86_branch32 (s->code, X86_CC_GE, jt [jt [0] + 1] - offset, FALSE);
3021 x86_mov_reg_memindex (s->code, X86_EAX, X86_NOBASEREG,
3022 tree->data.i + 4, tree->left->reg1, 2, 4);
3023 x86_jump_reg (s->code, X86_EAX);
/* Narrowing/widening conversions from a 32-bit register: x86_widen_reg's
 * flags are (is_signed, is_half) — byte vs word source, sign vs zero extend.
 * CONV_I4/CONV_U4 are plain register moves. */
3030 reg: CONV_I1 (lreg) {
3031 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, FALSE);
3034 reg: CONV_U1 (lreg) {
3035 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
3038 reg: CONV_I2 (lreg) {
3039 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, TRUE);
3042 reg: CONV_U2 (lreg) {
3043 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
3046 reg: CONV_I4 (lreg) {
3047 if (tree->reg1 != tree->left->reg1)
3048 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3051 reg: CONV_U4 (lreg) {
3052 if (tree->reg1 != tree->left->reg1)
3053 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Checked int64 -> int32: the value fits iff the high word is the sign
 * extension of the low word.  First variant computes forward branch
 * displacements by emitting the sequence twice (o1..o5 recorded in omitted
 * lines); second variant uses x86_patch instead. */
3057 reg: CONV_OVF_I4 (lreg){
3058 guint8 *start = s->code;
3059 guchar* o1, *o2, *o3, *o4, *o5;
/* comment fixed: representable range is 0xffffffff:80000000 .. 00000000:7fffffff */
3063 * Valid ints: 0xffffffff:80000000 to 00000000:7fffffff
3065 for (i = 0; i < 2; i++) {
3068 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
3070 /* If the low word top bit is set, see if we are negative */
3071 x86_branch8 (s->code, X86_CC_LT, o3 - o1, TRUE);
3074 /* We are not negative (no top bit set, check for our top word to be zero */
3075 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
3076 x86_branch8 (s->code, X86_CC_EQ, o4 - o2, TRUE);
3079 /* throw exception */
3080 x86_push_imm (s->code, "OverflowException");
3081 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS,
3082 arch_get_throw_exception_by_name ());
3083 x86_call_code (s->code, 0);
3086 /* our top bit is set, check that top word is 0xffffffff */
3087 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg2, 0xffffffff);
3090 /* nope, emit exception */
3091 x86_branch8 (s->code, X86_CC_NE, o2 - o5, TRUE);
3094 if (tree->reg1 != tree->left->reg1)
3095 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Same check, written with back-patched branches instead of two passes. */
3098 reg: CONV_OVF_I4 (lreg){
3099 guint8 *br [3], *label [1];
/* comment fixed: representable range is 0xffffffff:80000000 .. 00000000:7fffffff */
3102 * Valid ints: 0xffffffff:80000000 to 00000000:7fffffff
3104 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
3106 /* If the low word top bit is set, see if we are negative */
3107 br [0] = s->code; x86_branch8 (s->code, X86_CC_LT, 0, TRUE);
3109 /* We are not negative (no top bit set, check for our top word to be zero */
3110 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
3111 br [1] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, TRUE);
3112 label [0] = s->code;
3114 /* throw exception */
3115 x86_push_imm (s->code, "OverflowException");
3116 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS,
3117 arch_get_throw_exception_by_name ());
3118 x86_call_code (s->code, 0);
3120 x86_patch (br [0], s->code);
3121 /* our top bit is set, check that top word is 0xffffffff */
3122 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg2, 0xffffffff);
3124 x86_patch (br [1], s->code);
3125 /* nope, emit exception */
3126 br [2] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3127 x86_patch (br [2], label [0]);
3129 if (tree->reg1 != tree->left->reg1)
3130 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Checked int64 -> uint32 and uint64 -> int32 on a 32-bit target: both just
 * require the high word to be zero, then move the low word through. */
3133 reg: CONV_OVF_U4 (lreg) {
3134 /* Keep in sync with CONV_OVF_I4_UN below, they are the same on 32-bit machines */
3135 /* top word must be 0 */
3136 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
3137 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException");
3138 if (tree->reg1 != tree->left->reg1)
3139 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3142 reg: CONV_OVF_I4_UN (lreg) {
3143 /* Keep in sync with CONV_OVF_U4 above, they are the same on 32-bit machines */
3144 /* top word must be 0 */
3145 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
3146 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException");
3147 if (tree->reg1 != tree->left->reg1)
3148 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* CONST_I8: load a 64-bit constant as two 32-bit halves (low in reg1,
 * high in reg2); rule header omitted in this listing. */
3154 x86_mov_reg_imm (s->code, tree->reg1, *((gint32 *)&tree->data.p));
3155 x86_mov_reg_imm (s->code, tree->reg2, *((gint32 *)&tree->data.p + 1));
/* Sign-extend an int32 constant to int64: high word is 0 or -1. */
3158 lreg: CONV_I8 (CONST_I4) {
3159 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
3161 if (tree->left->data.i >= 0)
3162 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3164 x86_mov_reg_imm (s->code, tree->reg2, -1);
/* Sign-extend a register value: zero the high word, then branch over a
 * 5-byte mov reg,-1 when the value is non-negative (hence the hard-coded
 * displacement 5, asserted against the emitted length). */
3167 lreg: CONV_I8 (reg) {
3170 if (tree->reg1 != tree->left->reg1)
3171 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3173 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3174 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 0);
3175 x86_branch8 (s->code, X86_CC_GE, 5, TRUE);
3177 x86_mov_reg_imm (s->code, tree->reg2, -1);
3178 mono_assert ((s->code - i1) == 5);
/* Zero-extend to uint64: high word always 0. */
3181 lreg: CONV_U8 (CONST_I4) 1 {
3182 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
3183 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3186 lreg: CONV_U8 (reg) {
3187 if (tree->reg1 != tree->left->reg1)
3188 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3189 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
/* Checked int32 const -> uint64: negative constants always overflow, so the
 * throw can be decided at compile time. */
3192 lreg: CONV_OVF_U8 (CONST_I4) {
3193 if (tree->left->data.i < 0){
3194 x86_push_imm (s->code, "OverflowException");
3195 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS,
3196 arch_get_throw_exception_by_name ());
3197 x86_call_code (s->code, 0);
3199 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
3200 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
/* uint32 const -> int64 never overflows. */
3204 lreg: CONV_OVF_I8_UN (CONST_I4) {
3205 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
3206 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
/* Checked int32 -> uint64: negative inputs must throw OverflowException.
 * BUG FIX: the sign-bit mask was 0x8000000 (bit 27); negative values with
 * bit 27 clear slipped through and values with bit 27 set were wrongly
 * rejected.  The correct mask for the int32 sign bit is 0x80000000,
 * consistent with the CONST_I4 variant's `data.i < 0` test above. */
3209 lreg: CONV_OVF_U8 (reg) {
3210 x86_test_reg_imm (s->code, tree->left->reg1, 0x80000000);
3211 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException");
3213 if (tree->reg1 != tree->left->reg1)
3214 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3215 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
/* uint32 -> int64 never overflows: just zero-extend. */
3218 lreg: CONV_OVF_I8_UN (reg) {
3219 /* Convert uint value into int64, we pass everything */
3220 if (tree->reg1 != tree->left->reg1)
3221 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3222 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
/* Store a 64-bit value through a composed address mode, low word first. */
3225 stmt: STIND_I8 (addr, lreg) {
3227 switch (tree->left->data.ainfo.amode) {
3230 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 4);
3231 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset + 4, tree->right->reg2, 4);
3235 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
3236 tree->left->data.ainfo.offset, tree->right->reg1, 4);
3237 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
3238 tree->left->data.ainfo.offset + 4, tree->right->reg2, 4);
3241 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
3242 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
3243 tree->right->reg1, 4);
3244 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset + 4,
3245 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
3246 tree->right->reg2, 4);
3249 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
3250 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
3251 tree->right->reg1, 4);
3252 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset + 4,
3253 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
3254 tree->right->reg2, 4);
/* Store a 64-bit field of a possibly-remoted object: if the target's vtable
 * class is the transparent proxy, route through mono_store_remote_field;
 * otherwise store directly at the field offset (adjusted for valuetypes). */
3259 stmt: REMOTE_STIND_I8 (reg, lreg) {
3263 x86_push_reg (s->code, tree->right->reg1);
3264 x86_mov_reg_membase (s->code, tree->right->reg1, tree->left->reg1, 0, 4);
3265 x86_alu_membase_imm (s->code, X86_CMP, tree->right->reg1, 0, ((int)mono_defaults.transparent_proxy_class));
3266 x86_pop_reg (s->code, tree->right->reg1);
3268 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3270 /* this is a transparent proxy - remote the call */
3272 /* save value to stack */
3273 x86_push_reg (s->code, tree->right->reg2);
3274 x86_push_reg (s->code, tree->right->reg1);
/* args: (obj, klass, field, &value) — &value is the ESP pointing at the pair */
3276 x86_push_reg (s->code, X86_ESP);
3277 x86_push_imm (s->code, tree->data.fi.field);
3278 x86_push_imm (s->code, tree->data.fi.klass);
3279 x86_push_reg (s->code, tree->left->reg1);
3280 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
3281 x86_call_code (s->code, 0);
3282 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 24);
3284 br [1] = s->code; x86_jump8 (s->code, 0);
3286 x86_patch (br [0], s->code);
3287 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
3288 tree->data.fi.field->offset;
3289 x86_mov_membase_reg (s->code, tree->left->reg1, offset, tree->right->reg1, 4);
3290 x86_mov_membase_reg (s->code, tree->left->reg1, offset + 4, tree->right->reg2, 4);
3292 x86_patch (br [1], s->code);
3296 # an addr can use two address register (base and index register). The must take care
3297 # that we do not override them (thus the use of x86_lea)
/* Load a 64-bit value: fold the address into reg2 with lea first so the
 * base/index registers survive the first (low word) load. */
3298 lreg: LDIND_I8 (addr) {
3300 switch (tree->left->data.ainfo.amode) {
3303 x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4);
3304 x86_mov_reg_mem (s->code, tree->reg2, tree->left->data.ainfo.offset + 4, 4);
3308 x86_lea_membase (s->code, tree->reg2, tree->left->data.ainfo.basereg,
3309 tree->left->data.ainfo.offset);
3310 x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4);
3311 x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4);
3314 x86_lea_memindex (s->code, tree->reg2, X86_NOBASEREG, tree->left->data.ainfo.offset,
3315 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift);
3316 x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4);
3317 x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4);
3320 x86_lea_memindex (s->code, tree->reg2, tree->left->data.ainfo.basereg,
3321 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
3322 tree->left->data.ainfo.shift);
3323 x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4);
3324 x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4);
3327 PRINT_REG ("LDIND_I8_0", tree->reg1);
3328 PRINT_REG ("LDIND_I8_1", tree->reg2);
/* 64-bit shifts on a 32-bit target.  Constant counts < 32 use shrd/shld plus
 * a single-register shift; counts 32..63 move words and shift the remainder;
 * counts >= 64 are left unspecified, matching ECMA-335. */
3331 lreg: SHR (lreg, CONST_I4) {
3332 if (tree->right->data.i < 32) {
3333 x86_shrd_reg_imm (s->code, tree->left->reg1, tree->left->reg2, tree->right->data.i);
3334 x86_shift_reg_imm (s->code, X86_SAR, tree->left->reg2, tree->right->data.i);
3335 if (tree->reg1 != tree->left->reg1)
3336 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3337 if (tree->reg2 != tree->left->reg2)
3338 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
3339 } else if (tree->right->data.i < 64) {
3340 if (tree->reg1 != tree->left->reg2)
3341 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg2, 4);
3342 if (tree->reg2 != tree->left->reg2)
3343 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
/* high word becomes the sign fill; low word is high >> (n-32) */
3344 x86_shift_reg_imm (s->code, X86_SAR, tree->reg2, 31);
3345 x86_shift_reg_imm (s->code, X86_SAR, tree->reg1, (tree->right->data.i - 32));
3346 } /* else unspecified result */
3349 lreg: SHR_UN (lreg, CONST_I4) {
3350 if (tree->right->data.i < 32) {
3351 x86_shrd_reg_imm (s->code, tree->left->reg1, tree->left->reg2, tree->right->data.i);
3352 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg2, tree->right->data.i);
3353 if (tree->reg1 != tree->left->reg1)
3354 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3355 if (tree->reg2 != tree->left->reg2)
3356 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
3357 } else if (tree->right->data.i < 64) {
3358 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg2, 4);
3359 x86_shift_reg_imm (s->code, X86_SHR, tree->reg1, (tree->right->data.i - 32));
3360 x86_mov_reg_imm (s->code, tree->reg2, 0);
3361 } /* else unspecified result */
/* Variable-count shifts: count goes in CL; the shrd/shift pair handles
 * counts < 32, then the `test ECX, 32` branch fixes up counts 32..63. */
3364 lreg: SHR (lreg, reg) {
3367 if (tree->right->reg1 != X86_ECX)
3368 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
3370 x86_shrd_reg (s->code, tree->left->reg1, tree->left->reg2);
3371 x86_shift_reg (s->code, X86_SAR, tree->left->reg2);
3372 x86_test_reg_imm (s->code, X86_ECX, 32);
3373 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
3374 x86_mov_reg_reg (s->code, tree->left->reg1, tree->left->reg2, 4);
/* NOTE(review): this shifts tree->reg2 while the surrounding sequence works
 * on tree->left->reg2 (MOVE_LREG copies left->reg2 into reg2 only after the
 * branch target) — verify the allocator guarantees reg2 == left->reg2 here. */
3375 x86_shift_reg_imm (s->code, X86_SAR, tree->reg2, 31);
3376 x86_patch (br [0], s->code);
3378 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3381 lreg: SHR_UN (lreg, reg) {
3384 if (tree->right->reg1 != X86_ECX)
3385 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
3387 x86_shrd_reg (s->code, tree->left->reg1, tree->left->reg2);
3388 x86_shift_reg (s->code, X86_SHR, tree->left->reg2);
3389 x86_test_reg_imm (s->code, X86_ECX, 32);
3390 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
3391 x86_mov_reg_reg (s->code, tree->left->reg1, tree->left->reg2, 4);
/* NOTE(review): same tree->reg2 vs tree->left->reg2 question as SHR above;
 * also an unsigned shift would be expected to zero the high word (SHR 31
 * leaves bit pattern value>>31, not 0) — confirm against the original file. */
3392 x86_shift_reg_imm (s->code, X86_SHR, tree->reg2, 31);
3393 x86_patch (br [0], s->code);
3395 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3398 lreg: SHL (lreg, CONST_I4) {
3399 if (tree->right->data.i < 32) {
3400 x86_shld_reg_imm (s->code, tree->left->reg2, tree->left->reg1, tree->right->data.i);
3401 x86_shift_reg_imm (s->code, X86_SHL, tree->left->reg1, tree->right->data.i);
3402 if (tree->reg1 != tree->left->reg1)
3403 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3404 if (tree->reg2 != tree->left->reg2)
3405 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
3406 } else if (tree->right->data.i < 64) {
3407 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg1, 4);
3408 x86_shift_reg_imm (s->code, X86_SHL, tree->reg2, (tree->right->data.i - 32));
3409 x86_alu_reg_reg (s->code, X86_XOR, tree->reg1, tree->reg1);
3410 } /* else unspecified result */
3413 lreg: SHL (lreg, reg) {
3416 if (tree->right->reg1 != X86_ECX)
3417 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
3419 x86_shld_reg (s->code, tree->left->reg2, tree->left->reg1);
3420 x86_shift_reg (s->code, X86_SHL, tree->left->reg1);
3421 x86_test_reg_imm (s->code, X86_ECX, 32);
3422 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
3423 x86_mov_reg_reg (s->code, tree->left->reg2, tree->left->reg1, 4);
3424 x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->left->reg1);
3425 x86_patch (br [0], s->code);
3427 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* 64-bit arithmetic/logic on register pairs (reg1 = low word, reg2 = high
 * word). Add/sub chain the carry through ADC/SBB; the _OVF variants emit a
 * conditional OverflowException right after the flag-setting instruction
 * (signed checks the overflow flag, unsigned checks the carry flag). */
3430 lreg: ADD (lreg, lreg) {
3431 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
3432 x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2);
3434 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3437 lreg: ADD_OVF (lreg, lreg) {
3438 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
3439 x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2);
3440 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
3442 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3445 lreg: ADD_OVF_UN (lreg, lreg) {
3446 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
3447 x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2);
3448 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
3450 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3453 lreg: SUB (lreg, lreg) {
3454 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
3455 x86_alu_reg_reg (s->code, X86_SBB, tree->left->reg2, tree->right->reg2);
3457 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3460 lreg: SUB_OVF (lreg, lreg) {
3461 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
3462 x86_alu_reg_reg (s->code, X86_SBB, tree->left->reg2, tree->right->reg2);
3463 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
3465 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3468 lreg: SUB_OVF_UN (lreg, lreg) {
3469 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
3470 x86_alu_reg_reg (s->code, X86_SBB, tree->left->reg2, tree->right->reg2);
3471 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
3473 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* bitwise ops need no carry chaining: each word is operated on independently */
3476 lreg: AND (lreg, lreg) {
3477 x86_alu_reg_reg (s->code, X86_AND, tree->left->reg1, tree->right->reg1);
3478 x86_alu_reg_reg (s->code, X86_AND, tree->left->reg2, tree->right->reg2);
3480 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3483 lreg: OR (lreg, lreg) {
3484 x86_alu_reg_reg (s->code, X86_OR, tree->left->reg1, tree->right->reg1);
3485 x86_alu_reg_reg (s->code, X86_OR, tree->left->reg2, tree->right->reg2);
3487 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3490 lreg: XOR (lreg, lreg) {
3491 x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->right->reg1);
3492 x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg2, tree->right->reg2);
3494 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* body of the 64-bit NEG rule (header not present in this listing):
 * two's-complement negate of a pair: neg low; adc high, 0; neg high */
3498 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3500 x86_neg_reg (s->code, tree->reg1);
3501 x86_alu_reg_imm (s->code, X86_ADC, tree->reg2, 0);
3502 x86_neg_reg (s->code, tree->reg2);
/* body of the 64-bit NOT rule (header not present in this listing):
 * bitwise complement of both words independently */
3506 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3508 x86_not_reg (s->code, tree->reg1);
3509 x86_not_reg (s->code, tree->reg2);
/* 64-bit multiply/divide/remainder are done by calling C runtime helpers
 * (mono_llmult & co., declared near the top of this file) with the four
 * 32-bit halves pushed cdecl-style (right pair first). The result comes
 * back in EDX:EAX, which the asserts below require the allocator to have
 * picked for reg1/reg2. ECX is caller-saved around the call if live.
 * The _OVF variants additionally push a stack slot the helper fills with
 * an exception object pointer; non-NULL means throw. */
3512 lreg: MUL (lreg, lreg) {
3513 if (mono_regset_reg_used (s->rs, X86_ECX))
3514 x86_push_reg (s->code, X86_ECX);
3516 x86_push_reg (s->code, tree->right->reg2);
3517 x86_push_reg (s->code, tree->right->reg1);
3518 x86_push_reg (s->code, tree->left->reg2);
3519 x86_push_reg (s->code, tree->left->reg1);
3520 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llmult);
3521 x86_call_code (s->code, 0);
/* pop the four 32-bit arguments */
3522 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3524 if (mono_regset_reg_used (s->rs, X86_ECX))
3525 x86_pop_reg (s->code, X86_ECX);
3527 mono_assert (tree->reg1 == X86_EAX &&
3528 tree->reg2 == X86_EDX);
3531 lreg: MUL_OVF (lreg, lreg) {
3532 if (mono_regset_reg_used (s->rs, X86_ECX))
3533 x86_push_reg (s->code, X86_ECX);
3535 x86_push_reg (s->code, tree->right->reg2);
3536 x86_push_reg (s->code, tree->right->reg1);
3537 x86_push_reg (s->code, tree->left->reg2);
3538 x86_push_reg (s->code, tree->left->reg1);
3539 /* pass a pointer to store the resulting exception -
3540 * ugly, but it works */
3541 x86_push_reg (s->code, X86_ESP);
3542 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llmult_ovf);
3543 x86_call_code (s->code, 0);
/* fetch the exception slot the helper may have written */
3544 x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, 4, 4);
/* pop the exception-pointer slot plus the four arguments (5 * 4 bytes) */
3545 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
3546 x86_alu_reg_imm (s->code, X86_CMP, X86_ECX, 0);
3548 /* cond. emit exception */
/* skip over the 7-byte push+call throw sequence when no exception was set */
3549 x86_branch8 (s->code, X86_CC_EQ, 7, FALSE);
3550 x86_push_reg (s->code, X86_ECX);
3551 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, arch_get_throw_exception ());
3552 x86_call_code (s->code, 0);
3554 if (mono_regset_reg_used (s->rs, X86_ECX))
3555 x86_pop_reg (s->code, X86_ECX);
3557 mono_assert (tree->reg1 == X86_EAX &&
3558 tree->reg2 == X86_EDX);
3561 lreg: MUL_OVF_UN (lreg, lreg) {
/* identical to MUL_OVF except for the unsigned helper */
3562 if (mono_regset_reg_used (s->rs, X86_ECX))
3563 x86_push_reg (s->code, X86_ECX);
3565 x86_push_reg (s->code, tree->right->reg2);
3566 x86_push_reg (s->code, tree->right->reg1);
3567 x86_push_reg (s->code, tree->left->reg2);
3568 x86_push_reg (s->code, tree->left->reg1);
3569 /* pass a pointer to store the resulting exception -
3570 * ugly, but it works */
3571 x86_push_reg (s->code, X86_ESP);
3572 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llmult_ovf_un);
3573 x86_call_code (s->code, 0);
3574 x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, 4, 4);
3575 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
3576 x86_alu_reg_imm (s->code, X86_CMP, X86_ECX, 0);
3578 /* cond. emit exception */
3579 x86_branch8 (s->code, X86_CC_EQ, 7, FALSE);
3580 x86_push_reg (s->code, X86_ECX);
3581 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, arch_get_throw_exception ());
3582 x86_call_code (s->code, 0);
3584 if (mono_regset_reg_used (s->rs, X86_ECX))
3585 x86_pop_reg (s->code, X86_ECX);
3587 mono_assert (tree->reg1 == X86_EAX &&
3588 tree->reg2 == X86_EDX);
3591 lreg: DIV (lreg, lreg) {
3592 if (mono_regset_reg_used (s->rs, X86_ECX))
3593 x86_push_reg (s->code, X86_ECX);
3595 x86_push_reg (s->code, tree->right->reg2);
3596 x86_push_reg (s->code, tree->right->reg1);
3597 x86_push_reg (s->code, tree->left->reg2);
3598 x86_push_reg (s->code, tree->left->reg1);
3599 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_lldiv);
3600 x86_call_code (s->code, 0);
3601 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3603 if (mono_regset_reg_used (s->rs, X86_ECX))
3604 x86_pop_reg (s->code, X86_ECX);
3606 mono_assert (tree->reg1 == X86_EAX &&
3607 tree->reg2 == X86_EDX);
3610 lreg: REM (lreg, lreg) {
3611 if (mono_regset_reg_used (s->rs, X86_ECX))
3612 x86_push_reg (s->code, X86_ECX);
3614 x86_push_reg (s->code, tree->right->reg2);
3615 x86_push_reg (s->code, tree->right->reg1);
3616 x86_push_reg (s->code, tree->left->reg2);
3617 x86_push_reg (s->code, tree->left->reg1);
3618 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llrem);
3619 x86_call_code (s->code, 0);
3620 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3622 if (mono_regset_reg_used (s->rs, X86_ECX))
3623 x86_pop_reg (s->code, X86_ECX);
3625 mono_assert (tree->reg1 == X86_EAX &&
3626 tree->reg2 == X86_EDX);
3629 lreg: DIV_UN (lreg, lreg) {
3630 if (mono_regset_reg_used (s->rs, X86_ECX))
3631 x86_push_reg (s->code, X86_ECX);
3633 x86_push_reg (s->code, tree->right->reg2);
3634 x86_push_reg (s->code, tree->right->reg1);
3635 x86_push_reg (s->code, tree->left->reg2);
3636 x86_push_reg (s->code, tree->left->reg1);
3637 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_lldiv_un);
3638 x86_call_code (s->code, 0);
3639 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3641 if (mono_regset_reg_used (s->rs, X86_ECX))
3642 x86_pop_reg (s->code, X86_ECX);
3644 mono_assert (tree->reg1 == X86_EAX &&
3645 tree->reg2 == X86_EDX);
3648 lreg: REM_UN (lreg, lreg) {
3649 if (mono_regset_reg_used (s->rs, X86_ECX))
3650 x86_push_reg (s->code, X86_ECX);
3652 x86_push_reg (s->code, tree->right->reg2);
3653 x86_push_reg (s->code, tree->right->reg1);
3654 x86_push_reg (s->code, tree->left->reg2);
3655 x86_push_reg (s->code, tree->left->reg1);
3656 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llrem_un);
3657 x86_call_code (s->code, 0);
3658 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3660 if (mono_regset_reg_used (s->rs, X86_ECX))
3661 x86_pop_reg (s->code, X86_ECX);
3663 mono_assert (tree->reg1 == X86_EAX &&
3664 tree->reg2 == X86_EDX);
/* Calls returning a 64-bit value (EDX:EAX). Each variant: push the `this`
 * pointer if present (with a CMP [lreg],0 acting as an implicit null check),
 * push a hidden pointer to the valuetype return buffer if vtype_num is set,
 * then call through a register / absolute address / vtable slot / interface
 * table, and finally pop the argument area. */
3667 lreg: CALL_I8 (this, reg) {
3669 int lreg = tree->left->reg1;
3670 int rreg = tree->right->reg1;
/* NOTE(review): the listing shows three identical conditions with only the
 * final assert body visible — the statements that retarget `treg` away from
 * lreg/rreg appear to be missing from this copy; confirm against upstream */
3672 if (lreg == treg || rreg == treg)
3674 if (lreg == treg || rreg == treg)
3676 if (lreg == treg || rreg == treg)
3677 mono_assert_not_reached ();
3679 if (tree->left->op != MB_TERM_NOP) {
3680 mono_assert (lreg >= 0);
3681 x86_push_reg (s->code, lreg);
/* touch *this to raise NullReferenceException before the call */
3682 x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0);
3685 if (tree->data.ci.vtype_num) {
3686 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
3687 x86_lea_membase (s->code, treg, X86_EBP, offset);
3688 x86_push_reg (s->code, treg);
3691 x86_call_reg (s->code, rreg);
3693 if (tree->data.ci.args_size)
3694 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
3696 PRINT_REG ("CALL_I8", tree->reg1);
3698 mono_assert (tree->reg1 == X86_EAX);
3699 mono_assert (tree->reg2 == X86_EDX);
3702 lreg: CALL_I8 (this, ADDR_G) {
/* direct call to a global address, patched in via jump info */
3703 int lreg = tree->left->reg1;
3709 if (tree->left->op != MB_TERM_NOP) {
3710 mono_assert (lreg >= 0);
3711 x86_push_reg (s->code, lreg);
3712 x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0);
3715 if (tree->data.ci.vtype_num) {
3716 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
3717 x86_lea_membase (s->code, treg, X86_EBP, offset);
3718 x86_push_reg (s->code, treg);
3721 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
3722 x86_call_code (s->code, 0);
3724 if (tree->data.ci.args_size)
3725 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
3727 mono_assert (tree->reg1 == X86_EAX);
3728 mono_assert (tree->reg2 == X86_EDX);
3731 lreg: CALL_I8 (this, VFUNC_ADDR) {
/* virtual call: load the vtable from *this, then call the slot */
3732 int lreg = tree->left->reg1;
3738 if (tree->left->op != MB_TERM_NOP) {
3739 mono_assert (lreg >= 0);
3740 x86_push_reg (s->code, lreg);
3743 if (tree->data.ci.vtype_num) {
3744 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
3745 x86_lea_membase (s->code, treg, X86_EBP, offset);
3746 x86_push_reg (s->code, treg);
3749 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3750 x86_call_virtual (s->code, lreg,
3751 G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
3753 if (tree->data.ci.args_size)
3754 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
3756 PRINT_REG ("CALL0_I8(VIRTUAL)", tree->reg1);
3757 PRINT_REG ("CALL1_I8(VIRTUAL)", tree->reg2);
3759 mono_assert (tree->reg1 == X86_EAX);
3760 mono_assert (tree->reg2 == X86_EDX);
3763 lreg: CALL_I8 (this, INTF_ADDR) {
/* interface call: vtable -> interface_offsets -> per-interface table -> slot */
3764 int lreg = tree->left->reg1;
3770 if (tree->left->op != MB_TERM_NOP) {
3771 mono_assert (lreg >= 0);
3772 x86_push_reg (s->code, lreg);
3775 if (tree->data.ci.vtype_num) {
3776 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
3777 x86_lea_membase (s->code, treg, X86_EBP, offset);
3778 x86_push_reg (s->code, treg);
3781 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3782 x86_mov_reg_membase (s->code, lreg, lreg,
3783 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
3784 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
3785 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
3787 if (tree->data.ci.args_size)
3788 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
3790 PRINT_REG ("CALL_I8(INTERFACE)", tree->reg1);
3792 mono_assert (tree->reg1 == X86_EAX);
3793 mono_assert (tree->reg2 == X86_EDX);
/* body of the 64-bit return rule (header not present in this listing):
 * move the pair into EDX:EAX, using ECX as scratch when the source pair
 * occupies the destination registers crosswise, then jump to the epilog
 * unless this is the function's final instruction */
3797 if (tree->left->reg1 != X86_EAX) {
3798 if (tree->left->reg2 != X86_EAX) {
3799 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
3800 if (tree->left->reg2 != X86_EDX)
3801 x86_mov_reg_reg (s->code, X86_EDX, tree->left->reg2, 4);
/* crosswise case: reg2 == EAX, so stage it in ECX first */
3803 x86_mov_reg_reg (s->code, X86_ECX, tree->left->reg2, 4);
3804 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
3805 x86_mov_reg_reg (s->code, X86_EDX, X86_ECX, 4);
3807 } else if (tree->left->reg2 != X86_EDX) {
3808 x86_mov_reg_reg (s->code, X86_EDX, tree->left->reg2, 4);
3811 if (!tree->last_instr) {
3812 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
3813 x86_jump32 (s->code, 0);
/* push a 64-bit argument: high word first so the low word ends up at the
 * lower address (little-endian cdecl layout) */
3818 stmt: ARG_I8 (lreg) {
3819 x86_push_reg (s->code, tree->left->reg2);
3820 x86_push_reg (s->code, tree->left->reg1);
/* 64-bit compare-and-set (CEQ/CGT/CGT_UN/CLT/CLT_UN): compare high words
 * first; if they decide the order, short-circuit, otherwise compare the low
 * words unsigned. br[0]/br[1] jump to the 0/1 result writers below.
 * NOTE(review): the `case CEE_*:` labels of this switch are not present in
 * this copy of the file — only the case bodies survive; confirm upstream. */
3823 reg: CSET (COMPARE (lreg, lreg)) {
3825 int lreg1, lreg2, rreg1, rreg2;
3827 lreg1 = tree->left->left->reg1;
3828 lreg2 = tree->left->left->reg2;
3829 rreg1 = tree->left->right->reg1;
3830 rreg2 = tree->left->right->reg2;
/* equality can use a flat compare of both words */
3833 if (tree->data.i == CEE_CEQ) {
3834 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3835 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3836 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3837 x86_patch (br [0], s->code);
3838 x86_set_reg (s->code, X86_CC_EQ, tree->reg1, FALSE);
3839 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
3843 switch (tree->data.i) {
/* signed less-than: operands reversed so the same GT/GE tests apply */
3845 x86_alu_reg_reg (s->code, X86_CMP, rreg2, lreg2);
3846 br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, TRUE);
3847 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3848 x86_alu_reg_reg (s->code, X86_CMP, rreg1, lreg1);
3849 br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
/* unsigned variant of the above */
3852 x86_alu_reg_reg (s->code, X86_CMP, rreg2, lreg2);
3853 br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, FALSE);
3854 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3855 x86_alu_reg_reg (s->code, X86_CMP, rreg1, lreg1);
3856 br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
/* signed greater-than */
3859 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3860 br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, TRUE);
3861 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3862 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3863 br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
/* unsigned greater-than */
3866 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3867 br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, FALSE);
3868 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3869 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3870 br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
3873 g_assert_not_reached ();
3876 /* set result to 1 */
3877 x86_patch (br [1], s->code);
3878 x86_mov_reg_imm (s->code, tree->reg1, 1);
3879 br [3] = s->code; x86_jump8 (s->code, 0);
3881 /* set result to 0 */
3882 x86_patch (br [0], s->code);
3883 x86_patch (br [2], s->code);
3884 x86_mov_reg_imm (s->code, tree->reg1, 0);
3886 x86_patch (br [3], s->code);
/* 64-bit conditional branch. For the ordering conditions: branch on the
 * high-word compare when it decides the result (signed vs. unsigned chosen
 * by the final x86_branch32 argument), fall through to an unsigned low-word
 * compare when the high words are equal. BEQ/BNE compare both words flatly.
 * NOTE(review): as with CSET above, the `case CEE_*:` labels of this switch
 * are not present in this copy — only the case bodies survive. */
3889 stmt: CBRANCH (COMPARE (lreg, lreg)) {
3891 int lreg1, lreg2, rreg1, rreg2;
3893 lreg1 = tree->left->left->reg1;
3894 lreg2 = tree->left->left->reg2;
3895 rreg1 = tree->left->right->reg1;
3896 rreg2 = tree->left->right->reg2;
3898 switch (tree->data.bi.cond) {
/* signed < */
3900 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3901 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3902 x86_branch32 (s->code, X86_CC_LT, 0, TRUE);
3903 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3904 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3905 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3906 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3907 x86_patch (br [0], s->code);
/* unsigned < */
3910 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3911 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3912 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3913 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3914 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3915 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3916 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3917 x86_patch (br [0], s->code);
/* signed > */
3920 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3921 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3922 x86_branch32 (s->code, X86_CC_GT, 0, TRUE);
3923 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3924 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3925 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3926 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3927 x86_patch (br [0], s->code);
/* unsigned > */
3930 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3931 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3932 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3933 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3934 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3935 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3936 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3937 x86_patch (br [0], s->code);
/* == : both words must match */
3940 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3941 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3942 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3943 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3944 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
3945 x86_patch (br [0], s->code);
/* != : either word differing takes the branch */
3948 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3949 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3950 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
3951 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3952 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3953 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
/* signed >= */
3956 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3957 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3958 x86_branch32 (s->code, X86_CC_GT, 0, TRUE);
/* NOTE(review): this CMP repeats the identical compare two lines above —
 * conditional branches do not alter EFLAGS, so it looks redundant; verify */
3959 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3960 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3961 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3962 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3963 x86_branch32 (s->code, X86_CC_GE, 0, FALSE);
3964 x86_patch (br [0], s->code);
/* unsigned >= */
3967 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3968 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3969 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3970 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3971 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3972 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3973 x86_branch32 (s->code, X86_CC_GE, 0, FALSE);
3974 x86_patch (br [0], s->code);
/* signed <= */
3977 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3978 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3979 x86_branch32 (s->code, X86_CC_LT, 0, TRUE);
3980 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3981 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3982 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3983 x86_branch32 (s->code, X86_CC_LE, 0, FALSE);
3984 x86_patch (br [0], s->code);
/* unsigned <= */
3987 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3988 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3989 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3990 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3991 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3992 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3993 x86_branch32 (s->code, X86_CC_LE, 0, FALSE);
3994 x86_patch (br [0], s->code);
3997 g_assert_not_reached ();
4004 #stmt: STLOC (CONV_I4 (freg)) {
4006 # x86_fist_pop_membase (s->code, X86_EBP, tree->data.i, FALSE);
/* Float -> integer conversions. The slow path works around the x87 default
 * of round-to-nearest: it saves the FPU control word on the stack, ORs in
 * 0xc00 (RC = truncate toward zero, as CIL conv.* requires), FISTPs the
 * value through the stack, then restores the original control word. The
 * fast path (mono_use_fast_iconv) uses the SSE-era helper instead. The
 * narrow variants widen the 32-bit result with signed/unsigned extension. */
4009 reg: CONV_I1 (freg) {
4010 if (mono_use_fast_iconv) {
4011 mono_emit_fast_iconv(s, tree);
/* sign-extend the low byte */
4012 x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, FALSE);
4014 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4015 x86_fnstcw_membase(s->code, X86_ESP, 0);
4016 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
/* RC field = 11b: truncate toward zero */
4017 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4018 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4019 x86_fldcw_membase (s->code, X86_ESP, 2);
4020 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4021 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4022 x86_pop_reg (s->code, tree->reg1);
4023 x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, FALSE);
/* restore the saved control word */
4024 x86_fldcw_membase (s->code, X86_ESP, 0);
4025 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4029 reg: CONV_U1 (freg) {
/* same as CONV_I1 but zero-extends the low byte */
4030 if (mono_use_fast_iconv) {
4031 mono_emit_fast_iconv(s, tree);
4032 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
4034 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4035 x86_fnstcw_membase(s->code, X86_ESP, 0);
4036 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4037 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4038 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4039 x86_fldcw_membase (s->code, X86_ESP, 2);
4040 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4041 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4042 x86_pop_reg (s->code, tree->reg1);
4043 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
4044 x86_fldcw_membase (s->code, X86_ESP, 0);
4045 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4049 reg: CONV_I2 (freg) {
/* sign-extends the low 16 bits */
4050 if (mono_use_fast_iconv) {
4051 mono_emit_fast_iconv(s, tree);
4052 x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, TRUE);
4054 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4055 x86_fnstcw_membase(s->code, X86_ESP, 0);
4056 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4057 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4058 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4059 x86_fldcw_membase (s->code, X86_ESP, 2);
4060 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4061 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4062 x86_pop_reg (s->code, tree->reg1);
4063 x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, TRUE);
4064 x86_fldcw_membase (s->code, X86_ESP, 0);
4065 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4069 reg: CONV_U2 (freg) {
/* zero-extends the low 16 bits */
4070 if (mono_use_fast_iconv) {
4071 mono_emit_fast_iconv(s, tree);
4072 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, TRUE);
4074 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4075 x86_fnstcw_membase(s->code, X86_ESP, 0);
4076 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4077 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4078 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4079 x86_fldcw_membase (s->code, X86_ESP, 2);
4080 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4081 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4082 x86_pop_reg (s->code, tree->reg1);
4083 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, TRUE);
4084 x86_fldcw_membase (s->code, X86_ESP, 0);
4085 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4089 reg: CONV_I4 (freg) {
/* full 32-bit result: no widening step needed */
4090 if (mono_use_fast_iconv) {
4091 mono_emit_fast_iconv(s, tree);
4093 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4094 x86_fnstcw_membase(s->code, X86_ESP, 0);
4095 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4096 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4097 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4098 x86_fldcw_membase (s->code, X86_ESP, 2);
4099 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4100 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4101 x86_pop_reg (s->code, tree->reg1);
4102 x86_fldcw_membase (s->code, X86_ESP, 0);
4103 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4107 reg: CONV_U4 (freg) {
/* identical emission to CONV_I4; the bit pattern is reinterpreted unsigned */
4108 if (mono_use_fast_iconv) {
4109 mono_emit_fast_iconv(s, tree);
4111 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4112 x86_fnstcw_membase(s->code, X86_ESP, 0);
4113 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4114 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4115 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4116 x86_fldcw_membase (s->code, X86_ESP, 2);
4117 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4118 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4119 x86_pop_reg (s->code, tree->reg1);
4120 x86_fldcw_membase (s->code, X86_ESP, 0);
4121 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4125 lreg: CONV_I8 (freg) {
/* 64-bit FISTP: reserve 8 bytes and pop the pair into reg1 (low)/reg2 (high) */
4126 if (mono_use_fast_iconv) {
4127 mono_emit_fast_iconv_i8(s, tree);
4129 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4130 x86_fnstcw_membase(s->code, X86_ESP, 0);
4131 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4132 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4133 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4134 x86_fldcw_membase (s->code, X86_ESP, 2);
4135 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
4136 x86_fist_pop_membase (s->code, X86_ESP, 0, TRUE);
4137 x86_pop_reg (s->code, tree->reg1);
4138 x86_pop_reg (s->code, tree->reg2);
4139 x86_fldcw_membase (s->code, X86_ESP, 0);
4140 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4144 lreg: CONV_U8 (freg) {
/* same emission as CONV_I8; bit pattern reinterpreted unsigned */
4145 if (mono_use_fast_iconv) {
4146 mono_emit_fast_iconv_i8(s, tree);
4148 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4149 x86_fnstcw_membase(s->code, X86_ESP, 0);
4150 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4151 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4152 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4153 x86_fldcw_membase (s->code, X86_ESP, 2);
4154 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
4155 x86_fist_pop_membase (s->code, X86_ESP, 0, TRUE);
4156 x86_pop_reg (s->code, tree->reg1);
4157 x86_pop_reg (s->code, tree->reg2);
4158 x86_fldcw_membase (s->code, X86_ESP, 0);
4159 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* FP compare-and-set: FCOMPP compares ST(0)/ST(1) and pops both; the status
 * word is fetched through AX (hence the save/restore of EAX when the result
 * register is not EAX), masked to C0/C2/C3, and matched against the pattern
 * for the requested relation. The `case CEE_*:` labels of the switch are not
 * present in this copy of the file. */
4163 reg: CSET (COMPARE (freg, freg)) {
4164 int treg = tree->reg1;
4166 if (treg != X86_EAX)
4167 x86_push_reg (s->code, X86_EAX);
4169 x86_fcompp (s->code);
4170 x86_fnstsw (s->code);
/* keep only the C0/C2/C3 condition bits */
4171 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
4173 switch (tree->data.i) {
/* 0x4000 = C3 set: operands equal */
4175 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
4176 x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
4177 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
/* 0x0100 = C0 set: ST(0) below */
4180 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4181 x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
4182 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
4185 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4186 x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
4187 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
4190 x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
4191 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
/* NOTE(review): tree->reg1 here is the same register as treg (see line 4164);
 * using treg would be consistent with the neighbouring cases */
4194 x86_set_reg (s->code, X86_CC_EQ, tree->reg1, TRUE);
4195 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
4198 g_assert_not_reached ();
4201 if (treg != X86_EAX)
4202 x86_pop_reg (s->code, X86_EAX);
/* Conversions into the FP stack. x87 registers hold everything in 80-bit
 * extended precision, so freg->freg conversions are no-ops; integer sources
 * are pushed onto the CPU stack and loaded with FILD. */
4205 freg: CONV_R8 (freg) {
4209 freg: CONV_R4 (freg) {
4210 /* fixme: nothing to do ??*/
4213 freg: CONV_R8 (LDIND_I4 (ADDR_G)) {
/* load the int32 straight from its global address */
4214 x86_fild (s->code, tree->left->left->data.p, FALSE);
4217 freg: CONV_R4 (reg) {
/* bounce the int32 through the stack into the FPU */
4218 x86_push_reg (s->code, tree->left->reg1);
4219 x86_fild_membase (s->code, X86_ESP, 0, FALSE);
4220 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4223 freg: CONV_R8 (reg) {
4224 x86_push_reg (s->code, tree->left->reg1);
4225 x86_fild_membase (s->code, X86_ESP, 0, FALSE);
4226 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4229 freg: CONV_R_UN (reg) {
/* unsigned int32: widen to int64 with a zero high word, then FILD 64-bit */
4230 x86_push_imm (s->code, 0);
4231 x86_push_reg (s->code, tree->left->reg1);
4232 x86_fild_membase (s->code, X86_ESP, 0, TRUE);
4233 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
4236 freg: CONV_R_UN (lreg) {
/* unsigned int64: FILD treats the value as signed, so when the top bit is
 * set the 80-bit constant mn (2^64) is added to correct the result */
4237 static guint8 mn[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x3f, 0x40 };
4240 /* load 64bit integer to FP stack */
4241 x86_push_imm (s->code, 0);
4242 x86_push_reg (s->code, tree->left->reg2);
4243 x86_push_reg (s->code, tree->left->reg1);
4244 x86_fild_membase (s->code, X86_ESP, 0, TRUE);
4245 /* store as 80bit FP value */
4246 x86_fst80_membase (s->code, X86_ESP, 0);
4248 /* test if lreg is negative */
/* NOTE(review): this tests reg1 (the low word); the sign of the 64-bit value
 * lives in reg2 — confirm against upstream */
4249 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
4250 br [0] = s->code; x86_branch8 (s->code, X86_CC_GEZ, 0, TRUE);
4252 /* add correction constant mn */
4253 x86_fld80_mem (s->code, mn);
4254 x86_fld80_membase (s->code, X86_ESP, 0);
4255 x86_fp_op_reg (s->code, X86_FADD, 1, TRUE);
4256 x86_fst80_membase (s->code, X86_ESP, 0);
4258 x86_patch (br [0], s->code);
4260 x86_fld80_membase (s->code, X86_ESP, 0);
4261 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
4264 freg: CONV_R4 (lreg) {
/* signed int64 -> float: push the pair and FILD 64-bit */
4265 x86_push_reg (s->code, tree->left->reg2);
4266 x86_push_reg (s->code, tree->left->reg1);
4267 x86_fild_membase (s->code, X86_ESP, 0, TRUE);
4268 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
4271 freg: CONV_R8 (lreg) {
4272 x86_push_reg (s->code, tree->left->reg2);
4273 x86_push_reg (s->code, tree->left->reg1);
4274 x86_fild_membase (s->code, X86_ESP, 0, TRUE);
4275 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
/* body of the R4 constant rule (header and special-case lines for 0.0/1.0
 * are not present in this listing): load the literal from memory */
4279 float f = *(float *)tree->data.p;
4286 x86_fld (s->code, tree->data.p, FALSE);
/* body of the R8 constant rule, same structure as the R4 one above */
4290 double d = *(double *)tree->data.p;
4297 x86_fld (s->code, tree->data.p, TRUE);
/* FP loads through the four addressing modes (AMImmediate, AMBase, AMIndex,
 * AMBaseIndex — see the enum near the top of the file). The scaled-index
 * modes fold the address into indexreg with LEA first, since FLD has no
 * scaled-index form in this macro set. */
4300 freg: LDIND_R4 (addr) {
4302 switch (tree->left->data.ainfo.amode) {
4305 x86_fld (s->code, tree->left->data.ainfo.offset, FALSE);
4309 x86_fld_membase (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, FALSE);
4312 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, X86_NOBASEREG,
4313 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4314 tree->left->data.ainfo.shift);
4315 x86_fld_membase (s->code, tree->left->data.ainfo.indexreg, 0, FALSE);
4318 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.basereg,
4319 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4320 tree->left->data.ainfo.shift);
4321 x86_fld_membase (s->code, tree->left->data.ainfo.indexreg, 0, FALSE);
4326 freg: LDIND_R8 (addr) {
/* identical to LDIND_R4 but loads a 64-bit double (last argument TRUE) */
4328 switch (tree->left->data.ainfo.amode) {
4331 x86_fld (s->code, tree->left->data.ainfo.offset, TRUE);
4335 x86_fld_membase (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, TRUE);
4338 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, X86_NOBASEREG,
4339 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4340 tree->left->data.ainfo.shift);
4341 x86_fld_membase (s->code, tree->left->data.ainfo.indexreg, 0, TRUE);
4344 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.basereg,
4345 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4346 tree->left->data.ainfo.shift);
4347 x86_fld_membase (s->code, tree->left->data.ainfo.indexreg, 0, TRUE);
4353 freg: ADD (freg, freg) {
4354 x86_fp_op_reg (s->code, X86_FADD, 1, TRUE);
4357 freg: SUB (freg, freg) {
4358 x86_fp_op_reg (s->code, X86_FSUB, 1, TRUE);
# FP multiply with pop, result in ST(0).
4361 freg: MUL (freg, freg) {
4362 x86_fp_op_reg (s->code, X86_FMUL, 1, TRUE);
# FP divide with pop, result in ST(0).
4365 freg: DIV (freg, freg) {
4366 x86_fp_op_reg (s->code, X86_FDIV, 1, TRUE);
# CIL ckfinite: raise ArithmeticException unless ST(0) is a finite number.
# Classifies ST(0) (the fxam presumably sits on the line omitted between
# 4370 and 4372 — TODO confirm), copies the status word to AX, masks the
# C3/C0 condition bits (0x4100) and compares against the "normal finite"
# pattern 0x0100. EAX is preserved around the sequence.
4369 freg: CKFINITE (freg) {
4370 x86_push_reg (s->code, X86_EAX);
4372 x86_fnstsw (s->code);
4373 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4100);
4374 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4375 x86_pop_reg (s->code, X86_EAX);
4376 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NE, FALSE, "ArithmeticException");
# FP remainder via fprem1 (IEEE remainder). fprem1 reduces the operand
# only partially for large quotients, so it is retried while the C2
# status bit (0x0400) remains set; afterwards the divisor in ST(1) is
# discarded with fstp, leaving the remainder in ST(0).
4379 freg: REM (freg, freg) {
4382 /* we need to exchange ST(0) with ST(1) */
4383 x86_fxch (s->code, 1);
4385 /* this requires a loop, because fprem1 sometimes
4386 * returns a partial remainder */
4388 x86_fprem1 (s->code);
4389 x86_fnstsw (s->code);
4390 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x0400);
4392 x86_branch8 (s->code, X86_CC_NE, l1 - l2, FALSE);
/* pop the divisor, leaving the remainder */
4395 x86_fstp (s->code, 1);
# Store ST(0) to memory as a single (fstp m32), dispatching on the
# addressing mode of the address child; the FALSE/TRUE trailing args are
# (is_double, pop). Case labels are on lines omitted from this view.
4404 stmt: STIND_R4 (addr, freg) {
4406 switch (tree->left->data.ainfo.amode) {
/* absolute address */
4409 x86_fst (s->code, tree->left->data.ainfo.offset, FALSE, TRUE);
/* base + displacement (trailing args on an omitted line) */
4413 x86_fst_membase (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
/* scaled index, no base: compute address with lea first */
4417 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, X86_NOBASEREG,
4418 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4419 tree->left->data.ainfo.shift);
4420 x86_fst_membase (s->code, tree->left->data.ainfo.indexreg, 0, FALSE, TRUE);
/* base + scaled index */
4423 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.basereg,
4424 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4425 tree->left->data.ainfo.shift);
4426 x86_fst_membase (s->code, tree->left->data.ainfo.indexreg, 0, FALSE, TRUE);
# Store ST(0) to memory as a double (fstp m64) — mirrors STIND_R4 with
# is_double = TRUE. Case labels omitted from this view.
4431 stmt: STIND_R8 (addr, freg) {
4433 switch (tree->left->data.ainfo.amode) {
/* absolute address */
4436 x86_fst (s->code, tree->left->data.ainfo.offset, TRUE, TRUE);
/* base + displacement (trailing args on an omitted line) */
4440 x86_fst_membase (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
/* scaled index, no base */
4444 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, X86_NOBASEREG,
4445 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4446 tree->left->data.ainfo.shift);
4447 x86_fst_membase (s->code, tree->left->data.ainfo.indexreg, 0, TRUE, TRUE);
/* base + scaled index */
4450 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.basereg,
4451 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4452 tree->left->data.ainfo.shift);
4453 x86_fst_membase (s->code, tree->left->data.ainfo.indexreg, 0, TRUE, TRUE);
# Store an R4 field of an object that may be a transparent proxy.
# Runtime check: if the object's vtable class is the transparent-proxy
# class, spill ST(0) to the stack and call mono_store_remote_field
# (ptr-to-value, field, klass, object — pushed in reverse); otherwise
# store directly into the object at the field offset.
4458 stmt: REMOTE_STIND_R4 (reg, freg) {
4461 int lreg = tree->left->reg1;
4467 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
4468 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
4469 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
4471 /* this is a transparent proxy - remote the call */
4473 /* save value to stack */
4474 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4475 x86_fst_membase (s->code, X86_ESP, 0, FALSE, TRUE);
4477 x86_push_reg (s->code, X86_ESP);
4478 x86_push_imm (s->code, tree->data.fi.field);
4479 x86_push_imm (s->code, tree->data.fi.klass);
4480 x86_push_reg (s->code, lreg);
4481 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
4482 x86_call_code (s->code, 0);
/* pop the 4 args (16) + the 4-byte value spill */
4483 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
4485 br [1] = s->code; x86_jump8 (s->code, 0);
/* non-proxy fast path: direct store */
4487 x86_patch (br [0], s->code);
/* valuetypes are unboxed: skip the MonoObject header */
4488 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
4489 tree->data.fi.field->offset;
4490 x86_fst_membase (s->code, lreg, offset, FALSE, TRUE);
4492 x86_patch (br [1], s->code);
# Same as REMOTE_STIND_R4 but for an 8-byte double: the spill reserves
# 8 bytes and the post-call stack cleanup is 24 (16 args + 8 value).
4495 stmt: REMOTE_STIND_R8 (reg, freg) {
4498 int lreg = tree->left->reg1;
4504 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
4505 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
4506 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
4508 /* this is a transparent proxy - remote the call */
4510 /* save value to stack */
4511 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
4512 x86_fst_membase (s->code, X86_ESP, 0, TRUE, TRUE);
4514 x86_push_reg (s->code, X86_ESP);
4515 x86_push_imm (s->code, tree->data.fi.field);
4516 x86_push_imm (s->code, tree->data.fi.klass);
4517 x86_push_reg (s->code, lreg);
4518 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
4519 x86_call_code (s->code, 0);
4520 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 24);
4522 br [1] = s->code; x86_jump8 (s->code, 0);
/* non-proxy fast path: direct store */
4524 x86_patch (br [0], s->code);
4525 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
4526 tree->data.fi.field->offset;
4527 x86_fst_membase (s->code, lreg, offset, TRUE, TRUE);
4529 x86_patch (br [1], s->code);
# Push an R4 call argument: reserve 4 bytes and pop ST(0) into them.
4532 stmt: ARG_R4 (freg) {
4533 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4534 x86_fst_membase (s->code, X86_ESP, 0, FALSE, TRUE);
# Push an R8 call argument: reserve 8 bytes and pop ST(0) into them.
4537 stmt: ARG_R8 (freg) {
4538 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
4539 x86_fst_membase (s->code, X86_ESP, 0, TRUE, TRUE);
4542 # fixme: we need to implement unordered and ordered compares
# Conditional branch on an FP compare: fcompp pops both operands and the
# C3/C2/C0 bits of the status word (mask 0x4500) encode the relation.
# Each condition case (labels on lines omitted from this view) optionally
# narrows with a CMP and emits a patchable 32-bit branch to the target
# basic block. Per the note above, unordered/ordered variants are not
# yet distinguished.
4544 stmt: CBRANCH (COMPARE (freg, freg)) {
4546 x86_fcompp (s->code);
4547 x86_fnstsw (s->code);
4548 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
4550 switch (tree->data.bi.cond) {
/* equal: C3 set alone (presumably CEE_BEQ — TODO confirm case labels) */
4552 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4553 x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
4556 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4557 x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
/* 0x0100 = C0: ST(0) < ST(1) relation */
4560 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4561 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4562 x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
4565 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4566 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4567 x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
/* 0x4000 = C3: equality bit */
4570 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
4571 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4572 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
4575 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
4576 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4577 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
4580 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4581 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
4584 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4585 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
4588 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4589 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4590 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
4593 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4594 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4595 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
/* unknown condition code */
4598 g_assert_not_reached ();
# Indirect call returning an R8 (result left in ST(0) by the callee).
# treg must not alias either operand register (the three retry steps are
# on partially omitted lines; the final fallback asserts). If a 'this'
# argument exists it is pushed and null-checked via a 0-byte CMP probe;
# a hidden valuetype-return pointer is pushed last when vtype_num is set.
4602 freg: CALL_R8 (this, reg) {
4604 int lreg = tree->left->reg1;
4605 int rreg = tree->right->reg1;
4607 if (lreg == treg || rreg == treg)
4609 if (lreg == treg || rreg == treg)
4611 if (lreg == treg || rreg == treg)
4612 mono_assert_not_reached ();
4614 if (tree->left->op != MB_TERM_NOP) {
4615 mono_assert (lreg >= 0);
4616 x86_push_reg (s->code, lreg);
/* implicit null check: faults here if 'this' is NULL */
4617 x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0);
4620 if (tree->data.ci.vtype_num) {
4621 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
4622 x86_lea_membase (s->code, treg, X86_EBP, offset);
4623 x86_push_reg (s->code, treg);
4626 x86_call_reg (s->code, rreg);
/* caller-cleanup of pushed arguments */
4628 if (tree->data.ci.args_size)
4629 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
# Direct call to a known absolute address returning an R8: the call
# site is registered as MONO_JUMP_INFO_ABS so the displacement is
# patched later. Same this-push/null-check and vtype-return handling
# as the indirect variant above.
4632 freg: CALL_R8 (this, ADDR_G) {
4633 int lreg = tree->left->reg1;
4639 if (tree->left->op != MB_TERM_NOP) {
4640 mono_assert (lreg >= 0);
4641 x86_push_reg (s->code, lreg);
/* implicit null check on 'this' */
4642 x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0);
4645 if (tree->data.ci.vtype_num) {
4646 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
4647 x86_lea_membase (s->code, treg, X86_EBP, offset);
4648 x86_push_reg (s->code, treg);
4651 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
4652 x86_call_code (s->code, 0);
4654 if (tree->data.ci.args_size)
4655 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
# Interface call returning an R8: load the vtable from the object,
# follow interface_offsets, index by interface_id to reach the
# per-interface slot table, then call through the method slot.
# lreg is clobbered by the vtable walk (after 'this' has been pushed).
4658 freg: CALL_R8 (this, INTF_ADDR) {
4659 int lreg = tree->left->reg1;
4665 if (tree->left->op != MB_TERM_NOP) {
4666 mono_assert (lreg >= 0);
4667 x86_push_reg (s->code, lreg);
4670 if (tree->data.ci.vtype_num) {
4671 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
4672 x86_lea_membase (s->code, treg, X86_EBP, offset);
4673 x86_push_reg (s->code, treg);
/* object -> MonoVTable -> interface_offsets[interface_id] */
4676 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
4677 x86_mov_reg_membase (s->code, lreg, lreg,
4678 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
4679 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
4680 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
4682 if (tree->data.ci.args_size)
4683 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
# Virtual call returning an R8: load the vtable from the object and
# call indirectly through vtable->vtable[slot].
4686 freg: CALL_R8 (this, VFUNC_ADDR) {
4687 int lreg = tree->left->reg1;
4693 if (tree->left->op != MB_TERM_NOP) {
4694 mono_assert (lreg >= 0);
4695 x86_push_reg (s->code, lreg);
4698 if (tree->data.ci.vtype_num) {
4699 int offset = VARINFO (s, tree->data.ci.vtype_num).offset;
4700 x86_lea_membase (s->code, treg, X86_EBP, offset);
4701 x86_push_reg (s->code, treg);
/* object -> MonoVTable, then indirect call through the method slot */
4704 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
4705 x86_call_virtual (s->code, lreg,
4706 G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
4708 if (tree->data.ci.args_size)
4709 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->data.ci.args_size);
4713 if (!tree->last_instr) {
4714 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
4715 x86_jump32 (s->code, 0);
4728 x86_fsqrt (s->code);
4731 # support for value types
# Valuetype load is by reference: just propagate the address register.
4733 reg: LDIND_OBJ (reg) {
4734 if (tree->left->reg1 != tree->reg1)
4735 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
# Valuetype store: memcpy(dest, src, size) via the MEMCOPY helper.
# tree->data.i holds the value size in bytes; args are pushed in
# cdecl reverse order and the 12 bytes popped by the caller.
4738 stmt: STIND_OBJ (reg, reg) {
4739 mono_assert (tree->data.i > 0);
4741 x86_push_imm (s->code, tree->data.i);
4742 x86_push_reg (s->code, tree->right->reg1);
4743 x86_push_reg (s->code, tree->left->reg1);
4744 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, MEMCOPY);
4745 x86_call_code (s->code, 0);
4746 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
# Valuetype field store on a possibly-remote object: proxies go through
# mono_store_remote_field (value passed by reference in rreg); local
# objects take a MEMCOPY into the field address.
4749 stmt: REMOTE_STIND_OBJ (reg, reg) {
4752 int lreg = tree->left->reg1;
4753 int rreg = tree->right->reg1;
4762 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
4763 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
4764 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
4766 /* this is a transparent proxy - remote the call */
4768 x86_push_reg (s->code, rreg);
4769 x86_push_imm (s->code, tree->data.fi.field);
4770 x86_push_imm (s->code, tree->data.fi.klass);
4771 x86_push_reg (s->code, lreg);
4772 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
4773 x86_call_code (s->code, 0);
4774 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
4776 br [1] = s->code; x86_jump8 (s->code, 0);
/* non-proxy fast path: memcpy into the field */
4778 x86_patch (br [0], s->code);
/* valuetypes are unboxed: skip the MonoObject header */
4780 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
4781 tree->data.fi.field->offset;
4783 size = mono_class_value_size (tree->data.fi.field->type->data.klass, NULL);
4784 x86_push_imm (s->code, size);
4785 x86_push_reg (s->code, tree->right->reg1);
/* NOTE(review): lreg is advanced destructively to the field address */
4786 x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, offset);
4787 x86_push_reg (s->code, tree->left->reg1);
4788 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, MEMCOPY);
4789 x86_call_code (s->code, 0);
4790 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
4792 x86_patch (br [1], s->code);
# A 4-byte valuetype argument that folded to a constant: push it directly.
4795 stmt: ARG_OBJ (CONST_I4) {
4796 x86_push_imm (s->code, tree->left->data.i);
# Push a valuetype argument by copying it into freshly reserved stack
# space with MEMCOPY. EAX/EDX/ECX are saved around the call since
# MEMCOPY may clobber them; the 5*4 lea skips those three saves plus
# the two values already pushed to reach the reserved area.
4799 stmt: ARG_OBJ (reg) {
4800 int size = tree->data.i;
4803 mono_assert (size > 0);
4807 /* reserve space for the argument */
4808 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, sa);
4810 x86_push_reg (s->code, X86_EAX);
4811 x86_push_reg (s->code, X86_EDX);
4812 x86_push_reg (s->code, X86_ECX);
4814 x86_push_imm (s->code, size);
4815 x86_push_reg (s->code, tree->left->reg1);
4816 x86_lea_membase (s->code, X86_EAX, X86_ESP, 5*4);
4817 x86_push_reg (s->code, X86_EAX);
4819 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, MEMCOPY);
4820 x86_call_code (s->code, 0);
4821 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
4823 x86_pop_reg (s->code, X86_ECX);
4824 x86_pop_reg (s->code, X86_EDX);
4825 x86_pop_reg (s->code, X86_EAX);
# Return a valuetype: copy it into the caller-provided return buffer
# (its address is the hidden first argument at [ebp+8]), then jump to
# the shared epilog unless this is the last instruction.
4828 stmt: RET_OBJ (reg) {
4829 int size = tree->data.i;
4831 x86_push_imm (s->code, size);
4832 x86_push_reg (s->code, tree->left->reg1);
4833 x86_push_membase (s->code, X86_EBP, 8);
4836 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, MEMCOPY);
4837 x86_call_code (s->code, 0);
4839 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
4841 if (!tree->last_instr) {
4842 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
4843 x86_jump32 (s->code, 0);
/* 64-bit multiply helper called from JITted code (body on omitted lines). */
4852 mono_llmult (gint64 a, gint64 b)
/*
 * Unsigned 64x64 multiply with overflow detection, operands split into
 * 32-bit halves (al/ah, bl/bh). On overflow stores an OverflowException
 * into *exc; the early goto (condition on an omitted line, presumably
 * ah && bh != 0 — TODO confirm) catches the high*high term, and t1
 * checks the cross terms fitting in 32 bits.
 */
4858 mono_llmult_ovf_un (gpointer *exc, guint32 al, guint32 ah, guint32 bl, guint32 bh)
4862 // fixme: this is incredibly slow
4865 goto raise_exception;
4867 res = (guint64)al * (guint64)bl;
4869 t1 = (guint64)ah * (guint64)bl + (guint64)al * (guint64)bh;
4871 if (t1 > 0xffffffff)
4872 goto raise_exception;
4874 res += ((guint64)t1) << 32;
4880 *exc = mono_get_exception_overflow ();
/*
 * Signed 64x64 multiply that is SUPPOSED to detect overflow — the
 * author's fixme below still stands: no overflow check is visible here,
 * so conv.ovf semantics are not enforced on this path (TODO: implement,
 * mirroring the unsigned variant above).
 */
4885 mono_llmult_ovf (gpointer *exc, guint32 al, gint32 ah, guint32 bl, gint32 bh)
4889 // fixme: check for overflow
4891 res = (gint64)al * (gint64)bl;
4893 t1 = (gint64)ah * bl + al * (gint64)bh;
4895 res += ((gint64)t1) << 32;
4901 *exc = mono_get_exception_overflow ();
/* 64-bit signed/unsigned division and remainder helpers called from
 * JITted code (bodies on omitted lines). */
4907 mono_lldiv (gint64 a, gint64 b)
4913 mono_llrem (gint64 a, gint64 b)
4919 mono_lldiv_un (guint64 a, guint64 b)
4925 mono_llrem_un (guint64 a, guint64 b)
/* JIT-callable wrapper: allocate an array in the current domain. */
4931 mono_array_new_wrapper (MonoClass *eclass, guint32 n)
4933 MonoDomain *domain = mono_domain_get ();
4935 return mono_array_new (domain, eclass, n);
/* JIT-callable wrapper: allocate an object in the current domain. */
4939 mono_object_new_wrapper (MonoClass *klass)
4941 MonoDomain *domain = mono_domain_get ();
4943 return mono_object_new (domain, klass);
/* JIT-callable wrapper: intern/load a string literal by token index. */
4947 mono_ldstr_wrapper (MonoImage *image, guint32 ind)
4949 MonoDomain *domain = mono_domain_get ();
4951 return mono_ldstr (domain, image, ind);
/*
 * Address of a static field: static storage lives in the class vtable's
 * data area for the current domain; 'offset' is relative to that area.
 */
4955 mono_ldsflda (MonoClass *klass, int offset)
4957 MonoDomain *domain = mono_domain_get ();
4961 vt = mono_class_vtable (domain, klass);
4962 addr = (char*)(vt->data) + offset;
/*
 * Debug wrapper around memcpy: logs src/dest/size and a hex dump of the
 * first bytes of the source, then performs the copy and returns dest.
 */
4968 debug_memcopy (void *dest, const void *src, size_t n)
/* FIX: 'n' is size_t but was printed with %d — a format/argument
 * mismatch (undefined behavior where size_t != int); cast explicitly. */
4972 printf ("MEMCPY(%p to %p [%d]) ", src, dest, (int)n);
/* 'l' caps the dump length (declared on a line outside this view) */
4974 for (i = 0; i < l; i++)
4975 printf ("%02x ", *((guint8 *)src + i));
4978 return memcpy (dest, src, n);
/*
 * Emit a fast float->int32 conversion with C truncation semantics.
 * fist rounds to nearest, so the sequence also stores the float bits
 * and a doubled difference, then fixes the rounded value up or down
 * with SBB depending on the sign and the rounding error. The exact
 * statement order is load-bearing (x87 stack + flags); do not reorder.
 */
4981 void mono_emit_fast_iconv (MBCGEN_TYPE* s, MBTREE_TYPE* tree)
4984 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 12);
4985 x86_fist_membase (s->code, X86_ESP, 8, TRUE); // rounded value
4986 x86_fst_membase (s->code, X86_ESP, 0, FALSE, FALSE); // float value
4987 x86_fp_int_op_membase (s->code, X86_FSUB, X86_ESP, 8, TRUE);
4988 x86_fst_membase (s->code, X86_ESP, 4, FALSE, TRUE); // diff
/* sign test on the raw float bits (sign bit of IEEE single) */
4990 x86_pop_reg (s->code, tree->reg1); // float value
4991 x86_test_reg_reg (s->code, tree->reg1, tree->reg1);
4992 br[0] = s->code; x86_branch8 (s->code, X86_CC_S, 0, TRUE);
/* positive: doubled diff's carry decides whether to subtract 1 */
4994 x86_pop_reg (s->code, tree->reg1); // diff
4995 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
4996 x86_pop_reg (s->code, tree->reg1); // rounded value
4997 x86_alu_reg_imm (s->code, X86_SBB, tree->reg1, 0);
4998 br[1] = s->code; x86_jump8 (s->code, 0);
/* negative: adjust toward zero instead */
5001 x86_patch (br[0], s->code);
5003 x86_pop_reg (s->code, tree->reg1); // diff
5004 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
5005 x86_pop_reg (s->code, tree->reg1); // rounded value
5006 br[2] = s->code; x86_branch8 (s->code, X86_CC_Z, 0, FALSE);
5007 x86_alu_reg_imm (s->code, X86_SBB, tree->reg1, -1);
5008 x86_patch (br[1], s->code);
5009 x86_patch (br[2], s->code);
/*
 * 64-bit variant of the fast conversion above: fistp stores a qword
 * (reg1 = low word, reg2 = high word) and the SBB correction is
 * propagated through both halves. Same caveat: the x87-stack and
 * flag dependencies make the statement order load-bearing.
 */
5012 void mono_emit_fast_iconv_i8 (MBCGEN_TYPE* s, MBTREE_TYPE* tree)
5015 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 16);
/* duplicate ST(0) so the rounded copy can be popped by fistp */
5016 x86_fld_reg (s->code, 0);
5017 x86_fist_pop_membase (s->code, X86_ESP, 8, TRUE); // rounded value (qword)
5018 x86_fst_membase (s->code, X86_ESP, 0, FALSE, FALSE); // float value
5019 x86_fild_membase (s->code, X86_ESP, 8, TRUE);
5020 x86_fp_op_reg (s->code, X86_FSUB, 1, TRUE); // diff
5021 x86_fst_membase (s->code, X86_ESP, 4, FALSE, TRUE); // diff
/* sign test on the raw float bits */
5023 x86_pop_reg (s->code, tree->reg1); // float value
5024 x86_test_reg_reg (s->code, tree->reg1, tree->reg1);
5025 br[0] = s->code; x86_branch8 (s->code, X86_CC_S, 0, TRUE);
/* positive: borrow from doubled diff, rippled through both halves */
5027 x86_pop_reg (s->code, tree->reg1); // diff
5028 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
5029 x86_pop_reg (s->code, tree->reg1); // rounded value
5030 x86_pop_reg (s->code, tree->reg2);
5031 x86_alu_reg_imm (s->code, X86_SBB, tree->reg1, 0);
5032 x86_alu_reg_imm (s->code, X86_SBB, tree->reg2, 0);
5033 br[1] = s->code; x86_jump8 (s->code, 0);
/* negative: adjust toward zero */
5036 x86_patch (br[0], s->code);
5038 x86_pop_reg (s->code, tree->reg1); // diff
5039 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
5040 x86_pop_reg (s->code, tree->reg1); // rounded value
5041 x86_pop_reg (s->code, tree->reg2);
5042 br[2] = s->code; x86_branch8 (s->code, X86_CC_Z, 0, FALSE);
5043 x86_alu_reg_imm (s->code, X86_SBB, tree->reg1, -1);
5044 x86_alu_reg_imm (s->code, X86_SBB, tree->reg2, -1);
5045 x86_patch (br[1], s->code);
5046 x86_patch (br[2], s->code);
/*
 * ldvirtftn helper: resolve the native code address for virtual slot
 * 'slot' of the runtime type of 'this' (unwrapping transparent proxies,
 * which route through a remoting trampoline). Compilation is serialized
 * on metadata_section.
 */
5050 mono_ldvirtftn (MonoObject *this, int slot)
5055 gboolean is_proxy = FALSE;
5058 if ((class = this->vtable->klass) == mono_defaults.transparent_proxy_class) {
5059 class = ((MonoTransparentProxy *)this)->klass;
/* FIX: 'slot' is a 0-based index into class->vtable (see the access
 * below), so the bound must be strict: slot == vtable_size would read
 * one past the end. The sibling mono_ldintftn already uses '<'. */
5064 g_assert (slot < class->vtable_size);
5066 m = class->vtable [slot];
5069 return mono_jit_create_remoting_trampoline (m);
5071 EnterCriticalSection (metadata_section);
5072 addr = mono_compile_method (m);
5073 LeaveCriticalSection (metadata_section);
/*
 * Interface variant of ldvirtftn: translate the interface slot through
 * class->interface_offsets into a vtable slot, then resolve and compile
 * the target method (proxies get a remoting trampoline). Compilation is
 * serialized on metadata_section.
 */
5079 mono_ldintftn (MonoObject *this, int slot)
5084 gboolean is_proxy = FALSE;
5087 if ((class = this->vtable->klass) == mono_defaults.transparent_proxy_class) {
5088 class = ((MonoTransparentProxy *)this)->klass;
5092 g_assert (slot < class->interface_count);
/* interface slot -> concrete vtable slot */
5094 slot = class->interface_offsets [slot];
5096 m = class->vtable [slot];
5099 return mono_jit_create_remoting_trampoline (m);
5101 EnterCriticalSection (metadata_section);
5102 addr = mono_compile_method (m);
5103 LeaveCriticalSection (metadata_section);
5108 gpointer mono_ldftn (MonoMethod *method)
5112 EnterCriticalSection (metadata_section);
5113 addr = mono_compile_method (method);
5114 LeaveCriticalSection (metadata_section);