2 * x86.brg: X86 code generator
5 * Dietmar Maurer (dietmar@ximian.com)
7 * (C) 2001 Ximian, Inc.
16 #ifndef PLATFORM_WIN32
18 #include <sys/syscall.h>
21 #include <mono/metadata/blob.h>
22 #include <mono/metadata/metadata.h>
23 #include <mono/metadata/loader.h>
24 #include <mono/metadata/object.h>
25 #include <mono/metadata/tabledefs.h>
26 #include <mono/metadata/appdomain.h>
27 #include <mono/metadata/marshal.h>
28 #include <mono/metadata/threads.h>
29 #include <mono/arch/x86/x86-codegen.h>
35 * Pull the list of opcodes
37 #define OPDEF(a,b,c,d,e,f,g,h,i,j) \
41 #include "mono/cil/opcode.def"
46 /* alignment of activation frames */
47 #define MONO_FRAME_ALIGNMENT 4
49 void print_lmf (void);
51 #define MBTREE_TYPE MBTree
52 #define MBCGEN_TYPE MonoFlowGraph
53 #define MBCOST_DATA MonoFlowGraph
54 #define MBALLOC_STATE mono_mempool_alloc (data->mp, sizeof (MBState))
57 AMImmediate = 0, // ptr
59 AMIndex = 2, // V[REG*X]
60 AMBaseIndex = 3, // V[REG*X][REG]
73 unsigned last_instr:1;
96 MonoClassField *field;
100 MonoJitCallInfo call_info;
101 MonoJitArgumentInfo arg_info;
105 gint64 mono_llmult (gint64 a, gint64 b);
106 guint64 mono_llmult_ovf (gpointer *exc, guint32 al, gint32 ah, guint32 bl, gint32 bh);
107 guint64 mono_llmult_ovf_un (gpointer *exc, guint32 al, guint32 ah, guint32 bl, guint32 bh);
108 gint64 mono_lldiv (gint64 a, gint64 b);
109 gint64 mono_llrem (gint64 a, gint64 b);
110 guint64 mono_lldiv_un (guint64 a, guint64 b);
111 guint64 mono_llrem_un (guint64 a, guint64 b);
112 gpointer mono_ldsflda (MonoClass *klass, int offset);
114 gpointer mono_ldvirtftn (MonoObject *this, int slot);
115 gpointer mono_ldintftn (MonoObject *this, int slot);
116 gpointer mono_ldftn (MonoMethod *method);
118 void mono_emit_fast_iconv (MBCGEN_TYPE* s, MBTREE_TYPE* tree);
119 void mono_emit_fast_iconv_i8 (MBCGEN_TYPE* s, MBTREE_TYPE* tree);
120 void mono_emit_stack_alloc (MBCGEN_TYPE* s, MBTREE_TYPE* tree);
121 void mono_emit_stack_alloc_const (MBCGEN_TYPE* s, MBTREE_TYPE* tree, int size);
124 mono_array_new_wrapper (MonoClass *eclass, guint32 n);
126 mono_object_new_wrapper (MonoClass *klass);
128 mono_ldstr_wrapper (MonoImage *image, guint32 ind);
131 get_mono_object_isinst (void);
133 #define MB_OPT_LEVEL 1
135 #if MB_OPT_LEVEL == 0
136 #define MB_USE_OPT1(c) 65535
137 #define MB_USE_OPT2(c) 65535
139 #if MB_OPT_LEVEL == 1
140 #define MB_USE_OPT1(c) c
141 #define MB_USE_OPT2(c) 65535
143 #if MB_OPT_LEVEL >= 2
144 #define MB_USE_OPT1(c) c
145 #define MB_USE_OPT2(c) c
/*
 * REAL_PRINT_REG(text,reg): debug helper that emits code to print
 * "<text> <regnum> <regvalue>" via printf at run time, saving and
 * restoring the caller-saved registers (EAX/EDX/ECX) around the call.
 * Wrapped in do { ... } while (0) so the multi-statement expansion is
 * safe inside unbraced if/else bodies at the use site.
 */
#define REAL_PRINT_REG(text,reg) do { \
	mono_assert (reg >= 0); \
	x86_push_reg (s->code, X86_EAX); \
	x86_push_reg (s->code, X86_EDX); \
	x86_push_reg (s->code, X86_ECX); \
	x86_push_reg (s->code, reg); \
	x86_push_imm (s->code, reg); \
	x86_push_imm (s->code, text " %d %p\n"); \
	x86_mov_reg_imm (s->code, X86_EAX, printf); \
	x86_call_reg (s->code, X86_EAX); \
	/* pop the three printf arguments (format, regnum, regvalue) */ \
	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 3*4); \
	x86_pop_reg (s->code, X86_ECX); \
	x86_pop_reg (s->code, X86_EDX); \
	x86_pop_reg (s->code, X86_EAX); \
} while (0)
166 debug_memcopy (void *dest, const void *src, size_t n);
169 #define MEMCOPY debug_memcopy
170 #define PRINT_REG(text,reg) REAL_PRINT_REG(text,reg)
173 #define MEMCOPY memcpy
175 #define PRINT_REG(x,y)
179 /* The call instruction for virtual functions must have a known
180 * size (used by x86_magic_trampoline)
182 #define x86_call_virtual(inst,basereg,disp) \
184 *(inst)++ = (unsigned char)0xff; \
185 x86_address_byte ((inst), 2, 2, (basereg)); \
186 x86_imm_emit32 ((inst), (disp)); \
189 /* emit an exception if condition is fail */
190 #define EMIT_COND_SYSTEM_EXCEPTION(cond,signed,exc_name) \
193 x86_branch8 (s->code, cond, 10, signed); \
194 x86_push_imm (s->code, exc_name); \
195 t = arch_get_throw_exception_by_name (); \
196 mono_add_jump_info (s, s->code, \
197 MONO_JUMP_INFO_ABS, t); \
198 x86_call_code (s->code, 0); \
201 #define X86_ARG_PAD(pad) do { \
204 x86_push_reg (s->code, X86_EAX); \
206 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, pad); \
210 #define X86_CALL_END do { \
211 int size = tree->data.call_info.frame_size; \
213 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, size); \
216 #define X86_CALL_BEGIN do { \
217 int pad = tree->data.call_info.pad; \
219 if (tree->left->op != MB_TERM_NOP) { \
220 mono_assert (lreg >= 0); \
221 x86_push_reg (s->code, lreg); \
222 x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0); \
224 if (tree->data.call_info.vtype_num) { \
225 int offset = VARINFO (s, tree->data.call_info.vtype_num).offset; \
226 x86_lea_membase (s->code, treg, X86_EBP, offset); \
227 x86_push_reg (s->code, treg); \
231 /* we use this macro to move one lreg to another - source and
232 destination may overlap, but the register allocator has to
233 make sure that ((d1 < d2) && (s1 < s2))
235 #define MOVE_LREG(d1,d2,s1,s2) \
237 g_assert ((d1 < d2) && (s1 < s2)); \
238 if ((d1) <= (s1)) { \
240 x86_mov_reg_reg (s->code, d1, s1, 4); \
242 x86_mov_reg_reg (s->code, d2, s2, 4); \
245 x86_mov_reg_reg (s->code, d2, s2, 4); \
247 x86_mov_reg_reg (s->code, d1, s1, 4); \
255 # terminal definitions
259 %term CONST_I4 CONST_I8 CONST_R4 CONST_R8
260 %term LDIND_I1 LDIND_U1 LDIND_I2 LDIND_U2 LDIND_I4 LDIND_I8 LDIND_R4 LDIND_R8 LDIND_OBJ
261 %term STIND_I1 STIND_I2 STIND_I4 STIND_I8 STIND_R4 STIND_R8 STIND_OBJ
262 %term ADDR_L ADDR_G ARG_I4 ARG_I8 ARG_R4 ARG_R8 ARG_OBJ CALL_I4 CALL_I8 CALL_R8 CALL_VOID
263 %term BREAK SWITCH BR RET_VOID RET RET_OBJ ENDFINALLY ENDFILTER JMP
264 %term ADD ADD_OVF ADD_OVF_UN SUB SUB_OVF SUB_OVF_UN MUL MUL_OVF MUL_OVF_UN
265 %term DIV DIV_UN REM REM_UN AND OR XOR SHL SHR SHR_UN NEG NOT CKFINITE
266 %term COMPARE CBRANCH BRTRUE BRFALSE CSET
267 %term CONV_I4 CONV_I1 CONV_I2 CONV_I8 CONV_U1 CONV_U2 CONV_U4 CONV_U8 CONV_R4 CONV_R8 CONV_R_UN
268 %term INTF_ADDR VFUNC_ADDR NOP NEWARR NEWARR_SPEC NEWOBJ NEWOBJ_SPEC
269 %term INITBLK CPBLK CPSRC POP INITOBJ LOCALLOC
270 %term ISINST CASTCLASS UNBOX
271 %term CONV_OVF_I1 CONV_OVF_U1 CONV_OVF_I2 CONV_OVF_U2 CONV_OVF_U4 CONV_OVF_U8 CONV_OVF_I4
272 %term CONV_OVF_I4_UN CONV_OVF_U1_UN CONV_OVF_U2_UN
273 %term CONV_OVF_I2_UN CONV_OVF_I8_UN CONV_OVF_I1_UN
274 %term EXCEPTION THROW RETHROW HANDLER CHECKTHIS RETHROW_ABORT
275 %term LDLEN LDELEMA LDFTN LDVIRTFTN LDSTR LDSFLDA
276 %term REMOTE_LDFLDA REMOTE_STIND_I1 REMOTE_STIND_I2 REMOTE_STIND_I4
277 %term REMOTE_STIND_I8 REMOTE_STIND_R4 REMOTE_STIND_R8 REMOTE_STIND_OBJ
280 %term FUNC1 PROC2 PROC3 FREE OBJADDR VTADDR
296 tree->data.ainfo.offset = tree->data.i;
297 tree->data.ainfo.amode = AMImmediate;
301 tree->data.ainfo.offset = tree->data.i;
302 tree->data.ainfo.amode = AMImmediate;
305 acon: ADD (ADDR_G, CONST_I4) {
306 tree->data.ainfo.offset = (unsigned)tree->left->data.p + tree->right->data.i;
307 tree->data.ainfo.amode = AMImmediate;
313 tree->data.ainfo.offset = 0;
314 tree->data.ainfo.basereg = tree->reg1;
315 tree->data.ainfo.amode = AMBase;
318 base: ADD (reg, CONST_I4) {
319 tree->data.ainfo.offset = tree->right->data.i;
320 tree->data.ainfo.basereg = tree->left->reg1;
321 tree->data.ainfo.amode = AMBase;
325 tree->data.ainfo.offset = VARINFO (s, tree->data.i).offset;
326 tree->data.ainfo.basereg = X86_EBP;
327 tree->data.ainfo.amode = AMBase;
329 MBCOND (VARINFO (data, tree->data.i).reg < 0);
334 tree->data.ainfo.offset = 0;
335 tree->data.ainfo.indexreg = tree->reg1;
336 tree->data.ainfo.shift = 0;
337 tree->data.ainfo.amode = AMIndex;
340 index: SHL (reg, CONST_I4) {
341 tree->data.ainfo.offset = 0;
342 tree->data.ainfo.amode = AMIndex;
343 tree->data.ainfo.indexreg = tree->left->reg1;
344 tree->data.ainfo.shift = tree->right->data.i;
346 MBCOND (tree->right->data.i == 0 ||
347 tree->right->data.i == 1 ||
348 tree->right->data.i == 2 ||
349 tree->right->data.i == 3);
354 index: MUL (reg, CONST_I4) {
355 static int fast_log2 [] = { 1, 0, 1, -1, 2, -1, -1, -1, 3 };
357 tree->data.ainfo.offset = 0;
358 tree->data.ainfo.amode = AMIndex;
359 tree->data.ainfo.indexreg = tree->left->reg1;
360 tree->data.ainfo.shift = fast_log2 [tree->right->data.i];
362 MBCOND (tree->right->data.i == 1 ||
363 tree->right->data.i == 2 ||
364 tree->right->data.i == 4 ||
365 tree->right->data.i == 8);
374 addr: ADD (index, base) {
375 tree->data.ainfo.offset = tree->right->data.ainfo.offset;
376 tree->data.ainfo.basereg = tree->right->data.ainfo.basereg;
377 tree->data.ainfo.amode = tree->left->data.ainfo.amode |
378 tree->right->data.ainfo.amode;
379 tree->data.ainfo.shift = tree->left->data.ainfo.shift;
380 tree->data.ainfo.indexreg = tree->left->data.ainfo.indexreg;
383 # we pass exception in ECX to catch handler
385 int offset = VARINFO (s, tree->data.i).offset;
387 if (tree->reg1 != X86_ECX)
388 x86_mov_reg_reg (s->code, tree->reg1, X86_ECX, 4);
390 /* store it so that we can RETHROW it later */
391 x86_mov_membase_reg (s->code, X86_EBP, offset, tree->reg1, 4);
397 x86_push_reg (s->code, tree->left->reg1);
398 target = arch_get_throw_exception ();
399 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, target);
400 x86_call_code (s->code, target);
404 int offset = VARINFO (s, tree->data.i).offset;
407 x86_push_membase (s->code, X86_EBP, offset);
409 target = arch_get_throw_exception ();
410 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, target);
411 x86_call_code (s->code, target);
414 stmt: RETHROW_ABORT {
418 target = mono_thread_current;
419 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, target);
420 x86_call_code (s->code, target);
422 x86_mov_reg_membase (s->code, X86_EAX, X86_EAX, G_STRUCT_OFFSET (MonoThread, abort_exc), 4);
423 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0);
425 br = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
427 x86_push_reg (s->code, X86_EAX);
429 target = arch_get_throw_exception ();
430 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, target);
431 x86_call_code (s->code, target);
433 x86_patch (br, s->code);
437 /* save ESP (used by ENDFINALLY) */
438 x86_mov_membase_reg (s->code, X86_EBP, mono_exc_esp_offset, X86_ESP, 4);
439 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
440 x86_call_imm (s->code, 0);
444 /* restore ESP - which can be modified when we allocate value types
445 * in the finally handler */
446 x86_mov_reg_membase (s->code, X86_ESP, X86_EBP, mono_exc_esp_offset, 4);
447 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
451 stmt: ENDFILTER (reg) {
452 /* restore ESP - which can be modified when we allocate value types
454 x86_mov_reg_membase (s->code, X86_ESP, X86_EBP, mono_exc_esp_offset, 4);
455 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
456 if (tree->left->reg1 != X86_EAX)
457 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
461 stmt: STIND_I4 (ADDR_L, ADD (LDIND_I4 (ADDR_L), CONST_I4)) {
462 int vn = tree->left->data.i;
463 int treg = VARINFO (s, vn).reg;
464 int offset = VARINFO (s, vn).offset;
465 int data = tree->right->right->data.i;
469 x86_inc_reg (s->code, treg);
471 x86_inc_membase (s->code, X86_EBP, offset);
474 x86_alu_reg_imm (s->code, X86_ADD, treg, data);
476 x86_alu_membase_imm (s->code, X86_ADD, X86_EBP, offset, data);
479 MBCOND (tree->right->left->left->data.i == tree->left->data.i);
483 stmt: STIND_I4 (ADDR_L, SUB (LDIND_I4 (ADDR_L), CONST_I4)) {
484 int vn = tree->left->data.i;
485 int treg = VARINFO (s, vn).reg;
486 int offset = VARINFO (s, vn).offset;
487 int data = tree->right->right->data.i;
491 x86_dec_reg (s->code, treg);
493 x86_dec_membase (s->code, X86_EBP, offset);
496 x86_alu_reg_imm (s->code, X86_SUB, treg, data);
498 x86_alu_membase_imm (s->code, X86_SUB, X86_EBP, offset, data);
501 MBCOND (tree->right->left->left->data.i == tree->left->data.i);
505 stmt: STIND_I4 (ADDR_L, ADD (LDIND_I4 (ADDR_L), reg)) {
506 int vn = tree->left->data.i;
507 int treg = VARINFO (s, vn).reg;
508 int sreg = tree->right->right->reg1;
509 int offset = VARINFO (s, vn).offset;
512 x86_alu_reg_reg (s->code, X86_ADD, treg, sreg);
514 x86_alu_membase_reg (s->code, X86_ADD, X86_EBP, offset, sreg);
517 MBCOND (tree->right->left->left->data.i == tree->left->data.i);
521 stmt: STIND_I4 (ADDR_L, LDIND_I4 (ADDR_L)) {
522 int treg1 = VARINFO (s, tree->left->data.i).reg;
523 int treg2 = VARINFO (s, tree->right->left->data.i).reg;
524 int offset1 = VARINFO (s, tree->left->data.i).offset;
525 int offset2 = VARINFO (s, tree->right->left->data.i).offset;
527 //{static int cx= 0; printf ("CX %5d\n", cx++);}
529 if (treg1 >= 0 && treg2 >= 0) {
530 x86_mov_reg_reg (s->code, treg1, treg2, 4);
533 if (treg1 >= 0 && treg2 < 0) {
534 x86_mov_reg_membase (s->code, treg1, X86_EBP, offset2, 4);
537 if (treg1 < 0 && treg2 >= 0) {
538 x86_mov_membase_reg (s->code, X86_EBP, offset1, treg2, 4);
542 g_assert_not_reached ();
545 MBCOND (VARINFO (data, tree->left->data.i).reg >= 0 ||
546 VARINFO (data, tree->right->left->data.i).reg >= 0);
550 stmt: STIND_I4 (addr, CONST_I4) {
551 switch (tree->left->data.ainfo.amode) {
554 x86_mov_mem_imm (s->code, tree->left->data.ainfo.offset, tree->right->data.i, 4);
558 x86_mov_membase_imm (s->code, tree->left->data.ainfo.basereg,
559 tree->left->data.ainfo.offset, tree->right->data.i, 4);
562 x86_mov_memindex_imm (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
563 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
564 tree->right->data.i, 4);
567 x86_mov_memindex_imm (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
568 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
569 tree->right->data.i, 4);
574 stmt: STIND_I4 (addr, reg) {
576 switch (tree->left->data.ainfo.amode) {
579 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 4);
583 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
584 tree->left->data.ainfo.offset, tree->right->reg1, 4);
587 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
588 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
589 tree->right->reg1, 4);
592 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
593 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
594 tree->right->reg1, 4);
599 stmt: REMOTE_STIND_I4 (reg, reg) {
602 int lreg = tree->left->reg1;
603 int rreg = tree->right->reg1;
612 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
613 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
614 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
616 /* this is a transparent proxy - remote the call */
618 /* save value to stack */
619 x86_push_reg (s->code, rreg);
621 x86_push_reg (s->code, X86_ESP);
622 x86_push_imm (s->code, tree->data.fi.field);
623 x86_push_imm (s->code, tree->data.fi.klass);
624 x86_push_reg (s->code, lreg);
625 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
626 x86_call_code (s->code, 0);
627 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
629 br [1] = s->code; x86_jump8 (s->code, 0);
631 x86_patch (br [0], s->code);
632 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
633 tree->data.fi.field->offset;
634 x86_mov_membase_reg (s->code, lreg, offset, rreg, 4);
636 x86_patch (br [1], s->code);
639 stmt: STIND_I1 (addr, reg) {
640 PRINT_REG ("STIND_I1", tree->right->reg1);
642 switch (tree->left->data.ainfo.amode) {
645 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 1);
649 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
650 tree->left->data.ainfo.offset, tree->right->reg1, 1);
653 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
654 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
655 tree->right->reg1, 1);
658 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
659 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
660 tree->right->reg1, 1);
665 stmt: REMOTE_STIND_I1 (reg, reg) {
668 int lreg = tree->left->reg1;
669 int rreg = tree->right->reg1;
678 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
679 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
680 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
682 /* this is a transparent proxy - remote the call */
684 /* save value to stack */
685 x86_push_reg (s->code, rreg);
687 x86_push_reg (s->code, X86_ESP);
688 x86_push_imm (s->code, tree->data.fi.field);
689 x86_push_imm (s->code, tree->data.fi.klass);
690 x86_push_reg (s->code, lreg);
691 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
692 x86_call_code (s->code, 0);
693 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
695 br [1] = s->code; x86_jump8 (s->code, 0);
697 x86_patch (br [0], s->code);
698 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
699 tree->data.fi.field->offset;
700 x86_mov_membase_reg (s->code, lreg, offset, rreg, 1);
702 x86_patch (br [1], s->code);
705 stmt: STIND_I2 (addr, reg) {
706 PRINT_REG ("STIND_I2", tree->right->reg1);
708 switch (tree->left->data.ainfo.amode) {
711 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 2);
715 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
716 tree->left->data.ainfo.offset, tree->right->reg1, 2);
719 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
720 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
721 tree->right->reg1, 2);
724 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
725 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
726 tree->right->reg1, 2);
731 stmt: REMOTE_STIND_I2 (reg, reg) {
734 int lreg = tree->left->reg1;
735 int rreg = tree->right->reg1;
744 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
745 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
746 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
748 /* this is a transparent proxy - remote the call */
750 /* save value to stack */
751 x86_push_reg (s->code, rreg);
753 x86_push_reg (s->code, X86_ESP);
754 x86_push_imm (s->code, tree->data.fi.field);
755 x86_push_imm (s->code, tree->data.fi.klass);
756 x86_push_reg (s->code, lreg);
757 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
758 x86_call_code (s->code, 0);
759 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
761 br [1] = s->code; x86_jump8 (s->code, 0);
763 x86_patch (br [0], s->code);
764 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
765 tree->data.fi.field->offset;
766 x86_mov_membase_reg (s->code, lreg, offset, rreg, 2);
768 x86_patch (br [1], s->code);
771 reg: LDIND_I4 (ADDR_L) {
772 int treg = VARINFO (s, tree->left->data.i).reg;
774 if (treg != tree->reg1)
775 x86_mov_reg_reg (s->code, tree->reg1, treg, 4);
778 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
782 stmt: STIND_I4 (ADDR_L, CONST_I4) {
783 int treg = VARINFO (s, tree->left->data.i).reg;
785 x86_mov_reg_imm (s->code, treg, tree->right->data.i);
788 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
792 stmt: STIND_I4 (ADDR_L, LDIND_I4 (ADDR_L)) {
793 int treg = VARINFO (s, tree->left->data.i).reg;
794 int offset = VARINFO (s, tree->right->left->data.i).offset;
796 x86_mov_reg_membase (s->code, treg, X86_EBP, offset, 4);
798 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
799 MBCOND ((VARINFO (data, tree->right->left->data.i).reg < 0));
803 stmt: STIND_I4 (ADDR_L, reg) {
804 int treg = VARINFO (s, tree->left->data.i).reg;
806 if (treg != tree->right->reg1)
807 x86_mov_reg_reg (s->code, treg, tree->right->reg1, 4);
810 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
815 reg: LDIND_I4 (addr) {
817 switch (tree->left->data.ainfo.amode) {
820 x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4);
824 x86_mov_reg_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
825 tree->left->data.ainfo.offset, 4);
828 x86_mov_reg_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
829 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, 4);
832 x86_mov_reg_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
833 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
834 tree->left->data.ainfo.shift, 4);
839 PRINT_REG ("LDIND_I4", tree->reg1);
842 reg: LDIND_I1 (addr) {
843 switch (tree->left->data.ainfo.amode) {
846 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, TRUE, FALSE);
850 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
851 tree->left->data.ainfo.offset, TRUE, FALSE);
854 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
855 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, TRUE, FALSE);
858 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
859 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
860 tree->left->data.ainfo.shift, TRUE, FALSE);
864 PRINT_REG ("LDIND_I1", tree->reg1);
867 reg: LDIND_U1 (addr) {
868 switch (tree->left->data.ainfo.amode) {
871 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, FALSE, FALSE);
875 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
876 tree->left->data.ainfo.offset, FALSE, FALSE);
879 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
880 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, FALSE, FALSE);
883 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
884 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
885 tree->left->data.ainfo.shift, FALSE, FALSE);
889 PRINT_REG ("LDIND_U1", tree->reg1);
892 reg: LDIND_I2 (addr) {
893 switch (tree->left->data.ainfo.amode) {
896 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, TRUE, TRUE);
900 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
901 tree->left->data.ainfo.offset, TRUE, TRUE);
904 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
905 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, TRUE, TRUE);
908 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
909 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
910 tree->left->data.ainfo.shift, TRUE, TRUE);
914 PRINT_REG ("LDIND_U2", tree->reg1);
917 reg: LDIND_U2 (addr) {
918 switch (tree->left->data.ainfo.amode) {
921 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, FALSE, TRUE);
925 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
926 tree->left->data.ainfo.offset, FALSE, TRUE);
929 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
930 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, FALSE, TRUE);
933 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
934 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
935 tree->left->data.ainfo.shift, FALSE, TRUE);
939 PRINT_REG ("LDIND_U2", tree->reg1);
942 reg: REMOTE_LDFLDA (reg) {
945 int lreg = tree->left->reg1;
950 if (tree->reg1 != treg)
951 x86_push_reg (s->code, treg);
953 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
954 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
955 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
957 /* this is a transparent proxy - remote the call */
959 x86_push_reg (s->code, X86_EAX);
961 x86_push_reg (s->code, X86_EDX);
962 x86_push_reg (s->code, X86_ECX);
964 x86_push_imm (s->code, 0);
965 x86_push_imm (s->code, tree->data.fi.field);
966 x86_push_imm (s->code, tree->data.fi.klass);
967 x86_push_reg (s->code, lreg);
968 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_load_remote_field);
969 x86_call_code (s->code, 0);
970 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
973 x86_mov_reg_reg (s->code, treg, X86_EAX, 4);
975 x86_pop_reg (s->code, X86_ECX);
977 x86_pop_reg (s->code, X86_EDX);
979 x86_pop_reg (s->code, X86_EAX);
981 x86_mov_reg_reg (s->code, tree->reg1, treg, 4);
983 br [1] = s->code; x86_jump8 (s->code, 0);
985 x86_patch (br [0], s->code);
986 if (tree->data.fi.klass->valuetype)
987 x86_lea_membase (s->code, tree->reg1, lreg,
988 tree->data.fi.field->offset - sizeof (MonoObject));
990 x86_lea_membase (s->code, tree->reg1, lreg, tree->data.fi.field->offset);
992 x86_patch (br [1], s->code);
994 if (tree->reg1 != treg)
995 x86_pop_reg (s->code, treg);
999 int offset = VARINFO (s, tree->data.i).offset;
1001 x86_lea_membase (s->code, tree->reg1, X86_EBP, offset);
1003 PRINT_REG ("ADDR_L", tree->reg1);
1005 MBCOND (VARINFO (data, tree->data.i).reg < 0);
1011 x86_mov_reg_imm (s->code, tree->reg1, tree->data.p);
1014 reg: CONV_I1 (reg) {
1015 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, FALSE);
1018 reg: CONV_U1 (reg) {
1019 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
1022 reg: CONV_I2 (reg) {
1023 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, TRUE);
1026 reg: CONV_U2 (reg) {
1027 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
1031 x86_mov_reg_imm (s->code, tree->reg1, tree->data.i);
1034 reg: CONV_I4 (reg) {
1035 if (tree->reg1 != tree->left->reg1)
1036 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1037 PRINT_REG ("CONV_I4", tree->left->reg1);
1040 reg: CONV_U4 (reg) {
1041 if (tree->reg1 != tree->left->reg1)
1042 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1043 PRINT_REG ("CONV_U4", tree->left->reg1);
1046 reg: CONV_OVF_I4 (reg) {
1047 if (tree->reg1 != tree->left->reg1)
1048 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1049 PRINT_REG ("CONV_OVF_I4", tree->left->reg1);
1052 reg: CONV_OVF_U4 (reg) {
1053 /* Keep in sync with CONV_OVF_I4_UN below, they are the same on 32-bit machines */
1054 x86_test_reg_imm (s->code, tree->left->reg1, 0x8000000);
1055 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1056 if (tree->reg1 != tree->left->reg1)
1057 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1060 reg: CONV_OVF_I4_UN (reg) {
1061 /* Keep in sync with CONV_OVF_U4 above, they are the same on 32-bit machines */
1062 x86_test_reg_imm (s->code, tree->left->reg1, 0x8000000);
1063 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1064 if (tree->reg1 != tree->left->reg1)
1065 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1068 reg: CONV_OVF_I1 (reg) {
1069 /* probe value to be within -128 to 127 */
1070 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 127);
1071 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, TRUE, "OverflowException");
1072 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, -128);
1073 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GT, TRUE, "OverflowException");
1074 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, FALSE);
1077 reg: CONV_OVF_I1_UN (reg) {
1078 /* probe values between 0 to 128 */
1079 x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff80);
1080 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1081 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
1084 reg: CONV_OVF_U1 (reg) {
1085 /* Keep in sync with CONV_OVF_U1_UN routine below, they are the same on 32-bit machines */
1086 /* probe value to be within 0 to 255 */
1087 x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff00);
1088 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1089 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
1092 reg: CONV_OVF_U1_UN (reg) {
1093 /* Keep in sync with CONV_OVF_U1 routine above, they are the same on 32-bit machines */
1094 /* probe value to be within 0 to 255 */
1095 x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff00);
1096 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1097 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
1100 reg: CONV_OVF_I2 (reg) {
1101 /* Probe value to be within -32768 and 32767 */
1102 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 32767);
1103 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, TRUE, "OverflowException");
1104 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, -32768);
1105 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GE, TRUE, "OverflowException");
1106 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, TRUE);
1109 reg: CONV_OVF_U2 (reg) {
1110 /* Keep in sync with CONV_OVF_U2_UN below, they are the same on 32-bit machines */
1111 /* Probe value to be within 0 and 65535 */
1112 x86_test_reg_imm (s->code, tree->left->reg1, 0xffff0000);
1113 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException");
1114 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
1117 reg: CONV_OVF_U2_UN (reg) {
1118 /* Keep in sync with CONV_OVF_U2 above, they are the same on 32-bit machines */
1119 /* Probe value to be within 0 and 65535 */
1120 x86_test_reg_imm (s->code, tree->left->reg1, 0xffff0000);
1121 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1122 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
1125 reg: CONV_OVF_I2_UN (reg) {
1126 /* Convert uint value into short, value within 0 and 32767 */
1127 x86_test_reg_imm (s->code, tree->left->reg1, 0xffff8000);
1128 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1129 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
1132 reg: MUL (reg, CONST_I4) "MB_USE_OPT1(0)" {
1133 unsigned int i, j, k, v;
1135 v = tree->right->data.i;
1136 for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
1141 if (v < 0 || i == 32 || v & k) {
1144 /* LEA r1, [r2 + r2*2] */
1145 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 1);
1148 /* LEA r1, [r2 + r2*4] */
1149 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1152 /* LEA r1, [r2 + r2*2] */
1154 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 1);
1155 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
1158 /* LEA r1, [r2 + r2*8] */
1159 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 3);
1162 /* LEA r1, [r2 + r2*4] */
1164 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1165 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
1168 /* LEA r1, [r2 + r2*2] */
1170 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 1);
1171 x86_shift_reg_imm (s->code, X86_SHL, tree->reg1, 2);
1174 /* LEA r1, [r2 + r2*4] */
1175 /* LEA r1, [r1 + r1*4] */
1176 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1177 x86_lea_memindex (s->code, tree->reg1, tree->reg1, 0, tree->reg1, 2);
1180 /* LEA r1, [r2 + r2*4] */
1182 /* LEA r1, [r1 + r1*4] */
1183 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1184 x86_shift_reg_imm (s->code, X86_SHL, tree->reg1, 2);
1185 x86_lea_memindex (s->code, tree->reg1, tree->reg1, 0, tree->reg1, 2);
1188 x86_imul_reg_reg_imm (s->code, tree->reg1, tree->left->reg1, tree->right->data.i);
1192 x86_shift_reg_imm (s->code, X86_SHL, tree->left->reg1, i);
1193 if (tree->reg1 != tree->left->reg1)
1194 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* MUL family: 32-bit integer multiply rules.
 * NOTE(review): this listing omits some original lines (blanks/closing braces);
 * code lines below are unchanged. */
1198 reg: MUL (reg, reg) {
/* left *= right, then copy into the destination register if the allocator
 * picked a different one. */
1199 x86_imul_reg_reg (s->code, tree->left->reg1, tree->right->reg1);
1201 if (tree->reg1 != tree->left->reg1)
1202 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Signed multiply with overflow check: two-operand IMUL sets OF on overflow. */
1205 reg: MUL_OVF (reg, reg) {
1206 x86_imul_reg_reg (s->code, tree->left->reg1, tree->right->reg1);
1207 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
1209 if (tree->reg1 != tree->left->reg1)
1210 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Unsigned multiply with overflow check: one-operand MUL (EDX:EAX = EAX * r/m);
 * CF/OF are set when the high half is non-zero. */
1213 reg: MUL_OVF_UN (reg, reg) {
/* EAX is about to be clobbered, so the right operand must not live there. */
1214 mono_assert (tree->right->reg1 != X86_EAX);
1216 if (tree->left->reg1 != X86_EAX)
1217 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1219 x86_mul_reg (s->code, tree->right->reg1, FALSE);
1220 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
/* The register allocator must have pinned the result pair to EAX:EDX. */
1222 mono_assert (tree->reg1 == X86_EAX &&
1223 tree->reg2 == X86_EDX);
1226 reg: DIV (reg, CONST_I4) {
1227 unsigned int i, j, k, v;
1229 v = tree->right->data.i;
1230 for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
1235 x86_shift_reg_imm (s->code, X86_SAR, tree->left->reg1, i);
1236 if (tree->reg1 != tree->left->reg1)
1237 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1240 unsigned int i, j, k, v;
1245 v = tree->right->data.i;
1246 for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
1251 if (i == 32 || v & k)
/* Signed 32-bit divide: IDIV computes EDX:EAX / r/m32 -> quotient in EAX,
 * remainder in EDX. */
1258 reg: DIV (reg, reg) {
1259 mono_assert (tree->right->reg1 != X86_EAX);
1261 if (tree->left->reg1 != X86_EAX)
1262 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
/* NOTE(review): the CDQ that sign-extends EAX into EDX before IDIV is not
 * visible in this listing - confirm it is present in the original source. */
1265 x86_div_reg (s->code, tree->right->reg1, TRUE);
1267 mono_assert (tree->reg1 == X86_EAX &&
1268 tree->reg2 == X86_EDX);
1271 reg: DIV_UN (reg, CONST_I4) {
1272 unsigned int i, j, k, v;
1275 v = tree->right->data.i;
1276 for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
1281 if (i == 32 || v & k) {
1282 for (i = 32, j = 0x80000000; --i >= 0; j >>= 1) {
1286 /* k = 32 + number of significant bits in v - 1 */
1290 for (i = 0; i < k; i++) f *= 2.0f;
1296 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, k - 32);
1297 if (tree->reg1 != tree->left->reg1)
1298 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1299 } else if (r < 0.5f) {
1300 if (tree->left->reg1 != X86_EAX)
1301 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1302 x86_mov_reg_imm (s->code, X86_EDX, (guint32) floor(f));
1303 /* x86_inc_reg (s->code, X86_EAX); */
1304 /* INC is faster but we have to check for overflow. */
1305 x86_alu_reg_imm (s->code, X86_ADD, X86_EAX, 1);
1306 x86_branch8(s->code, X86_CC_C, 2, FALSE);
1307 x86_mul_reg (s->code, X86_EDX, FALSE);
1308 x86_shift_reg_imm (s->code, X86_SHR, X86_EDX, k - 32);
1309 if (tree->reg1 != X86_EDX)
1310 x86_mov_reg_reg (s->code, tree->reg1, X86_EDX, 4);
1312 if (tree->left->reg1 != X86_EAX)
1313 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1314 x86_mov_reg_imm (s->code, X86_EDX, (guint32) ceil(f));
1315 x86_mul_reg (s->code, X86_EDX, FALSE);
1316 x86_shift_reg_imm (s->code, X86_SHR, X86_EDX, k - 32);
1317 if (tree->reg1 != X86_EDX)
1318 x86_mov_reg_reg (s->code, tree->reg1, X86_EDX, 4);
1321 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, i);
1322 if (tree->reg1 != tree->left->reg1)
1323 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Unsigned 32-bit divide: zero EDX, then DIV EDX:EAX by r/m32. */
1328 reg: DIV_UN (reg, reg) {
1329 mono_assert (tree->right->reg1 != X86_EAX);
1331 if (tree->left->reg1 != X86_EAX)
1332 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1334 x86_mov_reg_imm (s->code, X86_EDX, 0);
1335 x86_div_reg (s->code, tree->right->reg1, FALSE);
1337 mono_assert (tree->reg1 == X86_EAX &&
1338 tree->reg2 == X86_EDX);
/* Signed remainder: IDIV leaves the remainder in EDX; it is then moved to EAX
 * (the asserted result register). */
1341 reg: REM (reg, reg) {
1342 mono_assert (tree->right->reg1 != X86_EAX);
1343 mono_assert (tree->right->reg1 != X86_EDX);
1345 if (tree->left->reg1 != X86_EAX)
1346 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
/* NOTE(review): the instruction performing this sign extension (CDQ) is not
 * visible in this listing. */
1348 /* sign extend to 64bit in EAX/EDX */
1350 x86_div_reg (s->code, tree->right->reg1, TRUE);
1351 x86_mov_reg_reg (s->code, X86_EAX, X86_EDX, 4);
1353 mono_assert (tree->reg1 == X86_EAX &&
1354 tree->reg2 == X86_EDX);
/* Unsigned remainder: zero-extend via EDX = 0, DIV, remainder EDX -> EAX. */
1357 reg: REM_UN (reg, reg) {
1358 mono_assert (tree->right->reg1 != X86_EAX);
1359 mono_assert (tree->right->reg1 != X86_EDX);
1361 if (tree->left->reg1 != X86_EAX)
1362 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1364 /* zero extend to 64bit in EAX/EDX */
1365 x86_mov_reg_imm (s->code, X86_EDX, 0);
1366 x86_div_reg (s->code, tree->right->reg1, FALSE);
1367 x86_mov_reg_reg (s->code, X86_EAX, X86_EDX, 4);
1369 mono_assert (tree->reg1 == X86_EAX &&
1370 tree->reg2 == X86_EDX);
/* ADD with immediate: use the shorter INC encoding for +1, otherwise ADD imm. */
1373 reg: ADD (reg, CONST_I4) "MB_USE_OPT1(0)" {
1374 if (tree->right->data.i == 1)
1375 x86_inc_reg (s->code, tree->left->reg1);
/* NOTE(review): the 'else' between INC and the ADD below is not visible in
 * this listing; code lines are unchanged. */
1377 x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, tree->right->data.i);
1379 if (tree->reg1 != tree->left->reg1)
1380 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* ADD directly from a register-allocated local variable. */
1384 reg: ADD (reg, LDIND_I4 (ADDR_L)) {
1385 int treg = VARINFO (s, tree->right->left->data.i).reg;
1387 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, treg);
1389 if (tree->reg1 != tree->left->reg1)
1390 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Only applicable when the local actually lives in a register. */
1392 MBCOND ((VARINFO (data, tree->right->left->data.i).reg >= 0));
1396 reg: ADD (reg, reg) {
1397 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
1399 if (tree->reg1 != tree->left->reg1)
1400 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Signed add with overflow check (overflow flag). */
1403 reg: ADD_OVF (reg, reg) {
1404 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
1405 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
1407 if (tree->reg1 != tree->left->reg1)
1408 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Unsigned add with overflow check (carry flag). */
1411 reg: ADD_OVF_UN (reg, reg) {
1412 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
1413 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
1415 if (tree->reg1 != tree->left->reg1)
1416 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* The SUB rules mirror the ADD rules above (DEC for an immediate of 1). */
1419 reg: SUB (reg, CONST_I4) "MB_USE_OPT1(0)" {
1420 if (tree->right->data.i == 1)
1421 x86_dec_reg (s->code, tree->left->reg1);
1423 x86_alu_reg_imm (s->code, X86_SUB, tree->left->reg1, tree->right->data.i);
1425 if (tree->reg1 != tree->left->reg1)
1426 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1429 reg: SUB (reg, LDIND_I4 (ADDR_L)) {
1430 int treg = VARINFO (s, tree->right->left->data.i).reg;
1432 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, treg);
1434 if (tree->reg1 != tree->left->reg1)
1435 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1437 MBCOND ((VARINFO (data, tree->right->left->data.i).reg >= 0));
1441 reg: SUB (reg, reg) {
1442 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
1444 if (tree->reg1 != tree->left->reg1)
1445 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1448 reg: SUB_OVF (reg, reg) {
1449 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
1450 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
1452 if (tree->reg1 != tree->left->reg1)
1453 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1456 reg: SUB_OVF_UN (reg, reg) {
1457 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
1458 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
1460 if (tree->reg1 != tree->left->reg1)
1461 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1464 reg: CSET (cflags) {
1466 switch (tree->data.i) {
1468 x86_set_reg (s->code, X86_CC_EQ, tree->reg1, TRUE);
1471 x86_set_reg (s->code, X86_CC_GT, tree->reg1, TRUE);
1474 x86_set_reg (s->code, X86_CC_GT, tree->reg1, FALSE);
1477 x86_set_reg (s->code, X86_CC_LT, tree->reg1, TRUE);
1480 x86_set_reg (s->code, X86_CC_LT, tree->reg1, FALSE);
1483 g_assert_not_reached ();
1486 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
/* Bitwise AND/OR/XOR: operate on the left operand in place, then copy to the
 * destination register when the allocator picked a different one. */
1489 reg: AND (reg, CONST_I4) "MB_USE_OPT1(0)" {
1490 x86_alu_reg_imm (s->code, X86_AND, tree->left->reg1, tree->right->data.i);
1492 if (tree->reg1 != tree->left->reg1)
1493 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1496 reg: AND (reg, reg) {
1497 x86_alu_reg_reg (s->code, X86_AND, tree->left->reg1, tree->right->reg1);
1499 if (tree->reg1 != tree->left->reg1)
1500 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1503 reg: OR (reg, CONST_I4) "MB_USE_OPT1(0)" {
1504 x86_alu_reg_imm (s->code, X86_OR, tree->left->reg1, tree->right->data.i);
1506 if (tree->reg1 != tree->left->reg1)
1507 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1510 reg: OR (reg, reg) {
1511 x86_alu_reg_reg (s->code, X86_OR, tree->left->reg1, tree->right->reg1);
1513 if (tree->reg1 != tree->left->reg1)
1514 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1517 reg: XOR (reg, CONST_I4) "MB_USE_OPT1(0)" {
1518 x86_alu_reg_imm (s->code, X86_XOR, tree->left->reg1, tree->right->data.i);
1520 if (tree->reg1 != tree->left->reg1)
1521 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1524 reg: XOR (reg, reg) {
1525 x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->right->reg1);
1527 if (tree->reg1 != tree->left->reg1)
1528 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* NOTE(review): the NEG/NOT rule headers are not visible in this listing; the
 * bodies below negate / bitwise-complement the left operand in place. */
1532 x86_neg_reg (s->code, tree->left->reg1);
1534 if (tree->reg1 != tree->left->reg1)
1535 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1539 x86_not_reg (s->code, tree->left->reg1);
1541 if (tree->reg1 != tree->left->reg1)
1542 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Shift rules. Constant counts use the immediate form; variable counts must go
 * through CL, so ECX is saved/restored when the count lives elsewhere. */
1545 reg: SHL (reg, CONST_I4) {
1546 x86_shift_reg_imm (s->code, X86_SHL, tree->left->reg1, tree->right->data.i);
1548 if (tree->reg1 != tree->left->reg1)
1549 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1552 reg: SHL (reg, reg) {
1553 if (tree->right->reg1 != X86_ECX) {
1554 x86_push_reg (s->code, X86_ECX);
1555 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
1557 x86_shift_reg (s->code, X86_SHL, tree->left->reg1);
1559 if (tree->reg1 != tree->left->reg1)
1560 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1562 if (tree->right->reg1 != X86_ECX)
1563 x86_pop_reg (s->code, X86_ECX);
/* Neither the value register nor the result may be ECX - CL holds the count. */
1565 mono_assert (tree->reg1 != X86_ECX &&
1566 tree->left->reg1 != X86_ECX);
/* SHR = arithmetic (sign-propagating) right shift, hence SAR. */
1569 reg: SHR (reg, CONST_I4) {
1570 x86_shift_reg_imm (s->code, X86_SAR, tree->left->reg1, tree->right->data.i);
1572 if (tree->reg1 != tree->left->reg1)
1573 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1576 reg: SHR (reg, reg) {
1577 if (tree->right->reg1 != X86_ECX) {
1578 x86_push_reg (s->code, X86_ECX);
1579 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
1582 x86_shift_reg (s->code, X86_SAR, tree->left->reg1);
1584 if (tree->reg1 != tree->left->reg1)
1585 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1587 if (tree->right->reg1 != X86_ECX)
1588 x86_pop_reg (s->code, X86_ECX);
1590 mono_assert (tree->reg1 != X86_ECX &&
1591 tree->left->reg1 != X86_ECX);
/* SHR_UN = logical (zero-filling) right shift, hence SHR. */
1594 reg: SHR_UN (reg, CONST_I4) {
1595 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, tree->right->data.i);
1597 if (tree->reg1 != tree->left->reg1)
1598 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1601 reg: SHR_UN (reg, reg) {
1602 if (tree->right->reg1 != X86_ECX) {
1603 x86_push_reg (s->code, X86_ECX);
1604 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
1607 x86_shift_reg (s->code, X86_SHR, tree->left->reg1);
1609 if (tree->reg1 != tree->left->reg1)
1610 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1612 if (tree->right->reg1 != X86_ECX)
1613 x86_pop_reg (s->code, X86_ECX);
1615 mono_assert (tree->reg1 != X86_ECX &&
1616 tree->left->reg1 != X86_ECX);
/* Static field address: call mono_ldsflda (klass, offset) through a patched
 * absolute call, preserving the caller-saved EAX/ECX/EDX; result is in EAX. */
1619 reg: LDSFLDA (CONST_I4) {
1620 if (tree->reg1 != X86_EAX)
1621 x86_push_reg (s->code, X86_EAX);
1622 x86_push_reg (s->code, X86_ECX);
1623 x86_push_reg (s->code, X86_EDX);
1625 x86_push_imm (s->code, tree->left->data.i);
1626 x86_push_imm (s->code, tree->data.klass);
1627 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldsflda);
1628 x86_call_code (s->code, 0);
/* Two 4-byte arguments were pushed (cdecl) - the caller pops them. */
1629 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
1631 x86_pop_reg (s->code, X86_EDX);
1632 x86_pop_reg (s->code, X86_ECX);
1633 if (tree->reg1 != X86_EAX) {
1634 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1635 x86_pop_reg (s->code, X86_EAX);
/* NOTE(review): the LDLEN rule header is not visible in this listing; the load
 * below reads MonoArray::max_length from the array in the left operand. */
1641 x86_mov_reg_membase (s->code, tree->reg1, tree->left->reg1,
1642 G_STRUCT_OFFSET (MonoArray, max_length), 4);
/* Element address with a constant index: optional bounds check against
 * MonoArray::max_length, then one LEA with the fully-folded constant offset. */
1645 reg: LDELEMA (reg, CONST_I4) {
1648 if (mono_jit_boundcheck){
1649 x86_alu_membase_imm (s->code, X86_CMP, tree->left->reg1, G_STRUCT_OFFSET (MonoArray, max_length), tree->right->data.i);
1650 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GT, FALSE, "IndexOutOfRangeException");
/* tree->data.i is the element size: address = vector + size * index. */
1653 ind = tree->data.i * tree->right->data.i + G_STRUCT_OFFSET (MonoArray, vector);
1655 x86_lea_membase (s->code, tree->reg1, tree->left->reg1, ind);
/* Element address with a register index: bounds check, then either a scaled
 * LEA (element size 1/2/4/8) or IMUL + ADD for other sizes. */
1658 reg: LDELEMA (reg, reg) {
1660 if (mono_jit_boundcheck){
1661 x86_alu_reg_membase (s->code, X86_CMP, tree->right->reg1, tree->left->reg1, G_STRUCT_OFFSET (MonoArray, max_length));
1662 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LT, FALSE, "IndexOutOfRangeException");
1665 if (tree->data.i == 1 || tree->data.i == 2 ||
1666 tree->data.i == 4 || tree->data.i == 8) {
/* Maps element size -> LEA scale; only entries 1, 2, 4 and 8 are reachable
 * under the guard above (entry 0 is a never-used placeholder). */
1667 static int fast_log2 [] = { 1, 0, 1, -1, 2, -1, -1, -1, 3 };
1668 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1,
1669 G_STRUCT_OFFSET (MonoArray, vector), tree->right->reg1,
1670 fast_log2 [tree->data.i]);
1672 x86_imul_reg_reg_imm (s->code, tree->right->reg1, tree->right->reg1, tree->data.i);
1673 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->right->reg1);
1674 x86_alu_reg_imm (s->code, X86_ADD, tree->reg1, G_STRUCT_OFFSET (MonoArray, vector));
/* NOTE(review): the LDSTR rule header is not visible in this listing.
 * Shared pattern for the runtime-helper rules below: preserve caller-saved
 * EAX/ECX/EDX, push the helper arguments, emit an absolute call patched via
 * mono_add_jump_info, pop the arguments, then fetch the result from EAX. */
1679 if (tree->reg1 != X86_EAX)
1680 x86_push_reg (s->code, X86_EAX);
1681 x86_push_reg (s->code, X86_ECX);
1682 x86_push_reg (s->code, X86_EDX);
/* mono_ldstr_wrapper (image, token) - two args, hence ADD ESP, 8 below. */
1684 x86_push_imm (s->code, tree->data.p);
1685 x86_push_imm (s->code, s->method->klass->image);
1686 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldstr_wrapper);
1687 x86_call_code (s->code, 0);
1688 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
1690 x86_pop_reg (s->code, X86_EDX);
1691 x86_pop_reg (s->code, X86_ECX);
1692 if (tree->reg1 != X86_EAX) {
1693 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1694 x86_pop_reg (s->code, X86_EAX);
1697 PRINT_REG ("LDSTR", tree->reg1);
/* NEWARR: mono_array_new_wrapper (class, length-in-left-reg). */
1701 if (tree->reg1 != X86_EAX)
1702 x86_push_reg (s->code, X86_EAX);
1703 x86_push_reg (s->code, X86_ECX);
1704 x86_push_reg (s->code, X86_EDX);
1706 x86_push_reg (s->code, tree->left->reg1);
1707 x86_push_imm (s->code, tree->data.p);
1708 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_array_new_wrapper);
1709 x86_call_code (s->code, 0);
1710 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
1712 x86_pop_reg (s->code, X86_EDX);
1713 x86_pop_reg (s->code, X86_ECX);
1714 if (tree->reg1 != X86_EAX) {
1715 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1716 x86_pop_reg (s->code, X86_EAX);
1719 PRINT_REG ("NEWARR", tree->reg1);
/* NEWARR_SPEC: same as NEWARR but through mono_array_new_specific. */
1722 reg: NEWARR_SPEC (reg) {
1723 if (tree->reg1 != X86_EAX)
1724 x86_push_reg (s->code, X86_EAX);
1725 x86_push_reg (s->code, X86_ECX);
1726 x86_push_reg (s->code, X86_EDX);
1728 x86_push_reg (s->code, tree->left->reg1);
1729 x86_push_imm (s->code, tree->data.p);
1730 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_array_new_specific);
1731 x86_call_code (s->code, 0);
1732 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
1734 x86_pop_reg (s->code, X86_EDX);
1735 x86_pop_reg (s->code, X86_ECX);
1736 if (tree->reg1 != X86_EAX) {
1737 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1738 x86_pop_reg (s->code, X86_EAX);
1741 PRINT_REG ("NEWARR_SPEC", tree->reg1);
/* NEWOBJ: mono_object_new_wrapper takes a single argument - ADD ESP, 4. */
1745 if (tree->reg1 != X86_EAX)
1746 x86_push_reg (s->code, X86_EAX);
1747 x86_push_reg (s->code, X86_ECX);
1748 x86_push_reg (s->code, X86_EDX);
1750 x86_push_imm (s->code, tree->data.klass);
1751 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_object_new_wrapper);
1752 x86_call_code (s->code, 0);
1753 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
1755 x86_pop_reg (s->code, X86_EDX);
1756 x86_pop_reg (s->code, X86_ECX);
1757 if (tree->reg1 != X86_EAX) {
1758 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1759 x86_pop_reg (s->code, X86_EAX);
1761 PRINT_REG ("NEWOBJ", tree->reg1);
/* NEWOBJ_SPEC: single-argument call to mono_object_new_specific. */
1765 if (tree->reg1 != X86_EAX)
1766 x86_push_reg (s->code, X86_EAX);
1767 x86_push_reg (s->code, X86_ECX);
1768 x86_push_reg (s->code, X86_EDX);
1770 x86_push_imm (s->code, tree->data.p);
1771 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_object_new_specific);
1772 x86_call_code (s->code, 0);
1773 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
1775 x86_pop_reg (s->code, X86_EDX);
1776 x86_pop_reg (s->code, X86_ECX);
1777 if (tree->reg1 != X86_EAX) {
1778 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1779 x86_pop_reg (s->code, X86_EAX);
1781 PRINT_REG ("NEWOBJ_SPEC", tree->reg1);
/* Copy the object address computed by the left subtree into the result. */
1784 reg: OBJADDR (reg) {
1785 if (tree->left->reg1 != tree->reg1)
1786 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Address of a stack-allocated valuetype local: EBP + frame offset. */
1789 reg: VTADDR (ADDR_L) {
1790 int offset = VARINFO (s, tree->left->data.i).offset;
1792 x86_lea_membase (s->code, tree->reg1, X86_EBP, offset);
/* NOTE(review): rule header not visible in this listing; this passes the left
 * operand to g_free. */
1796 x86_push_reg (s->code, tree->left->reg1);
1797 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, g_free);
1798 x86_call_code (s->code, 0);
1799 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* Two-argument cdecl procedure call (arguments pushed right-to-left). */
1802 stmt: PROC2 (reg, reg) {
1803 x86_push_reg (s->code, tree->right->reg1);
1804 x86_push_reg (s->code, tree->left->reg1);
1805 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->data.p);
1806 x86_call_code (s->code, 0);
1807 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
/* Three-argument cdecl procedure call. */
1810 stmt: PROC3 (reg, CPSRC (reg, reg)) {
1811 x86_push_reg (s->code, tree->right->right->reg1);
1812 x86_push_reg (s->code, tree->right->left->reg1);
1813 x86_push_reg (s->code, tree->left->reg1);
1814 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->data.p);
1815 x86_call_code (s->code, 0);
1816 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
/* NOTE(review): rule header not visible (one-argument call returning a value);
 * caller-saved registers are preserved and the result is taken from EAX. */
1820 if (tree->reg1 != X86_EAX)
1821 x86_push_reg (s->code, X86_EAX);
1822 x86_push_reg (s->code, X86_ECX);
1823 x86_push_reg (s->code, X86_EDX);
1825 x86_push_reg (s->code, tree->left->reg1);
1827 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->data.p);
1828 x86_call_code (s->code, 0);
1829 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer));
1831 x86_pop_reg (s->code, X86_EDX);
1832 x86_pop_reg (s->code, X86_ECX);
1833 if (tree->reg1 != X86_EAX) {
1834 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1835 x86_pop_reg (s->code, X86_EAX);
/* localloc with a constant size: reserve an aligned block on the stack and
 * zero it with REP STOS, preserving any live EDI/EAX/ECX. */
1839 reg: LOCALLOC (CONST_I4) {
/* Round the request up to the frame alignment. */
1843 size = (tree->left->data.i + (MONO_FRAME_ALIGNMENT - 1)) & ~(MONO_FRAME_ALIGNMENT - 1); // align to MONO_FRAME_ALIGNMENT boundary
1847 mono_emit_stack_alloc_const (s, tree, size);
1849 if (tree->reg1 != X86_EDI && tree->left->reg1 != X86_EDI) {
1850 x86_push_reg (s->code, X86_EDI);
1853 if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX) {
1854 x86_push_reg (s->code, X86_EAX);
1857 if (tree->reg1 != X86_ECX && tree->left->reg1 != X86_ECX) {
1858 x86_push_reg (s->code, X86_ECX);
/* ECX = dword count, EAX = 0 (SUB reg,reg), EDI = start of the new block. */
1862 x86_mov_reg_imm (s->code, X86_ECX, size >> 2);
1863 x86_alu_reg_reg (s->code, X86_SUB, X86_EAX, X86_EAX);
/* NOTE(review): the computation of 'offset' (skipping the registers pushed
 * above) is not visible in this listing. */
1865 x86_lea_membase (s->code, X86_EDI, X86_ESP, offset);
1867 x86_prefix (s->code, X86_REP_PREFIX);
1868 x86_stosd (s->code);
1870 if (tree->reg1 != X86_ECX && tree->left->reg1 != X86_ECX)
1871 x86_pop_reg (s->code, X86_ECX);
1872 if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX)
1873 x86_pop_reg (s->code, X86_EAX);
1874 if (tree->reg1 != X86_EDI && tree->left->reg1 != X86_EDI)
1875 x86_pop_reg (s->code, X86_EDI);
/* The result is the new top of stack. */
1878 x86_mov_reg_reg (s->code, tree->reg1, X86_ESP, 4);
/* localloc with a runtime size: align the size, grow the stack, then zero the
 * new block with REP STOS while preserving any live EAX/ECX/EDI.
 * FIX: the dword count must be *copied* from the size register into ECX with
 * x86_mov_reg_reg; the original used x86_mov_reg_imm, which loads the register
 * *number* (a small constant) into ECX instead of the register's value. */
1883 reg: LOCALLOC (reg) {
1885 /* size must be aligned to MONO_FRAME_ALIGNMENT bytes */
1886 x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, MONO_FRAME_ALIGNMENT - 1);
1887 x86_alu_reg_imm (s->code, X86_AND, tree->left->reg1, ~(MONO_FRAME_ALIGNMENT - 1));
1889 /* allocate space on stack */
1890 mono_emit_stack_alloc (s, tree);
1893 /* initialize with zero */
1894 if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX) {
1895 x86_push_reg (s->code, X86_EAX);
1898 if (tree->reg1 != X86_ECX && tree->left->reg1 != X86_ECX) {
1899 x86_push_reg (s->code, X86_ECX);
1902 if (tree->reg1 != X86_EDI && tree->left->reg1 != X86_EDI) {
1903 x86_push_reg (s->code, X86_EDI);
/* Convert the byte size to a dword count and place it in ECX for REP STOS. */
1907 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, 2);
1908 if (tree->left->reg1 != X86_ECX)
1909 x86_mov_reg_reg (s->code, X86_ECX, tree->left->reg1, 4);
1910 x86_alu_reg_reg (s->code, X86_XOR, X86_EAX, X86_EAX);
/* NOTE(review): the computation of 'offset' (skipping the registers pushed
 * above) is not visible in this listing. */
1912 x86_lea_membase (s->code, X86_EDI, X86_ESP, offset);
1914 x86_prefix (s->code, X86_REP_PREFIX);
1915 x86_stosl (s->code);
1917 if (tree->reg1 != X86_EDI && tree->left->reg1 != X86_EDI)
1918 x86_pop_reg (s->code, X86_EDI);
1919 if (tree->reg1 != X86_ECX && tree->left->reg1 != X86_ECX)
1920 x86_pop_reg (s->code, X86_ECX);
1921 if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX)
1922 x86_pop_reg (s->code, X86_EAX);
/* The result is the new top of stack. */
1925 x86_mov_reg_reg (s->code, tree->reg1, X86_ESP, 4);
/* NOTE(review): the UNBOX rule header is not visible in this listing.
 * Verifies the boxed object's element class matches the expected class, then
 * yields a pointer to the value data just past the MonoObject header. */
1929 if (tree->reg1 != tree->left->reg1)
1930 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Save the object pointer; the register is reused for the class walk. */
1932 x86_push_reg (s->code, tree->reg1);
/* reg = obj->vtable, then reg = vtable->klass (both at offset 0). */
1933 x86_mov_reg_membase (s->code, tree->reg1, tree->reg1, 0, 4);
1934 x86_mov_reg_membase (s->code, tree->reg1, tree->reg1, 0, 4);
1935 x86_alu_membase_imm (s->code, X86_CMP, tree->reg1,
1936 G_STRUCT_OFFSET (MonoClass, element_class), ((int)(tree->data.klass->element_class)));
1937 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "InvalidCastException");
/* Restore the object pointer and step over the object header. */
1938 x86_pop_reg (s->code, tree->reg1);
1939 x86_alu_reg_imm (s->code, X86_ADD, tree->reg1, sizeof (MonoObject));
1942 reg: CASTCLASS (reg) {
1943 MonoClass *klass = tree->data.klass;
1945 int lreg = tree->left->reg1;
1947 x86_push_reg (s->code, lreg);
1948 x86_test_reg_reg (s->code, lreg, lreg);
1949 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
1951 if (klass->flags & TYPE_ATTRIBUTE_INTERFACE) {
1952 /* lreg = obj->vtable */
1953 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
1955 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoVTable, max_interface_id),
1956 klass->interface_id);
1957 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GE, FALSE, "InvalidCastException");
1958 /* lreg = obj->vtable->interface_offsets */
1959 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
1960 x86_alu_membase_imm (s->code, X86_CMP, lreg, klass->interface_id << 2, 0);
1961 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NE, FALSE, "InvalidCastException");
1964 /* lreg = obj->vtable */
1965 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
1966 /* lreg = obj->vtable->klass */
1967 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
1971 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoClass, rank), klass->rank);
1972 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "InvalidCastException");
1973 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, cast_class), 4);
1974 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
1975 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->cast_class->baseval);
1976 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->cast_class->diffval);
1977 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, FALSE, "InvalidCastException");
1981 if (klass->marshalbyref) {
1982 /* check for transparent_proxy */
1983 x86_alu_reg_imm (s->code, X86_CMP, lreg, (int)mono_defaults.transparent_proxy_class);
1984 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
1987 x86_mov_reg_membase (s->code, lreg, X86_ESP, 0, 4);
1988 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoTransparentProxy,
1991 x86_patch (br [1], s->code);
1994 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
1995 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->baseval);
1996 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->diffval);
1997 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, FALSE, "InvalidCastException");
2001 x86_patch (br [0], s->code);
2002 x86_pop_reg (s->code, tree->reg1);
2006 MonoClass *klass = tree->data.klass;
2008 int lreg = tree->left->reg1;
2010 x86_push_reg (s->code, lreg);
2011 x86_test_reg_reg (s->code, lreg, lreg);
2012 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
2014 if (klass->flags & TYPE_ATTRIBUTE_INTERFACE) {
2015 /* lreg = obj->vtable */
2016 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2018 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoVTable, max_interface_id),
2019 klass->interface_id);
2020 br [1] = s->code; x86_branch8 (s->code, X86_CC_LT, 0, FALSE);
2021 /* lreg = obj->vtable->interface_offsets */
2022 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
2023 x86_alu_membase_imm (s->code, X86_CMP, lreg, klass->interface_id << 2, 0);
2024 br [2] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
2025 x86_patch (br [1], s->code);
2026 x86_mov_membase_imm (s->code, X86_ESP, 0, 0, 4);
2027 x86_patch (br [2], s->code);
2031 /* lreg = obj->vtable */
2032 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2033 /* lreg = obj->vtable->klass */
2034 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2038 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoClass, rank), klass->rank);
2039 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
2040 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, cast_class), 4);
2041 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
2042 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->cast_class->baseval);
2043 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->cast_class->diffval);
2044 br [2] = s->code; x86_branch8 (s->code, X86_CC_LE, 0, FALSE);
2045 x86_patch (br [1], s->code);
2046 x86_mov_membase_imm (s->code, X86_ESP, 0, 0, 4);
2047 x86_patch (br [2], s->code);
2051 if (klass->marshalbyref) {
2052 /* check for transparent_proxy */
2053 x86_alu_reg_imm (s->code, X86_CMP, lreg, (int)mono_defaults.transparent_proxy_class);
2054 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
2057 x86_mov_reg_membase (s->code, lreg, X86_ESP, 0, 4);
2058 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoTransparentProxy,
2060 x86_patch (br [1], s->code);
2063 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
2064 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->baseval);
2065 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->diffval);
2066 br [2] = s->code; x86_branch8 (s->code, X86_CC_LE, 0, FALSE);
2067 x86_mov_membase_imm (s->code, X86_ESP, 0, 0, 4);
2068 x86_patch (br [2], s->code);
2072 x86_patch (br [0], s->code);
2073 x86_pop_reg (s->code, tree->reg1);
/* Zero-initialize a valuetype of tree->data.i bytes at the address in the left
 * operand; nothing to emit for size 0. */
2076 stmt: INITOBJ (reg) {
2079 if (!(i = tree->data.i))
/* Sizes 1/2/4 are a single immediate-zero store. */
2082 if (i == 1 || i == 2 || i == 4) {
2083 x86_mov_membase_imm (s->code, tree->left->reg1, 0, 0, i);
/* Larger sizes: i whole dwords via REP STOS plus j trailing bytes. */
2087 i = tree->data.i / 4;
2088 j = tree->data.i % 4;
2090 if (tree->left->reg1 != X86_EDI) {
2091 x86_push_reg (s->code, X86_EDI);
2092 x86_mov_reg_reg (s->code, X86_EDI, tree->left->reg1, 4);
2096 x86_alu_reg_reg (s->code, X86_XOR, X86_EAX, X86_EAX);
2097 x86_mov_reg_imm (s->code, X86_ECX, i);
2099 x86_prefix (s->code, X86_REP_PREFIX);
2100 x86_stosl (s->code);
2102 for (i = 0; i < j; i++)
2103 x86_stosb (s->code);
/* NOTE(review): the surrounding branch labels are not visible in this listing;
 * the two stores below clear a 3-byte tail (2 bytes + 1 byte). */
2107 x86_mov_membase_imm (s->code, X86_EDI, 0, 0, 2);
2108 x86_mov_membase_imm (s->code, X86_EDI, 2, 0, 1);
2113 if (tree->left->reg1 != X86_EDI)
2114 x86_pop_reg (s->code, X86_EDI);
2117 stmt: CPBLK (reg, CPSRC (reg, CONST_I4)) {
2118 int dest_reg = tree->left->reg1;
2119 int source_reg = tree->right->left->reg1;
2120 int count = tree->right->right->data.i;
2121 int sreg = dest_reg != X86_EAX ? X86_EAX : X86_EDX;
2122 int spill_pos = 0, dest_offset = 0, source_offset = 0;
2123 int save_esi = FALSE, save_edi = FALSE;
2125 // TODO: handle unaligned. prefix
2131 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 1);
2132 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 1);
2135 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 2);
2136 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 2);
2139 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 2);
2140 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 2);
2141 x86_mov_reg_membase (s->code, sreg, source_reg, 2, 1);
2142 x86_mov_membase_reg (s->code, dest_reg, 2, sreg, 1);
2145 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 4);
2146 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 4);
2149 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 4);
2150 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 4);
2151 x86_mov_reg_membase (s->code, sreg, source_reg, 4, 1);
2152 x86_mov_membase_reg (s->code, dest_reg, 4, sreg, 1);
2155 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 4);
2156 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 4);
2157 x86_mov_reg_membase (s->code, sreg, source_reg, 4, 2);
2158 x86_mov_membase_reg (s->code, dest_reg, 4, sreg, 2);
2161 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 4);
2162 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 4);
2163 x86_mov_reg_membase (s->code, sreg, source_reg, 4, 2);
2164 x86_mov_membase_reg (s->code, dest_reg, 4, sreg, 2);
2165 x86_mov_reg_membase (s->code, sreg, source_reg, 6, 1);
2166 x86_mov_membase_reg (s->code, dest_reg, 6, sreg, 1);
2169 x86_fild_membase (s->code, source_reg, 0, TRUE);
2170 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2173 x86_fild_membase (s->code, source_reg, 0, TRUE);
2174 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2175 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 1);
2176 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 1);
2179 x86_fild_membase (s->code, source_reg, 0, TRUE);
2180 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2181 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 2);
2182 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 2);
2185 x86_fild_membase (s->code, source_reg, 0, TRUE);
2186 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2187 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 2);
2188 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 2);
2189 x86_mov_reg_membase (s->code, sreg, source_reg, 10, 1);
2190 x86_mov_membase_reg (s->code, dest_reg, 10, sreg, 1);
2193 x86_fild_membase (s->code, source_reg, 0, TRUE);
2194 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2195 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 4);
2196 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 4);
2199 x86_fild_membase (s->code, source_reg, 0, TRUE);
2200 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2201 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 4);
2202 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 4);
2203 x86_mov_reg_membase (s->code, sreg, source_reg, 12, 1);
2204 x86_mov_membase_reg (s->code, dest_reg, 12, sreg, 1);
2207 x86_fild_membase (s->code, source_reg, 0, TRUE);
2208 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2209 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 4);
2210 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 4);
2211 x86_mov_reg_membase (s->code, sreg, source_reg, 12, 2);
2212 x86_mov_membase_reg (s->code, dest_reg, 12, sreg, 2);
2215 x86_fild_membase (s->code, source_reg, 0, TRUE);
2216 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2217 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 4);
2218 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 4);
2219 x86_mov_reg_membase (s->code, sreg, source_reg, 12, 2);
2220 x86_mov_membase_reg (s->code, dest_reg, 12, sreg, 2);
2221 x86_mov_reg_membase (s->code, sreg, source_reg, 14, 1);
2222 x86_mov_membase_reg (s->code, dest_reg, 14, sreg, 1);
2225 g_assert (count > 15);
2227 if (dest_reg != X86_ESI && source_reg != X86_ESI &&
2228 mono_regset_reg_used (s->rs, X86_ESI))
2230 if (dest_reg != X86_EDI && source_reg != X86_EDI &&
2231 mono_regset_reg_used (s->rs, X86_EDI))
2235 x86_push_reg (s->code, X86_ESI);
2237 x86_push_reg (s->code, X86_EDI);
2239 if (dest_reg == X86_ESI) {
2240 dest_offset = ++spill_pos;
2242 if (source_reg == X86_EDI) {
2243 source_offset = ++spill_pos;
2247 x86_push_reg (s->code, source_reg);
2249 x86_push_reg (s->code, dest_reg);
2251 if (source_reg != X86_ESI) {
2253 x86_mov_reg_membase (s->code, X86_ESI, X86_ESP, (source_offset-1)<<2, 4);
2255 x86_mov_reg_reg (s->code, X86_ESI, source_reg, 4);
2257 if (dest_reg != X86_EDI) {
2259 x86_mov_reg_membase (s->code, X86_EDI, X86_ESP, (dest_offset-1)<<2, 4);
2261 x86_mov_reg_reg (s->code, X86_EDI, dest_reg, 4);
2264 x86_mov_reg_imm (s->code, X86_ECX, count >> 2);
2266 x86_prefix (s->code, X86_REP_PREFIX);
2267 x86_movsd (s->code);
2269 switch (count & 3) {
2271 x86_mov_reg_membase (s->code, sreg, X86_ESI, 0, 1);
2272 x86_mov_membase_reg (s->code, X86_EDI, 0, sreg, 1);
2275 x86_mov_reg_membase (s->code, sreg, X86_ESI, 0, 2);
2276 x86_mov_membase_reg (s->code, X86_EDI, 0, sreg, 2);
2279 x86_mov_reg_membase (s->code, sreg, X86_ESI, 0, 2);
2280 x86_mov_membase_reg (s->code, X86_EDI, 0, sreg, 2);
2281 x86_mov_reg_membase (s->code, sreg, X86_ESI, 2, 1);
2282 x86_mov_membase_reg (s->code, X86_EDI, 2, sreg, 1);
2288 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, spill_pos<<2);
2291 x86_pop_reg (s->code, X86_EDI);
2293 x86_pop_reg (s->code, X86_ESI);
2298 MBCOND (mono_inline_memcpy);
2302 stmt: CPBLK (reg, CPSRC (reg, reg)) {
2303 int dest_reg = tree->left->reg1;
2304 int source_reg = tree->right->left->reg1;
2305 int size_reg = tree->right->right->reg1;
2306 int spill_pos = 0, size_offset = 0, dest_offset = 0, source_offset = 0;
2307 int save_esi = FALSE, save_edi = FALSE;
2309 if (!mono_inline_memcpy) {
2310 x86_push_reg (s->code, size_reg);
2311 x86_push_reg (s->code, source_reg);
2312 x86_push_reg (s->code, dest_reg);
2313 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, memmove);
2314 x86_call_code (s->code, 0);
2315 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
2317 if (dest_reg != X86_ESI && source_reg != X86_ESI && size_reg != X86_ESI &&
2318 mono_regset_reg_used (s->rs, X86_ESI))
2320 if (dest_reg != X86_EDI && source_reg != X86_EDI && size_reg != X86_EDI &&
2321 mono_regset_reg_used (s->rs, X86_EDI))
2325 x86_push_reg (s->code, X86_ESI);
2327 x86_push_reg (s->code, X86_EDI);
2329 if (size_reg == X86_EDI || size_reg == X86_ESI) {
2330 size_offset = ++spill_pos;
2332 if (dest_reg == X86_ECX || dest_reg == X86_ESI) {
2333 dest_offset = ++spill_pos;
2335 if (source_reg == X86_ECX || source_reg == X86_EDI) {
2336 source_offset = ++spill_pos;
2340 x86_push_reg (s->code, source_reg);
2342 x86_push_reg (s->code, dest_reg);
2344 x86_push_reg (s->code, size_reg);
2346 if (source_reg != X86_ESI) {
2348 x86_mov_reg_membase (s->code, X86_ESI, X86_ESP, (source_offset-1)<<2, 4);
2350 x86_mov_reg_reg (s->code, X86_ESI, source_reg, 4);
2352 if (dest_reg != X86_EDI) {
2354 x86_mov_reg_membase (s->code, X86_EDI, X86_ESP, (dest_offset-1)<<2, 4);
2356 x86_mov_reg_reg (s->code, X86_EDI, dest_reg, 4);
2358 if (size_reg != X86_ECX) {
2360 x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, (size_offset-1)<<2, 4);
2362 x86_mov_reg_reg (s->code, X86_ECX, size_reg, 4);
2365 x86_push_reg (s->code, X86_ECX);
2366 x86_shift_reg_imm (s->code, X86_SHR, X86_ECX, 2);
2370 // move whole dwords first
2371 x86_prefix (s->code, X86_REP_PREFIX);
2372 x86_movsd (s->code);
2374 x86_pop_reg (s->code, X86_ECX);
2375 x86_alu_reg_imm (s->code, X86_AND, X86_ECX, 3);
2377 // move remaining bytes (if any)
2378 x86_prefix (s->code, X86_REP_PREFIX);
2379 x86_movsb (s->code);
2381 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, spill_pos<<2);
2384 x86_pop_reg (s->code, X86_EDI);
2386 x86_pop_reg (s->code, X86_ESI);
2390 stmt: INITBLK (reg, CPSRC (reg, CONST_I4)) {
2391 int dest_reg = tree->left->reg1;
2392 int value_reg = tree->right->left->reg1;
2393 int size = tree->right->right->data.i;
2394 int spill_pos = 0, dest_offset = 0, value_offset = 0;
2395 int save_edi = FALSE;
2401 if (mono_inline_memcpy) {
2403 if (dest_reg != X86_EDI && value_reg != X86_EDI &&
2404 mono_regset_reg_used (s->rs, X86_EDI)) {
2406 x86_push_reg (s->code, X86_EDI);
2409 if (dest_reg == X86_ECX || dest_reg == X86_EAX) {
2410 dest_offset = ++spill_pos;
2412 if (value_reg == X86_ECX || value_reg == X86_EDI) {
2413 value_offset = ++spill_pos;
2417 x86_push_reg (s->code, value_reg);
2419 x86_push_reg (s->code, dest_reg);
2421 if (value_reg != X86_EAX) {
2423 x86_mov_reg_membase (s->code, X86_EAX, X86_ESP, (value_offset-1)<<2, 4);
2425 x86_mov_reg_reg (s->code, X86_EAX, value_reg, 4);
2427 if (dest_reg != X86_EDI) {
2429 x86_mov_reg_membase (s->code, X86_EDI, X86_ESP, (dest_offset-1)<<2, 4);
2431 x86_mov_reg_reg (s->code, X86_EDI, dest_reg, 4);
2434 x86_widen_reg (s->code, X86_EAX, X86_EAX, FALSE, FALSE);
2435 x86_mov_reg_reg (s->code, X86_EDX, X86_EAX, 4);
2436 x86_shift_reg_imm (s->code, X86_SHL, X86_EAX, 8);
2437 x86_alu_reg_reg (s->code, X86_OR, X86_EAX, X86_EDX);
2438 x86_mov_reg_reg (s->code, X86_EDX, X86_EAX, 4);
2439 x86_shift_reg_imm (s->code, X86_SHL, X86_EAX, 16);
2440 x86_alu_reg_reg (s->code, X86_OR, X86_EAX, X86_EDX);
2443 x86_mov_reg_imm (s->code, X86_ECX, i);
2445 x86_prefix (s->code, X86_REP_PREFIX);
2446 x86_stosd (s->code);
2449 for (i = 0; i < j; i++)
2450 x86_stosb (s->code);
2452 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, spill_pos<<2);
2455 x86_pop_reg (s->code, X86_EDI);
2458 x86_push_imm (s->code, size);
2459 x86_push_reg (s->code, value_reg);
2460 x86_push_reg (s->code, dest_reg);
2461 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, memset);
2462 x86_call_code (s->code, 0);
2463 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
2466 MBCOND (mono_inline_memcpy);
/* Initialize a memory block with a runtime size: INITBLK(dest, CPSRC(value, size)).
 * Inline path broadcasts the byte value across EAX and uses rep stosd + rep stosb;
 * otherwise falls back to calling memset (dest, value, size).
 * Mirrors the CPBLK(reg,reg) rule's save/spill discipline for ESI-free stos loops. */
2470 stmt: INITBLK (reg, CPSRC (reg, reg)) {
2471 int dest_reg = tree->left->reg1;
2472 int value_reg = tree->right->left->reg1;
2473 int size_reg = tree->right->right->reg1;
2474 int spill_pos = 0, size_offset = 0, dest_offset = 0, value_offset = 0;
2475 int save_edi = FALSE;
2477 if (mono_inline_memcpy) {
/* Save EDI if it is live and not one of the three operands.
 * BUGFIX: the original tested size_reg against EDI twice and never tested
 * value_reg, so EDI was not saved when only value_reg happened to be EDI
 * (compare the CPBLK(reg,reg) rule, which tests all three operands). */
2479 if (dest_reg != X86_EDI && value_reg != X86_EDI && size_reg != X86_EDI &&
2480 mono_regset_reg_used (s->rs, X86_EDI)) {
2482 x86_push_reg (s->code, X86_EDI);
/* Spill operands that the EAX/EDI/ECX fixups below would clobber; offsets are
 * 1-based slot numbers into the pushes that follow. */
2485 if (size_reg == X86_EDI || size_reg == X86_EAX) {
2486 size_offset = ++spill_pos;
2488 if (dest_reg == X86_ECX || dest_reg == X86_EAX) {
2489 dest_offset = ++spill_pos;
2491 if (value_reg == X86_ECX || value_reg == X86_EDI) {
2492 value_offset = ++spill_pos;
2496 x86_push_reg (s->code, value_reg);
2498 x86_push_reg (s->code, dest_reg);
2500 x86_push_reg (s->code, size_reg);
/* Load the string-op registers: EAX=value, EDI=dest, ECX=size. */
2502 if (value_reg != X86_EAX) {
2504 x86_mov_reg_membase (s->code, X86_EAX, X86_ESP, (value_offset-1)<<2, 4);
2506 x86_mov_reg_reg (s->code, X86_EAX, value_reg, 4);
2508 if (dest_reg != X86_EDI) {
2510 x86_mov_reg_membase (s->code, X86_EDI, X86_ESP, (dest_offset-1)<<2, 4);
2512 x86_mov_reg_reg (s->code, X86_EDI, dest_reg, 4);
2514 if (size_reg != X86_ECX) {
2516 x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, (size_offset-1)<<2, 4);
2518 x86_mov_reg_reg (s->code, X86_ECX, size_reg, 4);
/* Broadcast the low byte of EAX to all four bytes: b -> bb -> bbbb. */
2521 x86_widen_reg (s->code, X86_EAX, X86_EAX, FALSE, FALSE);
2522 x86_mov_reg_reg (s->code, X86_EDX, X86_EAX, 4);
2523 x86_shift_reg_imm (s->code, X86_SHL, X86_EAX, 8);
2524 x86_alu_reg_reg (s->code, X86_OR, X86_EAX, X86_EDX);
2525 x86_mov_reg_reg (s->code, X86_EDX, X86_EAX, 4);
2526 x86_shift_reg_imm (s->code, X86_SHL, X86_EAX, 16);
2527 x86_alu_reg_reg (s->code, X86_OR, X86_EAX, X86_EDX);
/* Keep the byte count; store size/4 dwords, then the remaining size&3 bytes. */
2529 x86_push_reg (s->code, X86_ECX);
2530 x86_shift_reg_imm (s->code, X86_SHR, X86_ECX, 2);
2533 // init whole dwords first
2534 x86_prefix (s->code, X86_REP_PREFIX);
2535 x86_stosd (s->code);
2537 x86_pop_reg (s->code, X86_ECX);
2538 x86_alu_reg_imm (s->code, X86_AND, X86_ECX, 3);
2540 // init remaining bytes (if any)
2541 x86_prefix (s->code, X86_REP_PREFIX);
2542 x86_stosb (s->code);
2544 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, spill_pos<<2);
2547 x86_pop_reg (s->code, X86_EDI);
/* Fallback: cdecl call memset (dest, value, size); caller pops 12 bytes of args. */
2550 x86_push_reg (s->code, size_reg);
2551 x86_push_reg (s->code, value_reg);
2552 x86_push_reg (s->code, dest_reg);
2553 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, memset);
2554 x86_call_code (s->code, 0);
2555 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
2564 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2565 x86_jump32 (s->code, 0);
2568 cflags: COMPARE (reg, LDIND_I4 (ADDR_L)) {
2569 int treg = VARINFO (s, tree->right->left->data.i).reg;
2570 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, treg);
2572 MBCOND ((VARINFO (data, tree->right->left->data.i).reg >= 0));
2576 cflags: COMPARE (LDIND_I4 (ADDR_L), CONST_I4) {
2577 int treg = VARINFO (s, tree->left->left->data.i).reg;
2578 x86_alu_reg_imm (s->code, X86_CMP, treg, tree->right->data.i);
2580 MBCOND ((VARINFO (data, tree->left->left->data.i).reg >= 0));
2584 cflags: COMPARE (LDIND_I4 (ADDR_L), reg) {
2585 int treg = VARINFO (s, tree->left->left->data.i).reg;
2586 x86_alu_reg_reg (s->code, X86_CMP, treg, tree->right->reg1);
2588 MBCOND ((VARINFO (data, tree->left->left->data.i).reg >= 0));
2592 cflags: COMPARE (LDIND_I4 (ADDR_L), CONST_I4) {
2593 int offset = VARINFO (s, tree->left->left->data.i).offset;
2594 x86_alu_membase_imm (s->code, X86_CMP, X86_EBP, offset, tree->right->data.i);
2596 MBCOND ((VARINFO (data, tree->left->left->data.i).reg < 0));
2600 cflags: COMPARE (reg, CONST_I4) {
2601 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
2604 cflags: COMPARE (reg, reg) {
2605 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
2609 stmt: CBRANCH (cflags) {
2610 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
2612 switch (tree->data.bi.cond) {
2614 x86_branch32 (s->code, X86_CC_LT, 0, TRUE);
2617 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
2620 x86_branch32 (s->code, X86_CC_GT, 0, TRUE);
2623 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
2626 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
2629 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
2632 x86_branch32 (s->code, X86_CC_GE, 0, TRUE);
2635 x86_branch32 (s->code, X86_CC_GE, 0, FALSE);
2638 x86_branch32 (s->code, X86_CC_LE, 0, TRUE);
2641 x86_branch32 (s->code, X86_CC_LE, 0, FALSE);
2644 g_assert_not_reached ();
2648 stmt: BRTRUE (LDIND_I4 (ADDR_L)) {
2649 int treg = VARINFO (s, tree->left->left->data.i).reg;
2650 int offset = VARINFO (s, tree->left->left->data.i).offset;
2653 x86_test_reg_reg (s->code, treg, treg);
2655 x86_alu_membase_imm (s->code, X86_CMP, X86_EBP, offset, 0);
2657 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2658 x86_branch32 (s->code, X86_CC_NE, 0, TRUE);
2661 stmt: BRTRUE (reg) {
2662 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
2663 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2664 x86_branch32 (s->code, X86_CC_NE, 0, TRUE);
2667 stmt: BRFALSE (LDIND_I4 (ADDR_L)) {
2668 int treg = VARINFO (s, tree->left->left->data.i).reg;
2669 int offset = VARINFO (s, tree->left->left->data.i).offset;
2672 x86_test_reg_reg (s->code, treg, treg);
2674 x86_alu_membase_imm (s->code, X86_CMP, X86_EBP, offset, 0);
2676 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2677 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
2679 //{static int cx= 0; printf ("CX1 %5d\n", cx++);}
2682 stmt: BRFALSE (reg) {
2683 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
2684 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2685 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
2689 x86_breakpoint (s->code);
2693 if (tree->left->reg1 != X86_EAX)
2694 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
2696 if (!tree->last_instr) {
2697 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
2698 x86_jump32 (s->code, 0);
2703 if (!tree->last_instr) {
2704 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
2705 x86_jump32 (s->code, 0);
2709 stmt: ARG_I4 (LDIND_I4 (addr)) {
2710 MBTree *at = tree->left->left;
2711 int pad = tree->data.arg_info.pad;
2715 switch (at->data.ainfo.amode) {
2718 x86_push_mem (s->code, at->data.ainfo.offset);
2722 x86_push_membase (s->code, at->data.ainfo.basereg, at->data.ainfo.offset);
2725 x86_push_memindex (s->code, X86_NOBASEREG, at->data.ainfo.offset,
2726 at->data.ainfo.indexreg, at->data.ainfo.shift);
2729 x86_push_memindex (s->code, at->data.ainfo.basereg,
2730 at->data.ainfo.offset, at->data.ainfo.indexreg,
2731 at->data.ainfo.shift);
2736 stmt: ARG_I4 (LDIND_I4 (ADDR_L)) {
2737 int treg = VARINFO (s, tree->left->left->data.i).reg;
2738 int pad = tree->data.arg_info.pad;
2741 x86_push_reg (s->code, treg);
2743 MBCOND ((VARINFO (data, tree->left->left->data.i).reg >= 0));
2747 stmt: ARG_I4 (reg) {
2748 int pad = tree->data.arg_info.pad;
2751 x86_push_reg (s->code, tree->left->reg1);
2754 stmt: ARG_I4 (ADDR_G) {
2755 int pad = tree->data.arg_info.pad;
2758 x86_push_imm (s->code, tree->left->data.p);
2761 stmt: ARG_I4 (CONST_I4) "MB_USE_OPT1(0)" {
2762 int pad = tree->data.arg_info.pad;
2765 x86_push_imm (s->code, tree->left->data.i);
2769 PRINT_REG ("THIS", tree->reg1);
2772 reg: CHECKTHIS (reg) {
2773 /* try to access the vtable - this will raise an exception
2774 * if the object is NULL */
2775 x86_alu_membase_imm (s->code, X86_CMP, tree->left->reg1, 0, 0);
2776 if (tree->reg1 != tree->left->reg1)
2777 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
2780 stmt: CHECKTHIS (reg) {
2781 x86_alu_membase_imm (s->code, X86_CMP, tree->left->reg1, 0, 0);
2788 /* restore callee saved registers */
2789 if (mono_regset_reg_used (s->rs, X86_EBX)) {
2790 x86_mov_reg_membase (s->code, X86_EBX, X86_EBP, pos, 4);
2793 if (mono_regset_reg_used (s->rs, X86_EDI)) {
2794 x86_mov_reg_membase (s->code, X86_EDI, X86_EBP, pos, 4);
2797 if (mono_regset_reg_used (s->rs, X86_ESI)) {
2798 x86_mov_reg_membase (s->code, X86_ESI, X86_EBP, pos, 4);
2801 /* restore ESP/EBP */
2802 x86_leave (s->code);
2804 /* jump to the method */
2805 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->data.p);
2806 x86_jump32 (s->code, 0);
2811 reg: CALL_I4 (this, reg) {
2813 int lreg = tree->left->reg1;
2814 int rreg = tree->right->reg1;
2816 if (lreg == treg || rreg == treg)
2818 if (lreg == treg || rreg == treg)
2820 if (lreg == treg || rreg == treg)
2821 mono_assert_not_reached ();
2825 x86_call_reg (s->code, rreg);
2829 mono_assert (tree->reg1 == X86_EAX);
2832 reg: CALL_I4 (this, ADDR_G) {
2833 int lreg = tree->left->reg1;
2841 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
2842 x86_call_code (s->code, 0);
2846 mono_assert (tree->reg1 == X86_EAX);
2849 reg: LDVIRTFTN (reg, INTF_ADDR) {
2850 /* we cant return the value in the vtable, because it can be
2851 * a magic trampoline, and we cant pass that to the outside world */
2853 if (tree->reg1 != X86_EAX)
2854 x86_push_reg (s->code, X86_EAX);
2855 x86_push_reg (s->code, X86_ECX);
2856 x86_push_reg (s->code, X86_EDX);
2858 x86_push_imm (s->code, tree->right->data.m->klass->interface_id);
2859 x86_push_reg (s->code, tree->left->reg1);
2860 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldintftn);
2861 x86_call_code (s->code, 0);
2862 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
2864 x86_pop_reg (s->code, X86_EDX);
2865 x86_pop_reg (s->code, X86_ECX);
2866 if (tree->reg1 != X86_EAX) {
2867 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
2868 x86_pop_reg (s->code, X86_EAX);
2872 reg: CALL_I4 (this, INTF_ADDR) {
2873 int lreg = tree->left->reg1;
2881 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2882 x86_mov_reg_membase (s->code, lreg, lreg,
2883 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
2884 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
2885 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
2889 mono_assert (tree->reg1 == X86_EAX);
2892 reg: LDVIRTFTN (reg, VFUNC_ADDR) {
2893 /* we cant return the value in the vtable, because it can be
2894 * a magic trampoline, and we cant pass that to the outside world */
2896 if (tree->reg1 != X86_EAX)
2897 x86_push_reg (s->code, X86_EAX);
2898 x86_push_reg (s->code, X86_ECX);
2899 x86_push_reg (s->code, X86_EDX);
2901 x86_push_imm (s->code, tree->right->data.m->slot);
2902 x86_push_reg (s->code, tree->left->reg1);
2903 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldvirtftn);
2904 x86_call_code (s->code, 0);
2905 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
2907 x86_pop_reg (s->code, X86_EDX);
2908 x86_pop_reg (s->code, X86_ECX);
2909 if (tree->reg1 != X86_EAX) {
2910 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
2911 x86_pop_reg (s->code, X86_EAX);
2916 if (tree->reg1 != X86_EAX)
2917 x86_push_reg (s->code, X86_EAX);
2918 x86_push_reg (s->code, X86_ECX);
2919 x86_push_reg (s->code, X86_EDX);
2921 x86_push_imm (s->code, tree->data.m);
2922 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldftn);
2923 x86_call_code (s->code, 0);
2924 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer));
2926 x86_pop_reg (s->code, X86_EDX);
2927 x86_pop_reg (s->code, X86_ECX);
2928 if (tree->reg1 != X86_EAX) {
2929 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
2930 x86_pop_reg (s->code, X86_EAX);
2935 reg: CALL_I4 (this, VFUNC_ADDR) {
2936 int lreg = tree->left->reg1;
2944 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2945 x86_call_virtual (s->code, lreg,
2946 G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
2950 mono_assert (tree->reg1 == X86_EAX);
2953 stmt: CALL_VOID (this, ADDR_G) {
2954 int lreg = tree->left->reg1;
2962 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
2963 x86_call_code (s->code, 0);
2968 stmt: CALL_VOID (this, reg) {
2970 int lreg = tree->left->reg1;
2971 int rreg = tree->right->reg1;
2973 if (lreg == treg || rreg == treg)
2975 if (lreg == treg || rreg == treg)
2977 if (lreg == treg || rreg == treg)
2978 mono_assert_not_reached ();
2982 x86_call_reg (s->code, tree->right->reg1);
2987 stmt: CALL_VOID (this, INTF_ADDR) {
2988 int lreg = tree->left->reg1;
2996 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2997 x86_mov_reg_membase (s->code, lreg, lreg,
2998 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
2999 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
3000 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
3005 stmt: CALL_VOID (this, VFUNC_ADDR) {
3006 int lreg = tree->left->reg1;
3014 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3015 x86_call_virtual (s->code, lreg,
3016 G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
3021 stmt: SWITCH (reg) {
3023 guint32 *jt = (guint32 *)tree->data.p;
3025 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, jt [0]);
3026 offset = 6 + (guint32)s->code;
3027 x86_branch32 (s->code, X86_CC_GE, jt [jt [0] + 1] - offset, FALSE);
3029 x86_mov_reg_memindex (s->code, X86_EAX, X86_NOBASEREG,
3030 tree->data.i + 4, tree->left->reg1, 2, 4);
3031 x86_jump_reg (s->code, X86_EAX);
3038 reg: CONV_I1 (lreg) {
3039 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, FALSE);
3042 reg: CONV_U1 (lreg) {
3043 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
3046 reg: CONV_I2 (lreg) {
3047 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, TRUE);
3050 reg: CONV_U2 (lreg) {
3051 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
3054 reg: CONV_I4 (lreg) {
3055 if (tree->reg1 != tree->left->reg1)
3056 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3059 reg: CONV_U4 (lreg) {
3060 if (tree->reg1 != tree->left->reg1)
3061 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3065 reg: CONV_OVF_I4 (lreg){
3066 guint8 *start = s->code;
3067 guchar* o1, *o2, *o3, *o4, *o5;
3071 * Valid ints: 0xffffffff:8000000 to 00000000:0x7f000000
3073 for (i = 0; i < 2; i++) {
3076 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
3078 /* If the low word top bit is set, see if we are negative */
3079 x86_branch8 (s->code, X86_CC_LT, o3 - o1, TRUE);
3082 /* We are not negative (no top bit set, check for our top word to be zero */
3083 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
3084 x86_branch8 (s->code, X86_CC_EQ, o4 - o2, TRUE);
3087 /* throw exception */
3088 x86_push_imm (s->code, "OverflowException");
3089 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS,
3090 arch_get_throw_exception_by_name ());
3091 x86_call_code (s->code, 0);
3094 /* our top bit is set, check that top word is 0xfffffff */
3095 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg2, 0xffffffff);
3098 /* nope, emit exception */
3099 x86_branch8 (s->code, X86_CC_NE, o2 - o5, TRUE);
3102 if (tree->reg1 != tree->left->reg1)
3103 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3106 reg: CONV_OVF_I4 (lreg){
3107 guint8 *br [3], *label [1];
3110 * Valid ints: 0xffffffff:8000000 to 00000000:0x7f000000
3112 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
3114 /* If the low word top bit is set, see if we are negative */
3115 br [0] = s->code; x86_branch8 (s->code, X86_CC_LT, 0, TRUE);
3117 /* We are not negative (no top bit set, check for our top word to be zero */
3118 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
3119 br [1] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, TRUE);
3120 label [0] = s->code;
3122 /* throw exception */
3123 x86_push_imm (s->code, "OverflowException");
3124 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS,
3125 arch_get_throw_exception_by_name ());
3126 x86_call_code (s->code, 0);
3128 x86_patch (br [0], s->code);
3129 /* our top bit is set, check that top word is 0xfffffff */
3130 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg2, 0xffffffff);
3132 x86_patch (br [1], s->code);
3133 /* nope, emit exception */
3134 br [2] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3135 x86_patch (br [2], label [0]);
3137 if (tree->reg1 != tree->left->reg1)
3138 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3141 reg: CONV_OVF_U4 (lreg) {
3142 /* Keep in sync with CONV_OVF_I4_UN below, they are the same on 32-bit machines */
3143 /* top word must be 0 */
3144 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
3145 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException");
3146 if (tree->reg1 != tree->left->reg1)
3147 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3150 reg: CONV_OVF_I4_UN (lreg) {
3151 /* Keep in sync with CONV_OVF_U4 above, they are the same on 32-bit machines */
3152 /* top word must be 0 */
3153 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
3154 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException");
3155 if (tree->reg1 != tree->left->reg1)
3156 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3162 x86_mov_reg_imm (s->code, tree->reg1, *((gint32 *)&tree->data.p));
3163 x86_mov_reg_imm (s->code, tree->reg2, *((gint32 *)&tree->data.p + 1));
3166 lreg: CONV_I8 (CONST_I4) {
3167 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
3169 if (tree->left->data.i >= 0)
3170 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3172 x86_mov_reg_imm (s->code, tree->reg2, -1);
3175 lreg: CONV_I8 (reg) {
3178 if (tree->reg1 != tree->left->reg1)
3179 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3181 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3182 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 0);
3183 x86_branch8 (s->code, X86_CC_GE, 5, TRUE);
3185 x86_mov_reg_imm (s->code, tree->reg2, -1);
3186 mono_assert ((s->code - i1) == 5);
3189 lreg: CONV_U8 (CONST_I4) 1 {
3190 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
3191 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3194 lreg: CONV_U8 (reg) {
3195 if (tree->reg1 != tree->left->reg1)
3196 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3197 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3200 lreg: CONV_OVF_U8 (CONST_I4) {
3201 if (tree->left->data.i < 0){
3202 x86_push_imm (s->code, "OverflowException");
3203 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS,
3204 arch_get_throw_exception_by_name ());
3205 x86_call_code (s->code, 0);
3207 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
3208 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3212 lreg: CONV_OVF_I8_UN (CONST_I4) {
3213 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
3214 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
/* Checked i4 -> u8: a negative source overflows. Test the 32-bit sign bit;
 * if it is set, EMIT_COND_SYSTEM_EXCEPTION throws OverflowException.
 * Otherwise zero-extend into the register pair (reg2 = high word = 0).
 * BUGFIX: the mask was 0x8000000 (bit 27), which let negative values whose
 * bit 27 is clear slip through the overflow check; the sign bit of a 32-bit
 * value is 0x80000000. */
3217 lreg: CONV_OVF_U8 (reg) {
3218 x86_test_reg_imm (s->code, tree->left->reg1, 0x80000000);
3219 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException");
3221 if (tree->reg1 != tree->left->reg1)
3222 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3223 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3226 lreg: CONV_OVF_I8_UN (reg) {
3227 /* Convert uint value into int64, we pass everything */
3228 if (tree->reg1 != tree->left->reg1)
3229 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3230 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3233 stmt: STIND_I8 (addr, lreg) {
3235 switch (tree->left->data.ainfo.amode) {
3238 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 4);
3239 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset + 4, tree->right->reg2, 4);
3243 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
3244 tree->left->data.ainfo.offset, tree->right->reg1, 4);
3245 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
3246 tree->left->data.ainfo.offset + 4, tree->right->reg2, 4);
3249 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
3250 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
3251 tree->right->reg1, 4);
3252 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset + 4,
3253 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
3254 tree->right->reg2, 4);
3257 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
3258 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
3259 tree->right->reg1, 4);
3260 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset + 4,
3261 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
3262 tree->right->reg2, 4);
3267 stmt: REMOTE_STIND_I8 (reg, lreg) {
3271 x86_push_reg (s->code, tree->right->reg1);
3272 x86_mov_reg_membase (s->code, tree->right->reg1, tree->left->reg1, 0, 4);
3273 x86_alu_membase_imm (s->code, X86_CMP, tree->right->reg1, 0, ((int)mono_defaults.transparent_proxy_class));
3274 x86_pop_reg (s->code, tree->right->reg1);
3276 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3278 /* this is a transparent proxy - remote the call */
3280 /* save value to stack */
3281 x86_push_reg (s->code, tree->right->reg2);
3282 x86_push_reg (s->code, tree->right->reg1);
3284 x86_push_reg (s->code, X86_ESP);
3285 x86_push_imm (s->code, tree->data.fi.field);
3286 x86_push_imm (s->code, tree->data.fi.klass);
3287 x86_push_reg (s->code, tree->left->reg1);
3288 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
3289 x86_call_code (s->code, 0);
3290 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 24);
3292 br [1] = s->code; x86_jump8 (s->code, 0);
3294 x86_patch (br [0], s->code);
3295 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
3296 tree->data.fi.field->offset;
3297 x86_mov_membase_reg (s->code, tree->left->reg1, offset, tree->right->reg1, 4);
3298 x86_mov_membase_reg (s->code, tree->left->reg1, offset + 4, tree->right->reg2, 4);
3300 x86_patch (br [1], s->code);
3304 # an addr can use two address register (base and index register). The must take care
3305 # that we do not override them (thus the use of x86_lea)
3306 lreg: LDIND_I8 (addr) {
3308 switch (tree->left->data.ainfo.amode) {
3311 x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4);
3312 x86_mov_reg_mem (s->code, tree->reg2, tree->left->data.ainfo.offset + 4, 4);
3316 x86_lea_membase (s->code, tree->reg2, tree->left->data.ainfo.basereg,
3317 tree->left->data.ainfo.offset);
3318 x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4);
3319 x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4);
3322 x86_lea_memindex (s->code, tree->reg2, X86_NOBASEREG, tree->left->data.ainfo.offset,
3323 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift);
3324 x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4);
3325 x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4);
3328 x86_lea_memindex (s->code, tree->reg2, tree->left->data.ainfo.basereg,
3329 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
3330 tree->left->data.ainfo.shift);
3331 x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4);
3332 x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4);
3335 PRINT_REG ("LDIND_I8_0", tree->reg1);
3336 PRINT_REG ("LDIND_I8_1", tree->reg2);
3339 lreg: SHR (lreg, CONST_I4) {
3340 if (tree->right->data.i < 32) {
3341 x86_shrd_reg_imm (s->code, tree->left->reg1, tree->left->reg2, tree->right->data.i);
3342 x86_shift_reg_imm (s->code, X86_SAR, tree->left->reg2, tree->right->data.i);
3343 if (tree->reg1 != tree->left->reg1)
3344 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3345 if (tree->reg2 != tree->left->reg2)
3346 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
3347 } else if (tree->right->data.i < 64) {
3348 if (tree->reg1 != tree->left->reg2)
3349 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg2, 4);
3350 if (tree->reg2 != tree->left->reg2)
3351 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
3352 x86_shift_reg_imm (s->code, X86_SAR, tree->reg2, 31);
3353 x86_shift_reg_imm (s->code, X86_SAR, tree->reg1, (tree->right->data.i - 32));
3354 } /* else unspecified result */
3357 lreg: SHR_UN (lreg, CONST_I4) {
3358 if (tree->right->data.i < 32) {
3359 x86_shrd_reg_imm (s->code, tree->left->reg1, tree->left->reg2, tree->right->data.i);
3360 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg2, tree->right->data.i);
3361 if (tree->reg1 != tree->left->reg1)
3362 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3363 if (tree->reg2 != tree->left->reg2)
3364 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
3365 } else if (tree->right->data.i < 64) {
3366 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg2, 4);
3367 x86_shift_reg_imm (s->code, X86_SHR, tree->reg1, (tree->right->data.i - 32));
3368 x86_mov_reg_imm (s->code, tree->reg2, 0);
3369 } /* else unspecified result */
3372 lreg: SHR (lreg, reg) {
3375 if (tree->right->reg1 != X86_ECX)
3376 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
3378 x86_shrd_reg (s->code, tree->left->reg1, tree->left->reg2);
3379 x86_shift_reg (s->code, X86_SAR, tree->left->reg2);
3380 x86_test_reg_imm (s->code, X86_ECX, 32);
3381 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
3382 x86_mov_reg_reg (s->code, tree->left->reg1, tree->left->reg2, 4);
3383 x86_shift_reg_imm (s->code, X86_SAR, tree->reg2, 31);
3384 x86_patch (br [0], s->code);
3386 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3389 lreg: SHR_UN (lreg, reg) {
3392 if (tree->right->reg1 != X86_ECX)
3393 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
3395 x86_shrd_reg (s->code, tree->left->reg1, tree->left->reg2);
3396 x86_shift_reg (s->code, X86_SHR, tree->left->reg2);
3397 x86_test_reg_imm (s->code, X86_ECX, 32);
3398 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
3399 x86_mov_reg_reg (s->code, tree->left->reg1, tree->left->reg2, 4);
3400 x86_shift_reg_imm (s->code, X86_SHR, tree->reg2, 31);
3401 x86_patch (br [0], s->code);
3403 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3406 lreg: SHL (lreg, CONST_I4) {
3407 if (tree->right->data.i < 32) {
3408 x86_shld_reg_imm (s->code, tree->left->reg2, tree->left->reg1, tree->right->data.i);
3409 x86_shift_reg_imm (s->code, X86_SHL, tree->left->reg1, tree->right->data.i);
3410 if (tree->reg1 != tree->left->reg1)
3411 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3412 if (tree->reg2 != tree->left->reg2)
3413 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
3414 } else if (tree->right->data.i < 64) {
3415 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg1, 4);
3416 x86_shift_reg_imm (s->code, X86_SHL, tree->reg2, (tree->right->data.i - 32));
3417 x86_alu_reg_reg (s->code, X86_XOR, tree->reg1, tree->reg1);
3418 } /* else unspecified result */
/* 64-bit shift left with a variable count (count in CL).
 * SHLD shifts the high word filling from the low word, SHL shifts the low
 * word; since hardware masks the count to 5 bits, a count >= 32 (bit 5 of
 * ECX set) is fixed up: high = low, low = 0. */
3421 lreg: SHL (lreg, reg) {
3424 if (tree->right->reg1 != X86_ECX)
3425 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
3427 x86_shld_reg (s->code, tree->left->reg2, tree->left->reg1);
3428 x86_shift_reg (s->code, X86_SHL, tree->left->reg1);
3429 x86_test_reg_imm (s->code, X86_ECX, 32);
3430 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
3431 x86_mov_reg_reg (s->code, tree->left->reg2, tree->left->reg1, 4);
/* low word is zeroed for counts >= 32 */
3432 x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->left->reg1);
3433 x86_patch (br [0], s->code);
3435 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* 64-bit add/sub: low words with ADD/SUB, high words with the carry-
 * propagating ADC/SBB. The _OVF variants test the signed overflow flag
 * (CC_NO, signed=TRUE), the _OVF_UN variants the carry flag (CC_NC), and
 * throw OverflowException when the condition fails. */
3438 lreg: ADD (lreg, lreg) {
3439 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
3440 x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2);
3442 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3445 lreg: ADD_OVF (lreg, lreg) {
3446 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
3447 x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2);
3448 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
3450 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3453 lreg: ADD_OVF_UN (lreg, lreg) {
3454 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
3455 x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2);
3456 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
3458 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3461 lreg: SUB (lreg, lreg) {
3462 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
3463 x86_alu_reg_reg (s->code, X86_SBB, tree->left->reg2, tree->right->reg2);
3465 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3468 lreg: SUB_OVF (lreg, lreg) {
3469 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
3470 x86_alu_reg_reg (s->code, X86_SBB, tree->left->reg2, tree->right->reg2);
3471 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
3473 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3476 lreg: SUB_OVF_UN (lreg, lreg) {
3477 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
3478 x86_alu_reg_reg (s->code, X86_SBB, tree->left->reg2, tree->right->reg2);
3479 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
3481 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* 64-bit bitwise ops are simply applied word-wise; no carries involved. */
3484 lreg: AND (lreg, lreg) {
3485 x86_alu_reg_reg (s->code, X86_AND, tree->left->reg1, tree->right->reg1);
3486 x86_alu_reg_reg (s->code, X86_AND, tree->left->reg2, tree->right->reg2);
3488 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3491 lreg: OR (lreg, lreg) {
3492 x86_alu_reg_reg (s->code, X86_OR, tree->left->reg1, tree->right->reg1);
3493 x86_alu_reg_reg (s->code, X86_OR, tree->left->reg2, tree->right->reg2);
3495 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3498 lreg: XOR (lreg, lreg) {
3499 x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->right->reg1);
3500 x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg2, tree->right->reg2);
3502 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* (rule headers elided in this listing — by the emitted sequences these are
 * presumably the 64-bit NEG and NOT rules; confirm against upstream.)
 * Two's-complement negate of reg2:reg1: neg low, propagate borrow into the
 * high word with ADC 0, then neg high. */
3506 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3508 x86_neg_reg (s->code, tree->reg1);
3509 x86_alu_reg_imm (s->code, X86_ADC, tree->reg2, 0);
3510 x86_neg_reg (s->code, tree->reg2);
/* One's-complement NOT: invert both words independently. */
3514 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3516 x86_not_reg (s->code, tree->reg1);
3517 x86_not_reg (s->code, tree->reg2);
/* 64-bit multiply/divide/remainder are done through C helper calls
 * (mono_llmult & co., declared in the file header) using the cdecl
 * convention: both operands pushed high-word-first, caller pops the 16
 * bytes of arguments afterwards. The helpers return the 64-bit result in
 * EAX:EDX, which the register allocator is asserted to have assigned to
 * tree->reg1/reg2. ECX is caller-saved around the call only when the
 * regset says it is live. */
3520 lreg: MUL (lreg, lreg) {
3521 if (mono_regset_reg_used (s->rs, X86_ECX))
3522 x86_push_reg (s->code, X86_ECX);
3524 x86_push_reg (s->code, tree->right->reg2);
3525 x86_push_reg (s->code, tree->right->reg1);
3526 x86_push_reg (s->code, tree->left->reg2);
3527 x86_push_reg (s->code, tree->left->reg1);
3528 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llmult);
3529 x86_call_code (s->code, 0);
3530 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3532 if (mono_regset_reg_used (s->rs, X86_ECX))
3533 x86_pop_reg (s->code, X86_ECX);
3535 mono_assert (tree->reg1 == X86_EAX &&
3536 tree->reg2 == X86_EDX);
/* Overflow-checking multiply: an extra ESP is pushed so the helper can
 * store a MonoException* into that stack slot; it is read back into ECX
 * and, if non-NULL, thrown via arch_get_throw_exception. The branch8
 * skips the push+call throw sequence when no exception was produced. */
3539 lreg: MUL_OVF (lreg, lreg) {
3540 if (mono_regset_reg_used (s->rs, X86_ECX))
3541 x86_push_reg (s->code, X86_ECX);
3543 x86_push_reg (s->code, tree->right->reg2);
3544 x86_push_reg (s->code, tree->right->reg1);
3545 x86_push_reg (s->code, tree->left->reg2);
3546 x86_push_reg (s->code, tree->left->reg1);
3547 /* pass a pointer to store the resulting exception -
3548 * ugly, but it works */
3549 x86_push_reg (s->code, X86_ESP);
3550 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llmult_ovf);
3551 x86_call_code (s->code, 0);
3552 x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, 4, 4);
3553 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
3554 x86_alu_reg_imm (s->code, X86_CMP, X86_ECX, 0);
3556 /* cond. emit exception */
3557 x86_branch8 (s->code, X86_CC_EQ, 7, FALSE);
3558 x86_push_reg (s->code, X86_ECX);
3559 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, arch_get_throw_exception ());
3560 x86_call_code (s->code, 0);
3562 if (mono_regset_reg_used (s->rs, X86_ECX))
3563 x86_pop_reg (s->code, X86_ECX);
3565 mono_assert (tree->reg1 == X86_EAX &&
3566 tree->reg2 == X86_EDX);
/* Unsigned overflow-checking multiply; identical shape, different helper. */
3569 lreg: MUL_OVF_UN (lreg, lreg) {
3570 if (mono_regset_reg_used (s->rs, X86_ECX))
3571 x86_push_reg (s->code, X86_ECX);
3573 x86_push_reg (s->code, tree->right->reg2);
3574 x86_push_reg (s->code, tree->right->reg1);
3575 x86_push_reg (s->code, tree->left->reg2);
3576 x86_push_reg (s->code, tree->left->reg1);
3577 /* pass a pointer to store the resulting exception -
3578 * ugly, but it works */
3579 x86_push_reg (s->code, X86_ESP);
3580 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llmult_ovf_un);
3581 x86_call_code (s->code, 0);
3582 x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, 4, 4);
3583 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
3584 x86_alu_reg_imm (s->code, X86_CMP, X86_ECX, 0);
3586 /* cond. emit exception */
3587 x86_branch8 (s->code, X86_CC_EQ, 7, FALSE);
3588 x86_push_reg (s->code, X86_ECX);
3589 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, arch_get_throw_exception ());
3590 x86_call_code (s->code, 0);
3592 if (mono_regset_reg_used (s->rs, X86_ECX))
3593 x86_pop_reg (s->code, X86_ECX);
3595 mono_assert (tree->reg1 == X86_EAX &&
3596 tree->reg2 == X86_EDX);
/* Signed 64-bit division via mono_lldiv. */
3599 lreg: DIV (lreg, lreg) {
3600 if (mono_regset_reg_used (s->rs, X86_ECX))
3601 x86_push_reg (s->code, X86_ECX);
3603 x86_push_reg (s->code, tree->right->reg2);
3604 x86_push_reg (s->code, tree->right->reg1);
3605 x86_push_reg (s->code, tree->left->reg2);
3606 x86_push_reg (s->code, tree->left->reg1);
3607 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_lldiv);
3608 x86_call_code (s->code, 0);
3609 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3611 if (mono_regset_reg_used (s->rs, X86_ECX))
3612 x86_pop_reg (s->code, X86_ECX);
3614 mono_assert (tree->reg1 == X86_EAX &&
3615 tree->reg2 == X86_EDX);
/* Signed 64-bit remainder via mono_llrem. */
3618 lreg: REM (lreg, lreg) {
3619 if (mono_regset_reg_used (s->rs, X86_ECX))
3620 x86_push_reg (s->code, X86_ECX);
3622 x86_push_reg (s->code, tree->right->reg2);
3623 x86_push_reg (s->code, tree->right->reg1);
3624 x86_push_reg (s->code, tree->left->reg2);
3625 x86_push_reg (s->code, tree->left->reg1);
3626 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llrem);
3627 x86_call_code (s->code, 0);
3628 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3630 if (mono_regset_reg_used (s->rs, X86_ECX))
3631 x86_pop_reg (s->code, X86_ECX);
3633 mono_assert (tree->reg1 == X86_EAX &&
3634 tree->reg2 == X86_EDX);
/* Unsigned 64-bit division via mono_lldiv_un. */
3637 lreg: DIV_UN (lreg, lreg) {
3638 if (mono_regset_reg_used (s->rs, X86_ECX))
3639 x86_push_reg (s->code, X86_ECX);
3641 x86_push_reg (s->code, tree->right->reg2);
3642 x86_push_reg (s->code, tree->right->reg1);
3643 x86_push_reg (s->code, tree->left->reg2);
3644 x86_push_reg (s->code, tree->left->reg1);
3645 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_lldiv_un);
3646 x86_call_code (s->code, 0);
3647 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3649 if (mono_regset_reg_used (s->rs, X86_ECX))
3650 x86_pop_reg (s->code, X86_ECX);
3652 mono_assert (tree->reg1 == X86_EAX &&
3653 tree->reg2 == X86_EDX);
/* Unsigned 64-bit remainder via mono_llrem_un. */
3656 lreg: REM_UN (lreg, lreg) {
3657 if (mono_regset_reg_used (s->rs, X86_ECX))
3658 x86_push_reg (s->code, X86_ECX);
3660 x86_push_reg (s->code, tree->right->reg2);
3661 x86_push_reg (s->code, tree->right->reg1);
3662 x86_push_reg (s->code, tree->left->reg2);
3663 x86_push_reg (s->code, tree->left->reg1);
3664 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llrem_un);
3665 x86_call_code (s->code, 0);
3666 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3668 if (mono_regset_reg_used (s->rs, X86_ECX))
3669 x86_pop_reg (s->code, X86_ECX);
3671 mono_assert (tree->reg1 == X86_EAX &&
3672 tree->reg2 == X86_EDX);
/* Calls returning a 64-bit value (CALL_I8). Four addressing flavours:
 * indirect through a register, direct to a known address (ADDR_G),
 * virtual through the vtable (VFUNC_ADDR) and through the interface
 * offset table (INTF_ADDR). In all cases the result is asserted to be in
 * EAX:EDX. Several interior lines (argument setup, `treg` declaration,
 * stack cleanup) are elided in this listing — consult the full source. */
3675 lreg: CALL_I8 (this, reg) {
3677 int lreg = tree->left->reg1;
3678 int rreg = tree->right->reg1;
/* the call-target register must not clash with the `this`/temp registers;
 * the elided lines presumably try alternative registers before giving up */
3680 if (lreg == treg || rreg == treg)
3682 if (lreg == treg || rreg == treg)
3684 if (lreg == treg || rreg == treg)
3685 mono_assert_not_reached ();
3689 x86_call_reg (s->code, rreg);
3693 mono_assert (tree->reg1 == X86_EAX);
3694 mono_assert (tree->reg2 == X86_EDX);
/* Direct call: the absolute target is recorded as a jump-info patch. */
3697 lreg: CALL_I8 (this, ADDR_G) {
3698 int lreg = tree->left->reg1;
3706 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
3707 x86_call_code (s->code, 0);
3711 mono_assert (tree->reg1 == X86_EAX);
3712 mono_assert (tree->reg2 == X86_EDX);
/* Virtual call: load the vtable from the object header, then call
 * indirect at vtable slot (method slot scaled by 4). */
3715 lreg: CALL_I8 (this, VFUNC_ADDR) {
3716 int lreg = tree->left->reg1;
3724 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3725 x86_call_virtual (s->code, lreg,
3726 G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
3730 mono_assert (tree->reg1 == X86_EAX);
3731 mono_assert (tree->reg2 == X86_EDX);
/* Interface call: object -> vtable -> interface_offsets -> per-interface
 * method table, then call indirect at the method slot. */
3734 lreg: CALL_I8 (this, INTF_ADDR) {
3735 int lreg = tree->left->reg1;
3743 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3744 x86_mov_reg_membase (s->code, lreg, lreg,
3745 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
3746 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
3747 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
3751 mono_assert (tree->reg1 == X86_EAX);
3752 mono_assert (tree->reg2 == X86_EDX);
/* (rule header elided in this listing — presumably the 64-bit return rule,
 * e.g. stmt: RET (lreg); confirm against upstream.)
 * Moves the 64-bit value into the EAX:EDX return registers, routing the
 * high word through ECX when reg2 currently sits in EAX (so the first move
 * would clobber it), then jumps to the shared epilog unless this is the
 * final instruction of the method. */
3756 if (tree->left->reg1 != X86_EAX) {
3757 if (tree->left->reg2 != X86_EAX) {
3758 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
3759 if (tree->left->reg2 != X86_EDX)
3760 x86_mov_reg_reg (s->code, X86_EDX, tree->left->reg2, 4);
/* reg2 == EAX: save it in ECX before EAX is overwritten */
3762 x86_mov_reg_reg (s->code, X86_ECX, tree->left->reg2, 4);
3763 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
3764 x86_mov_reg_reg (s->code, X86_EDX, X86_ECX, 4);
3766 } else if (tree->left->reg2 != X86_EDX) {
3767 x86_mov_reg_reg (s->code, X86_EDX, tree->left->reg2, 4);
/* patched later to the method epilog via MONO_JUMP_INFO_EPILOG */
3770 if (!tree->last_instr) {
3771 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
3772 x86_jump32 (s->code, 0);
/* Push a 64-bit call argument: high word first so the value sits
 * little-endian on the stack. `pad` comes from the precomputed argument
 * layout (its use is elided in this listing). */
3777 stmt: ARG_I8 (lreg) {
3778 int pad = tree->data.arg_info.pad;
3781 x86_push_reg (s->code, tree->left->reg2);
3782 x86_push_reg (s->code, tree->left->reg1);
/* Materialize a 64-bit comparison result as 0/1 in a 32-bit register.
 * CEE_CEQ: both word-pairs must match (the NE short-circuit skips the low
 * compare). The remaining relational cases compare the high words first —
 * signed or unsigned per the `signed` flag on branch8 — and fall back to an
 * unsigned low-word compare only when the high words are equal. The switch
 * case labels (CEE_CGT/CGT_UN/CLT/CLT_UN by the operand order) are elided
 * in this listing; the operand order (rreg vs lreg first) encodes the
 * direction of the comparison. */
3785 reg: CSET (COMPARE (lreg, lreg)) {
3787 int lreg1, lreg2, rreg1, rreg2;
3789 lreg1 = tree->left->left->reg1;
3790 lreg2 = tree->left->left->reg2;
3791 rreg1 = tree->left->right->reg1;
3792 rreg2 = tree->left->right->reg2;
3795 if (tree->data.i == CEE_CEQ) {
3796 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3797 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3798 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3799 x86_patch (br [0], s->code);
3800 x86_set_reg (s->code, X86_CC_EQ, tree->reg1, FALSE);
3801 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
3805 switch (tree->data.i) {
/* signed high-word compare (branch8 signed=TRUE) */
3807 x86_alu_reg_reg (s->code, X86_CMP, rreg2, lreg2);
3808 br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, TRUE);
3809 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3810 x86_alu_reg_reg (s->code, X86_CMP, rreg1, lreg1);
3811 br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
/* unsigned variant of the same comparison */
3814 x86_alu_reg_reg (s->code, X86_CMP, rreg2, lreg2);
3815 br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, FALSE);
3816 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3817 x86_alu_reg_reg (s->code, X86_CMP, rreg1, lreg1);
3818 br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
/* mirrored operand order: opposite relational direction, signed */
3821 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3822 br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, TRUE);
3823 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3824 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3825 br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
/* mirrored operand order, unsigned */
3828 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3829 br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, FALSE);
3830 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3831 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3832 br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
3835 g_assert_not_reached ();
3838 /* set result to 1 */
3839 x86_patch (br [1], s->code);
3840 x86_mov_reg_imm (s->code, tree->reg1, 1);
3841 br [3] = s->code; x86_jump8 (s->code, 0);
3843 /* set result to 0 */
3844 x86_patch (br [0], s->code);
3845 x86_patch (br [2], s->code);
3846 x86_mov_reg_imm (s->code, tree->reg1, 0);
3848 x86_patch (br [3], s->code);
/* Conditional branch on a 64-bit comparison. Standard two-word scheme:
 * branch on the high-word compare (signedness per the branch's `signed`
 * flag), short-circuit with a NE test, then decide on an unsigned low-word
 * compare. Each taken branch is a branch32 patched to the target basic
 * block via MONO_JUMP_INFO_BB. The switch case labels (the CEE_B* branch
 * opcodes) are elided in this listing. */
3851 stmt: CBRANCH (COMPARE (lreg, lreg)) {
3853 int lreg1, lreg2, rreg1, rreg2;
3855 lreg1 = tree->left->left->reg1;
3856 lreg2 = tree->left->left->reg2;
3857 rreg1 = tree->left->right->reg1;
3858 rreg2 = tree->left->right->reg2;
3860 switch (tree->data.bi.cond) {
/* signed less-than family */
3862 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3863 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3864 x86_branch32 (s->code, X86_CC_LT, 0, TRUE);
3865 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3866 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3867 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3868 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3869 x86_patch (br [0], s->code);
/* unsigned less-than */
3872 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3873 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3874 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3875 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3876 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3877 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3878 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3879 x86_patch (br [0], s->code);
/* signed greater-than */
3882 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3883 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3884 x86_branch32 (s->code, X86_CC_GT, 0, TRUE);
3885 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3886 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3887 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3888 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3889 x86_patch (br [0], s->code);
/* unsigned greater-than */
3892 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3893 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3894 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3895 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3896 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3897 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3898 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3899 x86_patch (br [0], s->code);
/* equality: both word-pairs must match */
3902 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3903 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3904 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3905 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3906 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
3907 x86_patch (br [0], s->code);
/* inequality: branch if either word-pair differs */
3910 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3911 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3912 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
3913 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3914 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3915 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
/* signed greater-or-equal.
 * NOTE(review): the CMP at listing line 3921 repeats the one at 3918 —
 * conditional jumps do not modify EFLAGS, so it looks redundant; verify. */
3918 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3919 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3920 x86_branch32 (s->code, X86_CC_GT, 0, TRUE);
3921 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3922 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3923 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3924 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3925 x86_branch32 (s->code, X86_CC_GE, 0, FALSE);
3926 x86_patch (br [0], s->code);
/* unsigned greater-or-equal */
3929 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3930 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3931 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3932 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3933 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3934 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3935 x86_branch32 (s->code, X86_CC_GE, 0, FALSE);
3936 x86_patch (br [0], s->code);
/* signed less-or-equal */
3939 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3940 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3941 x86_branch32 (s->code, X86_CC_LT, 0, TRUE);
3942 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3943 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3944 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3945 x86_branch32 (s->code, X86_CC_LE, 0, FALSE);
3946 x86_patch (br [0], s->code);
/* unsigned less-or-equal */
3949 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3950 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3951 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3952 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3953 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3954 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3955 x86_branch32 (s->code, X86_CC_LE, 0, FALSE);
3956 x86_patch (br [0], s->code);
3959 g_assert_not_reached ();
3966 #stmt: STLOC (CONV_I4 (freg)) {
3968 # x86_fist_pop_membase (s->code, X86_EBP, tree->data.i, FALSE);
/* Float -> integer conversions. All rules share one pattern:
 * - fast path: mono_emit_fast_iconv (declared in the header) when
 *   mono_use_fast_iconv is set, plus a widen for the narrow types;
 * - slow path: save the x87 control word with FNSTCW, OR in 0xc00 to set
 *   both rounding-control bits (round toward zero, as CIL conv.* requires),
 *   FLDCW it, FIST(P) the value through the stack, then restore the
 *   original control word. The widen_reg sign/zero-extends for the I1/U1/
 *   I2/U2 variants. The `else` keywords pairing the two paths are on lines
 *   elided from this listing. */
3971 reg: CONV_I1 (freg) {
3972 if (mono_use_fast_iconv) {
3973 mono_emit_fast_iconv(s, tree);
3974 x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, FALSE);
3976 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
3977 x86_fnstcw_membase(s->code, X86_ESP, 0);
3978 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
3979 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
3980 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
3981 x86_fldcw_membase (s->code, X86_ESP, 2);
3982 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
3983 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
3984 x86_pop_reg (s->code, tree->reg1);
3985 x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, FALSE);
3986 x86_fldcw_membase (s->code, X86_ESP, 0);
3987 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* unsigned 8-bit: zero-extend instead of sign-extend */
3991 reg: CONV_U1 (freg) {
3992 if (mono_use_fast_iconv) {
3993 mono_emit_fast_iconv(s, tree);
3994 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
3996 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
3997 x86_fnstcw_membase(s->code, X86_ESP, 0);
3998 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
3999 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4000 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4001 x86_fldcw_membase (s->code, X86_ESP, 2);
4002 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4003 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4004 x86_pop_reg (s->code, tree->reg1);
4005 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
4006 x86_fldcw_membase (s->code, X86_ESP, 0);
4007 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* signed 16-bit */
4011 reg: CONV_I2 (freg) {
4012 if (mono_use_fast_iconv) {
4013 mono_emit_fast_iconv(s, tree);
4014 x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, TRUE);
4016 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4017 x86_fnstcw_membase(s->code, X86_ESP, 0);
4018 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4019 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4020 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4021 x86_fldcw_membase (s->code, X86_ESP, 2);
4022 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4023 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4024 x86_pop_reg (s->code, tree->reg1);
4025 x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, TRUE);
4026 x86_fldcw_membase (s->code, X86_ESP, 0);
4027 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* unsigned 16-bit */
4031 reg: CONV_U2 (freg) {
4032 if (mono_use_fast_iconv) {
4033 mono_emit_fast_iconv(s, tree);
4034 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, TRUE);
4036 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4037 x86_fnstcw_membase(s->code, X86_ESP, 0);
4038 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4039 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4040 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4041 x86_fldcw_membase (s->code, X86_ESP, 2);
4042 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4043 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4044 x86_pop_reg (s->code, tree->reg1);
4045 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, TRUE);
4046 x86_fldcw_membase (s->code, X86_ESP, 0);
4047 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* 32-bit: no widen needed */
4051 reg: CONV_I4 (freg) {
4052 if (mono_use_fast_iconv) {
4053 mono_emit_fast_iconv(s, tree);
4055 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4056 x86_fnstcw_membase(s->code, X86_ESP, 0);
4057 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4058 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4059 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4060 x86_fldcw_membase (s->code, X86_ESP, 2);
4061 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4062 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4063 x86_pop_reg (s->code, tree->reg1);
4064 x86_fldcw_membase (s->code, X86_ESP, 0);
4065 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* unsigned 32-bit: identical emission to CONV_I4 (truncation is the same
 * bit pattern; range checking is not performed here) */
4069 reg: CONV_U4 (freg) {
4070 if (mono_use_fast_iconv) {
4071 mono_emit_fast_iconv(s, tree);
4073 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4074 x86_fnstcw_membase(s->code, X86_ESP, 0);
4075 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4076 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4077 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4078 x86_fldcw_membase (s->code, X86_ESP, 2);
4079 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4080 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4081 x86_pop_reg (s->code, tree->reg1);
4082 x86_fldcw_membase (s->code, X86_ESP, 0);
4083 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* 64-bit: FISTP with is_long=TRUE writes 8 bytes; both words popped */
4087 lreg: CONV_I8 (freg) {
4088 if (mono_use_fast_iconv) {
4089 mono_emit_fast_iconv_i8(s, tree);
4091 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4092 x86_fnstcw_membase(s->code, X86_ESP, 0);
4093 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4094 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4095 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4096 x86_fldcw_membase (s->code, X86_ESP, 2);
4097 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
4098 x86_fist_pop_membase (s->code, X86_ESP, 0, TRUE);
4099 x86_pop_reg (s->code, tree->reg1);
4100 x86_pop_reg (s->code, tree->reg2);
4101 x86_fldcw_membase (s->code, X86_ESP, 0);
4102 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* unsigned 64-bit: same emission as CONV_I8 */
4106 lreg: CONV_U8 (freg) {
4107 if (mono_use_fast_iconv) {
4108 mono_emit_fast_iconv_i8(s, tree);
4110 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4111 x86_fnstcw_membase(s->code, X86_ESP, 0);
4112 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4113 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4114 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4115 x86_fldcw_membase (s->code, X86_ESP, 2);
4116 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
4117 x86_fist_pop_membase (s->code, X86_ESP, 0, TRUE);
4118 x86_pop_reg (s->code, tree->reg1);
4119 x86_pop_reg (s->code, tree->reg2);
4120 x86_fldcw_membase (s->code, X86_ESP, 0);
4121 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* Materialize an FP comparison as 0/1. FCOMPP compares ST(0)/ST(1) and
 * pops both; FNSTSW dumps the status word into AX (hence EAX is saved if
 * the result register is not EAX), and the C0/C2/C3 condition bits are
 * masked with 0x4500 and compared against per-case constants. The switch
 * case labels (the CEE_C* opcodes) are elided in this listing. */
4125 reg: CSET (COMPARE (freg, freg)) {
4126 int treg = tree->reg1;
4128 if (treg != X86_EAX)
4129 x86_push_reg (s->code, X86_EAX);
4131 x86_fcompp (s->code);
4132 x86_fnstsw (s->code);
4133 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
4135 switch (tree->data.i) {
/* 0x4000 = C3 set alone: operands compared equal */
4137 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
4138 x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
4139 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
/* 0x0100 = C0 set alone: ST(0) < ST(1) ordering result */
4142 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4143 x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
4144 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
4147 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4148 x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
4149 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
/* these cases reuse the flags from the AND above (no extra CMP) */
4152 x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
4153 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
/* NOTE(review): uses tree->reg1 directly where siblings use treg — same
 * value (treg = tree->reg1), cosmetic inconsistency only */
4156 x86_set_reg (s->code, X86_CC_EQ, tree->reg1, TRUE);
4157 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
4160 g_assert_not_reached ();
4163 if (treg != X86_EAX)
4164 x86_pop_reg (s->code, X86_EAX);
/* Integer/long -> float conversions and FP constants. The x87 keeps a
 * single extended-precision stack, so R4 vs R8 conversions emit the same
 * FILD (precision is applied on store). Values are pushed through the
 * stack because FILD only takes memory operands. */
4167 freg: CONV_R8 (freg) {
4171 freg: CONV_R4 (freg) {
4172 /* fixme: nothing to do ??*/
/* load directly from a global address, skipping the temp-stack dance */
4175 freg: CONV_R8 (LDIND_I4 (ADDR_G)) {
4176 x86_fild (s->code, tree->left->left->data.p, FALSE);
4179 freg: CONV_R4 (reg) {
4180 x86_push_reg (s->code, tree->left->reg1);
4181 x86_fild_membase (s->code, X86_ESP, 0, FALSE);
4182 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4185 freg: CONV_R8 (reg) {
4186 x86_push_reg (s->code, tree->left->reg1);
4187 x86_fild_membase (s->code, X86_ESP, 0, FALSE);
4188 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* unsigned 32-bit: widen to a non-negative 64-bit value (high word 0)
 * and use the 64-bit FILD so the top bit is not treated as a sign */
4191 freg: CONV_R_UN (reg) {
4192 x86_push_imm (s->code, 0);
4193 x86_push_reg (s->code, tree->left->reg1);
4194 x86_fild_membase (s->code, X86_ESP, 0, TRUE);
4195 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
/* unsigned 64-bit: FILD is signed-only, so when the sign bit of the high
 * word is set the 80-bit constant `mn` (exponent 0x403f, i.e. 2^64) is
 * added to correct the wrapped negative value */
4198 freg: CONV_R_UN (lreg) {
4199 static guint8 mn[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x3f, 0x40 };
4202 /* load 64bit integer to FP stack */
4203 x86_push_imm (s->code, 0);
4204 x86_push_reg (s->code, tree->left->reg2);
4205 x86_push_reg (s->code, tree->left->reg1);
4206 x86_fild_membase (s->code, X86_ESP, 0, TRUE);
4207 /* store as 80bit FP value */
4208 x86_fst80_membase (s->code, X86_ESP, 0);
4210 /* test if lreg is negative */
4211 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
4212 br [0] = s->code; x86_branch8 (s->code, X86_CC_GEZ, 0, TRUE);
4214 /* add correction constant mn */
4215 x86_fld80_mem (s->code, mn);
4216 x86_fld80_membase (s->code, X86_ESP, 0);
4217 x86_fp_op_reg (s->code, X86_FADD, 1, TRUE);
4218 x86_fst80_membase (s->code, X86_ESP, 0);
4219 //x86_breakpoint (s->code);
4220 x86_patch (br [0], s->code);
4222 x86_fld80_membase (s->code, X86_ESP, 0);
4223 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
4226 freg: CONV_R4 (lreg) {
4227 x86_push_reg (s->code, tree->left->reg2);
4228 x86_push_reg (s->code, tree->left->reg1);
4229 x86_fild_membase (s->code, X86_ESP, 0, TRUE);
4230 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
4233 freg: CONV_R8 (lreg) {
4234 x86_push_reg (s->code, tree->left->reg2);
4235 x86_push_reg (s->code, tree->left->reg1);
4236 x86_fild_membase (s->code, X86_ESP, 0, TRUE);
4237 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
/* FP constant rules (CONST_R4 / CONST_R8 — headers and the special-case
 * checks for 0.0/1.0 are elided in this listing); the general case loads
 * the constant from memory */
4241 float f = *(float *)tree->data.p;
4248 x86_fld (s->code, tree->data.p, FALSE);
4252 double d = *(double *)tree->data.p;
4259 x86_fld (s->code, tree->data.p, TRUE);
/* FP loads through the four addressing modes (see AMImmediate/AMBase/
 * AMIndex/AMBaseIndex in the header). For the indexed modes the effective
 * address is first formed with LEA into the index register, then loaded
 * with a plain FLD from that register. The case labels are elided in this
 * listing. */
4262 freg: LDIND_R4 (addr) {
4264 switch (tree->left->data.ainfo.amode) {
4267 x86_fld (s->code, tree->left->data.ainfo.offset, FALSE);
4271 x86_fld_membase (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, FALSE);
4274 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, X86_NOBASEREG,
4275 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4276 tree->left->data.ainfo.shift);
4277 x86_fld_membase (s->code, tree->left->data.ainfo.indexreg, 0, FALSE);
4280 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.basereg,
4281 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4282 tree->left->data.ainfo.shift);
4283 x86_fld_membase (s->code, tree->left->data.ainfo.indexreg, 0, FALSE);
/* same structure for 8-byte doubles (is_double = TRUE) */
4288 freg: LDIND_R8 (addr) {
4290 switch (tree->left->data.ainfo.amode) {
4293 x86_fld (s->code, tree->left->data.ainfo.offset, TRUE);
4297 x86_fld_membase (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, TRUE);
4300 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, X86_NOBASEREG,
4301 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4302 tree->left->data.ainfo.shift);
4303 x86_fld_membase (s->code, tree->left->data.ainfo.indexreg, 0, TRUE);
4306 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.basereg,
4307 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4308 tree->left->data.ainfo.shift);
4309 x86_fld_membase (s->code, tree->left->data.ainfo.indexreg, 0, TRUE);
/* FP arithmetic: both operands are already on the x87 stack; the two-
 * operand ST(1) op= ST(0) form with pop leaves the result in ST(0). */
4315 freg: ADD (freg, freg) {
4316 x86_fp_op_reg (s->code, X86_FADD, 1, TRUE);
4319 freg: SUB (freg, freg) {
4320 x86_fp_op_reg (s->code, X86_FSUB, 1, TRUE);
4323 freg: MUL (freg, freg) {
4324 x86_fp_op_reg (s->code, X86_FMUL, 1, TRUE);
4327 freg: DIV (freg, freg) {
4328 x86_fp_op_reg (s->code, X86_FDIV, 1, TRUE);
/* ckfinite: classify ST(0) (the FXAM-style examine instruction is on a
 * line elided from this listing), read the status word into AX, and check
 * the condition bits: anything other than the "finite" pattern
 * (masked 0x4100 == 0x0100) raises ArithmeticException. EAX is preserved
 * around the FNSTSW. */
4331 freg: CKFINITE (freg) {
4332 x86_push_reg (s->code, X86_EAX);
4334 x86_fnstsw (s->code);
4335 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4100);
4336 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4337 x86_pop_reg (s->code, X86_EAX);
4338 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NE, FALSE, "ArithmeticException");
/* FP remainder. FPREM computes a *partial* remainder, so it loops until
 * the status-word C2 bit (0x0400) is clear. The loop labels (l1/l2) and
 * the FNSTSW->AX transfer setup are on lines elided from this listing.
 * FXCH first arranges dividend/divisor in the order FPREM expects; the
 * trailing FSTPs discard the divisor, leaving the remainder in ST(0). */
4341 freg: REM (freg, freg) {
4344 /* we need to exchange ST(0) with ST(1) */
4345 x86_fxch (s->code, 1);
4347 /* this requires a loop, because fprem1 somtimes
4348 * returns a partial remainder */
4350 x86_fprem (s->code);
4351 x86_fnstsw (s->code);
4352 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x0400);
/* branch back to the loop head while C2 (incomplete reduction) is set */
4354 x86_branch8 (s->code, X86_CC_NE, l1 - l2, FALSE);
4357 x86_fstp (s->code, 1);
4365 x86_fstp (s->code, 0);
/* FP stores, mirroring the LDIND rules: pick the addressing mode, form
 * indexed addresses with LEA, then FSTP (pop = TRUE) the value. Case
 * labels and some continuation lines are elided in this listing. */
4368 stmt: STIND_R4 (addr, freg) {
4370 switch (tree->left->data.ainfo.amode) {
4373 x86_fst (s->code, tree->left->data.ainfo.offset, FALSE, TRUE);
4377 x86_fst_membase (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
4381 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, X86_NOBASEREG,
4382 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4383 tree->left->data.ainfo.shift);
4384 x86_fst_membase (s->code, tree->left->data.ainfo.indexreg, 0, FALSE, TRUE);
4387 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.basereg,
4388 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4389 tree->left->data.ainfo.shift);
4390 x86_fst_membase (s->code, tree->left->data.ainfo.indexreg, 0, FALSE, TRUE);
/* 8-byte double store (is_double = TRUE) */
4395 stmt: STIND_R8 (addr, freg) {
4397 switch (tree->left->data.ainfo.amode) {
4400 x86_fst (s->code, tree->left->data.ainfo.offset, TRUE, TRUE);
4404 x86_fst_membase (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
4408 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, X86_NOBASEREG,
4409 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4410 tree->left->data.ainfo.shift);
4411 x86_fst_membase (s->code, tree->left->data.ainfo.indexreg, 0, TRUE, TRUE);
4414 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.basereg,
4415 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4416 tree->left->data.ainfo.shift);
4417 x86_fst_membase (s->code, tree->left->data.ainfo.indexreg, 0, TRUE, TRUE);
/* Store a float field on an object that may be a transparent proxy.
 * The object's vtable class is compared against transparent_proxy_class:
 * if it matches, the value is spilled to the stack and the store is routed
 * through mono_store_remote_field (args: obj, klass, field, &value);
 * otherwise the value is stored directly at the field offset, adjusting
 * for the unboxed MonoObject header when the class is a valuetype. */
4422 stmt: REMOTE_STIND_R4 (reg, freg) {
4425 int lreg = tree->left->reg1;
4431 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
4432 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
4433 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
4435 /* this is a transparent proxy - remote the call */
4437 /* save value to stack */
4438 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4439 x86_fst_membase (s->code, X86_ESP, 0, FALSE, TRUE);
/* ESP here is the address of the just-stored value (4th argument) */
4441 x86_push_reg (s->code, X86_ESP);
4442 x86_push_imm (s->code, tree->data.fi.field);
4443 x86_push_imm (s->code, tree->data.fi.klass);
4444 x86_push_reg (s->code, lreg);
4445 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
4446 x86_call_code (s->code, 0);
/* pop 16 bytes of args + the 4-byte value slot */
4447 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
4449 br [1] = s->code; x86_jump8 (s->code, 0);
/* non-proxy fast path: plain store at the field offset */
4451 x86_patch (br [0], s->code);
4452 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
4453 tree->data.fi.field->offset;
4454 x86_fst_membase (s->code, lreg, offset, FALSE, TRUE);
4456 x86_patch (br [1], s->code);
4459 stmt: REMOTE_STIND_R8 (reg, freg) {
/* Same as REMOTE_STIND_R4 but for a 64-bit double: 8-byte spill slot,
 * TRUE (double precision) stores, and 24-byte stack cleanup.
 * NOTE(review): declarations of treg/br/offset are elided here. */
4462 int lreg = tree->left->reg1;
/* transparent-proxy check via the object's vtable */
4468 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
4469 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
4470 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
4472 /* this is a transparent proxy - remote the call */
4474 /* save value to stack */
/* 8-byte slot for the double, passed by address to the helper */
4475 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
4476 x86_fst_membase (s->code, X86_ESP, 0, TRUE, TRUE);
/* cdecl call: mono_store_remote_field (this, klass, field, &value) */
4478 x86_push_reg (s->code, X86_ESP);
4479 x86_push_imm (s->code, tree->data.fi.field);
4480 x86_push_imm (s->code, tree->data.fi.klass);
4481 x86_push_reg (s->code, lreg);
4482 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
4483 x86_call_code (s->code, 0);
/* four argument words (16) plus the 8-byte spill slot = 24 */
4484 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 24);
4486 br [1] = s->code; x86_jump8 (s->code, 0);
/* non-proxy path: direct store into the field */
4488 x86_patch (br [0], s->code);
4489 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
4490 tree->data.fi.field->offset;
4491 x86_fst_membase (s->code, lreg, offset, TRUE, TRUE);
4493 x86_patch (br [1], s->code);
4496 stmt: ARG_R4 (freg) {
/* Push a 32-bit float call argument: reserve 4 bytes plus any frame
 * alignment padding, then pop the x87 top into the new slot. */
4497 int pad = tree->data.arg_info.pad;
4499 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4 + pad);
4500 x86_fst_membase (s->code, X86_ESP, 0, FALSE, TRUE);
4503 stmt: ARG_R8 (freg) {
/* Push a 64-bit double call argument: reserve 8 bytes plus alignment
 * padding, then pop the x87 top into the new slot (TRUE = double). */
4504 int pad = tree->data.arg_info.pad;
4506 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8 + pad);
4507 x86_fst_membase (s->code, X86_ESP, 0, TRUE, TRUE);
4510 # fixme: we need to implement unordered and ordered compares
4512 stmt: CBRANCH (COMPARE (freg, freg)) {
/* Floating-point conditional branch.  fcompp compares ST(0)/ST(1) and
 * pops both; fnstsw copies the x87 status word into AX.  Masking with
 * 0x4500 keeps the condition bits C0 (0x0100), C2 (0x0400) and C3
 * (0x4000): C3 set means equal, C0 set means below, C2 set means
 * unordered (NaN).
 * NOTE(review): the switch's case labels and breaks are elided in this
 * excerpt — the per-condition comments below are inferred from the
 * immediate values and should be confirmed against the full file. */
4514 x86_fcompp (s->code);
4515 x86_fnstsw (s->code);
4516 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
4518 switch (tree->data.bi.cond) {
/* masked flags all zero: strictly greater / ordered-compare outcome */
4520 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4521 x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
4524 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4525 x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
/* 0x0100 (C0): "below" result of the comparison */
4528 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4529 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4530 x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
4533 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4534 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4535 x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
/* 0x4000 (C3): operands compared equal */
4538 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
4539 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4540 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
4543 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
4544 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4545 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
4548 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4549 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
4552 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4553 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
4556 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4557 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4558 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
4561 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4562 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4563 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
/* unknown condition code is a compiler bug */
4566 g_assert_not_reached ();
4570 freg: CALL_R8 (this, reg) {
/* Indirect call returning a double in ST(0); the callee address is in
 * rreg.  The repeated treg-collision checks presumably shuffle the
 * this/address registers out of the way of the result register — the
 * intervening move instructions are elided in this excerpt, so confirm
 * against the full file. */
4572 int lreg = tree->left->reg1;
4573 int rreg = tree->right->reg1;
4575 if (lreg == treg || rreg == treg)
4577 if (lreg == treg || rreg == treg)
4579 if (lreg == treg || rreg == treg)
/* no scratch register left: cannot happen by register allocation */
4580 mono_assert_not_reached ();
4584 x86_call_reg (s->code, rreg);
4589 freg: CALL_R8 (this, ADDR_G) {
/* Direct call to a global address, result in ST(0).  The displacement
 * is emitted as 0 and patched later via the ABS jump-info record. */
4590 int lreg = tree->left->reg1;
4598 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
4599 x86_call_code (s->code, 0);
4604 freg: CALL_R8 (this, INTF_ADDR) {
/* Interface-method call returning a double: load the vtable, then the
 * interface_offsets table, then the per-interface slot table indexed by
 * interface_id, and finally call through the method slot. */
4605 int lreg = tree->left->reg1;
/* lreg = this->vtable */
4613 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
/* lreg = vtable->interface_offsets */
4614 x86_mov_reg_membase (s->code, lreg, lreg,
4615 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
/* lreg = interface_offsets [interface_id] (pointer-sized entries) */
4616 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
4617 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
4622 freg: CALL_R8 (this, VFUNC_ADDR) {
/* Virtual-method call returning a double: load the vtable from the
 * object header, then call indirect through the method's vtable slot. */
4623 int lreg = tree->left->reg1;
4631 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
4632 x86_call_virtual (s->code, lreg,
4633 G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
4639 if (!tree->last_instr) {
4640 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
4641 x86_jump32 (s->code, 0);
4654 x86_fsqrt (s->code);
4657 # support for value types
4659 reg: LDIND_OBJ (reg) {
/* Loading a valuetype "by reference": just propagate the address,
 * copying only if source and destination registers differ. */
4660 if (tree->left->reg1 != tree->reg1)
4661 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
4664 stmt: STIND_OBJ (reg, reg) {
/* Store a valuetype by copying tree->data.i bytes from the source
 * address (right) to the destination address (left) via the MEMCOPY
 * helper; cdecl cleanup of the three argument words (12 bytes). */
4665 mono_assert (tree->data.i > 0);
/* MEMCOPY (dest, src, size) — pushed right-to-left */
4667 x86_push_imm (s->code, tree->data.i);
4668 x86_push_reg (s->code, tree->right->reg1);
4669 x86_push_reg (s->code, tree->left->reg1);
4670 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, MEMCOPY);
4671 x86_call_code (s->code, 0);
4672 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
4675 stmt: REMOTE_STIND_OBJ (reg, reg) {
/* Store a valuetype into an object field with a transparent-proxy
 * check: proxies go through mono_store_remote_field(), real objects get
 * a MEMCOPY into the field.
 * NOTE(review): declarations of treg/br/offset/size are elided here. */
4678 int lreg = tree->left->reg1;
4679 int rreg = tree->right->reg1;
/* proxy check via the object's vtable, as in REMOTE_STIND_R4/R8 */
4688 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
4689 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
4690 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
4692 /* this is a transparent proxy - remote the call */
/* mono_store_remote_field (this, klass, field, &value) */
4694 x86_push_reg (s->code, rreg);
4695 x86_push_imm (s->code, tree->data.fi.field);
4696 x86_push_imm (s->code, tree->data.fi.klass);
4697 x86_push_reg (s->code, lreg);
4698 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
4699 x86_call_code (s->code, 0);
4700 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
4702 br [1] = s->code; x86_jump8 (s->code, 0);
/* non-proxy path: copy the valuetype directly into the field */
4704 x86_patch (br [0], s->code);
4706 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
4707 tree->data.fi.field->offset;
4709 size = mono_class_value_size (tree->data.fi.field->type->data.klass, NULL);
/* MEMCOPY (this + offset, src, size); note this clobbers left->reg1 */
4710 x86_push_imm (s->code, size);
4711 x86_push_reg (s->code, tree->right->reg1);
4712 x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, offset);
4713 x86_push_reg (s->code, tree->left->reg1);
4714 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, MEMCOPY);
4715 x86_call_code (s->code, 0);
4716 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
4718 x86_patch (br [1], s->code);
4721 stmt: ARG_OBJ (CONST_I4) {
/* Push a constant-valued (4-byte) valuetype argument.
 * NOTE(review): the instruction consuming `pad` (presumably an ESP
 * adjustment for alignment) is elided in this excerpt. */
4722 int pad = tree->data.arg_info.pad;
4725 x86_push_imm (s->code, tree->left->data.i);
4728 stmt: ARG_OBJ (reg) {
/* Push a valuetype argument by value: reserve the (padded) space on the
 * stack, then MEMCOPY the value into it from the address in left->reg1.
 * NOTE(review): the computation of `sa` (presumably size + pad) is
 * elided in this excerpt. */
4729 int size = tree->data.arg_info.size;
4730 int pad = tree->data.arg_info.pad;
4738 /* reserve space for the argument */
4739 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, sa);
/* MEMCOPY (dest, src, size); dest = ESP + 2*4 skips the two words
 * already pushed (size and src) to point at the reserved area */
4741 x86_push_imm (s->code, size);
4742 x86_push_reg (s->code, tree->left->reg1);
4743 x86_lea_membase (s->code, X86_EAX, X86_ESP, 2*4);
4744 x86_push_reg (s->code, X86_EAX);
4746 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, MEMCOPY);
4747 x86_call_code (s->code, 0);
4748 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
4751 stmt: RET_OBJ (reg) {
/* Return a valuetype: copy it into the caller-provided return buffer
 * whose address is the hidden first argument at [EBP+8], then branch to
 * the epilog unless this is the method's final instruction. */
4752 int size = tree->data.i;
/* MEMCOPY (retbuf, src, size) */
4754 x86_push_imm (s->code, size);
4755 x86_push_reg (s->code, tree->left->reg1);
4756 x86_push_membase (s->code, X86_EBP, 8);
4758 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, MEMCOPY);
4759 x86_call_code (s->code, 0);
4761 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
/* jump displacement is 0 here; patched to the epilog later */
4763 if (!tree->last_instr) {
4764 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
4765 x86_jump32 (s->code, 0);
/* 64-bit multiply helper invoked from JIT-generated code (see the
 * prototype in the header); the body is elided in this excerpt. */
4774 mono_llmult (gint64 a, gint64 b)
/* Unsigned 64x64 -> 64 multiply with overflow detection, operands split
 * into 32-bit halves (al/ah, bl/bh).  On overflow it stores an overflow
 * exception through *exc for the JIT to raise.
 * NOTE(review): the ah/bh pre-check, the raise_exception label and the
 * return statements are elided in this excerpt. */
4780 mono_llmult_ovf_un (gpointer *exc, guint32 al, guint32 ah, guint32 bl, guint32 bh)
4784 // fixme: this is incredibly slow
/* presumably: if both high halves are non-zero the product cannot fit */
4787 goto raise_exception;
/* low x low partial product */
4789 res = (guint64)al * (guint64)bl;
/* cross terms; must themselves fit in 32 bits after the shift */
4791 t1 = (guint64)ah * (guint64)bl + (guint64)al * (guint64)bh;
4793 if (t1 > 0xffffffff)
4794 goto raise_exception;
4796 res += ((guint64)t1) << 32;
4802 *exc = mono_get_exception_overflow ();
/* Signed 64x64 -> 64 multiply with overflow detection.  Works on the
 * magnitudes (converted from two's complement) and re-applies the sign
 * at the end; overflow stores an exception through *exc.
 * NOTE(review): local declarations, the sign-handling code, the final
 * negate/return and the raise_exception label are elided here. */
4808 mono_llmult_ovf (gpointer *exc, guint32 al, gint32 ah, guint32 bl, gint32 bh) {
4810 Use Karatsuba algorithm where:
4811 a*b is: AhBh(R^2+R)+(Ah-Al)(Bl-Bh)R+AlBl(R+1)
4812 where Ah is the "high half" (most significant 32 bits) of a and
4813 where Al is the "low half" (least significant 32 bits) of a and
4814 where Bh is the "high half" of b and Bl is the "low half" and
4815 where R is the Radix or "size of the half" (in our case 32 bits)
4817 Note, for the product of two 64 bit numbers to fit into a 64
4818 bit result, ah and/or bh must be 0. This will save us from doing
4819 the AhBh term at all.
4821 Also note that we refactor so that we don't overflow 64 bits with
4822 intermediate results. So we use [(Ah-Al)(Bl-Bh)+AlBl]R+AlBl
4828 /* need to work with absolute values, so find out what the
4829 resulting sign will be and convert any negative numbers
4830 from two's complement
4834 /* flip the bits and add 1 */
4845 /* flip the bits and add 1 */
4855 /* we overflow for sure if both upper halves are greater
4856 than zero because we would need to shift their
4857 product 64 bits to the left and that will not fit
4858 in a 64 bit result */
4860 goto raise_exception;
4862 /* do the AlBl term first */
4863 t1 = (gint64)al * (gint64)bl;
4867 /* now do the [(Ah-Al)(Bl-Bh)+AlBl]R term */
4868 t1 += (gint64)(ah - al) * (gint64)(bl - bh);
4870 /* check for overflow */
/* NOTE(review): bare 0x7FFFFFFFFFFFFFFF literal; the elided context
 * presumably guarantees 64-bit evaluation — confirm against full file */
4871 if (t1 > (0x7FFFFFFFFFFFFFFF - res))
4872 goto raise_exception;
4883 *exc = mono_get_exception_overflow ();
/* 64-bit division/remainder helpers called from JIT-generated code;
 * their one-line bodies are elided in this excerpt. */
4888 mono_lldiv (gint64 a, gint64 b)
4894 mono_llrem (gint64 a, gint64 b)
4900 mono_lldiv_un (guint64 a, guint64 b)
4906 mono_llrem_un (guint64 a, guint64 b)
/* Thin wrappers that bind the current application domain before
 * delegating to the corresponding runtime API; called from generated
 * code where only the remaining arguments are available. */
4912 mono_array_new_wrapper (MonoClass *eclass, guint32 n)
4914 MonoDomain *domain = mono_domain_get ();
4916 return mono_array_new (domain, eclass, n);
/* allocate a new object of class `klass` in the current domain */
4920 mono_object_new_wrapper (MonoClass *klass)
4922 MonoDomain *domain = mono_domain_get ();
4924 return mono_object_new (domain, klass);
/* intern a string literal by token index in the current domain */
4928 mono_ldstr_wrapper (MonoImage *image, guint32 ind)
4930 MonoDomain *domain = mono_domain_get ();
4932 return mono_ldstr (domain, image, ind);
/* Return the address of a static field: static storage lives in the
 * class vtable's data area, at the given byte offset.
 * NOTE(review): the declarations of vt/addr and the return statement
 * are elided in this excerpt. */
4936 mono_ldsflda (MonoClass *klass, int offset)
4938 MonoDomain *domain = mono_domain_get ();
4942 vt = mono_class_vtable (domain, klass);
4943 addr = (char*)(vt->data) + offset;
/* memcpy replacement used as the MEMCOPY helper in debug builds: logs
 * the copy (dumping the first `l` source bytes — `l` is computed on an
 * elided line, presumably capped at a small constant) then delegates to
 * memcpy.  NOTE(review): %d with a size_t argument is a format-string
 * mismatch on LP64 targets — would need %zu or a cast to fix. */
4949 debug_memcopy (void *dest, const void *src, size_t n)
4953 printf ("MEMCPY(%p to %p [%d]) ", src, dest, n);
4955 for (i = 0; i < l; i++)
4956 printf ("%02x ", *((guint8 *)src + i));
4959 return memcpy (dest, src, n);
/* Emit a fast float -> int32 conversion with C-style truncation
 * semantics.  fist rounds according to the current FPU rounding mode
 * (round-to-nearest by default), so the code also stores the value and
 * the rounding difference, then adjusts the rounded result toward zero
 * based on the sign of the original value and the difference.
 * NOTE(review): the declaration of br[] and the function braces are
 * elided in this excerpt. */
4962 void mono_emit_fast_iconv (MBCGEN_TYPE* s, MBTREE_TYPE* tree)
/* three 4-byte slots: [8]=rounded int, [4]=diff, [0]=original value */
4965 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 12);
4966 x86_fist_membase (s->code, X86_ESP, 8, TRUE); // rounded value
4967 x86_fst_membase (s->code, X86_ESP, 0, FALSE, FALSE); // float value
4968 x86_fp_int_op_membase (s->code, X86_FSUB, X86_ESP, 8, TRUE);
4969 x86_fst_membase (s->code, X86_ESP, 4, FALSE, TRUE); // diff
/* branch on the sign bit of the original float's bit pattern */
4971 x86_pop_reg (s->code, tree->reg1); // float value
4972 x86_test_reg_reg (s->code, tree->reg1, tree->reg1);
4973 br[0] = s->code; x86_branch8 (s->code, X86_CC_S, 0, TRUE);
/* positive value: ADD diff+diff sets carry iff diff < 0 (i.e. the fist
 * rounded up), and SBB then subtracts 1 from the rounded value */
4975 x86_pop_reg (s->code, tree->reg1); // diff
4976 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
4977 x86_pop_reg (s->code, tree->reg1); // rounded value
4978 x86_alu_reg_imm (s->code, X86_SBB, tree->reg1, 0);
4979 br[1] = s->code; x86_jump8 (s->code, 0);
/* negative value: mirror adjustment, adding 1 when fist rounded down
 * (diff > 0); the Z-branch skips the adjust when diff == 0 */
4982 x86_patch (br[0], s->code);
4984 x86_pop_reg (s->code, tree->reg1); // diff
4985 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
4986 x86_pop_reg (s->code, tree->reg1); // rounded value
4987 br[2] = s->code; x86_branch8 (s->code, X86_CC_Z, 0, FALSE);
4988 x86_alu_reg_imm (s->code, X86_SBB, tree->reg1, -1);
4989 x86_patch (br[1], s->code);
4990 x86_patch (br[2], s->code);
/* 64-bit variant of mono_emit_fast_iconv: fast float -> int64 with
 * truncation semantics, producing the result in reg1 (low) and reg2
 * (high) and propagating the +/-1 adjustment through both words with
 * SBB.  NOTE(review): the declaration of br[] and the function braces
 * are elided in this excerpt. */
4993 void mono_emit_fast_iconv_i8 (MBCGEN_TYPE* s, MBTREE_TYPE* tree)
/* slots: [8..15]=rounded qword, [4]=diff, [0]=original float value */
4996 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 16);
4997 x86_fld_reg (s->code, 0);
4998 x86_fist_pop_membase (s->code, X86_ESP, 8, TRUE); // rounded value (qword)
4999 x86_fst_membase (s->code, X86_ESP, 0, FALSE, FALSE); // float value
5000 x86_fild_membase (s->code, X86_ESP, 8, TRUE);
5001 x86_fp_op_reg (s->code, X86_FSUB, 1, TRUE); // diff
5002 x86_fst_membase (s->code, X86_ESP, 4, FALSE, TRUE); // diff
/* branch on the sign bit of the original float's bit pattern */
5004 x86_pop_reg (s->code, tree->reg1); // float value
5005 x86_test_reg_reg (s->code, tree->reg1, tree->reg1);
5006 br[0] = s->code; x86_branch8 (s->code, X86_CC_S, 0, TRUE);
/* positive: subtract the carry (rounded-up case) from the 64-bit pair */
5008 x86_pop_reg (s->code, tree->reg1); // diff
5009 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
5010 x86_pop_reg (s->code, tree->reg1); // rounded value
5011 x86_pop_reg (s->code, tree->reg2);
5012 x86_alu_reg_imm (s->code, X86_SBB, tree->reg1, 0);
5013 x86_alu_reg_imm (s->code, X86_SBB, tree->reg2, 0);
5014 br[1] = s->code; x86_jump8 (s->code, 0);
/* negative: add 1 across the pair when fist rounded toward -inf */
5017 x86_patch (br[0], s->code);
5019 x86_pop_reg (s->code, tree->reg1); // diff
5020 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
5021 x86_pop_reg (s->code, tree->reg1); // rounded value
5022 x86_pop_reg (s->code, tree->reg2);
5023 br[2] = s->code; x86_branch8 (s->code, X86_CC_Z, 0, FALSE);
5024 x86_alu_reg_imm (s->code, X86_SBB, tree->reg1, -1);
5025 x86_alu_reg_imm (s->code, X86_SBB, tree->reg2, -1);
5026 x86_patch (br[1], s->code);
5027 x86_patch (br[2], s->code);
/* Emit code for a dynamic (runtime-sized) stack allocation (localloc).
 * On Win32 the stack must be grown one page at a time with a touch per
 * page so the guard page mechanism commits new pages; elsewhere a
 * single ESP subtraction suffices.
 * NOTE(review): the declarations of br[]/sreg and the function braces
 * are elided in this excerpt. */
5030 void mono_emit_stack_alloc (MBCGEN_TYPE* s, MBTREE_TYPE* tree)
5032 #ifdef PLATFORM_WIN32
5038 * If requested stack size is larger than one page,
5039 * perform stack-touch operation
5040 * (see comments in mono_emit_stack_alloc_const below).
/* small (< 4K) requests skip the probe loop entirely */
5042 x86_test_reg_imm (s->code, tree->left->reg1, ~0xFFF);
5043 br[0] = s->code; x86_branch8 (s->code, X86_CC_Z, 0, FALSE);
5045 sreg = tree->left->reg1;
/* probe loop: allocate and touch one 4K page per iteration while at
 * least a full page remains */
5047 br[2] = s->code; /* loop */
5048 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 0x1000);
5049 x86_test_membase_reg (s->code, X86_ESP, 0, X86_ESP);
5050 x86_alu_reg_imm (s->code, X86_SUB, sreg, 0x1000);
5051 x86_alu_reg_imm (s->code, X86_CMP, sreg, 0x1000);
5052 br[3] = s->code; x86_branch8 (s->code, X86_CC_AE, 0, FALSE);
5053 x86_patch (br[3], br[2]);
/* allocate the sub-page remainder, if any */
5054 x86_test_reg_reg (s->code, sreg, sreg);
5055 br[4] = s->code; x86_branch8 (s->code, X86_CC_Z, 0, FALSE);
5056 x86_alu_reg_reg (s->code, X86_SUB, X86_ESP, sreg);
5058 br[1] = s->code; x86_jump8 (s->code, 0);
/* fast path for small requests: single subtraction, no probing */
5060 x86_patch (br[0], s->code);
5061 x86_alu_reg_reg (s->code, X86_SUB, X86_ESP, tree->left->reg1);
5062 x86_patch (br[1], s->code);
5063 x86_patch (br[4], s->code);
5064 #else /* PLATFORM_WIN32 */
5065 x86_alu_reg_reg (s->code, X86_SUB, X86_ESP, tree->left->reg1);
/* Emit code for a compile-time-constant stack allocation.  On Win32,
 * pages are committed one at a time with a touch per page (see the
 * comment below); the page count is either looped in EAX or unrolled
 * for small sizes.  NOTE(review): the declarations of br[]/i/npages,
 * the loop-vs-unroll threshold test and the function braces are elided
 * in this excerpt. */
5069 void mono_emit_stack_alloc_const (MBCGEN_TYPE* s, MBTREE_TYPE* tree, int size)
5071 #ifdef PLATFORM_WIN32
5077 * Generate stack probe code.
5078 * Under Windows, it is necessary to allocate one page at a time,
5079 * "touching" stack after each successful sub-allocation. This is
5080 * because of the way stack growth is implemented - there is a
5081 * guard page before the lowest stack page that is currently committed.
5082 * Stack normally grows sequentially so OS traps access to the
5083 * guard page and commits more pages when needed.
5085 npages = ((unsigned) size) >> 12;
/* EAX serves as the loop counter; if it is live, allocate the first
 * page by hand, stash EAX at its top, and loop npages-1 more times */
5087 if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX) {
5088 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 0x1000);
5089 x86_test_membase_reg (s->code, X86_ESP, 0, X86_ESP);
5090 x86_mov_membase_reg (s->code, X86_ESP, 0x1000 - 4, X86_EAX, 4); /* save EAX */
5091 x86_mov_reg_imm (s->code, X86_EAX, npages - 1);
5093 x86_mov_reg_imm (s->code, X86_EAX, npages);
/* one probe-and-commit iteration per remaining page */
5095 br[0] = s->code; /* loop */
5096 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 0x1000);
5097 x86_test_membase_reg (s->code, X86_ESP, 0, X86_ESP);
5098 x86_dec_reg (s->code, X86_EAX);
5099 br[1] = s->code; x86_branch8 (s->code, X86_CC_NZ, 0, TRUE);
5100 x86_patch (br[1], br[0]);
5101 if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX)
5102 x86_mov_reg_membase (s->code, X86_EAX, X86_ESP, (npages * 0x1000) - 4, 4); /* restore EAX */
5104 /* generate unrolled code for relatively small allocs */
5105 for (i = npages; --i >= 0;) {
5106 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 0x1000);
5107 x86_test_membase_reg (s->code, X86_ESP, 0, X86_ESP);
/* finally allocate the sub-page remainder (size mod 4096) */
5112 if (size & 0xFFF) x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, -(size & 0xFFF));
5113 #else /* PLATFORM_WIN32 */
5114 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, size);
/* Resolve a virtual-method slot on `this` to a native code pointer
 * (ldvirtftn).  Transparent proxies get a remoting trampoline instead
 * of compiled code.  NOTE(review): local declarations, the is_proxy
 * assignment and the return statement are elided in this excerpt. */
5119 mono_ldvirtftn (MonoObject *this, int slot)
5124 gboolean is_proxy = FALSE;
/* unwrap transparent proxies to the remoted class */
5127 if ((class = this->vtable->klass) == mono_defaults.transparent_proxy_class) {
5128 class = ((MonoTransparentProxy *)this)->klass;
/* NOTE(review): `slot <= vtable_size` looks off-by-one (slot indexes
 * the vtable, so `<` would be the tight bound) — confirm */
5133 g_assert (slot <= class->vtable_size);
5135 m = class->vtable [slot];
5138 return mono_jit_create_remoting_trampoline (m);
/* JIT-compile the target under the metadata lock */
5140 EnterCriticalSection (metadata_section);
5141 addr = mono_compile_method (m);
5142 LeaveCriticalSection (metadata_section);
/* Resolve an interface-method slot on `this` to a native code pointer:
 * translate the interface slot through the class's interface_offsets
 * table into a vtable slot, then compile (or remote) the method.
 * NOTE(review): local declarations, the is_proxy assignment and the
 * return statement are elided in this excerpt. */
5148 mono_ldintftn (MonoObject *this, int slot)
5153 gboolean is_proxy = FALSE;
/* unwrap transparent proxies to the remoted class */
5156 if ((class = this->vtable->klass) == mono_defaults.transparent_proxy_class) {
5157 class = ((MonoTransparentProxy *)this)->klass;
5161 g_assert (slot < class->interface_count);
/* map interface slot -> vtable slot */
5163 slot = class->interface_offsets [slot];
5165 m = class->vtable [slot];
5168 return mono_jit_create_remoting_trampoline (m);
/* JIT-compile the target under the metadata lock */
5170 EnterCriticalSection (metadata_section);
5171 addr = mono_compile_method (m);
5172 LeaveCriticalSection (metadata_section);
5177 gpointer mono_ldftn (MonoMethod *method)
5181 EnterCriticalSection (metadata_section);
5182 addr = mono_compile_method (method);
5183 LeaveCriticalSection (metadata_section);