2 * x86.brg: X86 code generator
5 * Dietmar Maurer (dietmar@ximian.com)
8 * (C) 2001 Ximian, Inc.
17 #ifndef PLATFORM_WIN32
19 #include <sys/syscall.h>
22 #include <mono/metadata/blob.h>
23 #include <mono/metadata/metadata.h>
24 #include <mono/metadata/loader.h>
25 #include <mono/metadata/object.h>
26 #include <mono/metadata/tabledefs.h>
27 #include <mono/metadata/appdomain.h>
28 #include <mono/metadata/marshal.h>
29 #include <mono/metadata/threads.h>
30 #include <mono/arch/x86/x86-codegen.h>
36 * Pull the list of opcodes
38 #define OPDEF(a,b,c,d,e,f,g,h,i,j) \
42 #include "mono/cil/opcode.def"
47 /* alignment of activation frames */
48 #define MONO_FRAME_ALIGNMENT 4
50 void print_lmf (void);
52 #define MBTREE_TYPE MBTree
53 #define MBCGEN_TYPE MonoFlowGraph
54 #define MBCOST_DATA MonoFlowGraph
55 #define MBALLOC_STATE mono_mempool_alloc (data->mp, sizeof (MBState))
58 AMImmediate = 0, // ptr
60 AMIndex = 2, // V[REG*X]
61 AMBaseIndex = 3, // V[REG*X][REG]
74 unsigned last_instr:1;
97 MonoClassField *field;
100 MonoJitBranchInfo bi;
101 MonoJitCallInfo call_info;
102 MonoJitArgumentInfo arg_info;
103 MonoJitNonVirtualCallInfo nonvirt_info;
107 gint64 mono_llmult (gint64 a, gint64 b);
108 guint64 mono_llmult_ovf (gpointer *exc, guint32 al, gint32 ah, guint32 bl, gint32 bh);
109 guint64 mono_llmult_ovf_un (gpointer *exc, guint32 al, guint32 ah, guint32 bl, guint32 bh);
110 gint64 mono_lldiv (gint64 a, gint64 b);
111 gint64 mono_llrem (gint64 a, gint64 b);
112 guint64 mono_lldiv_un (guint64 a, guint64 b);
113 guint64 mono_llrem_un (guint64 a, guint64 b);
114 gpointer mono_ldsflda (MonoClass *klass, int offset);
116 gpointer mono_ldvirtftn (MonoObject *this, int slot);
117 gpointer mono_ldintftn (MonoObject *this, int slot);
118 gpointer mono_ldftn (MonoMethod *method);
120 void mono_emit_fast_iconv (MBCGEN_TYPE* s, MBTREE_TYPE* tree);
121 void mono_emit_fast_iconv_i8 (MBCGEN_TYPE* s, MBTREE_TYPE* tree);
122 void mono_emit_stack_alloc (MBCGEN_TYPE* s, MBTREE_TYPE* tree);
123 void mono_emit_stack_alloc_const (MBCGEN_TYPE* s, MBTREE_TYPE* tree, int size);
126 mono_array_new_wrapper (MonoClass *eclass, guint32 n);
128 mono_object_new_wrapper (MonoClass *klass);
130 mono_ldstr_wrapper (MonoImage *image, guint32 ind);
133 get_mono_object_isinst (void);
135 #define MB_OPT_LEVEL 1
137 #if MB_OPT_LEVEL == 0
138 #define MB_USE_OPT1(c) 65535
139 #define MB_USE_OPT2(c) 65535
141 #if MB_OPT_LEVEL == 1
142 #define MB_USE_OPT1(c) c
143 #define MB_USE_OPT2(c) 65535
145 #if MB_OPT_LEVEL >= 2
146 #define MB_USE_OPT1(c) c
147 #define MB_USE_OPT2(c) c
/*
 * REAL_PRINT_REG(text,reg): debug helper macro.  Emits code that, at run
 * time, calls printf (text " %d %p\n", reg-number, reg-value).  The caller-
 * clobbered registers EAX/EDX/ECX are saved around the call and restored
 * afterwards; only the three printf arguments (3*4 bytes) are popped off
 * the stack after the call.  Intended for use inside rule bodies where
 * `s->code` is the current emit pointer (see PRINT_REG below).
 */
#define REAL_PRINT_REG(text,reg) \
	mono_assert (reg >= 0); \
	x86_push_reg (s->code, X86_EAX); \
	x86_push_reg (s->code, X86_EDX); \
	x86_push_reg (s->code, X86_ECX); \
	x86_push_reg (s->code, reg); \
	x86_push_imm (s->code, reg); \
	x86_push_imm (s->code, text " %d %p\n"); \
	x86_mov_reg_imm (s->code, X86_EAX, printf); \
	x86_call_reg (s->code, X86_EAX); \
	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 3*4); \
	x86_pop_reg (s->code, X86_ECX); \
	x86_pop_reg (s->code, X86_EDX); \
	x86_pop_reg (s->code, X86_EAX);
168 debug_memcopy (void *dest, const void *src, size_t n);
171 #define MEMCOPY debug_memcopy
172 #define PRINT_REG(text,reg) REAL_PRINT_REG(text,reg)
175 #define MEMCOPY memcpy
177 #define PRINT_REG(x,y)
181 /* The call instruction for virtual functions must have a known
182 * size (used by x86_magic_trampoline)
184 #define x86_call_virtual(inst,basereg,disp) \
186 *(inst)++ = (unsigned char)0xff; \
187 x86_address_byte ((inst), 2, 2, (basereg)); \
188 x86_imm_emit32 ((inst), (disp)); \
191 /* emit an exception if condition is fail */
192 #define EMIT_COND_SYSTEM_EXCEPTION(cond,signed,exc_name) \
195 x86_branch8 (s->code, cond, 10, signed); \
196 x86_push_imm (s->code, exc_name); \
197 t = arch_get_throw_exception_by_name (); \
198 mono_add_jump_info (s, s->code, \
199 MONO_JUMP_INFO_ABS, t); \
200 x86_call_code (s->code, 0); \
203 #define X86_ARG_PAD(pad) do { \
206 x86_push_reg (s->code, X86_EAX); \
208 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, pad); \
212 #define X86_CALL_END do { \
213 int size = tree->data.call_info.frame_size; \
215 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, size); \
218 #define X86_CALL_BEGIN do { \
219 int pad = tree->data.call_info.pad; \
221 if (tree->left->op != MB_TERM_NOP) { \
222 mono_assert (lreg >= 0); \
223 x86_push_reg (s->code, lreg); \
224 x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0); \
226 if (tree->data.call_info.vtype_num) { \
227 int offset = VARINFO (s, tree->data.call_info.vtype_num).offset; \
228 x86_lea_membase (s->code, treg, X86_EBP, offset); \
229 x86_push_reg (s->code, treg); \
233 /* we use this macro to move one lreg to another - source and
234 destination may overlap, but the register allocator has to
235 make sure that ((d1 < d2) && (s1 < s2))
237 #define MOVE_LREG(d1,d2,s1,s2) \
239 g_assert ((d1 < d2) && (s1 < s2)); \
240 if ((d1) <= (s1)) { \
242 x86_mov_reg_reg (s->code, d1, s1, 4); \
244 x86_mov_reg_reg (s->code, d2, s2, 4); \
247 x86_mov_reg_reg (s->code, d2, s2, 4); \
249 x86_mov_reg_reg (s->code, d1, s1, 4); \
253 #define X86_REMOTING_CHECK tree->left->op != MB_TERM_NOP && tree->right->data.nonvirt_info.method && \
254 (tree->right->data.nonvirt_info.method->klass->marshalbyref || \
255 tree->right->data.nonvirt_info.method->klass == mono_defaults.object_class)
258 This macro adds transparant proxy checks for non-virtual methods in a MBR object
259 and methods that belongs to System::Object.
261 #define X86_REMOTING_CALL do { \
263 x86_push_reg (s->code, lreg); \
264 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); \
265 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); \
266 x86_alu_reg_imm (s->code, X86_CMP, lreg, (int)mono_defaults.transparent_proxy_class); \
267 x86_pop_reg (s->code, lreg); \
268 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); \
270 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_jit_create_remoting_trampoline (tree->right->data.nonvirt_info.method)); \
271 x86_call_code (s->code, 0); \
273 br [1] = s->code; x86_jump8 (s->code, 0); \
274 x86_patch (br [0], s->code); \
276 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.nonvirt_info.p); \
277 x86_call_code (s->code, 0); \
279 x86_patch (br [1], s->code); \
285 # terminal definitions
289 %term CONST_I4 CONST_I8 CONST_R4 CONST_R8
290 %term LDIND_I1 LDIND_U1 LDIND_I2 LDIND_U2 LDIND_I4 LDIND_I8 LDIND_R4 LDIND_R8 LDIND_OBJ
291 %term STIND_I1 STIND_I2 STIND_I4 STIND_I8 STIND_R4 STIND_R8 STIND_OBJ
292 %term ADDR_L ADDR_G ARG_I4 ARG_I8 ARG_R4 ARG_R8 ARG_OBJ CALL_I4 CALL_I8 CALL_R8 CALL_VOID
293 %term BREAK SWITCH BR RET_VOID RET RET_OBJ ENDFINALLY ENDFILTER JMP
294 %term ADD ADD_OVF ADD_OVF_UN SUB SUB_OVF SUB_OVF_UN MUL MUL_OVF MUL_OVF_UN
295 %term DIV DIV_UN REM REM_UN AND OR XOR SHL SHR SHR_UN NEG NOT CKFINITE
296 %term COMPARE CBRANCH BRTRUE BRFALSE CSET
297 %term CONV_I4 CONV_I1 CONV_I2 CONV_I8 CONV_U1 CONV_U2 CONV_U4 CONV_U8 CONV_R4 CONV_R8 CONV_R_UN
298 %term INTF_ADDR VFUNC_ADDR NOP NEWARR NEWARR_SPEC NEWOBJ NEWOBJ_SPEC
299 %term INITBLK CPBLK CPSRC POP INITOBJ LOCALLOC
300 %term ISINST CASTCLASS UNBOX
301 %term CONV_OVF_I1 CONV_OVF_U1 CONV_OVF_I2 CONV_OVF_U2 CONV_OVF_U4 CONV_OVF_U8 CONV_OVF_I4
302 %term CONV_OVF_I4_UN CONV_OVF_U1_UN CONV_OVF_U2_UN
303 %term CONV_OVF_I2_UN CONV_OVF_I8_UN CONV_OVF_I1_UN
304 %term EXCEPTION THROW RETHROW HANDLER CHECKTHIS RETHROW_ABORT
305 %term LDLEN LDELEMA LDFTN LDVIRTFTN LDSTR LDSFLDA
306 %term REMOTE_LDFLDA REMOTE_STIND_I1 REMOTE_STIND_I2 REMOTE_STIND_I4
307 %term REMOTE_STIND_I8 REMOTE_STIND_R4 REMOTE_STIND_R8 REMOTE_STIND_OBJ
310 %term FUNC1 PROC2 PROC3 FREE OBJADDR VTADDR
326 tree->data.ainfo.offset = tree->data.i;
327 tree->data.ainfo.amode = AMImmediate;
331 tree->data.ainfo.offset = tree->data.i;
332 tree->data.ainfo.amode = AMImmediate;
335 acon: ADD (ADDR_G, CONST_I4) {
336 tree->data.ainfo.offset = (unsigned)tree->left->data.p + tree->right->data.i;
337 tree->data.ainfo.amode = AMImmediate;
343 tree->data.ainfo.offset = 0;
344 tree->data.ainfo.basereg = tree->reg1;
345 tree->data.ainfo.amode = AMBase;
348 base: ADD (reg, CONST_I4) {
349 tree->data.ainfo.offset = tree->right->data.i;
350 tree->data.ainfo.basereg = tree->left->reg1;
351 tree->data.ainfo.amode = AMBase;
355 tree->data.ainfo.offset = VARINFO (s, tree->data.i).offset;
356 tree->data.ainfo.basereg = X86_EBP;
357 tree->data.ainfo.amode = AMBase;
359 MBCOND (VARINFO (data, tree->data.i).reg < 0);
364 tree->data.ainfo.offset = 0;
365 tree->data.ainfo.indexreg = tree->reg1;
366 tree->data.ainfo.shift = 0;
367 tree->data.ainfo.amode = AMIndex;
370 index: SHL (reg, CONST_I4) {
371 tree->data.ainfo.offset = 0;
372 tree->data.ainfo.amode = AMIndex;
373 tree->data.ainfo.indexreg = tree->left->reg1;
374 tree->data.ainfo.shift = tree->right->data.i;
376 MBCOND (tree->right->data.i == 0 ||
377 tree->right->data.i == 1 ||
378 tree->right->data.i == 2 ||
379 tree->right->data.i == 3);
384 index: MUL (reg, CONST_I4) {
385 static int fast_log2 [] = { 1, 0, 1, -1, 2, -1, -1, -1, 3 };
387 tree->data.ainfo.offset = 0;
388 tree->data.ainfo.amode = AMIndex;
389 tree->data.ainfo.indexreg = tree->left->reg1;
390 tree->data.ainfo.shift = fast_log2 [tree->right->data.i];
392 MBCOND (tree->right->data.i == 1 ||
393 tree->right->data.i == 2 ||
394 tree->right->data.i == 4 ||
395 tree->right->data.i == 8);
404 addr: ADD (index, base) {
405 tree->data.ainfo.offset = tree->right->data.ainfo.offset;
406 tree->data.ainfo.basereg = tree->right->data.ainfo.basereg;
407 tree->data.ainfo.amode = tree->left->data.ainfo.amode |
408 tree->right->data.ainfo.amode;
409 tree->data.ainfo.shift = tree->left->data.ainfo.shift;
410 tree->data.ainfo.indexreg = tree->left->data.ainfo.indexreg;
413 # we pass exception in ECX to catch handler
415 int offset = VARINFO (s, tree->data.i).offset;
417 if (tree->reg1 != X86_ECX)
418 x86_mov_reg_reg (s->code, tree->reg1, X86_ECX, 4);
420 /* store it so that we can RETHROW it later */
421 x86_mov_membase_reg (s->code, X86_EBP, offset, tree->reg1, 4);
427 x86_push_reg (s->code, tree->left->reg1);
428 target = arch_get_throw_exception ();
429 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, target);
430 x86_call_code (s->code, target);
434 int offset = VARINFO (s, tree->data.i).offset;
437 x86_push_membase (s->code, X86_EBP, offset);
439 target = arch_get_throw_exception ();
440 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, target);
441 x86_call_code (s->code, target);
444 stmt: RETHROW_ABORT {
448 target = mono_thread_current;
449 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, target);
450 x86_call_code (s->code, target);
452 x86_mov_reg_membase (s->code, X86_EAX, X86_EAX, G_STRUCT_OFFSET (MonoThread, abort_exc), 4);
453 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0);
455 br = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
457 x86_push_reg (s->code, X86_EAX);
459 target = arch_get_throw_exception ();
460 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, target);
461 x86_call_code (s->code, target);
463 x86_patch (br, s->code);
467 /* save ESP (used by ENDFINALLY) */
468 x86_mov_membase_reg (s->code, X86_EBP, mono_exc_esp_offset, X86_ESP, 4);
469 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
470 x86_call_imm (s->code, 0);
474 /* restore ESP - which can be modified when we allocate value types
475 * in the finally handler */
476 x86_mov_reg_membase (s->code, X86_ESP, X86_EBP, mono_exc_esp_offset, 4);
477 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
481 stmt: ENDFILTER (reg) {
482 /* restore ESP - which can be modified when we allocate value types
484 x86_mov_reg_membase (s->code, X86_ESP, X86_EBP, mono_exc_esp_offset, 4);
485 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
486 if (tree->left->reg1 != X86_EAX)
487 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
491 stmt: STIND_I4 (ADDR_L, ADD (LDIND_I4 (ADDR_L), CONST_I4)) {
492 int vn = tree->left->data.i;
493 int treg = VARINFO (s, vn).reg;
494 int offset = VARINFO (s, vn).offset;
495 int data = tree->right->right->data.i;
499 x86_inc_reg (s->code, treg);
501 x86_inc_membase (s->code, X86_EBP, offset);
504 x86_alu_reg_imm (s->code, X86_ADD, treg, data);
506 x86_alu_membase_imm (s->code, X86_ADD, X86_EBP, offset, data);
509 MBCOND (tree->right->left->left->data.i == tree->left->data.i);
513 stmt: STIND_I4 (ADDR_L, SUB (LDIND_I4 (ADDR_L), CONST_I4)) {
514 int vn = tree->left->data.i;
515 int treg = VARINFO (s, vn).reg;
516 int offset = VARINFO (s, vn).offset;
517 int data = tree->right->right->data.i;
521 x86_dec_reg (s->code, treg);
523 x86_dec_membase (s->code, X86_EBP, offset);
526 x86_alu_reg_imm (s->code, X86_SUB, treg, data);
528 x86_alu_membase_imm (s->code, X86_SUB, X86_EBP, offset, data);
531 MBCOND (tree->right->left->left->data.i == tree->left->data.i);
535 stmt: STIND_I4 (ADDR_L, ADD (LDIND_I4 (ADDR_L), reg)) {
536 int vn = tree->left->data.i;
537 int treg = VARINFO (s, vn).reg;
538 int sreg = tree->right->right->reg1;
539 int offset = VARINFO (s, vn).offset;
542 x86_alu_reg_reg (s->code, X86_ADD, treg, sreg);
544 x86_alu_membase_reg (s->code, X86_ADD, X86_EBP, offset, sreg);
547 MBCOND (tree->right->left->left->data.i == tree->left->data.i);
551 stmt: STIND_I4 (ADDR_L, LDIND_I4 (ADDR_L)) {
552 int treg1 = VARINFO (s, tree->left->data.i).reg;
553 int treg2 = VARINFO (s, tree->right->left->data.i).reg;
554 int offset1 = VARINFO (s, tree->left->data.i).offset;
555 int offset2 = VARINFO (s, tree->right->left->data.i).offset;
557 //{static int cx= 0; printf ("CX %5d\n", cx++);}
559 if (treg1 >= 0 && treg2 >= 0) {
560 x86_mov_reg_reg (s->code, treg1, treg2, 4);
563 if (treg1 >= 0 && treg2 < 0) {
564 x86_mov_reg_membase (s->code, treg1, X86_EBP, offset2, 4);
567 if (treg1 < 0 && treg2 >= 0) {
568 x86_mov_membase_reg (s->code, X86_EBP, offset1, treg2, 4);
572 g_assert_not_reached ();
575 MBCOND (VARINFO (data, tree->left->data.i).reg >= 0 ||
576 VARINFO (data, tree->right->left->data.i).reg >= 0);
580 stmt: STIND_I4 (addr, CONST_I4) {
581 switch (tree->left->data.ainfo.amode) {
584 x86_mov_mem_imm (s->code, tree->left->data.ainfo.offset, tree->right->data.i, 4);
588 x86_mov_membase_imm (s->code, tree->left->data.ainfo.basereg,
589 tree->left->data.ainfo.offset, tree->right->data.i, 4);
592 x86_mov_memindex_imm (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
593 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
594 tree->right->data.i, 4);
597 x86_mov_memindex_imm (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
598 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
599 tree->right->data.i, 4);
604 stmt: STIND_I4 (addr, reg) {
606 switch (tree->left->data.ainfo.amode) {
609 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 4);
613 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
614 tree->left->data.ainfo.offset, tree->right->reg1, 4);
617 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
618 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
619 tree->right->reg1, 4);
622 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
623 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
624 tree->right->reg1, 4);
629 stmt: REMOTE_STIND_I4 (reg, reg) {
632 int lreg = tree->left->reg1;
633 int rreg = tree->right->reg1;
642 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
643 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
644 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
646 /* this is a transparent proxy - remote the call */
648 /* save value to stack */
649 x86_push_reg (s->code, rreg);
651 x86_push_reg (s->code, X86_ESP);
652 x86_push_imm (s->code, tree->data.fi.field);
653 x86_push_imm (s->code, tree->data.fi.klass);
654 x86_push_reg (s->code, lreg);
655 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
656 x86_call_code (s->code, 0);
657 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
659 br [1] = s->code; x86_jump8 (s->code, 0);
661 x86_patch (br [0], s->code);
662 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
663 tree->data.fi.field->offset;
664 x86_mov_membase_reg (s->code, lreg, offset, rreg, 4);
666 x86_patch (br [1], s->code);
669 stmt: STIND_I1 (addr, reg) {
670 PRINT_REG ("STIND_I1", tree->right->reg1);
672 switch (tree->left->data.ainfo.amode) {
675 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 1);
679 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
680 tree->left->data.ainfo.offset, tree->right->reg1, 1);
683 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
684 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
685 tree->right->reg1, 1);
688 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
689 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
690 tree->right->reg1, 1);
695 stmt: REMOTE_STIND_I1 (reg, reg) {
698 int lreg = tree->left->reg1;
699 int rreg = tree->right->reg1;
708 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
709 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
710 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
712 /* this is a transparent proxy - remote the call */
714 /* save value to stack */
715 x86_push_reg (s->code, rreg);
717 x86_push_reg (s->code, X86_ESP);
718 x86_push_imm (s->code, tree->data.fi.field);
719 x86_push_imm (s->code, tree->data.fi.klass);
720 x86_push_reg (s->code, lreg);
721 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
722 x86_call_code (s->code, 0);
723 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
725 br [1] = s->code; x86_jump8 (s->code, 0);
727 x86_patch (br [0], s->code);
728 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
729 tree->data.fi.field->offset;
730 x86_mov_membase_reg (s->code, lreg, offset, rreg, 1);
732 x86_patch (br [1], s->code);
735 stmt: STIND_I2 (addr, reg) {
736 PRINT_REG ("STIND_I2", tree->right->reg1);
738 switch (tree->left->data.ainfo.amode) {
741 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 2);
745 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
746 tree->left->data.ainfo.offset, tree->right->reg1, 2);
749 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
750 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
751 tree->right->reg1, 2);
754 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
755 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
756 tree->right->reg1, 2);
761 stmt: REMOTE_STIND_I2 (reg, reg) {
764 int lreg = tree->left->reg1;
765 int rreg = tree->right->reg1;
774 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
775 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
776 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
778 /* this is a transparent proxy - remote the call */
780 /* save value to stack */
781 x86_push_reg (s->code, rreg);
783 x86_push_reg (s->code, X86_ESP);
784 x86_push_imm (s->code, tree->data.fi.field);
785 x86_push_imm (s->code, tree->data.fi.klass);
786 x86_push_reg (s->code, lreg);
787 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
788 x86_call_code (s->code, 0);
789 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
791 br [1] = s->code; x86_jump8 (s->code, 0);
793 x86_patch (br [0], s->code);
794 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
795 tree->data.fi.field->offset;
796 x86_mov_membase_reg (s->code, lreg, offset, rreg, 2);
798 x86_patch (br [1], s->code);
801 reg: LDIND_I4 (ADDR_L) {
802 int treg = VARINFO (s, tree->left->data.i).reg;
804 if (treg != tree->reg1)
805 x86_mov_reg_reg (s->code, tree->reg1, treg, 4);
808 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
812 stmt: STIND_I4 (ADDR_L, CONST_I4) {
813 int treg = VARINFO (s, tree->left->data.i).reg;
815 x86_mov_reg_imm (s->code, treg, tree->right->data.i);
818 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
822 stmt: STIND_I4 (ADDR_L, LDIND_I4 (ADDR_L)) {
823 int treg = VARINFO (s, tree->left->data.i).reg;
824 int offset = VARINFO (s, tree->right->left->data.i).offset;
826 x86_mov_reg_membase (s->code, treg, X86_EBP, offset, 4);
828 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
829 MBCOND ((VARINFO (data, tree->right->left->data.i).reg < 0));
833 stmt: STIND_I4 (ADDR_L, reg) {
834 int treg = VARINFO (s, tree->left->data.i).reg;
836 if (treg != tree->right->reg1)
837 x86_mov_reg_reg (s->code, treg, tree->right->reg1, 4);
840 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
845 reg: LDIND_I4 (addr) {
847 switch (tree->left->data.ainfo.amode) {
850 x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4);
854 x86_mov_reg_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
855 tree->left->data.ainfo.offset, 4);
858 x86_mov_reg_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
859 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, 4);
862 x86_mov_reg_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
863 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
864 tree->left->data.ainfo.shift, 4);
869 PRINT_REG ("LDIND_I4", tree->reg1);
872 reg: LDIND_I1 (addr) {
873 switch (tree->left->data.ainfo.amode) {
876 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, TRUE, FALSE);
880 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
881 tree->left->data.ainfo.offset, TRUE, FALSE);
884 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
885 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, TRUE, FALSE);
888 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
889 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
890 tree->left->data.ainfo.shift, TRUE, FALSE);
894 PRINT_REG ("LDIND_I1", tree->reg1);
897 reg: LDIND_U1 (addr) {
898 switch (tree->left->data.ainfo.amode) {
901 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, FALSE, FALSE);
905 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
906 tree->left->data.ainfo.offset, FALSE, FALSE);
909 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
910 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, FALSE, FALSE);
913 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
914 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
915 tree->left->data.ainfo.shift, FALSE, FALSE);
919 PRINT_REG ("LDIND_U1", tree->reg1);
922 reg: LDIND_I2 (addr) {
923 switch (tree->left->data.ainfo.amode) {
926 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, TRUE, TRUE);
930 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
931 tree->left->data.ainfo.offset, TRUE, TRUE);
934 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
935 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, TRUE, TRUE);
938 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
939 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
940 tree->left->data.ainfo.shift, TRUE, TRUE);
944 PRINT_REG ("LDIND_U2", tree->reg1);
947 reg: LDIND_U2 (addr) {
948 switch (tree->left->data.ainfo.amode) {
951 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, FALSE, TRUE);
955 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
956 tree->left->data.ainfo.offset, FALSE, TRUE);
959 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
960 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, FALSE, TRUE);
963 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
964 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
965 tree->left->data.ainfo.shift, FALSE, TRUE);
969 PRINT_REG ("LDIND_U2", tree->reg1);
972 reg: REMOTE_LDFLDA (reg) {
975 int lreg = tree->left->reg1;
980 if (tree->reg1 != treg)
981 x86_push_reg (s->code, treg);
983 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
984 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
985 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
987 /* this is a transparent proxy - remote the call */
989 x86_push_reg (s->code, X86_EAX);
991 x86_push_reg (s->code, X86_EDX);
992 x86_push_reg (s->code, X86_ECX);
994 x86_push_imm (s->code, 0);
995 x86_push_imm (s->code, tree->data.fi.field);
996 x86_push_imm (s->code, tree->data.fi.klass);
997 x86_push_reg (s->code, lreg);
998 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_load_remote_field);
999 x86_call_code (s->code, 0);
1000 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
1002 if (treg != X86_EAX)
1003 x86_mov_reg_reg (s->code, treg, X86_EAX, 4);
1005 x86_pop_reg (s->code, X86_ECX);
1006 if (treg != X86_EDX)
1007 x86_pop_reg (s->code, X86_EDX);
1008 if (treg != X86_EAX)
1009 x86_pop_reg (s->code, X86_EAX);
1011 x86_mov_reg_reg (s->code, tree->reg1, treg, 4);
1013 br [1] = s->code; x86_jump8 (s->code, 0);
1015 x86_patch (br [0], s->code);
1016 if (tree->data.fi.klass->valuetype)
1017 x86_lea_membase (s->code, tree->reg1, lreg,
1018 tree->data.fi.field->offset - sizeof (MonoObject));
1020 x86_lea_membase (s->code, tree->reg1, lreg, tree->data.fi.field->offset);
1022 x86_patch (br [1], s->code);
1024 if (tree->reg1 != treg)
1025 x86_pop_reg (s->code, treg);
1029 int offset = VARINFO (s, tree->data.i).offset;
1031 x86_lea_membase (s->code, tree->reg1, X86_EBP, offset);
1033 PRINT_REG ("ADDR_L", tree->reg1);
1035 MBCOND (VARINFO (data, tree->data.i).reg < 0);
1041 x86_mov_reg_imm (s->code, tree->reg1, tree->data.p);
/* CONV_I1: sign-extend the low byte of the source register into reg1
 * (x86_widen_reg args: signed = TRUE, half-word = FALSE -> movsx r32, r8) */
reg: CONV_I1 (reg) {
	x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, FALSE);
/* CONV_U1: zero-extend the low byte of the source register into reg1
 * (signed = FALSE, half-word = FALSE -> movzx r32, r8) */
reg: CONV_U1 (reg) {
	x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
/* CONV_I2: sign-extend the low word of the source register into reg1
 * (signed = TRUE, half-word = TRUE -> movsx r32, r16) */
reg: CONV_I2 (reg) {
	x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, TRUE);
/* CONV_U2: zero-extend the low word of the source register into reg1
 * (signed = FALSE, half-word = TRUE -> movzx r32, r16) */
reg: CONV_U2 (reg) {
	x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
1061 x86_mov_reg_imm (s->code, tree->reg1, tree->data.i);
/* CONV_I4: 32-bit to 32-bit conversion is just a register move (elided
 * when source and destination already coincide). */
reg: CONV_I4 (reg) {
	if (tree->reg1 != tree->left->reg1)
		x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
	PRINT_REG ("CONV_I4", tree->left->reg1);
/* CONV_U4: same as CONV_I4 on x86 - no bits change, only a move if the
 * allocator assigned different registers. */
reg: CONV_U4 (reg) {
	if (tree->reg1 != tree->left->reg1)
		x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
	PRINT_REG ("CONV_U4", tree->left->reg1);
/* CONV_OVF_I4 from an i4 source: the conversion can never overflow, so
 * no range check is emitted - just move the value if needed. */
reg: CONV_OVF_I4 (reg) {
	if (tree->reg1 != tree->left->reg1)
		x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
	PRINT_REG ("CONV_OVF_I4", tree->left->reg1);
/* CONV_OVF_I4 from a float source: truncate ST(0) to a 32-bit integer
 * with the FPU rounding mode forced to "truncate toward zero" (RC bits
 * 0xc00 in the control word), then check the FPU status for failure and
 * raise OverflowException.  tree->left->reg1 is reused here as an integer
 * scratch register for the control-word manipulation. */
reg: CONV_OVF_I4 (freg) {
	x86_push_reg (s->code, X86_EAX);
	/* make room for a saved copy of the FPU control word */
	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
	x86_fnstcw_membase(s->code, X86_ESP, 0);
	x86_mov_reg_membase (s->code, tree->left->reg1, X86_ESP, 0, 2);
	/* set RC = 11b (truncate) in a modified copy at [ESP+2] */
	x86_alu_reg_imm (s->code, X86_OR, tree->left->reg1, 0xc00);
	x86_mov_membase_reg (s->code, X86_ESP, 2, tree->left->reg1, 2);
	x86_fldcw_membase (s->code, X86_ESP, 2);
	x86_push_reg (s->code, X86_EAX); // SP = SP - 4
	/* store ST(0) truncated to the slot just pushed */
	x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
	x86_fstsw(s->code); // stores flags in ax
	/* NOTE(review): fstsw only writes AX (low 16 bits); masking bit 31 of
	 * EAX therefore tests stale upper bits, not an FPU status flag - this
	 * looks like it should mask a status bit (or test the stored result).
	 * Confirm the intended mask before relying on this overflow check. */
	x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x80000000);
	/* throws if the masked bit is set (EMIT_* branches over the throw
	 * when the condition holds) */
	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
	/* pop the converted integer into the result register */
	x86_pop_reg (s->code, tree->reg1);
	/* restore the original FPU control word and the stack */
	x86_fldcw_membase (s->code, X86_ESP, 0);
	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
	x86_pop_reg (s->code, X86_EAX);
1103 reg: CONV_OVF_U4 (reg) {
1104 /* Keep in sync with CONV_OVF_I4_UN below, they are the same on 32-bit machines */
1105 x86_test_reg_imm (s->code, tree->left->reg1, 0x8000000);
1106 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1107 if (tree->reg1 != tree->left->reg1)
1108 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1111 reg: CONV_OVF_I4_UN (reg) {
1112 /* Keep in sync with CONV_OVF_U4 above, they are the same on 32-bit machines */
1113 x86_test_reg_imm (s->code, tree->left->reg1, 0x8000000);
1114 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1115 if (tree->reg1 != tree->left->reg1)
1116 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1119 reg: CONV_OVF_U4 (freg) {
1120 x86_push_reg (s->code, X86_EAX);
1121 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
1122 x86_fnstcw_membase(s->code, X86_ESP, 0);
1123 x86_mov_reg_membase (s->code, tree->left->reg1, X86_ESP, 0, 2);
1124 x86_alu_reg_imm (s->code, X86_OR, tree->left->reg1, 0xc00);
1125 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->left->reg1, 2);
1126 x86_fldcw_membase (s->code, X86_ESP, 2);
1127 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
1128 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
1129 x86_fstsw(s->code); // stores flags in ax
1130 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x80000000);
1132 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1134 x86_pop_reg (s->code, tree->reg1);
1135 x86_fldcw_membase (s->code, X86_ESP, 0);
1136 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
1137 x86_pop_reg (s->code, X86_EAX);
1139 x86_test_reg_imm (s->code, tree->reg1, 0x8000000);
1140 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1143 reg: CONV_OVF_I1 (reg) {
1144 /* probe value to be within -128 to 127 */
1145 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 127);
1146 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, TRUE, "OverflowException");
1147 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, -128);
1148 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GT, TRUE, "OverflowException");
1149 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, FALSE);
/* CONV_OVF_I1_UN: an unsigned source fits in sbyte iff it is <= 127,
 * i.e. no bit of 0xffffff80 is set; then zero-extend the low byte. */
reg: CONV_OVF_I1_UN (reg) {
	/* probe value to be within 0 to 127 */
	x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff80);
	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
	x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
/* CONV_OVF_U1: the value fits in a byte iff no bit above bit 7 is set;
 * then zero-extend the low byte into the result register. */
reg: CONV_OVF_U1 (reg) {
	/* Keep in sync with CONV_OVF_U1_UN routine below, they are the same on 32-bit machines */
	/* probe value to be within 0 to 255 */
	x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff00);
	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
	x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
/* CONV_OVF_U1_UN: identical to CONV_OVF_U1 - unsigned 32-bit fits in a
 * byte iff no bit above bit 7 is set; zero-extend the low byte. */
reg: CONV_OVF_U1_UN (reg) {
	/* Keep in sync with CONV_OVF_U1 routine above, they are the same on 32-bit machines */
	/* probe value to be within 0 to 255 */
	x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff00);
	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
	x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
/* CONV_OVF_I2: range-check a signed 32-bit value against the short range
 * [-32768, 32767] (upper bound via <=, lower bound via >=), throwing
 * OverflowException otherwise, then sign-extend the low word. */
reg: CONV_OVF_I2 (reg) {
	/* Probe value to be within -32768 and 32767 */
	x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 32767);
	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, TRUE, "OverflowException");
	x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, -32768);
	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GE, TRUE, "OverflowException");
	x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, TRUE);
/* int32 -> uint16 with overflow check: value must fit in the low 16 bits,
 * then zero-extend the low word. */
1184 reg: CONV_OVF_U2 (reg) {
1185 /* Keep in sync with CONV_OVF_U2_UN below, they are the same on 32-bit machines */
1186 /* Probe value to be within 0 and 65535 */
1187 x86_test_reg_imm (s->code, tree->left->reg1, 0xffff0000);
/* Use FALSE (unsigned) to match CONV_OVF_U2_UN exactly, as the sync
 * comment above requires; the signedness flag is irrelevant for
 * X86_CC_EQ, so the emitted branch (je) is unchanged. */
1188 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1189 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
/* uint32 -> uint16 with overflow check (same test as CONV_OVF_U2). */
1192 reg: CONV_OVF_U2_UN (reg) {
1193 /* Keep in sync with CONV_OVF_U2 above, they are the same on 32-bit machines */
1194 /* Probe value to be within 0 and 65535 */
1195 x86_test_reg_imm (s->code, tree->left->reg1, 0xffff0000);
1196 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1197 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);

/* uint32 -> int16 with overflow check: bit 15 or any higher bit set means
 * the value is outside [0, 32767]. */
1200 reg: CONV_OVF_I2_UN (reg) {
1201 /* Convert uint value into short, value within 0 and 32767 */
1202 x86_test_reg_imm (s->code, tree->left->reg1, 0xffff8000);
1203 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1204 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
/* Multiply by a constant: strength-reduce small multipliers into
 * LEA/SHL/ADD sequences, fall back to a single IMUL-with-immediate for
 * everything else, and use a plain SHL when the constant is an exact
 * power of two.  The loop apparently searches for i with v == 1 << i
 * (j walks the power, k masks the bits above it) — loop body not visible
 * here, confirm in full source. */
1207 reg: MUL (reg, CONST_I4) "MB_USE_OPT1(0)" {
1208 unsigned int i, j, k, v;
1210 v = tree->right->data.i;
1211 for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
/* Not a power of two: pick a strength-reduced sequence or generic IMUL.
 * BUGFIX: the original tested "v < 0 ||" here, but v is unsigned int so
 * that comparison was always false (dead code and a compiler warning).
 * Negative multipliers still reach the generic IMUL path because their
 * high bits satisfy "v & k". */
1216 if (i == 32 || v & k) {
1219 /* LEA r1, [r2 + r2*2] */
1220 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 1);
1223 /* LEA r1, [r2 + r2*4] */
1224 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1227 /* LEA r1, [r2 + r2*2] */
1229 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 1);
1230 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
1233 /* LEA r1, [r2 + r2*8] */
1234 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 3);
1237 /* LEA r1, [r2 + r2*4] */
1239 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1240 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
1243 /* LEA r1, [r2 + r2*2] */
1245 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 1);
1246 x86_shift_reg_imm (s->code, X86_SHL, tree->reg1, 2);
1249 /* LEA r1, [r2 + r2*4] */
1250 /* LEA r1, [r1 + r1*4] */
1251 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1252 x86_lea_memindex (s->code, tree->reg1, tree->reg1, 0, tree->reg1, 2);
1255 /* LEA r1, [r2 + r2*4] */
1257 /* LEA r1, [r1 + r1*4] */
1258 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1259 x86_shift_reg_imm (s->code, X86_SHL, tree->reg1, 2);
1260 x86_lea_memindex (s->code, tree->reg1, tree->reg1, 0, tree->reg1, 2);
/* generic case: one IMUL with immediate */
1263 x86_imul_reg_reg_imm (s->code, tree->reg1, tree->left->reg1, tree->right->data.i);
/* exact power of two: shift left by i */
1267 x86_shift_reg_imm (s->code, X86_SHL, tree->left->reg1, i);
1268 if (tree->reg1 != tree->left->reg1)
1269 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Signed multiply, low 32 bits only: imul left, right; copy to the
 * destination register if it differs. */
1273 reg: MUL (reg, reg) {
1274 x86_imul_reg_reg (s->code, tree->left->reg1, tree->right->reg1);
1276 if (tree->reg1 != tree->left->reg1)
1277 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Signed multiply with overflow check: IMUL sets OF on signed overflow,
 * so throw unless "no overflow" (X86_CC_NO). */
1280 reg: MUL_OVF (reg, reg) {
1281 x86_imul_reg_reg (s->code, tree->left->reg1, tree->right->reg1);
1282 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
1284 if (tree->reg1 != tree->left->reg1)
1285 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Unsigned multiply with overflow check: one-operand MUL needs the
 * multiplicand in EAX and clobbers EDX (high half), hence the register
 * constraints asserted below. */
1288 reg: MUL_OVF_UN (reg, reg) {
1289 mono_assert (tree->right->reg1 != X86_EAX);
1291 if (tree->left->reg1 != X86_EAX)
1292 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1294 x86_mul_reg (s->code, tree->right->reg1, FALSE);
1295 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
1297 mono_assert (tree->reg1 == X86_EAX &&
1298 tree->reg2 == X86_EDX);
/* Signed division by a constant power of two, lowered to an arithmetic
 * shift right by i (the loop apparently finds i with v == 1 << i; loop
 * body not visible here).
 * NOTE(review): a plain SAR rounds toward negative infinity for negative
 * dividends, while CIL div truncates toward zero — verify that a guard
 * for negative inputs exists in the elided lines. */
1301 reg: DIV (reg, CONST_I4) {
1302 unsigned int i, j, k, v;
1304 v = tree->right->data.i;
1305 for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
1310 x86_shift_reg_imm (s->code, X86_SAR, tree->left->reg1, i);
1311 if (tree->reg1 != tree->left->reg1)
1312 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Fragment of the next by-constant rule (its header is not visible here;
 * presumably REM (reg, CONST_I4)) — same power-of-two search as above. */
1315 unsigned int i, j, k, v;
1320 v = tree->right->data.i;
1321 for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
1326 if (i == 32 || v & k)

/* Signed division: dividend goes to EAX, idiv leaves quotient in EAX and
 * remainder in EDX (the sign-extension into EDX is in elided lines). */
1333 reg: DIV (reg, reg) {
1334 mono_assert (tree->right->reg1 != X86_EAX);
1336 if (tree->left->reg1 != X86_EAX)
1337 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1340 x86_div_reg (s->code, tree->right->reg1, TRUE);
1342 mono_assert (tree->reg1 == X86_EAX &&
1343 tree->reg2 == X86_EDX);
/* Unsigned division by a constant.  Power-of-two divisors become a SHR;
 * other divisors use the multiply-by-reciprocal trick: multiply by
 * ~2^k / v and take the high half (EDX) shifted right by k - 32.
 * NOTE(review): "--i >= 0" below is a tautology because i is unsigned
 * int; termination relies on a break in the elided loop body — confirm,
 * and consider what happens for a zero divisor. */
1346 reg: DIV_UN (reg, CONST_I4) {
1347 unsigned int i, j, k, v;
1350 v = tree->right->data.i;
1351 for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
1356 if (i == 32 || v & k) {
1357 for (i = 32, j = 0x80000000; --i >= 0; j >>= 1) {
1361 /* k = 32 + number of significant bits in v - 1 */
1365 for (i = 0; i < k; i++) f *= 2.0f;
/* reciprocal rounds to an exact power of two: plain shift suffices */
1371 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, k - 32);
1372 if (tree->reg1 != tree->left->reg1)
1373 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1374 } else if (r < 0.5f) {
1375 if (tree->left->reg1 != X86_EAX)
1376 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1377 x86_mov_reg_imm (s->code, X86_EDX, (guint32) floor(f));
1378 /* x86_inc_reg (s->code, X86_EAX); */
1379 /* INC is faster but we have to check for overflow. */
1380 x86_alu_reg_imm (s->code, X86_ADD, X86_EAX, 1);
/* if the +1 carried out, skip the multiply (dividend was 0xffffffff) */
1381 x86_branch8(s->code, X86_CC_C, 2, FALSE);
1382 x86_mul_reg (s->code, X86_EDX, FALSE);
1383 x86_shift_reg_imm (s->code, X86_SHR, X86_EDX, k - 32);
1384 if (tree->reg1 != X86_EDX)
1385 x86_mov_reg_reg (s->code, tree->reg1, X86_EDX, 4);
/* reciprocal rounded up: multiply by ceil(f), no dividend adjustment */
1387 if (tree->left->reg1 != X86_EAX)
1388 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1389 x86_mov_reg_imm (s->code, X86_EDX, (guint32) ceil(f));
1390 x86_mul_reg (s->code, X86_EDX, FALSE);
1391 x86_shift_reg_imm (s->code, X86_SHR, X86_EDX, k - 32);
1392 if (tree->reg1 != X86_EDX)
1393 x86_mov_reg_reg (s->code, tree->reg1, X86_EDX, 4);
/* divisor was an exact power of two: logical shift right by i */
1396 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, i);
1397 if (tree->reg1 != tree->left->reg1)
1398 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Unsigned division: zero EDX (high half of the 64-bit dividend), then
 * unsigned div; quotient lands in EAX. */
1403 reg: DIV_UN (reg, reg) {
1404 mono_assert (tree->right->reg1 != X86_EAX);
1406 if (tree->left->reg1 != X86_EAX)
1407 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1409 x86_mov_reg_imm (s->code, X86_EDX, 0);
1410 x86_div_reg (s->code, tree->right->reg1, FALSE);
1412 mono_assert (tree->reg1 == X86_EAX &&
1413 tree->reg2 == X86_EDX);
/* Signed remainder: dividend in EAX, idiv leaves the remainder in EDX,
 * which is copied back to EAX as the result. */
1416 reg: REM (reg, reg) {
1417 mono_assert (tree->right->reg1 != X86_EAX);
1418 mono_assert (tree->right->reg1 != X86_EDX);
1420 if (tree->left->reg1 != X86_EAX)
1421 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1423 /* sign extend to 64bit in EAX/EDX */
1425 x86_div_reg (s->code, tree->right->reg1, TRUE);
1426 x86_mov_reg_reg (s->code, X86_EAX, X86_EDX, 4);
1428 mono_assert (tree->reg1 == X86_EAX &&
1429 tree->reg2 == X86_EDX);

/* Unsigned remainder: zero EDX, unsigned div, remainder EDX -> EAX. */
1432 reg: REM_UN (reg, reg) {
1433 mono_assert (tree->right->reg1 != X86_EAX);
1434 mono_assert (tree->right->reg1 != X86_EDX);
1436 if (tree->left->reg1 != X86_EAX)
1437 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1439 /* zero extend to 64bit in EAX/EDX */
1440 x86_mov_reg_imm (s->code, X86_EDX, 0);
1441 x86_div_reg (s->code, tree->right->reg1, FALSE);
1442 x86_mov_reg_reg (s->code, X86_EAX, X86_EDX, 4);
1444 mono_assert (tree->reg1 == X86_EAX &&
1445 tree->reg2 == X86_EDX);
/* Add immediate: use the shorter INC encoding when adding 1 (the else
 * branch for the generic ADD is in an elided line). */
1448 reg: ADD (reg, CONST_I4) "MB_USE_OPT1(0)" {
1449 if (tree->right->data.i == 1)
1450 x86_inc_reg (s->code, tree->left->reg1);
1452 x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, tree->right->data.i);
1454 if (tree->reg1 != tree->left->reg1)
1455 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Add a register-allocated local directly (rule only matches when the
 * local lives in a register, per the MBCOND below). */
1459 reg: ADD (reg, LDIND_I4 (ADDR_L)) {
1460 int treg = VARINFO (s, tree->right->left->data.i).reg;
1462 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, treg);
1464 if (tree->reg1 != tree->left->reg1)
1465 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1467 MBCOND ((VARINFO (data, tree->right->left->data.i).reg >= 0));

/* Plain register add. */
1471 reg: ADD (reg, reg) {
1472 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
1474 if (tree->reg1 != tree->left->reg1)
1475 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Signed add with overflow check (OF). */
1478 reg: ADD_OVF (reg, reg) {
1479 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
1480 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
1482 if (tree->reg1 != tree->left->reg1)
1483 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Unsigned add with overflow check (CF). */
1486 reg: ADD_OVF_UN (reg, reg) {
1487 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
1488 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
1490 if (tree->reg1 != tree->left->reg1)
1491 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Subtract immediate: use the shorter DEC encoding when subtracting 1
 * (the else branch for the generic SUB is in an elided line). */
1494 reg: SUB (reg, CONST_I4) "MB_USE_OPT1(0)" {
1495 if (tree->right->data.i == 1)
1496 x86_dec_reg (s->code, tree->left->reg1);
1498 x86_alu_reg_imm (s->code, X86_SUB, tree->left->reg1, tree->right->data.i);
1500 if (tree->reg1 != tree->left->reg1)
1501 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Subtract a register-allocated local directly (see MBCOND below). */
1504 reg: SUB (reg, LDIND_I4 (ADDR_L)) {
1505 int treg = VARINFO (s, tree->right->left->data.i).reg;
1507 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, treg);
1509 if (tree->reg1 != tree->left->reg1)
1510 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1512 MBCOND ((VARINFO (data, tree->right->left->data.i).reg >= 0));

/* Plain register subtract. */
1516 reg: SUB (reg, reg) {
1517 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
1519 if (tree->reg1 != tree->left->reg1)
1520 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Signed subtract with overflow check (OF). */
1523 reg: SUB_OVF (reg, reg) {
1524 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
1525 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
1527 if (tree->reg1 != tree->left->reg1)
1528 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Unsigned subtract with overflow check (borrow/CF). */
1531 reg: SUB_OVF_UN (reg, reg) {
1532 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
1533 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
1535 if (tree->reg1 != tree->left->reg1)
1536 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Materialize a comparison result as 0/1: SETcc into the low byte of
 * reg1 (signed flag picks signed vs unsigned GT/LT variants), then
 * zero-extend to 32 bits.  The case labels selecting on tree->data.i
 * (CEQ/CGT/CGT_UN/CLT/CLT_UN, presumably) are in elided lines. */
1539 reg: CSET (cflags) {
1541 switch (tree->data.i) {
1543 x86_set_reg (s->code, X86_CC_EQ, tree->reg1, TRUE);
1546 x86_set_reg (s->code, X86_CC_GT, tree->reg1, TRUE);
1549 x86_set_reg (s->code, X86_CC_GT, tree->reg1, FALSE);
1552 x86_set_reg (s->code, X86_CC_LT, tree->reg1, TRUE);
1555 x86_set_reg (s->code, X86_CC_LT, tree->reg1, FALSE);
1558 g_assert_not_reached ();
1561 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
/* Bitwise AND with immediate. */
1564 reg: AND (reg, CONST_I4) "MB_USE_OPT1(0)" {
1565 x86_alu_reg_imm (s->code, X86_AND, tree->left->reg1, tree->right->data.i);
1567 if (tree->reg1 != tree->left->reg1)
1568 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Bitwise AND, register form. */
1571 reg: AND (reg, reg) {
1572 x86_alu_reg_reg (s->code, X86_AND, tree->left->reg1, tree->right->reg1);
1574 if (tree->reg1 != tree->left->reg1)
1575 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Bitwise OR with immediate. */
1578 reg: OR (reg, CONST_I4) "MB_USE_OPT1(0)" {
1579 x86_alu_reg_imm (s->code, X86_OR, tree->left->reg1, tree->right->data.i);
1581 if (tree->reg1 != tree->left->reg1)
1582 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Bitwise OR, register form. */
1585 reg: OR (reg, reg) {
1586 x86_alu_reg_reg (s->code, X86_OR, tree->left->reg1, tree->right->reg1);
1588 if (tree->reg1 != tree->left->reg1)
1589 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Bitwise XOR with immediate. */
1592 reg: XOR (reg, CONST_I4) "MB_USE_OPT1(0)" {
1593 x86_alu_reg_imm (s->code, X86_XOR, tree->left->reg1, tree->right->data.i);
1595 if (tree->reg1 != tree->left->reg1)
1596 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Bitwise XOR, register form. */
1599 reg: XOR (reg, reg) {
1600 x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->right->reg1);
1602 if (tree->reg1 != tree->left->reg1)
1603 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Fragment: arithmetic negation (rule header, presumably NEG (reg), is
 * not visible here). */
1607 x86_neg_reg (s->code, tree->left->reg1);
1609 if (tree->reg1 != tree->left->reg1)
1610 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Fragment: bitwise complement (rule header, presumably NOT (reg), is
 * not visible here). */
1614 x86_not_reg (s->code, tree->left->reg1);
1616 if (tree->reg1 != tree->left->reg1)
1617 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Shift left by immediate. */
1620 reg: SHL (reg, CONST_I4) {
1621 x86_shift_reg_imm (s->code, X86_SHL, tree->left->reg1, tree->right->data.i);
1623 if (tree->reg1 != tree->left->reg1)
1624 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Shift left by register: the count must live in CL, so spill ECX around
 * the shift when the count is elsewhere; source/dest must not be ECX. */
1627 reg: SHL (reg, reg) {
1628 if (tree->right->reg1 != X86_ECX) {
1629 x86_push_reg (s->code, X86_ECX);
1630 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
1632 x86_shift_reg (s->code, X86_SHL, tree->left->reg1);
1634 if (tree->reg1 != tree->left->reg1)
1635 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1637 if (tree->right->reg1 != X86_ECX)
1638 x86_pop_reg (s->code, X86_ECX);
1640 mono_assert (tree->reg1 != X86_ECX &&
1641 tree->left->reg1 != X86_ECX);

/* Arithmetic shift right by immediate (signed SHR -> SAR). */
1644 reg: SHR (reg, CONST_I4) {
1645 x86_shift_reg_imm (s->code, X86_SAR, tree->left->reg1, tree->right->data.i);
1647 if (tree->reg1 != tree->left->reg1)
1648 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Arithmetic shift right by register; same ECX handling as SHL above. */
1651 reg: SHR (reg, reg) {
1652 if (tree->right->reg1 != X86_ECX) {
1653 x86_push_reg (s->code, X86_ECX);
1654 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
1657 x86_shift_reg (s->code, X86_SAR, tree->left->reg1);
1659 if (tree->reg1 != tree->left->reg1)
1660 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1662 if (tree->right->reg1 != X86_ECX)
1663 x86_pop_reg (s->code, X86_ECX);
1665 mono_assert (tree->reg1 != X86_ECX &&
1666 tree->left->reg1 != X86_ECX);

/* Logical shift right by immediate (unsigned). */
1669 reg: SHR_UN (reg, CONST_I4) {
1670 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, tree->right->data.i);
1672 if (tree->reg1 != tree->left->reg1)
1673 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Logical shift right by register; same ECX handling as SHL above. */
1676 reg: SHR_UN (reg, reg) {
1677 if (tree->right->reg1 != X86_ECX) {
1678 x86_push_reg (s->code, X86_ECX);
1679 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
1682 x86_shift_reg (s->code, X86_SHR, tree->left->reg1);
1684 if (tree->reg1 != tree->left->reg1)
1685 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1687 if (tree->right->reg1 != X86_ECX)
1688 x86_pop_reg (s->code, X86_ECX);
1690 mono_assert (tree->reg1 != X86_ECX &&
1691 tree->left->reg1 != X86_ECX);
/* Static-field address: call the mono_ldsflda helper (klass, offset) with
 * caller-saved EAX/ECX/EDX preserved around the call; result is returned
 * in EAX and moved to the destination register if needed. */
1694 reg: LDSFLDA (CONST_I4) {
1695 if (tree->reg1 != X86_EAX)
1696 x86_push_reg (s->code, X86_EAX);
1697 x86_push_reg (s->code, X86_ECX);
1698 x86_push_reg (s->code, X86_EDX);
1700 x86_push_imm (s->code, tree->left->data.i);
1701 x86_push_imm (s->code, tree->data.klass);
1702 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldsflda);
1703 x86_call_code (s->code, 0);
1704 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
1706 x86_pop_reg (s->code, X86_EDX);
1707 x86_pop_reg (s->code, X86_ECX);
1708 if (tree->reg1 != X86_EAX) {
1709 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1710 x86_pop_reg (s->code, X86_EAX);

/* Fragment (rule header not visible; presumably LDLEN): load the array
 * length from MonoArray.max_length. */
1716 x86_mov_reg_membase (s->code, tree->reg1, tree->left->reg1,
1717 G_STRUCT_OFFSET (MonoArray, max_length), 4);
/* Element address with a constant index: optional bounds check against
 * max_length (throws unless max_length > index), then a single LEA with
 * the precomputed offset elem_size * index + vector. */
1720 reg: LDELEMA (reg, CONST_I4) {
1723 if (mono_jit_boundcheck){
1724 x86_alu_membase_imm (s->code, X86_CMP, tree->left->reg1, G_STRUCT_OFFSET (MonoArray, max_length), tree->right->data.i);
1725 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GT, FALSE, "IndexOutOfRangeException");
1728 ind = tree->data.i * tree->right->data.i + G_STRUCT_OFFSET (MonoArray, vector);
1730 x86_lea_membase (s->code, tree->reg1, tree->left->reg1, ind);

/* Element address with a register index: bounds check (throws unless
 * index < max_length), then LEA with a scaled index for element sizes
 * 1/2/4/8 (fast_log2 maps size -> scale), or IMUL + ADDs otherwise. */
1733 reg: LDELEMA (reg, reg) {
1735 if (mono_jit_boundcheck){
1736 x86_alu_reg_membase (s->code, X86_CMP, tree->right->reg1, tree->left->reg1, G_STRUCT_OFFSET (MonoArray, max_length));
1737 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LT, FALSE, "IndexOutOfRangeException");
1740 if (tree->data.i == 1 || tree->data.i == 2 ||
1741 tree->data.i == 4 || tree->data.i == 8) {
1742 static int fast_log2 [] = { 1, 0, 1, -1, 2, -1, -1, -1, 3 };
1743 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1,
1744 G_STRUCT_OFFSET (MonoArray, vector), tree->right->reg1,
1745 fast_log2 [tree->data.i]);
1747 x86_imul_reg_reg_imm (s->code, tree->right->reg1, tree->right->reg1, tree->data.i);
1748 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->right->reg1);
1749 x86_alu_reg_imm (s->code, X86_ADD, tree->reg1, G_STRUCT_OFFSET (MonoArray, vector));
/* The following rules share one shape: save caller-clobbered EAX/ECX/EDX
 * (EAX only when it is not the destination), push the helper's arguments,
 * call it through an absolute-jump patch record, pop the arguments, and
 * restore registers; the helper's result comes back in EAX. */

/* Fragment (header not visible; the PRINT_REG tag says LDSTR): call
 * mono_ldstr_wrapper (image, string token). */
1754 if (tree->reg1 != X86_EAX)
1755 x86_push_reg (s->code, X86_EAX);
1756 x86_push_reg (s->code, X86_ECX);
1757 x86_push_reg (s->code, X86_EDX);
1759 x86_push_imm (s->code, tree->data.p);
1760 x86_push_imm (s->code, s->method->klass->image);
1761 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldstr_wrapper);
1762 x86_call_code (s->code, 0);
1763 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
1765 x86_pop_reg (s->code, X86_EDX);
1766 x86_pop_reg (s->code, X86_ECX);
1767 if (tree->reg1 != X86_EAX) {
1768 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1769 x86_pop_reg (s->code, X86_EAX);
1772 PRINT_REG ("LDSTR", tree->reg1);

/* Fragment (header not visible; tag says NEWARR): call
 * mono_array_new_wrapper (klass, length-in-left->reg1). */
1776 if (tree->reg1 != X86_EAX)
1777 x86_push_reg (s->code, X86_EAX);
1778 x86_push_reg (s->code, X86_ECX);
1779 x86_push_reg (s->code, X86_EDX);
1781 x86_push_reg (s->code, tree->left->reg1);
1782 x86_push_imm (s->code, tree->data.p);
1783 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_array_new_wrapper);
1784 x86_call_code (s->code, 0);
1785 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
1787 x86_pop_reg (s->code, X86_EDX);
1788 x86_pop_reg (s->code, X86_ECX);
1789 if (tree->reg1 != X86_EAX) {
1790 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1791 x86_pop_reg (s->code, X86_EAX);
1794 PRINT_REG ("NEWARR", tree->reg1);

/* New array from a precomputed vtable: mono_array_new_specific. */
1797 reg: NEWARR_SPEC (reg) {
1798 if (tree->reg1 != X86_EAX)
1799 x86_push_reg (s->code, X86_EAX);
1800 x86_push_reg (s->code, X86_ECX);
1801 x86_push_reg (s->code, X86_EDX);
1803 x86_push_reg (s->code, tree->left->reg1);
1804 x86_push_imm (s->code, tree->data.p);
1805 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_array_new_specific);
1806 x86_call_code (s->code, 0);
1807 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
1809 x86_pop_reg (s->code, X86_EDX);
1810 x86_pop_reg (s->code, X86_ECX);
1811 if (tree->reg1 != X86_EAX) {
1812 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1813 x86_pop_reg (s->code, X86_EAX);
1816 PRINT_REG ("NEWARR_SPEC", tree->reg1);

/* Fragment (header not visible; tag says NEWOBJ): call
 * mono_object_new_wrapper (klass) — one argument, hence ESP += 4. */
1820 if (tree->reg1 != X86_EAX)
1821 x86_push_reg (s->code, X86_EAX);
1822 x86_push_reg (s->code, X86_ECX);
1823 x86_push_reg (s->code, X86_EDX);
1825 x86_push_imm (s->code, tree->data.klass);
1826 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_object_new_wrapper);
1827 x86_call_code (s->code, 0);
1828 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
1830 x86_pop_reg (s->code, X86_EDX);
1831 x86_pop_reg (s->code, X86_ECX);
1832 if (tree->reg1 != X86_EAX) {
1833 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1834 x86_pop_reg (s->code, X86_EAX);
1836 PRINT_REG ("NEWOBJ", tree->reg1);

/* Fragment (header not visible; tag says NEWOBJ_SPEC): call
 * mono_object_new_specific (vtable). */
1840 if (tree->reg1 != X86_EAX)
1841 x86_push_reg (s->code, X86_EAX);
1842 x86_push_reg (s->code, X86_ECX);
1843 x86_push_reg (s->code, X86_EDX);
1845 x86_push_imm (s->code, tree->data.p);
1846 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_object_new_specific);
1847 x86_call_code (s->code, 0);
1848 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
1850 x86_pop_reg (s->code, X86_EDX);
1851 x86_pop_reg (s->code, X86_ECX);
1852 if (tree->reg1 != X86_EAX) {
1853 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1854 x86_pop_reg (s->code, X86_EAX);
1856 PRINT_REG ("NEWOBJ_SPEC", tree->reg1);
/* Object address: just forward the pointer to the destination register. */
1859 reg: OBJADDR (reg) {
1860 if (tree->left->reg1 != tree->reg1)
1861 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);

/* Valuetype address of a stack local: EBP + frame offset via LEA. */
1864 reg: VTADDR (ADDR_L) {
1865 int offset = VARINFO (s, tree->left->data.i).offset;
1867 x86_lea_membase (s->code, tree->reg1, X86_EBP, offset);

/* Fragment (rule header not visible): release a pointer by calling
 * g_free(left->reg1); one argument, cdecl cleanup of 4 bytes. */
1871 x86_push_reg (s->code, tree->left->reg1);
1872 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, g_free);
1873 x86_call_code (s->code, 0);
1874 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* Two-argument runtime procedure call (no result): push args right to
 * left, call tree->data.p through an absolute patch, clean up 8 bytes. */
1877 stmt: PROC2 (reg, reg) {
1878 x86_push_reg (s->code, tree->right->reg1);
1879 x86_push_reg (s->code, tree->left->reg1);
1880 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->data.p);
1881 x86_call_code (s->code, 0);
1882 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);

/* Three-argument runtime procedure call (no result). */
1885 stmt: PROC3 (reg, CPSRC (reg, reg)) {
1886 x86_push_reg (s->code, tree->right->right->reg1);
1887 x86_push_reg (s->code, tree->right->left->reg1);
1888 x86_push_reg (s->code, tree->left->reg1);
1889 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->data.p);
1890 x86_call_code (s->code, 0);
1891 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);

/* Fragment (header not visible; presumably a one-argument helper call
 * returning a value): caller-saves preserved, result in EAX. */
1895 if (tree->reg1 != X86_EAX)
1896 x86_push_reg (s->code, X86_EAX);
1897 x86_push_reg (s->code, X86_ECX);
1898 x86_push_reg (s->code, X86_EDX);
1900 x86_push_reg (s->code, tree->left->reg1);
1902 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->data.p);
1903 x86_call_code (s->code, 0);
1904 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer));
1906 x86_pop_reg (s->code, X86_EDX);
1907 x86_pop_reg (s->code, X86_ECX);
1908 if (tree->reg1 != X86_EAX) {
1909 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1910 x86_pop_reg (s->code, X86_EAX);
/* LOCALLOC with a compile-time size: round the size up to the frame
 * alignment, grow the stack, then zero the area with rep stos using a
 * constant dword count (size >> 2).  EDI/EAX/ECX are spilled only when
 * they are not already the destination/source registers. */
1914 reg: LOCALLOC (CONST_I4) {
1918 size = (tree->left->data.i + (MONO_FRAME_ALIGNMENT - 1)) & ~(MONO_FRAME_ALIGNMENT - 1); // align to MONO_FRAME_ALIGNMENT boundary
1922 mono_emit_stack_alloc_const (s, tree, size);
1924 if (tree->reg1 != X86_EDI && tree->left->reg1 != X86_EDI) {
1925 x86_push_reg (s->code, X86_EDI);
1928 if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX) {
1929 x86_push_reg (s->code, X86_EAX);
1932 if (tree->reg1 != X86_ECX && tree->left->reg1 != X86_ECX) {
1933 x86_push_reg (s->code, X86_ECX);
/* count in ECX, zero in EAX (SUB reg,reg), destination in EDI */
1937 x86_mov_reg_imm (s->code, X86_ECX, size >> 2);
1938 x86_alu_reg_reg (s->code, X86_SUB, X86_EAX, X86_EAX);
1940 x86_lea_membase (s->code, X86_EDI, X86_ESP, offset);
1942 x86_prefix (s->code, X86_REP_PREFIX);
1943 x86_stosd (s->code);
1945 if (tree->reg1 != X86_ECX && tree->left->reg1 != X86_ECX)
1946 x86_pop_reg (s->code, X86_ECX);
1947 if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX)
1948 x86_pop_reg (s->code, X86_EAX);
1949 if (tree->reg1 != X86_EDI && tree->left->reg1 != X86_EDI)
1950 x86_pop_reg (s->code, X86_EDI);
/* result: pointer to the newly allocated area (current ESP) */
1953 x86_mov_reg_reg (s->code, tree->reg1, X86_ESP, 4);
/* LOCALLOC with a runtime size: round the size up to the frame alignment,
 * grow the stack, then zero the new area with rep stosl.  EAX/ECX/EDI are
 * spilled only when they are not already the destination/source regs. */
1958 reg: LOCALLOC (reg) {
1960 /* size must be aligned to MONO_FRAME_ALIGNMENT bytes */
1961 x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, MONO_FRAME_ALIGNMENT - 1);
1962 x86_alu_reg_imm (s->code, X86_AND, tree->left->reg1, ~(MONO_FRAME_ALIGNMENT - 1));
1964 /* allocate space on stack */
1965 mono_emit_stack_alloc (s, tree);
1968 /* initialize with zero */
1969 if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX) {
1970 x86_push_reg (s->code, X86_EAX);
1973 if (tree->reg1 != X86_ECX && tree->left->reg1 != X86_ECX) {
1974 x86_push_reg (s->code, X86_ECX);
1977 if (tree->reg1 != X86_EDI && tree->left->reg1 != X86_EDI) {
1978 x86_push_reg (s->code, X86_EDI);
/* convert the byte count to a dword count for rep stosl */
1982 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, 2);
1983 if (tree->left->reg1 != X86_ECX)
/* BUGFIX: this must be a register-to-register move of the runtime dword
 * count into ECX; the original called x86_mov_reg_imm, which encoded the
 * register *number* of left->reg1 as an immediate and gave rep stosl a
 * garbage count.  (Compare the LOCALLOC (CONST_I4) rule above, where
 * mov_reg_imm is correct because "size >> 2" is a real constant.) */
1984 x86_mov_reg_reg (s->code, X86_ECX, tree->left->reg1, 4);
1985 x86_alu_reg_reg (s->code, X86_XOR, X86_EAX, X86_EAX);
1987 x86_lea_membase (s->code, X86_EDI, X86_ESP, offset);
1989 x86_prefix (s->code, X86_REP_PREFIX);
1990 x86_stosl (s->code);
1992 if (tree->reg1 != X86_EDI && tree->left->reg1 != X86_EDI)
1993 x86_pop_reg (s->code, X86_EDI);
1994 if (tree->reg1 != X86_ECX && tree->left->reg1 != X86_ECX)
1995 x86_pop_reg (s->code, X86_ECX);
1996 if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX)
1997 x86_pop_reg (s->code, X86_EAX);
/* result: pointer to the newly allocated area (current ESP) */
2000 x86_mov_reg_reg (s->code, tree->reg1, X86_ESP, 4);
/* Fragment (rule header not visible; the element_class check plus the
 * final "+ sizeof (MonoObject)" strongly suggest UNBOX): save the object
 * pointer, chase obj->vtable->klass, throw InvalidCastException unless
 * its element_class matches the expected class, then return a pointer to
 * the boxed value (object + header size). */
2004 if (tree->reg1 != tree->left->reg1)
2005 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
2007 x86_push_reg (s->code, tree->reg1);
2008 x86_mov_reg_membase (s->code, tree->reg1, tree->reg1, 0, 4);
2009 x86_mov_reg_membase (s->code, tree->reg1, tree->reg1, 0, 4);
2010 x86_alu_membase_imm (s->code, X86_CMP, tree->reg1,
2011 G_STRUCT_OFFSET (MonoClass, element_class), ((int)(tree->data.klass->element_class)));
2012 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "InvalidCastException");
2013 x86_pop_reg (s->code, tree->reg1);
2014 x86_alu_reg_imm (s->code, X86_ADD, tree->reg1, sizeof (MonoObject));
/* castclass: null passes through unchanged (br[0] skips all checks).
 * Interfaces are tested via the vtable's interface_offsets table; other
 * types use the baseval/diffval range trick (a class is a subtype iff
 * obj_baseval - klass_baseval <= klass_diffval, unsigned).  On failure an
 * InvalidCastException is raised.  The object pointer is kept on the
 * stack throughout and popped into the result register at the end. */
2017 reg: CASTCLASS (reg) {
2018 MonoClass *klass = tree->data.klass;
2020 int lreg = tree->left->reg1;
2022 x86_push_reg (s->code, lreg);
2023 x86_test_reg_reg (s->code, lreg, lreg);
2024 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
2026 if (klass->flags & TYPE_ATTRIBUTE_INTERFACE) {
2027 /* lreg = obj->vtable */
2028 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
/* interface id must be within the vtable's max_interface_id ... */
2030 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoVTable, max_interface_id),
2031 klass->interface_id);
2032 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GE, FALSE, "InvalidCastException");
2033 /* lreg = obj->vtable->interface_offsets */
2034 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
/* ... and its slot must be non-zero (interface implemented) */
2035 x86_alu_membase_imm (s->code, X86_CMP, lreg, klass->interface_id << 2, 0);
2036 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NE, FALSE, "InvalidCastException");
2039 /* lreg = obj->vtable */
2040 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2041 /* lreg = obj->vtable->klass */
2042 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
/* array case: ranks must match, then range-check the element class */
2046 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoClass, rank), klass->rank);
2047 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "InvalidCastException");
2048 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, cast_class), 4);
2049 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
2050 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->cast_class->baseval);
2051 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->cast_class->diffval);
2052 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, FALSE, "InvalidCastException");
2056 if (klass->marshalbyref) {
2057 /* check for transparent_proxy */
2058 x86_alu_reg_imm (s->code, X86_CMP, lreg, (int)mono_defaults.transparent_proxy_class);
2059 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
/* transparent proxy: reload the object and use the remote class */
2062 x86_mov_reg_membase (s->code, lreg, X86_ESP, 0, 4);
2063 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoTransparentProxy,
2066 x86_patch (br [1], s->code);
/* non-array case: baseval/diffval subtype range check */
2069 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
2070 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->baseval);
2071 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->diffval);
2072 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, FALSE, "InvalidCastException");
2076 x86_patch (br [0], s->code);
2077 x86_pop_reg (s->code, tree->reg1);
/* Fragment (rule header not visible; presumably ISINST): same type tests
 * as CASTCLASS above, but instead of throwing, a failed check overwrites
 * the saved object pointer on the stack with NULL
 * (x86_mov_membase_imm (ESP, 0, 0, 4)), so the rule yields the object or
 * NULL rather than raising InvalidCastException. */
2081 MonoClass *klass = tree->data.klass;
2083 int lreg = tree->left->reg1;
2085 x86_push_reg (s->code, lreg);
2086 x86_test_reg_reg (s->code, lreg, lreg);
2087 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
2089 if (klass->flags & TYPE_ATTRIBUTE_INTERFACE) {
2090 /* lreg = obj->vtable */
2091 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2093 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoVTable, max_interface_id),
2094 klass->interface_id);
2095 br [1] = s->code; x86_branch8 (s->code, X86_CC_LT, 0, FALSE);
2096 /* lreg = obj->vtable->interface_offsets */
2097 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
2098 x86_alu_membase_imm (s->code, X86_CMP, lreg, klass->interface_id << 2, 0);
2099 br [2] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
2100 x86_patch (br [1], s->code);
/* not an instance: replace the saved pointer with NULL */
2101 x86_mov_membase_imm (s->code, X86_ESP, 0, 0, 4);
2102 x86_patch (br [2], s->code);
2106 /* lreg = obj->vtable */
2107 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2108 /* lreg = obj->vtable->klass */
2109 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
/* array case: rank match plus element-class range check */
2113 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoClass, rank), klass->rank);
2114 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
2115 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, cast_class), 4);
2116 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
2117 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->cast_class->baseval);
2118 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->cast_class->diffval);
2119 br [2] = s->code; x86_branch8 (s->code, X86_CC_LE, 0, FALSE);
2120 x86_patch (br [1], s->code);
2121 x86_mov_membase_imm (s->code, X86_ESP, 0, 0, 4);
2122 x86_patch (br [2], s->code);
2126 if (klass->marshalbyref) {
2127 /* check for transparent_proxy */
2128 x86_alu_reg_imm (s->code, X86_CMP, lreg, (int)mono_defaults.transparent_proxy_class);
2129 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
/* transparent proxy: reload the object and use the remote class */
2132 x86_mov_reg_membase (s->code, lreg, X86_ESP, 0, 4);
2133 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoTransparentProxy,
2135 x86_patch (br [1], s->code);
/* non-array case: baseval/diffval subtype range check */
2138 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
2139 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->baseval);
2140 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->diffval);
2141 br [2] = s->code; x86_branch8 (s->code, X86_CC_LE, 0, FALSE);
2142 x86_mov_membase_imm (s->code, X86_ESP, 0, 0, 4);
2143 x86_patch (br [2], s->code);
2147 x86_patch (br [0], s->code);
2148 x86_pop_reg (s->code, tree->reg1);
/* initobj: zero tree->data.i bytes at the address in left->reg1.  Sizes
 * 0/1/2/4 get a single immediate store (or nothing); larger sizes use
 * rep stosl for the dword part and stosb for the remainder, with EDI
 * spilled when the address is not already there.
 * NOTE(review): EAX and ECX are clobbered by the rep-stos path with no
 * visible save/restore — confirm in the elided lines whether that is
 * handled or assumed safe at this point. */
2151 stmt: INITOBJ (reg) {
2154 if (!(i = tree->data.i))
2157 if (i == 1 || i == 2 || i == 4) {
2158 x86_mov_membase_imm (s->code, tree->left->reg1, 0, 0, i);
/* split the size into dwords (i) and leftover bytes (j) */
2162 i = tree->data.i / 4;
2163 j = tree->data.i % 4;
2165 if (tree->left->reg1 != X86_EDI) {
2166 x86_push_reg (s->code, X86_EDI);
2167 x86_mov_reg_reg (s->code, X86_EDI, tree->left->reg1, 4);
2171 x86_alu_reg_reg (s->code, X86_XOR, X86_EAX, X86_EAX);
2172 x86_mov_reg_imm (s->code, X86_ECX, i);
2174 x86_prefix (s->code, X86_REP_PREFIX);
2175 x86_stosl (s->code);
/* tail: zero the remaining 0-3 bytes one at a time */
2177 for (i = 0; i < j; i++)
2178 x86_stosb (s->code);
2182 x86_mov_membase_imm (s->code, X86_EDI, 0, 0, 2);
2183 x86_mov_membase_imm (s->code, X86_EDI, 2, 0, 1);
2188 if (tree->left->reg1 != X86_EDI)
2189 x86_pop_reg (s->code, X86_EDI);
/* CPBLK with a compile-time-constant byte count: copy `count` bytes from
 * *source_reg to *dest_reg.  Small counts are expanded into straight-line
 * mov (and fild/fistp for 8-byte chunks) sequences; counts > 15 fall through
 * to a rep movsd loop using ESI/EDI/ECX, spilling any conflicting registers
 * to the stack first.  `sreg` is a scratch register guaranteed != dest_reg. */
2192 stmt: CPBLK (reg, CPSRC (reg, CONST_I4)) {
2193 int dest_reg = tree->left->reg1;
2194 int source_reg = tree->right->left->reg1;
2195 int count = tree->right->right->data.i;
/* scratch register for the byte/word/dword shuttling below */
2196 int sreg = dest_reg != X86_EAX ? X86_EAX : X86_EDX;
2197 int spill_pos = 0, dest_offset = 0, source_offset = 0;
2198 int save_esi = FALSE, save_edi = FALSE;
2200 // TODO: handle unaligned. prefix
2206 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 1);
2207 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 1);
2210 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 2);
2211 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 2);
2214 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 2);
2215 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 2);
2216 x86_mov_reg_membase (s->code, sreg, source_reg, 2, 1);
2217 x86_mov_membase_reg (s->code, dest_reg, 2, sreg, 1);
2220 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 4);
2221 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 4);
2224 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 4);
2225 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 4);
2226 x86_mov_reg_membase (s->code, sreg, source_reg, 4, 1);
2227 x86_mov_membase_reg (s->code, dest_reg, 4, sreg, 1);
2230 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 4);
2231 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 4);
2232 x86_mov_reg_membase (s->code, sreg, source_reg, 4, 2);
2233 x86_mov_membase_reg (s->code, dest_reg, 4, sreg, 2);
2236 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 4);
2237 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 4);
2238 x86_mov_reg_membase (s->code, sreg, source_reg, 4, 2);
2239 x86_mov_membase_reg (s->code, dest_reg, 4, sreg, 2);
2240 x86_mov_reg_membase (s->code, sreg, source_reg, 6, 1);
2241 x86_mov_membase_reg (s->code, dest_reg, 6, sreg, 1);
/* 8-byte chunks are shuttled through the FPU as a 64-bit integer
 * (fild m64 / fistp m64) to avoid needing two spare GP registers */
2244 x86_fild_membase (s->code, source_reg, 0, TRUE);
2245 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2248 x86_fild_membase (s->code, source_reg, 0, TRUE);
2249 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2250 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 1);
2251 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 1);
2254 x86_fild_membase (s->code, source_reg, 0, TRUE);
2255 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2256 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 2);
2257 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 2);
2260 x86_fild_membase (s->code, source_reg, 0, TRUE);
2261 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2262 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 2);
2263 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 2);
2264 x86_mov_reg_membase (s->code, sreg, source_reg, 10, 1);
2265 x86_mov_membase_reg (s->code, dest_reg, 10, sreg, 1);
2268 x86_fild_membase (s->code, source_reg, 0, TRUE);
2269 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2270 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 4);
2271 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 4);
2274 x86_fild_membase (s->code, source_reg, 0, TRUE);
2275 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2276 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 4);
2277 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 4);
2278 x86_mov_reg_membase (s->code, sreg, source_reg, 12, 1);
2279 x86_mov_membase_reg (s->code, dest_reg, 12, sreg, 1);
2282 x86_fild_membase (s->code, source_reg, 0, TRUE);
2283 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2284 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 4);
2285 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 4);
2286 x86_mov_reg_membase (s->code, sreg, source_reg, 12, 2);
2287 x86_mov_membase_reg (s->code, dest_reg, 12, sreg, 2);
2290 x86_fild_membase (s->code, source_reg, 0, TRUE);
2291 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2292 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 4);
2293 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 4);
2294 x86_mov_reg_membase (s->code, sreg, source_reg, 12, 2);
2295 x86_mov_membase_reg (s->code, dest_reg, 12, sreg, 2);
2296 x86_mov_reg_membase (s->code, sreg, source_reg, 14, 1);
2297 x86_mov_membase_reg (s->code, dest_reg, 14, sreg, 1);
/* general case: rep movsd needs ESI (source), EDI (dest), ECX (count).
 * Save ESI/EDI if the register allocator uses them, and spill any
 * operand register that would be clobbered before it is consumed. */
2300 g_assert (count > 15);
2302 if (dest_reg != X86_ESI && source_reg != X86_ESI &&
2303 mono_regset_reg_used (s->rs, X86_ESI))
2305 if (dest_reg != X86_EDI && source_reg != X86_EDI &&
2306 mono_regset_reg_used (s->rs, X86_EDI))
2310 x86_push_reg (s->code, X86_ESI);
2312 x86_push_reg (s->code, X86_EDI);
2314 if (dest_reg == X86_ESI) {
2315 dest_offset = ++spill_pos;
2317 if (source_reg == X86_EDI) {
2318 source_offset = ++spill_pos;
2322 x86_push_reg (s->code, source_reg);
2324 x86_push_reg (s->code, dest_reg);
/* load ESI/EDI either from the spill slot ((offset-1)<<2 above ESP)
 * or directly from the operand register */
2326 if (source_reg != X86_ESI) {
2328 x86_mov_reg_membase (s->code, X86_ESI, X86_ESP, (source_offset-1)<<2, 4);
2330 x86_mov_reg_reg (s->code, X86_ESI, source_reg, 4);
2332 if (dest_reg != X86_EDI) {
2334 x86_mov_reg_membase (s->code, X86_EDI, X86_ESP, (dest_offset-1)<<2, 4);
2336 x86_mov_reg_reg (s->code, X86_EDI, dest_reg, 4);
/* copy count/4 dwords, then the 0-3 trailing bytes explicitly */
2339 x86_mov_reg_imm (s->code, X86_ECX, count >> 2);
2341 x86_prefix (s->code, X86_REP_PREFIX);
2342 x86_movsd (s->code);
2344 switch (count & 3) {
2346 x86_mov_reg_membase (s->code, sreg, X86_ESI, 0, 1);
2347 x86_mov_membase_reg (s->code, X86_EDI, 0, sreg, 1);
2350 x86_mov_reg_membase (s->code, sreg, X86_ESI, 0, 2);
2351 x86_mov_membase_reg (s->code, X86_EDI, 0, sreg, 2);
2354 x86_mov_reg_membase (s->code, sreg, X86_ESI, 0, 2);
2355 x86_mov_membase_reg (s->code, X86_EDI, 0, sreg, 2);
2356 x86_mov_reg_membase (s->code, sreg, X86_ESI, 2, 1);
2357 x86_mov_membase_reg (s->code, X86_EDI, 2, sreg, 1);
/* discard spill slots, then restore callee-saved ESI/EDI if pushed */
2363 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, spill_pos<<2);
2366 x86_pop_reg (s->code, X86_EDI);
2368 x86_pop_reg (s->code, X86_ESI);
/* rule only applies when inline memcpy expansion is enabled */
2373 MBCOND (mono_inline_memcpy);
/* CPBLK with a runtime byte count: copy size_reg bytes from *source_reg to
 * *dest_reg.  When inline expansion is disabled this simply calls memmove();
 * otherwise it sets up ESI/EDI/ECX (spilling clobbered operands) and uses
 * rep movsd for whole dwords followed by rep movsb for the remainder. */
2377 stmt: CPBLK (reg, CPSRC (reg, reg)) {
2378 int dest_reg = tree->left->reg1;
2379 int source_reg = tree->right->left->reg1;
2380 int size_reg = tree->right->right->reg1;
2381 int spill_pos = 0, size_offset = 0, dest_offset = 0, source_offset = 0;
2382 int save_esi = FALSE, save_edi = FALSE;
/* out-of-line path: memmove (dest, source, size) — cdecl, caller pops 12 */
2384 if (!mono_inline_memcpy) {
2385 x86_push_reg (s->code, size_reg);
2386 x86_push_reg (s->code, source_reg);
2387 x86_push_reg (s->code, dest_reg);
2388 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, memmove);
2389 x86_call_code (s->code, 0);
2390 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
/* save ESI/EDI if allocated and not already one of the operands */
2392 if (dest_reg != X86_ESI && source_reg != X86_ESI && size_reg != X86_ESI &&
2393 mono_regset_reg_used (s->rs, X86_ESI))
2395 if (dest_reg != X86_EDI && source_reg != X86_EDI && size_reg != X86_EDI &&
2396 mono_regset_reg_used (s->rs, X86_EDI))
2400 x86_push_reg (s->code, X86_ESI);
2402 x86_push_reg (s->code, X86_EDI);
/* spill any operand living in a register that gets clobbered before use */
2404 if (size_reg == X86_EDI || size_reg == X86_ESI) {
2405 size_offset = ++spill_pos;
2407 if (dest_reg == X86_ECX || dest_reg == X86_ESI) {
2408 dest_offset = ++spill_pos;
2410 if (source_reg == X86_ECX || source_reg == X86_EDI) {
2411 source_offset = ++spill_pos;
2415 x86_push_reg (s->code, source_reg);
2417 x86_push_reg (s->code, dest_reg);
2419 x86_push_reg (s->code, size_reg);
2421 if (source_reg != X86_ESI) {
2423 x86_mov_reg_membase (s->code, X86_ESI, X86_ESP, (source_offset-1)<<2, 4);
2425 x86_mov_reg_reg (s->code, X86_ESI, source_reg, 4);
2427 if (dest_reg != X86_EDI) {
2429 x86_mov_reg_membase (s->code, X86_EDI, X86_ESP, (dest_offset-1)<<2, 4);
2431 x86_mov_reg_reg (s->code, X86_EDI, dest_reg, 4);
2433 if (size_reg != X86_ECX) {
2435 x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, (size_offset-1)<<2, 4);
2437 x86_mov_reg_reg (s->code, X86_ECX, size_reg, 4);
/* keep the full byte count on the stack while ECX holds size/4 */
2440 x86_push_reg (s->code, X86_ECX);
2441 x86_shift_reg_imm (s->code, X86_SHR, X86_ECX, 2);
2445 // move whole dwords first
2446 x86_prefix (s->code, X86_REP_PREFIX);
2447 x86_movsd (s->code);
2449 x86_pop_reg (s->code, X86_ECX);
2450 x86_alu_reg_imm (s->code, X86_AND, X86_ECX, 3);
2452 // move remaining bytes (if any)
2453 x86_prefix (s->code, X86_REP_PREFIX);
2454 x86_movsb (s->code);
/* drop spill slots and restore saved registers */
2456 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, spill_pos<<2);
2459 x86_pop_reg (s->code, X86_EDI);
2461 x86_pop_reg (s->code, X86_ESI);
/* INITBLK with a compile-time-constant size: fill `size` bytes at *dest_reg
 * with the byte in value_reg.  Inline path: splat the byte across EAX
 * (x | x<<8, then | <<16), then rep stosd for size/4 dwords plus unrolled
 * stosb for the trailing bytes.  Fallback path calls memset(). */
2465 stmt: INITBLK (reg, CPSRC (reg, CONST_I4)) {
2466 int dest_reg = tree->left->reg1;
2467 int value_reg = tree->right->left->reg1;
2468 int size = tree->right->right->data.i;
2469 int spill_pos = 0, dest_offset = 0, value_offset = 0;
2470 int save_edi = FALSE;
2476 if (mono_inline_memcpy) {
/* save EDI if the allocator uses it and neither operand already is EDI */
2478 if (dest_reg != X86_EDI && value_reg != X86_EDI &&
2479 mono_regset_reg_used (s->rs, X86_EDI)) {
2481 x86_push_reg (s->code, X86_EDI);
/* spill operands sitting in registers clobbered before they are read */
2484 if (dest_reg == X86_ECX || dest_reg == X86_EAX) {
2485 dest_offset = ++spill_pos;
2487 if (value_reg == X86_ECX || value_reg == X86_EDI) {
2488 value_offset = ++spill_pos;
2492 x86_push_reg (s->code, value_reg);
2494 x86_push_reg (s->code, dest_reg);
2496 if (value_reg != X86_EAX) {
2498 x86_mov_reg_membase (s->code, X86_EAX, X86_ESP, (value_offset-1)<<2, 4);
2500 x86_mov_reg_reg (s->code, X86_EAX, value_reg, 4);
2502 if (dest_reg != X86_EDI) {
2504 x86_mov_reg_membase (s->code, X86_EDI, X86_ESP, (dest_offset-1)<<2, 4);
2506 x86_mov_reg_reg (s->code, X86_EDI, dest_reg, 4);
/* replicate the low byte of EAX into all four byte lanes */
2509 x86_widen_reg (s->code, X86_EAX, X86_EAX, FALSE, FALSE);
2510 x86_mov_reg_reg (s->code, X86_EDX, X86_EAX, 4);
2511 x86_shift_reg_imm (s->code, X86_SHL, X86_EAX, 8);
2512 x86_alu_reg_reg (s->code, X86_OR, X86_EAX, X86_EDX);
2513 x86_mov_reg_reg (s->code, X86_EDX, X86_EAX, 4);
2514 x86_shift_reg_imm (s->code, X86_SHL, X86_EAX, 16);
2515 x86_alu_reg_reg (s->code, X86_OR, X86_EAX, X86_EDX);
/* i = size/4 dwords via rep stosd, then j = size&3 single stosb's */
2518 x86_mov_reg_imm (s->code, X86_ECX, i);
2520 x86_prefix (s->code, X86_REP_PREFIX);
2521 x86_stosd (s->code);
2524 for (i = 0; i < j; i++)
2525 x86_stosb (s->code);
2527 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, spill_pos<<2);
2530 x86_pop_reg (s->code, X86_EDI);
/* out-of-line path: memset (dest, value, size) — cdecl, caller pops 12 */
2533 x86_push_imm (s->code, size);
2534 x86_push_reg (s->code, value_reg);
2535 x86_push_reg (s->code, dest_reg);
2536 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, memset);
2537 x86_call_code (s->code, 0);
2538 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
2541 MBCOND (mono_inline_memcpy);
/* INITBLK with a runtime size: fill size_reg bytes at *dest_reg with the
 * byte in value_reg.  Inline path splats the byte across EAX and uses
 * rep stosd + rep stosb; fallback path calls memset().
 *
 * FIX: the EDI-save condition previously tested `size_reg != X86_EDI`
 * twice and never tested value_reg (copy-paste slip); when value_reg was
 * EDI and EDI was live, EDI was clobbered without being saved.  The
 * condition now mirrors the constant-size INITBLK rule above. */
2545 stmt: INITBLK (reg, CPSRC (reg, reg)) {
2546 int dest_reg = tree->left->reg1;
2547 int value_reg = tree->right->left->reg1;
2548 int size_reg = tree->right->right->reg1;
2549 int spill_pos = 0, size_offset = 0, dest_offset = 0, value_offset = 0;
2550 int save_edi = FALSE;
2552 if (mono_inline_memcpy) {
/* save EDI if the allocator uses it and no operand already lives there */
2554 if (dest_reg != X86_EDI && value_reg != X86_EDI && size_reg != X86_EDI &&
2555 mono_regset_reg_used (s->rs, X86_EDI)) {
2557 x86_push_reg (s->code, X86_EDI);
/* spill operands sitting in registers clobbered before they are read */
2560 if (size_reg == X86_EDI || size_reg == X86_EAX) {
2561 size_offset = ++spill_pos;
2563 if (dest_reg == X86_ECX || dest_reg == X86_EAX) {
2564 dest_offset = ++spill_pos;
2566 if (value_reg == X86_ECX || value_reg == X86_EDI) {
2567 value_offset = ++spill_pos;
2571 x86_push_reg (s->code, value_reg);
2573 x86_push_reg (s->code, dest_reg);
2575 x86_push_reg (s->code, size_reg);
/* load EAX/EDI/ECX from spill slot or operand register */
2577 if (value_reg != X86_EAX) {
2579 x86_mov_reg_membase (s->code, X86_EAX, X86_ESP, (value_offset-1)<<2, 4);
2581 x86_mov_reg_reg (s->code, X86_EAX, value_reg, 4);
2583 if (dest_reg != X86_EDI) {
2585 x86_mov_reg_membase (s->code, X86_EDI, X86_ESP, (dest_offset-1)<<2, 4);
2587 x86_mov_reg_reg (s->code, X86_EDI, dest_reg, 4);
2589 if (size_reg != X86_ECX) {
2591 x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, (size_offset-1)<<2, 4);
2593 x86_mov_reg_reg (s->code, X86_ECX, size_reg, 4);
/* replicate the low byte of EAX into all four byte lanes */
2596 x86_widen_reg (s->code, X86_EAX, X86_EAX, FALSE, FALSE);
2597 x86_mov_reg_reg (s->code, X86_EDX, X86_EAX, 4);
2598 x86_shift_reg_imm (s->code, X86_SHL, X86_EAX, 8);
2599 x86_alu_reg_reg (s->code, X86_OR, X86_EAX, X86_EDX);
2600 x86_mov_reg_reg (s->code, X86_EDX, X86_EAX, 4);
2601 x86_shift_reg_imm (s->code, X86_SHL, X86_EAX, 16);
2602 x86_alu_reg_reg (s->code, X86_OR, X86_EAX, X86_EDX);
/* keep the full byte count on the stack while ECX holds size/4 */
2604 x86_push_reg (s->code, X86_ECX);
2605 x86_shift_reg_imm (s->code, X86_SHR, X86_ECX, 2);
2608 // init whole dwords first
2609 x86_prefix (s->code, X86_REP_PREFIX);
2610 x86_stosd (s->code);
2612 x86_pop_reg (s->code, X86_ECX);
2613 x86_alu_reg_imm (s->code, X86_AND, X86_ECX, 3);
2615 // init remaining bytes (if any)
2616 x86_prefix (s->code, X86_REP_PREFIX);
2617 x86_stosb (s->code);
2619 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, spill_pos<<2);
2622 x86_pop_reg (s->code, X86_EDI);
/* out-of-line path: memset (dest, value, size) — cdecl, caller pops 12 */
2625 x86_push_reg (s->code, size_reg);
2626 x86_push_reg (s->code, value_reg);
2627 x86_push_reg (s->code, dest_reg);
2628 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, memset);
2629 x86_call_code (s->code, 0);
2630 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
/* unconditional branch to a basic block (patched later via jump info) */
2639 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2640 x86_jump32 (s->code, 0);
/* COMPARE rules producing cflags: the MBCOND cost predicates select the
 * register-resident variant (reg >= 0) or the stack-slot variant (reg < 0)
 * for locals addressed via ADDR_L. */
2643 cflags: COMPARE (reg, LDIND_I4 (ADDR_L)) {
2644 int treg = VARINFO (s, tree->right->left->data.i).reg;
2645 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, treg);
2647 MBCOND ((VARINFO (data, tree->right->left->data.i).reg >= 0));
2651 cflags: COMPARE (LDIND_I4 (ADDR_L), CONST_I4) {
2652 int treg = VARINFO (s, tree->left->left->data.i).reg;
2653 x86_alu_reg_imm (s->code, X86_CMP, treg, tree->right->data.i);
2655 MBCOND ((VARINFO (data, tree->left->left->data.i).reg >= 0));
2659 cflags: COMPARE (LDIND_I4 (ADDR_L), reg) {
2660 int treg = VARINFO (s, tree->left->left->data.i).reg;
2661 x86_alu_reg_reg (s->code, X86_CMP, treg, tree->right->reg1);
2663 MBCOND ((VARINFO (data, tree->left->left->data.i).reg >= 0));
/* stack-slot variant: compare [EBP+offset] against the immediate */
2667 cflags: COMPARE (LDIND_I4 (ADDR_L), CONST_I4) {
2668 int offset = VARINFO (s, tree->left->left->data.i).offset;
2669 x86_alu_membase_imm (s->code, X86_CMP, X86_EBP, offset, tree->right->data.i);
2671 MBCOND ((VARINFO (data, tree->left->left->data.i).reg < 0));
2675 cflags: COMPARE (reg, CONST_I4) {
2676 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
2679 cflags: COMPARE (reg, reg) {
2680 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
/* conditional branch on the flags produced by a COMPARE above; the third
 * x86_branch32 argument selects the signed (TRUE) or unsigned (FALSE)
 * condition-code encoding. */
2684 stmt: CBRANCH (cflags) {
2685 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
2687 switch (tree->data.bi.cond) {
2689 x86_branch32 (s->code, X86_CC_LT, 0, TRUE);
2692 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
2695 x86_branch32 (s->code, X86_CC_GT, 0, TRUE);
2698 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
2701 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
2704 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
2707 x86_branch32 (s->code, X86_CC_GE, 0, TRUE);
2710 x86_branch32 (s->code, X86_CC_GE, 0, FALSE);
2713 x86_branch32 (s->code, X86_CC_LE, 0, TRUE);
2716 x86_branch32 (s->code, X86_CC_LE, 0, FALSE);
2719 g_assert_not_reached ();
/* BRTRUE on a local: test the register copy when the local is register
 * allocated (treg >= 0), otherwise compare the EBP stack slot against 0. */
2723 stmt: BRTRUE (LDIND_I4 (ADDR_L)) {
2724 int treg = VARINFO (s, tree->left->left->data.i).reg;
2725 int offset = VARINFO (s, tree->left->left->data.i).offset;
2728 x86_test_reg_reg (s->code, treg, treg);
2730 x86_alu_membase_imm (s->code, X86_CMP, X86_EBP, offset, 0);
2732 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2733 x86_branch32 (s->code, X86_CC_NE, 0, TRUE);
2736 stmt: BRTRUE (reg) {
2737 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
2738 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2739 x86_branch32 (s->code, X86_CC_NE, 0, TRUE);
/* BRFALSE: same shapes, branching on equal-to-zero instead */
2742 stmt: BRFALSE (LDIND_I4 (ADDR_L)) {
2743 int treg = VARINFO (s, tree->left->left->data.i).reg;
2744 int offset = VARINFO (s, tree->left->left->data.i).offset;
2747 x86_test_reg_reg (s->code, treg, treg);
2749 x86_alu_membase_imm (s->code, X86_CMP, X86_EBP, offset, 0);
2751 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2752 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
2754 //{static int cx= 0; printf ("CX1 %5d\n", cx++);}
2757 stmt: BRFALSE (reg) {
2758 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
2759 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2760 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
/* BREAK opcode: emit int3 */
2764 x86_breakpoint (s->code);
/* return-with-value: move the result into EAX, then (unless this is the
 * final instruction) jump to the shared epilog */
2768 if (tree->left->reg1 != X86_EAX)
2769 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
2771 if (!tree->last_instr) {
2772 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
2773 x86_jump32 (s->code, 0);
/* void return: jump to the epilog unless we fall through to it */
2778 if (!tree->last_instr) {
2779 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
2780 x86_jump32 (s->code, 0);
/* push a 4-byte call argument loaded through a general address mode;
 * tree->data.arg_info.pad is the alignment padding for the frame. */
2784 stmt: ARG_I4 (LDIND_I4 (addr)) {
2785 MBTree *at = tree->left->left;
2786 int pad = tree->data.arg_info.pad;
2790 switch (at->data.ainfo.amode) {
2793 x86_push_mem (s->code, at->data.ainfo.offset);
2797 x86_push_membase (s->code, at->data.ainfo.basereg, at->data.ainfo.offset);
2800 x86_push_memindex (s->code, X86_NOBASEREG, at->data.ainfo.offset,
2801 at->data.ainfo.indexreg, at->data.ainfo.shift);
2804 x86_push_memindex (s->code, at->data.ainfo.basereg,
2805 at->data.ainfo.offset, at->data.ainfo.indexreg,
2806 at->data.ainfo.shift);
/* register-allocated local argument: push the register directly */
2811 stmt: ARG_I4 (LDIND_I4 (ADDR_L)) {
2812 int treg = VARINFO (s, tree->left->left->data.i).reg;
2813 int pad = tree->data.arg_info.pad;
2816 x86_push_reg (s->code, treg);
2818 MBCOND ((VARINFO (data, tree->left->left->data.i).reg >= 0));
2822 stmt: ARG_I4 (reg) {
2823 int pad = tree->data.arg_info.pad;
2826 x86_push_reg (s->code, tree->left->reg1);
/* push a global address / immediate constant argument */
2829 stmt: ARG_I4 (ADDR_G) {
2830 int pad = tree->data.arg_info.pad;
2833 x86_push_imm (s->code, tree->left->data.p);
2836 stmt: ARG_I4 (CONST_I4) "MB_USE_OPT1(0)" {
2837 int pad = tree->data.arg_info.pad;
2840 x86_push_imm (s->code, tree->left->data.i);
2844 PRINT_REG ("THIS", tree->reg1);
/* null-check `this` by touching its vtable word: the cmp faults (and the
 * runtime turns that into a NullReferenceException) if the object is NULL */
2847 reg: CHECKTHIS (reg) {
2848 /* try to access the vtable - this will raise an exception
2849 * if the object is NULL */
2850 x86_alu_membase_imm (s->code, X86_CMP, tree->left->reg1, 0, 0);
2851 if (tree->reg1 != tree->left->reg1)
2852 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* statement form: null-check only, no result register */
2855 stmt: CHECKTHIS (reg) {
2856 x86_alu_membase_imm (s->code, X86_CMP, tree->left->reg1, 0, 0);
/* tail-call (CIL jmp): restore the callee-saved registers from their
 * frame slots, tear down the frame, and jump to the target method */
2863 /* restore callee saved registers */
2864 if (mono_regset_reg_used (s->rs, X86_EBX)) {
2865 x86_mov_reg_membase (s->code, X86_EBX, X86_EBP, pos, 4);
2868 if (mono_regset_reg_used (s->rs, X86_EDI)) {
2869 x86_mov_reg_membase (s->code, X86_EDI, X86_EBP, pos, 4);
2872 if (mono_regset_reg_used (s->rs, X86_ESI)) {
2873 x86_mov_reg_membase (s->code, X86_ESI, X86_EBP, pos, 4);
2876 /* restore ESP/EBP */
2877 x86_leave (s->code);
2879 /* jump to the method */
2880 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->data.p);
2881 x86_jump32 (s->code, 0);
/* indirect call through a register; treg must not collide with the
 * `this` or target registers (the chain of == checks picks a free one,
 * asserting if none exists).  Result is returned in EAX. */
2886 reg: CALL_I4 (this, reg) {
2888 int lreg = tree->left->reg1;
2889 int rreg = tree->right->reg1;
2891 if (lreg == treg || rreg == treg)
2893 if (lreg == treg || rreg == treg)
2895 if (lreg == treg || rreg == treg)
2896 mono_assert_not_reached ();
2900 x86_call_reg (s->code, rreg);
2904 mono_assert (tree->reg1 == X86_EAX);
/* direct call to an absolute address, patched via jump info;
 * X86_REMOTING_CHECK guards the transparent-proxy slow path */
2907 reg: CALL_I4 (this, ADDR_G) {
2908 int lreg = tree->left->reg1;
2914 if (X86_REMOTING_CHECK)
2921 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
2922 x86_call_code (s->code, 0);
2927 mono_assert (tree->reg1 == X86_EAX);
/* load an interface-method function pointer via the runtime helper
 * mono_ldintftn (the raw vtable slot may hold a magic trampoline, so it
 * cannot be handed out directly).  EAX/ECX/EDX are caller-saved around
 * the helper call; the result lands in EAX and is moved to reg1. */
2930 reg: LDVIRTFTN (reg, INTF_ADDR) {
2931 /* we cant return the value in the vtable, because it can be
2932 * a magic trampoline, and we cant pass that to the outside world */
2934 if (tree->reg1 != X86_EAX)
2935 x86_push_reg (s->code, X86_EAX);
2936 x86_push_reg (s->code, X86_ECX);
2937 x86_push_reg (s->code, X86_EDX);
2939 x86_push_imm (s->code, tree->right->data.m->klass->interface_id);
2940 x86_push_reg (s->code, tree->left->reg1);
2941 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldintftn);
2942 x86_call_code (s->code, 0);
2943 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
2945 x86_pop_reg (s->code, X86_EDX);
2946 x86_pop_reg (s->code, X86_ECX);
2947 if (tree->reg1 != X86_EAX) {
2948 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
2949 x86_pop_reg (s->code, X86_EAX);
/* interface call: object -> vtable -> interface_offsets[interface_id]
 * -> itable, then call through slot*4 */
2953 reg: CALL_I4 (this, INTF_ADDR) {
2954 int lreg = tree->left->reg1;
2962 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2963 x86_mov_reg_membase (s->code, lreg, lreg,
2964 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
2965 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
2966 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
2970 mono_assert (tree->reg1 == X86_EAX);
/* load a virtual-method function pointer via mono_ldvirtftn; same
 * caller-save protocol as the interface variant above */
2973 reg: LDVIRTFTN (reg, VFUNC_ADDR) {
2974 /* we cant return the value in the vtable, because it can be
2975 * a magic trampoline, and we cant pass that to the outside world */
2977 if (tree->reg1 != X86_EAX)
2978 x86_push_reg (s->code, X86_EAX);
2979 x86_push_reg (s->code, X86_ECX);
2980 x86_push_reg (s->code, X86_EDX);
2982 x86_push_imm (s->code, tree->right->data.m->slot);
2983 x86_push_reg (s->code, tree->left->reg1);
2984 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldvirtftn);
2985 x86_call_code (s->code, 0);
2986 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
2988 x86_pop_reg (s->code, X86_EDX);
2989 x86_pop_reg (s->code, X86_ECX);
2990 if (tree->reg1 != X86_EAX) {
2991 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
2992 x86_pop_reg (s->code, X86_EAX);
/* LDFTN: resolve a method address via the mono_ldftn helper, saving the
 * caller-save registers around the call; result moved from EAX to reg1 */
2997 if (tree->reg1 != X86_EAX)
2998 x86_push_reg (s->code, X86_EAX);
2999 x86_push_reg (s->code, X86_ECX);
3000 x86_push_reg (s->code, X86_EDX);
3002 x86_push_imm (s->code, tree->data.m);
3003 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldftn);
3004 x86_call_code (s->code, 0);
3005 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer));
3007 x86_pop_reg (s->code, X86_EDX);
3008 x86_pop_reg (s->code, X86_ECX);
3009 if (tree->reg1 != X86_EAX) {
3010 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
3011 x86_pop_reg (s->code, X86_EAX);
/* virtual call: object -> vtable, call through vtable[slot] */
3016 reg: CALL_I4 (this, VFUNC_ADDR) {
3017 int lreg = tree->left->reg1;
3025 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3026 x86_call_virtual (s->code, lreg,
3027 G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
3031 mono_assert (tree->reg1 == X86_EAX);
/* void-returning call variants — same call shapes as the CALL_I4 rules
 * above, minus the EAX result assertion */
3034 stmt: CALL_VOID (this, ADDR_G) {
3035 int lreg = tree->left->reg1;
3041 if (X86_REMOTING_CHECK)
3048 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
3049 x86_call_code (s->code, 0);
3055 stmt: CALL_VOID (this, reg) {
3057 int lreg = tree->left->reg1;
3058 int rreg = tree->right->reg1;
3060 if (lreg == treg || rreg == treg)
3062 if (lreg == treg || rreg == treg)
3064 if (lreg == treg || rreg == treg)
3065 mono_assert_not_reached ();
3069 x86_call_reg (s->code, tree->right->reg1);
/* interface dispatch: vtable -> interface_offsets[interface_id] -> slot */
3074 stmt: CALL_VOID (this, INTF_ADDR) {
3075 int lreg = tree->left->reg1;
3083 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3084 x86_mov_reg_membase (s->code, lreg, lreg,
3085 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
3086 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
3087 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
3092 stmt: CALL_VOID (this, VFUNC_ADDR) {
3093 int lreg = tree->left->reg1;
3101 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3102 x86_call_virtual (s->code, lreg,
3103 G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
/* switch: jt[0] holds the case count; bounds-check the selector, branch
 * to the default target (jt[jt[0]+1]) when out of range, else do an
 * indirect jump through the jump table */
3108 stmt: SWITCH (reg) {
3110 guint32 *jt = (guint32 *)tree->data.p;
3112 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, jt [0]);
/* 6 = encoded length of the following branch32; offset is the address
 * of the instruction after it, used to compute the rel32 displacement */
3113 offset = 6 + (guint32)s->code;
3114 x86_branch32 (s->code, X86_CC_GE, jt [jt [0] + 1] - offset, FALSE);
3116 x86_mov_reg_memindex (s->code, X86_EAX, X86_NOBASEREG,
3117 tree->data.i + 4, tree->left->reg1, 2, 4);
3118 x86_jump_reg (s->code, X86_EAX);
/* narrowing/widening conversions from a 64-bit pair (lreg): only the low
 * word (reg1) matters; x86_widen_reg's flags select sign- vs zero-extend
 * and byte vs word source */
3125 reg: CONV_I1 (lreg) {
3126 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, FALSE);
3129 reg: CONV_U1 (lreg) {
3130 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
3133 reg: CONV_I2 (lreg) {
3134 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, TRUE);
3137 reg: CONV_U2 (lreg) {
3138 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
/* i8 -> i4/u4: plain truncation, just move the low word if needed */
3141 reg: CONV_I4 (lreg) {
3142 if (tree->reg1 != tree->left->reg1)
3143 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3146 reg: CONV_U4 (lreg) {
3147 if (tree->reg1 != tree->left->reg1)
3148 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* checked i8 -> i4: the value fits iff the high word is the sign
 * extension of the low word (all zeros for non-negative low words,
 * all ones for negative ones); otherwise throw OverflowException */
3151 reg: CONV_OVF_I4 (lreg) {
3152 guint8 *br [3], *label [1];
3155 * Valid ints: 0xffffffff:80000000 to 00000000:0x7fffffff
3157 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
3159 /* If the low word top bit is set, see if we are negative */
3160 br [0] = s->code; x86_branch8 (s->code, X86_CC_LT, 0, TRUE);
3161 /* We are not negative (no top bit set, check for our top word to be zero */
3162 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
3163 br [1] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, TRUE);
/* label[0]: the throw path — reached by falling through, or via br[2] */
3164 label [0] = s->code;
3166 /* throw exception */
3167 x86_push_imm (s->code, "OverflowException");
3168 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS,
3169 arch_get_throw_exception_by_name ());
3170 x86_call_code (s->code, 0);
3172 x86_patch (br [0], s->code);
3173 /* our top bit is set, check that top word is 0xfffffff */
3174 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg2, 0xffffffff);
/* br[1] (valid, non-negative) lands here; its ZF=1 makes the NE branch
 * below fall through to the success path */
3176 x86_patch (br [1], s->code);
3177 /* nope, emit exception */
3178 br [2] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3179 x86_patch (br [2], label [0]);
3181 if (tree->reg1 != tree->left->reg1)
3182 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* checked i8/u8 -> u4 and u8 -> i4: value fits iff the high word is zero */
3185 reg: CONV_OVF_U4 (lreg) {
3186 /* Keep in sync with CONV_OVF_I4_UN below, they are the same on 32-bit machines */
3187 /* top word must be 0 */
3188 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
3189 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException");
3190 if (tree->reg1 != tree->left->reg1)
3191 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3194 reg: CONV_OVF_I4_UN (lreg) {
3195 /* Keep in sync with CONV_OVF_U4 above, they are the same on 32-bit machines */
3196 /* top word must be 0 */
3197 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
3198 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException");
3199 if (tree->reg1 != tree->left->reg1)
3200 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* 64-bit constant: load the two 32-bit halves of data.p into reg1/reg2 */
3206 x86_mov_reg_imm (s->code, tree->reg1, *((gint32 *)&tree->data.p));
3207 x86_mov_reg_imm (s->code, tree->reg2, *((gint32 *)&tree->data.p + 1));
/* i4 constant -> i8: sign-extend at compile time (high word 0 or -1) */
3210 lreg: CONV_I8 (CONST_I4) {
3211 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
3213 if (tree->left->data.i >= 0)
3214 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3216 x86_mov_reg_imm (s->code, tree->reg2, -1);
/* i4 reg -> i8: zero the high word, then overwrite it with -1 when the
 * value is negative; the mov is skipped by a branch over exactly 5 bytes
 * (asserted below) */
3219 lreg: CONV_I8 (reg) {
3222 if (tree->reg1 != tree->left->reg1)
3223 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3225 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3226 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 0);
3227 x86_branch8 (s->code, X86_CC_GE, 5, TRUE);
3229 x86_mov_reg_imm (s->code, tree->reg2, -1);
3230 mono_assert ((s->code - i1) == 5);
/* u4 -> u8: high word is always zero */
3233 lreg: CONV_U8 (CONST_I4) 1 {
3234 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
3235 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3238 lreg: CONV_U8 (reg) {
3239 if (tree->reg1 != tree->left->reg1)
3240 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3241 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
/* checked i4 constant -> u8: negative constants always overflow, so the
 * throw can be emitted unconditionally at compile time */
3244 lreg: CONV_OVF_U8 (CONST_I4) {
3245 if (tree->left->data.i < 0){
3246 x86_push_imm (s->code, "OverflowException");
3247 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS,
3248 arch_get_throw_exception_by_name ());
3249 x86_call_code (s->code, 0);
3251 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
3252 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
/* checked u4 constant -> i8: always fits */
3256 lreg: CONV_OVF_I8_UN (CONST_I4) {
3257 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
3258 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
/* checked i4 -> u8: the value must be non-negative, i.e. the sign bit of
 * the 32-bit source must be clear; then zero-extend into the high word.
 *
 * FIX: the test mask was 0x8000000 (bit 27), which let negative values
 * whose bit 27 happened to be clear pass the overflow check; the sign
 * bit of a 32-bit value is 0x80000000. */
3261 lreg: CONV_OVF_U8 (reg) {
3262 x86_test_reg_imm (s->code, tree->left->reg1, 0x80000000);
3263 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException");
3265 if (tree->reg1 != tree->left->reg1)
3266 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3267 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
/* checked u4 -> i8: every u4 fits in i8, so no check is needed */
3270 lreg: CONV_OVF_I8_UN (reg) {
3271 /* Convert uint value into int64, we pass everything */
3272 if (tree->reg1 != tree->left->reg1)
3273 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3274 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
/* store a 64-bit value: two 4-byte movs (low word at offset, high word
 * at offset+4) for each supported address mode */
3277 stmt: STIND_I8 (addr, lreg) {
3279 switch (tree->left->data.ainfo.amode) {
3282 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 4);
3283 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset + 4, tree->right->reg2, 4);
3287 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
3288 tree->left->data.ainfo.offset, tree->right->reg1, 4);
3289 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
3290 tree->left->data.ainfo.offset + 4, tree->right->reg2, 4);
3293 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
3294 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
3295 tree->right->reg1, 4);
3296 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset + 4,
3297 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
3298 tree->right->reg2, 4);
3301 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
3302 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
3303 tree->right->reg1, 4);
3304 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset + 4,
3305 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
3306 tree->right->reg2, 4);
/* store a 64-bit field on a possibly-remote object: if the object's
 * vtable class is TransparentProxy, route the store through
 * mono_store_remote_field; otherwise do a plain two-word store. */
3311 stmt: REMOTE_STIND_I8 (reg, lreg) {
/* borrow right->reg1 to load the vtable klass for the proxy check,
 * preserving its value around the comparison */
3315 x86_push_reg (s->code, tree->right->reg1);
3316 x86_mov_reg_membase (s->code, tree->right->reg1, tree->left->reg1, 0, 4);
3317 x86_alu_membase_imm (s->code, X86_CMP, tree->right->reg1, 0, ((int)mono_defaults.transparent_proxy_class));
3318 x86_pop_reg (s->code, tree->right->reg1);
3320 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3322 /* this is a transparent proxy - remote the call */
3324 /* save value to stack */
3325 x86_push_reg (s->code, tree->right->reg2);
3326 x86_push_reg (s->code, tree->right->reg1);
/* args: (proxy, klass, field, &value) — 24 bytes popped after the call */
3328 x86_push_reg (s->code, X86_ESP);
3329 x86_push_imm (s->code, tree->data.fi.field);
3330 x86_push_imm (s->code, tree->data.fi.klass);
3331 x86_push_reg (s->code, tree->left->reg1);
3332 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
3333 x86_call_code (s->code, 0);
3334 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 24);
3336 br [1] = s->code; x86_jump8 (s->code, 0);
/* local (non-proxy) path: direct two-word store; valuetype fields are
 * offset past the MonoObject header */
3338 x86_patch (br [0], s->code);
3339 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
3340 tree->data.fi.field->offset;
3341 x86_mov_membase_reg (s->code, tree->left->reg1, offset, tree->right->reg1, 4);
3342 x86_mov_membase_reg (s->code, tree->left->reg1, offset + 4, tree->right->reg2, 4);
3344 x86_patch (br [1], s->code);
# an addr can use two address register (base and index register). The must take care
# that we do not override them (thus the use of x86_lea)
/* load a 64-bit value: materialize the effective address into reg2 with
 * lea (so the base/index registers are not clobbered mid-sequence), then
 * read the low word into reg1 and the high word into reg2 last */
3350 lreg: LDIND_I8 (addr) {
3352 switch (tree->left->data.ainfo.amode) {
3355 x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4);
3356 x86_mov_reg_mem (s->code, tree->reg2, tree->left->data.ainfo.offset + 4, 4);
3360 x86_lea_membase (s->code, tree->reg2, tree->left->data.ainfo.basereg,
3361 tree->left->data.ainfo.offset);
3362 x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4);
3363 x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4);
3366 x86_lea_memindex (s->code, tree->reg2, X86_NOBASEREG, tree->left->data.ainfo.offset,
3367 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift);
3368 x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4);
3369 x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4);
3372 x86_lea_memindex (s->code, tree->reg2, tree->left->data.ainfo.basereg,
3373 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
3374 tree->left->data.ainfo.shift);
3375 x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4);
3376 x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4);
3379 PRINT_REG ("LDIND_I8_0", tree->reg1);
3380 PRINT_REG ("LDIND_I8_1", tree->reg2);
/* 64-bit arithmetic shift right by a constant: counts < 32 use
 * shrd (low gets bits shifted in from the high word) + sar on the high
 * word; counts 32..63 move the high word down and sign-fill the high
 * word with sar 31.  Counts >= 64 are left unspecified, as in CIL. */
3383 lreg: SHR (lreg, CONST_I4) {
3384 if (tree->right->data.i < 32) {
3385 x86_shrd_reg_imm (s->code, tree->left->reg1, tree->left->reg2, tree->right->data.i);
3386 x86_shift_reg_imm (s->code, X86_SAR, tree->left->reg2, tree->right->data.i);
3387 if (tree->reg1 != tree->left->reg1)
3388 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3389 if (tree->reg2 != tree->left->reg2)
3390 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
3391 } else if (tree->right->data.i < 64) {
3392 if (tree->reg1 != tree->left->reg2)
3393 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg2, 4);
3394 if (tree->reg2 != tree->left->reg2)
3395 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
3396 x86_shift_reg_imm (s->code, X86_SAR, tree->reg2, 31);
3397 x86_shift_reg_imm (s->code, X86_SAR, tree->reg1, (tree->right->data.i - 32));
3398 } /* else unspecified result */
/* unsigned variant: shr instead of sar, and the high word becomes 0 for
 * counts 32..63 */
3401 lreg: SHR_UN (lreg, CONST_I4) {
3402 if (tree->right->data.i < 32) {
3403 x86_shrd_reg_imm (s->code, tree->left->reg1, tree->left->reg2, tree->right->data.i);
3404 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg2, tree->right->data.i);
3405 if (tree->reg1 != tree->left->reg1)
3406 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3407 if (tree->reg2 != tree->left->reg2)
3408 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
3409 } else if (tree->right->data.i < 64) {
3410 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg2, 4);
3411 x86_shift_reg_imm (s->code, X86_SHR, tree->reg1, (tree->right->data.i - 32));
3412 x86_mov_reg_imm (s->code, tree->reg2, 0);
3413 } /* else unspecified result */
/* 64-bit shift-right of an lreg by a runtime count.  The count must be in
 * ECX (x86 variable shifts only take CL).  SHRD/SAR (or SHR) handle counts
 * 0..31; the hardware masks the count to 5 bits, so an extra TEST on bit 5
 * of ECX detects counts >= 32 and patches up the halves afterwards. */
3416 lreg: SHR (lreg, reg) {
3419 	if (tree->right->reg1 != X86_ECX)
3420 		x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
3422 	x86_shrd_reg (s->code, tree->left->reg1, tree->left->reg2);
3423 	x86_shift_reg (s->code, X86_SAR, tree->left->reg2);
3424 	x86_test_reg_imm (s->code, X86_ECX, 32);
3425 	br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
/* count >= 32: low word takes the (already SAR-shifted) high word, and the
 * high word must become pure sign bits. */
3426 	x86_mov_reg_reg (s->code, tree->left->reg1, tree->left->reg2, 4);
/* NOTE(review): this SARs tree->reg2 while every other instruction in this
 * fixup operates on tree->left->reg2 (MOVE_LREG copies left->reg* into reg*
 * only afterwards).  Looks inconsistent — verify the register allocator
 * guarantees reg2 == left->reg2 here, as in the SHL rule below which uses
 * tree->left->reg1. */
3427 	x86_shift_reg_imm (s->code, X86_SAR, tree->reg2, 31);
3428 	x86_patch (br [0], s->code);
3430 	MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3433 lreg: SHR_UN (lreg, reg) {
3436 	if (tree->right->reg1 != X86_ECX)
3437 		x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
3439 	x86_shrd_reg (s->code, tree->left->reg1, tree->left->reg2);
3440 	x86_shift_reg (s->code, X86_SHR, tree->left->reg2);
3441 	x86_test_reg_imm (s->code, X86_ECX, 32);
3442 	br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
3443 	x86_mov_reg_reg (s->code, tree->left->reg1, tree->left->reg2, 4);
/* NOTE(review): for an unsigned shift with count >= 32 the high word should
 * end up zero, but SHR by 31 leaves the old top bit in bit 0 (and it targets
 * tree->reg2 rather than tree->left->reg2 — same inconsistency as SHR above).
 * Compare the SHL rule, which XORs the half to zero.  TODO confirm against
 * later Mono revisions whether this was fixed. */
3444 	x86_shift_reg_imm (s->code, X86_SHR, tree->reg2, 31);
3445 	x86_patch (br [0], s->code);
3447 	MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* 64-bit shift-left of an lreg, by constant and by runtime count.  Mirrors
 * the SHR rules: SHLD moves bits from the low word into the high word, SHL
 * shifts the low word; counts >= 32 move low->high and zero the low word. */
3450 lreg: SHL (lreg, CONST_I4) {
3451 	if (tree->right->data.i < 32) {
3452 		x86_shld_reg_imm (s->code, tree->left->reg2, tree->left->reg1, tree->right->data.i);
3453 		x86_shift_reg_imm (s->code, X86_SHL, tree->left->reg1, tree->right->data.i);
3454 		if (tree->reg1 != tree->left->reg1)
3455 			x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3456 		if (tree->reg2 != tree->left->reg2)
3457 			x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
3458 	} else if (tree->right->data.i < 64) {
/* 32 <= count < 64: high result = low word << (count-32); low result = 0. */
3459 		x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg1, 4);
3460 		x86_shift_reg_imm (s->code, X86_SHL, tree->reg2, (tree->right->data.i - 32));
3461 		x86_alu_reg_reg (s->code, X86_XOR, tree->reg1, tree->reg1);
3462 	} /* else unspecified result */
3465 lreg: SHL (lreg, reg) {
/* Runtime count must live in ECX; bit 5 of ECX distinguishes counts >= 32
 * (hardware masks the shift count to 5 bits). */
3468 	if (tree->right->reg1 != X86_ECX)
3469 		x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
3471 	x86_shld_reg (s->code, tree->left->reg2, tree->left->reg1);
3472 	x86_shift_reg (s->code, X86_SHL, tree->left->reg1);
3473 	x86_test_reg_imm (s->code, X86_ECX, 32);
3474 	br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
/* count >= 32 fixup: high takes the shifted low word, low becomes zero. */
3475 	x86_mov_reg_reg (s->code, tree->left->reg2, tree->left->reg1, 4);
3476 	x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->left->reg1);
3477 	x86_patch (br [0], s->code);
3479 	MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* 64-bit add/subtract and bitwise ops on register pairs.  Add/sub chain the
 * carry through the high word (ADD/ADC, SUB/SBB); the _OVF variants raise
 * OverflowException on signed overflow (OF set after the high-word op) and
 * the _OVF_UN variants on unsigned overflow (CF set). */
3482 lreg: ADD (lreg, lreg) {
3483 	x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
3484 	x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2);
3486 	MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3489 lreg: ADD_OVF (lreg, lreg) {
3490 	x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
3491 	x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2);
3492 	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
3494 	MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3497 lreg: ADD_OVF_UN (lreg, lreg) {
3498 	x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
3499 	x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2);
3500 	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
3502 	MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3505 lreg: SUB (lreg, lreg) {
3506 	x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
3507 	x86_alu_reg_reg (s->code, X86_SBB, tree->left->reg2, tree->right->reg2);
3509 	MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3512 lreg: SUB_OVF (lreg, lreg) {
3513 	x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
3514 	x86_alu_reg_reg (s->code, X86_SBB, tree->left->reg2, tree->right->reg2);
3515 	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
3517 	MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3520 lreg: SUB_OVF_UN (lreg, lreg) {
3521 	x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
3522 	x86_alu_reg_reg (s->code, X86_SBB, tree->left->reg2, tree->right->reg2);
3523 	EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
3525 	MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* Bitwise ops apply independently to each half; no carry involved. */
3528 lreg: AND (lreg, lreg) {
3529 	x86_alu_reg_reg (s->code, X86_AND, tree->left->reg1, tree->right->reg1);
3530 	x86_alu_reg_reg (s->code, X86_AND, tree->left->reg2, tree->right->reg2);
3532 	MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3535 lreg: OR (lreg, lreg) {
3536 	x86_alu_reg_reg (s->code, X86_OR, tree->left->reg1, tree->right->reg1);
3537 	x86_alu_reg_reg (s->code, X86_OR, tree->left->reg2, tree->right->reg2);
3539 	MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3542 lreg: XOR (lreg, lreg) {
3543 	x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->right->reg1);
3544 	x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg2, tree->right->reg2);
3546 	MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* Fragment of the 64-bit negate rule (rule header elided in this listing):
 * two's-complement negation of a pair: NEG low; ADC high, 0; NEG high —
 * equivalent to 0 - value with borrow propagation. */
3550 	MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3552 	x86_neg_reg (s->code, tree->reg1);
3553 	x86_alu_reg_imm (s->code, X86_ADC, tree->reg2, 0);
3554 	x86_neg_reg (s->code, tree->reg2);
/* Fragment of the 64-bit bitwise-NOT rule (header elided): NOT each half. */
3558 	MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3560 	x86_not_reg (s->code, tree->reg1);
3561 	x86_not_reg (s->code, tree->reg2);
/* 64-bit multiply rules: emitted as calls to C runtime helpers (mono_llmult
 * and the _ovf variants) using the cdecl convention — both 64-bit operands
 * are pushed as two 32-bit words, the result returns in EDX:EAX.  ECX is
 * caller-saved, so it is preserved around the call when live. */
3564 lreg: MUL (lreg, lreg) {
3565 	if (mono_regset_reg_used (s->rs, X86_ECX))
3566 		x86_push_reg (s->code, X86_ECX);
3568 	x86_push_reg (s->code, tree->right->reg2);
3569 	x86_push_reg (s->code, tree->right->reg1);
3570 	x86_push_reg (s->code, tree->left->reg2);
3571 	x86_push_reg (s->code, tree->left->reg1);
3572 	mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llmult);
3573 	x86_call_code (s->code, 0);
3574 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3576 	if (mono_regset_reg_used (s->rs, X86_ECX))
3577 		x86_pop_reg (s->code, X86_ECX);
3579 	mono_assert (tree->reg1 == X86_EAX &&
3580 		     tree->reg2 == X86_EDX);
3583 lreg: MUL_OVF (lreg, lreg) {
3584 	if (mono_regset_reg_used (s->rs, X86_ECX))
3585 		x86_push_reg (s->code, X86_ECX);
3587 	x86_push_reg (s->code, tree->right->reg2);
3588 	x86_push_reg (s->code, tree->right->reg1);
3589 	x86_push_reg (s->code, tree->left->reg2);
3590 	x86_push_reg (s->code, tree->left->reg1);
3591 	/* pass a pointer to store the resulting exception -
3592 	 * ugly, but it works */
/* The pushed ESP value points at its own stack slot; the helper writes the
 * exception object (or NULL) through that pointer.  After the call the slot
 * is reloaded into ECX before the 20 bytes of arguments are popped. */
3593 	x86_push_reg (s->code, X86_ESP);
3594 	mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llmult_ovf);
3595 	x86_call_code (s->code, 0);
3596 	x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, 4, 4);
3597 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
3598 	x86_alu_reg_imm (s->code, X86_CMP, X86_ECX, 0);
3600 	/* cond. emit exception */
/* Skip exactly 7 bytes (push reg = 1, call rel32 = 5 ... hard-coded branch
 * displacement over the throw sequence) when no exception was produced. */
3601 	x86_branch8 (s->code, X86_CC_EQ, 7, FALSE);
3602 	x86_push_reg (s->code, X86_ECX);
3603 	mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, arch_get_throw_exception ());
3604 	x86_call_code (s->code, 0);
3606 	if (mono_regset_reg_used (s->rs, X86_ECX))
3607 		x86_pop_reg (s->code, X86_ECX);
3609 	mono_assert (tree->reg1 == X86_EAX &&
3610 		     tree->reg2 == X86_EDX);
3613 lreg: MUL_OVF_UN (lreg, lreg) {
/* Identical to MUL_OVF but calls the unsigned helper mono_llmult_ovf_un. */
3614 	if (mono_regset_reg_used (s->rs, X86_ECX))
3615 		x86_push_reg (s->code, X86_ECX);
3617 	x86_push_reg (s->code, tree->right->reg2);
3618 	x86_push_reg (s->code, tree->right->reg1);
3619 	x86_push_reg (s->code, tree->left->reg2);
3620 	x86_push_reg (s->code, tree->left->reg1);
3621 	/* pass a pointer to store the resulting exception -
3622 	 * ugly, but it works */
3623 	x86_push_reg (s->code, X86_ESP);
3624 	mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llmult_ovf_un);
3625 	x86_call_code (s->code, 0);
3626 	x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, 4, 4);
3627 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
3628 	x86_alu_reg_imm (s->code, X86_CMP, X86_ECX, 0);
3630 	/* cond. emit exception */
3631 	x86_branch8 (s->code, X86_CC_EQ, 7, FALSE);
3632 	x86_push_reg (s->code, X86_ECX);
3633 	mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, arch_get_throw_exception ());
3634 	x86_call_code (s->code, 0);
3636 	if (mono_regset_reg_used (s->rs, X86_ECX))
3637 		x86_pop_reg (s->code, X86_ECX);
3639 	mono_assert (tree->reg1 == X86_EAX &&
3640 		     tree->reg2 == X86_EDX);
/* 64-bit division/remainder rules: all four (signed/unsigned div/rem) are
 * emitted as cdecl calls to the matching mono_ll* runtime helper, with the
 * two 64-bit operands pushed as four 32-bit words (16 bytes popped after).
 * Result comes back in EDX:EAX; ECX is saved around the call when live. */
3643 lreg: DIV (lreg, lreg) {
3644 	if (mono_regset_reg_used (s->rs, X86_ECX))
3645 		x86_push_reg (s->code, X86_ECX);
3647 	x86_push_reg (s->code, tree->right->reg2);
3648 	x86_push_reg (s->code, tree->right->reg1);
3649 	x86_push_reg (s->code, tree->left->reg2);
3650 	x86_push_reg (s->code, tree->left->reg1);
3651 	mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_lldiv);
3652 	x86_call_code (s->code, 0);
3653 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3655 	if (mono_regset_reg_used (s->rs, X86_ECX))
3656 		x86_pop_reg (s->code, X86_ECX);
3658 	mono_assert (tree->reg1 == X86_EAX &&
3659 		     tree->reg2 == X86_EDX);
3662 lreg: REM (lreg, lreg) {
3663 	if (mono_regset_reg_used (s->rs, X86_ECX))
3664 		x86_push_reg (s->code, X86_ECX);
3666 	x86_push_reg (s->code, tree->right->reg2);
3667 	x86_push_reg (s->code, tree->right->reg1);
3668 	x86_push_reg (s->code, tree->left->reg2);
3669 	x86_push_reg (s->code, tree->left->reg1);
3670 	mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llrem);
3671 	x86_call_code (s->code, 0);
3672 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3674 	if (mono_regset_reg_used (s->rs, X86_ECX))
3675 		x86_pop_reg (s->code, X86_ECX);
3677 	mono_assert (tree->reg1 == X86_EAX &&
3678 		     tree->reg2 == X86_EDX);
3681 lreg: DIV_UN (lreg, lreg) {
3682 	if (mono_regset_reg_used (s->rs, X86_ECX))
3683 		x86_push_reg (s->code, X86_ECX);
3685 	x86_push_reg (s->code, tree->right->reg2);
3686 	x86_push_reg (s->code, tree->right->reg1);
3687 	x86_push_reg (s->code, tree->left->reg2);
3688 	x86_push_reg (s->code, tree->left->reg1);
3689 	mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_lldiv_un);
3690 	x86_call_code (s->code, 0);
3691 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3693 	if (mono_regset_reg_used (s->rs, X86_ECX))
3694 		x86_pop_reg (s->code, X86_ECX);
3696 	mono_assert (tree->reg1 == X86_EAX &&
3697 		     tree->reg2 == X86_EDX);
3700 lreg: REM_UN (lreg, lreg) {
3701 	if (mono_regset_reg_used (s->rs, X86_ECX))
3702 		x86_push_reg (s->code, X86_ECX);
3704 	x86_push_reg (s->code, tree->right->reg2);
3705 	x86_push_reg (s->code, tree->right->reg1);
3706 	x86_push_reg (s->code, tree->left->reg2);
3707 	x86_push_reg (s->code, tree->left->reg1);
3708 	mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llrem_un);
3709 	x86_call_code (s->code, 0);
3710 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3712 	if (mono_regset_reg_used (s->rs, X86_ECX))
3713 		x86_pop_reg (s->code, X86_ECX);
3715 	mono_assert (tree->reg1 == X86_EAX &&
3716 		     tree->reg2 == X86_EDX);
/* Calls returning a 64-bit value (CALL_I8): four addressing variants —
 * indirect via register, direct via global address, virtual via vtable
 * slot, and interface via the interface-offsets table.  All results are
 * asserted to land in EDX:EAX.  Several intermediate lines of each rule
 * (cost annotations, `this`-register setup) are elided in this listing. */
3719 lreg: CALL_I8 (this, reg) {
3721 	int lreg = tree->left->reg1;
3722 	int rreg = tree->right->reg1;
/* NOTE(review): the three identical conditions below each guard an elided
 * line that rotates `treg` to a register not clobbered by lreg/rreg; the
 * final one asserts if no free register was found. */
3724 	if (lreg == treg || rreg == treg)
3726 	if (lreg == treg || rreg == treg)
3728 	if (lreg == treg || rreg == treg)
3729 		mono_assert_not_reached ();
3733 	x86_call_reg (s->code, rreg);
3737 	mono_assert (tree->reg1 == X86_EAX);
3738 	mono_assert (tree->reg2 == X86_EDX);
3741 lreg: CALL_I8 (this, ADDR_G) {
3742 	int lreg = tree->left->reg1;
3748 	if (X86_REMOTING_CHECK)
/* Direct call: target address patched in via MONO_JUMP_INFO_ABS. */
3755 	mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
3756 	x86_call_code (s->code, 0);
3761 	mono_assert (tree->reg1 == X86_EAX);
3762 	mono_assert (tree->reg2 == X86_EDX);
3765 lreg: CALL_I8 (this, VFUNC_ADDR) {
3766 	int lreg = tree->left->reg1;
/* Virtual call: load the vtable pointer from the object, then call through
 * the method's slot in MonoVTable.vtable. */
3774 	x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3775 	x86_call_virtual (s->code, lreg,
3776 		G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
3780 	mono_assert (tree->reg1 == X86_EAX);
3781 	mono_assert (tree->reg2 == X86_EDX);
3784 lreg: CALL_I8 (this, INTF_ADDR) {
3785 	int lreg = tree->left->reg1;
/* Interface call: object -> vtable -> interface_offsets -> per-interface
 * table indexed by interface_id, then call the method slot in it. */
3793 	x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3794 	x86_mov_reg_membase (s->code, lreg, lreg,
3795 		G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
3796 	x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
3797 	x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
3801 	mono_assert (tree->reg1 == X86_EAX);
3802 	mono_assert (tree->reg2 == X86_EDX);
/* Fragment of the 64-bit return rule (header elided): shuffle the result
 * pair into EDX:EAX without clobbering either half (ECX as scratch when
 * the halves would collide), then jump to the shared epilog unless this is
 * the function's last instruction. */
3806 	if (tree->left->reg1 != X86_EAX) {
3807 		if (tree->left->reg2 != X86_EAX) {
3808 			x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
3809 			if (tree->left->reg2 != X86_EDX)
3810 				x86_mov_reg_reg (s->code, X86_EDX, tree->left->reg2, 4);
/* left->reg2 == EAX: save it in ECX before EAX is overwritten. */
3812 			x86_mov_reg_reg (s->code, X86_ECX, tree->left->reg2, 4);
3813 			x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
3814 			x86_mov_reg_reg (s->code, X86_EDX, X86_ECX, 4);
3816 	} else if (tree->left->reg2 != X86_EDX) {
3817 		x86_mov_reg_reg (s->code, X86_EDX, tree->left->reg2, 4);
3820 	if (!tree->last_instr) {
3821 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
3822 		x86_jump32 (s->code, 0);
/* Push a 64-bit argument: high word first, then low word, so the value sits
 * little-endian on the stack (pad handling lines elided in this listing). */
3827 stmt: ARG_I8 (lreg) {
3828 	int pad = tree->data.arg_info.pad;
3831 	x86_push_reg (s->code, tree->left->reg2);
3832 	x86_push_reg (s->code, tree->left->reg1);
/* Materialize a 64-bit comparison result as 0/1 in a 32-bit register.
 * CEQ compares both halves directly; the ordered comparisons compare the
 * high words first (signed or unsigned branch per opcode) and fall back to
 * an unsigned low-word compare only when the high words are equal. */
3835 reg: CSET (COMPARE (lreg, lreg)) {
3837 	int lreg1, lreg2, rreg1, rreg2;
3839 	lreg1 = tree->left->left->reg1;
3840 	lreg2 = tree->left->left->reg2;
3841 	rreg1 = tree->left->right->reg1;
3842 	rreg2 = tree->left->right->reg2;
3845 	if (tree->data.i == CEE_CEQ) {
/* Equal iff low words equal AND high words equal: if the lows differ, the
 * skipped high-word CMP leaves ZF clear from the first CMP. */
3846 		x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3847 		br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3848 		x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3849 		x86_patch (br [0], s->code);
3850 		x86_set_reg (s->code, X86_CC_EQ, tree->reg1, FALSE);
3851 		x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
3854 	switch (tree->data.i) {
/* The four ordered cases differ only in operand order (lreg vs rreg) and
 * whether the high-word branches are signed (TRUE) or unsigned (FALSE).
 * br[0] -> result 0, br[1] -> result 1, br[2] -> result 0. */
3857 		x86_alu_reg_reg (s->code, X86_CMP, rreg2, lreg2);
3858 		br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, TRUE);
3859 		br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3860 		x86_alu_reg_reg (s->code, X86_CMP, rreg1, lreg1);
3861 		br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
3864 		x86_alu_reg_reg (s->code, X86_CMP, rreg2, lreg2);
3865 		br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, FALSE);
3866 		br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3867 		x86_alu_reg_reg (s->code, X86_CMP, rreg1, lreg1);
3868 		br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
3871 		x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3872 		br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, TRUE);
3873 		br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3874 		x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3875 		br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
3878 		x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3879 		br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, FALSE);
3880 		br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3881 		x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3882 		br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
3885 		g_assert_not_reached ();
3888 	/* set result to 1 */
3889 	x86_patch (br [1], s->code);
3890 	x86_mov_reg_imm (s->code, tree->reg1, 1);
3891 	br [3] = s->code; x86_jump8 (s->code, 0);
3893 	/* set result to 0 */
3894 	x86_patch (br [0], s->code);
3895 	x86_patch (br [2], s->code);
3896 	x86_mov_reg_imm (s->code, tree->reg1, 0);
3898 	x86_patch (br [3], s->code);
/* Conditional branch on a 64-bit comparison.  Pattern for the ordered
 * conditions: compare the high words and branch to the target (signed or
 * unsigned per condition), short-branch past on inequality, then compare
 * the low words unsigned and branch to the target.  BEQ/BNE compare both
 * halves for (in)equality.  (The case labels are elided in this listing;
 * the arms follow the order of MonoJitBranchInfo conditions.) */
3901 stmt: CBRANCH (COMPARE (lreg, lreg)) {
3903 	int lreg1, lreg2, rreg1, rreg2;
3905 	lreg1 = tree->left->left->reg1;
3906 	lreg2 = tree->left->left->reg2;
3907 	rreg1 = tree->left->right->reg1;
3908 	rreg2 = tree->left->right->reg2;
3910 	switch (tree->data.bi.cond) {
/* signed < */
3912 		x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3913 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3914 		x86_branch32 (s->code, X86_CC_LT, 0, TRUE);
3915 		br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3916 		x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3917 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3918 		x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3919 		x86_patch (br [0], s->code);
/* unsigned < */
3922 		x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3923 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3924 		x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3925 		br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3926 		x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3927 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3928 		x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3929 		x86_patch (br [0], s->code);
/* signed > */
3932 		x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3933 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3934 		x86_branch32 (s->code, X86_CC_GT, 0, TRUE);
3935 		br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3936 		x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3937 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3938 		x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3939 		x86_patch (br [0], s->code);
/* unsigned > */
3942 		x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3943 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3944 		x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3945 		br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3946 		x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3947 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3948 		x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3949 		x86_patch (br [0], s->code);
/* equal: branch only when both halves match */
3952 		x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3953 		br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3954 		x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3955 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3956 		x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
3957 		x86_patch (br [0], s->code);
/* not equal: branch if either half differs */
3960 		x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3961 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3962 		x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
3963 		x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3964 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3965 		x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
/* signed >= */
3968 		x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3969 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3970 		x86_branch32 (s->code, X86_CC_GT, 0, TRUE);
/* NOTE(review): this second CMP of the same operands is redundant — the
 * flags from the CMP above are unchanged by the taken-or-not branch.
 * Harmless, but the other arms omit it. */
3971 		x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3972 		br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3973 		x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3974 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3975 		x86_branch32 (s->code, X86_CC_GE, 0, FALSE);
3976 		x86_patch (br [0], s->code);
/* unsigned >= */
3979 		x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3980 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3981 		x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3982 		br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3983 		x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3984 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3985 		x86_branch32 (s->code, X86_CC_GE, 0, FALSE);
3986 		x86_patch (br [0], s->code);
/* signed <= */
3989 		x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3990 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3991 		x86_branch32 (s->code, X86_CC_LT, 0, TRUE);
3992 		br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3993 		x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3994 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3995 		x86_branch32 (s->code, X86_CC_LE, 0, FALSE);
3996 		x86_patch (br [0], s->code);
/* unsigned <= */
3999 		x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
4000 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4001 		x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
4002 		br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
4003 		x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
4004 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4005 		x86_branch32 (s->code, X86_CC_LE, 0, FALSE);
4006 		x86_patch (br [0], s->code);
4009 		g_assert_not_reached ();
4016 #stmt: STLOC (CONV_I4 (freg)) {
4018 # x86_fist_pop_membase (s->code, X86_EBP, tree->data.i, FALSE);
/* Float -> small-integer conversions.  Fast path uses the helper in
 * mono_emit_fast_iconv; the slow path temporarily forces the x87 rounding
 * mode to truncate (OR 0xc00 into the FPU control word = round-toward-zero),
 * does FISTP into a stack slot, pops the result, widens to the target size,
 * then restores the original control word.  The PUSH EAX lines merely make
 * room on the stack (value is overwritten by FISTP). */
4021 reg: CONV_I1 (freg) {
4022 	if (mono_use_fast_iconv) {
4023 		mono_emit_fast_iconv(s, tree);
4024 		x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, FALSE);
4026 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4027 	x86_fnstcw_membase(s->code, X86_ESP, 0);
4028 	x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4029 	x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4030 	x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4031 	x86_fldcw_membase (s->code, X86_ESP, 2);
4032 	x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4033 	x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4034 	x86_pop_reg (s->code, tree->reg1);
4035 	x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, FALSE);
4036 	x86_fldcw_membase (s->code, X86_ESP, 0);
4037 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* CONV_U1: identical except the widen is zero-extend (is_signed = FALSE). */
4041 reg: CONV_U1 (freg) {
4042 	if (mono_use_fast_iconv) {
4043 		mono_emit_fast_iconv(s, tree);
4044 		x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
4046 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4047 	x86_fnstcw_membase(s->code, X86_ESP, 0);
4048 	x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4049 	x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4050 	x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4051 	x86_fldcw_membase (s->code, X86_ESP, 2);
4052 	x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4053 	x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4054 	x86_pop_reg (s->code, tree->reg1);
4055 	x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
4056 	x86_fldcw_membase (s->code, X86_ESP, 0);
4057 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* CONV_I2: sign-extend from 16 bits (is_half = TRUE). */
4061 reg: CONV_I2 (freg) {
4062 	if (mono_use_fast_iconv) {
4063 		mono_emit_fast_iconv(s, tree);
4064 		x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, TRUE);
4066 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4067 	x86_fnstcw_membase(s->code, X86_ESP, 0);
4068 	x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4069 	x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4070 	x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4071 	x86_fldcw_membase (s->code, X86_ESP, 2);
4072 	x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4073 	x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4074 	x86_pop_reg (s->code, tree->reg1);
4075 	x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, TRUE);
4076 	x86_fldcw_membase (s->code, X86_ESP, 0);
4077 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* CONV_U2: zero-extend from 16 bits. */
4081 reg: CONV_U2 (freg) {
4082 	if (mono_use_fast_iconv) {
4083 		mono_emit_fast_iconv(s, tree);
4084 		x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, TRUE);
4086 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4087 	x86_fnstcw_membase(s->code, X86_ESP, 0);
4088 	x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4089 	x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4090 	x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4091 	x86_fldcw_membase (s->code, X86_ESP, 2);
4092 	x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4093 	x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4094 	x86_pop_reg (s->code, tree->reg1);
4095 	x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, TRUE);
4096 	x86_fldcw_membase (s->code, X86_ESP, 0);
4097 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* CONV_I4 / CONV_U4: full 32-bit result, so no widen step needed. */
4101 reg: CONV_I4 (freg) {
4102 	if (mono_use_fast_iconv) {
4103 		mono_emit_fast_iconv(s, tree);
4105 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4106 	x86_fnstcw_membase(s->code, X86_ESP, 0);
4107 	x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4108 	x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4109 	x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4110 	x86_fldcw_membase (s->code, X86_ESP, 2);
4111 	x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4112 	x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4113 	x86_pop_reg (s->code, tree->reg1);
4114 	x86_fldcw_membase (s->code, X86_ESP, 0);
4115 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4119 reg: CONV_U4 (freg) {
4120 	if (mono_use_fast_iconv) {
4121 		mono_emit_fast_iconv(s, tree);
4123 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4124 	x86_fnstcw_membase(s->code, X86_ESP, 0);
4125 	x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4126 	x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4127 	x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4128 	x86_fldcw_membase (s->code, X86_ESP, 2);
4129 	x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4130 	x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4131 	x86_pop_reg (s->code, tree->reg1);
4132 	x86_fldcw_membase (s->code, X86_ESP, 0);
4133 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* Float -> 64-bit integer conversions: same truncating-rounding-mode dance
 * as the 32-bit cases, but FISTP writes a 64-bit value (is_long = TRUE)
 * into an 8-byte stack slot which is popped into the reg1/reg2 pair. */
4137 lreg: CONV_I8 (freg) {
4138 	if (mono_use_fast_iconv) {
4139 		mono_emit_fast_iconv_i8(s, tree);
4141 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4142 	x86_fnstcw_membase(s->code, X86_ESP, 0);
4143 	x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4144 	x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4145 	x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4146 	x86_fldcw_membase (s->code, X86_ESP, 2);
4147 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
4148 	x86_fist_pop_membase (s->code, X86_ESP, 0, TRUE);
4149 	x86_pop_reg (s->code, tree->reg1);
4150 	x86_pop_reg (s->code, tree->reg2);
4151 	x86_fldcw_membase (s->code, X86_ESP, 0);
4152 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* CONV_U8 emits the identical sequence — FISTP only produces a signed
 * 64-bit value; unsigned semantics rely on two's-complement bit pattern. */
4156 lreg: CONV_U8 (freg) {
4157 	if (mono_use_fast_iconv) {
4158 		mono_emit_fast_iconv_i8(s, tree);
4160 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4161 	x86_fnstcw_membase(s->code, X86_ESP, 0);
4162 	x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4163 	x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4164 	x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4165 	x86_fldcw_membase (s->code, X86_ESP, 2);
4166 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
4167 	x86_fist_pop_membase (s->code, X86_ESP, 0, TRUE);
4168 	x86_pop_reg (s->code, tree->reg1);
4169 	x86_pop_reg (s->code, tree->reg2);
4170 	x86_fldcw_membase (s->code, X86_ESP, 0);
4171 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* Materialize a float comparison as 0/1.  FCOMPP compares ST(0)/ST(1) and
 * pops both; FNSTSW dumps the FPU status word into AX (hence EAX is saved
 * if the result register isn't EAX).  Masking with 0x4500 isolates the
 * C0/C2/C3 condition bits; the per-opcode compares against 0x4000/0x0100
 * decode equal / less-than out of them (case labels elided in listing). */
4175 reg: CSET (COMPARE (freg, freg)) {
4176 	int treg = tree->reg1;
4178 	if (treg != X86_EAX)
4179 		x86_push_reg (s->code, X86_EAX);
4181 	x86_fcompp (s->code);
4182 	x86_fnstsw (s->code);
4183 	x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
4185 	switch (tree->data.i) {
4187 		x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
4188 		x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
4189 		x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
4192 		x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4193 		x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
4194 		x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
4197 		x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4198 		x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
4199 		x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
4202 		x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
4203 		x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
/* NOTE(review): tree->reg1 here is the same register as treg (assigned at
 * 4176) — inconsistent naming only, not a behavior difference. */
4206 		x86_set_reg (s->code, X86_CC_EQ, tree->reg1, TRUE);
4207 		x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
4210 		g_assert_not_reached ();
4213 	if (treg != X86_EAX)
4214 		x86_pop_reg (s->code, X86_EAX);
/* Conversions to floating point.  x87 keeps everything in 80-bit extended
 * precision on the stack, so R4<->R8 conversions are no-ops here; integer
 * sources are pushed to memory and loaded with FILD. */
4217 freg: CONV_R8 (freg) {
4221 freg: CONV_R4 (freg) {
4222 	/* fixme: nothing to do ??*/
4225 freg: CONV_R8 (LDIND_I4 (ADDR_G)) {
/* Load a 32-bit int straight from a global address onto the FP stack. */
4226 	x86_fild (s->code, tree->left->left->data.p, FALSE);
4229 freg: CONV_R4 (reg) {
4230 	x86_push_reg (s->code, tree->left->reg1);
4231 	x86_fild_membase (s->code, X86_ESP, 0, FALSE);
4232 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4235 freg: CONV_R8 (reg) {
4236 	x86_push_reg (s->code, tree->left->reg1);
4237 	x86_fild_membase (s->code, X86_ESP, 0, FALSE);
4238 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4241 freg: CONV_R_UN (reg) {
/* Unsigned 32-bit -> float: zero-extend to 64 bits on the stack (push 0
 * as the high word) and FILD as a signed 64-bit value, which is exact. */
4242 	x86_push_imm (s->code, 0);
4243 	x86_push_reg (s->code, tree->left->reg1);
4244 	x86_fild_membase (s->code, X86_ESP, 0, TRUE);
4245 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
4248 freg: CONV_R_UN (lreg) {
/* Unsigned 64-bit -> float: FILD treats the value as signed, so if the
 * top bit is set the result is corrected by adding 2^64, stored here as
 * the 80-bit extended constant mn (0x403f8000000000000000 = 2^64). */
4249 	static guint8 mn[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x3f, 0x40 };
4252 	/* load 64bit integer to FP stack */
4253 	x86_push_imm (s->code, 0);
4254 	x86_push_reg (s->code, tree->left->reg2);
4255 	x86_push_reg (s->code, tree->left->reg1);
4256 	x86_fild_membase (s->code, X86_ESP, 0, TRUE);
4257 	/* store as 80bit FP value */
4258 	x86_fst80_membase (s->code, X86_ESP, 0);
4260 	/* test if lreg is negative */
4261 	x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
4262 	br [0] = s->code; x86_branch8 (s->code, X86_CC_GEZ, 0, TRUE);
4264 	/* add correction constant mn */
4265 	x86_fld80_mem (s->code, mn);
4266 	x86_fld80_membase (s->code, X86_ESP, 0);
4267 	x86_fp_op_reg (s->code, X86_FADD, 1, TRUE);
4268 	x86_fst80_membase (s->code, X86_ESP, 0);
4269 	//x86_breakpoint (s->code);
4270 	x86_patch (br [0], s->code);
4272 	x86_fld80_membase (s->code, X86_ESP, 0);
4273 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
4276 freg: CONV_R4 (lreg) {
/* Signed 64-bit -> float: push both halves and FILD as a 64-bit int. */
4277 	x86_push_reg (s->code, tree->left->reg2);
4278 	x86_push_reg (s->code, tree->left->reg1);
4279 	x86_fild_membase (s->code, X86_ESP, 0, TRUE);
4280 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
4283 freg: CONV_R8 (lreg) {
4284 	x86_push_reg (s->code, tree->left->reg2);
4285 	x86_push_reg (s->code, tree->left->reg1);
4286 	x86_fild_membase (s->code, X86_ESP, 0, TRUE);
4287 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
/* Fragments of the R4/R8 constant rules (headers and special-casing of
 * 0.0/1.0 elided in this listing): load the literal from its data pointer. */
4291 	float f = *(float *)tree->data.p;
4298 	x86_fld (s->code, tree->data.p, FALSE);
4302 	double d = *(double *)tree->data.p;
4309 	x86_fld (s->code, tree->data.p, TRUE);
/* Load a float (R4/R8) through a computed address, dispatching on the
 * addressing mode: immediate address, base+offset, scaled index (LEA the
 * effective address into the index register first), or base+scaled index.
 * The FALSE/TRUE last argument selects 32-bit vs 64-bit load. */
4312 freg: LDIND_R4 (addr) {
4314 	switch (tree->left->data.ainfo.amode) {
4317 		x86_fld (s->code, tree->left->data.ainfo.offset, FALSE);
4321 		x86_fld_membase (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, FALSE);
4324 		x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, X86_NOBASEREG,
4325 				  tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4326 				  tree->left->data.ainfo.shift);
4327 		x86_fld_membase (s->code, tree->left->data.ainfo.indexreg, 0, FALSE);
4330 		x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.basereg,
4331 				  tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4332 				  tree->left->data.ainfo.shift);
4333 		x86_fld_membase (s->code, tree->left->data.ainfo.indexreg, 0, FALSE);
4338 freg: LDIND_R8 (addr) {
4340 	switch (tree->left->data.ainfo.amode) {
4343 		x86_fld (s->code, tree->left->data.ainfo.offset, TRUE);
4347 		x86_fld_membase (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, TRUE);
4350 		x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, X86_NOBASEREG,
4351 				  tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4352 				  tree->left->data.ainfo.shift);
4353 		x86_fld_membase (s->code, tree->left->data.ainfo.indexreg, 0, TRUE);
4356 		x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.basereg,
4357 				  tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4358 				  tree->left->data.ainfo.shift);
4359 		x86_fld_membase (s->code, tree->left->data.ainfo.indexreg, 0, TRUE);
4365 freg: ADD (freg, freg) {
4366 x86_fp_op_reg (s->code, X86_FADD, 1, TRUE);
4369 freg: SUB (freg, freg) {
4370 x86_fp_op_reg (s->code, X86_FSUB, 1, TRUE);
4373 freg: MUL (freg, freg) {
4374 x86_fp_op_reg (s->code, X86_FMUL, 1, TRUE);
4377 freg: DIV (freg, freg) {
4378 x86_fp_op_reg (s->code, X86_FDIV, 1, TRUE);
4381 freg: CKFINITE (freg) {
4382 x86_push_reg (s->code, X86_EAX);
4384 x86_fnstsw (s->code);
4385 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4100);
4386 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4387 x86_pop_reg (s->code, X86_EAX);
4388 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NE, FALSE, "ArithmeticException");
/*
 * freg: REM — floating-point remainder of the two values on the FP stack.
 * fprem computes a truncating (fmod-style) remainder, which matches CIL
 * 'rem' semantics; it may return only a *partial* remainder, signalled by
 * the C2 bit (0x0400) of the FPU status word, hence the retry loop.
 * NOTE(review): the comment below mentions fprem1 but the code emits
 * fprem — the code, not the comment, matches CIL semantics.
 * NOTE(review): the l1/l2 labels and the EAX save around fnstsw are on
 * elided lines in this listing.
 */
4391 freg: REM (freg, freg) {
4394 /* we need to exchange ST(0) with ST(1) */
4395 x86_fxch (s->code, 1);
4397 /* this requires a loop, because fprem1 sometimes
4398 * returns a partial remainder */
4400 x86_fprem (s->code);
4401 x86_fnstsw (s->code);
/* loop while C2 (0x0400) is set: reduction incomplete */
4402 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x0400);
4404 x86_branch8 (s->code, X86_CC_NE, l1 - l2, FALSE);
/* pop the divisor, leaving the remainder in ST(0) */
4407 x86_fstp (s->code, 1);
4415 x86_fstp (s->code, 0);
/*
 * stmt: STIND_R4 (addr, freg) — pop ST(0) and store it as a 32-bit float
 * (FALSE = single precision, TRUE = pop) through the computed address,
 * dispatching on the addressing mode as in the LDIND rules.
 * NOTE(review): case labels, a continuation line of the AMBase store, and
 * closing braces are elided in this listing.
 */
4418 stmt: STIND_R4 (addr, freg) {
4420 switch (tree->left->data.ainfo.amode) {
/* AMImmediate */
4423 x86_fst (s->code, tree->left->data.ainfo.offset, FALSE, TRUE);
/* AMBase */
4427 x86_fst_membase (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
/* AMIndex: materialize the address in indexreg first */
4431 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, X86_NOBASEREG,
4432 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4433 tree->left->data.ainfo.shift);
4434 x86_fst_membase (s->code, tree->left->data.ainfo.indexreg, 0, FALSE, TRUE);
/* AMBaseIndex */
4437 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.basereg,
4438 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4439 tree->left->data.ainfo.shift);
4440 x86_fst_membase (s->code, tree->left->data.ainfo.indexreg, 0, FALSE, TRUE);
/*
 * stmt: STIND_R8 (addr, freg) — pop ST(0) and store it as a 64-bit double
 * (TRUE = double precision, TRUE = pop) through the computed address;
 * mirrors STIND_R4 above with double width.
 * NOTE(review): case labels, a continuation line of the AMBase store, and
 * closing braces are elided in this listing.
 */
4445 stmt: STIND_R8 (addr, freg) {
4447 switch (tree->left->data.ainfo.amode) {
/* AMImmediate */
4450 x86_fst (s->code, tree->left->data.ainfo.offset, TRUE, TRUE);
/* AMBase */
4454 x86_fst_membase (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
/* AMIndex */
4458 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, X86_NOBASEREG,
4459 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4460 tree->left->data.ainfo.shift);
4461 x86_fst_membase (s->code, tree->left->data.ainfo.indexreg, 0, TRUE, TRUE);
/* AMBaseIndex */
4464 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.basereg,
4465 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4466 tree->left->data.ainfo.shift);
4467 x86_fst_membase (s->code, tree->left->data.ainfo.indexreg, 0, TRUE, TRUE);
/*
 * stmt: REMOTE_STIND_R4 (reg, freg) — store a 32-bit float into an object
 * field, routing through the remoting machinery when the target object is
 * a transparent proxy. lreg holds the object reference; the value is in
 * ST(0).
 * NOTE(review): the declarations of 'treg', 'br' and 'offset' are on
 * elided lines in this listing.
 */
4472 stmt: REMOTE_STIND_R4 (reg, freg) {
4475 int lreg = tree->left->reg1;
/* is the object's class the transparent-proxy class? (vtable ptr at [lreg+0]) */
4481 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
4482 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
4483 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
4485 /* this is a transparent proxy - remote the call */
4487 /* save value to stack */
4488 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4489 x86_fst_membase (s->code, X86_ESP, 0, FALSE, TRUE);
/* call mono_store_remote_field (obj, klass, field, &value); cdecl, 20 bytes of args */
4491 x86_push_reg (s->code, X86_ESP);
4492 x86_push_imm (s->code, tree->data.fi.field);
4493 x86_push_imm (s->code, tree->data.fi.klass);
4494 x86_push_reg (s->code, lreg);
4495 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
4496 x86_call_code (s->code, 0);
4497 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
4499 br [1] = s->code; x86_jump8 (s->code, 0);
/* not a proxy: store directly at the field offset (adjusted for boxed valuetypes) */
4501 x86_patch (br [0], s->code);
4502 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
4503 tree->data.fi.field->offset;
4504 x86_fst_membase (s->code, lreg, offset, FALSE, TRUE);
4506 x86_patch (br [1], s->code);
/*
 * stmt: REMOTE_STIND_R8 (reg, freg) — double-precision counterpart of
 * REMOTE_STIND_R4: 8 bytes reserved for the value, 24 bytes of call
 * arguments cleaned up after the remoted store.
 * NOTE(review): the declarations of 'treg', 'br' and 'offset' are on
 * elided lines in this listing.
 */
4509 stmt: REMOTE_STIND_R8 (reg, freg) {
4512 int lreg = tree->left->reg1;
/* transparent-proxy check via the vtable's class pointer */
4518 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
4519 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
4520 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
4522 /* this is a transparent proxy - remote the call */
4524 /* save value to stack */
4525 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
4526 x86_fst_membase (s->code, X86_ESP, 0, TRUE, TRUE);
/* call mono_store_remote_field (obj, klass, field, &value) */
4528 x86_push_reg (s->code, X86_ESP);
4529 x86_push_imm (s->code, tree->data.fi.field);
4530 x86_push_imm (s->code, tree->data.fi.klass);
4531 x86_push_reg (s->code, lreg);
4532 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
4533 x86_call_code (s->code, 0);
4534 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 24);
4536 br [1] = s->code; x86_jump8 (s->code, 0);
/* not a proxy: store directly (offset adjusted for boxed valuetypes) */
4538 x86_patch (br [0], s->code);
4539 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
4540 tree->data.fi.field->offset;
4541 x86_fst_membase (s->code, lreg, offset, TRUE, TRUE);
4543 x86_patch (br [1], s->code);
/* stmt: ARG_R4 — pass a 32-bit float call argument: reserve 4 bytes plus
 * any alignment padding on the stack and pop ST(0) into the slot. */
4546 stmt: ARG_R4 (freg) {
4547 int pad = tree->data.arg_info.pad;
4549 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4 + pad);
4550 x86_fst_membase (s->code, X86_ESP, 0, FALSE, TRUE);
/* stmt: ARG_R8 — pass a 64-bit float call argument: reserve 8 bytes plus
 * any alignment padding on the stack and pop ST(0) into the slot. */
4553 stmt: ARG_R8 (freg) {
4554 int pad = tree->data.arg_info.pad;
4556 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8 + pad);
4557 x86_fst_membase (s->code, X86_ESP, 0, TRUE, TRUE);
4560 # fixme: we need to implement unordered and ordered compares
/*
 * stmt: CBRANCH (COMPARE (freg, freg)) — conditional branch on a
 * floating-point comparison. fcompp compares and pops both operands;
 * fnstsw copies the FPU status word into AX; masking with 0x4500 keeps
 * the C3/C2/C0 condition bits, which encode less/greater/equal/unordered.
 * Each case then tests the pattern for its CIL condition and emits a
 * 32-bit branch to the target basic block.
 * NOTE(review): the 'case CEE_B*' labels and break statements are elided
 * in this listing, so the condition-to-pattern pairing below (0x0100 = C0,
 * 0x4000 = C3 per the x87 status-word layout) should be confirmed against
 * the full file.
 */
4562 stmt: CBRANCH (COMPARE (freg, freg)) {
4564 x86_fcompp (s->code);
4565 x86_fnstsw (s->code);
4566 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
4568 switch (tree->data.bi.cond) {
/* all masked bits zero / equal patterns branch when ZF is set */
4570 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4571 x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
4574 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4575 x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
/* compare the masked flags against the C0 pattern (0x0100) */
4578 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4579 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4580 x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
4583 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4584 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4585 x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
/* compare against the C3 pattern (0x4000) */
4588 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
4589 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4590 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
4593 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
4594 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4595 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
/* not-equal style patterns branch when ZF is clear */
4598 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4599 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
4602 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4603 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
4606 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4607 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4608 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
4611 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4612 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4613 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
/* unknown condition: hard failure */
4616 g_assert_not_reached ();
/*
 * freg: CALL_R8 (this, reg) — indirect call through a register, returning
 * a double (left on the FP stack by the callee per the x86 ABI).
 * The repeated (lreg == treg || rreg == treg) tests pick a scratch
 * register distinct from both operand registers.
 * NOTE(review): the treg candidate assignments between the tests, the
 * remoting check, and the 'this' setup are on elided lines in this
 * listing; mono_assert_not_reached fires only if no free register exists.
 */
4620 freg: CALL_R8 (this, reg) {
4622 int lreg = tree->left->reg1;
4623 int rreg = tree->right->reg1;
4625 if (lreg == treg || rreg == treg)
4627 if (lreg == treg || rreg == treg)
4629 if (lreg == treg || rreg == treg)
4630 mono_assert_not_reached ();
4634 x86_call_reg (s->code, rreg);
/*
 * freg: CALL_R8 (this, ADDR_G) — call a known global address, returning a
 * double. The call target is recorded as an absolute-address jump-info
 * entry and patched in later; X86_REMOTING_CHECK guards transparent-proxy
 * dispatch (its body is on elided lines in this listing).
 */
4639 freg: CALL_R8 (this, ADDR_G) {
4640 int lreg = tree->left->reg1;
4646 if (X86_REMOTING_CHECK)
4653 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
4654 x86_call_code (s->code, 0);
/*
 * freg: CALL_R8 (this, INTF_ADDR) — interface call returning a double:
 * load the vtable from the object header, fetch its interface_offsets
 * table, index it by the interface id (scaled by 4), then call through
 * the method's slot in the resulting interface vtable chunk.
 */
4660 freg: CALL_R8 (this, INTF_ADDR) {
4661 int lreg = tree->left->reg1;
4669 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
4670 x86_mov_reg_membase (s->code, lreg, lreg,
4671 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
4672 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
4673 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
/*
 * freg: CALL_R8 (this, VFUNC_ADDR) — virtual call returning a double:
 * load the vtable pointer from the object header, then call indirectly
 * through the method's slot within MonoVTable.vtable.
 */
4678 freg: CALL_R8 (this, VFUNC_ADDR) {
4679 int lreg = tree->left->reg1;
4687 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
4688 x86_call_virtual (s->code, lreg,
4689 G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
4695 if (!tree->last_instr) {
4696 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
4697 x86_jump32 (s->code, 0);
4710 x86_fsqrt (s->code);
4713 # support for value types
/* reg: LDIND_OBJ — a value-type "load" yields the address of the value,
 * so this is just a register move when source and destination differ. */
4715 reg: LDIND_OBJ (reg) {
4716 if (tree->left->reg1 != tree->reg1)
4717 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/*
 * stmt: STIND_OBJ (reg, reg) — copy a value type of tree->data.i bytes
 * from the address in the right register to the address in the left one
 * by calling MEMCOPY(dest, src, size); cdecl, so the 12 bytes of
 * arguments are popped by the caller afterwards.
 */
4720 stmt: STIND_OBJ (reg, reg) {
4721 mono_assert (tree->data.i > 0);
4723 x86_push_imm (s->code, tree->data.i);
4724 x86_push_reg (s->code, tree->right->reg1);
4725 x86_push_reg (s->code, tree->left->reg1);
4726 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, MEMCOPY);
4727 x86_call_code (s->code, 0);
4728 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
/*
 * stmt: REMOTE_STIND_OBJ (reg, reg) — store a value type into an object
 * field: remote the store through mono_store_remote_field when the target
 * is a transparent proxy, otherwise memcpy the value into the field.
 * lreg = object reference, rreg = address of the value.
 * NOTE(review): the declarations of 'treg', 'br', 'offset' and 'size' are
 * on elided lines in this listing.
 */
4731 stmt: REMOTE_STIND_OBJ (reg, reg) {
4734 int lreg = tree->left->reg1;
4735 int rreg = tree->right->reg1;
/* transparent-proxy check via the vtable's class pointer */
4744 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
4745 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
4746 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
4748 /* this is a transparent proxy - remote the call */
/* mono_store_remote_field (obj, klass, field, value_ptr); 16 bytes of args */
4750 x86_push_reg (s->code, rreg);
4751 x86_push_imm (s->code, tree->data.fi.field);
4752 x86_push_imm (s->code, tree->data.fi.klass);
4753 x86_push_reg (s->code, lreg);
4754 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
4755 x86_call_code (s->code, 0);
4756 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
4758 br [1] = s->code; x86_jump8 (s->code, 0);
/* not a proxy: memcpy the value straight into the field */
4760 x86_patch (br [0], s->code);
4762 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
4763 tree->data.fi.field->offset;
4765 size = mono_class_value_size (tree->data.fi.field->type->data.klass, NULL);
4766 x86_push_imm (s->code, size);
4767 x86_push_reg (s->code, tree->right->reg1);
/* point lreg at the field itself before pushing it as the dest */
4768 x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, offset);
4769 x86_push_reg (s->code, tree->left->reg1);
4770 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, MEMCOPY);
4771 x86_call_code (s->code, 0);
4772 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
4774 x86_patch (br [1], s->code);
/* stmt: ARG_OBJ (CONST_I4) — pass a 4-byte value-type argument that is a
 * compile-time constant: just push the immediate.
 * NOTE(review): the padding handling implied by 'pad' is on elided lines
 * in this listing. */
4777 stmt: ARG_OBJ (CONST_I4) {
4778 int pad = tree->data.arg_info.pad;
4781 x86_push_imm (s->code, tree->left->data.i);
/*
 * stmt: ARG_OBJ (reg) — pass a value-type argument by copying it onto the
 * call stack: reserve the (aligned) argument space, then call
 * MEMCOPY(dest, src, size) with dest pointing at the reserved area.
 * NOTE(review): the computation of 'sa' (size + pad, presumably) is on
 * elided lines in this listing.
 */
4784 stmt: ARG_OBJ (reg) {
4785 int size = tree->data.arg_info.size;
4786 int pad = tree->data.arg_info.pad;
4794 /* reserve space for the argument */
4795 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, sa);
4797 x86_push_imm (s->code, size);
4798 x86_push_reg (s->code, tree->left->reg1);
/* dest = reserved area, which now sits above the two pushed args (2*4 bytes) */
4799 x86_lea_membase (s->code, X86_EAX, X86_ESP, 2*4);
4800 x86_push_reg (s->code, X86_EAX);
4802 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, MEMCOPY);
4803 x86_call_code (s->code, 0);
4804 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
/*
 * stmt: RET_OBJ (reg) — return a value type: copy 'size' bytes from the
 * address in reg1 to the hidden return buffer whose address was passed at
 * [EBP+8], then (unless this is the last instruction) jump to the shared
 * epilog via a jump-info entry.
 */
4807 stmt: RET_OBJ (reg) {
4808 int size = tree->data.i;
4810 x86_push_imm (s->code, size);
4811 x86_push_reg (s->code, tree->left->reg1);
4812 x86_push_membase (s->code, X86_EBP, 8);
4814 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, MEMCOPY);
4815 x86_call_code (s->code, 0);
4817 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
4819 if (!tree->last_instr) {
4820 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
4821 x86_jump32 (s->code, 0);
4830 mono_llmult (gint64 a, gint64 b)
/*
 * mono_llmult_ovf_un — unsigned 64x64 -> 64 multiply with overflow check,
 * operating on the 32-bit halves (al/ah * bl/bh). On overflow, stores an
 * OverflowException into *exc (the return value in that path is on elided
 * lines). The cross term ah*bl + al*bh must fit in 32 bits, since it is
 * shifted left by 32 into the result.
 * NOTE(review): the guard before the first goto (presumably 'if (ah && bh)',
 * i.e. the ah*bh term would always overflow) is on an elided line.
 */
4836 mono_llmult_ovf_un (gpointer *exc, guint32 al, guint32 ah, guint32 bl, guint32 bh)
4840 // fixme: this is incredible slow
4843 goto raise_exception;
4845 res = (guint64)al * (guint64)bl;
4847 t1 = (guint64)ah * (guint64)bl + (guint64)al * (guint64)bh;
4849 if (t1 > 0xffffffff)
4850 goto raise_exception;
4852 res += ((guint64)t1) << 32;
4858 *exc = mono_get_exception_overflow ();
/*
 * mono_llmult_ovf — signed 64x64 -> 64 multiply with overflow detection.
 * Works on 32-bit halves via the Karatsuba refactoring described below.
 * On overflow, stores an OverflowException into *exc.
 * NOTE(review): large parts of this function (sign handling, the final
 * result composition and return) are on elided lines in this listing.
 */
4864 mono_llmult_ovf (gpointer *exc, guint32 al, gint32 ah, guint32 bl, gint32 bh) {
4866 Use Karatsuba algorithm where:
4867 a*b is: AhBh(R^2+R)+(Ah-Al)(Bl-Bh)R+AlBl(R+1)
4868 where Ah is the "high half" (most significant 32 bits) of a and
4869 where Al is the "low half" (least significant 32 bits) of a and
4870 where Bh is the "high half" of b and Bl is the "low half" and
4871 where R is the Radix or "size of the half" (in our case 32 bits)
4873 Note, for the product of two 64 bit numbers to fit into a 64
4874 result, ah and/or bh must be 0. This will save us from doing
4875 the AhBh term at all.
4877 Also note that we refactor so that we don't overflow 64 bits with
4878 intermediate results. So we use [(Ah-Al)(Bl-Bh)+AlBl]R+AlBl
4884 /* need to work with absolute values, so find out what the
4885 resulting sign will be and convert any negative numbers
4886 from two's complement
4890 /* flip the bits and add 1 */
4901 /* flip the bits and add 1 */
4911 /* we overflow for sure if both upper halves are greater
4912 than zero because we would need to shift their
4913 product 64 bits to the left and that will not fit
4914 in a 64 bit result */
4916 goto raise_exception;
4918 /* do the AlBl term first */
4919 t1 = (gint64)al * (gint64)bl;
4923 /* now do the [(Ah-Al)(Bl-Bh)+AlBl]R term */
4924 t1 += (gint64)(ah - al) * (gint64)(bl - bh);
4926 /* check for overflow */
4927 if (t1 > (0x7FFFFFFFFFFFFFFF - res))
4928 goto raise_exception;
4939 *exc = mono_get_exception_overflow ();
4944 mono_lldiv (gint64 a, gint64 b)
4950 mono_llrem (gint64 a, gint64 b)
4956 mono_lldiv_un (guint64 a, guint64 b)
4962 mono_llrem_un (guint64 a, guint64 b)
/* JIT helper: allocate an array of n elements of eclass in the current
 * application domain. */
4968 mono_array_new_wrapper (MonoClass *eclass, guint32 n)
4970 MonoDomain *domain = mono_domain_get ();
4972 return mono_array_new (domain, eclass, n);
/* JIT helper: allocate a new object of the given class in the current
 * application domain. */
4976 mono_object_new_wrapper (MonoClass *klass)
4978 MonoDomain *domain = mono_domain_get ();
4980 return mono_object_new (domain, klass);
/* JIT helper: load the string literal with token index 'ind' from 'image'
 * into the current application domain (implements CIL ldstr). */
4984 mono_ldstr_wrapper (MonoImage *image, guint32 ind)
4986 MonoDomain *domain = mono_domain_get ();
4988 return mono_ldstr (domain, image, ind);
/*
 * mono_ldsflda — address of a static field: the class's vtable holds the
 * static data block, and the field lives at the given byte offset into it.
 * NOTE(review): the 'vt'/'addr' declarations and the return statement are
 * on elided lines in this listing.
 */
4992 mono_ldsflda (MonoClass *klass, int offset)
4994 MonoDomain *domain = mono_domain_get ();
4998 vt = mono_class_vtable (domain, klass);
4999 addr = (char*)(vt->data) + offset;
/*
 * debug_memcopy — memcpy wrapper that traces each copy (addresses, size,
 * and the first bytes of the source) before delegating to memcpy.
 * NOTE(review): "%d" does not match a size_t argument — should be "%zu"
 * (or cast n); also the bound 'l' of the byte dump is computed on an
 * elided line in this listing.
 */
5005 debug_memcopy (void *dest, const void *src, size_t n)
5009 printf ("MEMCPY(%p to %p [%d]) ", src, dest, n);
5011 for (i = 0; i < l; i++)
5012 printf ("%02x ", *((guint8 *)src + i));
5015 return memcpy (dest, src, n);
/*
 * mono_emit_fast_iconv — emit code converting the float in ST(0) to a
 * 32-bit integer with truncation semantics WITHOUT reloading the FPU
 * control word: fist rounds with the current (round-to-nearest) mode,
 * then the sign of the original value and the doubled rounding difference
 * drive an SBB-based correction toward zero.
 * Stack layout after the three stores: [esp+0]=float bits, [esp+4]=diff,
 * [esp+8]=rounded value; the pops below consume them in that order.
 * NOTE(review): the declaration of 'br' is on an elided line.
 */
5018 void mono_emit_fast_iconv (MBCGEN_TYPE* s, MBTREE_TYPE* tree)
5021 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 12);
5022 x86_fist_membase (s->code, X86_ESP, 8, TRUE); // rounded value
5023 x86_fst_membase (s->code, X86_ESP, 0, FALSE, FALSE); // float value
5024 x86_fp_int_op_membase (s->code, X86_FSUB, X86_ESP, 8, TRUE);
5025 x86_fst_membase (s->code, X86_ESP, 4, FALSE, TRUE); // diff
/* branch on the sign bit of the original float */
5027 x86_pop_reg (s->code, tree->reg1); // float value
5028 x86_test_reg_reg (s->code, tree->reg1, tree->reg1);
5029 br[0] = s->code; x86_branch8 (s->code, X86_CC_S, 0, TRUE);
/* positive: if diff > 0 (rounded up), subtract 1 via the carry from diff+diff */
5031 x86_pop_reg (s->code, tree->reg1); // diff
5032 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
5033 x86_pop_reg (s->code, tree->reg1); // rounded value
5034 x86_alu_reg_imm (s->code, X86_SBB, tree->reg1, 0);
5035 br[1] = s->code; x86_jump8 (s->code, 0);
/* negative: if diff < 0 (rounded down), add 1, but leave exact zero diffs alone */
5038 x86_patch (br[0], s->code);
5040 x86_pop_reg (s->code, tree->reg1); // diff
5041 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
5042 x86_pop_reg (s->code, tree->reg1); // rounded value
5043 br[2] = s->code; x86_branch8 (s->code, X86_CC_Z, 0, FALSE);
5044 x86_alu_reg_imm (s->code, X86_SBB, tree->reg1, -1);
5045 x86_patch (br[1], s->code);
5046 x86_patch (br[2], s->code);
/*
 * mono_emit_fast_iconv_i8 — 64-bit variant of mono_emit_fast_iconv:
 * converts ST(0) to a 64-bit integer (low half in tree->reg1, high half
 * in tree->reg2) with truncation semantics, again avoiding an FPU
 * control-word reload. The value is duplicated (fld_reg 0) so one copy
 * can be rounded to a qword while the difference to the rounded result is
 * computed from the other; the SBB correction then propagates through
 * both result halves.
 * NOTE(review): the declaration of 'br' is on an elided line.
 */
5049 void mono_emit_fast_iconv_i8 (MBCGEN_TYPE* s, MBTREE_TYPE* tree)
5052 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 16);
5053 x86_fld_reg (s->code, 0);
5054 x86_fist_pop_membase (s->code, X86_ESP, 8, TRUE); // rounded value (qword)
5055 x86_fst_membase (s->code, X86_ESP, 0, FALSE, FALSE); // float value
5056 x86_fild_membase (s->code, X86_ESP, 8, TRUE);
5057 x86_fp_op_reg (s->code, X86_FSUB, 1, TRUE); // diff
5058 x86_fst_membase (s->code, X86_ESP, 4, FALSE, TRUE); // diff
/* branch on the sign bit of the original float */
5060 x86_pop_reg (s->code, tree->reg1); // float value
5061 x86_test_reg_reg (s->code, tree->reg1, tree->reg1);
5062 br[0] = s->code; x86_branch8 (s->code, X86_CC_S, 0, TRUE);
/* positive: borrow from the doubled diff subtracts 1 with carry into the high half */
5064 x86_pop_reg (s->code, tree->reg1); // diff
5065 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
5066 x86_pop_reg (s->code, tree->reg1); // rounded value
5067 x86_pop_reg (s->code, tree->reg2);
5068 x86_alu_reg_imm (s->code, X86_SBB, tree->reg1, 0);
5069 x86_alu_reg_imm (s->code, X86_SBB, tree->reg2, 0);
5070 br[1] = s->code; x86_jump8 (s->code, 0);
/* negative: add 1 (with carry) unless the diff was exactly zero */
5073 x86_patch (br[0], s->code);
5075 x86_pop_reg (s->code, tree->reg1); // diff
5076 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
5077 x86_pop_reg (s->code, tree->reg1); // rounded value
5078 x86_pop_reg (s->code, tree->reg2);
5079 br[2] = s->code; x86_branch8 (s->code, X86_CC_Z, 0, FALSE);
5080 x86_alu_reg_imm (s->code, X86_SBB, tree->reg1, -1);
5081 x86_alu_reg_imm (s->code, X86_SBB, tree->reg2, -1);
5082 x86_patch (br[1], s->code);
5083 x86_patch (br[2], s->code);
/*
 * mono_emit_stack_alloc — emit code for a dynamically-sized localloc
 * (size in tree->left->reg1). On Win32 the allocation must touch every
 * page so the guard page can commit stack incrementally (see the comment
 * in mono_emit_stack_alloc_const); sizes within one page skip the loop.
 * On other platforms a single ESP subtraction suffices.
 * NOTE(review): the declarations of 'br' and 'sreg' are on elided lines
 * in this listing.
 */
5086 void mono_emit_stack_alloc (MBCGEN_TYPE* s, MBTREE_TYPE* tree)
5088 #ifdef PLATFORM_WIN32
5094 * If requested stack size is larger than one page,
5095 * perform stack-touch operation
5096 * (see comments in mono_emit_stack_alloc_const below).
5098 x86_test_reg_imm (s->code, tree->left->reg1, ~0xFFF);
5099 br[0] = s->code; x86_branch8 (s->code, X86_CC_Z, 0, FALSE);
5101 sreg = tree->left->reg1;
/* loop: commit one 0x1000-byte page at a time, touching it via the test */
5103 br[2] = s->code; /* loop */
5104 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 0x1000);
5105 x86_test_membase_reg (s->code, X86_ESP, 0, X86_ESP);
5106 x86_alu_reg_imm (s->code, X86_SUB, sreg, 0x1000);
5107 x86_alu_reg_imm (s->code, X86_CMP, sreg, 0x1000);
5108 br[3] = s->code; x86_branch8 (s->code, X86_CC_AE, 0, FALSE);
5109 x86_patch (br[3], br[2]);
/* allocate the sub-page remainder, if any */
5110 x86_test_reg_reg (s->code, sreg, sreg);
5111 br[4] = s->code; x86_branch8 (s->code, X86_CC_Z, 0, FALSE);
5112 x86_alu_reg_reg (s->code, X86_SUB, X86_ESP, sreg);
5114 br[1] = s->code; x86_jump8 (s->code, 0);
/* small (sub-page) request: plain ESP subtraction */
5116 x86_patch (br[0], s->code);
5117 x86_alu_reg_reg (s->code, X86_SUB, X86_ESP, tree->left->reg1);
5118 x86_patch (br[1], s->code);
5119 x86_patch (br[4], s->code);
5120 #else /* PLATFORM_WIN32 */
5121 x86_alu_reg_reg (s->code, X86_SUB, X86_ESP, tree->left->reg1);
/*
 * mono_emit_stack_alloc_const — emit code for a localloc with a
 * compile-time-constant size. On Win32, large allocations use a counted
 * page-touch loop in EAX (spilling the caller's EAX to the stack when it
 * is live in this tree), smaller ones an unrolled sequence; the sub-page
 * remainder is handled last. Elsewhere, a single ESP subtraction.
 * NOTE(review): the declarations of 'br'/'npages'/'i' and the threshold
 * branch choosing loop vs. unrolled code are on elided lines.
 */
5125 void mono_emit_stack_alloc_const (MBCGEN_TYPE* s, MBTREE_TYPE* tree, int size)
5127 #ifdef PLATFORM_WIN32
5133 * Generate stack probe code.
5134 * Under Windows, it is necessary to allocate one page at a time,
5135 * "touching" stack after each successful sub-allocation. This is
5136 * because of the way stack growth is implemented - there is a
5137 * guard page before the lowest stack page that is currently committed.
5138 * Stack normally grows sequentially so OS traps access to the
5139 * guard page and commits more pages when needed.
5141 npages = ((unsigned) size) >> 12;
/* EAX is needed as the loop counter: stash the caller's EAX in the first
 * allocated page when it holds a live value */
5143 if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX) {
5144 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 0x1000);
5145 x86_test_membase_reg (s->code, X86_ESP, 0, X86_ESP);
5146 x86_mov_membase_reg (s->code, X86_ESP, 0x1000 - 4, X86_EAX, 4); /* save EAX */
5147 x86_mov_reg_imm (s->code, X86_EAX, npages - 1);
5149 x86_mov_reg_imm (s->code, X86_EAX, npages);
5151 br[0] = s->code; /* loop */
5152 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 0x1000);
5153 x86_test_membase_reg (s->code, X86_ESP, 0, X86_ESP);
5154 x86_dec_reg (s->code, X86_EAX);
5155 br[1] = s->code; x86_branch8 (s->code, X86_CC_NZ, 0, TRUE);
5156 x86_patch (br[1], br[0]);
5157 if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX)
5158 x86_mov_reg_membase (s->code, X86_EAX, X86_ESP, (npages * 0x1000) - 4, 4); /* restore EAX */
5160 /* generate unrolled code for relatively small allocs */
5161 for (i = npages; --i >= 0;) {
5162 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 0x1000);
5163 x86_test_membase_reg (s->code, X86_ESP, 0, X86_ESP);
/* finally allocate the sub-page remainder (ADD of a negative = SUB) */
5168 if (size & 0xFFF) x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, -(size & 0xFFF));
5169 #else /* PLATFORM_WIN32 */
5170 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, size);
/*
 * mono_ldvirtftn — JIT helper for CIL ldvirtftn: resolve the virtual
 * method in slot 'slot' for the dynamic class of 'this', unwrapping
 * transparent proxies to their remoted class, and return its compiled
 * code (or a remoting trampoline when the receiver is a proxy).
 * NOTE(review): 'g_assert (slot <= class->vtable_size)' permits
 * slot == vtable_size, which would index one past the vtable — this looks
 * like it should be '<'; confirm how vtable_size is defined.
 * NOTE(review): the is_proxy assignment and the return of 'addr' are on
 * elided lines in this listing.
 */
5175 mono_ldvirtftn (MonoObject *this, int slot)
5180 gboolean is_proxy = FALSE;
5183 if ((class = this->vtable->klass) == mono_defaults.transparent_proxy_class) {
5184 class = ((MonoTransparentProxy *)this)->klass;
5189 g_assert (slot <= class->vtable_size);
5191 m = class->vtable [slot];
5194 return mono_jit_create_remoting_trampoline (m);
/* compiling a method mutates shared metadata: serialize on the global lock */
5196 EnterCriticalSection (metadata_section);
5197 addr = mono_compile_method (m);
5198 LeaveCriticalSection (metadata_section);
/*
 * mono_ldintftn — JIT helper resolving an interface method: translate the
 * interface slot through the class's interface_offsets table into a
 * vtable slot, then behave like mono_ldvirtftn (proxy unwrapping,
 * remoting trampoline, or compiled code under the metadata lock).
 * NOTE(review): the is_proxy assignment and the return of 'addr' are on
 * elided lines in this listing.
 */
5204 mono_ldintftn (MonoObject *this, int slot)
5209 gboolean is_proxy = FALSE;
5212 if ((class = this->vtable->klass) == mono_defaults.transparent_proxy_class) {
5213 class = ((MonoTransparentProxy *)this)->klass;
5217 g_assert (slot <= class->max_interface_id);
5219 slot = class->interface_offsets [slot];
5221 m = class->vtable [slot];
5224 return mono_jit_create_remoting_trampoline (m);
/* compiling a method mutates shared metadata: serialize on the global lock */
5226 EnterCriticalSection (metadata_section);
5227 addr = mono_compile_method (m);
5228 LeaveCriticalSection (metadata_section);
5233 gpointer mono_ldftn (MonoMethod *method)
5237 EnterCriticalSection (metadata_section);
5238 addr = mono_compile_method (method);
5239 LeaveCriticalSection (metadata_section);