2 * x86.brg: X86 code generator
5 * Dietmar Maurer (dietmar@ximian.com)
8 * (C) 2001 Ximian, Inc.
17 #ifndef PLATFORM_WIN32
19 #include <sys/syscall.h>
22 #include <mono/metadata/blob.h>
23 #include <mono/metadata/metadata.h>
24 #include <mono/metadata/loader.h>
25 #include <mono/metadata/object.h>
26 #include <mono/metadata/tabledefs.h>
27 #include <mono/metadata/appdomain.h>
28 #include <mono/metadata/marshal.h>
29 #include <mono/metadata/threads.h>
30 #include <mono/arch/x86/x86-codegen.h>
36 * Pull the list of opcodes
38 #define OPDEF(a,b,c,d,e,f,g,h,i,j) \
42 #include "mono/cil/opcode.def"
47 /* alignment of activation frames */
48 #define MONO_FRAME_ALIGNMENT 4
/* debugging helper implemented elsewhere; dumps the last-managed-frame info */
50 void print_lmf (void);
/* monoburg configuration: the tree/node types the generated matcher operates on */
52 #define MBTREE_TYPE MBTree
53 #define MBCGEN_TYPE MonoFlowGraph
54 #define MBCOST_DATA MonoFlowGraph
/* burg states are allocated out of the flowgraph's mempool (freed with the graph,
 * never individually) */
55 #define MBALLOC_STATE mono_mempool_alloc (data->mp, sizeof (MBState))
58 AMImmediate = 0, // ptr
60 AMIndex = 2, // V[REG*X]
61 AMBaseIndex = 3, // V[REG*X][REG]
74 unsigned last_instr:1;
97 MonoClassField *field;
100 MonoJitBranchInfo bi;
101 MonoJitCallInfo call_info;
102 MonoJitArgumentInfo arg_info;
103 MonoJitNonVirtualCallInfo nonvirt_info;
/* Software 64-bit arithmetic helpers: x86-32 has no native 64-bit mul/div,
 * so the generated code calls out to these. The _ovf variants take an
 * exception out-parameter and split operands into low/high 32-bit halves. */
107 gint64 mono_llmult (gint64 a, gint64 b);
108 guint64 mono_llmult_ovf (gpointer *exc, guint32 al, gint32 ah, guint32 bl, gint32 bh);
109 guint64 mono_llmult_ovf_un (gpointer *exc, guint32 al, guint32 ah, guint32 bl, guint32 bh);
110 gint64 mono_lldiv (gint64 a, gint64 b);
111 gint64 mono_llrem (gint64 a, gint64 b);
112 guint64 mono_lldiv_un (guint64 a, guint64 b);
113 guint64 mono_llrem_un (guint64 a, guint64 b);
/* runtime helpers backing the ldsflda/ldftn/ldvirtftn opcodes.
 * NOTE: `this` is a legal identifier in C, but renaming it (e.g. to `obj`)
 * would keep the header usable from C++ as well. */
114 gpointer mono_ldsflda (MonoClass *klass, int offset);
116 gpointer mono_ldvirtftn (MonoObject *this, int slot);
117 gpointer mono_ldintftn (MonoObject *this, int slot);
118 gpointer mono_ldftn (MonoMethod *method);
/* code-emission helpers shared by several rules below */
120 void mono_emit_fast_iconv (MBCGEN_TYPE* s, MBTREE_TYPE* tree);
121 void mono_emit_fast_iconv_i8 (MBCGEN_TYPE* s, MBTREE_TYPE* tree);
122 void mono_emit_stack_alloc (MBCGEN_TYPE* s, MBTREE_TYPE* tree);
123 void mono_emit_stack_alloc_const (MBCGEN_TYPE* s, MBTREE_TYPE* tree, int size);
126 mono_array_new_wrapper (MonoClass *eclass, guint32 n);
128 mono_object_new_wrapper (MonoClass *klass);
130 mono_ldstr_wrapper (MonoImage *image, guint32 ind);
133 get_mono_object_isinst (void);
/* Optimization-level switch for the burg cost functions below.
 * MB_USE_OPT1/2 wrap a rule's cost `c`; returning 65535 makes the rule so
 * expensive that the matcher never selects it, i.e. the optimization is
 * disabled at that level.
 * (The matching #endif lines are not visible in this view of the file.) */
135 #define MB_OPT_LEVEL 1
137 #if MB_OPT_LEVEL == 0
138 #define MB_USE_OPT1(c) 65535
139 #define MB_USE_OPT2(c) 65535
141 #if MB_OPT_LEVEL == 1
142 #define MB_USE_OPT1(c) c
143 #define MB_USE_OPT2(c) 65535
145 #if MB_OPT_LEVEL >= 2
146 #define MB_USE_OPT1(c) c
147 #define MB_USE_OPT2(c) c
/* Debug aid: emit code that prints `text`, the register number and the
 * register's runtime value via printf, while preserving the caller-saved
 * registers EAX/EDX/ECX around the call.
 * Emitted sequence: save EAX/EDX/ECX; push the reg value, the reg number
 * and the format string (three printf arguments, hence the ADD ESP, 3*4
 * cleanup); call printf through EAX; restore the saved registers.
 * NOTE(review): inserting anything between these lines requires a trailing
 * backslash — this is a multi-line macro continuation chain. */
152 #define REAL_PRINT_REG(text,reg) \
153 mono_assert (reg >= 0); \
154 x86_push_reg (s->code, X86_EAX); \
155 x86_push_reg (s->code, X86_EDX); \
156 x86_push_reg (s->code, X86_ECX); \
157 x86_push_reg (s->code, reg); \
158 x86_push_imm (s->code, reg); \
159 x86_push_imm (s->code, text " %d %p\n"); \
160 x86_mov_reg_imm (s->code, X86_EAX, printf); \
161 x86_call_reg (s->code, X86_EAX); \
162 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 3*4); \
163 x86_pop_reg (s->code, X86_ECX); \
164 x86_pop_reg (s->code, X86_EDX); \
165 x86_pop_reg (s->code, X86_EAX);
168 debug_memcopy (void *dest, const void *src, size_t n);
171 #define MEMCOPY debug_memcopy
172 #define PRINT_REG(text,reg) REAL_PRINT_REG(text,reg)
175 #define MEMCOPY memcpy
177 #define PRINT_REG(x,y)
181 /* The call instruction for virtual functions must have a known
182 * size (used by x86_magic_trampoline)
184 #define x86_call_virtual(inst,basereg,disp) \
186 *(inst)++ = (unsigned char)0xff; \
187 x86_address_byte ((inst), 2, 2, (basereg)); \
188 x86_imm_emit32 ((inst), (disp)); \
191 /* emit an exception if condition is fail */
192 #define EMIT_COND_SYSTEM_EXCEPTION(cond,signed,exc_name) \
195 x86_branch8 (s->code, cond, 10, signed); \
196 x86_push_imm (s->code, exc_name); \
197 t = arch_get_throw_exception_by_name (); \
198 mono_add_jump_info (s, s->code, \
199 MONO_JUMP_INFO_ABS, t); \
200 x86_call_code (s->code, 0); \
203 #define X86_ARG_PAD(pad) do { \
206 x86_push_reg (s->code, X86_EAX); \
208 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, pad); \
212 #define X86_CALL_END do { \
213 int size = tree->data.call_info.frame_size; \
215 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, size); \
218 #define X86_CALL_BEGIN do { \
219 int pad = tree->data.call_info.pad; \
221 if (tree->left->op != MB_TERM_NOP) { \
222 mono_assert (lreg >= 0); \
223 x86_push_reg (s->code, lreg); \
224 x86_alu_membase_imm (s->code, X86_CMP, lreg, 0, 0); \
226 if (tree->data.call_info.vtype_num) { \
227 int offset = VARINFO (s, tree->data.call_info.vtype_num).offset; \
228 x86_lea_membase (s->code, treg, X86_EBP, offset); \
229 x86_push_reg (s->code, treg); \
233 /* we use this macro to move one lreg to another - source and
234 destination may overlap, but the register allocator has to
235 make sure that ((d1 < d2) && (s1 < s2))
237 #define MOVE_LREG(d1,d2,s1,s2) \
239 g_assert ((d1 < d2) && (s1 < s2)); \
240 if ((d1) <= (s1)) { \
242 x86_mov_reg_reg (s->code, d1, s1, 4); \
244 x86_mov_reg_reg (s->code, d2, s2, 4); \
247 x86_mov_reg_reg (s->code, d2, s2, 4); \
249 x86_mov_reg_reg (s->code, d1, s1, 4); \
253 #define X86_REMOTING_CHECK tree->left->op != MB_TERM_NOP && tree->right->data.nonvirt_info.method && \
254 (tree->right->data.nonvirt_info.method->klass->marshalbyref || \
255 tree->right->data.nonvirt_info.method->klass == mono_defaults.object_class)
258 This macro adds transparant proxy checks for non-virtual methods in a MBR object
259 and methods that belongs to System::Object.
261 #define X86_REMOTING_CALL do { \
263 x86_push_reg (s->code, lreg); \
264 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); \
265 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4); \
266 x86_alu_reg_imm (s->code, X86_CMP, lreg, (int)mono_defaults.transparent_proxy_class); \
267 x86_pop_reg (s->code, lreg); \
268 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE); \
270 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_jit_create_remoting_trampoline (tree->right->data.nonvirt_info.method)); \
271 x86_call_code (s->code, 0); \
273 br [1] = s->code; x86_jump8 (s->code, 0); \
274 x86_patch (br [0], s->code); \
276 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.nonvirt_info.p); \
277 x86_call_code (s->code, 0); \
279 x86_patch (br [1], s->code); \
285 # terminal definitions
# Terminals mirror (a superset of) the CIL opcode set the tree builder emits.
# Grouped roughly as: constants, loads/stores, call/flow, arithmetic,
# comparisons, conversions, allocation, casts, overflow-checked conversions,
# exception handling, object-model helpers, remoting variants, and internal
# glue nodes (FUNC1/PROC2/PROC3/...).
289 %term CONST_I4 CONST_I8 CONST_R4 CONST_R8
290 %term LDIND_I1 LDIND_U1 LDIND_I2 LDIND_U2 LDIND_I4 LDIND_I8 LDIND_R4 LDIND_R8 LDIND_OBJ
291 %term STIND_I1 STIND_I2 STIND_I4 STIND_I8 STIND_R4 STIND_R8 STIND_OBJ
292 %term ADDR_L ADDR_G ARG_I4 ARG_I8 ARG_R4 ARG_R8 ARG_OBJ CALL_I4 CALL_I8 CALL_R8 CALL_VOID
293 %term BREAK SWITCH BR RET_VOID RET RET_OBJ ENDFINALLY ENDFILTER JMP
294 %term ADD ADD_OVF ADD_OVF_UN SUB SUB_OVF SUB_OVF_UN MUL MUL_OVF MUL_OVF_UN
295 %term DIV DIV_UN REM REM_UN AND OR XOR SHL SHR SHR_UN NEG NOT CKFINITE
296 %term COMPARE CBRANCH BRTRUE BRFALSE CSET
297 %term CONV_I4 CONV_I1 CONV_I2 CONV_I8 CONV_U1 CONV_U2 CONV_U4 CONV_U8 CONV_R4 CONV_R8 CONV_R_UN
298 %term INTF_ADDR VFUNC_ADDR NOP NEWARR NEWARR_SPEC NEWOBJ NEWOBJ_SPEC
299 %term INITBLK CPBLK CPSRC POP INITOBJ LOCALLOC
300 %term ISINST CASTCLASS UNBOX
301 %term CONV_OVF_I1 CONV_OVF_U1 CONV_OVF_I2 CONV_OVF_U2 CONV_OVF_U4 CONV_OVF_U8 CONV_OVF_I4
302 %term CONV_OVF_I4_UN CONV_OVF_U1_UN CONV_OVF_U2_UN
303 %term CONV_OVF_I2_UN CONV_OVF_I8_UN CONV_OVF_I1_UN
304 %term EXCEPTION THROW RETHROW HANDLER CHECKTHIS RETHROW_ABORT
305 %term LDLEN LDELEMA LDFTN LDVIRTFTN LDSTR LDSFLDA
306 %term REMOTE_LDFLDA REMOTE_STIND_I1 REMOTE_STIND_I2 REMOTE_STIND_I4
307 %term REMOTE_STIND_I8 REMOTE_STIND_R4 REMOTE_STIND_R8 REMOTE_STIND_OBJ
310 %term FUNC1 PROC2 PROC3 FREE OBJADDR VTADDR
326 tree->data.ainfo.offset = tree->data.i;
327 tree->data.ainfo.amode = AMImmediate;
331 tree->data.ainfo.offset = tree->data.i;
332 tree->data.ainfo.amode = AMImmediate;
335 acon: ADD (ADDR_G, CONST_I4) {
336 tree->data.ainfo.offset = (unsigned)tree->left->data.p + tree->right->data.i;
337 tree->data.ainfo.amode = AMImmediate;
343 tree->data.ainfo.offset = 0;
344 tree->data.ainfo.basereg = tree->reg1;
345 tree->data.ainfo.amode = AMBase;
348 base: ADD (reg, CONST_I4) {
349 tree->data.ainfo.offset = tree->right->data.i;
350 tree->data.ainfo.basereg = tree->left->reg1;
351 tree->data.ainfo.amode = AMBase;
355 tree->data.ainfo.offset = VARINFO (s, tree->data.i).offset;
356 tree->data.ainfo.basereg = X86_EBP;
357 tree->data.ainfo.amode = AMBase;
359 MBCOND (VARINFO (data, tree->data.i).reg < 0);
364 tree->data.ainfo.offset = 0;
365 tree->data.ainfo.indexreg = tree->reg1;
366 tree->data.ainfo.shift = 0;
367 tree->data.ainfo.amode = AMIndex;
370 index: SHL (reg, CONST_I4) {
371 tree->data.ainfo.offset = 0;
372 tree->data.ainfo.amode = AMIndex;
373 tree->data.ainfo.indexreg = tree->left->reg1;
374 tree->data.ainfo.shift = tree->right->data.i;
376 MBCOND (tree->right->data.i == 0 ||
377 tree->right->data.i == 1 ||
378 tree->right->data.i == 2 ||
379 tree->right->data.i == 3);
384 index: MUL (reg, CONST_I4) {
385 static int fast_log2 [] = { 1, 0, 1, -1, 2, -1, -1, -1, 3 };
387 tree->data.ainfo.offset = 0;
388 tree->data.ainfo.amode = AMIndex;
389 tree->data.ainfo.indexreg = tree->left->reg1;
390 tree->data.ainfo.shift = fast_log2 [tree->right->data.i];
392 MBCOND (tree->right->data.i == 1 ||
393 tree->right->data.i == 2 ||
394 tree->right->data.i == 4 ||
395 tree->right->data.i == 8);
404 addr: ADD (index, base) {
405 tree->data.ainfo.offset = tree->right->data.ainfo.offset;
406 tree->data.ainfo.basereg = tree->right->data.ainfo.basereg;
407 tree->data.ainfo.amode = tree->left->data.ainfo.amode |
408 tree->right->data.ainfo.amode;
409 tree->data.ainfo.shift = tree->left->data.ainfo.shift;
410 tree->data.ainfo.indexreg = tree->left->data.ainfo.indexreg;
413 # we pass exception in ECX to catch handler
415 int offset = VARINFO (s, tree->data.i).offset;
417 if (tree->reg1 != X86_ECX)
418 x86_mov_reg_reg (s->code, tree->reg1, X86_ECX, 4);
420 /* store it so that we can RETHROW it later */
421 x86_mov_membase_reg (s->code, X86_EBP, offset, tree->reg1, 4);
427 x86_push_reg (s->code, tree->left->reg1);
428 target = arch_get_throw_exception ();
429 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, target);
430 x86_call_code (s->code, target);
434 int offset = VARINFO (s, tree->data.i).offset;
437 x86_push_membase (s->code, X86_EBP, offset);
439 target = arch_get_throw_exception ();
440 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, target);
441 x86_call_code (s->code, target);
/* Re-raise a pending thread abort: fetch the current MonoThread, load its
 * abort_exc field, and if it is non-NULL throw it. Emitted at points where
 * a ThreadAbortException must be re-thrown after being handled. */
444 stmt: RETHROW_ABORT {
/* call mono_thread_current(); result (MonoThread*) comes back in EAX */
448 target = mono_thread_current;
449 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, target);
450 x86_call_code (s->code, target);
/* EAX = thread->abort_exc */
452 x86_mov_reg_membase (s->code, X86_EAX, X86_EAX, G_STRUCT_OFFSET (MonoThread, abort_exc), 4);
453 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0);
/* no pending abort exception -> skip the throw */
455 br = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
/* pass the exception object to the throw trampoline */
457 x86_push_reg (s->code, X86_EAX);
459 target = arch_get_throw_exception ();
460 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, target);
461 x86_call_code (s->code, target);
/* backpatch the forward branch to land here */
463 x86_patch (br, s->code);
467 /* save ESP (used by ENDFINALLY) */
468 x86_mov_membase_reg (s->code, X86_EBP, mono_exc_esp_offset, X86_ESP, 4);
469 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
470 x86_call_imm (s->code, 0);
474 /* restore ESP - which can be modified when we allocate value types
475 * in the finally handler */
476 x86_mov_reg_membase (s->code, X86_ESP, X86_EBP, mono_exc_esp_offset, 4);
477 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
481 stmt: ENDFILTER (reg) {
482 /* restore ESP - which can be modified when we allocate value types
484 x86_mov_reg_membase (s->code, X86_ESP, X86_EBP, mono_exc_esp_offset, 4);
485 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
486 if (tree->left->reg1 != X86_EAX)
487 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
491 stmt: STIND_I4 (ADDR_L, ADD (LDIND_I4 (ADDR_L), CONST_I4)) {
492 int vn = tree->left->data.i;
493 int treg = VARINFO (s, vn).reg;
494 int offset = VARINFO (s, vn).offset;
495 int data = tree->right->right->data.i;
499 x86_inc_reg (s->code, treg);
501 x86_inc_membase (s->code, X86_EBP, offset);
504 x86_alu_reg_imm (s->code, X86_ADD, treg, data);
506 x86_alu_membase_imm (s->code, X86_ADD, X86_EBP, offset, data);
509 MBCOND (tree->right->left->left->data.i == tree->left->data.i);
513 stmt: STIND_I4 (ADDR_L, SUB (LDIND_I4 (ADDR_L), CONST_I4)) {
514 int vn = tree->left->data.i;
515 int treg = VARINFO (s, vn).reg;
516 int offset = VARINFO (s, vn).offset;
517 int data = tree->right->right->data.i;
521 x86_dec_reg (s->code, treg);
523 x86_dec_membase (s->code, X86_EBP, offset);
526 x86_alu_reg_imm (s->code, X86_SUB, treg, data);
528 x86_alu_membase_imm (s->code, X86_SUB, X86_EBP, offset, data);
531 MBCOND (tree->right->left->left->data.i == tree->left->data.i);
535 stmt: STIND_I4 (ADDR_L, ADD (LDIND_I4 (ADDR_L), reg)) {
536 int vn = tree->left->data.i;
537 int treg = VARINFO (s, vn).reg;
538 int sreg = tree->right->right->reg1;
539 int offset = VARINFO (s, vn).offset;
542 x86_alu_reg_reg (s->code, X86_ADD, treg, sreg);
544 x86_alu_membase_reg (s->code, X86_ADD, X86_EBP, offset, sreg);
547 MBCOND (tree->right->left->left->data.i == tree->left->data.i);
551 stmt: STIND_I4 (ADDR_L, LDIND_I4 (ADDR_L)) {
552 int treg1 = VARINFO (s, tree->left->data.i).reg;
553 int treg2 = VARINFO (s, tree->right->left->data.i).reg;
554 int offset1 = VARINFO (s, tree->left->data.i).offset;
555 int offset2 = VARINFO (s, tree->right->left->data.i).offset;
557 //{static int cx= 0; printf ("CX %5d\n", cx++);}
559 if (treg1 >= 0 && treg2 >= 0) {
560 x86_mov_reg_reg (s->code, treg1, treg2, 4);
563 if (treg1 >= 0 && treg2 < 0) {
564 x86_mov_reg_membase (s->code, treg1, X86_EBP, offset2, 4);
567 if (treg1 < 0 && treg2 >= 0) {
568 x86_mov_membase_reg (s->code, X86_EBP, offset1, treg2, 4);
572 g_assert_not_reached ();
575 MBCOND (VARINFO (data, tree->left->data.i).reg >= 0 ||
576 VARINFO (data, tree->right->left->data.i).reg >= 0);
580 stmt: STIND_I4 (addr, CONST_I4) {
581 switch (tree->left->data.ainfo.amode) {
584 x86_mov_mem_imm (s->code, tree->left->data.ainfo.offset, tree->right->data.i, 4);
588 x86_mov_membase_imm (s->code, tree->left->data.ainfo.basereg,
589 tree->left->data.ainfo.offset, tree->right->data.i, 4);
592 x86_mov_memindex_imm (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
593 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
594 tree->right->data.i, 4);
597 x86_mov_memindex_imm (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
598 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
599 tree->right->data.i, 4);
604 stmt: STIND_I4 (addr, reg) {
606 switch (tree->left->data.ainfo.amode) {
609 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 4);
613 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
614 tree->left->data.ainfo.offset, tree->right->reg1, 4);
617 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
618 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
619 tree->right->reg1, 4);
622 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
623 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
624 tree->right->reg1, 4);
629 stmt: REMOTE_STIND_I4 (reg, reg) {
632 int lreg = tree->left->reg1;
633 int rreg = tree->right->reg1;
642 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
643 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
644 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
646 /* this is a transparent proxy - remote the call */
648 /* save value to stack */
649 x86_push_reg (s->code, rreg);
651 x86_push_reg (s->code, X86_ESP);
652 x86_push_imm (s->code, tree->data.fi.field);
653 x86_push_imm (s->code, tree->data.fi.klass);
654 x86_push_reg (s->code, lreg);
655 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
656 x86_call_code (s->code, 0);
657 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
659 br [1] = s->code; x86_jump8 (s->code, 0);
661 x86_patch (br [0], s->code);
662 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
663 tree->data.fi.field->offset;
664 x86_mov_membase_reg (s->code, lreg, offset, rreg, 4);
666 x86_patch (br [1], s->code);
669 stmt: STIND_I1 (addr, reg) {
670 PRINT_REG ("STIND_I1", tree->right->reg1);
672 switch (tree->left->data.ainfo.amode) {
675 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 1);
679 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
680 tree->left->data.ainfo.offset, tree->right->reg1, 1);
683 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
684 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
685 tree->right->reg1, 1);
688 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
689 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
690 tree->right->reg1, 1);
695 stmt: REMOTE_STIND_I1 (reg, reg) {
698 int lreg = tree->left->reg1;
699 int rreg = tree->right->reg1;
708 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
709 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
710 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
712 /* this is a transparent proxy - remote the call */
714 /* save value to stack */
715 x86_push_reg (s->code, rreg);
717 x86_push_reg (s->code, X86_ESP);
718 x86_push_imm (s->code, tree->data.fi.field);
719 x86_push_imm (s->code, tree->data.fi.klass);
720 x86_push_reg (s->code, lreg);
721 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
722 x86_call_code (s->code, 0);
723 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
725 br [1] = s->code; x86_jump8 (s->code, 0);
727 x86_patch (br [0], s->code);
728 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
729 tree->data.fi.field->offset;
730 x86_mov_membase_reg (s->code, lreg, offset, rreg, 1);
732 x86_patch (br [1], s->code);
735 stmt: STIND_I2 (addr, reg) {
736 PRINT_REG ("STIND_I2", tree->right->reg1);
738 switch (tree->left->data.ainfo.amode) {
741 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 2);
745 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
746 tree->left->data.ainfo.offset, tree->right->reg1, 2);
749 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
750 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
751 tree->right->reg1, 2);
754 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
755 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
756 tree->right->reg1, 2);
761 stmt: REMOTE_STIND_I2 (reg, reg) {
764 int lreg = tree->left->reg1;
765 int rreg = tree->right->reg1;
774 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
775 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
776 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
778 /* this is a transparent proxy - remote the call */
780 /* save value to stack */
781 x86_push_reg (s->code, rreg);
783 x86_push_reg (s->code, X86_ESP);
784 x86_push_imm (s->code, tree->data.fi.field);
785 x86_push_imm (s->code, tree->data.fi.klass);
786 x86_push_reg (s->code, lreg);
787 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
788 x86_call_code (s->code, 0);
789 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
791 br [1] = s->code; x86_jump8 (s->code, 0);
793 x86_patch (br [0], s->code);
794 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
795 tree->data.fi.field->offset;
796 x86_mov_membase_reg (s->code, lreg, offset, rreg, 2);
798 x86_patch (br [1], s->code);
/* Load of a local that the register allocator placed in a register:
 * just a reg-to-reg move (elided when source and destination coincide).
 * The MBCOND guard restricts this rule to register-allocated locals
 * (VARINFO(...).reg >= 0); the stack-slot case is handled elsewhere. */
801 reg: LDIND_I4 (ADDR_L) {
802 int treg = VARINFO (s, tree->left->data.i).reg;
804 if (treg != tree->reg1)
805 x86_mov_reg_reg (s->code, tree->reg1, treg, 4);
808 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
812 stmt: STIND_I4 (ADDR_L, CONST_I4) {
813 int treg = VARINFO (s, tree->left->data.i).reg;
815 x86_mov_reg_imm (s->code, treg, tree->right->data.i);
818 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
822 stmt: STIND_I4 (ADDR_L, LDIND_I4 (ADDR_L)) {
823 int treg = VARINFO (s, tree->left->data.i).reg;
824 int offset = VARINFO (s, tree->right->left->data.i).offset;
826 x86_mov_reg_membase (s->code, treg, X86_EBP, offset, 4);
828 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
829 MBCOND ((VARINFO (data, tree->right->left->data.i).reg < 0));
833 stmt: STIND_I4 (ADDR_L, reg) {
834 int treg = VARINFO (s, tree->left->data.i).reg;
836 if (treg != tree->right->reg1)
837 x86_mov_reg_reg (s->code, treg, tree->right->reg1, 4);
840 MBCOND ((VARINFO (data, tree->left->data.i).reg >= 0));
845 reg: LDIND_I4 (addr) {
847 switch (tree->left->data.ainfo.amode) {
850 x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4);
854 x86_mov_reg_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
855 tree->left->data.ainfo.offset, 4);
858 x86_mov_reg_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
859 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, 4);
862 x86_mov_reg_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
863 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
864 tree->left->data.ainfo.shift, 4);
869 PRINT_REG ("LDIND_I4", tree->reg1);
872 reg: LDIND_I1 (addr) {
873 switch (tree->left->data.ainfo.amode) {
876 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, TRUE, FALSE);
880 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
881 tree->left->data.ainfo.offset, TRUE, FALSE);
884 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
885 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, TRUE, FALSE);
888 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
889 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
890 tree->left->data.ainfo.shift, TRUE, FALSE);
894 PRINT_REG ("LDIND_I1", tree->reg1);
897 reg: LDIND_U1 (addr) {
898 switch (tree->left->data.ainfo.amode) {
901 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, FALSE, FALSE);
905 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
906 tree->left->data.ainfo.offset, FALSE, FALSE);
909 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
910 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, FALSE, FALSE);
913 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
914 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
915 tree->left->data.ainfo.shift, FALSE, FALSE);
919 PRINT_REG ("LDIND_U1", tree->reg1);
922 reg: LDIND_I2 (addr) {
923 switch (tree->left->data.ainfo.amode) {
926 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, TRUE, TRUE);
930 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
931 tree->left->data.ainfo.offset, TRUE, TRUE);
934 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
935 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, TRUE, TRUE);
938 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
939 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
940 tree->left->data.ainfo.shift, TRUE, TRUE);
944 PRINT_REG ("LDIND_U2", tree->reg1);
947 reg: LDIND_U2 (addr) {
948 switch (tree->left->data.ainfo.amode) {
951 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, FALSE, TRUE);
955 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
956 tree->left->data.ainfo.offset, FALSE, TRUE);
959 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
960 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, FALSE, TRUE);
963 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
964 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
965 tree->left->data.ainfo.shift, FALSE, TRUE);
969 PRINT_REG ("LDIND_U2", tree->reg1);
972 reg: REMOTE_LDFLDA (reg) {
975 int lreg = tree->left->reg1;
980 if (tree->reg1 != treg)
981 x86_push_reg (s->code, treg);
983 x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
984 x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
985 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
987 /* this is a transparent proxy - remote the call */
989 x86_push_reg (s->code, X86_EAX);
991 x86_push_reg (s->code, X86_EDX);
992 x86_push_reg (s->code, X86_ECX);
994 x86_push_imm (s->code, 0);
995 x86_push_imm (s->code, tree->data.fi.field);
996 x86_push_imm (s->code, tree->data.fi.klass);
997 x86_push_reg (s->code, lreg);
998 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_load_remote_field);
999 x86_call_code (s->code, 0);
1000 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
1002 if (treg != X86_EAX)
1003 x86_mov_reg_reg (s->code, treg, X86_EAX, 4);
1005 x86_pop_reg (s->code, X86_ECX);
1006 if (treg != X86_EDX)
1007 x86_pop_reg (s->code, X86_EDX);
1008 if (treg != X86_EAX)
1009 x86_pop_reg (s->code, X86_EAX);
1011 x86_mov_reg_reg (s->code, tree->reg1, treg, 4);
1013 br [1] = s->code; x86_jump8 (s->code, 0);
1015 x86_patch (br [0], s->code);
1016 if (tree->data.fi.klass->valuetype)
1017 x86_lea_membase (s->code, tree->reg1, lreg,
1018 tree->data.fi.field->offset - sizeof (MonoObject));
1020 x86_lea_membase (s->code, tree->reg1, lreg, tree->data.fi.field->offset);
1022 x86_patch (br [1], s->code);
1024 if (tree->reg1 != treg)
1025 x86_pop_reg (s->code, treg);
1029 int offset = VARINFO (s, tree->data.i).offset;
1031 x86_lea_membase (s->code, tree->reg1, X86_EBP, offset);
1033 PRINT_REG ("ADDR_L", tree->reg1);
1035 MBCOND (VARINFO (data, tree->data.i).reg < 0);
1041 x86_mov_reg_imm (s->code, tree->reg1, tree->data.p);
/* Narrowing conversions via x86_widen_reg (dest, src, is_signed, is_half):
 * MOVSX/MOVZX of the low byte (is_half == FALSE) or low word (is_half == TRUE).
 * CONV_I1: sign-extend byte; CONV_U1: zero-extend byte;
 * CONV_I2: sign-extend word; CONV_U2: zero-extend word. */
1044 reg: CONV_I1 (reg) {
1045 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, FALSE);
1048 reg: CONV_U1 (reg) {
1049 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
1052 reg: CONV_I2 (reg) {
1053 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, TRUE);
1056 reg: CONV_U2 (reg) {
1057 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
1061 x86_mov_reg_imm (s->code, tree->reg1, tree->data.i);
1064 reg: CONV_I4 (reg) {
1065 if (tree->reg1 != tree->left->reg1)
1066 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1067 PRINT_REG ("CONV_I4", tree->left->reg1);
1070 reg: CONV_U4 (reg) {
1071 if (tree->reg1 != tree->left->reg1)
1072 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1073 PRINT_REG ("CONV_U4", tree->left->reg1);
1076 reg: CONV_OVF_I4 (reg) {
1077 if (tree->reg1 != tree->left->reg1)
1078 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1079 PRINT_REG ("CONV_OVF_I4", tree->left->reg1);
/* conv.ovf.u4: an int32 converts to uint32 iff its sign bit is clear.
 * TEST with the sign-bit mask sets ZF when the bit is clear; the
 * exception helper throws unless its condition (ZF set) holds.
 * FIX: mask was 0x8000000 (bit 27) — one zero short of the sign bit
 * 0x80000000, so the check probed the wrong bit. */
1082 reg: CONV_OVF_U4 (reg) {
1083 /* Keep in sync with CONV_OVF_I4_UN below, they are the same on 32-bit machines */
1084 x86_test_reg_imm (s->code, tree->left->reg1, 0x80000000);
1085 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1086 if (tree->reg1 != tree->left->reg1)
1087 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* conv.ovf.i4.un: a uint32 converts to int32 iff the top (sign) bit is
 * clear, i.e. the value is <= INT32_MAX.
 * FIX: mask was 0x8000000 (bit 27) instead of the sign bit 0x80000000 —
 * same defect as the CONV_OVF_U4 rule this one must stay in sync with. */
1090 reg: CONV_OVF_I4_UN (reg) {
1091 /* Keep in sync with CONV_OVF_U4 above, they are the same on 32-bit machines */
1092 x86_test_reg_imm (s->code, tree->left->reg1, 0x80000000);
1093 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1094 if (tree->reg1 != tree->left->reg1)
1095 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* conv.ovf.i1: value must lie in [-128, 127]; out-of-range throws
 * OverflowException, otherwise sign-extend the low byte.
 * The exception helper throws when its condition does NOT hold, so each
 * condition names the "value is still in range" case.
 * FIX: the lower-bound check used X86_CC_GT after CMP reg, -128, which
 * rejected the valid value -128. Use GE, matching the CONV_OVF_I2 rule
 * (which correctly uses GE against -32768). */
1098 reg: CONV_OVF_I1 (reg) {
1099 /* probe value to be within -128 to 127 */
1100 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 127);
1101 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, TRUE, "OverflowException");
1102 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, -128);
1103 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GE, TRUE, "OverflowException");
1104 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, FALSE);
1107 reg: CONV_OVF_I1_UN (reg) {
1108 /* probe values between 0 to 128 */
1109 x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff80);
1110 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1111 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
1114 reg: CONV_OVF_U1 (reg) {
1115 /* Keep in sync with CONV_OVF_U1_UN routine below, they are the same on 32-bit machines */
1116 /* probe value to be within 0 to 255 */
1117 x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff00);
1118 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1119 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
1122 reg: CONV_OVF_U1_UN (reg) {
1123 /* Keep in sync with CONV_OVF_U1 routine above, they are the same on 32-bit machines */
1124 /* probe value to be within 0 to 255 */
1125 x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff00);
1126 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1127 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
1130 reg: CONV_OVF_I2 (reg) {
1131 /* Probe value to be within -32768 and 32767 */
1132 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 32767);
1133 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, TRUE, "OverflowException");
1134 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, -32768);
1135 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GE, TRUE, "OverflowException");
1136 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, TRUE);
/* conv.ovf.u2: value must fit in [0, 65535]; TEST against the high-word
 * mask sets ZF when the upper 16 bits are clear, otherwise throw.
 * Consistency fix (behavior-identical): pass FALSE for the signedness
 * flag as CONV_OVF_U2_UN does — EQ/NE branches ignore signedness, and
 * the comment below requires the two rules to stay in sync. */
1139 reg: CONV_OVF_U2 (reg) {
1140 /* Keep in sync with CONV_OVF_U2_UN below, they are the same on 32-bit machines */
1141 /* Probe value to be within 0 and 65535 */
1142 x86_test_reg_imm (s->code, tree->left->reg1, 0xffff0000);
1143 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1144 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
1147 reg: CONV_OVF_U2_UN (reg) {
1148 /* Keep in sync with CONV_OVF_U2 above, they are the same on 32-bit machines */
1149 /* Probe value to be within 0 and 65535 */
1150 x86_test_reg_imm (s->code, tree->left->reg1, 0xffff0000);
1151 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1152 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
1155 reg: CONV_OVF_I2_UN (reg) {
1156 /* Convert uint value into short, value within 0 and 32767 */
1157 x86_test_reg_imm (s->code, tree->left->reg1, 0xffff8000);
1158 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "OverflowException");
1159 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
/* Strength-reduced multiply by a constant (enabled at opt level >= 1).
 * The loop scans for the lowest set bit of v: i = its position, j = 1<<i,
 * k = mask of all higher bits, so (v & k) != 0 means v is not a power of
 * two. Powers of two become a single SHL; small non-powers (3,5,6,9,10,
 * 12,25,100) use LEA scaled-index tricks; everything else falls back to
 * IMUL with an immediate.
 * NOTE(review): `v < 0` is always false — v is `unsigned int` — so the
 * fallback test effectively reduces to `i == 32 || v & k`. Dead but
 * harmless; negative multipliers still reach the IMUL path via `v & k`.
 * (The switch/case labels and closing braces between the LEA arms are not
 * visible in this view of the file.) */
1162 reg: MUL (reg, CONST_I4) "MB_USE_OPT1(0)" {
1163 unsigned int i, j, k, v;
1165 v = tree->right->data.i;
1166 for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
/* not a power of two (or zero): dispatch on the small-constant LEA tricks */
1171 if (v < 0 || i == 32 || v & k) {
/* v == 3 */
1174 /* LEA r1, [r2 + r2*2] */
1175 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 1);
/* v == 5 */
1178 /* LEA r1, [r2 + r2*4] */
1179 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
/* v == 6: 3*r2 then doubled */
1182 /* LEA r1, [r2 + r2*2] */
1184 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 1);
1185 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
/* v == 9 */
1188 /* LEA r1, [r2 + r2*8] */
1189 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 3);
/* v == 10: 5*r2 then doubled */
1192 /* LEA r1, [r2 + r2*4] */
1194 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1195 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
/* v == 12: 3*r2 then <<2 */
1198 /* LEA r1, [r2 + r2*2] */
1200 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 1);
1201 x86_shift_reg_imm (s->code, X86_SHL, tree->reg1, 2);
/* v == 25: 5*5*r2 */
1204 /* LEA r1, [r2 + r2*4] */
1205 /* LEA r1, [r1 + r1*4] */
1206 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1207 x86_lea_memindex (s->code, tree->reg1, tree->reg1, 0, tree->reg1, 2);
/* v == 100: 5*r2, <<2, *5 */
1210 /* LEA r1, [r2 + r2*4] */
1212 /* LEA r1, [r1 + r1*4] */
1213 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1, 0, tree->left->reg1, 2);
1214 x86_shift_reg_imm (s->code, X86_SHL, tree->reg1, 2);
1215 x86_lea_memindex (s->code, tree->reg1, tree->reg1, 0, tree->reg1, 2);
/* generic fallback: IMUL r1, r2, imm */
1218 x86_imul_reg_reg_imm (s->code, tree->reg1, tree->left->reg1, tree->right->data.i);
/* power of two: shift left by the bit index */
1222 x86_shift_reg_imm (s->code, X86_SHL, tree->left->reg1, i);
1223 if (tree->reg1 != tree->left->reg1)
1224 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Register-register multiply variants. */
1228 reg: MUL (reg, reg) {
/* Two-operand IMUL: only the low 32 bits matter for CIL `mul'. */
1229 x86_imul_reg_reg (s->code, tree->left->reg1, tree->right->reg1);
1231 if (tree->reg1 != tree->left->reg1)
1232 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* mul.ovf: IMUL sets OF on signed overflow; throw unless OF is clear. */
1235 reg: MUL_OVF (reg, reg) {
1236 x86_imul_reg_reg (s->code, tree->left->reg1, tree->right->reg1);
1237 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
1239 if (tree->reg1 != tree->left->reg1)
1240 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* mul.ovf.un: one-operand unsigned MUL requires EAX as the implicit
 * operand and clobbers EDX; the register allocator must have pinned
 * reg1/reg2 to EAX/EDX (asserted below). */
1243 reg: MUL_OVF_UN (reg, reg) {
1244 mono_assert (tree->right->reg1 != X86_EAX);
1246 if (tree->left->reg1 != X86_EAX)
1247 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1249 x86_mul_reg (s->code, tree->right->reg1, FALSE);
1250 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
1252 mono_assert (tree->reg1 == X86_EAX &&
1253 tree->reg2 == X86_EDX);
/* Divide by a constant: when the constant is a single power of two
 * (detected by the lowest-set-bit scan: i == bit index, `v & k' == 0),
 * emit an arithmetic shift right instead of IDIV.
 * NOTE(review): SAR rounds toward negative infinity while CIL `div'
 * truncates toward zero, so this is only exact for non-negative
 * dividends; presumably a cost/condition not visible in this excerpt
 * restricts when the rule fires — confirm against the full grammar. */
1256 reg: DIV (reg, CONST_I4) {
1257 unsigned int i, j, k, v;
1259 v = tree->right->data.i;
1260 for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
1265 x86_shift_reg_imm (s->code, X86_SAR, tree->left->reg1, i);
1266 if (tree->reg1 != tree->left->reg1)
1267 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Body of the following constant-divisor rule (its header is not shown
 * in this excerpt): same lowest-set-bit scan as above. */
1270 unsigned int i, j, k, v;
1275 v = tree->right->data.i;
1276 for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
1281 if (i == 32 || v & k)
/* Signed divide: IDIV takes the 64-bit dividend in EDX:EAX and leaves
 * quotient in EAX, remainder in EDX (hence the reg1/reg2 assertions).
 * NOTE(review): the sign-extension of EAX into EDX (CDQ) is not visible
 * in this excerpt but is required before a signed IDIV — confirm it is
 * emitted on the elided line between the mov and the div. */
1288 reg: DIV (reg, reg) {
1289 mono_assert (tree->right->reg1 != X86_EAX);
1291 if (tree->left->reg1 != X86_EAX)
1292 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1295 x86_div_reg (s->code, tree->right->reg1, TRUE);
1297 mono_assert (tree->reg1 == X86_EAX &&
1298 tree->reg2 == X86_EDX);
/* Unsigned divide by a constant. Power-of-two divisors become SHR;
 * other divisors use the reciprocal-multiplication trick: multiply by
 * ceil/floor(2^k / v) with a one-operand MUL and take the high half
 * (EDX) shifted right by k-32. The float f approximates 2^k / v and r
 * its rounding error, selecting between the floor and ceil variants. */
1301 reg: DIV_UN (reg, CONST_I4) {
1302 unsigned int i, j, k, v;
1305 v = tree->right->data.i;
1306 for (i = 0, j = 1, k = 0xfffffffe; i < 32; i++, j = j << 1, k = k << 1) {
1311 if (i == 32 || v & k) {
/* Scan for the highest set bit of v, from bit 31 down to bit 0.
 * FIX: i is unsigned, so the original condition `--i >= 0' was always
 * true and the loop could only terminate via its interior break —
 * hanging the JIT when v has no set bits (v == 0). `i-- > 0' visits
 * the same body values (i = 31 .. 0) and always terminates. */
1312 for (i = 32, j = 0x80000000; i-- > 0; j >>= 1) {
1316 /* k = 32 + number of significant bits in v - 1 */
1320 for (i = 0; i < k; i++) f *= 2.0f;
/* v divides 2^k exactly: multiply then plain shift of the high part. */
1326 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, k - 32);
1327 if (tree->reg1 != tree->left->reg1)
1328 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1329 } else if (r < 0.5f) {
1330 if (tree->left->reg1 != X86_EAX)
1331 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1332 x86_mov_reg_imm (s->code, X86_EDX, (guint32) floor(f));
1333 /* x86_inc_reg (s->code, X86_EAX); */
1334 /* INC is faster but we have to check for overflow. */
1335 x86_alu_reg_imm (s->code, X86_ADD, X86_EAX, 1);
/* If the +1 wrapped (dividend was 0xffffffff), skip the 2-byte MUL
 * encoding that follows. */
1336 x86_branch8(s->code, X86_CC_C, 2, FALSE);
1337 x86_mul_reg (s->code, X86_EDX, FALSE);
1338 x86_shift_reg_imm (s->code, X86_SHR, X86_EDX, k - 32);
1339 if (tree->reg1 != X86_EDX)
1340 x86_mov_reg_reg (s->code, tree->reg1, X86_EDX, 4);
1342 if (tree->left->reg1 != X86_EAX)
1343 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1344 x86_mov_reg_imm (s->code, X86_EDX, (guint32) ceil(f));
1345 x86_mul_reg (s->code, X86_EDX, FALSE);
1346 x86_shift_reg_imm (s->code, X86_SHR, X86_EDX, k - 32);
1347 if (tree->reg1 != X86_EDX)
1348 x86_mov_reg_reg (s->code, tree->reg1, X86_EDX, 4);
/* Power-of-two divisor: unsigned divide is a logical shift right. */
1351 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, i);
1352 if (tree->reg1 != tree->left->reg1)
1353 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Register-register unsigned divide and both remainder forms. All three
 * use the one-operand DIV/IDIV, which consumes EDX:EAX and produces
 * quotient in EAX, remainder in EDX. */
1358 reg: DIV_UN (reg, reg) {
1359 mono_assert (tree->right->reg1 != X86_EAX);
1361 if (tree->left->reg1 != X86_EAX)
1362 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
/* Zero the high half of the dividend for an unsigned divide. */
1364 x86_mov_reg_imm (s->code, X86_EDX, 0);
1365 x86_div_reg (s->code, tree->right->reg1, FALSE);
1367 mono_assert (tree->reg1 == X86_EAX &&
1368 tree->reg2 == X86_EDX);
/* rem: signed remainder; result is in EDX, moved into EAX (= reg1). */
1371 reg: REM (reg, reg) {
1372 mono_assert (tree->right->reg1 != X86_EAX);
1373 mono_assert (tree->right->reg1 != X86_EDX);
1375 if (tree->left->reg1 != X86_EAX)
1376 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1378 /* sign extend to 64bit in EAX/EDX */
1380 x86_div_reg (s->code, tree->right->reg1, TRUE);
1381 x86_mov_reg_reg (s->code, X86_EAX, X86_EDX, 4);
1383 mono_assert (tree->reg1 == X86_EAX &&
1384 tree->reg2 == X86_EDX);
/* rem.un: same shape with an unsigned divide and a zeroed EDX. */
1387 reg: REM_UN (reg, reg) {
1388 mono_assert (tree->right->reg1 != X86_EAX);
1389 mono_assert (tree->right->reg1 != X86_EDX);
1391 if (tree->left->reg1 != X86_EAX)
1392 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1394 /* zero extend to 64bit in EAX/EDX */
1395 x86_mov_reg_imm (s->code, X86_EDX, 0);
1396 x86_div_reg (s->code, tree->right->reg1, FALSE);
1397 x86_mov_reg_reg (s->code, X86_EAX, X86_EDX, 4);
1399 mono_assert (tree->reg1 == X86_EAX &&
1400 tree->reg2 == X86_EDX);
/* Addition rules: constant (INC fast path for +1), a local read straight
 * from its home register, plain register-register, and the two
 * overflow-checked forms (OF for signed, CF for unsigned). */
1403 reg: ADD (reg, CONST_I4) "MB_USE_OPT1(0)" {
1404 if (tree->right->data.i == 1)
1405 x86_inc_reg (s->code, tree->left->reg1);
1407 x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, tree->right->data.i);
1409 if (tree->reg1 != tree->left->reg1)
1410 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Fold `x + local' when the local lives in a register (MBCOND below). */
1414 reg: ADD (reg, LDIND_I4 (ADDR_L)) {
1415 int treg = VARINFO (s, tree->right->left->data.i).reg;
1417 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, treg);
1419 if (tree->reg1 != tree->left->reg1)
1420 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1422 MBCOND ((VARINFO (data, tree->right->left->data.i).reg >= 0));
1426 reg: ADD (reg, reg) {
1427 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
1429 if (tree->reg1 != tree->left->reg1)
1430 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1433 reg: ADD_OVF (reg, reg) {
1434 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
1435 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
1437 if (tree->reg1 != tree->left->reg1)
1438 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1441 reg: ADD_OVF_UN (reg, reg) {
1442 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
1443 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
1445 if (tree->reg1 != tree->left->reg1)
1446 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Subtraction rules: mirror of the ADD family above (DEC fast path for
 * -1, register-resident local operand, plain and overflow-checked). */
1449 reg: SUB (reg, CONST_I4) "MB_USE_OPT1(0)" {
1450 if (tree->right->data.i == 1)
1451 x86_dec_reg (s->code, tree->left->reg1);
1453 x86_alu_reg_imm (s->code, X86_SUB, tree->left->reg1, tree->right->data.i);
1455 if (tree->reg1 != tree->left->reg1)
1456 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1459 reg: SUB (reg, LDIND_I4 (ADDR_L)) {
1460 int treg = VARINFO (s, tree->right->left->data.i).reg;
1462 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, treg);
1464 if (tree->reg1 != tree->left->reg1)
1465 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1467 MBCOND ((VARINFO (data, tree->right->left->data.i).reg >= 0));
1471 reg: SUB (reg, reg) {
1472 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
1474 if (tree->reg1 != tree->left->reg1)
1475 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1478 reg: SUB_OVF (reg, reg) {
1479 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
1480 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
1482 if (tree->reg1 != tree->left->reg1)
1483 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1486 reg: SUB_OVF_UN (reg, reg) {
1487 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
1488 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
1490 if (tree->reg1 != tree->left->reg1)
1491 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* cset: materialize a comparison result (flags already set by the child)
 * as 0/1. SETcc writes only the low byte, so the result is widened with
 * movzx at the end. The TRUE/FALSE argument of x86_set_reg selects the
 * signed vs unsigned form of GT/LT. */
1494 reg: CSET (cflags) {
1496 switch (tree->data.i) {
1498 x86_set_reg (s->code, X86_CC_EQ, tree->reg1, TRUE);
1501 x86_set_reg (s->code, X86_CC_GT, tree->reg1, TRUE);
1504 x86_set_reg (s->code, X86_CC_GT, tree->reg1, FALSE);
1507 x86_set_reg (s->code, X86_CC_LT, tree->reg1, TRUE);
1510 x86_set_reg (s->code, X86_CC_LT, tree->reg1, FALSE);
1513 g_assert_not_reached ();
1516 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
/* Bitwise AND/OR/XOR (immediate and register forms) plus the unary
 * NEG/NOT bodies at the end (their rule headers fall on lines not shown
 * in this excerpt). All destructively update the left operand register
 * and copy into reg1 only when the allocator assigned a different one. */
1519 reg: AND (reg, CONST_I4) "MB_USE_OPT1(0)" {
1520 x86_alu_reg_imm (s->code, X86_AND, tree->left->reg1, tree->right->data.i);
1522 if (tree->reg1 != tree->left->reg1)
1523 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1526 reg: AND (reg, reg) {
1527 x86_alu_reg_reg (s->code, X86_AND, tree->left->reg1, tree->right->reg1);
1529 if (tree->reg1 != tree->left->reg1)
1530 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1533 reg: OR (reg, CONST_I4) "MB_USE_OPT1(0)" {
1534 x86_alu_reg_imm (s->code, X86_OR, tree->left->reg1, tree->right->data.i);
1536 if (tree->reg1 != tree->left->reg1)
1537 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1540 reg: OR (reg, reg) {
1541 x86_alu_reg_reg (s->code, X86_OR, tree->left->reg1, tree->right->reg1);
1543 if (tree->reg1 != tree->left->reg1)
1544 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1547 reg: XOR (reg, CONST_I4) "MB_USE_OPT1(0)" {
1548 x86_alu_reg_imm (s->code, X86_XOR, tree->left->reg1, tree->right->data.i);
1550 if (tree->reg1 != tree->left->reg1)
1551 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1554 reg: XOR (reg, reg) {
1555 x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->right->reg1);
1557 if (tree->reg1 != tree->left->reg1)
1558 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* NEG rule body (two's-complement negate). */
1562 x86_neg_reg (s->code, tree->left->reg1);
1564 if (tree->reg1 != tree->left->reg1)
1565 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* NOT rule body (one's-complement). */
1569 x86_not_reg (s->code, tree->left->reg1);
1571 if (tree->reg1 != tree->left->reg1)
1572 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Shift rules. x86 variable shifts take the count in CL, so the
 * register forms spill/restore ECX around the shift when the count is
 * elsewhere; the asserts guarantee the shifted value itself was never
 * allocated to ECX. SHL/SAR/SHR map to shl/sar.un-less shr/shr.un. */
1575 reg: SHL (reg, CONST_I4) {
1576 x86_shift_reg_imm (s->code, X86_SHL, tree->left->reg1, tree->right->data.i);
1578 if (tree->reg1 != tree->left->reg1)
1579 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1582 reg: SHL (reg, reg) {
1583 if (tree->right->reg1 != X86_ECX) {
1584 x86_push_reg (s->code, X86_ECX);
1585 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
1587 x86_shift_reg (s->code, X86_SHL, tree->left->reg1);
1589 if (tree->reg1 != tree->left->reg1)
1590 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1592 if (tree->right->reg1 != X86_ECX)
1593 x86_pop_reg (s->code, X86_ECX);
1595 mono_assert (tree->reg1 != X86_ECX &&
1596 tree->left->reg1 != X86_ECX);
/* shr (arithmetic, sign-propagating). */
1599 reg: SHR (reg, CONST_I4) {
1600 x86_shift_reg_imm (s->code, X86_SAR, tree->left->reg1, tree->right->data.i);
1602 if (tree->reg1 != tree->left->reg1)
1603 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1606 reg: SHR (reg, reg) {
1607 if (tree->right->reg1 != X86_ECX) {
1608 x86_push_reg (s->code, X86_ECX);
1609 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
1612 x86_shift_reg (s->code, X86_SAR, tree->left->reg1);
1614 if (tree->reg1 != tree->left->reg1)
1615 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1617 if (tree->right->reg1 != X86_ECX)
1618 x86_pop_reg (s->code, X86_ECX);
1620 mono_assert (tree->reg1 != X86_ECX &&
1621 tree->left->reg1 != X86_ECX);
/* shr.un (logical, zero-filling). */
1624 reg: SHR_UN (reg, CONST_I4) {
1625 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, tree->right->data.i);
1627 if (tree->reg1 != tree->left->reg1)
1628 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1631 reg: SHR_UN (reg, reg) {
1632 if (tree->right->reg1 != X86_ECX) {
1633 x86_push_reg (s->code, X86_ECX);
1634 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
1637 x86_shift_reg (s->code, X86_SHR, tree->left->reg1);
1639 if (tree->reg1 != tree->left->reg1)
1640 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1642 if (tree->right->reg1 != X86_ECX)
1643 x86_pop_reg (s->code, X86_ECX);
1645 mono_assert (tree->reg1 != X86_ECX &&
1646 tree->left->reg1 != X86_ECX);
/* ldsflda: call the mono_ldsflda helper (klass, offset) to obtain the
 * static field address. Caller-saved EAX/ECX/EDX are preserved around
 * the call; EAX is only pushed/restored when it is not itself the
 * result register. The two pushed arguments are popped with ADD ESP,8
 * (cdecl). */
1649 reg: LDSFLDA (CONST_I4) {
1650 if (tree->reg1 != X86_EAX)
1651 x86_push_reg (s->code, X86_EAX);
1652 x86_push_reg (s->code, X86_ECX);
1653 x86_push_reg (s->code, X86_EDX);
1655 x86_push_imm (s->code, tree->left->data.i);
1656 x86_push_imm (s->code, tree->data.klass);
1657 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldsflda);
1658 x86_call_code (s->code, 0);
1659 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
1661 x86_pop_reg (s->code, X86_EDX);
1662 x86_pop_reg (s->code, X86_ECX);
1663 if (tree->reg1 != X86_EAX) {
1664 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1665 x86_pop_reg (s->code, X86_EAX);
/* ldlen rule body (header not shown): load MonoArray::max_length. */
1671 x86_mov_reg_membase (s->code, tree->reg1, tree->left->reg1,
1672 G_STRUCT_OFFSET (MonoArray, max_length), 4);
/* ldelema: compute the address of an array element, with an optional
 * bounds check against MonoArray::max_length (unsigned compares, so a
 * negative index also fails). tree->data.i is the element size. */
1675 reg: LDELEMA (reg, CONST_I4) {
1678 if (mono_jit_boundcheck){
1679 x86_alu_membase_imm (s->code, X86_CMP, tree->left->reg1, G_STRUCT_OFFSET (MonoArray, max_length), tree->right->data.i);
1680 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GT, FALSE, "IndexOutOfRangeException");
/* Constant index: fold size*index plus the vector offset into one LEA. */
1683 ind = tree->data.i * tree->right->data.i + G_STRUCT_OFFSET (MonoArray, vector);
1685 x86_lea_membase (s->code, tree->reg1, tree->left->reg1, ind);
1688 reg: LDELEMA (reg, reg) {
1690 if (mono_jit_boundcheck){
1691 x86_alu_reg_membase (s->code, X86_CMP, tree->right->reg1, tree->left->reg1, G_STRUCT_OFFSET (MonoArray, max_length));
1692 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LT, FALSE, "IndexOutOfRangeException");
/* Element sizes 1/2/4/8 use a scaled-index LEA; fast_log2[0] is a dummy
 * slot, the table is only indexed with 1, 2, 4 or 8. */
1695 if (tree->data.i == 1 || tree->data.i == 2 ||
1696 tree->data.i == 4 || tree->data.i == 8) {
1697 static int fast_log2 [] = { 1, 0, 1, -1, 2, -1, -1, -1, 3 };
1698 x86_lea_memindex (s->code, tree->reg1, tree->left->reg1,
1699 G_STRUCT_OFFSET (MonoArray, vector), tree->right->reg1,
1700 fast_log2 [tree->data.i]);
/* Other sizes: scale the index with IMUL and add (clobbers the index
 * register, which is dead after this rule). */
1702 x86_imul_reg_reg_imm (s->code, tree->right->reg1, tree->right->reg1, tree->data.i);
1703 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->right->reg1);
1704 x86_alu_reg_imm (s->code, X86_ADD, tree->reg1, G_STRUCT_OFFSET (MonoArray, vector));
/* ldstr rule body (header not shown): call mono_ldstr_wrapper
 * (image, token) with caller-saved registers preserved, result in EAX. */
1709 if (tree->reg1 != X86_EAX)
1710 x86_push_reg (s->code, X86_EAX);
1711 x86_push_reg (s->code, X86_ECX);
1712 x86_push_reg (s->code, X86_EDX);
1714 x86_push_imm (s->code, tree->data.p);
1715 x86_push_imm (s->code, s->method->klass->image);
1716 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldstr_wrapper);
1717 x86_call_code (s->code, 0);
1718 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
1720 x86_pop_reg (s->code, X86_EDX);
1721 x86_pop_reg (s->code, X86_ECX);
1722 if (tree->reg1 != X86_EAX) {
1723 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1724 x86_pop_reg (s->code, X86_EAX);
1727 PRINT_REG ("LDSTR", tree->reg1);
/* newarr rule body (header not shown): call mono_array_new_wrapper
 * (element klass, length-in-register); same save/restore protocol. */
1731 if (tree->reg1 != X86_EAX)
1732 x86_push_reg (s->code, X86_EAX);
1733 x86_push_reg (s->code, X86_ECX);
1734 x86_push_reg (s->code, X86_EDX);
1736 x86_push_reg (s->code, tree->left->reg1);
1737 x86_push_imm (s->code, tree->data.p);
1738 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_array_new_wrapper);
1739 x86_call_code (s->code, 0);
1740 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
1742 x86_pop_reg (s->code, X86_EDX);
1743 x86_pop_reg (s->code, X86_ECX);
1744 if (tree->reg1 != X86_EAX) {
1745 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1746 x86_pop_reg (s->code, X86_EAX);
1749 PRINT_REG ("NEWARR", tree->reg1);
/* newarr (vtable already resolved): call mono_array_new_specific
 * (vtable, length-in-register), caller-saved registers preserved,
 * result moved from EAX into reg1 when they differ. */
1752 reg: NEWARR_SPEC (reg) {
1753 if (tree->reg1 != X86_EAX)
1754 x86_push_reg (s->code, X86_EAX);
1755 x86_push_reg (s->code, X86_ECX);
1756 x86_push_reg (s->code, X86_EDX);
1758 x86_push_reg (s->code, tree->left->reg1);
1759 x86_push_imm (s->code, tree->data.p);
1760 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_array_new_specific);
1761 x86_call_code (s->code, 0);
1762 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
1764 x86_pop_reg (s->code, X86_EDX);
1765 x86_pop_reg (s->code, X86_ECX);
1766 if (tree->reg1 != X86_EAX) {
1767 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1768 x86_pop_reg (s->code, X86_EAX);
1771 PRINT_REG ("NEWARR_SPEC", tree->reg1);
/* newobj rule body (header not shown): allocate via
 * mono_object_new_wrapper (klass); single argument, so ADD ESP,4. */
1775 if (tree->reg1 != X86_EAX)
1776 x86_push_reg (s->code, X86_EAX);
1777 x86_push_reg (s->code, X86_ECX);
1778 x86_push_reg (s->code, X86_EDX);
1780 x86_push_imm (s->code, tree->data.klass);
1781 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_object_new_wrapper);
1782 x86_call_code (s->code, 0);
1783 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
1785 x86_pop_reg (s->code, X86_EDX);
1786 x86_pop_reg (s->code, X86_ECX);
1787 if (tree->reg1 != X86_EAX) {
1788 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1789 x86_pop_reg (s->code, X86_EAX);
1791 PRINT_REG ("NEWOBJ", tree->reg1);
/* newobj (vtable already resolved) rule body (header not shown):
 * mono_object_new_specific (vtable). */
1795 if (tree->reg1 != X86_EAX)
1796 x86_push_reg (s->code, X86_EAX);
1797 x86_push_reg (s->code, X86_ECX);
1798 x86_push_reg (s->code, X86_EDX);
1800 x86_push_imm (s->code, tree->data.p);
1801 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_object_new_specific);
1802 x86_call_code (s->code, 0);
1803 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
1805 x86_pop_reg (s->code, X86_EDX);
1806 x86_pop_reg (s->code, X86_ECX);
1807 if (tree->reg1 != X86_EAX) {
1808 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1809 x86_pop_reg (s->code, X86_EAX);
1811 PRINT_REG ("NEWOBJ_SPEC", tree->reg1);
/* objaddr: the object address is already in the child's register. */
1814 reg: OBJADDR (reg) {
1815 if (tree->left->reg1 != tree->reg1)
1816 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* vtaddr: address of a valuetype local = EBP + frame offset. */
1819 reg: VTADDR (ADDR_L) {
1820 int offset = VARINFO (s, tree->left->data.i).offset;
1822 x86_lea_membase (s->code, tree->reg1, X86_EBP, offset);
/* free rule body (header not shown): cdecl call to g_free(ptr). */
1826 x86_push_reg (s->code, tree->left->reg1);
1827 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, g_free);
1828 x86_call_code (s->code, 0);
1829 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* proc2/proc3: void cdecl helper calls with 2 or 3 register arguments
 * (pushed right-to-left, stack cleaned by the caller). The helper
 * address is recorded as an absolute-jump patch site. */
1832 stmt: PROC2 (reg, reg) {
1833 x86_push_reg (s->code, tree->right->reg1);
1834 x86_push_reg (s->code, tree->left->reg1);
1835 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->data.p);
1836 x86_call_code (s->code, 0);
1837 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
1840 stmt: PROC3 (reg, CPSRC (reg, reg)) {
1841 x86_push_reg (s->code, tree->right->right->reg1);
1842 x86_push_reg (s->code, tree->right->left->reg1);
1843 x86_push_reg (s->code, tree->left->reg1);
1844 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->data.p);
1845 x86_call_code (s->code, 0);
1846 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
/* One-argument helper-call rule body (header not shown): pushes the
 * single register argument, calls tree->data.p, cleans the stack and
 * returns the EAX result in reg1, preserving caller-saved registers. */
1850 if (tree->reg1 != X86_EAX)
1851 x86_push_reg (s->code, X86_EAX);
1852 x86_push_reg (s->code, X86_ECX);
1853 x86_push_reg (s->code, X86_EDX);
1855 x86_push_reg (s->code, tree->left->reg1);
1857 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->data.p);
1858 x86_call_code (s->code, 0);
1859 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer));
1861 x86_pop_reg (s->code, X86_EDX);
1862 x86_pop_reg (s->code, X86_ECX);
1863 if (tree->reg1 != X86_EAX) {
1864 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1865 x86_pop_reg (s->code, X86_EAX);
/* localloc with a constant size: round the size up to the frame
 * alignment, grow the stack, then zero the block with REP STOSD
 * (count = size/4, EAX = 0, EDI = start of block). EDI/EAX/ECX are
 * spilled only when they are neither the result nor the size register;
 * `offset' accounts for those pushes when addressing the block. */
1869 reg: LOCALLOC (CONST_I4) {
1873 size = (tree->left->data.i + (MONO_FRAME_ALIGNMENT - 1)) & ~(MONO_FRAME_ALIGNMENT - 1); // align to MONO_FRAME_ALIGNMENT boundary
1877 mono_emit_stack_alloc_const (s, tree, size);
1879 if (tree->reg1 != X86_EDI && tree->left->reg1 != X86_EDI) {
1880 x86_push_reg (s->code, X86_EDI);
1883 if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX) {
1884 x86_push_reg (s->code, X86_EAX);
1887 if (tree->reg1 != X86_ECX && tree->left->reg1 != X86_ECX) {
1888 x86_push_reg (s->code, X86_ECX);
1892 x86_mov_reg_imm (s->code, X86_ECX, size >> 2);
/* SUB EAX,EAX zeroes EAX (same effect as XOR EAX,EAX). */
1893 x86_alu_reg_reg (s->code, X86_SUB, X86_EAX, X86_EAX);
1895 x86_lea_membase (s->code, X86_EDI, X86_ESP, offset);
1897 x86_prefix (s->code, X86_REP_PREFIX);
1898 x86_stosd (s->code);
1900 if (tree->reg1 != X86_ECX && tree->left->reg1 != X86_ECX)
1901 x86_pop_reg (s->code, X86_ECX);
1902 if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX)
1903 x86_pop_reg (s->code, X86_EAX);
1904 if (tree->reg1 != X86_EDI && tree->left->reg1 != X86_EDI)
1905 x86_pop_reg (s->code, X86_EDI);
/* Result: current ESP is the address of the freshly allocated block. */
1908 x86_mov_reg_reg (s->code, tree->reg1, X86_ESP, 4);
/* localloc with a runtime size: align the size register up to the frame
 * alignment, grow the stack, then zero the block with REP STOSL
 * (ECX = size/4 dwords, EAX = 0, EDI = block start). EAX/ECX/EDI are
 * spilled only when they are neither the result nor the size register. */
1913 reg: LOCALLOC (reg) {
1915 /* size must be aligned to MONO_FRAME_ALIGNMENT bytes */
1916 x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, MONO_FRAME_ALIGNMENT - 1);
1917 x86_alu_reg_imm (s->code, X86_AND, tree->left->reg1, ~(MONO_FRAME_ALIGNMENT - 1));
1919 /* allocate space on stack */
1920 mono_emit_stack_alloc (s, tree);
1923 /* initialize with zero */
1924 if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX) {
1925 x86_push_reg (s->code, X86_EAX);
1928 if (tree->reg1 != X86_ECX && tree->left->reg1 != X86_ECX) {
1929 x86_push_reg (s->code, X86_ECX);
1932 if (tree->reg1 != X86_EDI && tree->left->reg1 != X86_EDI) {
1933 x86_push_reg (s->code, X86_EDI);
/* Convert byte count to dword count for REP STOSL. */
1937 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, 2);
1938 if (tree->left->reg1 != X86_ECX)
/* FIX: the original emitted x86_mov_reg_imm here, loading ECX with the
 * *number* of the size register (0-7) as an immediate instead of its
 * contents — so REP STOSL zeroed at most 7 dwords and left the rest of
 * the block uninitialized. Copy the register instead. */
1939 x86_mov_reg_reg (s->code, X86_ECX, tree->left->reg1, 4);
1940 x86_alu_reg_reg (s->code, X86_XOR, X86_EAX, X86_EAX);
1942 x86_lea_membase (s->code, X86_EDI, X86_ESP, offset);
1944 x86_prefix (s->code, X86_REP_PREFIX);
1945 x86_stosl (s->code);
1947 if (tree->reg1 != X86_EDI && tree->left->reg1 != X86_EDI)
1948 x86_pop_reg (s->code, X86_EDI);
1949 if (tree->reg1 != X86_ECX && tree->left->reg1 != X86_ECX)
1950 x86_pop_reg (s->code, X86_ECX);
1951 if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX)
1952 x86_pop_reg (s->code, X86_EAX);
/* Result: current ESP is the address of the freshly allocated block. */
1955 x86_mov_reg_reg (s->code, tree->reg1, X86_ESP, 4);
/* Unbox-style rule body (header not shown): save the object pointer,
 * walk obj->vtable->klass, compare its element_class against the
 * expected klass's element_class, throw InvalidCastException on
 * mismatch, then restore the pointer and step past the MonoObject
 * header to yield the valuetype data address. */
1959 if (tree->reg1 != tree->left->reg1)
1960 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1962 x86_push_reg (s->code, tree->reg1);
1963 x86_mov_reg_membase (s->code, tree->reg1, tree->reg1, 0, 4);
1964 x86_mov_reg_membase (s->code, tree->reg1, tree->reg1, 0, 4);
1965 x86_alu_membase_imm (s->code, X86_CMP, tree->reg1,
1966 G_STRUCT_OFFSET (MonoClass, element_class), ((int)(tree->data.klass->element_class)));
1967 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "InvalidCastException");
1968 x86_pop_reg (s->code, tree->reg1);
1969 x86_alu_reg_imm (s->code, X86_ADD, tree->reg1, sizeof (MonoObject));
/* castclass: type-check an object reference, throwing
 * InvalidCastException on failure and yielding the original reference
 * (or NULL) in reg1. The pointer is kept safe on the stack while lreg
 * is clobbered walking vtable/klass. NULL passes the cast (branch br[0]
 * skips everything). Three shapes: interface (interface_offsets table),
 * array (rank + cast_class baseval/diffval range check), and plain
 * class (baseval/diffval range check, with a transparent-proxy detour
 * for marshal-by-ref classes). */
1972 reg: CASTCLASS (reg) {
1973 MonoClass *klass = tree->data.klass;
1975 int lreg = tree->left->reg1;
1977 x86_push_reg (s->code, lreg);
1978 x86_test_reg_reg (s->code, lreg, lreg);
1979 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
1981 if (klass->flags & TYPE_ATTRIBUTE_INTERFACE) {
1982 /* lreg = obj->vtable */
1983 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
/* Fail if the vtable does not cover this interface id at all. */
1985 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoVTable, max_interface_id),
1986 klass->interface_id);
1987 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_GE, FALSE, "InvalidCastException");
1988 /* lreg = obj->vtable->interface_offsets */
1989 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
/* A zero slot means the interface is not implemented. */
1990 x86_alu_membase_imm (s->code, X86_CMP, lreg, klass->interface_id << 2, 0);
1991 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NE, FALSE, "InvalidCastException");
1994 /* lreg = obj->vtable */
1995 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
1996 /* lreg = obj->vtable->klass */
1997 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
/* Array case: rank must match, then the element class must fall in
 * the [baseval, baseval+diffval] subtype interval (unsigned compare). */
2001 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoClass, rank), klass->rank);
2002 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, FALSE, "InvalidCastException");
2003 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, cast_class), 4);
2004 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
2005 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->cast_class->baseval);
2006 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->cast_class->diffval);
2007 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, FALSE, "InvalidCastException");
2011 if (klass->marshalbyref) {
2012 /* check for transparent_proxy */
2013 x86_alu_reg_imm (s->code, X86_CMP, lreg, (int)mono_defaults.transparent_proxy_class);
2014 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
/* Transparent proxy: re-load the saved object and use the proxied
 * class for the range check instead. */
2017 x86_mov_reg_membase (s->code, lreg, X86_ESP, 0, 4);
2018 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoTransparentProxy,
2021 x86_patch (br [1], s->code);
2024 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
2025 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->baseval);
2026 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->diffval);
2027 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_LE, FALSE, "InvalidCastException");
/* Success (or NULL input): restore the original reference. */
2031 x86_patch (br [0], s->code);
2032 x86_pop_reg (s->code, tree->reg1);
/* isinst rule body (rule header falls on a line not shown): identical
 * type checks to CASTCLASS above, but instead of throwing on failure it
 * overwrites the saved object slot on the stack with NULL
 * (x86_mov_membase_imm ESP,0,0), so the rule yields the reference on
 * success and NULL on failure. */
2036 MonoClass *klass = tree->data.klass;
2038 int lreg = tree->left->reg1;
2040 x86_push_reg (s->code, lreg);
2041 x86_test_reg_reg (s->code, lreg, lreg);
2042 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
2044 if (klass->flags & TYPE_ATTRIBUTE_INTERFACE) {
2045 /* lreg = obj->vtable */
2046 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2048 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoVTable, max_interface_id),
2049 klass->interface_id);
2050 br [1] = s->code; x86_branch8 (s->code, X86_CC_LT, 0, FALSE);
2051 /* lreg = obj->vtable->interface_offsets */
2052 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
2053 x86_alu_membase_imm (s->code, X86_CMP, lreg, klass->interface_id << 2, 0);
2054 br [2] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
/* Failure path: null out the saved result slot. */
2055 x86_patch (br [1], s->code);
2056 x86_mov_membase_imm (s->code, X86_ESP, 0, 0, 4);
2057 x86_patch (br [2], s->code);
2061 /* lreg = obj->vtable */
2062 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2063 /* lreg = obj->vtable->klass */
2064 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
/* Array case: rank match plus cast_class baseval/diffval interval. */
2068 x86_alu_membase_imm (s->code, X86_CMP, lreg, G_STRUCT_OFFSET (MonoClass, rank), klass->rank);
2069 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
2070 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, cast_class), 4);
2071 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
2072 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->cast_class->baseval);
2073 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->cast_class->diffval);
2074 br [2] = s->code; x86_branch8 (s->code, X86_CC_LE, 0, FALSE);
2075 x86_patch (br [1], s->code);
2076 x86_mov_membase_imm (s->code, X86_ESP, 0, 0, 4);
2077 x86_patch (br [2], s->code);
2081 if (klass->marshalbyref) {
2082 /* check for transparent_proxy */
2083 x86_alu_reg_imm (s->code, X86_CMP, lreg, (int)mono_defaults.transparent_proxy_class);
2084 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
2087 x86_mov_reg_membase (s->code, lreg, X86_ESP, 0, 4);
2088 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoTransparentProxy,
2090 x86_patch (br [1], s->code);
2093 x86_mov_reg_membase (s->code, lreg, lreg, G_STRUCT_OFFSET (MonoClass, baseval), 4);
2094 x86_alu_reg_mem (s->code, X86_SUB, lreg, &klass->baseval);
2095 x86_alu_reg_mem (s->code, X86_CMP, lreg, &klass->diffval);
2096 br [2] = s->code; x86_branch8 (s->code, X86_CC_LE, 0, FALSE);
2097 x86_mov_membase_imm (s->code, X86_ESP, 0, 0, 4);
2098 x86_patch (br [2], s->code);
/* Pop the (possibly nulled) result. */
2102 x86_patch (br [0], s->code);
2103 x86_pop_reg (s->code, tree->reg1);
/* initobj: zero tree->data.i bytes at the address in the child register.
 * Sizes 1/2/4 use a single immediate store; larger sizes use REP STOSL
 * for the dword part (i = size/4) followed by byte stores for the
 * remainder (j = size%4). EDI is spilled when it is not already the
 * destination register. */
2106 stmt: INITOBJ (reg) {
2109 if (!(i = tree->data.i))
2112 if (i == 1 || i == 2 || i == 4) {
2113 x86_mov_membase_imm (s->code, tree->left->reg1, 0, 0, i);
2117 i = tree->data.i / 4;
2118 j = tree->data.i % 4;
2120 if (tree->left->reg1 != X86_EDI) {
2121 x86_push_reg (s->code, X86_EDI);
2122 x86_mov_reg_reg (s->code, X86_EDI, tree->left->reg1, 4);
2126 x86_alu_reg_reg (s->code, X86_XOR, X86_EAX, X86_EAX);
2127 x86_mov_reg_imm (s->code, X86_ECX, i);
2129 x86_prefix (s->code, X86_REP_PREFIX);
2130 x86_stosl (s->code);
2132 for (i = 0; i < j; i++)
2133 x86_stosb (s->code);
/* 3-byte tail variant: one 2-byte store plus one byte store. */
2137 x86_mov_membase_imm (s->code, X86_EDI, 0, 0, 2);
2138 x86_mov_membase_imm (s->code, X86_EDI, 2, 0, 1);
2143 if (tree->left->reg1 != X86_EDI)
2144 x86_pop_reg (s->code, X86_EDI);
/* cpblk with a constant byte count: copies `count' bytes from
 * source_reg to dest_reg. Counts up to 15 are fully unrolled using a
 * scratch register (sreg) and, for 8-byte chunks, the FPU 64-bit
 * integer load/store pair (fild/fistp qword) — exact because the x87
 * 80-bit format holds a full 64-bit mantissa. Larger counts fall
 * through to REP MOVSD plus an unrolled 0-3 byte tail, with ESI/EDI
 * saved if live and the operand registers spilled when they collide
 * with ESI/EDI (spill_pos tracks the stack slots used). */
2147 stmt: CPBLK (reg, CPSRC (reg, CONST_I4)) {
2148 int dest_reg = tree->left->reg1;
2149 int source_reg = tree->right->left->reg1;
2150 int count = tree->right->right->data.i;
/* Scratch register guaranteed to differ from dest_reg. */
2151 int sreg = dest_reg != X86_EAX ? X86_EAX : X86_EDX;
2152 int spill_pos = 0, dest_offset = 0, source_offset = 0;
2153 int save_esi = FALSE, save_edi = FALSE;
2155 // TODO: handle unaligned. prefix
/* Unrolled cases, ordered by count (1, 2, 3, 4, 5, 6, 7, 8, ...). */
2161 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 1);
2162 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 1);
2165 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 2);
2166 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 2);
2169 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 2);
2170 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 2);
2171 x86_mov_reg_membase (s->code, sreg, source_reg, 2, 1);
2172 x86_mov_membase_reg (s->code, dest_reg, 2, sreg, 1);
2175 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 4);
2176 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 4);
2179 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 4);
2180 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 4);
2181 x86_mov_reg_membase (s->code, sreg, source_reg, 4, 1);
2182 x86_mov_membase_reg (s->code, dest_reg, 4, sreg, 1);
2185 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 4);
2186 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 4);
2187 x86_mov_reg_membase (s->code, sreg, source_reg, 4, 2);
2188 x86_mov_membase_reg (s->code, dest_reg, 4, sreg, 2);
2191 x86_mov_reg_membase (s->code, sreg, source_reg, 0, 4);
2192 x86_mov_membase_reg (s->code, dest_reg, 0, sreg, 4);
2193 x86_mov_reg_membase (s->code, sreg, source_reg, 4, 2);
2194 x86_mov_membase_reg (s->code, dest_reg, 4, sreg, 2);
2195 x86_mov_reg_membase (s->code, sreg, source_reg, 6, 1);
2196 x86_mov_membase_reg (s->code, dest_reg, 6, sreg, 1);
/* 8-byte chunk copied through the x87 stack (fild/fistp qword). */
2199 x86_fild_membase (s->code, source_reg, 0, TRUE);
2200 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2203 x86_fild_membase (s->code, source_reg, 0, TRUE);
2204 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2205 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 1);
2206 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 1);
2209 x86_fild_membase (s->code, source_reg, 0, TRUE);
2210 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2211 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 2);
2212 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 2);
2215 x86_fild_membase (s->code, source_reg, 0, TRUE);
2216 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2217 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 2);
2218 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 2);
2219 x86_mov_reg_membase (s->code, sreg, source_reg, 10, 1);
2220 x86_mov_membase_reg (s->code, dest_reg, 10, sreg, 1);
2223 x86_fild_membase (s->code, source_reg, 0, TRUE);
2224 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2225 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 4);
2226 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 4);
2229 x86_fild_membase (s->code, source_reg, 0, TRUE);
2230 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2231 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 4);
2232 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 4);
2233 x86_mov_reg_membase (s->code, sreg, source_reg, 12, 1);
2234 x86_mov_membase_reg (s->code, dest_reg, 12, sreg, 1);
2237 x86_fild_membase (s->code, source_reg, 0, TRUE);
2238 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2239 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 4);
2240 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 4);
2241 x86_mov_reg_membase (s->code, sreg, source_reg, 12, 2);
2242 x86_mov_membase_reg (s->code, dest_reg, 12, sreg, 2);
2245 x86_fild_membase (s->code, source_reg, 0, TRUE);
2246 x86_fist_pop_membase (s->code, dest_reg, 0, TRUE);
2247 x86_mov_reg_membase (s->code, sreg, source_reg, 8, 4);
2248 x86_mov_membase_reg (s->code, dest_reg, 8, sreg, 4);
2249 x86_mov_reg_membase (s->code, sreg, source_reg, 12, 2);
2250 x86_mov_membase_reg (s->code, dest_reg, 12, sreg, 2);
2251 x86_mov_reg_membase (s->code, sreg, source_reg, 14, 1);
2252 x86_mov_membase_reg (s->code, dest_reg, 14, sreg, 1);
/* General case: REP MOVSD. Save ESI/EDI if the register set uses them,
 * and spill source/dest when they collide with ESI/EDI so they can be
 * reloaded from the stack below. */
2255 g_assert (count > 15);
2257 if (dest_reg != X86_ESI && source_reg != X86_ESI &&
2258 mono_regset_reg_used (s->rs, X86_ESI))
2260 if (dest_reg != X86_EDI && source_reg != X86_EDI &&
2261 mono_regset_reg_used (s->rs, X86_EDI))
2265 x86_push_reg (s->code, X86_ESI);
2267 x86_push_reg (s->code, X86_EDI);
2269 if (dest_reg == X86_ESI) {
2270 dest_offset = ++spill_pos;
2272 if (source_reg == X86_EDI) {
2273 source_offset = ++spill_pos;
2277 x86_push_reg (s->code, source_reg);
2279 x86_push_reg (s->code, dest_reg);
2281 if (source_reg != X86_ESI) {
2283 x86_mov_reg_membase (s->code, X86_ESI, X86_ESP, (source_offset-1)<<2, 4);
2285 x86_mov_reg_reg (s->code, X86_ESI, source_reg, 4);
2287 if (dest_reg != X86_EDI) {
2289 x86_mov_reg_membase (s->code, X86_EDI, X86_ESP, (dest_offset-1)<<2, 4);
2291 x86_mov_reg_reg (s->code, X86_EDI, dest_reg, 4);
2294 x86_mov_reg_imm (s->code, X86_ECX, count >> 2);
2296 x86_prefix (s->code, X86_REP_PREFIX);
2297 x86_movsd (s->code);
/* Copy the remaining 0-3 bytes; REP MOVSD left ESI/EDI at the tail. */
2299 switch (count & 3) {
2301 x86_mov_reg_membase (s->code, sreg, X86_ESI, 0, 1);
2302 x86_mov_membase_reg (s->code, X86_EDI, 0, sreg, 1);
2305 x86_mov_reg_membase (s->code, sreg, X86_ESI, 0, 2);
2306 x86_mov_membase_reg (s->code, X86_EDI, 0, sreg, 2);
2309 x86_mov_reg_membase (s->code, sreg, X86_ESI, 0, 2);
2310 x86_mov_membase_reg (s->code, X86_EDI, 0, sreg, 2);
2311 x86_mov_reg_membase (s->code, sreg, X86_ESI, 2, 1);
2312 x86_mov_membase_reg (s->code, X86_EDI, 2, sreg, 1);
/* Drop the spill slots and restore saved ESI/EDI. */
2318 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, spill_pos<<2);
2321 x86_pop_reg (s->code, X86_EDI);
2323 x86_pop_reg (s->code, X86_ESI);
2328 MBCOND (mono_inline_memcpy);
2332 stmt: CPBLK (reg, CPSRC (reg, reg)) {
/* Copy a memory block with a run-time size (CIL cpblk).
 * Non-inline path calls memmove(dest, src, size); inline path uses
 * rep movsd + a byte tail, spilling ESI/EDI/ECX as needed.
 * NOTE(review): several lines (else branches, closing braces) are not
 * visible in this extract; comments describe only what is shown. */
2333 int dest_reg = tree->left->reg1;
2334 int source_reg = tree->right->left->reg1;
2335 int size_reg = tree->right->right->reg1;
2336 int spill_pos = 0, size_offset = 0, dest_offset = 0, source_offset = 0;
2337 int save_esi = FALSE, save_edi = FALSE;
/* Out-of-line path: cdecl call memmove (dest, source, size); caller pops args. */
2339 if (!mono_inline_memcpy) {
2340 x86_push_reg (s->code, size_reg);
2341 x86_push_reg (s->code, source_reg);
2342 x86_push_reg (s->code, dest_reg);
2343 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, memmove);
2344 x86_call_code (s->code, 0);
2345 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
/* Inline path: ESI/EDI must be preserved if live and not already operands. */
2347 if (dest_reg != X86_ESI && source_reg != X86_ESI && size_reg != X86_ESI &&
2348 mono_regset_reg_used (s->rs, X86_ESI))
2350 if (dest_reg != X86_EDI && source_reg != X86_EDI && size_reg != X86_EDI &&
2351 mono_regset_reg_used (s->rs, X86_EDI))
2355 x86_push_reg (s->code, X86_ESI);
2357 x86_push_reg (s->code, X86_EDI);
/* Operands that will be clobbered while loading ESI/EDI/ECX are spilled;
 * offsets are 1-based slot indices into the pushes below. */
2359 if (size_reg == X86_EDI || size_reg == X86_ESI) {
2360 size_offset = ++spill_pos;
2362 if (dest_reg == X86_ECX || dest_reg == X86_ESI) {
2363 dest_offset = ++spill_pos;
2365 if (source_reg == X86_ECX || source_reg == X86_EDI) {
2366 source_offset = ++spill_pos;
2370 x86_push_reg (s->code, source_reg);
2372 x86_push_reg (s->code, dest_reg);
2374 x86_push_reg (s->code, size_reg);
/* Load source into ESI — from the spill slot if it was pushed above. */
2376 if (source_reg != X86_ESI) {
2378 x86_mov_reg_membase (s->code, X86_ESI, X86_ESP, (source_offset-1)<<2, 4);
2380 x86_mov_reg_reg (s->code, X86_ESI, source_reg, 4);
2382 if (dest_reg != X86_EDI) {
2384 x86_mov_reg_membase (s->code, X86_EDI, X86_ESP, (dest_offset-1)<<2, 4);
2386 x86_mov_reg_reg (s->code, X86_EDI, dest_reg, 4);
2388 if (size_reg != X86_ECX) {
2390 x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, (size_offset-1)<<2, 4);
2392 x86_mov_reg_reg (s->code, X86_ECX, size_reg, 4);
/* Keep the byte count; rep movsd moves size/4 dwords. */
2395 x86_push_reg (s->code, X86_ECX);
2396 x86_shift_reg_imm (s->code, X86_SHR, X86_ECX, 2);
2400 // move whole dwords first
2401 x86_prefix (s->code, X86_REP_PREFIX);
2402 x86_movsd (s->code);
2404 x86_pop_reg (s->code, X86_ECX);
2405 x86_alu_reg_imm (s->code, X86_AND, X86_ECX, 3);
2407 // move remaining bytes (if any)
2408 x86_prefix (s->code, X86_REP_PREFIX);
2409 x86_movsb (s->code);
/* Drop spill slots, then restore any saved callee registers. */
2411 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, spill_pos<<2);
2414 x86_pop_reg (s->code, X86_EDI);
2416 x86_pop_reg (s->code, X86_ESI);
2420 stmt: INITBLK (reg, CPSRC (reg, CONST_I4)) {
/* Initialize a memory block of compile-time-constant size (CIL initblk).
 * Inline path replicates the byte value through EAX and uses rep stosd
 * plus an unrolled stosb tail; otherwise calls memset.
 * NOTE(review): declarations of loop counters `i`/`j` and several
 * else/closing-brace lines are not visible in this extract. */
2421 int dest_reg = tree->left->reg1;
2422 int value_reg = tree->right->left->reg1;
2423 int size = tree->right->right->data.i;
2424 int spill_pos = 0, dest_offset = 0, value_offset = 0;
2425 int save_edi = FALSE;
2431 if (mono_inline_memcpy) {
/* Save EDI only when it is live and not one of our operands. */
2433 if (dest_reg != X86_EDI && value_reg != X86_EDI &&
2434 mono_regset_reg_used (s->rs, X86_EDI)) {
2436 x86_push_reg (s->code, X86_EDI);
/* Spill operands that the EAX/EDI loads below would clobber. */
2439 if (dest_reg == X86_ECX || dest_reg == X86_EAX) {
2440 dest_offset = ++spill_pos;
2442 if (value_reg == X86_ECX || value_reg == X86_EDI) {
2443 value_offset = ++spill_pos;
2447 x86_push_reg (s->code, value_reg);
2449 x86_push_reg (s->code, dest_reg);
2451 if (value_reg != X86_EAX) {
2453 x86_mov_reg_membase (s->code, X86_EAX, X86_ESP, (value_offset-1)<<2, 4);
2455 x86_mov_reg_reg (s->code, X86_EAX, value_reg, 4);
2457 if (dest_reg != X86_EDI) {
2459 x86_mov_reg_membase (s->code, X86_EDI, X86_ESP, (dest_offset-1)<<2, 4);
2461 x86_mov_reg_reg (s->code, X86_EDI, dest_reg, 4);
/* Replicate the low byte of EAX into all four bytes: AL -> AX -> EAX. */
2464 x86_widen_reg (s->code, X86_EAX, X86_EAX, FALSE, FALSE);
2465 x86_mov_reg_reg (s->code, X86_EDX, X86_EAX, 4);
2466 x86_shift_reg_imm (s->code, X86_SHL, X86_EAX, 8);
2467 x86_alu_reg_reg (s->code, X86_OR, X86_EAX, X86_EDX);
2468 x86_mov_reg_reg (s->code, X86_EDX, X86_EAX, 4);
2469 x86_shift_reg_imm (s->code, X86_SHL, X86_EAX, 16);
2470 x86_alu_reg_reg (s->code, X86_OR, X86_EAX, X86_EDX);
/* i = size / 4 dwords via rep stosd; j = size & 3 trailing bytes unrolled.
 * (assumed from usage — the computations of i and j are in elided lines) */
2473 x86_mov_reg_imm (s->code, X86_ECX, i);
2475 x86_prefix (s->code, X86_REP_PREFIX);
2476 x86_stosd (s->code);
2479 for (i = 0; i < j; i++)
2480 x86_stosb (s->code);
2482 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, spill_pos<<2);
2485 x86_pop_reg (s->code, X86_EDI);
/* Out-of-line path: cdecl call memset (dest, value, size). */
2488 x86_push_imm (s->code, size);
2489 x86_push_reg (s->code, value_reg);
2490 x86_push_reg (s->code, dest_reg);
2491 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, memset);
2492 x86_call_code (s->code, 0);
2493 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
2496 MBCOND (mono_inline_memcpy);
2500 stmt: INITBLK (reg, CPSRC (reg, reg)) {
/* Initialize a memory block with a run-time size (CIL initblk).
 * Inline path replicates the byte value through EAX and uses
 * rep stosd + rep stosb; otherwise calls memset.
 * NOTE(review): some else/closing-brace lines are elided in this extract. */
2501 int dest_reg = tree->left->reg1;
2502 int value_reg = tree->right->left->reg1;
2503 int size_reg = tree->right->right->reg1;
2504 int spill_pos = 0, size_offset = 0, dest_offset = 0, value_offset = 0;
2505 int save_edi = FALSE;
2507 if (mono_inline_memcpy) {
/* Save EDI when it is live and not one of our operands.
 * FIX: the original tested `size_reg != X86_EDI` twice and never tested
 * value_reg (cf. the CONST_I4 variant above), so a live EDI was not saved
 * when only value_reg happened to be EDI. */
2509 if (dest_reg != X86_EDI && value_reg != X86_EDI && size_reg != X86_EDI &&
2510 mono_regset_reg_used (s->rs, X86_EDI)) {
2512 x86_push_reg (s->code, X86_EDI);
/* Spill operands that the EAX/EDI/ECX loads below would clobber;
 * offsets are 1-based slot indices into the pushes that follow. */
2515 if (size_reg == X86_EDI || size_reg == X86_EAX) {
2516 size_offset = ++spill_pos;
2518 if (dest_reg == X86_ECX || dest_reg == X86_EAX) {
2519 dest_offset = ++spill_pos;
2521 if (value_reg == X86_ECX || value_reg == X86_EDI) {
2522 value_offset = ++spill_pos;
2526 x86_push_reg (s->code, value_reg);
2528 x86_push_reg (s->code, dest_reg);
2530 x86_push_reg (s->code, size_reg);
/* Load value into EAX — from its spill slot if it was pushed above. */
2532 if (value_reg != X86_EAX) {
2534 x86_mov_reg_membase (s->code, X86_EAX, X86_ESP, (value_offset-1)<<2, 4);
2536 x86_mov_reg_reg (s->code, X86_EAX, value_reg, 4);
2538 if (dest_reg != X86_EDI) {
2540 x86_mov_reg_membase (s->code, X86_EDI, X86_ESP, (dest_offset-1)<<2, 4);
2542 x86_mov_reg_reg (s->code, X86_EDI, dest_reg, 4);
2544 if (size_reg != X86_ECX) {
2546 x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, (size_offset-1)<<2, 4);
2548 x86_mov_reg_reg (s->code, X86_ECX, size_reg, 4);
/* Replicate the low byte of EAX into all four bytes: AL -> AX -> EAX. */
2551 x86_widen_reg (s->code, X86_EAX, X86_EAX, FALSE, FALSE);
2552 x86_mov_reg_reg (s->code, X86_EDX, X86_EAX, 4);
2553 x86_shift_reg_imm (s->code, X86_SHL, X86_EAX, 8);
2554 x86_alu_reg_reg (s->code, X86_OR, X86_EAX, X86_EDX);
2555 x86_mov_reg_reg (s->code, X86_EDX, X86_EAX, 4);
2556 x86_shift_reg_imm (s->code, X86_SHL, X86_EAX, 16);
2557 x86_alu_reg_reg (s->code, X86_OR, X86_EAX, X86_EDX);
/* Keep the byte count; rep stosd writes size/4 dwords. */
2559 x86_push_reg (s->code, X86_ECX);
2560 x86_shift_reg_imm (s->code, X86_SHR, X86_ECX, 2);
2563 // init whole dwords first
2564 x86_prefix (s->code, X86_REP_PREFIX);
2565 x86_stosd (s->code);
2567 x86_pop_reg (s->code, X86_ECX);
2568 x86_alu_reg_imm (s->code, X86_AND, X86_ECX, 3);
2570 // init remaining bytes (if any)
2571 x86_prefix (s->code, X86_REP_PREFIX);
2572 x86_stosb (s->code);
/* Drop spill slots, then restore EDI if it was saved. */
2574 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, spill_pos<<2);
2577 x86_pop_reg (s->code, X86_EDI);
/* Out-of-line path: cdecl call memset (dest, value, size). */
2580 x86_push_reg (s->code, size_reg);
2581 x86_push_reg (s->code, value_reg);
2582 x86_push_reg (s->code, dest_reg);
2583 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, memset);
2584 x86_call_code (s->code, 0);
2585 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
/* Fragment of an unconditional-branch rule (header elided in this extract):
 * emit a 32-bit jump to the target basic block, patched later. */
2594 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2595 x86_jump32 (s->code, 0);
/* COMPARE rules: set EFLAGS for a following CBRANCH; the MBCOND cost
 * predicates select register-resident vs stack-resident locals. */
2598 cflags: COMPARE (reg, LDIND_I4 (ADDR_L)) {
/* reg CMP register-allocated local */
2599 int treg = VARINFO (s, tree->right->left->data.i).reg;
2600 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, treg);
2602 MBCOND ((VARINFO (data, tree->right->left->data.i).reg >= 0));
2606 cflags: COMPARE (LDIND_I4 (ADDR_L), CONST_I4) {
/* register-allocated local CMP immediate */
2607 int treg = VARINFO (s, tree->left->left->data.i).reg;
2608 x86_alu_reg_imm (s->code, X86_CMP, treg, tree->right->data.i);
2610 MBCOND ((VARINFO (data, tree->left->left->data.i).reg >= 0));
2614 cflags: COMPARE (LDIND_I4 (ADDR_L), reg) {
/* register-allocated local CMP reg */
2615 int treg = VARINFO (s, tree->left->left->data.i).reg;
2616 x86_alu_reg_reg (s->code, X86_CMP, treg, tree->right->reg1);
2618 MBCOND ((VARINFO (data, tree->left->left->data.i).reg >= 0));
2622 cflags: COMPARE (LDIND_I4 (ADDR_L), CONST_I4) {
/* stack-resident local (EBP-relative) CMP immediate */
2623 int offset = VARINFO (s, tree->left->left->data.i).offset;
2624 x86_alu_membase_imm (s->code, X86_CMP, X86_EBP, offset, tree->right->data.i);
2626 MBCOND ((VARINFO (data, tree->left->left->data.i).reg < 0));
2630 cflags: COMPARE (reg, CONST_I4) {
2631 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
2634 cflags: COMPARE (reg, reg) {
2635 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
2639 stmt: CBRANCH (cflags) {
/* Conditional branch on the flags produced by a COMPARE rule.
 * Emits a patchable 32-bit branch; the TRUE/FALSE argument selects
 * signed vs unsigned condition-code encoding.
 * NOTE(review): the switch case labels (CEE_BLT, CEE_BLT_UN, ...) are in
 * lines elided from this extract; the pairing below is the visible order. */
2640 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
2642 switch (tree->data.bi.cond) {
2644 x86_branch32 (s->code, X86_CC_LT, 0, TRUE);
2647 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
2650 x86_branch32 (s->code, X86_CC_GT, 0, TRUE);
2653 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
2656 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
2659 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
2662 x86_branch32 (s->code, X86_CC_GE, 0, TRUE);
2665 x86_branch32 (s->code, X86_CC_GE, 0, FALSE);
2668 x86_branch32 (s->code, X86_CC_LE, 0, TRUE);
2671 x86_branch32 (s->code, X86_CC_LE, 0, FALSE);
2674 g_assert_not_reached ();
2678 stmt: BRTRUE (LDIND_I4 (ADDR_L)) {
/* Branch if a local is non-zero: test its register when allocated,
 * otherwise compare its EBP-relative stack slot against 0.
 * (the `if (treg >= 0)` selector line is elided in this extract) */
2679 int treg = VARINFO (s, tree->left->left->data.i).reg;
2680 int offset = VARINFO (s, tree->left->left->data.i).offset;
2683 x86_test_reg_reg (s->code, treg, treg);
2685 x86_alu_membase_imm (s->code, X86_CMP, X86_EBP, offset, 0);
2687 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2688 x86_branch32 (s->code, X86_CC_NE, 0, TRUE);
2691 stmt: BRTRUE (reg) {
/* Branch if the register value is non-zero. */
2692 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
2693 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2694 x86_branch32 (s->code, X86_CC_NE, 0, TRUE);
2697 stmt: BRFALSE (LDIND_I4 (ADDR_L)) {
/* Branch if a local is zero — mirror of BRTRUE above. */
2698 int treg = VARINFO (s, tree->left->left->data.i).reg;
2699 int offset = VARINFO (s, tree->left->left->data.i).offset;
2702 x86_test_reg_reg (s->code, treg, treg);
2704 x86_alu_membase_imm (s->code, X86_CMP, X86_EBP, offset, 0);
2706 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2707 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
2709 //{static int cx= 0; printf ("CX1 %5d\n", cx++);}
2712 stmt: BRFALSE (reg) {
/* Branch if the register value is zero. */
2713 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
2714 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bb);
2715 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
/* BREAK rule fragment (header elided): emit int3. */
2719 x86_breakpoint (s->code);
/* RETV fragment (header elided): move the return value into EAX and,
 * unless this is the last instruction, jump to the shared epilog. */
2723 if (tree->left->reg1 != X86_EAX)
2724 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
2726 if (!tree->last_instr) {
2727 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
2728 x86_jump32 (s->code, 0);
/* void-RET fragment: jump to the shared epilog unless last instruction. */
2733 if (!tree->last_instr) {
2734 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
2735 x86_jump32 (s->code, 0);
2739 stmt: ARG_I4 (LDIND_I4 (addr)) {
/* Push a 32-bit call argument loaded through a full addressing mode;
 * `pad` is alignment padding for the call frame (its use is in elided
 * lines). Case labels for the amode switch are also elided here. */
2740 MBTree *at = tree->left->left;
2741 int pad = tree->data.arg_info.pad;
2745 switch (at->data.ainfo.amode) {
2748 x86_push_mem (s->code, at->data.ainfo.offset);
2752 x86_push_membase (s->code, at->data.ainfo.basereg, at->data.ainfo.offset);
2755 x86_push_memindex (s->code, X86_NOBASEREG, at->data.ainfo.offset,
2756 at->data.ainfo.indexreg, at->data.ainfo.shift);
2759 x86_push_memindex (s->code, at->data.ainfo.basereg,
2760 at->data.ainfo.offset, at->data.ainfo.indexreg,
2761 at->data.ainfo.shift);
2766 stmt: ARG_I4 (LDIND_I4 (ADDR_L)) {
/* Push a register-allocated local directly. */
2767 int treg = VARINFO (s, tree->left->left->data.i).reg;
2768 int pad = tree->data.arg_info.pad;
2771 x86_push_reg (s->code, treg);
2773 MBCOND ((VARINFO (data, tree->left->left->data.i).reg >= 0));
2777 stmt: ARG_I4 (reg) {
/* Push a computed value. */
2778 int pad = tree->data.arg_info.pad;
2781 x86_push_reg (s->code, tree->left->reg1);
2784 stmt: ARG_I4 (ADDR_G) {
/* Push a global address as an immediate. */
2785 int pad = tree->data.arg_info.pad;
2788 x86_push_imm (s->code, tree->left->data.p);
2791 stmt: ARG_I4 (CONST_I4) "MB_USE_OPT1(0)" {
/* Push a constant as an immediate. */
2792 int pad = tree->data.arg_info.pad;
2795 x86_push_imm (s->code, tree->left->data.i);
/* Fragment of a THIS rule (header elided): debug-print the this-pointer reg. */
2799 PRINT_REG ("THIS", tree->reg1);
2802 reg: CHECKTHIS (reg) {
2803 /* try to access the vtable - this will raise an exception
2804 * if the object is NULL */
2805 x86_alu_membase_imm (s->code, X86_CMP, tree->left->reg1, 0, 0);
2806 if (tree->reg1 != tree->left->reg1)
2807 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
2810 stmt: CHECKTHIS (reg) {
/* Statement form of the null check above: touch [reg+0] to fault on NULL. */
2811 x86_alu_membase_imm (s->code, X86_CMP, tree->left->reg1, 0, 0);
/* Tail-call (JMP) rule fragment (header and `pos` bookkeeping elided):
 * restore callee-saved registers from their save slots, tear down the
 * frame, and jump directly to the target method. */
2818 /* restore callee saved registers */
2819 if (mono_regset_reg_used (s->rs, X86_EBX)) {
2820 x86_mov_reg_membase (s->code, X86_EBX, X86_EBP, pos, 4);
2823 if (mono_regset_reg_used (s->rs, X86_EDI)) {
2824 x86_mov_reg_membase (s->code, X86_EDI, X86_EBP, pos, 4);
2827 if (mono_regset_reg_used (s->rs, X86_ESI)) {
2828 x86_mov_reg_membase (s->code, X86_ESI, X86_EBP, pos, 4);
2831 /* restore ESP/EBP */
2832 x86_leave (s->code);
2834 /* jump to the method */
2835 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->data.p);
2836 x86_jump32 (s->code, 0);
2841 reg: CALL_I4 (this, reg) {
/* Indirect call through a register, returning int in EAX.
 * The repeated guard picks a scratch `treg` that collides with neither
 * operand (the `treg = ...` reassignment lines are elided here);
 * if no candidate is free, it is a hard internal error. */
2843 int lreg = tree->left->reg1;
2844 int rreg = tree->right->reg1;
2846 if (lreg == treg || rreg == treg)
2848 if (lreg == treg || rreg == treg)
2850 if (lreg == treg || rreg == treg)
2851 mono_assert_not_reached ();
2855 x86_call_reg (s->code, rreg);
2859 mono_assert (tree->reg1 == X86_EAX);
2862 reg: CALL_I4 (this, ADDR_G) {
/* Direct call to a known address; X86_REMOTING_CHECK guards a
 * transparent-proxy slow path whose body is elided in this extract. */
2863 int lreg = tree->left->reg1;
2869 if (X86_REMOTING_CHECK)
2876 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
2877 x86_call_code (s->code, 0);
2882 mono_assert (tree->reg1 == X86_EAX);
2885 reg: LDVIRTFTN (reg, INTF_ADDR) {
2886 /* we cant return the value in the vtable, because it can be
2887 * a magic trampoline, and we cant pass that to the outside world */
/* Call the mono_ldintftn helper (obj, interface_id), preserving the
 * caller-saved EAX/ECX/EDX around the call; result lands in EAX and is
 * moved to the destination register if different. */
2889 if (tree->reg1 != X86_EAX)
2890 x86_push_reg (s->code, X86_EAX);
2891 x86_push_reg (s->code, X86_ECX);
2892 x86_push_reg (s->code, X86_EDX);
2894 x86_push_imm (s->code, tree->right->data.m->klass->interface_id);
2895 x86_push_reg (s->code, tree->left->reg1);
2896 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldintftn);
2897 x86_call_code (s->code, 0);
2898 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
2900 x86_pop_reg (s->code, X86_EDX);
2901 x86_pop_reg (s->code, X86_ECX);
2902 if (tree->reg1 != X86_EAX) {
2903 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
2904 x86_pop_reg (s->code, X86_EAX);
2908 reg: CALL_I4 (this, INTF_ADDR) {
/* Interface call: obj -> vtable -> interface_offsets[interface_id] ->
 * call slot. lreg is clobbered along the way. */
2909 int lreg = tree->left->reg1;
2917 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2918 x86_mov_reg_membase (s->code, lreg, lreg,
2919 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
2920 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
2921 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
2925 mono_assert (tree->reg1 == X86_EAX);
2928 reg: LDVIRTFTN (reg, VFUNC_ADDR) {
2929 /* we cant return the value in the vtable, because it can be
2930 * a magic trampoline, and we cant pass that to the outside world */
/* Same save/call/restore pattern as the INTF case, but the helper is
 * mono_ldvirtftn (obj, slot). */
2932 if (tree->reg1 != X86_EAX)
2933 x86_push_reg (s->code, X86_EAX);
2934 x86_push_reg (s->code, X86_ECX);
2935 x86_push_reg (s->code, X86_EDX);
2937 x86_push_imm (s->code, tree->right->data.m->slot);
2938 x86_push_reg (s->code, tree->left->reg1);
2939 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldvirtftn);
2940 x86_call_code (s->code, 0);
2941 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
2943 x86_pop_reg (s->code, X86_EDX);
2944 x86_pop_reg (s->code, X86_ECX);
2945 if (tree->reg1 != X86_EAX) {
2946 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
2947 x86_pop_reg (s->code, X86_EAX);
/* LDFTN rule fragment (header elided): call mono_ldftn (method) with the
 * same caller-saved register save/restore discipline. */
2952 if (tree->reg1 != X86_EAX)
2953 x86_push_reg (s->code, X86_EAX);
2954 x86_push_reg (s->code, X86_ECX);
2955 x86_push_reg (s->code, X86_EDX);
2957 x86_push_imm (s->code, tree->data.m);
2958 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_ldftn);
2959 x86_call_code (s->code, 0);
2960 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer));
2962 x86_pop_reg (s->code, X86_EDX);
2963 x86_pop_reg (s->code, X86_ECX);
2964 if (tree->reg1 != X86_EAX) {
2965 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
2966 x86_pop_reg (s->code, X86_EAX);
2971 reg: CALL_I4 (this, VFUNC_ADDR) {
/* Virtual call: obj -> vtable, then call vtable slot. */
2972 int lreg = tree->left->reg1;
2980 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2981 x86_call_virtual (s->code, lreg,
2982 G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
2986 mono_assert (tree->reg1 == X86_EAX);
2989 stmt: CALL_VOID (this, ADDR_G) {
/* void direct call; X86_REMOTING_CHECK guards a transparent-proxy slow
 * path whose body is elided in this extract. */
2990 int lreg = tree->left->reg1;
2996 if (X86_REMOTING_CHECK)
3003 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
3004 x86_call_code (s->code, 0);
3010 stmt: CALL_VOID (this, reg) {
/* void indirect call; the repeated guard picks a scratch treg distinct
 * from both operands (the `treg = ...` reassignments are in elided lines). */
3012 int lreg = tree->left->reg1;
3013 int rreg = tree->right->reg1;
3015 if (lreg == treg || rreg == treg)
3017 if (lreg == treg || rreg == treg)
3019 if (lreg == treg || rreg == treg)
3020 mono_assert_not_reached ();
3024 x86_call_reg (s->code, tree->right->reg1);
3029 stmt: CALL_VOID (this, INTF_ADDR) {
/* void interface call: obj -> vtable -> interface_offsets[id] -> slot. */
3030 int lreg = tree->left->reg1;
3038 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3039 x86_mov_reg_membase (s->code, lreg, lreg,
3040 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
3041 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
3042 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
3047 stmt: CALL_VOID (this, VFUNC_ADDR) {
/* void virtual call: obj -> vtable, call vtable slot. */
3048 int lreg = tree->left->reg1;
3056 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3057 x86_call_virtual (s->code, lreg,
3058 G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
3063 stmt: SWITCH (reg) {
/* CIL switch: jt[0] is the case count; bounds-check the selector, fall
 * through to the default target when out of range, otherwise jump via
 * the inline jump table. The `offset` declaration is in an elided line. */
3065 guint32 *jt = (guint32 *)tree->data.p;
3067 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, jt [0]);
/* 6 = length of the branch32 instruction; compute its end address. */
3068 offset = 6 + (guint32)s->code;
3069 x86_branch32 (s->code, X86_CC_GE, jt [jt [0] + 1] - offset, FALSE);
3071 x86_mov_reg_memindex (s->code, X86_EAX, X86_NOBASEREG,
3072 tree->data.i + 4, tree->left->reg1, 2, 4);
3073 x86_jump_reg (s->code, X86_EAX);
/* 32-bit narrowing/widening conversions via movsx/movzx. */
3080 reg: CONV_I1 (lreg) {
3081 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, FALSE);
3084 reg: CONV_U1 (lreg) {
3085 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, FALSE);
3088 reg: CONV_I2 (lreg) {
3089 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, TRUE, TRUE);
3092 reg: CONV_U2 (lreg) {
3093 x86_widen_reg (s->code, tree->reg1, tree->left->reg1, FALSE, TRUE);
3096 reg: CONV_I4 (lreg) {
/* i8 -> i4 truncation: just take the low word. */
3097 if (tree->reg1 != tree->left->reg1)
3098 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3101 reg: CONV_U4 (lreg) {
3102 if (tree->reg1 != tree->left->reg1)
3103 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3106 reg: CONV_OVF_I4 (lreg) {
/* i8 -> i4 with overflow check: the high word must be the sign
 * extension of the low word. */
3107 guint8 *br [3], *label [1];
3110 * Valid ints: 0xffffffff:0x80000000 to 0x00000000:0x7fffffff
3112 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
3114 /* If the low word top bit is set, see if we are negative */
3115 br [0] = s->code; x86_branch8 (s->code, X86_CC_LT, 0, TRUE);
3116 /* We are not negative (no top bit set), check for our top word to be zero */
3117 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
3118 br [1] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, TRUE);
3119 label [0] = s->code;
3121 /* throw exception */
3122 x86_push_imm (s->code, "OverflowException");
3123 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS,
3124 arch_get_throw_exception_by_name ());
3125 x86_call_code (s->code, 0);
3127 x86_patch (br [0], s->code);
3128 /* our top bit is set, check that top word is 0xffffffff */
3129 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg2, 0xffffffff);
3131 x86_patch (br [1], s->code);
3132 /* nope, emit exception */
3133 br [2] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3134 x86_patch (br [2], label [0]);
3136 if (tree->reg1 != tree->left->reg1)
3137 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3140 reg: CONV_OVF_U4 (lreg) {
3141 /* Keep in sync with CONV_OVF_I4_UN below, they are the same on 32-bit machines */
3142 /* top word must be 0 */
3143 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
3144 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException");
3145 if (tree->reg1 != tree->left->reg1)
3146 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3149 reg: CONV_OVF_I4_UN (lreg) {
3150 /* Keep in sync with CONV_OVF_U4 above, they are the same on 32-bit machines */
3151 /* top word must be 0 */
3152 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
3153 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException");
3154 if (tree->reg1 != tree->left->reg1)
3155 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* CONST_I8 rule fragment (header elided): load the two halves of the
 * 64-bit constant stored in data.p into the register pair. */
3161 x86_mov_reg_imm (s->code, tree->reg1, *((gint32 *)&tree->data.p));
3162 x86_mov_reg_imm (s->code, tree->reg2, *((gint32 *)&tree->data.p + 1));
3165 lreg: CONV_I8 (CONST_I4) {
/* Sign-extend a constant at compile time: high word is 0 or -1. */
3166 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
3168 if (tree->left->data.i >= 0)
3169 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3171 x86_mov_reg_imm (s->code, tree->reg2, -1);
3174 lreg: CONV_I8 (reg) {
/* Sign-extend a register: zero the high word, then overwrite it with -1
 * when the value is negative, skipping exactly the 5-byte mov imm.
 * (declaration/assignment of `i1` is in elided lines) */
3177 if (tree->reg1 != tree->left->reg1)
3178 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3180 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3181 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 0);
3182 x86_branch8 (s->code, X86_CC_GE, 5, TRUE);
3184 x86_mov_reg_imm (s->code, tree->reg2, -1);
3185 mono_assert ((s->code - i1) == 5);
3188 lreg: CONV_U8 (CONST_I4) 1 {
/* Zero-extend a constant: high word is always 0. */
3189 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
3190 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3193 lreg: CONV_U8 (reg) {
/* Zero-extend a register. */
3194 if (tree->reg1 != tree->left->reg1)
3195 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3196 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3199 lreg: CONV_OVF_U8 (CONST_I4) {
/* i4 -> u8 with overflow check: negative constants overflow; the throw
 * is emitted unconditionally at compile time when data.i < 0. */
3200 if (tree->left->data.i < 0){
3201 x86_push_imm (s->code, "OverflowException");
3202 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS,
3203 arch_get_throw_exception_by_name ());
3204 x86_call_code (s->code, 0);
3206 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
3207 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3211 lreg: CONV_OVF_I8_UN (CONST_I4) {
/* u4 -> i8 never overflows: zero-extend. */
3212 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
3213 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3216 lreg: CONV_OVF_U8 (reg) {
/* i4 -> u8 with overflow check: a negative input overflows.
 * FIX: the sign-bit test used 0x8000000 (bit 27); the sign bit of a
 * 32-bit value is 0x80000000 — cf. the CONST_I4 variant above, which
 * checks `data.i < 0`. */
3217 x86_test_reg_imm (s->code, tree->left->reg1, 0x80000000);
3218 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_EQ, TRUE, "OverflowException");
/* Non-negative: zero-extend into the register pair. */
3220 if (tree->reg1 != tree->left->reg1)
3221 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3222 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3225 lreg: CONV_OVF_I8_UN (reg) {
3226 /* Convert uint value into int64, we pass everything */
/* u4 -> i8 can never overflow: zero-extend into the register pair. */
3227 if (tree->reg1 != tree->left->reg1)
3228 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3229 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
3232 stmt: STIND_I8 (addr, lreg) {
/* Store a 64-bit value as two 32-bit moves (low word at offset, high
 * word at offset+4) for each addressing mode. The switch case labels
 * (AMImmediate/AMBase/AMIndex/AMBaseIndex) are in elided lines. */
3234 switch (tree->left->data.ainfo.amode) {
3237 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 4);
3238 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset + 4, tree->right->reg2, 4);
3242 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
3243 tree->left->data.ainfo.offset, tree->right->reg1, 4);
3244 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
3245 tree->left->data.ainfo.offset + 4, tree->right->reg2, 4);
3248 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
3249 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
3250 tree->right->reg1, 4);
3251 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset + 4,
3252 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
3253 tree->right->reg2, 4);
3256 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
3257 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
3258 tree->right->reg1, 4);
3259 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset + 4,
3260 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
3261 tree->right->reg2, 4);
3266 stmt: REMOTE_STIND_I8 (reg, lreg) {
/* Store a 64-bit field of an object that may be a transparent proxy.
 * First compare the object's class against transparent_proxy_class
 * (reg1 is borrowed via push/pop to hold the vtable/class pointer).
 * Declarations of `br`/`offset` are in elided lines. */
3270 x86_push_reg (s->code, tree->right->reg1);
3271 x86_mov_reg_membase (s->code, tree->right->reg1, tree->left->reg1, 0, 4);
3272 x86_alu_membase_imm (s->code, X86_CMP, tree->right->reg1, 0, ((int)mono_defaults.transparent_proxy_class));
3273 x86_pop_reg (s->code, tree->right->reg1);
3275 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3277 /* this is a transparent proxy - remote the call */
3279 /* save value to stack */
3280 x86_push_reg (s->code, tree->right->reg2);
3281 x86_push_reg (s->code, tree->right->reg1);
/* mono_store_remote_field (obj, klass, field, &value); value is the
 * two words just pushed, addressed through ESP. */
3283 x86_push_reg (s->code, X86_ESP);
3284 x86_push_imm (s->code, tree->data.fi.field);
3285 x86_push_imm (s->code, tree->data.fi.klass);
3286 x86_push_reg (s->code, tree->left->reg1);
3287 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
3288 x86_call_code (s->code, 0);
3289 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 24);
3291 br [1] = s->code; x86_jump8 (s->code, 0);
/* Not a proxy: plain two-word store at the field offset (adjusted for
 * the MonoObject header when the class is a valuetype). */
3293 x86_patch (br [0], s->code);
3294 offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
3295 tree->data.fi.field->offset;
3296 x86_mov_membase_reg (s->code, tree->left->reg1, offset, tree->right->reg1, 4);
3297 x86_mov_membase_reg (s->code, tree->left->reg1, offset + 4, tree->right->reg2, 4);
3299 x86_patch (br [1], s->code);
3303 # an addr can use two address registers (base and index register). They must take care
3304 # that we do not override them (thus the use of x86_lea)
3305 lreg: LDIND_I8 (addr) {
/* Load a 64-bit value as two 32-bit loads. For register-based modes the
 * effective address is first materialized into reg2 with lea so the
 * base/index registers are not clobbered before the second load; the
 * low word then goes to reg1 and the high word overwrites reg2 last.
 * Switch case labels are in elided lines. */
3307 switch (tree->left->data.ainfo.amode) {
3310 x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4);
3311 x86_mov_reg_mem (s->code, tree->reg2, tree->left->data.ainfo.offset + 4, 4);
3315 x86_lea_membase (s->code, tree->reg2, tree->left->data.ainfo.basereg,
3316 tree->left->data.ainfo.offset);
3317 x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4);
3318 x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4);
3321 x86_lea_memindex (s->code, tree->reg2, X86_NOBASEREG, tree->left->data.ainfo.offset,
3322 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift);
3323 x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4);
3324 x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4);
3327 x86_lea_memindex (s->code, tree->reg2, tree->left->data.ainfo.basereg,
3328 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
3329 tree->left->data.ainfo.shift);
3330 x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4);
3331 x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4);
3334 PRINT_REG ("LDIND_I8_0", tree->reg1);
3335 PRINT_REG ("LDIND_I8_1", tree->reg2);
3338 lreg: SHR (lreg, CONST_I4) {
/* 64-bit arithmetic right shift by a constant. For counts < 32 use
 * shrd + sar; for 32..63 the low word comes from the high word and the
 * high word becomes the sign fill. */
3339 if (tree->right->data.i < 32) {
3340 x86_shrd_reg_imm (s->code, tree->left->reg1, tree->left->reg2, tree->right->data.i);
3341 x86_shift_reg_imm (s->code, X86_SAR, tree->left->reg2, tree->right->data.i);
3342 if (tree->reg1 != tree->left->reg1)
3343 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3344 if (tree->reg2 != tree->left->reg2)
3345 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
3346 } else if (tree->right->data.i < 64) {
3347 if (tree->reg1 != tree->left->reg2)
3348 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg2, 4);
3349 if (tree->reg2 != tree->left->reg2)
3350 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
3351 x86_shift_reg_imm (s->code, X86_SAR, tree->reg2, 31);
3352 x86_shift_reg_imm (s->code, X86_SAR, tree->reg1, (tree->right->data.i - 32));
3353 } /* else unspecified result */
3356 lreg: SHR_UN (lreg, CONST_I4) {
/* 64-bit logical right shift by a constant: same structure as SHR, but
 * the high word is zeroed for counts >= 32. */
3357 if (tree->right->data.i < 32) {
3358 x86_shrd_reg_imm (s->code, tree->left->reg1, tree->left->reg2, tree->right->data.i);
3359 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg2, tree->right->data.i);
3360 if (tree->reg1 != tree->left->reg1)
3361 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3362 if (tree->reg2 != tree->left->reg2)
3363 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
3364 } else if (tree->right->data.i < 64) {
3365 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg2, 4);
3366 x86_shift_reg_imm (s->code, X86_SHR, tree->reg1, (tree->right->data.i - 32));
3367 x86_mov_reg_imm (s->code, tree->reg2, 0);
3368 } /* else unspecified result */
3371 lreg: SHR (lreg, reg) {
/* 64-bit arithmetic right shift by a variable count in CL. x86 shifts
 * use the count mod 32, so after shrd/sar a fixup branch on bit 5 of
 * ECX moves the high word down and sign-fills the high word.
 * NOTE(review): the fixup shifts tree->reg2 (the destination pair)
 * before MOVE_LREG copies left->reg1/reg2 into it — verify against the
 * MOVE_LREG definition that this is not a lost update. */
3374 if (tree->right->reg1 != X86_ECX)
3375 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
3377 x86_shrd_reg (s->code, tree->left->reg1, tree->left->reg2);
3378 x86_shift_reg (s->code, X86_SAR, tree->left->reg2);
3379 x86_test_reg_imm (s->code, X86_ECX, 32);
3380 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
3381 x86_mov_reg_reg (s->code, tree->left->reg1, tree->left->reg2, 4);
3382 x86_shift_reg_imm (s->code, X86_SAR, tree->reg2, 31);
3383 x86_patch (br [0], s->code);
3385 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3388 lreg: SHR_UN (lreg, reg) {
/* 64-bit logical right shift by a variable count in CL; same fixup
 * structure as SHR above.
 * NOTE(review): the count>=32 fixup does SHR reg2,31 rather than
 * zeroing the high word — confirm intended semantics upstream. */
3391 if (tree->right->reg1 != X86_ECX)
3392 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
3394 x86_shrd_reg (s->code, tree->left->reg1, tree->left->reg2);
3395 x86_shift_reg (s->code, X86_SHR, tree->left->reg2);
3396 x86_test_reg_imm (s->code, X86_ECX, 32);
3397 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
3398 x86_mov_reg_reg (s->code, tree->left->reg1, tree->left->reg2, 4);
3399 x86_shift_reg_imm (s->code, X86_SHR, tree->reg2, 31);
3400 x86_patch (br [0], s->code);
3402 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3405 lreg: SHL (lreg, CONST_I4) {
3406 if (tree->right->data.i < 32) {
3407 x86_shld_reg_imm (s->code, tree->left->reg2, tree->left->reg1, tree->right->data.i);
3408 x86_shift_reg_imm (s->code, X86_SHL, tree->left->reg1, tree->right->data.i);
3409 if (tree->reg1 != tree->left->reg1)
3410 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
3411 if (tree->reg2 != tree->left->reg2)
3412 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
3413 } else if (tree->right->data.i < 64) {
3414 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg1, 4);
3415 x86_shift_reg_imm (s->code, X86_SHL, tree->reg2, (tree->right->data.i - 32));
3416 x86_alu_reg_reg (s->code, X86_XOR, tree->reg1, tree->reg1);
3417 } /* else unspecified result */
3420 lreg: SHL (lreg, reg) {
3423 if (tree->right->reg1 != X86_ECX)
3424 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
3426 x86_shld_reg (s->code, tree->left->reg2, tree->left->reg1);
3427 x86_shift_reg (s->code, X86_SHL, tree->left->reg1);
3428 x86_test_reg_imm (s->code, X86_ECX, 32);
3429 br [0] = s->code; x86_branch8 (s->code, X86_CC_EQ, 0, FALSE);
3430 x86_mov_reg_reg (s->code, tree->left->reg2, tree->left->reg1, 4);
3431 x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->left->reg1);
3432 x86_patch (br [0], s->code);
3434 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* ---- 64-bit (lreg) arithmetic and bitwise rules ----
 * Add/sub operate on the low words first, then propagate the carry or
 * borrow into the high words with ADC/SBB.  The _OVF variants test the
 * flags left by the high-word op and raise OverflowException.
 * Results are moved into the rule's destination pair via MOVE_LREG. */
3437 lreg: ADD (lreg, lreg) {
3438 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
3439 x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2);
3441 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* signed overflow: OF set after the high-word ADC => throw */
3444 lreg: ADD_OVF (lreg, lreg) {
3445 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
3446 x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2);
3447 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
3449 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* unsigned overflow: CF set after the high-word ADC => throw */
3452 lreg: ADD_OVF_UN (lreg, lreg) {
3453 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
3454 x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2);
3455 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
3457 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3460 lreg: SUB (lreg, lreg) {
3461 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
3462 x86_alu_reg_reg (s->code, X86_SBB, tree->left->reg2, tree->right->reg2);
3464 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3467 lreg: SUB_OVF (lreg, lreg) {
3468 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
3469 x86_alu_reg_reg (s->code, X86_SBB, tree->left->reg2, tree->right->reg2);
3470 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NO, TRUE, "OverflowException");
3472 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3475 lreg: SUB_OVF_UN (lreg, lreg) {
3476 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
3477 x86_alu_reg_reg (s->code, X86_SBB, tree->left->reg2, tree->right->reg2);
3478 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NC, FALSE, "OverflowException");
3480 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* bitwise ops need no carry propagation: plain word-wise operation */
3483 lreg: AND (lreg, lreg) {
3484 x86_alu_reg_reg (s->code, X86_AND, tree->left->reg1, tree->right->reg1);
3485 x86_alu_reg_reg (s->code, X86_AND, tree->left->reg2, tree->right->reg2);
3487 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3490 lreg: OR (lreg, lreg) {
3491 x86_alu_reg_reg (s->code, X86_OR, tree->left->reg1, tree->right->reg1);
3492 x86_alu_reg_reg (s->code, X86_OR, tree->left->reg2, tree->right->reg2);
3494 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3497 lreg: XOR (lreg, lreg) {
3498 x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->right->reg1);
3499 x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg2, tree->right->reg2);
3501 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
/* 64-bit negate: -x == (NEG low; ADC high, 0; NEG high).
 * NOTE(review): the rule header (presumably "lreg: NEG (lreg) {") is
 * not visible in this extract. */
3505 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3507 x86_neg_reg (s->code, tree->reg1);
3508 x86_alu_reg_imm (s->code, X86_ADC, tree->reg2, 0);
3509 x86_neg_reg (s->code, tree->reg2);
/* 64-bit bitwise NOT: complement both words.
 * NOTE(review): rule header ("lreg: NOT (lreg) {"?) elided here too. */
3513 MOVE_LREG (tree->reg1, tree->reg2, tree->left->reg1, tree->left->reg2);
3515 x86_not_reg (s->code, tree->reg1);
3516 x86_not_reg (s->code, tree->reg2);
/* ---- 64-bit multiply/divide/remainder via C runtime helpers ----
 * These operations are too large to inline, so the operands are pushed
 * (high word first, so they sit in memory as little-endian 64-bit
 * values) and a helper (mono_llmult, mono_lldiv, ...) is called with
 * cdecl convention; the 64-bit result comes back in EAX:EDX, which the
 * register allocator is asserted to have chosen as the destination.
 * ECX is caller-saved around the call if the regset uses it. */
3519 lreg: MUL (lreg, lreg) {
3520 if (mono_regset_reg_used (s->rs, X86_ECX))
3521 x86_push_reg (s->code, X86_ECX);
3523 x86_push_reg (s->code, tree->right->reg2);
3524 x86_push_reg (s->code, tree->right->reg1);
3525 x86_push_reg (s->code, tree->left->reg2);
3526 x86_push_reg (s->code, tree->left->reg1);
3527 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llmult);
3528 x86_call_code (s->code, 0);
/* pop the four 4-byte arguments */
3529 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3531 if (mono_regset_reg_used (s->rs, X86_ECX))
3532 x86_pop_reg (s->code, X86_ECX);
3534 mono_assert (tree->reg1 == X86_EAX &&
3535 tree->reg2 == X86_EDX);
/* overflow-checking multiply: the helper stores a MonoException* (or
 * NULL) through an extra out-pointer; non-NULL => throw it */
3538 lreg: MUL_OVF (lreg, lreg) {
3539 if (mono_regset_reg_used (s->rs, X86_ECX))
3540 x86_push_reg (s->code, X86_ECX);
3542 x86_push_reg (s->code, tree->right->reg2);
3543 x86_push_reg (s->code, tree->right->reg1);
3544 x86_push_reg (s->code, tree->left->reg2);
3545 x86_push_reg (s->code, tree->left->reg1);
3546 /* pass a pointer to store the resulting exception -
3547 * ugly, but it works */
3548 x86_push_reg (s->code, X86_ESP);
3549 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llmult_ovf);
3550 x86_call_code (s->code, 0);
/* fetch the exception slot the helper wrote, then drop all 5 args */
3551 x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, 4, 4);
3552 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
3553 x86_alu_reg_imm (s->code, X86_CMP, X86_ECX, 0);
3555 /* cond. emit exception */
/* branch over the throw sequence (hard-coded 7-byte displacement) */
3556 x86_branch8 (s->code, X86_CC_EQ, 7, FALSE);
3557 x86_push_reg (s->code, X86_ECX);
3558 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, arch_get_throw_exception ());
3559 x86_call_code (s->code, 0);
3561 if (mono_regset_reg_used (s->rs, X86_ECX))
3562 x86_pop_reg (s->code, X86_ECX);
3564 mono_assert (tree->reg1 == X86_EAX &&
3565 tree->reg2 == X86_EDX);
/* unsigned overflow-checking multiply: same shape, different helper */
3568 lreg: MUL_OVF_UN (lreg, lreg) {
3569 if (mono_regset_reg_used (s->rs, X86_ECX))
3570 x86_push_reg (s->code, X86_ECX);
3572 x86_push_reg (s->code, tree->right->reg2);
3573 x86_push_reg (s->code, tree->right->reg1);
3574 x86_push_reg (s->code, tree->left->reg2);
3575 x86_push_reg (s->code, tree->left->reg1);
3576 /* pass a pointer to store the resulting exception -
3577 * ugly, but it works */
3578 x86_push_reg (s->code, X86_ESP);
3579 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llmult_ovf_un);
3580 x86_call_code (s->code, 0);
3581 x86_mov_reg_membase (s->code, X86_ECX, X86_ESP, 4, 4);
3582 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
3583 x86_alu_reg_imm (s->code, X86_CMP, X86_ECX, 0);
3585 /* cond. emit exception */
3586 x86_branch8 (s->code, X86_CC_EQ, 7, FALSE);
3587 x86_push_reg (s->code, X86_ECX);
3588 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, arch_get_throw_exception ());
3589 x86_call_code (s->code, 0);
3591 if (mono_regset_reg_used (s->rs, X86_ECX))
3592 x86_pop_reg (s->code, X86_ECX);
3594 mono_assert (tree->reg1 == X86_EAX &&
3595 tree->reg2 == X86_EDX);
/* signed 64-bit division: mono_lldiv(a, b) */
3598 lreg: DIV (lreg, lreg) {
3599 if (mono_regset_reg_used (s->rs, X86_ECX))
3600 x86_push_reg (s->code, X86_ECX);
3602 x86_push_reg (s->code, tree->right->reg2);
3603 x86_push_reg (s->code, tree->right->reg1);
3604 x86_push_reg (s->code, tree->left->reg2);
3605 x86_push_reg (s->code, tree->left->reg1);
3606 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_lldiv);
3607 x86_call_code (s->code, 0);
3608 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3610 if (mono_regset_reg_used (s->rs, X86_ECX))
3611 x86_pop_reg (s->code, X86_ECX);
3613 mono_assert (tree->reg1 == X86_EAX &&
3614 tree->reg2 == X86_EDX);
/* signed 64-bit remainder: mono_llrem(a, b) */
3617 lreg: REM (lreg, lreg) {
3618 if (mono_regset_reg_used (s->rs, X86_ECX))
3619 x86_push_reg (s->code, X86_ECX);
3621 x86_push_reg (s->code, tree->right->reg2);
3622 x86_push_reg (s->code, tree->right->reg1);
3623 x86_push_reg (s->code, tree->left->reg2);
3624 x86_push_reg (s->code, tree->left->reg1);
3625 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llrem);
3626 x86_call_code (s->code, 0);
3627 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3629 if (mono_regset_reg_used (s->rs, X86_ECX))
3630 x86_pop_reg (s->code, X86_ECX);
3632 mono_assert (tree->reg1 == X86_EAX &&
3633 tree->reg2 == X86_EDX);
/* unsigned 64-bit division: mono_lldiv_un(a, b) */
3636 lreg: DIV_UN (lreg, lreg) {
3637 if (mono_regset_reg_used (s->rs, X86_ECX))
3638 x86_push_reg (s->code, X86_ECX);
3640 x86_push_reg (s->code, tree->right->reg2);
3641 x86_push_reg (s->code, tree->right->reg1);
3642 x86_push_reg (s->code, tree->left->reg2);
3643 x86_push_reg (s->code, tree->left->reg1);
3644 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_lldiv_un);
3645 x86_call_code (s->code, 0);
3646 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3648 if (mono_regset_reg_used (s->rs, X86_ECX))
3649 x86_pop_reg (s->code, X86_ECX);
3651 mono_assert (tree->reg1 == X86_EAX &&
3652 tree->reg2 == X86_EDX);
/* unsigned 64-bit remainder: mono_llrem_un(a, b) */
3655 lreg: REM_UN (lreg, lreg) {
3656 if (mono_regset_reg_used (s->rs, X86_ECX))
3657 x86_push_reg (s->code, X86_ECX);
3659 x86_push_reg (s->code, tree->right->reg2);
3660 x86_push_reg (s->code, tree->right->reg1);
3661 x86_push_reg (s->code, tree->left->reg2);
3662 x86_push_reg (s->code, tree->left->reg1);
3663 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_llrem_un);
3664 x86_call_code (s->code, 0);
3665 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
3667 if (mono_regset_reg_used (s->rs, X86_ECX))
3668 x86_pop_reg (s->code, X86_ECX);
3670 mono_assert (tree->reg1 == X86_EAX &&
3671 tree->reg2 == X86_EDX);
/* ---- 64-bit call / return / argument rules ----
 * All CALL_I8 variants assert that the destination pair is EAX:EDX,
 * the cdecl 64-bit return registers.  Several lines (the bodies that
 * shuffle 'treg', the this-pointer setup, and closing braces) are not
 * visible in this extract. */
3674 lreg: CALL_I8 (this, reg) {
3676 int lreg = tree->left->reg1;
3677 int rreg = tree->right->reg1;
/* pick a scratch reg that clashes with neither operand; the moves
 * between these checks are elided in this extract */
3679 if (lreg == treg || rreg == treg)
3681 if (lreg == treg || rreg == treg)
3683 if (lreg == treg || rreg == treg)
3684 mono_assert_not_reached ();
/* indirect call through the register holding the target address */
3688 x86_call_reg (s->code, rreg);
3692 mono_assert (tree->reg1 == X86_EAX);
3693 mono_assert (tree->reg2 == X86_EDX);
/* direct call to a known absolute address (patched at link time) */
3696 lreg: CALL_I8 (this, ADDR_G) {
3697 int lreg = tree->left->reg1;
3703 if (X86_REMOTING_CHECK)
3710 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
3711 x86_call_code (s->code, 0);
3716 mono_assert (tree->reg1 == X86_EAX);
3717 mono_assert (tree->reg2 == X86_EDX);
/* virtual call: load the vtable from the object, then call the slot */
3720 lreg: CALL_I8 (this, VFUNC_ADDR) {
3721 int lreg = tree->left->reg1;
3729 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3730 x86_call_virtual (s->code, lreg,
3731 G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
3735 mono_assert (tree->reg1 == X86_EAX);
3736 mono_assert (tree->reg2 == X86_EDX);
/* interface call: vtable -> interface_offsets -> itable[interface_id],
 * then call the method slot in that interface table */
3739 lreg: CALL_I8 (this, INTF_ADDR) {
3740 int lreg = tree->left->reg1;
3748 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
3749 x86_mov_reg_membase (s->code, lreg, lreg,
3750 G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
3751 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
3752 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
3756 mono_assert (tree->reg1 == X86_EAX);
3757 mono_assert (tree->reg2 == X86_EDX);
/* 64-bit return: move the value pair into EAX:EDX without clobbering
 * either half (EAX may hold the high word, hence the ECX detour).
 * NOTE(review): the rule header (presumably "stmt: RET (lreg) {") is
 * not visible in this extract. */
3761 if (tree->left->reg1 != X86_EAX) {
3762 if (tree->left->reg2 != X86_EAX) {
3763 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
3764 if (tree->left->reg2 != X86_EDX)
3765 x86_mov_reg_reg (s->code, X86_EDX, tree->left->reg2, 4);
/* high word currently in EAX: park it in ECX first */
3767 x86_mov_reg_reg (s->code, X86_ECX, tree->left->reg2, 4);
3768 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
3769 x86_mov_reg_reg (s->code, X86_EDX, X86_ECX, 4);
3771 } else if (tree->left->reg2 != X86_EDX) {
3772 x86_mov_reg_reg (s->code, X86_EDX, tree->left->reg2, 4);
/* jump to the shared epilog unless this is the last instruction */
3775 if (!tree->last_instr) {
3776 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
3777 x86_jump32 (s->code, 0);
/* push a 64-bit call argument: high word first so it lies in memory
 * as a little-endian 64-bit value (pad handling elided in extract) */
3782 stmt: ARG_I8 (lreg) {
3783 int pad = tree->data.arg_info.pad;
3786 x86_push_reg (s->code, tree->left->reg2);
3787 x86_push_reg (s->code, tree->left->reg1);
/* Materialize the boolean result of a 64-bit comparison into a 32-bit
 * register.  CEE_CEQ compares both words and SETs on combined equality;
 * the other relations compare high words first, falling back to an
 * unsigned low-word compare only when the high words are equal.
 * NOTE(review): the "case CEE_*:" labels inside the switch are not
 * visible in this extract, so which relation each arm implements
 * cannot be confirmed here; the branch-signedness flags (TRUE = signed)
 * distinguish the signed from the unsigned variants. */
3790 reg: CSET (COMPARE (lreg, lreg)) {
3792 int lreg1, lreg2, rreg1, rreg2;
3794 lreg1 = tree->left->left->reg1;
3795 lreg2 = tree->left->left->reg2;
3796 rreg1 = tree->left->right->reg1;
3797 rreg2 = tree->left->right->reg2;
/* equality: if low words differ, skip the high compare (flags from the
 * first CMP then drive the SETcc) */
3800 if (tree->data.i == CEE_CEQ) {
3801 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3802 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3803 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3804 x86_patch (br [0], s->code);
3805 x86_set_reg (s->code, X86_CC_EQ, tree->reg1, FALSE);
3806 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
3810 switch (tree->data.i) {
/* br[0] -> result 0, br[1] -> result 1, br[2] -> result 0 */
3812 x86_alu_reg_reg (s->code, X86_CMP, rreg2, lreg2);
3813 br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, TRUE);
3814 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3815 x86_alu_reg_reg (s->code, X86_CMP, rreg1, lreg1);
3816 br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
3819 x86_alu_reg_reg (s->code, X86_CMP, rreg2, lreg2);
3820 br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, FALSE);
3821 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3822 x86_alu_reg_reg (s->code, X86_CMP, rreg1, lreg1);
3823 br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
3826 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3827 br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, TRUE);
3828 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3829 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3830 br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
3833 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3834 br [0] = s->code; x86_branch8 (s->code, X86_CC_GT, 0, FALSE);
3835 br [1] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3836 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3837 br [2] = s->code; x86_branch8 (s->code, X86_CC_GE, 0, FALSE);
3840 g_assert_not_reached ();
3843 /* set result to 1 */
3844 x86_patch (br [1], s->code);
3845 x86_mov_reg_imm (s->code, tree->reg1, 1);
3846 br [3] = s->code; x86_jump8 (s->code, 0);
3848 /* set result to 0 */
3849 x86_patch (br [0], s->code);
3850 x86_patch (br [2], s->code);
3851 x86_mov_reg_imm (s->code, tree->reg1, 0);
3853 x86_patch (br [3], s->code);
/* Conditional branch on a 64-bit comparison.  Relational arms branch
 * to the target on the high-word compare, fall through to a low-word
 * compare when the high words are equal; equality arms chain two
 * 32-bit compares.  Branch targets go through mono_add_jump_info so
 * the 32-bit displacements are patched later.
 * NOTE(review): the "case CEE_BLT:" etc. labels inside the switch are
 * not visible in this extract; the is_signed flag on each branch
 * (TRUE/FALSE) distinguishes the signed from the unsigned arms. */
3856 stmt: CBRANCH (COMPARE (lreg, lreg)) {
3858 int lreg1, lreg2, rreg1, rreg2;
3860 lreg1 = tree->left->left->reg1;
3861 lreg2 = tree->left->left->reg2;
3862 rreg1 = tree->left->right->reg1;
3863 rreg2 = tree->left->right->reg2;
3865 switch (tree->data.bi.cond) {
3867 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3868 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3869 x86_branch32 (s->code, X86_CC_LT, 0, TRUE);
3870 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3871 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3872 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
/* low-word compare is always unsigned once high words are equal */
3873 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3874 x86_patch (br [0], s->code);
3877 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3878 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3879 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3880 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3881 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3882 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3883 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3884 x86_patch (br [0], s->code);
3887 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3888 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3889 x86_branch32 (s->code, X86_CC_GT, 0, TRUE);
3890 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3891 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3892 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3893 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3894 x86_patch (br [0], s->code);
3897 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3898 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3899 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3900 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3901 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3902 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3903 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3904 x86_patch (br [0], s->code);
/* equality: branch to target only when both words match */
3907 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3908 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3909 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3910 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3911 x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
3912 x86_patch (br [0], s->code);
/* inequality: branch if either word differs */
3915 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3916 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3917 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
3918 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3919 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3920 x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
3923 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3924 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3925 x86_branch32 (s->code, X86_CC_GT, 0, TRUE);
/* NOTE(review): this CMP repeats the one at 3923 with no intervening
 * flag-modifying instruction (branches preserve flags), so it looks
 * redundant but harmless -- confirm against the other arms, which
 * issue the CMP only once. */
3926 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3927 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3928 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3929 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3930 x86_branch32 (s->code, X86_CC_GE, 0, FALSE);
3931 x86_patch (br [0], s->code);
3934 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3935 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3936 x86_branch32 (s->code, X86_CC_GT, 0, FALSE);
3937 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3938 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3939 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3940 x86_branch32 (s->code, X86_CC_GE, 0, FALSE);
3941 x86_patch (br [0], s->code);
3944 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3945 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3946 x86_branch32 (s->code, X86_CC_LT, 0, TRUE);
3947 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, TRUE);
3948 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3949 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3950 x86_branch32 (s->code, X86_CC_LE, 0, FALSE);
3951 x86_patch (br [0], s->code);
3954 x86_alu_reg_reg (s->code, X86_CMP, lreg2, rreg2);
3955 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3956 x86_branch32 (s->code, X86_CC_LT, 0, FALSE);
3957 br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
3958 x86_alu_reg_reg (s->code, X86_CMP, lreg1, rreg1);
3959 mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
3960 x86_branch32 (s->code, X86_CC_LE, 0, FALSE);
3961 x86_patch (br [0], s->code);
3964 g_assert_not_reached ();
/* ---- float -> integer conversion rules (CONV_I1 .. CONV_U4) ----
 * CIL conv.* truncates toward zero, but the x87 default rounding mode
 * is round-to-nearest.  Each slow path therefore: saves the FPU
 * control word (fnstcw), ORs in 0xc00 (sets the rounding-control bits
 * to truncate), loads the modified word (fldcw), converts with a
 * fist/pop through a stack slot, then restores the original control
 * word.  When mono_use_fast_iconv is set, a faster integer-based
 * routine (mono_emit_fast_iconv) is emitted instead, followed only by
 * the appropriate widening.  The widen flags are (is_signed,
 * is_half-word): I1=(TRUE,FALSE), U1=(FALSE,FALSE), I2=(TRUE,TRUE),
 * U2=(FALSE,TRUE); I4/U4 need no widening. */
3971 #stmt: STLOC (CONV_I4 (freg)) {
3973 # x86_fist_pop_membase (s->code, X86_EBP, tree->data.i, FALSE);
3976 reg: CONV_I1 (freg) {
3977 if (mono_use_fast_iconv) {
3978 mono_emit_fast_iconv(s, tree);
3979 x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, FALSE);
3981 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
3982 x86_fnstcw_membase(s->code, X86_ESP, 0);
3983 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
3984 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
3985 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
3986 x86_fldcw_membase (s->code, X86_ESP, 2);
3987 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
3988 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
3989 x86_pop_reg (s->code, tree->reg1);
3990 x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, FALSE);
3991 x86_fldcw_membase (s->code, X86_ESP, 0);
3992 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
3996 reg: CONV_U1 (freg) {
3997 if (mono_use_fast_iconv) {
3998 mono_emit_fast_iconv(s, tree);
3999 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
4001 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4002 x86_fnstcw_membase(s->code, X86_ESP, 0);
4003 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4004 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4005 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4006 x86_fldcw_membase (s->code, X86_ESP, 2);
4007 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4008 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4009 x86_pop_reg (s->code, tree->reg1);
4010 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
4011 x86_fldcw_membase (s->code, X86_ESP, 0);
4012 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4016 reg: CONV_I2 (freg) {
4017 if (mono_use_fast_iconv) {
4018 mono_emit_fast_iconv(s, tree);
4019 x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, TRUE);
4021 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4022 x86_fnstcw_membase(s->code, X86_ESP, 0);
4023 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4024 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4025 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4026 x86_fldcw_membase (s->code, X86_ESP, 2);
4027 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4028 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4029 x86_pop_reg (s->code, tree->reg1);
4030 x86_widen_reg (s->code, tree->reg1, tree->reg1, TRUE, TRUE);
4031 x86_fldcw_membase (s->code, X86_ESP, 0);
4032 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4036 reg: CONV_U2 (freg) {
4037 if (mono_use_fast_iconv) {
4038 mono_emit_fast_iconv(s, tree);
4039 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, TRUE);
4041 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4042 x86_fnstcw_membase(s->code, X86_ESP, 0);
4043 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4044 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4045 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4046 x86_fldcw_membase (s->code, X86_ESP, 2);
4047 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4048 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4049 x86_pop_reg (s->code, tree->reg1);
4050 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, TRUE);
4051 x86_fldcw_membase (s->code, X86_ESP, 0);
4052 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4056 reg: CONV_I4 (freg) {
4057 if (mono_use_fast_iconv) {
4058 mono_emit_fast_iconv(s, tree);
4060 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4061 x86_fnstcw_membase(s->code, X86_ESP, 0);
4062 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4063 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4064 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4065 x86_fldcw_membase (s->code, X86_ESP, 2);
4066 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4067 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4068 x86_pop_reg (s->code, tree->reg1);
4069 x86_fldcw_membase (s->code, X86_ESP, 0);
4070 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4074 reg: CONV_U4 (freg) {
4075 if (mono_use_fast_iconv) {
4076 mono_emit_fast_iconv(s, tree);
4078 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4079 x86_fnstcw_membase(s->code, X86_ESP, 0);
4080 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4081 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4082 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4083 x86_fldcw_membase (s->code, X86_ESP, 2);
4084 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
4085 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
4086 x86_pop_reg (s->code, tree->reg1);
4087 x86_fldcw_membase (s->code, X86_ESP, 0);
4088 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* ---- float -> 64-bit integer conversion rules ----
 * Same truncation-mode dance as the 32-bit versions, but an 8-byte
 * stack slot is used and the fist/pop is emitted in 64-bit mode
 * (last arg TRUE); the two halves are popped into reg1 (low) and
 * reg2 (high).  CONV_U8 emits the identical sequence -- the x87 has
 * no unsigned store, so the same signed conversion is reused. */
4092 lreg: CONV_I8 (freg) {
4093 if (mono_use_fast_iconv) {
4094 mono_emit_fast_iconv_i8(s, tree);
/* save control word, force round-toward-zero (RC bits = 0xc00) */
4096 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4097 x86_fnstcw_membase(s->code, X86_ESP, 0);
4098 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4099 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4100 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4101 x86_fldcw_membase (s->code, X86_ESP, 2);
/* 8-byte slot for the 64-bit fistp result */
4102 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
4103 x86_fist_pop_membase (s->code, X86_ESP, 0, TRUE);
4104 x86_pop_reg (s->code, tree->reg1);
4105 x86_pop_reg (s->code, tree->reg2);
/* restore the saved control word */
4106 x86_fldcw_membase (s->code, X86_ESP, 0);
4107 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4111 lreg: CONV_U8 (freg) {
4112 if (mono_use_fast_iconv) {
4113 mono_emit_fast_iconv_i8(s, tree);
4115 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4116 x86_fnstcw_membase(s->code, X86_ESP, 0);
4117 x86_mov_reg_membase (s->code, tree->reg1, X86_ESP, 0, 2);
4118 x86_alu_reg_imm (s->code, X86_OR, tree->reg1, 0xc00);
4119 x86_mov_membase_reg (s->code, X86_ESP, 2, tree->reg1, 2);
4120 x86_fldcw_membase (s->code, X86_ESP, 2);
4121 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
4122 x86_fist_pop_membase (s->code, X86_ESP, 0, TRUE);
4123 x86_pop_reg (s->code, tree->reg1);
4124 x86_pop_reg (s->code, tree->reg2);
4125 x86_fldcw_membase (s->code, X86_ESP, 0);
4126 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* Materialize the boolean result of a float comparison.  FCOMPP pops
 * both operands and sets the FPU condition codes; FNSTSW copies them
 * to AX, which is masked to the C0/C2/C3 bits (0x4500) and compared
 * against the pattern for the wanted relation.  EAX is preserved
 * around the sequence when it is not the destination.
 * NOTE(review): the "case CEE_*:" labels are not visible in this
 * extract, so the relation each CMP pattern (0x4000 = equal,
 * 0x0100 = below) selects cannot be confirmed here. */
4130 reg: CSET (COMPARE (freg, freg)) {
4131 int treg = tree->reg1;
4133 if (treg != X86_EAX)
4134 x86_push_reg (s->code, X86_EAX);
4136 x86_fcompp (s->code);
4137 x86_fnstsw (s->code);
4138 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
4140 switch (tree->data.i) {
4142 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
4143 x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
4144 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
4147 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4148 x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
4149 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
4152 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4153 x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
4154 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
4157 x86_set_reg (s->code, X86_CC_EQ, treg, TRUE);
4158 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
/* tree->reg1 here is the same register as treg (see 4131); the other
 * arms spell it 'treg' -- cosmetic inconsistency only */
4161 x86_set_reg (s->code, X86_CC_EQ, tree->reg1, TRUE);
4162 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
4165 g_assert_not_reached ();
4168 if (treg != X86_EAX)
4169 x86_pop_reg (s->code, X86_EAX);
/* ---- conversions INTO the FP stack ----
 * Values already on the x87 stack need no code (all x87 arithmetic is
 * done in extended precision); integers are pushed to the stack and
 * loaded with FILD (32-bit or, with TRUE, 64-bit). */
3172 is replaced by nothing here; see rules below */
4172 freg: CONV_R8 (freg) {
4176 freg: CONV_R4 (freg) {
4177 /* fixme: nothing to do ??*/
/* load a 32-bit integer global directly from memory */
4180 freg: CONV_R8 (LDIND_I4 (ADDR_G)) {
4181 x86_fild (s->code, tree->left->left->data.p, FALSE);
/* signed 32-bit int -> float: bounce through the stack for FILD */
4184 freg: CONV_R4 (reg) {
4185 x86_push_reg (s->code, tree->left->reg1);
4186 x86_fild_membase (s->code, X86_ESP, 0, FALSE);
4187 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
4190 freg: CONV_R8 (reg) {
4191 x86_push_reg (s->code, tree->left->reg1);
4192 x86_fild_membase (s->code, X86_ESP, 0, FALSE);
4193 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
/* unsigned 32-bit int -> float: zero-extend to 64 bits on the stack
 * and use the 64-bit FILD, which is signed but now sees a non-negative
 * value */
4196 freg: CONV_R_UN (reg) {
4197 x86_push_imm (s->code, 0);
4198 x86_push_reg (s->code, tree->left->reg1);
4199 x86_fild_membase (s->code, X86_ESP, 0, TRUE);
4200 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
/* unsigned 64-bit int -> float: FILD is signed, so when the top bit is
 * set the loaded value is off by 2^64; mn appears to be 2^64 encoded
 * as an 80-bit extended double (exponent 0x403f, mantissa MSB set) --
 * TODO confirm -- which is added back as a correction */
4203 freg: CONV_R_UN (lreg) {
4204 static guint8 mn[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x3f, 0x40 };
4207 /* load 64bit integer to FP stack */
4208 x86_push_imm (s->code, 0);
4209 x86_push_reg (s->code, tree->left->reg2);
4210 x86_push_reg (s->code, tree->left->reg1);
4211 x86_fild_membase (s->code, X86_ESP, 0, TRUE);
4212 /* store as 80bit FP value */
4213 x86_fst80_membase (s->code, X86_ESP, 0);
4215 /* test if lreg is negative */
4216 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
4217 br [0] = s->code; x86_branch8 (s->code, X86_CC_GEZ, 0, TRUE);
4219 /* add correction constant mn */
4220 x86_fld80_mem (s->code, mn);
4221 x86_fld80_membase (s->code, X86_ESP, 0);
4222 x86_fp_op_reg (s->code, X86_FADD, 1, TRUE);
4223 x86_fst80_membase (s->code, X86_ESP, 0);
4224 //x86_breakpoint (s->code);
4225 x86_patch (br [0], s->code);
4227 x86_fld80_membase (s->code, X86_ESP, 0);
4228 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
/* signed 64-bit int -> float: push both halves, 64-bit FILD */
4231 freg: CONV_R4 (lreg) {
4232 x86_push_reg (s->code, tree->left->reg2);
4233 x86_push_reg (s->code, tree->left->reg1);
4234 x86_fild_membase (s->code, X86_ESP, 0, TRUE);
4235 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
4238 freg: CONV_R8 (lreg) {
4239 x86_push_reg (s->code, tree->left->reg2);
4240 x86_push_reg (s->code, tree->left->reg1);
4241 x86_fild_membase (s->code, X86_ESP, 0, TRUE);
4242 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
/* float/double constant loads (rule headers elided in this extract):
 * the constant is read from tree->data.p and loaded with FLD */
4246 float f = *(float *)tree->data.p;
4253 x86_fld (s->code, tree->data.p, FALSE);
4257 double d = *(double *)tree->data.p;
4264 x86_fld (s->code, tree->data.p, TRUE);
/* ---- FP loads through the computed addressing modes ----
 * AMImmediate loads from an absolute address, AMBase from
 * [basereg+offset]; the scaled-index modes first materialize the
 * effective address with LEA into the index register, then load from
 * it.  Last FLD argument selects single (FALSE) vs double (TRUE).
 * (case labels for the amode switch are elided in this extract.) */
4267 freg: LDIND_R4 (addr) {
4269 switch (tree->left->data.ainfo.amode) {
4272 x86_fld (s->code, tree->left->data.ainfo.offset, FALSE);
4276 x86_fld_membase (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, FALSE);
4279 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, X86_NOBASEREG,
4280 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4281 tree->left->data.ainfo.shift);
4282 x86_fld_membase (s->code, tree->left->data.ainfo.indexreg, 0, FALSE);
4285 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.basereg,
4286 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4287 tree->left->data.ainfo.shift);
4288 x86_fld_membase (s->code, tree->left->data.ainfo.indexreg, 0, FALSE);
/* same as LDIND_R4 but loading a 64-bit double (TRUE) */
4293 freg: LDIND_R8 (addr) {
4295 switch (tree->left->data.ainfo.amode) {
4298 x86_fld (s->code, tree->left->data.ainfo.offset, TRUE);
4302 x86_fld_membase (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset, TRUE);
4305 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, X86_NOBASEREG,
4306 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4307 tree->left->data.ainfo.shift);
4308 x86_fld_membase (s->code, tree->left->data.ainfo.indexreg, 0, TRUE);
4311 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.basereg,
4312 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4313 tree->left->data.ainfo.shift);
4314 x86_fld_membase (s->code, tree->left->data.ainfo.indexreg, 0, TRUE);
/* ---- FP arithmetic on the x87 stack ----
 * Both operands are on the stack; the op combines ST(0) and ST(1)
 * into ST(1) and pops (x86_fp_op_reg with pop_stack=TRUE). */
4320 freg: ADD (freg, freg) {
4321 x86_fp_op_reg (s->code, X86_FADD, 1, TRUE);
4324 freg: SUB (freg, freg) {
4325 x86_fp_op_reg (s->code, X86_FSUB, 1, TRUE);
4328 freg: MUL (freg, freg) {
4329 x86_fp_op_reg (s->code, X86_FMUL, 1, TRUE);
4332 freg: DIV (freg, freg) {
4333 x86_fp_op_reg (s->code, X86_FDIV, 1, TRUE);
/* ckfinite: examine the FPU status word (via fnstsw into AX) and
 * throw ArithmeticException when the C0/C3 pattern is not the one for
 * a finite value; EAX is preserved around the check.  NOTE(review):
 * the instruction that sets the condition codes (presumably an fxam
 * on a line elided here between 4337 and 4339) is not visible. */
4336 freg: CKFINITE (freg) {
4337 x86_push_reg (s->code, X86_EAX);
4339 x86_fnstsw (s->code);
4340 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4100);
4341 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4342 x86_pop_reg (s->code, X86_EAX);
4343 EMIT_COND_SYSTEM_EXCEPTION (X86_CC_NE, FALSE, "ArithmeticException");
/* fp remainder: FPREM computes a partial remainder, so loop until the
 * status word's C2 bit (0x0400, "incomplete") is clear.  The loop
 * labels l1/l2 are captured on lines elided in this extract. */
4346 freg: REM (freg, freg) {
4349 /* we need to exchange ST(0) with ST(1) */
4350 x86_fxch (s->code, 1);
4352 /* this requires a loop, because fprem1 somtimes
4353 * returns a partial remainder */
4355 x86_fprem (s->code);
4356 x86_fnstsw (s->code);
4357 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x0400);
/* backward branch to the fprem while C2 is still set */
4359 x86_branch8 (s->code, X86_CC_NE, l1 - l2, FALSE);
/* pop the leftover divisor, keeping the remainder */
4362 x86_fstp (s->code, 1);
/* discard an FP value (rule header elided; presumably stmt: POP) */
4370 x86_fstp (s->code, 0);
4373 stmt: STIND_R4 (addr, freg) {
4375 switch (tree->left->data.ainfo.amode) {
4378 x86_fst (s->code, tree->left->data.ainfo.offset, FALSE, TRUE);
4382 x86_fst_membase (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
4386 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, X86_NOBASEREG,
4387 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4388 tree->left->data.ainfo.shift);
4389 x86_fst_membase (s->code, tree->left->data.ainfo.indexreg, 0, FALSE, TRUE);
4392 x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.basereg,
4393 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4394 tree->left->data.ainfo.shift);
4395 x86_fst_membase (s->code, tree->left->data.ainfo.indexreg, 0, FALSE, TRUE);
# Store ST(0) to memory as a 64 bit double; identical structure to the
# STIND_R4 rule above, but with the "double" flag set on the fst macros.
4400 stmt: STIND_R8 (addr, freg) {
4402 	switch (tree->left->data.ainfo.amode) {
/* absolute address (AMImmediate) */
4405 		x86_fst (s->code, tree->left->data.ainfo.offset, TRUE, TRUE);
/* base register + displacement (AMBase) */
4409 		x86_fst_membase (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
/* scaled index with no base (AMIndex): materialize the address first */
4413 		x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, X86_NOBASEREG,
4414 				  tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4415 				  tree->left->data.ainfo.shift);
4416 		x86_fst_membase (s->code, tree->left->data.ainfo.indexreg, 0, TRUE, TRUE);
/* base + scaled index (AMBaseIndex): materialize the address first */
4419 		x86_lea_memindex (s->code, tree->left->data.ainfo.indexreg, tree->left->data.ainfo.basereg,
4420 				  tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
4421 				  tree->left->data.ainfo.shift);
4422 		x86_fst_membase (s->code, tree->left->data.ainfo.indexreg, 0, TRUE, TRUE);
# Store a 32 bit float into an object field, with a runtime check for a
# transparent proxy: proxies go through mono_store_remote_field, real
# objects get a direct in-place fst.
4427 stmt: REMOTE_STIND_R4 (reg, freg) {
4430 	int lreg = tree->left->reg1;
/* compare the object's vtable->klass against the transparent proxy class */
4436 	x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
4437 	x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
4438 	br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
4440 	/* this is a transparent proxy - remote the call */
4442 	/* save value to stack */
4443 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
4444 	x86_fst_membase (s->code, X86_ESP, 0, FALSE, TRUE);
/* args: obj, klass, field, &value (pushed right-to-left) */
4446 	x86_push_reg (s->code, X86_ESP);
4447 	x86_push_imm (s->code, tree->data.fi.field);
4448 	x86_push_imm (s->code, tree->data.fi.klass);
4449 	x86_push_reg (s->code, lreg);
4450 	mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
4451 	x86_call_code (s->code, 0);
/* pop 4 args (16 bytes) plus the 4 byte value slot */
4452 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 20);
4454 	br [1] = s->code; x86_jump8 (s->code, 0);
/* non-proxy path: store directly at the field offset */
4456 	x86_patch (br [0], s->code);
4457 	offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
4458 		tree->data.fi.field->offset;
4459 	x86_fst_membase (s->code, lreg, offset, FALSE, TRUE);
4461 	x86_patch (br [1], s->code);
# Same as REMOTE_STIND_R4 above, but for a 64 bit double (8 byte value
# slot on the stack, hence 24 bytes of stack cleanup after the call).
4464 stmt: REMOTE_STIND_R8 (reg, freg) {
4467 	int lreg = tree->left->reg1;
/* compare the object's vtable->klass against the transparent proxy class */
4473 	x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
4474 	x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
4475 	br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
4477 	/* this is a transparent proxy - remote the call */
4479 	/* save value to stack */
4480 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
4481 	x86_fst_membase (s->code, X86_ESP, 0, TRUE, TRUE);
/* args: obj, klass, field, &value (pushed right-to-left) */
4483 	x86_push_reg (s->code, X86_ESP);
4484 	x86_push_imm (s->code, tree->data.fi.field);
4485 	x86_push_imm (s->code, tree->data.fi.klass);
4486 	x86_push_reg (s->code, lreg);
4487 	mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
4488 	x86_call_code (s->code, 0);
/* pop 4 args (16 bytes) plus the 8 byte value slot */
4489 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 24);
4491 	br [1] = s->code; x86_jump8 (s->code, 0);
/* non-proxy path: store directly at the field offset */
4493 	x86_patch (br [0], s->code);
4494 	offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
4495 		tree->data.fi.field->offset;
4496 	x86_fst_membase (s->code, lreg, offset, TRUE, TRUE);
4498 	x86_patch (br [1], s->code);
# Push a 32 bit float call argument: reserve 4 bytes (plus alignment pad)
# on the stack and store ST(0) there.
4501 stmt: ARG_R4 (freg) {
4502 	int pad = tree->data.arg_info.pad;
4504 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4 + pad);
4505 	x86_fst_membase (s->code, X86_ESP, 0, FALSE, TRUE);
# Push a 64 bit double call argument: reserve 8 bytes (plus alignment pad)
# on the stack and store ST(0) there.
4508 stmt: ARG_R8 (freg) {
4509 	int pad = tree->data.arg_info.pad;
4511 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8 + pad);
4512 	x86_fst_membase (s->code, X86_ESP, 0, TRUE, TRUE);
# fixme: we need to implement unordered and ordered compares
# Conditional branch on an FP compare: fcompp pops both operands and sets
# the FPU condition codes; fnstsw copies them to AX, and masking with
# 0x4500 isolates C0/C2/C3 so the integer branch below can test them.
# NOTE(review): the switch case labels (CEE_BEQ/BNE/BLT/...) are on lines
# absent from this excerpt; the groupings below are inferred — confirm.
4517 stmt: CBRANCH (COMPARE (freg, freg)) {
4519 	x86_fcompp (s->code);
4520 	x86_fnstsw (s->code);
4521 	x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
4523 	switch (tree->data.bi.cond) {
/* equality: C3 set alone (masked value == 0x4000 tested via EQ on 0) */
4525 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4526 		x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
4529 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4530 		x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
/* less-than family: C0 (0x0100) set */
4533 		x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4534 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4535 		x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
4538 		x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4539 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4540 		x86_branch32 (s->code, X86_CC_EQ, 0, FALSE);
/* comparisons against C3 (0x4000, the "equal" flag) */
4543 		x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
4544 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4545 		x86_branch32 (s->code, X86_CC_EQ, 0, TRUE);
4548 		x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
4549 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4550 		x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
4553 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4554 		x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
4557 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4558 		x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
/* greater-or-equal family: C0 (0x0100) clear */
4561 		x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4562 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4563 		x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
4566 		x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
4567 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_BB, tree->data.bi.target);
4568 		x86_branch32 (s->code, X86_CC_NE, 0, FALSE);
4571 		g_assert_not_reached ();
# Indirect call through a register, returning a double in ST(0).
# The register-conflict checks below resolve collisions between the
# 'this' register, the target-address register and the result register.
4575 freg: CALL_R8 (this, reg) {
4577 	int lreg = tree->left->reg1;
4578 	int rreg = tree->right->reg1;
/* NOTE(review): the bodies of these conflict checks are on lines absent
 * from this excerpt; only the conditions are visible */
4580 	if (lreg == treg || rreg == treg)
4582 	if (lreg == treg || rreg == treg)
4584 	if (lreg == treg || rreg == treg)
4585 		mono_assert_not_reached ();
4589 	x86_call_reg (s->code, rreg);
# Direct call to a known global address, returning a double in ST(0).
# The callee address is recorded as an ABS jump-info entry so the patcher
# can fix up the call displacement later.
4594 freg: CALL_R8 (this, ADDR_G) {
4595 	int lreg = tree->left->reg1;
/* optionally emit a transparent-proxy remoting check on 'this' */
4601 	if (X86_REMOTING_CHECK)
4608 	mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, tree->right->data.p);
4609 	x86_call_code (s->code, 0);
# Interface call returning a double: load vtable, then the interface
# offsets table, then the per-interface slot table, and call through it.
4615 freg: CALL_R8 (this, INTF_ADDR) {
4616 	int lreg = tree->left->reg1;
/* lreg = this->vtable */
4624 	x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
/* lreg = vtable->interface_offsets */
4625 	x86_mov_reg_membase (s->code, lreg, lreg,
4626 			     G_STRUCT_OFFSET (MonoVTable, interface_offsets), 4);
/* lreg = interface_offsets [interface_id]; then call slot's entry */
4627 	x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
4628 	x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
# Virtual call returning a double: load the vtable and call through the
# method's vtable slot.
4633 freg: CALL_R8 (this, VFUNC_ADDR) {
4634 	int lreg = tree->left->reg1;
/* lreg = this->vtable */
4642 	x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
4643 	x86_call_virtual (s->code, lreg,
4644 		G_STRUCT_OFFSET (MonoVTable, vtable) + (tree->right->data.m->slot << 2));
/* Fragment of a return rule: unless this is the function's last
 * instruction, jump to the (patched-in later) epilog.
 * NOTE(review): the enclosing rule headers are on lines absent from this
 * excerpt; the fsqrt below belongs to a separate intrinsic rule. */
4650 	if (!tree->last_instr) {
4651 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
4652 		x86_jump32 (s->code, 0);
4665 	x86_fsqrt (s->code);
4668 # support for value types
# Loading a value type "by reference": just propagate the address
# register (copy only if the allocator assigned different registers).
4670 reg: LDIND_OBJ (reg) {
4671 	if (tree->left->reg1 != tree->reg1)
4672 		x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
# Store a value type: memcpy(dest, src, size) via the MEMCOPY helper.
# tree->data.i holds the value-type size in bytes.
4675 stmt: STIND_OBJ (reg, reg) {
4676 	mono_assert (tree->data.i > 0);
/* cdecl args pushed right-to-left: size, src, dest */
4678 	x86_push_imm (s->code, tree->data.i);
4679 	x86_push_reg (s->code, tree->right->reg1);
4680 	x86_push_reg (s->code, tree->left->reg1);
4681 	mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, MEMCOPY);
4682 	x86_call_code (s->code, 0);
/* caller cleans up the 3 pushed arguments */
4683 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
# Store a value-type field with a transparent-proxy check: proxies go
# through mono_store_remote_field, real objects get a MEMCOPY into the
# field's storage.
4686 stmt: REMOTE_STIND_OBJ (reg, reg) {
4689 	int lreg = tree->left->reg1;
4690 	int rreg = tree->right->reg1;
/* compare the object's vtable->klass against the transparent proxy class */
4699 	x86_mov_reg_membase (s->code, treg, lreg, 0, 4);
4700 	x86_alu_membase_imm (s->code, X86_CMP, treg, 0, ((int)mono_defaults.transparent_proxy_class));
4701 	br [0] = s->code; x86_branch8 (s->code, X86_CC_NE, 0, FALSE);
4703 	/* this is a transparent proxy - remote the call */
/* args: obj, klass, field, value-address */
4705 	x86_push_reg (s->code, rreg);
4706 	x86_push_imm (s->code, tree->data.fi.field);
4707 	x86_push_imm (s->code, tree->data.fi.klass);
4708 	x86_push_reg (s->code, lreg);
4709 	mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, mono_store_remote_field);
4710 	x86_call_code (s->code, 0);
4711 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
4713 	br [1] = s->code; x86_jump8 (s->code, 0);
/* non-proxy path: memcpy the value into the field's storage */
4715 	x86_patch (br [0], s->code);
4717 	offset = tree->data.fi.klass->valuetype ? tree->data.fi.field->offset - sizeof (MonoObject) :
4718 		tree->data.fi.field->offset;
4720 	size = mono_class_value_size (tree->data.fi.field->type->data.klass, NULL);
4721 	x86_push_imm (s->code, size);
4722 	x86_push_reg (s->code, tree->right->reg1);
/* lreg is dead after this rule, so it is safe to clobber it with the
 * field address */
4723 	x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, offset);
4724 	x86_push_reg (s->code, tree->left->reg1);
4725 	mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, MEMCOPY);
4726 	x86_call_code (s->code, 0);
4727 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
4729 	x86_patch (br [1], s->code);
# Push a value-type argument that fits in a single constant word.
4732 stmt: ARG_OBJ (CONST_I4) {
4733 	int pad = tree->data.arg_info.pad;
4736 	x86_push_imm (s->code, tree->left->data.i);
# Push a value-type argument: reserve the (aligned) argument space on the
# stack, then MEMCOPY the value into it from the address in reg.
4739 stmt: ARG_OBJ (reg) {
4740 	int size = tree->data.arg_info.size;
4741 	int pad = tree->data.arg_info.pad;
4749 	/* reserve space for the argument */
4750 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, sa);
/* MEMCOPY (dest, src, size): dest is the just-reserved slot, computed
 * past the 2 words pushed below (hence the 2*4 lea offset) */
4752 	x86_push_imm (s->code, size);
4753 	x86_push_reg (s->code, tree->left->reg1);
4754 	x86_lea_membase (s->code, X86_EAX, X86_ESP, 2*4);
4755 	x86_push_reg (s->code, X86_EAX);
4757 	mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, MEMCOPY);
4758 	x86_call_code (s->code, 0);
4759 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
# Return a value type: copy it into the hidden return buffer whose
# address the caller passed at [EBP+8], then jump to the epilog unless
# this is already the last instruction.
4762 stmt: RET_OBJ (reg) {
4763 	int size = tree->data.i;
/* MEMCOPY (dest = *(EBP+8), src = reg, size) */
4765 	x86_push_imm (s->code, size);
4766 	x86_push_reg (s->code, tree->left->reg1);
4767 	x86_push_membase (s->code, X86_EBP, 8);
4769 	mono_add_jump_info (s, s->code, MONO_JUMP_INFO_ABS, MEMCOPY);
4770 	x86_call_code (s->code, 0);
4772 	x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
4774 	if (!tree->last_instr) {
4775 		mono_add_jump_info (s, s->code, MONO_JUMP_INFO_EPILOG, NULL);
4776 		x86_jump32 (s->code, 0);
/* 64x64 -> 64 bit multiply helper, called from JITted code
 * (body is on lines absent from this excerpt) */
4785 mono_llmult (gint64 a, gint64 b)
/* Unsigned 64x64 multiply with overflow detection, operands split into
 * 32 bit halves. On overflow, stores an OverflowException in *exc. */
4791 mono_llmult_ovf_un (gpointer *exc, guint32 al, guint32 ah, guint32 bl, guint32 bh)
4795 	// fixme: this is incredibly slow
/* if both high halves are non-zero the product cannot fit in 64 bits */
4798 		goto raise_exception;
4800 	res = (guint64)al * (guint64)bl;
/* cross terms: at most one of ah/bh is non-zero here */
4802 	t1 = (guint64)ah * (guint64)bl + (guint64)al * (guint64)bh;
4804 	if (t1 > 0xffffffff)
4805 		goto raise_exception;
4807 	res += ((guint64)t1) << 32;
4813 	*exc = mono_get_exception_overflow ();
/* Signed 64x64 multiply with overflow detection, operands split into
 * 32 bit halves. On overflow, stores an OverflowException in *exc. */
4819 mono_llmult_ovf (gpointer *exc, guint32 al, gint32 ah, guint32 bl, gint32 bh) {
4821 	Use Karatsuba algorithm where:
4822 		a*b is: AhBh(R^2+R)+(Ah-Al)(Bl-Bh)R+AlBl(R+1)
4823 	where Ah is the "high half" (most significant 32 bits) of a and
4824 	where Al is the "low half" (least significant 32 bits) of a and
4825 	where Bh is the "high half" of b and Bl is the "low half" and
4826 	where R is the Radix or "size of the half" (in our case 32 bits)
4828 	Note, for the product of two 64 bit numbers to fit into a 64 bit
4829 	result, ah and/or bh must be 0.  This will save us from doing
4830 	the AhBh term at all.
4832 	Also note that we refactor so that we don't overflow 64 bits with
4833 	intermediate results. So we use [(Ah-Al)(Bl-Bh)+AlBl]R+AlBl
4839 	/* need to work with absolute values, so find out what the
4840 	   resulting sign will be and convert any negative numbers
4841 	   from two's complement
4845 	/* flip the bits and add 1 */
4856 	/* flip the bits and add 1 */
4866 	/* we overflow for sure if both upper halves are greater
4867 	   than zero because we would need to shift their
4868 	   product 64 bits to the left and that will not fit
4869 	   in a 64 bit result */
4871 		goto raise_exception;
4873 	/* do the AlBl term first */
4874 	t1 = (gint64)al * (gint64)bl;
4878 	/* now do the [(Ah-Al)(Bl-Bh)+AlBl]R term */
4879 	t1 += (gint64)(ah - al) * (gint64)(bl - bh);
4881 	/* check for overflow */
4882 	if (t1 > (0x7FFFFFFFFFFFFFFF - res))
4883 		goto raise_exception;
4894 	*exc = mono_get_exception_overflow ();
/* 64 bit signed/unsigned division and remainder helpers called from
 * JITted code (bodies are on lines absent from this excerpt) */
4899 mono_lldiv (gint64 a, gint64 b)
4905 mono_llrem (gint64 a, gint64 b)
4911 mono_lldiv_un (guint64 a, guint64 b)
4917 mono_llrem_un (guint64 a, guint64 b)
/* JIT-callable wrapper: allocate an array of eclass with n elements in
 * the current domain */
4923 mono_array_new_wrapper (MonoClass *eclass, guint32 n)
4925 	MonoDomain *domain = mono_domain_get ();
4927 	return mono_array_new (domain, eclass, n);
/* JIT-callable wrapper: allocate an instance of klass in the current
 * domain */
4931 mono_object_new_wrapper (MonoClass *klass)
4933 	MonoDomain *domain = mono_domain_get ();
4935 	return mono_object_new (domain, klass);
/* JIT-callable wrapper: load the string literal with token index ind
 * from image, interned in the current domain */
4939 mono_ldstr_wrapper (MonoImage *image, guint32 ind)
4941 	MonoDomain *domain = mono_domain_get ();
4943 	return mono_ldstr (domain, image, ind);
/* Return the address of a static field: offset bytes into the static
 * data area of the class vtable for the current domain */
4947 mono_ldsflda (MonoClass *klass, int offset)
4949 	MonoDomain *domain = mono_domain_get ();
4953 	vt = mono_class_vtable (domain, klass);
4954 	addr = (char*)(vt->data) + offset;
/* Debug variant of the MEMCOPY helper: dumps the first bytes of the
 * source before performing the copy */
4960 debug_memcopy (void *dest, const void *src, size_t n)
/* NOTE(review): "%d" with a size_t argument is technically mismatched;
 * harmless on this 32-bit target but should be %u — confirm */
4964 	printf ("MEMCPY(%p to %p [%d]) ", src, dest, n);
4966 	for (i = 0; i < l; i++)
4967 		printf ("%02x ", *((guint8 *)src + i));
4970 	return memcpy (dest, src, n);
/* Emit a fast float -> int32 conversion with truncation toward zero.
 * fist rounds to nearest, so the rounded value is corrected using the
 * sign of the original float and the doubled rounding difference. */
4973 void mono_emit_fast_iconv (MBCGEN_TYPE* s, MBTREE_TYPE* tree)
/* scratch area: [ESP+0] float bits, [ESP+4] diff, [ESP+8] rounded int */
4976 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 12);
4977 	x86_fist_membase (s->code, X86_ESP, 8, TRUE); // rounded value
4978 	x86_fst_membase (s->code, X86_ESP, 0, FALSE, FALSE); // float value
4979 	x86_fp_int_op_membase (s->code, X86_FSUB, X86_ESP, 8, TRUE);
4980 	x86_fst_membase (s->code, X86_ESP, 4, FALSE, TRUE); // diff
/* branch on the sign bit of the original float value */
4982 	x86_pop_reg (s->code, tree->reg1); // float value
4983 	x86_test_reg_reg (s->code, tree->reg1, tree->reg1);
4984 	br[0] = s->code; x86_branch8 (s->code, X86_CC_S, 0, TRUE);
/* positive: subtract 1 via SBB if diff < 0 (rounding went up) */
4986 	x86_pop_reg (s->code, tree->reg1); // diff
4987 	x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
4988 	x86_pop_reg (s->code, tree->reg1); // rounded value
4989 	x86_alu_reg_imm (s->code, X86_SBB, tree->reg1, 0);
4990 	br[1] = s->code; x86_jump8 (s->code, 0);
/* negative: add 1 if diff > 0 (rounding went down) */
4993 	x86_patch (br[0], s->code);
4995 	x86_pop_reg (s->code, tree->reg1); // diff
4996 	x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
4997 	x86_pop_reg (s->code, tree->reg1); // rounded value
4998 	br[2] = s->code; x86_branch8 (s->code, X86_CC_Z, 0, FALSE);
4999 	x86_alu_reg_imm (s->code, X86_SBB, tree->reg1, -1);
5000 	x86_patch (br[1], s->code);
5001 	x86_patch (br[2], s->code);
/* Emit a fast float -> int64 conversion with truncation toward zero.
 * Same correction scheme as mono_emit_fast_iconv, but the rounded value
 * is a qword held in the reg1/reg2 pair, so the carry from SBB is
 * propagated into the high word. */
5004 void mono_emit_fast_iconv_i8 (MBCGEN_TYPE* s, MBTREE_TYPE* tree)
/* scratch: [ESP+0] float bits, [ESP+4] diff, [ESP+8..15] rounded qword */
5007 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 16);
5008 	x86_fld_reg (s->code, 0);
5009 	x86_fist_pop_membase (s->code, X86_ESP, 8, TRUE); // rounded value (qword)
5010 	x86_fst_membase (s->code, X86_ESP, 0, FALSE, FALSE); // float value
5011 	x86_fild_membase (s->code, X86_ESP, 8, TRUE);
5012 	x86_fp_op_reg (s->code, X86_FSUB, 1, TRUE); // diff
5013 	x86_fst_membase (s->code, X86_ESP, 4, FALSE, TRUE); // diff
/* branch on the sign bit of the original float value */
5015 	x86_pop_reg (s->code, tree->reg1); // float value
5016 	x86_test_reg_reg (s->code, tree->reg1, tree->reg1);
5017 	br[0] = s->code; x86_branch8 (s->code, X86_CC_S, 0, TRUE);
/* positive: subtract 1 (with borrow into the high word) if diff < 0 */
5019 	x86_pop_reg (s->code, tree->reg1); // diff
5020 	x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
5021 	x86_pop_reg (s->code, tree->reg1); // rounded value
5022 	x86_pop_reg (s->code, tree->reg2);
5023 	x86_alu_reg_imm (s->code, X86_SBB, tree->reg1, 0);
5024 	x86_alu_reg_imm (s->code, X86_SBB, tree->reg2, 0);
5025 	br[1] = s->code; x86_jump8 (s->code, 0);
/* negative: add 1 (with carry into the high word) if diff > 0 */
5028 	x86_patch (br[0], s->code);
5030 	x86_pop_reg (s->code, tree->reg1); // diff
5031 	x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->reg1);
5032 	x86_pop_reg (s->code, tree->reg1); // rounded value
5033 	x86_pop_reg (s->code, tree->reg2);
5034 	br[2] = s->code; x86_branch8 (s->code, X86_CC_Z, 0, FALSE);
5035 	x86_alu_reg_imm (s->code, X86_SBB, tree->reg1, -1);
5036 	x86_alu_reg_imm (s->code, X86_SBB, tree->reg2, -1);
5037 	x86_patch (br[1], s->code);
5038 	x86_patch (br[2], s->code);
/* Emit code for a dynamically-sized stack allocation (localloc).
 * On Win32 the stack must be grown one page at a time with a touch per
 * page (guard-page commit, see mono_emit_stack_alloc_const); elsewhere a
 * single SUB on ESP suffices. */
5041 void mono_emit_stack_alloc (MBCGEN_TYPE* s, MBTREE_TYPE* tree)
5043 #ifdef PLATFORM_WIN32
5049 	 * If requested stack size is larger than one page,
5050 	 * perform stack-touch operation
5051 	 * (see comments in mono_emit_stack_alloc_const below).
/* fast path: size fits in one page, no probing loop needed */
5053 	x86_test_reg_imm (s->code, tree->left->reg1, ~0xFFF);
5054 	br[0] = s->code; x86_branch8 (s->code, X86_CC_Z, 0, FALSE);
5056 	sreg = tree->left->reg1;
/* probe loop: allocate and touch one 4K page per iteration */
5058 	br[2] = s->code; /* loop */
5059 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 0x1000);
5060 	x86_test_membase_reg (s->code, X86_ESP, 0, X86_ESP);
5061 	x86_alu_reg_imm (s->code, X86_SUB, sreg, 0x1000);
5062 	x86_alu_reg_imm (s->code, X86_CMP, sreg, 0x1000);
5063 	br[3] = s->code; x86_branch8 (s->code, X86_CC_AE, 0, FALSE);
5064 	x86_patch (br[3], br[2]);
/* allocate the sub-page remainder, if any */
5065 	x86_test_reg_reg (s->code, sreg, sreg);
5066 	br[4] = s->code; x86_branch8 (s->code, X86_CC_Z, 0, FALSE);
5067 	x86_alu_reg_reg (s->code, X86_SUB, X86_ESP, sreg);
5069 	br[1] = s->code; x86_jump8 (s->code, 0);
5071 	x86_patch (br[0], s->code);
5072 	x86_alu_reg_reg (s->code, X86_SUB, X86_ESP, tree->left->reg1);
5073 	x86_patch (br[1], s->code);
5074 	x86_patch (br[4], s->code);
5075 #else /* PLATFORM_WIN32 */
5076 	x86_alu_reg_reg (s->code, X86_SUB, X86_ESP, tree->left->reg1);
/* Emit code for a constant-sized stack allocation (localloc with a
 * known size). On Win32, large sizes use a counted page-probing loop
 * (looping in EAX, which is spilled below the new stack top if live);
 * small sizes are unrolled; elsewhere a single SUB on ESP suffices. */
5080 void mono_emit_stack_alloc_const (MBCGEN_TYPE* s, MBTREE_TYPE* tree, int size)
5082 #ifdef PLATFORM_WIN32
5088 	 * Generate stack probe code.
5089 	 * Under Windows, it is necessary to allocate one page at a time,
5090 	 * "touching" stack after each successful sub-allocation. This is
5091 	 * because of the way stack growth is implemented - there is a
5092 	 * guard page before the lowest stack page that is currently commited.
5093 	 * Stack normally grows sequentially so OS traps access to the
5094 	 * guard page and commits more pages when needed.
5096 	npages = ((unsigned) size) >> 12;
/* looped version: EAX is the page counter; if EAX carries a live value,
 * save it in the first probed page and restore it afterwards */
5098 	if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX) {
5099 		x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 0x1000);
5100 		x86_test_membase_reg (s->code, X86_ESP, 0, X86_ESP);
5101 		x86_mov_membase_reg (s->code, X86_ESP, 0x1000 - 4, X86_EAX, 4); /* save EAX */
5102 		x86_mov_reg_imm (s->code, X86_EAX, npages - 1);
5104 		x86_mov_reg_imm (s->code, X86_EAX, npages);
5106 	br[0] = s->code; /* loop */
5107 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 0x1000);
5108 	x86_test_membase_reg (s->code, X86_ESP, 0, X86_ESP);
5109 	x86_dec_reg (s->code, X86_EAX);
5110 	br[1] = s->code; x86_branch8 (s->code, X86_CC_NZ, 0, TRUE);
5111 	x86_patch (br[1], br[0]);
5112 	if (tree->reg1 != X86_EAX && tree->left->reg1 != X86_EAX)
5113 		x86_mov_reg_membase (s->code, X86_EAX, X86_ESP, (npages * 0x1000) - 4, 4); /* restore EAX */
5115 	/* generate unrolled code for relatively small allocs */
5116 	for (i = npages; --i >= 0;) {
5117 		x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 0x1000);
5118 		x86_test_membase_reg (s->code, X86_ESP, 0, X86_ESP);
/* allocate the sub-page remainder (ADD of a negative keeps the encoding
 * short) */
5123 	if (size & 0xFFF) x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, -(size & 0xFFF));
5124 #else /* PLATFORM_WIN32 */
5125 	x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, size);
/* ldvirtftn helper: resolve the virtual method in 'slot' for the runtime
 * class of 'this' and return its native code address; transparent
 * proxies get a remoting trampoline instead. */
5130 mono_ldvirtftn (MonoObject *this, int slot)
5135 	gboolean is_proxy = FALSE;
/* for proxies, dispatch on the remote class, not the proxy class */
5138 	if ((class = this->vtable->klass) == mono_defaults.transparent_proxy_class) {
5139 		class = ((MonoTransparentProxy *)this)->klass;
/* NOTE(review): '<=' allows slot == vtable_size, which would read one
 * past the table if slots are 0-based — confirm intended bound */
5144 	g_assert (slot <= class->vtable_size);
5146 	m = class->vtable [slot];
5149 		return mono_jit_create_remoting_trampoline (m);
/* metadata_section guards the JIT compiler's shared state */
5151 	EnterCriticalSection (metadata_section);
5152 	addr = mono_compile_method (m);
5153 	LeaveCriticalSection (metadata_section);
/* Interface variant of ldvirtftn: 'slot' is an interface id that is
 * first translated through the class's interface_offsets table into a
 * vtable slot, then resolved and compiled like mono_ldvirtftn. */
5159 mono_ldintftn (MonoObject *this, int slot)
5164 	gboolean is_proxy = FALSE;
/* for proxies, dispatch on the remote class, not the proxy class */
5167 	if ((class = this->vtable->klass) == mono_defaults.transparent_proxy_class) {
5168 		class = ((MonoTransparentProxy *)this)->klass;
5172 	g_assert (slot <= class->max_interface_id);
/* map interface id -> vtable slot base */
5174 	slot = class->interface_offsets [slot];
5176 	m = class->vtable [slot];
5179 		return mono_jit_create_remoting_trampoline (m);
/* metadata_section guards the JIT compiler's shared state */
5181 	EnterCriticalSection (metadata_section);
5182 	addr = mono_compile_method (m);
5183 	LeaveCriticalSection (metadata_section);
5188 gpointer mono_ldftn (MonoMethod *method)
5192 EnterCriticalSection (metadata_section);
5193 addr = mono_compile_method (method);
5194 LeaveCriticalSection (metadata_section);