2 * x86.brg: X86 code generator
5 * Dietmar Maurer (dietmar@ximian.com)
7 * (C) 2001 Ximian, Inc.
14 #include <sys/syscall.h>
16 #include <mono/metadata/blob.h>
17 #include <mono/metadata/metadata.h>
18 #include <mono/metadata/loader.h>
19 #include <mono/metadata/object.h>
20 #include <mono/metadata/tabledefs.h>
21 #include <mono/arch/x86/x86-codegen.h>
27 #define MBTREE_TYPE MBTree
28 #define MBCGEN_TYPE MonoFlowGraph
29 #define MBCOST_DATA MonoFlowGraph
30 #define MBALLOC_STATE mono_mempool_alloc (data->mp, sizeof (MBState))
33 AMImmediate = 0, // ptr
35 AMIndex = 2, // V[REG*X]
36 AMBaseIndex = 3, // V[REG*X][REG]
57 unsigned last_instr:1;
/* 64-bit arithmetic helpers implemented out of line (32-bit x86 has no
 * native 64-bit mul/div); the code generator emits calls to these. */
79 gint64 mono_llmult (gint64 a, gint64 b);
80 gint64 mono_lldiv (gint64 a, gint64 b);
81 gint64 mono_llrem (gint64 a, gint64 b);
82 guint64 mono_lldiv_un (guint64 a, guint64 b);
83 guint64 mono_llrem_un (guint64 a, guint64 b);
/* returns the address of the LMF (last managed frame) slot for the
 * current thread; used by the SAVE_LMF/RESTORE_LMF rules below */
85 gpointer arch_get_lmf_addr (void);
/* NOTE(review): return type of this declaration is on an elided line —
 * presumably a function pointer to mono_object_isinst; confirm upstream */
88 get_mono_object_isinst (void);
/* Optimization-level plumbing for the BURG cost functions: MB_USE_OPT1/2
 * wrap a rule cost; 65535 effectively disables the rule at lower levels.
 * NOTE(review): the #if/#else/#endif lines pairing these definitions are
 * elided in this extraction — the three definition groups below belong to
 * different preprocessor branches. */
90 #define MB_OPT_LEVEL 1
93 #define MB_USE_OPT1(c) 65535
94 #define MB_USE_OPT2(c) 65535
97 #define MB_USE_OPT1(c) c
98 #define MB_USE_OPT2(c) 65535
100 #if MB_OPT_LEVEL >= 2
101 #define MB_USE_OPT1(c) c
102 #define MB_USE_OPT2(c) c
/* Debug build: route memcpy through a checked wrapper and provide a
 * PRINT_REG macro that emits inline code to printf a register's number
 * and value, preserving the caller-saved registers around the call. */
108 #define MEMCOPY debug_memcpy
109 void *MEMCOPY (void *dest, const void *src, size_t n);
/* Emits: save EAX/EDX/ECX, push reg value, push reg number, push format,
 * call printf, pop the 3 printf args (3*4), restore saved regs.
 * NOTE(review): multi-statement macro is not wrapped in do { } while (0),
 * so it is unsafe after a bare if — matches the file's existing style. */
111 #define PRINT_REG(text,reg) \
112 g_assert (reg >= 0); \
113 x86_push_reg (s->code, X86_EAX); \
114 x86_push_reg (s->code, X86_EDX); \
115 x86_push_reg (s->code, X86_ECX); \
116 x86_push_reg (s->code, reg); \
117 x86_push_imm (s->code, reg); \
118 x86_push_imm (s->code, text " %d %p\n"); \
119 x86_mov_reg_imm (s->code, X86_EAX, printf); \
120 x86_call_reg (s->code, X86_EAX); \
121 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 3*4); \
122 x86_pop_reg (s->code, X86_ECX); \
123 x86_pop_reg (s->code, X86_EDX); \
124 x86_pop_reg (s->code, X86_EAX);
/* Release build: plain memcpy and PRINT_REG compiles away to nothing */
127 #define MEMCOPY memcpy
129 #define PRINT_REG(x,y)
133 /* The call instruction for virtual functions must have a known
134 * size (used by x86_magic_trampoline)
/* Emits "call dword [basereg + disp]" as a fixed-length byte sequence:
 * opcode 0xff, modrm (mod=2, reg=2 => /2 call near indirect), disp32 */
136 #define x86_call_virtual(inst,basereg,disp) \
138 *(inst)++ = (unsigned char)0xff; \
139 x86_address_byte ((inst), 2, 2, (basereg)); \
140 x86_imm_emit32 ((inst), (disp)); \
143 /* emit an exception if condition is fail */
/* If 'cond' holds, branch 12 bytes forward, skipping the throw sequence.
 * 12 = push_imm (5) + mov_reg_imm (5) + call_reg (2); if the throw
 * sequence ever changes length, this constant must change with it. */
144 #define EMIT_COND_EXCEPTION(cond, exc) \
146 x86_branch8 (s->code, cond, 12, TRUE); \
147 x86_push_imm (s->code, exc); \
148 x86_mov_reg_imm (s->code, X86_EAX, arch_get_throw_exception ()); \
149 x86_call_reg (s->code, X86_EAX); \
155 # terminal definitions
# One %term per IL-level tree opcode the matcher can see: constants,
# loads/stores, calls, arithmetic, branches, conversions (including the
# overflow-checking CONV_OVF_* family) and object-model helpers.
159 %term CONST_I4 CONST_I8 CONST_R4 CONST_R8
160 %term LDIND_I1 LDIND_U1 LDIND_I2 LDIND_U2 LDIND_I4 LDIND_I8 LDIND_R4 LDIND_R8
161 %term LDIND_U4 LDIND_OBJ
162 %term STIND_I1 STIND_I2 STIND_I4 STIND_I8 STIND_R4 STIND_R8 STIND_OBJ
163 %term ADDR_L ADDR_G ARG_I4 ARG_I8 ARG_R4 ARG_R8 ARG_OBJ ARG_STRING CALL_I4 CALL_I8 CALL_R8 CALL_VOID
164 %term BREAK SWITCH BR RET_VOID RET RET_OBJ ENDFINALLY
165 %term ADD SUB MUL DIV DIV_UN REM REM_UN AND OR XOR SHL SHR SHR_UN NEG NOT
166 %term BLT BLT_UN BEQ BNE_UN BRTRUE BRFALSE BGE BGE_UN BLE BLE_UN BGT BGT_UN
168 %term CONV_I4 CONV_I1 CONV_I2 CONV_I8 CONV_U8 CONV_R4 CONV_R8
169 %term INTF_ADDR VFUNC_ADDR NOP NEWARR NEWOBJ NEWSTRUCT CPOBJ POP INITOBJ
170 %term ISINST CASTCLASS UNBOX
171 %term CONV_OVF_I1 CONV_OVF_U1 CONV_OVF_I2 CONV_OVF_U2 CONV_OVF_U4 CONV_OVF_U8 CONV_OVF_I4
172 %term CONV_OVF_I2_UN CONV_OVF_I8_UN CONV_OVF_I1_UN
173 %term EXCEPTION THROW RETHROW HANDLER SAVE_LMF RESTORE_LMF
/* Addressing-mode nonterminals: each rule fills tree->data.ainfo with an
 * (amode, offset, basereg, indexreg, shift) tuple that the LDIND/STIND
 * rules below decode in their switch (amode). */
190 tree->data.ainfo.offset = tree->data.i;
191 tree->data.ainfo.amode = AMImmediate;
195 tree->data.ainfo.offset = tree->data.i;
196 tree->data.ainfo.amode = AMImmediate;
/* global address + constant folds into a single absolute address */
199 acon: ADD (ADDR_G, CONST_I4) {
200 tree->data.ainfo.offset = (unsigned)tree->left->data.p + tree->right->data.i;
201 tree->data.ainfo.amode = AMImmediate;
207 tree->data.ainfo.offset = 0;
208 tree->data.ainfo.basereg = tree->reg1;
209 tree->data.ainfo.amode = AMBase;
212 base: ADD (reg, acon) {
213 tree->data.ainfo.offset = tree->right->data.i;
214 tree->data.ainfo.basereg = tree->left->reg1;
215 tree->data.ainfo.amode = AMBase;
/* local variable: EBP-relative slot looked up in the per-method varinfo */
219 tree->data.ainfo.offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.i).offset;
220 tree->data.ainfo.basereg = X86_EBP;
221 tree->data.ainfo.amode = AMBase;
225 tree->data.ainfo.offset = 0;
226 tree->data.ainfo.indexreg = tree->reg1;
227 tree->data.ainfo.shift = 0;
228 tree->data.ainfo.amode = AMIndex;
/* reg << const becomes a scaled index; x86 SIB scale limits shift to 0..3 */
231 index: SHL (reg, CONST_I4) {
232 tree->data.ainfo.offset = 0;
233 tree->data.ainfo.amode = AMIndex;
234 tree->data.ainfo.indexreg = tree->left->reg1;
235 tree->data.ainfo.shift = tree->right->data.i;
237 MBCOND (tree->right->data.i == 0 ||
238 tree->right->data.i == 1 ||
239 tree->right->data.i == 2 ||
240 tree->right->data.i == 3);
/* reg * {1,2,4,8} becomes a scaled index; fast_log2 maps the multiplier
 * to its SIB shift (only indices 1,2,4,8 are reachable per the MBCOND) */
245 index: MUL (reg, CONST_I4) {
246 static int fast_log2 [] = { 1, 0, 1, -1, 2, -1, -1, -1, 3 };
248 tree->data.ainfo.offset = 0;
249 tree->data.ainfo.amode = AMIndex;
250 tree->data.ainfo.indexreg = tree->left->reg1;
251 tree->data.ainfo.shift = fast_log2 [tree->right->data.i];
253 MBCOND (tree->right->data.i == 1 ||
254 tree->right->data.i == 2 ||
255 tree->right->data.i == 4 ||
256 tree->right->data.i == 8);
/* combine an index and a base: amodes are bit flags, so OR-ing
 * AMIndex|AMBase yields AMBaseIndex */
265 addr: ADD (index, base) {
266 tree->data.ainfo.offset = tree->right->data.ainfo.offset;
267 tree->data.ainfo.basereg = tree->right->data.ainfo.basereg;
268 tree->data.ainfo.amode = tree->left->data.ainfo.amode |
269 tree->right->data.ainfo.amode;
270 tree->data.ainfo.shift = tree->left->data.ainfo.shift;
271 tree->data.ainfo.indexreg = tree->left->data.ainfo.indexreg;
274 # we pass exception in ECX to catch handler
/* EXCEPTION: the thrown object arrives in ECX; copy it into the rule's
 * result register and spill it to an EBP slot so RETHROW can find it */
276 int offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.i).offset;
278 if (tree->reg1 != X86_ECX)
279 x86_mov_reg_reg (s->code, tree->reg1, X86_ECX, 4);
281 /* store it so that we can RETHROW it later */
282 x86_mov_membase_reg (s->code, X86_EBP, offset, tree->reg1, 4);
/* THROW: push the exception object and call the throw trampoline */
286 tree->is_jump = TRUE;
288 x86_push_reg (s->code, tree->left->reg1);
289 x86_call_code (s->code, arch_get_throw_exception ());
/* RETHROW: reload the spilled exception object and throw it again */
293 int offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.i).offset;
295 tree->is_jump = TRUE;
297 x86_push_membase (s->code, X86_EBP, offset);
298 x86_call_code (s->code, arch_get_throw_exception ());
/* HANDLER: call the finally/filter block; 5 = length of call rel32 */
302 gint32 addr = tree->data.bb->addr - tree->addr - 5;
303 tree->is_jump = TRUE;
304 x86_call_imm (s->code, addr);
/* SAVE_LMF: build an LMF (last managed frame) record on the stack —
 * callee regs, current IP, method — then link it into the per-thread
 * LMF list returned by arch_get_lmf_addr. RESTORE_LMF undoes it in
 * exactly the reverse order, so the pop sequence mirrors the pushes. */
312 tree->is_jump = TRUE;
315 /* save all caller saved regs */
316 x86_push_reg (s->code, X86_EBX);
317 x86_push_reg (s->code, X86_EDI);
318 x86_push_reg (s->code, X86_ESI);
319 x86_push_reg (s->code, X86_EBP);
/* push the current native code address (saved IP for stack walks) */
322 x86_push_imm (s->code, s->code);
324 /* save method info */
325 x86_push_imm (s->code, tree->data.m);
326 /* get the address of lmf for the current thread */
327 x86_call_code (s->code, arch_get_lmf_addr);
329 x86_push_reg (s->code, X86_EAX);
330 /* push *lfm (previous_lmf) */
331 x86_push_membase (s->code, X86_EAX, 0);
/* *lmf = ESP: the record just built becomes the current LMF */
333 x86_mov_membase_reg (s->code, X86_EAX, 0, X86_ESP, 4);
337 /* ebx = previous_lmf */
338 x86_pop_reg (s->code, X86_EBX);
340 x86_pop_reg (s->code, X86_EDI);
341 /* *(lmf) = previous_lmf */
342 x86_mov_membase_reg (s->code, X86_EDI, 0, X86_EBX, 4);
344 /* discard method info */
345 x86_pop_reg (s->code, X86_ESI);
347 /* discard save IP */
348 x86_pop_reg (s->code, X86_ESI);
350 /* restore caller saved regs */
351 x86_pop_reg (s->code, X86_EBP);
352 x86_pop_reg (s->code, X86_ESI);
353 x86_pop_reg (s->code, X86_EDI);
354 x86_pop_reg (s->code, X86_EBX);
/* STIND_{I4,I1,I2}: store the right operand through the addressing mode
 * computed by the 'addr' nonterminal. The four switch arms (case labels
 * are on elided lines) correspond to AMImmediate, AMBase, AMIndex and
 * AMBaseIndex; only the operand size (4/1/2) differs between rules. */
357 stmt: STIND_I4 (addr, reg) {
358 PRINT_REG ("STIND_I4", tree->right->reg1);
360 switch (tree->left->data.ainfo.amode) {
363 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 4);
367 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
368 tree->left->data.ainfo.offset, tree->right->reg1, 4);
371 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
372 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
373 tree->right->reg1, 4);
376 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
377 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
378 tree->right->reg1, 4);
383 stmt: STIND_I1 (addr, reg) {
384 PRINT_REG ("STIND_I1", tree->right->reg1);
386 switch (tree->left->data.ainfo.amode) {
389 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 1);
393 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
394 tree->left->data.ainfo.offset, tree->right->reg1, 1);
397 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
398 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
399 tree->right->reg1, 1);
402 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
403 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
404 tree->right->reg1, 1);
409 stmt: STIND_I2 (addr, reg) {
410 PRINT_REG ("STIND_I2", tree->right->reg1);
412 switch (tree->left->data.ainfo.amode) {
415 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 2);
419 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
420 tree->left->data.ainfo.offset, tree->right->reg1, 2);
423 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
424 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
425 tree->right->reg1, 2);
428 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
429 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
430 tree->right->reg1, 2);
/* LDIND_* family: load through the computed addressing mode. The sub-word
 * variants use x86_widen_* with (is_signed, is_half) flags: I1=(TRUE,FALSE),
 * U1=(FALSE,FALSE), I2=(TRUE,TRUE), U2=(FALSE,TRUE). */
435 reg: LDIND_I4 (addr) {
437 switch (tree->left->data.ainfo.amode) {
440 x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4);
444 x86_mov_reg_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
445 tree->left->data.ainfo.offset, 4);
448 x86_mov_reg_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
449 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, 4);
452 x86_mov_reg_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
453 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
454 tree->left->data.ainfo.shift, 4);
459 PRINT_REG ("LDIND_I4", tree->reg1);
462 reg: LDIND_I1 (addr) {
463 switch (tree->left->data.ainfo.amode) {
466 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, TRUE, FALSE);
470 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
471 tree->left->data.ainfo.offset, TRUE, FALSE);
474 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
475 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, TRUE, FALSE);
478 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
479 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
480 tree->left->data.ainfo.shift, TRUE, FALSE);
484 PRINT_REG ("LDIND_I1", tree->reg1);
487 reg: LDIND_U1 (addr) {
488 switch (tree->left->data.ainfo.amode) {
491 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, FALSE, FALSE);
495 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
496 tree->left->data.ainfo.offset, FALSE, FALSE);
499 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
500 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, FALSE, FALSE);
503 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
504 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
505 tree->left->data.ainfo.shift, FALSE, FALSE);
509 PRINT_REG ("LDIND_U1", tree->reg1);
512 reg: LDIND_I2 (addr) {
513 switch (tree->left->data.ainfo.amode) {
516 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, TRUE, TRUE);
520 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
521 tree->left->data.ainfo.offset, TRUE, TRUE);
524 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
525 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, TRUE, TRUE);
528 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
529 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
530 tree->left->data.ainfo.shift, TRUE, TRUE);
/* NOTE(review): debug label says "LDIND_U2" but this is the LDIND_I2 rule —
 * copy-paste slip in the debug string only, no codegen effect */
534 PRINT_REG ("LDIND_U2", tree->reg1);
537 reg: LDIND_U2 (addr) {
538 switch (tree->left->data.ainfo.amode) {
541 x86_widen_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, FALSE, TRUE);
545 x86_widen_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
546 tree->left->data.ainfo.offset, FALSE, TRUE);
549 x86_widen_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
550 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, FALSE, TRUE);
553 x86_widen_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
554 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
555 tree->left->data.ainfo.shift, FALSE, TRUE);
559 PRINT_REG ("LDIND_U2", tree->reg1);
562 reg: LDIND_U4 (addr) {
563 switch (tree->left->data.ainfo.amode) {
566 x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4);
570 x86_mov_reg_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
571 tree->left->data.ainfo.offset, 4);
574 x86_mov_reg_memindex (s->code, tree->reg1, X86_NOBASEREG, tree->left->data.ainfo.offset,
575 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, 4);
578 x86_mov_reg_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
579 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
580 tree->left->data.ainfo.shift, 4);
584 PRINT_REG ("LDIND_U4", tree->reg1);
/* ADDR_L: materialize the address of a local (EBP-relative lea) */
588 int offset = g_array_index (s->varinfo, MonoVarInfo, tree->data.i).offset;
589 x86_lea_membase (s->code, tree->reg1, X86_EBP, offset);
591 PRINT_REG ("ADDR_L", tree->reg1);
/* ADDR_G: a global address is just an immediate */
596 x86_mov_reg_imm (s->code, tree->reg1, tree->data.p);
/* CONV_U1: truncate to 8 bits by masking in place, then move to reg1 */
600 x86_alu_reg_imm (s->code, X86_AND, tree->left->reg1, 0xff);
602 if (tree->reg1 != tree->left->reg1)
603 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* CONV_U2: truncate to 16 bits */
607 x86_alu_reg_imm (s->code, X86_AND, tree->left->reg1, 0xffff);
609 if (tree->reg1 != tree->left->reg1)
610 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* CONST_I4: load immediate */
614 x86_mov_reg_imm (s->code, tree->reg1, tree->data.i);
/* CONV_I4: no-op apart from a possible register move */
618 if (tree->reg1 != tree->left->reg1)
619 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
620 PRINT_REG ("CONV_I4", tree->left->reg1);
623 reg: CONV_OVF_U4 (reg) {
624 x86_test_reg_imm (s->code, tree->left->reg1, 0x8000000);
625 EMIT_COND_EXCEPTION (X86_CC_EQ, get_exception_overflow ());
626 if (tree->reg1 != tree->left->reg1)
627 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
630 reg: CONV_OVF_I1 (reg) {
631 /* probe value to be within -128 to 127 */
632 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 127);
633 EMIT_COND_EXCEPTION (X86_CC_LE, get_exception_overflow ());
634 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, -128);
635 // fixme: check that branch distance
636 g_assert_not_reached ();
637 x86_branch8 (s->code, X86_CC_LT, -19, TRUE);
638 if (tree->reg1 != tree->left->reg1)
639 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* uint -> sbyte with overflow check: any bit in 0xffffff80 means the
 * value is >= 128 (out of sbyte range for an unsigned source) */
642 reg: CONV_OVF_I1_UN (reg) {
643 /* probe values between 0 to 128 */
644 x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff80);
/* ZF set => all high bits clear => in range, skip the throw */
645 EMIT_COND_EXCEPTION (X86_CC_EQ, get_exception_overflow ());
646 if (tree->reg1 != tree->left->reg1)
647 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* int -> byte with overflow check: any bit above the low 8 is overflow */
650 reg: CONV_OVF_U1 (reg) {
651 /* probe value to be within 0 to 255 */
652 x86_test_reg_imm (s->code, tree->left->reg1, 0xffffff00);
653 EMIT_COND_EXCEPTION (X86_CC_EQ, get_exception_overflow ());
654 if (tree->reg1 != tree->left->reg1)
655 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
658 reg: CONV_OVF_I2 (reg) {
659 /* Probe value to be within -32768 and 32767 */
660 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 32767);
661 EMIT_COND_EXCEPTION (X86_CC_LE, get_exception_overflow ());
662 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, -32768);
663 // fixme: check branch
664 g_assert_not_reached ();
665 x86_branch8 (s->code, X86_CC_LT, -17, TRUE);
666 if (tree->reg1 != tree->left->reg1)
667 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* int -> ushort with overflow check: any bit above the low 16 overflows */
670 reg: CONV_OVF_U2 (reg) {
671 /* Probe value to be within 0 and 65535 */
672 x86_test_reg_imm (s->code, tree->left->reg1, 0xffff0000);
673 EMIT_COND_EXCEPTION (X86_CC_EQ, get_exception_overflow ());
674 if (tree->reg1 != tree->left->reg1)
675 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* uint -> short with overflow check: bits 15..31 must all be clear */
678 reg: CONV_OVF_I2_UN (reg) {
679 /* Convert uint value into short, value within 0 and 32767 */
680 x86_test_reg_imm (s->code, tree->left->reg1, 0xffff8000);
681 EMIT_COND_EXCEPTION (X86_CC_EQ, get_exception_overflow ());
682 if (tree->reg1 != tree->left->reg1)
683 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* MUL: two-operand imul, result lands in the left operand's register */
686 reg: MUL (reg, reg) {
687 x86_imul_reg_reg (s->code, tree->left->reg1, tree->right->reg1);
689 if (tree->reg1 != tree->left->reg1)
690 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* DIV/DIV_UN/REM/REM_UN: x86 div/idiv take EDX:EAX and write quotient
 * to EAX, remainder to EDX; the register allocator pins reg1/reg2 to
 * EAX/EDX (asserted below).
 * NOTE(review): no cdq (signed) / xor edx,edx (unsigned) is visible
 * before the div in this extraction — presumably on elided lines;
 * confirm against the full source, since div without it is wrong. */
693 reg: DIV (reg, reg) {
694 g_assert (tree->right->reg1 != X86_EAX);
696 if (tree->left->reg1 != X86_EAX)
697 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
700 x86_div_reg (s->code, tree->right->reg1, TRUE);
702 g_assert (tree->reg1 == X86_EAX &&
703 tree->reg2 == X86_EDX);
706 reg: DIV_UN (reg, reg) {
707 g_assert (tree->right->reg1 != X86_EAX);
709 if (tree->left->reg1 != X86_EAX)
710 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
713 x86_div_reg (s->code, tree->right->reg1, FALSE);
715 g_assert (tree->reg1 == X86_EAX &&
716 tree->reg2 == X86_EDX);
/* REM/REM_UN: same as DIV but copy the remainder (EDX) into EAX */
719 reg: REM (reg, reg) {
720 g_assert (tree->right->reg1 != X86_EAX);
722 if (tree->left->reg1 != X86_EAX)
723 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
726 x86_div_reg (s->code, tree->right->reg1, TRUE);
727 x86_mov_reg_reg (s->code, X86_EAX, X86_EDX, 4);
729 g_assert (tree->reg1 == X86_EAX &&
730 tree->reg2 == X86_EDX);
733 reg: REM_UN (reg, reg) {
734 g_assert (tree->right->reg1 != X86_EAX);
736 if (tree->left->reg1 != X86_EAX)
737 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
740 x86_div_reg (s->code, tree->right->reg1, FALSE);
741 x86_mov_reg_reg (s->code, X86_EAX, X86_EDX, 4);
743 g_assert (tree->reg1 == X86_EAX &&
744 tree->reg2 == X86_EDX);
/* ADD/SUB with immediate: special-case +/-1 as inc/dec.
 * NOTE(review): the 'else' line between the inc/dec and the alu op is
 * elided in this extraction — as written here the code would emit both;
 * confirm the full source has the else. */
747 reg: ADD (reg, CONST_I4) "MB_USE_OPT1(0)" {
748 if (tree->right->data.i == 1)
749 x86_inc_reg (s->code, tree->left->reg1);
751 x86_alu_reg_imm (s->code, X86_ADD, tree->left->reg1, tree->right->data.i);
753 if (tree->reg1 != tree->left->reg1)
754 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
758 reg: ADD (reg, reg) {
759 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
761 if (tree->reg1 != tree->left->reg1)
762 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
765 reg: SUB (reg, CONST_I4) "MB_USE_OPT1(0)" {
766 if (tree->right->data.i == 1)
767 x86_dec_reg (s->code, tree->left->reg1);
769 x86_alu_reg_imm (s->code, X86_SUB, tree->left->reg1, tree->right->data.i);
771 if (tree->reg1 != tree->left->reg1)
772 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
775 reg: SUB (reg, reg) {
776 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
778 if (tree->reg1 != tree->left->reg1)
779 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* CEQ/CGT/CLT: cmp, setcc into the byte register, zero-extend to 32 bits */
782 reg: CEQ (reg, reg) {
783 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
784 x86_set_reg (s->code, X86_CC_EQ, tree->reg1, TRUE);
785 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
788 reg: CGT (reg, reg) {
789 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
790 x86_set_reg (s->code, X86_CC_GT, tree->reg1, TRUE);
791 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
794 reg: CLT (reg, reg) {
795 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
796 x86_set_reg (s->code, X86_CC_LT, tree->reg1, TRUE);
797 x86_widen_reg (s->code, tree->reg1, tree->reg1, FALSE, FALSE);
/* Bitwise ops: two-operand form on the left register, then move to reg1 */
800 reg: AND (reg, reg) {
801 x86_alu_reg_reg (s->code, X86_AND, tree->left->reg1, tree->right->reg1);
803 if (tree->reg1 != tree->left->reg1)
804 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
808 x86_alu_reg_reg (s->code, X86_OR, tree->left->reg1, tree->right->reg1);
810 if (tree->reg1 != tree->left->reg1)
811 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
814 reg: XOR (reg, reg) {
815 x86_alu_reg_reg (s->code, X86_XOR, tree->left->reg1, tree->right->reg1);
817 if (tree->reg1 != tree->left->reg1)
818 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
822 x86_neg_reg (s->code, tree->left->reg1);
824 if (tree->reg1 != tree->left->reg1)
825 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
829 x86_not_reg (s->code, tree->left->reg1);
831 if (tree->reg1 != tree->left->reg1)
832 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Shifts: variable shift counts must live in CL, so the reg,reg forms
 * move the count into ECX first and assert nothing else uses it.
 * SHL/SAR/SHR pick the signed/unsigned variant per IL semantics. */
835 reg: SHL (reg, CONST_I4) {
836 x86_shift_reg_imm (s->code, X86_SHL, tree->left->reg1, tree->right->data.i);
838 if (tree->reg1 != tree->left->reg1)
839 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
842 reg: SHL (reg, reg) {
843 if (tree->right->reg1 != X86_ECX)
844 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
845 x86_shift_reg (s->code, X86_SHL, tree->left->reg1);
847 if (tree->reg1 != tree->left->reg1)
848 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
850 g_assert (tree->reg1 != X86_ECX &&
851 tree->left->reg1 != X86_ECX);
854 reg: SHR (reg, CONST_I4) {
855 x86_shift_reg_imm (s->code, X86_SAR, tree->left->reg1, tree->right->data.i);
857 if (tree->reg1 != tree->left->reg1)
858 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
861 reg: SHR (reg, reg) {
862 if (tree->right->reg1 != X86_ECX)
863 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
864 x86_shift_reg (s->code, X86_SAR, tree->left->reg1);
866 if (tree->reg1 != tree->left->reg1)
867 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
869 g_assert (tree->reg1 != X86_ECX &&
870 tree->left->reg1 != X86_ECX);
873 reg: SHR_UN (reg, CONST_I4) {
874 x86_shift_reg_imm (s->code, X86_SHR, tree->left->reg1, tree->right->data.i);
876 if (tree->reg1 != tree->left->reg1)
877 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
880 reg: SHR_UN (reg, reg) {
881 if (tree->right->reg1 != X86_ECX)
882 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
883 x86_shift_reg (s->code, X86_SHR, tree->left->reg1);
885 if (tree->reg1 != tree->left->reg1)
886 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
888 g_assert (tree->reg1 != X86_ECX &&
889 tree->left->reg1 != X86_ECX);
/* Array length: load obj->bounds, then bounds->length (double indirection
 * through MonoArray/MonoArrayBounds) */
894 x86_mov_reg_membase (s->code, tree->reg1, tree->left->reg1,
895 G_STRUCT_OFFSET (MonoArray, bounds), 4);
896 x86_mov_reg_membase (s->code, tree->reg1, tree->reg1,
897 G_STRUCT_OFFSET (MonoArrayBounds, length), 4);
/* disabled LDELEMA rule kept for reference (commented out upstream) */
900 #reg: LDELEMA (reg, reg) {
901 # x86_imul_reg_reg_imm (s->code, tree->right->reg1, tree->right->reg1, tree->data.i);
902 # x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->right->reg1);
903 # x86_alu_reg_imm (s->code, X86_ADD, tree->reg1, G_STRUCT_OFFSET (MonoArray, vector));
/* NEWARR: call mono_array_new (klass, length) via a runtime callout,
 * preserving EAX/ECX/EDX around it; result comes back in EAX and is
 * copied to reg1 (EAX only saved/restored when it is not reg1 itself) */
907 if (tree->reg1 != X86_EAX)
908 x86_push_reg (s->code, X86_EAX);
909 x86_push_reg (s->code, X86_ECX);
910 x86_push_reg (s->code, X86_EDX);
912 x86_push_reg (s->code, tree->left->reg1);
913 x86_push_imm (s->code, tree->data.p);
914 x86_mov_reg_imm (s->code, X86_EAX, mono_array_new);
915 x86_call_reg (s->code, X86_EAX);
/* pop the two arguments (klass pointer + length word) */
916 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer) + 4);
918 x86_pop_reg (s->code, X86_EDX);
919 x86_pop_reg (s->code, X86_ECX);
920 if (tree->reg1 != X86_EAX) {
921 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
922 x86_pop_reg (s->code, X86_EAX);
/* NEWOBJ: same callout pattern for mono_object_new (klass) */
927 if (tree->reg1 != X86_EAX)
928 x86_push_reg (s->code, X86_EAX);
929 x86_push_reg (s->code, X86_ECX);
930 x86_push_reg (s->code, X86_EDX);
932 x86_push_imm (s->code, tree->data.klass);
933 x86_mov_reg_imm (s->code, X86_EAX, mono_object_new);
934 x86_call_reg (s->code, X86_EAX);
935 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, sizeof (gpointer));
937 x86_pop_reg (s->code, X86_EDX);
938 x86_pop_reg (s->code, X86_ECX);
939 if (tree->reg1 != X86_EAX) {
940 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
941 x86_pop_reg (s->code, X86_EAX);
/* NEWSTRUCT: reserve 'size' bytes on the stack (sa presumably the
 * aligned size — alignment computation is on elided lines) and return
 * the new ESP as the struct address */
946 int size = tree->data.i;
954 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, sa);
955 x86_mov_reg_reg (s->code, tree->reg1, X86_ESP, 4);
/* UNBOX: null-check, verify the object's vtable/class word matches the
 * expected klass, then step past the MonoObject header to the value */
959 if (tree->reg1 != tree->left->reg1)
960 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
962 x86_alu_reg_imm (s->code, X86_CMP, tree->reg1, 0);
963 EMIT_COND_EXCEPTION (X86_CC_NE, get_exception_null_reference ());
964 x86_alu_membase_imm (s->code, X86_CMP, tree->reg1, 0, ((int)(tree->data.klass)));
965 EMIT_COND_EXCEPTION (X86_CC_EQ, get_exception_invalid_cast ());
966 x86_alu_reg_imm (s->code, X86_ADD, tree->reg1, sizeof (MonoObject));
/* CASTCLASS: two-pass emission (the for loop re-emits so the forward
 * branch distance le - l2 is known on the second pass); null references
 * pass the cast, otherwise mono_object_isinst must return non-NULL or
 * an InvalidCastException is thrown */
969 reg: CASTCLASS (reg) {
970 guint8 *start = s->code, *l1, *l2, *le;
973 tree->is_jump = TRUE;
976 for (i = 0; i < 2; i++) {
979 if (tree->reg1 != X86_EAX)
980 x86_push_reg (s->code, X86_EAX);
982 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 0);
/* null object: skip the isinst call entirely */
983 x86_branch8 (s->code, X86_CC_EQ, le - l2, FALSE);
985 x86_push_reg (s->code, X86_ECX);
986 x86_push_reg (s->code, X86_EDX);
988 x86_push_imm (s->code, tree->data.klass);
989 x86_push_reg (s->code, tree->left->reg1);
990 x86_mov_reg_imm (s->code, X86_EAX, mono_object_isinst);
991 x86_call_reg (s->code, X86_EAX);
992 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
993 x86_pop_reg (s->code, X86_EDX);
994 x86_pop_reg (s->code, X86_ECX);
996 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0);
997 EMIT_COND_EXCEPTION (X86_CC_NE, get_exception_invalid_cast ());
/* ISINST: same callout but the (possibly NULL) result is the value */
1004 if (tree->reg1 != X86_EAX)
1005 x86_push_reg (s->code, X86_EAX);
1006 x86_push_reg (s->code, X86_ECX);
1007 x86_push_reg (s->code, X86_EDX);
1009 x86_push_imm (s->code, tree->data.klass);
1010 x86_push_reg (s->code, tree->left->reg1);
1011 x86_mov_reg_imm (s->code, X86_EAX, mono_object_isinst);
1012 x86_call_reg (s->code, X86_EAX);
1013 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 8);
1015 x86_pop_reg (s->code, X86_EDX);
1016 x86_pop_reg (s->code, X86_ECX);
1017 if (tree->reg1 != X86_EAX) {
1018 x86_mov_reg_reg (s->code, tree->reg1, X86_EAX, 4);
1019 x86_pop_reg (s->code, X86_EAX);
/* INITOBJ: zero tree->data.i bytes at the address in left->reg1.
 * Small sizes (1/2/4) use a single store of a zeroed scratch reg;
 * larger sizes use rep stosl for the word part plus stosb for the
 * remainder, saving/restoring EAX/ECX/EDI as needed. */
1024 stmt: INITOBJ (reg) {
1029 if (i == 1 || i == 2 || i == 4) {
1032 if (tree->left->reg1 != X86_EAX)
1035 x86_push_reg (s->code, t);
1036 x86_alu_reg_reg (s->code, X86_XOR, t, t);
1038 switch (tree->data.i) {
1040 x86_mov_regp_reg (s->code, tree->left->reg1, t, 4)
1043 x86_mov_regp_reg (s->code, tree->left->reg1, t, 4);
1046 x86_mov_regp_reg (s->code, tree->left->reg1, t, 4);
1049 x86_pop_reg (s->code, t);
/* large case: i = word count, j = trailing bytes */
1054 i = tree->data.i / 4;
1055 j = tree->data.i % 4;
1057 x86_push_reg (s->code, X86_EAX);
1059 if (tree->left->reg1 != X86_EDI) {
1060 x86_push_reg (s->code, X86_EDI);
1061 x86_mov_reg_reg (s->code, X86_EDI, tree->left->reg1, 4);
1065 x86_push_reg (s->code, X86_ECX);
1066 x86_alu_reg_reg (s->code, X86_XOR, X86_EAX, X86_EAX);
1067 x86_mov_reg_imm (s->code, X86_ECX, i);
1069 x86_prefix (s->code, X86_REP_PREFIX);
1070 x86_stosl (s->code);
1071 x86_pop_reg (s->code, X86_ECX);
1075 for (i = 0; i < j; i++)
1076 x86_stosb (s->code);
1078 if (tree->left->reg1 != X86_EDI)
1079 x86_pop_reg (s->code, X86_EDI);
1081 x86_pop_reg (s->code, X86_EAX);
/* BR: unconditional jump; 5 = length of jmp rel32, and tree->data.bb->addr
 * is the target basic block's offset within the method */
1089 gint32 addr = tree->data.bb->addr - tree->addr - 5;
1090 tree->is_jump = TRUE;
1092 x86_jump32 (s->code, addr);
/* Conditional branches: each rule is cmp + jcc rel32. The displacement is
 * target - (current offset + 6) since cmp consumed s->code already and the
 * jcc itself is 6 bytes. The final TRUE/FALSE argument selects the
 * signed vs unsigned condition-code variant; *_UN rules pass FALSE. */
1095 stmt: BLT (reg, reg) 1 {
1098 tree->is_jump = TRUE;
1099 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1100 offset = 6 + s->code - s->start;
1101 x86_branch32 (s->code, X86_CC_LT, tree->data.bb->addr - offset, TRUE);
1104 stmt: BLT (reg, CONST_I4) "MB_USE_OPT1(0)" {
1107 tree->is_jump = TRUE;
1108 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
1109 offset = 6 + s->code - s->start;
1110 x86_branch32 (s->code, X86_CC_LT, tree->data.bb->addr - offset, TRUE);
1113 stmt: BLT_UN (reg, reg) 1 {
1116 tree->is_jump = TRUE;
1117 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1118 offset = 6 + s->code - s->start;
1119 x86_branch32 (s->code, X86_CC_LT, tree->data.bb->addr - offset, FALSE);
1122 stmt: BLT_UN (reg, CONST_I4) "MB_USE_OPT1(0)" {
1125 tree->is_jump = TRUE;
1126 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
1127 offset = 6 + s->code - s->start;
1128 x86_branch32 (s->code, X86_CC_LT, tree->data.bb->addr - offset, FALSE);
1131 stmt: BGT (reg, reg) 1 {
1134 tree->is_jump = TRUE;
1135 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1136 offset = 6 + s->code - s->start;
1137 x86_branch32 (s->code, X86_CC_GT, tree->data.bb->addr - offset, TRUE);
1140 stmt: BGT (reg, CONST_I4) "MB_USE_OPT1(0)" {
1143 tree->is_jump = TRUE;
1144 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
1145 offset = 6 + s->code - s->start;
1146 x86_branch32 (s->code, X86_CC_GT, tree->data.bb->addr - offset, TRUE);
1149 stmt: BGT_UN (reg, reg) 1 {
1152 tree->is_jump = TRUE;
1153 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1154 offset = 6 + s->code - s->start;
1155 x86_branch32 (s->code, X86_CC_GT, tree->data.bb->addr - offset, FALSE);
1158 stmt: BGT_UN (reg, CONST_I4) "MB_USE_OPT1(0)" {
1161 tree->is_jump = TRUE;
1162 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
1163 offset = 6 + s->code - s->start;
1164 x86_branch32 (s->code, X86_CC_GT, tree->data.bb->addr - offset, FALSE);
1167 stmt: BEQ (reg, CONST_I4) "MB_USE_OPT1(0)" {
1170 tree->is_jump = TRUE;
1171 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
1172 offset = 6 + s->code - s->start;
1173 x86_branch32 (s->code, X86_CC_EQ, tree->data.bb->addr - offset, TRUE);
1176 stmt: BEQ (reg, reg) 1 {
1179 tree->is_jump = TRUE;
1180 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1181 offset = 6 + s->code - s->start;
1182 x86_branch32 (s->code, X86_CC_EQ, tree->data.bb->addr - offset, TRUE);
1185 stmt: BNE_UN (reg, reg) 1 {
1188 tree->is_jump = TRUE;
1189 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1190 offset = 6 + s->code - s->start;
1191 x86_branch32 (s->code, X86_CC_NE, tree->data.bb->addr - offset, FALSE);
1194 stmt: BNE_UN (reg, CONST_I4) "MB_USE_OPT1(0)" {
1197 tree->is_jump = TRUE;
1198 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
1199 offset = 6 + s->code - s->start;
1200 x86_branch32 (s->code, X86_CC_NE, tree->data.bb->addr - offset, FALSE);
1203 stmt: BGE (reg, reg) 1 {
1206 tree->is_jump = TRUE;
1207 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1208 offset = 6 + s->code - s->start;
1209 x86_branch32 (s->code, X86_CC_GE, tree->data.bb->addr - offset, TRUE);
1212 stmt: BGE (reg, CONST_I4) "MB_USE_OPT1(0)" {
1215 tree->is_jump = TRUE;
1216 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
1217 offset = 6 + s->code - s->start;
1218 x86_branch32 (s->code, X86_CC_GE, tree->data.bb->addr - offset, TRUE);
1221 stmt: BGE_UN (reg, reg) 1 {
1224 tree->is_jump = TRUE;
1225 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1226 offset = 6 + s->code - s->start;
1227 x86_branch32 (s->code, X86_CC_GE, tree->data.bb->addr - offset, FALSE);
1230 stmt: BGE_UN (reg, CONST_I4) "MB_USE_OPT1(0)" {
1233 tree->is_jump = TRUE;
1234 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
1235 offset = 6 + s->code - s->start;
1236 x86_branch32 (s->code, X86_CC_GE, tree->data.bb->addr - offset, FALSE);
1239 stmt: BLE (reg, reg) 1 {
1242 tree->is_jump = TRUE;
1243 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1244 offset = 6 + s->code - s->start;
1245 x86_branch32 (s->code, X86_CC_LE, tree->data.bb->addr - offset, TRUE);
1248 stmt: BLE (reg, CONST_I4) "MB_USE_OPT1(0)" {
1251 tree->is_jump = TRUE;
1252 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
1253 offset = 6 + s->code - s->start;
1254 x86_branch32 (s->code, X86_CC_LE, tree->data.bb->addr - offset, TRUE);
1257 stmt: BLE_UN (reg, reg) 1 {
1260 tree->is_jump = TRUE;
1261 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1262 offset = 6 + s->code - s->start;
1263 x86_branch32 (s->code, X86_CC_LE, tree->data.bb->addr - offset, FALSE);
1266 stmt: BLE_UN (reg, CONST_I4) "MB_USE_OPT1(0)" {
1269 tree->is_jump = TRUE;
1270 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
1271 offset = 6 + s->code - s->start;
1272 x86_branch32 (s->code, X86_CC_LE, tree->data.bb->addr - offset, FALSE);
/* BRTRUE/BRFALSE: compare against zero and jump on NE/EQ respectively */
1275 stmt: BRTRUE (reg) {
1278 tree->is_jump = TRUE;
1279 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 0);
1280 offset = 6 + s->code - s->start;
1281 x86_branch32 (s->code, X86_CC_NE, tree->data.bb->addr - offset, TRUE);
1284 stmt: BRFALSE (reg) {
1287 tree->is_jump = TRUE;
1288 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 0);
1289 offset = 6 + s->code - s->start;
1290 x86_branch32 (s->code, X86_CC_EQ, tree->data.bb->addr - offset, TRUE);
# --- BREAK / return-value fragments ---------------------------------------
# NOTE(review): the rule headers for the following actions (a breakpoint
# rule and two return rules, by the look of the code) were dropped by the
# extraction; code lines are kept untouched.
1294 x86_breakpoint (s->code);
# Move the 32-bit return value into EAX (the x86 return register), then
# jump to the shared epilog unless this is the method's last instruction.
# `s->epilog - 5` accounts for the 5-byte jmp rel32 instruction itself.
1298 if (tree->left->reg1 != X86_EAX)
1299 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1301 if (!tree->last_instr) {
1302 tree->is_jump = TRUE;
1303 x86_jump32 (s->code, s->epilog - 5);
# Void-return variant: no value to move, just the epilog jump.
1308 if (!tree->last_instr) {
1309 tree->is_jump = TRUE;
1310 x86_jump32 (s->code, s->epilog - 5);
# --- Argument push rules ---------------------------------------------------
# Arguments are passed on the stack; each ARG_* rule pushes one value.
# The addr switches dispatch on the addressing mode (AMImmediate /
# AMBase / AMIndex / AMBaseIndex, cf. the enum at the top of the file).
# NOTE(review): the `case`/`break` labels of the switches are on lines
# missing from this extract.
1315 stmt: ARG_I4 (LDIND_I4 (addr)) {
1316 MBTree *at = tree->left->left;
1318 switch (at->data.ainfo.amode) {
1321 x86_push_mem (s->code, at->data.ainfo.offset);
1325 x86_push_membase (s->code, at->data.ainfo.basereg, at->data.ainfo.offset);
1328 x86_push_memindex (s->code, X86_NOBASEREG, at->data.ainfo.offset,
1329 at->data.ainfo.indexreg, at->data.ainfo.shift);
1332 x86_push_memindex (s->code, at->data.ainfo.basereg,
1333 at->data.ainfo.offset, at->data.ainfo.indexreg,
1334 at->data.ainfo.shift);
# Same as above for unsigned loads — a 32-bit push is sign-agnostic.
1339 stmt: ARG_I4 (LDIND_U4 (addr)) {
1340 MBTree *at = tree->left->left;
1342 switch (at->data.ainfo.amode) {
1345 x86_push_mem (s->code, at->data.ainfo.offset);
1349 x86_push_membase (s->code, at->data.ainfo.basereg, at->data.ainfo.offset);
1352 x86_push_memindex (s->code, X86_NOBASEREG, at->data.ainfo.offset,
1353 at->data.ainfo.indexreg, at->data.ainfo.shift);
1356 x86_push_memindex (s->code, at->data.ainfo.basereg,
1357 at->data.ainfo.offset, at->data.ainfo.indexreg,
1358 at->data.ainfo.shift);
1363 stmt: ARG_I4 (reg) {
1364 x86_push_reg (s->code, tree->left->reg1);
1365 PRINT_REG ("ARG_I4", tree->left->reg1);
# ARG_STRING: convert a MonoString* argument to a utf8 C string at call
# time.  SUB ESP,4 reserves the final argument slot; EAX/ECX/EDX (the
# caller-saved registers) are preserved around the helper call; the
# mono_string_to_utf8 result is stored into the reserved slot at ESP+12
# (12 = the three saved registers above it).
1368 # fixme: we must free the allocated strings somewhere
1369 stmt: ARG_STRING (reg) {
1370 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
1371 x86_push_reg (s->code, X86_EAX);
1372 x86_push_reg (s->code, X86_ECX);
1373 x86_push_reg (s->code, X86_EDX);
1375 x86_push_reg (s->code, tree->left->reg1);
1376 x86_mov_reg_imm (s->code, X86_EAX, mono_string_to_utf8);
1377 x86_call_reg (s->code, X86_EAX);
1378 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 4);
1380 x86_mov_membase_reg (s->code, X86_ESP, 12, X86_EAX, 4);
1382 x86_pop_reg (s->code, X86_EDX);
1383 x86_pop_reg (s->code, X86_ECX);
1384 x86_pop_reg (s->code, X86_EAX);
1387 stmt: ARG_I4 (ADDR_G) {
1388 x86_push_imm (s->code, tree->left->data.p);
1391 stmt: ARG_I4 (CONST_I4) "MB_USE_OPT1(0)" {
1392 x86_push_imm (s->code, tree->left->data.i);
# NOTE(review): the header of the rule this PRINT_REG belongs to (a `this`
# nonterminal rule, presumably) is missing from the extract.
1396 PRINT_REG ("THIS", tree->reg1);
# --- CALL_I4 / CALL_VOID rules ---------------------------------------------
# Shared call sequence: push `this` (lreg) unless the left child is a NOP,
# push the address of the valuetype return buffer (EBP+offset of local
# ci->vtype_num) when the callee returns a value type, emit the call, then
# pop the arguments with ADD ESP, ci->args_size.  Integer results are
# asserted to land in EAX per the x86 calling convention.
# NOTE(review): this extract is line-sampled; `treg` declarations/
# assignments, closing braces, and some statements are missing from view.
1401 reg: CALL_I4 (this, reg) {
1402 MethodCallInfo *ci = tree->data.ci;
1404 int lreg = tree->left->reg1;
1405 int rreg = tree->right->reg1;
# NOTE(review): the three identical conditions below clearly lost their
# bodies to the extraction — presumably each retargets `treg` to a scratch
# register not equal to lreg/rreg, asserting if none is free.
1407 if (lreg == treg || rreg == treg)
1409 if (lreg == treg || rreg == treg)
1411 if (lreg == treg || rreg == treg)
1412 g_assert_not_reached ();
1414 if (tree->left->op != MB_TERM_NOP) {
1415 g_assert (lreg >= 0);
1416 x86_push_reg (s->code, lreg);
1419 if (ci->vtype_num) {
1420 int offset = g_array_index (s->varinfo, MonoVarInfo, ci->vtype_num).offset;
1421 x86_lea_membase (s->code, treg, X86_EBP, offset);
1422 x86_push_reg (s->code, treg);
1425 x86_call_reg (s->code, rreg);
1428 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, ci->args_size);
1430 PRINT_REG ("CALL_I4", tree->reg1);
1432 g_assert (tree->reg1 == X86_EAX);
# Direct call through a global function-pointer slot (indirect call to the
# address stored at the ADDR_G location).
1435 reg: CALL_I4 (this, LDIND_I4 (ADDR_G)) {
1436 MethodCallInfo *ci = tree->data.ci;
1437 int lreg = tree->left->reg1;
1443 if (tree->left->op != MB_TERM_NOP) {
1444 g_assert (lreg >= 0);
1445 x86_push_reg (s->code, lreg);
1448 if (ci->vtype_num) {
1449 int offset = g_array_index (s->varinfo, MonoVarInfo, ci->vtype_num).offset;
1450 x86_lea_membase (s->code, treg, X86_EBP, offset);
1451 x86_push_reg (s->code, treg);
1454 x86_call_mem (s->code, tree->right->left->data.p);
1457 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, ci->args_size);
1459 PRINT_REG ("CALL_I4", tree->reg1);
1461 g_assert (tree->reg1 == X86_EAX);
# Interface call: load the klass pointer from the object (offset 0), then
# the interface_offsets table, then index it by interface_id*4 to reach
# the interface vtable, and call through slot*4.  x86_call_virtual is the
# fixed-size call form required by x86_magic_trampoline (see file header).
1464 reg: CALL_I4 (this, INTF_ADDR) {
1465 MethodCallInfo *ci = tree->data.ci;
1466 int lreg = tree->left->reg1;
1472 if (tree->left->op != MB_TERM_NOP) {
1473 g_assert (lreg >= 0);
1474 x86_push_reg (s->code, lreg);
1477 if (ci->vtype_num) {
1478 int offset = g_array_index (s->varinfo, MonoVarInfo, ci->vtype_num).offset;
1479 x86_lea_membase (s->code, treg, X86_EBP, offset);
1480 x86_push_reg (s->code, treg);
1483 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
1484 x86_mov_reg_membase (s->code, lreg, lreg,
1485 G_STRUCT_OFFSET (MonoClass, interface_offsets), 4);
1486 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
1487 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
1490 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, ci->args_size);
1492 PRINT_REG ("CALL_I4(INTERFACE)", tree->reg1);
1494 g_assert (tree->reg1 == X86_EAX);
# Virtual call: load the klass pointer, call through klass->vtable[slot].
1497 reg: CALL_I4 (this, VFUNC_ADDR) {
1498 MethodCallInfo *ci = tree->data.ci;
1499 int lreg = tree->left->reg1;
1505 if (tree->left->op != MB_TERM_NOP) {
1506 g_assert (lreg >= 0);
1507 x86_push_reg (s->code, lreg);
1510 if (ci->vtype_num) {
1511 int offset = g_array_index (s->varinfo, MonoVarInfo, ci->vtype_num).offset;
1512 x86_lea_membase (s->code, treg, X86_EBP, offset);
1513 x86_push_reg (s->code, treg);
1516 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
1517 x86_call_virtual (s->code, lreg,
1518 G_STRUCT_OFFSET (MonoClass, vtable) + (tree->right->data.m->slot << 2));
1521 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, ci->args_size);
1523 PRINT_REG ("CALL_I4(VIRTUAL)", tree->reg1);
1525 g_assert (tree->reg1 == X86_EAX);
# CALL_VOID variants: identical call sequences, no return-register check.
1528 stmt: CALL_VOID (this, LDIND_I4 (ADDR_G)) {
1529 MethodCallInfo *ci = tree->data.ci;
1530 int lreg = tree->left->reg1;
1536 if (tree->left->op != MB_TERM_NOP) {
1537 g_assert (lreg >= 0);
1538 x86_push_reg (s->code, lreg);
1541 if (ci->vtype_num) {
1542 int offset = g_array_index (s->varinfo, MonoVarInfo, ci->vtype_num).offset;
1543 x86_lea_membase (s->code, treg, X86_EBP, offset);
1544 x86_push_reg (s->code, treg);
1547 x86_call_mem (s->code, tree->right->left->data.p);
1550 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, ci->args_size);
1553 stmt: CALL_VOID (this, INTF_ADDR) {
1554 MethodCallInfo *ci = tree->data.ci;
1555 int lreg = tree->left->reg1;
1561 if (tree->left->op != MB_TERM_NOP) {
1562 g_assert (lreg >= 0);
1563 x86_push_reg (s->code, lreg);
1566 if (ci->vtype_num) {
1567 int offset = g_array_index (s->varinfo, MonoVarInfo, ci->vtype_num).offset;
1568 x86_lea_membase (s->code, treg, X86_EBP, offset);
1569 x86_push_reg (s->code, treg);
1572 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
1573 x86_mov_reg_membase (s->code, lreg, lreg,
1574 G_STRUCT_OFFSET (MonoClass, interface_offsets), 4);
1575 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
1576 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
1579 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, ci->args_size);
1582 stmt: CALL_VOID (this, VFUNC_ADDR) {
1583 MethodCallInfo *ci = tree->data.ci;
1584 int lreg = tree->left->reg1;
1590 if (tree->left->op != MB_TERM_NOP) {
1591 g_assert (lreg >= 0);
1592 x86_push_reg (s->code, lreg);
1595 if (ci->vtype_num) {
1596 int offset = g_array_index (s->varinfo, MonoVarInfo, ci->vtype_num).offset;
1597 x86_lea_membase (s->code, treg, X86_EBP, offset);
1598 x86_push_reg (s->code, treg);
1601 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
1602 x86_call_virtual (s->code, lreg,
1603 G_STRUCT_OFFSET (MonoClass, vtable) + (tree->right->data.m->slot << 2));
1606 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, ci->args_size);
# --- SWITCH: jump-table dispatch -------------------------------------------
# jt[0] holds the number of cases; jt[jt[0]+1] appears to be the default
# target.  Bounds check uses an unsigned GE (FALSE flag), which also
# catches negative indices, then an indexed jump through the table at
# tree->data.i + 4 (scale 4 = pointer-sized entries).
# NOTE(review): unlike every other branch rule, `offset` here is computed
# from the absolute code address (6 + (guint32)s->code) rather than
# `s->code - s->start`, implying the jt[] entries are absolute addresses —
# verify this against how the table is filled in.
1609 stmt: SWITCH (reg) {
1611 guint32 *jt = (guint32 *)tree->data.p;
1613 tree->is_jump = TRUE;
1615 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, jt [0]);
1616 offset = 6 + (guint32)s->code;
1617 x86_branch32 (s->code, X86_CC_GE, jt [jt [0] + 1] - offset, FALSE);
1619 x86_mov_reg_memindex (s->code, X86_EAX, X86_NOBASEREG,
1620 tree->data.i + 4, tree->left->reg1, 2, 4);
1621 x86_jump_reg (s->code, X86_EAX);
# --- 32-bit conversions ----------------------------------------------------
1628 reg: CONV_I4 (lreg) {
1629 if (tree->reg1 != tree->left->reg1)
1630 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
# CONV_OVF_I4: 64-bit -> 32-bit with overflow check.
# NOTE(review): the branch8 displacements (14, 17, -17) are hand-counted
# byte sizes of the emitted instruction sequences and must be kept in sync
# with any change to the surrounding code.
1633 reg: CONV_OVF_I4 (lreg){
1635 * Valid i4 range: 0xFFFFFFFF:80000000 up to 00000000:7FFFFFFF
1637 x86_test_reg_reg (s->code, tree->left->reg1, tree->left->reg1);
1639 /* If the low word top bit is set, see if we are negative */
1640 x86_branch8 (s->code, X86_CC_LT, 14, TRUE);
1642 /* We are not negative (no top bit set, check for our top word to be zero */
1643 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
1644 x86_branch8 (s->code, X86_CC_EQ, 17, TRUE);
1646 /* throw exception */
1647 x86_push_imm (s->code, get_exception_overflow ());
1648 x86_mov_reg_imm (s->code, X86_EAX, arch_get_throw_exception ());
1649 x86_call_reg (s->code, X86_EAX);
1651 /* our top bit is set, check that top word is 0xfffffff */
1652 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg2, 0xffffffff);
1654 /* nope, emit exception */
1655 x86_branch8 (s->code, X86_CC_NE, -17, TRUE);
1657 if (tree->reg1 != tree->left->reg1)
1658 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
# CONV_OVF_U4: the high word of the 64-bit source must be zero.
1661 reg: CONV_OVF_U4 (lreg) {
1662 /* top word must be 0 */
1663 x86_test_reg_reg (s->code, tree->left->reg2, tree->left->reg2);
1664 EMIT_COND_EXCEPTION (X86_CC_EQ, get_exception_overflow ());
1665 if (tree->reg1 != tree->left->reg1)
1666 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
# --- 64-bit constants and int->int64 conversions ---------------------------
# NOTE(review): the rule header for the next two lines (a CONST_I8 rule,
# presumably) was dropped by the extraction; they load the low and high
# 32-bit halves of the 64-bit immediate into the register pair.
1672 x86_mov_reg_imm (s->code, tree->reg1, *((gint32 *)&tree->data.p));
1673 x86_mov_reg_imm (s->code, tree->reg2, *((gint32 *)&tree->data.p + 1));
# Sign-extend a constant i4 to i8 at compile time: high word is 0 for
# non-negative values, -1 otherwise.
1676 lreg: CONV_I8 (CONST_I4) {
1677 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
1679 if (tree->left->data.i >= 0)
1680 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
1682 x86_mov_reg_imm (s->code, tree->reg2, -1);
# Runtime sign extension: zero the high word, skip the mov -1 when the
# value is non-negative.  The g_assert pins the 5-byte size of the skipped
# mov (i1 is declared on a line missing from this extract).
1685 lreg: CONV_I8 (reg) {
1688 if (tree->reg1 != tree->left->reg1)
1689 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1691 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 0);
1692 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
1693 x86_branch8 (s->code, X86_CC_GE, 5, TRUE);
1695 x86_mov_reg_imm (s->code, tree->reg2, -1);
1696 g_assert ((s->code - i1) == 5);
1699 lreg: CONV_U8 (CONST_I4) 1 {
1700 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
1701 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
# Negative constants cannot convert to u8: emit an unconditional throw.
1704 lreg: CONV_OVF_U8 (CONST_I4) {
1705 if (tree->left->data.i < 0){
1706 x86_push_imm (s->code, get_exception_overflow ());
1707 x86_mov_reg_imm (s->code, X86_EAX, arch_get_throw_exception ());
1708 x86_call_reg (s->code, X86_EAX);
1710 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
1711 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
1715 lreg: CONV_OVF_I8_UN (CONST_I4) {
1716 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
1717 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
# CONV_OVF_U8 (reg): the i4 source must be non-negative.
# NOTE(review): 0x8000000 tests bit 27, not the sign bit — this almost
# certainly should be 0x80000000 (compare the dropped-zero pattern in the
# CONV_OVF_I4 comment above).  Fix together with a codegen test.
1720 lreg: CONV_OVF_U8 (reg) {
1721 x86_test_reg_imm (s->code, tree->left->reg1, 0x8000000);
1722 EMIT_COND_EXCEPTION (X86_CC_EQ, get_exception_overflow ());
1724 if (tree->reg1 != tree->left->reg1)
1725 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1726 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
1729 lreg: CONV_OVF_I8_UN (reg) {
1730 /* Converting a u4 to i8 can never overflow: just zero-extend */
1731 if (tree->reg1 != tree->left->reg1)
1732 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1733 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
# --- 64-bit loads and stores ----------------------------------------------
# STIND_I8: store the register pair as two 4-byte moves (low word at
# offset, high word at offset+4), dispatching on the addressing mode.
# NOTE(review): the `case`/`break` labels of the switch are on lines
# missing from this extract.
1736 stmt: STIND_I8 (addr, lreg) {
1738 switch (tree->left->data.ainfo.amode) {
1741 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 4);
1742 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset + 4, tree->right->reg2, 4);
1746 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
1747 tree->left->data.ainfo.offset, tree->right->reg1, 4);
1748 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
1749 tree->left->data.ainfo.offset + 4, tree->right->reg2, 4);
1752 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
1753 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
1754 tree->right->reg1, 4);
1755 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset + 4,
1756 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
1757 tree->right->reg2, 4);
1760 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
1761 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
1762 tree->right->reg1, 4);
1763 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset + 4,
1764 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
1765 tree->right->reg2, 4);
1771 # an addr can use two address registers (base and index register).  We must
1772 # take care that we do not override them (thus the use of x86_lea below).
# LDIND_I8: materialize the effective address into reg2 first, then load
# the low word into reg1 and overwrite reg2 with the high word last, so
# neither base nor index register is clobbered before both loads.
1773 lreg: LDIND_I8 (addr) {
1775 switch (tree->left->data.ainfo.amode) {
1778 x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4);
1779 x86_mov_reg_mem (s->code, tree->reg2, tree->left->data.ainfo.offset + 4, 4);
1783 x86_lea_membase (s->code, tree->reg2, tree->left->data.ainfo.basereg,
1784 tree->left->data.ainfo.offset);
1785 x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4);
1786 x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4);
1789 x86_lea_memindex (s->code, tree->reg2, X86_NOBASEREG, tree->left->data.ainfo.offset,
1790 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift);
1791 x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4);
1792 x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4);
1795 x86_lea_memindex (s->code, tree->reg2, tree->left->data.ainfo.basereg,
1796 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
1797 tree->left->data.ainfo.shift);
1798 x86_mov_reg_membase (s->code, tree->reg1, tree->reg2, 0, 4);
1799 x86_mov_reg_membase (s->code, tree->reg2, tree->reg2, 4, 4);
1802 PRINT_REG ("LDIND_I8_0", tree->reg1);
1803 PRINT_REG ("LDIND_I8_1", tree->reg2);
# --- 64-bit ALU operations -------------------------------------------------
# Pattern: operate on the pair in the left operand's registers, then copy
# the pair into the result registers if the allocator assigned different
# ones.  ADD correctly propagates the carry with ADC on the high word.
1806 lreg: ADD (lreg, lreg) {
1807 x86_alu_reg_reg (s->code, X86_ADD, tree->left->reg1, tree->right->reg1);
1808 x86_alu_reg_reg (s->code, X86_ADC, tree->left->reg2, tree->right->reg2);
1810 if (tree->reg1 != tree->left->reg1)
1811 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1812 if (tree->reg2 != tree->left->reg2)
1813 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
# NOTE(review): the high-word op below uses X86_SUB, which drops the
# borrow from the low-word subtraction — by symmetry with ADD/ADC it
# should almost certainly be X86_SBB.  Fix together with a codegen test.
1816 lreg: SUB (lreg, lreg) {
1817 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg1, tree->right->reg1);
1818 x86_alu_reg_reg (s->code, X86_SUB, tree->left->reg2, tree->right->reg2);
1820 if (tree->reg1 != tree->left->reg1)
1821 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1822 if (tree->reg2 != tree->left->reg2)
1823 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
1826 lreg: AND (lreg, lreg) {
1827 x86_alu_reg_reg (s->code, X86_AND, tree->left->reg1, tree->right->reg1);
1828 x86_alu_reg_reg (s->code, X86_AND, tree->left->reg2, tree->right->reg2);
1830 if (tree->reg1 != tree->left->reg1)
1831 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1832 if (tree->reg2 != tree->left->reg2)
1833 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
1836 lreg: OR (lreg, lreg) {
1837 x86_alu_reg_reg (s->code, X86_OR, tree->left->reg1, tree->right->reg1);
1838 x86_alu_reg_reg (s->code, X86_OR, tree->left->reg2, tree->right->reg2);
1840 if (tree->reg1 != tree->left->reg1)
1841 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1842 if (tree->reg2 != tree->left->reg2)
1843 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
# 64-bit negate (rule header missing from this extract): the classic
# neg lo / adc hi,0 / neg hi sequence computes -(hi:lo) including the
# borrow out of the low word.
1847 if (tree->reg1 != tree->left->reg1)
1848 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1849 if (tree->reg2 != tree->left->reg2)
1850 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
1852 x86_neg_reg (s->code, tree->reg1);
1853 x86_alu_reg_imm (s->code, X86_ADC, tree->reg2, 0);
1854 x86_neg_reg (s->code, tree->reg2);
# 64-bit bitwise NOT (rule header missing from this extract).
1858 if (tree->reg1 != tree->left->reg1)
1859 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
1860 if (tree->reg2 != tree->left->reg2)
1861 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
1863 x86_not_reg (s->code, tree->reg1);
1864 x86_not_reg (s->code, tree->reg2);
# --- 64-bit multiply/divide/remainder via C runtime helpers ----------------
# Each rule pushes both 64-bit operands (left at lower addresses, matching
# the cdecl layout of the gint64 a, b parameters of mono_llmult & co.,
# declared at the top of this file), calls the helper through EAX, then
# pops the 16 bytes of arguments.  The 64-bit result comes back in
# EAX:EDX; ECX — the remaining caller-saved register — is preserved around
# the call when the allocator has it in use.
1867 lreg: MUL (lreg, lreg) {
1868 if (mono_regset_reg_used (s->rs, X86_ECX))
1869 x86_push_reg (s->code, X86_ECX);
1871 x86_push_reg (s->code, tree->right->reg2);
1872 x86_push_reg (s->code, tree->right->reg1);
1873 x86_push_reg (s->code, tree->left->reg2);
1874 x86_push_reg (s->code, tree->left->reg1);
1875 x86_mov_reg_imm (s->code, X86_EAX, mono_llmult);
1876 x86_call_reg (s->code, X86_EAX);
1877 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
1879 if (mono_regset_reg_used (s->rs, X86_ECX))
1880 x86_pop_reg (s->code, X86_ECX);
1883 lreg: DIV (lreg, lreg) {
1884 if (mono_regset_reg_used (s->rs, X86_ECX))
1885 x86_push_reg (s->code, X86_ECX);
1887 x86_push_reg (s->code, tree->right->reg2);
1888 x86_push_reg (s->code, tree->right->reg1);
1889 x86_push_reg (s->code, tree->left->reg2);
1890 x86_push_reg (s->code, tree->left->reg1);
1891 x86_mov_reg_imm (s->code, X86_EAX, mono_lldiv);
1892 x86_call_reg (s->code, X86_EAX);
1893 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
1895 if (mono_regset_reg_used (s->rs, X86_ECX))
1896 x86_pop_reg (s->code, X86_ECX);
1899 lreg: REM (lreg, lreg) {
1900 if (mono_regset_reg_used (s->rs, X86_ECX))
1901 x86_push_reg (s->code, X86_ECX);
1903 x86_push_reg (s->code, tree->right->reg2);
1904 x86_push_reg (s->code, tree->right->reg1);
1905 x86_push_reg (s->code, tree->left->reg2);
1906 x86_push_reg (s->code, tree->left->reg1);
1907 x86_mov_reg_imm (s->code, X86_EAX, mono_llrem);
1908 x86_call_reg (s->code, X86_EAX);
1909 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
1911 if (mono_regset_reg_used (s->rs, X86_ECX))
1912 x86_pop_reg (s->code, X86_ECX);
1915 lreg: DIV_UN (lreg, lreg) {
1916 if (mono_regset_reg_used (s->rs, X86_ECX))
1917 x86_push_reg (s->code, X86_ECX);
1919 x86_push_reg (s->code, tree->right->reg2);
1920 x86_push_reg (s->code, tree->right->reg1);
1921 x86_push_reg (s->code, tree->left->reg2);
1922 x86_push_reg (s->code, tree->left->reg1);
1923 x86_mov_reg_imm (s->code, X86_EAX, mono_lldiv_un);
1924 x86_call_reg (s->code, X86_EAX);
1925 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
1927 if (mono_regset_reg_used (s->rs, X86_ECX))
1928 x86_pop_reg (s->code, X86_ECX);
1931 lreg: REM_UN (lreg, lreg) {
1932 if (mono_regset_reg_used (s->rs, X86_ECX))
1933 x86_push_reg (s->code, X86_ECX);
1935 x86_push_reg (s->code, tree->right->reg2);
1936 x86_push_reg (s->code, tree->right->reg1);
1937 x86_push_reg (s->code, tree->left->reg2);
1938 x86_push_reg (s->code, tree->left->reg1);
1939 x86_mov_reg_imm (s->code, X86_EAX, mono_llrem_un);
1940 x86_call_reg (s->code, X86_EAX);
1941 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
1943 if (mono_regset_reg_used (s->rs, X86_ECX))
1944 x86_pop_reg (s->code, X86_ECX);
# --- 64-bit call / return / argument rules ---------------------------------
# Same call skeleton as the CALL_I4 rules; the 64-bit result is asserted
# to land in the EAX:EDX pair.
1947 lreg: CALL_I8 (this, LDIND_I4 (ADDR_G)) {
1948 MethodCallInfo *ci = tree->data.ci;
1949 int lreg = tree->left->reg1;
1955 if (tree->left->op != MB_TERM_NOP) {
1956 g_assert (lreg >= 0);
1957 x86_push_reg (s->code, lreg);
1960 if (ci->vtype_num) {
1961 int offset = g_array_index (s->varinfo, MonoVarInfo, ci->vtype_num).offset;
1962 x86_lea_membase (s->code, treg, X86_EBP, offset);
1963 x86_push_reg (s->code, treg);
1966 x86_call_mem (s->code, tree->right->left->data.p);
1969 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, ci->args_size);
1971 g_assert (tree->reg1 == X86_EAX);
1972 g_assert (tree->reg2 == X86_EDX);
1975 lreg: CALL_I8 (this, VFUNC_ADDR) {
1976 MethodCallInfo *ci = tree->data.ci;
1977 int lreg = tree->left->reg1;
1983 if (tree->left->op != MB_TERM_NOP) {
1984 g_assert (lreg >= 0);
1985 x86_push_reg (s->code, lreg);
1988 if (ci->vtype_num) {
1989 int offset = g_array_index (s->varinfo, MonoVarInfo, ci->vtype_num).offset;
1990 x86_lea_membase (s->code, treg, X86_EBP, offset);
1991 x86_push_reg (s->code, treg);
1994 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
1995 x86_call_virtual (s->code, lreg,
1996 G_STRUCT_OFFSET (MonoClass, vtable) + (tree->right->data.m->slot << 2));
1999 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, ci->args_size);
2001 PRINT_REG ("CALL0_I8(VIRTUAL)", tree->reg1);
2002 PRINT_REG ("CALL1_I8(VIRTUAL)", tree->reg2);
2004 g_assert (tree->reg1 == X86_EAX);
2005 g_assert (tree->reg2 == X86_EDX);
# 64-bit return fragment (rule header missing from this extract): shuffle
# the result pair into EAX:EDX without clobbering either half — ECX is
# used as scratch when the pair occupies EAX/EDX crosswise.
2009 if (tree->left->reg1 != X86_EAX) {
2010 if (tree->left->reg2 != X86_EAX) {
2011 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
2012 if (tree->left->reg2 != X86_EDX)
2013 x86_mov_reg_reg (s->code, X86_EDX, tree->left->reg2, 4);
2015 x86_mov_reg_reg (s->code, X86_ECX, tree->left->reg2, 4);
2016 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
2017 x86_mov_reg_reg (s->code, X86_EDX, X86_ECX, 4);
2019 } else if (tree->left->reg2 != X86_EDX) {
2020 x86_mov_reg_reg (s->code, X86_EDX, tree->left->reg2, 4);
2023 if (!tree->last_instr) {
2024 tree->is_jump = TRUE;
2025 x86_jump32 (s->code, s->epilog - 5);
# ARG_I8: push high word first so the low word ends up at the lower
# address, as the callee expects for a 64-bit stack argument.
2030 stmt: ARG_I8 (lreg) {
2031 x86_push_reg (s->code, tree->left->reg2);
2032 x86_push_reg (s->code, tree->left->reg1);
# --- 64-bit compare-and-branch rules ---------------------------------------
# Most of these use a two-pass trick: the `for (i = 0; i < 2; i++)` loop
# runs the emitter twice so that the forward-branch offsets (o1/o2/oe),
# read before they are assigned on the first pass, hold correct values on
# the second pass.  The `s->code = start;` reset between passes and the
# closing braces apparently fall on lines missing from this extract.
# Note: Jcc does not modify EFLAGS, so the repeated high-word CMP seen in
# several rules is redundant but harmless.
2035 stmt: BEQ (lreg, lreg) {
2036 guint8 *start = s->code;
2039 tree->is_jump = TRUE;
2041 for (i = 0; i < 2; i ++) {
2043 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
2044 o1 = 2 + s->code - s->start;
2045 x86_branch8 (s->code, X86_CC_NE, o2 - o1, FALSE);
2046 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
2047 o2 = 6 + s->code - s->start;
2048 x86_branch32 (s->code, X86_CC_EQ, tree->data.bb->addr - o2, TRUE);
# BNE_UN: low words differ -> taken; else high words differ -> taken.
# NOTE(review): the first branch is emitted as branch8 but its offset is
# computed as if it were a 6-byte branch32; compare the sibling rules —
# this looks like it should be x86_branch32.  Verify before changing.
2052 stmt: BNE_UN (lreg, lreg) {
2055 tree->is_jump = TRUE;
2057 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
2058 offset = 6 + s->code - s->start;
2059 x86_branch8 (s->code, X86_CC_NE, tree->data.bb->addr - offset, FALSE);
2060 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
2061 offset = 6 + s->code - s->start;
2062 x86_branch32 (s->code, X86_CC_NE, tree->data.bb->addr - offset, FALSE);
# Signed/unsigned 64-bit relational branches share one shape:
#   compare high words: strictly greater/less -> taken;
#   high words unequal -> fall through (not taken);
#   high words equal   -> decide on an unsigned low-word compare.
# The signed variants use signed (TRUE) condition codes only on the
# high-word branch; the low word is always compared unsigned (FALSE).
2065 stmt: BGE (lreg, lreg) {
2066 guint8 *start = s->code;
2067 gint32 o1, o2, oe, i;
2069 tree->is_jump = TRUE;
2071 for (i = 0; i < 2; i ++) {
2073 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
2074 o1 = 6 + s->code - s->start;
2075 x86_branch32 (s->code, X86_CC_GT, tree->data.bb->addr - o1, TRUE);
2076 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
2077 o2 = 2 + s->code - s->start;
2078 x86_branch8 (s->code, X86_CC_NE, oe - o2, TRUE);
2079 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
2080 oe = 6 + s->code - s->start;
2081 x86_branch32 (s->code, X86_CC_GE, tree->data.bb->addr - oe, FALSE);
2085 stmt: BGE_UN (lreg, lreg) {
2086 guint8 *start = s->code;
2087 gint32 o1, o2, oe, i;
2089 tree->is_jump = TRUE;
2091 for (i = 0; i < 2; i ++) {
2093 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
2094 o1 = 6 + s->code - s->start;
2095 x86_branch32 (s->code, X86_CC_GT, tree->data.bb->addr - o1, FALSE);
2096 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
2097 o2 = 2 + s->code - s->start;
2098 x86_branch8 (s->code, X86_CC_NE, oe - o2, FALSE);
2099 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
2100 oe = 6 + s->code - s->start;
2101 x86_branch32 (s->code, X86_CC_GE, tree->data.bb->addr - oe, FALSE);
2105 stmt: BGT (lreg, lreg) {
2106 guint8 *start = s->code;
2107 gint32 o1, o2, oe, i;
2109 tree->is_jump = TRUE;
2111 for (i = 0; i < 2; i ++) {
2113 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
2114 o1 = 6 + s->code - s->start;
2115 x86_branch32 (s->code, X86_CC_GT, tree->data.bb->addr - o1, TRUE);
2116 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
2117 o2 = 2 + s->code - s->start;
2118 x86_branch8 (s->code, X86_CC_NE, oe - o2, TRUE);
2119 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
2120 oe = 6 + s->code - s->start;
2121 x86_branch32 (s->code, X86_CC_GT, tree->data.bb->addr - oe, FALSE);
2125 stmt: BGT_UN (lreg, lreg) {
2126 guint8 *start = s->code;
2127 gint32 o1, o2, oe, i;
2129 tree->is_jump = TRUE;
2131 for (i = 0; i < 2; i ++) {
2133 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
2134 o1 = 6 + s->code - s->start;
2135 x86_branch32 (s->code, X86_CC_GT, tree->data.bb->addr - o1, FALSE);
2136 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
2137 o2 = 2 + s->code - s->start;
2138 x86_branch8 (s->code, X86_CC_NE, oe - o2, FALSE);
2139 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
2140 oe = 6 + s->code - s->start;
2141 x86_branch32 (s->code, X86_CC_GT, tree->data.bb->addr - oe, FALSE);
2145 stmt: BLT (lreg, lreg) {
2146 guint8 *start = s->code;
2147 gint32 o1, o2, oe, i;
2149 tree->is_jump = TRUE;
2151 for (i = 0; i < 2; i ++) {
2153 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
2154 o1 = 6 + s->code - s->start;
2155 x86_branch32 (s->code, X86_CC_LT, tree->data.bb->addr - o1, TRUE);
2156 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
2157 o2 = 2 + s->code - s->start;
2158 x86_branch8 (s->code, X86_CC_NE, oe - o2, TRUE);
2159 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
2160 oe = 6 + s->code - s->start;
2161 x86_branch32 (s->code, X86_CC_LT, tree->data.bb->addr - oe, FALSE);
2165 stmt: BLT_UN (lreg, lreg) {
2166 guint8 *start = s->code;
2167 gint32 o1, o2, oe, i;
2169 tree->is_jump = TRUE;
2171 for (i = 0; i < 2; i ++) {
2173 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
2174 o1 = 6 + s->code - s->start;
2175 x86_branch32 (s->code, X86_CC_LT, tree->data.bb->addr - o1, FALSE);
2176 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
2177 o2 = 2 + s->code - s->start;
2178 x86_branch8 (s->code, X86_CC_NE, oe - o2, FALSE);
2179 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
2180 oe = 6 + s->code - s->start;
2181 x86_branch32 (s->code, X86_CC_LT, tree->data.bb->addr - oe, FALSE);
2185 stmt: BLE (lreg, lreg) {
2186 guint8 *start = s->code;
2187 gint32 o1, o2, oe, i;
2189 tree->is_jump = TRUE;
2191 for (i = 0; i < 2; i ++) {
2193 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
2194 o1 = 6 + s->code - s->start;
2195 x86_branch32 (s->code, X86_CC_LT, tree->data.bb->addr - o1, TRUE);
2196 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
2197 o2 = 2 + s->code - s->start;
2198 x86_branch8 (s->code, X86_CC_NE, oe - o2, TRUE);
2199 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
2200 oe = 6 + s->code - s->start;
2201 x86_branch32 (s->code, X86_CC_LE, tree->data.bb->addr - oe, FALSE);
2205 stmt: BLE_UN (lreg, lreg) {
2206 guint8 *start = s->code;
2207 gint32 o1, o2, oe, i;
2209 tree->is_jump = TRUE;
2211 for (i = 0; i < 2; i ++) {
2213 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
2214 o1 = 6 + s->code - s->start;
2215 x86_branch32 (s->code, X86_CC_LT, tree->data.bb->addr - o1, FALSE);
2216 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
2217 o2 = 2 + s->code - s->start;
2218 x86_branch8 (s->code, X86_CC_NE, oe - o2, FALSE);
2219 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
2220 oe = 6 + s->code - s->start;
2221 x86_branch32 (s->code, X86_CC_LE, tree->data.bb->addr - oe, FALSE);
2228 #stmt: STLOC (CONV_I4 (freg)) {
2230 # x86_fist_pop_membase (s->code, X86_EBP, tree->data.i, FALSE);
# CONV_I4 (freg): pop ST(0) to an integer through a temporary stack slot.
# The push of EAX only reserves 4 bytes of stack; fistp rounds according
# to the current FPU control word.
2233 reg: CONV_I4 (freg) {
2234 x86_push_reg (s->code, X86_EAX); // SP = SP - 4
2235 x86_fist_pop_membase (s->code, X86_ESP, 0, FALSE);
2236 x86_pop_reg (s->code, tree->reg1);
# CEQ (freg, freg): compare-and-pop both FP operands, fetch the FPU status
# word (fnstsw targets AX, hence the EAX save/restore when the result
# register is not EAX), mask the C0/C2/C3 condition bits (0x4500), and
# test for the "equal" encoding C3=1,C2=0,C0=0 (0x4000); setcc + widen
# turns that into a 0/1 result.
2239 reg: CEQ (freg, freg) {
2240 int treg = tree->reg1;
2242 if (treg != X86_EAX)
2243 x86_push_reg (s->code, X86_EAX); // save EAX
2245 x86_fcompp (s->code);
2246 x86_fnstsw (s->code);
2247 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
2248 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
2249 x86_set_reg (s->code, X86_CC_EQ, tree->reg1, TRUE);
2250 x86_widen_reg (s->code, treg, treg, FALSE, FALSE);
2252 if (treg != X86_EAX)
2253 x86_pop_reg (s->code, X86_EAX); // restore EAX
# --- Floating-point conversions, loads, arithmetic -------------------------
# All FP values live on the x87 register stack; R4/R8 selection is the
# trailing FALSE/TRUE argument of the fld/fst/fild helpers.
2256 freg: CONV_R8 (freg) {
2260 freg: CONV_R8 (LDIND_I4 (ADDR_G)) {
2261 x86_fild (s->code, tree->left->left->data.p, FALSE);
# int -> double via a stack temporary (no direct reg -> x87 move exists).
# NOTE(review): the ESP adjustment after the fild uses X86_SUB, which
# *grows* the stack instead of releasing the 4 bytes pushed above — by
# symmetry with the SUB/ADD pairing in ARG_STRING this should almost
# certainly be X86_ADD.  Same issue in CONV_R4 below.  Verify and fix.
2264 freg: CONV_R8 (reg) {
2265 /* I found no direct way to move an integer register to
2266 * the floating point stack, so we need to store the register
2269 x86_push_reg (s->code, tree->left->reg1);
2270 x86_fild_membase (s->code, X86_ESP, 0, FALSE);
2271 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
2274 freg: CONV_R4 (reg) {
2275 /* I found no direct way to move an integer register to
2276 * the floating point stack, so we need to store the register
2279 x86_push_reg (s->code, tree->left->reg1);
2280 x86_fild_membase (s->code, X86_ESP, 0, FALSE);
2281 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
# R4/R8 constant loads (rule headers and intervening lines missing from
# this extract); the locals read the constant for inspection, the fld
# loads it from its memory location.
2285 float f = *(float *)tree->data.p;
2292 x86_fld (s->code, tree->data.p, FALSE);
2296 double d = *(double *)tree->data.p;
2303 x86_fld (s->code, tree->data.p, TRUE);
2306 freg: LDIND_R4 (reg) {
2307 x86_fld_membase (s->code, tree->left->reg1, 0, FALSE);
2310 freg: LDIND_R8 (reg) {
2311 x86_fld_membase (s->code, tree->left->reg1, 0, TRUE);
# Two-operand x87 arithmetic; the trailing TRUE pops the second operand.
2314 freg: ADD (freg, freg) {
2315 x86_fp_op_reg (s->code, X86_FADD, 1, TRUE);
2318 freg: SUB (freg, freg) {
2319 x86_fp_op_reg (s->code, X86_FSUB, 1, TRUE);
2322 freg: MUL (freg, freg) {
2323 x86_fp_op_reg (s->code, X86_FMUL, 1, TRUE);
2326 freg: DIV (freg, freg) {
2327 x86_fp_op_reg (s->code, X86_FDIV, 1, TRUE);
2330 #freg: REM (freg, freg) {
2331 # this does not work, since it does not pop a value from the stack,
2332 # and we need to test if the instruction is ready
2333 # x86_fprem1 (s->code);
# --- Floating-point stores and call arguments ------------------------------
# x86_fst_membase's third arg selects R4 (FALSE) vs R8 (TRUE); the final
# TRUE appears to request a pop of ST(0) after the store.
2342 stmt: STIND_R4 (ADDR_L, freg) {
2343 int offset = g_array_index (s->varinfo, MonoVarInfo, tree->left->data.i).offset;
2344 x86_fst_membase (s->code, X86_EBP, offset, FALSE, TRUE);
2347 stmt: STIND_R4 (reg, freg) {
2348 x86_fst_membase (s->code, tree->left->reg1, 0, FALSE, TRUE);
2351 stmt: STIND_R8 (ADDR_L, freg) {
2352 int offset = g_array_index (s->varinfo, MonoVarInfo, tree->left->data.i).offset;
2353 x86_fst_membase (s->code, X86_EBP, offset, TRUE, TRUE);
2356 stmt: STIND_R8 (reg, freg) {
2357 x86_fst_membase (s->code, tree->left->reg1, 0, TRUE, TRUE);
# FP arguments: reserve the stack slot (4 or 8 bytes), then pop-store
# ST(0) into it.
2360 stmt: ARG_R4 (freg) {
2361 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 4);
2362 x86_fst_membase (s->code, X86_ESP, 0, FALSE, TRUE);
2365 stmt: ARG_R8 (freg) {
2366 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
2367 x86_fst_membase (s->code, X86_ESP, 0, TRUE, TRUE);
/* Float conditional branches. Pattern: FCOMPP compares and pops both x87
 * operands, FNSTSW copies the FPU status word into AX, then the x87
 * condition codes are tested as integer flags. Mask 0x4500 = C3|C2|C0;
 * comparing against 0x4000 (C3 alone) distinguishes "equal and ordered"
 * from "unordered" (where C3, C2 and C0 are all set).
 * `offset` is declared on a line elided from this listing; 6 is presumably
 * the encoded length of the branch32 instruction itself, so the displacement
 * is computed relative to the instruction's end — confirm against full
 * source. */
2370 stmt: BEQ (freg, freg) {
2373 tree->is_jump = TRUE;
2374 x86_fcompp (s->code);
2375 x86_fnstsw (s->code);
2376 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
2377 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
2378 offset = 6 + s->code - s->start;
2379 x86_branch32 (s->code, X86_CC_EQ, tree->data.bb->addr - offset, TRUE);
2382 stmt: BNE_UN (freg, freg) {
2385 tree->is_jump = TRUE;
2386 x86_fcompp (s->code);
2387 x86_fnstsw (s->code);
2388 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
2389 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
/* BNE_UN: branch when not equal OR unordered, hence CC_NE on the same test. */
2390 offset = 6 + s->code - s->start;
2391 x86_branch32 (s->code, X86_CC_NE, tree->data.bb->addr - offset, FALSE);
/* Relational float branches, same FCOMPP/FNSTSW scheme as BEQ above.
 * After AND with 0x4500 (C3|C2|C0), a zero result means "ordered and
 * greater/less per operand order"; CMP against 0x0100 isolates C0.
 * NOTE(review): which operand ends up in ST(0) vs ST(1) before FCOMPP is
 * established by elided rules, so the exact mapping of C0/C3 to </ > here
 * cannot be confirmed from this listing — verify against the full source
 * and the x87 condition-code table. `offset` declarations are elided. */
2394 stmt: BLT (freg, freg) {
2397 tree->is_jump = TRUE;
2398 x86_fcompp (s->code);
2399 x86_fnstsw (s->code);
2400 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
2401 offset = 6 + s->code - s->start;
2402 x86_branch32 (s->code, X86_CC_EQ, tree->data.bb->addr - offset, TRUE);
2405 stmt: BLT_UN (freg, freg) {
2408 tree->is_jump = TRUE;
2409 x86_fcompp (s->code);
2410 x86_fnstsw (s->code);
2411 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
2412 offset = 6 + s->code - s->start;
2413 x86_branch32 (s->code, X86_CC_EQ, tree->data.bb->addr - offset, FALSE);
2416 stmt: BGE_UN (freg, freg) {
2419 tree->is_jump = TRUE;
2420 x86_fcompp (s->code);
2421 x86_fnstsw (s->code);
2422 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
2423 offset = 6 + s->code - s->start;
2424 x86_branch32 (s->code, X86_CC_NE, tree->data.bb->addr - offset, FALSE);
2427 stmt: BGT_UN (freg, freg) {
2430 tree->is_jump = TRUE;
2431 x86_fcompp (s->code);
2432 x86_fnstsw (s->code);
2433 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
/* 0x0100 = C0: set when ST(0) < ST(1) (for an ordered compare). */
2434 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
2435 offset = 6 + s->code - s->start;
2436 x86_branch32 (s->code, X86_CC_EQ, tree->data.bb->addr - offset, FALSE);
2439 stmt: BLE_UN (freg, freg) {
2442 tree->is_jump = TRUE;
2443 x86_fcompp (s->code);
2444 x86_fnstsw (s->code);
2445 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
2446 offset = 6 + s->code - s->start;
2447 x86_branch32 (s->code, X86_CC_NE, tree->data.bb->addr - offset, FALSE);
/* Call through a global address slot, returning a double on the x87 stack.
 * Pushes the `this` pointer (unless the left child is NOP, i.e. a static
 * call), pushes a hidden pointer to the local valuetype return buffer when
 * ci->vtype_num is set, then calls indirectly through the global address and
 * pops the argument area. NOTE(review): `treg` is declared on an elided
 * line — confirm which scratch register it is in the full source. */
2450 freg: CALL_R8 (this, LDIND_I4 (ADDR_G)) {
2451 MethodCallInfo *ci = tree->data.ci;
2452 int lreg = tree->left->reg1;
2458 if (tree->left->op != MB_TERM_NOP) {
2459 g_assert (lreg >= 0);
2460 x86_push_reg (s->code, lreg);
2463 if (ci->vtype_num) {
2464 int offset = g_array_index (s->varinfo, MonoVarInfo, ci->vtype_num).offset;
2465 x86_lea_membase (s->code, treg, X86_EBP, offset);
2466 x86_push_reg (s->code, treg);
2469 x86_call_mem (s->code, tree->right->left->data.p);
2472 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, ci->args_size);
/* Interface-method call returning a double. Same argument setup as the
 * global-address variant, then three loads resolve the target: object ->
 * vtable/class pointer, -> interface_offsets table, -> the slot for this
 * interface (interface_id scaled by 4 = pointer size). x86_call_virtual is
 * used so the call site has the fixed size x86_magic_trampoline expects
 * (see the macro comment in the file header). */
2475 freg: CALL_R8 (this, INTF_ADDR) {
2476 MethodCallInfo *ci = tree->data.ci;
2477 int lreg = tree->left->reg1;
2483 if (tree->left->op != MB_TERM_NOP) {
2484 g_assert (lreg >= 0);
2485 x86_push_reg (s->code, lreg);
2488 if (ci->vtype_num) {
2489 int offset = g_array_index (s->varinfo, MonoVarInfo, ci->vtype_num).offset;
2490 x86_lea_membase (s->code, treg, X86_EBP, offset);
2491 x86_push_reg (s->code, treg);
2494 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2495 x86_mov_reg_membase (s->code, lreg, lreg,
2496 G_STRUCT_OFFSET (MonoClass, interface_offsets), 4);
2497 x86_mov_reg_membase (s->code, lreg, lreg, tree->right->data.m->klass->interface_id << 2, 4);
2498 x86_call_virtual (s->code, lreg, tree->right->data.m->slot << 2);
2501 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, ci->args_size);
/* Virtual-method call returning a double: load the class pointer from the
 * object, then call through the vtable slot (slot index scaled by 4).
 * x86_call_virtual keeps the call site at a known size for the magic
 * trampoline. */
2504 freg: CALL_R8 (this, VFUNC_ADDR) {
2505 MethodCallInfo *ci = tree->data.ci;
2506 int lreg = tree->left->reg1;
2512 if (tree->left->op != MB_TERM_NOP) {
2513 g_assert (lreg >= 0);
2514 x86_push_reg (s->code, lreg);
2517 if (ci->vtype_num) {
2518 int offset = g_array_index (s->varinfo, MonoVarInfo, ci->vtype_num).offset;
2519 x86_lea_membase (s->code, treg, X86_EBP, offset);
2520 x86_push_reg (s->code, treg);
2523 x86_mov_reg_membase (s->code, lreg, lreg, 0, 4);
2524 x86_call_virtual (s->code, lreg,
2525 G_STRUCT_OFFSET (MonoClass, vtable) + (tree->right->data.m->slot << 2));
2528 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, ci->args_size);
/* NOTE(review): the fragment below (jump to the shared epilog unless this is
 * the method's last instruction; -5 = size of the jump32 itself) appears to
 * belong to a following return rule whose header is elided — confirm against
 * the full source. */
2533 if (!tree->last_instr) {
2534 tree->is_jump = TRUE;
2535 x86_jump32 (s->code, s->epilog - 5);
2539 # support for value types
/* Valuetypes are handled by reference here: "loading" an object just moves
 * its address into the destination register (skipped when source and
 * destination already coincide). */
2541 reg: LDIND_OBJ (reg) {
2542 if (tree->left->reg1 != tree->reg1)
2543 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
/* Store a valuetype by emitting a call to MEMCOPY (memcpy, or debug_memcpy
 * in debug builds): args pushed C-style (size, src, dest), caller-save
 * registers EAX/EDX/ECX preserved around the call, 12 = 3 pushed args
 * popped afterwards. tree->data.i holds the valuetype size in bytes. */
2546 stmt: STIND_OBJ (reg, reg) {
2547 x86_push_reg (s->code, X86_EAX);
2548 x86_push_reg (s->code, X86_EDX);
2549 x86_push_reg (s->code, X86_ECX);
2551 g_assert (tree->data.i > 0);
2552 x86_push_imm (s->code, tree->data.i);
2553 x86_push_reg (s->code, tree->right->reg1);
2554 x86_push_reg (s->code, tree->left->reg1);
2555 x86_mov_reg_imm (s->code, X86_EAX, MEMCOPY);
2556 x86_call_reg (s->code, X86_EAX);
2557 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
2559 x86_pop_reg (s->code, X86_ECX);
2560 x86_pop_reg (s->code, X86_EDX);
2561 x86_pop_reg (s->code, X86_EAX);
/* A constant-sized-int valuetype argument degenerates to a plain push of the
 * constant. */
2564 stmt: ARG_OBJ (CONST_I4) {
2565 x86_push_imm (s->code, tree->left->data.i);
/* Push a valuetype argument by value: reserve `sa` bytes on the stack
 * (NOTE(review): the computation of `sa`, presumably `size` rounded up to
 * stack alignment, is elided from this listing — confirm), then MEMCOPY the
 * object into the reserved slot. The lea of ESP+5*4 recomputes the slot
 * address past the 3 saved registers and 2 already-pushed arguments.
 * EAX/EDX/ECX are caller-save and preserved around the call. */
2568 stmt: ARG_OBJ (reg) {
2569 int size = tree->data.i;
2572 g_assert (size > 0);
2577 /* reserve space for the argument */
2578 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, sa);
2580 x86_push_reg (s->code, X86_EAX);
2581 x86_push_reg (s->code, X86_EDX);
2582 x86_push_reg (s->code, X86_ECX);
2584 x86_push_imm (s->code, size);
2585 x86_push_reg (s->code, tree->left->reg1);
2586 x86_lea_membase (s->code, X86_EAX, X86_ESP, 5*4);
2587 x86_push_reg (s->code, X86_EAX);
2589 x86_mov_reg_imm (s->code, X86_EAX, MEMCOPY);
2590 x86_call_reg (s->code, X86_EAX);
2591 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
2593 x86_pop_reg (s->code, X86_ECX);
2594 x86_pop_reg (s->code, X86_EDX);
2595 x86_pop_reg (s->code, X86_EAX);
/* Return a valuetype: MEMCOPY it into the hidden return buffer whose address
 * the caller passed at [EBP+8], then jump to the shared epilog unless this is
 * already the method's last instruction (-5 = size of the jump32 itself). */
2598 stmt: RET_OBJ (reg) {
2599 int size = tree->data.i;
2601 x86_push_imm (s->code, size);
2602 x86_push_reg (s->code, tree->left->reg1);
2603 x86_push_membase (s->code, X86_EBP, 8);
2605 x86_mov_reg_imm (s->code, X86_EAX, MEMCOPY);
2606 x86_call_reg (s->code, X86_EAX);
2608 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 12);
2610 if (!tree->last_instr) {
2611 tree->is_jump = TRUE;
2612 x86_jump32 (s->code, s->epilog - 5);
/* 64-bit arithmetic helpers called from generated code (declared near the
 * top of this file). Bodies are elided from this listing. */
2621 mono_llmult (gint64 a, gint64 b)
2627 mono_lldiv (gint64 a, gint64 b)
2633 mono_llrem (gint64 a, gint64 b)
2639 mono_lldiv_un (guint64 a, guint64 b)
2645 mono_llrem_un (guint64 a, guint64 b)
/* Allocate a zeroed code tree node from the mempool; svt starts as
 * VAL_UNKNOWN. (Assignments of op/left/right are elided from this listing.)
 * mono_ctree_new_leaf is the childless convenience wrapper. */
2651 mono_ctree_new (MonoMemPool *mp, int op, MBTree *left, MBTree *right)
2653 MBTree *t = mono_mempool_alloc0 (mp, sizeof (MBTree));
2661 t->svt = VAL_UNKNOWN;
2667 mono_ctree_new_leaf (MonoMemPool *mp, int op)
2669 return mono_ctree_new (mp, op, NULL, NULL);
/* Return this thread's LMF (last managed frame) pointer slot, lazily
 * allocating it on first use and caching it in TLS under lmf_thread_id.
 * NOTE(review): the g_malloc'd slot is intentionally per-thread and
 * never freed here — lifetime is presumably the thread's; confirm. */
2673 arch_get_lmf_addr (void)
2677 if ((lmf = TlsGetValue (lmf_thread_id)))
2680 lmf = g_malloc (sizeof (gpointer));
2683 TlsSetValue (lmf_thread_id, lmf);
/* Debug build's MEMCOPY: trace each copy (destination, source, size and a
 * hex dump of the first `l` source bytes — `l`'s computation is elided from
 * this listing), then delegate to memcpy. NOTE(review): %d for a size_t `n`
 * is technically a format mismatch on LP64; %zu would be correct. */
2691 MEMCOPY (void *dest, const void *src, size_t n)
2695 printf ("MEMCPY(%p to %p [%d]) ", src, dest, n);
2697 for (i = 0; i < l; i++)
2698 printf ("%02x ", *((guint8 *)src + i));
2701 return memcpy (dest, src, n);