2 * x86.brg: X86 code generator
5 * Dietmar Maurer (dietmar@ximian.com)
7 * (C) 2001 Ximian, Inc.
13 #include <mono/metadata/blob.h>
14 #include <mono/metadata/metadata.h>
15 #include <mono/metadata/loader.h>
16 #include <mono/arch/x86/x86-codegen.h>
21 #define MBTREE_TYPE MBTree
22 #define MBCGEN_TYPE MBCodeGenStatus
23 #define MBCOST_DATA MBCodeGenStatus
24 #define MBALLOC_STATE mono_mempool_alloc (data->mp, sizeof (MBState))
28 unsigned int has_basereg:1;
29 unsigned int has_indexreg:1;
30 unsigned int has_offset:1;
46 typedef struct _MBTree MBTree;
59 gint32 cli_addr; /* virtual cli address */
60 gint32 addr; /* address of emitted instruction */
61 gint32 first_addr; /* first code address of a tree */
76 gint64 mono_llmult (gint64 a, gint64 b);
77 gint64 mono_lldiv (gint64 a, gint64 b);
78 gint64 mono_llrem (gint64 a, gint64 b);
79 guint64 mono_lldiv_un (guint64 a, guint64 b);
80 guint64 mono_llrem_un (guint64 a, guint64 b);
82 #define MB_OPT_LEVEL 1
85 #define MB_USE_OPT1(c) 65535
86 #define MB_USE_OPT2(c) 65535
89 #define MB_USE_OPT1(c) c
90 #define MB_USE_OPT2(c) 65535
93 #define MB_USE_OPT1(c) c
94 #define MB_USE_OPT2(c) c
100 # terminal definitions
104 %term CONST_I4 CONST_I8 CONST_R4 CONST_R8
105 %term LDIND_I1 LDIND_U1 LDIND_I2 LDIND_U2 LDIND_I4 LDIND_I8 LDIND_R4 LDIND_R8
107 %term STIND_I1 STIND_I2 STIND_I4 STIND_I8 STIND_R4 STIND_R8
108 %term ADDR_L ADDR_A ADDR_G ARG CALL_I4 CALL_I8 CALL_R8
109 %term BREAK SWITCH BR RET RETV
110 %term ADD SUB MUL DIV DIV_UN REM REM_UN AND OR XOR SHL SHR SHR_UN NEG NOT
111 %term BLT BLT_UN BEQ BNE_UN BRTRUE BRFALSE BGE BGE_UN BLE BLE_UN BGT BGT_UN
112 %term CONV_I4 CONV_I1 CONV_I2 CONV_I8 CONV_R8
128 tree->data.ainfo.offset = tree->data.i;
129 tree->data.ainfo.has_offset = 1;
133 tree->data.ainfo.offset = tree->data.i;
134 tree->data.ainfo.has_offset = 1;
140 tree->data.ainfo.offset = 0;
141 tree->data.ainfo.has_offset = 0;
142 tree->data.ainfo.basereg = tree->reg1;
143 tree->data.ainfo.has_basereg = 1;
146 base: ADD (reg, acon) {
	/* base-plus-displacement addressing: the left child supplies the
	 * base register, the address-constant right child the offset */
147 tree->data.ainfo.offset = tree->right->data.i;
148 tree->data.ainfo.has_offset = 1;
149 tree->data.ainfo.basereg = tree->left->reg1;
150 tree->data.ainfo.has_basereg = 1;
154 tree->data.ainfo.offset = tree->data.i;
155 tree->data.ainfo.has_offset = 1;
156 tree->data.ainfo.basereg = X86_EBP;
157 tree->data.ainfo.has_basereg = 1;
161 tree->data.ainfo.offset = tree->data.i + 8;
162 tree->data.ainfo.has_offset = 1;
163 tree->data.ainfo.basereg = X86_EBP;
164 tree->data.ainfo.has_basereg = 1;
168 tree->data.ainfo.offset = 0;
169 tree->data.ainfo.has_offset = 1;
170 tree->data.ainfo.has_indexreg = 1;
171 tree->data.ainfo.indexreg = tree->left->reg1;
172 tree->data.ainfo.shift = 0;
175 index: SHL (reg, CONST_I4) {
176 tree->data.ainfo.offset = 0;
177 tree->data.ainfo.has_offset = 1;
178 tree->data.ainfo.has_indexreg = 1;
179 tree->data.ainfo.indexreg = tree->left->reg1;
180 tree->data.ainfo.shift = tree->right->data.i;
182 MBCOND (tree->right->data.i == 0 ||
183 tree->right->data.i == 1 ||
184 tree->right->data.i == 2 ||
185 tree->right->data.i == 3);
190 index: MUL (reg, CONST_I4) {
	/* a multiply by 1, 2, 4 or 8 folds into the x86 scaled-index (SIB)
	 * addressing mode; fast_log2 maps the multiplier to its shift count
	 * (1->0, 2->1, 4->2, 8->3). */
191 static int fast_log2 [] = { 1, 0, 1, -1, 2, -1, -1, -1, 3 };
193 tree->data.ainfo.offset = 0;
194 tree->data.ainfo.has_offset = 1;
195 tree->data.ainfo.has_indexreg = 1;
196 tree->data.ainfo.indexreg = tree->left->reg1;
	/* NOTE(review): fast_log2[0] == 1 looks stray, but the condition
	 * below only admits multipliers 1, 2, 4 and 8, so it is never read */
197 tree->data.ainfo.shift = fast_log2 [tree->right->data.i];
	/* rule applies only when the constant is a legal SIB scale factor */
199 tree->right->data.i == 1 ||
200 tree->right->data.i == 2 ||
201 tree->right->data.i == 4 ||
202 tree->right->data.i == 8);
211 addr: ADD (index, base) {
212 tree->data.ainfo.offset = tree->right->data.ainfo.offset;
213 tree->data.ainfo.basereg = tree->right->data.ainfo.basereg;
214 tree->data.ainfo.has_basereg = tree->right->data.ainfo.has_basereg;
215 tree->data.ainfo.has_offset = tree->right->data.ainfo.has_offset;
217 tree->data.ainfo.shift = tree->left->data.ainfo.shift;
218 tree->data.ainfo.indexreg = tree->left->data.ainfo.indexreg;
219 tree->data.ainfo.has_indexreg = tree->left->data.ainfo.has_indexreg;
222 stmt: STIND_I4 (addr, reg) {
	/* 32 bit store through a full x86 effective address.  The three
	 * has_* flags are packed into d (offset<<2 | indexreg<<1 | basereg)
	 * and each combination dispatches to the matching emitter macro. */
225 d = (tree->left->data.ainfo.has_offset << 2 |
226 tree->left->data.ainfo.has_indexreg << 1 |
227 tree->left->data.ainfo.has_basereg);
	/* base register only */
232 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg, 0, tree->right->reg1, 4);
	/* scaled index only, no base */
235 x86_mov_memindex_reg (s->code, X86_NOBASEREG, 0,
236 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
237 tree->right->reg1, 4);
	/* base + scaled index */
240 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, 0,
241 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
242 tree->right->reg1, 4);
	/* offset + scaled index, no base */
245 x86_mov_memindex_reg (s->code, X86_NOBASEREG, tree->left->data.ainfo.offset,
246 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
247 tree->right->reg1, 4);
	/* absolute offset only */
250 x86_mov_mem_reg (s->code, tree->left->data.ainfo.offset, tree->right->reg1, 4);
	/* offset + base register */
254 x86_mov_membase_reg (s->code, tree->left->data.ainfo.basereg,
255 tree->left->data.ainfo.offset, tree->right->reg1, 4);
	/* offset + base + scaled index */
258 x86_mov_memindex_reg (s->code, tree->left->data.ainfo.basereg, tree->left->data.ainfo.offset,
259 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift,
260 tree->right->reg1, 4);
	/* an addr with none of the three components set cannot be matched */
264 g_assert_not_reached ();
269 reg: LDIND_I4 (addr) {
	/* 32 bit load through a full x86 effective address — mirror image of
	 * the STIND_I4 (addr, reg) rule: pack the has_* flags into d
	 * (offset<<2 | indexreg<<1 | basereg) and dispatch per combination. */
272 d = (tree->left->data.ainfo.has_offset << 2 |
273 tree->left->data.ainfo.has_indexreg << 1 |
274 tree->left->data.ainfo.has_basereg);
	/* base register only */
279 x86_mov_reg_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg, 0, 4);
	/* scaled index only, no base */
282 x86_mov_reg_memindex (s->code, tree->reg1, X86_NOBASEREG, 0, tree->left->data.ainfo.indexreg,
283 tree->left->data.ainfo.shift, 4);
	/* base + scaled index */
286 x86_mov_reg_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg, 0,
287 tree->left->data.ainfo.indexreg, tree->left->data.ainfo.shift, 4);
	/* offset + scaled index, no base */
290 x86_mov_reg_memindex (s->code, tree->reg1, X86_NOBASEREG,
291 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
292 tree->left->data.ainfo.shift, 4);
	/* absolute offset only */
295 x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.ainfo.offset, 4);
	/* offset + base register */
299 x86_mov_reg_membase (s->code, tree->reg1, tree->left->data.ainfo.basereg,
300 tree->left->data.ainfo.offset, 4);
	/* offset + base + scaled index */
303 x86_mov_reg_memindex (s->code, tree->reg1, tree->left->data.ainfo.basereg,
304 tree->left->data.ainfo.offset, tree->left->data.ainfo.indexreg,
305 tree->left->data.ainfo.shift, 4);
	/* an addr with none of the three components set cannot be matched */
308 g_assert_not_reached ();
316 tree->data.i = tree->data.i + 8;
320 x86_mov_reg_reg (s->code, tree->reg1, X86_EBP, 4);
321 x86_alu_reg_imm (s->code, X86_ADD, tree->reg1, tree->data.i);
325 if (tree->reg1 != tree->left->reg1)
326 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
327 x86_alu_reg_imm (s->code, X86_AND, tree->reg1, 0xff);
331 if (tree->reg1 != tree->left->reg1)
332 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
333 x86_alu_reg_imm (s->code, X86_AND, tree->reg1, 0xffff);
337 x86_mov_reg_imm (s->code, tree->reg1, tree->data.i);
343 reg: LDIND_I4 (reg) {
344 x86_mov_reg_membase (s->code, tree->reg1, tree->left->reg1, 0, 4);
347 reg: LDIND_I1 (locaddr) {
348 x86_widen_membase (s->code, tree->reg1, X86_EBP, tree->left->data.i, TRUE, FALSE);
351 reg: LDIND_U1 (locaddr) {
352 x86_widen_membase (s->code, tree->reg1, X86_EBP, tree->left->data.i, FALSE, FALSE);
355 reg: LDIND_I2 (locaddr) {
356 x86_widen_membase (s->code, tree->reg1, X86_EBP, tree->left->data.i, TRUE, TRUE);
359 reg: LDIND_U2 (locaddr) {
360 x86_widen_membase (s->code, tree->reg1, X86_EBP, tree->left->data.i, FALSE, TRUE);
363 reg: LDIND_I4 (locaddr) {
364 x86_mov_reg_membase (s->code, tree->reg1, X86_EBP, tree->left->data.i, 4);
367 reg: LDIND_U4 (locaddr) {
368 x86_mov_reg_membase (s->code, tree->reg1, X86_EBP, tree->left->data.i, 4);
371 reg: LDIND_I4 (ADDR_G) {
372 x86_mov_reg_mem (s->code, tree->reg1, tree->left->data.p, 4);
375 reg: MUL (reg, reg) {
376 if (tree->reg1 != tree->left->reg1)
377 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
378 x86_imul_reg_reg (s->code, tree->reg1, tree->right->reg1);
381 reg: DIV (reg, reg) {
382 if (tree->left->reg1 != X86_EAX)
383 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
386 x86_div_reg (s->code, tree->right->reg1, TRUE);
388 g_assert (tree->reg1 == X86_EAX &&
389 tree->reg2 == X86_EDX);
392 reg: DIV_UN (reg, reg) {
393 if (tree->left->reg1 != X86_EAX)
394 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
397 x86_div_reg (s->code, tree->right->reg1, FALSE);
399 g_assert (tree->reg1 == X86_EAX &&
400 tree->reg2 == X86_EDX);
403 reg: REM (reg, reg) {
404 if (tree->left->reg1 != X86_EAX)
405 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
408 x86_div_reg (s->code, tree->right->reg1, TRUE);
409 x86_mov_reg_reg (s->code, X86_EAX, X86_EDX, 4);
411 g_assert (tree->reg1 == X86_EAX &&
412 tree->reg2 == X86_EDX);
415 reg: REM_UN (reg, reg) {
416 if (tree->left->reg1 != X86_EAX)
417 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
420 x86_div_reg (s->code, tree->right->reg1, FALSE);
421 x86_mov_reg_reg (s->code, X86_EAX, X86_EDX, 4);
423 g_assert (tree->reg1 == X86_EAX &&
424 tree->reg2 == X86_EDX);
427 reg: ADD (reg, CONST_I4) "MB_USE_OPT1(0)" {
428 if (tree->reg1 != tree->left->reg1)
429 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
430 x86_alu_reg_imm (s->code, X86_ADD, tree->reg1, tree->right->data.i);
433 reg: ADD (reg, reg) {
434 if (tree->reg1 != tree->left->reg1)
435 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
436 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->right->reg1);
439 reg: SUB (reg, CONST_I4) "MB_USE_OPT1(0)" {
440 if (tree->reg1 != tree->left->reg1)
441 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
442 x86_alu_reg_imm (s->code, X86_SUB, tree->reg1, tree->right->data.i);
445 reg: SUB (reg, reg) {
446 if (tree->reg1 != tree->left->reg1)
447 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
448 x86_alu_reg_reg (s->code, X86_SUB, tree->reg1, tree->right->reg1);
451 reg: AND (reg, reg) {
452 if (tree->reg1 != tree->left->reg1)
453 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
454 x86_alu_reg_reg (s->code, X86_AND, tree->reg1, tree->right->reg1);
458 if (tree->reg1 != tree->left->reg1)
459 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
460 x86_alu_reg_reg (s->code, X86_OR, tree->reg1, tree->right->reg1);
463 reg: XOR (reg, reg) {
464 if (tree->reg1 != tree->left->reg1)
465 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
466 x86_alu_reg_reg (s->code, X86_XOR, tree->reg1, tree->right->reg1);
470 if (tree->reg1 != tree->left->reg1)
471 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
472 x86_neg_reg (s->code, tree->reg1);
476 if (tree->reg1 != tree->left->reg1)
477 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
478 x86_not_reg (s->code, tree->reg1);
481 reg: SHL (reg, CONST_I4) {
482 if (tree->reg1 != tree->left->reg1)
483 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
484 x86_shift_reg_imm (s->code, X86_SHL, tree->reg1, tree->right->data.i);
487 reg: SHL (reg, reg) {
488 if (tree->reg1 != tree->left->reg1)
489 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
490 if (tree->right->reg1 != X86_ECX)
491 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
492 x86_shift_reg (s->code, X86_SHL, tree->reg1);
494 g_assert (tree->reg1 != X86_ECX &&
495 tree->left->reg1 != X86_ECX);
498 reg: SHR (reg, CONST_I4) {
499 if (tree->reg1 != tree->left->reg1)
500 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
501 x86_shift_reg_imm (s->code, X86_SAR, tree->reg1, tree->right->data.i);
504 reg: SHR (reg, reg) {
505 if (tree->reg1 != tree->left->reg1)
506 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
507 if (tree->right->reg1 != X86_ECX)
508 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
509 x86_shift_reg (s->code, X86_SAR, tree->reg1);
511 g_assert (tree->reg1 != X86_ECX &&
512 tree->left->reg1 != X86_ECX);
515 reg: SHR_UN (reg, CONST_I4) {
516 if (tree->reg1 != tree->left->reg1)
517 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
518 x86_shift_reg_imm (s->code, X86_SHR, tree->reg1, tree->right->data.i);
521 reg: SHR_UN (reg, reg) {
522 if (tree->reg1 != tree->left->reg1)
523 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
524 if (tree->right->reg1 != X86_ECX)
525 x86_mov_reg_reg (s->code, X86_ECX, tree->right->reg1, 4);
526 x86_shift_reg (s->code, X86_SHR, tree->reg1);
528 g_assert (tree->reg1 != X86_ECX &&
529 tree->left->reg1 != X86_ECX);
532 stmt: STIND_I4 (reg, CONST_I4) "MB_USE_OPT1(0)" {
533 x86_mov_membase_imm (s->code, tree->left->reg1, 0,
534 tree->right->data.i, 4);
537 stmt: STIND_I4 (reg, reg) {
	/* store a 32 bit word through the address in the left register:
	 * *(gint32 *)left->reg1 = right->reg1.
	 * (A leftover debugging printf was removed here — the code
	 * generator must not write to stdout for every store.) */
539 x86_mov_membase_reg (s->code, tree->left->reg1, 0,
540 tree->right->reg1, 4);
543 stmt: STIND_I4 (locaddr, CONST_I4) "MB_USE_OPT1(0)" {
544 x86_mov_membase_imm (s->code, X86_EBP, tree->left->data.i,
545 tree->right->data.i, 4);
548 stmt: STIND_I1 (locaddr, reg) {
549 x86_mov_membase_reg (s->code, X86_EBP, tree->left->data.i,
550 tree->right->reg1, 1);
553 stmt: STIND_I2 (locaddr, reg) {
554 x86_mov_membase_reg (s->code, X86_EBP, tree->left->data.i,
555 tree->right->reg1, 2);
558 stmt: STIND_I4 (locaddr, reg) {
559 x86_mov_membase_reg (s->code, X86_EBP, tree->left->data.i,
560 tree->right->reg1, 4);
563 stmt: STIND_I4 (ADDR_G, reg) {
564 x86_mov_mem_reg (s->code, tree->left->data.p,
565 tree->right->reg1, 4);
571 x86_jump32 (s->code, tree->data.i - 5);
574 stmt: BLT (reg, reg) 1 {
575 guint8 *start = s->code;
579 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
580 offset = 6 + s->code - start;
581 x86_branch32 (s->code, X86_CC_LT, tree->data.i - offset, TRUE);
584 stmt: BLT (reg, CONST_I4) "MB_USE_OPT1(0)" {
585 guint8 *start = s->code;
589 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
590 offset = 6 + s->code - start;
591 x86_branch32 (s->code, X86_CC_LT, tree->data.i - offset, TRUE);
594 stmt: BLT_UN (reg, reg) 1 {
595 guint8 *start = s->code;
599 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
600 offset = 6 + s->code - start;
601 x86_branch32 (s->code, X86_CC_LT, tree->data.i - offset, FALSE);
604 stmt: BLT_UN (reg, CONST_I4) "MB_USE_OPT1(0)" {
605 guint8 *start = s->code;
609 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
610 offset = 6 + s->code - start;
611 x86_branch32 (s->code, X86_CC_LT, tree->data.i - offset, FALSE);
614 stmt: BGT (reg, reg) 1 {
615 guint8 *start = s->code;
619 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
620 offset = 6 + s->code - start;
621 x86_branch32 (s->code, X86_CC_GT, tree->data.i - offset, TRUE);
624 stmt: BGT (reg, CONST_I4) "MB_USE_OPT1(0)" {
625 guint8 *start = s->code;
629 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
630 offset = 6 + s->code - start;
631 x86_branch32 (s->code, X86_CC_GT, tree->data.i - offset, TRUE);
634 stmt: BGT_UN (reg, reg) 1 {
635 guint8 *start = s->code;
639 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
640 offset = 6 + s->code - start;
641 x86_branch32 (s->code, X86_CC_GT, tree->data.i - offset, FALSE);
644 stmt: BGT_UN (reg, CONST_I4) "MB_USE_OPT1(0)" {
645 guint8 *start = s->code;
649 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
650 offset = 6 + s->code - start;
651 x86_branch32 (s->code, X86_CC_GT, tree->data.i - offset, FALSE);
654 stmt: BEQ (reg, CONST_I4) "MB_USE_OPT1(0)" {
655 guint8 *start = s->code;
659 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
660 offset = 6 + s->code - start;
661 x86_branch32 (s->code, X86_CC_EQ, tree->data.i - offset, TRUE);
664 stmt: BEQ (reg, reg) 1 {
665 guint8 *start = s->code;
669 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
670 offset = 6 + s->code - start;
671 x86_branch32 (s->code, X86_CC_EQ, tree->data.i - offset, TRUE);
674 stmt: BNE_UN (reg, reg) 1 {
675 guint8 *start = s->code;
679 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
680 offset = 6 + s->code - start;
681 x86_branch32 (s->code, X86_CC_NE, tree->data.i - offset, FALSE);
684 stmt: BNE_UN (reg, CONST_I4) "MB_USE_OPT1(0)" {
685 guint8 *start = s->code;
689 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
690 offset = 6 + s->code - start;
691 x86_branch32 (s->code, X86_CC_NE, tree->data.i - offset, FALSE);
694 stmt: BGE (reg, reg) 1 {
695 guint8 *start = s->code;
699 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
700 offset = 6 + s->code - start;
701 x86_branch32 (s->code, X86_CC_GE, tree->data.i - offset, TRUE);
704 stmt: BGE (reg, CONST_I4) "MB_USE_OPT1(0)" {
705 guint8 *start = s->code;
709 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
710 offset = 6 + s->code - start;
711 x86_branch32 (s->code, X86_CC_GE, tree->data.i - offset, TRUE);
714 stmt: BGE_UN (reg, reg) 1 {
715 guint8 *start = s->code;
719 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
720 offset = 6 + s->code - start;
721 x86_branch32 (s->code, X86_CC_GE, tree->data.i - offset, FALSE);
724 stmt: BGE_UN (reg, CONST_I4) "MB_USE_OPT1(0)" {
725 guint8 *start = s->code;
729 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
730 offset = 6 + s->code - start;
731 x86_branch32 (s->code, X86_CC_GE, tree->data.i - offset, FALSE);
734 stmt: BLE (reg, reg) 1 {
735 guint8 *start = s->code;
739 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
740 offset = 6 + s->code - start;
741 x86_branch32 (s->code, X86_CC_LE, tree->data.i - offset, TRUE);
744 stmt: BLE (reg, CONST_I4) "MB_USE_OPT1(0)" {
745 guint8 *start = s->code;
749 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
750 offset = 6 + s->code - start;
751 x86_branch32 (s->code, X86_CC_LE, tree->data.i - offset, TRUE);
754 stmt: BLE_UN (reg, reg) 1 {
755 guint8 *start = s->code;
759 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
760 offset = 6 + s->code - start;
761 x86_branch32 (s->code, X86_CC_LE, tree->data.i - offset, FALSE);
764 stmt: BLE_UN (reg, CONST_I4) "MB_USE_OPT1(0)" {
765 guint8 *start = s->code;
769 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, tree->right->data.i);
770 offset = 6 + s->code - start;
771 x86_branch32 (s->code, X86_CC_LE, tree->data.i - offset, FALSE);
775 guint8 *start = s->code;
779 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 0);
780 offset = 6 + s->code - start;
781 x86_branch32 (s->code, X86_CC_NE, tree->data.i - offset, TRUE);
784 stmt: BRFALSE (reg) {
785 guint8 *start = s->code;
789 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, 0);
790 offset = 6 + s->code - start;
791 x86_branch32 (s->code, X86_CC_EQ, tree->data.i - offset, TRUE);
795 x86_breakpoint (s->code);
799 if (tree->left->reg1 != X86_EAX)
800 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
802 if (!tree->last_instr) {
804 x86_jump32 (s->code, tree->data.i - 5);
809 if (!tree->last_instr) {
811 x86_jump32 (s->code, tree->data.i - 5);
816 x86_push_reg (s->code, tree->left->reg1);
819 stmt: ARG (CONST_I4) "MB_USE_OPT1(0)" {
820 x86_push_imm (s->code, tree->left->data.i);
824 x86_mov_reg_imm (s->code, X86_EAX, tree->data.p);
825 x86_call_membase (s->code, X86_EAX,
826 G_STRUCT_OFFSET (MonoMethod, addr));
828 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->size);
830 g_assert (tree->reg1 == X86_EAX);
834 x86_mov_reg_imm (s->code, X86_EAX, tree->data.p);
835 x86_call_membase (s->code, X86_EAX, G_STRUCT_OFFSET (MonoMethod, addr));
837 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->size);
841 guint8 *start = s->code;
843 guint32 *jt = (guint32 *)tree->data.p;
847 x86_alu_reg_imm (s->code, X86_CMP, tree->left->reg1, jt [0]);
848 offset = 6 + s->code - start;
849 x86_branch32 (s->code, X86_CC_GE, jt [jt [0] + 1] - offset, FALSE);
851 my_x86_mov_reg_memindex (s->code, X86_EAX, tree->data.i + 4,
852 tree->left->reg1, 2, 4);
853 x86_jump_reg (s->code, X86_EAX);
860 reg: CONV_I4 (lreg) {
861 if (tree->reg1 != tree->left->reg1)
862 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
866 x86_mov_reg_imm (s->code, tree->reg1, tree->data.i);
867 x86_mov_reg_imm (s->code, tree->reg2, *(gint32 *)(&tree->data.p + 4));
870 lreg: CONV_I8 (CONST_I4) 1 {
871 x86_mov_reg_imm (s->code, tree->reg1, tree->left->data.i);
873 if (tree->left->data.i >= 0)
874 x86_alu_reg_reg (s->code, X86_XOR, tree->reg2, tree->reg2);
876 x86_mov_reg_imm (s->code, tree->reg2, -1);
879 stmt: STIND_I8 (locaddr, lreg) {
880 x86_mov_membase_reg (s->code, X86_EBP, tree->left->data.i,
881 tree->right->reg1, 4);
882 x86_mov_membase_reg (s->code, X86_EBP, tree->left->data.i + 4,
883 tree->right->reg2, 4);
886 lreg: LDIND_I8 (locaddr) {
887 x86_mov_reg_membase (s->code, tree->reg1, X86_EBP,
888 tree->left->data.i, 4);
889 x86_mov_reg_membase (s->code, tree->reg2, X86_EBP,
890 tree->left->data.i + 4, 4);
893 lreg: ADD (lreg, lreg) {
	/* 64 bit add in a register pair: ADD the low words, then ADC the
	 * high words so the carry out of the low 32 bits propagates. */
894 if (tree->reg1 != tree->left->reg1)
895 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
896 if (tree->reg2 != tree->left->reg2)
897 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
898 x86_alu_reg_reg (s->code, X86_ADD, tree->reg1, tree->right->reg1);
899 x86_alu_reg_reg (s->code, X86_ADC, tree->reg2, tree->right->reg2);
902 lreg: SUB (lreg, lreg) {
	/* 64 bit subtract in a register pair: SUB the low words, then SBB
	 * (subtract with borrow) the high words so a borrow out of the low
	 * 32 bits propagates — exactly mirroring the ADD/ADC pair in the
	 * lreg ADD rule.
	 * BUGFIX: the high-word operation was X86_SUB, which ignores the
	 * borrow and produced a result off by 2^32 whenever the low-word
	 * subtraction borrowed. */
903 if (tree->reg1 != tree->left->reg1)
904 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
905 if (tree->reg2 != tree->left->reg2)
906 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
907 x86_alu_reg_reg (s->code, X86_SUB, tree->reg1, tree->right->reg1);
908 x86_alu_reg_reg (s->code, X86_SBB, tree->reg2, tree->right->reg2);
911 lreg: AND (lreg, lreg) {
912 if (tree->reg1 != tree->left->reg1)
913 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
914 if (tree->reg2 != tree->left->reg2)
915 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
916 x86_alu_reg_reg (s->code, X86_AND, tree->reg1, tree->right->reg1);
917 x86_alu_reg_reg (s->code, X86_AND, tree->reg2, tree->right->reg2);
920 lreg: OR (lreg, lreg) {
921 if (tree->reg1 != tree->left->reg1)
922 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
923 if (tree->reg2 != tree->left->reg2)
924 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
925 x86_alu_reg_reg (s->code, X86_OR, tree->reg1, tree->right->reg1);
926 x86_alu_reg_reg (s->code, X86_OR, tree->reg2, tree->right->reg2);
930 if (tree->reg1 != tree->left->reg1)
931 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
932 if (tree->reg2 != tree->left->reg2)
933 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
934 x86_neg_reg (s->code, tree->reg1);
935 x86_alu_reg_imm (s->code, X86_ADC, tree->reg2, 0);
936 x86_neg_reg (s->code, tree->reg2);
940 if (tree->reg1 != tree->left->reg1)
941 x86_mov_reg_reg (s->code, tree->reg1, tree->left->reg1, 4);
942 if (tree->reg2 != tree->left->reg2)
943 x86_mov_reg_reg (s->code, tree->reg2, tree->left->reg2, 4);
944 x86_not_reg (s->code, tree->reg1);
945 x86_not_reg (s->code, tree->reg2);
948 lreg: MUL (lreg, lreg) {
	/* 64 bit multiply via the mono_llmult C helper.  ECX is caller-saved
	 * around the call when the register allocator has it in use; the two
	 * 64 bit operands are pushed as four 32 bit words (cdecl, right
	 * operand first, high word above low word). */
949 if (mono_regset_reg_used (s->rs, X86_ECX))
950 x86_push_reg (s->code, X86_ECX);
952 x86_push_reg (s->code, tree->right->reg2);
953 x86_push_reg (s->code, tree->right->reg1);
954 x86_push_reg (s->code, tree->left->reg2);
955 x86_push_reg (s->code, tree->left->reg1);
956 x86_call_code (s->code, mono_llmult);
	/* pop the four 32 bit argument words pushed above */
957 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
959 if (mono_regset_reg_used (s->rs, X86_ECX))
960 x86_pop_reg (s->code, X86_ECX);
963 lreg: DIV (lreg, lreg) {
964 if (mono_regset_reg_used (s->rs, X86_ECX))
965 x86_push_reg (s->code, X86_ECX);
967 x86_push_reg (s->code, tree->right->reg2);
968 x86_push_reg (s->code, tree->right->reg1);
969 x86_push_reg (s->code, tree->left->reg2);
970 x86_push_reg (s->code, tree->left->reg1);
971 x86_call_code (s->code, mono_lldiv);
972 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
974 if (mono_regset_reg_used (s->rs, X86_ECX))
975 x86_pop_reg (s->code, X86_ECX);
978 lreg: REM (lreg, lreg) {
979 if (mono_regset_reg_used (s->rs, X86_ECX))
980 x86_push_reg (s->code, X86_ECX);
982 x86_push_reg (s->code, tree->right->reg2);
983 x86_push_reg (s->code, tree->right->reg1);
984 x86_push_reg (s->code, tree->left->reg2);
985 x86_push_reg (s->code, tree->left->reg1);
986 x86_call_code (s->code, mono_llrem);
987 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
989 if (mono_regset_reg_used (s->rs, X86_ECX))
990 x86_pop_reg (s->code, X86_ECX);
993 lreg: DIV_UN (lreg, lreg) {
994 if (mono_regset_reg_used (s->rs, X86_ECX))
995 x86_push_reg (s->code, X86_ECX);
997 x86_push_reg (s->code, tree->right->reg2);
998 x86_push_reg (s->code, tree->right->reg1);
999 x86_push_reg (s->code, tree->left->reg2);
1000 x86_push_reg (s->code, tree->left->reg1);
1001 x86_call_code (s->code, mono_lldiv_un);
1002 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
1004 if (mono_regset_reg_used (s->rs, X86_ECX))
1005 x86_pop_reg (s->code, X86_ECX);
1008 lreg: REM_UN (lreg, lreg) {
1009 if (mono_regset_reg_used (s->rs, X86_ECX))
1010 x86_push_reg (s->code, X86_ECX);
1012 x86_push_reg (s->code, tree->right->reg2);
1013 x86_push_reg (s->code, tree->right->reg1);
1014 x86_push_reg (s->code, tree->left->reg2);
1015 x86_push_reg (s->code, tree->left->reg1);
1016 x86_call_code (s->code, mono_llrem_un);
1017 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, 16);
1019 if (mono_regset_reg_used (s->rs, X86_ECX))
1020 x86_pop_reg (s->code, X86_ECX);
1024 x86_mov_reg_imm (s->code, X86_EAX, tree->data.p);
1025 x86_call_membase (s->code, X86_EAX,
1026 G_STRUCT_OFFSET (MonoMethod, addr));
1028 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->size);
1030 g_assert (tree->reg1 == X86_EAX);
1031 g_assert (tree->reg2 == X86_EDX);
1035 if (tree->left->reg1 != X86_EAX) {
1036 if (tree->left->reg2 != X86_EAX) {
1037 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1038 if (tree->left->reg2 != X86_EDX)
1039 x86_mov_reg_reg (s->code, X86_EDX, tree->left->reg2, 4);
1041 x86_mov_reg_reg (s->code, X86_ECX, tree->left->reg2, 4);
1042 x86_mov_reg_reg (s->code, X86_EAX, tree->left->reg1, 4);
1043 x86_mov_reg_reg (s->code, X86_EDX, X86_ECX, 4);
1045 } else if (tree->left->reg2 != X86_EDX) {
1046 x86_mov_reg_reg (s->code, X86_EDX, tree->left->reg2, 4);
1049 if (!tree->last_instr) {
1051 x86_jump32 (s->code, tree->data.i - 5);
1057 x86_push_reg (s->code, tree->left->reg2);
1058 x86_push_reg (s->code, tree->left->reg1);
1061 stmt: BEQ (lreg, lreg) {
1062 guint8 *start = s->code;
1067 for (i = 0; i < 2; i ++) {
1069 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1070 o1 = 2 + s->code - start;
1071 x86_branch8 (s->code, X86_CC_NE, o2 - o1, FALSE);
1072 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
1073 o2 = 6 + s->code - start;
1074 x86_branch32 (s->code, X86_CC_EQ, tree->data.i - o2, TRUE);
1078 stmt: BNE_UN (lreg, lreg) {
	/* 64 bit inequality branch: if the low words differ, branch to the
	 * target; otherwise compare the high words and branch on inequality.
	 * Both branches go to the method-relative target tree->data.i.
	 * BUGFIX: the first branch was emitted with x86_branch8 while its
	 * offset was computed as 6 + ... (the size of a rel32 Jcc), and a
	 * rel8 cannot hold a general method-relative displacement — every
	 * sibling conditional-branch rule uses x86_branch32 for branches to
	 * tree->data.i. */
1079 guint8 *start = s->code;
1084 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1085 offset = 6 + s->code - start;
1086 x86_branch32 (s->code, X86_CC_NE, tree->data.i - offset, FALSE);
1087 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
1088 offset = 6 + s->code - start;
1089 x86_branch32 (s->code, X86_CC_NE, tree->data.i - offset, FALSE);
1092 stmt: BGE (lreg, lreg) {
1093 guint8 *start = s->code;
1094 gint32 o1, o2, oe, i;
1098 for (i = 0; i < 2; i ++) {
1100 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
1101 o1 = 6 + s->code - start;
1102 x86_branch32 (s->code, X86_CC_GT, tree->data.i - o1, TRUE);
1103 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
1104 o2 = 2 + s->code - start;
1105 x86_branch8 (s->code, X86_CC_NE, oe - o2, TRUE);
1106 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1107 oe = 6 + s->code - start;
1108 x86_branch32 (s->code, X86_CC_GE, tree->data.i - oe, FALSE);
1112 stmt: BGE_UN (lreg, lreg) {
1113 guint8 *start = s->code;
1114 gint32 o1, o2, oe, i;
1118 for (i = 0; i < 2; i ++) {
1120 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
1121 o1 = 6 + s->code - start;
1122 x86_branch32 (s->code, X86_CC_GT, tree->data.i - o1, FALSE);
1123 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
1124 o2 = 2 + s->code - start;
1125 x86_branch8 (s->code, X86_CC_NE, oe - o2, FALSE);
1126 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1127 oe = 6 + s->code - start;
1128 x86_branch32 (s->code, X86_CC_GE, tree->data.i - oe, FALSE);
1132 stmt: BGT (lreg, lreg) {
1133 guint8 *start = s->code;
1134 gint32 o1, o2, oe, i;
1138 for (i = 0; i < 2; i ++) {
1140 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
1141 o1 = 6 + s->code - start;
1142 x86_branch32 (s->code, X86_CC_GT, tree->data.i - o1, TRUE);
1143 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
1144 o2 = 2 + s->code - start;
1145 x86_branch8 (s->code, X86_CC_NE, oe - o2, TRUE);
1146 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1147 oe = 6 + s->code - start;
1148 x86_branch32 (s->code, X86_CC_GT, tree->data.i - oe, FALSE);
1152 stmt: BGT_UN (lreg, lreg) {
1153 guint8 *start = s->code;
1154 gint32 o1, o2, oe, i;
1158 for (i = 0; i < 2; i ++) {
1160 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
1161 o1 = 6 + s->code - start;
1162 x86_branch32 (s->code, X86_CC_GT, tree->data.i - o1, FALSE);
1163 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
1164 o2 = 2 + s->code - start;
1165 x86_branch8 (s->code, X86_CC_NE, oe - o2, FALSE);
1166 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1167 oe = 6 + s->code - start;
1168 x86_branch32 (s->code, X86_CC_GT, tree->data.i - oe, FALSE);
1172 stmt: BLT (lreg, lreg) {
1173 guint8 *start = s->code;
1174 gint32 o1, o2, oe, i;
1178 for (i = 0; i < 2; i ++) {
1180 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
1181 o1 = 6 + s->code - start;
1182 x86_branch32 (s->code, X86_CC_LT, tree->data.i - o1, TRUE);
1183 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
1184 o2 = 2 + s->code - start;
1185 x86_branch8 (s->code, X86_CC_NE, oe - o2, TRUE);
1186 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1187 oe = 6 + s->code - start;
1188 x86_branch32 (s->code, X86_CC_LT, tree->data.i - oe, FALSE);
1192 stmt: BLT_UN (lreg, lreg) {
1193 guint8 *start = s->code;
1194 gint32 o1, o2, oe, i;
1198 for (i = 0; i < 2; i ++) {
1200 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
1201 o1 = 6 + s->code - start;
1202 x86_branch32 (s->code, X86_CC_LT, tree->data.i - o1, FALSE);
1203 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
1204 o2 = 2 + s->code - start;
1205 x86_branch8 (s->code, X86_CC_NE, oe - o2, FALSE);
1206 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1207 oe = 6 + s->code - start;
1208 x86_branch32 (s->code, X86_CC_LT, tree->data.i - oe, FALSE);
1212 stmt: BLE (lreg, lreg) {
1213 guint8 *start = s->code;
1214 gint32 o1, o2, oe, i;
1218 for (i = 0; i < 2; i ++) {
1220 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
1221 o1 = 6 + s->code - start;
1222 x86_branch32 (s->code, X86_CC_LT, tree->data.i - o1, TRUE);
1223 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
1224 o2 = 2 + s->code - start;
1225 x86_branch8 (s->code, X86_CC_NE, oe - o2, TRUE);
1226 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1227 oe = 6 + s->code - start;
1228 x86_branch32 (s->code, X86_CC_LE, tree->data.i - oe, FALSE);
# Unsigned 64-bit branch-if-less-or-equal (reg1 = low, reg2 = high).  All
# three compares use the unsigned condition forms (final argument FALSE).
# Two-pass emission: pass one measures o1/o2/oe, pass two re-emits with the
# correct displacements.  NOTE(review): `oe` is read before assignment on
# the first pass — only correct because of the second pass.
1232 stmt: BLE_UN (lreg, lreg) {
1233 guint8 *start = s->code;
1234 gint32 o1, o2, oe, i;
1238 for (i = 0; i < 2; i ++) {
1240 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
1241 o1 = 6 + s->code - start;
1242 x86_branch32 (s->code, X86_CC_LT, tree->data.i - o1, FALSE);
1243 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg2, tree->right->reg2);
1244 o2 = 2 + s->code - start;
1245 x86_branch8 (s->code, X86_CC_NE, oe - o2, FALSE);
1246 x86_alu_reg_reg (s->code, X86_CMP, tree->left->reg1, tree->right->reg1);
1247 oe = 6 + s->code - start;
1248 x86_branch32 (s->code, X86_CC_LE, tree->data.i - oe, FALSE);
1255 #stmt: STLOC (CONV_I4 (freg)) {
1257 # x86_fist_pop_membase (s->code, X86_EBP, tree->data.i, FALSE);
# Load an int32 from a global address and convert it to a double on the
# FPU stack (fild = load integer).
1260 freg: CONV_R8 (LDIND_I4 (ADDR_G)) {
1261 x86_fild (s->code, tree->left->left->data.p, FALSE);
/* NOTE(review): the two loads below read a constant at tree->data.p and
   belong to separate rules (presumably CONST_R4 / CONST_R8 — confirm);
   the final flag of x86_fld selects the 4-byte vs. 8-byte form. */
1265 x86_fld (s->code, tree->data.p, FALSE);
1269 x86_fld (s->code, tree->data.p, TRUE);
# Load a float (R4) or double (R8) local variable onto the FPU stack;
# tree->left->data.i is the EBP-relative offset of the local, and the final
# flag of x86_fld_membase selects single vs. double precision.
1272 freg: LDIND_R4 (locaddr) {
1273 x86_fld_membase (s->code, X86_EBP, tree->left->data.i, FALSE);
1276 freg: LDIND_R8 (locaddr) {
1277 x86_fld_membase (s->code, X86_EBP, tree->left->data.i, TRUE);
# Binary FP arithmetic on the x87 register stack: both operands are already
# on the stack, and x86_fp_op_reg (op, 1, TRUE) applies the operation to
# ST(1)/ST(0) leaving a single result.  NOTE(review): the trailing TRUE
# requests the popping form of the instruction — confirm against the
# x86_fp_op_reg macro in x86-codegen.h.
1280 freg: ADD (freg, freg) {
1281 x86_fp_op_reg (s->code, X86_FADD, 1, TRUE);
1284 freg: SUB (freg, freg) {
1285 x86_fp_op_reg (s->code, X86_FSUB, 1, TRUE);
1288 freg: MUL (freg, freg) {
1289 x86_fp_op_reg (s->code, X86_FMUL, 1, TRUE);
1292 freg: DIV (freg, freg) {
1293 x86_fp_op_reg (s->code, X86_FDIV, 1, TRUE);
1296 #freg: REM (freg, freg) {
1297 # this does not work, since it does not pop a value from the stack,
1298 # and we would also need to loop until FPREM1 signals completion (C2 clear)
1299 # x86_fprem1 (s->code);
# Store the FPU-stack top into a float (R4) or double (R8) local at the
# EBP-relative offset tree->left->data.i.  The first flag selects single vs.
# double precision; the final TRUE requests the popping store (FSTP), so the
# value is consumed off the FPU stack.
1306 stmt: STIND_R4 (locaddr, freg) {
1307 x86_fst_membase (s->code, X86_EBP, tree->left->data.i, FALSE, TRUE);
1310 stmt: STIND_R8 (locaddr, freg) {
1311 x86_fst_membase (s->code, X86_EBP, tree->left->data.i, TRUE, TRUE);
/* NOTE(review): fragment of an FP rule whose header precedes it — reserves
   8 bytes on the machine stack and pops the FPU-stack top there as a
   double; presumably used to pass an R8 value on the call stack — confirm
   against the owning rule. */
1315 x86_alu_reg_imm (s->code, X86_SUB, X86_ESP, 8);
1316 x86_fst_membase (s->code, X86_ESP, 0, TRUE, TRUE);
# FP equality branch.  fcompp compares ST(0) with ST(1) and pops both;
# fnstsw copies the FPU status word into AX.  Masking with 0x4500 keeps the
# condition bits C0 (0x0100), C2 (0x0400) and C3 (0x4000); the x87 "equal"
# outcome is C3 set alone, hence the compare against 0x4000 and branch on EQ.
# The displacement is adjusted by the bytes emitted so far plus the 6-byte
# branch32 itself.
1319 stmt: BEQ (freg, freg) {
1320 guint8 *start = s->code;
1324 x86_fcompp (s->code);
1325 x86_fnstsw (s->code);
1326 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
1327 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
1328 offset = 6 + s->code - start;
1329 x86_branch32 (s->code, X86_CC_EQ, tree->data.i - offset, TRUE);
# FP branch-if-not-equal (unordered).  Same fcompp/fnstsw/mask idiom as BEQ:
# AND 0x4500 isolates C0/C2/C3, and 0x4000 (C3 alone) is the "equal"
# pattern.  Branching on NE takes the branch for any non-equal result;
# NOTE(review): unordered results (C2 set) also take it, matching the CIL
# bne.un semantics — confirm intended NaN behavior.
1332 stmt: BNE_UN (freg, freg) {
1333 guint8 *start = s->code;
1337 x86_fcompp (s->code);
1338 x86_fnstsw (s->code);
1339 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
1340 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x4000);
1341 offset = 6 + s->code - start;
1342 x86_branch32 (s->code, X86_CC_NE, tree->data.i - offset, TRUE);
# FP branch-if-below (unordered).  fcompp/fnstsw/AND 0x4500 as above; the
# x87 "below" outcome is C0 set alone (0x0100), so compare against 0x0100
# and branch on EQ.
1345 stmt: BLT_UN (freg, freg) {
1346 guint8 *start = s->code;
1350 x86_fcompp (s->code);
1351 x86_fnstsw (s->code);
1352 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
1353 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
1354 offset = 6 + s->code - start;
1355 x86_branch32 (s->code, X86_CC_EQ, tree->data.i - offset, TRUE);
# FP branch-if-greater-or-equal (unordered): the logical negation of the
# BLT_UN rule above — compare the masked condition bits against 0x0100
# (C0 alone, the "below" pattern) and branch on NE.
1358 stmt: BGE_UN (freg, freg) {
1359 guint8 *start = s->code;
1363 x86_fcompp (s->code);
1364 x86_fnstsw (s->code);
1365 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
1366 x86_alu_reg_imm (s->code, X86_CMP, X86_EAX, 0x0100);
1367 offset = 6 + s->code - start;
1368 x86_branch32 (s->code, X86_CC_NE, tree->data.i - offset, TRUE);
# FP branch-if-greater (unordered).  The x87 "greater" outcome is all of
# C0/C2/C3 clear, so no CMP is needed: the AND instruction itself sets ZF
# when the masked result is zero, and the branch tests EQ directly.
1371 stmt: BGT_UN (freg, freg) {
1372 guint8 *start = s->code;
1376 x86_fcompp (s->code);
1377 x86_fnstsw (s->code);
1378 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
1379 offset = 6 + s->code - start;
1380 x86_branch32 (s->code, X86_CC_EQ, tree->data.i - offset, TRUE);
# FP branch-if-less-or-equal (unordered): negation of the BGT_UN rule above.
# The AND sets ZF when no condition bit survives the 0x4500 mask ("greater");
# branching on NE therefore takes every other outcome (below, equal,
# unordered) without an extra CMP.
1383 stmt: BLE_UN (freg, freg) {
1384 guint8 *start = s->code;
1388 x86_fcompp (s->code);
1389 x86_fnstsw (s->code);
1390 x86_alu_reg_imm (s->code, X86_AND, X86_EAX, 0x4500);
1391 offset = 6 + s->code - start;
1392 x86_branch32 (s->code, X86_CC_NE, tree->data.i - offset, TRUE);
/* NOTE(review): tail of a call rule (header precedes it) — loads the
   MonoMethod pointer into EAX, calls indirectly through the method's
   `addr` field, then pops tree->size bytes of arguments off the stack
   (caller-cleanup convention). */
1396 x86_mov_reg_imm (s->code, X86_EAX, tree->data.p);
1397 x86_call_membase (s->code, X86_EAX,
1398 G_STRUCT_OFFSET (MonoMethod, addr));
1400 x86_alu_reg_imm (s->code, X86_ADD, X86_ESP, tree->size);
/* Unconditional branch, suppressed when this tree is the block's last
   instruction — presumably because the target then falls through (confirm
   against the owning rule).  The 5 accounts for the size of the jump32
   instruction itself when forming the relative displacement. */
1405 if (!tree->last_instr) {
1407 x86_jump32 (s->code, tree->data.i - 5);
/*
 * Runtime helper definitions for 64-bit multiply, divide and remainder
 * (signed and unsigned), matching the prototypes declared near the top of
 * this file; the 32-bit x86 back end calls out to these instead of
 * emitting inline 64-bit arithmetic.
 */
1416 mono_llmult (gint64 a, gint64 b)
1422 mono_lldiv (gint64 a, gint64 b)
1428 mono_llrem (gint64 a, gint64 b)
1434 mono_lldiv_un (guint64 a, guint64 b)
1440 mono_llrem_un (guint64 a, guint64 b)
/*
 * mono_ctree_new: allocate a zero-initialized MBTree node from the memory
 * pool `mp` (mono_mempool_alloc0 clears the allocation) and set its opcode
 * and left/right children.  The node is pool-owned.
 */
1446 mono_ctree_new (MonoMemPool *mp, int op, MBTree *left, MBTree *right)
1448 MBTree *t = mono_mempool_alloc0 (mp, sizeof (MBTree));
/* mono_ctree_new_leaf: convenience wrapper for a node with no children. */
1460 mono_ctree_new_leaf (MonoMemPool *mp, int op)
1462 return mono_ctree_new (mp, op, NULL, NULL);