1 /* Copyright (C) 2000 Intel Corporation. All rights reserved.
2 Copyright (C) 2001 Ximian, Inc.
4 // $Header: /home/miguel/third-conversion/public/mono/mono/arch/x86/x86-codegen.h,v 1.9 2001/09/18 07:26:43 lupus Exp $
11 // x86 register numbers
25 // opcodes for alu instructions
39 // opcodes for shift instructions
50 // opcodes for floating-point instructions
64 // integer conditions codes
X86_LOCK_PREFIX = 0xF0,    /* assert bus lock for the following instruction */
X86_REPNZ_PREFIX = 0xF2,   /* repeat string op while not zero */
X86_REPZ_PREFIX = 0xF3,    /* repeat string op while zero */
X86_REP_PREFIX = 0xF3,     /* unconditional repeat; shares encoding with REPZ */
X86_OPERAND_PREFIX = 0x66, /* toggle 16/32-bit operand size for one instruction */
X86_ADDRESS_PREFIX = 0x67  /* toggle 16/32-bit address size for one instruction */
97 static const unsigned char
98 x86_cc_unsigned_map [X86_NCC] = {
111 static const unsigned char
112 x86_cc_signed_map [X86_NCC] = {
// bitvector mask for callee-saved registers
#define X86_ESI_MASK (1<<X86_ESI)
#define X86_EDI_MASK (1<<X86_EDI)
#define X86_EBX_MASK (1<<X86_EBX)
#define X86_EBP_MASK (1<<X86_EBP)
/* NOTE(review): these two names are inverted w.r.t. the usual ABI terminology:
 * X86_CALLEE_REGS holds EAX/ECX/EDX, which cdecl treats as caller-saved
 * scratch registers, while X86_CALLER_REGS holds the callee-saved set.
 * Kept as-is for source compatibility with existing users. */
#define X86_CALLEE_REGS ((1<<X86_EAX) | (1<<X86_ECX) | (1<<X86_EDX))
#define X86_CALLER_REGS ((1<<X86_EBX) | (1<<X86_EBP) | (1<<X86_ESI) | (1<<X86_EDI))
#define X86_BYTE_REGS ((1<<X86_EAX) | (1<<X86_ECX) | (1<<X86_EDX) | (1<<X86_EBX)) /* regs with an addressable low byte (al/cl/dl/bl) */
/* Fix: the two predicates had their masks swapped, contradicting their own
 * trailing comments. A scratch register (freely clobberable: EAX/ECX/EDX)
 * lives in X86_CALLEE_REGS; the registers a callee must preserve
 * (ESI/EDI/EBX/EBP) live in X86_CALLER_REGS (see the inverted-naming note
 * where the masks are defined). */
#define X86_IS_SCRATCH(reg) (X86_CALLEE_REGS & (1 << (reg))) /* X86_EAX, X86_ECX, or X86_EDX */
#define X86_IS_CALLEE(reg)  (X86_CALLER_REGS & (1 << (reg))) /* X86_ESI, X86_EDI, X86_EBX, or X86_EBP */
148 // +--------------------------------+
149 // | in_arg[0] = var[0] |
150 // | in_arg[1] = var[1] |
152 // | in_arg[n_arg-1] = var[n_arg-1] |
153 // +--------------------------------+
155 // +--------------------------------+
156 // | saved EBP | <-- frame pointer (EBP)
157 // +--------------------------------+
159 // +--------------------------------+
161 // | var[n_arg+1] | local variables area
164 // +--------------------------------+
167 // | spill area | area for spilling mimic stack
// +--------------------------------+
171 // | ebp [ESP_Frame only] |
172 // | esi | 0..3 callee-saved regs
173 // | edi | <-- stack pointer (ESP)
174 // +--------------------------------+
176 // | stk1 | operand stack area/
177 // | . . . | out args
// +--------------------------------+
186 * useful building blocks
/* Emit a ModRM byte: mod (2 bits), reg/opcode (3 bits), r/m (3 bits). */
#define x86_address_byte(inst,m,o,r) \
	do { \
		unsigned char _modrm = (unsigned char)((((m) & 0x03) << 6) | (((o) & 0x07) << 3) | ((r) & 0x07)); \
		*(inst)++ = _modrm; \
	} while (0)
/* Emit a 32-bit immediate, least-significant byte first.
 * Fix: the previous implementation wrote the bytes of an int through a
 * union overlay, which produces wrong (big-endian) output when the host
 * running the code generator is not little-endian. Explicit shifts always
 * emit x86 (little-endian) byte order; unsigned arithmetic avoids
 * implementation-defined right shifts of negative values. */
#define x86_imm_emit32(inst,imm) \
	do { \
		unsigned int _imm32 = (unsigned int)(int)(imm); \
		*(inst)++ = (unsigned char)(_imm32 & 0xff); \
		*(inst)++ = (unsigned char)((_imm32 >> 8) & 0xff); \
		*(inst)++ = (unsigned char)((_imm32 >> 16) & 0xff); \
		*(inst)++ = (unsigned char)((_imm32 >> 24) & 0xff); \
	} while (0)
/* Emit a 16-bit immediate, least-significant byte first.
 * Fix: the old form stored through a (short*), which is an unaligned,
 * strict-aliasing-violating write and host-endian dependent; byte stores
 * are portable and always emit x86 byte order. */
#define x86_imm_emit16(inst,imm) do { *(inst)++ = (unsigned char)((imm) & 0xff); *(inst)++ = (unsigned char)(((imm) >> 8) & 0xff); } while (0)
/* Emit an 8-bit immediate (low byte of imm) and advance the stream. */
#define x86_imm_emit8(inst,imm) \
	do { \
		*(inst)++ = (unsigned char)((imm) & 0xff); \
	} while (0)
/* True when imm fits a sign-extended 8-bit immediate (-128..127). */
#define x86_is_imm8(imm) (((int)(imm) > -129) && ((int)(imm) < 128))
/* True when imm is representable in 16 bits, signed (-32768..32767) or
 * unsigned (0..65535).
 * Fix: the lower bound was -(1<<16); values in -65536..-32769 do not fit
 * in 16 bits and would be silently truncated by x86_imm_emit16. */
#define x86_is_imm16(imm) (((int)(imm) >= -(1<<15) && (int)(imm) <= ((1<<16)-1)))
/* ModRM emission helpers: 'r' goes in the reg field (a register number or an
 * opcode extension), the last argument in the r/m field. */
#define x86_reg_emit(inst,r,regno) do { x86_address_byte ((inst), 3, (r), (regno)); } while (0) /* mod=3: register-direct operand */
#define x86_regp_emit(inst,r,regno) do { x86_address_byte ((inst), 0, (r), (regno)); } while (0) /* mod=0: memory at [reg] */
#define x86_mem_emit(inst,r,disp) do { x86_address_byte ((inst), 0, (r), 5); x86_imm_emit32((inst), (disp)); } while (0) /* mod=0, r/m=5: absolute disp32 */
206 #define x86_membase_emit(inst,r,basereg,disp) do {\
207 if ((basereg) == X86_ESP) { \
209 x86_address_byte ((inst), 0, (r), X86_ESP); \
210 x86_address_byte ((inst), 0, X86_ESP, X86_ESP); \
211 } else if (x86_is_imm8((disp))) { \
212 x86_address_byte ((inst), 1, (r), X86_ESP); \
213 x86_address_byte ((inst), 0, X86_ESP, X86_ESP); \
214 x86_imm_emit8 ((inst), (disp)); \
216 x86_address_byte ((inst), 2, (r), X86_ESP); \
217 x86_address_byte ((inst), 0, X86_ESP, X86_ESP); \
218 x86_imm_emit32 ((inst), (disp)); \
222 if ((disp) == 0 && (basereg) != X86_EBP) { \
223 x86_address_byte ((inst), 0, (r), (basereg)); \
226 if (x86_is_imm8((disp))) { \
227 x86_address_byte ((inst), 1, (r), (basereg)); \
228 x86_imm_emit8 ((inst), (disp)); \
230 x86_address_byte ((inst), 2, (r), (basereg)); \
231 x86_imm_emit32 ((inst), (disp)); \
236 * TODO: memindex_emit
/* Emit one raw prefix byte (LOCK, REP*, operand/address-size, ...). */
#define x86_prefix(inst,p) \
	do { \
		*(inst)++ = (unsigned char)(p); \
	} while (0)
241 #define x86_rdtsc(inst) \
247 #define x86_cmpxchg_reg_reg(inst,dreg,reg) \
249 *(inst)++ = (unsigned char)0x0f; \
250 *(inst)++ = (unsigned char)0xb1; \
251 x86_reg_emit ((inst), (reg), (dreg)); \
254 #define x86_cmpxchg_mem_reg(inst,mem,reg) \
256 *(inst)++ = (unsigned char)0x0f; \
257 *(inst)++ = (unsigned char)0xb1; \
258 x86_mem_emit ((inst), (reg), (mem)); \
261 #define x86_cmpxchg_membase_reg(inst,basereg,disp,reg) \
263 *(inst)++ = (unsigned char)0x0f; \
264 *(inst)++ = (unsigned char)0xb1; \
265 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
268 #define x86_xchg_reg_reg(inst,dreg,reg,size) \
271 *(inst)++ = (unsigned char)0x86; \
273 *(inst)++ = (unsigned char)0x87; \
274 x86_reg_emit ((inst), (reg), (dreg)); \
277 #define x86_xchg_mem_reg(inst,mem,reg,size) \
280 *(inst)++ = (unsigned char)0x86; \
282 *(inst)++ = (unsigned char)0x87; \
283 x86_mem_emit ((inst), (reg), (mem)); \
286 #define x86_xchg_membase_reg(inst,basereg,disp,reg,size) \
289 *(inst)++ = (unsigned char)0x86; \
291 *(inst)++ = (unsigned char)0x87; \
292 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
295 #define x86_inc_mem(inst,mem) \
297 *(inst)++ = (unsigned char)0xff; \
298 x86_mem_emit ((inst), 0, (mem)); \
301 #define x86_inc_membase(inst,basereg,disp) \
303 *(inst)++ = (unsigned char)0xff; \
304 x86_membase_emit ((inst), 0, (basereg), (disp)); \
/* inc reg32: single-byte opcode 0x40 + register number (reg in 0..7). */
#define x86_inc_reg(inst,reg) do { *(inst)++ = (unsigned char)(0x40 + (reg)); } while (0)
309 #define x86_dec_mem(inst,mem) \
311 *(inst)++ = (unsigned char)0xff; \
312 x86_mem_emit ((inst), 1, (mem)); \
315 #define x86_dec_membase(inst,basereg,disp) \
317 *(inst)++ = (unsigned char)0xff; \
318 x86_membase_emit ((inst), 1, (basereg), (disp)); \
/* dec reg32: single-byte opcode 0x48 + register number (reg in 0..7). */
#define x86_dec_reg(inst,reg) do { *(inst)++ = (unsigned char)(0x48 + (reg)); } while (0)
323 #define x86_not_mem(inst,mem) \
325 *(inst)++ = (unsigned char)0xf7; \
326 x86_mem_emit ((inst), 2, (mem)); \
329 #define x86_not_membase(inst,basereg,disp) \
331 *(inst)++ = (unsigned char)0xf7; \
332 x86_membase_emit ((inst), 2, (basereg), (disp)); \
335 #define x86_not_reg(inst,reg) \
337 *(inst)++ = (unsigned char)0xf7; \
338 x86_reg_emit ((inst), 2, (reg)); \
341 #define x86_neg_mem(inst,mem) \
343 *(inst)++ = (unsigned char)0xf7; \
344 x86_mem_emit ((inst), 3, (mem)); \
347 #define x86_neg_membase(inst,basereg,disp) \
349 *(inst)++ = (unsigned char)0xf7; \
350 x86_membase_emit ((inst), 3, (basereg), (disp)); \
353 #define x86_neg_reg(inst,reg) \
355 *(inst)++ = (unsigned char)0xf7; \
356 x86_reg_emit ((inst), 3, (reg)); \
#define x86_nop(inst) do { *(inst)++ = (unsigned char)0x90; } while (0) /* nop (encoded as xchg eax,eax) */
361 #define x86_alu_reg_imm(inst,opc,reg,imm) \
363 if ((reg) == X86_EAX) { \
364 *(inst)++ = (((unsigned char)(opc)) << 3) + 5; \
365 x86_imm_emit32 ((inst), (imm)); \
368 if (x86_is_imm8((imm))) { \
369 *(inst)++ = (unsigned char)0x83; \
370 x86_reg_emit ((inst), (opc), (reg)); \
371 x86_imm_emit8 ((inst), (imm)); \
373 *(inst)++ = (unsigned char)0x81; \
374 x86_reg_emit ((inst), (opc), (reg)); \
375 x86_imm_emit32 ((inst), (imm)); \
379 #define x86_alu_mem_imm(inst,opc,mem,imm) \
381 if (x86_is_imm8((imm))) { \
382 *(inst)++ = (unsigned char)0x83; \
383 x86_mem_emit ((inst), (opc), (mem)); \
384 x86_imm_emit8 ((inst), (imm)); \
386 *(inst)++ = (unsigned char)0x81; \
387 x86_mem_emit ((inst), (opc), (mem)); \
388 x86_imm_emit32 ((inst), (imm)); \
392 #define x86_alu_membase_imm(inst,opc,basereg,disp,imm) \
394 if (x86_is_imm8((imm))) { \
395 *(inst)++ = (unsigned char)0x83; \
396 x86_membase_emit ((inst), (opc), (basereg), (disp)); \
397 x86_imm_emit8 ((inst), (imm)); \
399 *(inst)++ = (unsigned char)0x81; \
400 x86_membase_emit ((inst), (opc), (basereg), (disp)); \
401 x86_imm_emit32 ((inst), (imm)); \
405 #define x86_alu_mem_reg(inst,opc,mem,reg) \
407 *(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
408 x86_mem_emit ((inst), (reg), (mem)); \
411 #define x86_alu_membase_reg(inst,opc,basereg,disp,reg) \
413 *(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
414 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
417 #define x86_alu_reg_reg(inst,opc,dreg,reg) \
419 *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
420 x86_reg_emit ((inst), (dreg), (reg)); \
423 #define x86_alu_reg_mem(inst,opc,reg,mem) \
425 *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
426 x86_mem_emit ((inst), (reg), (mem)); \
429 #define x86_alu_reg_membase(inst,opc,reg,basereg,disp) \
431 *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
432 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
435 #define x86_test_reg_imm(inst,reg,imm) \
437 if ((reg) == X86_EAX) { \
438 *(inst)++ = (unsigned char)0xa9; \
440 *(inst)++ = (unsigned char)0xf7; \
441 x86_reg_emit ((inst), 0, (reg)); \
443 x86_imm_emit32 ((inst), (imm)); \
446 #define x86_test_mem_imm(inst,mem,imm) \
448 *(inst)++ = (unsigned char)0xf7; \
449 x86_mem_emit ((inst), 0, (mem)); \
450 x86_imm_emit32 ((inst), (imm)); \
453 #define x86_test_membase_imm(inst,basereg,disp,imm) \
455 *(inst)++ = (unsigned char)0xf7; \
456 x86_membase_emit ((inst), 0, (basereg), (disp)); \
457 x86_imm_emit32 ((inst), (imm)); \
460 #define x86_test_reg_reg(inst,dreg,reg) \
462 *(inst)++ = (unsigned char)0x85; \
463 x86_reg_emit ((inst), (reg), (dreg)); \
466 #define x86_test_mem_reg(inst,mem,reg) \
468 *(inst)++ = (unsigned char)0x85; \
469 x86_mem_emit ((inst), (reg), (mem)); \
472 #define x86_test_membase_reg(inst,basereg,disp,reg) \
474 *(inst)++ = (unsigned char)0x85; \
475 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
478 #define x86_shift_reg_imm(inst,opc,reg,imm) \
481 *(inst)++ = (unsigned char)0xd1; \
482 x86_reg_emit ((inst), (opc), (reg)); \
484 *(inst)++ = (unsigned char)0xc1; \
485 x86_reg_emit ((inst), (opc), (reg)); \
486 x86_imm_emit8 ((inst), (imm)); \
490 #define x86_shift_mem_imm(inst,opc,mem,imm) \
493 *(inst)++ = (unsigned char)0xd1; \
494 x86_mem_emit ((inst), (opc), (mem)); \
496 *(inst)++ = (unsigned char)0xc1; \
497 x86_mem_emit ((inst), (opc), (mem)); \
498 x86_imm_emit8 ((inst), (imm)); \
502 #define x86_shift_membase_imm(inst,opc,basereg,disp,imm) \
505 *(inst)++ = (unsigned char)0xd1; \
506 x86_membase_emit ((inst), (opc), (basereg), (disp)); \
508 *(inst)++ = (unsigned char)0xc1; \
509 x86_membase_emit ((inst), (opc), (basereg), (disp)); \
510 x86_imm_emit8 ((inst), (imm)); \
514 #define x86_shift_reg(inst,opc,reg) \
516 *(inst)++ = (unsigned char)0xd3; \
517 x86_reg_emit ((inst), (opc), (reg)); \
520 #define x86_shift_mem(inst,opc,mem) \
522 *(inst)++ = (unsigned char)0xd3; \
523 x86_mem_emit ((inst), (opc), (mem)); \
526 #define x86_shift_membase(inst,opc,basereg,disp) \
528 *(inst)++ = (unsigned char)0xd3; \
529 x86_membase_emit ((inst), (opc), (basereg), (disp)); \
533 * Multi op shift missing.
539 #define x86_mul_reg(inst,reg,is_signed) \
541 *(inst)++ = (unsigned char)0xf7; \
542 x86_reg_emit ((inst), 4 + ((is_signed) ? 1 : 0), (reg)); \
545 #define x86_mul_mem(inst,mem,is_signed) \
547 *(inst)++ = (unsigned char)0xf7; \
548 x86_mem_emit ((inst), 4 + ((is_signed) ? 1 : 0), (mem)); \
551 #define x86_mul_membase(inst,basereg,disp,is_signed) \
553 *(inst)++ = (unsigned char)0xf7; \
554 x86_membase_emit ((inst), 4 + ((is_signed) ? 1 : 0), (basereg), (disp)); \
560 #define x86_imul_reg_reg(inst,dreg,reg) \
562 *(inst)++ = (unsigned char)0x0f; \
563 *(inst)++ = (unsigned char)0xaf; \
564 x86_reg_emit ((inst), (dreg), (reg)); \
567 #define x86_imul_reg_mem(inst,reg,mem) \
569 *(inst)++ = (unsigned char)0x0f; \
570 *(inst)++ = (unsigned char)0xaf; \
571 x86_mem_emit ((inst), (reg), (mem)); \
574 #define x86_imul_reg_membase(inst,reg,basereg,disp) \
576 *(inst)++ = (unsigned char)0x0f; \
577 *(inst)++ = (unsigned char)0xaf; \
578 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
584 #define x86_imul_reg_reg_imm(inst,dreg,reg,imm) \
586 if (x86_is_imm8 ((imm))) { \
587 *(inst)++ = (unsigned char)0x6b; \
588 x86_reg_emit ((inst), (dreg), (reg)); \
589 x86_imm_emit8 ((inst), (imm)); \
591 *(inst)++ = (unsigned char)0x69; \
592 x86_reg_emit ((inst), (dreg), (reg)); \
593 x86_imm_emit32 ((inst), (imm)); \
/* imul reg32, m32, imm: 0x6B /r ib for an 8-bit immediate, 0x69 /r id for a
 * 32-bit immediate.
 * Fix: the imm32 branch emitted the address with x86_reg_emit, which treats
 * 'mem' as a register number; it must use x86_mem_emit like the imm8 branch
 * (and like x86_imul_reg_membase_imm). */
#define x86_imul_reg_mem_imm(inst,reg,mem,imm) \
	do { \
		if (x86_is_imm8 ((imm))) { \
			*(inst)++ = (unsigned char)0x6b; \
			x86_mem_emit ((inst), (reg), (mem)); \
			x86_imm_emit8 ((inst), (imm)); \
		} else { \
			*(inst)++ = (unsigned char)0x69; \
			x86_mem_emit ((inst), (reg), (mem)); \
			x86_imm_emit32 ((inst), (imm)); \
		} \
	} while (0)
610 #define x86_imul_reg_membase_imm(inst,reg,basereg,disp,imm) \
612 if (x86_is_imm8 ((imm))) { \
613 *(inst)++ = (unsigned char)0x6b; \
614 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
615 x86_imm_emit8 ((inst), (imm)); \
617 *(inst)++ = (unsigned char)0x69; \
618 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
619 x86_imm_emit32 ((inst), (imm)); \
624 * divide EDX:EAX by rm;
625 * eax = quotient, edx = remainder
628 #define x86_div_reg(inst,reg,is_signed) \
630 *(inst)++ = (unsigned char)0xf7; \
631 x86_reg_emit ((inst), 6 + ((is_signed) ? 1 : 0), (reg)); \
634 #define x86_div_mem(inst,mem,is_signed) \
636 *(inst)++ = (unsigned char)0xf7; \
637 x86_mem_emit ((inst), 6 + ((is_signed) ? 1 : 0), (mem)); \
640 #define x86_div_membase(inst,basereg,disp,is_signed) \
642 *(inst)++ = (unsigned char)0xf7; \
643 x86_membase_emit ((inst), 6 + ((is_signed) ? 1 : 0), (basereg), (disp)); \
646 #define x86_mov_mem_reg(inst,mem,reg,size) \
649 case 1: *(inst)++ = (unsigned char)0x88; break; \
650 case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
651 case 4: *(inst)++ = (unsigned char)0x89; break; \
652 default: assert (0); \
654 x86_mem_emit ((inst), (reg), (mem)); \
657 #define x86_mov_regp_reg(inst,regp,reg,size) \
660 case 1: *(inst)++ = (unsigned char)0x88; break; \
661 case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
662 case 4: *(inst)++ = (unsigned char)0x89; break; \
663 default: assert (0); \
665 x86_regp_emit ((inst), (reg), (regp)); \
668 #define x86_mov_membase_reg(inst,basereg,disp,reg,size) \
671 case 1: *(inst)++ = (unsigned char)0x88; break; \
672 case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
673 case 4: *(inst)++ = (unsigned char)0x89; break; \
674 default: assert (0); \
676 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
679 #define x86_mov_reg_reg(inst,dreg,reg,size) \
682 case 1: *(inst)++ = (unsigned char)0x8a; break; \
683 case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
684 case 4: *(inst)++ = (unsigned char)0x8b; break; \
685 default: assert (0); \
687 x86_reg_emit ((inst), (dreg), (reg)); \
690 #define x86_mov_reg_mem(inst,reg,mem,size) \
693 case 1: *(inst)++ = (unsigned char)0x8a; break; \
694 case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
695 case 4: *(inst)++ = (unsigned char)0x8b; break; \
696 default: assert (0); \
698 x86_mem_emit ((inst), (reg), (mem)); \
701 #define x86_mov_reg_membase(inst,reg,basereg,disp,size) \
704 case 1: *(inst)++ = (unsigned char)0x8a; break; \
705 case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
706 case 4: *(inst)++ = (unsigned char)0x8b; break; \
707 default: assert (0); \
709 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
712 #define x86_mov_reg_imm(inst,reg,imm) \
715 x86_alu_reg_reg ((inst), X86_XOR, (reg), (reg)); \
717 *(inst)++ = (unsigned char)0xb8 + (reg); \
718 x86_imm_emit32 ((inst), (imm)); \
722 #define x86_mov_mem_imm(inst,mem,imm,size) \
725 *(inst)++ = (unsigned char)0xc6; \
726 x86_mem_emit ((inst), 0, (mem)); \
727 x86_imm_emit8 ((inst), (imm)); \
728 } else if ((size) == 2) { \
729 *(inst)++ = (unsigned char)0x66; \
730 *(inst)++ = (unsigned char)0xc7; \
731 x86_mem_emit ((inst), 0, (mem)); \
732 x86_imm_emit16 ((inst), (imm)); \
734 *(inst)++ = (unsigned char)0xc7; \
735 x86_mem_emit ((inst), 0, (mem)); \
736 x86_imm_emit32 ((inst), (imm)); \
740 #define x86_mov_membase_imm(inst,basereg,disp,imm,size) \
743 *(inst)++ = (unsigned char)0xc6; \
744 x86_membase_emit ((inst), 0, (basereg), (disp)); \
745 x86_imm_emit8 ((inst), (imm)); \
746 } else if ((size) == 2) { \
747 *(inst)++ = (unsigned char)0x66; \
748 *(inst)++ = (unsigned char)0xc7; \
749 x86_membase_emit ((inst), 0, (basereg), (disp)); \
750 x86_imm_emit16 ((inst), (imm)); \
752 *(inst)++ = (unsigned char)0xc7; \
753 x86_membase_emit ((inst), 0, (basereg), (disp)); \
754 x86_imm_emit32 ((inst), (imm)); \
758 #define x86_lea_mem(inst,reg,mem) \
760 *(inst)++ = (unsigned char)0x8d; \
761 x86_mem_emit ((inst), (reg), (mem)); \
764 #define x86_lea_membase(inst,reg,basereg,disp) \
766 *(inst)++ = (unsigned char)0x8d; \
767 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
770 #define x86_widen_reg(inst,dreg,reg,is_signed,is_half) \
772 unsigned char op = 0xb6; \
773 *(inst)++ = (unsigned char)0x0f; \
774 if ((is_signed)) op += 0x08; \
775 if ((is_half)) op += 0x01; \
777 x86_reg_emit ((inst), (dreg), (reg)); \
780 #define x86_widen_mem(inst,dreg,mem,is_signed,is_half) \
782 unsigned char op = 0xb6; \
783 *(inst)++ = (unsigned char)0x0f; \
784 if ((is_signed)) op += 0x08; \
785 if ((is_half)) op += 0x01; \
787 x86_mem_emit ((inst), (dreg), (mem)); \
790 #define x86_widen_membase(inst,dreg,basereg,disp,is_signed,is_half) \
792 unsigned char op = 0xb6; \
793 *(inst)++ = (unsigned char)0x0f; \
794 if ((is_signed)) op += 0x08; \
795 if ((is_half)) op += 0x01; \
797 x86_membase_emit ((inst), (dreg), (basereg), (disp)); \
#define x86_cdq(inst) do { *(inst)++ = (unsigned char)0x99; } while (0) /* cdq: sign-extend EAX into EDX:EAX */
#define x86_wait(inst) do { *(inst)++ = (unsigned char)0x9b; } while (0) /* wait/fwait: service pending FPU exceptions */
803 #define x86_fp_op_mem(inst,opc,mem,is_double) \
805 *(inst)++ = (is_double) ? (unsigned char)0xdc : (unsigned char)0xd8; \
806 x86_mem_emit ((inst), (opc), (mem)); \
809 #define x86_fp_op(inst,opc,index) \
811 *(inst)++ = (unsigned char)0xd8; \
812 *(inst)++ = (unsigned char)0xc0+((opc)<<3)+((index)&0x07); \
815 #define x86_fp_op_reg(inst,opc,index,pop_stack) \
817 static const unsigned char map[] = { 0, 1, 2, 3, 5, 4, 7, 6, 8}; \
818 *(inst)++ = (pop_stack) ? (unsigned char)0xde : (unsigned char)0xdc; \
819 *(inst)++ = (unsigned char)0xc0+(map[(opc)]<<3)+((index)&0x07); \
822 #define x86_fstp(inst,index) \
824 *(inst)++ = (unsigned char)0xdd; \
825 *(inst)++ = (unsigned char)0xd8+(index); \
828 #define x86_fcompp(inst) \
830 *(inst)++ = (unsigned char)0xde; \
831 *(inst)++ = (unsigned char)0xd9; \
834 #define x86_fnstsw(inst) \
836 *(inst)++ = (unsigned char)0xdf; \
837 *(inst)++ = (unsigned char)0xe0; \
840 #define x86_fnstcw(inst,mem) \
842 *(inst)++ = (unsigned char)0xd9; \
843 x86_mem_emit ((inst), 7, (mem)); \
846 #define x86_fnstcw_membase(inst,basereg,disp) \
848 *(inst)++ = (unsigned char)0xd9; \
849 x86_membase_emit ((inst), 7, (basereg), (disp)); \
852 #define x86_fldcw(inst,mem) \
854 *(inst)++ = (unsigned char)0xd9; \
855 x86_mem_emit ((inst), 5, (mem)); \
858 #define x86_fldcw_membase(inst,basereg,disp) \
860 *(inst)++ = (unsigned char)0xd9; \
861 x86_membase_emit ((inst), 5, (basereg), (disp)); \
864 #define x86_fchs(inst) \
866 *(inst)++ = (unsigned char)0xd9; \
867 *(inst)++ = (unsigned char)0xe0; \
870 #define x86_frem(inst) \
872 *(inst)++ = (unsigned char)0xd9; \
873 *(inst)++ = (unsigned char)0xf8; \
876 #define x86_fxch(inst,index) \
878 *(inst)++ = (unsigned char)0xd9; \
879 *(inst)++ = (unsigned char)0xc8 + ((index) & 0x07); \
882 #define x86_fcomip(inst,index) \
884 *(inst)++ = (unsigned char)0xdf; \
885 *(inst)++ = (unsigned char)0xf0 + ((index) & 0x07); \
888 #define x86_fld(inst,mem,is_double) \
890 *(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9; \
891 x86_mem_emit ((inst), 0, (mem)); \
894 #define x86_fld_membase(inst,basereg,disp,is_double) \
896 *(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9; \
897 x86_membase_emit ((inst), 0, (basereg), (disp)); \
900 #define x86_fld80(inst,mem) \
902 *(inst)++ = (unsigned char)0xdb; \
903 x86_mem_emit ((inst), 5, (mem)); \
906 #define x86_fld80_membase(inst,basereg,disp) \
908 *(inst)++ = (unsigned char)0xdb; \
909 x86_membase_emit ((inst), 5, (basereg), (disp)); \
912 #define x86_fild(inst,mem,is_long) \
915 *(inst)++ = (unsigned char)0xdf; \
916 x86_mem_emit ((inst), 5, (mem)); \
918 *(inst)++ = (unsigned char)0xdb; \
919 x86_mem_emit ((inst), 0, (mem)); \
923 #define x86_fild_membase(inst,basereg,disp,is_long) \
926 *(inst)++ = (unsigned char)0xdf; \
927 x86_membase_emit ((inst), 5, (basereg), (disp)); \
929 *(inst)++ = (unsigned char)0xdb; \
930 x86_membase_emit ((inst), 0, (basereg), (disp)); \
934 #define x86_fld_reg(inst,index) \
936 *(inst)++ = (unsigned char)0xd9; \
937 *(inst)++ = (unsigned char)0xc0 + ((index) & 0x07); \
940 #define x86_fldz(inst) \
942 *(inst)++ = (unsigned char)0xd9; \
943 *(inst)++ = (unsigned char)0xee; \
946 #define x86_fld1(inst) \
948 *(inst)++ = (unsigned char)0xd9; \
949 *(inst)++ = (unsigned char)0xe8; \
952 #define x86_fst(inst,mem,is_double,pop_stack) \
954 *(inst)++ = (is_double) ? (unsigned char)0xdd: (unsigned char)0xd9; \
955 x86_mem_emit ((inst), 2 + ((pop_stack) ? 1 : 0), (mem)); \
958 #define x86_fst_membase(inst,basereg,disp,is_double,pop_stack) \
960 *(inst)++ = (is_double) ? (unsigned char)0xdd: (unsigned char)0xd9; \
961 x86_membase_emit ((inst), 2 + ((pop_stack) ? 1 : 0), (basereg), (disp)); \
964 #define x86_fist_pop(inst,mem,is_long) \
967 *(inst)++ = (unsigned char)0xdf; \
968 x86_mem_emit ((inst), 7, (mem)); \
970 *(inst)++ = (unsigned char)0xdb; \
971 x86_mem_emit ((inst), 3, (mem)); \
975 #define x86_fist_pop_membase(inst,basereg,disp,is_long) \
978 *(inst)++ = (unsigned char)0xdf; \
979 x86_membase_emit ((inst), 7, (basereg), (disp)); \
981 *(inst)++ = (unsigned char)0xdb; \
982 x86_membase_emit ((inst), 3, (basereg), (disp)); \
986 #define x86_push_reg(inst,reg) \
988 *(inst)++ = (unsigned char)0x50 + (reg); \
991 #define x86_push_regp(inst,reg) \
993 *(inst)++ = (unsigned char)0xff; \
994 x86_regp_emit ((inst), 6, (reg)); \
997 #define x86_push_mem(inst,mem) \
999 *(inst)++ = (unsigned char)0xff; \
1000 x86_mem_emit ((inst), 6, (mem)); \
1003 #define x86_push_membase(inst,basereg,disp) \
1005 *(inst)++ = (unsigned char)0xff; \
1006 x86_membase_emit ((inst), 6, (basereg), (disp)); \
1009 #define x86_push_imm(inst,imm) \
1011 *(inst)++ = (unsigned char)0x68; \
1012 x86_imm_emit32 ((inst), (imm)); \
1015 #define x86_pop_reg(inst,reg) \
1017 *(inst)++ = (unsigned char)0x58 + (reg); \
/* pop m32.
 * Fix: opcode was 0x87, which is xchg r/m32,r32; pop r/m32 is 0x8F with
 * opcode extension /0. */
#define x86_pop_mem(inst,mem) \
	do { \
		*(inst)++ = (unsigned char)0x8f; \
		x86_mem_emit ((inst), 0, (mem)); \
	} while (0)
/* pop m32 at [basereg + disp].
 * Fix: opcode was 0x87 (xchg r/m32,r32); pop r/m32 is 0x8F /0. */
#define x86_pop_membase(inst,basereg,disp) \
	do { \
		*(inst)++ = (unsigned char)0x8f; \
		x86_membase_emit ((inst), 0, (basereg), (disp)); \
	} while (0)
#define x86_pushad(inst) do { *(inst)++ = (unsigned char)0x60; } while (0) /* pusha: push all eight GP regs */
#define x86_pushfd(inst) do { *(inst)++ = (unsigned char)0x9c; } while (0) /* pushf: push EFLAGS */
#define x86_popad(inst)  do { *(inst)++ = (unsigned char)0x61; } while (0) /* popa: pop all eight GP regs */
#define x86_popfd(inst)  do { *(inst)++ = (unsigned char)0x9d; } while (0) /* popf: pop EFLAGS */
1037 #define x86_jump32(inst,imm) \
1039 *(inst)++ = (unsigned char)0xe9; \
1040 x86_imm_emit32 ((inst), (imm)); \
1043 #define x86_jump8(inst,imm) \
1045 *(inst)++ = (unsigned char)0xeb; \
1046 x86_imm_emit8 ((inst), (imm)); \
1049 #define x86_jump_reg(inst,reg) \
1051 *(inst)++ = (unsigned char)0xff; \
1052 x86_reg_emit ((inst), 4, (reg)); \
1055 #define x86_jump_mem(inst,mem) \
1057 *(inst)++ = (unsigned char)0xff; \
1058 x86_mem_emit ((inst), 4, (mem)); \
1061 #define x86_jump_membase(inst,basereg,disp) \
1063 *(inst)++ = (unsigned char)0xff; \
1064 x86_membase_emit ((inst), 4, (basereg), (disp)); \
1068 * target is a pointer in our buffer.
1070 #define x86_jump_code(inst,target) \
1072 int t = (target) - (inst) - 2; \
1073 if (x86_is_imm8(t)) { \
1074 x86_jump8 ((inst), t); \
1077 x86_jump32 ((inst), t); \
1081 #define x86_jump_disp(inst,disp) \
1083 int t = (disp) - 2; \
1084 if (x86_is_imm8(t)) { \
1085 x86_jump8 ((inst), t); \
1088 x86_jump32 ((inst), t); \
1092 #define x86_branch8(inst,cond,imm,is_signed) \
1095 *(inst)++ = x86_cc_signed_map [(cond)]; \
1097 *(inst)++ = x86_cc_unsigned_map [(cond)]; \
1098 x86_imm_emit8 ((inst), (imm)); \
1101 #define x86_branch32(inst,cond,imm,is_signed) \
1103 *(inst)++ = (unsigned char)0x0f; \
1105 *(inst)++ = x86_cc_signed_map [(cond)] + 0x10; \
1107 *(inst)++ = x86_cc_unsigned_map [(cond)] + 0x10; \
1108 x86_imm_emit32 ((inst), (imm)); \
1111 #define x86_branch(inst,cond,target,is_signed) \
1113 int offset = (target) - (inst) - 2; \
1114 if (x86_is_imm8 ((offset))) \
1115 x86_branch8 ((inst), (cond), offset, (is_signed)); \
1118 x86_branch32 ((inst), (cond), offset, (is_signed)); \
1122 #define x86_branch_disp(inst,cond,disp,is_signed) \
1124 int offset = (disp) - 2; \
1125 if (x86_is_imm8 ((offset))) \
1126 x86_branch8 ((inst), (cond), offset, (is_signed)); \
1129 x86_branch32 ((inst), (cond), offset, (is_signed)); \
1133 #define x86_call_imm(inst,disp) \
1135 *(inst)++ = (unsigned char)0xe8; \
1136 x86_imm_emit32 ((inst), (int)(disp)); \
1139 #define x86_call_reg(inst,reg) \
1141 *(inst)++ = (unsigned char)0xff; \
1142 x86_reg_emit ((inst), 2, (reg)); \
1145 #define x86_call_mem(inst,mem) \
1147 *(inst)++ = (unsigned char)0xff; \
1148 x86_mem_emit ((inst), 2, (mem)); \
1151 #define x86_call_membase(inst,basereg,disp) \
1153 *(inst)++ = (unsigned char)0xff; \
1154 x86_membase_emit ((inst), 2, (basereg), (disp)); \
1157 #define x86_call_code(inst,target) \
1159 int offset = (unsigned char*)(target) - (inst); \
1161 x86_call_imm ((inst), offset); \
#define x86_ret(inst) do { *(inst)++ = (unsigned char)0xc3; } while (0) /* near return */
1166 #define x86_ret_imm(inst,imm) \
1171 *(inst)++ = (unsigned char)0xc2; \
1172 x86_imm_emit16 ((inst), (imm)); \
1176 #define x86_cmov_reg(inst,cond,is_signed,dreg,reg) \
1178 *(inst)++ = (unsigned char) 0x0f; \
1180 *(inst)++ = x86_cc_signed_map [(cond)] - 0x30; \
1182 *(inst)++ = x86_cc_unsigned_map [(cond)] - 0x30; \
1183 x86_reg_emit ((inst), (dreg), (reg)); \
1186 #define x86_cmov_mem(inst,cond,is_signed,reg,mem) \
1188 *(inst)++ = (unsigned char) 0x0f; \
1190 *(inst)++ = x86_cc_signed_map [(cond)] - 0x30; \
1192 *(inst)++ = x86_cc_unsigned_map [(cond)] - 0x30; \
1193 x86_mem_emit ((inst), (reg), (mem)); \
1196 #define x86_cmov_membase(inst,cond,is_signed,reg,basereg,disp) \
1198 *(inst)++ = (unsigned char) 0x0f; \
1200 *(inst)++ = x86_cc_signed_map [(cond)] - 0x30; \
1202 *(inst)++ = x86_cc_unsigned_map [(cond)] - 0x30; \
1203 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
1206 #define x86_enter(inst,framesize) \
1208 *(inst)++ = (unsigned char)0xc8; \
1209 x86_imm_emit16 ((inst), (framesize)); \
#define x86_leave(inst) do { *(inst)++ = (unsigned char)0xc9; } while (0) /* leave: mov esp,ebp; pop ebp */
#define x86_sahf(inst)  do { *(inst)++ = (unsigned char)0x9e; } while (0) /* sahf: store AH into low flags (used after fnstsw) */

/* One-operand x87 ops on ST(0); all are the two-byte D9 xx encodings. */
#define x86_fsin(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xfe; } while (0)
#define x86_fcos(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xff; } while (0)
#define x86_fabs(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xe1; } while (0)
#define x86_fpatan(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf3; } while (0) /* partial arctangent of ST(1)/ST(0) */
#define x86_fprem(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf8; } while (0) /* partial remainder (truncating) */
#define x86_fprem1(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf5; } while (0) /* partial remainder (IEEE round-to-nearest) */
#define x86_frndint(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xfc; } while (0)
#define x86_fsqrt(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xfa; } while (0)
#define x86_fptan(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf2; } while (0)
1226 #define x86_padding(inst,size) \
1229 case 1: x86_nop ((inst)); break; \
1230 case 2: *(inst)++ = 0x8b; \
1231 *(inst)++ = 0xc0; break; \
1232 case 3: *(inst)++ = 0x8d; *(inst)++ = 0x6d; \
1233 *(inst)++ = 0x00; break; \
1234 case 4: *(inst)++ = 0x8d; *(inst)++ = 0x64; \
1235 *(inst)++ = 0x24; *(inst)++ = 0x00; \
1237 case 5: *(inst)++ = 0x8d; *(inst)++ = 0x64; \
1238 *(inst)++ = 0x24; *(inst)++ = 0x00; \
1239 x86_nop ((inst)); break; \
1240 case 6: *(inst)++ = 0x8d; *(inst)++ = 0xad; \
1241 *(inst)++ = 0x00; *(inst)++ = 0x00; \
1242 *(inst)++ = 0x00; *(inst)++ = 0x00; \
1244 case 7: *(inst)++ = 0x8d; *(inst)++ = 0xa4; \
1245 *(inst)++ = 0x24; *(inst)++ = 0x00; \
1246 *(inst)++ = 0x00; *(inst)++ = 0x00; \
1247 *(inst)++ = 0x00; break; \
1248 default: assert (0); \
1252 #define x86_prolog(inst,frame_size,reg_mask) \
1254 unsigned i, m = 1; \
1255 x86_enter ((inst), (frame_size)); \
1256 for (i = 0; i < X86_NREG; ++i, m <<= 1) { \
1257 if ((reg_mask) & m) \
1258 x86_push_reg ((inst), i); \
1262 #define x86_epilog(inst,reg_mask) \
1264 unsigned i, m = 1 << X86_EDI; \
1265 for (i = X86_EDI; m != 0; i--, m=m>>1) { \
1266 if ((reg_mask) & m) \
1267 x86_pop_reg ((inst), i); \
1269 x86_leave ((inst)); \