1 /* Copyright (C) 2000 Intel Corporation. All rights reserved.
2 Copyright (C) 2001 Ximian, Inc.
4 // $Header: /home/miguel/third-conversion/public/mono/mono/arch/x86/x86-codegen.h,v 1.11 2001/09/26 10:33:18 lupus Exp $
11 // x86 register numbers
25 // opcodes for alu instructions
39 // opcodes for shift instructions
50 // opcodes for floating-point instructions
64 // integer conditions codes
83 X86_LOCK_PREFIX = 0xF0,
84 X86_REPNZ_PREFIX = 0xF2,
85 X86_REPZ_PREFIX = 0xF3,
86 X86_REP_PREFIX = 0xF3,
93 X86_OPERAND_PREFIX = 0x66,
94 X86_ADDRESS_PREFIX = 0x67
97 static const unsigned char
98 x86_cc_unsigned_map [X86_NCC] = {
111 static const unsigned char
112 x86_cc_signed_map [X86_NCC] = {
// bitvector mask for callee-saved registers
#define X86_ESI_MASK (1<<X86_ESI)
#define X86_EDI_MASK (1<<X86_EDI)
#define X86_EBX_MASK (1<<X86_EBX)
#define X86_EBP_MASK (1<<X86_EBP)

/* NOTE: historical naming — X86_CALLEE_REGS is the scratch (caller-saved) set,
 * X86_CALLER_REGS is the preserved (callee-saved) set. */
#define X86_CALLEE_REGS ((1<<X86_EAX) | (1<<X86_ECX) | (1<<X86_EDX))
#define X86_CALLER_REGS ((1<<X86_EBX) | (1<<X86_EBP) | (1<<X86_ESI) | (1<<X86_EDI))
/* registers with an addressable low byte (AL/CL/DL/BL) */
#define X86_BYTE_REGS   ((1<<X86_EAX) | (1<<X86_ECX) | (1<<X86_EDX) | (1<<X86_EBX))

/* BUG FIX: the two predicates below had their masks swapped, contradicting
 * their own comments: scratch regs are EAX/ECX/EDX (X86_CALLEE_REGS above),
 * the preserved set is EBX/EBP/ESI/EDI (X86_CALLER_REGS above). */
#define X86_IS_SCRATCH(reg) (X86_CALLEE_REGS & (1 << (reg))) /* X86_EAX, X86_ECX, or X86_EDX */
#define X86_IS_CALLEE(reg)  (X86_CALLER_REGS & (1 << (reg))) /* X86_ESI, X86_EDI, X86_EBX, or X86_EBP */
148 // +--------------------------------+
149 // | in_arg[0] = var[0] |
150 // | in_arg[1] = var[1] |
152 // | in_arg[n_arg-1] = var[n_arg-1] |
153 // +--------------------------------+
155 // +--------------------------------+
156 // | saved EBP | <-- frame pointer (EBP)
157 // +--------------------------------+
159 // +--------------------------------+
161 // | var[n_arg+1] | local variables area
164 // +--------------------------------+
167 // | spill area | area for spilling mimic stack
169 // +--------------------------------+
171 // | ebp [ESP_Frame only] |
172 // | esi | 0..3 callee-saved regs
173 // | edi | <-- stack pointer (ESP)
174 // +--------------------------------+
176 // | stk1 | operand stack area/
177 // | . . . | out args
179 // +--------------------------------+
186 * useful building blocks
/* Emit a ModR/M (or SIB) byte: mod in bits 7..6, reg/opcode in 5..3, r/m in 2..0. */
#define x86_address_byte(inst,m,o,r) do { *(inst)++ = ((((m)&0x03)<<6)|(((o)&0x07)<<3)|(((r)&0x07))); } while (0)

/* Emit a 32-bit immediate, least-significant byte first (x86 is little-endian).
 * FIX: byte-wise shifts replace the union type-pun, which depended on the
 * HOST byte order (wrong output when cross-compiling from a big-endian host)
 * and needed the x86_imm_buf helper type. */
#define x86_imm_emit32(inst,imm) \
    do { \
        unsigned int _x86_imm = (unsigned int)(int)(imm); \
        *(inst)++ = (unsigned char)(_x86_imm & 0xff); \
        *(inst)++ = (unsigned char)((_x86_imm >> 8) & 0xff); \
        *(inst)++ = (unsigned char)((_x86_imm >> 16) & 0xff); \
        *(inst)++ = (unsigned char)((_x86_imm >> 24) & 0xff); \
    } while (0)

/* Emit a 16-bit immediate, little-endian.
 * FIX: the previous `*(short*)(inst) = (imm)` was an unaligned, strict-aliasing
 * violating store and host-endian dependent; byte stores are well defined. */
#define x86_imm_emit16(inst,imm) \
    do { \
        unsigned int _x86_imm = (unsigned int)(imm); \
        *(inst)++ = (unsigned char)(_x86_imm & 0xff); \
        *(inst)++ = (unsigned char)((_x86_imm >> 8) & 0xff); \
    } while (0)

/* Emit an 8-bit immediate. */
#define x86_imm_emit8(inst,imm) do { *(inst) = (unsigned char)((imm) & 0xff); ++(inst); } while (0)

/* Does imm fit in a sign-extended 8-bit immediate? */
#define x86_is_imm8(imm) (((int)(imm) >= -128 && (int)(imm) <= 127))
/* Does imm fit in 16 bits (signed or unsigned)?
 * FIX: lower bound was -(1<<16), wrongly accepting values down to -65536;
 * the smallest representable 16-bit value is -(1<<15) = -32768. */
#define x86_is_imm16(imm) (((int)(imm) >= -(1<<15) && (int)(imm) <= ((1<<16)-1)))

/* mod=3: register-direct operand */
#define x86_reg_emit(inst,r,regno) do { x86_address_byte ((inst), 3, (r), (regno)); } while (0)
/* mod=0: memory operand [regno] (regno must not be ESP/EBP encodings) */
#define x86_regp_emit(inst,r,regno) do { x86_address_byte ((inst), 0, (r), (regno)); } while (0)
/* mod=0, r/m=5: absolute 32-bit address */
#define x86_mem_emit(inst,r,disp) do { x86_address_byte ((inst), 0, (r), 5); x86_imm_emit32((inst), (disp)); } while (0)
/* Emit ModR/M (+SIB, +disp) for a [basereg + disp] operand.
 * ESP as base always needs a SIB byte; EBP with mod=0 would mean disp32,
 * so [EBP] is encoded as [EBP+disp8=0].  The do/while + break structure
 * restored here follows the visible branch bodies. */
#define x86_membase_emit(inst,r,basereg,disp) \
    do { \
        if ((basereg) == X86_ESP) { \
            if ((disp) == 0) { \
                x86_address_byte ((inst), 0, (r), X86_ESP); \
                x86_address_byte ((inst), 0, X86_ESP, X86_ESP); \
            } else if (x86_is_imm8((disp))) { \
                x86_address_byte ((inst), 1, (r), X86_ESP); \
                x86_address_byte ((inst), 0, X86_ESP, X86_ESP); \
                x86_imm_emit8 ((inst), (disp)); \
            } else { \
                x86_address_byte ((inst), 2, (r), X86_ESP); \
                x86_address_byte ((inst), 0, X86_ESP, X86_ESP); \
                x86_imm_emit32 ((inst), (disp)); \
            } \
            break; \
        } \
        if ((disp) == 0 && (basereg) != X86_EBP) { \
            x86_address_byte ((inst), 0, (r), (basereg)); \
            break; \
        } \
        if (x86_is_imm8((disp))) { \
            x86_address_byte ((inst), 1, (r), (basereg)); \
            x86_imm_emit8 ((inst), (disp)); \
        } else { \
            x86_address_byte ((inst), 2, (r), (basereg)); \
            x86_imm_emit32 ((inst), (disp)); \
        } \
    } while (0)
/* Emit ModR/M + SIB (+disp) for a [basereg + indexreg<<shift + disp] operand.
 * r/m=4 selects the SIB byte; the disp width is picked per x86_is_imm8,
 * and EBP as base cannot use the disp-less mod=0 form. */
#define x86_memindex_emit(inst,r,basereg,disp,indexreg,shift) \
    do { \
        if ((disp) == 0 && (basereg) != X86_EBP) { \
            x86_address_byte ((inst), 0, (r), 4); \
            x86_address_byte ((inst), (shift), (indexreg), (basereg)); \
        } else if (x86_is_imm8((disp))) { \
            x86_address_byte ((inst), 1, (r), 4); \
            x86_address_byte ((inst), (shift), (indexreg), (basereg)); \
            x86_imm_emit8 ((inst), (disp)); \
        } else { \
            x86_address_byte ((inst), 2, (r), 4); \
            x86_address_byte ((inst), (shift), (indexreg), (basereg)); \
            x86_imm_emit32 ((inst), (disp)); \
        } \
    } while (0)
251 #define x86_breakpoint(inst) \
/* Emit a single prefix byte (LOCK, REP..., operand/address size override). */
#define x86_prefix(inst,p) \
    do { \
        *(inst)++ = (unsigned char)(p); \
    } while (0)
258 #define x86_rdtsc(inst) \
/* CMPXCHG r/m32, r32 — opcode 0F B1 /r (prefix with LOCK for atomicity). */
#define x86_cmpxchg_reg_reg(inst,dreg,reg) \
    do { \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0xb1; \
        x86_reg_emit ((inst), (reg), (dreg)); \
    } while (0)

#define x86_cmpxchg_mem_reg(inst,mem,reg) \
    do { \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0xb1; \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_cmpxchg_membase_reg(inst,basereg,disp,reg) \
    do { \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0xb1; \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)
/* XCHG — 0x86 for byte operands, 0x87 for 32-bit (restored size dispatch). */
#define x86_xchg_reg_reg(inst,dreg,reg,size) \
    do { \
        if ((size) == 1) \
            *(inst)++ = (unsigned char)0x86; \
        else \
            *(inst)++ = (unsigned char)0x87; \
        x86_reg_emit ((inst), (reg), (dreg)); \
    } while (0)

#define x86_xchg_mem_reg(inst,mem,reg,size) \
    do { \
        if ((size) == 1) \
            *(inst)++ = (unsigned char)0x86; \
        else \
            *(inst)++ = (unsigned char)0x87; \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_xchg_membase_reg(inst,basereg,disp,reg,size) \
    do { \
        if ((size) == 1) \
            *(inst)++ = (unsigned char)0x86; \
        else \
            *(inst)++ = (unsigned char)0x87; \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)
/* INC r/m32 — FF /0 */
#define x86_inc_mem(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0xff; \
        x86_mem_emit ((inst), 0, (mem)); \
    } while (0)

#define x86_inc_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xff; \
        x86_membase_emit ((inst), 0, (basereg), (disp)); \
    } while (0)

/* INC r32 — short form 0x40+reg */
#define x86_inc_reg(inst,reg) do { *(inst)++ = (unsigned char)0x40 + (reg); } while (0)

/* DEC r/m32 — FF /1 */
#define x86_dec_mem(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0xff; \
        x86_mem_emit ((inst), 1, (mem)); \
    } while (0)

#define x86_dec_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xff; \
        x86_membase_emit ((inst), 1, (basereg), (disp)); \
    } while (0)

/* DEC r32 — short form 0x48+reg */
#define x86_dec_reg(inst,reg) do { *(inst)++ = (unsigned char)0x48 + (reg); } while (0)
/* NOT r/m32 — F7 /2 */
#define x86_not_mem(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_mem_emit ((inst), 2, (mem)); \
    } while (0)

#define x86_not_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_membase_emit ((inst), 2, (basereg), (disp)); \
    } while (0)

#define x86_not_reg(inst,reg) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_reg_emit ((inst), 2, (reg)); \
    } while (0)

/* NEG r/m32 — F7 /3 */
#define x86_neg_mem(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_mem_emit ((inst), 3, (mem)); \
    } while (0)

#define x86_neg_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_membase_emit ((inst), 3, (basereg), (disp)); \
    } while (0)

#define x86_neg_reg(inst,reg) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_reg_emit ((inst), 3, (reg)); \
    } while (0)

/* NOP — 0x90 */
#define x86_nop(inst) do { *(inst)++ = (unsigned char)0x90; } while (0)
/* ALU op with immediate: short EAX form ((opc<<3)+5), sign-extended imm8 form
 * (0x83 /opc) when it fits, otherwise the full imm32 form (0x81 /opc). */
#define x86_alu_reg_imm(inst,opc,reg,imm) \
    do { \
        if ((reg) == X86_EAX) { \
            *(inst)++ = (((unsigned char)(opc)) << 3) + 5; \
            x86_imm_emit32 ((inst), (imm)); \
            break; \
        } \
        if (x86_is_imm8((imm))) { \
            *(inst)++ = (unsigned char)0x83; \
            x86_reg_emit ((inst), (opc), (reg)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else { \
            *(inst)++ = (unsigned char)0x81; \
            x86_reg_emit ((inst), (opc), (reg)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
    } while (0)
/* ALU op on an absolute memory operand with immediate (0x83/imm8 or 0x81/imm32). */
#define x86_alu_mem_imm(inst,opc,mem,imm) \
    do { \
        if (x86_is_imm8((imm))) { \
            *(inst)++ = (unsigned char)0x83; \
            x86_mem_emit ((inst), (opc), (mem)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else { \
            *(inst)++ = (unsigned char)0x81; \
            x86_mem_emit ((inst), (opc), (mem)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
    } while (0)

/* ALU op on a [basereg+disp] operand with immediate. */
#define x86_alu_membase_imm(inst,opc,basereg,disp,imm) \
    do { \
        if (x86_is_imm8((imm))) { \
            *(inst)++ = (unsigned char)0x83; \
            x86_membase_emit ((inst), (opc), (basereg), (disp)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else { \
            *(inst)++ = (unsigned char)0x81; \
            x86_membase_emit ((inst), (opc), (basereg), (disp)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
    } while (0)
/* ALU op, register into memory ((opc<<3)+1 encodes the r/m <- reg direction). */
#define x86_alu_mem_reg(inst,opc,mem,reg) \
    do { \
        *(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_alu_membase_reg(inst,opc,basereg,disp,reg) \
    do { \
        *(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)

/* ALU op, reg/mem into register ((opc<<3)+3 encodes the reg <- r/m direction). */
#define x86_alu_reg_reg(inst,opc,dreg,reg) \
    do { \
        *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
        x86_reg_emit ((inst), (dreg), (reg)); \
    } while (0)

#define x86_alu_reg_mem(inst,opc,reg,mem) \
    do { \
        *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_alu_reg_membase(inst,opc,reg,basereg,disp) \
    do { \
        *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)
/* TEST r/m32, imm32 — short A9 form for EAX, otherwise F7 /0; the imm32 is
 * emitted after either opcode path (restored else structure). */
#define x86_test_reg_imm(inst,reg,imm) \
    do { \
        if ((reg) == X86_EAX) { \
            *(inst)++ = (unsigned char)0xa9; \
        } else { \
            *(inst)++ = (unsigned char)0xf7; \
            x86_reg_emit ((inst), 0, (reg)); \
        } \
        x86_imm_emit32 ((inst), (imm)); \
    } while (0)

#define x86_test_mem_imm(inst,mem,imm) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_mem_emit ((inst), 0, (mem)); \
        x86_imm_emit32 ((inst), (imm)); \
    } while (0)

#define x86_test_membase_imm(inst,basereg,disp,imm) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_membase_emit ((inst), 0, (basereg), (disp)); \
        x86_imm_emit32 ((inst), (imm)); \
    } while (0)

/* TEST r/m32, r32 — 0x85 /r */
#define x86_test_reg_reg(inst,dreg,reg) \
    do { \
        *(inst)++ = (unsigned char)0x85; \
        x86_reg_emit ((inst), (reg), (dreg)); \
    } while (0)

#define x86_test_mem_reg(inst,mem,reg) \
    do { \
        *(inst)++ = (unsigned char)0x85; \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_test_membase_reg(inst,basereg,disp,reg) \
    do { \
        *(inst)++ = (unsigned char)0x85; \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)
/* Shift by immediate: shift-by-1 has a dedicated opcode (0xd1 /opc), other
 * counts use 0xc1 /opc, imm8 (restored imm==1 dispatch). */
#define x86_shift_reg_imm(inst,opc,reg,imm) \
    do { \
        if ((imm) == 1) { \
            *(inst)++ = (unsigned char)0xd1; \
            x86_reg_emit ((inst), (opc), (reg)); \
        } else { \
            *(inst)++ = (unsigned char)0xc1; \
            x86_reg_emit ((inst), (opc), (reg)); \
            x86_imm_emit8 ((inst), (imm)); \
        } \
    } while (0)

#define x86_shift_mem_imm(inst,opc,mem,imm) \
    do { \
        if ((imm) == 1) { \
            *(inst)++ = (unsigned char)0xd1; \
            x86_mem_emit ((inst), (opc), (mem)); \
        } else { \
            *(inst)++ = (unsigned char)0xc1; \
            x86_mem_emit ((inst), (opc), (mem)); \
            x86_imm_emit8 ((inst), (imm)); \
        } \
    } while (0)

#define x86_shift_membase_imm(inst,opc,basereg,disp,imm) \
    do { \
        if ((imm) == 1) { \
            *(inst)++ = (unsigned char)0xd1; \
            x86_membase_emit ((inst), (opc), (basereg), (disp)); \
        } else { \
            *(inst)++ = (unsigned char)0xc1; \
            x86_membase_emit ((inst), (opc), (basereg), (disp)); \
            x86_imm_emit8 ((inst), (imm)); \
        } \
    } while (0)
/* Shift by CL — 0xd3 /opc (the count is implicitly in CL). */
#define x86_shift_reg(inst,opc,reg) \
    do { \
        *(inst)++ = (unsigned char)0xd3; \
        x86_reg_emit ((inst), (opc), (reg)); \
    } while (0)

#define x86_shift_mem(inst,opc,mem) \
    do { \
        *(inst)++ = (unsigned char)0xd3; \
        x86_mem_emit ((inst), (opc), (mem)); \
    } while (0)

#define x86_shift_membase(inst,opc,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xd3; \
        x86_membase_emit ((inst), (opc), (basereg), (disp)); \
    } while (0)
550 * Multi op shift missing.
/* MUL (F7 /4) or IMUL (F7 /5) with EDX:EAX — /4 + 1 selects the signed form. */
#define x86_mul_reg(inst,reg,is_signed) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_reg_emit ((inst), 4 + ((is_signed) ? 1 : 0), (reg)); \
    } while (0)

#define x86_mul_mem(inst,mem,is_signed) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_mem_emit ((inst), 4 + ((is_signed) ? 1 : 0), (mem)); \
    } while (0)

#define x86_mul_membase(inst,basereg,disp,is_signed) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_membase_emit ((inst), 4 + ((is_signed) ? 1 : 0), (basereg), (disp)); \
    } while (0)
/* Two-operand IMUL r32, r/m32 — 0F AF /r. */
#define x86_imul_reg_reg(inst,dreg,reg) \
    do { \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0xaf; \
        x86_reg_emit ((inst), (dreg), (reg)); \
    } while (0)

#define x86_imul_reg_mem(inst,reg,mem) \
    do { \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0xaf; \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_imul_reg_membase(inst,reg,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0xaf; \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)
/* Three-operand IMUL r32, r/m32, imm — 0x6b (imm8) / 0x69 (imm32). */
#define x86_imul_reg_reg_imm(inst,dreg,reg,imm) \
    do { \
        if (x86_is_imm8 ((imm))) { \
            *(inst)++ = (unsigned char)0x6b; \
            x86_reg_emit ((inst), (dreg), (reg)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else { \
            *(inst)++ = (unsigned char)0x69; \
            x86_reg_emit ((inst), (dreg), (reg)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
    } while (0)

/* BUG FIX: the imm32 branch used x86_reg_emit on a memory address, which
 * produced a register-direct ModR/M; a mem operand needs x86_mem_emit. */
#define x86_imul_reg_mem_imm(inst,reg,mem,imm) \
    do { \
        if (x86_is_imm8 ((imm))) { \
            *(inst)++ = (unsigned char)0x6b; \
            x86_mem_emit ((inst), (reg), (mem)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else { \
            *(inst)++ = (unsigned char)0x69; \
            x86_mem_emit ((inst), (reg), (mem)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
    } while (0)

#define x86_imul_reg_membase_imm(inst,reg,basereg,disp,imm) \
    do { \
        if (x86_is_imm8 ((imm))) { \
            *(inst)++ = (unsigned char)0x6b; \
            x86_membase_emit ((inst), (reg), (basereg), (disp)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else { \
            *(inst)++ = (unsigned char)0x69; \
            x86_membase_emit ((inst), (reg), (basereg), (disp)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
    } while (0)
641 * divide EDX:EAX by rm;
642 * eax = quotient, edx = remainder
/* DIV (F7 /6) or IDIV (F7 /7): divide EDX:EAX by r/m; EAX=quotient, EDX=remainder. */
#define x86_div_reg(inst,reg,is_signed) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_reg_emit ((inst), 6 + ((is_signed) ? 1 : 0), (reg)); \
    } while (0)

#define x86_div_mem(inst,mem,is_signed) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_mem_emit ((inst), 6 + ((is_signed) ? 1 : 0), (mem)); \
    } while (0)

#define x86_div_membase(inst,basereg,disp,is_signed) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_membase_emit ((inst), 6 + ((is_signed) ? 1 : 0), (basereg), (disp)); \
    } while (0)
/* MOV store forms: 0x88 for byte, 0x66-prefixed 0x89 for word (deliberate
 * fall-through from case 2 to case 4), 0x89 for dword. */
#define x86_mov_mem_reg(inst,mem,reg,size) \
    do { \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x88; break; \
        case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
        case 4: *(inst)++ = (unsigned char)0x89; break; \
        default: assert (0); \
        } \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_mov_regp_reg(inst,regp,reg,size) \
    do { \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x88; break; \
        case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
        case 4: *(inst)++ = (unsigned char)0x89; break; \
        default: assert (0); \
        } \
        x86_regp_emit ((inst), (reg), (regp)); \
    } while (0)

#define x86_mov_membase_reg(inst,basereg,disp,reg,size) \
    do { \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x88; break; \
        case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
        case 4: *(inst)++ = (unsigned char)0x89; break; \
        default: assert (0); \
        } \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)

#define x86_mov_memindex_reg(inst,basereg,disp,indexreg,shift,reg,size) \
    do { \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x88; break; \
        case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
        case 4: *(inst)++ = (unsigned char)0x89; break; \
        default: assert (0); \
        } \
        x86_memindex_emit ((inst), (reg), (basereg), (disp), (indexreg), (shift)); \
    } while (0)

/* MOV load forms: 0x8a byte, 0x66-prefixed 0x8b word, 0x8b dword. */
#define x86_mov_reg_reg(inst,dreg,reg,size) \
    do { \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x8a; break; \
        case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
        case 4: *(inst)++ = (unsigned char)0x8b; break; \
        default: assert (0); \
        } \
        x86_reg_emit ((inst), (dreg), (reg)); \
    } while (0)

#define x86_mov_reg_mem(inst,reg,mem,size) \
    do { \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x8a; break; \
        case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
        case 4: *(inst)++ = (unsigned char)0x8b; break; \
        default: assert (0); \
        } \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_mov_reg_membase(inst,reg,basereg,disp,size) \
    do { \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x8a; break; \
        case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
        case 4: *(inst)++ = (unsigned char)0x8b; break; \
        default: assert (0); \
        } \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)

#define x86_mov_reg_memindex(inst,reg,basereg,disp,indexreg,shift,size) \
    do { \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x8a; break; \
        case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
        case 4: *(inst)++ = (unsigned char)0x8b; break; \
        default: assert (0); \
        } \
        x86_memindex_emit ((inst), (reg), (basereg), (disp), (indexreg), (shift)); \
    } while (0)
751 #define x86_mov_reg_imm(inst,reg,imm) \
754 x86_alu_reg_reg ((inst), X86_XOR, (reg), (reg)); \
756 *(inst)++ = (unsigned char)0xb8 + (reg); \
757 x86_imm_emit32 ((inst), (imm)); \
/* MOV mem, imm — 0xc6 /0 for byte, 0x66-prefixed 0xc7 /0 for word, 0xc7 /0
 * for dword (restored size==1 branch to match the visible else-if chain). */
#define x86_mov_mem_imm(inst,mem,imm,size) \
    do { \
        if ((size) == 1) { \
            *(inst)++ = (unsigned char)0xc6; \
            x86_mem_emit ((inst), 0, (mem)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else if ((size) == 2) { \
            *(inst)++ = (unsigned char)0x66; \
            *(inst)++ = (unsigned char)0xc7; \
            x86_mem_emit ((inst), 0, (mem)); \
            x86_imm_emit16 ((inst), (imm)); \
        } else { \
            *(inst)++ = (unsigned char)0xc7; \
            x86_mem_emit ((inst), 0, (mem)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
    } while (0)

#define x86_mov_membase_imm(inst,basereg,disp,imm,size) \
    do { \
        if ((size) == 1) { \
            *(inst)++ = (unsigned char)0xc6; \
            x86_membase_emit ((inst), 0, (basereg), (disp)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else if ((size) == 2) { \
            *(inst)++ = (unsigned char)0x66; \
            *(inst)++ = (unsigned char)0xc7; \
            x86_membase_emit ((inst), 0, (basereg), (disp)); \
            x86_imm_emit16 ((inst), (imm)); \
        } else { \
            *(inst)++ = (unsigned char)0xc7; \
            x86_membase_emit ((inst), 0, (basereg), (disp)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
    } while (0)
/* LEA r32, m — 0x8d /r */
#define x86_lea_mem(inst,reg,mem) \
    do { \
        *(inst)++ = (unsigned char)0x8d; \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_lea_membase(inst,reg,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0x8d; \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)
/* MOVZX/MOVSX: base 0F B6 (movzx byte); +8 for sign extension (BE/BF),
 * +1 for 16-bit source (B7/BF).  Restored the `*(inst)++ = op;` emission
 * that must follow the opcode computation. */
#define x86_widen_reg(inst,dreg,reg,is_signed,is_half) \
    do { \
        unsigned char op = 0xb6; \
        *(inst)++ = (unsigned char)0x0f; \
        if ((is_signed)) op += 0x08; \
        if ((is_half)) op += 0x01; \
        *(inst)++ = op; \
        x86_reg_emit ((inst), (dreg), (reg)); \
    } while (0)

#define x86_widen_mem(inst,dreg,mem,is_signed,is_half) \
    do { \
        unsigned char op = 0xb6; \
        *(inst)++ = (unsigned char)0x0f; \
        if ((is_signed)) op += 0x08; \
        if ((is_half)) op += 0x01; \
        *(inst)++ = op; \
        x86_mem_emit ((inst), (dreg), (mem)); \
    } while (0)

#define x86_widen_membase(inst,dreg,basereg,disp,is_signed,is_half) \
    do { \
        unsigned char op = 0xb6; \
        *(inst)++ = (unsigned char)0x0f; \
        if ((is_signed)) op += 0x08; \
        if ((is_half)) op += 0x01; \
        *(inst)++ = op; \
        x86_membase_emit ((inst), (dreg), (basereg), (disp)); \
    } while (0)
/* CDQ (0x99): sign-extend EAX into EDX:EAX.  WAIT/FWAIT (0x9b). */
#define x86_cdq(inst) do { *(inst)++ = (unsigned char)0x99; } while (0)
#define x86_wait(inst) do { *(inst)++ = (unsigned char)0x9b; } while (0)
/* FPU arithmetic on a memory operand: 0xd8 (float) / 0xdc (double), /opc. */
#define x86_fp_op_mem(inst,opc,mem,is_double) \
    do { \
        *(inst)++ = (is_double) ? (unsigned char)0xdc : (unsigned char)0xd8; \
        x86_mem_emit ((inst), (opc), (mem)); \
    } while (0)

/* FPU arithmetic ST(0), ST(index): 0xd8, 0xc0 + opc*8 + index. */
#define x86_fp_op(inst,opc,index) \
    do { \
        *(inst)++ = (unsigned char)0xd8; \
        *(inst)++ = (unsigned char)0xc0+((opc)<<3)+((index)&0x07); \
    } while (0)

/* FPU arithmetic ST(index), ST(0) (optionally popping): the reverse forms
 * swap sub/subr and div/divr, hence the opcode remap table. */
#define x86_fp_op_reg(inst,opc,index,pop_stack) \
    do { \
        static const unsigned char map[] = { 0, 1, 2, 3, 5, 4, 7, 6, 8}; \
        *(inst)++ = (pop_stack) ? (unsigned char)0xde : (unsigned char)0xdc; \
        *(inst)++ = (unsigned char)0xc0+(map[(opc)]<<3)+((index)&0x07); \
    } while (0)
/* FSTP ST(index) — DD D8+i */
#define x86_fstp(inst,index) \
    do { \
        *(inst)++ = (unsigned char)0xdd; \
        *(inst)++ = (unsigned char)0xd8+(index); \
    } while (0)

/* FCOMPP — DE D9 */
#define x86_fcompp(inst) \
    do { \
        *(inst)++ = (unsigned char)0xde; \
        *(inst)++ = (unsigned char)0xd9; \
    } while (0)

/* FNSTSW AX — DF E0 */
#define x86_fnstsw(inst) \
    do { \
        *(inst)++ = (unsigned char)0xdf; \
        *(inst)++ = (unsigned char)0xe0; \
    } while (0)

/* FNSTCW m16 — D9 /7 */
#define x86_fnstcw(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        x86_mem_emit ((inst), 7, (mem)); \
    } while (0)

#define x86_fnstcw_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        x86_membase_emit ((inst), 7, (basereg), (disp)); \
    } while (0)

/* FLDCW m16 — D9 /5 */
#define x86_fldcw(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        x86_mem_emit ((inst), 5, (mem)); \
    } while (0)

#define x86_fldcw_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        x86_membase_emit ((inst), 5, (basereg), (disp)); \
    } while (0)

/* FCHS — D9 E0 */
#define x86_fchs(inst) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        *(inst)++ = (unsigned char)0xe0; \
    } while (0)

/* FPREM — D9 F8 */
#define x86_frem(inst) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        *(inst)++ = (unsigned char)0xf8; \
    } while (0)

/* FXCH ST(index) — D9 C8+i */
#define x86_fxch(inst,index) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        *(inst)++ = (unsigned char)0xc8 + ((index) & 0x07); \
    } while (0)

/* FCOMIP ST(index) — DF F0+i */
#define x86_fcomip(inst,index) \
    do { \
        *(inst)++ = (unsigned char)0xdf; \
        *(inst)++ = (unsigned char)0xf0 + ((index) & 0x07); \
    } while (0)
/* FLD m32/m64 — D9 /0 (float) or DD /0 (double). */
#define x86_fld(inst,mem,is_double) \
    do { \
        *(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9; \
        x86_mem_emit ((inst), 0, (mem)); \
    } while (0)

#define x86_fld_membase(inst,basereg,disp,is_double) \
    do { \
        *(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9; \
        x86_membase_emit ((inst), 0, (basereg), (disp)); \
    } while (0)

/* FLD m80 — DB /5 */
#define x86_fld80(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0xdb; \
        x86_mem_emit ((inst), 5, (mem)); \
    } while (0)

#define x86_fld80_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xdb; \
        x86_membase_emit ((inst), 5, (basereg), (disp)); \
    } while (0)

/* FILD — DF /5 for 64-bit, DB /0 for 32-bit (restored is_long dispatch). */
#define x86_fild(inst,mem,is_long) \
    do { \
        if ((is_long)) { \
            *(inst)++ = (unsigned char)0xdf; \
            x86_mem_emit ((inst), 5, (mem)); \
        } else { \
            *(inst)++ = (unsigned char)0xdb; \
            x86_mem_emit ((inst), 0, (mem)); \
        } \
    } while (0)

#define x86_fild_membase(inst,basereg,disp,is_long) \
    do { \
        if ((is_long)) { \
            *(inst)++ = (unsigned char)0xdf; \
            x86_membase_emit ((inst), 5, (basereg), (disp)); \
        } else { \
            *(inst)++ = (unsigned char)0xdb; \
            x86_membase_emit ((inst), 0, (basereg), (disp)); \
        } \
    } while (0)

/* FLD ST(index) — D9 C0+i */
#define x86_fld_reg(inst,index) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        *(inst)++ = (unsigned char)0xc0 + ((index) & 0x07); \
    } while (0)

/* FLDZ — D9 EE */
#define x86_fldz(inst) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        *(inst)++ = (unsigned char)0xee; \
    } while (0)

/* FLD1 — D9 E8 */
#define x86_fld1(inst) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        *(inst)++ = (unsigned char)0xe8; \
    } while (0)
/* FST/FSTP m32/m64 — /2 stores, /3 stores and pops. */
#define x86_fst(inst,mem,is_double,pop_stack) \
    do { \
        *(inst)++ = (is_double) ? (unsigned char)0xdd: (unsigned char)0xd9; \
        x86_mem_emit ((inst), 2 + ((pop_stack) ? 1 : 0), (mem)); \
    } while (0)

#define x86_fst_membase(inst,basereg,disp,is_double,pop_stack) \
    do { \
        *(inst)++ = (is_double) ? (unsigned char)0xdd: (unsigned char)0xd9; \
        x86_membase_emit ((inst), 2 + ((pop_stack) ? 1 : 0), (basereg), (disp)); \
    } while (0)

/* FISTP — DF /7 for 64-bit, DB /3 for 32-bit (restored is_long dispatch). */
#define x86_fist_pop(inst,mem,is_long) \
    do { \
        if ((is_long)) { \
            *(inst)++ = (unsigned char)0xdf; \
            x86_mem_emit ((inst), 7, (mem)); \
        } else { \
            *(inst)++ = (unsigned char)0xdb; \
            x86_mem_emit ((inst), 3, (mem)); \
        } \
    } while (0)

#define x86_fist_pop_membase(inst,basereg,disp,is_long) \
    do { \
        if ((is_long)) { \
            *(inst)++ = (unsigned char)0xdf; \
            x86_membase_emit ((inst), 7, (basereg), (disp)); \
        } else { \
            *(inst)++ = (unsigned char)0xdb; \
            x86_membase_emit ((inst), 3, (basereg), (disp)); \
        } \
    } while (0)
/* PUSH r32 — short form 0x50+reg */
#define x86_push_reg(inst,reg) \
    do { \
        *(inst)++ = (unsigned char)0x50 + (reg); \
    } while (0)

/* PUSH [reg] — FF /6 */
#define x86_push_regp(inst,reg) \
    do { \
        *(inst)++ = (unsigned char)0xff; \
        x86_regp_emit ((inst), 6, (reg)); \
    } while (0)

#define x86_push_mem(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0xff; \
        x86_mem_emit ((inst), 6, (mem)); \
    } while (0)

#define x86_push_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xff; \
        x86_membase_emit ((inst), 6, (basereg), (disp)); \
    } while (0)
/* PUSH imm32 — 0x68 */
#define x86_push_imm(inst,imm) \
    do { \
        *(inst)++ = (unsigned char)0x68; \
        x86_imm_emit32 ((inst), (imm)); \
    } while (0)

/* POP r32 — short form 0x58+reg */
#define x86_pop_reg(inst,reg) \
    do { \
        *(inst)++ = (unsigned char)0x58 + (reg); \
    } while (0)

/* BUG FIX: POP r/m32 is opcode 0x8F /0; the previous 0x87 encoded
 * XCHG r32, r/m32 instead of a pop. */
#define x86_pop_mem(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0x8f; \
        x86_mem_emit ((inst), 0, (mem)); \
    } while (0)

#define x86_pop_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0x8f; \
        x86_membase_emit ((inst), 0, (basereg), (disp)); \
    } while (0)

#define x86_pushad(inst) do { *(inst)++ = (unsigned char)0x60; } while (0)
#define x86_pushfd(inst) do { *(inst)++ = (unsigned char)0x9c; } while (0)
#define x86_popad(inst)  do { *(inst)++ = (unsigned char)0x61; } while (0)
#define x86_popfd(inst)  do { *(inst)++ = (unsigned char)0x9d; } while (0)
/* JMP rel32 — E9 */
#define x86_jump32(inst,imm) \
    do { \
        *(inst)++ = (unsigned char)0xe9; \
        x86_imm_emit32 ((inst), (imm)); \
    } while (0)

/* JMP rel8 — EB */
#define x86_jump8(inst,imm) \
    do { \
        *(inst)++ = (unsigned char)0xeb; \
        x86_imm_emit8 ((inst), (imm)); \
    } while (0)

/* JMP r/m32 — FF /4 */
#define x86_jump_reg(inst,reg) \
    do { \
        *(inst)++ = (unsigned char)0xff; \
        x86_reg_emit ((inst), 4, (reg)); \
    } while (0)

#define x86_jump_mem(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0xff; \
        x86_mem_emit ((inst), 4, (mem)); \
    } while (0)

#define x86_jump_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xff; \
        x86_membase_emit ((inst), 4, (basereg), (disp)); \
    } while (0)
/*
 * target is a pointer in our buffer.
 */
/* Offsets are computed assuming the 2-byte short form; when the 5-byte rel32
 * form is chosen, 3 more opcode/immediate bytes sit before the target, so the
 * displacement is adjusted by -3 (restored adjustment). */
#define x86_jump_code(inst,target) \
    do { \
        int t = (target) - (inst) - 2; \
        if (x86_is_imm8(t)) { \
            x86_jump8 ((inst), t); \
        } else { \
            t -= 3; \
            x86_jump32 ((inst), t); \
        } \
    } while (0)

#define x86_jump_disp(inst,disp) \
    do { \
        int t = (disp) - 2; \
        if (x86_is_imm8(t)) { \
            x86_jump8 ((inst), t); \
        } else { \
            t -= 3; \
            x86_jump32 ((inst), t); \
        } \
    } while (0)
/* Jcc rel8: pick the condition opcode from the signed or unsigned map. */
#define x86_branch8(inst,cond,imm,is_signed) \
    do { \
        if ((is_signed)) \
            *(inst)++ = x86_cc_signed_map [(cond)]; \
        else \
            *(inst)++ = x86_cc_unsigned_map [(cond)]; \
        x86_imm_emit8 ((inst), (imm)); \
    } while (0)

/* Jcc rel32: 0F-prefixed, opcode = rel8 opcode + 0x10. */
#define x86_branch32(inst,cond,imm,is_signed) \
    do { \
        *(inst)++ = (unsigned char)0x0f; \
        if ((is_signed)) \
            *(inst)++ = x86_cc_signed_map [(cond)] + 0x10; \
        else \
            *(inst)++ = x86_cc_unsigned_map [(cond)] + 0x10; \
        x86_imm_emit32 ((inst), (imm)); \
    } while (0)
/* Offset computed for the 2-byte rel8 form; the rel32 form is 6 bytes, so
 * subtract the 4 extra bytes when falling back (restored adjustment). */
#define x86_branch(inst,cond,target,is_signed) \
    do { \
        int offset = (target) - (inst) - 2; \
        if (x86_is_imm8 ((offset))) \
            x86_branch8 ((inst), (cond), offset, (is_signed)); \
        else { \
            offset -= 4; \
            x86_branch32 ((inst), (cond), offset, (is_signed)); \
        } \
    } while (0)

#define x86_branch_disp(inst,cond,disp,is_signed) \
    do { \
        int offset = (disp) - 2; \
        if (x86_is_imm8 ((offset))) \
            x86_branch8 ((inst), (cond), offset, (is_signed)); \
        else { \
            offset -= 4; \
            x86_branch32 ((inst), (cond), offset, (is_signed)); \
        } \
    } while (0)
/* CALL rel32 — E8 */
#define x86_call_imm(inst,disp) \
    do { \
        *(inst)++ = (unsigned char)0xe8; \
        x86_imm_emit32 ((inst), (int)(disp)); \
    } while (0)

/* CALL r/m32 — FF /2 */
#define x86_call_reg(inst,reg) \
    do { \
        *(inst)++ = (unsigned char)0xff; \
        x86_reg_emit ((inst), 2, (reg)); \
    } while (0)

#define x86_call_mem(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0xff; \
        x86_mem_emit ((inst), 2, (mem)); \
    } while (0)

#define x86_call_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xff; \
        x86_membase_emit ((inst), 2, (basereg), (disp)); \
    } while (0)

/* CALL to a pointer in our buffer: rel32 is relative to the end of the
 * 5-byte CALL instruction, hence the -5 (restored adjustment). */
#define x86_call_code(inst,target) \
    do { \
        int offset = (unsigned char*)(target) - (inst); \
        offset -= 5; \
        x86_call_imm ((inst), offset); \
    } while (0)
/* RET — C3 */
#define x86_ret(inst) do { *(inst)++ = (unsigned char)0xc3; } while (0)

/* RET imm16 — C2; a zero pop count uses the plain 1-byte RET
 * (restored imm==0 short-circuit). */
#define x86_ret_imm(inst,imm) \
    do { \
        if ((imm) == 0) { \
            x86_ret ((inst)); \
        } else { \
            *(inst)++ = (unsigned char)0xc2; \
            x86_imm_emit16 ((inst), (imm)); \
        } \
    } while (0)
/* CMOVcc r32, r/m32 — 0F 4x /r; the condition byte is the Jcc rel8 opcode
 * minus 0x30 (restored is_signed dispatch). */
#define x86_cmov_reg(inst,cond,is_signed,dreg,reg) \
    do { \
        *(inst)++ = (unsigned char) 0x0f; \
        if ((is_signed)) \
            *(inst)++ = x86_cc_signed_map [(cond)] - 0x30; \
        else \
            *(inst)++ = x86_cc_unsigned_map [(cond)] - 0x30; \
        x86_reg_emit ((inst), (dreg), (reg)); \
    } while (0)

#define x86_cmov_mem(inst,cond,is_signed,reg,mem) \
    do { \
        *(inst)++ = (unsigned char) 0x0f; \
        if ((is_signed)) \
            *(inst)++ = x86_cc_signed_map [(cond)] - 0x30; \
        else \
            *(inst)++ = x86_cc_unsigned_map [(cond)] - 0x30; \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_cmov_membase(inst,cond,is_signed,reg,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char) 0x0f; \
        if ((is_signed)) \
            *(inst)++ = x86_cc_signed_map [(cond)] - 0x30; \
        else \
            *(inst)++ = x86_cc_unsigned_map [(cond)] - 0x30; \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)
/* ENTER framesize, 0 — C8 iw ib.  The instruction takes TWO immediates
 * (imm16 frame size, imm8 nesting level); the trailing zero nesting-level
 * byte is required for a valid encoding (restored). */
#define x86_enter(inst,framesize) \
    do { \
        *(inst)++ = (unsigned char)0xc8; \
        x86_imm_emit16 ((inst), (framesize)); \
        *(inst)++ = 0; \
    } while (0)

#define x86_leave(inst) do { *(inst)++ = (unsigned char)0xc9; } while (0)
#define x86_sahf(inst)  do { *(inst)++ = (unsigned char)0x9e; } while (0)

/* Two-byte x87 transcendental / misc ops (D9 xx). */
#define x86_fsin(inst)    do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xfe; } while (0)
#define x86_fcos(inst)    do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xff; } while (0)
#define x86_fabs(inst)    do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xe1; } while (0)
#define x86_fpatan(inst)  do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf3; } while (0)
#define x86_fprem(inst)   do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf8; } while (0)
#define x86_fprem1(inst)  do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf5; } while (0)
#define x86_frndint(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xfc; } while (0)
#define x86_fsqrt(inst)   do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xfa; } while (0)
#define x86_fptan(inst)   do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf2; } while (0)
/* Emit `size` bytes of padding using harmless instructions
 * (nop / mov eax,eax / lea with zero displacement), restored switch
 * framing and the missing `break;`s after the case-4 and case-6 arms. */
#define x86_padding(inst,size) \
    do { \
        switch ((size)) { \
        case 1: x86_nop ((inst)); break; \
        case 2: *(inst)++ = 0x8b; \
            *(inst)++ = 0xc0; break; \
        case 3: *(inst)++ = 0x8d; *(inst)++ = 0x6d; \
            *(inst)++ = 0x00; break; \
        case 4: *(inst)++ = 0x8d; *(inst)++ = 0x64; \
            *(inst)++ = 0x24; *(inst)++ = 0x00; \
            break; \
        case 5: *(inst)++ = 0x8d; *(inst)++ = 0x64; \
            *(inst)++ = 0x24; *(inst)++ = 0x00; \
            x86_nop ((inst)); break; \
        case 6: *(inst)++ = 0x8d; *(inst)++ = 0xad; \
            *(inst)++ = 0x00; *(inst)++ = 0x00; \
            *(inst)++ = 0x00; *(inst)++ = 0x00; \
            break; \
        case 7: *(inst)++ = 0x8d; *(inst)++ = 0xa4; \
            *(inst)++ = 0x24; *(inst)++ = 0x00; \
            *(inst)++ = 0x00; *(inst)++ = 0x00; \
            *(inst)++ = 0x00; break; \
        default: assert (0); \
        } \
    } while (0)
/* Standard prologue: ENTER to set up the frame, then push every register
 * selected in reg_mask, scanning from register 0 upward. */
#define x86_prolog(inst,frame_size,reg_mask) \
    do { \
        unsigned i, m = 1; \
        x86_enter ((inst), (frame_size)); \
        for (i = 0; i < X86_NREG; ++i, m <<= 1) { \
            if ((reg_mask) & m) \
                x86_push_reg ((inst), i); \
        } \
    } while (0)
1301 #define x86_epilog(inst,reg_mask) \
1303 unsigned i, m = 1 << X86_EDI; \
1304 for (i = X86_EDI; m != 0; i--, m=m>>1) { \
1305 if ((reg_mask) & m) \
1306 x86_pop_reg ((inst), i); \
1308 x86_leave ((inst)); \