1 /* Copyright (C) 2000 Intel Corporation. All rights reserved.
2 Copyright (C) 2001 Ximian, Inc.
4 // $Header: /home/miguel/third-conversion/public/mono/mono/arch/x86/x86-codegen.h,v 1.17 2001/11/27 10:30:39 lupus Exp $
11 // x86 register numbers
25 // opcodes for alu instructions
39 // opcodes for shift instructions
50 // opcodes for floating-point instructions
64 // integer conditions codes
83 X86_LOCK_PREFIX = 0xF0,
84 X86_REPNZ_PREFIX = 0xF2,
85 X86_REPZ_PREFIX = 0xF3,
86 X86_REP_PREFIX = 0xF3,
93 X86_OPERAND_PREFIX = 0x66,
94 X86_ADDRESS_PREFIX = 0x67
97 static const unsigned char
98 x86_cc_unsigned_map [X86_NCC] = {
111 static const unsigned char
112 x86_cc_signed_map [X86_NCC] = {
130 #define X86_NOBASEREG (-1)
/*
 * Bitvector masks for the individual registers.
 *
 * NOTE(review): the names X86_CALLEE_REGS / X86_CALLER_REGS are historical and
 * inverted w.r.t. the usual terminology: X86_CALLEE_REGS holds the volatile
 * (caller-saved) EAX/ECX/EDX, while X86_CALLER_REGS holds the callee-saved
 * EBX/EBP/ESI/EDI.  The names are kept unchanged for source compatibility.
 */
#define X86_ESI_MASK (1<<X86_ESI)
#define X86_EDI_MASK (1<<X86_EDI)
#define X86_EBX_MASK (1<<X86_EBX)
#define X86_EBP_MASK (1<<X86_EBP)

#define X86_CALLEE_REGS ((1<<X86_EAX) | (1<<X86_ECX) | (1<<X86_EDX))
#define X86_CALLER_REGS ((1<<X86_EBX) | (1<<X86_EBP) | (1<<X86_ESI) | (1<<X86_EDI))
#define X86_BYTE_REGS   ((1<<X86_EAX) | (1<<X86_ECX) | (1<<X86_EDX) | (1<<X86_EBX))

/* Fixed: these two previously tested the opposite mask, contradicting their
 * own comments (X86_IS_SCRATCH answered true for ESI/EDI/EBX/EBP and
 * X86_IS_CALLEE for EAX/ECX/EDX). */
#define X86_IS_SCRATCH(reg) (X86_CALLEE_REGS & (1 << (reg))) /* X86_EAX, X86_ECX, or X86_EDX */
#define X86_IS_CALLEE(reg)  (X86_CALLER_REGS & (1 << (reg))) /* X86_ESI, X86_EDI, X86_EBX, or X86_EBP */
150 // +--------------------------------+
151 // | in_arg[0] = var[0] |
152 // | in_arg[1] = var[1] |
154 // | in_arg[n_arg-1] = var[n_arg-1] |
155 // +--------------------------------+
157 // +--------------------------------+
158 // | saved EBP | <-- frame pointer (EBP)
159 // +--------------------------------+
161 // +--------------------------------+
163 // | var[n_arg+1] | local variables area
166 // +--------------------------------+
169 // | spill area | area for spilling mimic stack
171 // +--------------------------------+
173 // | ebp [ESP_Frame only] |
174 // | esi | 0..3 callee-saved regs
175 // | edi | <-- stack pointer (ESP)
176 // +--------------------------------+
178 // | stk1 | operand stack area/
179 // | . . . | out args
181 // +--------------------------------+
188 * useful building blocks
/* Emit a ModR/M (or SIB) byte: mod/scale in bits 7-6, reg/index in bits 5-3,
 * r/m/base in bits 2-0.  Each field is masked to its width. */
#define x86_address_byte(inst,m,o,r) \
	do { *(inst)++ = (unsigned char)((((m) & 0x03) << 6) | (((o) & 0x07) << 3) | ((r) & 0x07)); } while (0)
/*
 * Emit a 32-bit immediate in x86 (little-endian) byte order.
 * Fixed: the union-based version stored imb.b[0..3] in host memory order, so
 * the emitted bytes were only correct on a little-endian host; explicit
 * shifts are host-endianness-independent and need no type punning.
 */
#define x86_imm_emit32(inst,imm) \
	do { \
		int _x86_imm32_tmp = (int) (imm); \
		*(inst)++ = (unsigned char)(_x86_imm32_tmp & 0xff); \
		*(inst)++ = (unsigned char)((_x86_imm32_tmp >> 8) & 0xff); \
		*(inst)++ = (unsigned char)((_x86_imm32_tmp >> 16) & 0xff); \
		*(inst)++ = (unsigned char)((_x86_imm32_tmp >> 24) & 0xff); \
	} while (0)
/* Emit a 16-bit immediate, little-endian.  Fixed: the previous
 * `*(short*)(inst)` store was an unaligned, type-punned write (undefined
 * behavior under strict aliasing and a fault on alignment-checking hosts);
 * byte stores emit the same two bytes portably.  imm is evaluated once. */
#define x86_imm_emit16(inst,imm) \
	do { int _x86_imm16_tmp = (int) (imm); \
	     *(inst)++ = (unsigned char)(_x86_imm16_tmp & 0xff); \
	     *(inst)++ = (unsigned char)((_x86_imm16_tmp >> 8) & 0xff); } while (0)
/* Emit the low byte of imm at *inst and advance inst by one. */
#define x86_imm_emit8(inst,imm) do { *(inst)++ = (unsigned char)((imm) & 0xff); } while (0)
/* True when imm fits a sign-extended 8-bit immediate (-128..127). */
#define x86_is_imm8(imm) ((-128 <= (int)(imm)) && ((int)(imm) <= 127))
/* True when imm can be encoded in 16 bits, i.e. the union of the signed
 * (-32768..32767) and unsigned (0..65535) 16-bit ranges.  Fixed: the lower
 * bound was -(1<<16) (-65536), accepting values such as -40000 that do not
 * fit in 16 bits and would be silently truncated by x86_imm_emit16. */
#define x86_is_imm16(imm) (((int)(imm) >= -(1<<15) && (int)(imm) <= ((1<<16)-1)))
/* Register-direct operand: ModR/M byte with mod=3, reg field = r, r/m = regno. */
204 #define x86_reg_emit(inst,r,regno) do { x86_address_byte ((inst), 3, (r), (regno)); } while (0)
/* Memory operand [regno] with no displacement: ModR/M with mod=0.
 * NOTE(review): mod=0 with r/m=4 or 5 means SIB / disp32 in the encoding, so
 * regno presumably must not be X86_ESP or X86_EBP here -- confirm at call sites. */
205 #define x86_regp_emit(inst,r,regno) do { x86_address_byte ((inst), 0, (r), (regno)); } while (0)
/* Absolute-address operand [disp32]: ModR/M with mod=0, r/m=5, then the disp32. */
206 #define x86_mem_emit(inst,r,disp) do { x86_address_byte ((inst), 0, (r), 5); x86_imm_emit32((inst), (disp)); } while (0)
208 #define x86_membase_emit(inst,r,basereg,disp) do {\
209 if ((basereg) == X86_ESP) { \
211 x86_address_byte ((inst), 0, (r), X86_ESP); \
212 x86_address_byte ((inst), 0, X86_ESP, X86_ESP); \
213 } else if (x86_is_imm8((disp))) { \
214 x86_address_byte ((inst), 1, (r), X86_ESP); \
215 x86_address_byte ((inst), 0, X86_ESP, X86_ESP); \
216 x86_imm_emit8 ((inst), (disp)); \
218 x86_address_byte ((inst), 2, (r), X86_ESP); \
219 x86_address_byte ((inst), 0, X86_ESP, X86_ESP); \
220 x86_imm_emit32 ((inst), (disp)); \
224 if ((disp) == 0 && (basereg) != X86_EBP) { \
225 x86_address_byte ((inst), 0, (r), (basereg)); \
228 if (x86_is_imm8((disp))) { \
229 x86_address_byte ((inst), 1, (r), (basereg)); \
230 x86_imm_emit8 ((inst), (disp)); \
232 x86_address_byte ((inst), 2, (r), (basereg)); \
233 x86_imm_emit32 ((inst), (disp)); \
/*
 * Emit the ModR/M + SIB (+ displacement) sequence for a
 * [basereg + indexreg<<shift + disp] operand; r is the ModR/M reg field.
 *
 * Fixed: the general (32-bit displacement) case previously emitted mod=0
 * with SIB base=5 -- the "no base register, disp32" encoding -- silently
 * dropping basereg.  It now emits mod=2 with the real base register, per
 * the IA-32 ModR/M/SIB encoding rules.
 */
#define x86_memindex_emit(inst,r,basereg,disp,indexreg,shift)	\
	do {	\
		if ((basereg) == X86_NOBASEREG) {	\
			/* no base register: mod=0, SIB base=5, disp32 mandatory */	\
			x86_address_byte ((inst), 0, (r), 4);	\
			x86_address_byte ((inst), (shift), (indexreg), 5);	\
			x86_imm_emit32 ((inst), (disp));	\
		} else if ((disp) == 0 && (basereg) != X86_EBP) {	\
			/* no displacement (EBP cannot use the mod=0 form) */	\
			x86_address_byte ((inst), 0, (r), 4);	\
			x86_address_byte ((inst), (shift), (indexreg), (basereg));	\
		} else if (x86_is_imm8((disp))) {	\
			x86_address_byte ((inst), 1, (r), 4);	\
			x86_address_byte ((inst), (shift), (indexreg), (basereg));	\
			x86_imm_emit8 ((inst), (disp));	\
		} else {	\
			x86_address_byte ((inst), 2, (r), 4);	\
			x86_address_byte ((inst), (shift), (indexreg), (basereg));	\
			x86_imm_emit32 ((inst), (disp));	\
		}	\
	} while (0)
257 #define x86_breakpoint(inst) \
/* CLD (0xFC): clear the direction flag so string instructions move upward. */
262 #define x86_cld(inst) do { *(inst)++ =(unsigned char)0xfc; } while (0)
/* STOSB (0xAA): store AL at [EDI] and advance EDI. */
263 #define x86_stosb(inst) do { *(inst)++ =(unsigned char)0xaa; } while (0)
/* STOSD (0xAB): store EAX at [EDI] and advance EDI. */
264 #define x86_stosl(inst) do { *(inst)++ =(unsigned char)0xab; } while (0)
/* Emit a raw prefix byte p (e.g. X86_LOCK_PREFIX) in front of the next instruction. */
266 #define x86_prefix(inst,p) do { *(inst)++ =(unsigned char) (p); } while (0)
268 #define x86_rdtsc(inst) \
274 #define x86_cmpxchg_reg_reg(inst,dreg,reg) \
276 *(inst)++ = (unsigned char)0x0f; \
277 *(inst)++ = (unsigned char)0xb1; \
278 x86_reg_emit ((inst), (reg), (dreg)); \
281 #define x86_cmpxchg_mem_reg(inst,mem,reg) \
283 *(inst)++ = (unsigned char)0x0f; \
284 *(inst)++ = (unsigned char)0xb1; \
285 x86_mem_emit ((inst), (reg), (mem)); \
288 #define x86_cmpxchg_membase_reg(inst,basereg,disp,reg) \
290 *(inst)++ = (unsigned char)0x0f; \
291 *(inst)++ = (unsigned char)0xb1; \
292 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
295 #define x86_xchg_reg_reg(inst,dreg,reg,size) \
298 *(inst)++ = (unsigned char)0x86; \
300 *(inst)++ = (unsigned char)0x87; \
301 x86_reg_emit ((inst), (reg), (dreg)); \
304 #define x86_xchg_mem_reg(inst,mem,reg,size) \
307 *(inst)++ = (unsigned char)0x86; \
309 *(inst)++ = (unsigned char)0x87; \
310 x86_mem_emit ((inst), (reg), (mem)); \
313 #define x86_xchg_membase_reg(inst,basereg,disp,reg,size) \
316 *(inst)++ = (unsigned char)0x86; \
318 *(inst)++ = (unsigned char)0x87; \
319 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
322 #define x86_inc_mem(inst,mem) \
324 *(inst)++ = (unsigned char)0xff; \
325 x86_mem_emit ((inst), 0, (mem)); \
328 #define x86_inc_membase(inst,basereg,disp) \
330 *(inst)++ = (unsigned char)0xff; \
331 x86_membase_emit ((inst), 0, (basereg), (disp)); \
334 #define x86_inc_reg(inst,reg) do { *(inst)++ = (unsigned char)0x40 + (reg); } while (0)
336 #define x86_dec_mem(inst,mem) \
338 *(inst)++ = (unsigned char)0xff; \
339 x86_mem_emit ((inst), 1, (mem)); \
342 #define x86_dec_membase(inst,basereg,disp) \
344 *(inst)++ = (unsigned char)0xff; \
345 x86_membase_emit ((inst), 1, (basereg), (disp)); \
348 #define x86_dec_reg(inst,reg) do { *(inst)++ = (unsigned char)0x48 + (reg); } while (0)
350 #define x86_not_mem(inst,mem) \
352 *(inst)++ = (unsigned char)0xf7; \
353 x86_mem_emit ((inst), 2, (mem)); \
356 #define x86_not_membase(inst,basereg,disp) \
358 *(inst)++ = (unsigned char)0xf7; \
359 x86_membase_emit ((inst), 2, (basereg), (disp)); \
362 #define x86_not_reg(inst,reg) \
364 *(inst)++ = (unsigned char)0xf7; \
365 x86_reg_emit ((inst), 2, (reg)); \
368 #define x86_neg_mem(inst,mem) \
370 *(inst)++ = (unsigned char)0xf7; \
371 x86_mem_emit ((inst), 3, (mem)); \
374 #define x86_neg_membase(inst,basereg,disp) \
376 *(inst)++ = (unsigned char)0xf7; \
377 x86_membase_emit ((inst), 3, (basereg), (disp)); \
380 #define x86_neg_reg(inst,reg) \
382 *(inst)++ = (unsigned char)0xf7; \
383 x86_reg_emit ((inst), 3, (reg)); \
386 #define x86_nop(inst) do { *(inst)++ = (unsigned char)0x90; } while (0)
388 #define x86_alu_reg_imm(inst,opc,reg,imm) \
390 if ((reg) == X86_EAX) { \
391 *(inst)++ = (((unsigned char)(opc)) << 3) + 5; \
392 x86_imm_emit32 ((inst), (imm)); \
395 if (x86_is_imm8((imm))) { \
396 *(inst)++ = (unsigned char)0x83; \
397 x86_reg_emit ((inst), (opc), (reg)); \
398 x86_imm_emit8 ((inst), (imm)); \
400 *(inst)++ = (unsigned char)0x81; \
401 x86_reg_emit ((inst), (opc), (reg)); \
402 x86_imm_emit32 ((inst), (imm)); \
406 #define x86_alu_mem_imm(inst,opc,mem,imm) \
408 if (x86_is_imm8((imm))) { \
409 *(inst)++ = (unsigned char)0x83; \
410 x86_mem_emit ((inst), (opc), (mem)); \
411 x86_imm_emit8 ((inst), (imm)); \
413 *(inst)++ = (unsigned char)0x81; \
414 x86_mem_emit ((inst), (opc), (mem)); \
415 x86_imm_emit32 ((inst), (imm)); \
419 #define x86_alu_membase_imm(inst,opc,basereg,disp,imm) \
421 if (x86_is_imm8((imm))) { \
422 *(inst)++ = (unsigned char)0x83; \
423 x86_membase_emit ((inst), (opc), (basereg), (disp)); \
424 x86_imm_emit8 ((inst), (imm)); \
426 *(inst)++ = (unsigned char)0x81; \
427 x86_membase_emit ((inst), (opc), (basereg), (disp)); \
428 x86_imm_emit32 ((inst), (imm)); \
432 #define x86_alu_mem_reg(inst,opc,mem,reg) \
434 *(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
435 x86_mem_emit ((inst), (reg), (mem)); \
438 #define x86_alu_membase_reg(inst,opc,basereg,disp,reg) \
440 *(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
441 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
444 #define x86_alu_reg_reg(inst,opc,dreg,reg) \
446 *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
447 x86_reg_emit ((inst), (dreg), (reg)); \
450 #define x86_alu_reg_mem(inst,opc,reg,mem) \
452 *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
453 x86_mem_emit ((inst), (reg), (mem)); \
456 #define x86_alu_reg_membase(inst,opc,reg,basereg,disp) \
458 *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
459 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
462 #define x86_test_reg_imm(inst,reg,imm) \
464 if ((reg) == X86_EAX) { \
465 *(inst)++ = (unsigned char)0xa9; \
467 *(inst)++ = (unsigned char)0xf7; \
468 x86_reg_emit ((inst), 0, (reg)); \
470 x86_imm_emit32 ((inst), (imm)); \
473 #define x86_test_mem_imm(inst,mem,imm) \
475 *(inst)++ = (unsigned char)0xf7; \
476 x86_mem_emit ((inst), 0, (mem)); \
477 x86_imm_emit32 ((inst), (imm)); \
480 #define x86_test_membase_imm(inst,basereg,disp,imm) \
482 *(inst)++ = (unsigned char)0xf7; \
483 x86_membase_emit ((inst), 0, (basereg), (disp)); \
484 x86_imm_emit32 ((inst), (imm)); \
487 #define x86_test_reg_reg(inst,dreg,reg) \
489 *(inst)++ = (unsigned char)0x85; \
490 x86_reg_emit ((inst), (reg), (dreg)); \
493 #define x86_test_mem_reg(inst,mem,reg) \
495 *(inst)++ = (unsigned char)0x85; \
496 x86_mem_emit ((inst), (reg), (mem)); \
499 #define x86_test_membase_reg(inst,basereg,disp,reg) \
501 *(inst)++ = (unsigned char)0x85; \
502 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
505 #define x86_shift_reg_imm(inst,opc,reg,imm) \
508 *(inst)++ = (unsigned char)0xd1; \
509 x86_reg_emit ((inst), (opc), (reg)); \
511 *(inst)++ = (unsigned char)0xc1; \
512 x86_reg_emit ((inst), (opc), (reg)); \
513 x86_imm_emit8 ((inst), (imm)); \
517 #define x86_shift_mem_imm(inst,opc,mem,imm) \
520 *(inst)++ = (unsigned char)0xd1; \
521 x86_mem_emit ((inst), (opc), (mem)); \
523 *(inst)++ = (unsigned char)0xc1; \
524 x86_mem_emit ((inst), (opc), (mem)); \
525 x86_imm_emit8 ((inst), (imm)); \
529 #define x86_shift_membase_imm(inst,opc,basereg,disp,imm) \
532 *(inst)++ = (unsigned char)0xd1; \
533 x86_membase_emit ((inst), (opc), (basereg), (disp)); \
535 *(inst)++ = (unsigned char)0xc1; \
536 x86_membase_emit ((inst), (opc), (basereg), (disp)); \
537 x86_imm_emit8 ((inst), (imm)); \
541 #define x86_shift_reg(inst,opc,reg) \
543 *(inst)++ = (unsigned char)0xd3; \
544 x86_reg_emit ((inst), (opc), (reg)); \
547 #define x86_shift_mem(inst,opc,mem) \
549 *(inst)++ = (unsigned char)0xd3; \
550 x86_mem_emit ((inst), (opc), (mem)); \
553 #define x86_shift_membase(inst,opc,basereg,disp) \
555 *(inst)++ = (unsigned char)0xd3; \
556 x86_membase_emit ((inst), (opc), (basereg), (disp)); \
560 * Multi op shift missing.
566 #define x86_mul_reg(inst,reg,is_signed) \
568 *(inst)++ = (unsigned char)0xf7; \
569 x86_reg_emit ((inst), 4 + ((is_signed) ? 1 : 0), (reg)); \
572 #define x86_mul_mem(inst,mem,is_signed) \
574 *(inst)++ = (unsigned char)0xf7; \
575 x86_mem_emit ((inst), 4 + ((is_signed) ? 1 : 0), (mem)); \
578 #define x86_mul_membase(inst,basereg,disp,is_signed) \
580 *(inst)++ = (unsigned char)0xf7; \
581 x86_membase_emit ((inst), 4 + ((is_signed) ? 1 : 0), (basereg), (disp)); \
587 #define x86_imul_reg_reg(inst,dreg,reg) \
589 *(inst)++ = (unsigned char)0x0f; \
590 *(inst)++ = (unsigned char)0xaf; \
591 x86_reg_emit ((inst), (dreg), (reg)); \
594 #define x86_imul_reg_mem(inst,reg,mem) \
596 *(inst)++ = (unsigned char)0x0f; \
597 *(inst)++ = (unsigned char)0xaf; \
598 x86_mem_emit ((inst), (reg), (mem)); \
601 #define x86_imul_reg_membase(inst,reg,basereg,disp) \
603 *(inst)++ = (unsigned char)0x0f; \
604 *(inst)++ = (unsigned char)0xaf; \
605 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
611 #define x86_imul_reg_reg_imm(inst,dreg,reg,imm) \
613 if (x86_is_imm8 ((imm))) { \
614 *(inst)++ = (unsigned char)0x6b; \
615 x86_reg_emit ((inst), (dreg), (reg)); \
616 x86_imm_emit8 ((inst), (imm)); \
618 *(inst)++ = (unsigned char)0x69; \
619 x86_reg_emit ((inst), (dreg), (reg)); \
620 x86_imm_emit32 ((inst), (imm)); \
/*
 * IMUL reg, [mem], imm : reg = [mem] * imm.
 * Uses opcode 0x6B with a sign-extended 8-bit immediate when it fits,
 * otherwise 0x69 with a 32-bit immediate.
 *
 * Fixed: the 32-bit-immediate branch called x86_reg_emit with `mem` as a
 * register number, encoding a register operand instead of the memory operand
 * (compare x86_imul_reg_membase_imm, which uses the memory emitter in both
 * branches).  It now uses x86_mem_emit like the imm8 branch.
 */
#define x86_imul_reg_mem_imm(inst,reg,mem,imm)	\
	do {	\
		if (x86_is_imm8 ((imm))) {	\
			*(inst)++ = (unsigned char)0x6b;	\
			x86_mem_emit ((inst), (reg), (mem));	\
			x86_imm_emit8 ((inst), (imm));	\
		} else {	\
			*(inst)++ = (unsigned char)0x69;	\
			x86_mem_emit ((inst), (reg), (mem));	\
			x86_imm_emit32 ((inst), (imm));	\
		}	\
	} while (0)
637 #define x86_imul_reg_membase_imm(inst,reg,basereg,disp,imm) \
639 if (x86_is_imm8 ((imm))) { \
640 *(inst)++ = (unsigned char)0x6b; \
641 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
642 x86_imm_emit8 ((inst), (imm)); \
644 *(inst)++ = (unsigned char)0x69; \
645 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
646 x86_imm_emit32 ((inst), (imm)); \
651 * divide EDX:EAX by rm;
652 * eax = quotient, edx = remainder
655 #define x86_div_reg(inst,reg,is_signed) \
657 *(inst)++ = (unsigned char)0xf7; \
658 x86_reg_emit ((inst), 6 + ((is_signed) ? 1 : 0), (reg)); \
661 #define x86_div_mem(inst,mem,is_signed) \
663 *(inst)++ = (unsigned char)0xf7; \
664 x86_mem_emit ((inst), 6 + ((is_signed) ? 1 : 0), (mem)); \
667 #define x86_div_membase(inst,basereg,disp,is_signed) \
669 *(inst)++ = (unsigned char)0xf7; \
670 x86_membase_emit ((inst), 6 + ((is_signed) ? 1 : 0), (basereg), (disp)); \
673 #define x86_mov_mem_reg(inst,mem,reg,size) \
676 case 1: *(inst)++ = (unsigned char)0x88; break; \
677 case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
678 case 4: *(inst)++ = (unsigned char)0x89; break; \
679 default: assert (0); \
681 x86_mem_emit ((inst), (reg), (mem)); \
684 #define x86_mov_regp_reg(inst,regp,reg,size) \
687 case 1: *(inst)++ = (unsigned char)0x88; break; \
688 case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
689 case 4: *(inst)++ = (unsigned char)0x89; break; \
690 default: assert (0); \
692 x86_regp_emit ((inst), (reg), (regp)); \
695 #define x86_mov_membase_reg(inst,basereg,disp,reg,size) \
698 case 1: *(inst)++ = (unsigned char)0x88; break; \
699 case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
700 case 4: *(inst)++ = (unsigned char)0x89; break; \
701 default: assert (0); \
703 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
706 #define x86_mov_memindex_reg(inst,basereg,disp,indexreg,shift,reg,size) \
709 case 1: *(inst)++ = (unsigned char)0x88; break; \
710 case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
711 case 4: *(inst)++ = (unsigned char)0x89; break; \
712 default: assert (0); \
714 x86_memindex_emit ((inst), (reg), (basereg), (disp), (indexreg), (shift)); \
717 #define x86_mov_reg_reg(inst,dreg,reg,size) \
720 case 1: *(inst)++ = (unsigned char)0x8a; break; \
721 case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
722 case 4: *(inst)++ = (unsigned char)0x8b; break; \
723 default: assert (0); \
725 x86_reg_emit ((inst), (dreg), (reg)); \
728 #define x86_mov_reg_mem(inst,reg,mem,size) \
731 case 1: *(inst)++ = (unsigned char)0x8a; break; \
732 case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
733 case 4: *(inst)++ = (unsigned char)0x8b; break; \
734 default: assert (0); \
736 x86_mem_emit ((inst), (reg), (mem)); \
739 #define x86_mov_reg_membase(inst,reg,basereg,disp,size) \
742 case 1: *(inst)++ = (unsigned char)0x8a; break; \
743 case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
744 case 4: *(inst)++ = (unsigned char)0x8b; break; \
745 default: assert (0); \
747 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
750 #define x86_mov_reg_memindex(inst,reg,basereg,disp,indexreg,shift,size) \
753 case 1: *(inst)++ = (unsigned char)0x8a; break; \
754 case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
755 case 4: *(inst)++ = (unsigned char)0x8b; break; \
756 default: assert (0); \
758 x86_memindex_emit ((inst), (reg), (basereg), (disp), (indexreg), (shift)); \
761 #define x86_mov_reg_imm(inst,reg,imm) \
764 x86_alu_reg_reg ((inst), X86_XOR, (reg), (reg)); \
766 *(inst)++ = (unsigned char)0xb8 + (reg); \
767 x86_imm_emit32 ((inst), (imm)); \
771 #define x86_mov_mem_imm(inst,mem,imm,size) \
774 *(inst)++ = (unsigned char)0xc6; \
775 x86_mem_emit ((inst), 0, (mem)); \
776 x86_imm_emit8 ((inst), (imm)); \
777 } else if ((size) == 2) { \
778 *(inst)++ = (unsigned char)0x66; \
779 *(inst)++ = (unsigned char)0xc7; \
780 x86_mem_emit ((inst), 0, (mem)); \
781 x86_imm_emit16 ((inst), (imm)); \
783 *(inst)++ = (unsigned char)0xc7; \
784 x86_mem_emit ((inst), 0, (mem)); \
785 x86_imm_emit32 ((inst), (imm)); \
789 #define x86_mov_membase_imm(inst,basereg,disp,imm,size) \
792 *(inst)++ = (unsigned char)0xc6; \
793 x86_membase_emit ((inst), 0, (basereg), (disp)); \
794 x86_imm_emit8 ((inst), (imm)); \
795 } else if ((size) == 2) { \
796 *(inst)++ = (unsigned char)0x66; \
797 *(inst)++ = (unsigned char)0xc7; \
798 x86_membase_emit ((inst), 0, (basereg), (disp)); \
799 x86_imm_emit16 ((inst), (imm)); \
801 *(inst)++ = (unsigned char)0xc7; \
802 x86_membase_emit ((inst), 0, (basereg), (disp)); \
803 x86_imm_emit32 ((inst), (imm)); \
807 #define x86_lea_mem(inst,reg,mem) \
809 *(inst)++ = (unsigned char)0x8d; \
810 x86_mem_emit ((inst), (reg), (mem)); \
813 #define x86_lea_membase(inst,reg,basereg,disp) \
815 *(inst)++ = (unsigned char)0x8d; \
816 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
819 #define x86_lea_memindex(inst,reg,basereg,disp,indexreg,shift) \
821 *(inst)++ = (unsigned char)0x8d; \
822 x86_memindex_emit ((inst), (reg), (basereg), (disp), (indexreg), (shift)); \
825 #define x86_widen_reg(inst,dreg,reg,is_signed,is_half) \
827 unsigned char op = 0xb6; \
828 *(inst)++ = (unsigned char)0x0f; \
829 if ((is_signed)) op += 0x08; \
830 if ((is_half)) op += 0x01; \
832 x86_reg_emit ((inst), (dreg), (reg)); \
835 #define x86_widen_mem(inst,dreg,mem,is_signed,is_half) \
837 unsigned char op = 0xb6; \
838 *(inst)++ = (unsigned char)0x0f; \
839 if ((is_signed)) op += 0x08; \
840 if ((is_half)) op += 0x01; \
842 x86_mem_emit ((inst), (dreg), (mem)); \
845 #define x86_widen_membase(inst,dreg,basereg,disp,is_signed,is_half) \
847 unsigned char op = 0xb6; \
848 *(inst)++ = (unsigned char)0x0f; \
849 if ((is_signed)) op += 0x08; \
850 if ((is_half)) op += 0x01; \
852 x86_membase_emit ((inst), (dreg), (basereg), (disp)); \
855 #define x86_widen_memindex(inst,dreg,basereg,disp,indexreg,shift,is_signed,is_half) \
857 unsigned char op = 0xb6; \
858 *(inst)++ = (unsigned char)0x0f; \
859 if ((is_signed)) op += 0x08; \
860 if ((is_half)) op += 0x01; \
862 x86_memindex_emit ((inst), (dreg), (basereg), (disp), (indexreg), (shift)); \
/* CDQ (0x99): sign-extend EAX into EDX:EAX (used before IDIV). */
865 #define x86_cdq(inst) do { *(inst)++ = (unsigned char)0x99; } while (0)
/* WAIT/FWAIT (0x9B): wait for pending unmasked x87 exceptions. */
866 #define x86_wait(inst) do { *(inst)++ = (unsigned char)0x9b; } while (0)
868 #define x86_fp_op_mem(inst,opc,mem,is_double) \
870 *(inst)++ = (is_double) ? (unsigned char)0xdc : (unsigned char)0xd8; \
871 x86_mem_emit ((inst), (opc), (mem)); \
874 #define x86_fp_op(inst,opc,index) \
876 *(inst)++ = (unsigned char)0xd8; \
877 *(inst)++ = (unsigned char)0xc0+((opc)<<3)+((index)&0x07); \
880 #define x86_fp_op_reg(inst,opc,index,pop_stack) \
882 static const unsigned char map[] = { 0, 1, 2, 3, 5, 4, 7, 6, 8}; \
883 *(inst)++ = (pop_stack) ? (unsigned char)0xde : (unsigned char)0xdc; \
884 *(inst)++ = (unsigned char)0xc0+(map[(opc)]<<3)+((index)&0x07); \
887 #define x86_fstp(inst,index) \
889 *(inst)++ = (unsigned char)0xdd; \
890 *(inst)++ = (unsigned char)0xd8+(index); \
893 #define x86_fcompp(inst) \
895 *(inst)++ = (unsigned char)0xde; \
896 *(inst)++ = (unsigned char)0xd9; \
899 #define x86_fnstsw(inst) \
901 *(inst)++ = (unsigned char)0xdf; \
902 *(inst)++ = (unsigned char)0xe0; \
905 #define x86_fnstcw(inst,mem) \
907 *(inst)++ = (unsigned char)0xd9; \
908 x86_mem_emit ((inst), 7, (mem)); \
911 #define x86_fnstcw_membase(inst,basereg,disp) \
913 *(inst)++ = (unsigned char)0xd9; \
914 x86_membase_emit ((inst), 7, (basereg), (disp)); \
917 #define x86_fldcw(inst,mem) \
919 *(inst)++ = (unsigned char)0xd9; \
920 x86_mem_emit ((inst), 5, (mem)); \
923 #define x86_fldcw_membase(inst,basereg,disp) \
925 *(inst)++ = (unsigned char)0xd9; \
926 x86_membase_emit ((inst), 5, (basereg), (disp)); \
929 #define x86_fchs(inst) \
931 *(inst)++ = (unsigned char)0xd9; \
932 *(inst)++ = (unsigned char)0xe0; \
935 #define x86_frem(inst) \
937 *(inst)++ = (unsigned char)0xd9; \
938 *(inst)++ = (unsigned char)0xf8; \
941 #define x86_fxch(inst,index) \
943 *(inst)++ = (unsigned char)0xd9; \
944 *(inst)++ = (unsigned char)0xc8 + ((index) & 0x07); \
947 #define x86_fcomip(inst,index) \
949 *(inst)++ = (unsigned char)0xdf; \
950 *(inst)++ = (unsigned char)0xf0 + ((index) & 0x07); \
953 #define x86_fld(inst,mem,is_double) \
955 *(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9; \
956 x86_mem_emit ((inst), 0, (mem)); \
959 #define x86_fld_membase(inst,basereg,disp,is_double) \
961 *(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9; \
962 x86_membase_emit ((inst), 0, (basereg), (disp)); \
965 #define x86_fld80(inst,mem) \
967 *(inst)++ = (unsigned char)0xdb; \
968 x86_mem_emit ((inst), 5, (mem)); \
971 #define x86_fld80_membase(inst,basereg,disp) \
973 *(inst)++ = (unsigned char)0xdb; \
974 x86_membase_emit ((inst), 5, (basereg), (disp)); \
977 #define x86_fild(inst,mem,is_long) \
980 *(inst)++ = (unsigned char)0xdf; \
981 x86_mem_emit ((inst), 5, (mem)); \
983 *(inst)++ = (unsigned char)0xdb; \
984 x86_mem_emit ((inst), 0, (mem)); \
988 #define x86_fild_membase(inst,basereg,disp,is_long) \
991 *(inst)++ = (unsigned char)0xdf; \
992 x86_membase_emit ((inst), 5, (basereg), (disp)); \
994 *(inst)++ = (unsigned char)0xdb; \
995 x86_membase_emit ((inst), 0, (basereg), (disp)); \
999 #define x86_fld_reg(inst,index) \
1001 *(inst)++ = (unsigned char)0xd9; \
1002 *(inst)++ = (unsigned char)0xc0 + ((index) & 0x07); \
1005 #define x86_fldz(inst) \
1007 *(inst)++ = (unsigned char)0xd9; \
1008 *(inst)++ = (unsigned char)0xee; \
1011 #define x86_fld1(inst) \
1013 *(inst)++ = (unsigned char)0xd9; \
1014 *(inst)++ = (unsigned char)0xe8; \
1017 #define x86_fst(inst,mem,is_double,pop_stack) \
1019 *(inst)++ = (is_double) ? (unsigned char)0xdd: (unsigned char)0xd9; \
1020 x86_mem_emit ((inst), 2 + ((pop_stack) ? 1 : 0), (mem)); \
1023 #define x86_fst_membase(inst,basereg,disp,is_double,pop_stack) \
1025 *(inst)++ = (is_double) ? (unsigned char)0xdd: (unsigned char)0xd9; \
1026 x86_membase_emit ((inst), 2 + ((pop_stack) ? 1 : 0), (basereg), (disp)); \
1029 #define x86_fist_pop(inst,mem,is_long) \
1032 *(inst)++ = (unsigned char)0xdf; \
1033 x86_mem_emit ((inst), 7, (mem)); \
1035 *(inst)++ = (unsigned char)0xdb; \
1036 x86_mem_emit ((inst), 3, (mem)); \
1040 #define x86_fist_pop_membase(inst,basereg,disp,is_long) \
1043 *(inst)++ = (unsigned char)0xdf; \
1044 x86_membase_emit ((inst), 7, (basereg), (disp)); \
1046 *(inst)++ = (unsigned char)0xdb; \
1047 x86_membase_emit ((inst), 3, (basereg), (disp)); \
1051 #define x86_push_reg(inst,reg) \
1053 *(inst)++ = (unsigned char)0x50 + (reg); \
1056 #define x86_push_regp(inst,reg) \
1058 *(inst)++ = (unsigned char)0xff; \
1059 x86_regp_emit ((inst), 6, (reg)); \
1062 #define x86_push_mem(inst,mem) \
1064 *(inst)++ = (unsigned char)0xff; \
1065 x86_mem_emit ((inst), 6, (mem)); \
1068 #define x86_push_membase(inst,basereg,disp) \
1070 *(inst)++ = (unsigned char)0xff; \
1071 x86_membase_emit ((inst), 6, (basereg), (disp)); \
1074 #define x86_push_imm(inst,imm) \
1076 *(inst)++ = (unsigned char)0x68; \
1077 x86_imm_emit32 ((inst), (imm)); \
1080 #define x86_pop_reg(inst,reg) \
1082 *(inst)++ = (unsigned char)0x58 + (reg); \
/*
 * POP into a 32-bit absolute memory location: POP r/m32.
 * Fixed: the opcode was 0x87, which is XCHG r32, r/m32; POP r/m32 is
 * 0x8F with ModR/M reg field /0 (Intel SDM vol. 2).
 */
#define x86_pop_mem(inst,mem)	\
	do {	\
		*(inst)++ = (unsigned char)0x8f;	\
		x86_mem_emit ((inst), 0, (mem));	\
	} while (0)
/*
 * POP into [basereg + disp]: POP r/m32.
 * Fixed: the opcode was 0x87 (XCHG r32, r/m32); POP r/m32 is 0x8F with
 * ModR/M reg field /0 (Intel SDM vol. 2).
 */
#define x86_pop_membase(inst,basereg,disp)	\
	do {	\
		*(inst)++ = (unsigned char)0x8f;	\
		x86_membase_emit ((inst), 0, (basereg), (disp));	\
	} while (0)
/* PUSHAD (0x60): push all eight general-purpose registers. */
1097 #define x86_pushad(inst) do { *(inst)++ = (unsigned char)0x60; } while (0)
/* PUSHFD (0x9C): push EFLAGS. */
1098 #define x86_pushfd(inst) do { *(inst)++ = (unsigned char)0x9c; } while (0)
/* POPAD (0x61): pop all eight general-purpose registers. */
1099 #define x86_popad(inst) do { *(inst)++ = (unsigned char)0x61; } while (0)
/* POPFD (0x9D): pop EFLAGS. */
1100 #define x86_popfd(inst) do { *(inst)++ = (unsigned char)0x9d; } while (0)
1102 #define x86_jump32(inst,imm) \
1104 *(inst)++ = (unsigned char)0xe9; \
1105 x86_imm_emit32 ((inst), (imm)); \
1108 #define x86_jump8(inst,imm) \
1110 *(inst)++ = (unsigned char)0xeb; \
1111 x86_imm_emit8 ((inst), (imm)); \
1114 #define x86_jump_reg(inst,reg) \
1116 *(inst)++ = (unsigned char)0xff; \
1117 x86_reg_emit ((inst), 4, (reg)); \
1120 #define x86_jump_mem(inst,mem) \
1122 *(inst)++ = (unsigned char)0xff; \
1123 x86_mem_emit ((inst), 4, (mem)); \
1126 #define x86_jump_membase(inst,basereg,disp) \
1128 *(inst)++ = (unsigned char)0xff; \
1129 x86_membase_emit ((inst), 4, (basereg), (disp)); \
1133 * target is a pointer in our buffer.
1135 #define x86_jump_code(inst,target) \
1137 int t = (unsigned char*)(target) - (inst) - 2; \
1138 if (x86_is_imm8(t)) { \
1139 x86_jump8 ((inst), t); \
1142 x86_jump32 ((inst), t); \
1146 #define x86_jump_disp(inst,disp) \
1148 int t = (disp) - 2; \
1149 if (x86_is_imm8(t)) { \
1150 x86_jump8 ((inst), t); \
1153 x86_jump32 ((inst), t); \
1157 #define x86_branch8(inst,cond,imm,is_signed) \
1160 *(inst)++ = x86_cc_signed_map [(cond)]; \
1162 *(inst)++ = x86_cc_unsigned_map [(cond)]; \
1163 x86_imm_emit8 ((inst), (imm)); \
1166 #define x86_branch32(inst,cond,imm,is_signed) \
1168 *(inst)++ = (unsigned char)0x0f; \
1170 *(inst)++ = x86_cc_signed_map [(cond)] + 0x10; \
1172 *(inst)++ = x86_cc_unsigned_map [(cond)] + 0x10; \
1173 x86_imm_emit32 ((inst), (imm)); \
1176 #define x86_branch(inst,cond,target,is_signed) \
1178 int offset = (target) - (inst) - 2; \
1179 if (x86_is_imm8 ((offset))) \
1180 x86_branch8 ((inst), (cond), offset, (is_signed)); \
1183 x86_branch32 ((inst), (cond), offset, (is_signed)); \
1187 #define x86_branch_disp(inst,cond,disp,is_signed) \
1189 int offset = (disp) - 2; \
1190 if (x86_is_imm8 ((offset))) \
1191 x86_branch8 ((inst), (cond), offset, (is_signed)); \
1194 x86_branch32 ((inst), (cond), offset, (is_signed)); \
1198 #define x86_set_reg(inst,cond,reg,is_signed) \
1200 *(inst)++ = (unsigned char)0x0f; \
1202 *(inst)++ = x86_cc_signed_map [(cond)] + 0x20; \
1204 *(inst)++ = x86_cc_unsigned_map [(cond)] + 0x20; \
1205 x86_reg_emit ((inst), 0, (reg)); \
1208 #define x86_set_mem(inst,cond,mem,is_signed) \
1210 *(inst)++ = (unsigned char)0x0f; \
1212 *(inst)++ = x86_cc_signed_map [(cond)] + 0x20; \
1214 *(inst)++ = x86_cc_unsigned_map [(cond)] + 0x20; \
1215 x86_mem_emit ((inst), 0, (mem)); \
1218 #define x86_set_membase(inst,cond,basereg,disp,is_signed) \
1220 *(inst)++ = (unsigned char)0x0f; \
1222 *(inst)++ = x86_cc_signed_map [(cond)] + 0x20; \
1224 *(inst)++ = x86_cc_unsigned_map [(cond)] + 0x20; \
1225 x86_membase_emit ((inst), 0, (basereg), (disp)); \
1228 #define x86_call_imm(inst,disp) \
1230 *(inst)++ = (unsigned char)0xe8; \
1231 x86_imm_emit32 ((inst), (int)(disp)); \
1234 #define x86_call_reg(inst,reg) \
1236 *(inst)++ = (unsigned char)0xff; \
1237 x86_reg_emit ((inst), 2, (reg)); \
1240 #define x86_call_mem(inst,mem) \
1242 *(inst)++ = (unsigned char)0xff; \
1243 x86_mem_emit ((inst), 2, (mem)); \
1246 #define x86_call_membase(inst,basereg,disp) \
1248 *(inst)++ = (unsigned char)0xff; \
1249 x86_membase_emit ((inst), 2, (basereg), (disp)); \
1252 #define x86_call_code(inst,target) \
1254 int offset = (unsigned char*)(target) - (inst); \
1256 x86_call_imm ((inst), offset); \
1259 #define x86_ret(inst) do { *(inst)++ = (unsigned char)0xc3; } while (0)
1261 #define x86_ret_imm(inst,imm) \
1266 *(inst)++ = (unsigned char)0xc2; \
1267 x86_imm_emit16 ((inst), (imm)); \
1271 #define x86_cmov_reg(inst,cond,is_signed,dreg,reg) \
1273 *(inst)++ = (unsigned char) 0x0f; \
1275 *(inst)++ = x86_cc_signed_map [(cond)] - 0x30; \
1277 *(inst)++ = x86_cc_unsigned_map [(cond)] - 0x30; \
1278 x86_reg_emit ((inst), (dreg), (reg)); \
1281 #define x86_cmov_mem(inst,cond,is_signed,reg,mem) \
1283 *(inst)++ = (unsigned char) 0x0f; \
1285 *(inst)++ = x86_cc_signed_map [(cond)] - 0x30; \
1287 *(inst)++ = x86_cc_unsigned_map [(cond)] - 0x30; \
1288 x86_mem_emit ((inst), (reg), (mem)); \
1291 #define x86_cmov_membase(inst,cond,is_signed,reg,basereg,disp) \
1293 *(inst)++ = (unsigned char) 0x0f; \
1295 *(inst)++ = x86_cc_signed_map [(cond)] - 0x30; \
1297 *(inst)++ = x86_cc_unsigned_map [(cond)] - 0x30; \
1298 x86_membase_emit ((inst), (reg), (basereg), (disp)); \
1301 #define x86_enter(inst,framesize) \
1303 *(inst)++ = (unsigned char)0xc8; \
1304 x86_imm_emit16 ((inst), (framesize)); \
/* LEAVE (0xC9): mov esp,ebp; pop ebp -- tear down the stack frame. */
1308 #define x86_leave(inst) do { *(inst)++ = (unsigned char)0xc9; } while (0)
/* SAHF (0x9E): load AH into the low byte of EFLAGS (pairs with FNSTSW AX). */
1309 #define x86_sahf(inst) do { *(inst)++ = (unsigned char)0x9e; } while (0)
/* x87 no-operand instructions, all two-byte opcodes 0xD9 xx, operating on ST(0)
 * (and ST(1) where the instruction takes two stack operands). */
1311 #define x86_fsin(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xfe; } while (0)
1312 #define x86_fcos(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xff; } while (0)
1313 #define x86_fabs(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xe1; } while (0)
1314 #define x86_fpatan(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf3; } while (0)
1315 #define x86_fprem(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf8; } while (0)
1316 #define x86_fprem1(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf5; } while (0)
1317 #define x86_frndint(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xfc; } while (0)
1318 #define x86_fsqrt(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xfa; } while (0)
1319 #define x86_fptan(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf2; } while (0)
1321 #define x86_padding(inst,size) \
1324 case 1: x86_nop ((inst)); break; \
1325 case 2: *(inst)++ = 0x8b; \
1326 *(inst)++ = 0xc0; break; \
1327 case 3: *(inst)++ = 0x8d; *(inst)++ = 0x6d; \
1328 *(inst)++ = 0x00; break; \
1329 case 4: *(inst)++ = 0x8d; *(inst)++ = 0x64; \
1330 *(inst)++ = 0x24; *(inst)++ = 0x00; \
1332 case 5: *(inst)++ = 0x8d; *(inst)++ = 0x64; \
1333 *(inst)++ = 0x24; *(inst)++ = 0x00; \
1334 x86_nop ((inst)); break; \
1335 case 6: *(inst)++ = 0x8d; *(inst)++ = 0xad; \
1336 *(inst)++ = 0x00; *(inst)++ = 0x00; \
1337 *(inst)++ = 0x00; *(inst)++ = 0x00; \
1339 case 7: *(inst)++ = 0x8d; *(inst)++ = 0xa4; \
1340 *(inst)++ = 0x24; *(inst)++ = 0x00; \
1341 *(inst)++ = 0x00; *(inst)++ = 0x00; \
1342 *(inst)++ = 0x00; break; \
1343 default: assert (0); \
1347 #define x86_prolog(inst,frame_size,reg_mask) \
1349 unsigned i, m = 1; \
1350 x86_enter ((inst), (frame_size)); \
1351 for (i = 0; i < X86_NREG; ++i, m <<= 1) { \
1352 if ((reg_mask) & m) \
1353 x86_push_reg ((inst), i); \
1357 #define x86_epilog(inst,reg_mask) \
1359 unsigned i, m = 1 << X86_EDI; \
1360 for (i = X86_EDI; m != 0; i--, m=m>>1) { \
1361 if ((reg_mask) & m) \
1362 x86_pop_reg ((inst), i); \
1364 x86_leave ((inst)); \