2 * x86-codegen.h: Macros for generating x86 code
5 * Paolo Molaro (lupus@ximian.com)
6 * Intel Corporation (ORP Project)
7 * Sergey Chaban (serge@wildwestsoftware.com)
8 * Dietmar Maurer (dietmar@ximian.com)
11 * Copyright (C) 2000 Intel Corporation. All rights reserved.
12 * Copyright (C) 2001, 2002 Ximian, Inc.
19 // x86 register numbers
33 // opcodes for alu instructions
47 // opcodes for shift instructions
62 // opcodes for floating-point instructions
// integer condition codes
79 X86_CC_EQ = 0, X86_CC_E = 0, X86_CC_Z = 0,
80 X86_CC_NE = 1, X86_CC_NZ = 1,
81 X86_CC_LT = 2, X86_CC_B = 2, X86_CC_C = 2, X86_CC_NAE = 2,
82 X86_CC_LE = 3, X86_CC_BE = 3, X86_CC_NA = 3,
83 X86_CC_GT = 4, X86_CC_A = 4, X86_CC_NBE = 4,
84 X86_CC_GE = 5, X86_CC_AE = 5, X86_CC_NB = 5, X86_CC_NC = 5,
85 X86_CC_LZ = 6, X86_CC_S = 6,
86 X86_CC_GEZ = 7, X86_CC_NS = 7,
87 X86_CC_P = 8, X86_CC_PE = 8,
88 X86_CC_NP = 9, X86_CC_PO = 9,
100 X86_FP_CC_MASK = 0x4500
103 /* FP control word */
105 X86_FPCW_INVOPEX_MASK = 0x1,
106 X86_FPCW_DENOPEX_MASK = 0x2,
107 X86_FPCW_ZERODIV_MASK = 0x4,
108 X86_FPCW_OVFEX_MASK = 0x8,
109 X86_FPCW_UNDFEX_MASK = 0x10,
110 X86_FPCW_PRECEX_MASK = 0x20,
111 X86_FPCW_PRECC_MASK = 0x300,
112 X86_FPCW_ROUNDC_MASK = 0xc00,
114 /* values for precision control */
115 X86_FPCW_PREC_SINGLE = 0,
116 X86_FPCW_PREC_DOUBLE = 0x200,
117 X86_FPCW_PREC_EXTENDED = 0x300,
119 /* values for rounding control */
120 X86_FPCW_ROUND_NEAREST = 0,
121 X86_FPCW_ROUND_DOWN = 0x400,
122 X86_FPCW_ROUND_UP = 0x800,
123 X86_FPCW_ROUND_TOZERO = 0xc00
130 X86_LOCK_PREFIX = 0xF0,
131 X86_REPNZ_PREFIX = 0xF2,
132 X86_REPZ_PREFIX = 0xF3,
133 X86_REP_PREFIX = 0xF3,
134 X86_CS_PREFIX = 0x2E,
135 X86_SS_PREFIX = 0x36,
136 X86_DS_PREFIX = 0x3E,
137 X86_ES_PREFIX = 0x26,
138 X86_FS_PREFIX = 0x64,
139 X86_GS_PREFIX = 0x65,
140 X86_UNLIKELY_PREFIX = 0x2E,
141 X86_LIKELY_PREFIX = 0x3E,
142 X86_OPERAND_PREFIX = 0x66,
143 X86_ADDRESS_PREFIX = 0x67
146 static const unsigned char
147 x86_cc_unsigned_map [X86_NCC] = {
162 static const unsigned char
163 x86_cc_signed_map [X86_NCC] = {
#define X86_NOBASEREG (-1)

/* bitvector masks for the callee-saved registers (EBX, EBP, ESI, EDI) */
#define X86_ESI_MASK (1<<X86_ESI)
#define X86_EDI_MASK (1<<X86_EDI)
#define X86_EBX_MASK (1<<X86_EBX)
#define X86_EBP_MASK (1<<X86_EBP)

/* NOTE(review): the CALLEE/CALLER names are historically inverted in this
 * header: X86_CALLEE_REGS holds the caller-saved (scratch) registers and
 * X86_CALLER_REGS the callee-saved ones.  Names kept for compatibility. */
#define X86_CALLEE_REGS ((1<<X86_EAX) | (1<<X86_ECX) | (1<<X86_EDX))
#define X86_CALLER_REGS ((1<<X86_EBX) | (1<<X86_EBP) | (1<<X86_ESI) | (1<<X86_EDI))
#define X86_BYTE_REGS ((1<<X86_EAX) | (1<<X86_ECX) | (1<<X86_EDX) | (1<<X86_EBX))

/* Fixed: these two predicates previously tested the swapped masks, which
 * contradicted both their own comments and the mask definitions above. */
#define X86_IS_SCRATCH(reg) (X86_CALLEE_REGS & (1 << (reg))) /* X86_EAX, X86_ECX, or X86_EDX */
#define X86_IS_CALLEE(reg)  (X86_CALLER_REGS & (1 << (reg))) /* X86_ESI, X86_EDI, X86_EBX, or X86_EBP */

/* only AL/CL/DL/BL (register numbers 0-3) have an 8-bit low-byte encoding */
#define X86_IS_BYTE_REG(reg) ((reg) < 4)
205 // +--------------------------------+
206 // | in_arg[0] = var[0] |
207 // | in_arg[1] = var[1] |
209 // | in_arg[n_arg-1] = var[n_arg-1] |
210 // +--------------------------------+
212 // +--------------------------------+
213 // | saved EBP | <-- frame pointer (EBP)
214 // +--------------------------------+
216 // +--------------------------------+
218 // | var[n_arg+1] | local variables area
221 // +--------------------------------+
224 // | spill area | area for spilling mimic stack
// +--------------------------------+
228 // | ebp [ESP_Frame only] |
229 // | esi | 0..3 callee-saved regs
230 // | edi | <-- stack pointer (ESP)
231 // +--------------------------------+
233 // | stk1 | operand stack area/
234 // | . . . | out args
// +--------------------------------+
243 * useful building blocks
/*
 * Useful building blocks: ModR/M field extractors and raw byte / immediate
 * emitters.  Every emitter advances `inst` past what it writes.
 */
#define x86_modrm_mod(modrm) ((modrm) >> 6)
#define x86_modrm_reg(modrm) (((modrm) >> 3) & 0x7)
#define x86_modrm_rm(modrm) ((modrm) & 0x7)

/* Emit a ModR/M (or SIB) byte: mod/scale in bits 7-6, reg/index in 5-3, rm/base in 2-0. */
#define x86_address_byte(inst,m,o,r) do { *(inst)++ = ((((m)&0x03)<<6)|(((o)&0x07)<<3)|(((r)&0x07))); } while (0)

/* Emit a 32-bit immediate least-significant byte first (x86 is little-endian).
 * Byte-wise stores avoid misaligned wide stores and need no helper union. */
#define x86_imm_emit32(inst,imm) \
    do { \
        unsigned int _x86_imm32 = (unsigned int)(int)(imm); \
        *(inst)++ = (unsigned char)(_x86_imm32 & 0xff); \
        *(inst)++ = (unsigned char)((_x86_imm32 >> 8) & 0xff); \
        *(inst)++ = (unsigned char)((_x86_imm32 >> 16) & 0xff); \
        *(inst)++ = (unsigned char)((_x86_imm32 >> 24) & 0xff); \
    } while (0)

/* Fixed: the old form stored through a (potentially misaligned) short*,
 * which is undefined behavior; emit the two bytes individually instead
 * (identical output on the little-endian x86 target). */
#define x86_imm_emit16(inst,imm) \
    do { \
        unsigned int _x86_imm16 = (unsigned int)(int)(imm); \
        *(inst)++ = (unsigned char)(_x86_imm16 & 0xff); \
        *(inst)++ = (unsigned char)((_x86_imm16 >> 8) & 0xff); \
    } while (0)

#define x86_imm_emit8(inst,imm) do { *(inst) = (unsigned char)((imm) & 0xff); ++(inst); } while (0)

/* Immediate range checks.  NOTE(review): the 16-bit check deliberately
 * admits the union of the signed and unsigned 16-bit ranges. */
#define x86_is_imm8(imm) (((int)(imm) >= -128 && (int)(imm) <= 127))
#define x86_is_imm16(imm) (((int)(imm) >= -(1<<16) && (int)(imm) <= ((1<<16)-1)))

/* register-direct (mod=3), register-indirect (mod=0) and absolute operands */
#define x86_reg_emit(inst,r,regno) do { x86_address_byte ((inst), 3, (r), (regno)); } while (0)
#define x86_reg8_emit(inst,r,regno,is_rh,is_rnoh) do {x86_address_byte ((inst), 3, (is_rh)?((r)|4):(r), (is_rnoh)?((regno)|4):(regno));} while (0)
#define x86_regp_emit(inst,r,regno) do { x86_address_byte ((inst), 0, (r), (regno)); } while (0)
#define x86_mem_emit(inst,r,disp) do { x86_address_byte ((inst), 0, (r), 5); x86_imm_emit32((inst), (disp)); } while (0)
/*
 * Emit a ModR/M (+ SIB, + displacement) for a [basereg + disp] operand.
 * ESP as a base always requires a SIB byte; EBP with mod=0 would mean
 * absolute addressing, so a zero displacement still uses the disp8 form
 * when basereg is EBP.
 */
#define x86_membase_emit(inst,r,basereg,disp) \
    do { \
        if ((basereg) == X86_ESP) { \
            if ((disp) == 0) { \
                x86_address_byte ((inst), 0, (r), X86_ESP); \
                x86_address_byte ((inst), 0, X86_ESP, X86_ESP); \
            } else if (x86_is_imm8((disp))) { \
                x86_address_byte ((inst), 1, (r), X86_ESP); \
                x86_address_byte ((inst), 0, X86_ESP, X86_ESP); \
                x86_imm_emit8 ((inst), (disp)); \
            } else { \
                x86_address_byte ((inst), 2, (r), X86_ESP); \
                x86_address_byte ((inst), 0, X86_ESP, X86_ESP); \
                x86_imm_emit32 ((inst), (disp)); \
            } \
            break; \
        } \
        if ((disp) == 0 && (basereg) != X86_EBP) { \
            x86_address_byte ((inst), 0, (r), (basereg)); \
            break; \
        } \
        if (x86_is_imm8((disp))) { \
            x86_address_byte ((inst), 1, (r), (basereg)); \
            x86_imm_emit8 ((inst), (disp)); \
        } else { \
            x86_address_byte ((inst), 2, (r), (basereg)); \
            x86_imm_emit32 ((inst), (disp)); \
        } \
    } while (0)
/*
 * Emit a SIB-form operand [basereg + indexreg<<shift + disp].
 * With no base register, mod=0 and SIB base=5 select disp32-only addressing.
 */
#define x86_memindex_emit(inst,r,basereg,disp,indexreg,shift) \
    do { \
        if ((basereg) == X86_NOBASEREG) { \
            x86_address_byte ((inst), 0, (r), 4); \
            x86_address_byte ((inst), (shift), (indexreg), 5); \
            x86_imm_emit32 ((inst), (disp)); \
        } else if ((disp) == 0 && (basereg) != X86_EBP) { \
            x86_address_byte ((inst), 0, (r), 4); \
            x86_address_byte ((inst), (shift), (indexreg), (basereg)); \
        } else if (x86_is_imm8((disp))) { \
            x86_address_byte ((inst), 1, (r), 4); \
            x86_address_byte ((inst), (shift), (indexreg), (basereg)); \
            x86_imm_emit8 ((inst), (disp)); \
        } else { \
            x86_address_byte ((inst), 2, (r), 4); \
            /* Fixed: SIB base field must encode basereg here, not the constant 5 */ \
            x86_address_byte ((inst), (shift), (indexreg), (basereg)); \
            x86_imm_emit32 ((inst), (disp)); \
        } \
    } while (0)
318 * target is the position in the code where to jump to:
320 * .. output loop code...
321 * x86_mov_reg_imm (code, X86_EAX, 0);
323 * x86_loop (code, -1);
327 * x86_patch (loop, target);
329 * ins should point at the start of the instruction that encodes a target.
330 * the instruction is inspected for validity and the correct displacement
/*
 * x86_patch: back-patch the branch/call instruction at `ins` so that it
 * targets `target`.  `ins` must point at the first opcode byte; the opcode
 * is inspected to determine the displacement size and position, and
 * assert() rejects unknown encodings or an out-of-range 8-bit displacement.
 */
#define x86_patch(ins,target) \
    do { \
        unsigned char* pos = (ins) + 1; \
        int disp, size = 0; \
        switch (*(unsigned char*)(ins)) { \
        case 0xe8: case 0xe9: ++size; break; /* call, jump32 */ \
        case 0x0f: if (!(*pos >= 0x70 && *pos <= 0x8f)) assert (0); \
           ++size; ++pos; break; /* prefix for 32-bit disp */ \
        case 0xe0: case 0xe1: case 0xe2: /* loop */ \
        case 0xeb: /* jump8 */ \
        /* conditional jump opcodes */ \
        case 0x70: case 0x71: case 0x72: case 0x73: \
        case 0x74: case 0x75: case 0x76: case 0x77: \
        case 0x78: case 0x79: case 0x7a: case 0x7b: \
        case 0x7c: case 0x7d: case 0x7e: case 0x7f: \
            break; \
        default: assert (0); \
        } \
        disp = (target) - pos; \
        if (size) x86_imm_emit32 (pos, disp - 4); \
        else if (x86_is_imm8 (disp - 1)) x86_imm_emit8 (pos, disp - 1); \
        else assert (0); \
    } while (0)
/* int3 software breakpoint (0xcc) */
#define x86_breakpoint(inst) \
    do { \
        *(inst)++ = 0xcc; \
    } while (0)

/* string-operation helpers: clear direction flag, store/move string element */
#define x86_cld(inst) do { *(inst)++ =(unsigned char)0xfc; } while (0)
#define x86_stosb(inst) do { *(inst)++ =(unsigned char)0xaa; } while (0)
#define x86_stosl(inst) do { *(inst)++ =(unsigned char)0xab; } while (0)
#define x86_stosd(inst) x86_stosl((inst))
#define x86_movsb(inst) do { *(inst)++ =(unsigned char)0xa4; } while (0)
#define x86_movsl(inst) do { *(inst)++ =(unsigned char)0xa5; } while (0)
#define x86_movsd(inst) x86_movsl((inst))

/* emit a raw prefix byte (lock, rep, segment, operand/address size, ...) */
#define x86_prefix(inst,p) do { *(inst)++ =(unsigned char) (p); } while (0)
/* rdtsc: read the time-stamp counter into EDX:EAX (0F 31) */
#define x86_rdtsc(inst) \
    do { \
        *(inst)++ = 0x0f; \
        *(inst)++ = 0x31; \
    } while (0)

/* cmpxchg (0F B1 /r): compare EAX with destination, store reg on equality */
#define x86_cmpxchg_reg_reg(inst,dreg,reg) \
    do { \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0xb1; \
        x86_reg_emit ((inst), (reg), (dreg)); \
    } while (0)

#define x86_cmpxchg_mem_reg(inst,mem,reg) \
    do { \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0xb1; \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_cmpxchg_membase_reg(inst,basereg,disp,reg) \
    do { \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0xb1; \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)
/* xchg: 0x86 for 8-bit operands, 0x87 for 32-bit */
#define x86_xchg_reg_reg(inst,dreg,reg,size) \
    do { \
        if ((size) == 1) \
            *(inst)++ = (unsigned char)0x86; \
        else \
            *(inst)++ = (unsigned char)0x87; \
        x86_reg_emit ((inst), (reg), (dreg)); \
    } while (0)

#define x86_xchg_mem_reg(inst,mem,reg,size) \
    do { \
        if ((size) == 1) \
            *(inst)++ = (unsigned char)0x86; \
        else \
            *(inst)++ = (unsigned char)0x87; \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_xchg_membase_reg(inst,basereg,disp,reg,size) \
    do { \
        if ((size) == 1) \
            *(inst)++ = (unsigned char)0x86; \
        else \
            *(inst)++ = (unsigned char)0x87; \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)

/* xadd (486+): 0F C0 for 8-bit operands, 0F C1 for 32-bit */
#define x86_xadd_reg_reg(inst,dreg,reg,size) \
    do { \
        *(inst)++ = (unsigned char)0x0F; \
        if ((size) == 1) \
            *(inst)++ = (unsigned char)0xC0; \
        else \
            *(inst)++ = (unsigned char)0xC1; \
        x86_reg_emit ((inst), (reg), (dreg)); \
    } while (0)

#define x86_xadd_mem_reg(inst,mem,reg,size) \
    do { \
        *(inst)++ = (unsigned char)0x0F; \
        if ((size) == 1) \
            *(inst)++ = (unsigned char)0xC0; \
        else \
            *(inst)++ = (unsigned char)0xC1; \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_xadd_membase_reg(inst,basereg,disp,reg,size) \
    do { \
        *(inst)++ = (unsigned char)0x0F; \
        if ((size) == 1) \
            *(inst)++ = (unsigned char)0xC0; \
        else \
            *(inst)++ = (unsigned char)0xC1; \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)
/* inc: 0xFF /0 for memory forms, short 0x40+reg for registers */
#define x86_inc_mem(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0xff; \
        x86_mem_emit ((inst), 0, (mem)); \
    } while (0)

#define x86_inc_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xff; \
        x86_membase_emit ((inst), 0, (basereg), (disp)); \
    } while (0)

#define x86_inc_reg(inst,reg) do { *(inst)++ = (unsigned char)0x40 + (reg); } while (0)

/* dec: 0xFF /1 for memory forms, short 0x48+reg for registers */
#define x86_dec_mem(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0xff; \
        x86_mem_emit ((inst), 1, (mem)); \
    } while (0)

#define x86_dec_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xff; \
        x86_membase_emit ((inst), 1, (basereg), (disp)); \
    } while (0)

#define x86_dec_reg(inst,reg) do { *(inst)++ = (unsigned char)0x48 + (reg); } while (0)

/* not (one's complement): 0xF7 /2 */
#define x86_not_mem(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_mem_emit ((inst), 2, (mem)); \
    } while (0)

#define x86_not_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_membase_emit ((inst), 2, (basereg), (disp)); \
    } while (0)

#define x86_not_reg(inst,reg) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_reg_emit ((inst), 2, (reg)); \
    } while (0)

/* neg (two's complement): 0xF7 /3 */
#define x86_neg_mem(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_mem_emit ((inst), 3, (mem)); \
    } while (0)

#define x86_neg_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_membase_emit ((inst), 3, (basereg), (disp)); \
    } while (0)

#define x86_neg_reg(inst,reg) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_reg_emit ((inst), 3, (reg)); \
    } while (0)

#define x86_nop(inst) do { *(inst)++ = (unsigned char)0x90; } while (0)
/*
 * ALU group (ADD/OR/ADC/SBB/AND/SUB/XOR/CMP).  `opc` is an X86_ALU_* number:
 * it forms part of the primary opcode for the (opc<<3)+N encodings, or goes
 * in the ModR/M reg field for the 0x80/0x81/0x83 immediate group.
 */
#define x86_alu_reg_imm(inst,opc,reg,imm) \
    do { \
        if ((reg) == X86_EAX) { \
            *(inst)++ = (((unsigned char)(opc)) << 3) + 5; /* short EAX,imm32 form */ \
            x86_imm_emit32 ((inst), (imm)); \
            break; \
        } \
        if (x86_is_imm8((imm))) { \
            *(inst)++ = (unsigned char)0x83; /* r/m32, sign-extended imm8 */ \
            x86_reg_emit ((inst), (opc), (reg)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else { \
            *(inst)++ = (unsigned char)0x81; /* r/m32, imm32 */ \
            x86_reg_emit ((inst), (opc), (reg)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
    } while (0)

#define x86_alu_mem_imm(inst,opc,mem,imm) \
    do { \
        if (x86_is_imm8((imm))) { \
            *(inst)++ = (unsigned char)0x83; \
            x86_mem_emit ((inst), (opc), (mem)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else { \
            *(inst)++ = (unsigned char)0x81; \
            x86_mem_emit ((inst), (opc), (mem)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
    } while (0)

#define x86_alu_membase_imm(inst,opc,basereg,disp,imm) \
    do { \
        if (x86_is_imm8((imm))) { \
            *(inst)++ = (unsigned char)0x83; \
            x86_membase_emit ((inst), (opc), (basereg), (disp)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else { \
            *(inst)++ = (unsigned char)0x81; \
            x86_membase_emit ((inst), (opc), (basereg), (disp)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
    } while (0)

/* 8-bit memory destination: 0x80 /opc, imm8 */
#define x86_alu_membase8_imm(inst,opc,basereg,disp,imm) \
    do { \
        *(inst)++ = (unsigned char)0x80; \
        x86_membase_emit ((inst), (opc), (basereg), (disp)); \
        x86_imm_emit8 ((inst), (imm)); \
    } while (0)

/* [mem] := [mem] opc reg  (opcode (opc<<3)+1) */
#define x86_alu_mem_reg(inst,opc,mem,reg) \
    do { \
        *(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_alu_membase_reg(inst,opc,basereg,disp,reg) \
    do { \
        *(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)

/* dreg := dreg opc reg  (opcode (opc<<3)+3, destination in the reg field) */
#define x86_alu_reg_reg(inst,opc,dreg,reg) \
    do { \
        *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
        x86_reg_emit ((inst), (dreg), (reg)); \
    } while (0)
/**
 * @x86_alu_reg8_reg8:
 * Supports ALU operations between two 8-bit registers.
 * dreg := dreg opc reg
 * X86_Reg_No enum is used to specify the registers.
 * Additionally is_*_h flags are used to specify what part
 * of a given 32-bit register is used - high (TRUE) or low (FALSE).
 * For example: dreg = X86_EAX, is_dreg_h = TRUE -> use AH
 */
#define x86_alu_reg8_reg8(inst,opc,dreg,reg,is_dreg_h,is_reg_h) \
    do { \
        *(inst)++ = (((unsigned char)(opc)) << 3) + 2; \
        x86_reg8_emit ((inst), (dreg), (reg), (is_dreg_h), (is_reg_h)); \
    } while (0)

/* reg := reg opc [mem] / [basereg+disp]  (opcode (opc<<3)+3) */
#define x86_alu_reg_mem(inst,opc,reg,mem) \
    do { \
        *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_alu_reg_membase(inst,opc,reg,basereg,disp) \
    do { \
        *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)
/* test: AND operands, set flags, discard result.  0xA9 short EAX form,
 * otherwise 0xF7 /0 with imm32; register form 0x85 /r. */
#define x86_test_reg_imm(inst,reg,imm) \
    do { \
        if ((reg) == X86_EAX) { \
            *(inst)++ = (unsigned char)0xa9; \
        } else { \
            *(inst)++ = (unsigned char)0xf7; \
            x86_reg_emit ((inst), 0, (reg)); \
        } \
        x86_imm_emit32 ((inst), (imm)); \
    } while (0)

#define x86_test_mem_imm(inst,mem,imm) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_mem_emit ((inst), 0, (mem)); \
        x86_imm_emit32 ((inst), (imm)); \
    } while (0)

#define x86_test_membase_imm(inst,basereg,disp,imm) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_membase_emit ((inst), 0, (basereg), (disp)); \
        x86_imm_emit32 ((inst), (imm)); \
    } while (0)

#define x86_test_reg_reg(inst,dreg,reg) \
    do { \
        *(inst)++ = (unsigned char)0x85; \
        x86_reg_emit ((inst), (reg), (dreg)); \
    } while (0)

#define x86_test_mem_reg(inst,mem,reg) \
    do { \
        *(inst)++ = (unsigned char)0x85; \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_test_membase_reg(inst,basereg,disp,reg) \
    do { \
        *(inst)++ = (unsigned char)0x85; \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)
/* Shift group (ROL/ROR/RCL/RCR/SHL/SHR/SAR), opc in the ModR/M reg field:
 * 0xD1 shifts by 1, 0xC1 by imm8, 0xD3 by CL. */
#define x86_shift_reg_imm(inst,opc,reg,imm) \
    do { \
        if ((imm) == 1) { \
            *(inst)++ = (unsigned char)0xd1; \
            x86_reg_emit ((inst), (opc), (reg)); \
        } else { \
            *(inst)++ = (unsigned char)0xc1; \
            x86_reg_emit ((inst), (opc), (reg)); \
            x86_imm_emit8 ((inst), (imm)); \
        } \
    } while (0)

#define x86_shift_mem_imm(inst,opc,mem,imm) \
    do { \
        if ((imm) == 1) { \
            *(inst)++ = (unsigned char)0xd1; \
            x86_mem_emit ((inst), (opc), (mem)); \
        } else { \
            *(inst)++ = (unsigned char)0xc1; \
            x86_mem_emit ((inst), (opc), (mem)); \
            x86_imm_emit8 ((inst), (imm)); \
        } \
    } while (0)

#define x86_shift_membase_imm(inst,opc,basereg,disp,imm) \
    do { \
        if ((imm) == 1) { \
            *(inst)++ = (unsigned char)0xd1; \
            x86_membase_emit ((inst), (opc), (basereg), (disp)); \
        } else { \
            *(inst)++ = (unsigned char)0xc1; \
            x86_membase_emit ((inst), (opc), (basereg), (disp)); \
            x86_imm_emit8 ((inst), (imm)); \
        } \
    } while (0)

/* shift by the count in CL */
#define x86_shift_reg(inst,opc,reg) \
    do { \
        *(inst)++ = (unsigned char)0xd3; \
        x86_reg_emit ((inst), (opc), (reg)); \
    } while (0)

#define x86_shift_mem(inst,opc,mem) \
    do { \
        *(inst)++ = (unsigned char)0xd3; \
        x86_mem_emit ((inst), (opc), (mem)); \
    } while (0)

#define x86_shift_membase(inst,opc,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xd3; \
        x86_membase_emit ((inst), (opc), (basereg), (disp)); \
    } while (0)
/*
 * Multi op shift missing.
 */

/* shrd/shld: double-precision shifts; count in CL (0F AD / 0F A5)
 * or an explicit imm8 (0F AC / 0F A4) */
#define x86_shrd_reg(inst,dreg,reg) \
    do { \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0xad; \
        x86_reg_emit ((inst), (reg), (dreg)); \
    } while (0)

#define x86_shrd_reg_imm(inst,dreg,reg,shamt) \
    do { \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0xac; \
        x86_reg_emit ((inst), (reg), (dreg)); \
        x86_imm_emit8 ((inst), (shamt)); \
    } while (0)

#define x86_shld_reg(inst,dreg,reg) \
    do { \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0xa5; \
        x86_reg_emit ((inst), (reg), (dreg)); \
    } while (0)

#define x86_shld_reg_imm(inst,dreg,reg,shamt) \
    do { \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0xa4; \
        x86_reg_emit ((inst), (reg), (dreg)); \
        x86_imm_emit8 ((inst), (shamt)); \
    } while (0)
/* one-operand mul/imul: 0xF7 /4 (unsigned) or /5 (signed);
 * EDX:EAX := EAX * operand */
#define x86_mul_reg(inst,reg,is_signed) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_reg_emit ((inst), 4 + ((is_signed) ? 1 : 0), (reg)); \
    } while (0)

#define x86_mul_mem(inst,mem,is_signed) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_mem_emit ((inst), 4 + ((is_signed) ? 1 : 0), (mem)); \
    } while (0)

#define x86_mul_membase(inst,basereg,disp,is_signed) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_membase_emit ((inst), 4 + ((is_signed) ? 1 : 0), (basereg), (disp)); \
    } while (0)

/* two-operand imul (0F AF): dreg := dreg * operand */
#define x86_imul_reg_reg(inst,dreg,reg) \
    do { \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0xaf; \
        x86_reg_emit ((inst), (dreg), (reg)); \
    } while (0)

#define x86_imul_reg_mem(inst,reg,mem) \
    do { \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0xaf; \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_imul_reg_membase(inst,reg,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0xaf; \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)
/* three-operand imul: 0x6B (sign-extended imm8) / 0x69 (imm32);
 * dest := source-operand * imm */
#define x86_imul_reg_reg_imm(inst,dreg,reg,imm) \
    do { \
        if (x86_is_imm8 ((imm))) { \
            *(inst)++ = (unsigned char)0x6b; \
            x86_reg_emit ((inst), (dreg), (reg)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else { \
            *(inst)++ = (unsigned char)0x69; \
            x86_reg_emit ((inst), (dreg), (reg)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
    } while (0)

#define x86_imul_reg_mem_imm(inst,reg,mem,imm) \
    do { \
        if (x86_is_imm8 ((imm))) { \
            *(inst)++ = (unsigned char)0x6b; \
            x86_mem_emit ((inst), (reg), (mem)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else { \
            *(inst)++ = (unsigned char)0x69; \
            /* Fixed: previously used x86_reg_emit here, mis-encoding the memory operand */ \
            x86_mem_emit ((inst), (reg), (mem)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
    } while (0)

#define x86_imul_reg_membase_imm(inst,reg,basereg,disp,imm) \
    do { \
        if (x86_is_imm8 ((imm))) { \
            *(inst)++ = (unsigned char)0x6b; \
            x86_membase_emit ((inst), (reg), (basereg), (disp)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else { \
            *(inst)++ = (unsigned char)0x69; \
            x86_membase_emit ((inst), (reg), (basereg), (disp)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
    } while (0)
/*
 * divide EDX:EAX by rm (0xF7 /6 unsigned, /7 signed);
 * eax = quotient, edx = remainder
 */
#define x86_div_reg(inst,reg,is_signed) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_reg_emit ((inst), 6 + ((is_signed) ? 1 : 0), (reg)); \
    } while (0)

#define x86_div_mem(inst,mem,is_signed) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_mem_emit ((inst), 6 + ((is_signed) ? 1 : 0), (mem)); \
    } while (0)

#define x86_div_membase(inst,basereg,disp,is_signed) \
    do { \
        *(inst)++ = (unsigned char)0xf7; \
        x86_membase_emit ((inst), 6 + ((is_signed) ? 1 : 0), (basereg), (disp)); \
    } while (0)
/* store mov forms: 0x88 (8-bit) / 0x66 0x89 (16-bit) / 0x89 (32-bit) */
#define x86_mov_mem_reg(inst,mem,reg,size) \
    do { \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x88; break; \
        case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
        case 4: *(inst)++ = (unsigned char)0x89; break; \
        default: assert (0); \
        } \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_mov_regp_reg(inst,regp,reg,size) \
    do { \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x88; break; \
        case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
        case 4: *(inst)++ = (unsigned char)0x89; break; \
        default: assert (0); \
        } \
        x86_regp_emit ((inst), (reg), (regp)); \
    } while (0)

#define x86_mov_membase_reg(inst,basereg,disp,reg,size) \
    do { \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x88; break; \
        case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
        case 4: *(inst)++ = (unsigned char)0x89; break; \
        default: assert (0); \
        } \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)

#define x86_mov_memindex_reg(inst,basereg,disp,indexreg,shift,reg,size) \
    do { \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x88; break; \
        case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
        case 4: *(inst)++ = (unsigned char)0x89; break; \
        default: assert (0); \
        } \
        x86_memindex_emit ((inst), (reg), (basereg), (disp), (indexreg), (shift)); \
    } while (0)
/* load mov forms: 0x8A (8-bit) / 0x66 0x8B (16-bit) / 0x8B (32-bit) */
#define x86_mov_reg_reg(inst,dreg,reg,size) \
    do { \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x8a; break; \
        case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
        case 4: *(inst)++ = (unsigned char)0x8b; break; \
        default: assert (0); \
        } \
        x86_reg_emit ((inst), (dreg), (reg)); \
    } while (0)

#define x86_mov_reg_mem(inst,reg,mem,size) \
    do { \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x8a; break; \
        case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
        case 4: *(inst)++ = (unsigned char)0x8b; break; \
        default: assert (0); \
        } \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_mov_reg_membase(inst,reg,basereg,disp,size) \
    do { \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x8a; break; \
        case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
        case 4: *(inst)++ = (unsigned char)0x8b; break; \
        default: assert (0); \
        } \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)

#define x86_mov_reg_memindex(inst,reg,basereg,disp,indexreg,shift,size) \
    do { \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x8a; break; \
        case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
        case 4: *(inst)++ = (unsigned char)0x8b; break; \
        default: assert (0); \
        } \
        x86_memindex_emit ((inst), (reg), (basereg), (disp), (indexreg), (shift)); \
    } while (0)
 * Note: x86_clear_reg () changes the condition code!
/* reg := 0 via xor reg,reg (shorter than mov reg,0; clobbers the flags) */
#define x86_clear_reg(inst,reg) x86_alu_reg_reg((inst), X86_XOR, (reg), (reg))

/* mov reg, imm32: short form 0xB8+reg */
#define x86_mov_reg_imm(inst,reg,imm) \
    do { \
        *(inst)++ = (unsigned char)0xb8 + (reg); \
        x86_imm_emit32 ((inst), (imm)); \
    } while (0)
/* mov [dest], imm: 0xC6 /0 imm8, 0x66 0xC7 /0 imm16, 0xC7 /0 imm32 */
#define x86_mov_mem_imm(inst,mem,imm,size) \
    do { \
        if ((size) == 1) { \
            *(inst)++ = (unsigned char)0xc6; \
            x86_mem_emit ((inst), 0, (mem)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else if ((size) == 2) { \
            *(inst)++ = (unsigned char)0x66; \
            *(inst)++ = (unsigned char)0xc7; \
            x86_mem_emit ((inst), 0, (mem)); \
            x86_imm_emit16 ((inst), (imm)); \
        } else { \
            *(inst)++ = (unsigned char)0xc7; \
            x86_mem_emit ((inst), 0, (mem)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
    } while (0)

#define x86_mov_membase_imm(inst,basereg,disp,imm,size) \
    do { \
        if ((size) == 1) { \
            *(inst)++ = (unsigned char)0xc6; \
            x86_membase_emit ((inst), 0, (basereg), (disp)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else if ((size) == 2) { \
            *(inst)++ = (unsigned char)0x66; \
            *(inst)++ = (unsigned char)0xc7; \
            x86_membase_emit ((inst), 0, (basereg), (disp)); \
            x86_imm_emit16 ((inst), (imm)); \
        } else { \
            *(inst)++ = (unsigned char)0xc7; \
            x86_membase_emit ((inst), 0, (basereg), (disp)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
    } while (0)

#define x86_mov_memindex_imm(inst,basereg,disp,indexreg,shift,imm,size) \
    do { \
        if ((size) == 1) { \
            *(inst)++ = (unsigned char)0xc6; \
            x86_memindex_emit ((inst), 0, (basereg), (disp), (indexreg), (shift)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else if ((size) == 2) { \
            *(inst)++ = (unsigned char)0x66; \
            *(inst)++ = (unsigned char)0xc7; \
            x86_memindex_emit ((inst), 0, (basereg), (disp), (indexreg), (shift)); \
            x86_imm_emit16 ((inst), (imm)); \
        } else { \
            *(inst)++ = (unsigned char)0xc7; \
            x86_memindex_emit ((inst), 0, (basereg), (disp), (indexreg), (shift)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
    } while (0)
/* lea (0x8D): load effective address of the operand into reg */
#define x86_lea_mem(inst,reg,mem) \
    do { \
        *(inst)++ = (unsigned char)0x8d; \
        x86_mem_emit ((inst), (reg), (mem)); \
    } while (0)

#define x86_lea_membase(inst,reg,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0x8d; \
        x86_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)

#define x86_lea_memindex(inst,reg,basereg,disp,indexreg,shift) \
    do { \
        *(inst)++ = (unsigned char)0x8d; \
        x86_memindex_emit ((inst), (reg), (basereg), (disp), (indexreg), (shift)); \
    } while (0)
/* movzx/movsx: widen an 8/16-bit source into dreg.  Base opcode 0F B6
 * (movzx r32, r/m8); +0x08 selects sign extension, +0x01 a 16-bit source. */
#define x86_widen_reg(inst,dreg,reg,is_signed,is_half) \
    do { \
        unsigned char op = 0xb6; \
        g_assert (is_half || X86_IS_BYTE_REG (reg)); \
        *(inst)++ = (unsigned char)0x0f; \
        if ((is_signed)) op += 0x08; \
        if ((is_half)) op += 0x01; \
        *(inst)++ = op; /* restored: the second opcode byte must be emitted */ \
        x86_reg_emit ((inst), (dreg), (reg)); \
    } while (0)

#define x86_widen_mem(inst,dreg,mem,is_signed,is_half) \
    do { \
        unsigned char op = 0xb6; \
        *(inst)++ = (unsigned char)0x0f; \
        if ((is_signed)) op += 0x08; \
        if ((is_half)) op += 0x01; \
        *(inst)++ = op; \
        x86_mem_emit ((inst), (dreg), (mem)); \
    } while (0)

#define x86_widen_membase(inst,dreg,basereg,disp,is_signed,is_half) \
    do { \
        unsigned char op = 0xb6; \
        *(inst)++ = (unsigned char)0x0f; \
        if ((is_signed)) op += 0x08; \
        if ((is_half)) op += 0x01; \
        *(inst)++ = op; \
        x86_membase_emit ((inst), (dreg), (basereg), (disp)); \
    } while (0)

#define x86_widen_memindex(inst,dreg,basereg,disp,indexreg,shift,is_signed,is_half) \
    do { \
        unsigned char op = 0xb6; \
        *(inst)++ = (unsigned char)0x0f; \
        if ((is_signed)) op += 0x08; \
        if ((is_half)) op += 0x01; \
        *(inst)++ = op; \
        x86_memindex_emit ((inst), (dreg), (basereg), (disp), (indexreg), (shift)); \
    } while (0)
/* cdq: sign-extend EAX into EDX:EAX (0x99, used before idiv).
 * wait/fwait: 0x9b, synchronize with pending FPU exceptions. */
#define x86_cdq(inst) do { *(inst)++ = (unsigned char)0x99; } while (0)
#define x86_wait(inst) do { *(inst)++ = (unsigned char)0x9b; } while (0)
/* FP arithmetic with a memory operand: 0xD8 (float) / 0xDC (double),
 * the X86_FP_Opcode goes in the ModR/M reg field. */
#define x86_fp_op_mem(inst,opc,mem,is_double) \
    do { \
        *(inst)++ = (is_double) ? (unsigned char)0xdc : (unsigned char)0xd8; \
        x86_mem_emit ((inst), (opc), (mem)); \
    } while (0)

#define x86_fp_op_membase(inst,opc,basereg,disp,is_double) \
    do { \
        *(inst)++ = (is_double) ? (unsigned char)0xdc : (unsigned char)0xd8; \
        x86_membase_emit ((inst), (opc), (basereg), (disp)); \
    } while (0)

/* ST(0) := ST(0) op ST(index) */
#define x86_fp_op(inst,opc,index) \
    do { \
        *(inst)++ = (unsigned char)0xd8; \
        *(inst)++ = (unsigned char)0xc0+((opc)<<3)+((index)&0x07); \
    } while (0)

/* ST(index) := ST(index) op ST(0), optionally popping the stack.
 * map[] accounts for the sub<->subr / div<->divr operand-order swap in
 * the 0xDC/0xDE encodings. */
#define x86_fp_op_reg(inst,opc,index,pop_stack) \
    do { \
        static const unsigned char map[] = { 0, 1, 2, 3, 5, 4, 7, 6, 8}; \
        *(inst)++ = (pop_stack) ? (unsigned char)0xde : (unsigned char)0xdc; \
        *(inst)++ = (unsigned char)0xc0+(map[(opc)]<<3)+((index)&0x07); \
    } while (0)
/**
 * @x86_fp_int_op_membase
 * Supports FPU operations between ST(0) and integer operand in memory.
 * Operation encoded using X86_FP_Opcode enum.
 * Operand is addressed by [basereg + disp].
 * is_int specifies whether operand is int32 (TRUE, 0xDA) or int16 (FALSE, 0xDE).
 */
#define x86_fp_int_op_membase(inst,opc,basereg,disp,is_int) \
    do { \
        *(inst)++ = (is_int) ? (unsigned char)0xda : (unsigned char)0xde; \
        x86_membase_emit ((inst), (opc), (basereg), (disp)); \
    } while (0)

/* fstp ST(index): DD D8+i */
#define x86_fstp(inst,index) \
    do { \
        *(inst)++ = (unsigned char)0xdd; \
        *(inst)++ = (unsigned char)0xd8+(index); \
    } while (0)

/* fcompp / fucompp: compare ST(0) with ST(1) and pop both */
#define x86_fcompp(inst) \
    do { \
        *(inst)++ = (unsigned char)0xde; \
        *(inst)++ = (unsigned char)0xd9; \
    } while (0)

#define x86_fucompp(inst) \
    do { \
        *(inst)++ = (unsigned char)0xda; \
        *(inst)++ = (unsigned char)0xe9; \
    } while (0)

/* fnstsw ax: store the FPU status word in AX without waiting */
#define x86_fnstsw(inst) \
    do { \
        *(inst)++ = (unsigned char)0xdf; \
        *(inst)++ = (unsigned char)0xe0; \
    } while (0)
/* FP control word: store (D9 /7) and load (D9 /5) */
#define x86_fnstcw(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        x86_mem_emit ((inst), 7, (mem)); \
    } while (0)

#define x86_fnstcw_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        x86_membase_emit ((inst), 7, (basereg), (disp)); \
    } while (0)

#define x86_fldcw(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        x86_mem_emit ((inst), 5, (mem)); \
    } while (0)

#define x86_fldcw_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        x86_membase_emit ((inst), 5, (basereg), (disp)); \
    } while (0)
/* fchs: negate ST(0) (D9 E0); frem: partial remainder (D9 F8) */
#define x86_fchs(inst) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        *(inst)++ = (unsigned char)0xe0; \
    } while (0)

#define x86_frem(inst) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        *(inst)++ = (unsigned char)0xf8; \
    } while (0)

/* fxch ST(index): D9 C8+i */
#define x86_fxch(inst,index) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        *(inst)++ = (unsigned char)0xc8 + ((index) & 0x07); \
    } while (0)

/* fcomi/fcomip/fucomi/fucomip (P6+): compare ST(0),ST(i) directly into EFLAGS */
#define x86_fcomi(inst,index) \
    do { \
        *(inst)++ = (unsigned char)0xdb; \
        *(inst)++ = (unsigned char)0xf0 + ((index) & 0x07); \
    } while (0)

#define x86_fcomip(inst,index) \
    do { \
        *(inst)++ = (unsigned char)0xdf; \
        *(inst)++ = (unsigned char)0xf0 + ((index) & 0x07); \
    } while (0)

#define x86_fucomi(inst,index) \
    do { \
        *(inst)++ = (unsigned char)0xdb; \
        *(inst)++ = (unsigned char)0xe8 + ((index) & 0x07); \
    } while (0)

#define x86_fucomip(inst,index) \
    do { \
        *(inst)++ = (unsigned char)0xdf; \
        *(inst)++ = (unsigned char)0xe8 + ((index) & 0x07); \
    } while (0)
/* fld: load float (D9 /0) or double (DD /0) from memory */
#define x86_fld(inst,mem,is_double) \
    do { \
        *(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9; \
        x86_mem_emit ((inst), 0, (mem)); \
    } while (0)

#define x86_fld_membase(inst,basereg,disp,is_double) \
    do { \
        *(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9; \
        x86_membase_emit ((inst), 0, (basereg), (disp)); \
    } while (0)

/* 80-bit extended-precision load: DB /5 */
#define x86_fld80_mem(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0xdb; \
        x86_mem_emit ((inst), 5, (mem)); \
    } while (0)

#define x86_fld80_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xdb; \
        x86_membase_emit ((inst), 5, (basereg), (disp)); \
    } while (0)

/* fild: load int64 (DF /5) or int32 (DB /0) */
#define x86_fild(inst,mem,is_long) \
    do { \
        if ((is_long)) { \
            *(inst)++ = (unsigned char)0xdf; \
            x86_mem_emit ((inst), 5, (mem)); \
        } else { \
            *(inst)++ = (unsigned char)0xdb; \
            x86_mem_emit ((inst), 0, (mem)); \
        } \
    } while (0)

#define x86_fild_membase(inst,basereg,disp,is_long) \
    do { \
        if ((is_long)) { \
            *(inst)++ = (unsigned char)0xdf; \
            x86_membase_emit ((inst), 5, (basereg), (disp)); \
        } else { \
            *(inst)++ = (unsigned char)0xdb; \
            x86_membase_emit ((inst), 0, (basereg), (disp)); \
        } \
    } while (0)
/* fld ST(index): D9 C0+i; and FP constant loads (0.0, 1.0, pi) */
#define x86_fld_reg(inst,index) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        *(inst)++ = (unsigned char)0xc0 + ((index) & 0x07); \
    } while (0)

#define x86_fldz(inst) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        *(inst)++ = (unsigned char)0xee; \
    } while (0)

#define x86_fld1(inst) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        *(inst)++ = (unsigned char)0xe8; \
    } while (0)

#define x86_fldpi(inst) \
    do { \
        *(inst)++ = (unsigned char)0xd9; \
        *(inst)++ = (unsigned char)0xeb; \
    } while (0)
/* fst/fstp to memory: /2 stores, /3 stores and pops; 0xD9 float, 0xDD double */
#define x86_fst(inst,mem,is_double,pop_stack) \
    do { \
        *(inst)++ = (is_double) ? (unsigned char)0xdd: (unsigned char)0xd9; \
        x86_mem_emit ((inst), 2 + ((pop_stack) ? 1 : 0), (mem)); \
    } while (0)

#define x86_fst_membase(inst,basereg,disp,is_double,pop_stack) \
    do { \
        *(inst)++ = (is_double) ? (unsigned char)0xdd: (unsigned char)0xd9; \
        x86_membase_emit ((inst), 2 + ((pop_stack) ? 1 : 0), (basereg), (disp)); \
    } while (0)

/* 80-bit extended-precision store-and-pop: DB /7 */
#define x86_fst80_mem(inst,mem) \
    do { \
        *(inst)++ = (unsigned char)0xdb; \
        x86_mem_emit ((inst), 7, (mem)); \
    } while (0)

#define x86_fst80_membase(inst,basereg,disp) \
    do { \
        *(inst)++ = (unsigned char)0xdb; \
        x86_membase_emit ((inst), 7, (basereg), (disp)); \
    } while (0)

/* fistp: convert ST(0) to int64 (DF /7) or int32 (DB /3) and pop */
#define x86_fist_pop(inst,mem,is_long) \
    do { \
        if ((is_long)) { \
            *(inst)++ = (unsigned char)0xdf; \
            x86_mem_emit ((inst), 7, (mem)); \
        } else { \
            *(inst)++ = (unsigned char)0xdb; \
            x86_mem_emit ((inst), 3, (mem)); \
        } \
    } while (0)

#define x86_fist_pop_membase(inst,basereg,disp,is_long) \
    do { \
        if ((is_long)) { \
            *(inst)++ = (unsigned char)0xdf; \
            x86_membase_emit ((inst), 7, (basereg), (disp)); \
        } else { \
            *(inst)++ = (unsigned char)0xdb; \
            x86_membase_emit ((inst), 3, (basereg), (disp)); \
        } \
    } while (0)
/* fstsw ax: fwait (0x9b) followed by fnstsw ax (DF E0) */
#define x86_fstsw(inst) \
    do { \
        *(inst)++ = (unsigned char)0x9b; \
        *(inst)++ = (unsigned char)0xdf; \
        *(inst)++ = (unsigned char)0xe0; \
    } while (0)

/**
 * x86_fist_membase:
 * Converts content of ST(0) to integer and stores it at memory location
 * addressed by [basereg + disp].
 * is_int specifies whether destination is int32 (TRUE) or int16 (FALSE).
 */
#define x86_fist_membase(inst,basereg,disp,is_int) \
    do { \
        if ((is_int)) { \
            *(inst)++ = (unsigned char)0xdb; \
            x86_membase_emit ((inst), 2, (basereg), (disp)); \
        } else { \
            *(inst)++ = (unsigned char)0xdf; \
            x86_membase_emit ((inst), 2, (basereg), (disp)); \
        } \
    } while (0)
/* PUSH reg (50+r). */
#define x86_push_reg(inst,reg) \
	do { \
		*(inst)++ = (unsigned char)0x50 + (reg); \
	} while (0)

/* PUSH dword at [reg] (FF /6). */
#define x86_push_regp(inst,reg) \
	do { \
		*(inst)++ = (unsigned char)0xff; \
		x86_regp_emit ((inst), 6, (reg)); \
	} while (0)

/* PUSH dword at absolute address [mem] (FF /6). */
#define x86_push_mem(inst,mem) \
	do { \
		*(inst)++ = (unsigned char)0xff; \
		x86_mem_emit ((inst), 6, (mem)); \
	} while (0)

/* PUSH dword at [basereg + disp] (FF /6). */
#define x86_push_membase(inst,basereg,disp) \
	do { \
		*(inst)++ = (unsigned char)0xff; \
		x86_membase_emit ((inst), 6, (basereg), (disp)); \
	} while (0)

/* PUSH dword at [basereg + disp + indexreg << shift] (FF /6). */
#define x86_push_memindex(inst,basereg,disp,indexreg,shift) \
	do { \
		*(inst)++ = (unsigned char)0xff; \
		x86_memindex_emit ((inst), 6, (basereg), (disp), (indexreg), (shift)); \
	} while (0)

/* Emit a push with a 32-bit placeholder immediate, to be patched later
 * (0xf0f0f0f0 never fits in imm8, so the 5-byte form is guaranteed). */
#define x86_push_imm_template(inst) x86_push_imm (inst, 0xf0f0f0f0)

/* PUSH imm: sign-extended imm8 form (6A) when the value fits, else imm32 (68). */
#define x86_push_imm(inst,imm) \
	do { \
		int _imm = (int) (imm); \
		if (x86_is_imm8 (_imm)) { \
			*(inst)++ = (unsigned char)0x6A; \
			x86_imm_emit8 ((inst), (_imm)); \
		} else { \
			*(inst)++ = (unsigned char)0x68; \
			x86_imm_emit32 ((inst), (_imm)); \
		} \
	} while (0)
/* POP reg (58+r). */
#define x86_pop_reg(inst,reg) \
	do { \
		*(inst)++ = (unsigned char)0x58 + (reg); \
	} while (0)

/* POP dword to absolute address [mem] (8F /0).
 * Fix: opcode was 0x87, which is XCHG r/m32,r32; POP r/m32 is 0x8F /0. */
#define x86_pop_mem(inst,mem) \
	do { \
		*(inst)++ = (unsigned char)0x8f; \
		x86_mem_emit ((inst), 0, (mem)); \
	} while (0)

/* POP dword to [basereg + disp] (8F /0).
 * Fix: opcode was 0x87, which is XCHG r/m32,r32; POP r/m32 is 0x8F /0. */
#define x86_pop_membase(inst,basereg,disp) \
	do { \
		*(inst)++ = (unsigned char)0x8f; \
		x86_membase_emit ((inst), 0, (basereg), (disp)); \
	} while (0)
/* Push/pop all general registers (PUSHAD 60 / POPAD 61) and EFLAGS (PUSHFD 9C / POPFD 9D). */
#define x86_pushad(inst) do { *(inst)++ = (unsigned char)0x60; } while (0)
#define x86_pushfd(inst) do { *(inst)++ = (unsigned char)0x9c; } while (0)
#define x86_popad(inst)  do { *(inst)++ = (unsigned char)0x61; } while (0)
#define x86_popfd(inst)  do { *(inst)++ = (unsigned char)0x9d; } while (0)
/* LOOP rel8 (E2): decrement ECX, branch while nonzero. */
#define x86_loop(inst,imm) \
	do { \
		*(inst)++ = (unsigned char)0xe2; \
		x86_imm_emit8 ((inst), (imm)); \
	} while (0)

/* LOOPE/LOOPZ rel8 (E1): as LOOP, but also requires ZF set. */
#define x86_loope(inst,imm) \
	do { \
		*(inst)++ = (unsigned char)0xe1; \
		x86_imm_emit8 ((inst), (imm)); \
	} while (0)

/* LOOPNE/LOOPNZ rel8 (E0): as LOOP, but also requires ZF clear. */
#define x86_loopne(inst,imm) \
	do { \
		*(inst)++ = (unsigned char)0xe0; \
		x86_imm_emit8 ((inst), (imm)); \
	} while (0)
/* JMP rel32 (E9). */
#define x86_jump32(inst,imm) \
	do { \
		*(inst)++ = (unsigned char)0xe9; \
		x86_imm_emit32 ((inst), (imm)); \
	} while (0)

/* JMP rel8 (EB). */
#define x86_jump8(inst,imm) \
	do { \
		*(inst)++ = (unsigned char)0xeb; \
		x86_imm_emit8 ((inst), (imm)); \
	} while (0)

/* JMP reg (FF /4). */
#define x86_jump_reg(inst,reg) \
	do { \
		*(inst)++ = (unsigned char)0xff; \
		x86_reg_emit ((inst), 4, (reg)); \
	} while (0)

/* JMP dword at absolute address [mem] (FF /4). */
#define x86_jump_mem(inst,mem) \
	do { \
		*(inst)++ = (unsigned char)0xff; \
		x86_mem_emit ((inst), 4, (mem)); \
	} while (0)

/* JMP dword at [basereg + disp] (FF /4). */
#define x86_jump_membase(inst,basereg,disp) \
	do { \
		*(inst)++ = (unsigned char)0xff; \
		x86_membase_emit ((inst), 4, (basereg), (disp)); \
	} while (0)
/*
 * Jump to a known target, choosing the short (2-byte rel8) or near
 * (5-byte rel32) form; target is a pointer in our buffer.
 * The displacement is first computed assuming the 2-byte form; when the
 * rel32 form is needed it must be reduced by 3 (the extra length of the
 * 5-byte encoding), since rel displacements are from the END of the jump.
 */
#define x86_jump_code(inst,target) \
	do { \
		int t = (unsigned char*)(target) - (inst) - 2; \
		if (x86_is_imm8(t)) { \
			x86_jump8 ((inst), t); \
		} else { \
			t -= 3; \
			x86_jump32 ((inst), t); \
		} \
	} while (0)

/* As x86_jump_code, but disp is already relative to the jump's first byte. */
#define x86_jump_disp(inst,disp) \
	do { \
		int t = (disp) - 2; \
		if (x86_is_imm8(t)) { \
			x86_jump8 ((inst), t); \
		} else { \
			t -= 3; \
			x86_jump32 ((inst), t); \
		} \
	} while (0)
/* Conditional branch, rel8 form: Jcc opcode taken from the signed or
 * unsigned condition-code map, followed by an 8-bit displacement. */
#define x86_branch8(inst,cond,imm,is_signed) \
	do { \
		*(inst)++ = ((is_signed) ? x86_cc_signed_map : x86_cc_unsigned_map) [(cond)]; \
		x86_imm_emit8 ((inst), (imm)); \
	} while (0)

/* Conditional branch, rel32 form: 0F-prefixed Jcc (map opcode + 0x10),
 * followed by a 32-bit displacement. */
#define x86_branch32(inst,cond,imm,is_signed) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = ((is_signed) ? x86_cc_signed_map : x86_cc_unsigned_map) [(cond)] + 0x10; \
		x86_imm_emit32 ((inst), (imm)); \
	} while (0)
/*
 * Conditional branch to a target in our buffer, choosing rel8 or rel32.
 * The displacement is first computed assuming the 2-byte rel8 form; the
 * rel32 form (0F cc imm32, 6 bytes) is 4 bytes longer, so the displacement
 * must be reduced by 4 on that path.
 */
#define x86_branch(inst,cond,target,is_signed) \
	do { \
		int offset = (target) - (inst) - 2; \
		if (x86_is_imm8 ((offset))) \
			x86_branch8 ((inst), (cond), offset, (is_signed)); \
		else { \
			offset -= 4; \
			x86_branch32 ((inst), (cond), offset, (is_signed)); \
		} \
	} while (0)

/* As x86_branch, but disp is already relative to the branch's first byte. */
#define x86_branch_disp(inst,cond,disp,is_signed) \
	do { \
		int offset = (disp) - 2; \
		if (x86_is_imm8 ((offset))) \
			x86_branch8 ((inst), (cond), offset, (is_signed)); \
		else { \
			offset -= 4; \
			x86_branch32 ((inst), (cond), offset, (is_signed)); \
		} \
	} while (0)
/* SETcc on a byte register (0F 9x /0; map opcode + 0x20).
 * reg must be one of the byte-addressable registers (AL/BL/CL/DL). */
#define x86_set_reg(inst,cond,reg,is_signed) \
	do { \
		g_assert (X86_IS_BYTE_REG (reg)); \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = ((is_signed) ? x86_cc_signed_map : x86_cc_unsigned_map) [(cond)] + 0x20; \
		x86_reg_emit ((inst), 0, (reg)); \
	} while (0)

/* SETcc on the byte at absolute address [mem] (0F 9x /0). */
#define x86_set_mem(inst,cond,mem,is_signed) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = ((is_signed) ? x86_cc_signed_map : x86_cc_unsigned_map) [(cond)] + 0x20; \
		x86_mem_emit ((inst), 0, (mem)); \
	} while (0)

/* SETcc on the byte at [basereg + disp] (0F 9x /0). */
#define x86_set_membase(inst,cond,basereg,disp,is_signed) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = ((is_signed) ? x86_cc_signed_map : x86_cc_unsigned_map) [(cond)] + 0x20; \
		x86_membase_emit ((inst), 0, (basereg), (disp)); \
	} while (0)
/* CALL rel32 (E8); disp is relative to the end of this instruction. */
#define x86_call_imm(inst,disp) \
	do { \
		*(inst)++ = (unsigned char)0xe8; \
		x86_imm_emit32 ((inst), (int)(disp)); \
	} while (0)

/* CALL reg (FF /2). */
#define x86_call_reg(inst,reg) \
	do { \
		*(inst)++ = (unsigned char)0xff; \
		x86_reg_emit ((inst), 2, (reg)); \
	} while (0)

/* CALL dword at absolute address [mem] (FF /2). */
#define x86_call_mem(inst,mem) \
	do { \
		*(inst)++ = (unsigned char)0xff; \
		x86_mem_emit ((inst), 2, (mem)); \
	} while (0)

/* CALL dword at [basereg + disp] (FF /2). */
#define x86_call_membase(inst,basereg,disp) \
	do { \
		*(inst)++ = (unsigned char)0xff; \
		x86_membase_emit ((inst), 2, (basereg), (disp)); \
	} while (0)
/*
 * Call a known target in our buffer with the 5-byte rel32 form (E8).
 * The rel32 operand is relative to the END of the call instruction, so the
 * raw (target - inst) distance must be reduced by 5 before emission.
 */
#define x86_call_code(inst,target) \
	do { \
		int _x86_offset = (unsigned char*)(target) - (inst); \
		_x86_offset -= 5; \
		x86_call_imm ((inst), _x86_offset); \
	} while (0)
/* RET (C3). */
#define x86_ret(inst) do { *(inst)++ = (unsigned char)0xc3; } while (0)

/* RET imm16 (C2 iw): return and release imm bytes of stack arguments.
 * A zero operand is emitted as the equivalent 1-byte RET (saves 2 bytes). */
#define x86_ret_imm(inst,imm) \
	do { \
		if ((imm) == 0) { \
			x86_ret ((inst)); \
		} else { \
			*(inst)++ = (unsigned char)0xc2; \
			x86_imm_emit16 ((inst), (imm)); \
		} \
	} while (0)
/* CMOVcc dreg, reg (0F 4x): conditional move; second opcode byte is the
 * Jcc opcode from the cc map minus 0x30. */
#define x86_cmov_reg(inst,cond,is_signed,dreg,reg) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = ((is_signed) ? x86_cc_signed_map : x86_cc_unsigned_map) [(cond)] - 0x30; \
		x86_reg_emit ((inst), (dreg), (reg)); \
	} while (0)

/* CMOVcc reg, [mem] (0F 4x). */
#define x86_cmov_mem(inst,cond,is_signed,reg,mem) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = ((is_signed) ? x86_cc_signed_map : x86_cc_unsigned_map) [(cond)] - 0x30; \
		x86_mem_emit ((inst), (reg), (mem)); \
	} while (0)

/* CMOVcc reg, [basereg + disp] (0F 4x). */
#define x86_cmov_membase(inst,cond,is_signed,reg,basereg,disp) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = ((is_signed) ? x86_cc_signed_map : x86_cc_unsigned_map) [(cond)] - 0x30; \
		x86_membase_emit ((inst), (reg), (basereg), (disp)); \
	} while (0)
/* ENTER framesize, 0 (C8 iw ib): allocate a stack frame.
 * Fix: ENTER takes a trailing imm8 nesting level; the visible code emitted
 * only C8 iw, leaving the encoding one byte short. Nesting level is 0. */
#define x86_enter(inst,framesize) \
	do { \
		*(inst)++ = (unsigned char)0xc8; \
		x86_imm_emit16 ((inst), (framesize)); \
		*(inst)++ = 0; \
	} while (0)
/* LEAVE (C9): tear down the current stack frame. */
#define x86_leave(inst) do { *(inst)++ = (unsigned char)0xc9; } while (0)
/* SAHF (9E): load AH into the low byte of EFLAGS. */
#define x86_sahf(inst)  do { *(inst)++ = (unsigned char)0x9e; } while (0)
/* x87 no-operand arithmetic instructions; all are D9-prefixed two-byte opcodes. */
#define x86_fsin(inst)    do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xfe; } while (0)
#define x86_fcos(inst)    do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xff; } while (0)
#define x86_fabs(inst)    do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xe1; } while (0)
#define x86_ftst(inst)    do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xe4; } while (0)
#define x86_fxam(inst)    do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xe5; } while (0)
#define x86_fpatan(inst)  do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf3; } while (0)
#define x86_fprem(inst)   do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf8; } while (0)
#define x86_fprem1(inst)  do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf5; } while (0)
#define x86_frndint(inst) do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xfc; } while (0)
#define x86_fsqrt(inst)   do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xfa; } while (0)
#define x86_fptan(inst)   do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)0xf2; } while (0)
/*
 * Emit exactly `size` (1..7) bytes of no-op padding, using the widest
 * available single-instruction forms: NOP, mov eax,eax, and lea reg,[reg+0]
 * with 8- or 32-bit displacements.
 */
#define x86_padding(inst,size) \
	do { \
		switch ((size)) { \
		case 1: x86_nop ((inst)); break; /* 90 */ \
		case 2: *(inst)++ = 0x8b; \
			*(inst)++ = 0xc0; break; /* mov eax,eax */ \
		case 3: *(inst)++ = 0x8d; *(inst)++ = 0x6d; \
			*(inst)++ = 0x00; break; /* lea ebp,[ebp+0] */ \
		case 4: *(inst)++ = 0x8d; *(inst)++ = 0x64; \
			*(inst)++ = 0x24; *(inst)++ = 0x00; \
			break; /* lea esp,[esp+0] */ \
		case 5: *(inst)++ = 0x8d; *(inst)++ = 0x64; \
			*(inst)++ = 0x24; *(inst)++ = 0x00; \
			x86_nop ((inst)); break; /* 4-byte lea + nop */ \
		case 6: *(inst)++ = 0x8d; *(inst)++ = 0xad; \
			*(inst)++ = 0x00; *(inst)++ = 0x00; \
			*(inst)++ = 0x00; *(inst)++ = 0x00; \
			break; /* lea ebp,[ebp+0], disp32 */ \
		case 7: *(inst)++ = 0x8d; *(inst)++ = 0xa4; \
			*(inst)++ = 0x24; *(inst)++ = 0x00; \
			*(inst)++ = 0x00; *(inst)++ = 0x00; \
			*(inst)++ = 0x00; break; /* lea esp,[esp+0], disp32 */ \
		default: assert (0); \
		} \
	} while (0)
/*
 * Emit a standard prologue: ENTER with frame_size, then push every register
 * whose bit is set in reg_mask (bit i corresponds to register number i),
 * scanning from register 0 upward.
 */
#define x86_prolog(inst,frame_size,reg_mask) \
	do { \
		unsigned i, m = 1; \
		x86_enter ((inst), (frame_size)); \
		for (i = 0; i < X86_NREG; ++i, m <<= 1) { \
			if ((reg_mask) & m) \
				x86_push_reg ((inst), i); \
		} \
	} while (0)
1686 #define x86_epilog(inst,reg_mask) \
1688 unsigned i, m = 1 << X86_EDI; \
1689 for (i = X86_EDI; m != 0; i--, m=m>>1) { \
1690 if ((reg_mask) & m) \
1691 x86_pop_reg ((inst), i); \
1693 x86_leave ((inst)); \