* Intel Corporation (ORP Project)
* Sergey Chaban (serge@wildwestsoftware.com)
* Dietmar Maurer (dietmar@ximian.com)
+ * Patrik Torstensson
*
* Copyright (C) 2000 Intel Corporation. All rights reserved.
* Copyright (C) 2001, 2002 Ximian, Inc.
X86_EDI = 7,
X86_NREG
} X86_Reg_No;
+
/* SSE register numbers XMM0-XMM7 plus the register count. */
typedef enum {
	X86_XMM0 = 0,
	X86_XMM1 = 1,
	X86_XMM2 = 2,
	X86_XMM3 = 3,
	X86_XMM4 = 4,
	X86_XMM5 = 5,
	X86_XMM6 = 6,
	X86_XMM7 = 7,
	X86_XMM_NREG = 8	/* number of XMM registers on 32-bit x86 */
} X86_XMM_Reg_No;
+
/*
// opcodes for alu instructions
*/
X86_CC_NO = 11,
X86_NCC
} X86_CC;
+
+/* FP status */
/* FP status */
enum {
	/* x87 status-word condition-code bits: C0=bit 8, C1=bit 9, C2=bit 10, C3=bit 14 */
	X86_FP_C0 = 0x100,
	X86_FP_C1 = 0x200,
	X86_FP_C2 = 0x400,
	X86_FP_C3 = 0x4000,
	/* C0|C2|C3 — the bits fcom/fucom-style compares set; C1 is excluded here
	 * (presumably because it carries stack-fault/rounding info, not compare results —
	 * NOTE(review): confirm against callers) */
	X86_FP_CC_MASK = 0x4500
};
+
+/* FP control word */
/* FP control word */
enum {
	/* exception-mask bits: a set bit masks (suppresses) the exception */
	X86_FPCW_INVOPEX_MASK = 0x1,	/* invalid operation */
	X86_FPCW_DENOPEX_MASK = 0x2,	/* denormalized operand */
	X86_FPCW_ZERODIV_MASK = 0x4,	/* zero divide */
	X86_FPCW_OVFEX_MASK = 0x8,	/* overflow */
	X86_FPCW_UNDFEX_MASK = 0x10,	/* underflow */
	X86_FPCW_PRECEX_MASK = 0x20,	/* precision (inexact result) */
	X86_FPCW_PRECC_MASK = 0x300,	/* precision-control field (bits 8-9) */
	X86_FPCW_ROUNDC_MASK = 0xc00,	/* rounding-control field (bits 10-11) */

	/* values for precision control */
	X86_FPCW_PREC_SINGLE = 0,
	X86_FPCW_PREC_DOUBLE = 0x200,
	X86_FPCW_PREC_EXTENDED = 0x300,

	/* values for rounding control */
	X86_FPCW_ROUND_NEAREST = 0,
	X86_FPCW_ROUND_DOWN = 0x400,
	X86_FPCW_ROUND_UP = 0x800,
	X86_FPCW_ROUND_TOZERO = 0xc00
};
+
/*
// prefix code
*/
X86_ES_PREFIX = 0x26,
X86_FS_PREFIX = 0x64,
X86_GS_PREFIX = 0x65,
+ X86_UNLIKELY_PREFIX = 0x2E,
+ X86_LIKELY_PREFIX = 0x3E,
X86_OPERAND_PREFIX = 0x66,
X86_ADDRESS_PREFIX = 0x67
} X86_Prefix;
#define X86_IS_SCRATCH(reg) (X86_CALLER_REGS & (1 << (reg))) /* X86_EAX, X86_ECX, or X86_EDX */
#define X86_IS_CALLEE(reg) (X86_CALLEE_REGS & (1 << (reg))) /* X86_ESI, X86_EDI, X86_EBX, or X86_EBP */
/* registers 0-3 (EAX, ECX, EDX, EBX) are the only ones with byte-addressable low halves (AL..BL) */
#define X86_IS_BYTE_REG(reg) ((reg) < 4)
+
/*
// Frame structure:
//
/*
* useful building blocks
*/
/*
 * ModR/M byte field accessors: mod (bits 7-6), reg (bits 5-3), rm (bits 2-0).
 * mod is masked to two bits for consistency with reg/rm, so the accessors
 * stay correct even if the argument is wider than one byte.
 */
#define x86_modrm_mod(modrm) (((modrm) >> 6) & 0x3)
#define x86_modrm_reg(modrm) (((modrm) >> 3) & 0x7)
#define x86_modrm_rm(modrm) ((modrm) & 0x7)
+
/* Emit one ModR/M-format byte: mod in bits 7-6, o (reg/opcode) in bits 5-3, r/m in bits 2-0. */
#define x86_address_byte(inst,m,o,r) \
	do { \
		unsigned char _x86_ab = (unsigned char)((((m) & 0x03) << 6) | (((o) & 0x07) << 3) | ((r) & 0x07)); \
		*(inst)++ = _x86_ab; \
	} while (0)
#define x86_imm_emit32(inst,imm) \
do { \
x86_address_byte ((inst), (shift), (indexreg), (basereg)); \
x86_imm_emit8 ((inst), (disp)); \
} else { \
- x86_address_byte ((inst), 0, (r), 4); \
+ x86_address_byte ((inst), 2, (r), 4); \
x86_address_byte ((inst), (shift), (indexreg), 5); \
x86_imm_emit32 ((inst), (disp)); \
} \
/* cld: clear the direction flag so string ops move forward (opcode FC) */
#define x86_cld(inst) do { *(inst)++ =(unsigned char)0xfc; } while (0)
/* stosb/stosl: store AL/EAX at [EDI] and advance EDI (opcodes AA/AB) */
#define x86_stosb(inst) do { *(inst)++ =(unsigned char)0xaa; } while (0)
#define x86_stosl(inst) do { *(inst)++ =(unsigned char)0xab; } while (0)
/* Intel-manual alias: "stosd" is the same dword store as stosl */
#define x86_stosd(inst) x86_stosl((inst))
/* movsb/movsl: copy a byte/dword from [ESI] to [EDI], advancing both (opcodes A4/A5) */
#define x86_movsb(inst) do { *(inst)++ =(unsigned char)0xa4; } while (0)
#define x86_movsl(inst) do { *(inst)++ =(unsigned char)0xa5; } while (0)
/* alias for movsl, matching Intel "movsd" naming */
#define x86_movsd(inst) x86_movsl((inst))
/* emit a raw prefix byte (segment override, operand/address-size, etc.) */
#define x86_prefix(inst,p) do { *(inst)++ =(unsigned char) (p); } while (0)
x86_membase_emit ((inst), (reg), (basereg), (disp)); \
} while (0)
/*
 * xadd dreg, reg: exchange-and-add register into register.
 * Encoding: 0F C0 /r for byte operands, 0F C1 /r otherwise;
 * the source reg goes in the ModR/M reg field, dreg in r/m.
 */
#define x86_xadd_reg_reg(inst,dreg,reg,size) \
	do { \
		*(inst)++ = (unsigned char)0x0F; \
		*(inst)++ = (unsigned char)(((size) == 1) ? 0xC0 : 0xC1); \
		x86_reg_emit ((inst), (reg), (dreg)); \
	} while (0)
+
/*
 * xadd [mem], reg: exchange-and-add into an absolute memory address.
 * 0F C0 for byte operands, 0F C1 for word/dword.
 * NOTE(review): size==2 would additionally need a 0x66 operand-size prefix,
 * which this macro does not emit — callers presumably use sizes 1 and 4 only.
 */
#define x86_xadd_mem_reg(inst,mem,reg,size) \
	do { \
		*(inst)++ = (unsigned char)0x0F; \
		if ((size) == 1) \
			*(inst)++ = (unsigned char)0xC0; \
		else \
			*(inst)++ = (unsigned char)0xC1; \
		x86_mem_emit ((inst), (reg), (mem)); \
	} while (0)
+
/*
 * xadd [basereg+disp], reg: exchange-and-add into a base+displacement address.
 * 0F C0 for byte operands, 0F C1 for word/dword.
 * NOTE(review): as with x86_xadd_mem_reg, no 0x66 prefix is emitted for size==2.
 */
#define x86_xadd_membase_reg(inst,basereg,disp,reg,size) \
	do { \
		*(inst)++ = (unsigned char)0x0F; \
		if ((size) == 1) \
			*(inst)++ = (unsigned char)0xC0; \
		else \
			*(inst)++ = (unsigned char)0xC1; \
		x86_membase_emit ((inst), (reg), (basereg), (disp)); \
	} while (0)
+
#define x86_inc_mem(inst,mem) \
do { \
*(inst)++ = (unsigned char)0xff; \
x86_imm_emit32 ((inst), (imm)); \
} \
} while (0)
+
/*
 * Byte-sized ALU op (add/or/adc/sbb/and/sub/xor/cmp, selected by opc)
 * on the byte at [basereg+disp] with an 8-bit immediate: 80 /opc ib.
 */
#define x86_alu_membase8_imm(inst,opc,basereg,disp,imm) 	\
	do { \
		*(inst)++ = (unsigned char)0x80; \
		x86_membase_emit ((inst), (opc), (basereg), (disp)); \
		x86_imm_emit8 ((inst), (imm)); \
	} while (0)
#define x86_alu_mem_reg(inst,opc,mem,reg) \
do { \
#define x86_widen_reg(inst,dreg,reg,is_signed,is_half) \
do { \
unsigned char op = 0xb6; \
+ g_assert (is_half || X86_IS_BYTE_REG (reg)); \
*(inst)++ = (unsigned char)0x0f; \
if ((is_signed)) op += 0x08; \
if ((is_half)) op += 0x01; \
*(inst)++ = (unsigned char)0xe8; \
} while (0)
/* fldpi: push the constant pi onto the x87 stack (encoding D9 EB). */
#define x86_fldpi(inst) \
	do { \
		*(inst)++ = 0xd9; \
		*(inst)++ = 0xeb; \
	} while (0)
+
#define x86_fst(inst,mem,is_double,pop_stack) \
do { \
*(inst)++ = (is_double) ? (unsigned char)0xdd: (unsigned char)0xd9; \
} \
} while (0)
/*
 * Store the x87 status word into AX.
 * 9B is fwait (serialize pending FP exceptions), DF E0 is fnstsw ax.
 */
#define x86_fstsw(inst) \
	do { \
		*(inst)++ = 0x9b; \
		*(inst)++ = 0xdf; \
		*(inst)++ = 0xe0; \
	} while (0)
+
/**
* @x86_fist_membase
* Converts content of ST(0) to integer and stores it at memory location
x86_memindex_emit ((inst), 6, (basereg), (disp), (indexreg), (shift)); \
} while (0)
/*
 * Push a placeholder immediate that will be patched later.
 * 0xf0f0f0f0 does not fit in a signed byte, so x86_push_imm always picks
 * the full 5-byte "68 id" form, keeping the patch site a fixed size.
 */
#define x86_push_imm_template(inst) x86_push_imm (inst, 0xf0f0f0f0)

/*
 * push imm: uses the short "6A ib" form when the value fits in a signed
 * 8-bit immediate, otherwise the 5-byte "68 id" form.
 */
#define x86_push_imm(inst,imm)  \
	do { \
		int _imm = (int) (imm); \
		if (x86_is_imm8 (_imm)) { \
			*(inst)++ = (unsigned char)0x6A; \
			x86_imm_emit8 ((inst), (_imm)); \
		} else { \
			*(inst)++ = (unsigned char)0x68; \
			x86_imm_emit32 ((inst), (_imm)); \
		} \
	} while (0)
#define x86_pop_reg(inst,reg) \
#define x86_set_reg(inst,cond,reg,is_signed) \
do { \
+ g_assert (X86_IS_BYTE_REG (reg)); \
*(inst)++ = (unsigned char)0x0f; \
if ((is_signed)) \
*(inst)++ = x86_cc_signed_map [(cond)] + 0x20; \
x86_ret ((inst)); \
} while (0)
+
/*
 * SSE/SSE2/SSE3/SSE4.1 opcode bytes (the byte following the 0F escape,
 * or following 0F 38 for the entries marked sse41).  The mandatory
 * prefix (none/66/F2/F3) is supplied by the emitter macros below.
 *
 * Fix: X86_SSE_PMINSW and X86_SSE_PMINSD had each other's values.
 * Per the Intel SDM, pminsw is 66 0F EA /r and pminsd is 66 0F 38 39 /r.
 */
typedef enum {
	X86_SSE_SQRT = 0x51,
	X86_SSE_RSQRT = 0x52,
	X86_SSE_RCP = 0x53,
	X86_SSE_ADD = 0x58,
	X86_SSE_DIV = 0x5E,
	X86_SSE_MUL = 0x59,
	X86_SSE_SUB = 0x5C,
	X86_SSE_MIN = 0x5D,
	X86_SSE_MAX = 0x5F,
	X86_SSE_COMP = 0xC2,
	X86_SSE_AND = 0x54,
	X86_SSE_ANDN = 0x55,
	X86_SSE_OR = 0x56,
	X86_SSE_XOR = 0x57,
	X86_SSE_UNPCKL = 0x14,
	X86_SSE_UNPCKH = 0x15,

	X86_SSE_ADDSUB = 0xD0,
	X86_SSE_HADD = 0x7C,
	X86_SSE_HSUB = 0x7D,
	X86_SSE_MOVSHDUP = 0x16,
	X86_SSE_MOVSLDUP = 0x12,

	X86_SSE_PAND = 0xDB,
	X86_SSE_POR = 0xEB,
	X86_SSE_PXOR = 0xEF,

	X86_SSE_PADDB = 0xFC,
	X86_SSE_PADDW = 0xFD,
	X86_SSE_PADDD = 0xFE,

	X86_SSE_PSUBB = 0xF8,
	X86_SSE_PSUBW = 0xF9,
	X86_SSE_PSUBD = 0xFA,

	X86_SSE_PMAXSB = 0x3C, /*sse41*/
	X86_SSE_PMAXSW = 0xEE,
	X86_SSE_PMAXSD = 0x3D, /*sse41*/

	X86_SSE_PMAXUB = 0xDE,
	X86_SSE_PMAXUW = 0x3E, /*sse41*/
	X86_SSE_PMAXUD = 0x3F, /*sse41*/

	X86_SSE_PMINSB = 0x38, /*sse41*/
	X86_SSE_PMINSW = 0xEA,
	X86_SSE_PMINSD = 0x39, /*sse41*/

	X86_SSE_PMINUB = 0xDA,
	X86_SSE_PMINUW = 0x3A, /*sse41*/
	X86_SSE_PMINUD = 0x3B, /*sse41*/

	X86_SSE_PAVGB = 0xE0,
	X86_SSE_PAVGW = 0xE3,

	X86_SSE_PCMPEQB = 0x74,
	X86_SSE_PCMPEQW = 0x75,
	X86_SSE_PCMPEQD = 0x76,

	X86_SSE_PCMPGTB = 0x64,
	X86_SSE_PCMPGTW = 0x65,
	X86_SSE_PCMPGTD = 0x66,

	X86_SSE_PSADBW = 0xf6,

	X86_SSE_PSHUFD = 0x70,

	X86_SSE_PUNPCKLBW = 0x60,
	X86_SSE_PUNPCKLWD = 0x61,
	X86_SSE_PUNPCKLDQ = 0x62,
	X86_SSE_PUNPCKLQDQ = 0x6C,

	X86_SSE_PUNPCKHBW = 0x68,
	X86_SSE_PUNPCKHWD = 0x69,
	X86_SSE_PUNPCKHDQ = 0x6A,
	X86_SSE_PUNPCKHQDQ = 0x6D,

	X86_SSE_PACKUSWB = 0x67,
	X86_SSE_PACKUSDW = 0x2B,/*sse41*/

	X86_SSE_PADDUSB = 0xDC,
	X86_SSE_PADDUSW = 0xDD,
	X86_SSE_PSUBUSB = 0xD8,
	X86_SSE_PSUBUSW = 0xD9,

	X86_SSE_PADDSB = 0xEC,
	X86_SSE_PADDSW = 0xED,
	X86_SSE_PSUBSB = 0xE8,
	X86_SSE_PSUBSW = 0xE9,

	X86_SSE_PMULLW = 0xD5,
	X86_SSE_PMULLD = 0x40,/*sse41*/
	X86_SSE_PMULHUW = 0xE4,
	X86_SSE_PMULHW = 0xE5,

	X86_SSE_PMOVMSKB = 0xD7,

	/* psllw/pslld-family shift-by-immediate group opcodes... */
	X86_SSE_PSHIFTW = 0x71,
	X86_SSE_PSHIFTD = 0x72,
	/* ...with the shift kind encoded in the ModR/M reg field */
	X86_SSE_SHR = 2,
	X86_SSE_SAR = 4,
	X86_SSE_SHL = 6,

	/* shift-by-xmm-register forms */
	X86_SSE_PSRLW_REG = 0xD1,
	X86_SSE_PSRAW_REG = 0xE1,
	X86_SSE_PSLLW_REG = 0xF1,

	X86_SSE_PSRLD_REG = 0xD2,
	X86_SSE_PSRAD_REG = 0xE2,
	X86_SSE_PSLLD_REG = 0xF2
} X86_SSE_Opcode;
+
+
+/* minimal SSE* support */
/* minimal SSE* support */
/* movsd xmm, [basereg+disp]: load a scalar double into an XMM register (F2 0F 10 /r) */
#define x86_movsd_reg_membase(inst,dreg,basereg,disp) \
	do { \
		*(inst)++ = (unsigned char)0xf2; \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x10; \
		x86_membase_emit ((inst), (dreg), (basereg), (disp)); \
	} while (0)
+
/* cvttsd2si r32, xmm: convert scalar double to int32 with truncation (F2 0F 2C /r) */
#define x86_cvttsd2si(inst,dreg,reg) \
	do { \
		*(inst)++ = (unsigned char)0xf2; \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x2c; \
		x86_reg_emit ((inst), (dreg), (reg)); \
	} while (0)
+
/* generic two-operand SSE op: 0F <opc> /r with no mandatory prefix */
#define x86_sse_alu_reg_reg(inst,opc,dreg,reg) \
	do { \
		*(inst)++ = (unsigned char)0x0F;	\
		*(inst)++ = (unsigned char)(opc);	\
		x86_reg_emit ((inst), (dreg), (reg));	\
	} while (0)

/* 66-prefixed form: packed-double / integer-SSE2 variant of <opc> */
#define x86_sse_alu_pd_reg_reg(inst,opc,dreg,reg) \
	do { \
		*(inst)++ = (unsigned char)0x66;	\
		x86_sse_alu_reg_reg ((inst), (opc), (dreg), (reg)); \
	} while (0)

/* unprefixed packed-single form — identical to x86_sse_alu_reg_reg,
 * kept as a separate name for symmetry with the pd/sd/ss variants */
#define x86_sse_alu_ps_reg_reg(inst,opc,dreg,reg) \
	do { \
		x86_sse_alu_reg_reg ((inst), (opc), (dreg), (reg)); \
	} while (0)

/* F2-prefixed form: scalar-double variant of <opc> */
#define x86_sse_alu_sd_reg_reg(inst,opc,dreg,reg) \
	do { \
		*(inst)++ = (unsigned char)0xF2;	\
		x86_sse_alu_reg_reg ((inst), (opc), (dreg), (reg)); \
	} while (0)

/* F3-prefixed form: scalar-single variant of <opc> */
#define x86_sse_alu_ss_reg_reg(inst,opc,dreg,reg) \
	do { \
		*(inst)++ = (unsigned char)0xF3;	\
		x86_sse_alu_reg_reg ((inst), (opc), (dreg), (reg)); \
	} while (0)
+
/*
 * Packed-single SSE op followed by an imm8 (e.g. cmpps, shufps).
 * Fix: (imm) is now parenthesized — the bare "(unsigned char)imm" bound
 * the cast to only part of an expression argument (macro-hygiene bug,
 * e.g. a ternary passed as imm would have its condition cast instead).
 */
#define x86_sse_alu_ps_reg_reg_imm(inst,opc,dreg,reg, imm) \
	do { \
		x86_sse_alu_reg_reg ((inst), (opc), (dreg), (reg)); \
		*(inst)++ = (unsigned char)(imm); \
	} while (0)
+
/* SSE4.1 three-byte-opcode ops: 66 0F 38 <opc> /r */
#define x86_sse_alu_sse41_reg_reg(inst,opc,dreg,reg) \
	do { \
		*(inst)++ = (unsigned char)0x66;	\
		*(inst)++ = (unsigned char)0x0F;	\
		*(inst)++ = (unsigned char)0x38;	\
		*(inst)++ = (unsigned char)(opc);	\
		x86_reg_emit ((inst), (dreg), (reg));	\
	} while (0)
+
+
/* movups xmm, [basereg+disp]: load 16 bytes, no alignment requirement (0F 10 /r) */
#define x86_movups_reg_membase(inst,sreg,basereg,disp) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x10; \
		x86_membase_emit ((inst), (sreg), (basereg), (disp)); \
	} while (0)

/* movups [basereg+disp], xmm: store 16 bytes, no alignment requirement (0F 11 /r) */
#define x86_movups_membase_reg(inst,basereg,disp,reg) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x11; \
		x86_membase_emit ((inst), (reg), (basereg), (disp)); \
	} while (0)
+
/* movaps xmm, [basereg+disp]: aligned 16-byte load (0F 28 /r); address must be 16-byte aligned */
#define x86_movaps_reg_membase(inst,sreg,basereg,disp) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x28; \
		x86_membase_emit ((inst), (sreg), (basereg), (disp)); \
	} while (0)

/* movaps [basereg+disp], xmm: aligned 16-byte store (0F 29 /r) */
#define x86_movaps_membase_reg(inst,basereg,disp,reg) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x29; \
		x86_membase_emit ((inst), (reg), (basereg), (disp)); \
	} while (0)

/* movaps xmm, xmm: register-to-register copy (0F 28 /r) */
#define x86_movaps_reg_reg(inst,dreg,sreg) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x28; \
		x86_reg_emit ((inst), (dreg), (sreg)); \
	} while (0)
+
+
/* movd r32, xmm: move low dword of an XMM register to a GP register (66 0F 7E /r);
 * note the xmm source goes in the ModR/M reg field, hence the swapped x86_reg_emit order */
#define x86_movd_reg_xreg(inst,dreg,sreg) \
	do { \
		*(inst)++ = (unsigned char)0x66; \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x7e; \
		x86_reg_emit ((inst), (sreg), (dreg)); \
	} while (0)

/* movd xmm, r32: move a GP register into the low dword of an XMM register (66 0F 6E /r) */
#define x86_movd_xreg_reg(inst,dreg,sreg) \
	do { \
		*(inst)++ = (unsigned char)0x66; \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x6e; \
		x86_reg_emit ((inst), (dreg), (sreg)); \
	} while (0)
+
/*
 * pshufd xmm1, xmm2, imm8: shuffle dwords by the 2-bit fields of mask
 * (66 0F 70 /r ib).
 * Fix: (mask) is now parenthesized — the bare "(unsigned char)mask" bound
 * the cast to only part of an expression argument (macro-hygiene bug).
 */
#define x86_pshufd_reg_reg(inst,dreg,sreg,mask) \
	do { \
		*(inst)++ = (unsigned char)0x66; \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x70; \
		x86_reg_emit ((inst), (dreg), (sreg)); \
		*(inst)++ = (unsigned char)(mask); \
	} while (0)
+
/*
 * pshufhw/pshuflw xmm1, xmm2, imm8: shuffle the high (F3 0F 70) or low
 * (F2 0F 70) four words, selected by high_words.
 * Fixes: the cast is applied to the whole ternary result rather than to
 * its condition (the old form happened to work but read wrongly), and
 * (mask) is parenthesized for macro hygiene.
 */
#define x86_pshufw_reg_reg(inst,dreg,sreg,mask,high_words) \
	do { \
		*(inst)++ = (unsigned char)((high_words) ? 0xF3 : 0xF2); \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x70; \
		x86_reg_emit ((inst), (dreg), (sreg)); \
		*(inst)++ = (unsigned char)(mask); \
	} while (0)
+
/*
 * Shift packed words/dwords by an immediate: 66 0F 71/72 /mode ib.
 * The shift kind (X86_SSE_SHR/SAR/SHL) is encoded in the ModR/M reg field,
 * which is why "mode" is passed in the slot pd_reg_reg uses for dreg.
 */
#define x86_sse_shift_reg_imm(inst,opc,mode, dreg,imm) \
	do { \
		x86_sse_alu_pd_reg_reg (inst, opc, mode, dreg); \
		x86_imm_emit8 ((inst), (imm)); \
	} while (0)

/* shift dreg by the count in the low quadword of sreg: 66 0F <opc> /r */
#define x86_sse_shift_reg_reg(inst,opc,dreg,sreg) \
	do { \
		x86_sse_alu_pd_reg_reg (inst, opc, dreg, sreg); \
	} while (0)
+
+
+
#endif // X86_H
+