+/* SIMD opcodes. */
+/*
+ * X-macro table of SIMD (SSE/SSE2/SSE3) opcodes for the mini JIT.
+ * The MINI_OP macro is defined by the including file; each entry is
+ * MINI_OP(opcode-id, "name", dest-reg-class, src1-reg-class, src2-reg-class)
+ * where the register classes are XREG (SIMD), IREG (int), LREG (long),
+ * FREG (float) or NONE.  (Argument meaning inferred from usage pattern —
+ * confirm against the MINI_OP definition in the including header.)
+ *
+ * Ordering of entries is load-bearing in places (see the SSE2 shift-op
+ * comment below) — do not reorder without checking consumers.
+ */
+
+#if defined(TARGET_X86) || defined(TARGET_AMD64)
+
+/* Packed single-precision (4 x r4) binary arithmetic/logic ops. */
+MINI_OP(OP_ADDPS, "addps", XREG, XREG, XREG)
+MINI_OP(OP_DIVPS, "divps", XREG, XREG, XREG)
+MINI_OP(OP_MULPS, "mulps", XREG, XREG, XREG)
+MINI_OP(OP_SUBPS, "subps", XREG, XREG, XREG)
+MINI_OP(OP_MAXPS, "maxps", XREG, XREG, XREG)
+MINI_OP(OP_MINPS, "minps", XREG, XREG, XREG)
+MINI_OP(OP_COMPPS, "compps", XREG, XREG, XREG)
+MINI_OP(OP_ANDPS, "andps", XREG, XREG, XREG)
+MINI_OP(OP_ANDNPS, "andnps", XREG, XREG, XREG)
+MINI_OP(OP_ORPS, "orps", XREG, XREG, XREG)
+MINI_OP(OP_XORPS, "xorps", XREG, XREG, XREG)
+MINI_OP(OP_HADDPS, "haddps", XREG, XREG, XREG)
+MINI_OP(OP_HSUBPS, "hsubps", XREG, XREG, XREG)
+MINI_OP(OP_ADDSUBPS, "addsubps", XREG, XREG, XREG)
+/* Duplicate even/odd float lanes (SSE3 movsldup/movshdup-style). */
+MINI_OP(OP_DUPPS_LOW, "dupps_low", XREG, XREG, NONE)
+MINI_OP(OP_DUPPS_HIGH, "dupps_high", XREG, XREG, NONE)
+
+/* Packed single-precision unary ops. */
+MINI_OP(OP_RSQRTPS, "rsqrtps", XREG, XREG, NONE)
+MINI_OP(OP_SQRTPS, "sqrtps", XREG, XREG, NONE)
+MINI_OP(OP_RCPPS, "rcpps", XREG, XREG, NONE)
+
+/* Word/dword shuffles (shuffle control presumably carried as an
+ * instruction immediate, since no second source reg is listed).
+ * NOTE(review): opcode ids use one 'f' (PSHUFLEW) while the name
+ * strings use two ("pshufflew") — inconsistent spelling, but the
+ * strings are runtime-visible data, so left untouched. */
+MINI_OP(OP_PSHUFLEW_HIGH, "pshufflew_high", XREG, XREG, NONE)
+MINI_OP(OP_PSHUFLEW_LOW, "pshufflew_low", XREG, XREG, NONE)
+MINI_OP(OP_PSHUFLED, "pshuffled", XREG, XREG, NONE)
+
+/* Packed double-precision (2 x r8) binary ops — mirrors the PS group. */
+MINI_OP(OP_ADDPD, "addpd", XREG, XREG, XREG)
+MINI_OP(OP_DIVPD, "divpd", XREG, XREG, XREG)
+MINI_OP(OP_MULPD, "mulpd", XREG, XREG, XREG)
+MINI_OP(OP_SUBPD, "subpd", XREG, XREG, XREG)
+MINI_OP(OP_MAXPD, "maxpd", XREG, XREG, XREG)
+MINI_OP(OP_MINPD, "minpd", XREG, XREG, XREG)
+MINI_OP(OP_COMPPD, "comppd", XREG, XREG, XREG)
+MINI_OP(OP_ANDPD, "andpd", XREG, XREG, XREG)
+MINI_OP(OP_ANDNPD, "andnpd", XREG, XREG, XREG)
+MINI_OP(OP_ORPD, "orpd", XREG, XREG, XREG)
+MINI_OP(OP_XORPD, "xorpd", XREG, XREG, XREG)
+MINI_OP(OP_HADDPD, "haddpd", XREG, XREG, XREG)
+MINI_OP(OP_HSUBPD, "hsubpd", XREG, XREG, XREG)
+MINI_OP(OP_ADDSUBPD, "addsubpd", XREG, XREG, XREG)
+MINI_OP(OP_DUPPD, "duppd", XREG, XREG, NONE)
+
+MINI_OP(OP_SQRTPD, "sqrtpd", XREG, XREG, NONE)
+
+/* Extract per-lane sign bits into an integer reg (pmovmskb-style). */
+MINI_OP(OP_EXTRACT_MASK, "extract_mask", IREG, XREG, NONE)
+
+/* Packed integer bitwise ops. */
+MINI_OP(OP_PAND, "pand", XREG, XREG, XREG)
+MINI_OP(OP_POR, "por", XREG, XREG, XREG)
+MINI_OP(OP_PXOR, "pxor", XREG, XREG, XREG)
+
+/* Packed integer add/sub — suffix is lane width: b(yte), w(ord),
+ * d(word), q(word). */
+MINI_OP(OP_PADDB, "paddb", XREG, XREG, XREG)
+MINI_OP(OP_PADDW, "paddw", XREG, XREG, XREG)
+MINI_OP(OP_PADDD, "paddd", XREG, XREG, XREG)
+MINI_OP(OP_PADDQ, "paddq", XREG, XREG, XREG)
+
+MINI_OP(OP_PSUBB, "psubb", XREG, XREG, XREG)
+MINI_OP(OP_PSUBW, "psubw", XREG, XREG, XREG)
+MINI_OP(OP_PSUBD, "psubd", XREG, XREG, XREG)
+MINI_OP(OP_PSUBQ, "psubq", XREG, XREG, XREG)
+
+/* Per-lane max/min, unsigned (_UN) and signed variants, and averages. */
+MINI_OP(OP_PMAXB_UN, "pmaxb_un", XREG, XREG, XREG)
+MINI_OP(OP_PMAXW_UN, "pmaxw_un", XREG, XREG, XREG)
+MINI_OP(OP_PMAXD_UN, "pmaxd_un", XREG, XREG, XREG)
+
+MINI_OP(OP_PMAXB, "pmaxb", XREG, XREG, XREG)
+MINI_OP(OP_PMAXW, "pmaxw", XREG, XREG, XREG)
+MINI_OP(OP_PMAXD, "pmaxd", XREG, XREG, XREG)
+
+MINI_OP(OP_PAVGB_UN, "pavgb_un", XREG, XREG, XREG)
+MINI_OP(OP_PAVGW_UN, "pavgw_un", XREG, XREG, XREG)
+
+MINI_OP(OP_PMINB_UN, "pminb_un", XREG, XREG, XREG)
+MINI_OP(OP_PMINW_UN, "pminw_un", XREG, XREG, XREG)
+MINI_OP(OP_PMIND_UN, "pmind_un", XREG, XREG, XREG)
+
+MINI_OP(OP_PMINB, "pminb", XREG, XREG, XREG)
+MINI_OP(OP_PMINW, "pminw", XREG, XREG, XREG)
+MINI_OP(OP_PMIND, "pmind", XREG, XREG, XREG)
+
+/* Per-lane compares: equal and signed greater-than. */
+MINI_OP(OP_PCMPEQB, "pcmpeqb", XREG, XREG, XREG)
+MINI_OP(OP_PCMPEQW, "pcmpeqw", XREG, XREG, XREG)
+MINI_OP(OP_PCMPEQD, "pcmpeqd", XREG, XREG, XREG)
+MINI_OP(OP_PCMPEQQ, "pcmpeqq", XREG, XREG, XREG)
+
+MINI_OP(OP_PCMPGTB, "pcmpgtb", XREG, XREG, XREG)
+MINI_OP(OP_PCMPGTW, "pcmpgtw", XREG, XREG, XREG)
+MINI_OP(OP_PCMPGTD, "pcmpgtd", XREG, XREG, XREG)
+MINI_OP(OP_PCMPGTQ, "pcmpgtq", XREG, XREG, XREG)
+
+/* Sum of absolute byte differences (psadbw-style). */
+MINI_OP(OP_PSUM_ABS_DIFF, "psumabsdiff", XREG, XREG, XREG)
+
+/* Interleave low/high halves of the two sources (punpckl*/punpckh*-style). */
+MINI_OP(OP_UNPACK_LOWB, "unpack_lowb", XREG, XREG, XREG)
+MINI_OP(OP_UNPACK_LOWW, "unpack_loww", XREG, XREG, XREG)
+MINI_OP(OP_UNPACK_LOWD, "unpack_lowd", XREG, XREG, XREG)
+MINI_OP(OP_UNPACK_LOWQ, "unpack_lowq", XREG, XREG, XREG)
+MINI_OP(OP_UNPACK_LOWPS, "unpack_lowps", XREG, XREG, XREG)
+MINI_OP(OP_UNPACK_LOWPD, "unpack_lowpd", XREG, XREG, XREG)
+
+MINI_OP(OP_UNPACK_HIGHB, "unpack_highb", XREG, XREG, XREG)
+MINI_OP(OP_UNPACK_HIGHW, "unpack_highw", XREG, XREG, XREG)
+MINI_OP(OP_UNPACK_HIGHD, "unpack_highd", XREG, XREG, XREG)
+MINI_OP(OP_UNPACK_HIGHQ, "unpack_highq", XREG, XREG, XREG)
+MINI_OP(OP_UNPACK_HIGHPS, "unpack_highps", XREG, XREG, XREG)
+MINI_OP(OP_UNPACK_HIGHPD, "unpack_highpd", XREG, XREG, XREG)
+
+/* Narrowing packs, signed and unsigned (_UN) saturation. */
+MINI_OP(OP_PACKW, "packw", XREG, XREG, XREG)
+MINI_OP(OP_PACKD, "packd", XREG, XREG, XREG)
+
+MINI_OP(OP_PACKW_UN, "packw_un", XREG, XREG, XREG)
+MINI_OP(OP_PACKD_UN, "packd_un", XREG, XREG, XREG)
+
+/* Saturating adds/subs, signed and unsigned (_UN). */
+MINI_OP(OP_PADDB_SAT, "paddb_sat", XREG, XREG, XREG)
+MINI_OP(OP_PADDB_SAT_UN, "paddb_sat_un", XREG, XREG, XREG)
+
+MINI_OP(OP_PADDW_SAT, "paddw_sat", XREG, XREG, XREG)
+MINI_OP(OP_PADDW_SAT_UN, "paddw_sat_un", XREG, XREG, XREG)
+
+MINI_OP(OP_PSUBB_SAT, "psubb_sat", XREG, XREG, XREG)
+MINI_OP(OP_PSUBB_SAT_UN, "psubb_sat_un", XREG, XREG, XREG)
+
+MINI_OP(OP_PSUBW_SAT, "psubw_sat", XREG, XREG, XREG)
+MINI_OP(OP_PSUBW_SAT_UN, "psubw_sat_un", XREG, XREG, XREG)
+
+/* Packed multiplies (low result), and high-half word multiplies below.
+ * NOTE(review): the _HIGH name strings drop the 'w' ("pmul_high") while
+ * the opcode ids keep it (PMULW_HIGH) — inconsistent, but the strings
+ * are runtime data, so left untouched. */
+MINI_OP(OP_PMULW, "pmulw", XREG, XREG, XREG)
+MINI_OP(OP_PMULD, "pmuld", XREG, XREG, XREG)
+MINI_OP(OP_PMULQ, "pmulq", XREG, XREG, XREG)
+
+MINI_OP(OP_PMULW_HIGH_UN, "pmul_high_un", XREG, XREG, XREG)
+MINI_OP(OP_PMULW_HIGH, "pmul_high", XREG, XREG, XREG)
+
+/*SSE2 Shift ops must have the _reg version right after as code depends on this ordering.*/
+/* Immediate-count shift (src2 NONE) immediately followed by its
+ * register-count _REG twin — keep each pair adjacent and in this order. */
+MINI_OP(OP_PSHRW, "pshrw", XREG, XREG, NONE)
+MINI_OP(OP_PSHRW_REG, "pshrw_reg", XREG, XREG, XREG)
+
+MINI_OP(OP_PSARW, "psarw", XREG, XREG, NONE)
+MINI_OP(OP_PSARW_REG, "psarw_reg", XREG, XREG, XREG)
+
+MINI_OP(OP_PSHLW, "pshlw", XREG, XREG, NONE)
+MINI_OP(OP_PSHLW_REG, "pshlw_reg", XREG, XREG, XREG)
+
+MINI_OP(OP_PSHRD, "pshrd", XREG, XREG, NONE)
+MINI_OP(OP_PSHRD_REG, "pshrd_reg", XREG, XREG, XREG)
+
+MINI_OP(OP_PSHRQ, "pshrq", XREG, XREG, NONE)
+MINI_OP(OP_PSHRQ_REG, "pshrq_reg", XREG, XREG, XREG)
+
+MINI_OP(OP_PSARD, "psard", XREG, XREG, NONE)
+MINI_OP(OP_PSARD_REG, "psard_reg", XREG, XREG, XREG)
+
+MINI_OP(OP_PSHLD, "pshld", XREG, XREG, NONE)
+MINI_OP(OP_PSHLD_REG, "pshld_reg", XREG, XREG, XREG)
+
+MINI_OP(OP_PSHLQ, "pshlq", XREG, XREG, NONE)
+MINI_OP(OP_PSHLQ_REG, "pshlq_reg", XREG, XREG, XREG)
+
+/* Scalar lane extraction from a SIMD reg (lane index presumably an
+ * instruction immediate — no src2 listed). */
+MINI_OP(OP_EXTRACT_I4, "extract_i4", IREG, XREG, NONE)
+/* Reinterpret (bit-copy, no numeric conversion) int -> r8, per "raw". */
+MINI_OP(OP_ICONV_TO_R8_RAW, "iconv_to_r8_raw", FREG, IREG, NONE)
+
+MINI_OP(OP_EXTRACT_I2, "extract_i2", IREG, XREG, NONE)
+MINI_OP(OP_EXTRACT_U2, "extract_u2", IREG, XREG, NONE)
+MINI_OP(OP_EXTRACT_I1, "extract_i1", IREG, XREG, NONE)
+MINI_OP(OP_EXTRACT_U1, "extract_u1", IREG, XREG, NONE)
+MINI_OP(OP_EXTRACT_R8, "extract_r8", FREG, XREG, NONE)
+MINI_OP(OP_EXTRACT_I8, "extract_i8", LREG, XREG, NONE)
+
+/* Fast 2-byte lane insert (pinsrw-style); everything below emulates
+ * other widths on top of it. */
+MINI_OP(OP_INSERT_I2, "insert_i2", XREG, XREG, IREG)
+
+MINI_OP(OP_EXTRACTX_U2, "extractx_u2", IREG, XREG, NONE)
+
+/*these slow ops are modeled around the availability of a fast 2 bytes insert op*/
+/*insertx_u1_slow takes old value and new value as source regs */
+MINI_OP(OP_INSERTX_U1_SLOW, "insertx_u1_slow", XREG, IREG, IREG)
+/*insertx_i4_slow takes target xreg and new value as source regs */
+MINI_OP(OP_INSERTX_I4_SLOW, "insertx_i4_slow", XREG, XREG, IREG)
+
+/* NOTE(review): r4 insert sources from FREG — presumably r4 values are
+ * held in FREG like r8 on this backend; confirm against register
+ * allocator conventions. */
+MINI_OP(OP_INSERTX_R4_SLOW, "insertx_r4_slow", XREG, XREG, FREG)
+MINI_OP(OP_INSERTX_R8_SLOW, "insertx_r8_slow", XREG, XREG, FREG)
+MINI_OP(OP_INSERTX_I8_SLOW, "insertx_i8_slow", XREG, XREG, LREG)
+
+/* Moves/conversions between scalar regs and SIMD regs. */
+MINI_OP(OP_FCONV_TO_R8_X, "fconv_to_r8_x", XREG, FREG, NONE)
+MINI_OP(OP_XCONV_R8_TO_I4, "xconv_r8_to_i4", IREG, XREG, NONE)
+MINI_OP(OP_ICONV_TO_X, "iconv_to_x", XREG, IREG, NONE)
+
+/* Broadcast a scalar into every lane of a SIMD reg. */
+MINI_OP(OP_EXPAND_I1, "expand_i1", XREG, IREG, NONE)
+MINI_OP(OP_EXPAND_I2, "expand_i2", XREG, IREG, NONE)
+MINI_OP(OP_EXPAND_I4, "expand_i4", XREG, IREG, NONE)
+MINI_OP(OP_EXPAND_R4, "expand_r4", XREG, FREG, NONE)
+/* NOTE(review): expand_i8 sources from IREG (not LREG) — looks
+ * intentional for the 32/64-bit split, but verify on TARGET_X86. */
+MINI_OP(OP_EXPAND_I8, "expand_i8", XREG, IREG, NONE)
+MINI_OP(OP_EXPAND_R8, "expand_r8", XREG, FREG, NONE)
+
+/* Cache prefetch hint from a base-register address; no result. */
+MINI_OP(OP_PREFETCH_MEMBASE, "prefetch_membase", NONE, IREG, NONE)
+
+#endif
+
+/* Target-independent SIMD-reg pseudo-ops (available on all targets). */
+MINI_OP(OP_XMOVE, "xmove", XREG, XREG, NONE)
+MINI_OP(OP_XZERO, "xzero", XREG, NONE, NONE)
+/* SSA phi node for SIMD registers. */
+MINI_OP(OP_XPHI, "xphi", XREG, NONE, NONE)
+