/*
 * amd64-codegen.h: Macros for generating amd64 code
 *
 * Authors:
 *   Paolo Molaro (lupus@ximian.com)
 *   Intel Corporation (ORP Project)
 *   Sergey Chaban (serge@wildwestsoftware.com)
 *   Dietmar Maurer (dietmar@ximian.com)
 *
 * Copyright (C) 2000 Intel Corporation.  All rights reserved.
 * Copyright (C) 2001, 2002 Ximian, Inc.
 */
typedef enum
{
	AMD64_REX_B = 1, /* The register in r/m field, base register in SIB byte, or reg in opcode is 8-15 rather than 0-7 */
	AMD64_REX_X = 2, /* The index register in SIB byte is 8-15 rather than 0-7 */
	AMD64_REX_R = 4, /* The reg field of ModRM byte is 8-15 rather than 0-7 */
	AMD64_REX_W = 8  /* Operation is 64-bit instead of 32-bit (default) or 16-bit (with 0x66 prefix) */
} AMD64_REX_Bits;
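/*
 * Illustrative example (not part of the original header): the REX prefix
 * byte is 0100WRXB, so encoding "mov %rax, %r8" needs AMD64_REX_W (64-bit
 * operand) plus AMD64_REX_B (%r8 sits in the r/m field), giving
 * 0x40 | 8 | 1 = 0x49; the full instruction is 49 89 c0.
 */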
#if defined(__default_codegen__)

#define amd64_codegen_pre(inst)
#define amd64_codegen_post(inst)

#elif defined(__native_client_codegen__)

#define amd64_codegen_pre(inst) guint8* _codegen_start = (inst); amd64_nacl_instruction_pre();
#define amd64_codegen_post(inst) (amd64_nacl_instruction_post(&_codegen_start, &(inst)), _codegen_start);
/* Because of REX prefixes, etc., call sequences are not constant size.  */
/* These pre- and post-sequence hooks remedy this by aligning the call   */
/* sequence after we emit it, since we will know the exact size then.    */
#define amd64_call_sequence_pre(inst) guint8* _code_start = (inst);
#define amd64_call_sequence_post(inst) \
	(mono_nacl_align_call(&_code_start, &(inst)), _code_start);
/* Native Client can load/store using one of the following registers     */
/* as a base: rip, r15, rbp, rsp. Any other base register needs to have  */
/* its upper 32 bits cleared and reference memory using r15 as the base. */
#define amd64_is_valid_nacl_base(reg) \
	((reg) == AMD64_RIP || (reg) == AMD64_R15 || \
	 (reg) == AMD64_RBP || (reg) == AMD64_RSP)

#endif /*__native_client_codegen__*/
#ifdef TARGET_WIN32
#define AMD64_ARG_REG1 AMD64_RCX
#define AMD64_ARG_REG2 AMD64_RDX
#define AMD64_ARG_REG3 AMD64_R8
#define AMD64_ARG_REG4 AMD64_R9
#else
#define AMD64_ARG_REG1 AMD64_RDI
#define AMD64_ARG_REG2 AMD64_RSI
#define AMD64_ARG_REG3 AMD64_RDX
#define AMD64_ARG_REG4 AMD64_RCX
#endif
#ifdef TARGET_WIN32
#define AMD64_CALLEE_REGS ((1<<AMD64_RAX) | (1<<AMD64_RCX) | (1<<AMD64_RDX) | (1<<AMD64_R8) | (1<<AMD64_R9) | (1<<AMD64_R10))
#define AMD64_IS_CALLEE_REG(reg) (AMD64_CALLEE_REGS & (1 << (reg)))

#define AMD64_ARGUMENT_REGS ((1<<AMD64_RDX) | (1<<AMD64_RCX) | (1<<AMD64_R8) | (1<<AMD64_R9))
#define AMD64_IS_ARGUMENT_REG(reg) (AMD64_ARGUMENT_REGS & (1 << (reg)))

#define AMD64_CALLEE_SAVED_REGS ((1<<AMD64_RDI) | (1<<AMD64_RSI) | (1<<AMD64_RBX) | (1<<AMD64_R12) | (1<<AMD64_R13) | (1<<AMD64_R14) | (1<<AMD64_R15) | (1<<AMD64_RBP))
#define AMD64_IS_CALLEE_SAVED_REG(reg) (AMD64_CALLEE_SAVED_REGS & (1 << (reg)))
#elif defined(__native_client_codegen__)
/* AMD64 Native Client code may not write R15 */
#define AMD64_CALLEE_REGS ((1<<AMD64_RAX) | (1<<AMD64_RCX) | (1<<AMD64_RDX) | (1<<AMD64_RSI) | (1<<AMD64_RDI) | (1<<AMD64_R8) | (1<<AMD64_R9) | (1<<AMD64_R10))
#define AMD64_IS_CALLEE_REG(reg) (AMD64_CALLEE_REGS & (1 << (reg)))

#define AMD64_ARGUMENT_REGS ((1<<AMD64_RDI) | (1<<AMD64_RSI) | (1<<AMD64_RDX) | (1<<AMD64_RCX) | (1<<AMD64_R8) | (1<<AMD64_R9))
#define AMD64_IS_ARGUMENT_REG(reg) (AMD64_ARGUMENT_REGS & (1 << (reg)))

#define AMD64_CALLEE_SAVED_REGS ((1<<AMD64_RBX) | (1<<AMD64_R12) | (1<<AMD64_R13) | (1<<AMD64_R14) | (1<<AMD64_RBP))
#define AMD64_IS_CALLEE_SAVED_REG(reg) (AMD64_CALLEE_SAVED_REGS & (1 << (reg)))
#else
#define AMD64_CALLEE_REGS ((1<<AMD64_RAX) | (1<<AMD64_RCX) | (1<<AMD64_RDX) | (1<<AMD64_RSI) | (1<<AMD64_RDI) | (1<<AMD64_R8) | (1<<AMD64_R9) | (1<<AMD64_R10))
#define AMD64_IS_CALLEE_REG(reg) (AMD64_CALLEE_REGS & (1 << (reg)))

#define AMD64_ARGUMENT_REGS ((1<<AMD64_RDI) | (1<<AMD64_RSI) | (1<<AMD64_RDX) | (1<<AMD64_RCX) | (1<<AMD64_R8) | (1<<AMD64_R9))
#define AMD64_IS_ARGUMENT_REG(reg) (AMD64_ARGUMENT_REGS & (1 << (reg)))

#define AMD64_CALLEE_SAVED_REGS ((1<<AMD64_RBX) | (1<<AMD64_R12) | (1<<AMD64_R13) | (1<<AMD64_R14) | (1<<AMD64_R15) | (1<<AMD64_RBP))
#define AMD64_IS_CALLEE_SAVED_REG(reg) (AMD64_CALLEE_SAVED_REGS & (1 << (reg)))
#endif
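/*
 * Usage sketch (illustrative, not from the original header): the *_REGS
 * values are plain bitmasks indexed by register number, so a JIT prologue
 * can walk them to decide which registers need saving:
 *
 *	int reg;
 *	for (reg = 0; reg < AMD64_NREG; ++reg)
 *		if (AMD64_IS_CALLEE_SAVED_REG (reg) && (used_mask & (1 << reg)))
 *			amd64_push_reg (code, reg);
 *
 * AMD64_NREG and used_mask are assumptions here: AMD64_NREG stands for the
 * register count from the register enum, and used_mask for a caller-kept
 * bitmask of registers the compiled method actually touches.
 */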
#define AMD64_REX(bits) ((unsigned char)(0x40 | (bits)))
#if defined(__default_codegen__)
#define amd64_emit_rex(inst, width, reg_modrm, reg_index, reg_rm_base_opcode) do \
	{ \
		unsigned char _amd64_rex_bits = \
			(((width) > 4) ? AMD64_REX_W : 0) | \
			(((reg_modrm) > 7) ? AMD64_REX_R : 0) | \
			(((reg_index) > 7) ? AMD64_REX_X : 0) | \
			(((reg_rm_base_opcode) > 7) ? AMD64_REX_B : 0); \
		if ((_amd64_rex_bits != 0) || (((width) == 1))) *(inst)++ = AMD64_REX(_amd64_rex_bits); \
	} while (0)
#elif defined(__native_client_codegen__)
#define amd64_emit_rex(inst, width, reg_modrm, reg_index, reg_rm_base_opcode) do \
	{ \
		unsigned char _amd64_rex_bits = \
			(((width) > 4) ? AMD64_REX_W : 0) | \
			(((reg_modrm) > 7) ? AMD64_REX_R : 0) | \
			(((reg_index) > 7) ? AMD64_REX_X : 0) | \
			(((reg_rm_base_opcode) > 7) ? AMD64_REX_B : 0); \
		amd64_nacl_tag_rex((inst)); \
		if ((_amd64_rex_bits != 0) || (((width) == 1))) *(inst)++ = AMD64_REX(_amd64_rex_bits); \
	} while (0)
#endif /*__native_client_codegen__*/

typedef union {
	guint64 val;
	unsigned char b [8];
} amd64_imm_buf;
#include "../x86/x86-codegen.h"

/* In 64-bit mode, all registers have a low byte subregister */
#undef X86_IS_BYTE_REG
#define X86_IS_BYTE_REG(reg) 1
#define amd64_modrm_mod(modrm) ((modrm) >> 6)
#define amd64_modrm_reg(modrm) (((modrm) >> 3) & 0x7)
#define amd64_modrm_rm(modrm) ((modrm) & 0x7)

#define amd64_rex_r(rex) ((((rex) >> 2) & 0x1) << 3)
#define amd64_rex_x(rex) ((((rex) >> 1) & 0x1) << 3)
#define amd64_rex_b(rex) ((((rex) >> 0) & 0x1) << 3)

#define amd64_sib_scale(sib) ((sib) >> 6)
#define amd64_sib_index(sib) (((sib) >> 3) & 0x7)
#define amd64_sib_base(sib) ((sib) & 0x7)

#define amd64_is_imm32(val) ((gint64)val >= -((gint64)1<<31) && (gint64)val <= (((gint64)1<<31)-1))
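/*
 * Illustrative note (not part of the original header): amd64_is_imm32
 * checks that a 64-bit value survives the sign-extension every imm32
 * gets on amd64, e.g. it accepts 0x7fffffff and -0x80000000 but rejects
 * 0x80000000, which would sign-extend to 0xffffffff80000000.
 */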
#define x86_imm_emit64(inst,imm) \
	do { \
		amd64_imm_buf imb; \
		imb.val = (guint64) (imm); \
		*(inst)++ = imb.b [0]; \
		*(inst)++ = imb.b [1]; \
		*(inst)++ = imb.b [2]; \
		*(inst)++ = imb.b [3]; \
		*(inst)++ = imb.b [4]; \
		*(inst)++ = imb.b [5]; \
		*(inst)++ = imb.b [6]; \
		*(inst)++ = imb.b [7]; \
	} while (0)
#define amd64_membase_emit(inst,reg,basereg,disp) do { \
	if ((basereg) == AMD64_RIP) { \
		x86_address_byte ((inst), 0, (reg)&0x7, 5); \
		x86_imm_emit32 ((inst), (disp)); \
	} \
	else { \
		x86_membase_emit ((inst),(reg)&0x7, (basereg)&0x7, (disp)); \
	} \
} while (0)
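/*
 * Illustrative example (not part of the original header): mod=00 with
 * rm=101 is the RIP-relative form in 64-bit mode, so
 *
 *	amd64_membase_emit (code, AMD64_RAX, AMD64_RIP, 16);
 *
 * emits 05 10 00 00 00, i.e. a ModRM byte plus a 32-bit displacement
 * addressing 16 bytes past the end of the instruction.
 */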
#define amd64_alu_reg_imm_size_body(inst,opc,reg,imm,size) \
	do { \
		if (x86_is_imm8((imm))) { \
			amd64_emit_rex(inst, size, 0, 0, (reg)); \
			*(inst)++ = (unsigned char)0x83; \
			x86_reg_emit ((inst), (opc), (reg)); \
			x86_imm_emit8 ((inst), (imm)); \
		} else if ((reg) == AMD64_RAX) { \
			amd64_emit_rex(inst, size, 0, 0, 0); \
			*(inst)++ = (((unsigned char)(opc)) << 3) + 5; \
			x86_imm_emit32 ((inst), (imm)); \
		} else { \
			amd64_emit_rex(inst, size, 0, 0, (reg)); \
			*(inst)++ = (unsigned char)0x81; \
			x86_reg_emit ((inst), (opc), (reg)); \
			x86_imm_emit32 ((inst), (imm)); \
		} \
	} while (0)
#define amd64_alu_reg_reg_size_body(inst,opc,dreg,reg,size) \
	do { \
		amd64_emit_rex(inst, size, (dreg), 0, (reg)); \
		*(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
		x86_reg_emit ((inst), (dreg), (reg)); \
	} while (0)
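/*
 * Illustrative note (not part of the original header): the basic ALU
 * opcodes are laid out in groups of 8, which is why the bodies compute
 * (opc << 3) + 3 (reg,reg form) or (opc << 3) + 5 (RAX,imm32 form).
 * With X86_SUB == 5 from x86-codegen.h:
 *
 *	amd64_alu_reg_reg_size_body (code, X86_SUB, AMD64_RAX, AMD64_RCX, 8);
 *	// 48 2b c1		sub %rcx, %rax
 */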
#if defined(__default_codegen__)

#define amd64_alu_reg_imm_size(inst,opc,reg,imm,size) \
	amd64_alu_reg_imm_size_body((inst), (opc), (reg), (imm), (size))

#define amd64_alu_reg_reg_size(inst,opc,dreg,reg,size) \
	amd64_alu_reg_reg_size_body((inst), (opc), (dreg), (reg), (size))
#elif defined(__native_client_codegen__)
/* NaCl modules may not directly update RSP or RBP other than direct copies */
/* between them. Instead the lower 4 bytes are updated and then added to R15. */
#define amd64_is_nacl_stack_reg(reg) (((reg) == AMD64_RSP) || ((reg) == AMD64_RBP))

#define amd64_alu_reg_imm_size(inst,opc,reg,imm,size) \
	do { \
		amd64_codegen_pre(inst); \
		if (amd64_is_nacl_stack_reg(reg)) { \
			if (((opc) != X86_ADD) && ((opc) != X86_SUB)) \
				g_assert_not_reached(); \
			amd64_alu_reg_imm_size_body((inst), (opc), (reg), (imm), 4); \
			/* Use LEA instead of ADD to preserve flags */ \
			amd64_lea_memindex_size((inst), (reg), (reg), 0, AMD64_R15, 0, 8); \
		} else { \
			amd64_alu_reg_imm_size_body((inst), (opc), (reg), (imm), (size)); \
		} \
		amd64_codegen_post(inst); \
	} while (0)

#define amd64_alu_reg_reg_size(inst,opc,dreg,reg,size) \
	do { \
		amd64_codegen_pre(inst); \
		if (amd64_is_nacl_stack_reg((dreg)) && ((reg) != AMD64_R15)) { \
			if (((opc) != X86_ADD && (opc) != X86_SUB)) \
				g_assert_not_reached(); \
			amd64_alu_reg_reg_size_body((inst), (opc), (dreg), (reg), 4); \
			/* Use LEA instead of ADD to preserve flags */ \
			amd64_lea_memindex_size((inst), (dreg), (dreg), 0, AMD64_R15, 0, 8); \
		} else { \
			amd64_alu_reg_reg_size_body((inst), (opc), (dreg), (reg), (size)); \
		} \
		amd64_codegen_post(inst); \
	} while (0)

#endif /*__native_client_codegen__*/
#define amd64_alu_reg_imm(inst,opc,reg,imm) amd64_alu_reg_imm_size((inst),(opc),(reg),(imm),8)

#define amd64_alu_reg_reg(inst,opc,dreg,reg) amd64_alu_reg_reg_size ((inst),(opc),(dreg),(reg),8)
#define amd64_alu_reg_membase_size(inst,opc,reg,basereg,disp,size) \
	do { \
		amd64_codegen_pre(inst); \
		amd64_emit_rex ((inst),(size),(reg),0,(basereg)); \
		*(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
		amd64_membase_emit (inst, reg, basereg, disp); \
		amd64_codegen_post(inst); \
	} while (0)
#define amd64_mov_regp_reg(inst,regp,reg,size) \
	do { \
		amd64_codegen_pre(inst); \
		if ((size) == 2) \
			x86_prefix((inst), X86_OPERAND_PREFIX); \
		amd64_emit_rex(inst, (size), (reg), 0, (regp)); \
		switch ((size)) { \
		case 1: *(inst)++ = (unsigned char)0x88; break; \
		case 2: case 4: case 8: *(inst)++ = (unsigned char)0x89; break; \
		default: assert (0); \
		} \
		x86_regp_emit ((inst), (reg), (regp)); \
		amd64_codegen_post(inst); \
	} while (0)
#define amd64_mov_membase_reg(inst,basereg,disp,reg,size) \
	do { \
		amd64_codegen_pre(inst); \
		if ((size) == 2) \
			x86_prefix((inst), X86_OPERAND_PREFIX); \
		amd64_emit_rex(inst, (size), (reg), 0, (basereg)); \
		switch ((size)) { \
		case 1: *(inst)++ = (unsigned char)0x88; break; \
		case 2: case 4: case 8: *(inst)++ = (unsigned char)0x89; break; \
		default: assert (0); \
		} \
		x86_membase_emit ((inst), ((reg)&0x7), ((basereg)&0x7), (disp)); \
		amd64_codegen_post(inst); \
	} while (0)
#define amd64_mov_mem_reg(inst,mem,reg,size) \
	do { \
		amd64_codegen_pre(inst); \
		if ((size) == 2) \
			x86_prefix((inst), X86_OPERAND_PREFIX); \
		amd64_emit_rex(inst, (size), (reg), 0, 0); \
		switch ((size)) { \
		case 1: *(inst)++ = (unsigned char)0x88; break; \
		case 2: case 4: case 8: *(inst)++ = (unsigned char)0x89; break; \
		default: assert (0); \
		} \
		x86_address_byte ((inst), 0, (reg), 4); \
		x86_address_byte ((inst), 0, 4, 5); \
		x86_imm_emit32 ((inst), (mem)); \
		amd64_codegen_post(inst); \
	} while (0)
#define amd64_mov_reg_reg(inst,dreg,reg,size) \
	do { \
		amd64_codegen_pre(inst); \
		if ((size) == 2) \
			x86_prefix((inst), X86_OPERAND_PREFIX); \
		amd64_emit_rex(inst, (size), (dreg), 0, (reg)); \
		switch ((size)) { \
		case 1: *(inst)++ = (unsigned char)0x8a; break; \
		case 2: case 4: case 8: *(inst)++ = (unsigned char)0x8b; break; \
		default: assert (0); \
		} \
		x86_reg_emit ((inst), (dreg), (reg)); \
		amd64_codegen_post(inst); \
	} while (0)
#define amd64_mov_reg_mem_body(inst,reg,mem,size) \
	do { \
		amd64_codegen_pre(inst); \
		if ((size) == 2) \
			x86_prefix((inst), X86_OPERAND_PREFIX); \
		amd64_emit_rex(inst, (size), (reg), 0, 0); \
		switch ((size)) { \
		case 1: *(inst)++ = (unsigned char)0x8a; break; \
		case 2: case 4: case 8: *(inst)++ = (unsigned char)0x8b; break; \
		default: assert (0); \
		} \
		x86_address_byte ((inst), 0, (reg), 4); \
		x86_address_byte ((inst), 0, 4, 5); \
		x86_imm_emit32 ((inst), (mem)); \
		amd64_codegen_post(inst); \
	} while (0)
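/*
 * Illustrative note (not part of the original header): the two address
 * bytes above encode the absolute-address form. mod=00 rm=100 selects a
 * SIB byte, and a SIB byte with index=100 base=101 means "disp32 with no
 * base and no index", so e.g.
 *
 *	amd64_mov_reg_mem_body (code, AMD64_RAX, 0x1000, 4);
 *	// 8b 04 25 00 10 00 00		mov 0x1000, %eax
 */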
#if defined(__default_codegen__)
#define amd64_mov_reg_mem(inst,reg,mem,size) \
	do { \
		amd64_mov_reg_mem_body((inst),(reg),(mem),(size)); \
	} while (0)
#elif defined(__native_client_codegen__)
/* We have to re-base memory reads because memory isn't zero based. */
#define amd64_mov_reg_mem(inst,reg,mem,size) \
	do { \
		amd64_mov_reg_membase((inst),(reg),AMD64_R15,(mem),(size)); \
	} while (0)
#endif /* __native_client_codegen__ */
#define amd64_mov_reg_membase_body(inst,reg,basereg,disp,size) \
	do { \
		if ((size) == 2) \
			x86_prefix((inst), X86_OPERAND_PREFIX); \
		amd64_emit_rex(inst, (size), (reg), 0, (basereg)); \
		switch ((size)) { \
		case 1: *(inst)++ = (unsigned char)0x8a; break; \
		case 2: case 4: case 8: *(inst)++ = (unsigned char)0x8b; break; \
		default: assert (0); \
		} \
		amd64_membase_emit ((inst), (reg), (basereg), (disp)); \
	} while (0)

#define amd64_mov_reg_memindex_size_body(inst,reg,basereg,disp,indexreg,shift,size) \
	do { \
		amd64_emit_rex ((inst),(size),(reg),(indexreg),(basereg)); \
		x86_mov_reg_memindex((inst),((reg)&0x7),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),(size) == 8 ? 4 : (size)); \
	} while (0)
#if defined(__default_codegen__)

#define amd64_mov_reg_memindex_size(inst,reg,basereg,disp,indexreg,shift,size) \
	amd64_mov_reg_memindex_size_body((inst),(reg),(basereg),(disp),(indexreg),(shift),(size))
#define amd64_mov_reg_membase(inst,reg,basereg,disp,size) \
	do { \
		amd64_mov_reg_membase_body((inst), (reg), (basereg), (disp), (size)); \
	} while (0)
#elif defined(__native_client_codegen__)

#define amd64_mov_reg_memindex_size(inst,reg,basereg,disp,indexreg,shift,size) \
	do { \
		amd64_codegen_pre(inst); \
		if (amd64_is_nacl_stack_reg((reg))) { \
			/* Clear upper 32 bits with mov of size 4 */ \
			amd64_mov_reg_memindex_size_body((inst), (reg), (basereg), (disp), (indexreg), (shift), 4); \
			/* Add %r15 using LEA to preserve flags */ \
			amd64_lea_memindex_size((inst), (reg), (reg), 0, AMD64_R15, 0, 8); \
		} else { \
			amd64_mov_reg_memindex_size_body((inst), (reg), (basereg), (disp), (indexreg), (shift), (size)); \
		} \
		amd64_codegen_post(inst); \
	} while (0)

#define amd64_mov_reg_membase(inst,reg,basereg,disp,size) \
	do { \
		amd64_codegen_pre(inst); \
		if (amd64_is_nacl_stack_reg((reg))) { \
			/* Clear upper 32 bits with mov of size 4 */ \
			amd64_mov_reg_membase_body((inst), (reg), (basereg), (disp), 4); \
			/* Add %r15 using LEA to preserve flags */ \
			amd64_lea_memindex_size((inst), (reg), (reg), 0, AMD64_R15, 0, 8); \
		} else { \
			amd64_mov_reg_membase_body((inst), (reg), (basereg), (disp), (size)); \
		} \
		amd64_codegen_post(inst); \
	} while (0)

#endif /*__native_client_codegen__*/
#define amd64_movzx_reg_membase(inst,reg,basereg,disp,size) \
	do { \
		amd64_codegen_pre(inst); \
		amd64_emit_rex(inst, (size), (reg), 0, (basereg)); \
		switch ((size)) { \
		case 1: *(inst)++ = (unsigned char)0x0f; *(inst)++ = (unsigned char)0xb6; break; \
		case 2: *(inst)++ = (unsigned char)0x0f; *(inst)++ = (unsigned char)0xb7; break; \
		case 4: case 8: *(inst)++ = (unsigned char)0x8b; break; \
		default: assert (0); \
		} \
		x86_membase_emit ((inst), ((reg)&0x7), ((basereg)&0x7), (disp)); \
		amd64_codegen_post(inst); \
	} while (0)
#define amd64_movsxd_reg_mem(inst,reg,mem) \
	do { \
		amd64_codegen_pre(inst); \
		amd64_emit_rex(inst,8,(reg),0,0); \
		*(inst)++ = (unsigned char)0x63; \
		x86_mem_emit ((inst), ((reg)&0x7), (mem)); \
		amd64_codegen_post(inst); \
	} while (0)

#define amd64_movsxd_reg_membase(inst,reg,basereg,disp) \
	do { \
		amd64_codegen_pre(inst); \
		amd64_emit_rex(inst,8,(reg),0,(basereg)); \
		*(inst)++ = (unsigned char)0x63; \
		x86_membase_emit ((inst), ((reg)&0x7), ((basereg)&0x7), (disp)); \
		amd64_codegen_post(inst); \
	} while (0)

#define amd64_movsxd_reg_reg(inst,dreg,reg) \
	do { \
		amd64_codegen_pre(inst); \
		amd64_emit_rex(inst,8,(dreg),0,(reg)); \
		*(inst)++ = (unsigned char)0x63; \
		x86_reg_emit ((inst), (dreg), (reg)); \
		amd64_codegen_post(inst); \
	} while (0)
/* Pretty much the only instruction that supports a 64-bit immediate. Optimize for the common case of
 * a 32-bit immediate. Pepper with casts to avoid warnings.
 */
#define amd64_mov_reg_imm_size(inst,reg,imm,size) \
	do { \
		amd64_codegen_pre(inst); \
		amd64_emit_rex(inst, (size), 0, 0, (reg)); \
		*(inst)++ = (unsigned char)0xb8 + ((reg) & 0x7); \
		if ((size) == 8) \
			x86_imm_emit64 ((inst), (guint64)(imm)); \
		else \
			x86_imm_emit32 ((inst), (int)(guint64)(imm)); \
		amd64_codegen_post(inst); \
	} while (0)
#define amd64_mov_reg_imm(inst,reg,imm) \
	do { \
		int _amd64_width_temp = ((guint64)(imm) == (guint64)(int)(guint64)(imm)); \
		amd64_codegen_pre(inst); \
		amd64_mov_reg_imm_size ((inst), (reg), (imm), (_amd64_width_temp ? 4 : 8)); \
		amd64_codegen_post(inst); \
	} while (0)
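/*
 * Illustrative example (not part of the original header): the width test
 * above compares the immediate with its own value sign-extended from 32
 * bits, so values that round-trip get the short encoding and the rest
 * get the 10-byte movabs form:
 *
 *	amd64_mov_reg_imm (code, AMD64_RAX, 42);
 *	// b8 2a 00 00 00			mov $42, %eax (zero-extends)
 *	amd64_mov_reg_imm (code, AMD64_RAX, 0x100000000ULL);
 *	// 48 b8 00 00 00 00 01 00 00 00	movabs $0x100000000, %rax
 */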
#define amd64_set_reg_template(inst,reg) amd64_mov_reg_imm_size ((inst),(reg), 0, 8)

#define amd64_set_template(inst,reg) amd64_set_reg_template((inst),(reg))
#define amd64_mov_membase_imm(inst,basereg,disp,imm,size) \
	do { \
		amd64_codegen_pre(inst); \
		if ((size) == 2) \
			x86_prefix((inst), X86_OPERAND_PREFIX); \
		amd64_emit_rex(inst, (size) == 1 ? 0 : (size), 0, 0, (basereg)); \
		if ((size) == 1) { \
			*(inst)++ = (unsigned char)0xc6; \
			x86_membase_emit ((inst), 0, (basereg) & 0x7, (disp)); \
			x86_imm_emit8 ((inst), (imm)); \
		} else if ((size) == 2) { \
			*(inst)++ = (unsigned char)0xc7; \
			x86_membase_emit ((inst), 0, (basereg) & 0x7, (disp)); \
			x86_imm_emit16 ((inst), (imm)); \
		} else { \
			*(inst)++ = (unsigned char)0xc7; \
			x86_membase_emit ((inst), 0, (basereg) & 0x7, (disp)); \
			x86_imm_emit32 ((inst), (imm)); \
		} \
		amd64_codegen_post(inst); \
	} while (0)
#define amd64_lea_membase_body(inst,reg,basereg,disp) \
	do { \
		amd64_emit_rex(inst, 8, (reg), 0, (basereg)); \
		*(inst)++ = (unsigned char)0x8d; \
		amd64_membase_emit ((inst), (reg), (basereg), (disp)); \
	} while (0)
#if defined(__default_codegen__)
#define amd64_lea_membase(inst,reg,basereg,disp) \
	amd64_lea_membase_body((inst), (reg), (basereg), (disp))
#elif defined(__native_client_codegen__)
/* NaCl modules may not write directly into RSP/RBP. Instead, use a */
/* 32-bit LEA and add R15 to the effective address. */
#define amd64_lea_membase(inst,reg,basereg,disp) \
	do { \
		amd64_codegen_pre(inst); \
		if (amd64_is_nacl_stack_reg(reg)) { \
			/* 32-bit LEA */ \
			amd64_emit_rex((inst), 4, (reg), 0, (basereg)); \
			*(inst)++ = (unsigned char)0x8d; \
			amd64_membase_emit((inst), (reg), (basereg), (disp)); \
			/* Use a 64-bit LEA instead of an ADD to preserve flags */ \
			amd64_lea_memindex_size((inst), (reg), (reg), 0, AMD64_R15, 0, 8); \
		} else { \
			amd64_lea_membase_body((inst), (reg), (basereg), (disp)); \
		} \
		amd64_codegen_post(inst); \
	} while (0)
#endif /*__native_client_codegen__*/
/* Instruction is implicitly 64-bit so don't generate a REX prefix just for the size. */
#define amd64_push_reg(inst,reg) \
	do { \
		amd64_codegen_pre(inst); \
		amd64_emit_rex(inst, 0, 0, 0, (reg)); \
		*(inst)++ = (unsigned char)0x50 + ((reg) & 0x7); \
		amd64_codegen_post(inst); \
	} while (0)
/* Instruction is implicitly 64-bit so don't generate a REX prefix just for the size. */
#define amd64_push_membase(inst,basereg,disp) \
	do { \
		amd64_codegen_pre(inst); \
		amd64_emit_rex(inst, 0, 0, 0, (basereg)); \
		*(inst)++ = (unsigned char)0xff; \
		x86_membase_emit ((inst), 6, (basereg) & 0x7, (disp)); \
		amd64_codegen_post(inst); \
	} while (0)
#define amd64_pop_reg_body(inst,reg) \
	do { \
		amd64_codegen_pre(inst); \
		amd64_emit_rex(inst, 0, 0, 0, (reg)); \
		*(inst)++ = (unsigned char)0x58 + ((reg) & 0x7); \
		amd64_codegen_post(inst); \
	} while (0)
#if defined(__default_codegen__)

#define amd64_call_reg(inst,reg) \
	do { \
		amd64_emit_rex(inst, 0, 0, 0, (reg)); \
		*(inst)++ = (unsigned char)0xff; \
		x86_reg_emit ((inst), 2, ((reg) & 0x7)); \
	} while (0)

#define amd64_ret(inst) do { *(inst)++ = (unsigned char)0xc3; } while (0)
#define amd64_leave(inst) do { *(inst)++ = (unsigned char)0xc9; } while (0)

#define amd64_pop_reg(inst,reg) amd64_pop_reg_body((inst), (reg))
#elif defined(__native_client_codegen__)

/* Size is ignored for Native Client jumps; we restrict jumping to 32 bits. */
#define amd64_jump_reg_size(inst,reg,size) \
	do { \
		amd64_codegen_pre((inst)); \
		amd64_alu_reg_imm_size((inst), X86_AND, (reg), (nacl_align_byte), 4); \
		amd64_alu_reg_reg_size((inst), X86_ADD, (reg), AMD64_R15, 8); \
		amd64_emit_rex ((inst),0,0,0,(reg)); \
		x86_jump_reg((inst),((reg)&0x7)); \
		amd64_codegen_post((inst)); \
	} while (0)
/* Size is ignored for Native Client jumps; we restrict jumping to 32 bits. */
#define amd64_jump_mem_size(inst,mem,size) \
	do { \
		amd64_codegen_pre((inst)); \
		amd64_mov_reg_mem((inst), AMD64_R11, (mem), 4); \
		amd64_jump_reg_size((inst), AMD64_R11, 4); \
		amd64_codegen_post((inst)); \
	} while (0)
#define amd64_call_reg_internal(inst,reg) \
	do { \
		amd64_codegen_pre((inst)); \
		amd64_alu_reg_imm_size((inst), X86_AND, (reg), (nacl_align_byte), 4); \
		amd64_alu_reg_reg_size((inst), X86_ADD, (reg), AMD64_R15, 8); \
		amd64_emit_rex((inst), 0, 0, 0, (reg)); \
		x86_call_reg((inst), ((reg) & 0x7)); \
		amd64_codegen_post((inst)); \
	} while (0)

#define amd64_call_reg(inst,reg) \
	do { \
		amd64_codegen_pre((inst)); \
		amd64_call_sequence_pre(inst); \
		amd64_call_reg_internal((inst), (reg)); \
		amd64_call_sequence_post(inst); \
		amd64_codegen_post((inst)); \
	} while (0)
#define amd64_ret(inst) \
	do { \
		amd64_codegen_pre(inst); \
		amd64_pop_reg_body((inst), AMD64_R11); \
		amd64_jump_reg_size((inst), AMD64_R11, 8); \
		amd64_codegen_post(inst); \
	} while (0)

#define amd64_leave(inst) \
	do { \
		amd64_codegen_pre(inst); \
		amd64_mov_reg_reg((inst), AMD64_RSP, AMD64_RBP, 8); \
		amd64_pop_reg_body((inst), AMD64_R11); \
		amd64_mov_reg_reg_size((inst), AMD64_RBP, AMD64_R11, 4); \
		amd64_alu_reg_reg_size((inst), X86_ADD, AMD64_RBP, AMD64_R15, 8); \
		amd64_codegen_post(inst); \
	} while (0)
#define amd64_pop_reg(inst,reg) \
	do { \
		amd64_codegen_pre(inst); \
		if (amd64_is_nacl_stack_reg((reg))) { \
			amd64_pop_reg_body((inst), AMD64_R11); \
			amd64_mov_reg_reg_size((inst), (reg), AMD64_R11, 4); \
			amd64_alu_reg_reg_size((inst), X86_ADD, (reg), AMD64_R15, 8); \
		} else { \
			amd64_pop_reg_body((inst), (reg)); \
		} \
		amd64_codegen_post(inst); \
	} while (0)

#endif /*__native_client_codegen__*/
#define amd64_movsd_reg_regp(inst,reg,regp) \
	do { \
		amd64_codegen_pre(inst); \
		x86_prefix((inst), 0xf2); \
		amd64_emit_rex(inst, 0, (reg), 0, (regp)); \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x10; \
		x86_regp_emit ((inst), (reg) & 0x7, (regp) & 0x7); \
		amd64_codegen_post(inst); \
	} while (0)

#define amd64_movsd_regp_reg(inst,regp,reg) \
	do { \
		amd64_codegen_pre(inst); \
		x86_prefix((inst), 0xf2); \
		amd64_emit_rex(inst, 0, (reg), 0, (regp)); \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x11; \
		x86_regp_emit ((inst), (reg) & 0x7, (regp) & 0x7); \
		amd64_codegen_post(inst); \
	} while (0)

#define amd64_movss_reg_regp(inst,reg,regp) \
	do { \
		amd64_codegen_pre(inst); \
		x86_prefix((inst), 0xf3); \
		amd64_emit_rex(inst, 0, (reg), 0, (regp)); \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x10; \
		x86_regp_emit ((inst), (reg) & 0x7, (regp) & 0x7); \
		amd64_codegen_post(inst); \
	} while (0)

#define amd64_movss_regp_reg(inst,regp,reg) \
	do { \
		amd64_codegen_pre(inst); \
		x86_prefix((inst), 0xf3); \
		amd64_emit_rex(inst, 0, (reg), 0, (regp)); \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x11; \
		x86_regp_emit ((inst), (reg) & 0x7, (regp) & 0x7); \
		amd64_codegen_post(inst); \
	} while (0)

#define amd64_movsd_reg_membase(inst,reg,basereg,disp) \
	do { \
		amd64_codegen_pre(inst); \
		x86_prefix((inst), 0xf2); \
		amd64_emit_rex(inst, 0, (reg), 0, (basereg)); \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x10; \
		x86_membase_emit ((inst), (reg) & 0x7, (basereg) & 0x7, (disp)); \
		amd64_codegen_post(inst); \
	} while (0)

#define amd64_movss_reg_membase(inst,reg,basereg,disp) \
	do { \
		amd64_codegen_pre(inst); \
		x86_prefix((inst), 0xf3); \
		amd64_emit_rex(inst, 0, (reg), 0, (basereg)); \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x10; \
		x86_membase_emit ((inst), (reg) & 0x7, (basereg) & 0x7, (disp)); \
		amd64_codegen_post(inst); \
	} while (0)

#define amd64_movsd_membase_reg(inst,basereg,disp,reg) \
	do { \
		amd64_codegen_pre(inst); \
		x86_prefix((inst), 0xf2); \
		amd64_emit_rex(inst, 0, (reg), 0, (basereg)); \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x11; \
		x86_membase_emit ((inst), (reg) & 0x7, (basereg) & 0x7, (disp)); \
		amd64_codegen_post(inst); \
	} while (0)

#define amd64_movss_membase_reg(inst,basereg,disp,reg) \
	do { \
		amd64_codegen_pre(inst); \
		x86_prefix((inst), 0xf3); \
		amd64_emit_rex(inst, 0, (reg), 0, (basereg)); \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0x11; \
		x86_membase_emit ((inst), (reg) & 0x7, (basereg) & 0x7, (disp)); \
		amd64_codegen_post(inst); \
	} while (0)
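/*
 * Usage sketch (illustrative, not from the original header; assumes the
 * AMD64_XMM0 name from this header's XMM register enum): spilling and
 * reloading a double through the stack with the macros above:
 *
 *	amd64_movsd_membase_reg (code, AMD64_RSP, -8, AMD64_XMM0);
 *	// f2 0f 11 44 24 f8		movsd %xmm0, -0x8(%rsp)
 *	amd64_movsd_reg_membase (code, AMD64_XMM0, AMD64_RSP, -8);
 *	// f2 0f 10 44 24 f8		movsd -0x8(%rsp), %xmm0
 */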
/* In 64-bit mode the original one-byte inc_reg/dec_reg opcodes (0x40-0x4f) */
/* became the REX prefixes, so inc/dec are encoded through 0xff instead.    */
#define amd64_inc_reg_size(inst,reg,size) \
	do { \
		amd64_codegen_pre(inst); \
		amd64_emit_rex ((inst),(size),0,0,(reg)); \
		*(inst)++ = (unsigned char)0xff; \
		x86_reg_emit ((inst),0,(reg) & 0x7); \
		amd64_codegen_post(inst); \
	} while (0)

#define amd64_dec_reg_size(inst,reg,size) \
	do { \
		amd64_codegen_pre(inst); \
		amd64_emit_rex ((inst),(size),0,0,(reg)); \
		*(inst)++ = (unsigned char)0xff; \
		x86_reg_emit ((inst),1,(reg) & 0x7); \
		amd64_codegen_post(inst); \
	} while (0)
#define amd64_fld_membase_size(inst,basereg,disp,is_double,size) do { \
	amd64_codegen_pre(inst); \
	amd64_emit_rex ((inst),0,0,0,(basereg)); \
	*(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9; \
	amd64_membase_emit ((inst), 0, (basereg), (disp)); \
	amd64_codegen_post(inst); \
} while (0)
#if defined (__default_codegen__)

/* From the AMD64 Software Optimization Manual */
#define amd64_padding_size(inst,size) \
	do { \
		switch ((size)) { \
		case 1: *(inst)++ = 0x90; break; \
		case 2: *(inst)++ = 0x66; *(inst)++ = 0x90; break; \
		case 3: *(inst)++ = 0x66; *(inst)++ = 0x66; *(inst)++ = 0x90; break; \
		default: amd64_emit_rex ((inst),8,0,0,0); x86_padding ((inst), (size) - 1); \
		} \
	} while (0)

#define amd64_call_membase_size(inst,basereg,disp,size) do { amd64_emit_rex ((inst),0,0,0,(basereg)); *(inst)++ = (unsigned char)0xff; amd64_membase_emit ((inst),2, (basereg),(disp)); } while (0)
#define amd64_jump_membase_size(inst,basereg,disp,size) do { amd64_emit_rex ((inst),0,0,0,(basereg)); *(inst)++ = (unsigned char)0xff; amd64_membase_emit ((inst), 4, (basereg), (disp)); } while (0)
#define amd64_jump_code_size(inst,target,size) do { \
	if (amd64_is_imm32 ((gint64)(target) - (gint64)(inst))) { \
		x86_jump_code((inst),(target)); \
	} else { \
		amd64_jump_membase ((inst), AMD64_RIP, 0); \
		*(guint64*)(inst) = (guint64)(target); \
		(inst) += 8; \
	} \
} while (0)
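/*
 * Illustrative note (not part of the original header): when the target is
 * out of rel32 range the else branch emits an indirect "jmp *0x0(%rip)"
 * whose memory operand points at the quadword placed immediately after
 * the instruction, so the raw 8-byte target written into the stream is
 * the jump destination; (inst) += 8 keeps the emit pointer past it.
 */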
#elif defined(__native_client_codegen__)

/* The 3-7 byte NOP sequences in amd64_padding_size above are all illegal in  */
/* 64-bit Native Client because they load into rSP/rBP or use duplicate       */
/* prefixes. Instead we use the NOPs recommended in Section 3.5.1.8 of the    */
/* Intel 64 and IA-32 Architectures Optimization Reference Manual and         */
/* Section 4.13 of the AMD Software Optimization Guide for Family 10h Processors. */
#define amd64_padding_size(inst,size) \
	do { \
		unsigned char *code_start = (inst); \
		switch ((size)) { \
		/* xchg %eax,%eax, recognized by hardware as a NOP */ \
		case 1: *(inst)++ = 0x90; break; \
		/* xchg %ax,%ax */ \
		case 2: *(inst)++ = 0x66; *(inst)++ = 0x90; \
			break; \
		/* nop (%rax) */ \
		case 3: *(inst)++ = 0x0f; *(inst)++ = 0x1f; \
			x86_address_byte ((inst), 0, 0, AMD64_RAX); \
			break; \
		/* nop 0x0(%rax) */ \
		case 4: *(inst)++ = 0x0f; *(inst)++ = 0x1f; \
			x86_address_byte ((inst), 1, 0, AMD64_RAX); \
			x86_imm_emit8 ((inst), 0); \
			break; \
		/* nop 0x0(%rax,%rax) */ \
		case 5: *(inst)++ = 0x0f; *(inst)++ = 0x1f; \
			x86_address_byte ((inst), 1, 0, 4); \
			x86_address_byte ((inst), 0, AMD64_RAX, AMD64_RAX); \
			x86_imm_emit8 ((inst), 0); \
			break; \
		/* nopw 0x0(%rax,%rax) */ \
		case 6: *(inst)++ = 0x66; *(inst)++ = 0x0f; \
			*(inst)++ = 0x1f; \
			x86_address_byte ((inst), 1, 0, 4); \
			x86_address_byte ((inst), 0, AMD64_RAX, AMD64_RAX); \
			x86_imm_emit8 ((inst), 0); \
			break; \
		/* nop 0x0(%rax) (32-bit displacement) */ \
		case 7: *(inst)++ = 0x0f; *(inst)++ = 0x1f; \
			x86_address_byte ((inst), 2, 0, AMD64_RAX); \
			x86_imm_emit32((inst), 0); \
			break; \
		/* nop 0x0(%rax,%rax) (32-bit displacement) */ \
		case 8: *(inst)++ = 0x0f; *(inst)++ = 0x1f; \
			x86_address_byte ((inst), 2, 0, 4); \
			x86_address_byte ((inst), 0, AMD64_RAX, AMD64_RAX); \
			x86_imm_emit32 ((inst), 0); \
			break; \
		default: \
			g_assert_not_reached(); \
		} \
		g_assert(code_start + (size) == (unsigned char *)(inst)); \
	} while (0)
/* Size is ignored for Native Client calls; we restrict jumping to 32 bits. */
#define amd64_call_membase_size(inst,basereg,disp,size) \
	do { \
		amd64_codegen_pre((inst)); \
		amd64_call_sequence_pre(inst); \
		amd64_mov_reg_membase((inst), AMD64_R11, (basereg), (disp), 4); \
		amd64_call_reg_internal((inst), AMD64_R11); \
		amd64_call_sequence_post(inst); \
		amd64_codegen_post((inst)); \
	} while (0)
/* Size is ignored for Native Client jumps; we restrict jumping to 32 bits. */
#define amd64_jump_membase_size(inst,basereg,disp,size) \
	do { \
		amd64_mov_reg_membase((inst), AMD64_R11, (basereg), (disp), 4); \
		amd64_jump_reg_size((inst), AMD64_R11, 4); \
	} while (0)
/* On Native Client we can't jump more than INT_MAX in either direction */
#define amd64_jump_code_size(inst,target,size) \
	do { \
		/* x86_jump_code used twice in case of */ \
		/* relocation by amd64_codegen_post    */ \
		guint8* jump_start; \
		amd64_codegen_pre(inst); \
		assert(amd64_is_imm32 ((gint64)(target) - (gint64)(inst))); \
		x86_jump_code((inst),(target)); \
		inst = amd64_codegen_post(inst); \
		jump_start = (inst); \
		x86_jump_code((inst),(target)); \
		mono_amd64_patch(jump_start, (target)); \
	} while (0)

#endif /*__native_client_codegen__*/
// TODO: Reorganize the SSE opcode defines.

/* Two opcode SSE defines */
#define emit_sse_reg_reg_op2_size(inst,dreg,reg,op1,op2,size) do { \
	amd64_codegen_pre(inst); \
	amd64_emit_rex ((inst), size, (dreg), 0, (reg)); \
	*(inst)++ = (unsigned char)(op1); \
	*(inst)++ = (unsigned char)(op2); \
	x86_reg_emit ((inst), (dreg), (reg)); \
	amd64_codegen_post(inst); \
} while (0)

#define emit_sse_reg_reg_op2(inst,dreg,reg,op1,op2) emit_sse_reg_reg_op2_size ((inst), (dreg), (reg), (op1), (op2), 0)

#define emit_sse_reg_reg_op2_imm(inst,dreg,reg,op1,op2,imm) do { \
	amd64_codegen_pre(inst); \
	emit_sse_reg_reg_op2 ((inst), (dreg), (reg), (op1), (op2)); \
	x86_imm_emit8 ((inst), (imm)); \
	amd64_codegen_post(inst); \
} while (0)
#define emit_sse_membase_reg_op2(inst,basereg,disp,reg,op1,op2) do { \
	amd64_codegen_pre(inst); \
	amd64_emit_rex ((inst), 0, (reg), 0, (basereg)); \
	*(inst)++ = (unsigned char)(op1); \
	*(inst)++ = (unsigned char)(op2); \
	amd64_membase_emit ((inst), (reg), (basereg), (disp)); \
	amd64_codegen_post(inst); \
} while (0)

#define emit_sse_reg_membase_op2(inst,dreg,basereg,disp,op1,op2) do { \
	amd64_codegen_pre(inst); \
	amd64_emit_rex ((inst), 0, (dreg), 0, (basereg) == AMD64_RIP ? 0 : (basereg)); \
	*(inst)++ = (unsigned char)(op1); \
	*(inst)++ = (unsigned char)(op2); \
	amd64_membase_emit ((inst), (dreg), (basereg), (disp)); \
	amd64_codegen_post(inst); \
} while (0)
/* Three opcode SSE defines */

#define emit_opcode3(inst,op1,op2,op3) do { \
	*(inst)++ = (unsigned char)(op1); \
	*(inst)++ = (unsigned char)(op2); \
	*(inst)++ = (unsigned char)(op3); \
} while (0)

#define emit_sse_reg_reg_size(inst,dreg,reg,op1,op2,op3,size) do { \
	amd64_codegen_pre(inst); \
	*(inst)++ = (unsigned char)(op1); \
	amd64_emit_rex ((inst), size, (dreg), 0, (reg)); \
	*(inst)++ = (unsigned char)(op2); \
	*(inst)++ = (unsigned char)(op3); \
	x86_reg_emit ((inst), (dreg), (reg)); \
	amd64_codegen_post(inst); \
} while (0)

#define emit_sse_reg_reg(inst,dreg,reg,op1,op2,op3) emit_sse_reg_reg_size ((inst), (dreg), (reg), (op1), (op2), (op3), 0)
#define emit_sse_reg_reg_imm(inst,dreg,reg,op1,op2,op3,imm) do { \
	amd64_codegen_pre(inst); \
	emit_sse_reg_reg ((inst), (dreg), (reg), (op1), (op2), (op3)); \
	x86_imm_emit8 ((inst), (imm)); \
	amd64_codegen_post(inst); \
} while (0)

#define emit_sse_membase_reg(inst,basereg,disp,reg,op1,op2,op3) do { \
	amd64_codegen_pre(inst); \
	x86_prefix((inst), (unsigned char)(op1)); \
	amd64_emit_rex ((inst), 0, (reg), 0, (basereg)); \
	*(inst)++ = (unsigned char)(op2); \
	*(inst)++ = (unsigned char)(op3); \
	amd64_membase_emit ((inst), (reg), (basereg), (disp)); \
	amd64_codegen_post(inst); \
} while (0)

#define emit_sse_reg_membase(inst,dreg,basereg,disp,op1,op2,op3) do { \
	amd64_codegen_pre(inst); \
	x86_prefix((inst), (unsigned char)(op1)); \
	amd64_emit_rex ((inst), 0, (dreg), 0, (basereg) == AMD64_RIP ? 0 : (basereg)); \
	*(inst)++ = (unsigned char)(op2); \
	*(inst)++ = (unsigned char)(op3); \
	amd64_membase_emit ((inst), (dreg), (basereg), (disp)); \
	amd64_codegen_post(inst); \
} while (0)
/* Four opcode SSE defines */

#define emit_sse_reg_reg_op4_size(inst,dreg,reg,op1,op2,op3,op4,size) do { \
	amd64_codegen_pre(inst); \
	x86_prefix((inst), (unsigned char)(op1)); \
	amd64_emit_rex ((inst), size, (dreg), 0, (reg)); \
	*(inst)++ = (unsigned char)(op2); \
	*(inst)++ = (unsigned char)(op3); \
	*(inst)++ = (unsigned char)(op4); \
	x86_reg_emit ((inst), (dreg), (reg)); \
	amd64_codegen_post(inst); \
} while (0)

#define emit_sse_reg_reg_op4(inst,dreg,reg,op1,op2,op3,op4) emit_sse_reg_reg_op4_size ((inst), (dreg), (reg), (op1), (op2), (op3), (op4), 0)
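/*
 * Illustrative note (not part of the original header): op1 is a mandatory
 * prefix (0x66, 0xf2 or 0xf3) and the remaining bytes are the opcode
 * proper, so e.g. the SSE4.1 pmulld define further down expands to
 *
 *	emit_sse_reg_reg_op4 (code, dreg, reg, 0x66, 0x0f, 0x38, 0x40);
 *	// 66 [rex] 0f 38 40 /r		pmulld
 */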
/* specific SSE opcode defines */

#define amd64_sse_xorpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst),(dreg),(reg), 0x66, 0x0f, 0x57)

#define amd64_sse_xorpd_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst),(dreg),(basereg), (disp), 0x66, 0x0f, 0x57)

#define amd64_sse_andpd_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst),(dreg),(basereg), (disp), 0x66, 0x0f, 0x54)

#define amd64_sse_movsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x10)

#define amd64_sse_movsd_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst), (dreg), (basereg), (disp), 0xf2, 0x0f, 0x10)

#define amd64_sse_movsd_membase_reg(inst,basereg,disp,reg) emit_sse_membase_reg ((inst), (basereg), (disp), (reg), 0xf2, 0x0f, 0x11)

#define amd64_sse_movss_membase_reg(inst,basereg,disp,reg) emit_sse_membase_reg ((inst), (basereg), (disp), (reg), 0xf3, 0x0f, 0x11)

#define amd64_sse_movss_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst), (dreg), (basereg), (disp), 0xf3, 0x0f, 0x10)

#define amd64_sse_comisd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst),(dreg),(reg),0x66,0x0f,0x2f)

#define amd64_sse_comisd_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst), (dreg), (basereg), (disp), 0x66, 0x0f, 0x2f)

#define amd64_sse_ucomisd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst),(dreg),(reg),0x66,0x0f,0x2e)

#define amd64_sse_cvtsd2si_reg_reg(inst,dreg,reg) emit_sse_reg_reg_size ((inst), (dreg), (reg), 0xf2, 0x0f, 0x2d, 8)

#define amd64_sse_cvttsd2si_reg_reg_size(inst,dreg,reg,size) emit_sse_reg_reg_size ((inst), (dreg), (reg), 0xf2, 0x0f, 0x2c, (size))

#define amd64_sse_cvttsd2si_reg_reg(inst,dreg,reg) amd64_sse_cvttsd2si_reg_reg_size ((inst), (dreg), (reg), 8)

#define amd64_sse_cvtsi2sd_reg_reg_size(inst,dreg,reg,size) emit_sse_reg_reg_size ((inst), (dreg), (reg), 0xf2, 0x0f, 0x2a, (size))

#define amd64_sse_cvtsi2sd_reg_reg(inst,dreg,reg) amd64_sse_cvtsi2sd_reg_reg_size ((inst), (dreg), (reg), 8)

#define amd64_sse_cvtsd2ss_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x5a)

#define amd64_sse_cvtss2sd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf3, 0x0f, 0x5a)

#define amd64_sse_addsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x58)

#define amd64_sse_subsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x5c)

#define amd64_sse_mulsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x59)

#define amd64_sse_divsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x5e)

#define amd64_sse_sqrtsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf2, 0x0f, 0x51)

#define amd64_sse_pinsrw_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm ((inst), (dreg), (reg), 0x66, 0x0f, 0xc4, (imm))

#define amd64_sse_pextrw_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm ((inst), (dreg), (reg), 0x66, 0x0f, 0xc5, (imm))

#define amd64_sse_cvttsd2si_reg_xreg_size(inst,reg,xreg,size) emit_sse_reg_reg_size ((inst), (reg), (xreg), 0xf2, 0x0f, 0x2c, (size))
#define amd64_sse_addps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x58)

#define amd64_sse_divps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x5e)

#define amd64_sse_mulps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x59)

#define amd64_sse_subps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x5c)

#define amd64_sse_maxps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x5f)

#define amd64_sse_minps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x5d)

#define amd64_sse_cmpps_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_op2_imm((inst), (dreg), (reg), 0x0f, 0xc2, (imm))

#define amd64_sse_andps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x54)

#define amd64_sse_andnps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x55)

#define amd64_sse_orps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x56)

#define amd64_sse_xorps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x57)

#define amd64_sse_sqrtps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x51)

#define amd64_sse_rsqrtps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x52)

#define amd64_sse_rcpps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x53)

#define amd64_sse_addsubps_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf2, 0x0f, 0xd0)

#define amd64_sse_haddps_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf2, 0x0f, 0x7c)

#define amd64_sse_hsubps_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf2, 0x0f, 0x7d)

#define amd64_sse_movshdup_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf3, 0x0f, 0x16)

#define amd64_sse_movsldup_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf3, 0x0f, 0x12)

#define amd64_sse_pshufhw_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm((inst), (dreg), (reg), 0xf3, 0x0f, 0x70, (imm))

#define amd64_sse_pshuflw_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm((inst), (dreg), (reg), 0xf2, 0x0f, 0x70, (imm))

#define amd64_sse_pshufd_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm((inst), (dreg), (reg), 0x66, 0x0f, 0x70, (imm))

#define amd64_sse_shufps_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_op2_imm((inst), (dreg), (reg), 0x0f, 0xc6, (imm))

#define amd64_sse_shufpd_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm((inst), (dreg), (reg), 0x66, 0x0f, 0xc6, (imm))
#define amd64_sse_addpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x58)

#define amd64_sse_divpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x5e)

#define amd64_sse_mulpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x59)

#define amd64_sse_subpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x5c)

#define amd64_sse_maxpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x5f)

#define amd64_sse_minpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x5d)

#define amd64_sse_cmppd_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm((inst), (dreg), (reg), 0x66, 0x0f, 0xc2, (imm))

#define amd64_sse_andpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x54)

#define amd64_sse_andnpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x55)

#define amd64_sse_orpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x56)

#define amd64_sse_sqrtpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x51)

#define amd64_sse_rsqrtpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x52)

#define amd64_sse_rcppd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x53)

#define amd64_sse_addsubpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd0)

#define amd64_sse_haddpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x7c)

#define amd64_sse_hsubpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x7d)

#define amd64_sse_movddup_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf2, 0x0f, 0x12)

#define amd64_sse_pmovmskb_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd7)
#define amd64_sse_pand_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xdb)

#define amd64_sse_por_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xeb)

#define amd64_sse_pxor_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xef)

#define amd64_sse_paddb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xfc)

#define amd64_sse_paddw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xfd)

#define amd64_sse_paddd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xfe)

#define amd64_sse_paddq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd4)

#define amd64_sse_psubb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf8)

#define amd64_sse_psubw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf9)

#define amd64_sse_psubd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xfa)

#define amd64_sse_psubq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xfb)

#define amd64_sse_pmaxub_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xde)

#define amd64_sse_pmaxuw_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x3e)

#define amd64_sse_pmaxud_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x3f)

#define amd64_sse_pmaxsb_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x3c)

#define amd64_sse_pmaxsw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xee)

#define amd64_sse_pmaxsd_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x3d)

#define amd64_sse_pavgb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe0)

#define amd64_sse_pavgw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe3)

#define amd64_sse_pminub_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xda)

#define amd64_sse_pminuw_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x3a)

#define amd64_sse_pminud_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x3b)

#define amd64_sse_pminsb_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x38)

#define amd64_sse_pminsw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xea)

#define amd64_sse_pminsd_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x39)

#define amd64_sse_pcmpeqb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x74)

#define amd64_sse_pcmpeqw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x75)

#define amd64_sse_pcmpeqd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x76)

#define amd64_sse_pcmpeqq_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x29)

#define amd64_sse_pcmpgtb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x64)

#define amd64_sse_pcmpgtw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x65)

#define amd64_sse_pcmpgtd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x66)

#define amd64_sse_pcmpgtq_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x37)

#define amd64_sse_psadbw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf6)

#define amd64_sse_punpcklbw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x60)

#define amd64_sse_punpcklwd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x61)

#define amd64_sse_punpckldq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x62)

#define amd64_sse_punpcklqdq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x6c)

#define amd64_sse_unpcklpd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x14)

#define amd64_sse_unpcklps_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x14)

#define amd64_sse_punpckhbw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x68)

#define amd64_sse_punpckhwd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x69)

#define amd64_sse_punpckhdq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x6a)

#define amd64_sse_punpckhqdq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x6d)

#define amd64_sse_unpckhpd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x15)

#define amd64_sse_unpckhps_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x15)

#define amd64_sse_packsswb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x63)

#define amd64_sse_packssdw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x6b)

#define amd64_sse_packuswb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x67)

#define amd64_sse_packusdw_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x2b)
#define amd64_sse_paddusb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xdc)

#define amd64_sse_psubusb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd8)

#define amd64_sse_paddusw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xdd)

#define amd64_sse_psubusw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd9)

#define amd64_sse_paddsb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xec)

#define amd64_sse_psubsb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe8)

#define amd64_sse_paddsw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xed)

#define amd64_sse_psubsw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe9)
#define amd64_sse_pmullw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd5)

#define amd64_sse_pmulld_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x40)

#define amd64_sse_pmuludq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf4)

#define amd64_sse_pmulhuw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe4)

#define amd64_sse_pmulhw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe5)

#define amd64_sse_psrlw_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SHR, (reg), 0x66, 0x0f, 0x71, (imm))

#define amd64_sse_psrlw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd1)

#define amd64_sse_psraw_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SAR, (reg), 0x66, 0x0f, 0x71, (imm))

#define amd64_sse_psraw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe1)

#define amd64_sse_psllw_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SHL, (reg), 0x66, 0x0f, 0x71, (imm))

#define amd64_sse_psllw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf1)

#define amd64_sse_psrld_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SHR, (reg), 0x66, 0x0f, 0x72, (imm))

#define amd64_sse_psrld_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd2)

#define amd64_sse_psrad_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SAR, (reg), 0x66, 0x0f, 0x72, (imm))

#define amd64_sse_psrad_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe2)

#define amd64_sse_pslld_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SHL, (reg), 0x66, 0x0f, 0x72, (imm))

#define amd64_sse_pslld_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf2)

#define amd64_sse_psrlq_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SHR, (reg), 0x66, 0x0f, 0x73, (imm))

#define amd64_sse_psrlq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd3)

#define amd64_sse_psraq_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SAR, (reg), 0x66, 0x0f, 0x73, (imm))

#define amd64_sse_psraq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe3)

#define amd64_sse_psllq_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SHL, (reg), 0x66, 0x0f, 0x73, (imm))

#define amd64_sse_psllq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf3)
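/*
 * Illustrative note (not part of the original header; it assumes the
 * X86_SSE_SHR/SAR/SHL selectors from x86-codegen.h carry the /r values
 * 2, 4 and 6): the 0x71/0x72/0x73 opcodes in the *_reg_imm forms above
 * are "group" opcodes whose operation is selected by the ModRM reg
 * field, so passing the selector as the dreg argument plants it there:
 *
 *	amd64_sse_psrlq_reg_imm (code, AMD64_XMM1, 3);
 *	// 66 0f 73 d1 03		psrlq $3, %xmm1
 */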
#define amd64_sse_cvtdq2pd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf3, 0x0f, 0xe6)

#define amd64_sse_cvtdq2ps_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x5b)

#define amd64_sse_cvtpd2dq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf2, 0x0f, 0xe6)

#define amd64_sse_cvtpd2ps_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x5a)

#define amd64_sse_cvtps2dq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x5b)

#define amd64_sse_cvtps2pd_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x5a)

#define amd64_sse_cvttpd2dq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe6)

#define amd64_sse_cvttps2dq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf3, 0x0f, 0x5b)
#define amd64_movd_xreg_reg_size(inst,dreg,sreg,size) emit_sse_reg_reg_size((inst), (dreg), (sreg), 0x66, 0x0f, 0x6e, (size))

#define amd64_movd_reg_xreg_size(inst,dreg,sreg,size) emit_sse_reg_reg_size((inst), (sreg), (dreg), 0x66, 0x0f, 0x7e, (size))

#define amd64_movd_xreg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase((inst), (dreg), (basereg), (disp), 0x66, 0x0f, 0x6e)

#define amd64_movlhps_reg_reg(inst,dreg,sreg) emit_sse_reg_reg_op2((inst), (dreg), (sreg), 0x0f, 0x16)

#define amd64_movhlps_reg_reg(inst,dreg,sreg) emit_sse_reg_reg_op2((inst), (dreg), (sreg), 0x0f, 0x12)

#define amd64_sse_movups_membase_reg(inst, basereg, disp, reg) emit_sse_membase_reg_op2((inst), (basereg), (disp), (reg), 0x0f, 0x11)

#define amd64_sse_movups_reg_membase(inst, dreg, basereg, disp) emit_sse_reg_membase_op2((inst), (dreg), (basereg), (disp), 0x0f, 0x10)

#define amd64_sse_movaps_membase_reg(inst, basereg, disp, reg) emit_sse_membase_reg_op2((inst), (basereg), (disp), (reg), 0x0f, 0x29)

#define amd64_sse_movaps_reg_membase(inst, dreg, basereg, disp) emit_sse_reg_membase_op2((inst), (dreg), (basereg), (disp), 0x0f, 0x28)

#define amd64_sse_movaps_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x28)

#define amd64_sse_movntps_reg_membase(inst, dreg, basereg, disp) emit_sse_reg_membase_op2((inst), (dreg), (basereg), (disp), 0x0f, 0x2b)

#define amd64_sse_prefetch_reg_membase(inst, arg, basereg, disp) emit_sse_reg_membase_op2((inst), (arg), (basereg), (disp), 0x0f, 0x18)
/* Generated from x86-codegen.h */

#define amd64_breakpoint_size(inst,size) do { x86_breakpoint(inst); } while (0)
#define amd64_cld_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_cld(inst); amd64_codegen_post(inst); } while (0)
#define amd64_stosb_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_stosb(inst); amd64_codegen_post(inst); } while (0)
#define amd64_stosl_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_stosl(inst); amd64_codegen_post(inst); } while (0)
#define amd64_stosd_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_stosd(inst); amd64_codegen_post(inst); } while (0)
#define amd64_movsb_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_movsb(inst); amd64_codegen_post(inst); } while (0)
#define amd64_movsl_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_movsl(inst); amd64_codegen_post(inst); } while (0)
#define amd64_movsd_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_movsd(inst); amd64_codegen_post(inst); } while (0)
#define amd64_prefix_size(inst,p,size) do { x86_prefix((inst), p); } while (0)
#define amd64_rdtsc_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_rdtsc(inst); amd64_codegen_post(inst); } while (0)
#define amd64_cmpxchg_reg_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_cmpxchg_reg_reg((inst),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_cmpxchg_mem_reg_size(inst,mem,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_cmpxchg_mem_reg((inst),(mem),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_cmpxchg_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_cmpxchg_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_xchg_reg_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_xchg_reg_reg((inst),((dreg)&0x7),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_xchg_mem_reg_size(inst,mem,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_xchg_mem_reg((inst),(mem),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_xchg_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_xchg_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_inc_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_inc_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_inc_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_inc_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
//#define amd64_inc_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_inc_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_dec_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_dec_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_dec_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_dec_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
//#define amd64_dec_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_dec_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_not_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_not_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_not_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_not_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_not_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_not_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_neg_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_neg_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_neg_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_neg_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_neg_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_neg_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_nop_size(inst,size) do { amd64_codegen_pre(inst); x86_nop(inst); amd64_codegen_post(inst); } while (0)
//#define amd64_alu_reg_imm_size(inst,opc,reg,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_alu_reg_imm((inst),(opc),((reg)&0x7),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_mem_imm_size(inst,opc,mem,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_alu_mem_imm((inst),(opc),(mem),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_membase_imm_size(inst,opc,basereg,disp,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_alu_membase_imm((inst),(opc),((basereg)&0x7),(disp),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_membase8_imm_size(inst,opc,basereg,disp,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_alu_membase8_imm((inst),(opc),((basereg)&0x7),(disp),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_mem_reg_size(inst,opc,mem,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_alu_mem_reg((inst),(opc),(mem),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_membase_reg_size(inst,opc,basereg,disp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_alu_membase_reg((inst),(opc),((basereg)&0x7),(disp),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
//#define amd64_alu_reg_reg_size(inst,opc,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_alu_reg_reg((inst),(opc),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_reg8_reg8_size(inst,opc,dreg,reg,is_dreg_h,is_reg_h,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_alu_reg8_reg8((inst),(opc),((dreg)&0x7),((reg)&0x7),(is_dreg_h),(is_reg_h)); amd64_codegen_post(inst); } while (0)
1438 #define amd64_alu_reg_mem_size(inst,opc,reg,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_alu_reg_mem((inst),(opc),((reg)&0x7),(mem)); amd64_codegen_post(inst); } while (0)
1439 //#define amd64_alu_reg_membase_size(inst,opc,reg,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_alu_reg_membase((inst),(opc),((reg)&0x7),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
1440 #define amd64_test_reg_imm_size(inst,reg,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_test_reg_imm((inst),((reg)&0x7),(imm)); amd64_codegen_post(inst); } while (0)
1441 #define amd64_test_mem_imm_size(inst,mem,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_test_mem_imm((inst),(mem),(imm)); amd64_codegen_post(inst); } while (0)
1442 #define amd64_test_membase_imm_size(inst,basereg,disp,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_test_membase_imm((inst),((basereg)&0x7),(disp),(imm)); amd64_codegen_post(inst); } while (0)
1443 #define amd64_test_reg_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_test_reg_reg((inst),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
1444 #define amd64_test_mem_reg_size(inst,mem,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_test_mem_reg((inst),(mem),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
1445 #define amd64_test_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_test_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
1446 #define amd64_shift_reg_imm_size(inst,opc,reg,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_shift_reg_imm((inst),(opc),((reg)&0x7),(imm)); amd64_codegen_post(inst); } while (0)
/* The mem/membase shift macros take no register operand, so the REX reg
 * fields are 0; the original bodies referenced an unbound `reg` here. */
#define amd64_shift_mem_imm_size(inst,opc,mem,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_shift_mem_imm((inst),(opc),(mem),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_shift_membase_imm_size(inst,opc,basereg,disp,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_shift_membase_imm((inst),(opc),((basereg)&0x7),(disp),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_shift_reg_size(inst,opc,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_shift_reg((inst),(opc),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_shift_mem_size(inst,opc,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_shift_mem((inst),(opc),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_shift_membase_size(inst,opc,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_shift_membase((inst),(opc),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_shrd_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_shrd_reg((inst),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_shrd_reg_imm_size(inst,dreg,reg,shamt,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_shrd_reg_imm((inst),((dreg)&0x7),((reg)&0x7),(shamt)); amd64_codegen_post(inst); } while (0)
#define amd64_shld_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_shld_reg((inst),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_shld_reg_imm_size(inst,dreg,reg,shamt,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_shld_reg_imm((inst),((dreg)&0x7),((reg)&0x7),(shamt)); amd64_codegen_post(inst); } while (0)
#define amd64_mul_reg_size(inst,reg,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_mul_reg((inst),((reg)&0x7),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_mul_mem_size(inst,mem,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_mul_mem((inst),(mem),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_mul_membase_size(inst,basereg,disp,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_mul_membase((inst),((basereg)&0x7),(disp),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_imul_reg_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_imul_reg_reg((inst),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_imul_reg_mem_size(inst,reg,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_imul_reg_mem((inst),((reg)&0x7),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_imul_reg_membase_size(inst,reg,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_imul_reg_membase((inst),((reg)&0x7),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_imul_reg_reg_imm_size(inst,dreg,reg,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_imul_reg_reg_imm((inst),((dreg)&0x7),((reg)&0x7),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_imul_reg_mem_imm_size(inst,reg,mem,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_imul_reg_mem_imm((inst),((reg)&0x7),(mem),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_imul_reg_membase_imm_size(inst,reg,basereg,disp,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_imul_reg_membase_imm((inst),((reg)&0x7),((basereg)&0x7),(disp),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_div_reg_size(inst,reg,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_div_reg((inst),((reg)&0x7),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_div_mem_size(inst,mem,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_div_mem((inst),(mem),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_div_membase_size(inst,basereg,disp,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_div_membase((inst),((basereg)&0x7),(disp),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_mov_mem_reg_size(inst,mem,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_mov_mem_reg((inst),(mem),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_regp_reg_size(inst,regp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(regp),0,(reg)); x86_mov_regp_reg((inst),(regp),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_mov_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_mov_memindex_reg_size(inst,basereg,disp,indexreg,shift,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),(indexreg),(basereg)); x86_mov_memindex_reg((inst),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_mov_reg_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_mov_reg_reg((inst),((dreg)&0x7),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_reg_mem_size(inst,reg,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_mov_reg_mem((inst),((reg)&0x7),(mem),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_reg_membase_size(inst,reg,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_mov_reg_membase((inst),((reg)&0x7),((basereg)&0x7),(disp),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_reg_memindex_size(inst,reg,basereg,disp,indexreg,shift,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),(indexreg),(basereg)); x86_mov_reg_memindex((inst),((reg)&0x7),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_clear_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_clear_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_reg_imm_size(inst,reg,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_mov_reg_imm((inst),((reg)&0x7),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_mov_mem_imm_size(inst,mem,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_mov_mem_imm((inst),(mem),(imm),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_membase_imm_size(inst,basereg,disp,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_mov_membase_imm((inst),((basereg)&0x7),(disp),(imm),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_mov_memindex_imm_size(inst,basereg,disp,indexreg,shift,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,(indexreg),(basereg)); x86_mov_memindex_imm((inst),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),(imm),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_lea_mem_size(inst,reg,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_lea_mem((inst),((reg)&0x7),(mem)); amd64_codegen_post(inst); } while (0)
//#define amd64_lea_membase_size(inst,reg,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_lea_membase((inst),((reg)&0x7),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_lea_memindex_size(inst,reg,basereg,disp,indexreg,shift,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),(indexreg),(basereg)); x86_lea_memindex((inst),((reg)&0x7),((basereg)&0x7),(disp),((indexreg)&0x7),(shift)); amd64_codegen_post(inst); } while (0)
#define amd64_widen_reg_size(inst,dreg,reg,is_signed,is_half,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_widen_reg((inst),((dreg)&0x7),((reg)&0x7),(is_signed),(is_half)); amd64_codegen_post(inst); } while (0)
#define amd64_widen_mem_size(inst,dreg,mem,is_signed,is_half,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,0); x86_widen_mem((inst),((dreg)&0x7),(mem),(is_signed),(is_half)); amd64_codegen_post(inst); } while (0)
#define amd64_widen_membase_size(inst,dreg,basereg,disp,is_signed,is_half,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(basereg)); x86_widen_membase((inst),((dreg)&0x7),((basereg)&0x7),(disp),(is_signed),(is_half)); amd64_codegen_post(inst); } while (0)
#define amd64_widen_memindex_size(inst,dreg,basereg,disp,indexreg,shift,is_signed,is_half,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),(indexreg),(basereg)); x86_widen_memindex((inst),((dreg)&0x7),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),(is_signed),(is_half)); amd64_codegen_post(inst); } while (0)
#define amd64_cdq_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_cdq(inst); amd64_codegen_post(inst); } while (0)
#define amd64_wait_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_wait(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fp_op_mem_size(inst,opc,mem,is_double,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fp_op_mem((inst),(opc),(mem),(is_double)); amd64_codegen_post(inst); } while (0)
#define amd64_fp_op_membase_size(inst,opc,basereg,disp,is_double,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fp_op_membase((inst),(opc),((basereg)&0x7),(disp),(is_double)); amd64_codegen_post(inst); } while (0)
#define amd64_fp_op_size(inst,opc,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fp_op((inst),(opc),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fp_op_reg_size(inst,opc,index,pop_stack,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fp_op_reg((inst),(opc),(index),(pop_stack)); amd64_codegen_post(inst); } while (0)
#define amd64_fp_int_op_membase_size(inst,opc,basereg,disp,is_int,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fp_int_op_membase((inst),(opc),((basereg)&0x7),(disp),(is_int)); amd64_codegen_post(inst); } while (0)
#define amd64_fstp_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fstp((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fcompp_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fcompp(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fucompp_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fucompp(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fnstsw_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fnstsw(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fnstcw_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fnstcw((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_fnstcw_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_fnstcw_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_fldcw_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fldcw((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_fldcw_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fldcw_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_fchs_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fchs(inst); amd64_codegen_post(inst); } while (0)
#define amd64_frem_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_frem(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fxch_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fxch((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fcomi_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fcomi((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fcomip_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fcomip((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fucomi_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fucomi((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fucomip_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fucomip((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fld_size(inst,mem,is_double,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fld((inst),(mem),(is_double)); amd64_codegen_post(inst); } while (0)
//#define amd64_fld_membase_size(inst,basereg,disp,is_double,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fld_membase((inst),((basereg)&0x7),(disp),(is_double)); amd64_codegen_post(inst); } while (0)
#define amd64_fld80_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fld80_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_fld80_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_fld80_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_fild_size(inst,mem,is_long,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fild((inst),(mem),(is_long)); amd64_codegen_post(inst); } while (0)
#define amd64_fild_membase_size(inst,basereg,disp,is_long,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fild_membase((inst),((basereg)&0x7),(disp),(is_long)); amd64_codegen_post(inst); } while (0)
#define amd64_fld_reg_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fld_reg((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fldz_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fldz(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fld1_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fld1(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fldpi_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fldpi(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fst_size(inst,mem,is_double,pop_stack,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fst((inst),(mem),(is_double),(pop_stack)); amd64_codegen_post(inst); } while (0)
#define amd64_fst_membase_size(inst,basereg,disp,is_double,pop_stack,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fst_membase((inst),((basereg)&0x7),(disp),(is_double),(pop_stack)); amd64_codegen_post(inst); } while (0)
#define amd64_fst80_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fst80_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_fst80_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fst80_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_fist_pop_size(inst,mem,is_long,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fist_pop((inst),(mem),(is_long)); amd64_codegen_post(inst); } while (0)
#define amd64_fist_pop_membase_size(inst,basereg,disp,is_long,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fist_pop_membase((inst),((basereg)&0x7),(disp),(is_long)); amd64_codegen_post(inst); } while (0)
#define amd64_fstsw_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_fstsw(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fist_membase_size(inst,basereg,disp,is_int,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fist_membase((inst),((basereg)&0x7),(disp),(is_int)); amd64_codegen_post(inst); } while (0)
//#define amd64_push_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_push_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_push_regp_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_push_regp((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_push_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_push_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
//#define amd64_push_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_push_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_push_memindex_size(inst,basereg,disp,indexreg,shift,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,(indexreg),(basereg)); x86_push_memindex((inst),((basereg)&0x7),(disp),((indexreg)&0x7),(shift)); amd64_codegen_post(inst); } while (0)
#define amd64_push_imm_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_push_imm((inst),(imm)); amd64_codegen_post(inst); } while (0)
//#define amd64_pop_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_pop_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_pop_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_pop_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_pop_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_pop_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_pushad_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_pushad(inst); amd64_codegen_post(inst); } while (0)
#define amd64_pushfd_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_pushfd(inst); amd64_codegen_post(inst); } while (0)
#define amd64_popad_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_popad(inst); amd64_codegen_post(inst); } while (0)
#define amd64_popfd_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_popfd(inst); amd64_codegen_post(inst); } while (0)
#define amd64_loop_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_loop((inst),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_loope_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_loope((inst),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_loopne_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_loopne((inst),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_jump32_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_jump32((inst),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_jump8_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_jump8((inst),(imm)); amd64_codegen_post(inst); } while (0)
#if !defined( __native_client_codegen__ )
/* Defined above for Native Client, so they can be used in other macros */
#define amd64_jump_reg_size(inst,reg,size) do { amd64_emit_rex ((inst),0,0,0,(reg)); x86_jump_reg((inst),((reg)&0x7)); } while (0)
#define amd64_jump_mem_size(inst,mem,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_jump_mem((inst),(mem)); } while (0)
#endif
#define amd64_jump_disp_size(inst,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_jump_disp((inst),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_branch8_size(inst,cond,imm,is_signed,size) do { x86_branch8((inst),(cond),(imm),(is_signed)); } while (0)
#define amd64_branch32_size(inst,cond,imm,is_signed,size) do { x86_branch32((inst),(cond),(imm),(is_signed)); } while (0)
#define amd64_branch_size_body(inst,cond,target,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_branch((inst),(cond),(target),(is_signed)); amd64_codegen_post(inst); } while (0)
#if defined(__default_codegen__)
#define amd64_branch_size(inst,cond,target,is_signed,size) do { amd64_branch_size_body((inst),(cond),(target),(is_signed),(size)); } while (0)
#elif defined(__native_client_codegen__)
#define amd64_branch_size(inst,cond,target,is_signed,size) \
	do { \
		/* amd64_branch_size_body is used twice in case the branch */ \
		/* is relocated by amd64_codegen_post. */ \
		guint8* branch_start; \
		amd64_codegen_pre(inst); \
		amd64_branch_size_body((inst),(cond),(target),(is_signed),(size)); \
		inst = amd64_codegen_post(inst); \
		branch_start = inst; \
		amd64_branch_size_body((inst),(cond),(target),(is_signed),(size)); \
		mono_amd64_patch(branch_start, (target)); \
	} while (0)
#endif /* __native_client_codegen__ */
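/*
 * Usage sketch (illustrative only; `buf` and `target` are assumed names,
 * not part of this header): emitting a test-and-branch through the
 * 8-byte-default wrappers defined later in this file.
 *
 *   guint8 *code = buf;
 *   amd64_test_reg_reg (code, AMD64_RAX, AMD64_RAX);   // test %rax, %rax
 *   amd64_branch (code, X86_CC_EQ, target, FALSE);     // je target
 *
 * Under __native_client_codegen__ the branch is deliberately emitted
 * twice: alignment in amd64_codegen_post can move the first copy, so the
 * second emission plus mono_amd64_patch fixes up the final displacement.
 */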
#define amd64_branch_disp_size(inst,cond,disp,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_branch_disp((inst),(cond),(disp),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_set_reg_size(inst,cond,reg,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex((inst),1,0,0,(reg)); x86_set_reg((inst),(cond),((reg)&0x7),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_set_mem_size(inst,cond,mem,is_signed,size) do { amd64_codegen_pre(inst); x86_set_mem((inst),(cond),(mem),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_set_membase_size(inst,cond,basereg,disp,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_set_membase((inst),(cond),((basereg)&0x7),(disp),(is_signed)); amd64_codegen_post(inst); } while (0)
//#define amd64_call_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_call_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_call_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_call_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)

#if defined(__default_codegen__)

#define amd64_call_imm_size(inst,disp,size) do { x86_call_imm((inst),(disp)); } while (0)
#define amd64_call_code_size(inst,target,size) do { x86_call_code((inst),(target)); } while (0)

#elif defined(__native_client_codegen__)
/* Size is ignored for Native Client calls; we restrict jumping to 32 bits. */
#define amd64_call_imm_size(inst,disp,size) \
	do { \
		amd64_codegen_pre((inst)); \
		amd64_call_sequence_pre((inst)); \
		x86_call_imm((inst),(disp)); \
		amd64_call_sequence_post((inst)); \
		amd64_codegen_post((inst)); \
	} while (0)

/* x86_call_code is called twice below: first so we can get the size of the */
/* call sequence, and again so the exact offset from "inst" is used, since */
/* the sequence could have been moved by amd64_call_sequence_post. */
/* Size is ignored for Native Client jumps; we restrict jumping to 32 bits. */
#define amd64_call_code_size(inst,target,size) \
	do { \
		guint8* adjusted_start; \
		guint8* call_start; \
		amd64_codegen_pre((inst)); \
		amd64_call_sequence_pre((inst)); \
		x86_call_code((inst),(target)); \
		adjusted_start = amd64_call_sequence_post((inst)); \
		call_start = adjusted_start; \
		x86_call_code(adjusted_start, (target)); \
		amd64_codegen_post((inst)); \
		mono_amd64_patch(call_start, (target)); \
	} while (0)

#endif /*__native_client_codegen__*/
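/*
 * Usage sketch (illustrative only; `buf` and `helper` are assumed names):
 * emitting calls with the wrappers defined later in this file. Under
 * __default_codegen__ amd64_call_code is a plain rel32 call; under
 * __native_client_codegen__ the sequence is emitted, aligned, re-emitted
 * and then patched, as described above.
 *
 *   guint8 *code = buf;
 *   amd64_call_code (code, (guint8 *)helper);  // direct: call rel32
 *   amd64_call_membase (code, AMD64_R11, 0);   // indirect: call [r11]
 */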

//#define amd64_ret_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_ret(inst); amd64_codegen_post(inst); } while (0)
#define amd64_ret_imm_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_ret_imm((inst),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_cmov_reg_size(inst,cond,is_signed,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_cmov_reg((inst),(cond),(is_signed),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_cmov_mem_size(inst,cond,is_signed,reg,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_cmov_mem((inst),(cond),(is_signed),((reg)&0x7),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_cmov_membase_size(inst,cond,is_signed,reg,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_cmov_membase((inst),(cond),(is_signed),((reg)&0x7),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
/* The original body referenced an unbound `size`; the macro now takes it
 * as a parameter, and amd64_enter below passes the default of 8. */
#define amd64_enter_size(inst,framesize,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_enter((inst),(framesize)); amd64_codegen_post(inst); } while (0)
//#define amd64_leave_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_leave(inst); amd64_codegen_post(inst); } while (0)
#define amd64_sahf_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_sahf(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fsin_size(inst,size) do { amd64_codegen_pre(inst); x86_fsin(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fcos_size(inst,size) do { amd64_codegen_pre(inst); x86_fcos(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fabs_size(inst,size) do { amd64_codegen_pre(inst); x86_fabs(inst); amd64_codegen_post(inst); } while (0)
#define amd64_ftst_size(inst,size) do { amd64_codegen_pre(inst); x86_ftst(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fxam_size(inst,size) do { amd64_codegen_pre(inst); x86_fxam(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fpatan_size(inst,size) do { amd64_codegen_pre(inst); x86_fpatan(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fprem_size(inst,size) do { amd64_codegen_pre(inst); x86_fprem(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fprem1_size(inst,size) do { amd64_codegen_pre(inst); x86_fprem1(inst); amd64_codegen_post(inst); } while (0)
#define amd64_frndint_size(inst,size) do { amd64_codegen_pre(inst); x86_frndint(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fsqrt_size(inst,size) do { amd64_codegen_pre(inst); x86_fsqrt(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fptan_size(inst,size) do { amd64_codegen_pre(inst); x86_fptan(inst); amd64_codegen_post(inst); } while (0)
//#define amd64_padding_size(inst,size) do { amd64_codegen_pre(inst); x86_padding((inst),(size)); amd64_codegen_post(inst); } while (0)
#define amd64_prolog_size(inst,frame_size,reg_mask,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_prolog((inst),(frame_size),(reg_mask)); amd64_codegen_post(inst); } while (0)
#define amd64_epilog_size(inst,reg_mask,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_epilog((inst),(reg_mask)); amd64_codegen_post(inst); } while (0)
#define amd64_xadd_reg_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_xadd_reg_reg ((inst), ((dreg)&0x7), ((reg)&0x7), (size)); amd64_codegen_post(inst); } while (0)
#define amd64_xadd_mem_reg_size(inst,mem,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_xadd_mem_reg((inst),(mem),((reg)&0x7), (size)); amd64_codegen_post(inst); } while (0)
#define amd64_xadd_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_xadd_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7),(size)); amd64_codegen_post(inst); } while (0)
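/*
 * How the _size macros compose (informal sketch): each macro first emits
 * a REX prefix computed from the requested operand size and from any
 * extended (r8-r15) operand registers, then delegates to the matching
 * x86 macro with the register numbers masked to their low 3 bits, since
 * the fourth register bit travels in REX.R/REX.X/REX.B. Assuming a
 * hypothetical code buffer `code`:
 *
 *   amd64_mov_reg_reg_size (code, AMD64_R8, AMD64_RCX, 8);
 *
 * emits REX with W=1 (64-bit operand) and R=1 (the destination is r8),
 * then the 32-bit mov encoding with ModRM built from the masked numbers,
 * i.e. roughly x86_mov_reg_reg (code, 0, 1, 4).
 */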
#define amd64_breakpoint(inst) amd64_breakpoint_size(inst,8)
#define amd64_cld(inst) amd64_cld_size(inst,8)
#define amd64_stosb(inst) amd64_stosb_size(inst,8)
#define amd64_stosl(inst) amd64_stosl_size(inst,8)
#define amd64_stosd(inst) amd64_stosd_size(inst,8)
#define amd64_movsb(inst) amd64_movsb_size(inst,8)
#define amd64_movsl(inst) amd64_movsl_size(inst,8)
#define amd64_movsd(inst) amd64_movsd_size(inst,8)
#define amd64_prefix(inst,p) amd64_prefix_size(inst,p,8)
#define amd64_rdtsc(inst) amd64_rdtsc_size(inst,8)
#define amd64_cmpxchg_reg_reg(inst,dreg,reg) amd64_cmpxchg_reg_reg_size(inst,dreg,reg,8)
#define amd64_cmpxchg_mem_reg(inst,mem,reg) amd64_cmpxchg_mem_reg_size(inst,mem,reg,8)
#define amd64_cmpxchg_membase_reg(inst,basereg,disp,reg) amd64_cmpxchg_membase_reg_size(inst,basereg,disp,reg,8)
#define amd64_xchg_reg_reg(inst,dreg,reg,size) amd64_xchg_reg_reg_size(inst,dreg,reg,size)
#define amd64_xchg_mem_reg(inst,mem,reg,size) amd64_xchg_mem_reg_size(inst,mem,reg,size)
#define amd64_xchg_membase_reg(inst,basereg,disp,reg,size) amd64_xchg_membase_reg_size(inst,basereg,disp,reg,size)
#define amd64_xadd_reg_reg(inst,dreg,reg,size) amd64_xadd_reg_reg_size(inst,dreg,reg,size)
#define amd64_xadd_mem_reg(inst,mem,reg,size) amd64_xadd_mem_reg_size(inst,mem,reg,size)
#define amd64_xadd_membase_reg(inst,basereg,disp,reg,size) amd64_xadd_membase_reg_size(inst,basereg,disp,reg,size)
#define amd64_inc_mem(inst,mem) amd64_inc_mem_size(inst,mem,8)
#define amd64_inc_membase(inst,basereg,disp) amd64_inc_membase_size(inst,basereg,disp,8)
#define amd64_inc_reg(inst,reg) amd64_inc_reg_size(inst,reg,8)
#define amd64_dec_mem(inst,mem) amd64_dec_mem_size(inst,mem,8)
#define amd64_dec_membase(inst,basereg,disp) amd64_dec_membase_size(inst,basereg,disp,8)
#define amd64_dec_reg(inst,reg) amd64_dec_reg_size(inst,reg,8)
#define amd64_not_mem(inst,mem) amd64_not_mem_size(inst,mem,8)
#define amd64_not_membase(inst,basereg,disp) amd64_not_membase_size(inst,basereg,disp,8)
#define amd64_not_reg(inst,reg) amd64_not_reg_size(inst,reg,8)
#define amd64_neg_mem(inst,mem) amd64_neg_mem_size(inst,mem,8)
#define amd64_neg_membase(inst,basereg,disp) amd64_neg_membase_size(inst,basereg,disp,8)
#define amd64_neg_reg(inst,reg) amd64_neg_reg_size(inst,reg,8)
#define amd64_nop(inst) amd64_nop_size(inst,8)
//#define amd64_alu_reg_imm(inst,opc,reg,imm) amd64_alu_reg_imm_size(inst,opc,reg,imm,8)
#define amd64_alu_mem_imm(inst,opc,mem,imm) amd64_alu_mem_imm_size(inst,opc,mem,imm,8)
#define amd64_alu_membase_imm(inst,opc,basereg,disp,imm) amd64_alu_membase_imm_size(inst,opc,basereg,disp,imm,8)
#define amd64_alu_mem_reg(inst,opc,mem,reg) amd64_alu_mem_reg_size(inst,opc,mem,reg,8)
#define amd64_alu_membase_reg(inst,opc,basereg,disp,reg) amd64_alu_membase_reg_size(inst,opc,basereg,disp,reg,8)
//#define amd64_alu_reg_reg(inst,opc,dreg,reg) amd64_alu_reg_reg_size(inst,opc,dreg,reg,8)
#define amd64_alu_reg8_reg8(inst,opc,dreg,reg,is_dreg_h,is_reg_h) amd64_alu_reg8_reg8_size(inst,opc,dreg,reg,is_dreg_h,is_reg_h,8)
#define amd64_alu_reg_mem(inst,opc,reg,mem) amd64_alu_reg_mem_size(inst,opc,reg,mem,8)
#define amd64_alu_reg_membase(inst,opc,reg,basereg,disp) amd64_alu_reg_membase_size(inst,opc,reg,basereg,disp,8)
#define amd64_test_reg_imm(inst,reg,imm) amd64_test_reg_imm_size(inst,reg,imm,8)
#define amd64_test_mem_imm(inst,mem,imm) amd64_test_mem_imm_size(inst,mem,imm,8)
#define amd64_test_membase_imm(inst,basereg,disp,imm) amd64_test_membase_imm_size(inst,basereg,disp,imm,8)
#define amd64_test_reg_reg(inst,dreg,reg) amd64_test_reg_reg_size(inst,dreg,reg,8)
#define amd64_test_mem_reg(inst,mem,reg) amd64_test_mem_reg_size(inst,mem,reg,8)
#define amd64_test_membase_reg(inst,basereg,disp,reg) amd64_test_membase_reg_size(inst,basereg,disp,reg,8)
#define amd64_shift_reg_imm(inst,opc,reg,imm) amd64_shift_reg_imm_size(inst,opc,reg,imm,8)
#define amd64_shift_mem_imm(inst,opc,mem,imm) amd64_shift_mem_imm_size(inst,opc,mem,imm,8)
#define amd64_shift_membase_imm(inst,opc,basereg,disp,imm) amd64_shift_membase_imm_size(inst,opc,basereg,disp,imm,8)
#define amd64_shift_reg(inst,opc,reg) amd64_shift_reg_size(inst,opc,reg,8)
#define amd64_shift_mem(inst,opc,mem) amd64_shift_mem_size(inst,opc,mem,8)
#define amd64_shift_membase(inst,opc,basereg,disp) amd64_shift_membase_size(inst,opc,basereg,disp,8)
#define amd64_shrd_reg(inst,dreg,reg) amd64_shrd_reg_size(inst,dreg,reg,8)
#define amd64_shrd_reg_imm(inst,dreg,reg,shamt) amd64_shrd_reg_imm_size(inst,dreg,reg,shamt,8)
#define amd64_shld_reg(inst,dreg,reg) amd64_shld_reg_size(inst,dreg,reg,8)
#define amd64_shld_reg_imm(inst,dreg,reg,shamt) amd64_shld_reg_imm_size(inst,dreg,reg,shamt,8)
#define amd64_mul_reg(inst,reg,is_signed) amd64_mul_reg_size(inst,reg,is_signed,8)
#define amd64_mul_mem(inst,mem,is_signed) amd64_mul_mem_size(inst,mem,is_signed,8)
#define amd64_mul_membase(inst,basereg,disp,is_signed) amd64_mul_membase_size(inst,basereg,disp,is_signed,8)
#define amd64_imul_reg_reg(inst,dreg,reg) amd64_imul_reg_reg_size(inst,dreg,reg,8)
#define amd64_imul_reg_mem(inst,reg,mem) amd64_imul_reg_mem_size(inst,reg,mem,8)
#define amd64_imul_reg_membase(inst,reg,basereg,disp) amd64_imul_reg_membase_size(inst,reg,basereg,disp,8)
#define amd64_imul_reg_reg_imm(inst,dreg,reg,imm) amd64_imul_reg_reg_imm_size(inst,dreg,reg,imm,8)
#define amd64_imul_reg_mem_imm(inst,reg,mem,imm) amd64_imul_reg_mem_imm_size(inst,reg,mem,imm,8)
#define amd64_imul_reg_membase_imm(inst,reg,basereg,disp,imm) amd64_imul_reg_membase_imm_size(inst,reg,basereg,disp,imm,8)
#define amd64_div_reg(inst,reg,is_signed) amd64_div_reg_size(inst,reg,is_signed,8)
#define amd64_div_mem(inst,mem,is_signed) amd64_div_mem_size(inst,mem,is_signed,8)
#define amd64_div_membase(inst,basereg,disp,is_signed) amd64_div_membase_size(inst,basereg,disp,is_signed,8)
//#define amd64_mov_mem_reg(inst,mem,reg,size) amd64_mov_mem_reg_size(inst,mem,reg,size)
//#define amd64_mov_regp_reg(inst,regp,reg,size) amd64_mov_regp_reg_size(inst,regp,reg,size)
//#define amd64_mov_membase_reg(inst,basereg,disp,reg,size) amd64_mov_membase_reg_size(inst,basereg,disp,reg,size)
#define amd64_mov_memindex_reg(inst,basereg,disp,indexreg,shift,reg,size) amd64_mov_memindex_reg_size(inst,basereg,disp,indexreg,shift,reg,size)
//#define amd64_mov_reg_reg(inst,dreg,reg,size) amd64_mov_reg_reg_size(inst,dreg,reg,size)
//#define amd64_mov_reg_mem(inst,reg,mem,size) amd64_mov_reg_mem_size(inst,reg,mem,size)
//#define amd64_mov_reg_membase(inst,reg,basereg,disp,size) amd64_mov_reg_membase_size(inst,reg,basereg,disp,size)
#define amd64_mov_reg_memindex(inst,reg,basereg,disp,indexreg,shift,size) amd64_mov_reg_memindex_size(inst,reg,basereg,disp,indexreg,shift,size)
#define amd64_clear_reg(inst,reg) amd64_clear_reg_size(inst,reg,8)
//#define amd64_mov_reg_imm(inst,reg,imm) amd64_mov_reg_imm_size(inst,reg,imm,8)
#define amd64_mov_mem_imm(inst,mem,imm,size) amd64_mov_mem_imm_size(inst,mem,imm,size)
//#define amd64_mov_membase_imm(inst,basereg,disp,imm,size) amd64_mov_membase_imm_size(inst,basereg,disp,imm,size)
#define amd64_mov_memindex_imm(inst,basereg,disp,indexreg,shift,imm,size) amd64_mov_memindex_imm_size(inst,basereg,disp,indexreg,shift,imm,size)
#define amd64_lea_mem(inst,reg,mem) amd64_lea_mem_size(inst,reg,mem,8)
//#define amd64_lea_membase(inst,reg,basereg,disp) amd64_lea_membase_size(inst,reg,basereg,disp,8)
#define amd64_lea_memindex(inst,reg,basereg,disp,indexreg,shift) amd64_lea_memindex_size(inst,reg,basereg,disp,indexreg,shift,8)
#define amd64_widen_reg(inst,dreg,reg,is_signed,is_half) amd64_widen_reg_size(inst,dreg,reg,is_signed,is_half,8)
#define amd64_widen_mem(inst,dreg,mem,is_signed,is_half) amd64_widen_mem_size(inst,dreg,mem,is_signed,is_half,8)
#define amd64_widen_membase(inst,dreg,basereg,disp,is_signed,is_half) amd64_widen_membase_size(inst,dreg,basereg,disp,is_signed,is_half,8)
#define amd64_widen_memindex(inst,dreg,basereg,disp,indexreg,shift,is_signed,is_half) amd64_widen_memindex_size(inst,dreg,basereg,disp,indexreg,shift,is_signed,is_half,8)
#define amd64_cdq(inst) amd64_cdq_size(inst,8)
#define amd64_wait(inst) amd64_wait_size(inst,8)
#define amd64_fp_op_mem(inst,opc,mem,is_double) amd64_fp_op_mem_size(inst,opc,mem,is_double,8)
#define amd64_fp_op_membase(inst,opc,basereg,disp,is_double) amd64_fp_op_membase_size(inst,opc,basereg,disp,is_double,8)
#define amd64_fp_op(inst,opc,index) amd64_fp_op_size(inst,opc,index,8)
#define amd64_fp_op_reg(inst,opc,index,pop_stack) amd64_fp_op_reg_size(inst,opc,index,pop_stack,8)
#define amd64_fp_int_op_membase(inst,opc,basereg,disp,is_int) amd64_fp_int_op_membase_size(inst,opc,basereg,disp,is_int,8)
#define amd64_fstp(inst,index) amd64_fstp_size(inst,index,8)
#define amd64_fcompp(inst) amd64_fcompp_size(inst,8)
#define amd64_fucompp(inst) amd64_fucompp_size(inst,8)
#define amd64_fnstsw(inst) amd64_fnstsw_size(inst,8)
#define amd64_fnstcw(inst,mem) amd64_fnstcw_size(inst,mem,8)
#define amd64_fnstcw_membase(inst,basereg,disp) amd64_fnstcw_membase_size(inst,basereg,disp,8)
#define amd64_fldcw(inst,mem) amd64_fldcw_size(inst,mem,8)
#define amd64_fldcw_membase(inst,basereg,disp) amd64_fldcw_membase_size(inst,basereg,disp,8)
#define amd64_fchs(inst) amd64_fchs_size(inst,8)
#define amd64_frem(inst) amd64_frem_size(inst,8)
#define amd64_fxch(inst,index) amd64_fxch_size(inst,index,8)
#define amd64_fcomi(inst,index) amd64_fcomi_size(inst,index,8)
#define amd64_fcomip(inst,index) amd64_fcomip_size(inst,index,8)
#define amd64_fucomi(inst,index) amd64_fucomi_size(inst,index,8)
#define amd64_fucomip(inst,index) amd64_fucomip_size(inst,index,8)
#define amd64_fld(inst,mem,is_double) amd64_fld_size(inst,mem,is_double,8)
#define amd64_fld_membase(inst,basereg,disp,is_double) amd64_fld_membase_size(inst,basereg,disp,is_double,8)
#define amd64_fld80_mem(inst,mem) amd64_fld80_mem_size(inst,mem,8)
#define amd64_fld80_membase(inst,basereg,disp) amd64_fld80_membase_size(inst,basereg,disp,8)
#define amd64_fild(inst,mem,is_long) amd64_fild_size(inst,mem,is_long,8)
#define amd64_fild_membase(inst,basereg,disp,is_long) amd64_fild_membase_size(inst,basereg,disp,is_long,8)
#define amd64_fld_reg(inst,index) amd64_fld_reg_size(inst,index,8)
#define amd64_fldz(inst) amd64_fldz_size(inst,8)
#define amd64_fld1(inst) amd64_fld1_size(inst,8)
#define amd64_fldpi(inst) amd64_fldpi_size(inst,8)
#define amd64_fst(inst,mem,is_double,pop_stack) amd64_fst_size(inst,mem,is_double,pop_stack,8)
#define amd64_fst_membase(inst,basereg,disp,is_double,pop_stack) amd64_fst_membase_size(inst,basereg,disp,is_double,pop_stack,8)
#define amd64_fst80_mem(inst,mem) amd64_fst80_mem_size(inst,mem,8)
#define amd64_fst80_membase(inst,basereg,disp) amd64_fst80_membase_size(inst,basereg,disp,8)
#define amd64_fist_pop(inst,mem,is_long) amd64_fist_pop_size(inst,mem,is_long,8)
#define amd64_fist_pop_membase(inst,basereg,disp,is_long) amd64_fist_pop_membase_size(inst,basereg,disp,is_long,8)
#define amd64_fstsw(inst) amd64_fstsw_size(inst,8)
#define amd64_fist_membase(inst,basereg,disp,is_int) amd64_fist_membase_size(inst,basereg,disp,is_int,8)
//#define amd64_push_reg(inst,reg) amd64_push_reg_size(inst,reg,8)
#define amd64_push_regp(inst,reg) amd64_push_regp_size(inst,reg,8)
#define amd64_push_mem(inst,mem) amd64_push_mem_size(inst,mem,8)
//#define amd64_push_membase(inst,basereg,disp) amd64_push_membase_size(inst,basereg,disp,8)
#define amd64_push_memindex(inst,basereg,disp,indexreg,shift) amd64_push_memindex_size(inst,basereg,disp,indexreg,shift,8)
#define amd64_push_imm(inst,imm) amd64_push_imm_size(inst,imm,8)
//#define amd64_pop_reg(inst,reg) amd64_pop_reg_size(inst,reg,8)
#define amd64_pop_mem(inst,mem) amd64_pop_mem_size(inst,mem,8)
#define amd64_pop_membase(inst,basereg,disp) amd64_pop_membase_size(inst,basereg,disp,8)
#define amd64_pushad(inst) amd64_pushad_size(inst,8)
#define amd64_pushfd(inst) amd64_pushfd_size(inst,8)
#define amd64_popad(inst) amd64_popad_size(inst,8)
#define amd64_popfd(inst) amd64_popfd_size(inst,8)
#define amd64_loop(inst,imm) amd64_loop_size(inst,imm,8)
#define amd64_loope(inst,imm) amd64_loope_size(inst,imm,8)
#define amd64_loopne(inst,imm) amd64_loopne_size(inst,imm,8)
#define amd64_jump32(inst,imm) amd64_jump32_size(inst,imm,8)
#define amd64_jump8(inst,imm) amd64_jump8_size(inst,imm,8)
#define amd64_jump_reg(inst,reg) amd64_jump_reg_size(inst,reg,8)
#define amd64_jump_mem(inst,mem) amd64_jump_mem_size(inst,mem,8)
#define amd64_jump_membase(inst,basereg,disp) amd64_jump_membase_size(inst,basereg,disp,8)
#define amd64_jump_code(inst,target) amd64_jump_code_size(inst,target,8)
#define amd64_jump_disp(inst,disp) amd64_jump_disp_size(inst,disp,8)
#define amd64_branch8(inst,cond,imm,is_signed) amd64_branch8_size(inst,cond,imm,is_signed,8)
#define amd64_branch32(inst,cond,imm,is_signed) amd64_branch32_size(inst,cond,imm,is_signed,8)
#define amd64_branch(inst,cond,target,is_signed) amd64_branch_size(inst,cond,target,is_signed,8)
#define amd64_branch_disp(inst,cond,disp,is_signed) amd64_branch_disp_size(inst,cond,disp,is_signed,8)
#define amd64_set_reg(inst,cond,reg,is_signed) amd64_set_reg_size(inst,cond,reg,is_signed,8)
#define amd64_set_mem(inst,cond,mem,is_signed) amd64_set_mem_size(inst,cond,mem,is_signed,8)
#define amd64_set_membase(inst,cond,basereg,disp,is_signed) amd64_set_membase_size(inst,cond,basereg,disp,is_signed,8)
#define amd64_call_imm(inst,disp) amd64_call_imm_size(inst,disp,8)
//#define amd64_call_reg(inst,reg) amd64_call_reg_size(inst,reg,8)
#define amd64_call_mem(inst,mem) amd64_call_mem_size(inst,mem,8)
#define amd64_call_membase(inst,basereg,disp) amd64_call_membase_size(inst,basereg,disp,8)
#define amd64_call_code(inst,target) amd64_call_code_size(inst,target,8)
//#define amd64_ret(inst) amd64_ret_size(inst,8)
#define amd64_ret_imm(inst,imm) amd64_ret_imm_size(inst,imm,8)
#define amd64_cmov_reg(inst,cond,is_signed,dreg,reg) amd64_cmov_reg_size(inst,cond,is_signed,dreg,reg,8)
#define amd64_cmov_mem(inst,cond,is_signed,reg,mem) amd64_cmov_mem_size(inst,cond,is_signed,reg,mem,8)
#define amd64_cmov_membase(inst,cond,is_signed,reg,basereg,disp) amd64_cmov_membase_size(inst,cond,is_signed,reg,basereg,disp,8)
#define amd64_enter(inst,framesize) amd64_enter_size(inst,framesize,8)
//#define amd64_leave(inst) amd64_leave_size(inst,8)
#define amd64_sahf(inst) amd64_sahf_size(inst,8)
#define amd64_fsin(inst) amd64_fsin_size(inst,8)
#define amd64_fcos(inst) amd64_fcos_size(inst,8)
#define amd64_fabs(inst) amd64_fabs_size(inst,8)
#define amd64_ftst(inst) amd64_ftst_size(inst,8)
#define amd64_fxam(inst) amd64_fxam_size(inst,8)
#define amd64_fpatan(inst) amd64_fpatan_size(inst,8)
#define amd64_fprem(inst) amd64_fprem_size(inst,8)
#define amd64_fprem1(inst) amd64_fprem1_size(inst,8)
#define amd64_frndint(inst) amd64_frndint_size(inst,8)
#define amd64_fsqrt(inst) amd64_fsqrt_size(inst,8)
#define amd64_fptan(inst) amd64_fptan_size(inst,8)
#define amd64_padding(inst,size) amd64_padding_size(inst,size)
#define amd64_prolog(inst,frame,reg_mask) amd64_prolog_size(inst,frame,reg_mask,8)
#define amd64_epilog(inst,reg_mask) amd64_epilog_size(inst,reg_mask,8)
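/*
 * The wrappers above default the operand size to 8 bytes, so 64-bit
 * operation is the norm; narrower widths go through the explicit _size
 * macros. Informal sketch, assuming a hypothetical code buffer `code`:
 *
 *   amd64_inc_membase (code, AMD64_RSP, 8);          // 64-bit inc 8(%rsp)
 *   amd64_inc_membase_size (code, AMD64_RSP, 8, 4);  // 32-bit inc
 */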