/*
 * amd64-codegen.h: Macros for generating amd64 code
 *
 * Authors:
 *   Paolo Molaro (lupus@ximian.com)
 *   Intel Corporation (ORP Project)
 *   Sergey Chaban (serge@wildwestsoftware.com)
 *   Dietmar Maurer (dietmar@ximian.com)
 *
 * Copyright (C) 2000 Intel Corporation. All rights reserved.
 * Copyright (C) 2001, 2002 Ximian, Inc.
 */
// Conventions in this file:
//
// body: implementation; other macros call this one
// is_half: short if true, byte if false (then why is it named is_half...?)
// mem: read from an absolute address supplied as a 32-bit immediate
// membase: read from the address in a base register plus a displacement
// memindex: SIB addressing: (base register) + (index register << shift) + displacement
// reg: register, encoded with ModR/M mod bits 11 (register direct)
// regp: register holding a pointer, encoded with ModR/M mod bits 00 (register indirect)
// size: expected to be 1, 2, 4 or 8
// widen: extends from 1 or 2 bytes
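//
// For illustration, two of the addressing forms above as they are typically
// invoked (a sketch; the macros themselves are defined later in this file):
//
//   amd64_mov_reg_membase (code, AMD64_RAX, AMD64_RBP, -8, 8);
//       /* membase: rax <- [rbp - 8]; emits 48 8b 45 f8 */
//   amd64_mov_reg_memindex_size (code, AMD64_RAX, AMD64_RBX, 0, AMD64_RCX, 3, 8);
//       /* memindex: rax <- [rbx + rcx*8], encoded through a SIB byte */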
typedef enum
{
    AMD64_REX_B = 1, /* The register in the r/m field, the base register in the SIB byte, or the reg in the opcode is 8-15 rather than 0-7 */
    AMD64_REX_X = 2, /* The index register in the SIB byte is 8-15 rather than 0-7 */
    AMD64_REX_R = 4, /* The reg field of the ModRM byte is 8-15 rather than 0-7 */
    AMD64_REX_W = 8  /* Operation is 64-bit instead of 32-bit (the default) or 16-bit (with a 0x66 prefix) */
} AMD64_REX_Bits;
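/* Example: a 64-bit copy of r9 into rax needs AMD64_REX_W (64-bit width) and
 * AMD64_REX_B (r/m register 8-15), so amd64_mov_reg_reg (code, AMD64_RAX,
 * AMD64_R9, 8) emits 49 8b c1, where 0x49 == AMD64_REX (AMD64_REX_W | AMD64_REX_B).
 * (Illustrative; amd64_mov_reg_reg is defined below.) */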
#if defined(__default_codegen__)

#define amd64_codegen_pre(inst)
#define amd64_codegen_post(inst)

#elif defined(__native_client_codegen__)

#define amd64_codegen_pre(inst) guint8* _codegen_start = (inst); amd64_nacl_instruction_pre();
#define amd64_codegen_post(inst) (amd64_nacl_instruction_post(&_codegen_start, &(inst)), _codegen_start);

/* Because of rex prefixes, etc, call sequences are not constant size. */
/* These pre- and post-sequence hooks remedy this by aligning the call */
/* sequence after we emit it, since we will know the exact size then.  */
#define amd64_call_sequence_pre(inst) guint8* _code_start = (inst);
#define amd64_call_sequence_post(inst) \
    (mono_nacl_align_call(&_code_start, &(inst)), _code_start);

/* Native Client can load/store using one of the following registers     */
/* as a base: rip, r15, rbp, rsp.  Any other base register needs to have */
/* its upper 32 bits cleared and reference memory using r15 as the base. */
#define amd64_is_valid_nacl_base(reg) \
    ((reg) == AMD64_RIP || (reg) == AMD64_R15 || \
     (reg) == AMD64_RBP || (reg) == AMD64_RSP)

#endif /*__native_client_codegen__*/
#ifdef TARGET_WIN32
/* Win64 ABI: the first four integer arguments are passed in RCX, RDX, R8, R9 */
#define AMD64_ARG_REG1 AMD64_RCX
#define AMD64_ARG_REG2 AMD64_RDX
#define AMD64_ARG_REG3 AMD64_R8
#define AMD64_ARG_REG4 AMD64_R9
#else
/* System V AMD64 ABI: the first four integer arguments are passed in RDI, RSI, RDX, RCX */
#define AMD64_ARG_REG1 AMD64_RDI
#define AMD64_ARG_REG2 AMD64_RSI
#define AMD64_ARG_REG3 AMD64_RDX
#define AMD64_ARG_REG4 AMD64_RCX
#endif
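/* Usage sketch: marshalling the first two integer arguments before an
 * indirect call, written once against the AMD64_ARG_REG aliases so it is
 * correct under either ABI:
 *
 *     amd64_mov_reg_reg (code, AMD64_ARG_REG1, AMD64_RBX, 8);
 *     amd64_mov_reg_imm (code, AMD64_ARG_REG2, 42);
 *     amd64_call_reg (code, AMD64_R11);
 */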
#ifdef TARGET_WIN32
#define AMD64_CALLEE_REGS ((1<<AMD64_RAX) | (1<<AMD64_RCX) | (1<<AMD64_RDX) | (1<<AMD64_R8) | (1<<AMD64_R9) | (1<<AMD64_R10))
#define AMD64_IS_CALLEE_REG(reg) (AMD64_CALLEE_REGS & (1 << (reg)))

#define AMD64_ARGUMENT_REGS ((1<<AMD64_RDX) | (1<<AMD64_RCX) | (1<<AMD64_R8) | (1<<AMD64_R9))
#define AMD64_IS_ARGUMENT_REG(reg) (AMD64_ARGUMENT_REGS & (1 << (reg)))

#define AMD64_CALLEE_SAVED_REGS ((1<<AMD64_RDI) | (1<<AMD64_RSI) | (1<<AMD64_RBX) | (1<<AMD64_R12) | (1<<AMD64_R13) | (1<<AMD64_R14) | (1<<AMD64_R15) | (1<<AMD64_RBP))
#define AMD64_IS_CALLEE_SAVED_REG(reg) (AMD64_CALLEE_SAVED_REGS & (1 << (reg)))
#elif defined(__native_client_codegen__)
/* AMD64 Native Client code may not write R15 */
#define AMD64_CALLEE_REGS ((1<<AMD64_RAX) | (1<<AMD64_RCX) | (1<<AMD64_RDX) | (1<<AMD64_RSI) | (1<<AMD64_RDI) | (1<<AMD64_R8) | (1<<AMD64_R9) | (1<<AMD64_R10))
#define AMD64_IS_CALLEE_REG(reg) (AMD64_CALLEE_REGS & (1 << (reg)))

#define AMD64_ARGUMENT_REGS ((1<<AMD64_RDI) | (1<<AMD64_RSI) | (1<<AMD64_RDX) | (1<<AMD64_RCX) | (1<<AMD64_R8) | (1<<AMD64_R9))
#define AMD64_IS_ARGUMENT_REG(reg) (AMD64_ARGUMENT_REGS & (1 << (reg)))

#define AMD64_CALLEE_SAVED_REGS ((1<<AMD64_RBX) | (1<<AMD64_R12) | (1<<AMD64_R13) | (1<<AMD64_R14) | (1<<AMD64_RBP))
#define AMD64_IS_CALLEE_SAVED_REG(reg) (AMD64_CALLEE_SAVED_REGS & (1 << (reg)))
#else
#define AMD64_CALLEE_REGS ((1<<AMD64_RAX) | (1<<AMD64_RCX) | (1<<AMD64_RDX) | (1<<AMD64_RSI) | (1<<AMD64_RDI) | (1<<AMD64_R8) | (1<<AMD64_R9) | (1<<AMD64_R10))
#define AMD64_IS_CALLEE_REG(reg) (AMD64_CALLEE_REGS & (1 << (reg)))

#define AMD64_ARGUMENT_REGS ((1<<AMD64_RDI) | (1<<AMD64_RSI) | (1<<AMD64_RDX) | (1<<AMD64_RCX) | (1<<AMD64_R8) | (1<<AMD64_R9))
#define AMD64_IS_ARGUMENT_REG(reg) (AMD64_ARGUMENT_REGS & (1 << (reg)))

#define AMD64_CALLEE_SAVED_REGS ((1<<AMD64_RBX) | (1<<AMD64_R12) | (1<<AMD64_R13) | (1<<AMD64_R14) | (1<<AMD64_R15) | (1<<AMD64_RBP))
#define AMD64_IS_CALLEE_SAVED_REG(reg) (AMD64_CALLEE_SAVED_REGS & (1 << (reg)))
#endif
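/* Usage sketch: a prologue loop that saves every callee-saved register.
 * Because the masks above differ per configuration, the same loop works for
 * all three (AMD64_NREG is assumed to be the count enumerator of the register
 * enum earlier in this header):
 *
 *     int reg;
 *     for (reg = 0; reg < AMD64_NREG; ++reg)
 *             if (AMD64_IS_CALLEE_SAVED_REG (reg))
 *                     amd64_push_reg (code, reg);
 */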
#define AMD64_REX(bits) ((unsigned char)(0x40 | (bits)))

#if defined(__default_codegen__)
#define amd64_emit_rex(inst, width, reg_modrm, reg_index, reg_rm_base_opcode) do \
    { \
        unsigned char _amd64_rex_bits = \
            (((width) > 4) ? AMD64_REX_W : 0) | \
            (((reg_modrm) > 7) ? AMD64_REX_R : 0) | \
            (((reg_index) > 7) ? AMD64_REX_X : 0) | \
            (((reg_rm_base_opcode) > 7) ? AMD64_REX_B : 0); \
        if ((_amd64_rex_bits != 0) || (((width) == 1))) *(inst)++ = AMD64_REX(_amd64_rex_bits); \
    } while (0)
#elif defined(__native_client_codegen__)
#define amd64_emit_rex(inst, width, reg_modrm, reg_index, reg_rm_base_opcode) do \
    { \
        unsigned char _amd64_rex_bits = \
            (((width) > 4) ? AMD64_REX_W : 0) | \
            (((reg_modrm) > 7) ? AMD64_REX_R : 0) | \
            (((reg_index) > 7) ? AMD64_REX_X : 0) | \
            (((reg_rm_base_opcode) > 7) ? AMD64_REX_B : 0); \
        amd64_nacl_tag_rex((inst)); \
        if ((_amd64_rex_bits != 0) || (((width) == 1))) *(inst)++ = AMD64_REX(_amd64_rex_bits); \
    } while (0)
#endif /*__native_client_codegen__*/
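/* What amd64_emit_rex actually emits, for illustration:
 *   width 8, all registers 0-7   -> 0x48 (REX.W only)
 *   width 4, all registers 0-7   -> nothing (no REX byte needed)
 *   width 4, r/m register 8-15   -> 0x41 (REX.B)
 *   width 1, any registers       -> at least 0x40: an empty REX is forced so
 *     that byte ops on RSP/RBP/RSI/RDI reach SPL/BPL/SIL/DIL instead of
 *     AH/CH/DH/BH
 */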
#include "../x86/x86-codegen.h"

/* In 64 bit mode, all registers have a low byte subregister */
#undef X86_IS_BYTE_REG
#define X86_IS_BYTE_REG(reg) 1

#define amd64_modrm_mod(modrm) ((modrm) >> 6)
#define amd64_modrm_reg(modrm) (((modrm) >> 3) & 0x7)
#define amd64_modrm_rm(modrm) ((modrm) & 0x7)

#define amd64_rex_r(rex) ((((rex) >> 2) & 0x1) << 3)
#define amd64_rex_x(rex) ((((rex) >> 1) & 0x1) << 3)
#define amd64_rex_b(rex) ((((rex) >> 0) & 0x1) << 3)

#define amd64_sib_scale(sib) ((sib) >> 6)
#define amd64_sib_index(sib) (((sib) >> 3) & 0x7)
#define amd64_sib_base(sib) ((sib) & 0x7)
#define amd64_is_imm32(val) ((gint64)(val) >= -((gint64)1<<31) && (gint64)(val) <= (((gint64)1<<31)-1))
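/* Example: amd64_is_imm32 (0x7fffffffLL) and amd64_is_imm32 (-2147483648LL)
 * hold, so such values fit a sign-extended 32-bit immediate, while
 * amd64_is_imm32 (0x80000000LL) fails and needs a full 64-bit immediate. */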
#define x86_imm_emit64(inst,imm) \
    do { \
        amd64_imm_buf imb; /* the 8-byte val/bytes union declared with the register enums earlier in this header */ \
        imb.val = (guint64) (imm); \
        *(inst)++ = imb.b [0]; \
        *(inst)++ = imb.b [1]; \
        *(inst)++ = imb.b [2]; \
        *(inst)++ = imb.b [3]; \
        *(inst)++ = imb.b [4]; \
        *(inst)++ = imb.b [5]; \
        *(inst)++ = imb.b [6]; \
        *(inst)++ = imb.b [7]; \
    } while (0)
#define amd64_membase_emit(inst,reg,basereg,disp) do { \
    if ((basereg) == AMD64_RIP) { \
        x86_address_byte ((inst), 0, (reg)&0x7, 5); \
        x86_imm_emit32 ((inst), (disp)); \
    } \
    else \
        x86_membase_emit ((inst),(reg)&0x7, (basereg)&0x7, (disp)); \
} while (0)
#define amd64_alu_reg_imm_size_body(inst,opc,reg,imm,size) \
    do { \
        if (x86_is_imm8((imm))) { \
            amd64_emit_rex(inst, size, 0, 0, (reg)); \
            *(inst)++ = (unsigned char)0x83; \
            x86_reg_emit ((inst), (opc), (reg)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else if ((reg) == AMD64_RAX) { \
            amd64_emit_rex(inst, size, 0, 0, 0); \
            *(inst)++ = (((unsigned char)(opc)) << 3) + 5; \
            x86_imm_emit32 ((inst), (imm)); \
        } else { \
            amd64_emit_rex(inst, size, 0, 0, (reg)); \
            *(inst)++ = (unsigned char)0x81; \
            x86_reg_emit ((inst), (opc), (reg)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
    } while (0)
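/* Example: amd64_alu_reg_imm (code, X86_SUB, AMD64_RSP, 32) takes the imm8
 * path above and emits 48 83 ec 20 (sub rsp, 32); an immediate that does not
 * fit a signed byte, e.g. 0x1000, takes the 0x81 path instead and emits
 * 48 81 ec 00 10 00 00. */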
#define amd64_alu_reg_reg_size_body(inst,opc,dreg,reg,size) \
    do { \
        amd64_emit_rex(inst, size, (dreg), 0, (reg)); \
        *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
        x86_reg_emit ((inst), (dreg), (reg)); \
    } while (0)
#if defined(__default_codegen__)

#define amd64_alu_reg_imm_size(inst,opc,reg,imm,size) \
    amd64_alu_reg_imm_size_body((inst), (opc), (reg), (imm), (size))

#define amd64_alu_reg_reg_size(inst,opc,dreg,reg,size) \
    amd64_alu_reg_reg_size_body((inst), (opc), (dreg), (reg), (size))

#elif defined(__native_client_codegen__)
/* NaCl modules may not directly update RSP or RBP other than direct copies */
/* between them. Instead the lower 4 bytes are updated and then added to R15 */
#define amd64_is_nacl_stack_reg(reg) (((reg) == AMD64_RSP) || ((reg) == AMD64_RBP))
#define amd64_alu_reg_imm_size(inst,opc,reg,imm,size) \
    do { \
        amd64_codegen_pre(inst); \
        if (amd64_is_nacl_stack_reg(reg)) { \
            if (((opc) != X86_ADD) && ((opc) != X86_SUB)) \
                g_assert_not_reached(); \
            amd64_alu_reg_imm_size_body((inst), (opc), (reg), (imm), 4); \
            /* Use LEA instead of ADD to preserve flags */ \
            amd64_lea_memindex_size((inst), (reg), (reg), 0, AMD64_R15, 0, 8); \
        } else { \
            amd64_alu_reg_imm_size_body((inst), (opc), (reg), (imm), (size)); \
        } \
        amd64_codegen_post(inst); \
    } while (0)
#define amd64_alu_reg_reg_size(inst,opc,dreg,reg,size) \
    do { \
        amd64_codegen_pre(inst); \
        if (amd64_is_nacl_stack_reg((dreg)) && ((reg) != AMD64_R15)) { \
            if (((opc) != X86_ADD) && ((opc) != X86_SUB)) \
                g_assert_not_reached(); \
            amd64_alu_reg_reg_size_body((inst), (opc), (dreg), (reg), 4); \
            /* Use LEA instead of ADD to preserve flags */ \
            amd64_lea_memindex_size((inst), (dreg), (dreg), 0, AMD64_R15, 0, 8); \
        } else { \
            amd64_alu_reg_reg_size_body((inst), (opc), (dreg), (reg), (size)); \
        } \
        amd64_codegen_post(inst); \
    } while (0)

#endif /*__native_client_codegen__*/
#define amd64_alu_reg_imm(inst,opc,reg,imm) amd64_alu_reg_imm_size((inst),(opc),(reg),(imm),8)

#define amd64_alu_reg_reg(inst,opc,dreg,reg) amd64_alu_reg_reg_size ((inst),(opc),(dreg),(reg),8)
#define amd64_alu_reg_membase_size(inst,opc,reg,basereg,disp,size) \
    do { \
        amd64_codegen_pre(inst); \
        amd64_emit_rex ((inst),(size),(reg),0,(basereg)); \
        *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
        amd64_membase_emit (inst, reg, basereg, disp); \
        amd64_codegen_post(inst); \
    } while (0)
#define amd64_mov_regp_reg(inst,regp,reg,size) \
    do { \
        amd64_codegen_pre(inst); \
        if ((size) == 2) \
            x86_prefix((inst), X86_OPERAND_PREFIX); \
        amd64_emit_rex(inst, (size), (reg), 0, (regp)); \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x88; break; \
        case 2: case 4: case 8: *(inst)++ = (unsigned char)0x89; break; \
        default: assert (0); \
        } \
        x86_regp_emit ((inst), (reg), (regp)); \
        amd64_codegen_post(inst); \
    } while (0)
#define amd64_mov_membase_reg(inst,basereg,disp,reg,size) \
    do { \
        amd64_codegen_pre(inst); \
        if ((size) == 2) \
            x86_prefix((inst), X86_OPERAND_PREFIX); \
        amd64_emit_rex(inst, (size), (reg), 0, (basereg)); \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x88; break; \
        case 2: case 4: case 8: *(inst)++ = (unsigned char)0x89; break; \
        default: assert (0); \
        } \
        x86_membase_emit ((inst), ((reg)&0x7), ((basereg)&0x7), (disp)); \
        amd64_codegen_post(inst); \
    } while (0)
#define amd64_mov_mem_reg(inst,mem,reg,size) \
    do { \
        amd64_codegen_pre(inst); \
        if ((size) == 2) \
            x86_prefix((inst), X86_OPERAND_PREFIX); \
        amd64_emit_rex(inst, (size), (reg), 0, 0); \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x88; break; \
        case 2: case 4: case 8: *(inst)++ = (unsigned char)0x89; break; \
        default: assert (0); \
        } \
        x86_address_byte ((inst), 0, (reg), 4); \
        x86_address_byte ((inst), 0, 4, 5); \
        x86_imm_emit32 ((inst), (mem)); \
        amd64_codegen_post(inst); \
    } while (0)
#define amd64_mov_reg_reg(inst,dreg,reg,size) \
    do { \
        amd64_codegen_pre(inst); \
        if ((size) == 2) \
            x86_prefix((inst), X86_OPERAND_PREFIX); \
        amd64_emit_rex(inst, (size), (dreg), 0, (reg)); \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x8a; break; \
        case 2: case 4: case 8: *(inst)++ = (unsigned char)0x8b; break; \
        default: assert (0); \
        } \
        x86_reg_emit ((inst), (dreg), (reg)); \
        amd64_codegen_post(inst); \
    } while (0)
#define amd64_mov_reg_mem_body(inst,reg,mem,size) \
    do { \
        amd64_codegen_pre(inst); \
        if ((size) == 2) \
            x86_prefix((inst), X86_OPERAND_PREFIX); \
        amd64_emit_rex(inst, (size), (reg), 0, 0); \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x8a; break; \
        case 2: case 4: case 8: *(inst)++ = (unsigned char)0x8b; break; \
        default: assert (0); \
        } \
        x86_address_byte ((inst), 0, (reg), 4); \
        x86_address_byte ((inst), 0, 4, 5); \
        x86_imm_emit32 ((inst), (mem)); \
        amd64_codegen_post(inst); \
    } while (0)
#if defined(__default_codegen__)
#define amd64_mov_reg_mem(inst,reg,mem,size) \
    do { \
        amd64_mov_reg_mem_body((inst),(reg),(mem),(size)); \
    } while (0)
#elif defined(__native_client_codegen__)
/* We have to re-base memory reads because memory isn't zero based. */
#define amd64_mov_reg_mem(inst,reg,mem,size) \
    do { \
        amd64_mov_reg_membase((inst),(reg),AMD64_R15,(mem),(size)); \
    } while (0)
#endif /* __native_client_codegen__ */
#define amd64_mov_reg_membase_body(inst,reg,basereg,disp,size) \
    do { \
        if ((size) == 2) \
            x86_prefix((inst), X86_OPERAND_PREFIX); \
        amd64_emit_rex(inst, (size), (reg), 0, (basereg)); \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x8a; break; \
        case 2: case 4: case 8: *(inst)++ = (unsigned char)0x8b; break; \
        default: assert (0); \
        } \
        amd64_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)
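/* Example: amd64_mov_reg_membase (code, AMD64_RAX, AMD64_RBP, -8, 8) emits
 * 48 8b 45 f8 (mov rax, [rbp-8]).  With AMD64_RIP as the base register,
 * amd64_membase_emit instead produces the RIP-relative form with a 32-bit
 * displacement. */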
#define amd64_mov_reg_memindex_size_body(inst,reg,basereg,disp,indexreg,shift,size) \
    do { \
        amd64_emit_rex ((inst),(size),(reg),(indexreg),(basereg)); \
        x86_mov_reg_memindex((inst),((reg)&0x7),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),(size) == 8 ? 4 : (size)); \
    } while (0)
#if defined(__default_codegen__)

#define amd64_mov_reg_memindex_size(inst,reg,basereg,disp,indexreg,shift,size) \
    amd64_mov_reg_memindex_size_body((inst),(reg),(basereg),(disp),(indexreg),(shift),(size))
#define amd64_mov_reg_membase(inst,reg,basereg,disp,size) \
    do { \
        amd64_mov_reg_membase_body((inst), (reg), (basereg), (disp), (size)); \
    } while (0)
#elif defined(__native_client_codegen__)

#define amd64_mov_reg_memindex_size(inst,reg,basereg,disp,indexreg,shift,size) \
    do { \
        amd64_codegen_pre(inst); \
        if (amd64_is_nacl_stack_reg((reg))) { \
            /* Clear upper 32 bits with mov of size 4 */ \
            amd64_mov_reg_memindex_size_body((inst), (reg), (basereg), (disp), (indexreg), (shift), 4); \
            /* Add %r15 using LEA to preserve flags */ \
            amd64_lea_memindex_size((inst), (reg), (reg), 0, AMD64_R15, 0, 8); \
        } else { \
            amd64_mov_reg_memindex_size_body((inst), (reg), (basereg), (disp), (indexreg), (shift), (size)); \
        } \
        amd64_codegen_post(inst); \
    } while (0)

#define amd64_mov_reg_membase(inst,reg,basereg,disp,size) \
    do { \
        amd64_codegen_pre(inst); \
        if (amd64_is_nacl_stack_reg((reg))) { \
            /* Clear upper 32 bits with mov of size 4 */ \
            amd64_mov_reg_membase_body((inst), (reg), (basereg), (disp), 4); \
            /* Add %r15 using LEA to preserve flags */ \
            amd64_lea_memindex_size((inst), (reg), (reg), 0, AMD64_R15, 0, 8); \
        } else { \
            amd64_mov_reg_membase_body((inst), (reg), (basereg), (disp), (size)); \
        } \
        amd64_codegen_post(inst); \
    } while (0)

#endif /*__native_client_codegen__*/
#define amd64_movzx_reg_membase(inst,reg,basereg,disp,size) \
    do { \
        amd64_codegen_pre(inst); \
        amd64_emit_rex(inst, (size), (reg), 0, (basereg)); \
        switch ((size)) { \
        case 1: *(inst)++ = (unsigned char)0x0f; *(inst)++ = (unsigned char)0xb6; break; \
        case 2: *(inst)++ = (unsigned char)0x0f; *(inst)++ = (unsigned char)0xb7; break; \
        case 4: case 8: *(inst)++ = (unsigned char)0x8b; break; \
        default: assert (0); \
        } \
        x86_membase_emit ((inst), ((reg)&0x7), ((basereg)&0x7), (disp)); \
        amd64_codegen_post(inst); \
    } while (0)
#define amd64_movsxd_reg_mem(inst,reg,mem) \
    do { \
        amd64_codegen_pre(inst); \
        amd64_emit_rex(inst,8,(reg),0,0); \
        *(inst)++ = (unsigned char)0x63; \
        x86_mem_emit ((inst), ((reg)&0x7), (mem)); \
        amd64_codegen_post(inst); \
    } while (0)

#define amd64_movsxd_reg_membase(inst,reg,basereg,disp) \
    do { \
        amd64_codegen_pre(inst); \
        amd64_emit_rex(inst,8,(reg),0,(basereg)); \
        *(inst)++ = (unsigned char)0x63; \
        x86_membase_emit ((inst), ((reg)&0x7), ((basereg)&0x7), (disp)); \
        amd64_codegen_post(inst); \
    } while (0)

#define amd64_movsxd_reg_reg(inst,dreg,reg) \
    do { \
        amd64_codegen_pre(inst); \
        amd64_emit_rex(inst,8,(dreg),0,(reg)); \
        *(inst)++ = (unsigned char)0x63; \
        x86_reg_emit ((inst), (dreg), (reg)); \
        amd64_codegen_post(inst); \
    } while (0)
/* Pretty much the only instruction that supports a 64-bit immediate. Optimize for the
 * common case of a 32-bit immediate. Pepper with casts to avoid warnings.
 */
#define amd64_mov_reg_imm_size(inst,reg,imm,size) \
    do { \
        amd64_codegen_pre(inst); \
        amd64_emit_rex(inst, (size), 0, 0, (reg)); \
        *(inst)++ = (unsigned char)0xb8 + ((reg) & 0x7); \
        if ((size) == 8) \
            x86_imm_emit64 ((inst), (guint64)(imm)); \
        else \
            x86_imm_emit32 ((inst), (int)(guint64)(imm)); \
        amd64_codegen_post(inst); \
    } while (0)
#define amd64_mov_reg_imm(inst,reg,imm) \
    do { \
        int _amd64_width_temp = ((guint64)(imm) == (guint64)(int)(guint64)(imm)); \
        amd64_codegen_pre(inst); \
        amd64_mov_reg_imm_size ((inst), (reg), (imm), (_amd64_width_temp ? 4 : 8)); \
        amd64_codegen_post(inst); \
    } while (0)
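/* Example: amd64_mov_reg_imm (code, AMD64_RAX, 1) notices the value
 * round-trips through 32 bits and emits the 5-byte form b8 01 00 00 00,
 * whereas amd64_mov_reg_imm (code, AMD64_RAX, 0x123456789LL) emits the full
 * 10-byte form 48 b8 89 67 45 23 01 00 00 00. */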
#define amd64_set_reg_template(inst,reg) amd64_mov_reg_imm_size ((inst),(reg), 0, 8)

#define amd64_set_template(inst,reg) amd64_set_reg_template((inst),(reg))
#define amd64_mov_membase_imm(inst,basereg,disp,imm,size) \
    do { \
        amd64_codegen_pre(inst); \
        if ((size) == 2) \
            x86_prefix((inst), X86_OPERAND_PREFIX); \
        amd64_emit_rex(inst, (size) == 1 ? 0 : (size), 0, 0, (basereg)); \
        if ((size) == 1) { \
            *(inst)++ = (unsigned char)0xc6; \
            x86_membase_emit ((inst), 0, (basereg) & 0x7, (disp)); \
            x86_imm_emit8 ((inst), (imm)); \
        } else if ((size) == 2) { \
            *(inst)++ = (unsigned char)0xc7; \
            x86_membase_emit ((inst), 0, (basereg) & 0x7, (disp)); \
            x86_imm_emit16 ((inst), (imm)); \
        } else { \
            *(inst)++ = (unsigned char)0xc7; \
            x86_membase_emit ((inst), 0, (basereg) & 0x7, (disp)); \
            x86_imm_emit32 ((inst), (imm)); \
        } \
        amd64_codegen_post(inst); \
    } while (0)
#define amd64_lea_membase_body(inst,reg,basereg,disp) \
    do { \
        amd64_emit_rex(inst, 8, (reg), 0, (basereg)); \
        *(inst)++ = (unsigned char)0x8d; \
        amd64_membase_emit ((inst), (reg), (basereg), (disp)); \
    } while (0)

#if defined(__default_codegen__)
#define amd64_lea_membase(inst,reg,basereg,disp) \
    amd64_lea_membase_body((inst), (reg), (basereg), (disp))
#elif defined(__native_client_codegen__)
/* NaCl modules may not write directly into RSP/RBP. Instead, use a */
/* 32-bit LEA and add R15 to the effective address.                 */
#define amd64_lea_membase(inst,reg,basereg,disp) \
    do { \
        amd64_codegen_pre(inst); \
        if (amd64_is_nacl_stack_reg(reg)) { \
            /* 32-bit LEA */ \
            amd64_emit_rex((inst), 4, (reg), 0, (basereg)); \
            *(inst)++ = (unsigned char)0x8d; \
            amd64_membase_emit((inst), (reg), (basereg), (disp)); \
            /* Use a 64-bit LEA instead of an ADD to preserve flags */ \
            amd64_lea_memindex_size((inst), (reg), (reg), 0, AMD64_R15, 0, 8); \
        } else { \
            amd64_lea_membase_body((inst), (reg), (basereg), (disp)); \
        } \
        amd64_codegen_post(inst); \
    } while (0)
#endif /*__native_client_codegen__*/
/* Instruction is implicitly 64-bit, so don't generate a REX prefix just for the size. */
#define amd64_push_reg(inst,reg) \
    do { \
        amd64_codegen_pre(inst); \
        amd64_emit_rex(inst, 0, 0, 0, (reg)); \
        *(inst)++ = (unsigned char)0x50 + ((reg) & 0x7); \
        amd64_codegen_post(inst); \
    } while (0)
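/* Example: amd64_push_reg (code, AMD64_RBP) is the single byte 55, while
 * amd64_push_reg (code, AMD64_R12) needs REX.B and emits 41 54. */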
/* Instruction is implicitly 64-bit, so don't generate a REX prefix just for the size. */
#define amd64_push_membase(inst,basereg,disp) \
    do { \
        amd64_codegen_pre(inst); \
        amd64_emit_rex(inst, 0, 0, 0, (basereg)); \
        *(inst)++ = (unsigned char)0xff; \
        x86_membase_emit ((inst), 6, (basereg) & 0x7, (disp)); \
        amd64_codegen_post(inst); \
    } while (0)
#define amd64_pop_reg_body(inst,reg) \
    do { \
        amd64_codegen_pre(inst); \
        amd64_emit_rex(inst, 0, 0, 0, (reg)); \
        *(inst)++ = (unsigned char)0x58 + ((reg) & 0x7); \
        amd64_codegen_post(inst); \
    } while (0)
#if defined(__default_codegen__)

#define amd64_call_reg(inst,reg) \
    do { \
        amd64_emit_rex(inst, 0, 0, 0, (reg)); \
        *(inst)++ = (unsigned char)0xff; \
        x86_reg_emit ((inst), 2, ((reg) & 0x7)); \
    } while (0)

#define amd64_ret(inst) do { *(inst)++ = (unsigned char)0xc3; } while (0)
#define amd64_leave(inst) do { *(inst)++ = (unsigned char)0xc9; } while (0)

#define amd64_pop_reg(inst,reg) amd64_pop_reg_body((inst), (reg))
#elif defined(__native_client_codegen__)

/* Size is ignored for Native Client jumps, we restrict jumping to 32-bits */
#define amd64_jump_reg_size(inst,reg,size) \
    do { \
        amd64_codegen_pre((inst)); \
        amd64_alu_reg_imm_size((inst), X86_AND, (reg), (nacl_align_byte), 4); \
        amd64_alu_reg_reg_size((inst), X86_ADD, (reg), AMD64_R15, 8); \
        amd64_emit_rex ((inst),0,0,0,(reg)); \
        x86_jump_reg((inst),((reg)&0x7)); \
        amd64_codegen_post((inst)); \
    } while (0)

/* Size is ignored for Native Client jumps, we restrict jumping to 32-bits */
#define amd64_jump_mem_size(inst,mem,size) \
    do { \
        amd64_codegen_pre((inst)); \
        amd64_mov_reg_mem((inst), AMD64_R11, (mem), 4); \
        amd64_jump_reg_size((inst), AMD64_R11, 4); \
        amd64_codegen_post((inst)); \
    } while (0)
#define amd64_call_reg_internal(inst,reg) \
    do { \
        amd64_codegen_pre((inst)); \
        amd64_alu_reg_imm_size((inst), X86_AND, (reg), (nacl_align_byte), 4); \
        amd64_alu_reg_reg_size((inst), X86_ADD, (reg), AMD64_R15, 8); \
        amd64_emit_rex((inst), 0, 0, 0, (reg)); \
        x86_call_reg((inst), ((reg) & 0x7)); \
        amd64_codegen_post((inst)); \
    } while (0)

#define amd64_call_reg(inst,reg) \
    do { \
        amd64_codegen_pre((inst)); \
        amd64_call_sequence_pre(inst); \
        amd64_call_reg_internal((inst), (reg)); \
        amd64_call_sequence_post(inst); \
        amd64_codegen_post((inst)); \
    } while (0)
#define amd64_ret(inst) \
    do { \
        amd64_codegen_pre(inst); \
        amd64_pop_reg_body((inst), AMD64_R11); \
        amd64_jump_reg_size((inst), AMD64_R11, 8); \
        amd64_codegen_post(inst); \
    } while (0)
#define amd64_leave(inst) \
    do { \
        amd64_codegen_pre(inst); \
        amd64_mov_reg_reg((inst), AMD64_RSP, AMD64_RBP, 8); \
        amd64_pop_reg_body((inst), AMD64_R11); \
        amd64_mov_reg_reg_size((inst), AMD64_RBP, AMD64_R11, 4); \
        amd64_alu_reg_reg_size((inst), X86_ADD, AMD64_RBP, AMD64_R15, 8); \
        amd64_codegen_post(inst); \
    } while (0)
#define amd64_pop_reg(inst,reg) \
    do { \
        amd64_codegen_pre(inst); \
        if (amd64_is_nacl_stack_reg((reg))) { \
            amd64_pop_reg_body((inst), AMD64_R11); \
            amd64_mov_reg_reg_size((inst), (reg), AMD64_R11, 4); \
            amd64_alu_reg_reg_size((inst), X86_ADD, (reg), AMD64_R15, 8); \
        } else { \
            amd64_pop_reg_body((inst), (reg)); \
        } \
        amd64_codegen_post(inst); \
    } while (0)

#endif /*__native_client_codegen__*/
#define amd64_movsd_reg_regp(inst,reg,regp) \
    do { \
        amd64_codegen_pre(inst); \
        x86_prefix((inst), 0xf2); \
        amd64_emit_rex(inst, 0, (reg), 0, (regp)); \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0x10; \
        x86_regp_emit ((inst), (reg) & 0x7, (regp) & 0x7); \
        amd64_codegen_post(inst); \
    } while (0)

#define amd64_movsd_regp_reg(inst,regp,reg) \
    do { \
        amd64_codegen_pre(inst); \
        x86_prefix((inst), 0xf2); \
        amd64_emit_rex(inst, 0, (reg), 0, (regp)); \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0x11; \
        x86_regp_emit ((inst), (reg) & 0x7, (regp) & 0x7); \
        amd64_codegen_post(inst); \
    } while (0)

#define amd64_movss_reg_regp(inst,reg,regp) \
    do { \
        amd64_codegen_pre(inst); \
        x86_prefix((inst), 0xf3); \
        amd64_emit_rex(inst, 0, (reg), 0, (regp)); \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0x10; \
        x86_regp_emit ((inst), (reg) & 0x7, (regp) & 0x7); \
        amd64_codegen_post(inst); \
    } while (0)

#define amd64_movss_regp_reg(inst,regp,reg) \
    do { \
        amd64_codegen_pre(inst); \
        x86_prefix((inst), 0xf3); \
        amd64_emit_rex(inst, 0, (reg), 0, (regp)); \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0x11; \
        x86_regp_emit ((inst), (reg) & 0x7, (regp) & 0x7); \
        amd64_codegen_post(inst); \
    } while (0)

#define amd64_movsd_reg_membase(inst,reg,basereg,disp) \
    do { \
        amd64_codegen_pre(inst); \
        x86_prefix((inst), 0xf2); \
        amd64_emit_rex(inst, 0, (reg), 0, (basereg)); \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0x10; \
        x86_membase_emit ((inst), (reg) & 0x7, (basereg) & 0x7, (disp)); \
        amd64_codegen_post(inst); \
    } while (0)

#define amd64_movss_reg_membase(inst,reg,basereg,disp) \
    do { \
        amd64_codegen_pre(inst); \
        x86_prefix((inst), 0xf3); \
        amd64_emit_rex(inst, 0, (reg), 0, (basereg)); \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0x10; \
        x86_membase_emit ((inst), (reg) & 0x7, (basereg) & 0x7, (disp)); \
        amd64_codegen_post(inst); \
    } while (0)

#define amd64_movsd_membase_reg(inst,basereg,disp,reg) \
    do { \
        amd64_codegen_pre(inst); \
        x86_prefix((inst), 0xf2); \
        amd64_emit_rex(inst, 0, (reg), 0, (basereg)); \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0x11; \
        x86_membase_emit ((inst), (reg) & 0x7, (basereg) & 0x7, (disp)); \
        amd64_codegen_post(inst); \
    } while (0)

#define amd64_movss_membase_reg(inst,basereg,disp,reg) \
    do { \
        amd64_codegen_pre(inst); \
        x86_prefix((inst), 0xf3); \
        amd64_emit_rex(inst, 0, (reg), 0, (basereg)); \
        *(inst)++ = (unsigned char)0x0f; \
        *(inst)++ = (unsigned char)0x11; \
        x86_membase_emit ((inst), (reg) & 0x7, (basereg) & 0x7, (disp)); \
        amd64_codegen_post(inst); \
    } while (0)
/* The one-byte inc/dec opcodes (0x40-0x4f) were repurposed as REX prefixes on
 * amd64, so inc/dec are encoded through the 0xff opcode group instead. */
#define amd64_inc_reg_size(inst,reg,size) \
    do { \
        amd64_codegen_pre(inst); \
        amd64_emit_rex ((inst),(size),0,0,(reg)); \
        *(inst)++ = (unsigned char)0xff; \
        x86_reg_emit ((inst),0,(reg) & 0x7); \
        amd64_codegen_post(inst); \
    } while (0)

#define amd64_dec_reg_size(inst,reg,size) \
    do { \
        amd64_codegen_pre(inst); \
        amd64_emit_rex ((inst),(size),0,0,(reg)); \
        *(inst)++ = (unsigned char)0xff; \
        x86_reg_emit ((inst),1,(reg) & 0x7); \
        amd64_codegen_post(inst); \
    } while (0)
#define amd64_fld_membase_size(inst,basereg,disp,is_double,size) do { \
    amd64_codegen_pre(inst); \
    amd64_emit_rex ((inst),0,0,0,(basereg)); \
    *(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9; \
    amd64_membase_emit ((inst), 0, (basereg), (disp)); \
    amd64_codegen_post(inst); \
} while (0)
#if defined (__default_codegen__)

/* From the AMD64 Software Optimization Manual */
#define amd64_padding_size(inst,size) \
    do { \
        switch ((size)) { \
        case 1: *(inst)++ = 0x90; break; \
        case 2: *(inst)++ = 0x66; *(inst)++ = 0x90; break; \
        case 3: *(inst)++ = 0x66; *(inst)++ = 0x66; *(inst)++ = 0x90; break; \
        default: amd64_emit_rex ((inst),8,0,0,0); x86_padding ((inst), (size) - 1); \
        } \
    } while (0)

#define amd64_call_membase_size(inst,basereg,disp,size) do { amd64_emit_rex ((inst),0,0,0,(basereg)); *(inst)++ = (unsigned char)0xff; amd64_membase_emit ((inst),2, (basereg),(disp)); } while (0)
#define amd64_jump_membase_size(inst,basereg,disp,size) do { amd64_emit_rex ((inst),0,0,0,(basereg)); *(inst)++ = (unsigned char)0xff; amd64_membase_emit ((inst), 4, (basereg), (disp)); } while (0)

#define amd64_jump_code_size(inst,target,size) do { \
    if (amd64_is_imm32 ((gint64)(target) - (gint64)(inst))) { \
        x86_jump_code((inst),(target)); \
    } else { \
        amd64_jump_membase ((inst), AMD64_RIP, 0); \
        *(guint64*)(inst) = (guint64)(target); \
        (inst) += 8; \
    } \
} while (0)
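/* Example: for a target within +/-2 GB the macro above emits a plain 5-byte
 * near jump (e9 rel32); otherwise it emits an indirect jmp through [rip+0]
 * (opcode ff /4 with a zero disp32) and embeds the 8-byte absolute target
 * immediately after the instruction. */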
#elif defined(__native_client_codegen__)

/* The 3-7 byte NOP sequences used by the default amd64_padding_size above are */
/* all illegal in 64-bit Native Client because they load into rSP/rBP or use */
/* duplicate prefixes. Instead we use the NOPs recommended in Section 3.5.1.8 */
/* of the Intel64 and IA-32 Architectures Optimization Reference Manual and */
/* Section 4.13 of the AMD Software Optimization Guide for Family 10h Processors. */

#define amd64_padding_size(inst,size) \
    do { \
        unsigned char *code_start = (inst); \
        switch ((size)) { \
        /* xchg %eax,%eax, recognized by hardware as a NOP */ \
        case 1: *(inst)++ = 0x90; break; \
        /* xchg %ax,%ax */ \
        case 2: *(inst)++ = 0x66; *(inst)++ = 0x90; \
            break; \
        /* nop (%rax) */ \
        case 3: *(inst)++ = 0x0f; *(inst)++ = 0x1f; \
            x86_address_byte ((inst), 0, 0, AMD64_RAX); \
            break; \
        /* nop 0x0(%rax) */ \
        case 4: *(inst)++ = 0x0f; *(inst)++ = 0x1f; \
            x86_address_byte ((inst), 1, 0, AMD64_RAX); \
            x86_imm_emit8 ((inst), 0); \
            break; \
        /* nop 0x0(%rax,%rax) */ \
        case 5: *(inst)++ = 0x0f; *(inst)++ = 0x1f; \
            x86_address_byte ((inst), 1, 0, 4); \
            x86_address_byte ((inst), 0, AMD64_RAX, AMD64_RAX); \
            x86_imm_emit8 ((inst), 0); \
            break; \
        /* nopw 0x0(%rax,%rax) */ \
        case 6: *(inst)++ = 0x66; *(inst)++ = 0x0f; \
            *(inst)++ = 0x1f; \
            x86_address_byte ((inst), 1, 0, 4); \
            x86_address_byte ((inst), 0, AMD64_RAX, AMD64_RAX); \
            x86_imm_emit8 ((inst), 0); \
            break; \
        /* nop 0x0(%rax) (32-bit displacement) */ \
        case 7: *(inst)++ = 0x0f; *(inst)++ = 0x1f; \
            x86_address_byte ((inst), 2, 0, AMD64_RAX); \
            x86_imm_emit32((inst), 0); \
            break; \
        /* nop 0x0(%rax,%rax) (32-bit displacement) */ \
        case 8: *(inst)++ = 0x0f; *(inst)++ = 0x1f; \
            x86_address_byte ((inst), 2, 0, 4); \
            x86_address_byte ((inst), 0, AMD64_RAX, AMD64_RAX); \
            x86_imm_emit32 ((inst), 0); \
            break; \
        default: \
            g_assert_not_reached(); \
        } \
        g_assert(code_start + (size) == (unsigned char *)(inst)); \
    } while (0)
/* Size is ignored for Native Client calls, we restrict calls to 32-bits */
#define amd64_call_membase_size(inst,basereg,disp,size) \
    do { \
        amd64_codegen_pre((inst)); \
        amd64_call_sequence_pre(inst); \
        amd64_mov_reg_membase((inst), AMD64_R11, (basereg), (disp), 4); \
        amd64_call_reg_internal((inst), AMD64_R11); \
        amd64_call_sequence_post(inst); \
        amd64_codegen_post((inst)); \
    } while (0)

/* Size is ignored for Native Client jumps, we restrict jumping to 32-bits */
#define amd64_jump_membase_size(inst,basereg,disp,size) \
    do { \
        amd64_mov_reg_membase((inst), AMD64_R11, (basereg), (disp), 4); \
        amd64_jump_reg_size((inst), AMD64_R11, 4); \
    } while (0)
/* On Native Client we can't jump more than INT_MAX in either direction */
#define amd64_jump_code_size(inst,target,size) \
    do { \
        /* x86_jump_code used twice in case of relocation by amd64_codegen_post */ \
        guint8* jump_start; \
        amd64_codegen_pre(inst); \
        assert(amd64_is_imm32 ((gint64)(target) - (gint64)(inst))); \
        x86_jump_code((inst),(target)); \
        inst = amd64_codegen_post(inst); \
        jump_start = (inst); \
        x86_jump_code((inst),(target)); \
        mono_amd64_patch(jump_start, (target)); \
    } while (0)

#endif /*__native_client_codegen__*/
// TODO: Reorganize the SSE opcode defines.

/* Two opcode SSE defines */
#define emit_sse_reg_reg_op2_size(inst,dreg,reg,op1,op2,size) do { \
    amd64_codegen_pre(inst); \
    amd64_emit_rex ((inst), size, (dreg), 0, (reg)); \
    *(inst)++ = (unsigned char)(op1); \
    *(inst)++ = (unsigned char)(op2); \
    x86_reg_emit ((inst), (dreg), (reg)); \
    amd64_codegen_post(inst); \
} while (0)

#define emit_sse_reg_reg_op2(inst,dreg,reg,op1,op2) emit_sse_reg_reg_op2_size ((inst), (dreg), (reg), (op1), (op2), 0)

#define emit_sse_reg_reg_op2_imm(inst,dreg,reg,op1,op2,imm) do { \
    amd64_codegen_pre(inst); \
    emit_sse_reg_reg_op2 ((inst), (dreg), (reg), (op1), (op2)); \
    x86_imm_emit8 ((inst), (imm)); \
    amd64_codegen_post(inst); \
} while (0)

#define emit_sse_membase_reg_op2(inst,basereg,disp,reg,op1,op2) do { \
    amd64_codegen_pre(inst); \
    amd64_emit_rex ((inst), 0, (reg), 0, (basereg)); \
    *(inst)++ = (unsigned char)(op1); \
    *(inst)++ = (unsigned char)(op2); \
    amd64_membase_emit ((inst), (reg), (basereg), (disp)); \
    amd64_codegen_post(inst); \
} while (0)

#define emit_sse_reg_membase_op2(inst,dreg,basereg,disp,op1,op2) do { \
    amd64_codegen_pre(inst); \
    amd64_emit_rex ((inst), 0, (dreg), 0, (basereg) == AMD64_RIP ? 0 : (basereg)); \
    *(inst)++ = (unsigned char)(op1); \
    *(inst)++ = (unsigned char)(op2); \
    amd64_membase_emit ((inst), (dreg), (basereg), (disp)); \
    amd64_codegen_post(inst); \
} while (0)
/* Three opcode SSE defines */

#define emit_opcode3(inst,op1,op2,op3) do { \
    *(inst)++ = (unsigned char)(op1); \
    *(inst)++ = (unsigned char)(op2); \
    *(inst)++ = (unsigned char)(op3); \
} while (0)

#define emit_sse_reg_reg_size(inst,dreg,reg,op1,op2,op3,size) do { \
    amd64_codegen_pre(inst); \
    /* op1 is a legacy prefix byte (0x66/0xf2/0xf3) and must precede the REX prefix */ \
    *(inst)++ = (unsigned char)(op1); \
    amd64_emit_rex ((inst), size, (dreg), 0, (reg)); \
    *(inst)++ = (unsigned char)(op2); \
    *(inst)++ = (unsigned char)(op3); \
    x86_reg_emit ((inst), (dreg), (reg)); \
    amd64_codegen_post(inst); \
} while (0)
#define emit_sse_reg_reg(inst,dreg,reg,op1,op2,op3) emit_sse_reg_reg_size ((inst), (dreg), (reg), (op1), (op2), (op3), 0)
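/* Example: amd64_sse_movsd_reg_reg (code, AMD64_XMM1, AMD64_XMM2), defined
 * below, expands to emit_sse_reg_reg (code, 1, 2, 0xf2, 0x0f, 0x10) and emits
 * f2 0f 10 ca (movsd xmm1, xmm2).  The 0xf2 mandatory prefix is written
 * before the REX byte, which is why op1 precedes amd64_emit_rex above.
 * (AMD64_XMM1/AMD64_XMM2 are assumed to be the XMM enumerators from earlier
 * in this header.) */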
#define emit_sse_reg_reg_imm(inst,dreg,reg,op1,op2,op3,imm) do { \
    amd64_codegen_pre(inst); \
    emit_sse_reg_reg ((inst), (dreg), (reg), (op1), (op2), (op3)); \
    x86_imm_emit8 ((inst), (imm)); \
    amd64_codegen_post(inst); \
} while (0)

#define emit_sse_membase_reg(inst,basereg,disp,reg,op1,op2,op3) do { \
    amd64_codegen_pre(inst); \
    x86_prefix((inst), (unsigned char)(op1)); \
    amd64_emit_rex ((inst), 0, (reg), 0, (basereg)); \
    *(inst)++ = (unsigned char)(op2); \
    *(inst)++ = (unsigned char)(op3); \
    amd64_membase_emit ((inst), (reg), (basereg), (disp)); \
    amd64_codegen_post(inst); \
} while (0)

#define emit_sse_reg_membase(inst,dreg,basereg,disp,op1,op2,op3) do { \
    amd64_codegen_pre(inst); \
    x86_prefix((inst), (unsigned char)(op1)); \
    amd64_emit_rex ((inst), 0, (dreg), 0, (basereg) == AMD64_RIP ? 0 : (basereg)); \
    *(inst)++ = (unsigned char)(op2); \
    *(inst)++ = (unsigned char)(op3); \
    amd64_membase_emit ((inst), (dreg), (basereg), (disp)); \
    amd64_codegen_post(inst); \
} while (0)
/* Four opcode SSE defines */

#define emit_sse_reg_reg_op4_size(inst,dreg,reg,op1,op2,op3,op4,size) do { \
    amd64_codegen_pre(inst); \
    x86_prefix((inst), (unsigned char)(op1)); \
    amd64_emit_rex ((inst), size, (dreg), 0, (reg)); \
    *(inst)++ = (unsigned char)(op2); \
    *(inst)++ = (unsigned char)(op3); \
    *(inst)++ = (unsigned char)(op4); \
    x86_reg_emit ((inst), (dreg), (reg)); \
    amd64_codegen_post(inst); \
} while (0)

#define emit_sse_reg_reg_op4(inst,dreg,reg,op1,op2,op3,op4) emit_sse_reg_reg_op4_size ((inst), (dreg), (reg), (op1), (op2), (op3), (op4), 0)

/* Specific SSE opcode defines */
#define amd64_sse_xorpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst),(dreg),(reg), 0x66, 0x0f, 0x57)

#define amd64_sse_xorpd_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst),(dreg),(basereg), (disp), 0x66, 0x0f, 0x57)

#define amd64_sse_andpd_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst),(dreg),(basereg), (disp), 0x66, 0x0f, 0x54)

#define amd64_sse_movsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x10)
#define amd64_sse_movss_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf3, 0x0f, 0x10)

#define amd64_sse_movsd_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst), (dreg), (basereg), (disp), 0xf2, 0x0f, 0x10)

#define amd64_sse_movsd_membase_reg(inst,basereg,disp,reg) emit_sse_membase_reg ((inst), (basereg), (disp), (reg), 0xf2, 0x0f, 0x11)

#define amd64_sse_movss_membase_reg(inst,basereg,disp,reg) emit_sse_membase_reg ((inst), (basereg), (disp), (reg), 0xf3, 0x0f, 0x11)

#define amd64_sse_movss_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst), (dreg), (basereg), (disp), 0xf3, 0x0f, 0x10)

#define amd64_sse_comisd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst),(dreg),(reg),0x66,0x0f,0x2f)
/* comiss has no mandatory prefix; 0x67 (address-size override) is a harmless filler here, used only to fit the three-byte emitter */
#define amd64_sse_comiss_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst),(dreg),(reg),0x67,0x0f,0x2f)
#define amd64_sse_comisd_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst), (dreg), (basereg), (disp), 0x66, 0x0f, 0x2f)

#define amd64_sse_ucomisd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst),(dreg),(reg),0x66,0x0f,0x2e)

#define amd64_sse_cvtsd2si_reg_reg(inst,dreg,reg) emit_sse_reg_reg_size ((inst), (dreg), (reg), 0xf2, 0x0f, 0x2d, 8)
#define amd64_sse_cvtss2si_reg_reg(inst,dreg,reg) emit_sse_reg_reg_size ((inst), (dreg), (reg), 0xf3, 0x0f, 0x2d, 8)

#define amd64_sse_cvttsd2si_reg_reg_size(inst,dreg,reg,size) emit_sse_reg_reg_size ((inst), (dreg), (reg), 0xf2, 0x0f, 0x2c, (size))
#define amd64_sse_cvttss2si_reg_reg_size(inst,dreg,reg,size) emit_sse_reg_reg_size ((inst), (dreg), (reg), 0xf3, 0x0f, 0x2c, (size))
#define amd64_sse_cvttsd2si_reg_reg(inst,dreg,reg) amd64_sse_cvttsd2si_reg_reg_size ((inst), (dreg), (reg), 8)

#define amd64_sse_cvtsi2sd_reg_reg_size(inst,dreg,reg,size) emit_sse_reg_reg_size ((inst), (dreg), (reg), 0xf2, 0x0f, 0x2a, (size))

#define amd64_sse_cvtsi2sd_reg_reg(inst,dreg,reg) amd64_sse_cvtsi2sd_reg_reg_size ((inst), (dreg), (reg), 8)

#define amd64_sse_cvtsi2ss_reg_reg_size(inst,dreg,reg,size) emit_sse_reg_reg_size ((inst), (dreg), (reg), 0xf3, 0x0f, 0x2a, (size))

#define amd64_sse_cvtsi2ss_reg_reg(inst,dreg,reg) amd64_sse_cvtsi2ss_reg_reg_size ((inst), (dreg), (reg), 8)

#define amd64_sse_cvtsd2ss_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x5a)

#define amd64_sse_cvtss2sd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf3, 0x0f, 0x5a)

#define amd64_sse_addsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x58)
#define amd64_sse_addss_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf3, 0x0f, 0x58)

#define amd64_sse_subsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x5c)
#define amd64_sse_subss_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf3, 0x0f, 0x5c)

#define amd64_sse_mulsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x59)
#define amd64_sse_mulss_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf3, 0x0f, 0x59)

#define amd64_sse_divsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x5e)
#define amd64_sse_divss_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf3, 0x0f, 0x5e)

#define amd64_sse_sqrtsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf2, 0x0f, 0x51)

#define amd64_sse_pinsrw_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm ((inst), (dreg), (reg), 0x66, 0x0f, 0xc4, (imm))

#define amd64_sse_pextrw_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm ((inst), (dreg), (reg), 0x66, 0x0f, 0xc5, (imm))

#define amd64_sse_cvttsd2si_reg_xreg_size(inst,reg,xreg,size) emit_sse_reg_reg_size ((inst), (reg), (xreg), 0xf2, 0x0f, 0x2c, (size))

#define amd64_sse_addps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x58)

#define amd64_sse_divps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x5e)

#define amd64_sse_mulps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x59)

#define amd64_sse_subps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x5c)

#define amd64_sse_maxps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x5f)

#define amd64_sse_minps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x5d)

#define amd64_sse_cmpps_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_op2_imm((inst), (dreg), (reg), 0x0f, 0xc2, (imm))

#define amd64_sse_andps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x54)

#define amd64_sse_andnps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x55)

#define amd64_sse_orps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x56)

#define amd64_sse_xorps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x57)

#define amd64_sse_sqrtps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x51)

#define amd64_sse_rsqrtps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x52)

#define amd64_sse_rcpps_reg_reg(inst,dreg,reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x53)

#define amd64_sse_addsubps_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf2, 0x0f, 0xd0)

#define amd64_sse_haddps_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf2, 0x0f, 0x7c)

#define amd64_sse_hsubps_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf2, 0x0f, 0x7d)

#define amd64_sse_movshdup_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf3, 0x0f, 0x16)

#define amd64_sse_movsldup_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf3, 0x0f, 0x12)

#define amd64_sse_pshufhw_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm((inst), (dreg), (reg), 0xf3, 0x0f, 0x70, (imm))

#define amd64_sse_pshuflw_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm((inst), (dreg), (reg), 0xf2, 0x0f, 0x70, (imm))

#define amd64_sse_pshufd_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm((inst), (dreg), (reg), 0x66, 0x0f, 0x70, (imm))

#define amd64_sse_shufps_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_op2_imm((inst), (dreg), (reg), 0x0f, 0xC6, (imm))

#define amd64_sse_shufpd_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm((inst), (dreg), (reg), 0x66, 0x0f, 0xC6, (imm))

#define amd64_sse_addpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x58)

#define amd64_sse_divpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x5e)

#define amd64_sse_mulpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x59)

#define amd64_sse_subpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x5c)

#define amd64_sse_maxpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x5f)

#define amd64_sse_minpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x5d)

#define amd64_sse_cmppd_reg_reg_imm(inst,dreg,reg,imm) emit_sse_reg_reg_imm((inst), (dreg), (reg), 0x66, 0x0f, 0xc2, (imm))

#define amd64_sse_andpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x54)

#define amd64_sse_andnpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x55)

#define amd64_sse_orpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x56)

#define amd64_sse_sqrtpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x51)

#define amd64_sse_rsqrtpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x52)

#define amd64_sse_rcppd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x53)

#define amd64_sse_addsubpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd0)

#define amd64_sse_haddpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x7c)

#define amd64_sse_hsubpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x7d)

#define amd64_sse_movddup_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xf2, 0x0f, 0x12)

#define amd64_sse_pmovmskb_reg_reg(inst,dreg,reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd7)

#define amd64_sse_pand_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xdb)

#define amd64_sse_por_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xeb)

#define amd64_sse_pxor_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xef)

#define amd64_sse_paddb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xfc)

#define amd64_sse_paddw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xfd)

#define amd64_sse_paddd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xfe)

#define amd64_sse_paddq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd4)

#define amd64_sse_psubb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf8)

#define amd64_sse_psubw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf9)

#define amd64_sse_psubd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xfa)

#define amd64_sse_psubq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xfb)

#define amd64_sse_pmaxub_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xde)

#define amd64_sse_pmaxuw_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x3e)

#define amd64_sse_pmaxud_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x3f)

#define amd64_sse_pmaxsb_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x3c)

#define amd64_sse_pmaxsw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xee)

#define amd64_sse_pmaxsd_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x3d)

#define amd64_sse_pavgb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe0)

#define amd64_sse_pavgw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe3)

#define amd64_sse_pminub_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xda)

#define amd64_sse_pminuw_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x3a)

#define amd64_sse_pminud_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x3b)

#define amd64_sse_pminsb_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x38)

#define amd64_sse_pminsw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xea)

#define amd64_sse_pminsd_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x39)

#define amd64_sse_pcmpeqb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x74)

#define amd64_sse_pcmpeqw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x75)

#define amd64_sse_pcmpeqd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x76)

#define amd64_sse_pcmpeqq_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x29)

#define amd64_sse_pcmpgtb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x64)

#define amd64_sse_pcmpgtw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x65)

#define amd64_sse_pcmpgtd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x66)

#define amd64_sse_pcmpgtq_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x37)

#define amd64_sse_psadbw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf6)

#define amd64_sse_punpcklbw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x60)

#define amd64_sse_punpcklwd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x61)

#define amd64_sse_punpckldq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x62)

#define amd64_sse_punpcklqdq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x6c)

#define amd64_sse_unpcklpd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x14)

#define amd64_sse_unpcklps_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x14)

#define amd64_sse_punpckhbw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x68)

#define amd64_sse_punpckhwd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x69)

#define amd64_sse_punpckhdq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x6a)

#define amd64_sse_punpckhqdq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x6d)

#define amd64_sse_unpckhpd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x15)

#define amd64_sse_unpckhps_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x15)

#define amd64_sse_packsswb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x63)

#define amd64_sse_packssdw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x6b)

#define amd64_sse_packuswb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0x67)

#define amd64_sse_packusdw_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x2b)

#define amd64_sse_paddusb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xdc)

#define amd64_sse_psubusb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd8)

#define amd64_sse_paddusw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xdd)
#define amd64_sse_psubusw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd9)
#define amd64_sse_paddsb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xec)

#define amd64_sse_psubsb_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe8)

#define amd64_sse_paddsw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xed)

#define amd64_sse_psubsw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe9)

#define amd64_sse_pmullw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd5)

#define amd64_sse_pmulld_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op4((inst), (dreg), (reg), 0x66, 0x0f, 0x38, 0x40)

#define amd64_sse_pmuludq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf4)

#define amd64_sse_pmulhuw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe4)

#define amd64_sse_pmulhw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe5)

#define amd64_sse_psrlw_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SHR, (reg), 0x66, 0x0f, 0x71, (imm))

#define amd64_sse_psrlw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd1)

#define amd64_sse_psraw_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SAR, (reg), 0x66, 0x0f, 0x71, (imm))

#define amd64_sse_psraw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe1)

#define amd64_sse_psllw_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SHL, (reg), 0x66, 0x0f, 0x71, (imm))

#define amd64_sse_psllw_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf1)

#define amd64_sse_psrld_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SHR, (reg), 0x66, 0x0f, 0x72, (imm))

#define amd64_sse_psrld_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd2)

#define amd64_sse_psrad_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SAR, (reg), 0x66, 0x0f, 0x72, (imm))

#define amd64_sse_psrad_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe2)

#define amd64_sse_pslld_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SHL, (reg), 0x66, 0x0f, 0x72, (imm))

#define amd64_sse_pslld_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf2)

#define amd64_sse_psrlq_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SHR, (reg), 0x66, 0x0f, 0x73, (imm))

#define amd64_sse_psrlq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xd3)

#define amd64_sse_psraq_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SAR, (reg), 0x66, 0x0f, 0x73, (imm))

#define amd64_sse_psraq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xe3)

#define amd64_sse_psllq_reg_imm(inst, reg, imm) emit_sse_reg_reg_imm((inst), X86_SSE_SHL, (reg), 0x66, 0x0f, 0x73, (imm))

#define amd64_sse_psllq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0f, 0xf3)

#define amd64_sse_cvtdq2pd_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xF3, 0x0F, 0xE6)

#define amd64_sse_cvtdq2ps_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0F, 0x5B)

#define amd64_sse_cvtpd2dq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xF2, 0x0F, 0xE6)

#define amd64_sse_cvtpd2ps_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0F, 0x5A)

#define amd64_sse_cvtps2dq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0F, 0x5B)

#define amd64_sse_cvtps2pd_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0F, 0x5A)

#define amd64_sse_cvttpd2dq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0x66, 0x0F, 0xE6)

#define amd64_sse_cvttps2dq_reg_reg(inst, dreg, reg) emit_sse_reg_reg((inst), (dreg), (reg), 0xF3, 0x0F, 0x5B)

#define amd64_movd_xreg_reg_size(inst,dreg,sreg,size) emit_sse_reg_reg_size((inst), (dreg), (sreg), 0x66, 0x0f, 0x6e, (size))

#define amd64_movd_reg_xreg_size(inst,dreg,sreg,size) emit_sse_reg_reg_size((inst), (sreg), (dreg), 0x66, 0x0f, 0x7e, (size))

#define amd64_movd_xreg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase((inst), (dreg), (basereg), (disp), 0x66, 0x0f, 0x6e)

#define amd64_movlhps_reg_reg(inst,dreg,sreg) emit_sse_reg_reg_op2((inst), (dreg), (sreg), 0x0f, 0x16)

#define amd64_movhlps_reg_reg(inst,dreg,sreg) emit_sse_reg_reg_op2((inst), (dreg), (sreg), 0x0f, 0x12)

#define amd64_sse_movups_membase_reg(inst, basereg, disp, reg) emit_sse_membase_reg_op2((inst), (basereg), (disp), (reg), 0x0f, 0x11)

#define amd64_sse_movups_reg_membase(inst, dreg, basereg, disp) emit_sse_reg_membase_op2((inst), (dreg), (basereg), (disp), 0x0f, 0x10)

#define amd64_sse_movaps_membase_reg(inst, basereg, disp, reg) emit_sse_membase_reg_op2((inst), (basereg), (disp), (reg), 0x0f, 0x29)

#define amd64_sse_movaps_reg_membase(inst, dreg, basereg, disp) emit_sse_reg_membase_op2((inst), (dreg), (basereg), (disp), 0x0f, 0x28)

#define amd64_sse_movaps_reg_reg(inst, dreg, reg) emit_sse_reg_reg_op2((inst), (dreg), (reg), 0x0f, 0x28)

#define amd64_sse_movntps_reg_membase(inst, dreg, basereg, disp) emit_sse_reg_membase_op2((inst), (dreg), (basereg), (disp), 0x0f, 0x2b)

#define amd64_sse_prefetch_reg_membase(inst, arg, basereg, disp) emit_sse_reg_membase_op2((inst), (arg), (basereg), (disp), 0x0f, 0x18)
1426 /* Generated from x86-codegen.h */
#define amd64_breakpoint_size(inst,size) do { x86_breakpoint(inst); } while (0)
#define amd64_cld_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_cld(inst); amd64_codegen_post(inst); } while (0)
#define amd64_stosb_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_stosb(inst); amd64_codegen_post(inst); } while (0)
#define amd64_stosl_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_stosl(inst); amd64_codegen_post(inst); } while (0)
#define amd64_stosd_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_stosd(inst); amd64_codegen_post(inst); } while (0)
#define amd64_movsb_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_movsb(inst); amd64_codegen_post(inst); } while (0)
#define amd64_movsl_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_movsl(inst); amd64_codegen_post(inst); } while (0)
#define amd64_movsd_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_movsd(inst); amd64_codegen_post(inst); } while (0)
#define amd64_prefix_size(inst,p,size) do { x86_prefix((inst), p); } while (0)
#define amd64_rdtsc_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_rdtsc(inst); amd64_codegen_post(inst); } while (0)
#define amd64_cmpxchg_reg_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_cmpxchg_reg_reg((inst),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_cmpxchg_mem_reg_size(inst,mem,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_cmpxchg_mem_reg((inst),(mem),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_cmpxchg_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_cmpxchg_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_xchg_reg_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_xchg_reg_reg((inst),((dreg)&0x7),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_xchg_mem_reg_size(inst,mem,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_xchg_mem_reg((inst),(mem),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_xchg_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_xchg_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_inc_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_inc_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_inc_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_inc_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
//#define amd64_inc_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_inc_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_dec_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_dec_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_dec_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_dec_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
//#define amd64_dec_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_dec_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_not_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_not_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_not_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_not_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_not_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_not_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_neg_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_neg_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_neg_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_neg_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_neg_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_neg_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_nop_size(inst,size) do { amd64_codegen_pre(inst); x86_nop(inst); amd64_codegen_post(inst); } while (0)
//#define amd64_alu_reg_imm_size(inst,opc,reg,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_alu_reg_imm((inst),(opc),((reg)&0x7),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_mem_imm_size(inst,opc,mem,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_alu_mem_imm((inst),(opc),(mem),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_membase_imm_size(inst,opc,basereg,disp,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_alu_membase_imm((inst),(opc),((basereg)&0x7),(disp),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_membase8_imm_size(inst,opc,basereg,disp,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_alu_membase8_imm((inst),(opc),((basereg)&0x7),(disp),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_mem_reg_size(inst,opc,mem,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_alu_mem_reg((inst),(opc),(mem),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_membase_reg_size(inst,opc,basereg,disp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_alu_membase_reg((inst),(opc),((basereg)&0x7),(disp),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
//#define amd64_alu_reg_reg_size(inst,opc,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_alu_reg_reg((inst),(opc),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_reg8_reg8_size(inst,opc,dreg,reg,is_dreg_h,is_reg_h,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_alu_reg8_reg8((inst),(opc),((dreg)&0x7),((reg)&0x7),(is_dreg_h),(is_reg_h)); amd64_codegen_post(inst); } while (0)
#define amd64_alu_reg_mem_size(inst,opc,reg,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_alu_reg_mem((inst),(opc),((reg)&0x7),(mem)); amd64_codegen_post(inst); } while (0)
//#define amd64_alu_reg_membase_size(inst,opc,reg,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_alu_reg_membase((inst),(opc),((reg)&0x7),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_test_reg_imm_size(inst,reg,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_test_reg_imm((inst),((reg)&0x7),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_test_mem_imm_size(inst,mem,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_test_mem_imm((inst),(mem),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_test_membase_imm_size(inst,basereg,disp,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_test_membase_imm((inst),((basereg)&0x7),(disp),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_test_reg_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_test_reg_reg((inst),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_test_mem_reg_size(inst,mem,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_test_mem_reg((inst),(mem),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_test_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_test_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_shift_reg_imm_size(inst,opc,reg,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_shift_reg_imm((inst),(opc),((reg)&0x7),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_shift_mem_imm_size(inst,opc,mem,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_shift_mem_imm((inst),(opc),(mem),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_shift_membase_imm_size(inst,opc,basereg,disp,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_shift_membase_imm((inst),(opc),((basereg)&0x7),(disp),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_shift_reg_size(inst,opc,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_shift_reg((inst),(opc),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_shift_mem_size(inst,opc,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_shift_mem((inst),(opc),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_shift_membase_size(inst,opc,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_shift_membase((inst),(opc),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_shrd_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_shrd_reg((inst),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_shrd_reg_imm_size(inst,dreg,reg,shamt,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_shrd_reg_imm((inst),((dreg)&0x7),((reg)&0x7),(shamt)); amd64_codegen_post(inst); } while (0)
#define amd64_shld_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_shld_reg((inst),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_shld_reg_imm_size(inst,dreg,reg,shamt,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_shld_reg_imm((inst),((dreg)&0x7),((reg)&0x7),(shamt)); amd64_codegen_post(inst); } while (0)
#define amd64_mul_reg_size(inst,reg,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_mul_reg((inst),((reg)&0x7),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_mul_mem_size(inst,mem,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_mul_mem((inst),(mem),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_mul_membase_size(inst,basereg,disp,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_mul_membase((inst),((basereg)&0x7),(disp),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_imul_reg_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_imul_reg_reg((inst),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_imul_reg_mem_size(inst,reg,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_imul_reg_mem((inst),((reg)&0x7),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_imul_reg_membase_size(inst,reg,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_imul_reg_membase((inst),((reg)&0x7),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_imul_reg_reg_imm_size(inst,dreg,reg,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_imul_reg_reg_imm((inst),((dreg)&0x7),((reg)&0x7),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_imul_reg_mem_imm_size(inst,reg,mem,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_imul_reg_mem_imm((inst),((reg)&0x7),(mem),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_imul_reg_membase_imm_size(inst,reg,basereg,disp,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_imul_reg_membase_imm((inst),((reg)&0x7),((basereg)&0x7),(disp),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_div_reg_size(inst,reg,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_div_reg((inst),((reg)&0x7),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_div_mem_size(inst,mem,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_div_mem((inst),(mem),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_div_membase_size(inst,basereg,disp,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_div_membase((inst),((basereg)&0x7),(disp),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_mov_mem_reg_size(inst,mem,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_mov_mem_reg((inst),(mem),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_regp_reg_size(inst,regp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(regp),0,(reg)); x86_mov_regp_reg((inst),(regp),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_mov_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_mov_memindex_reg_size(inst,basereg,disp,indexreg,shift,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),(indexreg),(basereg)); x86_mov_memindex_reg((inst),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_mov_reg_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_mov_reg_reg((inst),((dreg)&0x7),((reg)&0x7),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_reg_mem_size(inst,reg,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_mov_reg_mem((inst),((reg)&0x7),(mem),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_reg_membase_size(inst,reg,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_mov_reg_membase((inst),((reg)&0x7),((basereg)&0x7),(disp),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_reg_memindex_size(inst,reg,basereg,disp,indexreg,shift,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),(indexreg),(basereg)); x86_mov_reg_memindex((inst),((reg)&0x7),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_clear_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_clear_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_reg_imm_size(inst,reg,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_mov_reg_imm((inst),((reg)&0x7),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_mov_mem_imm_size(inst,mem,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_mov_mem_imm((inst),(mem),(imm),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
//#define amd64_mov_membase_imm_size(inst,basereg,disp,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_mov_membase_imm((inst),((basereg)&0x7),(disp),(imm),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_mov_memindex_imm_size(inst,basereg,disp,indexreg,shift,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,(indexreg),(basereg)); x86_mov_memindex_imm((inst),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),(imm),(size) == 8 ? 4 : (size)); amd64_codegen_post(inst); } while (0)
#define amd64_lea_mem_size(inst,reg,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_lea_mem((inst),((reg)&0x7),(mem)); amd64_codegen_post(inst); } while (0)
//#define amd64_lea_membase_size(inst,reg,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_lea_membase((inst),((reg)&0x7),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_lea_memindex_size(inst,reg,basereg,disp,indexreg,shift,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),(indexreg),(basereg)); x86_lea_memindex((inst),((reg)&0x7),((basereg)&0x7),(disp),((indexreg)&0x7),(shift)); amd64_codegen_post(inst); } while (0)
#define amd64_widen_reg_size(inst,dreg,reg,is_signed,is_half,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_widen_reg((inst),((dreg)&0x7),((reg)&0x7),(is_signed),(is_half)); amd64_codegen_post(inst); } while (0)
#define amd64_widen_mem_size(inst,dreg,mem,is_signed,is_half,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,0); x86_widen_mem((inst),((dreg)&0x7),(mem),(is_signed),(is_half)); amd64_codegen_post(inst); } while (0)
#define amd64_widen_membase_size(inst,dreg,basereg,disp,is_signed,is_half,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(basereg)); x86_widen_membase((inst),((dreg)&0x7),((basereg)&0x7),(disp),(is_signed),(is_half)); amd64_codegen_post(inst); } while (0)
#define amd64_widen_memindex_size(inst,dreg,basereg,disp,indexreg,shift,is_signed,is_half,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),(indexreg),(basereg)); x86_widen_memindex((inst),((dreg)&0x7),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),(is_signed),(is_half)); amd64_codegen_post(inst); } while (0)
#define amd64_cdq_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_cdq(inst); amd64_codegen_post(inst); } while (0)
#define amd64_wait_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_wait(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fp_op_mem_size(inst,opc,mem,is_double,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fp_op_mem((inst),(opc),(mem),(is_double)); amd64_codegen_post(inst); } while (0)
#define amd64_fp_op_membase_size(inst,opc,basereg,disp,is_double,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fp_op_membase((inst),(opc),((basereg)&0x7),(disp),(is_double)); amd64_codegen_post(inst); } while (0)
#define amd64_fp_op_size(inst,opc,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fp_op((inst),(opc),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fp_op_reg_size(inst,opc,index,pop_stack,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fp_op_reg((inst),(opc),(index),(pop_stack)); amd64_codegen_post(inst); } while (0)
#define amd64_fp_int_op_membase_size(inst,opc,basereg,disp,is_int,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fp_int_op_membase((inst),(opc),((basereg)&0x7),(disp),(is_int)); amd64_codegen_post(inst); } while (0)
#define amd64_fstp_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fstp((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fcompp_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fcompp(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fucompp_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fucompp(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fnstsw_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fnstsw(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fnstcw_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fnstcw((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_fnstcw_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_fnstcw_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_fldcw_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fldcw((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_fldcw_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fldcw_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_fchs_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fchs(inst); amd64_codegen_post(inst); } while (0)
#define amd64_frem_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_frem(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fxch_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fxch((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fcomi_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fcomi((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fcomip_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fcomip((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fucomi_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fucomi((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fucomip_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fucomip((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fld_size(inst,mem,is_double,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fld((inst),(mem),(is_double)); amd64_codegen_post(inst); } while (0)
//#define amd64_fld_membase_size(inst,basereg,disp,is_double,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fld_membase((inst),((basereg)&0x7),(disp),(is_double)); amd64_codegen_post(inst); } while (0)
#define amd64_fld80_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fld80_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_fld80_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_fld80_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_fild_size(inst,mem,is_long,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fild((inst),(mem),(is_long)); amd64_codegen_post(inst); } while (0)
#define amd64_fild_membase_size(inst,basereg,disp,is_long,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fild_membase((inst),((basereg)&0x7),(disp),(is_long)); amd64_codegen_post(inst); } while (0)
#define amd64_fld_reg_size(inst,index,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fld_reg((inst),(index)); amd64_codegen_post(inst); } while (0)
#define amd64_fldz_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fldz(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fld1_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fld1(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fldpi_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fldpi(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fst_size(inst,mem,is_double,pop_stack,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fst((inst),(mem),(is_double),(pop_stack)); amd64_codegen_post(inst); } while (0)
#define amd64_fst_membase_size(inst,basereg,disp,is_double,pop_stack,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fst_membase((inst),((basereg)&0x7),(disp),(is_double),(pop_stack)); amd64_codegen_post(inst); } while (0)
#define amd64_fst80_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fst80_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_fst80_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fst80_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_fist_pop_size(inst,mem,is_long,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_fist_pop((inst),(mem),(is_long)); amd64_codegen_post(inst); } while (0)
#define amd64_fist_pop_membase_size(inst,basereg,disp,is_long,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fist_pop_membase((inst),((basereg)&0x7),(disp),(is_long)); amd64_codegen_post(inst); } while (0)
#define amd64_fstsw_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_fstsw(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fist_membase_size(inst,basereg,disp,is_int,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fist_membase((inst),((basereg)&0x7),(disp),(is_int)); amd64_codegen_post(inst); } while (0)
//#define amd64_push_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_push_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_push_regp_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_push_regp((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_push_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_push_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
//#define amd64_push_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_push_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_push_memindex_size(inst,basereg,disp,indexreg,shift,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,(indexreg),(basereg)); x86_push_memindex((inst),((basereg)&0x7),(disp),((indexreg)&0x7),(shift)); amd64_codegen_post(inst); } while (0)
#define amd64_push_imm_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_push_imm((inst),(imm)); amd64_codegen_post(inst); } while (0)
//#define amd64_pop_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_pop_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_pop_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_pop_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_pop_membase_size(inst,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_pop_membase((inst),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_pushad_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_pushad(inst); amd64_codegen_post(inst); } while (0)
#define amd64_pushfd_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_pushfd(inst); amd64_codegen_post(inst); } while (0)
#define amd64_popad_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_popad(inst); amd64_codegen_post(inst); } while (0)
#define amd64_popfd_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_popfd(inst); amd64_codegen_post(inst); } while (0)
#define amd64_loop_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_loop((inst),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_loope_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_loope((inst),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_loopne_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_loopne((inst),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_jump32_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_jump32((inst),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_jump8_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_jump8((inst),(imm)); amd64_codegen_post(inst); } while (0)
#if !defined( __native_client_codegen__ )
/* Defined above for Native Client, so they can be used in other macros */
#define amd64_jump_reg_size(inst,reg,size) do { amd64_emit_rex ((inst),0,0,0,(reg)); x86_jump_reg((inst),((reg)&0x7)); } while (0)
#define amd64_jump_mem_size(inst,mem,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_jump_mem((inst),(mem)); } while (0)
#endif

#define amd64_jump_disp_size(inst,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_jump_disp((inst),(disp)); amd64_codegen_post(inst); } while (0)
#define amd64_branch8_size(inst,cond,imm,is_signed,size) do { x86_branch8((inst),(cond),(imm),(is_signed)); } while (0)
#define amd64_branch32_size(inst,cond,imm,is_signed,size) do { x86_branch32((inst),(cond),(imm),(is_signed)); } while (0)
#define amd64_branch_size_body(inst,cond,target,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_branch((inst),(cond),(target),(is_signed)); amd64_codegen_post(inst); } while (0)
#if defined(__default_codegen__)
#define amd64_branch_size(inst,cond,target,is_signed,size) do { amd64_branch_size_body((inst),(cond),(target),(is_signed),(size)); } while (0)
#elif defined(__native_client_codegen__)
#define amd64_branch_size(inst,cond,target,is_signed,size) \
	do { \
		/* amd64_branch_size_body used twice in     */ \
		/* case of relocation by amd64_codegen_post */ \
		guint8* branch_start; \
		amd64_codegen_pre(inst); \
		amd64_branch_size_body((inst),(cond),(target),(is_signed),(size)); \
		inst = amd64_codegen_post(inst); \
		branch_start = inst; \
		amd64_branch_size_body((inst),(cond),(target),(is_signed),(size)); \
		mono_amd64_patch(branch_start, (target)); \
	} while (0)
#endif /* __native_client_codegen__ */

#define amd64_branch_disp_size(inst,cond,disp,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_branch_disp((inst),(cond),(disp),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_set_reg_size(inst,cond,reg,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex((inst),1,0,0,(reg)); x86_set_reg((inst),(cond),((reg)&0x7),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_set_mem_size(inst,cond,mem,is_signed,size) do { amd64_codegen_pre(inst); x86_set_mem((inst),(cond),(mem),(is_signed)); amd64_codegen_post(inst); } while (0)
#define amd64_set_membase_size(inst,cond,basereg,disp,is_signed,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,(basereg)); x86_set_membase((inst),(cond),((basereg)&0x7),(disp),(is_signed)); amd64_codegen_post(inst); } while (0)
//#define amd64_call_reg_size(inst,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_call_reg((inst),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_call_mem_size(inst,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_call_mem((inst),(mem)); amd64_codegen_post(inst); } while (0)

#if defined(__default_codegen__)

#define amd64_call_imm_size(inst,disp,size) do { x86_call_imm((inst),(disp)); } while (0)
#define amd64_call_code_size(inst,target,size) do { x86_call_code((inst),(target)); } while (0)

#elif defined(__native_client_codegen__)
/* Size is ignored for Native Client calls, we restrict jumping to 32-bits */
#define amd64_call_imm_size(inst,disp,size) \
	do { \
		amd64_codegen_pre((inst)); \
		amd64_call_sequence_pre((inst)); \
		x86_call_imm((inst),(disp)); \
		amd64_call_sequence_post((inst)); \
		amd64_codegen_post((inst)); \
	} while (0)
/* x86_call_code is called twice below, first so we can get the size of the */
/* call sequence, and again so the exact offset from "inst" is used, since  */
/* the sequence could have moved from amd64_call_sequence_post.             */
/* Size is ignored for Native Client jumps, we restrict jumping to 32-bits */
#define amd64_call_code_size(inst,target,size) \
	do { \
		amd64_codegen_pre((inst)); \
		guint8* adjusted_start; \
		guint8* call_start; \
		amd64_call_sequence_pre((inst)); \
		x86_call_code((inst),(target)); \
		adjusted_start = amd64_call_sequence_post((inst)); \
		call_start = adjusted_start; \
		x86_call_code(adjusted_start, (target)); \
		amd64_codegen_post((inst)); \
		mono_amd64_patch(call_start, (target)); \
	} while (0)
#endif /*__native_client_codegen__*/

//#define amd64_ret_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_ret(inst); amd64_codegen_post(inst); } while (0)
#define amd64_ret_imm_size(inst,imm,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_ret_imm((inst),(imm)); amd64_codegen_post(inst); } while (0)
#define amd64_cmov_reg_size(inst,cond,is_signed,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_cmov_reg((inst),(cond),(is_signed),((dreg)&0x7),((reg)&0x7)); amd64_codegen_post(inst); } while (0)
#define amd64_cmov_mem_size(inst,cond,is_signed,reg,mem,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_cmov_mem((inst),(cond),(is_signed),((reg)&0x7),(mem)); amd64_codegen_post(inst); } while (0)
#define amd64_cmov_membase_size(inst,cond,is_signed,reg,basereg,disp,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_cmov_membase((inst),(cond),(is_signed),((reg)&0x7),((basereg)&0x7),(disp)); amd64_codegen_post(inst); } while (0)
/* The original body referenced an undefined "size"; ENTER already defaults to a 64-bit operand on amd64, so no REX.W is needed. */
#define amd64_enter_size(inst,framesize) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),0,0,0,0); x86_enter((inst),(framesize)); amd64_codegen_post(inst); } while (0)
//#define amd64_leave_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_leave(inst); amd64_codegen_post(inst); } while (0)
#define amd64_sahf_size(inst,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_sahf(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fsin_size(inst,size) do { amd64_codegen_pre(inst); x86_fsin(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fcos_size(inst,size) do { amd64_codegen_pre(inst); x86_fcos(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fabs_size(inst,size) do { amd64_codegen_pre(inst); x86_fabs(inst); amd64_codegen_post(inst); } while (0)
#define amd64_ftst_size(inst,size) do { amd64_codegen_pre(inst); x86_ftst(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fxam_size(inst,size) do { amd64_codegen_pre(inst); x86_fxam(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fpatan_size(inst,size) do { amd64_codegen_pre(inst); x86_fpatan(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fprem_size(inst,size) do { amd64_codegen_pre(inst); x86_fprem(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fprem1_size(inst,size) do { amd64_codegen_pre(inst); x86_fprem1(inst); amd64_codegen_post(inst); } while (0)
#define amd64_frndint_size(inst,size) do { amd64_codegen_pre(inst); x86_frndint(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fsqrt_size(inst,size) do { amd64_codegen_pre(inst); x86_fsqrt(inst); amd64_codegen_post(inst); } while (0)
#define amd64_fptan_size(inst,size) do { amd64_codegen_pre(inst); x86_fptan(inst); amd64_codegen_post(inst); } while (0)
//#define amd64_padding_size(inst,size) do { amd64_codegen_pre(inst); x86_padding((inst),(size)); amd64_codegen_post(inst); } while (0)
#define amd64_prolog_size(inst,frame_size,reg_mask,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_prolog((inst),(frame_size),(reg_mask)); amd64_codegen_post(inst); } while (0)
#define amd64_epilog_size(inst,reg_mask,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,0); x86_epilog((inst),(reg_mask)); amd64_codegen_post(inst); } while (0)
#define amd64_xadd_reg_reg_size(inst,dreg,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_xadd_reg_reg ((inst), ((dreg)&0x7), ((reg)&0x7), (size)); amd64_codegen_post(inst); } while (0)
#define amd64_xadd_mem_reg_size(inst,mem,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),0,0,(reg)); x86_xadd_mem_reg((inst),(mem),((reg)&0x7), (size)); amd64_codegen_post(inst); } while (0)
#define amd64_xadd_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_codegen_pre(inst); amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_xadd_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7),(size)); amd64_codegen_post(inst); } while (0)
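
/* The convenience macros below are the same emitters with the operand size
 * fixed at 8 bytes (64-bit), the natural word size on amd64; operations where
 * the size matters per call site keep an explicit size argument instead.
 * Illustrative use (sketch only, assuming a guint8* code buffer):
 *
 *   amd64_neg_reg (code, AMD64_RAX);                  // == amd64_neg_reg_size (code, AMD64_RAX, 8)
 *   amd64_test_reg_reg (code, AMD64_RDI, AMD64_RSI);  // 64-bit test
 */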
#define amd64_breakpoint(inst) amd64_breakpoint_size(inst,8)
#define amd64_cld(inst) amd64_cld_size(inst,8)
#define amd64_stosb(inst) amd64_stosb_size(inst,8)
#define amd64_stosl(inst) amd64_stosl_size(inst,8)
#define amd64_stosd(inst) amd64_stosd_size(inst,8)
#define amd64_movsb(inst) amd64_movsb_size(inst,8)
#define amd64_movsl(inst) amd64_movsl_size(inst,8)
#define amd64_movsd(inst) amd64_movsd_size(inst,8)
#define amd64_prefix(inst,p) amd64_prefix_size(inst,p,8)
#define amd64_rdtsc(inst) amd64_rdtsc_size(inst,8)
#define amd64_cmpxchg_reg_reg(inst,dreg,reg) amd64_cmpxchg_reg_reg_size(inst,dreg,reg,8)
#define amd64_cmpxchg_mem_reg(inst,mem,reg) amd64_cmpxchg_mem_reg_size(inst,mem,reg,8)
#define amd64_cmpxchg_membase_reg(inst,basereg,disp,reg) amd64_cmpxchg_membase_reg_size(inst,basereg,disp,reg,8)
#define amd64_xchg_reg_reg(inst,dreg,reg,size) amd64_xchg_reg_reg_size(inst,dreg,reg,size)
#define amd64_xchg_mem_reg(inst,mem,reg,size) amd64_xchg_mem_reg_size(inst,mem,reg,size)
#define amd64_xchg_membase_reg(inst,basereg,disp,reg,size) amd64_xchg_membase_reg_size(inst,basereg,disp,reg,size)
#define amd64_xadd_reg_reg(inst,dreg,reg,size) amd64_xadd_reg_reg_size(inst,dreg,reg,size)
#define amd64_xadd_mem_reg(inst,mem,reg,size) amd64_xadd_mem_reg_size(inst,mem,reg,size)
#define amd64_xadd_membase_reg(inst,basereg,disp,reg,size) amd64_xadd_membase_reg_size(inst,basereg,disp,reg,size)
#define amd64_inc_mem(inst,mem) amd64_inc_mem_size(inst,mem,8)
#define amd64_inc_membase(inst,basereg,disp) amd64_inc_membase_size(inst,basereg,disp,8)
#define amd64_inc_reg(inst,reg) amd64_inc_reg_size(inst,reg,8)
#define amd64_dec_mem(inst,mem) amd64_dec_mem_size(inst,mem,8)
#define amd64_dec_membase(inst,basereg,disp) amd64_dec_membase_size(inst,basereg,disp,8)
#define amd64_dec_reg(inst,reg) amd64_dec_reg_size(inst,reg,8)
#define amd64_not_mem(inst,mem) amd64_not_mem_size(inst,mem,8)
#define amd64_not_membase(inst,basereg,disp) amd64_not_membase_size(inst,basereg,disp,8)
#define amd64_not_reg(inst,reg) amd64_not_reg_size(inst,reg,8)
#define amd64_neg_mem(inst,mem) amd64_neg_mem_size(inst,mem,8)
#define amd64_neg_membase(inst,basereg,disp) amd64_neg_membase_size(inst,basereg,disp,8)
#define amd64_neg_reg(inst,reg) amd64_neg_reg_size(inst,reg,8)
#define amd64_nop(inst) amd64_nop_size(inst,8)
//#define amd64_alu_reg_imm(inst,opc,reg,imm) amd64_alu_reg_imm_size(inst,opc,reg,imm,8)
#define amd64_alu_mem_imm(inst,opc,mem,imm) amd64_alu_mem_imm_size(inst,opc,mem,imm,8)
#define amd64_alu_membase_imm(inst,opc,basereg,disp,imm) amd64_alu_membase_imm_size(inst,opc,basereg,disp,imm,8)
#define amd64_alu_mem_reg(inst,opc,mem,reg) amd64_alu_mem_reg_size(inst,opc,mem,reg,8)
#define amd64_alu_membase_reg(inst,opc,basereg,disp,reg) amd64_alu_membase_reg_size(inst,opc,basereg,disp,reg,8)
//#define amd64_alu_reg_reg(inst,opc,dreg,reg) amd64_alu_reg_reg_size(inst,opc,dreg,reg,8)
#define amd64_alu_reg8_reg8(inst,opc,dreg,reg,is_dreg_h,is_reg_h) amd64_alu_reg8_reg8_size(inst,opc,dreg,reg,is_dreg_h,is_reg_h,8)
#define amd64_alu_reg_mem(inst,opc,reg,mem) amd64_alu_reg_mem_size(inst,opc,reg,mem,8)
#define amd64_alu_reg_membase(inst,opc,reg,basereg,disp) amd64_alu_reg_membase_size(inst,opc,reg,basereg,disp,8)
#define amd64_test_reg_imm(inst,reg,imm) amd64_test_reg_imm_size(inst,reg,imm,8)
#define amd64_test_mem_imm(inst,mem,imm) amd64_test_mem_imm_size(inst,mem,imm,8)
#define amd64_test_membase_imm(inst,basereg,disp,imm) amd64_test_membase_imm_size(inst,basereg,disp,imm,8)
#define amd64_test_reg_reg(inst,dreg,reg) amd64_test_reg_reg_size(inst,dreg,reg,8)
#define amd64_test_mem_reg(inst,mem,reg) amd64_test_mem_reg_size(inst,mem,reg,8)
#define amd64_test_membase_reg(inst,basereg,disp,reg) amd64_test_membase_reg_size(inst,basereg,disp,reg,8)
#define amd64_shift_reg_imm(inst,opc,reg,imm) amd64_shift_reg_imm_size(inst,opc,reg,imm,8)
#define amd64_shift_mem_imm(inst,opc,mem,imm) amd64_shift_mem_imm_size(inst,opc,mem,imm,8)
#define amd64_shift_membase_imm(inst,opc,basereg,disp,imm) amd64_shift_membase_imm_size(inst,opc,basereg,disp,imm,8)
#define amd64_shift_reg(inst,opc,reg) amd64_shift_reg_size(inst,opc,reg,8)
#define amd64_shift_mem(inst,opc,mem) amd64_shift_mem_size(inst,opc,mem,8)
#define amd64_shift_membase(inst,opc,basereg,disp) amd64_shift_membase_size(inst,opc,basereg,disp,8)
#define amd64_shrd_reg(inst,dreg,reg) amd64_shrd_reg_size(inst,dreg,reg,8)
#define amd64_shrd_reg_imm(inst,dreg,reg,shamt) amd64_shrd_reg_imm_size(inst,dreg,reg,shamt,8)
#define amd64_shld_reg(inst,dreg,reg) amd64_shld_reg_size(inst,dreg,reg,8)
#define amd64_shld_reg_imm(inst,dreg,reg,shamt) amd64_shld_reg_imm_size(inst,dreg,reg,shamt,8)
#define amd64_mul_reg(inst,reg,is_signed) amd64_mul_reg_size(inst,reg,is_signed,8)
#define amd64_mul_mem(inst,mem,is_signed) amd64_mul_mem_size(inst,mem,is_signed,8)
#define amd64_mul_membase(inst,basereg,disp,is_signed) amd64_mul_membase_size(inst,basereg,disp,is_signed,8)
#define amd64_imul_reg_reg(inst,dreg,reg) amd64_imul_reg_reg_size(inst,dreg,reg,8)
#define amd64_imul_reg_mem(inst,reg,mem) amd64_imul_reg_mem_size(inst,reg,mem,8)
#define amd64_imul_reg_membase(inst,reg,basereg,disp) amd64_imul_reg_membase_size(inst,reg,basereg,disp,8)
#define amd64_imul_reg_reg_imm(inst,dreg,reg,imm) amd64_imul_reg_reg_imm_size(inst,dreg,reg,imm,8)
#define amd64_imul_reg_mem_imm(inst,reg,mem,imm) amd64_imul_reg_mem_imm_size(inst,reg,mem,imm,8)
#define amd64_imul_reg_membase_imm(inst,reg,basereg,disp,imm) amd64_imul_reg_membase_imm_size(inst,reg,basereg,disp,imm,8)
#define amd64_div_reg(inst,reg,is_signed) amd64_div_reg_size(inst,reg,is_signed,8)
#define amd64_div_mem(inst,mem,is_signed) amd64_div_mem_size(inst,mem,is_signed,8)
#define amd64_div_membase(inst,basereg,disp,is_signed) amd64_div_membase_size(inst,basereg,disp,is_signed,8)
//#define amd64_mov_mem_reg(inst,mem,reg,size) amd64_mov_mem_reg_size(inst,mem,reg,size)
//#define amd64_mov_regp_reg(inst,regp,reg,size) amd64_mov_regp_reg_size(inst,regp,reg,size)
//#define amd64_mov_membase_reg(inst,basereg,disp,reg,size) amd64_mov_membase_reg_size(inst,basereg,disp,reg,size)
#define amd64_mov_memindex_reg(inst,basereg,disp,indexreg,shift,reg,size) amd64_mov_memindex_reg_size(inst,basereg,disp,indexreg,shift,reg,size)
//#define amd64_mov_reg_reg(inst,dreg,reg,size) amd64_mov_reg_reg_size(inst,dreg,reg,size)
//#define amd64_mov_reg_mem(inst,reg,mem,size) amd64_mov_reg_mem_size(inst,reg,mem,size)
//#define amd64_mov_reg_membase(inst,reg,basereg,disp,size) amd64_mov_reg_membase_size(inst,reg,basereg,disp,size)
#define amd64_mov_reg_memindex(inst,reg,basereg,disp,indexreg,shift,size) amd64_mov_reg_memindex_size(inst,reg,basereg,disp,indexreg,shift,size)
#define amd64_clear_reg(inst,reg) amd64_clear_reg_size(inst,reg,8)
//#define amd64_mov_reg_imm(inst,reg,imm) amd64_mov_reg_imm_size(inst,reg,imm,8)
#define amd64_mov_mem_imm(inst,mem,imm,size) amd64_mov_mem_imm_size(inst,mem,imm,size)
//#define amd64_mov_membase_imm(inst,basereg,disp,imm,size) amd64_mov_membase_imm_size(inst,basereg,disp,imm,size)
#define amd64_mov_memindex_imm(inst,basereg,disp,indexreg,shift,imm,size) amd64_mov_memindex_imm_size(inst,basereg,disp,indexreg,shift,imm,size)
#define amd64_lea_mem(inst,reg,mem) amd64_lea_mem_size(inst,reg,mem,8)
//#define amd64_lea_membase(inst,reg,basereg,disp) amd64_lea_membase_size(inst,reg,basereg,disp,8)
#define amd64_lea_memindex(inst,reg,basereg,disp,indexreg,shift) amd64_lea_memindex_size(inst,reg,basereg,disp,indexreg,shift,8)
#define amd64_widen_reg(inst,dreg,reg,is_signed,is_half) amd64_widen_reg_size(inst,dreg,reg,is_signed,is_half,8)
#define amd64_widen_mem(inst,dreg,mem,is_signed,is_half) amd64_widen_mem_size(inst,dreg,mem,is_signed,is_half,8)
#define amd64_widen_membase(inst,dreg,basereg,disp,is_signed,is_half) amd64_widen_membase_size(inst,dreg,basereg,disp,is_signed,is_half,8)
#define amd64_widen_memindex(inst,dreg,basereg,disp,indexreg,shift,is_signed,is_half) amd64_widen_memindex_size(inst,dreg,basereg,disp,indexreg,shift,is_signed,is_half,8)
#define amd64_cdq(inst) amd64_cdq_size(inst,8)
#define amd64_wait(inst) amd64_wait_size(inst,8)
#define amd64_fp_op_mem(inst,opc,mem,is_double) amd64_fp_op_mem_size(inst,opc,mem,is_double,8)
#define amd64_fp_op_membase(inst,opc,basereg,disp,is_double) amd64_fp_op_membase_size(inst,opc,basereg,disp,is_double,8)
#define amd64_fp_op(inst,opc,index) amd64_fp_op_size(inst,opc,index,8)
#define amd64_fp_op_reg(inst,opc,index,pop_stack) amd64_fp_op_reg_size(inst,opc,index,pop_stack,8)
#define amd64_fp_int_op_membase(inst,opc,basereg,disp,is_int) amd64_fp_int_op_membase_size(inst,opc,basereg,disp,is_int,8)
#define amd64_fstp(inst,index) amd64_fstp_size(inst,index,8)
#define amd64_fcompp(inst) amd64_fcompp_size(inst,8)
#define amd64_fucompp(inst) amd64_fucompp_size(inst,8)
#define amd64_fnstsw(inst) amd64_fnstsw_size(inst,8)
#define amd64_fnstcw(inst,mem) amd64_fnstcw_size(inst,mem,8)
#define amd64_fnstcw_membase(inst,basereg,disp) amd64_fnstcw_membase_size(inst,basereg,disp,8)
#define amd64_fldcw(inst,mem) amd64_fldcw_size(inst,mem,8)
#define amd64_fldcw_membase(inst,basereg,disp) amd64_fldcw_membase_size(inst,basereg,disp,8)
#define amd64_fchs(inst) amd64_fchs_size(inst,8)
#define amd64_frem(inst) amd64_frem_size(inst,8)
#define amd64_fxch(inst,index) amd64_fxch_size(inst,index,8)
#define amd64_fcomi(inst,index) amd64_fcomi_size(inst,index,8)
#define amd64_fcomip(inst,index) amd64_fcomip_size(inst,index,8)
#define amd64_fucomi(inst,index) amd64_fucomi_size(inst,index,8)
#define amd64_fucomip(inst,index) amd64_fucomip_size(inst,index,8)
#define amd64_fld(inst,mem,is_double) amd64_fld_size(inst,mem,is_double,8)
#define amd64_fld_membase(inst,basereg,disp,is_double) amd64_fld_membase_size(inst,basereg,disp,is_double,8)
#define amd64_fld80_mem(inst,mem) amd64_fld80_mem_size(inst,mem,8)
#define amd64_fld80_membase(inst,basereg,disp) amd64_fld80_membase_size(inst,basereg,disp,8)
#define amd64_fild(inst,mem,is_long) amd64_fild_size(inst,mem,is_long,8)
#define amd64_fild_membase(inst,basereg,disp,is_long) amd64_fild_membase_size(inst,basereg,disp,is_long,8)
#define amd64_fld_reg(inst,index) amd64_fld_reg_size(inst,index,8)
#define amd64_fldz(inst) amd64_fldz_size(inst,8)
#define amd64_fld1(inst) amd64_fld1_size(inst,8)
#define amd64_fldpi(inst) amd64_fldpi_size(inst,8)
#define amd64_fst(inst,mem,is_double,pop_stack) amd64_fst_size(inst,mem,is_double,pop_stack,8)
#define amd64_fst_membase(inst,basereg,disp,is_double,pop_stack) amd64_fst_membase_size(inst,basereg,disp,is_double,pop_stack,8)
#define amd64_fst80_mem(inst,mem) amd64_fst80_mem_size(inst,mem,8)
#define amd64_fst80_membase(inst,basereg,disp) amd64_fst80_membase_size(inst,basereg,disp,8)
#define amd64_fist_pop(inst,mem,is_long) amd64_fist_pop_size(inst,mem,is_long,8)
#define amd64_fist_pop_membase(inst,basereg,disp,is_long) amd64_fist_pop_membase_size(inst,basereg,disp,is_long,8)
#define amd64_fstsw(inst) amd64_fstsw_size(inst,8)
#define amd64_fist_membase(inst,basereg,disp,is_int) amd64_fist_membase_size(inst,basereg,disp,is_int,8)
//#define amd64_push_reg(inst,reg) amd64_push_reg_size(inst,reg,8)
#define amd64_push_regp(inst,reg) amd64_push_regp_size(inst,reg,8)
#define amd64_push_mem(inst,mem) amd64_push_mem_size(inst,mem,8)
//#define amd64_push_membase(inst,basereg,disp) amd64_push_membase_size(inst,basereg,disp,8)
#define amd64_push_memindex(inst,basereg,disp,indexreg,shift) amd64_push_memindex_size(inst,basereg,disp,indexreg,shift,8)
#define amd64_push_imm(inst,imm) amd64_push_imm_size(inst,imm,8)
//#define amd64_pop_reg(inst,reg) amd64_pop_reg_size(inst,reg,8)
#define amd64_pop_mem(inst,mem) amd64_pop_mem_size(inst,mem,8)
#define amd64_pop_membase(inst,basereg,disp) amd64_pop_membase_size(inst,basereg,disp,8)
#define amd64_pushad(inst) amd64_pushad_size(inst,8)
#define amd64_pushfd(inst) amd64_pushfd_size(inst,8)
#define amd64_popad(inst) amd64_popad_size(inst,8)
#define amd64_popfd(inst) amd64_popfd_size(inst,8)
#define amd64_loop(inst,imm) amd64_loop_size(inst,imm,8)
#define amd64_loope(inst,imm) amd64_loope_size(inst,imm,8)
#define amd64_loopne(inst,imm) amd64_loopne_size(inst,imm,8)
#define amd64_jump32(inst,imm) amd64_jump32_size(inst,imm,8)
#define amd64_jump8(inst,imm) amd64_jump8_size(inst,imm,8)
#define amd64_jump_reg(inst,reg) amd64_jump_reg_size(inst,reg,8)
#define amd64_jump_mem(inst,mem) amd64_jump_mem_size(inst,mem,8)
#define amd64_jump_membase(inst,basereg,disp) amd64_jump_membase_size(inst,basereg,disp,8)
#define amd64_jump_code(inst,target) amd64_jump_code_size(inst,target,8)
#define amd64_jump_disp(inst,disp) amd64_jump_disp_size(inst,disp,8)
#define amd64_branch8(inst,cond,imm,is_signed) amd64_branch8_size(inst,cond,imm,is_signed,8)
#define amd64_branch32(inst,cond,imm,is_signed) amd64_branch32_size(inst,cond,imm,is_signed,8)
#define amd64_branch(inst,cond,target,is_signed) amd64_branch_size(inst,cond,target,is_signed,8)
#define amd64_branch_disp(inst,cond,disp,is_signed) amd64_branch_disp_size(inst,cond,disp,is_signed,8)
#define amd64_set_reg(inst,cond,reg,is_signed) amd64_set_reg_size(inst,cond,reg,is_signed,8)
#define amd64_set_mem(inst,cond,mem,is_signed) amd64_set_mem_size(inst,cond,mem,is_signed,8)
#define amd64_set_membase(inst,cond,basereg,disp,is_signed) amd64_set_membase_size(inst,cond,basereg,disp,is_signed,8)
#define amd64_call_imm(inst,disp) amd64_call_imm_size(inst,disp,8)
//#define amd64_call_reg(inst,reg) amd64_call_reg_size(inst,reg,8)
#define amd64_call_mem(inst,mem) amd64_call_mem_size(inst,mem,8)
#define amd64_call_membase(inst,basereg,disp) amd64_call_membase_size(inst,basereg,disp,8)
#define amd64_call_code(inst,target) amd64_call_code_size(inst,target,8)
//#define amd64_ret(inst) amd64_ret_size(inst,8)
#define amd64_ret_imm(inst,imm) amd64_ret_imm_size(inst,imm,8)
#define amd64_cmov_reg(inst,cond,is_signed,dreg,reg) amd64_cmov_reg_size(inst,cond,is_signed,dreg,reg,8)
#define amd64_cmov_mem(inst,cond,is_signed,reg,mem) amd64_cmov_mem_size(inst,cond,is_signed,reg,mem,8)
#define amd64_cmov_membase(inst,cond,is_signed,reg,basereg,disp) amd64_cmov_membase_size(inst,cond,is_signed,reg,basereg,disp,8)
#define amd64_enter(inst,framesize) amd64_enter_size(inst,framesize)
//#define amd64_leave(inst) amd64_leave_size(inst,8)
#define amd64_sahf(inst) amd64_sahf_size(inst,8)
#define amd64_fsin(inst) amd64_fsin_size(inst,8)
#define amd64_fcos(inst) amd64_fcos_size(inst,8)
#define amd64_fabs(inst) amd64_fabs_size(inst,8)
#define amd64_ftst(inst) amd64_ftst_size(inst,8)
#define amd64_fxam(inst) amd64_fxam_size(inst,8)
#define amd64_fpatan(inst) amd64_fpatan_size(inst,8)
#define amd64_fprem(inst) amd64_fprem_size(inst,8)
#define amd64_fprem1(inst) amd64_fprem1_size(inst,8)
#define amd64_frndint(inst) amd64_frndint_size(inst,8)
#define amd64_fsqrt(inst) amd64_fsqrt_size(inst,8)
#define amd64_fptan(inst) amd64_fptan_size(inst,8)
#define amd64_padding(inst,size) amd64_padding_size(inst,size)
#define amd64_prolog(inst,frame,reg_mask) amd64_prolog_size(inst,frame,reg_mask,8)
#define amd64_epilog(inst,reg_mask) amd64_epilog_size(inst,reg_mask,8)