1 /* jit/x86_64/emitfuncs.c - x86_64 code emitter functions
3 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003
4 Institut f. Computersprachen, TU Wien
5 R. Grafl, A. Krall, C. Kruegel, C. Oates, R. Obermaisser, M. Probst,
6 S. Ring, E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich,
9 This file is part of CACAO.
11 This program is free software; you can redistribute it and/or
12 modify it under the terms of the GNU General Public License as
13 published by the Free Software Foundation; either version 2, or (at
14 your option) any later version.
16 This program is distributed in the hope that it will be useful, but
17 WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 General Public License for more details.
21 You should have received a copy of the GNU General Public License
22 along with this program; if not, write to the Free Software
23 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
26 Contact: cacao@complang.tuwien.ac.at
28 Authors: Christian Thalinger
30 $Id: emitfuncs.c 1266 2004-07-01 20:38:16Z twisti $
36 #include "jit/x86_64/emitfuncs.h"
37 #include "jit/x86_64/codegen.h"
38 #include "jit/x86_64/types.h"
41 /* code generation functions */
43 void x86_64_emit_ialu(s4 alu_op, stackptr src, instruction *iptr)
45 s4 s1 = src->prev->regoff;
47 s4 d = iptr->dst->regoff;
49 if (iptr->dst->flags & INMEMORY) {
50 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
52 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
53 x86_64_alul_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
56 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
57 x86_64_alul_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
60 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
61 x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, REG_ITMP1);
62 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
65 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
67 x86_64_alul_reg_membase(alu_op, s1, REG_SP, d * 8);
70 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
71 x86_64_alul_reg_reg(alu_op, s1, REG_ITMP1);
72 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
75 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
77 x86_64_alul_reg_membase(alu_op, s2, REG_SP, d * 8);
80 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
81 x86_64_alul_reg_reg(alu_op, s2, REG_ITMP1);
82 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
86 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
87 x86_64_alul_reg_membase(alu_op, s2, REG_SP, d * 8);
91 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
92 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
93 x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, d);
95 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
97 x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, d);
99 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
101 x86_64_alul_membase_reg(alu_op, REG_SP, s1 * 8, d);
105 x86_64_alul_reg_reg(alu_op, s1, d);
109 x86_64_alul_reg_reg(alu_op, s2, d);
117 void x86_64_emit_lalu(s4 alu_op, stackptr src, instruction *iptr)
119 s4 s1 = src->prev->regoff;
121 s4 d = iptr->dst->regoff;
123 if (iptr->dst->flags & INMEMORY) {
124 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
126 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
127 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
129 } else if (s1 == d) {
130 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
131 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
134 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
135 x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, REG_ITMP1);
136 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
139 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
141 x86_64_alu_reg_membase(alu_op, s1, REG_SP, d * 8);
144 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
145 x86_64_alu_reg_reg(alu_op, s1, REG_ITMP1);
146 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
149 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
151 x86_64_alu_reg_membase(alu_op, s2, REG_SP, d * 8);
154 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
155 x86_64_alu_reg_reg(alu_op, s2, REG_ITMP1);
156 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
160 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
161 x86_64_alu_reg_membase(alu_op, s2, REG_SP, d * 8);
165 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
166 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
167 x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, d);
169 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
171 x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, d);
173 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
175 x86_64_alu_membase_reg(alu_op, REG_SP, s1 * 8, d);
179 x86_64_alu_reg_reg(alu_op, s1, d);
183 x86_64_alu_reg_reg(alu_op, s2, d);
191 void x86_64_emit_ialuconst(s4 alu_op, stackptr src, instruction *iptr)
194 s4 d = iptr->dst->regoff;
196 if (iptr->dst->flags & INMEMORY) {
197 if (src->flags & INMEMORY) {
199 x86_64_alul_imm_membase(alu_op, iptr->val.i, REG_SP, d * 8);
202 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
203 x86_64_alul_imm_reg(alu_op, iptr->val.i, REG_ITMP1);
204 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
208 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
209 x86_64_alul_imm_membase(alu_op, iptr->val.i, REG_SP, d * 8);
213 if (src->flags & INMEMORY) {
214 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
215 x86_64_alul_imm_reg(alu_op, iptr->val.i, d);
219 x86_64_alul_imm_reg(alu_op, iptr->val.i, d);
226 void x86_64_emit_laluconst(s4 alu_op, stackptr src, instruction *iptr)
229 s4 d = iptr->dst->regoff;
231 if (iptr->dst->flags & INMEMORY) {
232 if (src->flags & INMEMORY) {
234 if (x86_64_is_imm32(iptr->val.l)) {
235 x86_64_alu_imm_membase(alu_op, iptr->val.l, REG_SP, d * 8);
238 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
239 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
243 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
245 if (x86_64_is_imm32(iptr->val.l)) {
246 x86_64_alu_imm_reg(alu_op, iptr->val.l, REG_ITMP1);
249 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP2);
250 x86_64_alu_reg_reg(alu_op, REG_ITMP2, REG_ITMP1);
252 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
256 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
258 if (x86_64_is_imm32(iptr->val.l)) {
259 x86_64_alu_imm_membase(alu_op, iptr->val.l, REG_SP, d * 8);
262 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
263 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
268 if (src->flags & INMEMORY) {
269 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
275 if (x86_64_is_imm32(iptr->val.l)) {
276 x86_64_alu_imm_reg(alu_op, iptr->val.l, d);
279 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
280 x86_64_alu_reg_reg(alu_op, REG_ITMP1, d);
287 void x86_64_emit_ishift(s4 shift_op, stackptr src, instruction *iptr)
289 s4 s1 = src->prev->regoff;
291 s4 d = iptr->dst->regoff;
293 M_INTMOVE(RCX, REG_ITMP1); /* save RCX */
294 if (iptr->dst->flags & INMEMORY) {
295 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
297 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
298 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
301 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
302 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
303 x86_64_shiftl_reg(shift_op, REG_ITMP2);
304 x86_64_movl_reg_membase(REG_ITMP2, REG_SP, d * 8);
307 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
308 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
309 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
310 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
312 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
315 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
319 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
320 x86_64_shiftl_reg(shift_op, REG_ITMP2);
321 x86_64_movl_reg_membase(REG_ITMP2, REG_SP, d * 8);
326 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
327 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
329 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
336 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
337 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
338 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
339 x86_64_shiftl_reg(shift_op, d);
341 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
342 M_INTMOVE(s1, d); /* maybe src is RCX */
343 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
344 x86_64_shiftl_reg(shift_op, d);
346 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
348 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
349 x86_64_shiftl_reg(shift_op, d);
360 x86_64_shiftl_reg(shift_op, d);
364 M_INTMOVE(REG_ITMP3, RCX);
367 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
374 void x86_64_emit_lshift(s4 shift_op, stackptr src, instruction *iptr)
376 s4 s1 = src->prev->regoff;
378 s4 d = iptr->dst->regoff;
380 M_INTMOVE(RCX, REG_ITMP1); /* save RCX */
381 if (iptr->dst->flags & INMEMORY) {
382 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
384 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
385 x86_64_shift_membase(shift_op, REG_SP, d * 8);
388 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
389 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
390 x86_64_shift_reg(shift_op, REG_ITMP2);
391 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, d * 8);
394 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
395 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
396 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
397 x86_64_shift_membase(shift_op, REG_SP, d * 8);
399 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
402 x86_64_shift_membase(shift_op, REG_SP, d * 8);
406 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
407 x86_64_shift_reg(shift_op, REG_ITMP2);
408 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, d * 8);
413 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
414 x86_64_shift_membase(shift_op, REG_SP, d * 8);
416 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
423 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
424 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
425 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
426 x86_64_shift_reg(shift_op, d);
428 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
429 M_INTMOVE(s1, d); /* maybe src is RCX */
430 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
431 x86_64_shift_reg(shift_op, d);
433 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
435 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
436 x86_64_shift_reg(shift_op, d);
446 x86_64_shift_reg(shift_op, d);
450 M_INTMOVE(REG_ITMP3, RCX);
453 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
460 void x86_64_emit_ishiftconst(s4 shift_op, stackptr src, instruction *iptr)
463 s4 d = iptr->dst->regoff;
465 if ((src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
467 x86_64_shiftl_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
470 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
471 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, REG_ITMP1);
472 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
475 } else if ((src->flags & INMEMORY) && !(iptr->dst->flags & INMEMORY)) {
476 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
477 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, d);
479 } else if (!(src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
480 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
481 x86_64_shiftl_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
485 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, d);
491 void x86_64_emit_lshiftconst(s4 shift_op, stackptr src, instruction *iptr)
494 s4 d = iptr->dst->regoff;
496 if ((src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
498 x86_64_shift_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
501 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
502 x86_64_shift_imm_reg(shift_op, iptr->val.i, REG_ITMP1);
503 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
506 } else if ((src->flags & INMEMORY) && !(iptr->dst->flags & INMEMORY)) {
507 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
508 x86_64_shift_imm_reg(shift_op, iptr->val.i, d);
510 } else if (!(src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
511 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
512 x86_64_shift_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
516 x86_64_shift_imm_reg(shift_op, iptr->val.i, d);
522 void x86_64_emit_ifcc(s4 if_op, stackptr src, instruction *iptr)
524 if (src->flags & INMEMORY) {
525 x86_64_alul_imm_membase(X86_64_CMP, iptr->val.i, REG_SP, src->regoff * 8);
528 x86_64_alul_imm_reg(X86_64_CMP, iptr->val.i, src->regoff);
530 x86_64_jcc(if_op, 0);
531 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
536 void x86_64_emit_if_lcc(s4 if_op, stackptr src, instruction *iptr)
540 if (src->flags & INMEMORY) {
541 if (x86_64_is_imm32(iptr->val.l)) {
542 x86_64_alu_imm_membase(X86_64_CMP, iptr->val.l, REG_SP, s1 * 8);
545 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
546 x86_64_alu_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
550 if (x86_64_is_imm32(iptr->val.l)) {
551 x86_64_alu_imm_reg(X86_64_CMP, iptr->val.l, s1);
554 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
555 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP1, s1);
558 x86_64_jcc(if_op, 0);
559 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
564 void x86_64_emit_if_icmpcc(s4 if_op, stackptr src, instruction *iptr)
566 s4 s1 = src->prev->regoff;
569 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
570 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
571 x86_64_alul_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
573 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
574 x86_64_alul_membase_reg(X86_64_CMP, REG_SP, s2 * 8, s1);
576 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
577 x86_64_alul_reg_membase(X86_64_CMP, s2, REG_SP, s1 * 8);
580 x86_64_alul_reg_reg(X86_64_CMP, s2, s1);
582 x86_64_jcc(if_op, 0);
583 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
588 void x86_64_emit_if_lcmpcc(s4 if_op, stackptr src, instruction *iptr)
590 s4 s1 = src->prev->regoff;
593 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
594 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
595 x86_64_alu_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
597 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
598 x86_64_alu_membase_reg(X86_64_CMP, REG_SP, s2 * 8, s1);
600 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
601 x86_64_alu_reg_membase(X86_64_CMP, s2, REG_SP, s1 * 8);
604 x86_64_alu_reg_reg(X86_64_CMP, s2, s1);
606 x86_64_jcc(if_op, 0);
607 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
614 void x86_64_mov_reg_reg(s8 reg, s8 dreg) {
615 x86_64_emit_rex(1,(reg),0,(dreg));
616 *(mcodeptr++) = 0x89;
617 x86_64_emit_reg((reg),(dreg));
621 void x86_64_mov_imm_reg(s8 imm, s8 reg) {
622 x86_64_emit_rex(1,0,0,(reg));
623 *(mcodeptr++) = 0xb8 + ((reg) & 0x07);
624 x86_64_emit_imm64((imm));
628 void x86_64_movl_imm_reg(s8 imm, s8 reg) {
629 x86_64_emit_rex(0,0,0,(reg));
630 *(mcodeptr++) = 0xb8 + ((reg) & 0x07);
631 x86_64_emit_imm32((imm));
635 void x86_64_mov_membase_reg(s8 basereg, s8 disp, s8 reg) {
636 x86_64_emit_rex(1,(reg),0,(basereg));
637 *(mcodeptr++) = 0x8b;
638 x86_64_emit_membase((basereg),(disp),(reg));
642 void x86_64_movl_membase_reg(s8 basereg, s8 disp, s8 reg) {
643 x86_64_emit_rex(0,(reg),0,(basereg));
644 *(mcodeptr++) = 0x8b;
645 x86_64_emit_membase((basereg),(disp),(reg));
650 * this one is for INVOKEVIRTUAL/INVOKEINTERFACE to have a
651 * constant membase immediate length of 32bit
653 void x86_64_mov_membase32_reg(s8 basereg, s8 disp, s8 reg) {
654 x86_64_emit_rex(1,(reg),0,(basereg));
655 *(mcodeptr++) = 0x8b;
656 x86_64_address_byte(2, (reg), (basereg));
657 x86_64_emit_imm32((disp));
661 void x86_64_mov_reg_membase(s8 reg, s8 basereg, s8 disp) {
662 x86_64_emit_rex(1,(reg),0,(basereg));
663 *(mcodeptr++) = 0x89;
664 x86_64_emit_membase((basereg),(disp),(reg));
668 void x86_64_movl_reg_membase(s8 reg, s8 basereg, s8 disp) {
669 x86_64_emit_rex(0,(reg),0,(basereg));
670 *(mcodeptr++) = 0x89;
671 x86_64_emit_membase((basereg),(disp),(reg));
675 void x86_64_mov_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
676 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
677 *(mcodeptr++) = 0x8b;
678 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
682 void x86_64_movl_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
683 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
684 *(mcodeptr++) = 0x8b;
685 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
689 void x86_64_mov_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
690 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
691 *(mcodeptr++) = 0x89;
692 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
696 void x86_64_movl_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
697 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
698 *(mcodeptr++) = 0x89;
699 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
703 void x86_64_movw_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
704 *(mcodeptr++) = 0x66;
705 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
706 *(mcodeptr++) = 0x89;
707 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
711 void x86_64_movb_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
712 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
713 *(mcodeptr++) = 0x88;
714 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
718 void x86_64_mov_imm_membase(s8 imm, s8 basereg, s8 disp) {
719 x86_64_emit_rex(1,0,0,(basereg));
720 *(mcodeptr++) = 0xc7;
721 x86_64_emit_membase((basereg),(disp),0);
722 x86_64_emit_imm32((imm));
726 void x86_64_movl_imm_membase(s8 imm, s8 basereg, s8 disp) {
727 x86_64_emit_rex(0,0,0,(basereg));
728 *(mcodeptr++) = 0xc7;
729 x86_64_emit_membase((basereg),(disp),0);
730 x86_64_emit_imm32((imm));
734 void x86_64_movsbq_reg_reg(s8 reg, s8 dreg) {
735 x86_64_emit_rex(1,(dreg),0,(reg));
736 *(mcodeptr++) = 0x0f;
737 *(mcodeptr++) = 0xbe;
738 /* XXX: why do reg and dreg have to be exchanged */
739 x86_64_emit_reg((dreg),(reg));
743 void x86_64_movsbq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
744 x86_64_emit_rex(1,(dreg),0,(basereg));
745 *(mcodeptr++) = 0x0f;
746 *(mcodeptr++) = 0xbe;
747 x86_64_emit_membase((basereg),(disp),(dreg));
751 void x86_64_movswq_reg_reg(s8 reg, s8 dreg) {
752 x86_64_emit_rex(1,(dreg),0,(reg));
753 *(mcodeptr++) = 0x0f;
754 *(mcodeptr++) = 0xbf;
755 /* XXX: why do reg and dreg have to be exchanged */
756 x86_64_emit_reg((dreg),(reg));
760 void x86_64_movswq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
761 x86_64_emit_rex(1,(dreg),0,(basereg));
762 *(mcodeptr++) = 0x0f;
763 *(mcodeptr++) = 0xbf;
764 x86_64_emit_membase((basereg),(disp),(dreg));
768 void x86_64_movslq_reg_reg(s8 reg, s8 dreg) {
769 x86_64_emit_rex(1,(dreg),0,(reg));
770 *(mcodeptr++) = 0x63;
771 /* XXX: why do reg and dreg have to be exchanged */
772 x86_64_emit_reg((dreg),(reg));
776 void x86_64_movslq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
777 x86_64_emit_rex(1,(dreg),0,(basereg));
778 *(mcodeptr++) = 0x63;
779 x86_64_emit_membase((basereg),(disp),(dreg));
783 void x86_64_movzwq_reg_reg(s8 reg, s8 dreg) {
784 x86_64_emit_rex(1,(dreg),0,(reg));
785 *(mcodeptr++) = 0x0f;
786 *(mcodeptr++) = 0xb7;
787 /* XXX: why do reg and dreg have to be exchanged */
788 x86_64_emit_reg((dreg),(reg));
792 void x86_64_movzwq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
793 x86_64_emit_rex(1,(dreg),0,(basereg));
794 *(mcodeptr++) = 0x0f;
795 *(mcodeptr++) = 0xb7;
796 x86_64_emit_membase((basereg),(disp),(dreg));
800 void x86_64_movswq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
801 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
802 *(mcodeptr++) = 0x0f;
803 *(mcodeptr++) = 0xbf;
804 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
808 void x86_64_movsbq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
809 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
810 *(mcodeptr++) = 0x0f;
811 *(mcodeptr++) = 0xbe;
812 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
816 void x86_64_movzwq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
817 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
818 *(mcodeptr++) = 0x0f;
819 *(mcodeptr++) = 0xb7;
820 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
828 void x86_64_alu_reg_reg(s8 opc, s8 reg, s8 dreg) {
829 x86_64_emit_rex(1,(reg),0,(dreg));
830 *(mcodeptr++) = (((opc)) << 3) + 1;
831 x86_64_emit_reg((reg),(dreg));
835 void x86_64_alul_reg_reg(s8 opc, s8 reg, s8 dreg) {
836 x86_64_emit_rex(0,(reg),0,(dreg));
837 *(mcodeptr++) = (((opc)) << 3) + 1;
838 x86_64_emit_reg((reg),(dreg));
842 void x86_64_alu_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp) {
843 x86_64_emit_rex(1,(reg),0,(basereg));
844 *(mcodeptr++) = (((opc)) << 3) + 1;
845 x86_64_emit_membase((basereg),(disp),(reg));
849 void x86_64_alul_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp) {
850 x86_64_emit_rex(0,(reg),0,(basereg));
851 *(mcodeptr++) = (((opc)) << 3) + 1;
852 x86_64_emit_membase((basereg),(disp),(reg));
856 void x86_64_alu_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg) {
857 x86_64_emit_rex(1,(reg),0,(basereg));
858 *(mcodeptr++) = (((opc)) << 3) + 3;
859 x86_64_emit_membase((basereg),(disp),(reg));
863 void x86_64_alul_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg) {
864 x86_64_emit_rex(0,(reg),0,(basereg));
865 *(mcodeptr++) = (((opc)) << 3) + 3;
866 x86_64_emit_membase((basereg),(disp),(reg));
870 void x86_64_alu_imm_reg(s8 opc, s8 imm, s8 dreg) {
871 if (x86_64_is_imm8(imm)) {
872 x86_64_emit_rex(1,0,0,(dreg));
873 *(mcodeptr++) = 0x83;
874 x86_64_emit_reg((opc),(dreg));
875 x86_64_emit_imm8((imm));
877 x86_64_emit_rex(1,0,0,(dreg));
878 *(mcodeptr++) = 0x81;
879 x86_64_emit_reg((opc),(dreg));
880 x86_64_emit_imm32((imm));
885 void x86_64_alul_imm_reg(s8 opc, s8 imm, s8 dreg) {
886 if (x86_64_is_imm8(imm)) {
887 x86_64_emit_rex(0,0,0,(dreg));
888 *(mcodeptr++) = 0x83;
889 x86_64_emit_reg((opc),(dreg));
890 x86_64_emit_imm8((imm));
892 x86_64_emit_rex(0,0,0,(dreg));
893 *(mcodeptr++) = 0x81;
894 x86_64_emit_reg((opc),(dreg));
895 x86_64_emit_imm32((imm));
900 void x86_64_alu_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
901 if (x86_64_is_imm8(imm)) {
902 x86_64_emit_rex(1,(basereg),0,0);
903 *(mcodeptr++) = 0x83;
904 x86_64_emit_membase((basereg),(disp),(opc));
905 x86_64_emit_imm8((imm));
907 x86_64_emit_rex(1,(basereg),0,0);
908 *(mcodeptr++) = 0x81;
909 x86_64_emit_membase((basereg),(disp),(opc));
910 x86_64_emit_imm32((imm));
915 void x86_64_alul_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
916 if (x86_64_is_imm8(imm)) {
917 x86_64_emit_rex(0,(basereg),0,0);
918 *(mcodeptr++) = 0x83;
919 x86_64_emit_membase((basereg),(disp),(opc));
920 x86_64_emit_imm8((imm));
922 x86_64_emit_rex(0,(basereg),0,0);
923 *(mcodeptr++) = 0x81;
924 x86_64_emit_membase((basereg),(disp),(opc));
925 x86_64_emit_imm32((imm));
930 void x86_64_test_reg_reg(s8 reg, s8 dreg) {
931 x86_64_emit_rex(1,(reg),0,(dreg));
932 *(mcodeptr++) = 0x85;
933 x86_64_emit_reg((reg),(dreg));
937 void x86_64_testl_reg_reg(s8 reg, s8 dreg) {
938 x86_64_emit_rex(0,(reg),0,(dreg));
939 *(mcodeptr++) = 0x85;
940 x86_64_emit_reg((reg),(dreg));
944 void x86_64_test_imm_reg(s8 imm, s8 reg) {
945 *(mcodeptr++) = 0xf7;
946 x86_64_emit_reg(0,(reg));
947 x86_64_emit_imm32((imm));
951 void x86_64_testw_imm_reg(s8 imm, s8 reg) {
952 *(mcodeptr++) = 0x66;
953 *(mcodeptr++) = 0xf7;
954 x86_64_emit_reg(0,(reg));
955 x86_64_emit_imm16((imm));
959 void x86_64_testb_imm_reg(s8 imm, s8 reg) {
960 *(mcodeptr++) = 0xf6;
961 x86_64_emit_reg(0,(reg));
962 x86_64_emit_imm8((imm));
966 void x86_64_lea_membase_reg(s8 basereg, s8 disp, s8 reg) {
967 x86_64_emit_rex(1,(reg),0,(basereg));
968 *(mcodeptr++) = 0x8d;
969 x86_64_emit_membase((basereg),(disp),(reg));
973 void x86_64_leal_membase_reg(s8 basereg, s8 disp, s8 reg) {
974 x86_64_emit_rex(0,(reg),0,(basereg));
975 *(mcodeptr++) = 0x8d;
976 x86_64_emit_membase((basereg),(disp),(reg));
982 * inc, dec operations
984 void x86_64_inc_reg(s8 reg) {
985 x86_64_emit_rex(1,0,0,(reg));
986 *(mcodeptr++) = 0xff;
987 x86_64_emit_reg(0,(reg));
991 void x86_64_incl_reg(s8 reg) {
992 x86_64_emit_rex(0,0,0,(reg));
993 *(mcodeptr++) = 0xff;
994 x86_64_emit_reg(0,(reg));
998 void x86_64_inc_membase(s8 basereg, s8 disp) {
999 x86_64_emit_rex(1,(basereg),0,0);
1000 *(mcodeptr++) = 0xff;
1001 x86_64_emit_membase((basereg),(disp),0);
1005 void x86_64_incl_membase(s8 basereg, s8 disp) {
1006 x86_64_emit_rex(0,(basereg),0,0);
1007 *(mcodeptr++) = 0xff;
1008 x86_64_emit_membase((basereg),(disp),0);
1012 void x86_64_dec_reg(s8 reg) {
1013 x86_64_emit_rex(1,0,0,(reg));
1014 *(mcodeptr++) = 0xff;
1015 x86_64_emit_reg(1,(reg));
1019 void x86_64_decl_reg(s8 reg) {
1020 x86_64_emit_rex(0,0,0,(reg));
1021 *(mcodeptr++) = 0xff;
1022 x86_64_emit_reg(1,(reg));
1026 void x86_64_dec_membase(s8 basereg, s8 disp) {
1027 x86_64_emit_rex(1,(basereg),0,0);
1028 *(mcodeptr++) = 0xff;
1029 x86_64_emit_membase((basereg),(disp),1);
1033 void x86_64_decl_membase(s8 basereg, s8 disp) {
1034 x86_64_emit_rex(0,(basereg),0,0);
1035 *(mcodeptr++) = 0xff;
1036 x86_64_emit_membase((basereg),(disp),1);
1042 void x86_64_cltd() {
1043 *(mcodeptr++) = 0x99;
1047 void x86_64_cqto() {
1048 x86_64_emit_rex(1,0,0,0);
1049 *(mcodeptr++) = 0x99;
1054 void x86_64_imul_reg_reg(s8 reg, s8 dreg) {
1055 x86_64_emit_rex(1,(dreg),0,(reg));
1056 *(mcodeptr++) = 0x0f;
1057 *(mcodeptr++) = 0xaf;
1058 x86_64_emit_reg((dreg),(reg));
1062 void x86_64_imull_reg_reg(s8 reg, s8 dreg) {
1063 x86_64_emit_rex(0,(dreg),0,(reg));
1064 *(mcodeptr++) = 0x0f;
1065 *(mcodeptr++) = 0xaf;
1066 x86_64_emit_reg((dreg),(reg));
1070 void x86_64_imul_membase_reg(s8 basereg, s8 disp, s8 dreg) {
1071 x86_64_emit_rex(1,(dreg),0,(basereg));
1072 *(mcodeptr++) = 0x0f;
1073 *(mcodeptr++) = 0xaf;
1074 x86_64_emit_membase((basereg),(disp),(dreg));
1078 void x86_64_imull_membase_reg(s8 basereg, s8 disp, s8 dreg) {
1079 x86_64_emit_rex(0,(dreg),0,(basereg));
1080 *(mcodeptr++) = 0x0f;
1081 *(mcodeptr++) = 0xaf;
1082 x86_64_emit_membase((basereg),(disp),(dreg));
1086 void x86_64_imul_imm_reg(s8 imm, s8 dreg) {
1087 if (x86_64_is_imm8((imm))) {
1088 x86_64_emit_rex(1,0,0,(dreg));
1089 *(mcodeptr++) = 0x6b;
1090 x86_64_emit_reg(0,(dreg));
1091 x86_64_emit_imm8((imm));
1093 x86_64_emit_rex(1,0,0,(dreg));
1094 *(mcodeptr++) = 0x69;
1095 x86_64_emit_reg(0,(dreg));
1096 x86_64_emit_imm32((imm));
1101 void x86_64_imul_imm_reg_reg(s8 imm, s8 reg, s8 dreg) {
1102 if (x86_64_is_imm8((imm))) {
1103 x86_64_emit_rex(1,(dreg),0,(reg));
1104 *(mcodeptr++) = 0x6b;
1105 x86_64_emit_reg((dreg),(reg));
1106 x86_64_emit_imm8((imm));
1108 x86_64_emit_rex(1,(dreg),0,(reg));
1109 *(mcodeptr++) = 0x69;
1110 x86_64_emit_reg((dreg),(reg));
1111 x86_64_emit_imm32((imm));
1116 void x86_64_imull_imm_reg_reg(s8 imm, s8 reg, s8 dreg) {
1117 if (x86_64_is_imm8((imm))) {
1118 x86_64_emit_rex(0,(dreg),0,(reg));
1119 *(mcodeptr++) = 0x6b;
1120 x86_64_emit_reg((dreg),(reg));
1121 x86_64_emit_imm8((imm));
1123 x86_64_emit_rex(0,(dreg),0,(reg));
1124 *(mcodeptr++) = 0x69;
1125 x86_64_emit_reg((dreg),(reg));
1126 x86_64_emit_imm32((imm));
1131 void x86_64_imul_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg) {
1132 if (x86_64_is_imm8((imm))) {
1133 x86_64_emit_rex(1,(dreg),0,(basereg));
1134 *(mcodeptr++) = 0x6b;
1135 x86_64_emit_membase((basereg),(disp),(dreg));
1136 x86_64_emit_imm8((imm));
1138 x86_64_emit_rex(1,(dreg),0,(basereg));
1139 *(mcodeptr++) = 0x69;
1140 x86_64_emit_membase((basereg),(disp),(dreg));
1141 x86_64_emit_imm32((imm));
1146 void x86_64_imull_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg) {
1147 if (x86_64_is_imm8((imm))) {
1148 x86_64_emit_rex(0,(dreg),0,(basereg));
1149 *(mcodeptr++) = 0x6b;
1150 x86_64_emit_membase((basereg),(disp),(dreg));
1151 x86_64_emit_imm8((imm));
1153 x86_64_emit_rex(0,(dreg),0,(basereg));
1154 *(mcodeptr++) = 0x69;
1155 x86_64_emit_membase((basereg),(disp),(dreg));
1156 x86_64_emit_imm32((imm));
1161 void x86_64_idiv_reg(s8 reg) {
1162 x86_64_emit_rex(1,0,0,(reg));
1163 *(mcodeptr++) = 0xf7;
1164 x86_64_emit_reg(7,(reg));
1168 void x86_64_idivl_reg(s8 reg) {
1169 x86_64_emit_rex(0,0,0,(reg));
1170 *(mcodeptr++) = 0xf7;
1171 x86_64_emit_reg(7,(reg));
1177 *(mcodeptr++) = 0xc3;
1185 void x86_64_shift_reg(s8 opc, s8 reg) {
1186 x86_64_emit_rex(1,0,0,(reg));
1187 *(mcodeptr++) = 0xd3;
1188 x86_64_emit_reg((opc),(reg));
1192 void x86_64_shiftl_reg(s8 opc, s8 reg) {
1193 x86_64_emit_rex(0,0,0,(reg));
1194 *(mcodeptr++) = 0xd3;
1195 x86_64_emit_reg((opc),(reg));
1199 void x86_64_shift_membase(s8 opc, s8 basereg, s8 disp) {
1200 x86_64_emit_rex(1,0,0,(basereg));
1201 *(mcodeptr++) = 0xd3;
1202 x86_64_emit_membase((basereg),(disp),(opc));
1206 void x86_64_shiftl_membase(s8 opc, s8 basereg, s8 disp) {
1207 x86_64_emit_rex(0,0,0,(basereg));
1208 *(mcodeptr++) = 0xd3;
1209 x86_64_emit_membase((basereg),(disp),(opc));
1213 void x86_64_shift_imm_reg(s8 opc, s8 imm, s8 dreg) {
1215 x86_64_emit_rex(1,0,0,(dreg));
1216 *(mcodeptr++) = 0xd1;
1217 x86_64_emit_reg((opc),(dreg));
1219 x86_64_emit_rex(1,0,0,(dreg));
1220 *(mcodeptr++) = 0xc1;
1221 x86_64_emit_reg((opc),(dreg));
1222 x86_64_emit_imm8((imm));
1227 void x86_64_shiftl_imm_reg(s8 opc, s8 imm, s8 dreg) {
1229 x86_64_emit_rex(0,0,0,(dreg));
1230 *(mcodeptr++) = 0xd1;
1231 x86_64_emit_reg((opc),(dreg));
1233 x86_64_emit_rex(0,0,0,(dreg));
1234 *(mcodeptr++) = 0xc1;
1235 x86_64_emit_reg((opc),(dreg));
1236 x86_64_emit_imm8((imm));
1241 void x86_64_shift_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
1243 x86_64_emit_rex(1,0,0,(basereg));
1244 *(mcodeptr++) = 0xd1;
1245 x86_64_emit_membase((basereg),(disp),(opc));
1247 x86_64_emit_rex(1,0,0,(basereg));
1248 *(mcodeptr++) = 0xc1;
1249 x86_64_emit_membase((basereg),(disp),(opc));
1250 x86_64_emit_imm8((imm));
1255 void x86_64_shiftl_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
1257 x86_64_emit_rex(0,0,0,(basereg));
1258 *(mcodeptr++) = 0xd1;
1259 x86_64_emit_membase((basereg),(disp),(opc));
1261 x86_64_emit_rex(0,0,0,(basereg));
1262 *(mcodeptr++) = 0xc1;
1263 x86_64_emit_membase((basereg),(disp),(opc));
1264 x86_64_emit_imm8((imm));
1273 void x86_64_jmp_imm(s8 imm) {
1274 *(mcodeptr++) = 0xe9;
1275 x86_64_emit_imm32((imm));
1279 void x86_64_jmp_reg(s8 reg) {
1280 x86_64_emit_rex(0,0,0,(reg));
1281 *(mcodeptr++) = 0xff;
1282 x86_64_emit_reg(4,(reg));
1286 void x86_64_jcc(s8 opc, s8 imm) {
1287 *(mcodeptr++) = 0x0f;
1288 *(mcodeptr++) = (0x80 + (opc));
1289 x86_64_emit_imm32((imm));
1295 * conditional set and move operations
1298 /* we need the rex byte to get all low bytes */
1299 void x86_64_setcc_reg(s8 opc, s8 reg) {
1300 *(mcodeptr++) = (0x40 | (((reg) >> 3) & 0x01));
1301 *(mcodeptr++) = 0x0f;
1302 *(mcodeptr++) = (0x90 + (opc));
1303 x86_64_emit_reg(0,(reg));
1307 /* we need the rex byte to get all low bytes */
1308 void x86_64_setcc_membase(s8 opc, s8 basereg, s8 disp) {
1309 *(mcodeptr++) = (0x40 | (((basereg) >> 3) & 0x01));
1310 *(mcodeptr++) = 0x0f;
1311 *(mcodeptr++) = (0x90 + (opc));
1312 x86_64_emit_membase((basereg),(disp),0);
1316 void x86_64_cmovcc_reg_reg(s8 opc, s8 reg, s8 dreg) {
1317 x86_64_emit_rex(1,(dreg),0,(reg));
1318 *(mcodeptr++) = 0x0f;
1319 *(mcodeptr++) = (0x40 + (opc));
1320 x86_64_emit_reg((dreg),(reg));
1324 void x86_64_cmovccl_reg_reg(s8 opc, s8 reg, s8 dreg) {
1325 x86_64_emit_rex(0,(dreg),0,(reg));
1326 *(mcodeptr++) = 0x0f;
1327 *(mcodeptr++) = (0x40 + (opc));
1328 x86_64_emit_reg((dreg),(reg));
1333 void x86_64_neg_reg(s8 reg) {
1334 x86_64_emit_rex(1,0,0,(reg));
1335 *(mcodeptr++) = 0xf7;
1336 x86_64_emit_reg(3,(reg));
1340 void x86_64_negl_reg(s8 reg) {
1341 x86_64_emit_rex(0,0,0,(reg));
1342 *(mcodeptr++) = 0xf7;
1343 x86_64_emit_reg(3,(reg));
/* Emit NEG m64 (REX.W F7 /3) on [basereg+disp]. */
void x86_64_neg_membase(s8 basereg, s8 disp) {
	x86_64_emit_rex(1,0,0,(basereg));
	*(mcodeptr++) = 0xf7;
	x86_64_emit_membase((basereg),(disp),3);	/* /3 = NEG */
/* Emit NEG m32 (F7 /3) on [basereg+disp]. */
void x86_64_negl_membase(s8 basereg, s8 disp) {
	x86_64_emit_rex(0,0,0,(basereg));
	*(mcodeptr++) = 0xf7;
	x86_64_emit_membase((basereg),(disp),3);	/* /3 = NEG */
/* Emit PUSH imm32 (0x68); the immediate is sign-extended to 64 bits
   by the CPU when pushed. */
void x86_64_push_imm(s8 imm) {
	*(mcodeptr++) = 0x68;
	x86_64_emit_imm32((imm));
/* Emit POP r64 (0x58+rd); the low 3 register bits go into the opcode
   byte, the high bit (r8..r15) into the REX.B bit. */
void x86_64_pop_reg(s8 reg) {
	x86_64_emit_rex(0,0,0,(reg));
	*(mcodeptr++) = 0x58 + (0x07 & (reg));
/* Emit XCHG r64, r/m64 (REX.W 87 /r): swap `reg` and `dreg`. */
void x86_64_xchg_reg_reg(s8 reg, s8 dreg) {
	x86_64_emit_rex(1,(reg),0,(dreg));
	*(mcodeptr++) = 0x87;
	x86_64_emit_reg((reg),(dreg));
	*(mcodeptr++) = 0x90;		/* 0x90 = NOP */
/* Emit CALL r/m64 (FF /2): indirect call through `reg`.  rexw=1 forces
   a REX prefix; the W bit is redundant here (CALL defaults to 64-bit
   operand size) but harmless. */
void x86_64_call_reg(s8 reg) {
	x86_64_emit_rex(1,0,0,(reg));
	*(mcodeptr++) = 0xff;
	x86_64_emit_reg(2,(reg));	/* /2 = CALL extension */
/* Emit CALL rel32 (0xE8): near call with signed 32-bit displacement
   relative to the end of this instruction. */
void x86_64_call_imm(s8 imm) {
	*(mcodeptr++) = 0xe8;
	x86_64_emit_imm32((imm));
1405 * floating point instructions (SSE2)
/* Emit ADDSD xmm, xmm (F2 0F 58 /r): dreg += reg, scalar double.
   Destination `dreg` goes in the ModRM reg field. */
void x86_64_addsd_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf2;		/* scalar-double prefix */
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x58;
	x86_64_emit_reg((dreg),(reg));
/* Emit ADDSS xmm, xmm (F3 0F 58 /r): dreg += reg, scalar single. */
void x86_64_addss_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;		/* scalar-single prefix */
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x58;
	x86_64_emit_reg((dreg),(reg));
/* Emit CVTSI2SS xmm, r64 (F3 REX.W 0F 2A): convert 64-bit integer
   `reg` to scalar single in `dreg`. */
void x86_64_cvtsi2ssq_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(1,(dreg),0,(reg));	/* REX.W: 64-bit source */
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2a;
	x86_64_emit_reg((dreg),(reg));
/* Emit CVTSI2SS xmm, r32 (F3 0F 2A): convert 32-bit integer `reg`
   to scalar single in `dreg`. */
void x86_64_cvtsi2ss_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2a;
	x86_64_emit_reg((dreg),(reg));
/* Emit CVTSI2SD xmm, r64 (F2 REX.W 0F 2A): convert 64-bit integer
   `reg` to scalar double in `dreg`. */
void x86_64_cvtsi2sdq_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(1,(dreg),0,(reg));	/* REX.W: 64-bit source */
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2a;
	x86_64_emit_reg((dreg),(reg));
/* Emit CVTSI2SD xmm, r32 (F2 0F 2A): convert 32-bit integer `reg`
   to scalar double in `dreg`. */
void x86_64_cvtsi2sd_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2a;
	x86_64_emit_reg((dreg),(reg));
/* Emit CVTSS2SD xmm, xmm (F3 0F 5A): widen scalar single `reg` to
   scalar double in `dreg`. */
void x86_64_cvtss2sd_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x5a;
	x86_64_emit_reg((dreg),(reg));
/* Emit CVTSD2SS xmm, xmm (F2 0F 5A): narrow scalar double `reg` to
   scalar single in `dreg`. */
void x86_64_cvtsd2ss_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x5a;
	x86_64_emit_reg((dreg),(reg));
/* Emit CVTTSS2SI r64, xmm (F3 REX.W 0F 2C): truncating convert of
   scalar single `reg` to 64-bit integer `dreg`. */
void x86_64_cvttss2siq_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(1,(dreg),0,(reg));	/* REX.W: 64-bit result */
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2c;
	x86_64_emit_reg((dreg),(reg));
/* Emit CVTTSS2SI r32, xmm (F3 0F 2C): truncating convert of scalar
   single `reg` to 32-bit integer `dreg`. */
void x86_64_cvttss2si_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2c;
	x86_64_emit_reg((dreg),(reg));
/* Emit CVTTSD2SI r64, xmm (F2 REX.W 0F 2C): truncating convert of
   scalar double `reg` to 64-bit integer `dreg`. */
void x86_64_cvttsd2siq_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(1,(dreg),0,(reg));	/* REX.W: 64-bit result */
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2c;
	x86_64_emit_reg((dreg),(reg));
/* Emit CVTTSD2SI r32, xmm (F2 0F 2C): truncating convert of scalar
   double `reg` to 32-bit integer `dreg`. */
void x86_64_cvttsd2si_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2c;
	x86_64_emit_reg((dreg),(reg));
/* Emit DIVSS xmm, xmm (F3 0F 5E /r): dreg /= reg, scalar single. */
void x86_64_divss_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x5e;
	x86_64_emit_reg((dreg),(reg));
/* Emit DIVSD xmm, xmm (F2 0F 5E /r): dreg /= reg, scalar double. */
void x86_64_divsd_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x5e;
	x86_64_emit_reg((dreg),(reg));
/* Emit 66 REX.W 0F 6E /r: move the 64-bit GPR `reg` into XMM `freg`
   (the REX.W bit selects the quadword form of MOVD). */
void x86_64_movd_reg_freg(s8 reg, s8 freg) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(1,(freg),0,(reg));	/* REX.W: 64-bit transfer */
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x6e;
	x86_64_emit_reg((freg),(reg));
/* Emit 66 REX.W 0F 7E /r: move XMM `freg` into the 64-bit GPR `reg`
   (store form; `freg` sits in the ModRM reg field). */
void x86_64_movd_freg_reg(s8 freg, s8 reg) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(1,(freg),0,(reg));	/* REX.W: 64-bit transfer */
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x7e;
	x86_64_emit_reg((freg),(reg));
/* Emit 66 0F 7E /r (no REX.W): store the low 32 bits of XMM `reg`
   to [basereg+disp]. */
void x86_64_movd_reg_membase(s8 reg, s8 basereg, s8 disp) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(0,(reg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x7e;
	x86_64_emit_membase((basereg),(disp),(reg));
/* Emit 66 0F 7E /r: store the low 32 bits of XMM `reg` to the
   scaled-index address [basereg + indexreg*scale + disp]. */
void x86_64_movd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(0,(reg),(indexreg),(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x7e;
	x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* Emit 66 REX.W 0F 6E /r: load a quadword from [basereg+disp] into
   XMM `dreg`. */
void x86_64_movd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(1,(dreg),0,(basereg));	/* REX.W: 64-bit load */
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x6e;
	x86_64_emit_membase((basereg),(disp),(dreg));
/* Emit 66 0F 6E /r (no REX.W): load a doubleword from [basereg+disp]
   into XMM `dreg` (upper bits zeroed by the instruction). */
void x86_64_movdl_membase_reg(s8 basereg, s8 disp, s8 dreg) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(0,(dreg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x6e;
	x86_64_emit_membase((basereg),(disp),(dreg));
/* Emit 66 0F 6E /r: load a doubleword from the scaled-index address
   [basereg + indexreg*scale + disp] into XMM `dreg`. */
void x86_64_movd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x6e;
	x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
/* Emit MOVQ xmm, xmm (F3 0F 7E /r): copy the low quadword of `reg`
   into `dreg`.  Load form: destination `dreg` in the ModRM reg field. */
void x86_64_movq_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x7e;
	x86_64_emit_reg((dreg),(reg));
/* Emit MOVQ m64, xmm (66 0F D6 /r): store the low quadword of XMM
   `reg` to [basereg+disp]. */
void x86_64_movq_reg_membase(s8 reg, s8 basereg, s8 disp) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(0,(reg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0xd6;
	x86_64_emit_membase((basereg),(disp),(reg));
/* Emit MOVQ xmm, m64 (F3 0F 7E /r): load a quadword from
   [basereg+disp] into XMM `dreg`. */
void x86_64_movq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x7e;
	x86_64_emit_membase((basereg),(disp),(dreg));
1623 void x86_64_movss_reg_reg(s8 reg, s8 dreg) {
1624 *(mcodeptr++) = 0xf3;
1625 x86_64_emit_rex(0,(reg),0,(dreg));
1626 *(mcodeptr++) = 0x0f;
1627 *(mcodeptr++) = 0x10;
1628 x86_64_emit_reg((reg),(dreg));
1632 void x86_64_movsd_reg_reg(s8 reg, s8 dreg) {
1633 *(mcodeptr++) = 0xf2;
1634 x86_64_emit_rex(0,(reg),0,(dreg));
1635 *(mcodeptr++) = 0x0f;
1636 *(mcodeptr++) = 0x10;
1637 x86_64_emit_reg((reg),(dreg));
/* Emit MOVSS m32, xmm (F3 0F 11 /r): store scalar single `reg` to
   [basereg+disp]. */
void x86_64_movss_reg_membase(s8 reg, s8 basereg, s8 disp) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(reg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x11;		/* store form */
	x86_64_emit_membase((basereg),(disp),(reg));
/* Emit MOVSD m64, xmm (F2 0F 11 /r): store scalar double `reg` to
   [basereg+disp]. */
void x86_64_movsd_reg_membase(s8 reg, s8 basereg, s8 disp) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(reg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x11;		/* store form */
	x86_64_emit_membase((basereg),(disp),(reg));
/* Emit MOVSS xmm, m32 (F3 0F 10 /r): load scalar single from
   [basereg+disp] into `dreg`. */
void x86_64_movss_membase_reg(s8 basereg, s8 disp, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x10;		/* load form */
	x86_64_emit_membase((basereg),(disp),(dreg));
/* Emit MOVLPS xmm, m64 (0F 12 /r, no prefix): load 64 bits from
   [basereg+disp] into the low half of `dreg`, upper half unchanged. */
void x86_64_movlps_membase_reg(s8 basereg, s8 disp, s8 dreg) {
	x86_64_emit_rex(0,(dreg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x12;
	x86_64_emit_membase((basereg),(disp),(dreg));
/* Emit MOVSD xmm, m64 (F2 0F 10 /r): load scalar double from
   [basereg+disp] into `dreg`. */
void x86_64_movsd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(dreg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x10;		/* load form */
	x86_64_emit_membase((basereg),(disp),(dreg));
/* Emit MOVLPD xmm, m64 (66 0F 12 /r): load 64 bits from
   [basereg+disp] into the low half of `dreg`, upper half unchanged. */
void x86_64_movlpd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(0,(dreg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x12;
	x86_64_emit_membase((basereg),(disp),(dreg));
/* Emit MOVSS m32, xmm (F3 0F 11 /r): store scalar single `reg` to
   [basereg + indexreg*scale + disp]. */
void x86_64_movss_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(reg),(indexreg),(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x11;		/* store form */
	x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* Emit MOVSD m64, xmm (F2 0F 11 /r): store scalar double `reg` to
   [basereg + indexreg*scale + disp]. */
void x86_64_movsd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(reg),(indexreg),(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x11;		/* store form */
	x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* Emit MOVSS xmm, m32 (F3 0F 10 /r): load scalar single from
   [basereg + indexreg*scale + disp] into `dreg`. */
void x86_64_movss_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x10;		/* load form */
	x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
/* Emit MOVSD xmm, m64 (F2 0F 10 /r): load scalar double from
   [basereg + indexreg*scale + disp] into `dreg`. */
void x86_64_movsd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x10;		/* load form */
	x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
/* Emit MULSS xmm, xmm (F3 0F 59 /r): dreg *= reg, scalar single. */
void x86_64_mulss_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x59;
	x86_64_emit_reg((dreg),(reg));
/* Emit MULSD xmm, xmm (F2 0F 59 /r): dreg *= reg, scalar double. */
void x86_64_mulsd_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x59;
	x86_64_emit_reg((dreg),(reg));
/* Emit SUBSS xmm, xmm (F3 0F 5C /r): dreg -= reg, scalar single. */
void x86_64_subss_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x5c;
	x86_64_emit_reg((dreg),(reg));
/* Emit SUBSD xmm, xmm (F2 0F 5C /r): dreg -= reg, scalar double. */
void x86_64_subsd_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x5c;
	x86_64_emit_reg((dreg),(reg));
/* Emit UCOMISS xmm, xmm (0F 2E /r): unordered compare of scalar
   singles dreg vs reg, setting EFLAGS. */
void x86_64_ucomiss_reg_reg(s8 reg, s8 dreg) {
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2e;
	x86_64_emit_reg((dreg),(reg));
/* Emit UCOMISD xmm, xmm (66 0F 2E /r): unordered compare of scalar
   doubles dreg vs reg, setting EFLAGS. */
void x86_64_ucomisd_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2e;
	x86_64_emit_reg((dreg),(reg));
/* Emit XORPS xmm, xmm (0F 57 /r): dreg ^= reg (packed single bit
   pattern; used e.g. for sign flips / zeroing). */
void x86_64_xorps_reg_reg(s8 reg, s8 dreg) {
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x57;
	x86_64_emit_reg((dreg),(reg));
/* Emit XORPS xmm, m128 (0F 57 /r): dreg ^= [basereg+disp]. */
void x86_64_xorps_membase_reg(s8 basereg, s8 disp, s8 dreg) {
	x86_64_emit_rex(0,(dreg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x57;
	x86_64_emit_membase((basereg),(disp),(dreg));
/* Emit XORPD xmm, xmm (66 0F 57 /r): dreg ^= reg (packed double
   bit pattern). */
void x86_64_xorpd_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x57;
	x86_64_emit_reg((dreg),(reg));
/* Emit XORPD xmm, m128 (66 0F 57 /r): dreg ^= [basereg+disp]. */
void x86_64_xorpd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(0,(dreg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x57;
	x86_64_emit_membase((basereg),(disp),(dreg));
1818 * These are local overrides for various environment variables in Emacs.
1819 * Please do not remove this and leave it at the end of the file, where
1820 * Emacs will automagically detect them.
1821 * ---------------------------------------------------------------------
1824 * indent-tabs-mode: t