1 /* jit/x86_64/emitfuncs.c - x86_64 code emitter functions
3 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003
4 Institut f. Computersprachen, TU Wien
5 R. Grafl, A. Krall, C. Kruegel, C. Oates, R. Obermaisser, M. Probst,
6 S. Ring, E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich,
9 This file is part of CACAO.
11 This program is free software; you can redistribute it and/or
12 modify it under the terms of the GNU General Public License as
13 published by the Free Software Foundation; either version 2, or (at
14 your option) any later version.
16 This program is distributed in the hope that it will be useful, but
17 WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 General Public License for more details.
21 You should have received a copy of the GNU General Public License
22 along with this program; if not, write to the Free Software
23 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
26 Contact: cacao@complang.tuwien.ac.at
28 Authors: Christian Thalinger
30 $Id: emitfuncs.c 1284 2004-07-07 15:56:17Z twisti $
36 #include "jit/x86_64/emitfuncs.h"
37 #include "jit/x86_64/codegen.h"
38 #include "jit/x86_64/types.h"
41 /* code generation functions */
/* x86_64_emit_ialu ---------------------------------------------------------
   Emit a 32-bit integer ALU operation (alu_op selects add/sub/and/or/xor/
   cmp via the x86 /r opcode row) combining the two topmost JIT stack slots
   into iptr->dst.  Each operand may live in a register or in a stack slot
   (INMEMORY); stack slots are addressed as REG_SP + regoff * 8.  REG_ITMP1
   is used as scratch when both operands are in memory.
   NOTE(review): this extraction has interior lines elided (the s2
   declaration, several else branches and closing braces are missing) --
   verify the exact control flow against the upstream CACAO emitfuncs.c. */
43 void x86_64_emit_ialu(s4 alu_op, stackptr src, instruction *iptr)
45 s4 s1 = src->prev->regoff;
47 s4 d = iptr->dst->regoff;
/* destination lives in a stack slot */
49 if (iptr->dst->flags & INMEMORY) {
50 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* both sources in memory: route through scratch REG_ITMP1 */
52 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
53 x86_64_alul_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
56 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
57 x86_64_alul_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
60 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
61 x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, REG_ITMP1);
62 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
/* second operand in memory, first in a register */
65 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
67 x86_64_alul_reg_membase(alu_op, s1, REG_SP, d * 8);
70 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
71 x86_64_alul_reg_reg(alu_op, s1, REG_ITMP1);
72 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
/* first operand in memory, second in a register */
75 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
77 x86_64_alul_reg_membase(alu_op, s2, REG_SP, d * 8);
80 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
81 x86_64_alul_reg_reg(alu_op, s2, REG_ITMP1);
82 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
/* both sources in registers, destination in memory */
86 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
87 x86_64_alul_reg_membase(alu_op, s2, REG_SP, d * 8);
/* destination is a register */
91 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
92 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
93 x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, d);
95 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
97 x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, d);
99 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
101 x86_64_alul_membase_reg(alu_op, REG_SP, s1 * 8, d);
/* all-register case */
105 x86_64_alul_reg_reg(alu_op, s1, d);
109 x86_64_alul_reg_reg(alu_op, s2, d);
/* x86_64_emit_lalu ---------------------------------------------------------
   64-bit counterpart of x86_64_emit_ialu: emits a long (quadword) ALU
   operation on the two topmost stack slots, using the non-"l" encoder
   variants (REX.W-prefixed).  Operand placement logic mirrors emit_ialu.
   NOTE(review): interior lines are elided in this extraction (s2
   declaration, else branches, closing braces missing) -- verify against
   upstream CACAO emitfuncs.c. */
117 void x86_64_emit_lalu(s4 alu_op, stackptr src, instruction *iptr)
119 s4 s1 = src->prev->regoff;
121 s4 d = iptr->dst->regoff;
123 if (iptr->dst->flags & INMEMORY) {
124 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
126 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
127 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
/* source slot aliases the destination slot */
129 } else if (s1 == d) {
130 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
131 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
134 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
135 x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, REG_ITMP1);
136 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
139 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
141 x86_64_alu_reg_membase(alu_op, s1, REG_SP, d * 8);
144 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
145 x86_64_alu_reg_reg(alu_op, s1, REG_ITMP1);
146 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
149 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
151 x86_64_alu_reg_membase(alu_op, s2, REG_SP, d * 8);
154 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
155 x86_64_alu_reg_reg(alu_op, s2, REG_ITMP1);
156 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
/* both sources in registers, destination in memory */
160 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
161 x86_64_alu_reg_membase(alu_op, s2, REG_SP, d * 8);
/* destination is a register */
165 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
166 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
167 x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, d);
169 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
171 x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, d);
173 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
175 x86_64_alu_membase_reg(alu_op, REG_SP, s1 * 8, d);
179 x86_64_alu_reg_reg(alu_op, s1, d);
183 x86_64_alu_reg_reg(alu_op, s2, d);
/* x86_64_emit_ialuconst ----------------------------------------------------
   Emit a 32-bit ALU operation between the top stack slot and the 32-bit
   constant iptr->val.i, honoring in-memory vs. in-register placement of
   source and destination.
   NOTE(review): interior lines elided in this extraction (s1 declaration,
   else branches, braces missing) -- verify against upstream. */
191 void x86_64_emit_ialuconst(s4 alu_op, stackptr src, instruction *iptr)
194 s4 d = iptr->dst->regoff;
196 if (iptr->dst->flags & INMEMORY) {
197 if (src->flags & INMEMORY) {
/* operate directly on the stack slot when src slot == dst slot */
199 x86_64_alul_imm_membase(alu_op, iptr->val.i, REG_SP, d * 8);
202 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
203 x86_64_alul_imm_reg(alu_op, iptr->val.i, REG_ITMP1);
204 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
208 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
209 x86_64_alul_imm_membase(alu_op, iptr->val.i, REG_SP, d * 8);
213 if (src->flags & INMEMORY) {
214 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
215 x86_64_alul_imm_reg(alu_op, iptr->val.i, d);
219 x86_64_alul_imm_reg(alu_op, iptr->val.i, d);
/* x86_64_emit_laluconst ----------------------------------------------------
   Emit a 64-bit ALU operation between the top stack slot and the long
   constant iptr->val.l.  Since x86-64 ALU immediates are at most 32 bits,
   constants that do not fit (checked with x86_64_is_imm32) are first
   materialized into a scratch register via a 64-bit mov.
   NOTE(review): interior lines elided in this extraction -- verify against
   upstream. */
226 void x86_64_emit_laluconst(s4 alu_op, stackptr src, instruction *iptr)
229 s4 d = iptr->dst->regoff;
231 if (iptr->dst->flags & INMEMORY) {
232 if (src->flags & INMEMORY) {
234 if (x86_64_is_imm32(iptr->val.l)) {
235 x86_64_alu_imm_membase(alu_op, iptr->val.l, REG_SP, d * 8);
/* constant wider than 32 bits: load it into REG_ITMP1 first */
238 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
239 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
243 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
245 if (x86_64_is_imm32(iptr->val.l)) {
246 x86_64_alu_imm_reg(alu_op, iptr->val.l, REG_ITMP1);
249 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP2);
250 x86_64_alu_reg_reg(alu_op, REG_ITMP2, REG_ITMP1);
252 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
256 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
258 if (x86_64_is_imm32(iptr->val.l)) {
259 x86_64_alu_imm_membase(alu_op, iptr->val.l, REG_SP, d * 8);
262 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
263 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
/* destination is a register */
268 if (src->flags & INMEMORY) {
269 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
275 if (x86_64_is_imm32(iptr->val.l)) {
276 x86_64_alu_imm_reg(alu_op, iptr->val.l, d);
279 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
280 x86_64_alu_reg_reg(alu_op, REG_ITMP1, d);
/* x86_64_emit_ishift -------------------------------------------------------
   Emit a 32-bit variable shift (shift_op selects shl/sar/shr).  x86 variable
   shifts take their count in CL, so RCX is saved to REG_ITMP1 up front and
   restored afterwards; the shift count (second operand) is moved into RCX.
   NOTE(review): interior lines elided in this extraction (s2 declaration,
   else branches, braces, some RCX special-casing missing) -- verify against
   upstream. */
287 void x86_64_emit_ishift(s4 shift_op, stackptr src, instruction *iptr)
289 s4 s1 = src->prev->regoff;
291 s4 d = iptr->dst->regoff;
293 M_INTMOVE(RCX, REG_ITMP1); /* save RCX */
294 if (iptr->dst->flags & INMEMORY) {
295 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
297 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
298 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
301 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
302 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
303 x86_64_shiftl_reg(shift_op, REG_ITMP2);
304 x86_64_movl_reg_membase(REG_ITMP2, REG_SP, d * 8);
307 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
308 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
309 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
310 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
312 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
315 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
319 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
320 x86_64_shiftl_reg(shift_op, REG_ITMP2);
321 x86_64_movl_reg_membase(REG_ITMP2, REG_SP, d * 8);
326 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
327 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
329 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* destination is a register */
336 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
337 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
338 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
339 x86_64_shiftl_reg(shift_op, d);
341 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
342 M_INTMOVE(s1, d); /* maybe src is RCX */
343 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
344 x86_64_shiftl_reg(shift_op, d);
346 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
348 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
349 x86_64_shiftl_reg(shift_op, d);
360 x86_64_shiftl_reg(shift_op, d);
364 M_INTMOVE(REG_ITMP3, RCX);
367 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* x86_64_emit_lshift -------------------------------------------------------
   64-bit counterpart of x86_64_emit_ishift: variable quadword shift with
   the count in RCX (saved/restored via REG_ITMP1), using the REX.W
   (non-"l") shift encoders.
   NOTE(review): interior lines elided in this extraction -- verify against
   upstream. */
374 void x86_64_emit_lshift(s4 shift_op, stackptr src, instruction *iptr)
376 s4 s1 = src->prev->regoff;
378 s4 d = iptr->dst->regoff;
380 M_INTMOVE(RCX, REG_ITMP1); /* save RCX */
381 if (iptr->dst->flags & INMEMORY) {
382 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
384 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
385 x86_64_shift_membase(shift_op, REG_SP, d * 8);
388 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
389 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
390 x86_64_shift_reg(shift_op, REG_ITMP2);
391 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, d * 8);
394 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
395 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
396 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
397 x86_64_shift_membase(shift_op, REG_SP, d * 8);
399 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
402 x86_64_shift_membase(shift_op, REG_SP, d * 8);
406 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
407 x86_64_shift_reg(shift_op, REG_ITMP2);
408 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, d * 8);
413 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
414 x86_64_shift_membase(shift_op, REG_SP, d * 8);
416 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* destination is a register */
423 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
424 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
425 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
426 x86_64_shift_reg(shift_op, d);
428 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
429 M_INTMOVE(s1, d); /* maybe src is RCX */
430 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
431 x86_64_shift_reg(shift_op, d);
433 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
435 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
436 x86_64_shift_reg(shift_op, d);
446 x86_64_shift_reg(shift_op, d);
450 M_INTMOVE(REG_ITMP3, RCX);
453 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* x86_64_emit_ishiftconst / x86_64_emit_lshiftconst ------------------------
   Emit a shift by the immediate count iptr->val.i; 32-bit ("l" encoders)
   and 64-bit variants respectively.  No RCX juggling is needed since the
   count is an immediate.
   NOTE(review): interior lines elided in this extraction (s1 declaration,
   braces, else branches missing) -- verify against upstream. */
460 void x86_64_emit_ishiftconst(s4 shift_op, stackptr src, instruction *iptr)
463 s4 d = iptr->dst->regoff;
465 if ((src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
467 x86_64_shiftl_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
470 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
471 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, REG_ITMP1);
472 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
475 } else if ((src->flags & INMEMORY) && !(iptr->dst->flags & INMEMORY)) {
476 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
477 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, d);
479 } else if (!(src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
480 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
481 x86_64_shiftl_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
485 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, d);
/* 64-bit variant: same placement logic, REX.W encoders */
491 void x86_64_emit_lshiftconst(s4 shift_op, stackptr src, instruction *iptr)
494 s4 d = iptr->dst->regoff;
496 if ((src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
498 x86_64_shift_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
501 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
502 x86_64_shift_imm_reg(shift_op, iptr->val.i, REG_ITMP1);
503 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
506 } else if ((src->flags & INMEMORY) && !(iptr->dst->flags & INMEMORY)) {
507 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
508 x86_64_shift_imm_reg(shift_op, iptr->val.i, d);
510 } else if (!(src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
511 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
512 x86_64_shift_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
516 x86_64_shift_imm_reg(shift_op, iptr->val.i, d);
/* Conditional-branch emitters ----------------------------------------------
   Each emits a compare followed by a jcc with a 0 placeholder displacement;
   codegen_addreference() records the forward reference to the target basic
   block (BlockPtrOfPC(iptr->op1)) so it can be patched later.
   NOTE(review): interior lines elided in this extraction (some s1/s2
   declarations, else keywords and braces missing) -- verify against
   upstream. */
/* 32-bit compare of the top stack slot against immediate iptr->val.i */
522 void x86_64_emit_ifcc(s4 if_op, stackptr src, instruction *iptr)
524 if (src->flags & INMEMORY) {
525 x86_64_alul_imm_membase(X86_64_CMP, iptr->val.i, REG_SP, src->regoff * 8);
528 x86_64_alul_imm_reg(X86_64_CMP, iptr->val.i, src->regoff);
530 x86_64_jcc(if_op, 0);
531 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
/* 64-bit compare against long constant iptr->val.l; constants wider than
   32 bits are materialized into REG_ITMP1 first */
536 void x86_64_emit_if_lcc(s4 if_op, stackptr src, instruction *iptr)
540 if (src->flags & INMEMORY) {
541 if (x86_64_is_imm32(iptr->val.l)) {
542 x86_64_alu_imm_membase(X86_64_CMP, iptr->val.l, REG_SP, s1 * 8);
545 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
546 x86_64_alu_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
550 if (x86_64_is_imm32(iptr->val.l)) {
551 x86_64_alu_imm_reg(X86_64_CMP, iptr->val.l, s1);
554 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
555 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP1, s1);
558 x86_64_jcc(if_op, 0);
559 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
/* 32-bit compare of the two topmost stack slots */
564 void x86_64_emit_if_icmpcc(s4 if_op, stackptr src, instruction *iptr)
566 s4 s1 = src->prev->regoff;
569 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
570 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
571 x86_64_alul_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
573 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
574 x86_64_alul_membase_reg(X86_64_CMP, REG_SP, s2 * 8, s1);
576 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
577 x86_64_alul_reg_membase(X86_64_CMP, s2, REG_SP, s1 * 8);
580 x86_64_alul_reg_reg(X86_64_CMP, s2, s1);
582 x86_64_jcc(if_op, 0);
583 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
/* 64-bit compare of the two topmost stack slots */
588 void x86_64_emit_if_lcmpcc(s4 if_op, stackptr src, instruction *iptr)
590 s4 s1 = src->prev->regoff;
593 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
594 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
595 x86_64_alu_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
597 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
598 x86_64_alu_membase_reg(X86_64_CMP, REG_SP, s2 * 8, s1);
600 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
601 x86_64_alu_reg_membase(X86_64_CMP, s2, REG_SP, s1 * 8);
604 x86_64_alu_reg_reg(X86_64_CMP, s2, s1);
606 x86_64_jcc(if_op, 0);
607 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
/* MOV encoders -------------------------------------------------------------
   Each function writes raw bytes at mcodeptr: an optional REX prefix
   (x86_64_emit_rex(W, reg-ext, index-ext, rm-ext); W=1 selects 64-bit
   operand size), the opcode byte(s), then ModRM/SIB/displacement/immediate
   via the emit helpers.  "movl" variants are the 32-bit forms (no REX.W).
   NOTE(review): closing braces are elided by this extraction on every
   function below -- code lines are otherwise intact. */
/* MOV r/m64, r64 (opcode 0x89) */
614 void x86_64_mov_reg_reg(s8 reg, s8 dreg) {
615 x86_64_emit_rex(1,(reg),0,(dreg));
616 *(mcodeptr++) = 0x89;
617 x86_64_emit_reg((reg),(dreg));
/* MOV r64, imm64 (0xb8+rd with REX.W, 8-byte immediate) */
621 void x86_64_mov_imm_reg(s8 imm, s8 reg) {
622 x86_64_emit_rex(1,0,0,(reg));
623 *(mcodeptr++) = 0xb8 + ((reg) & 0x07);
624 x86_64_emit_imm64((imm));
/* MOV r32, imm32 */
628 void x86_64_movl_imm_reg(s8 imm, s8 reg) {
629 x86_64_emit_rex(0,0,0,(reg));
630 *(mcodeptr++) = 0xb8 + ((reg) & 0x07);
631 x86_64_emit_imm32((imm));
/* MOV r64, [basereg + disp] (opcode 0x8b) */
635 void x86_64_mov_membase_reg(s8 basereg, s8 disp, s8 reg) {
636 x86_64_emit_rex(1,(reg),0,(basereg));
637 *(mcodeptr++) = 0x8b;
638 x86_64_emit_membase((basereg),(disp),(reg));
/* MOV r32, [basereg + disp] */
642 void x86_64_movl_membase_reg(s8 basereg, s8 disp, s8 reg) {
643 x86_64_emit_rex(0,(reg),0,(basereg));
644 *(mcodeptr++) = 0x8b;
645 x86_64_emit_membase((basereg),(disp),(reg));
650 * this one is for INVOKEVIRTUAL/INVOKEINTERFACE to have a
651 * constant membase immediate length of 32bit
/* like mov_membase_reg but always encodes a 32-bit displacement
   (ModRM mod=2) so the instruction length is fixed for later patching */
653 void x86_64_mov_membase32_reg(s8 basereg, s8 disp, s8 reg) {
654 x86_64_emit_rex(1,(reg),0,(basereg));
655 *(mcodeptr++) = 0x8b;
656 x86_64_address_byte(2, (reg), (basereg));
657 x86_64_emit_imm32((disp));
/* MOV [basereg + disp], r64 */
661 void x86_64_mov_reg_membase(s8 reg, s8 basereg, s8 disp) {
662 x86_64_emit_rex(1,(reg),0,(basereg));
663 *(mcodeptr++) = 0x89;
664 x86_64_emit_membase((basereg),(disp),(reg));
/* MOV [basereg + disp], r32 */
668 void x86_64_movl_reg_membase(s8 reg, s8 basereg, s8 disp) {
669 x86_64_emit_rex(0,(reg),0,(basereg));
670 *(mcodeptr++) = 0x89;
671 x86_64_emit_membase((basereg),(disp),(reg));
/* MOV r64, [basereg + indexreg*scale + disp] */
675 void x86_64_mov_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
676 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
677 *(mcodeptr++) = 0x8b;
678 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* MOV r32, [basereg + indexreg*scale + disp] */
682 void x86_64_movl_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
683 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
684 *(mcodeptr++) = 0x8b;
685 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* MOV [memindex], r64 */
689 void x86_64_mov_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
690 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
691 *(mcodeptr++) = 0x89;
692 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* MOV [memindex], r32 */
696 void x86_64_movl_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
697 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
698 *(mcodeptr++) = 0x89;
699 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* MOV [memindex], r16 (0x66 operand-size prefix) */
703 void x86_64_movw_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
704 *(mcodeptr++) = 0x66;
705 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
706 *(mcodeptr++) = 0x89;
707 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* MOV [memindex], r8 (opcode 0x88) */
711 void x86_64_movb_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
712 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
713 *(mcodeptr++) = 0x88;
714 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* MOV [basereg + disp], imm32 sign-extended to 64 bits (0xc7 /0) */
718 void x86_64_mov_imm_membase(s8 imm, s8 basereg, s8 disp) {
719 x86_64_emit_rex(1,0,0,(basereg));
720 *(mcodeptr++) = 0xc7;
721 x86_64_emit_membase((basereg),(disp),0);
722 x86_64_emit_imm32((imm));
/* MOV [basereg + disp], imm32 (32-bit store) */
726 void x86_64_movl_imm_membase(s8 imm, s8 basereg, s8 disp) {
727 x86_64_emit_rex(0,0,0,(basereg));
728 *(mcodeptr++) = 0xc7;
729 x86_64_emit_membase((basereg),(disp),0);
730 x86_64_emit_imm32((imm));
/* Sign-/zero-extending MOV encoders ----------------------------------------
   movsbq/movswq/movslq sign-extend byte/word/dword to quadword; movzwq
   zero-extends a word.  For these two-operand 0x0f-prefixed forms the
   destination goes in the ModRM reg field, hence dreg/reg appear swapped
   relative to the plain MOV encoders (see the original XXX comments).
   NOTE(review): closing braces elided by this extraction. */
/* MOVSX r64, r/m8 (0x0f 0xbe) */
734 void x86_64_movsbq_reg_reg(s8 reg, s8 dreg) {
735 x86_64_emit_rex(1,(dreg),0,(reg));
736 *(mcodeptr++) = 0x0f;
737 *(mcodeptr++) = 0xbe;
738 /* XXX: why do reg and dreg have to be exchanged */
739 x86_64_emit_reg((dreg),(reg));
/* MOVSX r64, byte [basereg + disp] */
743 void x86_64_movsbq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
744 x86_64_emit_rex(1,(dreg),0,(basereg));
745 *(mcodeptr++) = 0x0f;
746 *(mcodeptr++) = 0xbe;
747 x86_64_emit_membase((basereg),(disp),(dreg));
/* MOVSX r64, r/m16 (0x0f 0xbf) */
751 void x86_64_movswq_reg_reg(s8 reg, s8 dreg) {
752 x86_64_emit_rex(1,(dreg),0,(reg));
753 *(mcodeptr++) = 0x0f;
754 *(mcodeptr++) = 0xbf;
755 /* XXX: why do reg and dreg have to be exchanged */
756 x86_64_emit_reg((dreg),(reg));
/* MOVSX r64, word [basereg + disp] */
760 void x86_64_movswq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
761 x86_64_emit_rex(1,(dreg),0,(basereg));
762 *(mcodeptr++) = 0x0f;
763 *(mcodeptr++) = 0xbf;
764 x86_64_emit_membase((basereg),(disp),(dreg));
/* MOVSXD r64, r/m32 (opcode 0x63) */
768 void x86_64_movslq_reg_reg(s8 reg, s8 dreg) {
769 x86_64_emit_rex(1,(dreg),0,(reg));
770 *(mcodeptr++) = 0x63;
771 /* XXX: why do reg and dreg have to be exchanged */
772 x86_64_emit_reg((dreg),(reg));
/* MOVSXD r64, dword [basereg + disp] */
776 void x86_64_movslq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
777 x86_64_emit_rex(1,(dreg),0,(basereg));
778 *(mcodeptr++) = 0x63;
779 x86_64_emit_membase((basereg),(disp),(dreg));
/* MOVZX r64, r/m16 (0x0f 0xb7) */
783 void x86_64_movzwq_reg_reg(s8 reg, s8 dreg) {
784 x86_64_emit_rex(1,(dreg),0,(reg));
785 *(mcodeptr++) = 0x0f;
786 *(mcodeptr++) = 0xb7;
787 /* XXX: why do reg and dreg have to be exchanged */
788 x86_64_emit_reg((dreg),(reg));
/* MOVZX r64, word [basereg + disp] */
792 void x86_64_movzwq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
793 x86_64_emit_rex(1,(dreg),0,(basereg));
794 *(mcodeptr++) = 0x0f;
795 *(mcodeptr++) = 0xb7;
796 x86_64_emit_membase((basereg),(disp),(dreg));
/* MOVSX r64, word [memindex] */
800 void x86_64_movswq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
801 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
802 *(mcodeptr++) = 0x0f;
803 *(mcodeptr++) = 0xbf;
804 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* MOVSX r64, byte [memindex] */
808 void x86_64_movsbq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
809 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
810 *(mcodeptr++) = 0x0f;
811 *(mcodeptr++) = 0xbe;
812 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* MOVZX r64, word [memindex] */
816 void x86_64_movzwq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
817 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
818 *(mcodeptr++) = 0x0f;
819 *(mcodeptr++) = 0xb7;
820 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* ALU encoders -------------------------------------------------------------
   opc is the ALU row index (add/or/adc/sbb/and/sub/xor/cmp); the opcode
   byte is derived as (opc << 3) + 1 for "r/m, r" forms and (opc << 3) + 3
   for "r, r/m" forms, matching the x86 ALU opcode layout.  Immediate forms
   use 0x83 (sign-extended imm8) when the constant fits, else 0x81 (imm32),
   with opc in the ModRM reg field.  "alul" variants are 32-bit (no REX.W).
   NOTE(review): closing braces and some else keywords elided by this
   extraction. */
/* ALU r/m64, r64 */
828 void x86_64_alu_reg_reg(s8 opc, s8 reg, s8 dreg) {
829 x86_64_emit_rex(1,(reg),0,(dreg));
830 *(mcodeptr++) = (((opc)) << 3) + 1;
831 x86_64_emit_reg((reg),(dreg));
/* ALU r/m32, r32 */
835 void x86_64_alul_reg_reg(s8 opc, s8 reg, s8 dreg) {
836 x86_64_emit_rex(0,(reg),0,(dreg));
837 *(mcodeptr++) = (((opc)) << 3) + 1;
838 x86_64_emit_reg((reg),(dreg));
/* ALU [basereg + disp], r64 */
842 void x86_64_alu_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp) {
843 x86_64_emit_rex(1,(reg),0,(basereg));
844 *(mcodeptr++) = (((opc)) << 3) + 1;
845 x86_64_emit_membase((basereg),(disp),(reg));
/* ALU [basereg + disp], r32 */
849 void x86_64_alul_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp) {
850 x86_64_emit_rex(0,(reg),0,(basereg));
851 *(mcodeptr++) = (((opc)) << 3) + 1;
852 x86_64_emit_membase((basereg),(disp),(reg));
/* ALU r64, [basereg + disp] */
856 void x86_64_alu_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg) {
857 x86_64_emit_rex(1,(reg),0,(basereg));
858 *(mcodeptr++) = (((opc)) << 3) + 3;
859 x86_64_emit_membase((basereg),(disp),(reg));
/* ALU r32, [basereg + disp] */
863 void x86_64_alul_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg) {
864 x86_64_emit_rex(0,(reg),0,(basereg));
865 *(mcodeptr++) = (((opc)) << 3) + 3;
866 x86_64_emit_membase((basereg),(disp),(reg));
/* ALU r64, imm (0x83/imm8 or 0x81/imm32) */
870 void x86_64_alu_imm_reg(s8 opc, s8 imm, s8 dreg) {
871 if (x86_64_is_imm8(imm)) {
872 x86_64_emit_rex(1,0,0,(dreg));
873 *(mcodeptr++) = 0x83;
874 x86_64_emit_reg((opc),(dreg));
875 x86_64_emit_imm8((imm));
877 x86_64_emit_rex(1,0,0,(dreg));
878 *(mcodeptr++) = 0x81;
879 x86_64_emit_reg((opc),(dreg));
880 x86_64_emit_imm32((imm));
/* ALU r32, imm */
885 void x86_64_alul_imm_reg(s8 opc, s8 imm, s8 dreg) {
886 if (x86_64_is_imm8(imm)) {
887 x86_64_emit_rex(0,0,0,(dreg));
888 *(mcodeptr++) = 0x83;
889 x86_64_emit_reg((opc),(dreg));
890 x86_64_emit_imm8((imm));
892 x86_64_emit_rex(0,0,0,(dreg));
893 *(mcodeptr++) = 0x81;
894 x86_64_emit_reg((opc),(dreg));
895 x86_64_emit_imm32((imm));
/* ALU qword [basereg + disp], imm */
900 void x86_64_alu_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
901 if (x86_64_is_imm8(imm)) {
902 x86_64_emit_rex(1,(basereg),0,0);
903 *(mcodeptr++) = 0x83;
904 x86_64_emit_membase((basereg),(disp),(opc));
905 x86_64_emit_imm8((imm));
907 x86_64_emit_rex(1,(basereg),0,0);
908 *(mcodeptr++) = 0x81;
909 x86_64_emit_membase((basereg),(disp),(opc));
910 x86_64_emit_imm32((imm));
/* ALU dword [basereg + disp], imm */
915 void x86_64_alul_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
916 if (x86_64_is_imm8(imm)) {
917 x86_64_emit_rex(0,(basereg),0,0);
918 *(mcodeptr++) = 0x83;
919 x86_64_emit_membase((basereg),(disp),(opc));
920 x86_64_emit_imm8((imm));
922 x86_64_emit_rex(0,(basereg),0,0);
923 *(mcodeptr++) = 0x81;
924 x86_64_emit_membase((basereg),(disp),(opc));
925 x86_64_emit_imm32((imm));
/* TEST and LEA encoders ----------------------------------------------------
   TEST sets flags from an AND without storing the result; LEA computes an
   effective address into a register.
   NOTE(review): closing braces elided by this extraction; the imm forms of
   TEST below emit no REX prefix -- presumably callers only pass low
   registers here; verify against upstream. */
/* TEST r/m64, r64 (0x85) */
930 void x86_64_test_reg_reg(s8 reg, s8 dreg) {
931 x86_64_emit_rex(1,(reg),0,(dreg));
932 *(mcodeptr++) = 0x85;
933 x86_64_emit_reg((reg),(dreg));
/* TEST r/m32, r32 */
937 void x86_64_testl_reg_reg(s8 reg, s8 dreg) {
938 x86_64_emit_rex(0,(reg),0,(dreg));
939 *(mcodeptr++) = 0x85;
940 x86_64_emit_reg((reg),(dreg));
/* TEST r/m32, imm32 (0xf7 /0) */
944 void x86_64_test_imm_reg(s8 imm, s8 reg) {
945 *(mcodeptr++) = 0xf7;
946 x86_64_emit_reg(0,(reg));
947 x86_64_emit_imm32((imm));
/* TEST r/m16, imm16 (0x66 prefix) */
951 void x86_64_testw_imm_reg(s8 imm, s8 reg) {
952 *(mcodeptr++) = 0x66;
953 *(mcodeptr++) = 0xf7;
954 x86_64_emit_reg(0,(reg));
955 x86_64_emit_imm16((imm));
/* TEST r/m8, imm8 (0xf6 /0) */
959 void x86_64_testb_imm_reg(s8 imm, s8 reg) {
960 *(mcodeptr++) = 0xf6;
961 x86_64_emit_reg(0,(reg));
962 x86_64_emit_imm8((imm));
/* LEA r64, [basereg + disp] (0x8d) */
966 void x86_64_lea_membase_reg(s8 basereg, s8 disp, s8 reg) {
967 x86_64_emit_rex(1,(reg),0,(basereg));
968 *(mcodeptr++) = 0x8d;
969 x86_64_emit_membase((basereg),(disp),(reg));
/* LEA r32, [basereg + disp] */
973 void x86_64_leal_membase_reg(s8 basereg, s8 disp, s8 reg) {
974 x86_64_emit_rex(0,(reg),0,(basereg));
975 *(mcodeptr++) = 0x8d;
976 x86_64_emit_membase((basereg),(disp),(reg));
982 * inc, dec operations
/* INC/DEC encoders: opcode 0xff/0xfe family with /0 (inc) or /1 (dec) in
   the ModRM reg field; 64-bit and 32-bit, register and membase forms.
   NOTE(review): closing braces elided by this extraction. */
/* INC r64 (0xff /0) */
984 void x86_64_inc_reg(s8 reg) {
985 x86_64_emit_rex(1,0,0,(reg));
986 *(mcodeptr++) = 0xff;
987 x86_64_emit_reg(0,(reg));
/* INC r32 */
991 void x86_64_incl_reg(s8 reg) {
992 x86_64_emit_rex(0,0,0,(reg));
993 *(mcodeptr++) = 0xff;
994 x86_64_emit_reg(0,(reg));
/* INC qword [basereg + disp] */
998 void x86_64_inc_membase(s8 basereg, s8 disp) {
999 x86_64_emit_rex(1,(basereg),0,0);
1000 *(mcodeptr++) = 0xff;
1001 x86_64_emit_membase((basereg),(disp),0);
/* INC dword [basereg + disp] */
1005 void x86_64_incl_membase(s8 basereg, s8 disp) {
1006 x86_64_emit_rex(0,(basereg),0,0);
1007 *(mcodeptr++) = 0xff;
1008 x86_64_emit_membase((basereg),(disp),0);
/* DEC r64 (0xff /1) */
1012 void x86_64_dec_reg(s8 reg) {
1013 x86_64_emit_rex(1,0,0,(reg));
1014 *(mcodeptr++) = 0xff;
1015 x86_64_emit_reg(1,(reg));
/* DEC r32 */
1019 void x86_64_decl_reg(s8 reg) {
1020 x86_64_emit_rex(0,0,0,(reg));
1021 *(mcodeptr++) = 0xff;
1022 x86_64_emit_reg(1,(reg));
/* DEC qword [basereg + disp] */
1026 void x86_64_dec_membase(s8 basereg, s8 disp) {
1027 x86_64_emit_rex(1,(basereg),0,0);
1028 *(mcodeptr++) = 0xff;
1029 x86_64_emit_membase((basereg),(disp),1);
/* DEC dword [basereg + disp] */
1033 void x86_64_decl_membase(s8 basereg, s8 disp) {
1034 x86_64_emit_rex(0,(basereg),0,0);
1035 *(mcodeptr++) = 0xff;
1036 x86_64_emit_membase((basereg),(disp),1);
/* CDQ (0x99): sign-extend EAX into EDX:EAX -- used before 32-bit idiv.
   NOTE(review): closing braces elided by this extraction. */
1042 void x86_64_cltd() {
1043 *(mcodeptr++) = 0x99;
/* CQO (REX.W 0x99): sign-extend RAX into RDX:RAX -- used before 64-bit idiv */
1047 void x86_64_cqto() {
1048 x86_64_emit_rex(1,0,0,0);
1049 *(mcodeptr++) = 0x99;
/* IMUL / IDIV encoders -----------------------------------------------------
   Two-operand IMUL is 0x0f 0xaf (dest in ModRM reg field); three-operand
   IMUL-by-immediate is 0x6b (imm8) or 0x69 (imm32); IDIV is 0xf7 /7 and
   divides RDX:RAX (or EDX:EAX) by the operand.
   NOTE(review): closing braces and else keywords elided by this
   extraction. */
/* IMUL r64, r/m64 */
1054 void x86_64_imul_reg_reg(s8 reg, s8 dreg) {
1055 x86_64_emit_rex(1,(dreg),0,(reg));
1056 *(mcodeptr++) = 0x0f;
1057 *(mcodeptr++) = 0xaf;
1058 x86_64_emit_reg((dreg),(reg));
/* IMUL r32, r/m32 */
1062 void x86_64_imull_reg_reg(s8 reg, s8 dreg) {
1063 x86_64_emit_rex(0,(dreg),0,(reg));
1064 *(mcodeptr++) = 0x0f;
1065 *(mcodeptr++) = 0xaf;
1066 x86_64_emit_reg((dreg),(reg));
/* IMUL r64, qword [basereg + disp] */
1070 void x86_64_imul_membase_reg(s8 basereg, s8 disp, s8 dreg) {
1071 x86_64_emit_rex(1,(dreg),0,(basereg));
1072 *(mcodeptr++) = 0x0f;
1073 *(mcodeptr++) = 0xaf;
1074 x86_64_emit_membase((basereg),(disp),(dreg));
/* IMUL r32, dword [basereg + disp] */
1078 void x86_64_imull_membase_reg(s8 basereg, s8 disp, s8 dreg) {
1079 x86_64_emit_rex(0,(dreg),0,(basereg));
1080 *(mcodeptr++) = 0x0f;
1081 *(mcodeptr++) = 0xaf;
1082 x86_64_emit_membase((basereg),(disp),(dreg));
/* IMUL dreg, dreg, imm (register multiplied in place by a constant) */
1086 void x86_64_imul_imm_reg(s8 imm, s8 dreg) {
1087 if (x86_64_is_imm8((imm))) {
1088 x86_64_emit_rex(1,0,0,(dreg));
1089 *(mcodeptr++) = 0x6b;
1090 x86_64_emit_reg(0,(dreg));
1091 x86_64_emit_imm8((imm));
1093 x86_64_emit_rex(1,0,0,(dreg));
1094 *(mcodeptr++) = 0x69;
1095 x86_64_emit_reg(0,(dreg));
1096 x86_64_emit_imm32((imm));
/* IMUL dreg, reg, imm (64-bit three-operand form) */
1101 void x86_64_imul_imm_reg_reg(s8 imm, s8 reg, s8 dreg) {
1102 if (x86_64_is_imm8((imm))) {
1103 x86_64_emit_rex(1,(dreg),0,(reg));
1104 *(mcodeptr++) = 0x6b;
1105 x86_64_emit_reg((dreg),(reg));
1106 x86_64_emit_imm8((imm));
1108 x86_64_emit_rex(1,(dreg),0,(reg));
1109 *(mcodeptr++) = 0x69;
1110 x86_64_emit_reg((dreg),(reg));
1111 x86_64_emit_imm32((imm));
/* IMUL dreg, reg, imm (32-bit three-operand form) */
1116 void x86_64_imull_imm_reg_reg(s8 imm, s8 reg, s8 dreg) {
1117 if (x86_64_is_imm8((imm))) {
1118 x86_64_emit_rex(0,(dreg),0,(reg));
1119 *(mcodeptr++) = 0x6b;
1120 x86_64_emit_reg((dreg),(reg));
1121 x86_64_emit_imm8((imm));
1123 x86_64_emit_rex(0,(dreg),0,(reg));
1124 *(mcodeptr++) = 0x69;
1125 x86_64_emit_reg((dreg),(reg));
1126 x86_64_emit_imm32((imm));
/* IMUL dreg, qword [basereg + disp], imm */
1131 void x86_64_imul_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg) {
1132 if (x86_64_is_imm8((imm))) {
1133 x86_64_emit_rex(1,(dreg),0,(basereg));
1134 *(mcodeptr++) = 0x6b;
1135 x86_64_emit_membase((basereg),(disp),(dreg));
1136 x86_64_emit_imm8((imm));
1138 x86_64_emit_rex(1,(dreg),0,(basereg));
1139 *(mcodeptr++) = 0x69;
1140 x86_64_emit_membase((basereg),(disp),(dreg));
1141 x86_64_emit_imm32((imm));
/* IMUL dreg, dword [basereg + disp], imm */
1146 void x86_64_imull_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg) {
1147 if (x86_64_is_imm8((imm))) {
1148 x86_64_emit_rex(0,(dreg),0,(basereg));
1149 *(mcodeptr++) = 0x6b;
1150 x86_64_emit_membase((basereg),(disp),(dreg));
1151 x86_64_emit_imm8((imm));
1153 x86_64_emit_rex(0,(dreg),0,(basereg));
1154 *(mcodeptr++) = 0x69;
1155 x86_64_emit_membase((basereg),(disp),(dreg));
1156 x86_64_emit_imm32((imm));
/* IDIV r/m64 (0xf7 /7): divides RDX:RAX */
1161 void x86_64_idiv_reg(s8 reg) {
1162 x86_64_emit_rex(1,0,0,(reg));
1163 *(mcodeptr++) = 0xf7;
1164 x86_64_emit_reg(7,(reg));
/* IDIV r/m32: divides EDX:EAX */
1168 void x86_64_idivl_reg(s8 reg) {
1169 x86_64_emit_rex(0,0,0,(reg));
1170 *(mcodeptr++) = 0xf7;
1171 x86_64_emit_reg(7,(reg));
1177 *(mcodeptr++) = 0xc3;
/* Shift encoders -----------------------------------------------------------
   opc selects the shift kind (shl/shr/sar) via the ModRM reg field.
   Variable shifts by CL use opcode 0xd3; immediate shifts use 0xd1
   (shift-by-1) or 0xc1 + imm8.
   NOTE(review): closing braces and the if/else lines around the
   shift-by-1 special case are elided by this extraction -- verify
   against upstream. */
/* Shift r/m64 by CL (0xd3 /opc) */
1185 void x86_64_shift_reg(s8 opc, s8 reg) {
1186 x86_64_emit_rex(1,0,0,(reg));
1187 *(mcodeptr++) = 0xd3;
1188 x86_64_emit_reg((opc),(reg));
/* Shift r/m32 by CL */
1192 void x86_64_shiftl_reg(s8 opc, s8 reg) {
1193 x86_64_emit_rex(0,0,0,(reg));
1194 *(mcodeptr++) = 0xd3;
1195 x86_64_emit_reg((opc),(reg));
/* Shift qword [basereg + disp] by CL */
1199 void x86_64_shift_membase(s8 opc, s8 basereg, s8 disp) {
1200 x86_64_emit_rex(1,0,0,(basereg));
1201 *(mcodeptr++) = 0xd3;
1202 x86_64_emit_membase((basereg),(disp),(opc));
/* Shift dword [basereg + disp] by CL */
1206 void x86_64_shiftl_membase(s8 opc, s8 basereg, s8 disp) {
1207 x86_64_emit_rex(0,0,0,(basereg));
1208 *(mcodeptr++) = 0xd3;
1209 x86_64_emit_membase((basereg),(disp),(opc));
/* Shift r64 by imm: 0xd1 when the count is 1, else 0xc1 + imm8 */
1213 void x86_64_shift_imm_reg(s8 opc, s8 imm, s8 dreg) {
1215 x86_64_emit_rex(1,0,0,(dreg));
1216 *(mcodeptr++) = 0xd1;
1217 x86_64_emit_reg((opc),(dreg));
1219 x86_64_emit_rex(1,0,0,(dreg));
1220 *(mcodeptr++) = 0xc1;
1221 x86_64_emit_reg((opc),(dreg));
1222 x86_64_emit_imm8((imm));
/* Shift r32 by imm */
1227 void x86_64_shiftl_imm_reg(s8 opc, s8 imm, s8 dreg) {
1229 x86_64_emit_rex(0,0,0,(dreg));
1230 *(mcodeptr++) = 0xd1;
1231 x86_64_emit_reg((opc),(dreg));
1233 x86_64_emit_rex(0,0,0,(dreg));
1234 *(mcodeptr++) = 0xc1;
1235 x86_64_emit_reg((opc),(dreg));
1236 x86_64_emit_imm8((imm));
/* Shift qword [basereg + disp] by imm */
1241 void x86_64_shift_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
1243 x86_64_emit_rex(1,0,0,(basereg));
1244 *(mcodeptr++) = 0xd1;
1245 x86_64_emit_membase((basereg),(disp),(opc));
1247 x86_64_emit_rex(1,0,0,(basereg));
1248 *(mcodeptr++) = 0xc1;
1249 x86_64_emit_membase((basereg),(disp),(opc));
1250 x86_64_emit_imm8((imm));
/* Shift dword [basereg + disp] by imm */
1255 void x86_64_shiftl_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
1257 x86_64_emit_rex(0,0,0,(basereg));
1258 *(mcodeptr++) = 0xd1;
1259 x86_64_emit_membase((basereg),(disp),(opc));
1261 x86_64_emit_rex(0,0,0,(basereg));
1262 *(mcodeptr++) = 0xc1;
1263 x86_64_emit_membase((basereg),(disp),(opc));
1264 x86_64_emit_imm8((imm));
/* Jump encoders.
   NOTE(review): closing braces elided by this extraction. */
/* JMP rel32 (0xe9) */
1273 void x86_64_jmp_imm(s8 imm) {
1274 *(mcodeptr++) = 0xe9;
1275 x86_64_emit_imm32((imm));
/* JMP r/m64 (0xff /4) -- indirect jump through a register */
1279 void x86_64_jmp_reg(s8 reg) {
1280 x86_64_emit_rex(0,0,0,(reg));
1281 *(mcodeptr++) = 0xff;
1282 x86_64_emit_reg(4,(reg));
/* Jcc rel32 (0x0f 0x80+cc) -- conditional jump; opc is the condition code */
1286 void x86_64_jcc(s8 opc, s8 imm) {
1287 *(mcodeptr++) = 0x0f;
1288 *(mcodeptr++) = (0x80 + (opc));
1289 x86_64_emit_imm32((imm));
1295 * conditional set and move operations
/* SETcc / CMOVcc encoders.  SETcc writes a hand-built REX prefix (0x40 |
   REX.B from bit 3 of the register) so the low byte of any register is
   addressable, as the original comments note.
   NOTE(review): closing braces elided by this extraction. */
1298 /* we need the rex byte to get all low bytes */
/* SETcc r/m8 (0x0f 0x90+cc) */
1299 void x86_64_setcc_reg(s8 opc, s8 reg) {
1300 *(mcodeptr++) = (0x40 | (((reg) >> 3) & 0x01));
1301 *(mcodeptr++) = 0x0f;
1302 *(mcodeptr++) = (0x90 + (opc));
1303 x86_64_emit_reg(0,(reg));
1307 /* we need the rex byte to get all low bytes */
/* SETcc byte [basereg + disp] */
1308 void x86_64_setcc_membase(s8 opc, s8 basereg, s8 disp) {
1309 *(mcodeptr++) = (0x40 | (((basereg) >> 3) & 0x01));
1310 *(mcodeptr++) = 0x0f;
1311 *(mcodeptr++) = (0x90 + (opc));
1312 x86_64_emit_membase((basereg),(disp),0);
/* CMOVcc r64, r/m64 (0x0f 0x40+cc) */
1316 void x86_64_cmovcc_reg_reg(s8 opc, s8 reg, s8 dreg) {
1317 x86_64_emit_rex(1,(dreg),0,(reg));
1318 *(mcodeptr++) = 0x0f;
1319 *(mcodeptr++) = (0x40 + (opc));
1320 x86_64_emit_reg((dreg),(reg));
/* CMOVcc r32, r/m32 */
1324 void x86_64_cmovccl_reg_reg(s8 opc, s8 reg, s8 dreg) {
1325 x86_64_emit_rex(0,(dreg),0,(reg));
1326 *(mcodeptr++) = 0x0f;
1327 *(mcodeptr++) = (0x40 + (opc));
1328 x86_64_emit_reg((dreg),(reg));
1333 void x86_64_neg_reg(s8 reg) {
1334 x86_64_emit_rex(1,0,0,(reg));
1335 *(mcodeptr++) = 0xf7;
1336 x86_64_emit_reg(3,(reg));
1340 void x86_64_negl_reg(s8 reg) {
1341 x86_64_emit_rex(0,0,0,(reg));
1342 *(mcodeptr++) = 0xf7;
1343 x86_64_emit_reg(3,(reg));
/* Emit 64-bit "neg qword [basereg + disp]" (REX.W F7 /3). */
void x86_64_neg_membase(s8 basereg, s8 disp) {
	x86_64_emit_rex(1,0,0,(basereg));
	*(mcodeptr++) = 0xf7;
	x86_64_emit_membase((basereg),(disp),3);
}
/* Emit 32-bit "neg dword [basereg + disp]" (F7 /3). */
void x86_64_negl_membase(s8 basereg, s8 disp) {
	x86_64_emit_rex(0,0,0,(basereg));
	*(mcodeptr++) = 0xf7;
	x86_64_emit_membase((basereg),(disp),3);
}
/* Emit "push reg" (50+r).  The REX prefix carries REX.B for r8-r15;
 * for low registers it degenerates to a harmless 0x40 prefix byte. */
void x86_64_push_reg(s8 reg) {
	x86_64_emit_rex(0,0,0,(reg));
	*(mcodeptr++) = 0x50 + (0x07 & (reg));
}
/* Emit "push imm32" (68 id): the immediate is sign-extended to 64 bit. */
void x86_64_push_imm(s8 imm) {
	*(mcodeptr++) = 0x68;
	x86_64_emit_imm32((imm));
}
/* Emit "pop reg" (58+r), REX.B for the extended registers. */
void x86_64_pop_reg(s8 reg) {
	x86_64_emit_rex(0,0,0,(reg));
	*(mcodeptr++) = 0x58 + (0x07 & (reg));
}
/* Emit 64-bit "xchg reg, dreg" (REX.W 87 /r). */
void x86_64_xchg_reg_reg(s8 reg, s8 dreg) {
	x86_64_emit_rex(1,(reg),0,(dreg));
	*(mcodeptr++) = 0x87;
	x86_64_emit_reg((reg),(dreg));
}
1387 *(mcodeptr++) = 0x90;
/* Emit "call reg" (FF /2): indirect near call.  The REX.W bit emitted
 * here is redundant (calls default to 64-bit operand size) but harmless. */
void x86_64_call_reg(s8 reg) {
	x86_64_emit_rex(1,0,0,(reg));
	*(mcodeptr++) = 0xff;
	x86_64_emit_reg(2,(reg));          /* /2 selects the call form of FF */
}
/* Emit "call rel32" (E8 cd): direct near call, displacement relative
 * to the end of the instruction. */
void x86_64_call_imm(s8 imm) {
	*(mcodeptr++) = 0xe8;
	x86_64_emit_imm32((imm));
}
/* Emit "call [mem]" (FF /2 with an absolute disp32 addressing form). */
void x86_64_call_mem(s8 mem) {
	*(mcodeptr++) = 0xff;
	x86_64_emit_mem(2,(mem));
}
1416 * floating point instructions (SSE2)
/* Emit "addsd dreg, reg" (F2 0F 58 /r): dreg += reg, scalar double.
 * The mandatory F2 prefix must precede REX, as done here. */
void x86_64_addsd_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x58;
	x86_64_emit_reg((dreg),(reg));     /* destination in ModRM reg field */
}
/* Emit "addss dreg, reg" (F3 0F 58 /r): dreg += reg, scalar single. */
void x86_64_addss_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x58;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "cvtsi2ss dreg, reg" with REX.W (F3 REX.W 0F 2A /r):
 * convert signed 64-bit integer to scalar single. */
void x86_64_cvtsi2ssq_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(1,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2a;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "cvtsi2ss dreg, reg" (F3 0F 2A /r): signed 32-bit int -> single. */
void x86_64_cvtsi2ss_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2a;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "cvtsi2sd dreg, reg" with REX.W (F2 REX.W 0F 2A /r):
 * signed 64-bit int -> scalar double. */
void x86_64_cvtsi2sdq_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(1,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2a;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "cvtsi2sd dreg, reg" (F2 0F 2A /r): signed 32-bit int -> double. */
void x86_64_cvtsi2sd_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2a;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "cvtss2sd dreg, reg" (F3 0F 5A /r): scalar single -> double. */
void x86_64_cvtss2sd_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x5a;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "cvtsd2ss dreg, reg" (F2 0F 5A /r): scalar double -> single. */
void x86_64_cvtsd2ss_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x5a;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "cvttss2si dreg, reg" with REX.W (F3 REX.W 0F 2C /r):
 * truncating convert scalar single -> signed 64-bit int. */
void x86_64_cvttss2siq_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(1,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2c;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "cvttss2si dreg, reg" (F3 0F 2C /r): truncate single -> int32. */
void x86_64_cvttss2si_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2c;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "cvttsd2si dreg, reg" with REX.W (F2 REX.W 0F 2C /r):
 * truncating convert scalar double -> signed 64-bit int. */
void x86_64_cvttsd2siq_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(1,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2c;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "cvttsd2si dreg, reg" (F2 0F 2C /r): truncate double -> int32. */
void x86_64_cvttsd2si_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2c;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "divss dreg, reg" (F3 0F 5E /r): dreg /= reg, scalar single. */
void x86_64_divss_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x5e;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "divsd dreg, reg" (F2 0F 5E /r): dreg /= reg, scalar double. */
void x86_64_divsd_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x5e;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "movq freg, reg" (66 REX.W 0F 6E /r): GPR -> XMM.  Despite the
 * "movd" name, REX.W makes this a full 64-bit move. */
void x86_64_movd_reg_freg(s8 reg, s8 freg) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(1,(freg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x6e;
	x86_64_emit_reg((freg),(reg));     /* XMM destination in reg field */
}
/* Emit "movq reg, freg" (66 REX.W 0F 7E /r): XMM -> GPR, 64 bit. */
void x86_64_movd_freg_reg(s8 freg, s8 reg) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(1,(freg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x7e;
	x86_64_emit_reg((freg),(reg));     /* XMM source in reg field (store form) */
}
/* Emit "movd [basereg + disp], reg" (66 0F 7E /r): store low 32 bits
 * of an XMM register to memory (no REX.W). */
void x86_64_movd_reg_membase(s8 reg, s8 basereg, s8 disp) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(0,(reg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x7e;
	x86_64_emit_membase((basereg),(disp),(reg));
}
/* Emit "movd [basereg + indexreg*scale + disp], reg" (66 0F 7E /r):
 * 32-bit XMM store through a SIB addressing form. */
void x86_64_movd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(0,(reg),(indexreg),(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x7e;
	x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
}
/* Emit "movq dreg, [basereg + disp]" (66 REX.W 0F 6E /r): 64-bit load
 * from memory into an XMM register (REX.W widens the "movd"). */
void x86_64_movd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(1,(dreg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x6e;
	x86_64_emit_membase((basereg),(disp),(dreg));
}
/* Emit "movd dreg, [basereg + disp]" (66 0F 6E /r): 32-bit load into XMM. */
void x86_64_movdl_membase_reg(s8 basereg, s8 disp, s8 dreg) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(0,(dreg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x6e;
	x86_64_emit_membase((basereg),(disp),(dreg));
}
/* Emit "movd dreg, [basereg + indexreg*scale + disp]" (66 0F 6E /r):
 * 32-bit load into XMM through a SIB addressing form. */
void x86_64_movd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x6e;
	x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
}
/* Emit "movq dreg, reg" (F3 0F 7E /r): XMM -> XMM 64-bit move,
 * zeroing the upper half of the destination. */
void x86_64_movq_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x7e;
	x86_64_emit_reg((dreg),(reg));     /* destination in reg field (load form) */
}
/* Emit "movq [basereg + disp], reg" (66 0F D6 /r): store the low
 * quadword of an XMM register to memory. */
void x86_64_movq_reg_membase(s8 reg, s8 basereg, s8 disp) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(0,(reg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0xd6;
	x86_64_emit_membase((basereg),(disp),(reg));
}
/* Emit "movq dreg, [basereg + disp]" (F3 0F 7E /r): 64-bit load into XMM. */
void x86_64_movq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x7e;
	x86_64_emit_membase((basereg),(disp),(dreg));
}
1634 void x86_64_movss_reg_reg(s8 reg, s8 dreg) {
1635 *(mcodeptr++) = 0xf3;
1636 x86_64_emit_rex(0,(reg),0,(dreg));
1637 *(mcodeptr++) = 0x0f;
1638 *(mcodeptr++) = 0x10;
1639 x86_64_emit_reg((reg),(dreg));
1643 void x86_64_movsd_reg_reg(s8 reg, s8 dreg) {
1644 *(mcodeptr++) = 0xf2;
1645 x86_64_emit_rex(0,(reg),0,(dreg));
1646 *(mcodeptr++) = 0x0f;
1647 *(mcodeptr++) = 0x10;
1648 x86_64_emit_reg((reg),(dreg));
/* Emit "movss [basereg + disp], reg" (F3 0F 11 /r): store scalar single. */
void x86_64_movss_reg_membase(s8 reg, s8 basereg, s8 disp) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(reg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x11;
	x86_64_emit_membase((basereg),(disp),(reg));
}
/* Emit "movsd [basereg + disp], reg" (F2 0F 11 /r): store scalar double. */
void x86_64_movsd_reg_membase(s8 reg, s8 basereg, s8 disp) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(reg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x11;
	x86_64_emit_membase((basereg),(disp),(reg));
}
/* Emit "movss dreg, [basereg + disp]" (F3 0F 10 /r): load scalar single. */
void x86_64_movss_membase_reg(s8 basereg, s8 disp, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x10;
	x86_64_emit_membase((basereg),(disp),(dreg));
}
/* Emit "movlps dreg, [basereg + disp]" (0F 12 /r): load 64 bits into
 * the low quadword of an XMM register, upper half unchanged. */
void x86_64_movlps_membase_reg(s8 basereg, s8 disp, s8 dreg) {
	x86_64_emit_rex(0,(dreg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x12;
	x86_64_emit_membase((basereg),(disp),(dreg));
}
/* Emit "movsd dreg, [basereg + disp]" (F2 0F 10 /r): load scalar double. */
void x86_64_movsd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(dreg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x10;
	x86_64_emit_membase((basereg),(disp),(dreg));
}
/* Emit "movlpd dreg, [basereg + disp]" (66 0F 12 /r): load 64 bits into
 * the low quadword of an XMM register, upper half unchanged. */
void x86_64_movlpd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(0,(dreg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x12;
	x86_64_emit_membase((basereg),(disp),(dreg));
}
/* Emit "movss [basereg + indexreg*scale + disp], reg" (F3 0F 11 /r):
 * scalar-single store through a SIB addressing form. */
void x86_64_movss_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(reg),(indexreg),(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x11;
	x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
}
/* Emit "movsd [basereg + indexreg*scale + disp], reg" (F2 0F 11 /r):
 * scalar-double store through a SIB addressing form. */
void x86_64_movsd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(reg),(indexreg),(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x11;
	x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
}
/* Emit "movss dreg, [basereg + indexreg*scale + disp]" (F3 0F 10 /r):
 * scalar-single load through a SIB addressing form. */
void x86_64_movss_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x10;
	x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
}
/* Emit "movsd dreg, [basereg + indexreg*scale + disp]" (F2 0F 10 /r):
 * scalar-double load through a SIB addressing form. */
void x86_64_movsd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x10;
	x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
}
/* Emit "mulss dreg, reg" (F3 0F 59 /r): dreg *= reg, scalar single. */
void x86_64_mulss_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x59;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "mulsd dreg, reg" (F2 0F 59 /r): dreg *= reg, scalar double. */
void x86_64_mulsd_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x59;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "subss dreg, reg" (F3 0F 5C /r): dreg -= reg, scalar single. */
void x86_64_subss_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf3;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x5c;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "subsd dreg, reg" (F2 0F 5C /r): dreg -= reg, scalar double. */
void x86_64_subsd_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0xf2;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x5c;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "ucomiss dreg, reg" (0F 2E /r): unordered scalar-single compare,
 * result goes to EFLAGS. */
void x86_64_ucomiss_reg_reg(s8 reg, s8 dreg) {
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2e;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "ucomisd dreg, reg" (66 0F 2E /r): unordered scalar-double
 * compare, result goes to EFLAGS. */
void x86_64_ucomisd_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x2e;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "xorps dreg, reg" (0F 57 /r): packed-single XOR; used e.g. to
 * flip/clear sign bits or zero a register. */
void x86_64_xorps_reg_reg(s8 reg, s8 dreg) {
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x57;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "xorps dreg, [basereg + disp]" (0F 57 /r, memory source). */
void x86_64_xorps_membase_reg(s8 basereg, s8 disp, s8 dreg) {
	x86_64_emit_rex(0,(dreg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x57;
	x86_64_emit_membase((basereg),(disp),(dreg));
}
/* Emit "xorpd dreg, reg" (66 0F 57 /r): packed-double XOR. */
void x86_64_xorpd_reg_reg(s8 reg, s8 dreg) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(0,(dreg),0,(reg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x57;
	x86_64_emit_reg((dreg),(reg));
}
/* Emit "xorpd dreg, [basereg + disp]" (66 0F 57 /r, memory source). */
void x86_64_xorpd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
	*(mcodeptr++) = 0x66;
	x86_64_emit_rex(0,(dreg),0,(basereg));
	*(mcodeptr++) = 0x0f;
	*(mcodeptr++) = 0x57;
	x86_64_emit_membase((basereg),(disp),(dreg));
}
1829 * These are local overrides for various environment variables in Emacs.
1830 * Please do not remove this and leave it at the end of the file, where
1831 * Emacs will automagically detect them.
1832 * ---------------------------------------------------------------------
1835 * indent-tabs-mode: t