1 /* src/vm/jit/x86_64/emitfuncs.c - x86_64 code emitter functions
3 Copyright (C) 1996-2005, 2006 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
25 Contact: cacao@cacaojvm.org
27 Authors: Christian Thalinger
31 $Id: emitfuncs.c 4357 2006-01-22 23:33:38Z twisti $
40 #include "vm/jit/jit.h"
41 #include "vm/jit/x86_64/codegen.h"
42 #include "vm/jit/x86_64/emitfuncs.h"
45 /* code generation functions */
/* x86_64_emit_ialu ***********************************************************

   Emits a 32-bit ALU operation (add/sub/and/or/xor/cmp, selected by
   alu_op) combining the two topmost stack operands into iptr's
   destination.  Each operand may be held in a register or spilled
   (INMEMORY flag); spilled values live at REG_SP + regoff * 8.
   REG_ITMP1 serves as scratch when two memory operands meet.

   NOTE(review): this extraction has dropped interleaved lines (the
   opening brace, the "s4 s2 = src->regoff;" declaration, several
   else-branches and closing braces, visible in the embedded line
   numbering) -- compare with upstream CACAO before editing.

*******************************************************************************/

47 void x86_64_emit_ialu(codegendata *cd, s4 alu_op, stackptr src, instruction *iptr)
49 s4 s1 = src->prev->regoff;
51 s4 d = iptr->dst->regoff;
/* destination is a spilled stack slot */
53 if (iptr->dst->flags & INMEMORY) {
/* both operands spilled: combine via REG_ITMP1 */
54 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
56 x86_64_movl_membase_reg(cd, REG_SP, s1 * 8, REG_ITMP1);
57 x86_64_alul_reg_membase(cd, alu_op, REG_ITMP1, REG_SP, d * 8);
60 x86_64_movl_membase_reg(cd, REG_SP, s2 * 8, REG_ITMP1);
61 x86_64_alul_reg_membase(cd, alu_op, REG_ITMP1, REG_SP, d * 8);
64 x86_64_movl_membase_reg(cd, REG_SP, s1 * 8, REG_ITMP1);
65 x86_64_alul_membase_reg(cd, alu_op, REG_SP, s2 * 8, REG_ITMP1);
66 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, d * 8);
/* s2 spilled, s1 in a register */
69 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
71 x86_64_alul_reg_membase(cd, alu_op, s1, REG_SP, d * 8);
74 x86_64_movl_membase_reg(cd, REG_SP, s2 * 8, REG_ITMP1);
75 x86_64_alul_reg_reg(cd, alu_op, s1, REG_ITMP1);
76 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, d * 8);
/* s1 spilled, s2 in a register */
79 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
81 x86_64_alul_reg_membase(cd, alu_op, s2, REG_SP, d * 8);
84 x86_64_movl_membase_reg(cd, REG_SP, s1 * 8, REG_ITMP1);
85 x86_64_alul_reg_reg(cd, alu_op, s2, REG_ITMP1);
86 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, d * 8);
/* both operands in registers, destination spilled */
90 x86_64_movl_reg_membase(cd, s1, REG_SP, d * 8);
91 x86_64_alul_reg_membase(cd, alu_op, s2, REG_SP, d * 8);
/* destination is a register */
95 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
96 x86_64_movl_membase_reg(cd, REG_SP, s1 * 8, d);
97 x86_64_alul_membase_reg(cd, alu_op, REG_SP, s2 * 8, d);
99 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
101 x86_64_alul_membase_reg(cd, alu_op, REG_SP, s2 * 8, d);
103 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
105 x86_64_alul_membase_reg(cd, alu_op, REG_SP, s1 * 8, d);
/* all-register cases */
109 x86_64_alul_reg_reg(cd, alu_op, s1, d);
113 x86_64_alul_reg_reg(cd, alu_op, s2, d);
/* x86_64_emit_lalu ***********************************************************

   64-bit counterpart of x86_64_emit_ialu: emits a quadword ALU
   operation (alu_op) on the two topmost stack operands, handling every
   register/INMEMORY combination of the sources and the destination.
   Spilled operands live at REG_SP + regoff * 8; REG_ITMP1 is scratch.

   NOTE(review): interleaved lines (opening brace, s2 declaration, some
   else-branches/closing braces) were dropped by the extraction --
   compare with upstream CACAO before editing.

*******************************************************************************/

120 void x86_64_emit_lalu(codegendata *cd, s4 alu_op, stackptr src, instruction *iptr)
122 s4 s1 = src->prev->regoff;
124 s4 d = iptr->dst->regoff;
/* destination is a spilled stack slot */
126 if (iptr->dst->flags & INMEMORY) {
127 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
129 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, REG_ITMP1);
130 x86_64_alu_reg_membase(cd, alu_op, REG_ITMP1, REG_SP, d * 8);
/* destination aliases the first operand's slot */
132 } else if (s1 == d) {
133 x86_64_mov_membase_reg(cd, REG_SP, s2 * 8, REG_ITMP1);
134 x86_64_alu_reg_membase(cd, alu_op, REG_ITMP1, REG_SP, d * 8);
137 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, REG_ITMP1);
138 x86_64_alu_membase_reg(cd, alu_op, REG_SP, s2 * 8, REG_ITMP1);
139 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, d * 8);
/* s2 spilled, s1 in a register */
142 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
144 x86_64_alu_reg_membase(cd, alu_op, s1, REG_SP, d * 8);
147 x86_64_mov_membase_reg(cd, REG_SP, s2 * 8, REG_ITMP1);
148 x86_64_alu_reg_reg(cd, alu_op, s1, REG_ITMP1);
149 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, d * 8);
/* s1 spilled, s2 in a register */
152 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
154 x86_64_alu_reg_membase(cd, alu_op, s2, REG_SP, d * 8);
157 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, REG_ITMP1);
158 x86_64_alu_reg_reg(cd, alu_op, s2, REG_ITMP1);
159 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, d * 8);
/* both operands in registers, destination spilled */
163 x86_64_mov_reg_membase(cd, s1, REG_SP, d * 8);
164 x86_64_alu_reg_membase(cd, alu_op, s2, REG_SP, d * 8);
/* destination is a register */
168 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
169 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
170 x86_64_alu_membase_reg(cd, alu_op, REG_SP, s2 * 8, d);
172 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
174 x86_64_alu_membase_reg(cd, alu_op, REG_SP, s2 * 8, d);
176 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
178 x86_64_alu_membase_reg(cd, alu_op, REG_SP, s1 * 8, d);
/* all-register cases */
182 x86_64_alu_reg_reg(cd, alu_op, s1, d);
186 x86_64_alu_reg_reg(cd, alu_op, s2, d);
/* x86_64_emit_ialuconst ******************************************************

   Emits a 32-bit ALU operation of the top stack operand with the
   constant iptr->val.i, for every register/INMEMORY placement of
   source and destination.  For register-to-register X86_64_ADD with
   s1 != d, a single LEA (M_ILEA) is used instead of move+add.

   NOTE(review): interleaved lines (opening brace, "s4 s1 =
   src->regoff;", some else-branches/braces) were dropped by the
   extraction -- compare with upstream CACAO before editing.

*******************************************************************************/

193 void x86_64_emit_ialuconst(codegendata *cd, s4 alu_op, stackptr src, instruction *iptr)
196 s4 d = iptr->dst->regoff;
/* destination is a spilled stack slot */
198 if (iptr->dst->flags & INMEMORY) {
199 if (src->flags & INMEMORY) {
/* source slot aliases destination slot: operate in place */
201 x86_64_alul_imm_membase(cd, alu_op, iptr->val.i, REG_SP, d * 8);
204 x86_64_movl_membase_reg(cd, REG_SP, s1 * 8, REG_ITMP1);
205 x86_64_alul_imm_reg(cd, alu_op, iptr->val.i, REG_ITMP1);
206 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, d * 8);
/* source in a register, destination spilled */
210 x86_64_movl_reg_membase(cd, s1, REG_SP, d * 8);
211 x86_64_alul_imm_membase(cd, alu_op, iptr->val.i, REG_SP, d * 8);
/* destination is a register */
215 if (src->flags & INMEMORY) {
216 x86_64_movl_membase_reg(cd, REG_SP, s1 * 8, d);
217 x86_64_alul_imm_reg(cd, alu_op, iptr->val.i, d);
222 x86_64_alul_imm_reg(cd, alu_op, iptr->val.i, d);
224 /* lea addition optimization */
226 if ((alu_op == X86_64_ADD) && (s1 != d)) {
227 M_ILEA(s1, iptr->val.i, d);
231 x86_64_alul_imm_reg(cd, alu_op, iptr->val.i, d);
/* x86_64_emit_laluconst ******************************************************

   64-bit counterpart of x86_64_emit_ialuconst: ALU of the top stack
   operand with the 64-bit constant iptr->val.l.  Constants that do not
   fit a sign-extended 32-bit immediate (IS_IMM32) are first
   materialized into REG_ITMP1/REG_ITMP2 with mov_imm_reg.  For a
   register-to-register X86_64_ADD with an imm32 and s1 != d, M_LLEA
   folds the move and the add into one LEA.

   NOTE(review): interleaved lines (opening brace, "s4 s1 =
   src->regoff;", some else-branches/braces) were dropped by the
   extraction -- compare with upstream CACAO before editing.

*******************************************************************************/

239 void x86_64_emit_laluconst(codegendata *cd, s4 alu_op, stackptr src, instruction *iptr)
242 s4 d = iptr->dst->regoff;
/* destination is a spilled stack slot */
244 if (iptr->dst->flags & INMEMORY) {
245 if (src->flags & INMEMORY) {
247 if (IS_IMM32(iptr->val.l)) {
248 x86_64_alu_imm_membase(cd, alu_op, iptr->val.l, REG_SP, d * 8);
/* constant needs 64 bits: load it into a scratch register first */
251 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
252 x86_64_alu_reg_membase(cd, alu_op, REG_ITMP1, REG_SP, d * 8);
256 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, REG_ITMP1);
258 if (IS_IMM32(iptr->val.l)) {
259 x86_64_alu_imm_reg(cd, alu_op, iptr->val.l, REG_ITMP1);
262 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP2);
263 x86_64_alu_reg_reg(cd, alu_op, REG_ITMP2, REG_ITMP1);
265 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, d * 8);
/* source in a register, destination spilled */
269 x86_64_mov_reg_membase(cd, s1, REG_SP, d * 8);
271 if (IS_IMM32(iptr->val.l)) {
272 x86_64_alu_imm_membase(cd, alu_op, iptr->val.l, REG_SP, d * 8);
275 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
276 x86_64_alu_reg_membase(cd, alu_op, REG_ITMP1, REG_SP, d * 8);
/* destination is a register */
282 if (src->flags & INMEMORY) {
283 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
289 if (IS_IMM32(iptr->val.l)) {
290 x86_64_alu_imm_reg(cd, alu_op, iptr->val.l, d);
293 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
294 x86_64_alu_reg_reg(cd, alu_op, REG_ITMP1, d);
297 if (src->flags & INMEMORY) {
298 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
300 if (IS_IMM32(iptr->val.l)) {
301 x86_64_alu_imm_reg(cd, alu_op, iptr->val.l, d);
304 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
305 x86_64_alu_reg_reg(cd, alu_op, REG_ITMP1, d);
309 if (IS_IMM32(iptr->val.l)) {
310 /* lea addition optimization */
312 if ((alu_op == X86_64_ADD) && (s1 != d)) {
313 M_LLEA(s1, iptr->val.l, d);
317 x86_64_alu_imm_reg(cd, alu_op, iptr->val.l, d);
322 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
323 x86_64_alu_reg_reg(cd, alu_op, REG_ITMP1, d);
/* x86_64_emit_ishift *********************************************************

   Emits a 32-bit variable shift (shl/sar/shr, selected by shift_op).
   x86 variable shifts take the count in CL, so RCX is saved into
   REG_ITMP1 on entry, the shift count (s2) is moved into RCX, and RCX
   is restored (from REG_ITMP1, or REG_ITMP3 on one path) afterwards.
   All register/INMEMORY placements of the operands are handled.

   NOTE(review): interleaved lines (opening brace, s2 declaration,
   several else-branches/braces, and some moves around the final
   register cases) were dropped by the extraction -- compare with
   upstream CACAO before editing.

*******************************************************************************/

331 void x86_64_emit_ishift(codegendata *cd, s4 shift_op, stackptr src, instruction *iptr)
333 s4 s1 = src->prev->regoff;
335 s4 d = iptr->dst->regoff;
338 M_INTMOVE(RCX, REG_ITMP1); /* save RCX */
/* destination is a spilled stack slot */
340 if (iptr->dst->flags & INMEMORY) {
341 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* count into CL, then shift the slot in place */
343 x86_64_movl_membase_reg(cd, REG_SP, s2 * 8, RCX);
344 x86_64_shiftl_membase(cd, shift_op, REG_SP, d * 8);
347 x86_64_movl_membase_reg(cd, REG_SP, s2 * 8, RCX);
348 x86_64_movl_membase_reg(cd, REG_SP, s1 * 8, REG_ITMP2);
349 x86_64_shiftl_reg(cd, shift_op, REG_ITMP2);
350 x86_64_movl_reg_membase(cd, REG_ITMP2, REG_SP, d * 8);
353 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
354 /* s1 may be equal to RCX */
355 x86_64_movl_reg_membase(cd, s1, REG_SP, d * 8);
356 x86_64_movl_membase_reg(cd, REG_SP, s2 * 8, RCX);
357 x86_64_shiftl_membase(cd, shift_op, REG_SP, d * 8);
359 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
362 x86_64_shiftl_membase(cd, shift_op, REG_SP, d * 8);
366 x86_64_movl_membase_reg(cd, REG_SP, s1 * 8, REG_ITMP2);
367 x86_64_shiftl_reg(cd, shift_op, REG_ITMP2);
368 x86_64_movl_reg_membase(cd, REG_ITMP2, REG_SP, d * 8);
372 /* s1 may be equal to RCX */
373 x86_64_movl_reg_membase(cd, s1, REG_SP, d * 8);
375 x86_64_shiftl_membase(cd, shift_op, REG_SP, d * 8);
378 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* destination is a register */
386 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
387 x86_64_movl_membase_reg(cd, REG_SP, s2 * 8, RCX);
388 x86_64_movl_membase_reg(cd, REG_SP, s1 * 8, d);
389 x86_64_shiftl_reg(cd, shift_op, d);
391 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
392 /* s1 may be equal to RCX */
394 x86_64_movl_membase_reg(cd, REG_SP, s2 * 8, RCX);
395 x86_64_shiftl_reg(cd, shift_op, d);
397 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
399 x86_64_movl_membase_reg(cd, REG_SP, s1 * 8, d);
400 x86_64_shiftl_reg(cd, shift_op, d);
403 /* s1 may be equal to RCX */
408 /* d may be equal to s2 */
412 x86_64_shiftl_reg(cd, shift_op, d);
/* restore count register */
416 M_INTMOVE(REG_ITMP3, RCX);
419 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* x86_64_emit_lshift *********************************************************

   64-bit counterpart of x86_64_emit_ishift: emits a quadword variable
   shift.  The shift count must be in CL, so RCX is saved into
   REG_ITMP1 on entry and restored afterwards; s2 is moved into RCX
   before shifting.  All register/INMEMORY placements are handled.

   NOTE(review): interleaved lines (opening brace, s2 declaration,
   several else-branches/braces) were dropped by the extraction --
   compare with upstream CACAO before editing.

*******************************************************************************/

425 void x86_64_emit_lshift(codegendata *cd, s4 shift_op, stackptr src, instruction *iptr)
427 s4 s1 = src->prev->regoff;
429 s4 d = iptr->dst->regoff;
432 M_INTMOVE(RCX, REG_ITMP1); /* save RCX */
/* destination is a spilled stack slot */
434 if (iptr->dst->flags & INMEMORY) {
435 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
437 x86_64_mov_membase_reg(cd, REG_SP, s2 * 8, RCX);
438 x86_64_shift_membase(cd, shift_op, REG_SP, d * 8);
441 x86_64_mov_membase_reg(cd, REG_SP, s2 * 8, RCX);
442 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, REG_ITMP2);
443 x86_64_shift_reg(cd, shift_op, REG_ITMP2);
444 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, d * 8);
447 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
448 /* s1 may be equal to RCX */
449 x86_64_mov_reg_membase(cd, s1, REG_SP, d * 8);
450 x86_64_mov_membase_reg(cd, REG_SP, s2 * 8, RCX);
451 x86_64_shift_membase(cd, shift_op, REG_SP, d * 8);
453 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
456 x86_64_shift_membase(cd, shift_op, REG_SP, d * 8);
460 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, REG_ITMP2);
461 x86_64_shift_reg(cd, shift_op, REG_ITMP2);
462 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, d * 8);
466 /* s1 may be equal to RCX */
467 x86_64_mov_reg_membase(cd, s1, REG_SP, d * 8);
469 x86_64_shift_membase(cd, shift_op, REG_SP, d * 8);
472 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* destination is a register */
480 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
481 x86_64_mov_membase_reg(cd, REG_SP, s2 * 8, RCX);
482 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
483 x86_64_shift_reg(cd, shift_op, d);
485 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
486 /* s1 may be equal to RCX */
488 x86_64_mov_membase_reg(cd, REG_SP, s2 * 8, RCX);
489 x86_64_shift_reg(cd, shift_op, d);
491 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
493 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
494 x86_64_shift_reg(cd, shift_op, d);
497 /* s1 may be equal to RCX */
502 /* d may be equal to s2 */
506 x86_64_shift_reg(cd, shift_op, d);
/* restore count register */
510 M_INTMOVE(REG_ITMP3, RCX);
513 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
519 void x86_64_emit_ishiftconst(codegendata *cd, s4 shift_op, stackptr src, instruction *iptr)
522 s4 d = iptr->dst->regoff;
524 if ((src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
526 x86_64_shiftl_imm_membase(cd, shift_op, iptr->val.i, REG_SP, d * 8);
529 x86_64_movl_membase_reg(cd, REG_SP, s1 * 8, REG_ITMP1);
530 x86_64_shiftl_imm_reg(cd, shift_op, iptr->val.i, REG_ITMP1);
531 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, d * 8);
534 } else if ((src->flags & INMEMORY) && !(iptr->dst->flags & INMEMORY)) {
535 x86_64_movl_membase_reg(cd, REG_SP, s1 * 8, d);
536 x86_64_shiftl_imm_reg(cd, shift_op, iptr->val.i, d);
538 } else if (!(src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
539 x86_64_movl_reg_membase(cd, s1, REG_SP, d * 8);
540 x86_64_shiftl_imm_membase(cd, shift_op, iptr->val.i, REG_SP, d * 8);
544 x86_64_shiftl_imm_reg(cd, shift_op, iptr->val.i, d);
549 void x86_64_emit_lshiftconst(codegendata *cd, s4 shift_op, stackptr src, instruction *iptr)
552 s4 d = iptr->dst->regoff;
554 if ((src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
556 x86_64_shift_imm_membase(cd, shift_op, iptr->val.i, REG_SP, d * 8);
559 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, REG_ITMP1);
560 x86_64_shift_imm_reg(cd, shift_op, iptr->val.i, REG_ITMP1);
561 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, d * 8);
564 } else if ((src->flags & INMEMORY) && !(iptr->dst->flags & INMEMORY)) {
565 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
566 x86_64_shift_imm_reg(cd, shift_op, iptr->val.i, d);
568 } else if (!(src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
569 x86_64_mov_reg_membase(cd, s1, REG_SP, d * 8);
570 x86_64_shift_imm_membase(cd, shift_op, iptr->val.i, REG_SP, d * 8);
574 x86_64_shift_imm_reg(cd, shift_op, iptr->val.i, d);
579 void x86_64_emit_ifcc(codegendata *cd, s4 if_op, stackptr src, instruction *iptr)
581 if (src->flags & INMEMORY) {
582 x86_64_alul_imm_membase(cd, X86_64_CMP, iptr->val.i, REG_SP, src->regoff * 8);
585 if (iptr->val.i == 0) {
586 x86_64_testl_reg_reg(cd, src->regoff, src->regoff);
589 x86_64_alul_imm_reg(cd, X86_64_CMP, iptr->val.i, src->regoff);
592 x86_64_jcc(cd, if_op, 0);
593 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
597 void x86_64_emit_if_lcc(codegendata *cd, s4 if_op, stackptr src, instruction *iptr)
601 if (src->flags & INMEMORY) {
602 if (IS_IMM32(iptr->val.l)) {
603 x86_64_alu_imm_membase(cd, X86_64_CMP, iptr->val.l, REG_SP, s1 * 8);
606 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
607 x86_64_alu_reg_membase(cd, X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
611 if (iptr->val.l == 0) {
612 x86_64_test_reg_reg(cd, s1, s1);
615 if (IS_IMM32(iptr->val.l)) {
616 x86_64_alu_imm_reg(cd, X86_64_CMP, iptr->val.l, s1);
619 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
620 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP1, s1);
624 x86_64_jcc(cd, if_op, 0);
625 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
629 void x86_64_emit_if_icmpcc(codegendata *cd, s4 if_op, stackptr src, instruction *iptr)
631 s4 s1 = src->prev->regoff;
634 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
635 x86_64_movl_membase_reg(cd, REG_SP, s2 * 8, REG_ITMP1);
636 x86_64_alul_reg_membase(cd, X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
638 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
639 x86_64_alul_membase_reg(cd, X86_64_CMP, REG_SP, s2 * 8, s1);
641 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
642 x86_64_alul_reg_membase(cd, X86_64_CMP, s2, REG_SP, s1 * 8);
645 x86_64_alul_reg_reg(cd, X86_64_CMP, s2, s1);
647 x86_64_jcc(cd, if_op, 0);
648 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
652 void x86_64_emit_if_lcmpcc(codegendata *cd, s4 if_op, stackptr src, instruction *iptr)
654 s4 s1 = src->prev->regoff;
657 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
658 x86_64_mov_membase_reg(cd, REG_SP, s2 * 8, REG_ITMP1);
659 x86_64_alu_reg_membase(cd, X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
661 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
662 x86_64_alu_membase_reg(cd, X86_64_CMP, REG_SP, s2 * 8, s1);
664 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
665 x86_64_alu_reg_membase(cd, X86_64_CMP, s2, REG_SP, s1 * 8);
668 x86_64_alu_reg_reg(cd, X86_64_CMP, s2, s1);
670 x86_64_jcc(cd, if_op, 0);
671 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
678 void x86_64_mov_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
679 x86_64_emit_rex(1,(reg),0,(dreg));
680 *(cd->mcodeptr++) = 0x89;
681 x86_64_emit_reg((reg),(dreg));
685 void x86_64_mov_imm_reg(codegendata *cd, s8 imm, s8 reg) {
686 x86_64_emit_rex(1,0,0,(reg));
687 *(cd->mcodeptr++) = 0xb8 + ((reg) & 0x07);
688 x86_64_emit_imm64((imm));
692 void x86_64_movl_imm_reg(codegendata *cd, s8 imm, s8 reg) {
693 x86_64_emit_rex(0,0,0,(reg));
694 *(cd->mcodeptr++) = 0xb8 + ((reg) & 0x07);
695 x86_64_emit_imm32((imm));
699 void x86_64_mov_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 reg) {
700 x86_64_emit_rex(1,(reg),0,(basereg));
701 *(cd->mcodeptr++) = 0x8b;
702 x86_64_emit_membase((basereg),(disp),(reg));
707 * this one is for INVOKEVIRTUAL/INVOKEINTERFACE to have a
708 * constant membase immediate length of 32bit
710 void x86_64_mov_membase32_reg(codegendata *cd, s8 basereg, s8 disp, s8 reg) {
711 x86_64_emit_rex(1,(reg),0,(basereg));
712 *(cd->mcodeptr++) = 0x8b;
713 x86_64_emit_membase32((basereg),(disp),(reg));
717 void x86_64_movl_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 reg) {
718 x86_64_emit_rex(0,(reg),0,(basereg));
719 *(cd->mcodeptr++) = 0x8b;
720 x86_64_emit_membase((basereg),(disp),(reg));
724 /* Always emit a REX byte, because the instruction size can be smaller when */
725 /* all register indexes are smaller than 7. */
726 void x86_64_movl_membase32_reg(codegendata *cd, s8 basereg, s8 disp, s8 reg) {
727 x86_64_emit_byte_rex((reg),0,(basereg));
728 *(cd->mcodeptr++) = 0x8b;
729 x86_64_emit_membase32((basereg),(disp),(reg));
733 void x86_64_mov_reg_membase(codegendata *cd, s8 reg, s8 basereg, s8 disp) {
734 x86_64_emit_rex(1,(reg),0,(basereg));
735 *(cd->mcodeptr++) = 0x89;
736 x86_64_emit_membase((basereg),(disp),(reg));
740 void x86_64_mov_reg_membase32(codegendata *cd, s8 reg, s8 basereg, s8 disp) {
741 x86_64_emit_rex(1,(reg),0,(basereg));
742 *(cd->mcodeptr++) = 0x89;
743 x86_64_emit_membase32((basereg),(disp),(reg));
747 void x86_64_movl_reg_membase(codegendata *cd, s8 reg, s8 basereg, s8 disp) {
748 x86_64_emit_rex(0,(reg),0,(basereg));
749 *(cd->mcodeptr++) = 0x89;
750 x86_64_emit_membase((basereg),(disp),(reg));
754 /* Always emit a REX byte, because the instruction size can be smaller when */
755 /* all register indexes are smaller than 7. */
756 void x86_64_movl_reg_membase32(codegendata *cd, s8 reg, s8 basereg, s8 disp) {
757 x86_64_emit_byte_rex((reg),0,(basereg));
758 *(cd->mcodeptr++) = 0x89;
759 x86_64_emit_membase32((basereg),(disp),(reg));
763 void x86_64_mov_memindex_reg(codegendata *cd, s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
764 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
765 *(cd->mcodeptr++) = 0x8b;
766 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
770 void x86_64_movl_memindex_reg(codegendata *cd, s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
771 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
772 *(cd->mcodeptr++) = 0x8b;
773 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
777 void x86_64_mov_reg_memindex(codegendata *cd, s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
778 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
779 *(cd->mcodeptr++) = 0x89;
780 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
784 void x86_64_movl_reg_memindex(codegendata *cd, s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
785 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
786 *(cd->mcodeptr++) = 0x89;
787 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
791 void x86_64_movw_reg_memindex(codegendata *cd, s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
792 *(cd->mcodeptr++) = 0x66;
793 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
794 *(cd->mcodeptr++) = 0x89;
795 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
799 void x86_64_movb_reg_memindex(codegendata *cd, s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
800 x86_64_emit_byte_rex((reg),(indexreg),(basereg));
801 *(cd->mcodeptr++) = 0x88;
802 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
806 void x86_64_mov_imm_membase(codegendata *cd, s8 imm, s8 basereg, s8 disp) {
807 x86_64_emit_rex(1,0,0,(basereg));
808 *(cd->mcodeptr++) = 0xc7;
809 x86_64_emit_membase((basereg),(disp),0);
810 x86_64_emit_imm32((imm));
814 void x86_64_mov_imm_membase32(codegendata *cd, s8 imm, s8 basereg, s8 disp) {
815 x86_64_emit_rex(1,0,0,(basereg));
816 *(cd->mcodeptr++) = 0xc7;
817 x86_64_emit_membase32((basereg),(disp),0);
818 x86_64_emit_imm32((imm));
822 void x86_64_movl_imm_membase(codegendata *cd, s8 imm, s8 basereg, s8 disp) {
823 x86_64_emit_rex(0,0,0,(basereg));
824 *(cd->mcodeptr++) = 0xc7;
825 x86_64_emit_membase((basereg),(disp),0);
826 x86_64_emit_imm32((imm));
830 /* Always emit a REX byte, because the instruction size can be smaller when */
831 /* all register indexes are smaller than 7. */
832 void x86_64_movl_imm_membase32(codegendata *cd, s8 imm, s8 basereg, s8 disp) {
833 x86_64_emit_byte_rex(0,0,(basereg));
834 *(cd->mcodeptr++) = 0xc7;
835 x86_64_emit_membase32((basereg),(disp),0);
836 x86_64_emit_imm32((imm));
840 void x86_64_movsbq_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
841 x86_64_emit_rex(1,(dreg),0,(reg));
842 *(cd->mcodeptr++) = 0x0f;
843 *(cd->mcodeptr++) = 0xbe;
844 /* XXX: why do reg and dreg have to be exchanged */
845 x86_64_emit_reg((dreg),(reg));
849 void x86_64_movsbq_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 dreg) {
850 x86_64_emit_rex(1,(dreg),0,(basereg));
851 *(cd->mcodeptr++) = 0x0f;
852 *(cd->mcodeptr++) = 0xbe;
853 x86_64_emit_membase((basereg),(disp),(dreg));
857 void x86_64_movswq_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
858 x86_64_emit_rex(1,(dreg),0,(reg));
859 *(cd->mcodeptr++) = 0x0f;
860 *(cd->mcodeptr++) = 0xbf;
861 /* XXX: why do reg and dreg have to be exchanged */
862 x86_64_emit_reg((dreg),(reg));
866 void x86_64_movswq_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 dreg) {
867 x86_64_emit_rex(1,(dreg),0,(basereg));
868 *(cd->mcodeptr++) = 0x0f;
869 *(cd->mcodeptr++) = 0xbf;
870 x86_64_emit_membase((basereg),(disp),(dreg));
874 void x86_64_movslq_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
875 x86_64_emit_rex(1,(dreg),0,(reg));
876 *(cd->mcodeptr++) = 0x63;
877 /* XXX: why do reg and dreg have to be exchanged */
878 x86_64_emit_reg((dreg),(reg));
882 void x86_64_movslq_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 dreg) {
883 x86_64_emit_rex(1,(dreg),0,(basereg));
884 *(cd->mcodeptr++) = 0x63;
885 x86_64_emit_membase((basereg),(disp),(dreg));
889 void x86_64_movzwq_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
890 x86_64_emit_rex(1,(dreg),0,(reg));
891 *(cd->mcodeptr++) = 0x0f;
892 *(cd->mcodeptr++) = 0xb7;
893 /* XXX: why do reg and dreg have to be exchanged */
894 x86_64_emit_reg((dreg),(reg));
898 void x86_64_movzwq_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 dreg) {
899 x86_64_emit_rex(1,(dreg),0,(basereg));
900 *(cd->mcodeptr++) = 0x0f;
901 *(cd->mcodeptr++) = 0xb7;
902 x86_64_emit_membase((basereg),(disp),(dreg));
906 void x86_64_movswq_memindex_reg(codegendata *cd, s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
907 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
908 *(cd->mcodeptr++) = 0x0f;
909 *(cd->mcodeptr++) = 0xbf;
910 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
914 void x86_64_movsbq_memindex_reg(codegendata *cd, s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
915 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
916 *(cd->mcodeptr++) = 0x0f;
917 *(cd->mcodeptr++) = 0xbe;
918 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
922 void x86_64_movzwq_memindex_reg(codegendata *cd, s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
923 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
924 *(cd->mcodeptr++) = 0x0f;
925 *(cd->mcodeptr++) = 0xb7;
926 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
930 void x86_64_mov_imm_memindex(codegendata *cd, s4 imm, s4 disp, s4 basereg, s4 indexreg, s4 scale)
932 x86_64_emit_rex(1,0,(indexreg),(basereg));
933 *(cd->mcodeptr++) = 0xc7;
934 x86_64_emit_memindex(0,(disp),(basereg),(indexreg),(scale));
935 x86_64_emit_imm32((imm));
939 void x86_64_movl_imm_memindex(codegendata *cd, s4 imm, s4 disp, s4 basereg, s4 indexreg, s4 scale)
941 x86_64_emit_rex(0,0,(indexreg),(basereg));
942 *(cd->mcodeptr++) = 0xc7;
943 x86_64_emit_memindex(0,(disp),(basereg),(indexreg),(scale));
944 x86_64_emit_imm32((imm));
948 void x86_64_movw_imm_memindex(codegendata *cd, s4 imm, s4 disp, s4 basereg, s4 indexreg, s4 scale)
950 *(cd->mcodeptr++) = 0x66;
951 x86_64_emit_rex(0,0,(indexreg),(basereg));
952 *(cd->mcodeptr++) = 0xc7;
953 x86_64_emit_memindex(0,(disp),(basereg),(indexreg),(scale));
954 x86_64_emit_imm16((imm));
958 void x86_64_movb_imm_memindex(codegendata *cd, s4 imm, s4 disp, s4 basereg, s4 indexreg, s4 scale)
960 x86_64_emit_rex(0,0,(indexreg),(basereg));
961 *(cd->mcodeptr++) = 0xc6;
962 x86_64_emit_memindex(0,(disp),(basereg),(indexreg),(scale));
963 x86_64_emit_imm8((imm));
970 void x86_64_alu_reg_reg(codegendata *cd, s8 opc, s8 reg, s8 dreg) {
971 x86_64_emit_rex(1,(reg),0,(dreg));
972 *(cd->mcodeptr++) = (((opc)) << 3) + 1;
973 x86_64_emit_reg((reg),(dreg));
977 void x86_64_alul_reg_reg(codegendata *cd, s8 opc, s8 reg, s8 dreg) {
978 x86_64_emit_rex(0,(reg),0,(dreg));
979 *(cd->mcodeptr++) = (((opc)) << 3) + 1;
980 x86_64_emit_reg((reg),(dreg));
984 void x86_64_alu_reg_membase(codegendata *cd, s8 opc, s8 reg, s8 basereg, s8 disp) {
985 x86_64_emit_rex(1,(reg),0,(basereg));
986 *(cd->mcodeptr++) = (((opc)) << 3) + 1;
987 x86_64_emit_membase((basereg),(disp),(reg));
991 void x86_64_alul_reg_membase(codegendata *cd, s8 opc, s8 reg, s8 basereg, s8 disp) {
992 x86_64_emit_rex(0,(reg),0,(basereg));
993 *(cd->mcodeptr++) = (((opc)) << 3) + 1;
994 x86_64_emit_membase((basereg),(disp),(reg));
998 void x86_64_alu_membase_reg(codegendata *cd, s8 opc, s8 basereg, s8 disp, s8 reg) {
999 x86_64_emit_rex(1,(reg),0,(basereg));
1000 *(cd->mcodeptr++) = (((opc)) << 3) + 3;
1001 x86_64_emit_membase((basereg),(disp),(reg));
1005 void x86_64_alul_membase_reg(codegendata *cd, s8 opc, s8 basereg, s8 disp, s8 reg) {
1006 x86_64_emit_rex(0,(reg),0,(basereg));
1007 *(cd->mcodeptr++) = (((opc)) << 3) + 3;
1008 x86_64_emit_membase((basereg),(disp),(reg));
1012 void x86_64_alu_imm_reg(codegendata *cd, s8 opc, s8 imm, s8 dreg) {
1014 x86_64_emit_rex(1,0,0,(dreg));
1015 *(cd->mcodeptr++) = 0x83;
1016 x86_64_emit_reg((opc),(dreg));
1017 x86_64_emit_imm8((imm));
1019 x86_64_emit_rex(1,0,0,(dreg));
1020 *(cd->mcodeptr++) = 0x81;
1021 x86_64_emit_reg((opc),(dreg));
1022 x86_64_emit_imm32((imm));
1027 void x86_64_alu_imm32_reg(codegendata *cd, s8 opc, s8 imm, s8 dreg) {
1028 x86_64_emit_rex(1,0,0,(dreg));
1029 *(cd->mcodeptr++) = 0x81;
1030 x86_64_emit_reg((opc),(dreg));
1031 x86_64_emit_imm32((imm));
1035 void x86_64_alul_imm_reg(codegendata *cd, s8 opc, s8 imm, s8 dreg) {
1037 x86_64_emit_rex(0,0,0,(dreg));
1038 *(cd->mcodeptr++) = 0x83;
1039 x86_64_emit_reg((opc),(dreg));
1040 x86_64_emit_imm8((imm));
1042 x86_64_emit_rex(0,0,0,(dreg));
1043 *(cd->mcodeptr++) = 0x81;
1044 x86_64_emit_reg((opc),(dreg));
1045 x86_64_emit_imm32((imm));
1050 void x86_64_alu_imm_membase(codegendata *cd, s8 opc, s8 imm, s8 basereg, s8 disp) {
1052 x86_64_emit_rex(1,(basereg),0,0);
1053 *(cd->mcodeptr++) = 0x83;
1054 x86_64_emit_membase((basereg),(disp),(opc));
1055 x86_64_emit_imm8((imm));
1057 x86_64_emit_rex(1,(basereg),0,0);
1058 *(cd->mcodeptr++) = 0x81;
1059 x86_64_emit_membase((basereg),(disp),(opc));
1060 x86_64_emit_imm32((imm));
1065 void x86_64_alul_imm_membase(codegendata *cd, s8 opc, s8 imm, s8 basereg, s8 disp) {
1067 x86_64_emit_rex(0,(basereg),0,0);
1068 *(cd->mcodeptr++) = 0x83;
1069 x86_64_emit_membase((basereg),(disp),(opc));
1070 x86_64_emit_imm8((imm));
1072 x86_64_emit_rex(0,(basereg),0,0);
1073 *(cd->mcodeptr++) = 0x81;
1074 x86_64_emit_membase((basereg),(disp),(opc));
1075 x86_64_emit_imm32((imm));
1080 void x86_64_test_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
1081 x86_64_emit_rex(1,(reg),0,(dreg));
1082 *(cd->mcodeptr++) = 0x85;
1083 x86_64_emit_reg((reg),(dreg));
1087 void x86_64_testl_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
1088 x86_64_emit_rex(0,(reg),0,(dreg));
1089 *(cd->mcodeptr++) = 0x85;
1090 x86_64_emit_reg((reg),(dreg));
1094 void x86_64_test_imm_reg(codegendata *cd, s8 imm, s8 reg) {
1095 *(cd->mcodeptr++) = 0xf7;
1096 x86_64_emit_reg(0,(reg));
1097 x86_64_emit_imm32((imm));
1101 void x86_64_testw_imm_reg(codegendata *cd, s8 imm, s8 reg) {
1102 *(cd->mcodeptr++) = 0x66;
1103 *(cd->mcodeptr++) = 0xf7;
1104 x86_64_emit_reg(0,(reg));
1105 x86_64_emit_imm16((imm));
1109 void x86_64_testb_imm_reg(codegendata *cd, s8 imm, s8 reg) {
1110 *(cd->mcodeptr++) = 0xf6;
1111 x86_64_emit_reg(0,(reg));
1112 x86_64_emit_imm8((imm));
1116 void x86_64_lea_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 reg) {
1117 x86_64_emit_rex(1,(reg),0,(basereg));
1118 *(cd->mcodeptr++) = 0x8d;
1119 x86_64_emit_membase((basereg),(disp),(reg));
1123 void x86_64_leal_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 reg) {
1124 x86_64_emit_rex(0,(reg),0,(basereg));
1125 *(cd->mcodeptr++) = 0x8d;
1126 x86_64_emit_membase((basereg),(disp),(reg));
1132 * inc, dec operations
1134 void x86_64_inc_reg(codegendata *cd, s8 reg) {
1135 x86_64_emit_rex(1,0,0,(reg));
1136 *(cd->mcodeptr++) = 0xff;
1137 x86_64_emit_reg(0,(reg));
1141 void x86_64_incl_reg(codegendata *cd, s8 reg) {
1142 x86_64_emit_rex(0,0,0,(reg));
1143 *(cd->mcodeptr++) = 0xff;
1144 x86_64_emit_reg(0,(reg));
1148 void x86_64_inc_membase(codegendata *cd, s8 basereg, s8 disp) {
1149 x86_64_emit_rex(1,(basereg),0,0);
1150 *(cd->mcodeptr++) = 0xff;
1151 x86_64_emit_membase((basereg),(disp),0);
1155 void x86_64_incl_membase(codegendata *cd, s8 basereg, s8 disp) {
1156 x86_64_emit_rex(0,(basereg),0,0);
1157 *(cd->mcodeptr++) = 0xff;
1158 x86_64_emit_membase((basereg),(disp),0);
1162 void x86_64_dec_reg(codegendata *cd, s8 reg) {
1163 x86_64_emit_rex(1,0,0,(reg));
1164 *(cd->mcodeptr++) = 0xff;
1165 x86_64_emit_reg(1,(reg));
1169 void x86_64_decl_reg(codegendata *cd, s8 reg) {
1170 x86_64_emit_rex(0,0,0,(reg));
1171 *(cd->mcodeptr++) = 0xff;
1172 x86_64_emit_reg(1,(reg));
1176 void x86_64_dec_membase(codegendata *cd, s8 basereg, s8 disp) {
1177 x86_64_emit_rex(1,(basereg),0,0);
1178 *(cd->mcodeptr++) = 0xff;
1179 x86_64_emit_membase((basereg),(disp),1);
1183 void x86_64_decl_membase(codegendata *cd, s8 basereg, s8 disp) {
1184 x86_64_emit_rex(0,(basereg),0,0);
1185 *(cd->mcodeptr++) = 0xff;
1186 x86_64_emit_membase((basereg),(disp),1);
1192 void x86_64_cltd(codegendata *cd) {
1193 *(cd->mcodeptr++) = 0x99;
1197 void x86_64_cqto(codegendata *cd) {
1198 x86_64_emit_rex(1,0,0,0);
1199 *(cd->mcodeptr++) = 0x99;
/* Two-operand imul (0x0f 0xaf /r): dreg <- dreg * src.  The
 * destination occupies the ModRM reg field; "l" variants are 32-bit
 * (REX.W clear). */

void x86_64_imul_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    x86_64_emit_rex(1,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0xaf;
    x86_64_emit_reg((dreg),(reg));
}

void x86_64_imull_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0xaf;
    x86_64_emit_reg((dreg),(reg));
}

/* dreg <- dreg * qword [basereg + disp] */
void x86_64_imul_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 dreg) {
    x86_64_emit_rex(1,(dreg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0xaf;
    x86_64_emit_membase((basereg),(disp),(dreg));
}

/* dreg <- dreg * dword [basereg + disp] (32-bit) */
void x86_64_imull_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 dreg) {
    x86_64_emit_rex(0,(dreg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0xaf;
    x86_64_emit_membase((basereg),(disp),(dreg));
}
1236 void x86_64_imul_imm_reg(codegendata *cd, s8 imm, s8 dreg) {
1237 if (IS_IMM8((imm))) {
1238 x86_64_emit_rex(1,0,0,(dreg));
1239 *(cd->mcodeptr++) = 0x6b;
1240 x86_64_emit_reg(0,(dreg));
1241 x86_64_emit_imm8((imm));
1243 x86_64_emit_rex(1,0,0,(dreg));
1244 *(cd->mcodeptr++) = 0x69;
1245 x86_64_emit_reg(0,(dreg));
1246 x86_64_emit_imm32((imm));
1251 void x86_64_imul_imm_reg_reg(codegendata *cd, s8 imm, s8 reg, s8 dreg) {
1252 if (IS_IMM8((imm))) {
1253 x86_64_emit_rex(1,(dreg),0,(reg));
1254 *(cd->mcodeptr++) = 0x6b;
1255 x86_64_emit_reg((dreg),(reg));
1256 x86_64_emit_imm8((imm));
1258 x86_64_emit_rex(1,(dreg),0,(reg));
1259 *(cd->mcodeptr++) = 0x69;
1260 x86_64_emit_reg((dreg),(reg));
1261 x86_64_emit_imm32((imm));
1266 void x86_64_imull_imm_reg_reg(codegendata *cd, s8 imm, s8 reg, s8 dreg) {
1267 if (IS_IMM8((imm))) {
1268 x86_64_emit_rex(0,(dreg),0,(reg));
1269 *(cd->mcodeptr++) = 0x6b;
1270 x86_64_emit_reg((dreg),(reg));
1271 x86_64_emit_imm8((imm));
1273 x86_64_emit_rex(0,(dreg),0,(reg));
1274 *(cd->mcodeptr++) = 0x69;
1275 x86_64_emit_reg((dreg),(reg));
1276 x86_64_emit_imm32((imm));
1281 void x86_64_imul_imm_membase_reg(codegendata *cd, s8 imm, s8 basereg, s8 disp, s8 dreg) {
1282 if (IS_IMM8((imm))) {
1283 x86_64_emit_rex(1,(dreg),0,(basereg));
1284 *(cd->mcodeptr++) = 0x6b;
1285 x86_64_emit_membase((basereg),(disp),(dreg));
1286 x86_64_emit_imm8((imm));
1288 x86_64_emit_rex(1,(dreg),0,(basereg));
1289 *(cd->mcodeptr++) = 0x69;
1290 x86_64_emit_membase((basereg),(disp),(dreg));
1291 x86_64_emit_imm32((imm));
1296 void x86_64_imull_imm_membase_reg(codegendata *cd, s8 imm, s8 basereg, s8 disp, s8 dreg) {
1297 if (IS_IMM8((imm))) {
1298 x86_64_emit_rex(0,(dreg),0,(basereg));
1299 *(cd->mcodeptr++) = 0x6b;
1300 x86_64_emit_membase((basereg),(disp),(dreg));
1301 x86_64_emit_imm8((imm));
1303 x86_64_emit_rex(0,(dreg),0,(basereg));
1304 *(cd->mcodeptr++) = 0x69;
1305 x86_64_emit_membase((basereg),(disp),(dreg));
1306 x86_64_emit_imm32((imm));
/* idiv (0xf7 /7): signed divide of rdx:rax (edx:eax for the "l"
 * variant) by the register operand; quotient -> rax, remainder ->
 * rdx.  Callers must sign-extend the dividend first (cqto/cltd). */
void x86_64_idiv_reg(codegendata *cd, s8 reg) {
    x86_64_emit_rex(1,0,0,(reg));
    *(cd->mcodeptr++) = 0xf7;
    x86_64_emit_reg(7,(reg));
}

void x86_64_idivl_reg(codegendata *cd, s8 reg) {
    x86_64_emit_rex(0,0,0,(reg));
    *(cd->mcodeptr++) = 0xf7;
    x86_64_emit_reg(7,(reg));
}

/* ret (0xc3): near return. */
void x86_64_ret(codegendata *cd) {
    *(cd->mcodeptr++) = 0xc3;
}
/* Shift group.  Opcode 0xd3 shifts by %cl, 0xd1 shifts by one, 0xc1
 * shifts by an imm8.  The actual operation is the sub-opcode "opc"
 * placed in the ModRM reg field (4 = shl/sal, 5 = shr, 7 = sar).
 * "l" variants are 32-bit (REX.W clear). */

/* shift reg (64-bit) by %cl */
void x86_64_shift_reg(codegendata *cd, s8 opc, s8 reg) {
    x86_64_emit_rex(1,0,0,(reg));
    *(cd->mcodeptr++) = 0xd3;
    x86_64_emit_reg((opc),(reg));
}

void x86_64_shiftl_reg(codegendata *cd, s8 opc, s8 reg) {
    x86_64_emit_rex(0,0,0,(reg));
    *(cd->mcodeptr++) = 0xd3;
    x86_64_emit_reg((opc),(reg));
}

/* shift qword [basereg + disp] by %cl */
void x86_64_shift_membase(codegendata *cd, s8 opc, s8 basereg, s8 disp) {
    x86_64_emit_rex(1,0,0,(basereg));
    *(cd->mcodeptr++) = 0xd3;
    x86_64_emit_membase((basereg),(disp),(opc));
}

void x86_64_shiftl_membase(codegendata *cd, s8 opc, s8 basereg, s8 disp) {
    x86_64_emit_rex(0,0,0,(basereg));
    *(cd->mcodeptr++) = 0xd3;
    x86_64_emit_membase((basereg),(disp),(opc));
}

/* shift dreg by a constant; uses the one-byte-shorter 0xd1 form when
 * the count is exactly 1 */
void x86_64_shift_imm_reg(codegendata *cd, s8 opc, s8 imm, s8 dreg) {
    if ((imm) == 1) {
        x86_64_emit_rex(1,0,0,(dreg));
        *(cd->mcodeptr++) = 0xd1;
        x86_64_emit_reg((opc),(dreg));
    } else {
        x86_64_emit_rex(1,0,0,(dreg));
        *(cd->mcodeptr++) = 0xc1;
        x86_64_emit_reg((opc),(dreg));
        x86_64_emit_imm8((imm));
    }
}

void x86_64_shiftl_imm_reg(codegendata *cd, s8 opc, s8 imm, s8 dreg) {
    if ((imm) == 1) {
        x86_64_emit_rex(0,0,0,(dreg));
        *(cd->mcodeptr++) = 0xd1;
        x86_64_emit_reg((opc),(dreg));
    } else {
        x86_64_emit_rex(0,0,0,(dreg));
        *(cd->mcodeptr++) = 0xc1;
        x86_64_emit_reg((opc),(dreg));
        x86_64_emit_imm8((imm));
    }
}

/* shift qword [basereg + disp] by a constant */
void x86_64_shift_imm_membase(codegendata *cd, s8 opc, s8 imm, s8 basereg, s8 disp) {
    if ((imm) == 1) {
        x86_64_emit_rex(1,0,0,(basereg));
        *(cd->mcodeptr++) = 0xd1;
        x86_64_emit_membase((basereg),(disp),(opc));
    } else {
        x86_64_emit_rex(1,0,0,(basereg));
        *(cd->mcodeptr++) = 0xc1;
        x86_64_emit_membase((basereg),(disp),(opc));
        x86_64_emit_imm8((imm));
    }
}

void x86_64_shiftl_imm_membase(codegendata *cd, s8 opc, s8 imm, s8 basereg, s8 disp) {
    if ((imm) == 1) {
        x86_64_emit_rex(0,0,0,(basereg));
        *(cd->mcodeptr++) = 0xd1;
        x86_64_emit_membase((basereg),(disp),(opc));
    } else {
        x86_64_emit_rex(0,0,0,(basereg));
        *(cd->mcodeptr++) = 0xc1;
        x86_64_emit_membase((basereg),(disp),(opc));
        x86_64_emit_imm8((imm));
    }
}
/* jmp rel32 (0xe9): imm is the signed displacement relative to the
 * end of this instruction; the caller computes it from mcodeptr. */
void x86_64_jmp_imm(codegendata *cd, s8 imm) {
    *(cd->mcodeptr++) = 0xe9;
    x86_64_emit_imm32((imm));
}

/* jmp *reg (0xff /4): indirect jump.  Near branches default to
 * 64-bit operands, so no REX.W is required; REX.B is still emitted
 * for r8..r15 by x86_64_emit_rex. */
void x86_64_jmp_reg(codegendata *cd, s8 reg) {
    x86_64_emit_rex(0,0,0,(reg));
    *(cd->mcodeptr++) = 0xff;
    x86_64_emit_reg(4,(reg));
}

/* jcc rel32 (0x0f 0x80+cc): conditional jump; opc is the condition
 * code nibble. */
void x86_64_jcc(codegendata *cd, s8 opc, s8 imm) {
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = (0x80 + (opc));
    x86_64_emit_imm32((imm));
}
/*
 * conditional set and move operations
 */

/* setcc reg8 (0x0f 0x90+cc /0).  A REX prefix (0x40 | B-bit) is
 * always emitted: once any REX is present, byte-register encodings
 * 4..7 select spl/bpl/sil/dil instead of ah/ch/dh/bh, so the low
 * byte of every register can be targeted. */
void x86_64_setcc_reg(codegendata *cd, s8 opc, s8 reg) {
    *(cd->mcodeptr++) = (0x40 | (((reg) >> 3) & 0x01));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = (0x90 + (opc));
    x86_64_emit_reg(0,(reg));
}

/* setcc byte [basereg + disp]; same forced-REX reasoning as above
 * (here the REX carries the base register's high bit). */
void x86_64_setcc_membase(codegendata *cd, s8 opc, s8 basereg, s8 disp) {
    *(cd->mcodeptr++) = (0x40 | (((basereg) >> 3) & 0x01));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = (0x90 + (opc));
    x86_64_emit_membase((basereg),(disp),0);
}

/* cmovcc (0x0f 0x40+cc /r): dreg <- reg if the condition holds,
 * 64-bit. */
void x86_64_cmovcc_reg_reg(codegendata *cd, s8 opc, s8 reg, s8 dreg) {
    x86_64_emit_rex(1,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = (0x40 + (opc));
    x86_64_emit_reg((dreg),(reg));
}

/* 32-bit cmovcc. */
void x86_64_cmovccl_reg_reg(codegendata *cd, s8 opc, s8 reg, s8 dreg) {
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = (0x40 + (opc));
    x86_64_emit_reg((dreg),(reg));
}
/* neg (0xf7 /3): two's-complement negation in place.  "l" variants
 * are 32-bit. */

void x86_64_neg_reg(codegendata *cd, s8 reg) {
    x86_64_emit_rex(1,0,0,(reg));
    *(cd->mcodeptr++) = 0xf7;
    x86_64_emit_reg(3,(reg));
}

void x86_64_negl_reg(codegendata *cd, s8 reg) {
    x86_64_emit_rex(0,0,0,(reg));
    *(cd->mcodeptr++) = 0xf7;
    x86_64_emit_reg(3,(reg));
}

/* neg qword [basereg + disp] */
void x86_64_neg_membase(codegendata *cd, s8 basereg, s8 disp) {
    x86_64_emit_rex(1,0,0,(basereg));
    *(cd->mcodeptr++) = 0xf7;
    x86_64_emit_membase((basereg),(disp),3);
}

/* neg dword [basereg + disp] */
void x86_64_negl_membase(codegendata *cd, s8 basereg, s8 disp) {
    x86_64_emit_rex(0,0,0,(basereg));
    *(cd->mcodeptr++) = 0xf7;
    x86_64_emit_membase((basereg),(disp),3);
}
/* push reg (0x50+r): 64-bit push; only the low three register bits go
 * into the opcode byte, the fourth comes from REX.B. */
void x86_64_push_reg(codegendata *cd, s8 reg) {
    x86_64_emit_rex(0,0,0,(reg));
    *(cd->mcodeptr++) = 0x50 + (0x07 & (reg));
}

/* push imm32 (0x68): the immediate is sign-extended to 64 bits. */
void x86_64_push_imm(codegendata *cd, s8 imm) {
    *(cd->mcodeptr++) = 0x68;
    x86_64_emit_imm32((imm));
}

/* pop reg (0x58+r). */
void x86_64_pop_reg(codegendata *cd, s8 reg) {
    x86_64_emit_rex(0,0,0,(reg));
    *(cd->mcodeptr++) = 0x58 + (0x07 & (reg));
}

/* xchg (REX.W 0x87 /r): swap two 64-bit registers. */
void x86_64_xchg_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    x86_64_emit_rex(1,(reg),0,(dreg));
    *(cd->mcodeptr++) = 0x87;
    x86_64_emit_reg((reg),(dreg));
}

/* nop (0x90): single-byte filler. */
void x86_64_nop(codegendata *cd) {
    *(cd->mcodeptr++) = 0x90;
}
/* call *reg (0xff /2): indirect call.  Note: REX.W is emitted here
 * although near calls default to a 64-bit operand (the W bit is
 * redundant but harmless); x86_64_jmp_reg above omits it.  Kept
 * byte-identical since surrounding code may rely on exact instruction
 * sizes. */
void x86_64_call_reg(codegendata *cd, s8 reg) {
    x86_64_emit_rex(1,0,0,(reg));
    *(cd->mcodeptr++) = 0xff;
    x86_64_emit_reg(2,(reg));
}

/* call rel32 (0xe8): imm is relative to the end of the instruction. */
void x86_64_call_imm(codegendata *cd, s8 imm) {
    *(cd->mcodeptr++) = 0xe8;
    x86_64_emit_imm32((imm));
}

/* call through memory (0xff /2 with the x86_64_emit_mem addressing
 * form -- presumably an absolute disp32 operand; see codegen.h). */
void x86_64_call_mem(codegendata *cd, s8 mem) {
    *(cd->mcodeptr++) = 0xff;
    x86_64_emit_mem(2,(mem));
}
/*
 * floating point instructions (SSE2)
 */

/* addsd (0xf2 0x0f 0x58): scalar double add, dreg <- dreg + reg.
 * The destination sits in the ModRM reg field. */
void x86_64_addsd_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf2;
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x58;
    x86_64_emit_reg((dreg),(reg));
}

/* addss (0xf3 0x0f 0x58): scalar single add, dreg <- dreg + reg. */
void x86_64_addss_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf3;
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x58;
    x86_64_emit_reg((dreg),(reg));
}

/* cvtsi2ss (0xf3 [REX.W] 0x0f 0x2a): signed int -> scalar single.
 * The "q" variant sets REX.W for a 64-bit integer source. */
void x86_64_cvtsi2ssq_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf3;
    x86_64_emit_rex(1,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x2a;
    x86_64_emit_reg((dreg),(reg));
}

void x86_64_cvtsi2ss_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf3;
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x2a;
    x86_64_emit_reg((dreg),(reg));
}

/* cvtsi2sd (0xf2 [REX.W] 0x0f 0x2a): signed int -> scalar double. */
void x86_64_cvtsi2sdq_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf2;
    x86_64_emit_rex(1,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x2a;
    x86_64_emit_reg((dreg),(reg));
}

void x86_64_cvtsi2sd_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf2;
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x2a;
    x86_64_emit_reg((dreg),(reg));
}

/* cvtss2sd (0xf3 0x0f 0x5a): scalar single -> scalar double. */
void x86_64_cvtss2sd_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf3;
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x5a;
    x86_64_emit_reg((dreg),(reg));
}

/* cvtsd2ss (0xf2 0x0f 0x5a): scalar double -> scalar single. */
void x86_64_cvtsd2ss_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf2;
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x5a;
    x86_64_emit_reg((dreg),(reg));
}
/* cvttss2si (0xf3 [REX.W] 0x0f 0x2c): scalar single -> signed int
 * with truncation toward zero; "q" variant produces a 64-bit
 * integer. */
void x86_64_cvttss2siq_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf3;
    x86_64_emit_rex(1,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x2c;
    x86_64_emit_reg((dreg),(reg));
}

void x86_64_cvttss2si_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf3;
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x2c;
    x86_64_emit_reg((dreg),(reg));
}

/* cvttsd2si (0xf2 [REX.W] 0x0f 0x2c): scalar double -> signed int
 * with truncation. */
void x86_64_cvttsd2siq_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf2;
    x86_64_emit_rex(1,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x2c;
    x86_64_emit_reg((dreg),(reg));
}

void x86_64_cvttsd2si_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf2;
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x2c;
    x86_64_emit_reg((dreg),(reg));
}

/* divss (0xf3 0x0f 0x5e): dreg <- dreg / reg, scalar single. */
void x86_64_divss_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf3;
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x5e;
    x86_64_emit_reg((dreg),(reg));
}

/* divsd (0xf2 0x0f 0x5e): dreg <- dreg / reg, scalar double. */
void x86_64_divsd_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf2;
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x5e;
    x86_64_emit_reg((dreg),(reg));
}
/* GPR <-> XMM transfers.  0x66 0x0f 0x6e loads into an xmm register,
 * 0x66 0x0f 0x7e stores from one; with REX.W set these move 64 bits
 * (the movq form), without it 32 bits. */

/* freg <- reg, 64-bit (REX.W). */
void x86_64_movd_reg_freg(codegendata *cd, s8 reg, s8 freg) {
    *(cd->mcodeptr++) = 0x66;
    x86_64_emit_rex(1,(freg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x6e;
    x86_64_emit_reg((freg),(reg));
}

/* reg <- freg, 64-bit (REX.W). */
void x86_64_movd_freg_reg(codegendata *cd, s8 freg, s8 reg) {
    *(cd->mcodeptr++) = 0x66;
    x86_64_emit_rex(1,(freg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x7e;
    x86_64_emit_reg((freg),(reg));
}

/* store low 32 bits of xmm reg to [basereg + disp]. */
void x86_64_movd_reg_membase(codegendata *cd, s8 reg, s8 basereg, s8 disp) {
    *(cd->mcodeptr++) = 0x66;
    x86_64_emit_rex(0,(reg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x7e;
    x86_64_emit_membase((basereg),(disp),(reg));
}

/* store low 32 bits of xmm reg to [basereg + indexreg*scale + disp]. */
void x86_64_movd_reg_memindex(codegendata *cd, s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
    *(cd->mcodeptr++) = 0x66;
    x86_64_emit_rex(0,(reg),(indexreg),(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x7e;
    x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
}

/* dreg <- qword [basereg + disp] (REX.W: 64-bit load). */
void x86_64_movd_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 dreg) {
    *(cd->mcodeptr++) = 0x66;
    x86_64_emit_rex(1,(dreg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x6e;
    x86_64_emit_membase((basereg),(disp),(dreg));
}

/* dreg <- dword [basereg + disp] (32-bit load). */
void x86_64_movdl_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 dreg) {
    *(cd->mcodeptr++) = 0x66;
    x86_64_emit_rex(0,(dreg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x6e;
    x86_64_emit_membase((basereg),(disp),(dreg));
}

/* dreg <- dword [basereg + indexreg*scale + disp]. */
void x86_64_movd_memindex_reg(codegendata *cd, s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
    *(cd->mcodeptr++) = 0x66;
    x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x6e;
    x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
}
/* movq xmm,xmm (0xf3 0x0f 0x7e): dreg <- low 64 bits of reg; the
 * destination is in the ModRM reg field. */
void x86_64_movq_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf3;
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x7e;
    x86_64_emit_reg((dreg),(reg));
}

/* movq store (0x66 0x0f 0xd6): qword [basereg + disp] <- low 64 bits
 * of xmm reg. */
void x86_64_movq_reg_membase(codegendata *cd, s8 reg, s8 basereg, s8 disp) {
    *(cd->mcodeptr++) = 0x66;
    x86_64_emit_rex(0,(reg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0xd6;
    x86_64_emit_membase((basereg),(disp),(reg));
}

/* movq load (0xf3 0x0f 0x7e): dreg <- qword [basereg + disp], upper
 * half of dreg zeroed. */
void x86_64_movq_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 dreg) {
    *(cd->mcodeptr++) = 0xf3;
    x86_64_emit_rex(0,(dreg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x7e;
    x86_64_emit_membase((basereg),(disp),(dreg));
}
/* movss xmm,xmm (0xf3 0x0f 0x10).
 *
 * NOTE(review): opcode 0x10 takes its destination from the ModRM reg
 * field, which here holds the `reg` parameter -- so the encoded
 * instruction moves dreg into reg, the opposite of the (src, dst)
 * argument order used by x86_64_movq_reg_reg above.  Left untouched
 * because callers may depend on the current encoding; confirm the
 * intended operand order against the call sites before changing. */
void x86_64_movss_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf3;
    x86_64_emit_rex(0,(reg),0,(dreg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x10;
    x86_64_emit_reg((reg),(dreg));
}

/* movsd xmm,xmm (0xf2 0x0f 0x10) -- same operand-order caveat as
 * x86_64_movss_reg_reg above. */
void x86_64_movsd_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf2;
    x86_64_emit_rex(0,(reg),0,(dreg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x10;
    x86_64_emit_reg((reg),(dreg));
}

/* movss store (0xf3 0x0f 0x11): dword [basereg + disp] <- xmm reg. */
void x86_64_movss_reg_membase(codegendata *cd, s8 reg, s8 basereg, s8 disp) {
    *(cd->mcodeptr++) = 0xf3;
    x86_64_emit_rex(0,(reg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x11;
    x86_64_emit_membase((basereg),(disp),(reg));
}

/* As above, but with a forced REX byte and a fixed 32-bit
 * displacement: without this, the instruction would be one byte
 * shorter whenever all register indexes are below 8, and the
 * emitted size would not be predictable (these fixed-size forms are
 * presumably used where code is patched later -- see callers). */
void x86_64_movss_reg_membase32(codegendata *cd, s8 reg, s8 basereg, s8 disp) {
    *(cd->mcodeptr++) = 0xf3;
    x86_64_emit_byte_rex((reg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x11;
    x86_64_emit_membase32((basereg),(disp),(reg));
}

/* movsd store (0xf2 0x0f 0x11): qword [basereg + disp] <- xmm reg. */
void x86_64_movsd_reg_membase(codegendata *cd, s8 reg, s8 basereg, s8 disp) {
    *(cd->mcodeptr++) = 0xf2;
    x86_64_emit_rex(0,(reg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x11;
    x86_64_emit_membase((basereg),(disp),(reg));
}

/* Fixed-size movsd store: forced REX + 32-bit displacement (see
 * x86_64_movss_reg_membase32). */
void x86_64_movsd_reg_membase32(codegendata *cd, s8 reg, s8 basereg, s8 disp) {
    *(cd->mcodeptr++) = 0xf2;
    x86_64_emit_byte_rex((reg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x11;
    x86_64_emit_membase32((basereg),(disp),(reg));
}
/* movss load (0xf3 0x0f 0x10): dreg <- dword [basereg + disp]. */
void x86_64_movss_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 dreg) {
    *(cd->mcodeptr++) = 0xf3;
    x86_64_emit_rex(0,(dreg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x10;
    x86_64_emit_membase((basereg),(disp),(dreg));
}

/* Fixed-size movss load: a REX byte is always emitted and a 32-bit
 * displacement is used so the instruction length does not depend on
 * the register numbers (the short form would apply whenever all
 * indexes are below 8). */
void x86_64_movss_membase32_reg(codegendata *cd, s8 basereg, s8 disp, s8 dreg) {
    *(cd->mcodeptr++) = 0xf3;
    x86_64_emit_byte_rex((dreg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x10;
    x86_64_emit_membase32((basereg),(disp),(dreg));
}

/* movlps (0x0f 0x12): load 64 bits from [basereg + disp] into the low
 * quadword of dreg, upper half preserved. */
void x86_64_movlps_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 dreg) {
    x86_64_emit_rex(0,(dreg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x12;
    x86_64_emit_membase((basereg),(disp),(dreg));
}

/* movsd load (0xf2 0x0f 0x10): dreg <- qword [basereg + disp]. */
void x86_64_movsd_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 dreg) {
    *(cd->mcodeptr++) = 0xf2;
    x86_64_emit_rex(0,(dreg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x10;
    x86_64_emit_membase((basereg),(disp),(dreg));
}

/* Fixed-size movsd load: forced REX + 32-bit displacement (see
 * x86_64_movss_membase32_reg). */
void x86_64_movsd_membase32_reg(codegendata *cd, s8 basereg, s8 disp, s8 dreg) {
    *(cd->mcodeptr++) = 0xf2;
    x86_64_emit_byte_rex((dreg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x10;
    x86_64_emit_membase32((basereg),(disp),(dreg));
}

/* movlpd (0x66 0x0f 0x12): load 64 bits into the low quadword of
 * dreg, upper half preserved. */
void x86_64_movlpd_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 dreg) {
    *(cd->mcodeptr++) = 0x66;
    x86_64_emit_rex(0,(dreg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x12;
    x86_64_emit_membase((basereg),(disp),(dreg));
}
/* movss store, indexed: dword [basereg + indexreg*scale + disp] <-
 * xmm reg (0xf3 0x0f 0x11). */
void x86_64_movss_reg_memindex(codegendata *cd, s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
    *(cd->mcodeptr++) = 0xf3;
    x86_64_emit_rex(0,(reg),(indexreg),(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x11;
    x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
}

/* movsd store, indexed (0xf2 0x0f 0x11). */
void x86_64_movsd_reg_memindex(codegendata *cd, s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
    *(cd->mcodeptr++) = 0xf2;
    x86_64_emit_rex(0,(reg),(indexreg),(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x11;
    x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
}

/* movss load, indexed (0xf3 0x0f 0x10): dreg <- dword [basereg +
 * indexreg*scale + disp]. */
void x86_64_movss_memindex_reg(codegendata *cd, s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
    *(cd->mcodeptr++) = 0xf3;
    x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x10;
    x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
}

/* movsd load, indexed (0xf2 0x0f 0x10). */
void x86_64_movsd_memindex_reg(codegendata *cd, s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
    *(cd->mcodeptr++) = 0xf2;
    x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x10;
    x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
}
/* mulss (0xf3 0x0f 0x59): dreg <- dreg * reg, scalar single. */
void x86_64_mulss_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf3;
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x59;
    x86_64_emit_reg((dreg),(reg));
}

/* mulsd (0xf2 0x0f 0x59): dreg <- dreg * reg, scalar double. */
void x86_64_mulsd_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf2;
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x59;
    x86_64_emit_reg((dreg),(reg));
}

/* subss (0xf3 0x0f 0x5c): dreg <- dreg - reg, scalar single. */
void x86_64_subss_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf3;
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x5c;
    x86_64_emit_reg((dreg),(reg));
}

/* subsd (0xf2 0x0f 0x5c): dreg <- dreg - reg, scalar double. */
void x86_64_subsd_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0xf2;
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x5c;
    x86_64_emit_reg((dreg),(reg));
}
/* ucomiss (0x0f 0x2e): unordered compare of scalar singles, dreg
 * against reg; result goes to ZF/PF/CF. */
void x86_64_ucomiss_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x2e;
    x86_64_emit_reg((dreg),(reg));
}

/* ucomisd (0x66 0x0f 0x2e): unordered compare of scalar doubles. */
void x86_64_ucomisd_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0x66;
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x2e;
    x86_64_emit_reg((dreg),(reg));
}
/* xorps (0x0f 0x57): bitwise XOR of the full 128-bit registers,
 * dreg <- dreg ^ reg. */
void x86_64_xorps_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x57;
    x86_64_emit_reg((dreg),(reg));
}

/* xorps with a memory source: dreg <- dreg ^ [basereg + disp]. */
void x86_64_xorps_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 dreg) {
    x86_64_emit_rex(0,(dreg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x57;
    x86_64_emit_membase((basereg),(disp),(dreg));
}

/* xorpd (0x66 0x0f 0x57): packed-double XOR, dreg <- dreg ^ reg. */
void x86_64_xorpd_reg_reg(codegendata *cd, s8 reg, s8 dreg) {
    *(cd->mcodeptr++) = 0x66;
    x86_64_emit_rex(0,(dreg),0,(reg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x57;
    x86_64_emit_reg((dreg),(reg));
}

/* xorpd with a memory source: dreg <- dreg ^ [basereg + disp]. */
void x86_64_xorpd_membase_reg(codegendata *cd, s8 basereg, s8 disp, s8 dreg) {
    *(cd->mcodeptr++) = 0x66;
    x86_64_emit_rex(0,(dreg),0,(basereg));
    *(cd->mcodeptr++) = 0x0f;
    *(cd->mcodeptr++) = 0x57;
    x86_64_emit_membase((basereg),(disp),(dreg));
}
2023 * These are local overrides for various environment variables in Emacs.
2024 * Please do not remove this and leave it at the end of the file, where
2025 * Emacs will automagically detect them.
2026 * ---------------------------------------------------------------------
2029 * indent-tabs-mode: t