1 /* jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003
4 Institut f. Computersprachen, TU Wien
5 R. Grafl, A. Krall, C. Kruegel, C. Oates, R. Obermaisser, M. Probst,
6 S. Ring, E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich,
9 This file is part of CACAO.
11 This program is free software; you can redistribute it and/or
12 modify it under the terms of the GNU General Public License as
13 published by the Free Software Foundation; either version 2, or (at
14 your option) any later version.
16 This program is distributed in the hope that it will be useful, but
17 WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 General Public License for more details.
21 You should have received a copy of the GNU General Public License
22 along with this program; if not, write to the Free Software
23 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
26 Contact: cacao@complang.tuwien.ac.at
28 Authors: Andreas Krall
31 $Id: codegen.c 724 2003-12-09 18:56:11Z edwin $
36 #define _GNU_SOURCE /* we need this for signal handling */
50 #include "methodtable.h"
52 /* include independent code generation stuff */
53 #include "codegen.inc"
/* register description - array ************************************************/
59 /* #define REG_RES 0 reserved register for OS or code generator */
60 /* #define REG_RET 1 return value register */
61 /* #define REG_EXC 2 exception value register (only old jit) */
62 /* #define REG_SAV 3 (callee) saved register */
63 /* #define REG_TMP 4 scratch temporary register (caller saved) */
64 /* #define REG_ARG 5 argument register (caller saved) */
66 /* #define REG_END -1 last entry in tables */
69 REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
70 REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
75 int nregdescfloat[] = {
76 /* REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP, */
77 /* REG_RES, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV, REG_SAV, */
78 REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
79 REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
84 /* additional functions and macros to generate code ***************************/
86 #define BlockPtrOfPC(pc) ((basicblock *) iptr->target)
90 #define COUNT_SPILLS count_spills++
96 #define CALCOFFSETBYTES(var, reg, val) \
97 if ((s4) (val) < -128 || (s4) (val) > 127) (var) += 4; \
98 else if ((s4) (val) != 0) (var) += 1; \
99 else if ((reg) == RBP || (reg) == RSP || (reg) == R12 || (reg) == R13) (var) += 1;
102 #define CALCREGOFFBYTES(var, val) \
103 if ((val) > 15) (var) += 4; \
104 else if ((val) != 0) (var) += 1;
107 #define CALCIMMEDIATEBYTES(var, val) \
108 if ((s4) (val) < -128 || (s4) (val) > 127) (var) += 4; \
112 /* gen_nullptr_check(objreg) */
114 #ifdef SOFTNULLPTRCHECK
115 #define gen_nullptr_check(objreg) \
117 x86_64_test_reg_reg((objreg), (objreg)); \
118 x86_64_jcc(X86_64_CC_E, 0); \
119 codegen_addxnullrefs(mcodeptr); \
122 #define gen_nullptr_check(objreg)
126 /* MCODECHECK(icnt) */
128 #define MCODECHECK(icnt) \
129 if ((mcodeptr + (icnt)) > (u1*) mcodeend) mcodeptr = (u1*) codegen_increase((u1*) mcodeptr)
132 generates an integer-move from register a to b.
133 if a and b are the same int-register, no code will be generated.
136 #define M_INTMOVE(reg,dreg) \
137 if ((reg) != (dreg)) { \
138 x86_64_mov_reg_reg((reg),(dreg)); \
143 generates a floating-point-move from register a to b.
144 if a and b are the same float-register, no code will be generated
147 #define M_FLTMOVE(reg,dreg) \
148 if ((reg) != (dreg)) { \
149 x86_64_movq_reg_reg((reg),(dreg)); \
154 this function generates code to fetch data from a pseudo-register
155 into a real register.
156 If the pseudo-register has actually been assigned to a real
157 register, no code will be emitted, since following operations
158 can use this register directly.
160 v: pseudoregister to be fetched from
161 tempregnum: temporary register to be used if v is actually spilled to ram
163 return: the register number, where the operand can be found after
164 fetching (this will be either tempregnum or the register
165 number already given to v)
168 #define var_to_reg_int(regnr,v,tempnr) \
169 if ((v)->flags & INMEMORY) { \
171 if ((v)->type == TYPE_INT) { \
172 x86_64_movl_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
174 x86_64_mov_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
178 regnr = (v)->regoff; \
183 #define var_to_reg_flt(regnr,v,tempnr) \
184 if ((v)->flags & INMEMORY) { \
186 if ((v)->type == TYPE_FLT) { \
187 x86_64_movlps_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
189 x86_64_movlpd_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
191 /* x86_64_movq_membase_reg(REG_SP, (v)->regoff * 8, tempnr);*/ \
194 regnr = (v)->regoff; \
199 This function determines a register, to which the result of an operation
200 should go, when it is ultimately intended to store the result in
202 If v is assigned to an actual register, this register will be returned.
203 Otherwise (when v is spilled) this function returns tempregnum.
204 If not already done, regoff and flags are set in the stack location.
207 static int reg_of_var(stackptr v, int tempregnum)
211 switch (v->varkind) {
213 if (!(v->flags & INMEMORY))
217 var = &(interfaces[v->varnum][v->type]);
218 v->regoff = var->regoff;
219 if (!(var->flags & INMEMORY))
223 var = &(locals[v->varnum][v->type]);
224 v->regoff = var->regoff;
225 if (!(var->flags & INMEMORY))
229 v->regoff = v->varnum;
230 if (IS_FLT_DBL_TYPE(v->type)) {
231 if (v->varnum < FLT_ARG_CNT) {
232 v->regoff = argfltregs[v->varnum];
233 return(argfltregs[v->varnum]);
236 if (v->varnum < INT_ARG_CNT) {
237 v->regoff = argintregs[v->varnum];
238 return(argintregs[v->varnum]);
241 v->regoff -= INT_ARG_CNT;
244 v->flags |= INMEMORY;
249 /* store_reg_to_var_xxx:
250 This function generates the code to store the result of an operation
251 back into a spilled pseudo-variable.
252 If the pseudo-variable has not been spilled in the first place, this
253 function will generate nothing.
255 v ............ Pseudovariable
256 tempregnum ... Number of the temporary registers as returned by
260 #define store_reg_to_var_int(sptr, tempregnum) \
261 if ((sptr)->flags & INMEMORY) { \
263 x86_64_mov_reg_membase(tempregnum, REG_SP, (sptr)->regoff * 8); \
267 #define store_reg_to_var_flt(sptr, tempregnum) \
268 if ((sptr)->flags & INMEMORY) { \
270 x86_64_movq_reg_membase(tempregnum, REG_SP, (sptr)->regoff * 8); \
274 /* NullPointerException signal handler for hardware null pointer check */
276 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
280 /* long faultaddr; */
282 struct ucontext *_uc = (struct ucontext *) _p;
283 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
285 /* Reset signal handler - necessary for SysV, does no harm for BSD */
288 /* instr = *((int*)(sigctx->rip)); */
289 /* faultaddr = sigctx->sc_regs[(instr >> 16) & 0x1f]; */
291 /* if (faultaddr == 0) { */
292 signal(sig, (void *) catch_NullPointerException); /* reinstall handler */
294 sigaddset(&nsig, sig);
295 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
296 sigctx->rax = (long) proto_java_lang_NullPointerException; /* REG_ITMP1_XPTR */
297 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
298 sigctx->rip = (long) asm_handle_exception;
303 /* faultaddr += (long) ((instr << 16) >> 16); */
304 /* fprintf(stderr, "faulting address: 0x%08x\n", faultaddr); */
305 /* panic("Stack overflow"); */
310 /* ArithmeticException signal handler for hardware divide by zero check */
312 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
316 struct ucontext *_uc = (struct ucontext *) _p;
317 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
320 java_objectheader *p;
323 /* Reset signal handler - necessary for SysV, does no harm for BSD */
325 signal(sig, (void *) catch_ArithmeticException); /* reinstall handler */
327 sigaddset(&nsig, sig);
328 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
330 c = loader_load_sysclass(NULL,utf_new_char("java/lang/ArithmeticException"));
332 m = class_fetchmethod(c,
333 utf_new_char("<init>"),
334 utf_new_char("(Ljava/lang/String;)V"));
336 asm_calljavamethod(m, p, javastring_new_char("/ by zero"), NULL, NULL);
338 sigctx->rax = (long) p; /* REG_ITMP1_XPTR */
339 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
340 sigctx->rip = (long) asm_handle_exception;
346 void init_exceptions(void)
348 /* install signal handlers we need to convert to exceptions */
352 signal(SIGSEGV, (void *) catch_NullPointerException);
356 signal(SIGBUS, (void *) catch_NullPointerException);
360 signal(SIGFPE, (void *) catch_ArithmeticException);
364 /* function gen_mcode **********************************************************
366 generates machine code
368 *******************************************************************************/
370 /* global code generation pointer */
377 int len, s1, s2, s3, d;
391 /* space to save used callee saved registers */
393 savedregs_num += (savintregcnt - maxsavintreguse);
394 savedregs_num += (savfltregcnt - maxsavfltreguse);
396 parentargs_base = maxmemuse + savedregs_num;
398 #ifdef USE_THREADS /* space to save argument of monitor_enter */
400 if (checksync && (method->flags & ACC_SYNCHRONIZED))
405 /* keep stack 16-byte aligned for calls into libc */
407 if (!isleafmethod || runverbose) {
408 if ((parentargs_base % 2) == 0) {
413 /* create method header */
415 (void) dseg_addaddress(method); /* MethodPointer */
416 (void) dseg_adds4(parentargs_base * 8); /* FrameSize */
420 /* IsSync contains the offset relative to the stack pointer for the
421 argument of monitor_exit used in the exception handler. Since the
422 offset could be zero and give a wrong meaning of the flag it is
426 if (checksync && (method->flags & ACC_SYNCHRONIZED))
427 (void) dseg_adds4((maxmemuse + 1) * 8); /* IsSync */
432 (void) dseg_adds4(0); /* IsSync */
434 (void) dseg_adds4(isleafmethod); /* IsLeaf */
435 (void) dseg_adds4(savintregcnt - maxsavintreguse); /* IntSave */
436 (void) dseg_adds4(savfltregcnt - maxsavfltreguse); /* FltSave */
437 (void) dseg_adds4(exceptiontablelength); /* ExTableSize */
439 /* create exception table */
441 for (ex = extable; ex != NULL; ex = ex->down) {
444 if (ex->start != NULL)
445 printf("adding start - %d - ", ex->start->debug_nr);
447 printf("PANIC - start is NULL");
452 dseg_addtarget(ex->start);
456 printf("adding end - %d - ", ex->end->debug_nr);
458 printf("PANIC - end is NULL");
463 dseg_addtarget(ex->end);
466 if (ex->handler != NULL)
467 printf("adding handler - %d\n", ex->handler->debug_nr);
469 printf("PANIC - handler is NULL");
474 dseg_addtarget(ex->handler);
476 (void) dseg_addaddress(ex->catchtype);
479 /* initialize mcode variables */
481 mcodeptr = (u1*) mcodebase;
482 mcodeend = (s4*) (mcodebase + mcodesize);
483 MCODECHECK(128 + mparamcount);
485 /* create stack frame (if necessary) */
487 if (parentargs_base) {
488 x86_64_alu_imm_reg(X86_64_SUB, parentargs_base * 8, REG_SP);
491 /* save return address and used callee saved registers */
494 for (r = savintregcnt - 1; r >= maxsavintreguse; r--) {
495 p--; x86_64_mov_reg_membase(savintregs[r], REG_SP, p * 8);
497 for (r = savfltregcnt - 1; r >= maxsavfltreguse; r--) {
498 p--; x86_64_movq_reg_membase(savfltregs[r], REG_SP, p * 8);
501 /* save monitorenter argument */
504 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
505 if (method->flags & ACC_STATIC) {
506 x86_64_mov_imm_reg((s8) class, REG_ITMP1);
507 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, maxmemuse * 8);
510 x86_64_mov_reg_membase(argintregs[0], REG_SP, maxmemuse * 8);
515 /* copy argument registers to stack and call trace function with pointer
516 to arguments on stack.
519 x86_64_alu_imm_reg(X86_64_SUB, (6 + 8 + 1 + 1) * 8, REG_SP);
521 x86_64_mov_reg_membase(argintregs[0], REG_SP, 1 * 8);
522 x86_64_mov_reg_membase(argintregs[1], REG_SP, 2 * 8);
523 x86_64_mov_reg_membase(argintregs[2], REG_SP, 3 * 8);
524 x86_64_mov_reg_membase(argintregs[3], REG_SP, 4 * 8);
525 x86_64_mov_reg_membase(argintregs[4], REG_SP, 5 * 8);
526 x86_64_mov_reg_membase(argintregs[5], REG_SP, 6 * 8);
528 x86_64_movq_reg_membase(argfltregs[0], REG_SP, 7 * 8);
529 x86_64_movq_reg_membase(argfltregs[1], REG_SP, 8 * 8);
530 x86_64_movq_reg_membase(argfltregs[2], REG_SP, 9 * 8);
531 x86_64_movq_reg_membase(argfltregs[3], REG_SP, 10 * 8);
532 /* x86_64_movq_reg_membase(argfltregs[4], REG_SP, 11 * 8); */
533 /* x86_64_movq_reg_membase(argfltregs[5], REG_SP, 12 * 8); */
534 /* x86_64_movq_reg_membase(argfltregs[6], REG_SP, 13 * 8); */
535 /* x86_64_movq_reg_membase(argfltregs[7], REG_SP, 14 * 8); */
537 for (p = 0, l = 0; p < mparamcount; p++) {
540 if (IS_FLT_DBL_TYPE(t)) {
541 for (s1 = (mparamcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : mparamcount - 2; s1 >= p; s1--) {
542 x86_64_mov_reg_reg(argintregs[s1], argintregs[s1 + 1]);
545 x86_64_movd_freg_reg(argfltregs[l], argintregs[p]);
550 x86_64_mov_imm_reg((s8) method, REG_ITMP2);
551 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, 0 * 8);
552 x86_64_mov_imm_reg((s8) builtin_trace_args, REG_ITMP1);
553 x86_64_call_reg(REG_ITMP1);
555 x86_64_mov_membase_reg(REG_SP, 1 * 8, argintregs[0]);
556 x86_64_mov_membase_reg(REG_SP, 2 * 8, argintregs[1]);
557 x86_64_mov_membase_reg(REG_SP, 3 * 8, argintregs[2]);
558 x86_64_mov_membase_reg(REG_SP, 4 * 8, argintregs[3]);
559 x86_64_mov_membase_reg(REG_SP, 5 * 8, argintregs[4]);
560 x86_64_mov_membase_reg(REG_SP, 6 * 8, argintregs[5]);
562 x86_64_movq_membase_reg(REG_SP, 7 * 8, argfltregs[0]);
563 x86_64_movq_membase_reg(REG_SP, 8 * 8, argfltregs[1]);
564 x86_64_movq_membase_reg(REG_SP, 9 * 8, argfltregs[2]);
565 x86_64_movq_membase_reg(REG_SP, 10 * 8, argfltregs[3]);
566 /* x86_64_movq_membase_reg(REG_SP, 11 * 8, argfltregs[4]); */
567 /* x86_64_movq_membase_reg(REG_SP, 12 * 8, argfltregs[5]); */
568 /* x86_64_movq_membase_reg(REG_SP, 13 * 8, argfltregs[6]); */
569 /* x86_64_movq_membase_reg(REG_SP, 14 * 8, argfltregs[7]); */
571 x86_64_alu_imm_reg(X86_64_ADD, (6 + 8 + 1 + 1) * 8, REG_SP);
574 /* take arguments out of register or stack frame */
576 for (p = 0, l = 0, s1 = 0, s2 = 0; p < mparamcount; p++) {
578 var = &(locals[l][t]);
580 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
583 if (IS_INT_LNG_TYPE(t)) {
591 if (IS_INT_LNG_TYPE(t)) { /* integer args */
592 if (s1 < INT_ARG_CNT) { /* register arguments */
593 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
594 M_INTMOVE(argintregs[s1], r);
596 } else { /* reg arg -> spilled */
597 x86_64_mov_reg_membase(argintregs[s1], REG_SP, r * 8);
599 } else { /* stack arguments */
600 pa = s1 - INT_ARG_CNT;
601 if (s2 >= FLT_ARG_CNT) {
602 pa += s2 - FLT_ARG_CNT;
604 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
605 x86_64_mov_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, r); /* + 8 for return address */
606 } else { /* stack arg -> spilled */
607 x86_64_mov_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1); /* + 8 for return address */
608 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, r * 8);
613 } else { /* floating args */
614 if (s2 < FLT_ARG_CNT) { /* register arguments */
615 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
616 M_FLTMOVE(argfltregs[s2], r);
618 } else { /* reg arg -> spilled */
619 x86_64_movq_reg_membase(argfltregs[s2], REG_SP, r * 8);
622 } else { /* stack arguments */
623 pa = s2 - FLT_ARG_CNT;
624 if (s1 >= INT_ARG_CNT) {
625 pa += s1 - INT_ARG_CNT;
627 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
628 x86_64_movq_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, r);
631 x86_64_movq_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
632 x86_64_movq_reg_membase(REG_FTMP1, REG_SP, r * 8);
639 /* call monitorenter function */
642 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
643 x86_64_mov_membase_reg(REG_SP, 8 * maxmemuse, argintregs[0]);
644 x86_64_mov_imm_reg((s8) builtin_monitorenter, REG_ITMP1);
645 x86_64_call_reg(REG_ITMP1);
650 /* end of header generation */
652 /* walk through all basic blocks */
653 for (/* bbs = block_count, */ bptr = block; /* --bbs >= 0 */ bptr != NULL; bptr = bptr->next) {
655 bptr->mpc = (int)((u1*) mcodeptr - mcodebase);
657 if (bptr->flags >= BBREACHED) {
659 /* branch resolving */
662 for (brefs = bptr->branchrefs; brefs != NULL; brefs = brefs->next) {
663 gen_resolvebranch((u1*) mcodebase + brefs->branchpos,
664 brefs->branchpos, bptr->mpc);
667 /* copy interface registers to their destination */
672 while (src != NULL) {
674 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
675 if (bptr->type == BBTYPE_SBR) {
676 d = reg_of_var(src, REG_ITMP1);
678 store_reg_to_var_int(src, d);
680 } else if (bptr->type == BBTYPE_EXH) {
681 d = reg_of_var(src, REG_ITMP1);
682 M_INTMOVE(REG_ITMP1, d);
683 store_reg_to_var_int(src, d);
687 d = reg_of_var(src, REG_ITMP1);
688 if ((src->varkind != STACKVAR)) {
690 if (IS_FLT_DBL_TYPE(s2)) {
691 s1 = interfaces[len][s2].regoff;
692 if (!(interfaces[len][s2].flags & INMEMORY)) {
696 x86_64_movq_membase_reg(REG_SP, s1 * 8, d);
698 store_reg_to_var_flt(src, d);
701 s1 = interfaces[len][s2].regoff;
702 if (!(interfaces[len][s2].flags & INMEMORY)) {
706 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
708 store_reg_to_var_int(src, d);
715 /* walk through all instructions */
719 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
721 MCODECHECK(64); /* an instruction usually needs < 64 words */
724 case ICMD_NOP: /* ... ==> ... */
727 case ICMD_NULLCHECKPOP: /* ..., objectref ==> ... */
728 if (src->flags & INMEMORY) {
729 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
732 x86_64_test_reg_reg(src->regoff, src->regoff);
734 x86_64_jcc(X86_64_CC_E, 0);
735 codegen_addxnullrefs(mcodeptr);
738 /* constant operations ************************************************/
740 case ICMD_ICONST: /* ... ==> ..., constant */
741 /* op1 = 0, val.i = constant */
743 d = reg_of_var(iptr->dst, REG_ITMP1);
744 if (iptr->val.i == 0) {
745 x86_64_alu_reg_reg(X86_64_XOR, d, d);
747 x86_64_movl_imm_reg(iptr->val.i, d);
749 store_reg_to_var_int(iptr->dst, d);
752 case ICMD_ACONST: /* ... ==> ..., constant */
753 /* op1 = 0, val.a = constant */
755 d = reg_of_var(iptr->dst, REG_ITMP1);
756 if (iptr->val.a == 0) {
757 x86_64_alu_reg_reg(X86_64_XOR, d, d);
759 x86_64_mov_imm_reg((s8) iptr->val.a, d);
761 store_reg_to_var_int(iptr->dst, d);
764 case ICMD_LCONST: /* ... ==> ..., constant */
765 /* op1 = 0, val.l = constant */
767 d = reg_of_var(iptr->dst, REG_ITMP1);
768 if (iptr->val.l == 0) {
769 x86_64_alu_reg_reg(X86_64_XOR, d, d);
771 x86_64_mov_imm_reg(iptr->val.l, d);
773 store_reg_to_var_int(iptr->dst, d);
776 case ICMD_FCONST: /* ... ==> ..., constant */
777 /* op1 = 0, val.f = constant */
779 d = reg_of_var(iptr->dst, REG_FTMP1);
780 a = dseg_addfloat(iptr->val.f);
781 x86_64_movdl_membase_reg(RIP, -(((s8) mcodeptr + ((d > 7) ? 9 : 8)) - (s8) mcodebase) + a, d);
782 store_reg_to_var_flt(iptr->dst, d);
785 case ICMD_DCONST: /* ... ==> ..., constant */
786 /* op1 = 0, val.d = constant */
788 d = reg_of_var(iptr->dst, REG_FTMP1);
789 a = dseg_adddouble(iptr->val.d);
790 x86_64_movd_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, d);
791 store_reg_to_var_flt(iptr->dst, d);
795 /* load/store operations **********************************************/
797 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
798 /* op1 = local variable */
800 d = reg_of_var(iptr->dst, REG_ITMP1);
801 if ((iptr->dst->varkind == LOCALVAR) &&
802 (iptr->dst->varnum == iptr->op1)) {
805 var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
806 if (var->flags & INMEMORY) {
807 x86_64_movl_membase_reg(REG_SP, var->regoff * 8, d);
808 store_reg_to_var_int(iptr->dst, d);
811 if (iptr->dst->flags & INMEMORY) {
812 x86_64_mov_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
815 M_INTMOVE(var->regoff, d);
820 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
821 case ICMD_ALOAD: /* op1 = local variable */
823 d = reg_of_var(iptr->dst, REG_ITMP1);
824 if ((iptr->dst->varkind == LOCALVAR) &&
825 (iptr->dst->varnum == iptr->op1)) {
828 var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
829 if (var->flags & INMEMORY) {
830 x86_64_mov_membase_reg(REG_SP, var->regoff * 8, d);
831 store_reg_to_var_int(iptr->dst, d);
834 if (iptr->dst->flags & INMEMORY) {
835 x86_64_mov_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
838 M_INTMOVE(var->regoff, d);
843 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
844 case ICMD_DLOAD: /* op1 = local variable */
846 d = reg_of_var(iptr->dst, REG_FTMP1);
847 if ((iptr->dst->varkind == LOCALVAR) &&
848 (iptr->dst->varnum == iptr->op1)) {
851 var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
852 if (var->flags & INMEMORY) {
853 x86_64_movq_membase_reg(REG_SP, var->regoff * 8, d);
854 store_reg_to_var_flt(iptr->dst, d);
857 if (iptr->dst->flags & INMEMORY) {
858 x86_64_movq_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
861 M_FLTMOVE(var->regoff, d);
866 case ICMD_ISTORE: /* ..., value ==> ... */
867 case ICMD_LSTORE: /* op1 = local variable */
870 if ((src->varkind == LOCALVAR) &&
871 (src->varnum == iptr->op1)) {
874 var = &(locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
875 if (var->flags & INMEMORY) {
876 var_to_reg_int(s1, src, REG_ITMP1);
877 x86_64_mov_reg_membase(s1, REG_SP, var->regoff * 8);
880 var_to_reg_int(s1, src, var->regoff);
881 M_INTMOVE(s1, var->regoff);
885 case ICMD_FSTORE: /* ..., value ==> ... */
886 case ICMD_DSTORE: /* op1 = local variable */
888 if ((src->varkind == LOCALVAR) &&
889 (src->varnum == iptr->op1)) {
892 var = &(locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
893 if (var->flags & INMEMORY) {
894 var_to_reg_flt(s1, src, REG_FTMP1);
895 x86_64_movq_reg_membase(s1, REG_SP, var->regoff * 8);
898 var_to_reg_flt(s1, src, var->regoff);
899 M_FLTMOVE(s1, var->regoff);
904 /* pop/dup/swap operations ********************************************/
906 /* attention: double and longs are only one entry in CACAO ICMDs */
908 case ICMD_POP: /* ..., value ==> ... */
909 case ICMD_POP2: /* ..., value, value ==> ... */
912 #define M_COPY(from,to) \
913 d = reg_of_var(to, REG_ITMP1); \
914 if ((from->regoff != to->regoff) || \
915 ((from->flags ^ to->flags) & INMEMORY)) { \
916 if (IS_FLT_DBL_TYPE(from->type)) { \
917 var_to_reg_flt(s1, from, d); \
919 store_reg_to_var_flt(to, d); \
921 var_to_reg_int(s1, from, d); \
923 store_reg_to_var_int(to, d); \
927 case ICMD_DUP: /* ..., a ==> ..., a, a */
928 M_COPY(src, iptr->dst);
931 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
933 M_COPY(src, iptr->dst->prev->prev);
935 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
937 M_COPY(src, iptr->dst);
938 M_COPY(src->prev, iptr->dst->prev);
941 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
943 M_COPY(src->prev, iptr->dst->prev->prev->prev);
945 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
947 M_COPY(src, iptr->dst);
948 M_COPY(src->prev, iptr->dst->prev);
949 M_COPY(src->prev->prev, iptr->dst->prev->prev);
950 M_COPY(src, iptr->dst->prev->prev->prev);
953 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
955 M_COPY(src, iptr->dst);
956 M_COPY(src->prev, iptr->dst->prev);
957 M_COPY(src->prev->prev, iptr->dst->prev->prev);
958 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
959 M_COPY(src, iptr->dst->prev->prev->prev->prev);
960 M_COPY(src->prev, iptr->dst->prev->prev->prev->prev->prev);
963 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
965 M_COPY(src, iptr->dst->prev);
966 M_COPY(src->prev, iptr->dst);
970 /* integer operations *************************************************/
972 case ICMD_INEG: /* ..., value ==> ..., - value */
974 d = reg_of_var(iptr->dst, REG_NULL);
975 if (iptr->dst->flags & INMEMORY) {
976 if (src->flags & INMEMORY) {
977 if (src->regoff == iptr->dst->regoff) {
978 x86_64_negl_membase(REG_SP, iptr->dst->regoff * 8);
981 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
982 x86_64_negl_reg(REG_ITMP1);
983 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
987 x86_64_movl_reg_membase(src->regoff, REG_SP, iptr->dst->regoff * 8);
988 x86_64_negl_membase(REG_SP, iptr->dst->regoff * 8);
992 if (src->flags & INMEMORY) {
993 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
997 M_INTMOVE(src->regoff, iptr->dst->regoff);
998 x86_64_negl_reg(iptr->dst->regoff);
1003 case ICMD_LNEG: /* ..., value ==> ..., - value */
1005 d = reg_of_var(iptr->dst, REG_NULL);
1006 if (iptr->dst->flags & INMEMORY) {
1007 if (src->flags & INMEMORY) {
1008 if (src->regoff == iptr->dst->regoff) {
1009 x86_64_neg_membase(REG_SP, iptr->dst->regoff * 8);
1012 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1013 x86_64_neg_reg(REG_ITMP1);
1014 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1018 x86_64_mov_reg_membase(src->regoff, REG_SP, iptr->dst->regoff * 8);
1019 x86_64_neg_membase(REG_SP, iptr->dst->regoff * 8);
1023 if (src->flags & INMEMORY) {
1024 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1025 x86_64_neg_reg(iptr->dst->regoff);
1028 M_INTMOVE(src->regoff, iptr->dst->regoff);
1029 x86_64_neg_reg(iptr->dst->regoff);
1034 case ICMD_I2L: /* ..., value ==> ..., value */
1036 d = reg_of_var(iptr->dst, REG_ITMP3);
1037 if (src->flags & INMEMORY) {
1038 x86_64_movslq_membase_reg(REG_SP, src->regoff * 8, d);
1041 x86_64_movslq_reg_reg(src->regoff, d);
1043 store_reg_to_var_int(iptr->dst, d);
1046 case ICMD_L2I: /* ..., value ==> ..., value */
1048 var_to_reg_int(s1, src, REG_ITMP1);
1049 d = reg_of_var(iptr->dst, REG_ITMP3);
1051 store_reg_to_var_int(iptr->dst, d);
1054 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
1056 d = reg_of_var(iptr->dst, REG_ITMP3);
1057 if (src->flags & INMEMORY) {
1058 x86_64_movsbq_membase_reg(REG_SP, src->regoff * 8, d);
1061 x86_64_movsbq_reg_reg(src->regoff, d);
1063 store_reg_to_var_int(iptr->dst, d);
1066 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
1068 d = reg_of_var(iptr->dst, REG_ITMP3);
1069 if (src->flags & INMEMORY) {
1070 x86_64_movzwq_membase_reg(REG_SP, src->regoff * 8, d);
1073 x86_64_movzwq_reg_reg(src->regoff, d);
1075 store_reg_to_var_int(iptr->dst, d);
1078 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
1080 d = reg_of_var(iptr->dst, REG_ITMP3);
1081 if (src->flags & INMEMORY) {
1082 x86_64_movswq_membase_reg(REG_SP, src->regoff * 8, d);
1085 x86_64_movswq_reg_reg(src->regoff, d);
1087 store_reg_to_var_int(iptr->dst, d);
1091 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1093 d = reg_of_var(iptr->dst, REG_NULL);
1094 x86_64_emit_ialu(X86_64_ADD, src, iptr);
1097 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
1098 /* val.i = constant */
1100 d = reg_of_var(iptr->dst, REG_NULL);
1101 x86_64_emit_ialuconst(X86_64_ADD, src, iptr);
1104 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1106 d = reg_of_var(iptr->dst, REG_NULL);
1107 x86_64_emit_lalu(X86_64_ADD, src, iptr);
1110 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
1111 /* val.l = constant */
1113 d = reg_of_var(iptr->dst, REG_NULL);
1114 x86_64_emit_laluconst(X86_64_ADD, src, iptr);
1117 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1119 d = reg_of_var(iptr->dst, REG_NULL);
1120 if (iptr->dst->flags & INMEMORY) {
1121 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1122 if (src->prev->regoff == iptr->dst->regoff) {
1123 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1124 x86_64_alul_reg_membase(X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1127 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1128 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1129 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1132 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1133 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1134 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1135 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1137 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1138 if (src->prev->regoff == iptr->dst->regoff) {
1139 x86_64_alul_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1142 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1143 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1144 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1148 x86_64_movl_reg_membase(src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1149 x86_64_alul_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1153 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1154 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, d);
1155 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1157 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1158 M_INTMOVE(src->prev->regoff, d);
1159 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1161 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1162 /* workaround for reg alloc */
1163 if (src->regoff == iptr->dst->regoff) {
1164 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1165 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1166 M_INTMOVE(REG_ITMP1, d);
1169 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, d);
1170 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, d);
1174 /* workaround for reg alloc */
1175 if (src->regoff == iptr->dst->regoff) {
1176 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1177 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1178 M_INTMOVE(REG_ITMP1, d);
1181 M_INTMOVE(src->prev->regoff, d);
1182 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, d);
1188 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
1189 /* val.i = constant */
1191 d = reg_of_var(iptr->dst, REG_NULL);
1192 x86_64_emit_ialuconst(X86_64_SUB, src, iptr);
1195 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1197 d = reg_of_var(iptr->dst, REG_NULL);
1198 if (iptr->dst->flags & INMEMORY) {
1199 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1200 if (src->prev->regoff == iptr->dst->regoff) {
1201 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1202 x86_64_alu_reg_membase(X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1205 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1206 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1207 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1210 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1211 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1212 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1213 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1215 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1216 if (src->prev->regoff == iptr->dst->regoff) {
1217 x86_64_alu_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1220 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1221 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1222 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1226 x86_64_mov_reg_membase(src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1227 x86_64_alu_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1231 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1232 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, d);
1233 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1235 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1236 M_INTMOVE(src->prev->regoff, d);
1237 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1239 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1240 /* workaround for reg alloc */
1241 if (src->regoff == iptr->dst->regoff) {
1242 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1243 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1244 M_INTMOVE(REG_ITMP1, d);
1247 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, d);
1248 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, d);
1252 /* workaround for reg alloc */
1253 if (src->regoff == iptr->dst->regoff) {
1254 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1255 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1256 M_INTMOVE(REG_ITMP1, d);
1259 M_INTMOVE(src->prev->regoff, d);
1260 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, d);
1266 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1267 /* val.l = constant */
1269 d = reg_of_var(iptr->dst, REG_NULL);
1270 x86_64_emit_laluconst(X86_64_SUB, src, iptr);
1273 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1275 d = reg_of_var(iptr->dst, REG_NULL);
1276 if (iptr->dst->flags & INMEMORY) {
1277 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1278 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1279 x86_64_imull_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1280 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1282 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1283 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1284 x86_64_imull_reg_reg(src->prev->regoff, REG_ITMP1);
1285 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1287 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1288 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1289 x86_64_imull_reg_reg(src->regoff, REG_ITMP1);
1290 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1293 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1294 x86_64_imull_reg_reg(src->regoff, REG_ITMP1);
1295 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1299 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1300 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1301 x86_64_imull_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1303 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1304 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1305 x86_64_imull_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1307 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1308 M_INTMOVE(src->regoff, iptr->dst->regoff);
1309 x86_64_imull_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1312 if (src->regoff == iptr->dst->regoff) {
1313 x86_64_imull_reg_reg(src->prev->regoff, iptr->dst->regoff);
1316 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1317 x86_64_imull_reg_reg(src->regoff, iptr->dst->regoff);
1323 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1324 /* val.i = constant */
1326 d = reg_of_var(iptr->dst, REG_NULL);
1327 if (iptr->dst->flags & INMEMORY) {
1328 if (src->flags & INMEMORY) {
1329 x86_64_imull_imm_membase_reg(iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1330 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1333 x86_64_imull_imm_reg_reg(iptr->val.i, src->regoff, REG_ITMP1);
1334 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1338 if (src->flags & INMEMORY) {
1339 x86_64_imull_imm_membase_reg(iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1342 if (iptr->val.i == 2) {
1343 M_INTMOVE(src->regoff, iptr->dst->regoff);
1344 x86_64_alul_reg_reg(X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1347 x86_64_imull_imm_reg_reg(iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
1353 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1355 d = reg_of_var(iptr->dst, REG_NULL);
1356 if (iptr->dst->flags & INMEMORY) {
1357 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1358 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1359 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1360 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1362 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1363 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1364 x86_64_imul_reg_reg(src->prev->regoff, REG_ITMP1);
1365 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1367 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1368 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1369 x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1370 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1373 x86_64_mov_reg_reg(src->prev->regoff, REG_ITMP1);
1374 x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1375 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1379 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1380 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1381 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1383 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1384 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1385 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1387 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1388 M_INTMOVE(src->regoff, iptr->dst->regoff);
1389 x86_64_imul_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1392 if (src->regoff == iptr->dst->regoff) {
1393 x86_64_imul_reg_reg(src->prev->regoff, iptr->dst->regoff);
1396 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1397 x86_64_imul_reg_reg(src->regoff, iptr->dst->regoff);
1403 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1404 /* val.l = constant */
1406 d = reg_of_var(iptr->dst, REG_NULL);
1407 if (iptr->dst->flags & INMEMORY) {
1408 if (src->flags & INMEMORY) {
1409 if (x86_64_is_imm32(iptr->val.l)) {
1410 x86_64_imul_imm_membase_reg(iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1413 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1414 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1416 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1419 if (x86_64_is_imm32(iptr->val.l)) {
1420 x86_64_imul_imm_reg_reg(iptr->val.l, src->regoff, REG_ITMP1);
1423 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1424 x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1426 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1430 if (src->flags & INMEMORY) {
1431 if (x86_64_is_imm32(iptr->val.l)) {
1432 x86_64_imul_imm_membase_reg(iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1435 x86_64_mov_imm_reg(iptr->val.l, iptr->dst->regoff);
1436 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1440 /* should match in many cases */
1441 if (iptr->val.l == 2) {
1442 M_INTMOVE(src->regoff, iptr->dst->regoff);
1443 x86_64_alul_reg_reg(X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1446 if (x86_64_is_imm32(iptr->val.l)) {
1447 x86_64_imul_imm_reg_reg(iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1450 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1451 M_INTMOVE(src->regoff, iptr->dst->regoff);
1452 x86_64_imul_reg_reg(REG_ITMP1, iptr->dst->regoff);
1459 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1461 d = reg_of_var(iptr->dst, REG_NULL);
1462 if (src->prev->flags & INMEMORY) {
1463 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, RAX);
1466 M_INTMOVE(src->prev->regoff, RAX);
1469 if (src->flags & INMEMORY) {
1470 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1473 M_INTMOVE(src->regoff, REG_ITMP3);
1476 x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1477 x86_64_jcc(X86_64_CC_NE, 4 + 6);
1478 x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1479 x86_64_jcc(X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1481 x86_64_mov_reg_reg(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1483 x86_64_idivl_reg(REG_ITMP3);
1485 if (iptr->dst->flags & INMEMORY) {
1486 x86_64_mov_reg_membase(RAX, REG_SP, iptr->dst->regoff * 8);
1487 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1490 M_INTMOVE(RAX, iptr->dst->regoff);
1492 if (iptr->dst->regoff != RDX) {
1493 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1498 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1500 d = reg_of_var(iptr->dst, REG_NULL);
1501 if (src->prev->flags & INMEMORY) {
1502 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, RAX);
1505 M_INTMOVE(src->prev->regoff, RAX);
1508 if (src->flags & INMEMORY) {
1509 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1512 M_INTMOVE(src->regoff, REG_ITMP3);
1515 x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1516 x86_64_jcc(X86_64_CC_NE, 2 + 4 + 6);
1517 x86_64_alul_reg_reg(X86_64_XOR, RDX, RDX); /* 2 bytes */
1518 x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1519 x86_64_jcc(X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1521 x86_64_mov_reg_reg(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1523 x86_64_idivl_reg(REG_ITMP3);
1525 if (iptr->dst->flags & INMEMORY) {
1526 x86_64_mov_reg_membase(RDX, REG_SP, iptr->dst->regoff * 8);
1527 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1530 M_INTMOVE(RDX, iptr->dst->regoff);
1532 if (iptr->dst->regoff != RDX) {
1533 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1538 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1539 /* val.i = constant */
1541 var_to_reg_int(s1, src, REG_ITMP1);
1542 d = reg_of_var(iptr->dst, REG_ITMP3);
1543 M_INTMOVE(s1, REG_ITMP1);
1544 x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1545 x86_64_leal_membase_reg(REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1546 x86_64_cmovccl_reg_reg(X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1547 x86_64_shiftl_imm_reg(X86_64_SAR, iptr->val.i, REG_ITMP1);
1548 x86_64_mov_reg_reg(REG_ITMP1, d);
1549 store_reg_to_var_int(iptr->dst, d);
1552 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1553 /* val.i = constant */
1555 var_to_reg_int(s1, src, REG_ITMP1);
1556 d = reg_of_var(iptr->dst, REG_ITMP3);
1557 M_INTMOVE(s1, REG_ITMP1);
1558 x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1559 x86_64_leal_membase_reg(REG_ITMP1, iptr->val.i, REG_ITMP2);
1560 x86_64_cmovccl_reg_reg(X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1561 x86_64_alul_imm_reg(X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1562 x86_64_alul_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
1563 x86_64_mov_reg_reg(REG_ITMP1, d);
1564 store_reg_to_var_int(iptr->dst, d);
1568 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1570 d = reg_of_var(iptr->dst, REG_NULL);
1571 if (src->prev->flags & INMEMORY) {
1572 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1575 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1578 if (src->flags & INMEMORY) {
1579 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1582 M_INTMOVE(src->regoff, REG_ITMP3);
1585 x86_64_mov_imm_reg(0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1586 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
1587 x86_64_jcc(X86_64_CC_NE, 4 + 6);
1588 x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1589 x86_64_jcc(X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1591 x86_64_mov_reg_reg(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1593 x86_64_idiv_reg(REG_ITMP3);
1595 if (iptr->dst->flags & INMEMORY) {
1596 x86_64_mov_reg_membase(RAX, REG_SP, iptr->dst->regoff * 8);
1597 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1600 M_INTMOVE(RAX, iptr->dst->regoff);
1602 if (iptr->dst->regoff != RDX) {
1603 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1608 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1610 d = reg_of_var(iptr->dst, REG_NULL);
1611 if (src->prev->flags & INMEMORY) {
1612 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1615 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1618 if (src->flags & INMEMORY) {
1619 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1622 M_INTMOVE(src->regoff, REG_ITMP3);
1625 x86_64_mov_imm_reg(0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1626 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
1627 x86_64_jcc(X86_64_CC_NE, 2 + 4 + 6);
1628 x86_64_alul_reg_reg(X86_64_XOR, RDX, RDX); /* 2 bytes */
1629 x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1630 x86_64_jcc(X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1632 x86_64_mov_reg_reg(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1634 x86_64_idiv_reg(REG_ITMP3);
1636 if (iptr->dst->flags & INMEMORY) {
1637 x86_64_mov_reg_membase(RDX, REG_SP, iptr->dst->regoff * 8);
1638 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1641 M_INTMOVE(RDX, iptr->dst->regoff);
1643 if (iptr->dst->regoff != RDX) {
1644 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1649 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1650 /* val.i = constant */
1652 var_to_reg_int(s1, src, REG_ITMP1);
1653 d = reg_of_var(iptr->dst, REG_ITMP3);
1654 M_INTMOVE(s1, REG_ITMP1);
1655 x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1656 x86_64_lea_membase_reg(REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1657 x86_64_cmovcc_reg_reg(X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1658 x86_64_shift_imm_reg(X86_64_SAR, iptr->val.i, REG_ITMP1);
1659 x86_64_mov_reg_reg(REG_ITMP1, d);
1660 store_reg_to_var_int(iptr->dst, d);
1663 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1664 /* val.l = constant */
1666 var_to_reg_int(s1, src, REG_ITMP1);
1667 d = reg_of_var(iptr->dst, REG_ITMP3);
1668 M_INTMOVE(s1, REG_ITMP1);
1669 x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1670 x86_64_lea_membase_reg(REG_ITMP1, iptr->val.i, REG_ITMP2);
1671 x86_64_cmovcc_reg_reg(X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1672 x86_64_alu_imm_reg(X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1673 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
1674 x86_64_mov_reg_reg(REG_ITMP1, d);
1675 store_reg_to_var_int(iptr->dst, d);
1678 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1680 d = reg_of_var(iptr->dst, REG_NULL);
1681 x86_64_emit_ishift(X86_64_SHL, src, iptr);
1684 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1685 /* val.i = constant */
1687 d = reg_of_var(iptr->dst, REG_NULL);
1688 x86_64_emit_ishiftconst(X86_64_SHL, src, iptr);
1691 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1693 d = reg_of_var(iptr->dst, REG_NULL);
1694 x86_64_emit_ishift(X86_64_SAR, src, iptr);
1697 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1698 /* val.i = constant */
1700 d = reg_of_var(iptr->dst, REG_NULL);
1701 x86_64_emit_ishiftconst(X86_64_SAR, src, iptr);
1704 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1706 d = reg_of_var(iptr->dst, REG_NULL);
1707 x86_64_emit_ishift(X86_64_SHR, src, iptr);
1710 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1711 /* val.i = constant */
1713 d = reg_of_var(iptr->dst, REG_NULL);
1714 x86_64_emit_ishiftconst(X86_64_SHR, src, iptr);
1717 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1719 d = reg_of_var(iptr->dst, REG_NULL);
1720 x86_64_emit_lshift(X86_64_SHL, src, iptr);
1723 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1724 /* val.i = constant */
1726 d = reg_of_var(iptr->dst, REG_NULL);
1727 x86_64_emit_lshiftconst(X86_64_SHL, src, iptr);
1730 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1732 d = reg_of_var(iptr->dst, REG_NULL);
1733 x86_64_emit_lshift(X86_64_SAR, src, iptr);
1736 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1737 /* val.i = constant */
1739 d = reg_of_var(iptr->dst, REG_NULL);
1740 x86_64_emit_lshiftconst(X86_64_SAR, src, iptr);
1743 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1745 d = reg_of_var(iptr->dst, REG_NULL);
1746 x86_64_emit_lshift(X86_64_SHR, src, iptr);
1749 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1750 /* val.l = constant */
1752 d = reg_of_var(iptr->dst, REG_NULL);
1753 x86_64_emit_lshiftconst(X86_64_SHR, src, iptr);
1756 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1758 d = reg_of_var(iptr->dst, REG_NULL);
1759 x86_64_emit_ialu(X86_64_AND, src, iptr);
1762 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1763 /* val.i = constant */
1765 d = reg_of_var(iptr->dst, REG_NULL);
1766 x86_64_emit_ialuconst(X86_64_AND, src, iptr);
1769 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1771 d = reg_of_var(iptr->dst, REG_NULL);
1772 x86_64_emit_lalu(X86_64_AND, src, iptr);
1775 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1776 /* val.l = constant */
1778 d = reg_of_var(iptr->dst, REG_NULL);
1779 x86_64_emit_laluconst(X86_64_AND, src, iptr);
1782 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1784 d = reg_of_var(iptr->dst, REG_NULL);
1785 x86_64_emit_ialu(X86_64_OR, src, iptr);
1788 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1789 /* val.i = constant */
1791 d = reg_of_var(iptr->dst, REG_NULL);
1792 x86_64_emit_ialuconst(X86_64_OR, src, iptr);
1795 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1797 d = reg_of_var(iptr->dst, REG_NULL);
1798 x86_64_emit_lalu(X86_64_OR, src, iptr);
1801 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1802 /* val.l = constant */
1804 d = reg_of_var(iptr->dst, REG_NULL);
1805 x86_64_emit_laluconst(X86_64_OR, src, iptr);
1808 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1810 d = reg_of_var(iptr->dst, REG_NULL);
1811 x86_64_emit_ialu(X86_64_XOR, src, iptr);
1814 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1815 /* val.i = constant */
1817 d = reg_of_var(iptr->dst, REG_NULL);
1818 x86_64_emit_ialuconst(X86_64_XOR, src, iptr);
1821 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1823 d = reg_of_var(iptr->dst, REG_NULL);
1824 x86_64_emit_lalu(X86_64_XOR, src, iptr);
1827 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1828 /* val.l = constant */
1830 d = reg_of_var(iptr->dst, REG_NULL);
1831 x86_64_emit_laluconst(X86_64_XOR, src, iptr);
1835 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1836 /* op1 = variable, val.i = constant */
1838 var = &(locals[iptr->op1][TYPE_INT]);
1840 if (var->flags & INMEMORY) {
1841 if (iptr->val.i == 1) {
1842 x86_64_incl_membase(REG_SP, d * 8);
1844 } else if (iptr->val.i == -1) {
1845 x86_64_decl_membase(REG_SP, d * 8);
1848 x86_64_alul_imm_membase(X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1852 if (iptr->val.i == 1) {
1855 } else if (iptr->val.i == -1) {
1859 x86_64_alul_imm_reg(X86_64_ADD, iptr->val.i, d);
1865 /* floating operations ************************************************/
1867 case ICMD_FNEG: /* ..., value ==> ..., - value */
1869 var_to_reg_flt(s1, src, REG_FTMP1);
1870 d = reg_of_var(iptr->dst, REG_FTMP3);
1871 a = dseg_adds4(0x80000000);
1873 x86_64_movss_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, REG_FTMP2);
1874 x86_64_xorps_reg_reg(REG_FTMP2, d);
1875 store_reg_to_var_flt(iptr->dst, d);
1878 case ICMD_DNEG: /* ..., value ==> ..., - value */
1880 var_to_reg_flt(s1, src, REG_FTMP1);
1881 d = reg_of_var(iptr->dst, REG_FTMP3);
1882 a = dseg_adds8(0x8000000000000000);
1884 x86_64_movd_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, REG_FTMP2);
1885 x86_64_xorpd_reg_reg(REG_FTMP2, d);
1886 store_reg_to_var_flt(iptr->dst, d);
1889 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1891 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1892 var_to_reg_flt(s2, src, REG_FTMP2);
1893 d = reg_of_var(iptr->dst, REG_FTMP3);
1895 x86_64_addss_reg_reg(s2, d);
1896 } else if (s2 == d) {
1897 x86_64_addss_reg_reg(s1, d);
1900 x86_64_addss_reg_reg(s2, d);
1902 store_reg_to_var_flt(iptr->dst, d);
1905 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1907 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1908 var_to_reg_flt(s2, src, REG_FTMP2);
1909 d = reg_of_var(iptr->dst, REG_FTMP3);
1911 x86_64_addsd_reg_reg(s2, d);
1912 } else if (s2 == d) {
1913 x86_64_addsd_reg_reg(s1, d);
1916 x86_64_addsd_reg_reg(s2, d);
1918 store_reg_to_var_flt(iptr->dst, d);
1921 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1923 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1924 var_to_reg_flt(s2, src, REG_FTMP2);
1925 d = reg_of_var(iptr->dst, REG_FTMP3);
1927 M_FLTMOVE(s2, REG_FTMP2);
1931 x86_64_subss_reg_reg(s2, d);
1932 store_reg_to_var_flt(iptr->dst, d);
1935 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1937 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1938 var_to_reg_flt(s2, src, REG_FTMP2);
1939 d = reg_of_var(iptr->dst, REG_FTMP3);
1941 M_FLTMOVE(s2, REG_FTMP2);
1945 x86_64_subsd_reg_reg(s2, d);
1946 store_reg_to_var_flt(iptr->dst, d);
1949 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1951 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1952 var_to_reg_flt(s2, src, REG_FTMP2);
1953 d = reg_of_var(iptr->dst, REG_FTMP3);
1955 x86_64_mulss_reg_reg(s2, d);
1956 } else if (s2 == d) {
1957 x86_64_mulss_reg_reg(s1, d);
1960 x86_64_mulss_reg_reg(s2, d);
1962 store_reg_to_var_flt(iptr->dst, d);
1965 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1967 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1968 var_to_reg_flt(s2, src, REG_FTMP2);
1969 d = reg_of_var(iptr->dst, REG_FTMP3);
1971 x86_64_mulsd_reg_reg(s2, d);
1972 } else if (s2 == d) {
1973 x86_64_mulsd_reg_reg(s1, d);
1976 x86_64_mulsd_reg_reg(s2, d);
1978 store_reg_to_var_flt(iptr->dst, d);
1981 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1983 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1984 var_to_reg_flt(s2, src, REG_FTMP2);
1985 d = reg_of_var(iptr->dst, REG_FTMP3);
1987 M_FLTMOVE(s2, REG_FTMP2);
1991 x86_64_divss_reg_reg(s2, d);
1992 store_reg_to_var_flt(iptr->dst, d);
1995 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1997 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1998 var_to_reg_flt(s2, src, REG_FTMP2);
1999 d = reg_of_var(iptr->dst, REG_FTMP3);
2001 M_FLTMOVE(s2, REG_FTMP2);
2005 x86_64_divsd_reg_reg(s2, d);
2006 store_reg_to_var_flt(iptr->dst, d);
2009 case ICMD_I2F: /* ..., value ==> ..., (float) value */
2011 var_to_reg_int(s1, src, REG_ITMP1);
2012 d = reg_of_var(iptr->dst, REG_FTMP1);
2013 x86_64_cvtsi2ss_reg_reg(s1, d);
2014 store_reg_to_var_flt(iptr->dst, d);
2017 case ICMD_I2D: /* ..., value ==> ..., (double) value */
2019 var_to_reg_int(s1, src, REG_ITMP1);
2020 d = reg_of_var(iptr->dst, REG_FTMP1);
2021 x86_64_cvtsi2sd_reg_reg(s1, d);
2022 store_reg_to_var_flt(iptr->dst, d);
2025 case ICMD_L2F: /* ..., value ==> ..., (float) value */
2027 var_to_reg_int(s1, src, REG_ITMP1);
2028 d = reg_of_var(iptr->dst, REG_FTMP1);
2029 x86_64_cvtsi2ssq_reg_reg(s1, d);
2030 store_reg_to_var_flt(iptr->dst, d);
2033 case ICMD_L2D: /* ..., value ==> ..., (double) value */
2035 var_to_reg_int(s1, src, REG_ITMP1);
2036 d = reg_of_var(iptr->dst, REG_FTMP1);
2037 x86_64_cvtsi2sdq_reg_reg(s1, d);
2038 store_reg_to_var_flt(iptr->dst, d);
2041 case ICMD_F2I: /* ..., value ==> ..., (int) value */
2043 var_to_reg_flt(s1, src, REG_FTMP1);
2044 d = reg_of_var(iptr->dst, REG_ITMP1);
2045 x86_64_cvttss2si_reg_reg(s1, d);
2046 x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, d); /* corner cases */
2047 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2048 x86_64_jcc(X86_64_CC_NE, a);
2049 M_FLTMOVE(s1, REG_FTMP1);
2050 x86_64_mov_imm_reg((s8) asm_builtin_f2i, REG_ITMP2);
2051 x86_64_call_reg(REG_ITMP2);
2052 M_INTMOVE(REG_RESULT, d);
2053 store_reg_to_var_int(iptr->dst, d);
2056 case ICMD_D2I: /* ..., value ==> ..., (int) value */
2058 var_to_reg_flt(s1, src, REG_FTMP1);
2059 d = reg_of_var(iptr->dst, REG_ITMP1);
2060 x86_64_cvttsd2si_reg_reg(s1, d);
2061 x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, d); /* corner cases */
2062 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2063 x86_64_jcc(X86_64_CC_NE, a);
2064 M_FLTMOVE(s1, REG_FTMP1);
2065 x86_64_mov_imm_reg((s8) asm_builtin_d2i, REG_ITMP2);
2066 x86_64_call_reg(REG_ITMP2);
2067 M_INTMOVE(REG_RESULT, d);
2068 store_reg_to_var_int(iptr->dst, d);
2071 case ICMD_F2L: /* ..., value ==> ..., (long) value */
2073 var_to_reg_flt(s1, src, REG_FTMP1);
2074 d = reg_of_var(iptr->dst, REG_ITMP1);
2075 x86_64_cvttss2siq_reg_reg(s1, d);
2076 x86_64_mov_imm_reg(0x8000000000000000, REG_ITMP2);
2077 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, d); /* corner cases */
2078 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2079 x86_64_jcc(X86_64_CC_NE, a);
2080 M_FLTMOVE(s1, REG_FTMP1);
2081 x86_64_mov_imm_reg((s8) asm_builtin_f2l, REG_ITMP2);
2082 x86_64_call_reg(REG_ITMP2);
2083 M_INTMOVE(REG_RESULT, d);
2084 store_reg_to_var_int(iptr->dst, d);
2087 case ICMD_D2L: /* ..., value ==> ..., (long) value */
2089 var_to_reg_flt(s1, src, REG_FTMP1);
2090 d = reg_of_var(iptr->dst, REG_ITMP1);
2091 x86_64_cvttsd2siq_reg_reg(s1, d);
2092 x86_64_mov_imm_reg(0x8000000000000000, REG_ITMP2);
2093 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, d); /* corner cases */
2094 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2095 x86_64_jcc(X86_64_CC_NE, a);
2096 M_FLTMOVE(s1, REG_FTMP1);
2097 x86_64_mov_imm_reg((s8) asm_builtin_d2l, REG_ITMP2);
2098 x86_64_call_reg(REG_ITMP2);
2099 M_INTMOVE(REG_RESULT, d);
2100 store_reg_to_var_int(iptr->dst, d);
2103 case ICMD_F2D: /* ..., value ==> ..., (double) value */
2105 var_to_reg_flt(s1, src, REG_FTMP1);
2106 d = reg_of_var(iptr->dst, REG_FTMP3);
2107 x86_64_cvtss2sd_reg_reg(s1, d);
2108 store_reg_to_var_flt(iptr->dst, d);
2111 case ICMD_D2F: /* ..., value ==> ..., (float) value */
2113 var_to_reg_flt(s1, src, REG_FTMP1);
2114 d = reg_of_var(iptr->dst, REG_FTMP3);
2115 x86_64_cvtsd2ss_reg_reg(s1, d);
2116 store_reg_to_var_flt(iptr->dst, d);
2119 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
2120 /* == => 0, < => 1, > => -1 */
2122 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2123 var_to_reg_flt(s2, src, REG_FTMP2);
2124 d = reg_of_var(iptr->dst, REG_ITMP3);
2125 x86_64_alu_reg_reg(X86_64_XOR, d, d);
2126 x86_64_mov_imm_reg(1, REG_ITMP1);
2127 x86_64_mov_imm_reg(-1, REG_ITMP2);
2128 x86_64_ucomiss_reg_reg(s1, s2);
2129 x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2130 x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2131 x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
2132 store_reg_to_var_int(iptr->dst, d);
2135 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
2136 /* == => 0, < => 1, > => -1 */
2138 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2139 var_to_reg_flt(s2, src, REG_FTMP2);
2140 d = reg_of_var(iptr->dst, REG_ITMP3);
2141 x86_64_alu_reg_reg(X86_64_XOR, d, d);
2142 x86_64_mov_imm_reg(1, REG_ITMP1);
2143 x86_64_mov_imm_reg(-1, REG_ITMP2);
2144 x86_64_ucomiss_reg_reg(s1, s2);
2145 x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2146 x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2147 x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
2148 store_reg_to_var_int(iptr->dst, d);
2151 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
2152 /* == => 0, < => 1, > => -1 */
2154 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2155 var_to_reg_flt(s2, src, REG_FTMP2);
2156 d = reg_of_var(iptr->dst, REG_ITMP3);
2157 x86_64_alu_reg_reg(X86_64_XOR, d, d);
2158 x86_64_mov_imm_reg(1, REG_ITMP1);
2159 x86_64_mov_imm_reg(-1, REG_ITMP2);
2160 x86_64_ucomisd_reg_reg(s1, s2);
2161 x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2162 x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2163 x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
2164 store_reg_to_var_int(iptr->dst, d);
2167 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
2168 /* == => 0, < => 1, > => -1 */
2170 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2171 var_to_reg_flt(s2, src, REG_FTMP2);
2172 d = reg_of_var(iptr->dst, REG_ITMP3);
2173 x86_64_alu_reg_reg(X86_64_XOR, d, d);
2174 x86_64_mov_imm_reg(1, REG_ITMP1);
2175 x86_64_mov_imm_reg(-1, REG_ITMP2);
2176 x86_64_ucomisd_reg_reg(s1, s2);
2177 x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2178 x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2179 x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
2180 store_reg_to_var_int(iptr->dst, d);
2184 /* memory operations **************************************************/
2186 #define gen_bound_check \
2187 if (checkbounds) { \
2188 x86_64_alul_membase_reg(X86_64_CMP, s1, OFFSET(java_arrayheader, size), s2); \
2189 x86_64_jcc(X86_64_CC_AE, 0); \
2190 codegen_addxboundrefs(mcodeptr); \
2193 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
2195 var_to_reg_int(s1, src, REG_ITMP1);
2196 d = reg_of_var(iptr->dst, REG_ITMP3);
2197 gen_nullptr_check(s1);
2198 x86_64_movl_membase_reg(s1, OFFSET(java_arrayheader, size), d);
2199 store_reg_to_var_int(iptr->dst, d);
2202 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2204 var_to_reg_int(s1, src->prev, REG_ITMP1);
2205 var_to_reg_int(s2, src, REG_ITMP2);
2206 d = reg_of_var(iptr->dst, REG_ITMP3);
2207 if (iptr->op1 == 0) {
2208 gen_nullptr_check(s1);
2211 x86_64_mov_memindex_reg(OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2212 store_reg_to_var_int(iptr->dst, d);
2215 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2217 var_to_reg_int(s1, src->prev, REG_ITMP1);
2218 var_to_reg_int(s2, src, REG_ITMP2);
2219 d = reg_of_var(iptr->dst, REG_ITMP3);
2220 if (iptr->op1 == 0) {
2221 gen_nullptr_check(s1);
2224 x86_64_mov_memindex_reg(OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2225 store_reg_to_var_int(iptr->dst, d);
2228 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2230 var_to_reg_int(s1, src->prev, REG_ITMP1);
2231 var_to_reg_int(s2, src, REG_ITMP2);
2232 d = reg_of_var(iptr->dst, REG_ITMP3);
2233 if (iptr->op1 == 0) {
2234 gen_nullptr_check(s1);
2237 x86_64_movl_memindex_reg(OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2238 store_reg_to_var_int(iptr->dst, d);
2241 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2243 var_to_reg_int(s1, src->prev, REG_ITMP1);
2244 var_to_reg_int(s2, src, REG_ITMP2);
2245 d = reg_of_var(iptr->dst, REG_FTMP3);
2246 if (iptr->op1 == 0) {
2247 gen_nullptr_check(s1);
2250 x86_64_movss_memindex_reg(OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2251 store_reg_to_var_flt(iptr->dst, d);
2254 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2256 var_to_reg_int(s1, src->prev, REG_ITMP1);
2257 var_to_reg_int(s2, src, REG_ITMP2);
2258 d = reg_of_var(iptr->dst, REG_FTMP3);
2259 if (iptr->op1 == 0) {
2260 gen_nullptr_check(s1);
2263 x86_64_movsd_memindex_reg(OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2264 store_reg_to_var_flt(iptr->dst, d);
2267 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2269 var_to_reg_int(s1, src->prev, REG_ITMP1);
2270 var_to_reg_int(s2, src, REG_ITMP2);
2271 d = reg_of_var(iptr->dst, REG_ITMP3);
2272 if (iptr->op1 == 0) {
2273 gen_nullptr_check(s1);
2276 x86_64_movzwq_memindex_reg(OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2277 store_reg_to_var_int(iptr->dst, d);
2280 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2282 var_to_reg_int(s1, src->prev, REG_ITMP1);
2283 var_to_reg_int(s2, src, REG_ITMP2);
2284 d = reg_of_var(iptr->dst, REG_ITMP3);
2285 if (iptr->op1 == 0) {
2286 gen_nullptr_check(s1);
2289 x86_64_movswq_memindex_reg(OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2290 store_reg_to_var_int(iptr->dst, d);
2293 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2295 var_to_reg_int(s1, src->prev, REG_ITMP1);
2296 var_to_reg_int(s2, src, REG_ITMP2);
2297 d = reg_of_var(iptr->dst, REG_ITMP3);
2298 if (iptr->op1 == 0) {
2299 gen_nullptr_check(s1);
2302 x86_64_movsbq_memindex_reg(OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2303 store_reg_to_var_int(iptr->dst, d);
2307 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2309 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2310 var_to_reg_int(s2, src->prev, REG_ITMP2);
2311 if (iptr->op1 == 0) {
2312 gen_nullptr_check(s1);
2315 var_to_reg_int(s3, src, REG_ITMP3);
2316 x86_64_mov_reg_memindex(s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2319 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2321 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2322 var_to_reg_int(s2, src->prev, REG_ITMP2);
2323 if (iptr->op1 == 0) {
2324 gen_nullptr_check(s1);
2327 var_to_reg_int(s3, src, REG_ITMP3);
2328 x86_64_mov_reg_memindex(s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2331 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2333 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2334 var_to_reg_int(s2, src->prev, REG_ITMP2);
2335 if (iptr->op1 == 0) {
2336 gen_nullptr_check(s1);
2339 var_to_reg_int(s3, src, REG_ITMP3);
2340 x86_64_movl_reg_memindex(s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2343 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2345 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2346 var_to_reg_int(s2, src->prev, REG_ITMP2);
2347 if (iptr->op1 == 0) {
2348 gen_nullptr_check(s1);
2351 var_to_reg_flt(s3, src, REG_FTMP3);
2352 x86_64_movss_reg_memindex(s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2355 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2357 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2358 var_to_reg_int(s2, src->prev, REG_ITMP2);
2359 if (iptr->op1 == 0) {
2360 gen_nullptr_check(s1);
2363 var_to_reg_flt(s3, src, REG_FTMP3);
2364 x86_64_movsd_reg_memindex(s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2367 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2369 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2370 var_to_reg_int(s2, src->prev, REG_ITMP2);
2371 if (iptr->op1 == 0) {
2372 gen_nullptr_check(s1);
2375 var_to_reg_int(s3, src, REG_ITMP3);
2376 x86_64_movw_reg_memindex(s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2379 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2381 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2382 var_to_reg_int(s2, src->prev, REG_ITMP2);
2383 if (iptr->op1 == 0) {
2384 gen_nullptr_check(s1);
2387 var_to_reg_int(s3, src, REG_ITMP3);
2388 x86_64_movw_reg_memindex(s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2391 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2393 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2394 var_to_reg_int(s2, src->prev, REG_ITMP2);
2395 if (iptr->op1 == 0) {
2396 gen_nullptr_check(s1);
2399 var_to_reg_int(s3, src, REG_ITMP3);
2400 x86_64_movb_reg_memindex(s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2404 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2405 /* op1 = type, val.a = field address */
2407 a = dseg_addaddress(&(((fieldinfo *)(iptr->val.a))->value));
2408 /* x86_64_mov_imm_reg(0, REG_ITMP2); */
2409 /* dseg_adddata(mcodeptr); */
2410 /* x86_64_mov_membase_reg(REG_ITMP2, a, REG_ITMP3); */
2411 x86_64_mov_membase_reg(RIP, -(((s8) mcodeptr + 7) - (s8) mcodebase) + a, REG_ITMP2);
2412 switch (iptr->op1) {
2414 var_to_reg_int(s2, src, REG_ITMP1);
2415 x86_64_movl_reg_membase(s2, REG_ITMP2, 0);
2419 var_to_reg_int(s2, src, REG_ITMP1);
2420 x86_64_mov_reg_membase(s2, REG_ITMP2, 0);
2423 var_to_reg_flt(s2, src, REG_FTMP1);
2424 x86_64_movss_reg_membase(s2, REG_ITMP2, 0);
2427 var_to_reg_flt(s2, src, REG_FTMP1);
2428 x86_64_movsd_reg_membase(s2, REG_ITMP2, 0);
2430 default: panic("internal error");
2434 case ICMD_GETSTATIC: /* ... ==> ..., value */
2435 /* op1 = type, val.a = field address */
2437 a = dseg_addaddress(&(((fieldinfo *)(iptr->val.a))->value));
2438 /* x86_64_mov_imm_reg(0, REG_ITMP2); */
2439 /* dseg_adddata(mcodeptr); */
2440 /* x86_64_mov_membase_reg(REG_ITMP2, a, REG_ITMP3); */
2441 x86_64_mov_membase_reg(RIP, -(((s8) mcodeptr + 7) - (s8) mcodebase) + a, REG_ITMP2);
2442 switch (iptr->op1) {
2444 d = reg_of_var(iptr->dst, REG_ITMP1);
2445 x86_64_movl_membase_reg(REG_ITMP2, 0, d);
2446 store_reg_to_var_int(iptr->dst, d);
2450 d = reg_of_var(iptr->dst, REG_ITMP1);
2451 x86_64_mov_membase_reg(REG_ITMP2, 0, d);
2452 store_reg_to_var_int(iptr->dst, d);
2455 d = reg_of_var(iptr->dst, REG_ITMP1);
2456 x86_64_movss_membase_reg(REG_ITMP2, 0, d);
2457 store_reg_to_var_flt(iptr->dst, d);
2460 d = reg_of_var(iptr->dst, REG_ITMP1);
2461 x86_64_movsd_membase_reg(REG_ITMP2, 0, d);
2462 store_reg_to_var_flt(iptr->dst, d);
2464 default: panic("internal error");
2468 case ICMD_PUTFIELD: /* ..., value ==> ... */
2469 /* op1 = type, val.i = field offset */
2471 a = ((fieldinfo *)(iptr->val.a))->offset;
2472 var_to_reg_int(s1, src->prev, REG_ITMP1);
2473 switch (iptr->op1) {
2475 var_to_reg_int(s2, src, REG_ITMP2);
2476 gen_nullptr_check(s1);
2477 x86_64_movl_reg_membase(s2, s1, a);
2481 var_to_reg_int(s2, src, REG_ITMP2);
2482 gen_nullptr_check(s1);
2483 x86_64_mov_reg_membase(s2, s1, a);
2486 var_to_reg_flt(s2, src, REG_FTMP2);
2487 gen_nullptr_check(s1);
2488 x86_64_movss_reg_membase(s2, s1, a);
2491 var_to_reg_flt(s2, src, REG_FTMP2);
2492 gen_nullptr_check(s1);
2493 x86_64_movsd_reg_membase(s2, s1, a);
2495 default: panic ("internal error");
2499 case ICMD_GETFIELD: /* ... ==> ..., value */
2500 /* op1 = type, val.i = field offset */
2502 a = ((fieldinfo *)(iptr->val.a))->offset;
2503 var_to_reg_int(s1, src, REG_ITMP1);
2504 switch (iptr->op1) {
2506 d = reg_of_var(iptr->dst, REG_ITMP1);
2507 gen_nullptr_check(s1);
2508 x86_64_movl_membase_reg(s1, a, d);
2509 store_reg_to_var_int(iptr->dst, d);
2513 d = reg_of_var(iptr->dst, REG_ITMP1);
2514 gen_nullptr_check(s1);
2515 x86_64_mov_membase_reg(s1, a, d);
2516 store_reg_to_var_int(iptr->dst, d);
2519 d = reg_of_var(iptr->dst, REG_FTMP1);
2520 gen_nullptr_check(s1);
2521 x86_64_movss_membase_reg(s1, a, d);
2522 store_reg_to_var_flt(iptr->dst, d);
2525 d = reg_of_var(iptr->dst, REG_FTMP1);
2526 gen_nullptr_check(s1);
2527 x86_64_movsd_membase_reg(s1, a, d);
2528 store_reg_to_var_flt(iptr->dst, d);
2530 default: panic ("internal error");
2535 /* branch operations **************************************************/
2537 /* #define ALIGNCODENOP {if((int)((long)mcodeptr&7)){M_NOP;}} */
2538 /* ALIGNCODENOP: branch-target alignment is currently disabled on
        x86_64, so this expands to a no-op.  The do/while(0) form keeps
        it a single statement, so call sites retain their semicolon and
        remain safe inside unbraced if/else bodies. */
     #define ALIGNCODENOP do { /* intentionally empty */ } while (0)
2540 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2542 var_to_reg_int(s1, src, REG_ITMP1);
2543 M_INTMOVE(s1, REG_ITMP1_XPTR);
2545 x86_64_call_imm(0); /* passing exception pointer */
2546 x86_64_pop_reg(REG_ITMP2_XPC);
2548 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
2549 x86_64_jmp_reg(REG_ITMP3);
2553 case ICMD_GOTO: /* ... ==> ... */
2554 /* op1 = target JavaVM pc */
2557 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2561 case ICMD_JSR: /* ... ==> ... */
2562 /* op1 = target JavaVM pc */
2565 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2568 case ICMD_RET: /* ... ==> ... */
2569 /* op1 = local variable */
2571 var = &(locals[iptr->op1][TYPE_ADR]);
2572 var_to_reg_int(s1, var, REG_ITMP1);
2576 case ICMD_IFNULL: /* ..., value ==> ... */
2577 /* op1 = target JavaVM pc */
2579 if (src->flags & INMEMORY) {
2580 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
2583 x86_64_test_reg_reg(src->regoff, src->regoff);
2585 x86_64_jcc(X86_64_CC_E, 0);
2586 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2589 case ICMD_IFNONNULL: /* ..., value ==> ... */
2590 /* op1 = target JavaVM pc */
2592 if (src->flags & INMEMORY) {
2593 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
2596 x86_64_test_reg_reg(src->regoff, src->regoff);
2598 x86_64_jcc(X86_64_CC_NE, 0);
2599 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2602 case ICMD_IFEQ: /* ..., value ==> ... */
2603 /* op1 = target JavaVM pc, val.i = constant */
2605 x86_64_emit_ifcc(X86_64_CC_E, src, iptr);
2608 case ICMD_IFLT: /* ..., value ==> ... */
2609 /* op1 = target JavaVM pc, val.i = constant */
2611 x86_64_emit_ifcc(X86_64_CC_L, src, iptr);
2614 case ICMD_IFLE: /* ..., value ==> ... */
2615 /* op1 = target JavaVM pc, val.i = constant */
2617 x86_64_emit_ifcc(X86_64_CC_LE, src, iptr);
2620 case ICMD_IFNE: /* ..., value ==> ... */
2621 /* op1 = target JavaVM pc, val.i = constant */
2623 x86_64_emit_ifcc(X86_64_CC_NE, src, iptr);
2626 case ICMD_IFGT: /* ..., value ==> ... */
2627 /* op1 = target JavaVM pc, val.i = constant */
2629 x86_64_emit_ifcc(X86_64_CC_G, src, iptr);
2632 case ICMD_IFGE: /* ..., value ==> ... */
2633 /* op1 = target JavaVM pc, val.i = constant */
2635 x86_64_emit_ifcc(X86_64_CC_GE, src, iptr);
2638 case ICMD_IF_LEQ: /* ..., value ==> ... */
2639 /* op1 = target JavaVM pc, val.l = constant */
2641 x86_64_emit_if_lcc(X86_64_CC_E, src, iptr);
2644 case ICMD_IF_LLT: /* ..., value ==> ... */
2645 /* op1 = target JavaVM pc, val.l = constant */
2647 x86_64_emit_if_lcc(X86_64_CC_L, src, iptr);
2650 case ICMD_IF_LLE: /* ..., value ==> ... */
2651 /* op1 = target JavaVM pc, val.l = constant */
2653 x86_64_emit_if_lcc(X86_64_CC_LE, src, iptr);
2656 case ICMD_IF_LNE: /* ..., value ==> ... */
2657 /* op1 = target JavaVM pc, val.l = constant */
2659 x86_64_emit_if_lcc(X86_64_CC_NE, src, iptr);
2662 case ICMD_IF_LGT: /* ..., value ==> ... */
2663 /* op1 = target JavaVM pc, val.l = constant */
2665 x86_64_emit_if_lcc(X86_64_CC_G, src, iptr);
2668 case ICMD_IF_LGE: /* ..., value ==> ... */
2669 /* op1 = target JavaVM pc, val.l = constant */
2671 x86_64_emit_if_lcc(X86_64_CC_GE, src, iptr);
2674 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2675 /* op1 = target JavaVM pc */
2677 x86_64_emit_if_icmpcc(X86_64_CC_E, src, iptr);
2680 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2681 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2683 x86_64_emit_if_lcmpcc(X86_64_CC_E, src, iptr);
2686 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2687 /* op1 = target JavaVM pc */
2689 x86_64_emit_if_icmpcc(X86_64_CC_NE, src, iptr);
2692 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2693 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2695 x86_64_emit_if_lcmpcc(X86_64_CC_NE, src, iptr);
2698 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2699 /* op1 = target JavaVM pc */
2701 x86_64_emit_if_icmpcc(X86_64_CC_L, src, iptr);
2704 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2705 /* op1 = target JavaVM pc */
2707 x86_64_emit_if_lcmpcc(X86_64_CC_L, src, iptr);
2710 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2711 /* op1 = target JavaVM pc */
2713 x86_64_emit_if_icmpcc(X86_64_CC_G, src, iptr);
2716 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2717 /* op1 = target JavaVM pc */
2719 x86_64_emit_if_lcmpcc(X86_64_CC_G, src, iptr);
2722 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2723 /* op1 = target JavaVM pc */
2725 x86_64_emit_if_icmpcc(X86_64_CC_LE, src, iptr);
2728 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2729 /* op1 = target JavaVM pc */
2731 x86_64_emit_if_lcmpcc(X86_64_CC_LE, src, iptr);
2734 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2735 /* op1 = target JavaVM pc */
2737 x86_64_emit_if_icmpcc(X86_64_CC_GE, src, iptr);
2740 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2741 /* op1 = target JavaVM pc */
2743 x86_64_emit_if_lcmpcc(X86_64_CC_GE, src, iptr);
2746 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
2748 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2751 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2752 /* val.i = constant */
2754 var_to_reg_int(s1, src, REG_ITMP1);
2755 d = reg_of_var(iptr->dst, REG_ITMP3);
2757 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2759 M_INTMOVE(s1, REG_ITMP1);
2762 x86_64_movl_imm_reg(iptr[1].val.i, d);
2764 x86_64_movl_imm_reg(s3, REG_ITMP2);
2765 x86_64_testl_reg_reg(s1, s1);
2766 x86_64_cmovccl_reg_reg(X86_64_CC_E, REG_ITMP2, d);
2767 store_reg_to_var_int(iptr->dst, d);
2770 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2771 /* val.i = constant */
2773 var_to_reg_int(s1, src, REG_ITMP1);
2774 d = reg_of_var(iptr->dst, REG_ITMP3);
2776 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2778 M_INTMOVE(s1, REG_ITMP1);
2781 x86_64_movl_imm_reg(iptr[1].val.i, d);
2783 x86_64_movl_imm_reg(s3, REG_ITMP2);
2784 x86_64_testl_reg_reg(s1, s1);
2785 x86_64_cmovccl_reg_reg(X86_64_CC_NE, REG_ITMP2, d);
2786 store_reg_to_var_int(iptr->dst, d);
2789 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2790 /* val.i = constant */
2792 var_to_reg_int(s1, src, REG_ITMP1);
2793 d = reg_of_var(iptr->dst, REG_ITMP3);
2795 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2797 M_INTMOVE(s1, REG_ITMP1);
2800 x86_64_movl_imm_reg(iptr[1].val.i, d);
2802 x86_64_movl_imm_reg(s3, REG_ITMP2);
2803 x86_64_testl_reg_reg(s1, s1);
2804 x86_64_cmovccl_reg_reg(X86_64_CC_L, REG_ITMP2, d);
2805 store_reg_to_var_int(iptr->dst, d);
2808 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2809 /* val.i = constant */
2811 var_to_reg_int(s1, src, REG_ITMP1);
2812 d = reg_of_var(iptr->dst, REG_ITMP3);
2814 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2816 M_INTMOVE(s1, REG_ITMP1);
2819 x86_64_movl_imm_reg(iptr[1].val.i, d);
2821 x86_64_movl_imm_reg(s3, REG_ITMP2);
2822 x86_64_testl_reg_reg(s1, s1);
2823 x86_64_cmovccl_reg_reg(X86_64_CC_GE, REG_ITMP2, d);
2824 store_reg_to_var_int(iptr->dst, d);
2827 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2828 /* val.i = constant */
2830 var_to_reg_int(s1, src, REG_ITMP1);
2831 d = reg_of_var(iptr->dst, REG_ITMP3);
2833 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2835 M_INTMOVE(s1, REG_ITMP1);
2838 x86_64_movl_imm_reg(iptr[1].val.i, d);
2840 x86_64_movl_imm_reg(s3, REG_ITMP2);
2841 x86_64_testl_reg_reg(s1, s1);
2842 x86_64_cmovccl_reg_reg(X86_64_CC_G, REG_ITMP2, d);
2843 store_reg_to_var_int(iptr->dst, d);
2846 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2847 /* val.i = constant */
2849 var_to_reg_int(s1, src, REG_ITMP1);
2850 d = reg_of_var(iptr->dst, REG_ITMP3);
2852 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2854 M_INTMOVE(s1, REG_ITMP1);
2857 x86_64_movl_imm_reg(iptr[1].val.i, d);
2859 x86_64_movl_imm_reg(s3, REG_ITMP2);
2860 x86_64_testl_reg_reg(s1, s1);
2861 x86_64_cmovccl_reg_reg(X86_64_CC_LE, REG_ITMP2, d);
2862 store_reg_to_var_int(iptr->dst, d);
2866 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2871 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2872 x86_64_mov_membase_reg(REG_SP, 8 * maxmemuse, argintregs[0]);
2873 x86_64_mov_imm_reg((s8) builtin_monitorexit, REG_ITMP1);
2874 x86_64_call_reg(REG_ITMP1);
2877 var_to_reg_int(s1, src, REG_RESULT);
2878 M_INTMOVE(s1, REG_RESULT);
2879 goto nowperformreturn;
2881 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2885 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2886 x86_64_mov_membase_reg(REG_SP, 8 * maxmemuse, argintregs[0]);
2887 x86_64_mov_imm_reg((s8) builtin_monitorexit, REG_ITMP1);
2888 x86_64_call_reg(REG_ITMP1);
2891 var_to_reg_flt(s1, src, REG_FRESULT);
2892 M_FLTMOVE(s1, REG_FRESULT);
2893 goto nowperformreturn;
2895 case ICMD_RETURN: /* ... ==> ... */
2898 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2899 x86_64_mov_membase_reg(REG_SP, 8 * maxmemuse, argintregs[0]);
2900 x86_64_mov_imm_reg((s8) builtin_monitorexit, REG_ITMP1);
2901 x86_64_call_reg(REG_ITMP1);
2909 p = parentargs_base;
2911 /* call trace function */
2913 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
2915 x86_64_mov_reg_membase(REG_RESULT, REG_SP, 0 * 8);
2916 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, 1 * 8);
2918 x86_64_mov_imm_reg((s8) method, argintregs[0]);
2919 x86_64_mov_reg_reg(REG_RESULT, argintregs[1]);
2920 M_FLTMOVE(REG_FRESULT, argfltregs[0]);
2921 M_FLTMOVE(REG_FRESULT, argfltregs[1]);
2923 x86_64_mov_imm_reg((s8) builtin_displaymethodstop, REG_ITMP1);
2924 x86_64_call_reg(REG_ITMP1);
2926 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_RESULT);
2927 x86_64_movq_membase_reg(REG_SP, 1 * 8, REG_FRESULT);
2929 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
2932 /* restore saved registers */
2933 for (r = savintregcnt - 1; r >= maxsavintreguse; r--) {
2934 p--; x86_64_mov_membase_reg(REG_SP, p * 8, savintregs[r]);
2936 for (r = savfltregcnt - 1; r >= maxsavfltreguse; r--) {
2937 p--; x86_64_movq_membase_reg(REG_SP, p * 8, savfltregs[r]);
2940 /* deallocate stack */
2941 if (parentargs_base) {
2942 x86_64_alu_imm_reg(X86_64_ADD, parentargs_base * 8, REG_SP);
2951 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2956 tptr = (void **) iptr->target;
2958 s4ptr = iptr->val.a;
2959 l = s4ptr[1]; /* low */
2960 i = s4ptr[2]; /* high */
2962 var_to_reg_int(s1, src, REG_ITMP1);
2963 M_INTMOVE(s1, REG_ITMP1);
2965 x86_64_alul_imm_reg(X86_64_SUB, l, REG_ITMP1);
2970 x86_64_alul_imm_reg(X86_64_CMP, i - 1, REG_ITMP1);
2971 x86_64_jcc(X86_64_CC_A, 0);
2973 /* codegen_addreference(BlockPtrOfPC(s4ptr[0]), mcodeptr); */
2974 codegen_addreference((basicblock *) tptr[0], mcodeptr);
2976 /* build jump table top down and use address of lowest entry */
2978 /* s4ptr += 3 + i; */
2982 /* dseg_addtarget(BlockPtrOfPC(*--s4ptr)); */
2983 dseg_addtarget((basicblock *) tptr[0]);
2987 /* length of dataseg after last dseg_addtarget is used by load */
2989 x86_64_mov_imm_reg(0, REG_ITMP2);
2990 dseg_adddata(mcodeptr);
2991 x86_64_mov_memindex_reg(-dseglen, REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
2992 x86_64_jmp_reg(REG_ITMP1);
2998 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
3000 s4 i, l, val, *s4ptr;
3003 tptr = (void **) iptr->target;
3005 s4ptr = iptr->val.a;
3006 l = s4ptr[0]; /* default */
3007 i = s4ptr[1]; /* count */
3009 MCODECHECK((i<<2)+8);
3010 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
3016 x86_64_alul_imm_reg(X86_64_CMP, val, s1);
3017 x86_64_jcc(X86_64_CC_E, 0);
3018 /* codegen_addreference(BlockPtrOfPC(s4ptr[1]), mcodeptr); */
3019 codegen_addreference((basicblock *) tptr[0], mcodeptr);
3023 /* codegen_addreference(BlockPtrOfPC(l), mcodeptr); */
3025 tptr = (void **) iptr->target;
3026 codegen_addreference((basicblock *) tptr[0], mcodeptr);
3033 case ICMD_BUILTIN3: /* ..., arg1, arg2, arg3 ==> ... */
3034 /* op1 = return type, val.a = function pointer*/
3038 case ICMD_BUILTIN2: /* ..., arg1, arg2 ==> ... */
3039 /* op1 = return type, val.a = function pointer*/
3043 case ICMD_BUILTIN1: /* ..., arg1 ==> ... */
3044 /* op1 = return type, val.a = function pointer*/
3048 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3049 /* op1 = arg count, val.a = method pointer */
3051 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3052 /* op1 = arg count, val.a = method pointer */
3054 case ICMD_INVOKEVIRTUAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3055 /* op1 = arg count, val.a = method pointer */
3057 case ICMD_INVOKEINTERFACE:/*.., objectref, [arg1, [arg2 ...]] ==> ... */
3058 /* op1 = arg count, val.a = method pointer */
3069 MCODECHECK((s3 << 1) + 64);
3074 /* copy arguments to registers or stack location */
3075 for (; --s3 >= 0; src = src->prev) {
3076 IS_INT_LNG_TYPE(src->type) ? iarg++ : farg++;
3082 s2 = (iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0 + (farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0;
3084 for (; --s3 >= 0; src = src->prev) {
3085 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
3086 if (src->varkind == ARGVAR) {
3087 if (IS_INT_LNG_TYPE(src->type)) {
3088 if (iarg >= INT_ARG_CNT) {
3092 if (farg >= FLT_ARG_CNT) {
3099 if (IS_INT_LNG_TYPE(src->type)) {
3100 if (iarg < INT_ARG_CNT) {
3101 s1 = argintregs[iarg];
3102 var_to_reg_int(d, src, s1);
3106 var_to_reg_int(d, src, REG_ITMP1);
3108 x86_64_mov_reg_membase(d, REG_SP, s2 * 8);
3112 if (farg < FLT_ARG_CNT) {
3113 s1 = argfltregs[farg];
3114 var_to_reg_flt(d, src, s1);
3118 var_to_reg_flt(d, src, REG_FTMP1);
3120 x86_64_movq_reg_membase(d, REG_SP, s2 * 8);
3126 switch (iptr->opc) {
3134 x86_64_mov_imm_reg(a, REG_ITMP1);
3135 x86_64_call_reg(REG_ITMP1);
3138 case ICMD_INVOKESTATIC:
3140 a = (s8) m->stubroutine;
3143 x86_64_mov_imm_reg(a, REG_ITMP2);
3144 x86_64_call_reg(REG_ITMP2);
3147 case ICMD_INVOKESPECIAL:
3149 a = (s8) m->stubroutine;
3152 gen_nullptr_check(argintregs[0]); /* first argument contains pointer */
3153 x86_64_mov_membase_reg(argintregs[0], 0, REG_ITMP2); /* access memory for hardware nullptr */
3154 x86_64_mov_imm_reg(a, REG_ITMP2);
3155 x86_64_call_reg(REG_ITMP2);
3158 case ICMD_INVOKEVIRTUAL:
3162 gen_nullptr_check(argintregs[0]);
3163 x86_64_mov_membase_reg(argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3164 x86_64_mov_membase32_reg(REG_ITMP2, OFFSET(vftbl, table[0]) + sizeof(methodptr) * m->vftblindex, REG_ITMP1);
3165 x86_64_call_reg(REG_ITMP1);
3168 case ICMD_INVOKEINTERFACE:
3173 gen_nullptr_check(argintregs[0]);
3174 x86_64_mov_membase_reg(argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3175 x86_64_mov_membase_reg(REG_ITMP2, OFFSET(vftbl, interfacetable[0]) - sizeof(methodptr) * ci->index, REG_ITMP2);
3176 x86_64_mov_membase32_reg(REG_ITMP2, sizeof(methodptr) * (m - ci->methods), REG_ITMP1);
3177 x86_64_call_reg(REG_ITMP1);
3182 error("Unkown ICMD-Command: %d", iptr->opc);
3185 /* d contains return type */
3187 if (d != TYPE_VOID) {
3188 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3189 s1 = reg_of_var(iptr->dst, REG_RESULT);
3190 M_INTMOVE(REG_RESULT, s1);
3191 store_reg_to_var_int(iptr->dst, s1);
3194 s1 = reg_of_var(iptr->dst, REG_FRESULT);
3195 M_FLTMOVE(REG_FRESULT, s1);
3196 store_reg_to_var_flt(iptr->dst, s1);
3203 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3205 /* op1: 0 == array, 1 == class */
3206 /* val.a: (classinfo*) superclass */
3208 /* superclass is an interface:
3210 * return (sub != NULL) &&
3211 * (sub->vftbl->interfacetablelength > super->index) &&
3212 * (sub->vftbl->interfacetable[-super->index] != NULL);
3214 * superclass is a class:
3216 * return ((sub != NULL) && (0
3217 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3218 * super->vftbl->diffval));
3222 classinfo *super = (classinfo*) iptr->val.a;
3224 var_to_reg_int(s1, src, REG_ITMP1);
3225 d = reg_of_var(iptr->dst, REG_ITMP3);
3227 M_INTMOVE(s1, REG_ITMP1);
3230 x86_64_alu_reg_reg(X86_64_XOR, d, d);
3231 if (iptr->op1) { /* class/interface */
3232 if (super->flags & ACC_INTERFACE) { /* interface */
3233 x86_64_test_reg_reg(s1, s1);
3235 /* TODO: clean up this calculation */
3236 a = 3; /* mov_membase_reg */
3237 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3239 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3240 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetablelength));
3243 CALCOFFSETBYTES(a, 0, super->index);
3248 a += 3; /* mov_membase_reg */
3249 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3254 x86_64_jcc(X86_64_CC_E, a);
3256 x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3257 x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetablelength), REG_ITMP2);
3258 x86_64_alu_imm_reg(X86_64_SUB, super->index, REG_ITMP2);
3259 x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3261 /* TODO: clean up this calculation */
3263 a += 3; /* mov_membase_reg */
3264 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3269 x86_64_jcc(X86_64_CC_LE, a);
3270 x86_64_mov_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP1);
3271 x86_64_test_reg_reg(REG_ITMP1, REG_ITMP1);
3272 x86_64_setcc_reg(X86_64_CC_NE, d);
3274 } else { /* class */
3275 x86_64_test_reg_reg(s1, s1);
3277 /* TODO: clean up this calculation */
3278 a = 3; /* mov_membase_reg */
3279 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3281 a += 10; /* mov_imm_reg */
3283 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3284 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, baseval));
3286 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3287 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3289 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3290 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3297 x86_64_jcc(X86_64_CC_E, a);
3299 x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3300 x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3301 x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, baseval), REG_ITMP1);
3302 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP3);
3303 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3304 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP3, REG_ITMP1);
3305 x86_64_alu_reg_reg(X86_64_XOR, d, d);
3306 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
3307 x86_64_setcc_reg(X86_64_CC_BE, d);
3311 panic("internal error: no inlined array instanceof");
3313 store_reg_to_var_int(iptr->dst, d);
3316 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3318 /* op1: 0 == array, 1 == class */
3319 /* val.a: (classinfo*) superclass */
3321 /* superclass is an interface:
3323 * OK if ((sub == NULL) ||
3324 * (sub->vftbl->interfacetablelength > super->index) &&
3325 * (sub->vftbl->interfacetable[-super->index] != NULL));
3327 * superclass is a class:
3329 * OK if ((sub == NULL) || (0
3330 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3331 * super->vftbl->diffval));
3335 classinfo *super = (classinfo*) iptr->val.a;
3337 d = reg_of_var(iptr->dst, REG_ITMP3);
3338 var_to_reg_int(s1, src, d);
3339 if (iptr->op1) { /* class/interface */
3340 if (super->flags & ACC_INTERFACE) { /* interface */
3341 x86_64_test_reg_reg(s1, s1);
3343 /* TODO: clean up this calculation */
3344 a = 3; /* mov_membase_reg */
3345 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3347 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3348 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetablelength));
3351 CALCOFFSETBYTES(a, 0, super->index);
3356 a += 3; /* mov_membase_reg */
3357 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3362 x86_64_jcc(X86_64_CC_E, a);
3364 x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3365 x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetablelength), REG_ITMP2);
3366 x86_64_alu_imm_reg(X86_64_SUB, super->index, REG_ITMP2);
3367 x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3368 x86_64_jcc(X86_64_CC_LE, 0);
3369 codegen_addxcastrefs(mcodeptr);
3370 x86_64_mov_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP2);
3371 x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3372 x86_64_jcc(X86_64_CC_E, 0);
3373 codegen_addxcastrefs(mcodeptr);
3375 } else { /* class */
3376 x86_64_test_reg_reg(s1, s1);
3378 /* TODO: clean up this calculation */
3379 a = 3; /* mov_membase_reg */
3380 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3381 a += 10; /* mov_imm_reg */
3382 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3383 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, baseval));
3385 if (d != REG_ITMP3) {
3386 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3387 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3388 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3389 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3393 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3394 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3396 a += 10; /* mov_imm_reg */
3397 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3398 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3404 x86_64_jcc(X86_64_CC_E, a);
3406 x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3407 x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3408 x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, baseval), REG_ITMP1);
3409 if (d != REG_ITMP3) {
3410 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP3);
3411 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3412 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP3, REG_ITMP1);
3415 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP2);
3416 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
3417 x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3418 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3420 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
3421 x86_64_jcc(X86_64_CC_A, 0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3422 codegen_addxcastrefs(mcodeptr);
3426 panic("internal error: no inlined array checkcast");
3429 store_reg_to_var_int(iptr->dst, d);
3432 case ICMD_CHECKASIZE: /* ..., size ==> ..., size */
3434 if (src->flags & INMEMORY) {
3435 x86_64_alul_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
3438 x86_64_testl_reg_reg(src->regoff, src->regoff);
3440 x86_64_jcc(X86_64_CC_L, 0);
3441 codegen_addxcheckarefs(mcodeptr);
3444 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3445 /* op1 = dimension, val.a = array descriptor */
3447 /* check for negative sizes and copy sizes to stack if necessary */
3449 MCODECHECK((iptr->op1 << 1) + 64);
3451 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3452 var_to_reg_int(s2, src, REG_ITMP1);
3453 x86_64_testl_reg_reg(s2, s2);
3454 x86_64_jcc(X86_64_CC_L, 0);
3455 codegen_addxcheckarefs(mcodeptr);
3457 /* copy sizes to stack (argument numbers >= INT_ARG_CNT) */
3459 if (src->varkind != ARGVAR) {
3460 x86_64_mov_reg_membase(s2, REG_SP, (s1 + INT_ARG_CNT) * 8);
3464 /* a0 = dimension count */
3465 x86_64_mov_imm_reg(iptr->op1, argintregs[0]);
3467 /* a1 = arraydescriptor */
3468 x86_64_mov_imm_reg((s8) iptr->val.a, argintregs[1]);
3470 /* a2 = pointer to dimensions = stack pointer */
3471 x86_64_mov_reg_reg(REG_SP, argintregs[2]);
3473 x86_64_mov_imm_reg((s8) (builtin_nmultianewarray), REG_ITMP1);
3474 x86_64_call_reg(REG_ITMP1);
3476 s1 = reg_of_var(iptr->dst, REG_RESULT);
3477 M_INTMOVE(REG_RESULT, s1);
3478 store_reg_to_var_int(iptr->dst, s1);
3481 default: error("Unknown pseudo command: %d", iptr->opc);
3484 } /* for instruction */
3486 /* copy values to interface registers */
3488 src = bptr->outstack;
3489 len = bptr->outdepth;
3493 if ((src->varkind != STACKVAR)) {
3495 if (IS_FLT_DBL_TYPE(s2)) {
3496 var_to_reg_flt(s1, src, REG_FTMP1);
3497 if (!(interfaces[len][s2].flags & INMEMORY)) {
3498 M_FLTMOVE(s1, interfaces[len][s2].regoff);
3501 x86_64_movq_reg_membase(s1, REG_SP, 8 * interfaces[len][s2].regoff);
3505 var_to_reg_int(s1, src, REG_ITMP1);
3506 if (!(interfaces[len][s2].flags & INMEMORY)) {
3507 M_INTMOVE(s1, interfaces[len][s2].regoff);
3510 x86_64_mov_reg_membase(s1, REG_SP, interfaces[len][s2].regoff * 8);
3516 } /* if (bptr -> flags >= BBREACHED) */
3517 } /* for basic block */
3519 /* bptr -> mpc = (int)((u1*) mcodeptr - mcodebase); */
3523 /* generate bound check stubs */
3524 u1 *xcodeptr = NULL;
3526 for (; xboundrefs != NULL; xboundrefs = xboundrefs->next) {
3527 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3528 gen_resolvebranch(mcodebase + xboundrefs->branchpos,
3529 xboundrefs->branchpos, xcodeptr - mcodebase - (10 + 10 + 3));
3533 gen_resolvebranch(mcodebase + xboundrefs->branchpos,
3534 xboundrefs->branchpos, mcodeptr - mcodebase);
3538 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3539 dseg_adddata(mcodeptr);
3540 x86_64_mov_imm_reg(xboundrefs->branchpos - 6, REG_ITMP1); /* 10 bytes */
3541 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3543 if (xcodeptr != NULL) {
3544 x86_64_jmp_imm((xcodeptr - mcodeptr) - 5);
3547 xcodeptr = mcodeptr;
3549 x86_64_mov_imm_reg((s8) proto_java_lang_ArrayIndexOutOfBoundsException, REG_ITMP1_XPTR);
3550 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3551 x86_64_jmp_reg(REG_ITMP3);
3555 /* generate negative array size check stubs */
3558 for (; xcheckarefs != NULL; xcheckarefs = xcheckarefs->next) {
3559 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3560 gen_resolvebranch(mcodebase + xcheckarefs->branchpos,
3561 xcheckarefs->branchpos, xcodeptr - mcodebase - (10 + 10 + 3));
3565 gen_resolvebranch(mcodebase + xcheckarefs->branchpos,
3566 xcheckarefs->branchpos, mcodeptr - mcodebase);
3570 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3571 dseg_adddata(mcodeptr);
3572 x86_64_mov_imm_reg(xcheckarefs->branchpos - 6, REG_ITMP1); /* 10 bytes */
3573 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3575 if (xcodeptr != NULL) {
3576 x86_64_jmp_imm((xcodeptr - mcodeptr) - 5);
3579 xcodeptr = mcodeptr;
3581 x86_64_mov_imm_reg((s8) proto_java_lang_NegativeArraySizeException, REG_ITMP1_XPTR);
3582 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3583 x86_64_jmp_reg(REG_ITMP3);
3587 /* generate cast check stubs */
3590 for (; xcastrefs != NULL; xcastrefs = xcastrefs->next) {
3591 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3592 gen_resolvebranch(mcodebase + xcastrefs->branchpos,
3593 xcastrefs->branchpos, xcodeptr - mcodebase - (10 + 10 + 3));
3597 gen_resolvebranch(mcodebase + xcastrefs->branchpos,
3598 xcastrefs->branchpos, mcodeptr - mcodebase);
3602 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3603 dseg_adddata(mcodeptr);
3604 x86_64_mov_imm_reg(xcastrefs->branchpos - 6, REG_ITMP1); /* 10 bytes */
3605 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3607 if (xcodeptr != NULL) {
3608 x86_64_jmp_imm((xcodeptr - mcodeptr) - 5);
3611 xcodeptr = mcodeptr;
3613 x86_64_mov_imm_reg((s8) proto_java_lang_ClassCastException, REG_ITMP1_XPTR);
3614 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3615 x86_64_jmp_reg(REG_ITMP3);
3619 #ifdef SOFTNULLPTRCHECK
3620 /* generate null pointer check stubs */
3623 for (; xnullrefs != NULL; xnullrefs = xnullrefs->next) {
3624 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3625 gen_resolvebranch(mcodebase + xnullrefs->branchpos,
3626 xnullrefs->branchpos, xcodeptr - mcodebase - (10 + 10 + 3));
3630 gen_resolvebranch(mcodebase + xnullrefs->branchpos,
3631 xnullrefs->branchpos, mcodeptr - mcodebase);
3635 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3636 dseg_adddata(mcodeptr);
3637 x86_64_mov_imm_reg(xnullrefs->branchpos - 6, REG_ITMP1); /* 10 bytes */
3638 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3640 if (xcodeptr != NULL) {
3641 x86_64_jmp_imm((xcodeptr - mcodeptr) - 5);
3644 xcodeptr = mcodeptr;
3646 x86_64_mov_imm_reg((s8) proto_java_lang_NullPointerException, REG_ITMP1_XPTR);
3647 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3648 x86_64_jmp_reg(REG_ITMP3);
3655 codegen_finish((int)((u1*) mcodeptr - mcodebase));
3659 /* function createcompilerstub *************************************************
3661 creates a stub routine which calls the compiler
3663 *******************************************************************************/
3665 #define COMPSTUBSIZE 23
/* createcompilerstub: allocate and emit a tiny trampoline for method m.
 * The stub loads the methodinfo pointer into REG_ITMP1 and jumps to
 * asm_call_jit_compiler (via REG_ITMP3), which compiles m on first call. */
3667 u1 *createcompilerstub(methodinfo *m)
3669 u1 *s = CNEW(u1, COMPSTUBSIZE); /* memory to hold the stub */
3670 mcodeptr = s; /* code generation pointer */
3672 /* code for the stub */
3673 x86_64_mov_imm_reg((s8) m, REG_ITMP1); /* pass method pointer to compiler */
3674 x86_64_mov_imm_reg((s8) asm_call_jit_compiler, REG_ITMP3);/* load address */
3675 x86_64_jmp_reg(REG_ITMP3); /* jump to compiler */
/* statistics: account for the allocated stub size */
3678 count_cstub_len += COMPSTUBSIZE;
3685 /* function removecompilerstub *************************************************
3687 deletes a compilerstub from memory (simply by freeing it)
3689 *******************************************************************************/
/* removecompilerstub: release the COMPSTUBSIZE bytes allocated by
 * createcompilerstub for this stub. */
3691 void removecompilerstub(u1 *stub)
3693 CFREE(stub, COMPSTUBSIZE);
3696 /* function: createnativestub **************************************************
3698 creates a stub routine which calls a native method
3700 *******************************************************************************/
3702 #define NATIVESTUBSIZE 420
/* createnativestub: build a trampoline that calls native function f for
 * java method m.  It shifts the java arguments into the C calling
 * convention (inserting the JNI env pointer and, for static methods,
 * the class pointer), calls f, and on a pending exception jumps to
 * asm_handle_nat_exception.  Tracing call-in/call-out code is emitted
 * around the native call (conditional guards appear to be elided here). */
3704 u1 *createnativestub(functionptr f, methodinfo *m)
3706 u1 *s = CNEW(u1, NATIVESTUBSIZE); /* memory to hold the stub */
3707 int stackframesize; /* size of stackframe if needed */
3708 mcodeptr = s; /* make macros work */
3711 descriptor2types(m); /* set paramcount and paramtypes */
/* --- trace prologue: spill all argument registers, call
 * builtin_trace_args, then reload the registers --- */
3716 x86_64_alu_imm_reg(X86_64_SUB, (6 + 8 + 1) * 8, REG_SP);
3718 x86_64_mov_reg_membase(argintregs[0], REG_SP, 1 * 8);
3719 x86_64_mov_reg_membase(argintregs[1], REG_SP, 2 * 8);
3720 x86_64_mov_reg_membase(argintregs[2], REG_SP, 3 * 8);
3721 x86_64_mov_reg_membase(argintregs[3], REG_SP, 4 * 8);
3722 x86_64_mov_reg_membase(argintregs[4], REG_SP, 5 * 8);
3723 x86_64_mov_reg_membase(argintregs[5], REG_SP, 6 * 8);
3725 x86_64_movq_reg_membase(argfltregs[0], REG_SP, 7 * 8);
3726 x86_64_movq_reg_membase(argfltregs[1], REG_SP, 8 * 8);
3727 x86_64_movq_reg_membase(argfltregs[2], REG_SP, 9 * 8);
3728 x86_64_movq_reg_membase(argfltregs[3], REG_SP, 10 * 8);
3729 /* x86_64_movq_reg_membase(argfltregs[4], REG_SP, 11 * 8); */
3730 /* x86_64_movq_reg_membase(argfltregs[5], REG_SP, 12 * 8); */
3731 /* x86_64_movq_reg_membase(argfltregs[6], REG_SP, 13 * 8); */
3732 /* x86_64_movq_reg_membase(argfltregs[7], REG_SP, 14 * 8); */
3734 /* also show the hex code for floats passed */
/* NOTE(review): float args are moved into integer registers here and the
 * remaining int args shifted up -- presumably because the trace callee is
 * varargs; confirm against builtin_trace_args. */
3735 for (p = 0, l = 0; p < m->paramcount; p++) {
3736 if (IS_FLT_DBL_TYPE(m->paramtypes[p])) {
3737 for (s1 = (m->paramcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : m->paramcount - 2; s1 >= p; s1--) {
3738 x86_64_mov_reg_reg(argintregs[s1], argintregs[s1 + 1]);
3741 x86_64_movd_freg_reg(argfltregs[l], argintregs[p]);
/* store m in the stack slot at [sp+0] for the trace call */
3746 x86_64_mov_imm_reg((s8) m, REG_ITMP2);
3747 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, 0 * 8);
3748 /* x86_64_mov_imm_reg(asm_builtin_trace, REG_ITMP1); */
3749 x86_64_mov_imm_reg((s8) builtin_trace_args, REG_ITMP1);
3750 x86_64_call_reg(REG_ITMP1);
/* reload the argument registers spilled above */
3752 x86_64_mov_membase_reg(REG_SP, 1 * 8, argintregs[0]);
3753 x86_64_mov_membase_reg(REG_SP, 2 * 8, argintregs[1]);
3754 x86_64_mov_membase_reg(REG_SP, 3 * 8, argintregs[2]);
3755 x86_64_mov_membase_reg(REG_SP, 4 * 8, argintregs[3]);
3756 x86_64_mov_membase_reg(REG_SP, 5 * 8, argintregs[4]);
3757 x86_64_mov_membase_reg(REG_SP, 6 * 8, argintregs[5]);
3759 x86_64_movq_membase_reg(REG_SP, 7 * 8, argfltregs[0]);
3760 x86_64_movq_membase_reg(REG_SP, 8 * 8, argfltregs[1]);
3761 x86_64_movq_membase_reg(REG_SP, 9 * 8, argfltregs[2]);
3762 x86_64_movq_membase_reg(REG_SP, 10 * 8, argfltregs[3]);
3763 /* x86_64_movq_membase_reg(REG_SP, 11 * 8, argfltregs[4]); */
3764 /* x86_64_movq_membase_reg(REG_SP, 12 * 8, argfltregs[5]); */
3765 /* x86_64_movq_membase_reg(REG_SP, 13 * 8, argfltregs[6]); */
3766 /* x86_64_movq_membase_reg(REG_SP, 14 * 8, argfltregs[7]); */
3768 x86_64_alu_imm_reg(X86_64_ADD, (6 + 8 + 1) * 8, REG_SP);
/* frame for six callee-saved xmm registers (+1 slot for alignment) */
3772 x86_64_alu_imm_reg(X86_64_SUB, 7 * 8, REG_SP); /* keep stack 16-byte aligned */
3774 /* save callee saved float registers */
3775 x86_64_movq_reg_membase(XMM15, REG_SP, 0 * 8);
3776 x86_64_movq_reg_membase(XMM14, REG_SP, 1 * 8);
3777 x86_64_movq_reg_membase(XMM13, REG_SP, 2 * 8);
3778 x86_64_movq_reg_membase(XMM12, REG_SP, 3 * 8);
3779 x86_64_movq_reg_membase(XMM11, REG_SP, 4 * 8);
3780 x86_64_movq_reg_membase(XMM10, REG_SP, 5 * 8);
3783 /* save argument registers on stack -- if we have to */
/* a new frame is needed when args spill to the stack: static methods gain
 * two extra leading C args (env + class), non-static methods gain one (env) */
3784 if ((m->flags & ACC_STATIC && m->paramcount > 4) || m->paramcount > 5) {
3786 int paramshiftcnt = (m->flags & ACC_STATIC) ? 2 : 1;
3787 int stackparamcnt = (m->paramcount > 6) ? m->paramcount - 6 : 0;
3789 stackframesize = stackparamcnt + paramshiftcnt;
3791 /* keep stack 16-byte aligned */
3792 if ((stackframesize % 2) == 0) stackframesize++;
3794 x86_64_alu_imm_reg(X86_64_SUB, stackframesize * 8, REG_SP);
3796 /* copy stack arguments into new stack frame -- if any */
3797 for (i = 0; i < stackparamcnt; i++) {
3798 x86_64_mov_membase_reg(REG_SP, (stackparamcnt + 1 + i) * 8, REG_ITMP1);
3799 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
/* overflow register args move to the bottom of the new frame */
3802 if (m->flags & ACC_STATIC) {
3803 x86_64_mov_reg_membase(argintregs[5], REG_SP, 1 * 8);
3804 x86_64_mov_reg_membase(argintregs[4], REG_SP, 0 * 8);
3807 x86_64_mov_reg_membase(argintregs[5], REG_SP, 0 * 8);
3811 /* keep stack 16-byte aligned -- this is essential for x86_64 */
3812 x86_64_alu_imm_reg(X86_64_SUB, 8, REG_SP);
/* shift the register arguments up to make room for env (and class) */
3816 if (m->flags & ACC_STATIC) {
3817 x86_64_mov_reg_reg(argintregs[3], argintregs[5]);
3818 x86_64_mov_reg_reg(argintregs[2], argintregs[4]);
3819 x86_64_mov_reg_reg(argintregs[1], argintregs[3]);
3820 x86_64_mov_reg_reg(argintregs[0], argintregs[2]);
3822 /* put class into second argument register */
3823 x86_64_mov_imm_reg((s8) m->class, argintregs[1]);
3826 x86_64_mov_reg_reg(argintregs[4], argintregs[5]);
3827 x86_64_mov_reg_reg(argintregs[3], argintregs[4]);
3828 x86_64_mov_reg_reg(argintregs[2], argintregs[3]);
3829 x86_64_mov_reg_reg(argintregs[1], argintregs[2]);
3830 x86_64_mov_reg_reg(argintregs[0], argintregs[1]);
3833 /* put env into first argument register */
3834 x86_64_mov_imm_reg((s8) &env, argintregs[0]);
/* call the native function */
3836 x86_64_mov_imm_reg((s8) f, REG_ITMP1);
3837 x86_64_call_reg(REG_ITMP1);
3839 /* remove stackframe if there is one */
3840 if (stackframesize) {
3841 x86_64_alu_imm_reg(X86_64_ADD, stackframesize * 8, REG_SP);
3845 /* restore callee saved registers */
3846 x86_64_movq_membase_reg(REG_SP, 0 * 8, XMM15);
3847 x86_64_movq_membase_reg(REG_SP, 1 * 8, XMM14);
3848 x86_64_movq_membase_reg(REG_SP, 2 * 8, XMM13);
3849 x86_64_movq_membase_reg(REG_SP, 3 * 8, XMM12);
3850 x86_64_movq_membase_reg(REG_SP, 4 * 8, XMM11);
3851 x86_64_movq_membase_reg(REG_SP, 5 * 8, XMM10);
3853 x86_64_alu_imm_reg(X86_64_ADD, 7 * 8, REG_SP); /* keep stack 16-byte aligned */
/* --- trace epilogue: save the result registers, call
 * builtin_displaymethodstop(m, result, fresult, ...), restore them --- */
3857 x86_64_alu_imm_reg(X86_64_SUB, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
3859 x86_64_mov_reg_membase(REG_RESULT, REG_SP, 0 * 8);
3860 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, 1 * 8);
3862 x86_64_mov_imm_reg((s8) m, argintregs[0]);
3863 x86_64_mov_reg_reg(REG_RESULT, argintregs[1]);
/* NOTE(review): FRESULT is copied into BOTH float argument registers --
 * presumably one slot for float and one for double; confirm against the
 * builtin_displaymethodstop signature. */
3864 M_FLTMOVE(REG_FRESULT, argfltregs[0]);
3865 M_FLTMOVE(REG_FRESULT, argfltregs[1]);
3867 /* x86_64_mov_imm_reg(asm_builtin_exittrace, REG_ITMP1); */
3868 x86_64_mov_imm_reg((s8) builtin_displaymethodstop, REG_ITMP1);
3869 x86_64_call_reg(REG_ITMP1);
3871 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_RESULT);
3872 x86_64_movq_membase_reg(REG_SP, 1 * 8, REG_FRESULT);
3874 x86_64_alu_imm_reg(X86_64_ADD, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
/* check for a pending exception: load *exceptionptr and test it */
3877 x86_64_mov_imm_reg((s8) &exceptionptr, REG_ITMP3);
3878 x86_64_mov_membase_reg(REG_ITMP3, 0, REG_ITMP3);
3879 x86_64_test_reg_reg(REG_ITMP3, REG_ITMP3);
/* if non-NULL, jump over the next byte of code (the normal return,
 * presumably a 1-byte ret -- not visible in this excerpt) */
3880 x86_64_jcc(X86_64_CC_NE, 1);
/* exception path: move the exception object to the XPTR register and
 * clear the global exception pointer */
3884 x86_64_mov_reg_reg(REG_ITMP3, REG_ITMP1_XPTR);
3885 x86_64_mov_imm_reg((s8) &exceptionptr, REG_ITMP3);
3886 x86_64_alu_reg_reg(X86_64_XOR, REG_ITMP2, REG_ITMP2);
3887 x86_64_mov_reg_membase(REG_ITMP2, REG_ITMP3, 0); /* clear exception pointer */
3889 x86_64_mov_membase_reg(REG_SP, 0, REG_ITMP2_XPC); /* get return address from stack */
3890 x86_64_alu_imm_reg(X86_64_SUB, 3, REG_ITMP2_XPC); /* callq */
3892 x86_64_mov_imm_reg((s8) asm_handle_nat_exception, REG_ITMP3);
3893 x86_64_jmp_reg(REG_ITMP3);
3897 static int stubprinted;
/* NOTE(review): "%d" is given a long-typed value; should be "%ld" */
3899 printf("stubsize: %d\n", ((long)mcodeptr - (long) s));
/* statistics: account for the allocated stub size */
3905 count_nstub_len += NATIVESTUBSIZE;
3912 /* function: removenativestub **************************************************
3914 removes a previously created native-stub from memory
3916 *******************************************************************************/
/* removenativestub: release the NATIVESTUBSIZE bytes allocated by
 * createnativestub for this stub. */
3918 void removenativestub(u1 *stub)
3920 CFREE(stub, NATIVESTUBSIZE);
3924 /* code generation functions */
/* x86_64_emit_ialu: emit a 32-bit ALU op (alu_op) combining the two top
 * stack operands (src->prev OP src) into iptr->dst.  Each operand and the
 * destination may live in a register or in a stack slot (INMEMORY, offset
 * regoff*8 from REG_SP); each combination gets a suitable instruction
 * sequence, using REG_ITMP1 as scratch.  (Some else/brace lines are not
 * visible in this excerpt.) */
3926 void x86_64_emit_ialu(s4 alu_op, stackptr src, instruction *iptr)
3928 s4 s1 = src->prev->regoff;
3929 s4 s2 = src->regoff;
3930 s4 d = iptr->dst->regoff;
/* destination is a stack slot */
3932 if (iptr->dst->flags & INMEMORY) {
3933 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* both sources in memory: operate directly on the slot when one source
 * coincides with the destination */
3935 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
3936 x86_64_alul_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
3938 } else if (s1 == d) {
3939 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
3940 x86_64_alul_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
3943 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
3944 x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, REG_ITMP1);
3945 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
3948 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
3950 x86_64_alul_reg_membase(alu_op, s1, REG_SP, d * 8);
3953 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
3954 x86_64_alul_reg_reg(alu_op, s1, REG_ITMP1);
3955 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
3958 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
3960 x86_64_alul_reg_membase(alu_op, s2, REG_SP, d * 8);
3963 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
3964 x86_64_alul_reg_reg(alu_op, s2, REG_ITMP1);
3965 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
/* both sources in registers: store s1 then op s2 into the slot */
3969 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
3970 x86_64_alul_reg_membase(alu_op, s2, REG_SP, d * 8);
/* destination is a register */
3974 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
3975 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
3976 x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, d);
3978 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
3980 x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, d);
3982 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
3984 x86_64_alul_membase_reg(alu_op, REG_SP, s1 * 8, d);
3988 x86_64_alul_reg_reg(alu_op, s1, d);
3992 x86_64_alul_reg_reg(alu_op, s2, d);
/* x86_64_emit_lalu: 64-bit twin of x86_64_emit_ialu -- emit alu_op over
 * the two top stack operands into iptr->dst, handling every combination
 * of register/stack-slot (INMEMORY) locations with REG_ITMP1 as scratch.
 * (Some else/brace lines are not visible in this excerpt.) */
4000 void x86_64_emit_lalu(s4 alu_op, stackptr src, instruction *iptr)
4002 s4 s1 = src->prev->regoff;
4003 s4 s2 = src->regoff;
4004 s4 d = iptr->dst->regoff;
/* destination is a stack slot */
4006 if (iptr->dst->flags & INMEMORY) {
4007 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4009 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4010 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4012 } else if (s1 == d) {
4013 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4014 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4017 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4018 x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, REG_ITMP1);
4019 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4022 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4024 x86_64_alu_reg_membase(alu_op, s1, REG_SP, d * 8);
4027 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4028 x86_64_alu_reg_reg(alu_op, s1, REG_ITMP1);
4029 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4032 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4034 x86_64_alu_reg_membase(alu_op, s2, REG_SP, d * 8);
4037 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4038 x86_64_alu_reg_reg(alu_op, s2, REG_ITMP1);
4039 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
/* both sources in registers: store s1 then op s2 into the slot */
4043 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4044 x86_64_alu_reg_membase(alu_op, s2, REG_SP, d * 8);
/* destination is a register */
4048 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4049 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4050 x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, d);
4052 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4054 x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, d);
4056 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4058 x86_64_alu_membase_reg(alu_op, REG_SP, s1 * 8, d);
4062 x86_64_alu_reg_reg(alu_op, s1, d);
4066 x86_64_alu_reg_reg(alu_op, s2, d);
/* x86_64_emit_ialuconst: emit a 32-bit ALU op with immediate iptr->val.i
 * applied to the top stack operand, result in iptr->dst; handles the four
 * register/stack-slot (INMEMORY) combinations. */
4074 void x86_64_emit_ialuconst(s4 alu_op, stackptr src, instruction *iptr)
4076 s4 s1 = src->regoff;
4077 s4 d = iptr->dst->regoff;
4079 if (iptr->dst->flags & INMEMORY) {
4080 if (src->flags & INMEMORY) {
/* src slot == dst slot: operate on the slot in place */
4082 x86_64_alul_imm_membase(alu_op, iptr->val.i, REG_SP, d * 8);
4085 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4086 x86_64_alul_imm_reg(alu_op, iptr->val.i, REG_ITMP1);
4087 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
/* src in register, dst in memory */
4091 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4092 x86_64_alul_imm_membase(alu_op, iptr->val.i, REG_SP, d * 8);
/* dst in register */
4096 if (src->flags & INMEMORY) {
4097 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4098 x86_64_alul_imm_reg(alu_op, iptr->val.i, d);
4102 x86_64_alul_imm_reg(alu_op, iptr->val.i, d);
/* x86_64_emit_laluconst: emit a 64-bit ALU op with long immediate
 * iptr->val.l.  The immediate form is only usable when the value fits in
 * a sign-extended 32-bit field (x86_64_is_imm32); otherwise the constant
 * is first loaded into a temp register with a movabs. */
4109 void x86_64_emit_laluconst(s4 alu_op, stackptr src, instruction *iptr)
4111 s4 s1 = src->regoff;
4112 s4 d = iptr->dst->regoff;
4114 if (iptr->dst->flags & INMEMORY) {
4115 if (src->flags & INMEMORY) {
4117 if (x86_64_is_imm32(iptr->val.l)) {
4118 x86_64_alu_imm_membase(alu_op, iptr->val.l, REG_SP, d * 8);
4121 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4122 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4126 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4128 if (x86_64_is_imm32(iptr->val.l)) {
4129 x86_64_alu_imm_reg(alu_op, iptr->val.l, REG_ITMP1);
4132 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP2);
4133 x86_64_alu_reg_reg(alu_op, REG_ITMP2, REG_ITMP1);
4135 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
/* src in register, dst in memory */
4139 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4141 if (x86_64_is_imm32(iptr->val.l)) {
4142 x86_64_alu_imm_membase(alu_op, iptr->val.l, REG_SP, d * 8);
4145 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4146 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
/* dst in register */
4151 if (src->flags & INMEMORY) {
4152 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4158 if (x86_64_is_imm32(iptr->val.l)) {
4159 x86_64_alu_imm_reg(alu_op, iptr->val.l, d);
4162 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4163 x86_64_alu_reg_reg(alu_op, REG_ITMP1, d);
/* x86_64_emit_ishift: emit a 32-bit variable shift.  x86 requires the
 * shift count in CL, so RCX is saved in REG_ITMP1 up front, the count
 * (src) is moved into RCX, and RCX is restored at the end of each path.
 * (Some else/brace lines are not visible in this excerpt.) */
4170 void x86_64_emit_ishift(s4 shift_op, stackptr src, instruction *iptr)
4172 s4 s1 = src->prev->regoff;
4173 s4 s2 = src->regoff;
4174 s4 d = iptr->dst->regoff;
4176 M_INTMOVE(RCX, REG_ITMP1); /* save RCX */
/* destination is a stack slot */
4177 if (iptr->dst->flags & INMEMORY) {
4178 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4180 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4181 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4184 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4185 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4186 x86_64_shiftl_reg(shift_op, REG_ITMP2);
4187 x86_64_movl_reg_membase(REG_ITMP2, REG_SP, d * 8);
4190 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4191 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4192 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4193 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4195 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4198 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4202 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4203 x86_64_shiftl_reg(shift_op, REG_ITMP2);
4204 x86_64_movl_reg_membase(REG_ITMP2, REG_SP, d * 8);
4209 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4210 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4212 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* destination is a register */
4219 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4220 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4221 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4222 x86_64_shiftl_reg(shift_op, d);
4224 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4225 M_INTMOVE(s1, d); /* maybe src is RCX */
4226 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4227 x86_64_shiftl_reg(shift_op, d);
4229 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4231 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4232 x86_64_shiftl_reg(shift_op, d);
4243 x86_64_shiftl_reg(shift_op, d);
4247 M_INTMOVE(REG_ITMP3, RCX);
4250 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* x86_64_emit_lshift: 64-bit twin of x86_64_emit_ishift -- variable
 * shift with the count forced into RCX (saved/restored via REG_ITMP1).
 * (Some else/brace lines are not visible in this excerpt.) */
4257 void x86_64_emit_lshift(s4 shift_op, stackptr src, instruction *iptr)
4259 s4 s1 = src->prev->regoff;
4260 s4 s2 = src->regoff;
4261 s4 d = iptr->dst->regoff;
4263 M_INTMOVE(RCX, REG_ITMP1); /* save RCX */
/* destination is a stack slot */
4264 if (iptr->dst->flags & INMEMORY) {
4265 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4267 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4268 x86_64_shift_membase(shift_op, REG_SP, d * 8);
4271 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4272 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4273 x86_64_shift_reg(shift_op, REG_ITMP2);
4274 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, d * 8);
4277 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4278 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4279 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4280 x86_64_shift_membase(shift_op, REG_SP, d * 8);
4282 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4285 x86_64_shift_membase(shift_op, REG_SP, d * 8);
4289 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4290 x86_64_shift_reg(shift_op, REG_ITMP2);
4291 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, d * 8);
4296 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4297 x86_64_shift_membase(shift_op, REG_SP, d * 8);
4299 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* destination is a register */
4306 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4307 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4308 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4309 x86_64_shift_reg(shift_op, d);
4311 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4312 M_INTMOVE(s1, d); /* maybe src is RCX */
4313 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4314 x86_64_shift_reg(shift_op, d);
4316 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4318 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4319 x86_64_shift_reg(shift_op, d);
4329 x86_64_shift_reg(shift_op, d);
4333 M_INTMOVE(REG_ITMP3, RCX);
4336 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* x86_64_emit_ishiftconst: emit a 32-bit shift by the constant count
 * iptr->val.i; no RCX juggling needed since the count is an immediate.
 * Handles the four register/stack-slot (INMEMORY) combinations. */
4343 void x86_64_emit_ishiftconst(s4 shift_op, stackptr src, instruction *iptr)
4345 s4 s1 = src->regoff;
4346 s4 d = iptr->dst->regoff;
4348 if ((src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
/* src slot == dst slot: shift the slot in place */
4350 x86_64_shiftl_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
4353 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4354 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, REG_ITMP1);
4355 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
4358 } else if ((src->flags & INMEMORY) && !(iptr->dst->flags & INMEMORY)) {
4359 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4360 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, d);
4362 } else if (!(src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
4363 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4364 x86_64_shiftl_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
4368 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, d);
/* x86_64_emit_lshiftconst: 64-bit twin of x86_64_emit_ishiftconst --
 * shift by the constant count iptr->val.i. */
4374 void x86_64_emit_lshiftconst(s4 shift_op, stackptr src, instruction *iptr)
4376 s4 s1 = src->regoff;
4377 s4 d = iptr->dst->regoff;
4379 if ((src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
/* src slot == dst slot: shift the slot in place */
4381 x86_64_shift_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
4384 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4385 x86_64_shift_imm_reg(shift_op, iptr->val.i, REG_ITMP1);
4386 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4389 } else if ((src->flags & INMEMORY) && !(iptr->dst->flags & INMEMORY)) {
4390 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4391 x86_64_shift_imm_reg(shift_op, iptr->val.i, d);
4393 } else if (!(src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
4394 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4395 x86_64_shift_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
4399 x86_64_shift_imm_reg(shift_op, iptr->val.i, d);
/* x86_64_emit_ifcc: compare the 32-bit top-of-stack operand against the
 * immediate iptr->val.i, then emit a conditional branch (if_op) with a
 * zero displacement and record a forward reference to the target basic
 * block (iptr->op1) so the branch is patched later. */
4405 void x86_64_emit_ifcc(s4 if_op, stackptr src, instruction *iptr)
4407 if (src->flags & INMEMORY) {
4408 x86_64_alul_imm_membase(X86_64_CMP, iptr->val.i, REG_SP, src->regoff * 8);
4411 x86_64_alul_imm_reg(X86_64_CMP, iptr->val.i, src->regoff);
4413 x86_64_jcc(if_op, 0);
4414 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
/* x86_64_emit_if_lcc: 64-bit version of x86_64_emit_ifcc -- compare the
 * operand against the long immediate iptr->val.l (imm32 form when it
 * fits, otherwise via movabs into REG_ITMP1), then branch + record a
 * forward reference for patching. */
4419 void x86_64_emit_if_lcc(s4 if_op, stackptr src, instruction *iptr)
4421 s4 s1 = src->regoff;
4423 if (src->flags & INMEMORY) {
4424 if (x86_64_is_imm32(iptr->val.l)) {
4425 x86_64_alu_imm_membase(X86_64_CMP, iptr->val.l, REG_SP, s1 * 8);
4428 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4429 x86_64_alu_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
4433 if (x86_64_is_imm32(iptr->val.l)) {
4434 x86_64_alu_imm_reg(X86_64_CMP, iptr->val.l, s1);
4437 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4438 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP1, s1);
4441 x86_64_jcc(if_op, 0);
4442 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
/* x86_64_emit_if_icmpcc: compare the two 32-bit top-of-stack operands
 * (src->prev vs src) in whichever locations they live, then emit a
 * conditional branch (if_op) and record a forward reference for later
 * patching. */
4447 void x86_64_emit_if_icmpcc(s4 if_op, stackptr src, instruction *iptr)
4449 s4 s1 = src->prev->regoff;
4450 s4 s2 = src->regoff;
4452 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4453 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4454 x86_64_alul_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
4456 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4457 x86_64_alul_membase_reg(X86_64_CMP, REG_SP, s2 * 8, s1);
4459 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4460 x86_64_alul_reg_membase(X86_64_CMP, s2, REG_SP, s1 * 8);
4463 x86_64_alul_reg_reg(X86_64_CMP, s2, s1);
4465 x86_64_jcc(if_op, 0);
4466 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
/* x86_64_emit_if_lcmpcc: 64-bit twin of x86_64_emit_if_icmpcc -- compare
 * the two top-of-stack long operands, branch conditionally, and record a
 * forward reference for later patching. */
4471 void x86_64_emit_if_lcmpcc(s4 if_op, stackptr src, instruction *iptr)
4473 s4 s1 = src->prev->regoff;
4474 s4 s2 = src->regoff;
4476 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4477 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4478 x86_64_alu_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
4480 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4481 x86_64_alu_membase_reg(X86_64_CMP, REG_SP, s2 * 8, s1);
4483 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4484 x86_64_alu_reg_membase(X86_64_CMP, s2, REG_SP, s1 * 8);
4487 x86_64_alu_reg_reg(X86_64_CMP, s2, s1);
4489 x86_64_jcc(if_op, 0);
4490 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
/* emit 64-bit mov %reg -> %dreg (REX.W + opcode 0x89 /r) */
4500 void x86_64_mov_reg_reg(s8 reg, s8 dreg) {
4501 x86_64_emit_rex(1,(reg),0,(dreg));
4502 *(mcodeptr++) = 0x89;
4503 x86_64_emit_reg((reg),(dreg));
/* emit movabs: load a full 64-bit immediate into reg (0xb8+reg, imm64) */
4507 void x86_64_mov_imm_reg(s8 imm, s8 reg) {
4508 x86_64_emit_rex(1,0,0,(reg));
4509 *(mcodeptr++) = 0xb8 + ((reg) & 0x07);
4510 x86_64_emit_imm64((imm));
/* emit 32-bit mov imm32 -> reg (0xb8+reg, no REX.W) */
4514 void x86_64_movl_imm_reg(s8 imm, s8 reg) {
4515 x86_64_emit_rex(0,0,0,(reg));
4516 *(mcodeptr++) = 0xb8 + ((reg) & 0x07);
4517 x86_64_emit_imm32((imm));
/* emit 64-bit load: reg <- [basereg+disp] (REX.W + 0x8b /r) */
4521 void x86_64_mov_membase_reg(s8 basereg, s8 disp, s8 reg) {
4522 x86_64_emit_rex(1,(reg),0,(basereg));
4523 *(mcodeptr++) = 0x8b;
4524 x86_64_emit_membase((basereg),(disp),(reg));
/* emit 32-bit load: reg <- [basereg+disp] (0x8b /r, no REX.W) */
4528 void x86_64_movl_membase_reg(s8 basereg, s8 disp, s8 reg) {
4529 x86_64_emit_rex(0,(reg),0,(basereg));
4530 *(mcodeptr++) = 0x8b;
4531 x86_64_emit_membase((basereg),(disp),(reg));
/* this one is for INVOKEVIRTUAL/INVOKEINTERFACE to have a
 * constant membase immediate length of 32bit: the ModRM byte is forced to
 * mod=2 (disp32), so the instruction always has the same length and can
 * be patched in place. */
4539 void x86_64_mov_membase32_reg(s8 basereg, s8 disp, s8 reg) {
4540 x86_64_emit_rex(1,(reg),0,(basereg));
4541 *(mcodeptr++) = 0x8b;
4542 x86_64_address_byte(2, (reg), (basereg));
4543 x86_64_emit_imm32((disp));
/* emit 64-bit store: [basereg+disp] <- reg (REX.W + 0x89 /r) */
4547 void x86_64_mov_reg_membase(s8 reg, s8 basereg, s8 disp) {
4548 x86_64_emit_rex(1,(reg),0,(basereg));
4549 *(mcodeptr++) = 0x89;
4550 x86_64_emit_membase((basereg),(disp),(reg));
/* emit 32-bit store: [basereg+disp] <- reg (0x89 /r, no REX.W) */
4554 void x86_64_movl_reg_membase(s8 reg, s8 basereg, s8 disp) {
4555 x86_64_emit_rex(0,(reg),0,(basereg));
4556 *(mcodeptr++) = 0x89;
4557 x86_64_emit_membase((basereg),(disp),(reg));
/* emit 64-bit indexed load: reg <- [basereg + indexreg*scale + disp] */
4561 void x86_64_mov_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4562 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4563 *(mcodeptr++) = 0x8b;
4564 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* emit 32-bit indexed load: reg <- [basereg + indexreg*scale + disp] */
4568 void x86_64_movl_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4569 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4570 *(mcodeptr++) = 0x8b;
4571 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* emit 64-bit indexed store: [basereg + indexreg*scale + disp] <- reg */
4575 void x86_64_mov_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4576 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4577 *(mcodeptr++) = 0x89;
4578 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* emit 32-bit indexed store: [basereg + indexreg*scale + disp] <- reg */
4582 void x86_64_movl_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4583 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4584 *(mcodeptr++) = 0x89;
4585 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* emit 16-bit indexed store (0x66 operand-size prefix + 0x89) */
4589 void x86_64_movw_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4590 *(mcodeptr++) = 0x66;
4591 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4592 *(mcodeptr++) = 0x89;
4593 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* emit 8-bit indexed store (opcode 0x88) */
4597 void x86_64_movb_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4598 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4599 *(mcodeptr++) = 0x88;
4600 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* emit 64-bit store of an immediate: [basereg+disp] <- imm (0xc7 /0).
 * Note: even the REX.W form only carries a sign-extended 32-bit imm. */
4604 void x86_64_mov_imm_membase(s8 imm, s8 basereg, s8 disp) {
4605 x86_64_emit_rex(1,0,0,(basereg));
4606 *(mcodeptr++) = 0xc7;
4607 x86_64_emit_membase((basereg),(disp),0);
4608 x86_64_emit_imm32((imm));
/* emit 32-bit store of an immediate: [basereg+disp] <- imm32 (0xc7 /0) */
4612 void x86_64_movl_imm_membase(s8 imm, s8 basereg, s8 disp) {
4613 x86_64_emit_rex(0,0,0,(basereg));
4614 *(mcodeptr++) = 0xc7;
4615 x86_64_emit_membase((basereg),(disp),0);
4616 x86_64_emit_imm32((imm));
/* emit movsx byte -> quadword, register form (0x0f 0xbe) */
4620 void x86_64_movsbq_reg_reg(s8 reg, s8 dreg) {
4621 x86_64_emit_rex(1,(dreg),0,(reg));
4622 *(mcodeptr++) = 0x0f;
4623 *(mcodeptr++) = 0xbe;
4624 /* reg/dreg are swapped because 0x0f 0xbe is a "reg <- r/m" form: the
 * destination goes in the ModRM reg field, the source in the r/m field */
4625 x86_64_emit_reg((dreg),(reg));
/* emit movsx byte -> quadword from memory: dreg <- sext8([basereg+disp]) */
4629 void x86_64_movsbq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4630 x86_64_emit_rex(1,(dreg),0,(basereg));
4631 *(mcodeptr++) = 0x0f;
4632 *(mcodeptr++) = 0xbe;
4633 x86_64_emit_membase((basereg),(disp),(dreg));
/* emit movsx word -> quadword, register form (0x0f 0xbf) */
4637 void x86_64_movswq_reg_reg(s8 reg, s8 dreg) {
4638 x86_64_emit_rex(1,(dreg),0,(reg));
4639 *(mcodeptr++) = 0x0f;
4640 *(mcodeptr++) = 0xbf;
4641 /* reg/dreg are swapped: "reg <- r/m" form, destination in ModRM reg */
4642 x86_64_emit_reg((dreg),(reg));
/* emit movsx word -> quadword from memory: dreg <- sext16([basereg+disp]) */
4646 void x86_64_movswq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4647 x86_64_emit_rex(1,(dreg),0,(basereg));
4648 *(mcodeptr++) = 0x0f;
4649 *(mcodeptr++) = 0xbf;
4650 x86_64_emit_membase((basereg),(disp),(dreg));
/* emit movsxd: sign-extend dword -> quadword, register form (0x63) */
4654 void x86_64_movslq_reg_reg(s8 reg, s8 dreg) {
4655 x86_64_emit_rex(1,(dreg),0,(reg));
4656 *(mcodeptr++) = 0x63;
4657 /* reg/dreg are swapped: "reg <- r/m" form, destination in ModRM reg */
4658 x86_64_emit_reg((dreg),(reg));
/* emit movsxd from memory: dreg <- sext32([basereg+disp]) (0x63) */
4662 void x86_64_movslq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4663 x86_64_emit_rex(1,(dreg),0,(basereg));
4664 *(mcodeptr++) = 0x63;
4665 x86_64_emit_membase((basereg),(disp),(dreg));
/* emit movzx word -> quadword, register form (0x0f 0xb7) */
4669 void x86_64_movzwq_reg_reg(s8 reg, s8 dreg) {
4670 x86_64_emit_rex(1,(dreg),0,(reg));
4671 *(mcodeptr++) = 0x0f;
4672 *(mcodeptr++) = 0xb7;
4673 /* reg/dreg are swapped: "reg <- r/m" form, destination in ModRM reg */
4674 x86_64_emit_reg((dreg),(reg));
/* emit movzx word -> quadword from memory: dreg <- zext16([basereg+disp]) */
4678 void x86_64_movzwq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4679 x86_64_emit_rex(1,(dreg),0,(basereg));
4680 *(mcodeptr++) = 0x0f;
4681 *(mcodeptr++) = 0xb7;
4682 x86_64_emit_membase((basereg),(disp),(dreg));
/* emit movsx word -> quadword, indexed: reg <- sext16([base+index*scale+disp]) */
4686 void x86_64_movswq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4687 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4688 *(mcodeptr++) = 0x0f;
4689 *(mcodeptr++) = 0xbf;
4690 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* emit movsx byte -> quadword, indexed: reg <- sext8([base+index*scale+disp]) */
4694 void x86_64_movsbq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4695 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4696 *(mcodeptr++) = 0x0f;
4697 *(mcodeptr++) = 0xbe;
4698 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* emit movzx word -> quadword, indexed: reg <- zext16([base+index*scale+disp]) */
4702 void x86_64_movzwq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4703 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4704 *(mcodeptr++) = 0x0f;
4705 *(mcodeptr++) = 0xb7;
4706 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* emit 64-bit ALU reg,reg: opc selects the x86 ALU group (add/or/...);
 * the opcode byte is (opc<<3)+1, the "r/m <- r/m OP reg" form */
4714 void x86_64_alu_reg_reg(s8 opc, s8 reg, s8 dreg) {
4715 x86_64_emit_rex(1,(reg),0,(dreg));
4716 *(mcodeptr++) = (((opc)) << 3) + 1;
4717 x86_64_emit_reg((reg),(dreg));
/* emit 32-bit ALU reg,reg (no REX.W): opcode (opc<<3)+1 */
4721 void x86_64_alul_reg_reg(s8 opc, s8 reg, s8 dreg) {
4722 x86_64_emit_rex(0,(reg),0,(dreg));
4723 *(mcodeptr++) = (((opc)) << 3) + 1;
4724 x86_64_emit_reg((reg),(dreg));
/* emit 64-bit ALU into memory: [basereg+disp] <- [basereg+disp] OP reg */
4728 void x86_64_alu_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp) {
4729 x86_64_emit_rex(1,(reg),0,(basereg));
4730 *(mcodeptr++) = (((opc)) << 3) + 1;
4731 x86_64_emit_membase((basereg),(disp),(reg));
/* emit 32-bit ALU into memory: [basereg+disp] <- [basereg+disp] OP reg */
4735 void x86_64_alul_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp) {
4736 x86_64_emit_rex(0,(reg),0,(basereg));
4737 *(mcodeptr++) = (((opc)) << 3) + 1;
4738 x86_64_emit_membase((basereg),(disp),(reg));
4742 void x86_64_alu_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg) {
4743 x86_64_emit_rex(1,(reg),0,(basereg));
4744 *(mcodeptr++) = (((opc)) << 3) + 3;
4745 x86_64_emit_membase((basereg),(disp),(reg));
4749 void x86_64_alul_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg) {
4750 x86_64_emit_rex(0,(reg),0,(basereg));
4751 *(mcodeptr++) = (((opc)) << 3) + 3;
4752 x86_64_emit_membase((basereg),(disp),(reg));
4756 void x86_64_alu_imm_reg(s8 opc, s8 imm, s8 dreg) {
4757 if (x86_64_is_imm8(imm)) {
4758 x86_64_emit_rex(1,0,0,(dreg));
4759 *(mcodeptr++) = 0x83;
4760 x86_64_emit_reg((opc),(dreg));
4761 x86_64_emit_imm8((imm));
4763 x86_64_emit_rex(1,0,0,(dreg));
4764 *(mcodeptr++) = 0x81;
4765 x86_64_emit_reg((opc),(dreg));
4766 x86_64_emit_imm32((imm));
4771 void x86_64_alul_imm_reg(s8 opc, s8 imm, s8 dreg) {
4772 if (x86_64_is_imm8(imm)) {
4773 x86_64_emit_rex(0,0,0,(dreg));
4774 *(mcodeptr++) = 0x83;
4775 x86_64_emit_reg((opc),(dreg));
4776 x86_64_emit_imm8((imm));
4778 x86_64_emit_rex(0,0,0,(dreg));
4779 *(mcodeptr++) = 0x81;
4780 x86_64_emit_reg((opc),(dreg));
4781 x86_64_emit_imm32((imm));
4786 void x86_64_alu_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
4787 if (x86_64_is_imm8(imm)) {
4788 x86_64_emit_rex(1,(basereg),0,0);
4789 *(mcodeptr++) = 0x83;
4790 x86_64_emit_membase((basereg),(disp),(opc));
4791 x86_64_emit_imm8((imm));
4793 x86_64_emit_rex(1,(basereg),0,0);
4794 *(mcodeptr++) = 0x81;
4795 x86_64_emit_membase((basereg),(disp),(opc));
4796 x86_64_emit_imm32((imm));
4801 void x86_64_alul_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
4802 if (x86_64_is_imm8(imm)) {
4803 x86_64_emit_rex(0,(basereg),0,0);
4804 *(mcodeptr++) = 0x83;
4805 x86_64_emit_membase((basereg),(disp),(opc));
4806 x86_64_emit_imm8((imm));
4808 x86_64_emit_rex(0,(basereg),0,0);
4809 *(mcodeptr++) = 0x81;
4810 x86_64_emit_membase((basereg),(disp),(opc));
4811 x86_64_emit_imm32((imm));
4816 void x86_64_test_reg_reg(s8 reg, s8 dreg) {
4817 x86_64_emit_rex(1,(reg),0,(dreg));
4818 *(mcodeptr++) = 0x85;
4819 x86_64_emit_reg((reg),(dreg));
4823 void x86_64_testl_reg_reg(s8 reg, s8 dreg) {
4824 x86_64_emit_rex(0,(reg),0,(dreg));
4825 *(mcodeptr++) = 0x85;
4826 x86_64_emit_reg((reg),(dreg));
4830 void x86_64_test_imm_reg(s8 imm, s8 reg) {
4831 *(mcodeptr++) = 0xf7;
4832 x86_64_emit_reg(0,(reg));
4833 x86_64_emit_imm32((imm));
4837 void x86_64_testw_imm_reg(s8 imm, s8 reg) {
4838 *(mcodeptr++) = 0x66;
4839 *(mcodeptr++) = 0xf7;
4840 x86_64_emit_reg(0,(reg));
4841 x86_64_emit_imm16((imm));
4845 void x86_64_testb_imm_reg(s8 imm, s8 reg) {
4846 *(mcodeptr++) = 0xf6;
4847 x86_64_emit_reg(0,(reg));
4848 x86_64_emit_imm8((imm));
4852 void x86_64_lea_membase_reg(s8 basereg, s8 disp, s8 reg) {
4853 x86_64_emit_rex(1,(reg),0,(basereg));
4854 *(mcodeptr++) = 0x8d;
4855 x86_64_emit_membase((basereg),(disp),(reg));
4859 void x86_64_leal_membase_reg(s8 basereg, s8 disp, s8 reg) {
4860 x86_64_emit_rex(0,(reg),0,(basereg));
4861 *(mcodeptr++) = 0x8d;
4862 x86_64_emit_membase((basereg),(disp),(reg));
4868 * inc, dec operations
4870 void x86_64_inc_reg(s8 reg) {
4871 x86_64_emit_rex(1,0,0,(reg));
4872 *(mcodeptr++) = 0xff;
4873 x86_64_emit_reg(0,(reg));
4877 void x86_64_incl_reg(s8 reg) {
4878 x86_64_emit_rex(0,0,0,(reg));
4879 *(mcodeptr++) = 0xff;
4880 x86_64_emit_reg(0,(reg));
4884 void x86_64_inc_membase(s8 basereg, s8 disp) {
4885 x86_64_emit_rex(1,(basereg),0,0);
4886 *(mcodeptr++) = 0xff;
4887 x86_64_emit_membase((basereg),(disp),0);
4891 void x86_64_incl_membase(s8 basereg, s8 disp) {
4892 x86_64_emit_rex(0,(basereg),0,0);
4893 *(mcodeptr++) = 0xff;
4894 x86_64_emit_membase((basereg),(disp),0);
4898 void x86_64_dec_reg(s8 reg) {
4899 x86_64_emit_rex(1,0,0,(reg));
4900 *(mcodeptr++) = 0xff;
4901 x86_64_emit_reg(1,(reg));
4905 void x86_64_decl_reg(s8 reg) {
4906 x86_64_emit_rex(0,0,0,(reg));
4907 *(mcodeptr++) = 0xff;
4908 x86_64_emit_reg(1,(reg));
4912 void x86_64_dec_membase(s8 basereg, s8 disp) {
4913 x86_64_emit_rex(1,(basereg),0,0);
4914 *(mcodeptr++) = 0xff;
4915 x86_64_emit_membase((basereg),(disp),1);
4919 void x86_64_decl_membase(s8 basereg, s8 disp) {
4920 x86_64_emit_rex(0,(basereg),0,0);
4921 *(mcodeptr++) = 0xff;
4922 x86_64_emit_membase((basereg),(disp),1);
4928 void x86_64_cltd() {
4929 *(mcodeptr++) = 0x99;
4933 void x86_64_cqto() {
4934 x86_64_emit_rex(1,0,0,0);
4935 *(mcodeptr++) = 0x99;
4940 void x86_64_imul_reg_reg(s8 reg, s8 dreg) {
4941 x86_64_emit_rex(1,(dreg),0,(reg));
4942 *(mcodeptr++) = 0x0f;
4943 *(mcodeptr++) = 0xaf;
4944 x86_64_emit_reg((dreg),(reg));
4948 void x86_64_imull_reg_reg(s8 reg, s8 dreg) {
4949 x86_64_emit_rex(0,(dreg),0,(reg));
4950 *(mcodeptr++) = 0x0f;
4951 *(mcodeptr++) = 0xaf;
4952 x86_64_emit_reg((dreg),(reg));
4956 void x86_64_imul_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4957 x86_64_emit_rex(1,(dreg),0,(basereg));
4958 *(mcodeptr++) = 0x0f;
4959 *(mcodeptr++) = 0xaf;
4960 x86_64_emit_membase((basereg),(disp),(dreg));
4964 void x86_64_imull_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4965 x86_64_emit_rex(0,(dreg),0,(basereg));
4966 *(mcodeptr++) = 0x0f;
4967 *(mcodeptr++) = 0xaf;
4968 x86_64_emit_membase((basereg),(disp),(dreg));
4972 void x86_64_imul_imm_reg(s8 imm, s8 dreg) {
4973 if (x86_64_is_imm8((imm))) {
4974 x86_64_emit_rex(1,0,0,(dreg));
4975 *(mcodeptr++) = 0x6b;
4976 x86_64_emit_reg(0,(dreg));
4977 x86_64_emit_imm8((imm));
4979 x86_64_emit_rex(1,0,0,(dreg));
4980 *(mcodeptr++) = 0x69;
4981 x86_64_emit_reg(0,(dreg));
4982 x86_64_emit_imm32((imm));
4987 void x86_64_imul_imm_reg_reg(s8 imm, s8 reg, s8 dreg) {
4988 if (x86_64_is_imm8((imm))) {
4989 x86_64_emit_rex(1,(dreg),0,(reg));
4990 *(mcodeptr++) = 0x6b;
4991 x86_64_emit_reg((dreg),(reg));
4992 x86_64_emit_imm8((imm));
4994 x86_64_emit_rex(1,(dreg),0,(reg));
4995 *(mcodeptr++) = 0x69;
4996 x86_64_emit_reg((dreg),(reg));
4997 x86_64_emit_imm32((imm));
5002 void x86_64_imull_imm_reg_reg(s8 imm, s8 reg, s8 dreg) {
5003 if (x86_64_is_imm8((imm))) {
5004 x86_64_emit_rex(0,(dreg),0,(reg));
5005 *(mcodeptr++) = 0x6b;
5006 x86_64_emit_reg((dreg),(reg));
5007 x86_64_emit_imm8((imm));
5009 x86_64_emit_rex(0,(dreg),0,(reg));
5010 *(mcodeptr++) = 0x69;
5011 x86_64_emit_reg((dreg),(reg));
5012 x86_64_emit_imm32((imm));
5017 void x86_64_imul_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg) {
5018 if (x86_64_is_imm8((imm))) {
5019 x86_64_emit_rex(1,(dreg),0,(basereg));
5020 *(mcodeptr++) = 0x6b;
5021 x86_64_emit_membase((basereg),(disp),(dreg));
5022 x86_64_emit_imm8((imm));
5024 x86_64_emit_rex(1,(dreg),0,(basereg));
5025 *(mcodeptr++) = 0x69;
5026 x86_64_emit_membase((basereg),(disp),(dreg));
5027 x86_64_emit_imm32((imm));
5032 void x86_64_imull_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg) {
5033 if (x86_64_is_imm8((imm))) {
5034 x86_64_emit_rex(0,(dreg),0,(basereg));
5035 *(mcodeptr++) = 0x6b;
5036 x86_64_emit_membase((basereg),(disp),(dreg));
5037 x86_64_emit_imm8((imm));
5039 x86_64_emit_rex(0,(dreg),0,(basereg));
5040 *(mcodeptr++) = 0x69;
5041 x86_64_emit_membase((basereg),(disp),(dreg));
5042 x86_64_emit_imm32((imm));
5047 void x86_64_idiv_reg(s8 reg) {
5048 x86_64_emit_rex(1,0,0,(reg));
5049 *(mcodeptr++) = 0xf7;
5050 x86_64_emit_reg(7,(reg));
5054 void x86_64_idivl_reg(s8 reg) {
5055 x86_64_emit_rex(0,0,0,(reg));
5056 *(mcodeptr++) = 0xf7;
5057 x86_64_emit_reg(7,(reg));
5063 *(mcodeptr++) = 0xc3; /* 0xc3 = ret; body of the return emitter -- its header line was lost in extraction */
5071 void x86_64_shift_reg(s8 opc, s8 reg) {
5072 x86_64_emit_rex(1,0,0,(reg));
5073 *(mcodeptr++) = 0xd3;
5074 x86_64_emit_reg((opc),(reg));
5078 void x86_64_shiftl_reg(s8 opc, s8 reg) {
5079 x86_64_emit_rex(0,0,0,(reg));
5080 *(mcodeptr++) = 0xd3;
5081 x86_64_emit_reg((opc),(reg));
5085 void x86_64_shift_membase(s8 opc, s8 basereg, s8 disp) {
5086 x86_64_emit_rex(1,0,0,(basereg));
5087 *(mcodeptr++) = 0xd3;
5088 x86_64_emit_membase((basereg),(disp),(opc));
5092 void x86_64_shiftl_membase(s8 opc, s8 basereg, s8 disp) {
5093 x86_64_emit_rex(0,0,0,(basereg));
5094 *(mcodeptr++) = 0xd3;
5095 x86_64_emit_membase((basereg),(disp),(opc));
5099 void x86_64_shift_imm_reg(s8 opc, s8 imm, s8 dreg) {
5101 x86_64_emit_rex(1,0,0,(dreg));
5102 *(mcodeptr++) = 0xd1;
5103 x86_64_emit_reg((opc),(dreg));
5105 x86_64_emit_rex(1,0,0,(dreg));
5106 *(mcodeptr++) = 0xc1;
5107 x86_64_emit_reg((opc),(dreg));
5108 x86_64_emit_imm8((imm));
5113 void x86_64_shiftl_imm_reg(s8 opc, s8 imm, s8 dreg) {
5115 x86_64_emit_rex(0,0,0,(dreg));
5116 *(mcodeptr++) = 0xd1;
5117 x86_64_emit_reg((opc),(dreg));
5119 x86_64_emit_rex(0,0,0,(dreg));
5120 *(mcodeptr++) = 0xc1;
5121 x86_64_emit_reg((opc),(dreg));
5122 x86_64_emit_imm8((imm));
5127 void x86_64_shift_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
5129 x86_64_emit_rex(1,0,0,(basereg));
5130 *(mcodeptr++) = 0xd1;
5131 x86_64_emit_membase((basereg),(disp),(opc));
5133 x86_64_emit_rex(1,0,0,(basereg));
5134 *(mcodeptr++) = 0xc1;
5135 x86_64_emit_membase((basereg),(disp),(opc));
5136 x86_64_emit_imm8((imm));
5141 void x86_64_shiftl_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
5143 x86_64_emit_rex(0,0,0,(basereg));
5144 *(mcodeptr++) = 0xd1;
5145 x86_64_emit_membase((basereg),(disp),(opc));
5147 x86_64_emit_rex(0,0,0,(basereg));
5148 *(mcodeptr++) = 0xc1;
5149 x86_64_emit_membase((basereg),(disp),(opc));
5150 x86_64_emit_imm8((imm));
5159 void x86_64_jmp_imm(s8 imm) {
5160 *(mcodeptr++) = 0xe9;
5161 x86_64_emit_imm32((imm));
5165 void x86_64_jmp_reg(s8 reg) {
5166 x86_64_emit_rex(0,0,0,(reg));
5167 *(mcodeptr++) = 0xff;
5168 x86_64_emit_reg(4,(reg));
5172 void x86_64_jcc(s8 opc, s8 imm) {
5173 *(mcodeptr++) = 0x0f;
5174 *(mcodeptr++) = (0x80 + (opc));
5175 x86_64_emit_imm32((imm));
5181 * conditional set and move operations
5184 /* we need the rex byte to get all low bytes */
5185 void x86_64_setcc_reg(s8 opc, s8 reg) {
5186 *(mcodeptr++) = (0x40 | (((reg) >> 3) & 0x01));
5187 *(mcodeptr++) = 0x0f;
5188 *(mcodeptr++) = (0x90 + (opc));
5189 x86_64_emit_reg(0,(reg));
5193 /* we need the rex byte to get all low bytes */
5194 void x86_64_setcc_membase(s8 opc, s8 basereg, s8 disp) {
5195 *(mcodeptr++) = (0x40 | (((basereg) >> 3) & 0x01));
5196 *(mcodeptr++) = 0x0f;
5197 *(mcodeptr++) = (0x90 + (opc));
5198 x86_64_emit_membase((basereg),(disp),0);
5202 void x86_64_cmovcc_reg_reg(s8 opc, s8 reg, s8 dreg) {
5203 x86_64_emit_rex(1,(dreg),0,(reg));
5204 *(mcodeptr++) = 0x0f;
5205 *(mcodeptr++) = (0x40 + (opc));
5206 x86_64_emit_reg((dreg),(reg));
5210 void x86_64_cmovccl_reg_reg(s8 opc, s8 reg, s8 dreg) {
5211 x86_64_emit_rex(0,(dreg),0,(reg));
5212 *(mcodeptr++) = 0x0f;
5213 *(mcodeptr++) = (0x40 + (opc));
5214 x86_64_emit_reg((dreg),(reg));
5219 void x86_64_neg_reg(s8 reg) {
5220 x86_64_emit_rex(1,0,0,(reg));
5221 *(mcodeptr++) = 0xf7;
5222 x86_64_emit_reg(3,(reg));
5226 void x86_64_negl_reg(s8 reg) {
5227 x86_64_emit_rex(0,0,0,(reg));
5228 *(mcodeptr++) = 0xf7;
5229 x86_64_emit_reg(3,(reg));
5233 void x86_64_neg_membase(s8 basereg, s8 disp) {
5234 x86_64_emit_rex(1,0,0,(basereg));
5235 *(mcodeptr++) = 0xf7;
5236 x86_64_emit_membase((basereg),(disp),3);
5240 void x86_64_negl_membase(s8 basereg, s8 disp) {
5241 x86_64_emit_rex(0,0,0,(basereg));
5242 *(mcodeptr++) = 0xf7;
5243 x86_64_emit_membase((basereg),(disp),3);
5248 void x86_64_push_imm(s8 imm) {
5249 *(mcodeptr++) = 0x68;
5250 x86_64_emit_imm32((imm));
5254 void x86_64_pop_reg(s8 reg) {
5255 x86_64_emit_rex(0,0,0,(reg));
5256 *(mcodeptr++) = 0x58 + (0x07 & (reg));
5260 void x86_64_xchg_reg_reg(s8 reg, s8 dreg) {
5261 x86_64_emit_rex(1,(reg),0,(dreg));
5262 *(mcodeptr++) = 0x87;
5263 x86_64_emit_reg((reg),(dreg));
5268 *(mcodeptr++) = 0x90; /* 0x90 = nop; body of the nop emitter -- its header line was lost in extraction */
5276 void x86_64_call_reg(s8 reg) {
5277 x86_64_emit_rex(1,0,0,(reg));
5278 *(mcodeptr++) = 0xff;
5279 x86_64_emit_reg(2,(reg));
5283 void x86_64_call_imm(s8 imm) {
5284 *(mcodeptr++) = 0xe8;
5285 x86_64_emit_imm32((imm));
5291 * floating point instructions (SSE2)
5293 void x86_64_addsd_reg_reg(s8 reg, s8 dreg) {
5294 *(mcodeptr++) = 0xf2;
5295 x86_64_emit_rex(0,(dreg),0,(reg));
5296 *(mcodeptr++) = 0x0f;
5297 *(mcodeptr++) = 0x58;
5298 x86_64_emit_reg((dreg),(reg));
5302 void x86_64_addss_reg_reg(s8 reg, s8 dreg) {
5303 *(mcodeptr++) = 0xf3;
5304 x86_64_emit_rex(0,(dreg),0,(reg));
5305 *(mcodeptr++) = 0x0f;
5306 *(mcodeptr++) = 0x58;
5307 x86_64_emit_reg((dreg),(reg));
5311 void x86_64_cvtsi2ssq_reg_reg(s8 reg, s8 dreg) {
5312 *(mcodeptr++) = 0xf3;
5313 x86_64_emit_rex(1,(dreg),0,(reg));
5314 *(mcodeptr++) = 0x0f;
5315 *(mcodeptr++) = 0x2a;
5316 x86_64_emit_reg((dreg),(reg));
5320 void x86_64_cvtsi2ss_reg_reg(s8 reg, s8 dreg) {
5321 *(mcodeptr++) = 0xf3;
5322 x86_64_emit_rex(0,(dreg),0,(reg));
5323 *(mcodeptr++) = 0x0f;
5324 *(mcodeptr++) = 0x2a;
5325 x86_64_emit_reg((dreg),(reg));
5329 void x86_64_cvtsi2sdq_reg_reg(s8 reg, s8 dreg) {
5330 *(mcodeptr++) = 0xf2;
5331 x86_64_emit_rex(1,(dreg),0,(reg));
5332 *(mcodeptr++) = 0x0f;
5333 *(mcodeptr++) = 0x2a;
5334 x86_64_emit_reg((dreg),(reg));
5338 void x86_64_cvtsi2sd_reg_reg(s8 reg, s8 dreg) {
5339 *(mcodeptr++) = 0xf2;
5340 x86_64_emit_rex(0,(dreg),0,(reg));
5341 *(mcodeptr++) = 0x0f;
5342 *(mcodeptr++) = 0x2a;
5343 x86_64_emit_reg((dreg),(reg));
5347 void x86_64_cvtss2sd_reg_reg(s8 reg, s8 dreg) {
5348 *(mcodeptr++) = 0xf3;
5349 x86_64_emit_rex(0,(dreg),0,(reg));
5350 *(mcodeptr++) = 0x0f;
5351 *(mcodeptr++) = 0x5a;
5352 x86_64_emit_reg((dreg),(reg));
5356 void x86_64_cvtsd2ss_reg_reg(s8 reg, s8 dreg) {
5357 *(mcodeptr++) = 0xf2;
5358 x86_64_emit_rex(0,(dreg),0,(reg));
5359 *(mcodeptr++) = 0x0f;
5360 *(mcodeptr++) = 0x5a;
5361 x86_64_emit_reg((dreg),(reg));
5365 void x86_64_cvttss2siq_reg_reg(s8 reg, s8 dreg) {
5366 *(mcodeptr++) = 0xf3;
5367 x86_64_emit_rex(1,(dreg),0,(reg));
5368 *(mcodeptr++) = 0x0f;
5369 *(mcodeptr++) = 0x2c;
5370 x86_64_emit_reg((dreg),(reg));
5374 void x86_64_cvttss2si_reg_reg(s8 reg, s8 dreg) {
5375 *(mcodeptr++) = 0xf3;
5376 x86_64_emit_rex(0,(dreg),0,(reg));
5377 *(mcodeptr++) = 0x0f;
5378 *(mcodeptr++) = 0x2c;
5379 x86_64_emit_reg((dreg),(reg));
5383 void x86_64_cvttsd2siq_reg_reg(s8 reg, s8 dreg) {
5384 *(mcodeptr++) = 0xf2;
5385 x86_64_emit_rex(1,(dreg),0,(reg));
5386 *(mcodeptr++) = 0x0f;
5387 *(mcodeptr++) = 0x2c;
5388 x86_64_emit_reg((dreg),(reg));
5392 void x86_64_cvttsd2si_reg_reg(s8 reg, s8 dreg) {
5393 *(mcodeptr++) = 0xf2;
5394 x86_64_emit_rex(0,(dreg),0,(reg));
5395 *(mcodeptr++) = 0x0f;
5396 *(mcodeptr++) = 0x2c;
5397 x86_64_emit_reg((dreg),(reg));
5401 void x86_64_divss_reg_reg(s8 reg, s8 dreg) {
5402 *(mcodeptr++) = 0xf3;
5403 x86_64_emit_rex(0,(dreg),0,(reg));
5404 *(mcodeptr++) = 0x0f;
5405 *(mcodeptr++) = 0x5e;
5406 x86_64_emit_reg((dreg),(reg));
5410 void x86_64_divsd_reg_reg(s8 reg, s8 dreg) {
5411 *(mcodeptr++) = 0xf2;
5412 x86_64_emit_rex(0,(dreg),0,(reg));
5413 *(mcodeptr++) = 0x0f;
5414 *(mcodeptr++) = 0x5e;
5415 x86_64_emit_reg((dreg),(reg));
5419 void x86_64_movd_reg_freg(s8 reg, s8 freg) {
5420 *(mcodeptr++) = 0x66;
5421 x86_64_emit_rex(1,(freg),0,(reg));
5422 *(mcodeptr++) = 0x0f;
5423 *(mcodeptr++) = 0x6e;
5424 x86_64_emit_reg((freg),(reg));
5428 void x86_64_movd_freg_reg(s8 freg, s8 reg) {
5429 *(mcodeptr++) = 0x66;
5430 x86_64_emit_rex(1,(freg),0,(reg));
5431 *(mcodeptr++) = 0x0f;
5432 *(mcodeptr++) = 0x7e;
5433 x86_64_emit_reg((freg),(reg));
5437 void x86_64_movd_reg_membase(s8 reg, s8 basereg, s8 disp) {
5438 *(mcodeptr++) = 0x66;
5439 x86_64_emit_rex(0,(reg),0,(basereg));
5440 *(mcodeptr++) = 0x0f;
5441 *(mcodeptr++) = 0x7e;
5442 x86_64_emit_membase((basereg),(disp),(reg));
5446 void x86_64_movd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
5447 *(mcodeptr++) = 0x66;
5448 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
5449 *(mcodeptr++) = 0x0f;
5450 *(mcodeptr++) = 0x7e;
5451 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
5455 void x86_64_movd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5456 *(mcodeptr++) = 0x66;
5457 x86_64_emit_rex(1,(dreg),0,(basereg));
5458 *(mcodeptr++) = 0x0f;
5459 *(mcodeptr++) = 0x6e;
5460 x86_64_emit_membase((basereg),(disp),(dreg));
5464 void x86_64_movdl_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5465 *(mcodeptr++) = 0x66;
5466 x86_64_emit_rex(0,(dreg),0,(basereg));
5467 *(mcodeptr++) = 0x0f;
5468 *(mcodeptr++) = 0x6e;
5469 x86_64_emit_membase((basereg),(disp),(dreg));
5473 void x86_64_movd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
5474 *(mcodeptr++) = 0x66;
5475 x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
5476 *(mcodeptr++) = 0x0f;
5477 *(mcodeptr++) = 0x6e;
5478 x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
5482 void x86_64_movq_reg_reg(s8 reg, s8 dreg) {
5483 *(mcodeptr++) = 0xf3;
5484 x86_64_emit_rex(0,(dreg),0,(reg));
5485 *(mcodeptr++) = 0x0f;
5486 *(mcodeptr++) = 0x7e;
5487 x86_64_emit_reg((dreg),(reg));
5491 void x86_64_movq_reg_membase(s8 reg, s8 basereg, s8 disp) {
5492 *(mcodeptr++) = 0x66;
5493 x86_64_emit_rex(0,(reg),0,(basereg));
5494 *(mcodeptr++) = 0x0f;
5495 *(mcodeptr++) = 0xd6;
5496 x86_64_emit_membase((basereg),(disp),(reg));
5500 void x86_64_movq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5501 *(mcodeptr++) = 0xf3;
5502 x86_64_emit_rex(0,(dreg),0,(basereg));
5503 *(mcodeptr++) = 0x0f;
5504 *(mcodeptr++) = 0x7e;
5505 x86_64_emit_membase((basereg),(disp),(dreg));
5509 void x86_64_movss_reg_reg(s8 reg, s8 dreg) {
5510 *(mcodeptr++) = 0xf3;
5511 x86_64_emit_rex(0,(reg),0,(dreg));
5512 *(mcodeptr++) = 0x0f;
5513 *(mcodeptr++) = 0x10;
5514 x86_64_emit_reg((reg),(dreg));
5518 void x86_64_movsd_reg_reg(s8 reg, s8 dreg) {
5519 *(mcodeptr++) = 0xf2;
5520 x86_64_emit_rex(0,(reg),0,(dreg));
5521 *(mcodeptr++) = 0x0f;
5522 *(mcodeptr++) = 0x10;
5523 x86_64_emit_reg((reg),(dreg));
5527 void x86_64_movss_reg_membase(s8 reg, s8 basereg, s8 disp) {
5528 *(mcodeptr++) = 0xf3;
5529 x86_64_emit_rex(0,(reg),0,(basereg));
5530 *(mcodeptr++) = 0x0f;
5531 *(mcodeptr++) = 0x11;
5532 x86_64_emit_membase((basereg),(disp),(reg));
5536 void x86_64_movsd_reg_membase(s8 reg, s8 basereg, s8 disp) {
5537 *(mcodeptr++) = 0xf2;
5538 x86_64_emit_rex(0,(reg),0,(basereg));
5539 *(mcodeptr++) = 0x0f;
5540 *(mcodeptr++) = 0x11;
5541 x86_64_emit_membase((basereg),(disp),(reg));
5545 void x86_64_movss_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5546 *(mcodeptr++) = 0xf3;
5547 x86_64_emit_rex(0,(dreg),0,(basereg));
5548 *(mcodeptr++) = 0x0f;
5549 *(mcodeptr++) = 0x10;
5550 x86_64_emit_membase((basereg),(disp),(dreg));
5554 void x86_64_movlps_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5555 x86_64_emit_rex(0,(dreg),0,(basereg));
5556 *(mcodeptr++) = 0x0f;
5557 *(mcodeptr++) = 0x12;
5558 x86_64_emit_membase((basereg),(disp),(dreg));
5562 void x86_64_movsd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5563 *(mcodeptr++) = 0xf2;
5564 x86_64_emit_rex(0,(dreg),0,(basereg));
5565 *(mcodeptr++) = 0x0f;
5566 *(mcodeptr++) = 0x10;
5567 x86_64_emit_membase((basereg),(disp),(dreg));
5571 void x86_64_movlpd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5572 *(mcodeptr++) = 0x66;
5573 x86_64_emit_rex(0,(dreg),0,(basereg));
5574 *(mcodeptr++) = 0x0f;
5575 *(mcodeptr++) = 0x12;
5576 x86_64_emit_membase((basereg),(disp),(dreg));
5580 void x86_64_movss_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
5581 *(mcodeptr++) = 0xf3;
5582 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
5583 *(mcodeptr++) = 0x0f;
5584 *(mcodeptr++) = 0x11;
5585 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
5589 void x86_64_movsd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
5590 *(mcodeptr++) = 0xf2;
5591 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
5592 *(mcodeptr++) = 0x0f;
5593 *(mcodeptr++) = 0x11;
5594 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
5598 void x86_64_movss_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
5599 *(mcodeptr++) = 0xf3;
5600 x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
5601 *(mcodeptr++) = 0x0f;
5602 *(mcodeptr++) = 0x10;
5603 x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
5607 void x86_64_movsd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
5608 *(mcodeptr++) = 0xf2;
5609 x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
5610 *(mcodeptr++) = 0x0f;
5611 *(mcodeptr++) = 0x10;
5612 x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
5616 void x86_64_mulss_reg_reg(s8 reg, s8 dreg) {
5617 *(mcodeptr++) = 0xf3;
5618 x86_64_emit_rex(0,(dreg),0,(reg));
5619 *(mcodeptr++) = 0x0f;
5620 *(mcodeptr++) = 0x59;
5621 x86_64_emit_reg((dreg),(reg));
5625 void x86_64_mulsd_reg_reg(s8 reg, s8 dreg) {
5626 *(mcodeptr++) = 0xf2;
5627 x86_64_emit_rex(0,(dreg),0,(reg));
5628 *(mcodeptr++) = 0x0f;
5629 *(mcodeptr++) = 0x59;
5630 x86_64_emit_reg((dreg),(reg));
5634 void x86_64_subss_reg_reg(s8 reg, s8 dreg) {
5635 *(mcodeptr++) = 0xf3;
5636 x86_64_emit_rex(0,(dreg),0,(reg));
5637 *(mcodeptr++) = 0x0f;
5638 *(mcodeptr++) = 0x5c;
5639 x86_64_emit_reg((dreg),(reg));
5643 void x86_64_subsd_reg_reg(s8 reg, s8 dreg) {
5644 *(mcodeptr++) = 0xf2;
5645 x86_64_emit_rex(0,(dreg),0,(reg));
5646 *(mcodeptr++) = 0x0f;
5647 *(mcodeptr++) = 0x5c;
5648 x86_64_emit_reg((dreg),(reg));
5652 void x86_64_ucomiss_reg_reg(s8 reg, s8 dreg) {
5653 x86_64_emit_rex(0,(dreg),0,(reg));
5654 *(mcodeptr++) = 0x0f;
5655 *(mcodeptr++) = 0x2e;
5656 x86_64_emit_reg((dreg),(reg));
5660 void x86_64_ucomisd_reg_reg(s8 reg, s8 dreg) {
5661 *(mcodeptr++) = 0x66;
5662 x86_64_emit_rex(0,(dreg),0,(reg));
5663 *(mcodeptr++) = 0x0f;
5664 *(mcodeptr++) = 0x2e;
5665 x86_64_emit_reg((dreg),(reg));
5669 void x86_64_xorps_reg_reg(s8 reg, s8 dreg) {
5670 x86_64_emit_rex(0,(dreg),0,(reg));
5671 *(mcodeptr++) = 0x0f;
5672 *(mcodeptr++) = 0x57;
5673 x86_64_emit_reg((dreg),(reg));
5677 void x86_64_xorps_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5678 x86_64_emit_rex(0,(dreg),0,(basereg));
5679 *(mcodeptr++) = 0x0f;
5680 *(mcodeptr++) = 0x57;
5681 x86_64_emit_membase((basereg),(disp),(dreg));
5685 void x86_64_xorpd_reg_reg(s8 reg, s8 dreg) {
5686 *(mcodeptr++) = 0x66;
5687 x86_64_emit_rex(0,(dreg),0,(reg));
5688 *(mcodeptr++) = 0x0f;
5689 *(mcodeptr++) = 0x57;
5690 x86_64_emit_reg((dreg),(reg));
5694 void x86_64_xorpd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5695 *(mcodeptr++) = 0x66;
5696 x86_64_emit_rex(0,(dreg),0,(basereg));
5697 *(mcodeptr++) = 0x0f;
5698 *(mcodeptr++) = 0x57;
5699 x86_64_emit_membase((basereg),(disp),(dreg));
5705 * These are local overrides for various environment variables in Emacs.
5706 * Please do not remove this and leave it at the end of the file, where
5707 * Emacs will automagically detect them.
5708 * ---------------------------------------------------------------------
5711 * indent-tabs-mode: t