1 /* jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003
4 Institut f. Computersprachen, TU Wien
5 R. Grafl, A. Krall, C. Kruegel, C. Oates, R. Obermaisser, M. Probst,
6 S. Ring, E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich,
9 This file is part of CACAO.
11 This program is free software; you can redistribute it and/or
12 modify it under the terms of the GNU General Public License as
13 published by the Free Software Foundation; either version 2, or (at
14 your option) any later version.
16 This program is distributed in the hope that it will be useful, but
17 WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 General Public License for more details.
21 You should have received a copy of the GNU General Public License
22 along with this program; if not, write to the Free Software
23 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
26 Contact: cacao@complang.tuwien.ac.at
28 Authors: Andreas Krall
31 $Id: codegen.c 1064 2004-05-16 15:36:36Z twisti $
36 #define _POSIX_C_SOURCE 199506L
38 #define _XOPEN_SOURCE_EXTENDED
54 #include "methodtable.h"
56 /* include independent code generation stuff */
57 #include "codegen.inc"
61 /* register description - array ***********************************************/
63 /* #define REG_RES 0 reserved register for OS or code generator */
64 /* #define REG_RET 1 return value register */
65 /* #define REG_EXC 2 exception value register (only old jit) */
66 /* #define REG_SAV 3 (callee) saved register */
67 /* #define REG_TMP 4 scratch temporary register (caller saved) */
68 /* #define REG_ARG 5 argument register (caller saved) */
70 /* #define REG_END -1 last entry in tables */
73 REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
74 REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
79 int nregdescfloat[] = {
80 /* REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP, */
81 /* REG_RES, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV, REG_SAV, */
82 REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
83 REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
88 /* additional functions and macros to generate code ***************************/
90 #define BlockPtrOfPC(pc) ((basicblock *) iptr->target)
94 #define COUNT_SPILLS count_spills++
100 #define CALCOFFSETBYTES(var, reg, val) \
101 if ((s4) (val) < -128 || (s4) (val) > 127) (var) += 4; \
102 else if ((s4) (val) != 0) (var) += 1; \
103 else if ((reg) == RBP || (reg) == RSP || (reg) == R12 || (reg) == R13) (var) += 1;
106 #define CALCIMMEDIATEBYTES(var, val) \
107 if ((s4) (val) < -128 || (s4) (val) > 127) (var) += 4; \
111 /* gen_nullptr_check(objreg) */
113 #define gen_nullptr_check(objreg) \
115 x86_64_test_reg_reg((objreg), (objreg)); \
116 x86_64_jcc(X86_64_CC_E, 0); \
117 codegen_addxnullrefs(mcodeptr); \
121 /* MCODECHECK(icnt) */
123 #define MCODECHECK(icnt) \
124 if ((mcodeptr + (icnt)) > (u1*) mcodeend) mcodeptr = (u1*) codegen_increase((u1*) mcodeptr)
127 generates an integer-move from register a to b.
128 if a and b are the same int-register, no code will be generated.
131 #define M_INTMOVE(reg,dreg) \
132 if ((reg) != (dreg)) { \
133 x86_64_mov_reg_reg((reg),(dreg)); \
138 generates a floating-point-move from register a to b.
139 if a and b are the same float-register, no code will be generated
142 #define M_FLTMOVE(reg,dreg) \
143 if ((reg) != (dreg)) { \
144 x86_64_movq_reg_reg((reg),(dreg)); \
149 this function generates code to fetch data from a pseudo-register
150 into a real register.
151 If the pseudo-register has actually been assigned to a real
152 register, no code will be emitted, since following operations
153 can use this register directly.
155 v: pseudoregister to be fetched from
156 tempregnum: temporary register to be used if v is actually spilled to ram
158 return: the register number, where the operand can be found after
159 fetching (this will be either tempregnum or the register
160 number already given to v)
163 #define var_to_reg_int(regnr,v,tempnr) \
164 if ((v)->flags & INMEMORY) { \
166 if ((v)->type == TYPE_INT) { \
167 x86_64_movl_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
169 x86_64_mov_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
173 regnr = (v)->regoff; \
178 #define var_to_reg_flt(regnr,v,tempnr) \
179 if ((v)->flags & INMEMORY) { \
181 if ((v)->type == TYPE_FLT) { \
182 x86_64_movlps_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
184 x86_64_movlpd_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
186 /* x86_64_movq_membase_reg(REG_SP, (v)->regoff * 8, tempnr);*/ \
189 regnr = (v)->regoff; \
194 This function determines a register, to which the result of an operation
195 should go, when it is ultimately intended to store the result in
197 If v is assigned to an actual register, this register will be returned.
198 Otherwise (when v is spilled) this function returns tempregnum.
199 If not already done, regoff and flags are set in the stack location.
202 static int reg_of_var(stackptr v, int tempregnum)
206 switch (v->varkind) {
208 if (!(v->flags & INMEMORY))
212 var = &(interfaces[v->varnum][v->type]);
213 v->regoff = var->regoff;
214 if (!(var->flags & INMEMORY))
218 var = &(locals[v->varnum][v->type]);
219 v->regoff = var->regoff;
220 if (!(var->flags & INMEMORY))
224 v->regoff = v->varnum;
225 if (IS_FLT_DBL_TYPE(v->type)) {
226 if (v->varnum < FLT_ARG_CNT) {
227 v->regoff = argfltregs[v->varnum];
228 return(argfltregs[v->varnum]);
231 if (v->varnum < INT_ARG_CNT) {
232 v->regoff = argintregs[v->varnum];
233 return(argintregs[v->varnum]);
236 v->regoff -= INT_ARG_CNT;
239 v->flags |= INMEMORY;
244 /* store_reg_to_var_xxx:
245 This function generates the code to store the result of an operation
246 back into a spilled pseudo-variable.
247 If the pseudo-variable has not been spilled in the first place, this
248 function will generate nothing.
250 v ............ Pseudovariable
251 tempregnum ... Number of the temporary registers as returned by
255 #define store_reg_to_var_int(sptr, tempregnum) \
256 if ((sptr)->flags & INMEMORY) { \
258 x86_64_mov_reg_membase(tempregnum, REG_SP, (sptr)->regoff * 8); \
262 #define store_reg_to_var_flt(sptr, tempregnum) \
263 if ((sptr)->flags & INMEMORY) { \
265 x86_64_movq_reg_membase(tempregnum, REG_SP, (sptr)->regoff * 8); \
269 /* NullPointerException signal handler for hardware null pointer check */
271 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
275 /* long faultaddr; */
277 struct ucontext *_uc = (struct ucontext *) _p;
278 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
279 struct sigaction act;
280 java_objectheader *xptr;
282 /* Reset signal handler - necessary for SysV, does no harm for BSD */
285 /* instr = *((int*)(sigctx->rip)); */
286 /* faultaddr = sigctx->sc_regs[(instr >> 16) & 0x1f]; */
288 /* if (faultaddr == 0) { */
289 act.sa_sigaction = (void *) catch_NullPointerException; /* reinstall handler */
290 act.sa_flags = SA_SIGINFO;
291 sigaction(sig, &act, NULL);
294 sigaddset(&nsig, sig);
295 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
297 xptr = new_exception(string_java_lang_NullPointerException);
299 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
300 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
301 sigctx->rip = (u8) asm_handle_exception;
306 /* faultaddr += (long) ((instr << 16) >> 16); */
307 /* fprintf(stderr, "faulting address: 0x%08x\n", faultaddr); */
308 /* panic("Stack overflow"); */
313 /* ArithmeticException signal handler for hardware divide by zero check */
315 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
319 struct ucontext *_uc = (struct ucontext *) _p;
320 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
321 struct sigaction act;
322 java_objectheader *xptr;
324 /* Reset signal handler - necessary for SysV, does no harm for BSD */
326 act.sa_sigaction = (void *) catch_ArithmeticException; /* reinstall handler */
327 act.sa_flags = SA_SIGINFO;
328 sigaction(sig, &act, NULL);
331 sigaddset(&nsig, sig);
332 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
334 xptr = new_exception_message(string_java_lang_ArithmeticException,
335 string_java_lang_ArithmeticException_message);
337 sigctx->rax = (s8) xptr; /* REG_ITMP1_XPTR */
338 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
339 sigctx->rip = (s8) asm_handle_exception;
345 void init_exceptions(void)
347 struct sigaction act;
349 /* install signal handlers we need to convert to exceptions */
353 act.sa_sigaction = (void *) catch_NullPointerException;
354 act.sa_flags = SA_SIGINFO;
355 sigaction(SIGSEGV, &act, NULL);
359 act.sa_sigaction = (void *) catch_NullPointerException;
360 act.sa_flags = SA_SIGINFO;
361 sigaction(SIGBUS, &act, NULL);
365 act.sa_sigaction = (void *) catch_ArithmeticException;
366 act.sa_flags = SA_SIGINFO;
367 sigaction(SIGFPE, &act, NULL);
371 /* function gen_mcode **********************************************************
373 generates machine code
375 *******************************************************************************/
377 /* global code generation pointer */
384 int len, s1, s2, s3, d;
398 /* space to save used callee saved registers */
400 savedregs_num += (savintregcnt - maxsavintreguse);
401 savedregs_num += (savfltregcnt - maxsavfltreguse);
403 parentargs_base = maxmemuse + savedregs_num;
405 #if defined(USE_THREADS) /* space to save argument of monitor_enter */
407 if (checksync && (method->flags & ACC_SYNCHRONIZED))
412 /* keep stack 16-byte aligned for calls into libc */
414 if (!isleafmethod || runverbose) {
415 if ((parentargs_base % 2) == 0) {
420 /* create method header */
422 (void) dseg_addaddress(method); /* MethodPointer */
423 (void) dseg_adds4(parentargs_base * 8); /* FrameSize */
425 #if defined(USE_THREADS)
427 /* IsSync contains the offset relative to the stack pointer for the
428 argument of monitor_exit used in the exception handler. Since the
429 offset could be zero and give a wrong meaning of the flag it is
433 if (checksync && (method->flags & ACC_SYNCHRONIZED))
434 (void) dseg_adds4((maxmemuse + 1) * 8); /* IsSync */
439 (void) dseg_adds4(0); /* IsSync */
441 (void) dseg_adds4(isleafmethod); /* IsLeaf */
442 (void) dseg_adds4(savintregcnt - maxsavintreguse); /* IntSave */
443 (void) dseg_adds4(savfltregcnt - maxsavfltreguse); /* FltSave */
444 (void) dseg_adds4(exceptiontablelength); /* ExTableSize */
446 /* create exception table */
448 for (ex = extable; ex != NULL; ex = ex->down) {
449 dseg_addtarget(ex->start);
450 dseg_addtarget(ex->end);
451 dseg_addtarget(ex->handler);
452 (void) dseg_addaddress(ex->catchtype);
455 /* initialize mcode variables */
457 mcodeptr = (u1*) mcodebase;
458 mcodeend = (s4*) (mcodebase + mcodesize);
459 MCODECHECK(128 + mparamcount);
461 /* create stack frame (if necessary) */
463 if (parentargs_base) {
464 x86_64_alu_imm_reg(X86_64_SUB, parentargs_base * 8, REG_SP);
467 /* save return address and used callee saved registers */
470 for (r = savintregcnt - 1; r >= maxsavintreguse; r--) {
471 p--; x86_64_mov_reg_membase(savintregs[r], REG_SP, p * 8);
473 for (r = savfltregcnt - 1; r >= maxsavfltreguse; r--) {
474 p--; x86_64_movq_reg_membase(savfltregs[r], REG_SP, p * 8);
477 /* save monitorenter argument */
479 #if defined(USE_THREADS)
480 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
481 if (method->flags & ACC_STATIC) {
482 x86_64_mov_imm_reg((s8) class, REG_ITMP1);
483 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, maxmemuse * 8);
486 x86_64_mov_reg_membase(argintregs[0], REG_SP, maxmemuse * 8);
491 /* copy argument registers to stack and call trace function with pointer
492 to arguments on stack.
495 x86_64_alu_imm_reg(X86_64_SUB, (6 + 8 + 1 + 1) * 8, REG_SP);
497 x86_64_mov_reg_membase(argintregs[0], REG_SP, 1 * 8);
498 x86_64_mov_reg_membase(argintregs[1], REG_SP, 2 * 8);
499 x86_64_mov_reg_membase(argintregs[2], REG_SP, 3 * 8);
500 x86_64_mov_reg_membase(argintregs[3], REG_SP, 4 * 8);
501 x86_64_mov_reg_membase(argintregs[4], REG_SP, 5 * 8);
502 x86_64_mov_reg_membase(argintregs[5], REG_SP, 6 * 8);
504 x86_64_movq_reg_membase(argfltregs[0], REG_SP, 7 * 8);
505 x86_64_movq_reg_membase(argfltregs[1], REG_SP, 8 * 8);
506 x86_64_movq_reg_membase(argfltregs[2], REG_SP, 9 * 8);
507 x86_64_movq_reg_membase(argfltregs[3], REG_SP, 10 * 8);
508 /* x86_64_movq_reg_membase(argfltregs[4], REG_SP, 11 * 8); */
509 /* x86_64_movq_reg_membase(argfltregs[5], REG_SP, 12 * 8); */
510 /* x86_64_movq_reg_membase(argfltregs[6], REG_SP, 13 * 8); */
511 /* x86_64_movq_reg_membase(argfltregs[7], REG_SP, 14 * 8); */
513 for (p = 0, l = 0; p < mparamcount; p++) {
516 if (IS_FLT_DBL_TYPE(t)) {
517 for (s1 = (mparamcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : mparamcount - 2; s1 >= p; s1--) {
518 x86_64_mov_reg_reg(argintregs[s1], argintregs[s1 + 1]);
521 x86_64_movd_freg_reg(argfltregs[l], argintregs[p]);
526 x86_64_mov_imm_reg((s8) method, REG_ITMP2);
527 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, 0 * 8);
528 x86_64_mov_imm_reg((s8) builtin_trace_args, REG_ITMP1);
529 x86_64_call_reg(REG_ITMP1);
531 x86_64_mov_membase_reg(REG_SP, 1 * 8, argintregs[0]);
532 x86_64_mov_membase_reg(REG_SP, 2 * 8, argintregs[1]);
533 x86_64_mov_membase_reg(REG_SP, 3 * 8, argintregs[2]);
534 x86_64_mov_membase_reg(REG_SP, 4 * 8, argintregs[3]);
535 x86_64_mov_membase_reg(REG_SP, 5 * 8, argintregs[4]);
536 x86_64_mov_membase_reg(REG_SP, 6 * 8, argintregs[5]);
538 x86_64_movq_membase_reg(REG_SP, 7 * 8, argfltregs[0]);
539 x86_64_movq_membase_reg(REG_SP, 8 * 8, argfltregs[1]);
540 x86_64_movq_membase_reg(REG_SP, 9 * 8, argfltregs[2]);
541 x86_64_movq_membase_reg(REG_SP, 10 * 8, argfltregs[3]);
542 /* x86_64_movq_membase_reg(REG_SP, 11 * 8, argfltregs[4]); */
543 /* x86_64_movq_membase_reg(REG_SP, 12 * 8, argfltregs[5]); */
544 /* x86_64_movq_membase_reg(REG_SP, 13 * 8, argfltregs[6]); */
545 /* x86_64_movq_membase_reg(REG_SP, 14 * 8, argfltregs[7]); */
547 x86_64_alu_imm_reg(X86_64_ADD, (6 + 8 + 1 + 1) * 8, REG_SP);
550 /* take arguments out of register or stack frame */
552 for (p = 0, l = 0, s1 = 0, s2 = 0; p < mparamcount; p++) {
554 var = &(locals[l][t]);
556 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
559 if (IS_INT_LNG_TYPE(t)) {
567 if (IS_INT_LNG_TYPE(t)) { /* integer args */
568 if (s1 < INT_ARG_CNT) { /* register arguments */
569 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
570 M_INTMOVE(argintregs[s1], r);
572 } else { /* reg arg -> spilled */
573 x86_64_mov_reg_membase(argintregs[s1], REG_SP, r * 8);
575 } else { /* stack arguments */
576 pa = s1 - INT_ARG_CNT;
577 if (s2 >= FLT_ARG_CNT) {
578 pa += s2 - FLT_ARG_CNT;
580 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
581 x86_64_mov_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, r); /* + 8 for return address */
582 } else { /* stack arg -> spilled */
583 x86_64_mov_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1); /* + 8 for return address */
584 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, r * 8);
589 } else { /* floating args */
590 if (s2 < FLT_ARG_CNT) { /* register arguments */
591 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
592 M_FLTMOVE(argfltregs[s2], r);
594 } else { /* reg arg -> spilled */
595 x86_64_movq_reg_membase(argfltregs[s2], REG_SP, r * 8);
598 } else { /* stack arguments */
599 pa = s2 - FLT_ARG_CNT;
600 if (s1 >= INT_ARG_CNT) {
601 pa += s1 - INT_ARG_CNT;
603 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
604 x86_64_movq_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, r);
607 x86_64_movq_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
608 x86_64_movq_reg_membase(REG_FTMP1, REG_SP, r * 8);
615 /* call monitorenter function */
617 #if defined(USE_THREADS)
618 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
619 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
620 x86_64_mov_imm_reg((s8) builtin_monitorenter, REG_ITMP1);
621 x86_64_call_reg(REG_ITMP1);
626 /* end of header generation */
628 /* walk through all basic blocks */
629 for (/* bbs = block_count, */ bptr = block; /* --bbs >= 0 */ bptr != NULL; bptr = bptr->next) {
631 bptr->mpc = (int)((u1*) mcodeptr - mcodebase);
633 if (bptr->flags >= BBREACHED) {
635 /* branch resolving */
638 for (brefs = bptr->branchrefs; brefs != NULL; brefs = brefs->next) {
639 gen_resolvebranch((u1*) mcodebase + brefs->branchpos,
640 brefs->branchpos, bptr->mpc);
643 /* copy interface registers to their destination */
648 while (src != NULL) {
650 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
651 if (bptr->type == BBTYPE_SBR) {
652 d = reg_of_var(src, REG_ITMP1);
654 store_reg_to_var_int(src, d);
656 } else if (bptr->type == BBTYPE_EXH) {
657 d = reg_of_var(src, REG_ITMP1);
658 M_INTMOVE(REG_ITMP1, d);
659 store_reg_to_var_int(src, d);
663 d = reg_of_var(src, REG_ITMP1);
664 if ((src->varkind != STACKVAR)) {
666 if (IS_FLT_DBL_TYPE(s2)) {
667 s1 = interfaces[len][s2].regoff;
668 if (!(interfaces[len][s2].flags & INMEMORY)) {
672 x86_64_movq_membase_reg(REG_SP, s1 * 8, d);
674 store_reg_to_var_flt(src, d);
677 s1 = interfaces[len][s2].regoff;
678 if (!(interfaces[len][s2].flags & INMEMORY)) {
682 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
684 store_reg_to_var_int(src, d);
691 /* walk through all instructions */
695 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
697 MCODECHECK(64); /* an instruction usually needs < 64 words */
700 case ICMD_NOP: /* ... ==> ... */
703 case ICMD_NULLCHECKPOP: /* ..., objectref ==> ... */
704 if (src->flags & INMEMORY) {
705 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
708 x86_64_test_reg_reg(src->regoff, src->regoff);
710 x86_64_jcc(X86_64_CC_E, 0);
711 codegen_addxnullrefs(mcodeptr);
714 /* constant operations ************************************************/
716 case ICMD_ICONST: /* ... ==> ..., constant */
717 /* op1 = 0, val.i = constant */
719 d = reg_of_var(iptr->dst, REG_ITMP1);
720 if (iptr->val.i == 0) {
721 x86_64_alu_reg_reg(X86_64_XOR, d, d);
723 x86_64_movl_imm_reg(iptr->val.i, d);
725 store_reg_to_var_int(iptr->dst, d);
728 case ICMD_ACONST: /* ... ==> ..., constant */
729 /* op1 = 0, val.a = constant */
731 d = reg_of_var(iptr->dst, REG_ITMP1);
732 if (iptr->val.a == 0) {
733 x86_64_alu_reg_reg(X86_64_XOR, d, d);
735 x86_64_mov_imm_reg((s8) iptr->val.a, d);
737 store_reg_to_var_int(iptr->dst, d);
740 case ICMD_LCONST: /* ... ==> ..., constant */
741 /* op1 = 0, val.l = constant */
743 d = reg_of_var(iptr->dst, REG_ITMP1);
744 if (iptr->val.l == 0) {
745 x86_64_alu_reg_reg(X86_64_XOR, d, d);
747 x86_64_mov_imm_reg(iptr->val.l, d);
749 store_reg_to_var_int(iptr->dst, d);
752 case ICMD_FCONST: /* ... ==> ..., constant */
753 /* op1 = 0, val.f = constant */
755 d = reg_of_var(iptr->dst, REG_FTMP1);
756 a = dseg_addfloat(iptr->val.f);
757 x86_64_movdl_membase_reg(RIP, -(((s8) mcodeptr + ((d > 7) ? 9 : 8)) - (s8) mcodebase) + a, d);
758 store_reg_to_var_flt(iptr->dst, d);
761 case ICMD_DCONST: /* ... ==> ..., constant */
762 /* op1 = 0, val.d = constant */
764 d = reg_of_var(iptr->dst, REG_FTMP1);
765 a = dseg_adddouble(iptr->val.d);
766 x86_64_movd_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, d);
767 store_reg_to_var_flt(iptr->dst, d);
771 /* load/store operations **********************************************/
773 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
774 /* op1 = local variable */
776 d = reg_of_var(iptr->dst, REG_ITMP1);
777 if ((iptr->dst->varkind == LOCALVAR) &&
778 (iptr->dst->varnum == iptr->op1)) {
781 var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
782 if (var->flags & INMEMORY) {
783 x86_64_movl_membase_reg(REG_SP, var->regoff * 8, d);
784 store_reg_to_var_int(iptr->dst, d);
787 if (iptr->dst->flags & INMEMORY) {
788 x86_64_mov_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
791 M_INTMOVE(var->regoff, d);
796 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
797 case ICMD_ALOAD: /* op1 = local variable */
799 d = reg_of_var(iptr->dst, REG_ITMP1);
800 if ((iptr->dst->varkind == LOCALVAR) &&
801 (iptr->dst->varnum == iptr->op1)) {
804 var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
805 if (var->flags & INMEMORY) {
806 x86_64_mov_membase_reg(REG_SP, var->regoff * 8, d);
807 store_reg_to_var_int(iptr->dst, d);
810 if (iptr->dst->flags & INMEMORY) {
811 x86_64_mov_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
814 M_INTMOVE(var->regoff, d);
819 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
820 case ICMD_DLOAD: /* op1 = local variable */
822 d = reg_of_var(iptr->dst, REG_FTMP1);
823 if ((iptr->dst->varkind == LOCALVAR) &&
824 (iptr->dst->varnum == iptr->op1)) {
827 var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
828 if (var->flags & INMEMORY) {
829 x86_64_movq_membase_reg(REG_SP, var->regoff * 8, d);
830 store_reg_to_var_flt(iptr->dst, d);
833 if (iptr->dst->flags & INMEMORY) {
834 x86_64_movq_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
837 M_FLTMOVE(var->regoff, d);
842 case ICMD_ISTORE: /* ..., value ==> ... */
843 case ICMD_LSTORE: /* op1 = local variable */
846 if ((src->varkind == LOCALVAR) &&
847 (src->varnum == iptr->op1)) {
850 var = &(locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
851 if (var->flags & INMEMORY) {
852 var_to_reg_int(s1, src, REG_ITMP1);
853 x86_64_mov_reg_membase(s1, REG_SP, var->regoff * 8);
856 var_to_reg_int(s1, src, var->regoff);
857 M_INTMOVE(s1, var->regoff);
861 case ICMD_FSTORE: /* ..., value ==> ... */
862 case ICMD_DSTORE: /* op1 = local variable */
864 if ((src->varkind == LOCALVAR) &&
865 (src->varnum == iptr->op1)) {
868 var = &(locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
869 if (var->flags & INMEMORY) {
870 var_to_reg_flt(s1, src, REG_FTMP1);
871 x86_64_movq_reg_membase(s1, REG_SP, var->regoff * 8);
874 var_to_reg_flt(s1, src, var->regoff);
875 M_FLTMOVE(s1, var->regoff);
880 /* pop/dup/swap operations ********************************************/
882 /* attention: double and longs are only one entry in CACAO ICMDs */
884 case ICMD_POP: /* ..., value ==> ... */
885 case ICMD_POP2: /* ..., value, value ==> ... */
888 #define M_COPY(from,to) \
889 d = reg_of_var(to, REG_ITMP1); \
890 if ((from->regoff != to->regoff) || \
891 ((from->flags ^ to->flags) & INMEMORY)) { \
892 if (IS_FLT_DBL_TYPE(from->type)) { \
893 var_to_reg_flt(s1, from, d); \
895 store_reg_to_var_flt(to, d); \
897 var_to_reg_int(s1, from, d); \
899 store_reg_to_var_int(to, d); \
903 case ICMD_DUP: /* ..., a ==> ..., a, a */
904 M_COPY(src, iptr->dst);
907 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
909 M_COPY(src, iptr->dst->prev->prev);
911 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
913 M_COPY(src, iptr->dst);
914 M_COPY(src->prev, iptr->dst->prev);
917 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
919 M_COPY(src->prev, iptr->dst->prev->prev->prev);
921 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
923 M_COPY(src, iptr->dst);
924 M_COPY(src->prev, iptr->dst->prev);
925 M_COPY(src->prev->prev, iptr->dst->prev->prev);
926 M_COPY(src, iptr->dst->prev->prev->prev);
929 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
931 M_COPY(src, iptr->dst);
932 M_COPY(src->prev, iptr->dst->prev);
933 M_COPY(src->prev->prev, iptr->dst->prev->prev);
934 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
935 M_COPY(src, iptr->dst->prev->prev->prev->prev);
936 M_COPY(src->prev, iptr->dst->prev->prev->prev->prev->prev);
939 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
941 M_COPY(src, iptr->dst->prev);
942 M_COPY(src->prev, iptr->dst);
946 /* integer operations *************************************************/
948 case ICMD_INEG: /* ..., value ==> ..., - value */
950 d = reg_of_var(iptr->dst, REG_NULL);
951 if (iptr->dst->flags & INMEMORY) {
952 if (src->flags & INMEMORY) {
953 if (src->regoff == iptr->dst->regoff) {
954 x86_64_negl_membase(REG_SP, iptr->dst->regoff * 8);
957 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
958 x86_64_negl_reg(REG_ITMP1);
959 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
963 x86_64_movl_reg_membase(src->regoff, REG_SP, iptr->dst->regoff * 8);
964 x86_64_negl_membase(REG_SP, iptr->dst->regoff * 8);
968 if (src->flags & INMEMORY) {
969 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
973 M_INTMOVE(src->regoff, iptr->dst->regoff);
974 x86_64_negl_reg(iptr->dst->regoff);
979 case ICMD_LNEG: /* ..., value ==> ..., - value */
981 d = reg_of_var(iptr->dst, REG_NULL);
982 if (iptr->dst->flags & INMEMORY) {
983 if (src->flags & INMEMORY) {
984 if (src->regoff == iptr->dst->regoff) {
985 x86_64_neg_membase(REG_SP, iptr->dst->regoff * 8);
988 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
989 x86_64_neg_reg(REG_ITMP1);
990 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
994 x86_64_mov_reg_membase(src->regoff, REG_SP, iptr->dst->regoff * 8);
995 x86_64_neg_membase(REG_SP, iptr->dst->regoff * 8);
999 if (src->flags & INMEMORY) {
1000 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1001 x86_64_neg_reg(iptr->dst->regoff);
1004 M_INTMOVE(src->regoff, iptr->dst->regoff);
1005 x86_64_neg_reg(iptr->dst->regoff);
1010 case ICMD_I2L: /* ..., value ==> ..., value */
1012 d = reg_of_var(iptr->dst, REG_ITMP3);
1013 if (src->flags & INMEMORY) {
1014 x86_64_movslq_membase_reg(REG_SP, src->regoff * 8, d);
1017 x86_64_movslq_reg_reg(src->regoff, d);
1019 store_reg_to_var_int(iptr->dst, d);
1022 case ICMD_L2I: /* ..., value ==> ..., value */
1024 var_to_reg_int(s1, src, REG_ITMP1);
1025 d = reg_of_var(iptr->dst, REG_ITMP3);
1027 store_reg_to_var_int(iptr->dst, d);
1030 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
1032 d = reg_of_var(iptr->dst, REG_ITMP3);
1033 if (src->flags & INMEMORY) {
1034 x86_64_movsbq_membase_reg(REG_SP, src->regoff * 8, d);
1037 x86_64_movsbq_reg_reg(src->regoff, d);
1039 store_reg_to_var_int(iptr->dst, d);
1042 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
1044 d = reg_of_var(iptr->dst, REG_ITMP3);
1045 if (src->flags & INMEMORY) {
1046 x86_64_movzwq_membase_reg(REG_SP, src->regoff * 8, d);
1049 x86_64_movzwq_reg_reg(src->regoff, d);
1051 store_reg_to_var_int(iptr->dst, d);
1054 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
1056 d = reg_of_var(iptr->dst, REG_ITMP3);
1057 if (src->flags & INMEMORY) {
1058 x86_64_movswq_membase_reg(REG_SP, src->regoff * 8, d);
1061 x86_64_movswq_reg_reg(src->regoff, d);
1063 store_reg_to_var_int(iptr->dst, d);
1067 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1069 d = reg_of_var(iptr->dst, REG_NULL);
1070 x86_64_emit_ialu(X86_64_ADD, src, iptr);
1073 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
1074 /* val.i = constant */
1076 d = reg_of_var(iptr->dst, REG_NULL);
1077 x86_64_emit_ialuconst(X86_64_ADD, src, iptr);
1080 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1082 d = reg_of_var(iptr->dst, REG_NULL);
1083 x86_64_emit_lalu(X86_64_ADD, src, iptr);
1086 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
1087 /* val.l = constant */
1089 d = reg_of_var(iptr->dst, REG_NULL);
1090 x86_64_emit_laluconst(X86_64_ADD, src, iptr);
1093 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1095 d = reg_of_var(iptr->dst, REG_NULL);
1096 if (iptr->dst->flags & INMEMORY) {
1097 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1098 if (src->prev->regoff == iptr->dst->regoff) {
1099 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1100 x86_64_alul_reg_membase(X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1103 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1104 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1105 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1108 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1109 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1110 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1111 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1113 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1114 if (src->prev->regoff == iptr->dst->regoff) {
1115 x86_64_alul_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1118 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1119 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1120 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1124 x86_64_movl_reg_membase(src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1125 x86_64_alul_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1129 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1130 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, d);
1131 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1133 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1134 M_INTMOVE(src->prev->regoff, d);
1135 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1137 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1138 /* workaround for reg alloc */
1139 if (src->regoff == iptr->dst->regoff) {
1140 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1141 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1142 M_INTMOVE(REG_ITMP1, d);
1145 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, d);
1146 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, d);
1150 /* workaround for reg alloc */
1151 if (src->regoff == iptr->dst->regoff) {
1152 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1153 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1154 M_INTMOVE(REG_ITMP1, d);
1157 M_INTMOVE(src->prev->regoff, d);
1158 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, d);
/* Integer subtraction opcodes.
   Each case selects an addressing form based on whether the operands
   (src, src->prev) and the destination live in registers or on the
   stack (INMEMORY); stack slots are addressed as REG_SP + regoff * 8. */
1164 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
1165 /* val.i = constant */
1167 d = reg_of_var(iptr->dst, REG_NULL);
/* NOTE(review): the stack comment above says "+ constant" but X86_64_SUB
   is emitted -- presumably a stale copy/paste comment; confirm. */
1168 x86_64_emit_ialuconst(X86_64_SUB, src, iptr);
1171 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1173 d = reg_of_var(iptr->dst, REG_NULL);
1174 if (iptr->dst->flags & INMEMORY) {
1175 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* dst aliases the minuend slot: subtract in place via a temp. */
1176 if (src->prev->regoff == iptr->dst->regoff) {
1177 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1178 x86_64_alu_reg_membase(X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1181 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1182 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1183 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1186 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1187 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1188 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1189 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1191 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1192 if (src->prev->regoff == iptr->dst->regoff) {
1193 x86_64_alu_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1196 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1197 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1198 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1202 x86_64_mov_reg_membase(src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1203 x86_64_alu_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
/* Destination is a register: same operand-location case split. */
1207 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1208 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, d);
1209 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1211 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1212 M_INTMOVE(src->prev->regoff, d);
1213 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1215 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* If d aliases the subtrahend register, go through REG_ITMP1 so the
   subtrahend is not clobbered before it is used. */
1216 /* workaround for reg alloc */
1217 if (src->regoff == iptr->dst->regoff) {
1218 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1219 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1220 M_INTMOVE(REG_ITMP1, d);
1223 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, d);
1224 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, d);
1228 /* workaround for reg alloc */
1229 if (src->regoff == iptr->dst->regoff) {
1230 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1231 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1232 M_INTMOVE(REG_ITMP1, d);
1235 M_INTMOVE(src->prev->regoff, d);
1236 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, d);
1242 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1243 /* val.l = constant */
1245 d = reg_of_var(iptr->dst, REG_NULL);
1246 x86_64_emit_laluconst(X86_64_SUB, src, iptr);
/* 32-bit multiplication. The movl/imull "l" variants operate on the low
   32 bits; operand-location case split as in the ALU opcodes above. */
1249 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1251 d = reg_of_var(iptr->dst, REG_NULL);
1252 if (iptr->dst->flags & INMEMORY) {
1253 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1254 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1255 x86_64_imull_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1256 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1258 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
/* Multiplication commutes, so the memory operand may be loaded first. */
1259 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1260 x86_64_imull_reg_reg(src->prev->regoff, REG_ITMP1);
1261 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1263 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1264 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1265 x86_64_imull_reg_reg(src->regoff, REG_ITMP1);
1266 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1269 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1270 x86_64_imull_reg_reg(src->regoff, REG_ITMP1);
1271 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1275 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1276 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1277 x86_64_imull_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1279 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1280 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1281 x86_64_imull_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1283 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1284 M_INTMOVE(src->regoff, iptr->dst->regoff);
1285 x86_64_imull_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
/* All-register case: avoid clobbering src when it aliases dst. */
1288 if (src->regoff == iptr->dst->regoff) {
1289 x86_64_imull_reg_reg(src->prev->regoff, iptr->dst->regoff);
1292 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1293 x86_64_imull_reg_reg(src->regoff, iptr->dst->regoff);
1299 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1300 /* val.i = constant */
1302 d = reg_of_var(iptr->dst, REG_NULL);
1303 if (iptr->dst->flags & INMEMORY) {
1304 if (src->flags & INMEMORY) {
1305 x86_64_imull_imm_membase_reg(iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1306 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1309 x86_64_imull_imm_reg_reg(iptr->val.i, src->regoff, REG_ITMP1);
1310 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1314 if (src->flags & INMEMORY) {
1315 x86_64_imull_imm_membase_reg(iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
/* Strength reduction: x * 2 becomes x + x (cheaper than imul). */
1318 if (iptr->val.i == 2) {
1319 M_INTMOVE(src->regoff, iptr->dst->regoff);
1320 x86_64_alul_reg_reg(X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1323 x86_64_imull_imm_reg_reg(iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
/* 64-bit multiplication; mirrors ICMD_IMUL but with the full-width
   mov/imul forms. LMULCONST additionally checks whether the 64-bit
   constant fits in a signed 32-bit immediate (x86_64_is_imm32) --
   imul only encodes 32-bit immediates, so larger constants must be
   materialized into a register first. */
1329 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1331 d = reg_of_var(iptr->dst, REG_NULL);
1332 if (iptr->dst->flags & INMEMORY) {
1333 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1334 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1335 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1336 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1338 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1339 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1340 x86_64_imul_reg_reg(src->prev->regoff, REG_ITMP1);
1341 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1343 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1344 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1345 x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1346 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1349 x86_64_mov_reg_reg(src->prev->regoff, REG_ITMP1);
1350 x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1351 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1355 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1356 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1357 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1359 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1360 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1361 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1363 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1364 M_INTMOVE(src->regoff, iptr->dst->regoff);
1365 x86_64_imul_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
/* Register/register: guard against dst aliasing the second operand. */
1368 if (src->regoff == iptr->dst->regoff) {
1369 x86_64_imul_reg_reg(src->prev->regoff, iptr->dst->regoff);
1372 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1373 x86_64_imul_reg_reg(src->regoff, iptr->dst->regoff);
1379 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1380 /* val.l = constant */
1382 d = reg_of_var(iptr->dst, REG_NULL);
1383 if (iptr->dst->flags & INMEMORY) {
1384 if (src->flags & INMEMORY) {
1385 if (x86_64_is_imm32(iptr->val.l)) {
1386 x86_64_imul_imm_membase_reg(iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
/* Constant does not fit in imm32: load it into a register first. */
1389 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1390 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1392 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1395 if (x86_64_is_imm32(iptr->val.l)) {
1396 x86_64_imul_imm_reg_reg(iptr->val.l, src->regoff, REG_ITMP1);
1399 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1400 x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1402 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1406 if (src->flags & INMEMORY) {
1407 if (x86_64_is_imm32(iptr->val.l)) {
1408 x86_64_imul_imm_membase_reg(iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1411 x86_64_mov_imm_reg(iptr->val.l, iptr->dst->regoff);
1412 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
/* Strength reduction for the common x * 2 case: add instead of imul. */
1416 /* should match in many cases */
1417 if (iptr->val.l == 2) {
1418 M_INTMOVE(src->regoff, iptr->dst->regoff);
1419 x86_64_alul_reg_reg(X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1422 if (x86_64_is_imm32(iptr->val.l)) {
1423 x86_64_imul_imm_reg_reg(iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1426 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1427 M_INTMOVE(src->regoff, iptr->dst->regoff);
1428 x86_64_imul_reg_reg(REG_ITMP1, iptr->dst->regoff);
/* 32-bit division / remainder. x86 idiv requires the dividend in
   RDX:RAX and traps on INT_MIN / -1, so both cases:
     - place the dividend in RAX,
     - special-case dividend == 0x80000000 with divisor == -1 as
       required by the JVM spec (quotient = INT_MIN, remainder = 0),
     - save RDX (an argument register) in REG_ITMP2 around the idivl.
   The x86_64_jcc() skip distances (4 + 6, 2 + 4 + 6, 3 + 1 + 3) are
   hand-counted byte lengths of the instructions being jumped over; the
   per-line byte counts are noted in the trailing comments. Keep them in
   sync if any emitted instruction changes. */
1435 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1437 d = reg_of_var(iptr->dst, REG_NULL);
1438 if (src->prev->flags & INMEMORY) {
1439 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, RAX);
1442 M_INTMOVE(src->prev->regoff, RAX);
1445 if (src->flags & INMEMORY) {
1446 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1449 M_INTMOVE(src->regoff, REG_ITMP3);
1452 x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1453 x86_64_jcc(X86_64_CC_NE, 4 + 6);
1454 x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1455 x86_64_jcc(X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1457 x86_64_mov_reg_reg(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1459 x86_64_idivl_reg(REG_ITMP3);
1461 if (iptr->dst->flags & INMEMORY) {
1462 x86_64_mov_reg_membase(RAX, REG_SP, iptr->dst->regoff * 8);
1463 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1466 M_INTMOVE(RAX, iptr->dst->regoff);
/* Skip the restore when the result itself was allocated to RDX. */
1468 if (iptr->dst->regoff != RDX) {
1469 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1474 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1476 d = reg_of_var(iptr->dst, REG_NULL);
1477 if (src->prev->flags & INMEMORY) {
1478 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, RAX);
1481 M_INTMOVE(src->prev->regoff, RAX);
1484 if (src->flags & INMEMORY) {
1485 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1488 M_INTMOVE(src->regoff, REG_ITMP3);
1491 x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1492 x86_64_jcc(X86_64_CC_NE, 2 + 4 + 6);
/* INT_MIN % -1 == 0: preload RDX (remainder register) with zero. */
1493 x86_64_alul_reg_reg(X86_64_XOR, RDX, RDX); /* 2 bytes */
1494 x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1495 x86_64_jcc(X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1497 x86_64_mov_reg_reg(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1499 x86_64_idivl_reg(REG_ITMP3);
/* Remainder is delivered in RDX. */
1501 if (iptr->dst->flags & INMEMORY) {
1502 x86_64_mov_reg_membase(RDX, REG_SP, iptr->dst->regoff * 8);
1503 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1506 M_INTMOVE(RDX, iptr->dst->regoff);
1508 if (iptr->dst->regoff != RDX) {
1509 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
/* Division/remainder by a power of two, branch-free.
   IDIVPOW2: signed division by 2^val.i via arithmetic shift. Plain SAR
   rounds toward negative infinity, Java requires rounding toward zero,
   so for negative dividends (2^k - 1) is added first; the lea computes
   value + (2^k - 1) and cmov selects it when value <= -1.
   IREMPOW2: remainder with the sign of the dividend; here val.i is
   presumably already 2^k - 1 (the mask), given the lea adds val.i and
   the AND uses -1 - val.i -- confirm against the ICMD lowering. */
1514 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1515 /* val.i = constant */
1517 var_to_reg_int(s1, src, REG_ITMP1);
1518 d = reg_of_var(iptr->dst, REG_ITMP3);
1519 M_INTMOVE(s1, REG_ITMP1);
1520 x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1521 x86_64_leal_membase_reg(REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1522 x86_64_cmovccl_reg_reg(X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1523 x86_64_shiftl_imm_reg(X86_64_SAR, iptr->val.i, REG_ITMP1);
1524 x86_64_mov_reg_reg(REG_ITMP1, d);
1525 store_reg_to_var_int(iptr->dst, d);
1528 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1529 /* val.i = constant */
1531 var_to_reg_int(s1, src, REG_ITMP1);
1532 d = reg_of_var(iptr->dst, REG_ITMP3);
1533 M_INTMOVE(s1, REG_ITMP1);
1534 x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1535 x86_64_leal_membase_reg(REG_ITMP1, iptr->val.i, REG_ITMP2);
1536 x86_64_cmovccl_reg_reg(X86_64_CC_G, REG_ITMP1, REG_ITMP2);
/* Clear the low bits of the (biased) value, then subtract to obtain
   the signed remainder: rem = value - (biased & ~mask). */
1537 x86_64_alul_imm_reg(X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1538 x86_64_alul_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
1539 x86_64_mov_reg_reg(REG_ITMP1, d);
1540 store_reg_to_var_int(iptr->dst, d);
/* 64-bit division / remainder; same structure as IDIV/IREM but with
   full-width instructions. The JVM-spec corner case tested here is
   dividend == LONG_MIN (0x8000000000000000) with divisor == -1; the
   constant cannot be a cmp immediate, so it is materialized into
   REG_ITMP2 first. Jump distances are hand-counted instruction bytes
   (see the trailing byte-count comments); keep them in sync with the
   emitted code.
   NOTE(review): the dividend is loaded into REG_ITMP1 here rather than
   RAX as in IDIV -- presumably a move to RAX occurs in lines elided
   from this excerpt, since idiv implicitly uses RDX:RAX; confirm. */
1544 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1546 d = reg_of_var(iptr->dst, REG_NULL);
1547 if (src->prev->flags & INMEMORY) {
1548 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1551 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1554 if (src->flags & INMEMORY) {
1555 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1558 M_INTMOVE(src->regoff, REG_ITMP3);
1561 x86_64_mov_imm_reg(0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1562 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
1563 x86_64_jcc(X86_64_CC_NE, 4 + 6);
1564 x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1565 x86_64_jcc(X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1567 x86_64_mov_reg_reg(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1569 x86_64_idiv_reg(REG_ITMP3);
1571 if (iptr->dst->flags & INMEMORY) {
1572 x86_64_mov_reg_membase(RAX, REG_SP, iptr->dst->regoff * 8);
1573 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1576 M_INTMOVE(RAX, iptr->dst->regoff);
1578 if (iptr->dst->regoff != RDX) {
1579 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1584 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1586 d = reg_of_var(iptr->dst, REG_NULL);
1587 if (src->prev->flags & INMEMORY) {
1588 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1591 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1594 if (src->flags & INMEMORY) {
1595 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1598 M_INTMOVE(src->regoff, REG_ITMP3);
1601 x86_64_mov_imm_reg(0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1602 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
1603 x86_64_jcc(X86_64_CC_NE, 2 + 4 + 6);
/* LONG_MIN % -1 == 0: preload RDX (remainder register) with zero. */
1604 x86_64_alul_reg_reg(X86_64_XOR, RDX, RDX); /* 2 bytes */
1605 x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1606 x86_64_jcc(X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1608 x86_64_mov_reg_reg(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1610 x86_64_idiv_reg(REG_ITMP3);
1612 if (iptr->dst->flags & INMEMORY) {
1613 x86_64_mov_reg_membase(RDX, REG_SP, iptr->dst->regoff * 8);
1614 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1617 M_INTMOVE(RDX, iptr->dst->regoff);
1619 if (iptr->dst->regoff != RDX) {
1620 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
/* 64-bit power-of-two division/remainder; same bias-and-shift scheme as
   the 32-bit IDIVPOW2/IREMPOW2 above, using the full-width alu/lea/
   cmov/shift forms.
   NOTE(review): the bias uses (1 << iptr->val.i) which is evaluated in
   int; for val.i >= 31 this overflows before widening -- looks like it
   relies on val.i being small; confirm against the optimizer's
   LDIVPOW2 lowering. */
1625 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1626 /* val.i = constant */
1628 var_to_reg_int(s1, src, REG_ITMP1);
1629 d = reg_of_var(iptr->dst, REG_ITMP3);
1630 M_INTMOVE(s1, REG_ITMP1);
1631 x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1632 x86_64_lea_membase_reg(REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1633 x86_64_cmovcc_reg_reg(X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1634 x86_64_shift_imm_reg(X86_64_SAR, iptr->val.i, REG_ITMP1);
1635 x86_64_mov_reg_reg(REG_ITMP1, d);
1636 store_reg_to_var_int(iptr->dst, d);
1639 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1640 /* val.l = constant */
1642 var_to_reg_int(s1, src, REG_ITMP1);
1643 d = reg_of_var(iptr->dst, REG_ITMP3);
1644 M_INTMOVE(s1, REG_ITMP1);
1645 x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1646 x86_64_lea_membase_reg(REG_ITMP1, iptr->val.i, REG_ITMP2);
1647 x86_64_cmovcc_reg_reg(X86_64_CC_G, REG_ITMP1, REG_ITMP2);
/* rem = value - ((biased value) & ~mask), preserving the dividend's sign. */
1648 x86_64_alu_imm_reg(X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1649 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
1650 x86_64_mov_reg_reg(REG_ITMP1, d);
1651 store_reg_to_var_int(iptr->dst, d);
/* Shift opcodes. All variants delegate to the shared shift emitters:
   SHL for <<, SAR for signed >>, SHR for unsigned >>>; the "i" helpers
   emit 32-bit forms, the "l" helpers 64-bit forms. */
1654 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1656 d = reg_of_var(iptr->dst, REG_NULL);
1657 x86_64_emit_ishift(X86_64_SHL, src, iptr);
1660 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1661 /* val.i = constant */
1663 d = reg_of_var(iptr->dst, REG_NULL);
1664 x86_64_emit_ishiftconst(X86_64_SHL, src, iptr);
1667 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1669 d = reg_of_var(iptr->dst, REG_NULL);
1670 x86_64_emit_ishift(X86_64_SAR, src, iptr);
1673 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1674 /* val.i = constant */
1676 d = reg_of_var(iptr->dst, REG_NULL);
1677 x86_64_emit_ishiftconst(X86_64_SAR, src, iptr);
1680 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1682 d = reg_of_var(iptr->dst, REG_NULL);
1683 x86_64_emit_ishift(X86_64_SHR, src, iptr);
1686 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1687 /* val.i = constant */
1689 d = reg_of_var(iptr->dst, REG_NULL);
1690 x86_64_emit_ishiftconst(X86_64_SHR, src, iptr);
1693 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1695 d = reg_of_var(iptr->dst, REG_NULL);
1696 x86_64_emit_lshift(X86_64_SHL, src, iptr);
1699 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1700 /* val.i = constant */
1702 d = reg_of_var(iptr->dst, REG_NULL);
1703 x86_64_emit_lshiftconst(X86_64_SHL, src, iptr);
1706 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1708 d = reg_of_var(iptr->dst, REG_NULL);
1709 x86_64_emit_lshift(X86_64_SAR, src, iptr);
1712 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1713 /* val.i = constant */
1715 d = reg_of_var(iptr->dst, REG_NULL);
1716 x86_64_emit_lshiftconst(X86_64_SAR, src, iptr);
1719 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1721 d = reg_of_var(iptr->dst, REG_NULL);
1722 x86_64_emit_lshift(X86_64_SHR, src, iptr);
1725 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1726 /* val.l = constant */
1728 d = reg_of_var(iptr->dst, REG_NULL);
1729 x86_64_emit_lshiftconst(X86_64_SHR, src, iptr);
/* Bitwise AND / OR / XOR opcodes. All delegate to the shared ALU
   emitters: the "i" helpers emit 32-bit forms, the "l" helpers 64-bit
   forms, and the *const variants take the immediate from iptr->val. */
1732 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1734 d = reg_of_var(iptr->dst, REG_NULL);
1735 x86_64_emit_ialu(X86_64_AND, src, iptr);
1738 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1739 /* val.i = constant */
1741 d = reg_of_var(iptr->dst, REG_NULL);
1742 x86_64_emit_ialuconst(X86_64_AND, src, iptr);
1745 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1747 d = reg_of_var(iptr->dst, REG_NULL);
1748 x86_64_emit_lalu(X86_64_AND, src, iptr);
1751 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1752 /* val.l = constant */
1754 d = reg_of_var(iptr->dst, REG_NULL);
1755 x86_64_emit_laluconst(X86_64_AND, src, iptr);
1758 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1760 d = reg_of_var(iptr->dst, REG_NULL);
1761 x86_64_emit_ialu(X86_64_OR, src, iptr);
1764 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1765 /* val.i = constant */
1767 d = reg_of_var(iptr->dst, REG_NULL);
1768 x86_64_emit_ialuconst(X86_64_OR, src, iptr);
1771 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1773 d = reg_of_var(iptr->dst, REG_NULL);
1774 x86_64_emit_lalu(X86_64_OR, src, iptr);
1777 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1778 /* val.l = constant */
1780 d = reg_of_var(iptr->dst, REG_NULL);
1781 x86_64_emit_laluconst(X86_64_OR, src, iptr);
1784 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1786 d = reg_of_var(iptr->dst, REG_NULL);
1787 x86_64_emit_ialu(X86_64_XOR, src, iptr);
1790 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1791 /* val.i = constant */
1793 d = reg_of_var(iptr->dst, REG_NULL);
1794 x86_64_emit_ialuconst(X86_64_XOR, src, iptr);
1797 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1799 d = reg_of_var(iptr->dst, REG_NULL);
1800 x86_64_emit_lalu(X86_64_XOR, src, iptr);
1803 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1804 /* val.l = constant */
1806 d = reg_of_var(iptr->dst, REG_NULL);
1807 x86_64_emit_laluconst(X86_64_XOR, src, iptr);
/* ICMD_IINC: add a constant to a local int variable in place, using
   inc/dec for the common +1/-1 cases and an immediate add otherwise.
   NOTE(review): 'd' is used as the local's stack slot / register here;
   its assignment from var->regoff is not visible in this excerpt --
   confirm it is set between the lookup and the INMEMORY test. */
1811 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1812 /* op1 = variable, val.i = constant */
1814 var = &(locals[iptr->op1][TYPE_INT]);
1816 if (var->flags & INMEMORY) {
1817 if (iptr->val.i == 1) {
1818 x86_64_incl_membase(REG_SP, d * 8);
1820 } else if (iptr->val.i == -1) {
1821 x86_64_decl_membase(REG_SP, d * 8);
1824 x86_64_alul_imm_membase(X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1828 if (iptr->val.i == 1) {
1831 } else if (iptr->val.i == -1) {
1835 x86_64_alul_imm_reg(X86_64_ADD, iptr->val.i, d);
1841 /* floating operations ************************************************/
/* FNEG/DNEG: negate by XORing the IEEE-754 sign bit. The sign-bit mask
   is placed in the data segment (dseg_adds4/dseg_adds8 return its
   offset 'a') and loaded RIP-relative; the displacement expression
   -(((s8) mcodeptr + 9) - (s8) mcodebase) + a points from the end of
   the load instruction (mcodeptr + 9, its encoded length) back to the
   dseg entry, which presumably sits below mcodebase -- confirm against
   the dseg layout in codegen.inc. */
1843 case ICMD_FNEG: /* ..., value ==> ..., - value */
1845 var_to_reg_flt(s1, src, REG_FTMP1);
1846 d = reg_of_var(iptr->dst, REG_FTMP3);
1847 a = dseg_adds4(0x80000000);
1849 x86_64_movss_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, REG_FTMP2);
1850 x86_64_xorps_reg_reg(REG_FTMP2, d);
1851 store_reg_to_var_flt(iptr->dst, d);
1854 case ICMD_DNEG: /* ..., value ==> ..., - value */
1856 var_to_reg_flt(s1, src, REG_FTMP1);
1857 d = reg_of_var(iptr->dst, REG_FTMP3);
1858 a = dseg_adds8(0x8000000000000000);
1860 x86_64_movd_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, REG_FTMP2);
1861 x86_64_xorpd_reg_reg(REG_FTMP2, d);
1862 store_reg_to_var_flt(iptr->dst, d);
/* Scalar SSE float/double arithmetic. The two-operand SSE instructions
   overwrite their destination, so the commutative ops (add, mul) pick
   whichever source already aliases d; the non-commutative ops (sub,
   div) must keep val2 safe in REG_FTMP2 before d is overwritten. */
1865 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1867 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1868 var_to_reg_flt(s2, src, REG_FTMP2);
1869 d = reg_of_var(iptr->dst, REG_FTMP3);
1871 x86_64_addss_reg_reg(s2, d);
1872 } else if (s2 == d) {
/* addition commutes, so adding s1 into d (== s2) is equivalent */
1873 x86_64_addss_reg_reg(s1, d);
1876 x86_64_addss_reg_reg(s2, d);
1878 store_reg_to_var_flt(iptr->dst, d);
1881 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1883 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1884 var_to_reg_flt(s2, src, REG_FTMP2);
1885 d = reg_of_var(iptr->dst, REG_FTMP3);
1887 x86_64_addsd_reg_reg(s2, d);
1888 } else if (s2 == d) {
1889 x86_64_addsd_reg_reg(s1, d);
1892 x86_64_addsd_reg_reg(s2, d);
1894 store_reg_to_var_flt(iptr->dst, d);
1897 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1899 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1900 var_to_reg_flt(s2, src, REG_FTMP2);
1901 d = reg_of_var(iptr->dst, REG_FTMP3);
/* preserve the subtrahend before d is loaded with val1 */
1903 M_FLTMOVE(s2, REG_FTMP2);
1907 x86_64_subss_reg_reg(s2, d);
1908 store_reg_to_var_flt(iptr->dst, d);
1911 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1913 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1914 var_to_reg_flt(s2, src, REG_FTMP2);
1915 d = reg_of_var(iptr->dst, REG_FTMP3);
1917 M_FLTMOVE(s2, REG_FTMP2);
1921 x86_64_subsd_reg_reg(s2, d);
1922 store_reg_to_var_flt(iptr->dst, d);
1925 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1927 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1928 var_to_reg_flt(s2, src, REG_FTMP2);
1929 d = reg_of_var(iptr->dst, REG_FTMP3);
1931 x86_64_mulss_reg_reg(s2, d);
1932 } else if (s2 == d) {
1933 x86_64_mulss_reg_reg(s1, d);
1936 x86_64_mulss_reg_reg(s2, d);
1938 store_reg_to_var_flt(iptr->dst, d);
1941 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1943 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1944 var_to_reg_flt(s2, src, REG_FTMP2);
1945 d = reg_of_var(iptr->dst, REG_FTMP3);
1947 x86_64_mulsd_reg_reg(s2, d);
1948 } else if (s2 == d) {
1949 x86_64_mulsd_reg_reg(s1, d);
1952 x86_64_mulsd_reg_reg(s2, d);
1954 store_reg_to_var_flt(iptr->dst, d);
1957 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1959 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1960 var_to_reg_flt(s2, src, REG_FTMP2);
1961 d = reg_of_var(iptr->dst, REG_FTMP3);
/* division does not commute: protect the divisor like FSUB does */
1963 M_FLTMOVE(s2, REG_FTMP2);
1967 x86_64_divss_reg_reg(s2, d);
1968 store_reg_to_var_flt(iptr->dst, d);
1971 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1973 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1974 var_to_reg_flt(s2, src, REG_FTMP2);
1975 d = reg_of_var(iptr->dst, REG_FTMP3);
1977 M_FLTMOVE(s2, REG_FTMP2);
1981 x86_64_divsd_reg_reg(s2, d);
1982 store_reg_to_var_flt(iptr->dst, d);
/* Integer -> floating-point conversions via SSE cvtsi2ss/cvtsi2sd;
   the "q" variants take a 64-bit (long) integer source. These are
   exact for I2D and rounded per SSE for the narrower targets. */
1985 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1987 var_to_reg_int(s1, src, REG_ITMP1);
1988 d = reg_of_var(iptr->dst, REG_FTMP1);
1989 x86_64_cvtsi2ss_reg_reg(s1, d);
1990 store_reg_to_var_flt(iptr->dst, d);
1993 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1995 var_to_reg_int(s1, src, REG_ITMP1);
1996 d = reg_of_var(iptr->dst, REG_FTMP1);
1997 x86_64_cvtsi2sd_reg_reg(s1, d);
1998 store_reg_to_var_flt(iptr->dst, d);
2001 case ICMD_L2F: /* ..., value ==> ..., (float) value */
2003 var_to_reg_int(s1, src, REG_ITMP1);
2004 d = reg_of_var(iptr->dst, REG_FTMP1);
2005 x86_64_cvtsi2ssq_reg_reg(s1, d);
2006 store_reg_to_var_flt(iptr->dst, d);
2009 case ICMD_L2D: /* ..., value ==> ..., (double) value */
2011 var_to_reg_int(s1, src, REG_ITMP1);
2012 d = reg_of_var(iptr->dst, REG_FTMP1);
2013 x86_64_cvtsi2sdq_reg_reg(s1, d);
2014 store_reg_to_var_flt(iptr->dst, d);
/* Floating-point -> integer conversions. cvttss2si/cvttsd2si truncate
   toward zero but return the "integer indefinite" value (0x80000000 or
   0x8000000000000000) on overflow/NaN, which does not match Java
   semantics. Each case therefore compares the raw result against that
   sentinel and, when it matches, falls through into a call to the
   asm_builtin_* helper that computes the JVM-correct result. The skip
   distance 'a' is the byte length of the fallback sequence:
   optional 5-byte M_FLTMOVE + 10-byte mov imm64 + 3-byte call +
   optional 3-byte result move -- keep it in sync with the emitted
   instructions. */
2017 case ICMD_F2I: /* ..., value ==> ..., (int) value */
2019 var_to_reg_flt(s1, src, REG_FTMP1);
2020 d = reg_of_var(iptr->dst, REG_ITMP1);
2021 x86_64_cvttss2si_reg_reg(s1, d);
2022 x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, d) /* corner cases */
2023 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2024 x86_64_jcc(X86_64_CC_NE, a);
2025 M_FLTMOVE(s1, REG_FTMP1);
2026 x86_64_mov_imm_reg((s8) asm_builtin_f2i, REG_ITMP2);
2027 x86_64_call_reg(REG_ITMP2);
2028 M_INTMOVE(REG_RESULT, d);
2029 store_reg_to_var_int(iptr->dst, d);
2032 case ICMD_D2I: /* ..., value ==> ..., (int) value */
2034 var_to_reg_flt(s1, src, REG_FTMP1);
2035 d = reg_of_var(iptr->dst, REG_ITMP1);
2036 x86_64_cvttsd2si_reg_reg(s1, d);
2037 x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, d); /* corner cases */
2038 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2039 x86_64_jcc(X86_64_CC_NE, a);
2040 M_FLTMOVE(s1, REG_FTMP1);
2041 x86_64_mov_imm_reg((s8) asm_builtin_d2i, REG_ITMP2);
2042 x86_64_call_reg(REG_ITMP2);
2043 M_INTMOVE(REG_RESULT, d);
2044 store_reg_to_var_int(iptr->dst, d);
2047 case ICMD_F2L: /* ..., value ==> ..., (long) value */
2049 var_to_reg_flt(s1, src, REG_FTMP1);
2050 d = reg_of_var(iptr->dst, REG_ITMP1);
2051 x86_64_cvttss2siq_reg_reg(s1, d);
/* the 64-bit sentinel cannot be a cmp immediate; load it first */
2052 x86_64_mov_imm_reg(0x8000000000000000, REG_ITMP2);
2053 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, d); /* corner cases */
2054 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2055 x86_64_jcc(X86_64_CC_NE, a);
2056 M_FLTMOVE(s1, REG_FTMP1);
2057 x86_64_mov_imm_reg((s8) asm_builtin_f2l, REG_ITMP2);
2058 x86_64_call_reg(REG_ITMP2);
2059 M_INTMOVE(REG_RESULT, d);
2060 store_reg_to_var_int(iptr->dst, d);
2063 case ICMD_D2L: /* ..., value ==> ..., (long) value */
2065 var_to_reg_flt(s1, src, REG_FTMP1);
2066 d = reg_of_var(iptr->dst, REG_ITMP1);
2067 x86_64_cvttsd2siq_reg_reg(s1, d);
2068 x86_64_mov_imm_reg(0x8000000000000000, REG_ITMP2);
2069 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, d); /* corner cases */
2070 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2071 x86_64_jcc(X86_64_CC_NE, a);
2072 M_FLTMOVE(s1, REG_FTMP1);
2073 x86_64_mov_imm_reg((s8) asm_builtin_d2l, REG_ITMP2);
2074 x86_64_call_reg(REG_ITMP2);
2075 M_INTMOVE(REG_RESULT, d);
2076 store_reg_to_var_int(iptr->dst, d);
/* float <-> double conversions: single SSE instructions, no corner
   cases to handle (widening is exact; narrowing rounds per SSE). */
2079 case ICMD_F2D: /* ..., value ==> ..., (double) value */
2081 var_to_reg_flt(s1, src, REG_FTMP1);
2082 d = reg_of_var(iptr->dst, REG_FTMP3);
2083 x86_64_cvtss2sd_reg_reg(s1, d);
2084 store_reg_to_var_flt(iptr->dst, d);
2087 case ICMD_D2F: /* ..., value ==> ..., (float) value */
2089 var_to_reg_flt(s1, src, REG_FTMP1);
2090 d = reg_of_var(iptr->dst, REG_FTMP3);
2091 x86_64_cvtsd2ss_reg_reg(s1, d);
2092 store_reg_to_var_flt(iptr->dst, d);
/* Floating-point comparisons, branch-free: start with d = 0 (equal),
   keep +1 and -1 in REG_ITMP1/REG_ITMP2, run ucomiss/ucomisd, and use
   cmov on the B (below), A (above) and P (parity = unordered/NaN)
   conditions. The CMPL variants map NaN to -1 (GT side), the CMPG
   variants map NaN to +1 (LT side), matching the JVM fcmpl/fcmpg
   semantics.
   NOTE(review): the stack comments say "< => 1, > => -1" while the
   bytecode spec defines val1<val2 => -1; the cmovs select ITMP1(+1) on
   CC_B and ITMP2(-1) on CC_A, consistent with the comments here --
   presumably the operand order of ucomiss makes this come out right;
   confirm against x86_64_ucomiss_reg_reg's operand convention. */
2095 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
2096 /* == => 0, < => 1, > => -1 */
2098 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2099 var_to_reg_flt(s2, src, REG_FTMP2);
2100 d = reg_of_var(iptr->dst, REG_ITMP3);
2101 x86_64_alu_reg_reg(X86_64_XOR, d, d);
2102 x86_64_mov_imm_reg(1, REG_ITMP1);
2103 x86_64_mov_imm_reg(-1, REG_ITMP2);
2104 x86_64_ucomiss_reg_reg(s1, s2);
2105 x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2106 x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2107 x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
2108 store_reg_to_var_int(iptr->dst, d);
2111 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
2112 /* == => 0, < => 1, > => -1 */
2114 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2115 var_to_reg_flt(s2, src, REG_FTMP2);
2116 d = reg_of_var(iptr->dst, REG_ITMP3);
2117 x86_64_alu_reg_reg(X86_64_XOR, d, d);
2118 x86_64_mov_imm_reg(1, REG_ITMP1);
2119 x86_64_mov_imm_reg(-1, REG_ITMP2);
2120 x86_64_ucomiss_reg_reg(s1, s2);
2121 x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2122 x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2123 x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
2124 store_reg_to_var_int(iptr->dst, d);
2127 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
2128 /* == => 0, < => 1, > => -1 */
2130 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2131 var_to_reg_flt(s2, src, REG_FTMP2);
2132 d = reg_of_var(iptr->dst, REG_ITMP3);
2133 x86_64_alu_reg_reg(X86_64_XOR, d, d);
2134 x86_64_mov_imm_reg(1, REG_ITMP1);
2135 x86_64_mov_imm_reg(-1, REG_ITMP2);
2136 x86_64_ucomisd_reg_reg(s1, s2);
2137 x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2138 x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2139 x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
2140 store_reg_to_var_int(iptr->dst, d);
2143 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
2144 /* == => 0, < => 1, > => -1 */
2146 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2147 var_to_reg_flt(s2, src, REG_FTMP2);
2148 d = reg_of_var(iptr->dst, REG_ITMP3);
2149 x86_64_alu_reg_reg(X86_64_XOR, d, d);
2150 x86_64_mov_imm_reg(1, REG_ITMP1);
2151 x86_64_mov_imm_reg(-1, REG_ITMP2);
2152 x86_64_ucomisd_reg_reg(s1, s2);
2153 x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2154 x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2155 x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
2156 store_reg_to_var_int(iptr->dst, d);
2160 /* memory operations **************************************************/
/* gen_bound_check: emit an array bounds check -- compare index s2
   against the array header's size field at s1 and, when index >= size
   (unsigned AE catches negative indices too), branch to an
   out-of-bounds handler; the 0 displacement in the jcc is patched via
   codegen_addxboundrefs. Only emitted when checkbounds is enabled. */
2162 #define gen_bound_check \
2163 if (checkbounds) { \
2164 x86_64_alul_membase_reg(X86_64_CMP, s1, OFFSET(java_arrayheader, size), s2); \
2165 x86_64_jcc(X86_64_CC_AE, 0); \
2166 codegen_addxboundrefs(mcodeptr, s2); \
/* Array length and array load opcodes. Common shape: s1 = array ref,
   s2 = index; iptr->op1 == 0 means the verifier could not prove the
   reference non-null, so a null check is emitted. The memindex scale
   (0/1/2/3) matches the element size (1/2/4/8 bytes); char/short/byte
   loads use zero-/sign-extending moves as the element type requires. */
2169 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
2171 var_to_reg_int(s1, src, REG_ITMP1);
2172 d = reg_of_var(iptr->dst, REG_ITMP3);
2173 gen_nullptr_check(s1);
2174 x86_64_movl_membase_reg(s1, OFFSET(java_arrayheader, size), d);
2175 store_reg_to_var_int(iptr->dst, d);
2178 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2180 var_to_reg_int(s1, src->prev, REG_ITMP1);
2181 var_to_reg_int(s2, src, REG_ITMP2);
2182 d = reg_of_var(iptr->dst, REG_ITMP3);
2183 if (iptr->op1 == 0) {
2184 gen_nullptr_check(s1);
2187 x86_64_mov_memindex_reg(OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2188 store_reg_to_var_int(iptr->dst, d);
2191 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2193 var_to_reg_int(s1, src->prev, REG_ITMP1);
2194 var_to_reg_int(s2, src, REG_ITMP2);
2195 d = reg_of_var(iptr->dst, REG_ITMP3);
2196 if (iptr->op1 == 0) {
2197 gen_nullptr_check(s1);
2200 x86_64_mov_memindex_reg(OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2201 store_reg_to_var_int(iptr->dst, d);
2204 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2206 var_to_reg_int(s1, src->prev, REG_ITMP1);
2207 var_to_reg_int(s2, src, REG_ITMP2);
2208 d = reg_of_var(iptr->dst, REG_ITMP3);
2209 if (iptr->op1 == 0) {
2210 gen_nullptr_check(s1);
2213 x86_64_movl_memindex_reg(OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2214 store_reg_to_var_int(iptr->dst, d);
2217 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2219 var_to_reg_int(s1, src->prev, REG_ITMP1);
2220 var_to_reg_int(s2, src, REG_ITMP2);
2221 d = reg_of_var(iptr->dst, REG_FTMP3);
2222 if (iptr->op1 == 0) {
2223 gen_nullptr_check(s1);
2226 x86_64_movss_memindex_reg(OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2227 store_reg_to_var_flt(iptr->dst, d);
2230 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2232 var_to_reg_int(s1, src->prev, REG_ITMP1);
2233 var_to_reg_int(s2, src, REG_ITMP2);
2234 d = reg_of_var(iptr->dst, REG_FTMP3);
2235 if (iptr->op1 == 0) {
2236 gen_nullptr_check(s1);
2239 x86_64_movsd_memindex_reg(OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2240 store_reg_to_var_flt(iptr->dst, d);
2243 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2245 var_to_reg_int(s1, src->prev, REG_ITMP1);
2246 var_to_reg_int(s2, src, REG_ITMP2);
2247 d = reg_of_var(iptr->dst, REG_ITMP3);
2248 if (iptr->op1 == 0) {
2249 gen_nullptr_check(s1);
/* char is unsigned 16-bit: zero-extend */
2252 x86_64_movzwq_memindex_reg(OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2253 store_reg_to_var_int(iptr->dst, d);
2256 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2258 var_to_reg_int(s1, src->prev, REG_ITMP1);
2259 var_to_reg_int(s2, src, REG_ITMP2);
2260 d = reg_of_var(iptr->dst, REG_ITMP3);
2261 if (iptr->op1 == 0) {
2262 gen_nullptr_check(s1);
/* short is signed 16-bit: sign-extend */
2265 x86_64_movswq_memindex_reg(OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2266 store_reg_to_var_int(iptr->dst, d);
2269 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2271 var_to_reg_int(s1, src->prev, REG_ITMP1);
2272 var_to_reg_int(s2, src, REG_ITMP2);
2273 d = reg_of_var(iptr->dst, REG_ITMP3);
2274 if (iptr->op1 == 0) {
2275 gen_nullptr_check(s1);
/* byte is signed 8-bit: sign-extend */
2278 x86_64_movsbq_memindex_reg(OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2279 store_reg_to_var_int(iptr->dst, d);
2283 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2285 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2286 var_to_reg_int(s2, src->prev, REG_ITMP2);
2287 if (iptr->op1 == 0) {
2288 gen_nullptr_check(s1);
2291 var_to_reg_int(s3, src, REG_ITMP3);
2292 x86_64_mov_reg_memindex(s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2295 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2297 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2298 var_to_reg_int(s2, src->prev, REG_ITMP2);
2299 if (iptr->op1 == 0) {
2300 gen_nullptr_check(s1);
2303 var_to_reg_int(s3, src, REG_ITMP3);
2304 x86_64_mov_reg_memindex(s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2307 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2309 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2310 var_to_reg_int(s2, src->prev, REG_ITMP2);
2311 if (iptr->op1 == 0) {
2312 gen_nullptr_check(s1);
2315 var_to_reg_int(s3, src, REG_ITMP3);
2316 x86_64_movl_reg_memindex(s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2319 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2321 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2322 var_to_reg_int(s2, src->prev, REG_ITMP2);
2323 if (iptr->op1 == 0) {
2324 gen_nullptr_check(s1);
2327 var_to_reg_flt(s3, src, REG_FTMP3);
2328 x86_64_movss_reg_memindex(s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2331 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2333 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2334 var_to_reg_int(s2, src->prev, REG_ITMP2);
2335 if (iptr->op1 == 0) {
2336 gen_nullptr_check(s1);
2339 var_to_reg_flt(s3, src, REG_FTMP3);
2340 x86_64_movsd_reg_memindex(s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2343 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2345 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2346 var_to_reg_int(s2, src->prev, REG_ITMP2);
2347 if (iptr->op1 == 0) {
2348 gen_nullptr_check(s1);
2351 var_to_reg_int(s3, src, REG_ITMP3);
2352 x86_64_movw_reg_memindex(s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2355 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2357 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2358 var_to_reg_int(s2, src->prev, REG_ITMP2);
2359 if (iptr->op1 == 0) {
2360 gen_nullptr_check(s1);
2363 var_to_reg_int(s3, src, REG_ITMP3);
2364 x86_64_movw_reg_memindex(s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2367 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2369 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2370 var_to_reg_int(s2, src->prev, REG_ITMP2);
2371 if (iptr->op1 == 0) {
2372 gen_nullptr_check(s1);
2375 var_to_reg_int(s3, src, REG_ITMP3);
2376 x86_64_movb_reg_memindex(s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2380 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2381 /* op1 = type, val.a = field address */
2383 /* if class isn't yet initialized, do it */
2384 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2385 /* call helper function which patches this code */
2386 x86_64_mov_imm_reg((s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2387 x86_64_mov_imm_reg((s8) asm_check_clinit, REG_ITMP2);
2388 x86_64_call_reg(REG_ITMP2);
2391 a = dseg_addaddress(&(((fieldinfo *) iptr->val.a)->value));
2392 /* x86_64_mov_imm_reg(0, REG_ITMP2); */
2393 /* dseg_adddata(mcodeptr); */
2394 /* x86_64_mov_membase_reg(REG_ITMP2, a, REG_ITMP2); */
2395 x86_64_mov_membase_reg(RIP, -(((s8) mcodeptr + 7) - (s8) mcodebase) + a, REG_ITMP2);
2396 switch (iptr->op1) {
2398 var_to_reg_int(s2, src, REG_ITMP1);
2399 x86_64_movl_reg_membase(s2, REG_ITMP2, 0);
2403 var_to_reg_int(s2, src, REG_ITMP1);
2404 x86_64_mov_reg_membase(s2, REG_ITMP2, 0);
2407 var_to_reg_flt(s2, src, REG_FTMP1);
2408 x86_64_movss_reg_membase(s2, REG_ITMP2, 0);
2411 var_to_reg_flt(s2, src, REG_FTMP1);
2412 x86_64_movsd_reg_membase(s2, REG_ITMP2, 0);
2414 default: panic("internal error");
2418 case ICMD_GETSTATIC: /* ... ==> ..., value */
2419 /* op1 = type, val.a = field address */
2421 /* if class isn't yet initialized, do it */
2422 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2423 /* call helper function which patches this code */
2424 x86_64_mov_imm_reg((s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2425 x86_64_mov_imm_reg((s8) asm_check_clinit, REG_ITMP2);
2426 x86_64_call_reg(REG_ITMP2);
2429 a = dseg_addaddress(&(((fieldinfo *) iptr->val.a)->value));
2430 /* x86_64_mov_imm_reg(0, REG_ITMP2); */
2431 /* dseg_adddata(mcodeptr); */
2432 /* x86_64_mov_membase_reg(REG_ITMP2, a, REG_ITMP2); */
2433 x86_64_mov_membase_reg(RIP, -(((s8) mcodeptr + 7) - (s8) mcodebase) + a, REG_ITMP2);
2434 switch (iptr->op1) {
2436 d = reg_of_var(iptr->dst, REG_ITMP1);
2437 x86_64_movl_membase_reg(REG_ITMP2, 0, d);
2438 store_reg_to_var_int(iptr->dst, d);
2442 d = reg_of_var(iptr->dst, REG_ITMP1);
2443 x86_64_mov_membase_reg(REG_ITMP2, 0, d);
2444 store_reg_to_var_int(iptr->dst, d);
2447 d = reg_of_var(iptr->dst, REG_ITMP1);
2448 x86_64_movss_membase_reg(REG_ITMP2, 0, d);
2449 store_reg_to_var_flt(iptr->dst, d);
2452 d = reg_of_var(iptr->dst, REG_ITMP1);
2453 x86_64_movsd_membase_reg(REG_ITMP2, 0, d);
2454 store_reg_to_var_flt(iptr->dst, d);
2456 default: panic("internal error");
2460 case ICMD_PUTFIELD: /* ..., value ==> ... */
2461 /* op1 = type, val.i = field offset */
2463 /* if class isn't yet initialized, do it */
2464 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2465 /* call helper function which patches this code */
2466 x86_64_mov_imm_reg((s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2467 x86_64_mov_imm_reg((s8) asm_check_clinit, REG_ITMP2);
2468 x86_64_call_reg(REG_ITMP2);
2471 a = ((fieldinfo *)(iptr->val.a))->offset;
2472 var_to_reg_int(s1, src->prev, REG_ITMP1);
2473 switch (iptr->op1) {
2475 var_to_reg_int(s2, src, REG_ITMP2);
2476 gen_nullptr_check(s1);
2477 x86_64_movl_reg_membase(s2, s1, a);
2481 var_to_reg_int(s2, src, REG_ITMP2);
2482 gen_nullptr_check(s1);
2483 x86_64_mov_reg_membase(s2, s1, a);
2486 var_to_reg_flt(s2, src, REG_FTMP2);
2487 gen_nullptr_check(s1);
2488 x86_64_movss_reg_membase(s2, s1, a);
2491 var_to_reg_flt(s2, src, REG_FTMP2);
2492 gen_nullptr_check(s1);
2493 x86_64_movsd_reg_membase(s2, s1, a);
2495 default: panic ("internal error");
2499 case ICMD_GETFIELD: /* ... ==> ..., value */
2500 /* op1 = type, val.i = field offset */
2502 a = ((fieldinfo *)(iptr->val.a))->offset;
2503 var_to_reg_int(s1, src, REG_ITMP1);
2504 switch (iptr->op1) {
2506 d = reg_of_var(iptr->dst, REG_ITMP1);
2507 gen_nullptr_check(s1);
2508 x86_64_movl_membase_reg(s1, a, d);
2509 store_reg_to_var_int(iptr->dst, d);
2513 d = reg_of_var(iptr->dst, REG_ITMP1);
2514 gen_nullptr_check(s1);
2515 x86_64_mov_membase_reg(s1, a, d);
2516 store_reg_to_var_int(iptr->dst, d);
2519 d = reg_of_var(iptr->dst, REG_FTMP1);
2520 gen_nullptr_check(s1);
2521 x86_64_movss_membase_reg(s1, a, d);
2522 store_reg_to_var_flt(iptr->dst, d);
2525 d = reg_of_var(iptr->dst, REG_FTMP1);
2526 gen_nullptr_check(s1);
2527 x86_64_movsd_membase_reg(s1, a, d);
2528 store_reg_to_var_flt(iptr->dst, d);
2530 default: panic ("internal error");
2535 /* branch operations **************************************************/
2537 /* #define ALIGNCODENOP {if((int)((long)mcodeptr&7)){M_NOP;}} */
/* Code alignment is currently disabled on x86_64: the macro expands to an
   empty statement instead of padding mcodeptr to an 8-byte boundary with
   NOPs (the commented-out variant above). The do{}while(0) form keeps it
   safe to use as a single statement, e.g. in an unbraced if/else. */
2538 #define ALIGNCODENOP do {} while (0)
2540 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2542 var_to_reg_int(s1, src, REG_ITMP1);
2543 M_INTMOVE(s1, REG_ITMP1_XPTR);
2545 x86_64_call_imm(0); /* passing exception pointer */
2546 x86_64_pop_reg(REG_ITMP2_XPC);
2548 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
2549 x86_64_jmp_reg(REG_ITMP3);
2553 case ICMD_GOTO: /* ... ==> ... */
2554 /* op1 = target JavaVM pc */
2557 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2561 case ICMD_JSR: /* ... ==> ... */
2562 /* op1 = target JavaVM pc */
2565 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2568 case ICMD_RET: /* ... ==> ... */
2569 /* op1 = local variable */
2571 var = &(locals[iptr->op1][TYPE_ADR]);
2572 var_to_reg_int(s1, var, REG_ITMP1);
2576 case ICMD_IFNULL: /* ..., value ==> ... */
2577 /* op1 = target JavaVM pc */
2579 if (src->flags & INMEMORY) {
2580 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
2583 x86_64_test_reg_reg(src->regoff, src->regoff);
2585 x86_64_jcc(X86_64_CC_E, 0);
2586 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2589 case ICMD_IFNONNULL: /* ..., value ==> ... */
2590 /* op1 = target JavaVM pc */
2592 if (src->flags & INMEMORY) {
2593 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
2596 x86_64_test_reg_reg(src->regoff, src->regoff);
2598 x86_64_jcc(X86_64_CC_NE, 0);
2599 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2602 case ICMD_IFEQ: /* ..., value ==> ... */
2603 /* op1 = target JavaVM pc, val.i = constant */
2605 x86_64_emit_ifcc(X86_64_CC_E, src, iptr);
2608 case ICMD_IFLT: /* ..., value ==> ... */
2609 /* op1 = target JavaVM pc, val.i = constant */
2611 x86_64_emit_ifcc(X86_64_CC_L, src, iptr);
2614 case ICMD_IFLE: /* ..., value ==> ... */
2615 /* op1 = target JavaVM pc, val.i = constant */
2617 x86_64_emit_ifcc(X86_64_CC_LE, src, iptr);
2620 case ICMD_IFNE: /* ..., value ==> ... */
2621 /* op1 = target JavaVM pc, val.i = constant */
2623 x86_64_emit_ifcc(X86_64_CC_NE, src, iptr);
2626 case ICMD_IFGT: /* ..., value ==> ... */
2627 /* op1 = target JavaVM pc, val.i = constant */
2629 x86_64_emit_ifcc(X86_64_CC_G, src, iptr);
2632 case ICMD_IFGE: /* ..., value ==> ... */
2633 /* op1 = target JavaVM pc, val.i = constant */
2635 x86_64_emit_ifcc(X86_64_CC_GE, src, iptr);
2638 case ICMD_IF_LEQ: /* ..., value ==> ... */
2639 /* op1 = target JavaVM pc, val.l = constant */
2641 x86_64_emit_if_lcc(X86_64_CC_E, src, iptr);
2644 case ICMD_IF_LLT: /* ..., value ==> ... */
2645 /* op1 = target JavaVM pc, val.l = constant */
2647 x86_64_emit_if_lcc(X86_64_CC_L, src, iptr);
2650 case ICMD_IF_LLE: /* ..., value ==> ... */
2651 /* op1 = target JavaVM pc, val.l = constant */
2653 x86_64_emit_if_lcc(X86_64_CC_LE, src, iptr);
2656 case ICMD_IF_LNE: /* ..., value ==> ... */
2657 /* op1 = target JavaVM pc, val.l = constant */
2659 x86_64_emit_if_lcc(X86_64_CC_NE, src, iptr);
2662 case ICMD_IF_LGT: /* ..., value ==> ... */
2663 /* op1 = target JavaVM pc, val.l = constant */
2665 x86_64_emit_if_lcc(X86_64_CC_G, src, iptr);
2668 case ICMD_IF_LGE: /* ..., value ==> ... */
2669 /* op1 = target JavaVM pc, val.l = constant */
2671 x86_64_emit_if_lcc(X86_64_CC_GE, src, iptr);
2674 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2675 /* op1 = target JavaVM pc */
2677 x86_64_emit_if_icmpcc(X86_64_CC_E, src, iptr);
2680 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2681 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2683 x86_64_emit_if_lcmpcc(X86_64_CC_E, src, iptr);
2686 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2687 /* op1 = target JavaVM pc */
2689 x86_64_emit_if_icmpcc(X86_64_CC_NE, src, iptr);
2692 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2693 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2695 x86_64_emit_if_lcmpcc(X86_64_CC_NE, src, iptr);
2698 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2699 /* op1 = target JavaVM pc */
2701 x86_64_emit_if_icmpcc(X86_64_CC_L, src, iptr);
2704 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2705 /* op1 = target JavaVM pc */
2707 x86_64_emit_if_lcmpcc(X86_64_CC_L, src, iptr);
2710 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2711 /* op1 = target JavaVM pc */
2713 x86_64_emit_if_icmpcc(X86_64_CC_G, src, iptr);
2716 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2717 /* op1 = target JavaVM pc */
2719 x86_64_emit_if_lcmpcc(X86_64_CC_G, src, iptr);
2722 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2723 /* op1 = target JavaVM pc */
2725 x86_64_emit_if_icmpcc(X86_64_CC_LE, src, iptr);
2728 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2729 /* op1 = target JavaVM pc */
2731 x86_64_emit_if_lcmpcc(X86_64_CC_LE, src, iptr);
2734 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2735 /* op1 = target JavaVM pc */
2737 x86_64_emit_if_icmpcc(X86_64_CC_GE, src, iptr);
2740 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2741 /* op1 = target JavaVM pc */
2743 x86_64_emit_if_lcmpcc(X86_64_CC_GE, src, iptr);
2746 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
2748 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2751 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2752 /* val.i = constant */
2754 var_to_reg_int(s1, src, REG_ITMP1);
2755 d = reg_of_var(iptr->dst, REG_ITMP3);
2757 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2759 M_INTMOVE(s1, REG_ITMP1);
2762 x86_64_movl_imm_reg(iptr[1].val.i, d);
2764 x86_64_movl_imm_reg(s3, REG_ITMP2);
2765 x86_64_testl_reg_reg(s1, s1);
2766 x86_64_cmovccl_reg_reg(X86_64_CC_E, REG_ITMP2, d);
2767 store_reg_to_var_int(iptr->dst, d);
2770 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2771 /* val.i = constant */
2773 var_to_reg_int(s1, src, REG_ITMP1);
2774 d = reg_of_var(iptr->dst, REG_ITMP3);
2776 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2778 M_INTMOVE(s1, REG_ITMP1);
2781 x86_64_movl_imm_reg(iptr[1].val.i, d);
2783 x86_64_movl_imm_reg(s3, REG_ITMP2);
2784 x86_64_testl_reg_reg(s1, s1);
2785 x86_64_cmovccl_reg_reg(X86_64_CC_NE, REG_ITMP2, d);
2786 store_reg_to_var_int(iptr->dst, d);
2789 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2790 /* val.i = constant */
2792 var_to_reg_int(s1, src, REG_ITMP1);
2793 d = reg_of_var(iptr->dst, REG_ITMP3);
2795 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2797 M_INTMOVE(s1, REG_ITMP1);
2800 x86_64_movl_imm_reg(iptr[1].val.i, d);
2802 x86_64_movl_imm_reg(s3, REG_ITMP2);
2803 x86_64_testl_reg_reg(s1, s1);
2804 x86_64_cmovccl_reg_reg(X86_64_CC_L, REG_ITMP2, d);
2805 store_reg_to_var_int(iptr->dst, d);
2808 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2809 /* val.i = constant */
2811 var_to_reg_int(s1, src, REG_ITMP1);
2812 d = reg_of_var(iptr->dst, REG_ITMP3);
2814 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2816 M_INTMOVE(s1, REG_ITMP1);
2819 x86_64_movl_imm_reg(iptr[1].val.i, d);
2821 x86_64_movl_imm_reg(s3, REG_ITMP2);
2822 x86_64_testl_reg_reg(s1, s1);
2823 x86_64_cmovccl_reg_reg(X86_64_CC_GE, REG_ITMP2, d);
2824 store_reg_to_var_int(iptr->dst, d);
2827 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2828 /* val.i = constant */
2830 var_to_reg_int(s1, src, REG_ITMP1);
2831 d = reg_of_var(iptr->dst, REG_ITMP3);
2833 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2835 M_INTMOVE(s1, REG_ITMP1);
2838 x86_64_movl_imm_reg(iptr[1].val.i, d);
2840 x86_64_movl_imm_reg(s3, REG_ITMP2);
2841 x86_64_testl_reg_reg(s1, s1);
2842 x86_64_cmovccl_reg_reg(X86_64_CC_G, REG_ITMP2, d);
2843 store_reg_to_var_int(iptr->dst, d);
2846 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2847 /* val.i = constant */
2849 var_to_reg_int(s1, src, REG_ITMP1);
2850 d = reg_of_var(iptr->dst, REG_ITMP3);
2852 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2854 M_INTMOVE(s1, REG_ITMP1);
2857 x86_64_movl_imm_reg(iptr[1].val.i, d);
2859 x86_64_movl_imm_reg(s3, REG_ITMP2);
2860 x86_64_testl_reg_reg(s1, s1);
2861 x86_64_cmovccl_reg_reg(X86_64_CC_LE, REG_ITMP2, d);
2862 store_reg_to_var_int(iptr->dst, d);
2866 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2870 var_to_reg_int(s1, src, REG_RESULT);
2871 M_INTMOVE(s1, REG_RESULT);
2873 #if defined(USE_THREADS)
2874 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2875 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
2876 x86_64_mov_reg_membase(REG_RESULT, REG_SP, maxmemuse * 8);
2877 x86_64_mov_imm_reg((u8) builtin_monitorexit, REG_ITMP1);
2878 x86_64_call_reg(REG_ITMP1);
2879 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, REG_RESULT);
2883 goto nowperformreturn;
2885 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2888 var_to_reg_flt(s1, src, REG_FRESULT);
2889 M_FLTMOVE(s1, REG_FRESULT);
2891 #if defined(USE_THREADS)
2892 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2893 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
2894 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, maxmemuse * 8);
2895 x86_64_mov_imm_reg((u8) builtin_monitorexit, REG_ITMP1);
2896 x86_64_call_reg(REG_ITMP1);
2897 x86_64_movq_membase_reg(REG_SP, maxmemuse * 8, REG_FRESULT);
2901 goto nowperformreturn;
2903 case ICMD_RETURN: /* ... ==> ... */
2905 #if defined(USE_THREADS)
2906 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2907 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
2908 x86_64_mov_imm_reg((u8) builtin_monitorexit, REG_ITMP1);
2909 x86_64_call_reg(REG_ITMP1);
2917 p = parentargs_base;
2919 /* call trace function */
2921 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
2923 x86_64_mov_reg_membase(REG_RESULT, REG_SP, 0 * 8);
2924 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, 1 * 8);
2926 x86_64_mov_imm_reg((s8) method, argintregs[0]);
2927 x86_64_mov_reg_reg(REG_RESULT, argintregs[1]);
2928 M_FLTMOVE(REG_FRESULT, argfltregs[0]);
2929 M_FLTMOVE(REG_FRESULT, argfltregs[1]);
2931 x86_64_mov_imm_reg((s8) builtin_displaymethodstop, REG_ITMP1);
2932 x86_64_call_reg(REG_ITMP1);
2934 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_RESULT);
2935 x86_64_movq_membase_reg(REG_SP, 1 * 8, REG_FRESULT);
2937 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
2940 /* restore saved registers */
2941 for (r = savintregcnt - 1; r >= maxsavintreguse; r--) {
2942 p--; x86_64_mov_membase_reg(REG_SP, p * 8, savintregs[r]);
2944 for (r = savfltregcnt - 1; r >= maxsavfltreguse; r--) {
2945 p--; x86_64_movq_membase_reg(REG_SP, p * 8, savfltregs[r]);
2948 /* deallocate stack */
2949 if (parentargs_base) {
2950 x86_64_alu_imm_reg(X86_64_ADD, parentargs_base * 8, REG_SP);
2959 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2964 tptr = (void **) iptr->target;
2966 s4ptr = iptr->val.a;
2967 l = s4ptr[1]; /* low */
2968 i = s4ptr[2]; /* high */
2970 var_to_reg_int(s1, src, REG_ITMP1);
2971 M_INTMOVE(s1, REG_ITMP1);
2973 x86_64_alul_imm_reg(X86_64_SUB, l, REG_ITMP1);
2978 x86_64_alul_imm_reg(X86_64_CMP, i - 1, REG_ITMP1);
2979 x86_64_jcc(X86_64_CC_A, 0);
2981 /* codegen_addreference(BlockPtrOfPC(s4ptr[0]), mcodeptr); */
2982 codegen_addreference((basicblock *) tptr[0], mcodeptr);
2984 /* build jump table top down and use address of lowest entry */
2986 /* s4ptr += 3 + i; */
2990 /* dseg_addtarget(BlockPtrOfPC(*--s4ptr)); */
2991 dseg_addtarget((basicblock *) tptr[0]);
2995 /* length of dataseg after last dseg_addtarget is used by load */
2997 x86_64_mov_imm_reg(0, REG_ITMP2);
2998 dseg_adddata(mcodeptr);
2999 x86_64_mov_memindex_reg(-dseglen, REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
3000 x86_64_jmp_reg(REG_ITMP1);
3006 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
3008 s4 i, l, val, *s4ptr;
3011 tptr = (void **) iptr->target;
3013 s4ptr = iptr->val.a;
3014 l = s4ptr[0]; /* default */
3015 i = s4ptr[1]; /* count */
3017 MCODECHECK((i<<2)+8);
3018 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
3024 x86_64_alul_imm_reg(X86_64_CMP, val, s1);
3025 x86_64_jcc(X86_64_CC_E, 0);
3026 /* codegen_addreference(BlockPtrOfPC(s4ptr[1]), mcodeptr); */
3027 codegen_addreference((basicblock *) tptr[0], mcodeptr);
3031 /* codegen_addreference(BlockPtrOfPC(l), mcodeptr); */
3033 tptr = (void **) iptr->target;
3034 codegen_addreference((basicblock *) tptr[0], mcodeptr);
3041 case ICMD_BUILTIN3: /* ..., arg1, arg2, arg3 ==> ... */
3042 /* op1 = return type, val.a = function pointer*/
3046 case ICMD_BUILTIN2: /* ..., arg1, arg2 ==> ... */
3047 /* op1 = return type, val.a = function pointer*/
3051 case ICMD_BUILTIN1: /* ..., arg1 ==> ... */
3052 /* op1 = return type, val.a = function pointer*/
3056 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3057 /* op1 = arg count, val.a = method pointer */
3059 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3060 /* op1 = arg count, val.a = method pointer */
3062 case ICMD_INVOKEVIRTUAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3063 /* op1 = arg count, val.a = method pointer */
3065 case ICMD_INVOKEINTERFACE:/*.., objectref, [arg1, [arg2 ...]] ==> ... */
3066 /* op1 = arg count, val.a = method pointer */
3077 MCODECHECK((s3 << 1) + 64);
3082 /* copy arguments to registers or stack location */
3083 for (; --s3 >= 0; src = src->prev) {
3084 IS_INT_LNG_TYPE(src->type) ? iarg++ : farg++;
3090 s2 = (iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0 + (farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0;
3092 for (; --s3 >= 0; src = src->prev) {
3093 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
3094 if (src->varkind == ARGVAR) {
3095 if (IS_INT_LNG_TYPE(src->type)) {
3096 if (iarg >= INT_ARG_CNT) {
3100 if (farg >= FLT_ARG_CNT) {
3107 if (IS_INT_LNG_TYPE(src->type)) {
3108 if (iarg < INT_ARG_CNT) {
3109 s1 = argintregs[iarg];
3110 var_to_reg_int(d, src, s1);
3114 var_to_reg_int(d, src, REG_ITMP1);
3116 x86_64_mov_reg_membase(d, REG_SP, s2 * 8);
3120 if (farg < FLT_ARG_CNT) {
3121 s1 = argfltregs[farg];
3122 var_to_reg_flt(d, src, s1);
3126 var_to_reg_flt(d, src, REG_FTMP1);
3128 x86_64_movq_reg_membase(d, REG_SP, s2 * 8);
3134 switch (iptr->opc) {
3142 x86_64_mov_imm_reg(a, REG_ITMP1);
3143 x86_64_call_reg(REG_ITMP1);
3146 case ICMD_INVOKESTATIC:
3148 a = (s8) m->stubroutine;
3151 x86_64_mov_imm_reg(a, REG_ITMP2);
3152 x86_64_call_reg(REG_ITMP2);
3155 case ICMD_INVOKESPECIAL:
3157 a = (s8) m->stubroutine;
3160 gen_nullptr_check(argintregs[0]); /* first argument contains pointer */
3161 x86_64_mov_membase_reg(argintregs[0], 0, REG_ITMP2); /* access memory for hardware nullptr */
3162 x86_64_mov_imm_reg(a, REG_ITMP2);
3163 x86_64_call_reg(REG_ITMP2);
3166 case ICMD_INVOKEVIRTUAL:
3170 gen_nullptr_check(argintregs[0]);
3171 x86_64_mov_membase_reg(argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3172 x86_64_mov_membase32_reg(REG_ITMP2, OFFSET(vftbl, table[0]) + sizeof(methodptr) * m->vftblindex, REG_ITMP1);
3173 x86_64_call_reg(REG_ITMP1);
3176 case ICMD_INVOKEINTERFACE:
3181 gen_nullptr_check(argintregs[0]);
3182 x86_64_mov_membase_reg(argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3183 x86_64_mov_membase_reg(REG_ITMP2, OFFSET(vftbl, interfacetable[0]) - sizeof(methodptr) * ci->index, REG_ITMP2);
3184 x86_64_mov_membase32_reg(REG_ITMP2, sizeof(methodptr) * (m - ci->methods), REG_ITMP1);
3185 x86_64_call_reg(REG_ITMP1);
3190 error("Unkown ICMD-Command: %d", iptr->opc);
3193 /* d contains return type */
3195 if (d != TYPE_VOID) {
3196 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3197 s1 = reg_of_var(iptr->dst, REG_RESULT);
3198 M_INTMOVE(REG_RESULT, s1);
3199 store_reg_to_var_int(iptr->dst, s1);
3202 s1 = reg_of_var(iptr->dst, REG_FRESULT);
3203 M_FLTMOVE(REG_FRESULT, s1);
3204 store_reg_to_var_flt(iptr->dst, s1);
3211 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3213 /* op1: 0 == array, 1 == class */
3214 /* val.a: (classinfo*) superclass */
3216 /* superclass is an interface:
3218 * return (sub != NULL) &&
3219 * (sub->vftbl->interfacetablelength > super->index) &&
3220 * (sub->vftbl->interfacetable[-super->index] != NULL);
3222 * superclass is a class:
3224 * return ((sub != NULL) && (0
3225 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3226 * super->vftbl->diffval));
3230 classinfo *super = (classinfo*) iptr->val.a;
3232 var_to_reg_int(s1, src, REG_ITMP1);
3233 d = reg_of_var(iptr->dst, REG_ITMP3);
3235 M_INTMOVE(s1, REG_ITMP1);
3238 x86_64_alu_reg_reg(X86_64_XOR, d, d);
3239 if (iptr->op1) { /* class/interface */
3240 if (super->flags & ACC_INTERFACE) { /* interface */
3241 x86_64_test_reg_reg(s1, s1);
3243 /* TODO: clean up this calculation */
3244 a = 3; /* mov_membase_reg */
3245 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3247 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3248 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetablelength));
3251 CALCIMMEDIATEBYTES(a, super->index);
3256 a += 3; /* mov_membase_reg */
3257 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3262 x86_64_jcc(X86_64_CC_E, a);
3264 x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3265 x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetablelength), REG_ITMP2);
3266 x86_64_alu_imm_reg(X86_64_SUB, super->index, REG_ITMP2);
3267 x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3269 /* TODO: clean up this calculation */
3271 a += 3; /* mov_membase_reg */
3272 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3277 x86_64_jcc(X86_64_CC_LE, a);
3278 x86_64_mov_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP1);
3279 x86_64_test_reg_reg(REG_ITMP1, REG_ITMP1);
3280 x86_64_setcc_reg(X86_64_CC_NE, d);
3282 } else { /* class */
3283 x86_64_test_reg_reg(s1, s1);
3285 /* TODO: clean up this calculation */
3286 a = 3; /* mov_membase_reg */
3287 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3289 a += 10; /* mov_imm_reg */
3291 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3292 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, baseval));
3294 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3295 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3297 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3298 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3305 x86_64_jcc(X86_64_CC_E, a);
3307 x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3308 x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3309 x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, baseval), REG_ITMP1);
3310 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP3);
3311 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3312 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP3, REG_ITMP1);
3313 x86_64_alu_reg_reg(X86_64_XOR, d, d);
3314 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
3315 x86_64_setcc_reg(X86_64_CC_BE, d);
3319 panic("internal error: no inlined array instanceof");
3321 store_reg_to_var_int(iptr->dst, d);
3324 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3326 /* op1: 0 == array, 1 == class */
3327 /* val.a: (classinfo*) superclass */
3329 /* superclass is an interface:
3331 * OK if ((sub == NULL) ||
3332 * (sub->vftbl->interfacetablelength > super->index) &&
3333 * (sub->vftbl->interfacetable[-super->index] != NULL));
3335 * superclass is a class:
3337 * OK if ((sub == NULL) || (0
3338 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3339 * super->vftbl->diffval));
3343 classinfo *super = (classinfo*) iptr->val.a;
3345 d = reg_of_var(iptr->dst, REG_ITMP3);
3346 var_to_reg_int(s1, src, d);
3347 if (iptr->op1) { /* class/interface */
3348 if (super->flags & ACC_INTERFACE) { /* interface */
3349 x86_64_test_reg_reg(s1, s1);
3351 /* TODO: clean up this calculation */
3352 a = 3; /* mov_membase_reg */
3353 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3355 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3356 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetablelength));
3359 CALCIMMEDIATEBYTES(a, super->index);
3364 a += 3; /* mov_membase_reg */
3365 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3370 x86_64_jcc(X86_64_CC_E, a);
3372 x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3373 x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetablelength), REG_ITMP2);
3374 x86_64_alu_imm_reg(X86_64_SUB, super->index, REG_ITMP2);
3375 x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3376 x86_64_jcc(X86_64_CC_LE, 0);
3377 codegen_addxcastrefs(mcodeptr);
3378 x86_64_mov_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP2);
3379 x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3380 x86_64_jcc(X86_64_CC_E, 0);
3381 codegen_addxcastrefs(mcodeptr);
3383 } else { /* class */
3384 x86_64_test_reg_reg(s1, s1);
3386 /* TODO: clean up this calculation */
3387 a = 3; /* mov_membase_reg */
3388 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3389 a += 10; /* mov_imm_reg */
3390 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3391 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, baseval));
3393 if (d != REG_ITMP3) {
3394 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3395 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3396 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3397 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3401 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3402 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3404 a += 10; /* mov_imm_reg */
3405 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3406 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3412 x86_64_jcc(X86_64_CC_E, a);
3414 x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3415 x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3416 x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, baseval), REG_ITMP1);
3417 if (d != REG_ITMP3) {
3418 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP3);
3419 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3420 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP3, REG_ITMP1);
3423 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP2);
3424 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
3425 x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3426 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3428 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
3429 x86_64_jcc(X86_64_CC_A, 0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3430 codegen_addxcastrefs(mcodeptr);
3434 panic("internal error: no inlined array checkcast");
3437 store_reg_to_var_int(iptr->dst, d);
3440 case ICMD_CHECKASIZE: /* ..., size ==> ..., size */
3442 if (src->flags & INMEMORY) {
3443 x86_64_alul_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
3446 x86_64_testl_reg_reg(src->regoff, src->regoff);
3448 x86_64_jcc(X86_64_CC_L, 0);
3449 codegen_addxcheckarefs(mcodeptr);
3452 case ICMD_CHECKOOM: /* ... ==> ... */
3454 x86_64_test_reg_reg(REG_RESULT, REG_RESULT);
3455 x86_64_jcc(X86_64_CC_E, 0);
3456 codegen_addxoomrefs(mcodeptr);
3459 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3460 /* op1 = dimension, val.a = array descriptor */
3462 /* check for negative sizes and copy sizes to stack if necessary */
3464 MCODECHECK((iptr->op1 << 1) + 64);
3466 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3467 var_to_reg_int(s2, src, REG_ITMP1);
3468 x86_64_testl_reg_reg(s2, s2);
3469 x86_64_jcc(X86_64_CC_L, 0);
3470 codegen_addxcheckarefs(mcodeptr);
3472 /* copy sizes to stack (argument numbers >= INT_ARG_CNT) */
3474 if (src->varkind != ARGVAR) {
3475 x86_64_mov_reg_membase(s2, REG_SP, (s1 + INT_ARG_CNT) * 8);
3479 /* a0 = dimension count */
3480 x86_64_mov_imm_reg(iptr->op1, argintregs[0]);
3482 /* a1 = arraydescriptor */
3483 x86_64_mov_imm_reg((s8) iptr->val.a, argintregs[1]);
3485 /* a2 = pointer to dimensions = stack pointer */
3486 x86_64_mov_reg_reg(REG_SP, argintregs[2]);
3488 x86_64_mov_imm_reg((s8) (builtin_nmultianewarray), REG_ITMP1);
3489 x86_64_call_reg(REG_ITMP1);
3491 s1 = reg_of_var(iptr->dst, REG_RESULT);
3492 M_INTMOVE(REG_RESULT, s1);
3493 store_reg_to_var_int(iptr->dst, s1);
3496 default: error("Unknown pseudo command: %d", iptr->opc);
3499 } /* for instruction */
3501 /* copy values to interface registers */
3503 src = bptr->outstack;
3504 len = bptr->outdepth;
3508 if ((src->varkind != STACKVAR)) {
3510 if (IS_FLT_DBL_TYPE(s2)) {
3511 var_to_reg_flt(s1, src, REG_FTMP1);
3512 if (!(interfaces[len][s2].flags & INMEMORY)) {
3513 M_FLTMOVE(s1, interfaces[len][s2].regoff);
3516 x86_64_movq_reg_membase(s1, REG_SP, 8 * interfaces[len][s2].regoff);
3520 var_to_reg_int(s1, src, REG_ITMP1);
3521 if (!(interfaces[len][s2].flags & INMEMORY)) {
3522 M_INTMOVE(s1, interfaces[len][s2].regoff);
3525 x86_64_mov_reg_membase(s1, REG_SP, interfaces[len][s2].regoff * 8);
3531 } /* if (bptr -> flags >= BBREACHED) */
3532 } /* for basic block */
3534 /* bptr -> mpc = (int)((u1*) mcodeptr - mcodebase); */
3538 /* generate bound check stubs */
3540 u1 *xcodeptr = NULL;
3542 for (; xboundrefs != NULL; xboundrefs = xboundrefs->next) {
3543 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3544 gen_resolvebranch(mcodebase + xboundrefs->branchpos,
3545 xboundrefs->branchpos, xcodeptr - mcodebase - (3 + 10 + 10 + 3));
3549 gen_resolvebranch(mcodebase + xboundrefs->branchpos,
3550 xboundrefs->branchpos, mcodeptr - mcodebase);
3554 /* move index register into REG_ITMP1 */
3555 x86_64_mov_reg_reg(xboundrefs->reg, REG_ITMP1); /* 3 bytes */
3557 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3558 dseg_adddata(mcodeptr);
3559 x86_64_mov_imm_reg(xboundrefs->branchpos - 6, REG_ITMP3); /* 10 bytes */
3560 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3562 if (xcodeptr != NULL) {
3563 x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3566 xcodeptr = mcodeptr;
3568 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3569 x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3570 x86_64_mov_imm_reg((s8) string_java_lang_ArrayIndexOutOfBoundsException, argintregs[0]);
3571 x86_64_mov_reg_reg(REG_ITMP1, argintregs[1]);
3572 x86_64_mov_imm_reg((s8) new_exception_int, REG_ITMP3);
3573 x86_64_call_reg(REG_ITMP3);
3574 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3575 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3577 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3578 x86_64_jmp_reg(REG_ITMP3);
3582 /* generate negative array size check stubs */
3586 for (; xcheckarefs != NULL; xcheckarefs = xcheckarefs->next) {
3587 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3588 gen_resolvebranch(mcodebase + xcheckarefs->branchpos,
3589 xcheckarefs->branchpos, xcodeptr - mcodebase - (10 + 10 + 3));
3593 gen_resolvebranch(mcodebase + xcheckarefs->branchpos,
3594 xcheckarefs->branchpos, mcodeptr - mcodebase);
3598 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3599 dseg_adddata(mcodeptr);
3600 x86_64_mov_imm_reg(xcheckarefs->branchpos - 6, REG_ITMP3); /* 10 bytes */
3601 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3603 if (xcodeptr != NULL) {
3604 x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3607 xcodeptr = mcodeptr;
3609 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3610 x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3611 x86_64_mov_imm_reg((s8) string_java_lang_NegativeArraySizeException, argintregs[0]);
3612 x86_64_mov_imm_reg((s8) new_exception, REG_ITMP3);
3613 x86_64_call_reg(REG_ITMP3);
3614 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3615 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3617 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3618 x86_64_jmp_reg(REG_ITMP3);
3622 /* generate cast check stubs */
3626 for (; xcastrefs != NULL; xcastrefs = xcastrefs->next) {
3627 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3628 gen_resolvebranch(mcodebase + xcastrefs->branchpos,
3629 xcastrefs->branchpos, xcodeptr - mcodebase - (10 + 10 + 3));
3633 gen_resolvebranch(mcodebase + xcastrefs->branchpos,
3634 xcastrefs->branchpos, mcodeptr - mcodebase);
3638 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3639 dseg_adddata(mcodeptr);
3640 x86_64_mov_imm_reg(xcastrefs->branchpos - 6, REG_ITMP3); /* 10 bytes */
3641 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3643 if (xcodeptr != NULL) {
3644 x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3647 xcodeptr = mcodeptr;
3649 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3650 x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3651 x86_64_mov_imm_reg((s8) string_java_lang_ClassCastException, argintregs[0]);
3652 x86_64_mov_imm_reg((s8) new_exception, REG_ITMP3);
3653 x86_64_call_reg(REG_ITMP3);
3654 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3655 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3657 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3658 x86_64_jmp_reg(REG_ITMP3);
3662 /* generate oom check stubs */
3666 for (; xoomrefs != NULL; xoomrefs = xoomrefs->next) {
3667 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3668 gen_resolvebranch(mcodebase + xoomrefs->branchpos,
3669 xoomrefs->branchpos, xcodeptr - mcodebase - (10 + 10 + 3));
3673 gen_resolvebranch(mcodebase + xoomrefs->branchpos,
3674 xoomrefs->branchpos, mcodeptr - mcodebase);
3678 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3679 dseg_adddata(mcodeptr);
3680 x86_64_mov_imm_reg(xoomrefs->branchpos - 6, REG_ITMP1); /* 10 bytes */
3681 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3683 if (xcodeptr != NULL) {
3684 x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3687 xcodeptr = mcodeptr;
3689 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3690 x86_64_push_reg(REG_ITMP2_XPC);
3691 x86_64_mov_imm_reg((u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3692 x86_64_call_reg(REG_ITMP1);
3693 x86_64_mov_membase_reg(REG_RESULT, 0, REG_ITMP3);
3694 x86_64_mov_imm_membase(0, REG_RESULT, 0);
3695 x86_64_mov_reg_reg(REG_ITMP3, REG_ITMP1_XPTR);
3696 x86_64_pop_reg(REG_ITMP2_XPC);
3698 x86_64_mov_imm_reg((u8) &_exceptionptr, REG_ITMP3);
3699 x86_64_mov_membase_reg(REG_ITMP3, 0, REG_ITMP1_XPTR);
3700 x86_64_mov_imm_membase(0, REG_ITMP3, 0);
3703 x86_64_mov_imm_reg((u8) asm_handle_exception, REG_ITMP3);
3704 x86_64_jmp_reg(REG_ITMP3);
3708 /* generate null pointer check stubs */
3712 for (; xnullrefs != NULL; xnullrefs = xnullrefs->next) {
3713 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3714 gen_resolvebranch(mcodebase + xnullrefs->branchpos,
3715 xnullrefs->branchpos, xcodeptr - mcodebase - (10 + 10 + 3));
3719 gen_resolvebranch(mcodebase + xnullrefs->branchpos,
3720 xnullrefs->branchpos, mcodeptr - mcodebase);
3724 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3725 dseg_adddata(mcodeptr);
3726 x86_64_mov_imm_reg(xnullrefs->branchpos - 6, REG_ITMP1); /* 10 bytes */
3727 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3729 if (xcodeptr != NULL) {
3730 x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3733 xcodeptr = mcodeptr;
3735 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3736 x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3737 x86_64_mov_imm_reg((s8) string_java_lang_NullPointerException, argintregs[0]);
3738 x86_64_mov_imm_reg((s8) new_exception, REG_ITMP3);
3739 x86_64_call_reg(REG_ITMP3);
3740 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3741 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3743 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3744 x86_64_jmp_reg(REG_ITMP3);
3750 codegen_finish((int)((u1*) mcodeptr - mcodebase));
3754 /* function createcompilerstub *************************************************
3756 creates a stub routine which calls the compiler
3758 *******************************************************************************/
3760 #define COMPSTUBSIZE 23
/* createcompilerstub: build a tiny trampoline that loads the methodinfo
   pointer into REG_ITMP1 and jumps to asm_call_jit_compiler, which compiles
   the method and patches the caller to use the compiled code.  Returns the
   stub address (return statement not visible in this chunk).
   COMPSTUBSIZE (23) = two 10-byte mov-imm64 instructions plus the indirect
   jump -- presumably; verify against the emitters if the stub is changed. */
3762 u1 *createcompilerstub(methodinfo *m)
3764 u1 *s = CNEW(u1, COMPSTUBSIZE); /* memory to hold the stub */
3765 mcodeptr = s; /* code generation pointer */
3767 /* code for the stub */
3768 x86_64_mov_imm_reg((s8) m, REG_ITMP1); /* pass method pointer to compiler */
3769 x86_64_mov_imm_reg((s8) asm_call_jit_compiler, REG_ITMP3);/* load address */
3770 x86_64_jmp_reg(REG_ITMP3); /* jump to compiler */
/* statistics accounting -- NOTE(review): in the full file this is likely
   guarded by an #ifdef STATISTICS that is elided here. */
3773 count_cstub_len += COMPSTUBSIZE;
3780 /* function removecompilerstub *************************************************
3782 deletes a compilerstub from memory (simply by freeing it)
3784 *******************************************************************************/
/* removecompilerstub: free the COMPSTUBSIZE bytes allocated by
   createcompilerstub for this method's compiler trampoline. */
3786 void removecompilerstub(u1 *stub)
3788 CFREE(stub, COMPSTUBSIZE);
3791 /* function: createnativestub **************************************************
3793 creates a stub routine which calls a native method
3795 *******************************************************************************/
3797 #define NATIVESTUBSIZE 420
/* createnativestub: build a stub that adapts the JIT calling convention to
   a JNI-style native function f for method m.  The stub:
     - (static methods) triggers class initialization via asm_check_clinit,
     - optionally traces arguments (verbose path -- enclosing condition is
       elided in this chunk),
     - saves callee-saved XMM registers,
     - shifts the integer argument registers right by one (two for static
       methods) to make room for the JNI env pointer (and class pointer),
     - calls f, then checks _exceptionptr and branches to
       asm_handle_nat_exception if an exception is pending.
   Returns the stub address (return statement not visible here). */
3799 u1 *createnativestub(functionptr f, methodinfo *m)
3801 u1 *s = CNEW(u1, NATIVESTUBSIZE); /* memory to hold the stub */
/* NOTE(review): stackframesize is only assigned in the "save argument
   registers" branch below; the elided else-path must set it (e.g. to 0/1)
   or the `if (stackframesize)` test later reads an uninitialized int --
   verify against the full source. */
3802 int stackframesize; /* size of stackframe if needed */
3803 mcodeptr = s; /* make macros work */
3806 descriptor2types(m); /* set paramcount and paramtypes */
3808 /* if function is static, check for initialized */
3810 if (m->flags & ACC_STATIC) {
3811 /* if class isn't yet initialized, do it */
3812 if (!m->class->initialized) {
3813 /* call helper function which patches this code */
3814 x86_64_mov_imm_reg((u8) m->class, REG_ITMP1);
3815 x86_64_mov_imm_reg((u8) asm_check_clinit, REG_ITMP2);
3816 x86_64_call_reg(REG_ITMP2);
/* --- argument tracing: spill all argument registers, call
   builtin_trace_args, then reload them.  (6 int + 8 flt + 1 slot.) */
3823 x86_64_alu_imm_reg(X86_64_SUB, (6 + 8 + 1) * 8, REG_SP);
3825 x86_64_mov_reg_membase(argintregs[0], REG_SP, 1 * 8);
3826 x86_64_mov_reg_membase(argintregs[1], REG_SP, 2 * 8);
3827 x86_64_mov_reg_membase(argintregs[2], REG_SP, 3 * 8);
3828 x86_64_mov_reg_membase(argintregs[3], REG_SP, 4 * 8);
3829 x86_64_mov_reg_membase(argintregs[4], REG_SP, 5 * 8);
3830 x86_64_mov_reg_membase(argintregs[5], REG_SP, 6 * 8);
3832 x86_64_movq_reg_membase(argfltregs[0], REG_SP, 7 * 8);
3833 x86_64_movq_reg_membase(argfltregs[1], REG_SP, 8 * 8);
3834 x86_64_movq_reg_membase(argfltregs[2], REG_SP, 9 * 8);
3835 x86_64_movq_reg_membase(argfltregs[3], REG_SP, 10 * 8);
3836 /* x86_64_movq_reg_membase(argfltregs[4], REG_SP, 11 * 8); */
3837 /* x86_64_movq_reg_membase(argfltregs[5], REG_SP, 12 * 8); */
3838 /* x86_64_movq_reg_membase(argfltregs[6], REG_SP, 13 * 8); */
3839 /* x86_64_movq_reg_membase(argfltregs[7], REG_SP, 14 * 8); */
3841 /* show integer hex code for float arguments */
3842 for (p = 0, l = 0; p < m->paramcount; p++) {
3843 if (IS_FLT_DBL_TYPE(m->paramtypes[p])) {
/* shift the following int argument registers up by one so the float
   bits can be shown in an integer register */
3844 for (s1 = (m->paramcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : m->paramcount - 2; s1 >= p; s1--) {
3845 x86_64_mov_reg_reg(argintregs[s1], argintregs[s1 + 1]);
3848 x86_64_movd_freg_reg(argfltregs[l], argintregs[p]);
3853 x86_64_mov_imm_reg((s8) m, REG_ITMP1);
3854 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, 0 * 8);
3855 x86_64_mov_imm_reg((s8) builtin_trace_args, REG_ITMP1);
3856 x86_64_call_reg(REG_ITMP1);
/* restore the spilled argument registers after tracing */
3858 x86_64_mov_membase_reg(REG_SP, 1 * 8, argintregs[0]);
3859 x86_64_mov_membase_reg(REG_SP, 2 * 8, argintregs[1]);
3860 x86_64_mov_membase_reg(REG_SP, 3 * 8, argintregs[2]);
3861 x86_64_mov_membase_reg(REG_SP, 4 * 8, argintregs[3]);
3862 x86_64_mov_membase_reg(REG_SP, 5 * 8, argintregs[4]);
3863 x86_64_mov_membase_reg(REG_SP, 6 * 8, argintregs[5]);
3865 x86_64_movq_membase_reg(REG_SP, 7 * 8, argfltregs[0]);
3866 x86_64_movq_membase_reg(REG_SP, 8 * 8, argfltregs[1]);
3867 x86_64_movq_membase_reg(REG_SP, 9 * 8, argfltregs[2]);
3868 x86_64_movq_membase_reg(REG_SP, 10 * 8, argfltregs[3]);
3869 /* x86_64_movq_membase_reg(REG_SP, 11 * 8, argfltregs[4]); */
3870 /* x86_64_movq_membase_reg(REG_SP, 12 * 8, argfltregs[5]); */
3871 /* x86_64_movq_membase_reg(REG_SP, 13 * 8, argfltregs[6]); */
3872 /* x86_64_movq_membase_reg(REG_SP, 14 * 8, argfltregs[7]); */
3874 x86_64_alu_imm_reg(X86_64_ADD, (6 + 8 + 1) * 8, REG_SP);
/* --- frame for the callee-saved XMM registers (6 slots + padding) */
3878 x86_64_alu_imm_reg(X86_64_SUB, 7 * 8, REG_SP); /* keep stack 16-byte aligned */
3880 /* save callee saved float registers */
3881 x86_64_movq_reg_membase(XMM15, REG_SP, 0 * 8);
3882 x86_64_movq_reg_membase(XMM14, REG_SP, 1 * 8);
3883 x86_64_movq_reg_membase(XMM13, REG_SP, 2 * 8);
3884 x86_64_movq_reg_membase(XMM12, REG_SP, 3 * 8);
3885 x86_64_movq_reg_membase(XMM11, REG_SP, 4 * 8);
3886 x86_64_movq_reg_membase(XMM10, REG_SP, 5 * 8);
3889 /* save argument registers on stack -- if we have to */
3890 if ((m->flags & ACC_STATIC && m->paramcount > (INT_ARG_CNT - 2)) || m->paramcount > (INT_ARG_CNT - 1)) {
/* static methods need 2 extra leading args (env, class), others 1 (env) */
3892 int paramshiftcnt = (m->flags & ACC_STATIC) ? 2 : 1;
3893 int stackparamcnt = (m->paramcount > INT_ARG_CNT) ? m->paramcount - INT_ARG_CNT : 0;
3895 stackframesize = stackparamcnt + paramshiftcnt;
3897 /* keep stack 16-byte aligned */
3898 if ((stackframesize % 2) == 0) stackframesize++;
3900 x86_64_alu_imm_reg(X86_64_SUB, stackframesize * 8, REG_SP);
3902 /* copy stack arguments into new stack frame -- if any */
3903 for (i = 0; i < stackparamcnt; i++) {
3904 x86_64_mov_membase_reg(REG_SP, (stackparamcnt + 1 + i) * 8, REG_ITMP1);
3905 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
/* spill the register args displaced by the env/class shift */
3908 if (m->flags & ACC_STATIC) {
3909 x86_64_mov_reg_membase(argintregs[5], REG_SP, 1 * 8);
3910 x86_64_mov_reg_membase(argintregs[4], REG_SP, 0 * 8);
3913 x86_64_mov_reg_membase(argintregs[5], REG_SP, 0 * 8);
3917 /* keep stack 16-byte aligned -- this is essential for x86_64 */
3918 x86_64_alu_imm_reg(X86_64_SUB, 8, REG_SP);
/* shift integer argument registers to make room for env (and class) */
3922 if (m->flags & ACC_STATIC) {
3923 x86_64_mov_reg_reg(argintregs[3], argintregs[5]);
3924 x86_64_mov_reg_reg(argintregs[2], argintregs[4]);
3925 x86_64_mov_reg_reg(argintregs[1], argintregs[3]);
3926 x86_64_mov_reg_reg(argintregs[0], argintregs[2]);
3928 /* put class into second argument register */
3929 x86_64_mov_imm_reg((s8) m->class, argintregs[1]);
3932 x86_64_mov_reg_reg(argintregs[4], argintregs[5]);
3933 x86_64_mov_reg_reg(argintregs[3], argintregs[4]);
3934 x86_64_mov_reg_reg(argintregs[2], argintregs[3]);
3935 x86_64_mov_reg_reg(argintregs[1], argintregs[2]);
3936 x86_64_mov_reg_reg(argintregs[0], argintregs[1]);
3939 /* put env into first argument register */
3940 x86_64_mov_imm_reg((s8) &env, argintregs[0]);
/* call the native function itself */
3942 x86_64_mov_imm_reg((s8) f, REG_ITMP1);
3943 x86_64_call_reg(REG_ITMP1);
3945 /* remove stackframe if there is one */
3946 if (stackframesize) {
3947 x86_64_alu_imm_reg(X86_64_ADD, stackframesize * 8, REG_SP);
/* --- method-exit tracing: preserve the return values around the call */
3951 x86_64_alu_imm_reg(X86_64_SUB, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
3953 x86_64_mov_reg_membase(REG_RESULT, REG_SP, 0 * 8);
3954 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, 1 * 8);
3956 x86_64_mov_imm_reg((s8) m, argintregs[0]);
3957 x86_64_mov_reg_reg(REG_RESULT, argintregs[1]);
3958 M_FLTMOVE(REG_FRESULT, argfltregs[0]);
3959 M_FLTMOVE(REG_FRESULT, argfltregs[1]);
3961 x86_64_mov_imm_reg((s8) builtin_displaymethodstop, REG_ITMP1);
3962 x86_64_call_reg(REG_ITMP1);
3964 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_RESULT);
3965 x86_64_movq_membase_reg(REG_SP, 1 * 8, REG_FRESULT);
3967 x86_64_alu_imm_reg(X86_64_ADD, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
3971 /* restore callee saved registers */
3972 x86_64_movq_membase_reg(REG_SP, 0 * 8, XMM15);
3973 x86_64_movq_membase_reg(REG_SP, 1 * 8, XMM14);
3974 x86_64_movq_membase_reg(REG_SP, 2 * 8, XMM13);
3975 x86_64_movq_membase_reg(REG_SP, 3 * 8, XMM12);
3976 x86_64_movq_membase_reg(REG_SP, 4 * 8, XMM11);
3977 x86_64_movq_membase_reg(REG_SP, 5 * 8, XMM10);
3979 x86_64_alu_imm_reg(X86_64_ADD, 7 * 8, REG_SP); /* keep stack 16-byte aligned */
/* --- pending-exception check: load *(_exceptionptr) and skip the handler
   tail (the ret, elided here) if non-zero */
3982 x86_64_mov_imm_reg((s8) &_exceptionptr, REG_ITMP3);
3983 x86_64_mov_membase_reg(REG_ITMP3, 0, REG_ITMP3);
3984 x86_64_test_reg_reg(REG_ITMP3, REG_ITMP3);
3985 x86_64_jcc(X86_64_CC_NE, 1);
/* exception path: hand the exception pointer to the asm handler */
3989 x86_64_mov_reg_reg(REG_ITMP3, REG_ITMP1_XPTR);
3990 x86_64_mov_imm_reg((s8) &_exceptionptr, REG_ITMP3);
3991 x86_64_alu_reg_reg(X86_64_XOR, REG_ITMP2, REG_ITMP2);
3992 x86_64_mov_reg_membase(REG_ITMP2, REG_ITMP3, 0); /* clear exception pointer */
3994 x86_64_mov_membase_reg(REG_SP, 0, REG_ITMP2_XPC); /* get return address from stack */
3995 x86_64_alu_imm_reg(X86_64_SUB, 3, REG_ITMP2_XPC); /* callq */
3997 x86_64_mov_imm_reg((s8) asm_handle_nat_exception, REG_ITMP3);
3998 x86_64_jmp_reg(REG_ITMP3);
/* debug/statistics tail (surrounding #ifdefs elided in this chunk).
   NOTE(review): "%d" is used with a long-valued expression -- should be
   "%ld" to match the argument type. */
4002 static int stubprinted;
4004 printf("stubsize: %d\n", ((long)mcodeptr - (long) s));
4010 count_nstub_len += NATIVESTUBSIZE;
4017 /* function: removenativestub **************************************************
4019 removes a previously created native-stub from memory
4021 *******************************************************************************/
/* removenativestub: free the NATIVESTUBSIZE bytes allocated by
   createnativestub. */
4023 void removenativestub(u1 *stub)
4025 CFREE(stub, NATIVESTUBSIZE);
4029 /* code generation functions */
/* x86_64_emit_ialu: emit a 32-bit ALU operation d = op1 `alu_op` op2, where
   op1 = src->prev, op2 = src, d = iptr->dst.  Each operand's regoff is a
   register number, or -- when its INMEMORY flag is set -- a stack-slot index
   (byte offset = regoff * 8 from REG_SP).  The branches below pick the
   shortest encoding for every register/memory combination, using REG_ITMP1
   as scratch when both sides are in memory. */
4031 void x86_64_emit_ialu(s4 alu_op, stackptr src, instruction *iptr)
4033 s4 s1 = src->prev->regoff;
4034 s4 s2 = src->regoff;
4035 s4 d = iptr->dst->regoff;
4037 if (iptr->dst->flags & INMEMORY) {
4038 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* both sources in memory; when d aliases s2 (or s1 below) operate
   directly on the destination slot */
4040 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4041 x86_64_alul_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4043 } else if (s1 == d) {
4044 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4045 x86_64_alul_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
/* general mem/mem case: compute in REG_ITMP1, store to d's slot */
4048 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4049 x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, REG_ITMP1);
4050 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
4053 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
/* op1 in a register, op2 in memory */
4055 x86_64_alul_reg_membase(alu_op, s1, REG_SP, d * 8);
4058 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4059 x86_64_alul_reg_reg(alu_op, s1, REG_ITMP1);
4060 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
4063 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* op1 in memory, op2 in a register */
4065 x86_64_alul_reg_membase(alu_op, s2, REG_SP, d * 8);
4068 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4069 x86_64_alul_reg_reg(alu_op, s2, REG_ITMP1);
4070 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
/* both sources in registers, destination in memory */
4074 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4075 x86_64_alul_reg_membase(alu_op, s2, REG_SP, d * 8);
/* destination is a register */
4079 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4080 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4081 x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, d);
4083 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4085 x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, d);
4087 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4089 x86_64_alul_membase_reg(alu_op, REG_SP, s1 * 8, d);
/* all-register case (moves into d elided in this chunk) */
4093 x86_64_alul_reg_reg(alu_op, s1, d);
4097 x86_64_alul_reg_reg(alu_op, s2, d);
/* x86_64_emit_lalu: 64-bit twin of x86_64_emit_ialu -- emit
   d = op1 `alu_op` op2 using the quadword forms (x86_64_alu_* instead of
   x86_64_alul_*).  Same INMEMORY/register dispatch; REG_ITMP1 is the
   scratch register for the memory/memory cases. */
4107 s4 s1 = src->prev->regoff;
4108 s4 s2 = src->regoff;
4109 s4 d = iptr->dst->regoff;
4111 if (iptr->dst->flags & INMEMORY) {
4112 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4114 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4115 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4117 } else if (s1 == d) {
4118 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4119 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
/* general mem/mem case: compute in REG_ITMP1, store to d's slot */
4122 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4123 x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, REG_ITMP1);
4124 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4127 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4129 x86_64_alu_reg_membase(alu_op, s1, REG_SP, d * 8);
4132 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4133 x86_64_alu_reg_reg(alu_op, s1, REG_ITMP1);
4134 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4137 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4139 x86_64_alu_reg_membase(alu_op, s2, REG_SP, d * 8);
4142 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4143 x86_64_alu_reg_reg(alu_op, s2, REG_ITMP1);
4144 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
/* both sources in registers, destination in memory */
4148 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4149 x86_64_alu_reg_membase(alu_op, s2, REG_SP, d * 8);
/* destination is a register */
4153 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4154 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4155 x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, d);
4157 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4159 x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, d);
4161 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4163 x86_64_alu_membase_reg(alu_op, REG_SP, s1 * 8, d);
/* all-register case (moves into d elided in this chunk) */
4167 x86_64_alu_reg_reg(alu_op, s1, d);
4171 x86_64_alu_reg_reg(alu_op, s2, d);
/* x86_64_emit_ialuconst: emit a 32-bit ALU operation with an immediate
   second operand, d = src `alu_op` iptr->val.i.  regoff is a register
   number or (with INMEMORY) a stack-slot index (offset = regoff * 8). */
4179 void x86_64_emit_ialuconst(s4 alu_op, stackptr src, instruction *iptr)
4181 s4 s1 = src->regoff;
4182 s4 d = iptr->dst->regoff;
4184 if (iptr->dst->flags & INMEMORY) {
4185 if (src->flags & INMEMORY) {
/* when s1 == d, operate directly on the destination slot */
4187 x86_64_alul_imm_membase(alu_op, iptr->val.i, REG_SP, d * 8);
4190 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4191 x86_64_alul_imm_reg(alu_op, iptr->val.i, REG_ITMP1);
4192 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
/* source in a register, destination in memory */
4196 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4197 x86_64_alul_imm_membase(alu_op, iptr->val.i, REG_SP, d * 8);
/* destination in a register */
4201 if (src->flags & INMEMORY) {
4202 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4203 x86_64_alul_imm_reg(alu_op, iptr->val.i, d);
/* register-to-register case (move into d elided in this chunk) */
4207 x86_64_alul_imm_reg(alu_op, iptr->val.i, d);
/* x86_64_emit_laluconst: 64-bit ALU operation with an immediate,
   d = src `alu_op` iptr->val.l.  x86_64 ALU instructions only take 32-bit
   sign-extended immediates, so values outside that range are first
   materialized in a temp register via mov imm64 (x86_64_is_imm32 check). */
4214 void x86_64_emit_laluconst(s4 alu_op, stackptr src, instruction *iptr)
4216 s4 s1 = src->regoff;
4217 s4 d = iptr->dst->regoff;
4219 if (iptr->dst->flags & INMEMORY) {
4220 if (src->flags & INMEMORY) {
/* s1 == d: operate directly on the destination stack slot */
4222 if (x86_64_is_imm32(iptr->val.l)) {
4223 x86_64_alu_imm_membase(alu_op, iptr->val.l, REG_SP, d * 8);
4226 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4227 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
/* s1 != d: compute in REG_ITMP1, then store to d's slot */
4231 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4233 if (x86_64_is_imm32(iptr->val.l)) {
4234 x86_64_alu_imm_reg(alu_op, iptr->val.l, REG_ITMP1);
4237 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP2);
4238 x86_64_alu_reg_reg(alu_op, REG_ITMP2, REG_ITMP1);
4240 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
/* source in a register, destination in memory */
4244 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4246 if (x86_64_is_imm32(iptr->val.l)) {
4247 x86_64_alu_imm_membase(alu_op, iptr->val.l, REG_SP, d * 8);
4250 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4251 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
/* destination in a register */
4256 if (src->flags & INMEMORY) {
4257 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4263 if (x86_64_is_imm32(iptr->val.l)) {
4264 x86_64_alu_imm_reg(alu_op, iptr->val.l, d);
4267 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4268 x86_64_alu_reg_reg(alu_op, REG_ITMP1, d);
/* x86_64_emit_ishift: emit a 32-bit variable shift d = op1 `shift_op` op2.
   x86 variable shifts require the count in CL, so RCX is saved in REG_ITMP1
   on entry and restored afterwards; the shift count (s2) is moved into RCX
   before every shift. */
4275 void x86_64_emit_ishift(s4 shift_op, stackptr src, instruction *iptr)
4277 s4 s1 = src->prev->regoff;
4278 s4 s2 = src->regoff;
4279 s4 d = iptr->dst->regoff;
4281 M_INTMOVE(RCX, REG_ITMP1); /* save RCX */
4282 if (iptr->dst->flags & INMEMORY) {
4283 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* s1 == d: shift the destination slot in place */
4285 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4286 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
/* s1 != d: shift in REG_ITMP2, then store to d's slot */
4289 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4290 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4291 x86_64_shiftl_reg(shift_op, REG_ITMP2);
4292 x86_64_movl_reg_membase(REG_ITMP2, REG_SP, d * 8);
4295 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4296 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4297 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4298 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4300 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* count already in a register (move into RCX elided in this chunk) */
4303 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4307 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4308 x86_64_shiftl_reg(shift_op, REG_ITMP2);
4309 x86_64_movl_reg_membase(REG_ITMP2, REG_SP, d * 8);
/* both sources in registers, destination in memory */
4314 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4315 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4317 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* destination is a register */
4324 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4325 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4326 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4327 x86_64_shiftl_reg(shift_op, d);
4329 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4330 M_INTMOVE(s1, d); /* maybe src is RCX */
4331 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4332 x86_64_shiftl_reg(shift_op, d);
4334 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4336 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4337 x86_64_shiftl_reg(shift_op, d);
/* all-register case (RCX juggling partially elided in this chunk) */
4348 x86_64_shiftl_reg(shift_op, d);
4352 M_INTMOVE(REG_ITMP3, RCX);
4355 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* x86_64_emit_lshift: 64-bit twin of x86_64_emit_ishift -- variable shift
   d = op1 `shift_op` op2 using the quadword shift emitters.  RCX is saved
   in REG_ITMP1 and restored, since the hardware shift count lives in CL. */
4364 s4 s1 = src->prev->regoff;
4365 s4 s2 = src->regoff;
4366 s4 d = iptr->dst->regoff;
4368 M_INTMOVE(RCX, REG_ITMP1); /* save RCX */
4369 if (iptr->dst->flags & INMEMORY) {
4370 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* s1 == d: shift the destination slot in place */
4372 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4373 x86_64_shift_membase(shift_op, REG_SP, d * 8);
/* s1 != d: shift in REG_ITMP2, then store to d's slot */
4376 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4377 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4378 x86_64_shift_reg(shift_op, REG_ITMP2);
4379 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, d * 8);
4382 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4383 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4384 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4385 x86_64_shift_membase(shift_op, REG_SP, d * 8);
4387 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* count already in a register (move into RCX elided in this chunk) */
4390 x86_64_shift_membase(shift_op, REG_SP, d * 8);
4394 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4395 x86_64_shift_reg(shift_op, REG_ITMP2);
4396 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, d * 8);
/* both sources in registers, destination in memory */
4401 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4402 x86_64_shift_membase(shift_op, REG_SP, d * 8);
4404 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* destination is a register */
4411 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4412 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4413 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4414 x86_64_shift_reg(shift_op, d);
4416 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4417 M_INTMOVE(s1, d); /* maybe src is RCX */
4418 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4419 x86_64_shift_reg(shift_op, d);
4421 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4423 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4424 x86_64_shift_reg(shift_op, d);
/* all-register case (RCX juggling partially elided in this chunk) */
4434 x86_64_shift_reg(shift_op, d);
4438 M_INTMOVE(REG_ITMP3, RCX);
4441 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* x86_64_emit_ishiftconst: 32-bit shift by the constant iptr->val.i,
   d = src `shift_op` imm.  No RCX juggling needed since the count is an
   immediate.  regoff semantics as elsewhere: register number, or stack
   slot index when INMEMORY (offset = regoff * 8). */
4448 void x86_64_emit_ishiftconst(s4 shift_op, stackptr src, instruction *iptr)
4450 s4 s1 = src->regoff;
4451 s4 d = iptr->dst->regoff;
4453 if ((src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
/* s1 == d: shift the slot in place */
4455 x86_64_shiftl_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
4458 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4459 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, REG_ITMP1);
4460 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
4463 } else if ((src->flags & INMEMORY) && !(iptr->dst->flags & INMEMORY)) {
4464 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4465 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, d);
4467 } else if (!(src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
4468 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4469 x86_64_shiftl_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
/* register-to-register case (move into d elided in this chunk) */
4473 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, d);
/* x86_64_emit_lshiftconst: 64-bit shift by the constant iptr->val.i,
   quadword twin of x86_64_emit_ishiftconst. */
4479 void x86_64_emit_lshiftconst(s4 shift_op, stackptr src, instruction *iptr)
4481 s4 s1 = src->regoff;
4482 s4 d = iptr->dst->regoff;
4484 if ((src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
/* s1 == d: shift the slot in place */
4486 x86_64_shift_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
4489 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4490 x86_64_shift_imm_reg(shift_op, iptr->val.i, REG_ITMP1);
4491 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4494 } else if ((src->flags & INMEMORY) && !(iptr->dst->flags & INMEMORY)) {
4495 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4496 x86_64_shift_imm_reg(shift_op, iptr->val.i, d);
4498 } else if (!(src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
4499 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4500 x86_64_shift_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
/* register-to-register case (move into d elided in this chunk) */
4504 x86_64_shift_imm_reg(shift_op, iptr->val.i, d);
/* x86_64_emit_ifcc: compare the 32-bit value of src with the constant
   iptr->val.i and emit a conditional branch (condition if_op) to the basic
   block given by iptr->op1.  The branch target displacement is 0 for now;
   codegen_addreference records the site for later patching. */
4510 void x86_64_emit_ifcc(s4 if_op, stackptr src, instruction *iptr)
4512 if (src->flags & INMEMORY) {
4513 x86_64_alul_imm_membase(X86_64_CMP, iptr->val.i, REG_SP, src->regoff * 8);
4516 x86_64_alul_imm_reg(X86_64_CMP, iptr->val.i, src->regoff);
4518 x86_64_jcc(if_op, 0);
4519 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
/* x86_64_emit_if_lcc: compare the 64-bit value of src with the constant
   iptr->val.l and emit a patched-later conditional branch to block
   iptr->op1.  Constants outside the signed-32-bit CMP immediate range are
   materialized in REG_ITMP1 first (x86_64_is_imm32 check). */
4524 void x86_64_emit_if_lcc(s4 if_op, stackptr src, instruction *iptr)
4526 s4 s1 = src->regoff;
4528 if (src->flags & INMEMORY) {
4529 if (x86_64_is_imm32(iptr->val.l)) {
4530 x86_64_alu_imm_membase(X86_64_CMP, iptr->val.l, REG_SP, s1 * 8);
4533 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4534 x86_64_alu_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
4538 if (x86_64_is_imm32(iptr->val.l)) {
4539 x86_64_alu_imm_reg(X86_64_CMP, iptr->val.l, s1);
4542 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4543 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP1, s1);
4546 x86_64_jcc(if_op, 0);
4547 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
/* x86_64_emit_if_icmpcc: compare two 32-bit stack operands
   (op1 = src->prev, op2 = src) and emit a patched-later conditional branch
   to block iptr->op1.  Each operand may be in a register or on the stack
   (INMEMORY, slot = regoff, offset = regoff * 8). */
4552 void x86_64_emit_if_icmpcc(s4 if_op, stackptr src, instruction *iptr)
4554 s4 s1 = src->prev->regoff;
4555 s4 s2 = src->regoff;
4557 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4558 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4559 x86_64_alul_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
4561 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4562 x86_64_alul_membase_reg(X86_64_CMP, REG_SP, s2 * 8, s1);
4564 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4565 x86_64_alul_reg_membase(X86_64_CMP, s2, REG_SP, s1 * 8);
4568 x86_64_alul_reg_reg(X86_64_CMP, s2, s1);
4570 x86_64_jcc(if_op, 0);
4571 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
/* x86_64_emit_if_lcmpcc: 64-bit twin of x86_64_emit_if_icmpcc -- compare
   two quadword operands and emit a patched-later conditional branch to
   block iptr->op1. */
4576 void x86_64_emit_if_lcmpcc(s4 if_op, stackptr src, instruction *iptr)
4578 s4 s1 = src->prev->regoff;
4579 s4 s2 = src->regoff;
4581 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4582 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4583 x86_64_alu_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
4585 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4586 x86_64_alu_membase_reg(X86_64_CMP, REG_SP, s2 * 8, s1);
4588 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4589 x86_64_alu_reg_membase(X86_64_CMP, s2, REG_SP, s1 * 8);
4592 x86_64_alu_reg_reg(X86_64_CMP, s2, s1);
4594 x86_64_jcc(if_op, 0);
4595 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
/* mov reg -> dreg, 64-bit register-to-register (REX.W + 89 /r). */
4605 void x86_64_mov_reg_reg(s8 reg, s8 dreg) {
4606 x86_64_emit_rex(1,(reg),0,(dreg));
4607 *(mcodeptr++) = 0x89;
4608 x86_64_emit_reg((reg),(dreg));
/* mov imm64 -> reg (REX.W + B8+rd, full 8-byte immediate; 10 bytes total). */
4612 void x86_64_mov_imm_reg(s8 imm, s8 reg) {
4613 x86_64_emit_rex(1,0,0,(reg));
4614 *(mcodeptr++) = 0xb8 + ((reg) & 0x07);
4615 x86_64_emit_imm64((imm));
/* mov imm32 -> reg, 32-bit form (B8+rd with 4-byte immediate; the high
   dword of the destination is zeroed by the hardware). */
4619 void x86_64_movl_imm_reg(s8 imm, s8 reg) {
4620 x86_64_emit_rex(0,0,0,(reg));
4621 *(mcodeptr++) = 0xb8 + ((reg) & 0x07);
4622 x86_64_emit_imm32((imm));
/* mov [basereg + disp] -> reg, 64-bit load (REX.W + 8B /r). */
4626 void x86_64_mov_membase_reg(s8 basereg, s8 disp, s8 reg) {
4627 x86_64_emit_rex(1,(reg),0,(basereg));
4628 *(mcodeptr++) = 0x8b;
4629 x86_64_emit_membase((basereg),(disp),(reg));
/* mov [basereg + disp] -> reg, 32-bit load (8B /r without REX.W). */
4633 void x86_64_movl_membase_reg(s8 basereg, s8 disp, s8 reg) {
4634 x86_64_emit_rex(0,(reg),0,(basereg));
4635 *(mcodeptr++) = 0x8b;
4636 x86_64_emit_membase((basereg),(disp),(reg));
4641 * This variant is for INVOKEVIRTUAL/INVOKEINTERFACE call sites: it always
4642 * encodes the membase displacement as a full 32-bit immediate, so the
4642 * instruction has a fixed length and can be patched in place.
/* mov [basereg + disp32] -> reg, 64-bit load with a forced 32-bit
   displacement (ModRM mod=2), giving a fixed instruction length. */
4644 void x86_64_mov_membase32_reg(s8 basereg, s8 disp, s8 reg) {
4645 x86_64_emit_rex(1,(reg),0,(basereg));
4646 *(mcodeptr++) = 0x8b;
4647 x86_64_address_byte(2, (reg), (basereg));
4648 x86_64_emit_imm32((disp));
/* mov reg -> [basereg + disp], 64-bit store (REX.W + 89 /r). */
4652 void x86_64_mov_reg_membase(s8 reg, s8 basereg, s8 disp) {
4653 x86_64_emit_rex(1,(reg),0,(basereg));
4654 *(mcodeptr++) = 0x89;
4655 x86_64_emit_membase((basereg),(disp),(reg));
/* mov reg -> [basereg + disp], 32-bit store (89 /r without REX.W). */
4659 void x86_64_movl_reg_membase(s8 reg, s8 basereg, s8 disp) {
4660 x86_64_emit_rex(0,(reg),0,(basereg));
4661 *(mcodeptr++) = 0x89;
4662 x86_64_emit_membase((basereg),(disp),(reg));
/* mov [basereg + indexreg*2^scale + disp] -> reg, 64-bit indexed load
   (REX.W + 8B /r with SIB byte). */
4666 void x86_64_mov_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4667 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4668 *(mcodeptr++) = 0x8b;
4669 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* mov [basereg + indexreg*2^scale + disp] -> reg, 32-bit indexed load. */
4673 void x86_64_movl_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4674 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4675 *(mcodeptr++) = 0x8b;
4676 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
/* mov reg -> [basereg + indexreg*2^scale + disp], 64-bit indexed store. */
4680 void x86_64_mov_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4681 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4682 *(mcodeptr++) = 0x89;
4683 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4687 void x86_64_movl_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4688 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4689 *(mcodeptr++) = 0x89;
4690 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4694 void x86_64_movw_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4695 *(mcodeptr++) = 0x66;
4696 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4697 *(mcodeptr++) = 0x89;
4698 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4702 void x86_64_movb_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4703 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4704 *(mcodeptr++) = 0x88;
4705 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4709 void x86_64_mov_imm_membase(s8 imm, s8 basereg, s8 disp) {
4710 x86_64_emit_rex(1,0,0,(basereg));
4711 *(mcodeptr++) = 0xc7;
4712 x86_64_emit_membase((basereg),(disp),0);
4713 x86_64_emit_imm32((imm));
4717 void x86_64_movl_imm_membase(s8 imm, s8 basereg, s8 disp) {
4718 x86_64_emit_rex(0,0,0,(basereg));
4719 *(mcodeptr++) = 0xc7;
4720 x86_64_emit_membase((basereg),(disp),0);
4721 x86_64_emit_imm32((imm));
4725 void x86_64_movsbq_reg_reg(s8 reg, s8 dreg) {
4726 x86_64_emit_rex(1,(dreg),0,(reg));
4727 *(mcodeptr++) = 0x0f;
4728 *(mcodeptr++) = 0xbe;
4729 /* XXX: why do reg and dreg have to be exchanged */
4730 x86_64_emit_reg((dreg),(reg));
4734 void x86_64_movsbq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4735 x86_64_emit_rex(1,(dreg),0,(basereg));
4736 *(mcodeptr++) = 0x0f;
4737 *(mcodeptr++) = 0xbe;
4738 x86_64_emit_membase((basereg),(disp),(dreg));
4742 void x86_64_movswq_reg_reg(s8 reg, s8 dreg) {
4743 x86_64_emit_rex(1,(dreg),0,(reg));
4744 *(mcodeptr++) = 0x0f;
4745 *(mcodeptr++) = 0xbf;
4746 /* XXX: why do reg and dreg have to be exchanged */
4747 x86_64_emit_reg((dreg),(reg));
4751 void x86_64_movswq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4752 x86_64_emit_rex(1,(dreg),0,(basereg));
4753 *(mcodeptr++) = 0x0f;
4754 *(mcodeptr++) = 0xbf;
4755 x86_64_emit_membase((basereg),(disp),(dreg));
4759 void x86_64_movslq_reg_reg(s8 reg, s8 dreg) {
4760 x86_64_emit_rex(1,(dreg),0,(reg));
4761 *(mcodeptr++) = 0x63;
4762 /* XXX: why do reg and dreg have to be exchanged */
4763 x86_64_emit_reg((dreg),(reg));
4767 void x86_64_movslq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4768 x86_64_emit_rex(1,(dreg),0,(basereg));
4769 *(mcodeptr++) = 0x63;
4770 x86_64_emit_membase((basereg),(disp),(dreg));
4774 void x86_64_movzwq_reg_reg(s8 reg, s8 dreg) {
4775 x86_64_emit_rex(1,(dreg),0,(reg));
4776 *(mcodeptr++) = 0x0f;
4777 *(mcodeptr++) = 0xb7;
4778 /* XXX: why do reg and dreg have to be exchanged */
4779 x86_64_emit_reg((dreg),(reg));
4783 void x86_64_movzwq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4784 x86_64_emit_rex(1,(dreg),0,(basereg));
4785 *(mcodeptr++) = 0x0f;
4786 *(mcodeptr++) = 0xb7;
4787 x86_64_emit_membase((basereg),(disp),(dreg));
4791 void x86_64_movswq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4792 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4793 *(mcodeptr++) = 0x0f;
4794 *(mcodeptr++) = 0xbf;
4795 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4799 void x86_64_movsbq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4800 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4801 *(mcodeptr++) = 0x0f;
4802 *(mcodeptr++) = 0xbe;
4803 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4807 void x86_64_movzwq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4808 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4809 *(mcodeptr++) = 0x0f;
4810 *(mcodeptr++) = 0xb7;
4811 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4819 void x86_64_alu_reg_reg(s8 opc, s8 reg, s8 dreg) {
4820 x86_64_emit_rex(1,(reg),0,(dreg));
4821 *(mcodeptr++) = (((opc)) << 3) + 1;
4822 x86_64_emit_reg((reg),(dreg));
4826 void x86_64_alul_reg_reg(s8 opc, s8 reg, s8 dreg) {
4827 x86_64_emit_rex(0,(reg),0,(dreg));
4828 *(mcodeptr++) = (((opc)) << 3) + 1;
4829 x86_64_emit_reg((reg),(dreg));
4833 void x86_64_alu_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp) {
4834 x86_64_emit_rex(1,(reg),0,(basereg));
4835 *(mcodeptr++) = (((opc)) << 3) + 1;
4836 x86_64_emit_membase((basereg),(disp),(reg));
4840 void x86_64_alul_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp) {
4841 x86_64_emit_rex(0,(reg),0,(basereg));
4842 *(mcodeptr++) = (((opc)) << 3) + 1;
4843 x86_64_emit_membase((basereg),(disp),(reg));
4847 void x86_64_alu_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg) {
4848 x86_64_emit_rex(1,(reg),0,(basereg));
4849 *(mcodeptr++) = (((opc)) << 3) + 3;
4850 x86_64_emit_membase((basereg),(disp),(reg));
4854 void x86_64_alul_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg) {
4855 x86_64_emit_rex(0,(reg),0,(basereg));
4856 *(mcodeptr++) = (((opc)) << 3) + 3;
4857 x86_64_emit_membase((basereg),(disp),(reg));
4861 void x86_64_alu_imm_reg(s8 opc, s8 imm, s8 dreg) {
4862 if (x86_64_is_imm8(imm)) {
4863 x86_64_emit_rex(1,0,0,(dreg));
4864 *(mcodeptr++) = 0x83;
4865 x86_64_emit_reg((opc),(dreg));
4866 x86_64_emit_imm8((imm));
4868 x86_64_emit_rex(1,0,0,(dreg));
4869 *(mcodeptr++) = 0x81;
4870 x86_64_emit_reg((opc),(dreg));
4871 x86_64_emit_imm32((imm));
4876 void x86_64_alul_imm_reg(s8 opc, s8 imm, s8 dreg) {
4877 if (x86_64_is_imm8(imm)) {
4878 x86_64_emit_rex(0,0,0,(dreg));
4879 *(mcodeptr++) = 0x83;
4880 x86_64_emit_reg((opc),(dreg));
4881 x86_64_emit_imm8((imm));
4883 x86_64_emit_rex(0,0,0,(dreg));
4884 *(mcodeptr++) = 0x81;
4885 x86_64_emit_reg((opc),(dreg));
4886 x86_64_emit_imm32((imm));
4891 void x86_64_alu_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
4892 if (x86_64_is_imm8(imm)) {
4893 x86_64_emit_rex(1,(basereg),0,0);
4894 *(mcodeptr++) = 0x83;
4895 x86_64_emit_membase((basereg),(disp),(opc));
4896 x86_64_emit_imm8((imm));
4898 x86_64_emit_rex(1,(basereg),0,0);
4899 *(mcodeptr++) = 0x81;
4900 x86_64_emit_membase((basereg),(disp),(opc));
4901 x86_64_emit_imm32((imm));
4906 void x86_64_alul_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
4907 if (x86_64_is_imm8(imm)) {
4908 x86_64_emit_rex(0,(basereg),0,0);
4909 *(mcodeptr++) = 0x83;
4910 x86_64_emit_membase((basereg),(disp),(opc));
4911 x86_64_emit_imm8((imm));
4913 x86_64_emit_rex(0,(basereg),0,0);
4914 *(mcodeptr++) = 0x81;
4915 x86_64_emit_membase((basereg),(disp),(opc));
4916 x86_64_emit_imm32((imm));
4921 void x86_64_test_reg_reg(s8 reg, s8 dreg) {
4922 x86_64_emit_rex(1,(reg),0,(dreg));
4923 *(mcodeptr++) = 0x85;
4924 x86_64_emit_reg((reg),(dreg));
4928 void x86_64_testl_reg_reg(s8 reg, s8 dreg) {
4929 x86_64_emit_rex(0,(reg),0,(dreg));
4930 *(mcodeptr++) = 0x85;
4931 x86_64_emit_reg((reg),(dreg));
4935 void x86_64_test_imm_reg(s8 imm, s8 reg) {
4936 *(mcodeptr++) = 0xf7;
4937 x86_64_emit_reg(0,(reg));
4938 x86_64_emit_imm32((imm));
4942 void x86_64_testw_imm_reg(s8 imm, s8 reg) {
4943 *(mcodeptr++) = 0x66;
4944 *(mcodeptr++) = 0xf7;
4945 x86_64_emit_reg(0,(reg));
4946 x86_64_emit_imm16((imm));
4950 void x86_64_testb_imm_reg(s8 imm, s8 reg) {
4951 *(mcodeptr++) = 0xf6;
4952 x86_64_emit_reg(0,(reg));
4953 x86_64_emit_imm8((imm));
4957 void x86_64_lea_membase_reg(s8 basereg, s8 disp, s8 reg) {
4958 x86_64_emit_rex(1,(reg),0,(basereg));
4959 *(mcodeptr++) = 0x8d;
4960 x86_64_emit_membase((basereg),(disp),(reg));
4964 void x86_64_leal_membase_reg(s8 basereg, s8 disp, s8 reg) {
4965 x86_64_emit_rex(0,(reg),0,(basereg));
4966 *(mcodeptr++) = 0x8d;
4967 x86_64_emit_membase((basereg),(disp),(reg));
4973 * inc, dec operations
4975 void x86_64_inc_reg(s8 reg) {
4976 x86_64_emit_rex(1,0,0,(reg));
4977 *(mcodeptr++) = 0xff;
4978 x86_64_emit_reg(0,(reg));
4982 void x86_64_incl_reg(s8 reg) {
4983 x86_64_emit_rex(0,0,0,(reg));
4984 *(mcodeptr++) = 0xff;
4985 x86_64_emit_reg(0,(reg));
4989 void x86_64_inc_membase(s8 basereg, s8 disp) {
4990 x86_64_emit_rex(1,(basereg),0,0);
4991 *(mcodeptr++) = 0xff;
4992 x86_64_emit_membase((basereg),(disp),0);
4996 void x86_64_incl_membase(s8 basereg, s8 disp) {
4997 x86_64_emit_rex(0,(basereg),0,0);
4998 *(mcodeptr++) = 0xff;
4999 x86_64_emit_membase((basereg),(disp),0);
5003 void x86_64_dec_reg(s8 reg) {
5004 x86_64_emit_rex(1,0,0,(reg));
5005 *(mcodeptr++) = 0xff;
5006 x86_64_emit_reg(1,(reg));
5010 void x86_64_decl_reg(s8 reg) {
5011 x86_64_emit_rex(0,0,0,(reg));
5012 *(mcodeptr++) = 0xff;
5013 x86_64_emit_reg(1,(reg));
5017 void x86_64_dec_membase(s8 basereg, s8 disp) {
5018 x86_64_emit_rex(1,(basereg),0,0);
5019 *(mcodeptr++) = 0xff;
5020 x86_64_emit_membase((basereg),(disp),1);
5024 void x86_64_decl_membase(s8 basereg, s8 disp) {
5025 x86_64_emit_rex(0,(basereg),0,0);
5026 *(mcodeptr++) = 0xff;
5027 x86_64_emit_membase((basereg),(disp),1);
5033 void x86_64_cltd() {
5034 *(mcodeptr++) = 0x99;
5038 void x86_64_cqto() {
5039 x86_64_emit_rex(1,0,0,0);
5040 *(mcodeptr++) = 0x99;
5045 void x86_64_imul_reg_reg(s8 reg, s8 dreg) {
5046 x86_64_emit_rex(1,(dreg),0,(reg));
5047 *(mcodeptr++) = 0x0f;
5048 *(mcodeptr++) = 0xaf;
5049 x86_64_emit_reg((dreg),(reg));
5053 void x86_64_imull_reg_reg(s8 reg, s8 dreg) {
5054 x86_64_emit_rex(0,(dreg),0,(reg));
5055 *(mcodeptr++) = 0x0f;
5056 *(mcodeptr++) = 0xaf;
5057 x86_64_emit_reg((dreg),(reg));
5061 void x86_64_imul_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5062 x86_64_emit_rex(1,(dreg),0,(basereg));
5063 *(mcodeptr++) = 0x0f;
5064 *(mcodeptr++) = 0xaf;
5065 x86_64_emit_membase((basereg),(disp),(dreg));
5069 void x86_64_imull_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5070 x86_64_emit_rex(0,(dreg),0,(basereg));
5071 *(mcodeptr++) = 0x0f;
5072 *(mcodeptr++) = 0xaf;
5073 x86_64_emit_membase((basereg),(disp),(dreg));
5077 void x86_64_imul_imm_reg(s8 imm, s8 dreg) {
5078 if (x86_64_is_imm8((imm))) {
5079 x86_64_emit_rex(1,0,0,(dreg));
5080 *(mcodeptr++) = 0x6b;
5081 x86_64_emit_reg(0,(dreg));
5082 x86_64_emit_imm8((imm));
5084 x86_64_emit_rex(1,0,0,(dreg));
5085 *(mcodeptr++) = 0x69;
5086 x86_64_emit_reg(0,(dreg));
5087 x86_64_emit_imm32((imm));
5092 void x86_64_imul_imm_reg_reg(s8 imm, s8 reg, s8 dreg) {
5093 if (x86_64_is_imm8((imm))) {
5094 x86_64_emit_rex(1,(dreg),0,(reg));
5095 *(mcodeptr++) = 0x6b;
5096 x86_64_emit_reg((dreg),(reg));
5097 x86_64_emit_imm8((imm));
5099 x86_64_emit_rex(1,(dreg),0,(reg));
5100 *(mcodeptr++) = 0x69;
5101 x86_64_emit_reg((dreg),(reg));
5102 x86_64_emit_imm32((imm));
5107 void x86_64_imull_imm_reg_reg(s8 imm, s8 reg, s8 dreg) {
5108 if (x86_64_is_imm8((imm))) {
5109 x86_64_emit_rex(0,(dreg),0,(reg));
5110 *(mcodeptr++) = 0x6b;
5111 x86_64_emit_reg((dreg),(reg));
5112 x86_64_emit_imm8((imm));
5114 x86_64_emit_rex(0,(dreg),0,(reg));
5115 *(mcodeptr++) = 0x69;
5116 x86_64_emit_reg((dreg),(reg));
5117 x86_64_emit_imm32((imm));
5122 void x86_64_imul_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg) {
5123 if (x86_64_is_imm8((imm))) {
5124 x86_64_emit_rex(1,(dreg),0,(basereg));
5125 *(mcodeptr++) = 0x6b;
5126 x86_64_emit_membase((basereg),(disp),(dreg));
5127 x86_64_emit_imm8((imm));
5129 x86_64_emit_rex(1,(dreg),0,(basereg));
5130 *(mcodeptr++) = 0x69;
5131 x86_64_emit_membase((basereg),(disp),(dreg));
5132 x86_64_emit_imm32((imm));
5137 void x86_64_imull_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg) {
5138 if (x86_64_is_imm8((imm))) {
5139 x86_64_emit_rex(0,(dreg),0,(basereg));
5140 *(mcodeptr++) = 0x6b;
5141 x86_64_emit_membase((basereg),(disp),(dreg));
5142 x86_64_emit_imm8((imm));
5144 x86_64_emit_rex(0,(dreg),0,(basereg));
5145 *(mcodeptr++) = 0x69;
5146 x86_64_emit_membase((basereg),(disp),(dreg));
5147 x86_64_emit_imm32((imm));
5152 void x86_64_idiv_reg(s8 reg) {
5153 x86_64_emit_rex(1,0,0,(reg));
5154 *(mcodeptr++) = 0xf7;
5155 x86_64_emit_reg(7,(reg));
5159 void x86_64_idivl_reg(s8 reg) {
5160 x86_64_emit_rex(0,0,0,(reg));
5161 *(mcodeptr++) = 0xf7;
5162 x86_64_emit_reg(7,(reg));
5168 *(mcodeptr++) = 0xc3;
5176 void x86_64_shift_reg(s8 opc, s8 reg) {
5177 x86_64_emit_rex(1,0,0,(reg));
5178 *(mcodeptr++) = 0xd3;
5179 x86_64_emit_reg((opc),(reg));
5183 void x86_64_shiftl_reg(s8 opc, s8 reg) {
5184 x86_64_emit_rex(0,0,0,(reg));
5185 *(mcodeptr++) = 0xd3;
5186 x86_64_emit_reg((opc),(reg));
5190 void x86_64_shift_membase(s8 opc, s8 basereg, s8 disp) {
5191 x86_64_emit_rex(1,0,0,(basereg));
5192 *(mcodeptr++) = 0xd3;
5193 x86_64_emit_membase((basereg),(disp),(opc));
5197 void x86_64_shiftl_membase(s8 opc, s8 basereg, s8 disp) {
5198 x86_64_emit_rex(0,0,0,(basereg));
5199 *(mcodeptr++) = 0xd3;
5200 x86_64_emit_membase((basereg),(disp),(opc));
5204 void x86_64_shift_imm_reg(s8 opc, s8 imm, s8 dreg) {
5206 x86_64_emit_rex(1,0,0,(dreg));
5207 *(mcodeptr++) = 0xd1;
5208 x86_64_emit_reg((opc),(dreg));
5210 x86_64_emit_rex(1,0,0,(dreg));
5211 *(mcodeptr++) = 0xc1;
5212 x86_64_emit_reg((opc),(dreg));
5213 x86_64_emit_imm8((imm));
5218 void x86_64_shiftl_imm_reg(s8 opc, s8 imm, s8 dreg) {
5220 x86_64_emit_rex(0,0,0,(dreg));
5221 *(mcodeptr++) = 0xd1;
5222 x86_64_emit_reg((opc),(dreg));
5224 x86_64_emit_rex(0,0,0,(dreg));
5225 *(mcodeptr++) = 0xc1;
5226 x86_64_emit_reg((opc),(dreg));
5227 x86_64_emit_imm8((imm));
5232 void x86_64_shift_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
5234 x86_64_emit_rex(1,0,0,(basereg));
5235 *(mcodeptr++) = 0xd1;
5236 x86_64_emit_membase((basereg),(disp),(opc));
5238 x86_64_emit_rex(1,0,0,(basereg));
5239 *(mcodeptr++) = 0xc1;
5240 x86_64_emit_membase((basereg),(disp),(opc));
5241 x86_64_emit_imm8((imm));
5246 void x86_64_shiftl_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
5248 x86_64_emit_rex(0,0,0,(basereg));
5249 *(mcodeptr++) = 0xd1;
5250 x86_64_emit_membase((basereg),(disp),(opc));
5252 x86_64_emit_rex(0,0,0,(basereg));
5253 *(mcodeptr++) = 0xc1;
5254 x86_64_emit_membase((basereg),(disp),(opc));
5255 x86_64_emit_imm8((imm));
5264 void x86_64_jmp_imm(s8 imm) {
5265 *(mcodeptr++) = 0xe9;
5266 x86_64_emit_imm32((imm));
5270 void x86_64_jmp_reg(s8 reg) {
5271 x86_64_emit_rex(0,0,0,(reg));
5272 *(mcodeptr++) = 0xff;
5273 x86_64_emit_reg(4,(reg));
5277 void x86_64_jcc(s8 opc, s8 imm) {
5278 *(mcodeptr++) = 0x0f;
5279 *(mcodeptr++) = (0x80 + (opc));
5280 x86_64_emit_imm32((imm));
5286 * conditional set and move operations
5289 /* we need the rex byte to get all low bytes */
5290 void x86_64_setcc_reg(s8 opc, s8 reg) {
5291 *(mcodeptr++) = (0x40 | (((reg) >> 3) & 0x01));
5292 *(mcodeptr++) = 0x0f;
5293 *(mcodeptr++) = (0x90 + (opc));
5294 x86_64_emit_reg(0,(reg));
5298 /* we need the rex byte to get all low bytes */
5299 void x86_64_setcc_membase(s8 opc, s8 basereg, s8 disp) {
5300 *(mcodeptr++) = (0x40 | (((basereg) >> 3) & 0x01));
5301 *(mcodeptr++) = 0x0f;
5302 *(mcodeptr++) = (0x90 + (opc));
5303 x86_64_emit_membase((basereg),(disp),0);
5307 void x86_64_cmovcc_reg_reg(s8 opc, s8 reg, s8 dreg) {
5308 x86_64_emit_rex(1,(dreg),0,(reg));
5309 *(mcodeptr++) = 0x0f;
5310 *(mcodeptr++) = (0x40 + (opc));
5311 x86_64_emit_reg((dreg),(reg));
5315 void x86_64_cmovccl_reg_reg(s8 opc, s8 reg, s8 dreg) {
5316 x86_64_emit_rex(0,(dreg),0,(reg));
5317 *(mcodeptr++) = 0x0f;
5318 *(mcodeptr++) = (0x40 + (opc));
5319 x86_64_emit_reg((dreg),(reg));
5324 void x86_64_neg_reg(s8 reg) {
5325 x86_64_emit_rex(1,0,0,(reg));
5326 *(mcodeptr++) = 0xf7;
5327 x86_64_emit_reg(3,(reg));
5331 void x86_64_negl_reg(s8 reg) {
5332 x86_64_emit_rex(0,0,0,(reg));
5333 *(mcodeptr++) = 0xf7;
5334 x86_64_emit_reg(3,(reg));
5338 void x86_64_neg_membase(s8 basereg, s8 disp) {
5339 x86_64_emit_rex(1,0,0,(basereg));
5340 *(mcodeptr++) = 0xf7;
5341 x86_64_emit_membase((basereg),(disp),3);
5345 void x86_64_negl_membase(s8 basereg, s8 disp) {
5346 x86_64_emit_rex(0,0,0,(basereg));
5347 *(mcodeptr++) = 0xf7;
5348 x86_64_emit_membase((basereg),(disp),3);
5353 void x86_64_push_imm(s8 imm) {
5354 *(mcodeptr++) = 0x68;
5355 x86_64_emit_imm32((imm));
5359 void x86_64_pop_reg(s8 reg) {
5360 x86_64_emit_rex(0,0,0,(reg));
5361 *(mcodeptr++) = 0x58 + (0x07 & (reg));
5365 void x86_64_xchg_reg_reg(s8 reg, s8 dreg) {
5366 x86_64_emit_rex(1,(reg),0,(dreg));
5367 *(mcodeptr++) = 0x87;
5368 x86_64_emit_reg((reg),(dreg));
5373 *(mcodeptr++) = 0x90;
5381 void x86_64_call_reg(s8 reg) {
5382 x86_64_emit_rex(1,0,0,(reg));
5383 *(mcodeptr++) = 0xff;
5384 x86_64_emit_reg(2,(reg));
5388 void x86_64_call_imm(s8 imm) {
5389 *(mcodeptr++) = 0xe8;
5390 x86_64_emit_imm32((imm));
5396 * floating point instructions (SSE2)
5398 void x86_64_addsd_reg_reg(s8 reg, s8 dreg) {
5399 *(mcodeptr++) = 0xf2;
5400 x86_64_emit_rex(0,(dreg),0,(reg));
5401 *(mcodeptr++) = 0x0f;
5402 *(mcodeptr++) = 0x58;
5403 x86_64_emit_reg((dreg),(reg));
5407 void x86_64_addss_reg_reg(s8 reg, s8 dreg) {
5408 *(mcodeptr++) = 0xf3;
5409 x86_64_emit_rex(0,(dreg),0,(reg));
5410 *(mcodeptr++) = 0x0f;
5411 *(mcodeptr++) = 0x58;
5412 x86_64_emit_reg((dreg),(reg));
5416 void x86_64_cvtsi2ssq_reg_reg(s8 reg, s8 dreg) {
5417 *(mcodeptr++) = 0xf3;
5418 x86_64_emit_rex(1,(dreg),0,(reg));
5419 *(mcodeptr++) = 0x0f;
5420 *(mcodeptr++) = 0x2a;
5421 x86_64_emit_reg((dreg),(reg));
5425 void x86_64_cvtsi2ss_reg_reg(s8 reg, s8 dreg) {
5426 *(mcodeptr++) = 0xf3;
5427 x86_64_emit_rex(0,(dreg),0,(reg));
5428 *(mcodeptr++) = 0x0f;
5429 *(mcodeptr++) = 0x2a;
5430 x86_64_emit_reg((dreg),(reg));
5434 void x86_64_cvtsi2sdq_reg_reg(s8 reg, s8 dreg) {
5435 *(mcodeptr++) = 0xf2;
5436 x86_64_emit_rex(1,(dreg),0,(reg));
5437 *(mcodeptr++) = 0x0f;
5438 *(mcodeptr++) = 0x2a;
5439 x86_64_emit_reg((dreg),(reg));
5443 void x86_64_cvtsi2sd_reg_reg(s8 reg, s8 dreg) {
5444 *(mcodeptr++) = 0xf2;
5445 x86_64_emit_rex(0,(dreg),0,(reg));
5446 *(mcodeptr++) = 0x0f;
5447 *(mcodeptr++) = 0x2a;
5448 x86_64_emit_reg((dreg),(reg));
5452 void x86_64_cvtss2sd_reg_reg(s8 reg, s8 dreg) {
5453 *(mcodeptr++) = 0xf3;
5454 x86_64_emit_rex(0,(dreg),0,(reg));
5455 *(mcodeptr++) = 0x0f;
5456 *(mcodeptr++) = 0x5a;
5457 x86_64_emit_reg((dreg),(reg));
5461 void x86_64_cvtsd2ss_reg_reg(s8 reg, s8 dreg) {
5462 *(mcodeptr++) = 0xf2;
5463 x86_64_emit_rex(0,(dreg),0,(reg));
5464 *(mcodeptr++) = 0x0f;
5465 *(mcodeptr++) = 0x5a;
5466 x86_64_emit_reg((dreg),(reg));
5470 void x86_64_cvttss2siq_reg_reg(s8 reg, s8 dreg) {
5471 *(mcodeptr++) = 0xf3;
5472 x86_64_emit_rex(1,(dreg),0,(reg));
5473 *(mcodeptr++) = 0x0f;
5474 *(mcodeptr++) = 0x2c;
5475 x86_64_emit_reg((dreg),(reg));
5479 void x86_64_cvttss2si_reg_reg(s8 reg, s8 dreg) {
5480 *(mcodeptr++) = 0xf3;
5481 x86_64_emit_rex(0,(dreg),0,(reg));
5482 *(mcodeptr++) = 0x0f;
5483 *(mcodeptr++) = 0x2c;
5484 x86_64_emit_reg((dreg),(reg));
5488 void x86_64_cvttsd2siq_reg_reg(s8 reg, s8 dreg) {
5489 *(mcodeptr++) = 0xf2;
5490 x86_64_emit_rex(1,(dreg),0,(reg));
5491 *(mcodeptr++) = 0x0f;
5492 *(mcodeptr++) = 0x2c;
5493 x86_64_emit_reg((dreg),(reg));
5497 void x86_64_cvttsd2si_reg_reg(s8 reg, s8 dreg) {
5498 *(mcodeptr++) = 0xf2;
5499 x86_64_emit_rex(0,(dreg),0,(reg));
5500 *(mcodeptr++) = 0x0f;
5501 *(mcodeptr++) = 0x2c;
5502 x86_64_emit_reg((dreg),(reg));
5506 void x86_64_divss_reg_reg(s8 reg, s8 dreg) {
5507 *(mcodeptr++) = 0xf3;
5508 x86_64_emit_rex(0,(dreg),0,(reg));
5509 *(mcodeptr++) = 0x0f;
5510 *(mcodeptr++) = 0x5e;
5511 x86_64_emit_reg((dreg),(reg));
5515 void x86_64_divsd_reg_reg(s8 reg, s8 dreg) {
5516 *(mcodeptr++) = 0xf2;
5517 x86_64_emit_rex(0,(dreg),0,(reg));
5518 *(mcodeptr++) = 0x0f;
5519 *(mcodeptr++) = 0x5e;
5520 x86_64_emit_reg((dreg),(reg));
5524 void x86_64_movd_reg_freg(s8 reg, s8 freg) {
5525 *(mcodeptr++) = 0x66;
5526 x86_64_emit_rex(1,(freg),0,(reg));
5527 *(mcodeptr++) = 0x0f;
5528 *(mcodeptr++) = 0x6e;
5529 x86_64_emit_reg((freg),(reg));
5533 void x86_64_movd_freg_reg(s8 freg, s8 reg) {
5534 *(mcodeptr++) = 0x66;
5535 x86_64_emit_rex(1,(freg),0,(reg));
5536 *(mcodeptr++) = 0x0f;
5537 *(mcodeptr++) = 0x7e;
5538 x86_64_emit_reg((freg),(reg));
5542 void x86_64_movd_reg_membase(s8 reg, s8 basereg, s8 disp) {
5543 *(mcodeptr++) = 0x66;
5544 x86_64_emit_rex(0,(reg),0,(basereg));
5545 *(mcodeptr++) = 0x0f;
5546 *(mcodeptr++) = 0x7e;
5547 x86_64_emit_membase((basereg),(disp),(reg));
5551 void x86_64_movd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
5552 *(mcodeptr++) = 0x66;
5553 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
5554 *(mcodeptr++) = 0x0f;
5555 *(mcodeptr++) = 0x7e;
5556 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
5560 void x86_64_movd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5561 *(mcodeptr++) = 0x66;
5562 x86_64_emit_rex(1,(dreg),0,(basereg));
5563 *(mcodeptr++) = 0x0f;
5564 *(mcodeptr++) = 0x6e;
5565 x86_64_emit_membase((basereg),(disp),(dreg));
5569 void x86_64_movdl_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5570 *(mcodeptr++) = 0x66;
5571 x86_64_emit_rex(0,(dreg),0,(basereg));
5572 *(mcodeptr++) = 0x0f;
5573 *(mcodeptr++) = 0x6e;
5574 x86_64_emit_membase((basereg),(disp),(dreg));
5578 void x86_64_movd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
5579 *(mcodeptr++) = 0x66;
5580 x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
5581 *(mcodeptr++) = 0x0f;
5582 *(mcodeptr++) = 0x6e;
5583 x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
5587 void x86_64_movq_reg_reg(s8 reg, s8 dreg) {
5588 *(mcodeptr++) = 0xf3;
5589 x86_64_emit_rex(0,(dreg),0,(reg));
5590 *(mcodeptr++) = 0x0f;
5591 *(mcodeptr++) = 0x7e;
5592 x86_64_emit_reg((dreg),(reg));
5596 void x86_64_movq_reg_membase(s8 reg, s8 basereg, s8 disp) {
5597 *(mcodeptr++) = 0x66;
5598 x86_64_emit_rex(0,(reg),0,(basereg));
5599 *(mcodeptr++) = 0x0f;
5600 *(mcodeptr++) = 0xd6;
5601 x86_64_emit_membase((basereg),(disp),(reg));
5605 void x86_64_movq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5606 *(mcodeptr++) = 0xf3;
5607 x86_64_emit_rex(0,(dreg),0,(basereg));
5608 *(mcodeptr++) = 0x0f;
5609 *(mcodeptr++) = 0x7e;
5610 x86_64_emit_membase((basereg),(disp),(dreg));
5614 void x86_64_movss_reg_reg(s8 reg, s8 dreg) {
5615 *(mcodeptr++) = 0xf3;
5616 x86_64_emit_rex(0,(reg),0,(dreg));
5617 *(mcodeptr++) = 0x0f;
5618 *(mcodeptr++) = 0x10;
5619 x86_64_emit_reg((reg),(dreg));
5623 void x86_64_movsd_reg_reg(s8 reg, s8 dreg) {
5624 *(mcodeptr++) = 0xf2;
5625 x86_64_emit_rex(0,(reg),0,(dreg));
5626 *(mcodeptr++) = 0x0f;
5627 *(mcodeptr++) = 0x10;
5628 x86_64_emit_reg((reg),(dreg));
5632 void x86_64_movss_reg_membase(s8 reg, s8 basereg, s8 disp) {
5633 *(mcodeptr++) = 0xf3;
5634 x86_64_emit_rex(0,(reg),0,(basereg));
5635 *(mcodeptr++) = 0x0f;
5636 *(mcodeptr++) = 0x11;
5637 x86_64_emit_membase((basereg),(disp),(reg));
5641 void x86_64_movsd_reg_membase(s8 reg, s8 basereg, s8 disp) {
5642 *(mcodeptr++) = 0xf2;
5643 x86_64_emit_rex(0,(reg),0,(basereg));
5644 *(mcodeptr++) = 0x0f;
5645 *(mcodeptr++) = 0x11;
5646 x86_64_emit_membase((basereg),(disp),(reg));
5650 void x86_64_movss_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5651 *(mcodeptr++) = 0xf3;
5652 x86_64_emit_rex(0,(dreg),0,(basereg));
5653 *(mcodeptr++) = 0x0f;
5654 *(mcodeptr++) = 0x10;
5655 x86_64_emit_membase((basereg),(disp),(dreg));
5659 void x86_64_movlps_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5660 x86_64_emit_rex(0,(dreg),0,(basereg));
5661 *(mcodeptr++) = 0x0f;
5662 *(mcodeptr++) = 0x12;
5663 x86_64_emit_membase((basereg),(disp),(dreg));
5667 void x86_64_movsd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5668 *(mcodeptr++) = 0xf2;
5669 x86_64_emit_rex(0,(dreg),0,(basereg));
5670 *(mcodeptr++) = 0x0f;
5671 *(mcodeptr++) = 0x10;
5672 x86_64_emit_membase((basereg),(disp),(dreg));
5676 void x86_64_movlpd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5677 *(mcodeptr++) = 0x66;
5678 x86_64_emit_rex(0,(dreg),0,(basereg));
5679 *(mcodeptr++) = 0x0f;
5680 *(mcodeptr++) = 0x12;
5681 x86_64_emit_membase((basereg),(disp),(dreg));
5685 void x86_64_movss_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
5686 *(mcodeptr++) = 0xf3;
5687 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
5688 *(mcodeptr++) = 0x0f;
5689 *(mcodeptr++) = 0x11;
5690 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
5694 void x86_64_movsd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
5695 *(mcodeptr++) = 0xf2;
5696 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
5697 *(mcodeptr++) = 0x0f;
5698 *(mcodeptr++) = 0x11;
5699 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
5703 void x86_64_movss_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
5704 *(mcodeptr++) = 0xf3;
5705 x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
5706 *(mcodeptr++) = 0x0f;
5707 *(mcodeptr++) = 0x10;
5708 x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
5712 void x86_64_movsd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
5713 *(mcodeptr++) = 0xf2;
5714 x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
5715 *(mcodeptr++) = 0x0f;
5716 *(mcodeptr++) = 0x10;
5717 x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
5721 void x86_64_mulss_reg_reg(s8 reg, s8 dreg) {
5722 *(mcodeptr++) = 0xf3;
5723 x86_64_emit_rex(0,(dreg),0,(reg));
5724 *(mcodeptr++) = 0x0f;
5725 *(mcodeptr++) = 0x59;
5726 x86_64_emit_reg((dreg),(reg));
5730 void x86_64_mulsd_reg_reg(s8 reg, s8 dreg) {
5731 *(mcodeptr++) = 0xf2;
5732 x86_64_emit_rex(0,(dreg),0,(reg));
5733 *(mcodeptr++) = 0x0f;
5734 *(mcodeptr++) = 0x59;
5735 x86_64_emit_reg((dreg),(reg));
5739 void x86_64_subss_reg_reg(s8 reg, s8 dreg) {
5740 *(mcodeptr++) = 0xf3;
5741 x86_64_emit_rex(0,(dreg),0,(reg));
5742 *(mcodeptr++) = 0x0f;
5743 *(mcodeptr++) = 0x5c;
5744 x86_64_emit_reg((dreg),(reg));
5748 void x86_64_subsd_reg_reg(s8 reg, s8 dreg) {
5749 *(mcodeptr++) = 0xf2;
5750 x86_64_emit_rex(0,(dreg),0,(reg));
5751 *(mcodeptr++) = 0x0f;
5752 *(mcodeptr++) = 0x5c;
5753 x86_64_emit_reg((dreg),(reg));
5757 void x86_64_ucomiss_reg_reg(s8 reg, s8 dreg) {
5758 x86_64_emit_rex(0,(dreg),0,(reg));
5759 *(mcodeptr++) = 0x0f;
5760 *(mcodeptr++) = 0x2e;
5761 x86_64_emit_reg((dreg),(reg));
5765 void x86_64_ucomisd_reg_reg(s8 reg, s8 dreg) {
5766 *(mcodeptr++) = 0x66;
5767 x86_64_emit_rex(0,(dreg),0,(reg));
5768 *(mcodeptr++) = 0x0f;
5769 *(mcodeptr++) = 0x2e;
5770 x86_64_emit_reg((dreg),(reg));
5774 void x86_64_xorps_reg_reg(s8 reg, s8 dreg) {
5775 x86_64_emit_rex(0,(dreg),0,(reg));
5776 *(mcodeptr++) = 0x0f;
5777 *(mcodeptr++) = 0x57;
5778 x86_64_emit_reg((dreg),(reg));
5782 void x86_64_xorps_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5783 x86_64_emit_rex(0,(dreg),0,(basereg));
5784 *(mcodeptr++) = 0x0f;
5785 *(mcodeptr++) = 0x57;
5786 x86_64_emit_membase((basereg),(disp),(dreg));
5790 void x86_64_xorpd_reg_reg(s8 reg, s8 dreg) {
5791 *(mcodeptr++) = 0x66;
5792 x86_64_emit_rex(0,(dreg),0,(reg));
5793 *(mcodeptr++) = 0x0f;
5794 *(mcodeptr++) = 0x57;
5795 x86_64_emit_reg((dreg),(reg));
5799 void x86_64_xorpd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5800 *(mcodeptr++) = 0x66;
5801 x86_64_emit_rex(0,(dreg),0,(basereg));
5802 *(mcodeptr++) = 0x0f;
5803 *(mcodeptr++) = 0x57;
5804 x86_64_emit_membase((basereg),(disp),(dreg));
5810 * These are local overrides for various environment variables in Emacs.
5811 * Please do not remove this and leave it at the end of the file, where
5812 * Emacs will automagically detect them.
5813 * ---------------------------------------------------------------------
5816 * indent-tabs-mode: t