1 /* jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003
4 Institut f. Computersprachen, TU Wien
5 R. Grafl, A. Krall, C. Kruegel, C. Oates, R. Obermaisser, M. Probst,
6 S. Ring, E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich,
9 This file is part of CACAO.
11 This program is free software; you can redistribute it and/or
12 modify it under the terms of the GNU General Public License as
13 published by the Free Software Foundation; either version 2, or (at
14 your option) any later version.
16 This program is distributed in the hope that it will be useful, but
17 WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 General Public License for more details.
21 You should have received a copy of the GNU General Public License
22 along with this program; if not, write to the Free Software
23 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
26 Contact: cacao@complang.tuwien.ac.at
28 Authors: Andreas Krall
31 $Id: codegen.c 1126 2004-06-03 21:35:05Z twisti $
51 /* include independent code generation stuff */
52 #include "codegen.inc"
56 /* register description - array ************************************************/
58 /* #define REG_RES 0 reserved register for OS or code generator */
59 /* #define REG_RET 1 return value register */
60 /* #define REG_EXC 2 exception value register (only old jit) */
61 /* #define REG_SAV 3 (callee) saved register */
62 /* #define REG_TMP 4 scratch temporary register (caller saved) */
63 /* #define REG_ARG 5 argument register (caller saved) */
65 /* #define REG_END -1 last entry in tables */
/* Integer register usage map: 16 entries, presumably one per GPR in encoding
   order RAX..R15 (entry 0 = REG_RET -> RAX, entry 4 = REG_RES -> RSP) —
   TODO confirm.  NOTE(review): the "int nregdescint[] = {" opener and the
   closing "};" fall outside the visible lines of this extract. */
68 REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
69 REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
/* Floating point register usage map: 16 entries, one per XMM register.  The
   commented-out rows below are an alternative allocation kept for
   reference. */
74 int nregdescfloat[] = {
75 /* REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP, */
76 /* REG_RES, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV, REG_SAV, */
77 REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
78 REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
83 /* additional functions and macros to generate code ***************************/
/* BlockPtrOfPC: resolve a bytecode pc to its basicblock pointer.
   NOTE(review): the definition ignores its pc argument and instead reads the
   enclosing loop's iptr->target — confirm this is intentional before reuse
   outside the instruction-walking loop. */
85 #define BlockPtrOfPC(pc) ((basicblock *) iptr->target)
/* Statistics: bump the global spill counter. */
89 #define COUNT_SPILLS count_spills++
/* CALCOFFSETBYTES(var, reg, val)
 *
 * Adds to the byte counter `var` the number of displacement bytes that an
 * x86_64 membase addressing mode with base register `reg` and displacement
 * `val` occupies:
 *   - 4 bytes when val does not fit a signed 8-bit displacement (disp32);
 *   - 1 byte for any other non-zero displacement (disp8);
 *   - 1 byte even for a zero displacement when the base is RBP/R13 (these
 *     encodings cannot omit the displacement) or RSP/R12 (these require an
 *     extra SIB byte).
 *
 * Wrapped in do { } while (0) so the macro expands to exactly one statement
 * and binds correctly when a call site sits inside an if/else chain — the
 * previous open if/else-if expansion was vulnerable to dangling-else
 * mis-binding.  Call sites keep their trailing semicolon.
 */
#define CALCOFFSETBYTES(var, reg, val) \
    do { \
        if ((s4) (val) < -128 || (s4) (val) > 127) (var) += 4; \
        else if ((s4) (val) != 0) (var) += 1; \
        else if ((reg) == RBP || (reg) == RSP || (reg) == R12 || (reg) == R13) (var) += 1; \
    } while (0)
/* CALCIMMEDIATEBYTES(var, val)
   Adds to `var` the encoding size of immediate `val`: 4 bytes for values
   outside the signed 8-bit range.  NOTE(review): the trailing backslash
   continues this macro onto line(s) not present in this extract —
   presumably an "else (var) += 1;" branch for an imm8 — confirm against the
   full source. */
101 #define CALCIMMEDIATEBYTES(var, val) \
102 if ((s4) (val) < -128 || (s4) (val) > 127) (var) += 4; \
106 /* gen_nullptr_check(objreg) */
/* gen_nullptr_check: emit a null check on register objreg — test reg,reg
   sets ZF for a NULL reference, then jcc E with a 0 placeholder
   displacement; codegen_addxnullrefs(mcodeptr) records the jump site so the
   NullPointerException stub can be patched in later.
   gen_div_check: same pattern for a zero divisor (cmp against the stack
   slot when spilled, test reg,reg otherwise), registered via
   codegen_addxdivrefs.  NOTE(review): gen_div_check's body reads
   src->regoff rather than (v)->regoff, so it only works when invoked with
   v == src — confirm at call sites.  Several brace/else lines of both
   macros fall outside the visible lines of this extract. */
108 #define gen_nullptr_check(objreg) \
110 x86_64_test_reg_reg((objreg), (objreg)); \
111 x86_64_jcc(X86_64_CC_E, 0); \
112 codegen_addxnullrefs(mcodeptr); \
116 #define gen_div_check(v) \
118 if ((v)->flags & INMEMORY) { \
119 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8); \
121 x86_64_test_reg_reg(src->regoff, src->regoff); \
123 x86_64_jcc(X86_64_CC_E, 0); \
124 codegen_addxdivrefs(mcodeptr); \
128 /* MCODECHECK(icnt) */
/* MCODECHECK(icnt)
 *
 * Ensures at least icnt more units (bytes, given mcodeptr is a u1*) fit in
 * the code buffer before emitting: if the write pointer would cross
 * mcodeend, the buffer is grown via codegen_increase() and mcodeptr is
 * redirected into the new buffer.
 *
 * All visible call sites invoke it as a statement with a trailing semicolon
 * ("MCODECHECK(64);", "MCODECHECK(128 + mparamcount);"), so the expansion
 * is wrapped in do { } while (0) to make it a single statement that
 * composes safely inside if/else chains (the previous bare-if expansion was
 * a dangling-else hazard).
 */
#define MCODECHECK(icnt) \
    do { \
        if ((mcodeptr + (icnt)) > (u1 *) mcodeend) \
            mcodeptr = (u1 *) codegen_increase((u1 *) mcodeptr); \
    } while (0)
134 generates an integer-move from register a to b.
135 if a and b are the same int-register, no code will be generated.
/* M_INTMOVE(reg,dreg): emit "mov %reg,%dreg" unless source and destination
   are already the same register.  NOTE(review): the closing brace of the
   macro body falls outside the visible lines of this extract. */
138 #define M_INTMOVE(reg,dreg) \
139 if ((reg) != (dreg)) { \
140 x86_64_mov_reg_reg((reg),(dreg)); \
145 generates a floating-point-move from register a to b.
146 if a and b are the same float-register, no code will be generated
/* M_FLTMOVE(reg,dreg): the XMM analogue, using movq; closing brace likewise
   not visible here. */
149 #define M_FLTMOVE(reg,dreg) \
150 if ((reg) != (dreg)) { \
151 x86_64_movq_reg_reg((reg),(dreg)); \
156 this function generates code to fetch data from a pseudo-register
157 into a real register.
158 If the pseudo-register has actually been assigned to a real
159 register, no code will be emitted, since following operations
160 can use this register directly.
162 v: pseudoregister to be fetched from
163 tempregnum: temporary register to be used if v is actually spilled to ram
165 return: the register number, where the operand can be found after
166 fetching (this will be either tempregnum or the register
167 number already given to v)
/* var_to_reg_int: when v is spilled (INMEMORY) load its stack slot at
   REG_SP + regoff*8 into tempnr — 32-bit movl for TYPE_INT, 64-bit mov
   otherwise — and yield the result register in regnr; when v already lives
   in a register, just yield that register.  var_to_reg_flt is the XMM
   analogue (movlps for TYPE_FLT, movlpd for doubles; a movq variant is kept
   commented out).  NOTE(review): some interior lines of both macros
   (else branches, "regnr = tempnr" assignments, closing braces) fall
   outside the visible lines of this extract. */
170 #define var_to_reg_int(regnr,v,tempnr) \
171 if ((v)->flags & INMEMORY) { \
173 if ((v)->type == TYPE_INT) { \
174 x86_64_movl_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
176 x86_64_mov_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
180 regnr = (v)->regoff; \
185 #define var_to_reg_flt(regnr,v,tempnr) \
186 if ((v)->flags & INMEMORY) { \
188 if ((v)->type == TYPE_FLT) { \
189 x86_64_movlps_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
191 x86_64_movlpd_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
193 /* x86_64_movq_membase_reg(REG_SP, (v)->regoff * 8, tempnr);*/ \
196 regnr = (v)->regoff; \
201 This function determines a register, to which the result of an operation
202 should go, when it is ultimately intended to store the result in
204 If v is assigned to an actual register, this register will be returned.
205 Otherwise (when v is spilled) this function returns tempregnum.
206 If not already done, regoff and flags are set in the stack location.
/* NOTE(review): several lines of this function (the opening brace, the
   switch's case labels, early returns and closing braces) fall outside the
   visible lines of this extract; the comments below annotate only the
   statements that are visible. */
209 static int reg_of_var(stackptr v, int tempregnum)
213 switch (v->varkind) {
/* presumably the temp-variable case — already register-resident? then
   that register can be returned directly (return not visible here) */
215 if (!(v->flags & INMEMORY))
/* interface stack slot: mirror the interface allocation into v */
219 var = &(interfaces[v->varnum][v->type]);
220 v->regoff = var->regoff;
221 if (!(var->flags & INMEMORY))
/* local variable: mirror the local's allocation into v */
225 var = &(locals[v->varnum][v->type]);
226 v->regoff = var->regoff;
227 if (!(var->flags & INMEMORY))
/* argument variable: the first FLT_ARG_CNT float / INT_ARG_CNT integer
   arguments live in argument registers, the rest on the caller's stack */
231 v->regoff = v->varnum;
232 if (IS_FLT_DBL_TYPE(v->type)) {
233 if (v->varnum < FLT_ARG_CNT) {
234 v->regoff = argfltregs[v->varnum];
235 return(argfltregs[v->varnum]);
238 if (v->varnum < INT_ARG_CNT) {
239 v->regoff = argintregs[v->varnum];
240 return(argintregs[v->varnum]);
/* stack-passed argument: rebase its offset past the register arguments */
243 v->regoff -= INT_ARG_CNT;
/* result must live in memory: mark v spilled (tempregnum is returned on a
   line not visible in this extract) */
246 v->flags |= INMEMORY;
251 /* store_reg_to_var_xxx:
252 This function generates the code to store the result of an operation
253 back into a spilled pseudo-variable.
254 If the pseudo-variable has not been spilled in the first place, this
255 function will generate nothing.
257 v ............ Pseudovariable
258 tempregnum ... Number of the temporary registers as returned by
/* store_reg_to_var_int/_flt: if sptr is spilled (INMEMORY), write
   tempregnum back to its stack slot at REG_SP + regoff*8 — 64-bit mov for
   integers, movq for XMM values; otherwise emit nothing.  NOTE(review):
   the closing braces of both macro bodies fall outside the visible lines
   of this extract. */
262 #define store_reg_to_var_int(sptr, tempregnum) \
263 if ((sptr)->flags & INMEMORY) { \
265 x86_64_mov_reg_membase(tempregnum, REG_SP, (sptr)->regoff * 8); \
269 #define store_reg_to_var_flt(sptr, tempregnum) \
270 if ((sptr)->flags & INMEMORY) { \
272 x86_64_movq_reg_membase(tempregnum, REG_SP, (sptr)->regoff * 8); \
276 /* NullPointerException signal handler for hardware null pointer check */
/* SIGSEGV/SIGBUS handler (installed with SA_SIGINFO by init_exceptions):
   converts a hardware fault on a null dereference into a Java
   NullPointerException.  It reinstalls itself, unblocks the signal, builds
   the exception object, and then rewrites the saved user context so that
   on return the faulting thread resumes in asm_handle_exception with the
   exception pointer in rax and the faulting pc in r10.
   NOTE(review): the opening/closing braces and the declaration of `nsig`
   (presumably a sigset_t plus sigemptyset) fall outside the visible lines
   of this extract. */
278 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
282 /* long faultaddr; */
/* recover the machine context of the faulting thread from the third
   SA_SIGINFO argument */
284 struct ucontext *_uc = (struct ucontext *) _p;
285 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
286 struct sigaction act;
287 java_objectheader *xptr;
289 /* Reset signal handler - necessary for SysV, does no harm for BSD */
292 /* instr = *((int*)(sigctx->rip)); */
293 /* faultaddr = sigctx->sc_regs[(instr >> 16) & 0x1f]; */
295 /* if (faultaddr == 0) { */
296 act.sa_sigaction = (void *) catch_NullPointerException; /* reinstall handler */
297 act.sa_flags = SA_SIGINFO;
298 sigaction(sig, &act, NULL);
/* the signal is blocked while its handler runs; unblock it so nested null
   checks in the exception path can still fault */
301 sigaddset(&nsig, sig);
302 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
304 xptr = new_exception(string_java_lang_NullPointerException);
/* redirect the interrupted thread into the asm exception dispatcher:
   exception object in rax, faulting pc in r10, new pc = handler entry */
306 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
307 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
308 sigctx->rip = (u8) asm_handle_exception;
/* dead code kept from an older port that decoded the faulting address */
313 /* faultaddr += (long) ((instr << 16) >> 16); */
314 /* fprintf(stderr, "faulting address: 0x%08x\n", faultaddr); */
315 /* panic("Stack overflow"); */
320 /* ArithmeticException signal handler for hardware divide by zero check */
/* SIGFPE handler (installed with SA_SIGINFO by init_exceptions): converts a
   hardware divide fault into a Java ArithmeticException ("/ by zero").
   Same recovery protocol as catch_NullPointerException: reinstall the
   handler, unblock the signal, then rewrite the saved context so the
   faulting thread resumes in asm_handle_exception with the exception
   object in rax and the faulting pc in r10.  NOTE(review): the braces and
   the `nsig` declaration fall outside the visible lines of this extract. */
322 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
326 struct ucontext *_uc = (struct ucontext *) _p;
327 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
328 struct sigaction act;
329 java_objectheader *xptr;
331 /* Reset signal handler - necessary for SysV, does no harm for BSD */
333 act.sa_sigaction = (void *) catch_ArithmeticException; /* reinstall handler */
334 act.sa_flags = SA_SIGINFO;
335 sigaction(sig, &act, NULL);
/* unblock the signal so later divide faults are caught again */
338 sigaddset(&nsig, sig);
339 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
/* build the exception with the standard "/ by zero" detail message */
341 xptr = new_exception_message(string_java_lang_ArithmeticException,
342 string_java_lang_ArithmeticException_message);
344 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
345 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
346 sigctx->rip = (u8) asm_handle_exception;
/* init_exceptions: install the signal handlers that turn hardware faults
   into Java exceptions — SIGSEGV and SIGBUS both map to
   catch_NullPointerException, SIGFPE maps to catch_ArithmeticException.
   NOTE(review): act.sa_mask is never initialized in the visible lines
   (no sigemptyset) — confirm this happens on a line outside this extract,
   otherwise the handlers run with an indeterminate blocked-signal mask. */
352 void init_exceptions(void)
354 struct sigaction act;
356 /* install signal handlers we need to convert to exceptions */
/* null pointer dereferences normally raise SIGSEGV */
360 act.sa_sigaction = (void *) catch_NullPointerException;
361 act.sa_flags = SA_SIGINFO;
362 sigaction(SIGSEGV, &act, NULL);
/* on some platforms the same access raises SIGBUS instead */
366 act.sa_sigaction = (void *) catch_NullPointerException;
367 act.sa_flags = SA_SIGINFO;
368 sigaction(SIGBUS, &act, NULL);
/* integer divide-by-zero raises SIGFPE */
372 act.sa_sigaction = (void *) catch_ArithmeticException;
373 act.sa_flags = SA_SIGINFO;
374 sigaction(SIGFPE, &act, NULL);
378 /* function gen_mcode **********************************************************
380 generates machine code
382 *******************************************************************************/
384 /* global code generation pointer */
391 int len, s1, s2, s3, d;
405 /* space to save used callee saved registers */
407 savedregs_num += (savintregcnt - maxsavintreguse);
408 savedregs_num += (savfltregcnt - maxsavfltreguse);
410 parentargs_base = maxmemuse + savedregs_num;
412 #if defined(USE_THREADS) /* space to save argument of monitor_enter */
414 if (checksync && (method->flags & ACC_SYNCHRONIZED))
419 /* keep stack 16-byte aligned for calls into libc */
421 if (!isleafmethod || runverbose) {
422 if ((parentargs_base % 2) == 0) {
427 /* create method header */
429 (void) dseg_addaddress(method); /* MethodPointer */
430 (void) dseg_adds4(parentargs_base * 8); /* FrameSize */
432 #if defined(USE_THREADS)
434 /* IsSync contains the offset relative to the stack pointer for the
435 argument of monitor_exit used in the exception handler. Since the
436 offset could be zero and give a wrong meaning of the flag it is
440 if (checksync && (method->flags & ACC_SYNCHRONIZED))
441 (void) dseg_adds4((maxmemuse + 1) * 8); /* IsSync */
446 (void) dseg_adds4(0); /* IsSync */
448 (void) dseg_adds4(isleafmethod); /* IsLeaf */
449 (void) dseg_adds4(savintregcnt - maxsavintreguse); /* IntSave */
450 (void) dseg_adds4(savfltregcnt - maxsavfltreguse); /* FltSave */
451 (void) dseg_adds4(exceptiontablelength); /* ExTableSize */
453 /* create exception table */
455 for (ex = extable; ex != NULL; ex = ex->down) {
456 dseg_addtarget(ex->start);
457 dseg_addtarget(ex->end);
458 dseg_addtarget(ex->handler);
459 (void) dseg_addaddress(ex->catchtype);
462 /* initialize mcode variables */
464 mcodeptr = (u1*) mcodebase;
465 mcodeend = (s4*) (mcodebase + mcodesize);
466 MCODECHECK(128 + mparamcount);
468 /* create stack frame (if necessary) */
470 if (parentargs_base) {
471 x86_64_alu_imm_reg(X86_64_SUB, parentargs_base * 8, REG_SP);
474 /* save return address and used callee saved registers */
477 for (r = savintregcnt - 1; r >= maxsavintreguse; r--) {
478 p--; x86_64_mov_reg_membase(savintregs[r], REG_SP, p * 8);
480 for (r = savfltregcnt - 1; r >= maxsavfltreguse; r--) {
481 p--; x86_64_movq_reg_membase(savfltregs[r], REG_SP, p * 8);
484 /* save monitorenter argument */
486 #if defined(USE_THREADS)
487 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
488 if (method->flags & ACC_STATIC) {
489 x86_64_mov_imm_reg((s8) class, REG_ITMP1);
490 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, maxmemuse * 8);
493 x86_64_mov_reg_membase(argintregs[0], REG_SP, maxmemuse * 8);
498 /* copy argument registers to stack and call trace function with pointer
499 to arguments on stack.
502 x86_64_alu_imm_reg(X86_64_SUB, (6 + 8 + 1 + 1) * 8, REG_SP);
504 x86_64_mov_reg_membase(argintregs[0], REG_SP, 1 * 8);
505 x86_64_mov_reg_membase(argintregs[1], REG_SP, 2 * 8);
506 x86_64_mov_reg_membase(argintregs[2], REG_SP, 3 * 8);
507 x86_64_mov_reg_membase(argintregs[3], REG_SP, 4 * 8);
508 x86_64_mov_reg_membase(argintregs[4], REG_SP, 5 * 8);
509 x86_64_mov_reg_membase(argintregs[5], REG_SP, 6 * 8);
511 x86_64_movq_reg_membase(argfltregs[0], REG_SP, 7 * 8);
512 x86_64_movq_reg_membase(argfltregs[1], REG_SP, 8 * 8);
513 x86_64_movq_reg_membase(argfltregs[2], REG_SP, 9 * 8);
514 x86_64_movq_reg_membase(argfltregs[3], REG_SP, 10 * 8);
515 /* x86_64_movq_reg_membase(argfltregs[4], REG_SP, 11 * 8); */
516 /* x86_64_movq_reg_membase(argfltregs[5], REG_SP, 12 * 8); */
517 /* x86_64_movq_reg_membase(argfltregs[6], REG_SP, 13 * 8); */
518 /* x86_64_movq_reg_membase(argfltregs[7], REG_SP, 14 * 8); */
520 for (p = 0, l = 0; p < mparamcount; p++) {
523 if (IS_FLT_DBL_TYPE(t)) {
524 for (s1 = (mparamcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : mparamcount - 2; s1 >= p; s1--) {
525 x86_64_mov_reg_reg(argintregs[s1], argintregs[s1 + 1]);
528 x86_64_movd_freg_reg(argfltregs[l], argintregs[p]);
533 x86_64_mov_imm_reg((s8) method, REG_ITMP2);
534 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, 0 * 8);
535 x86_64_mov_imm_reg((s8) builtin_trace_args, REG_ITMP1);
536 x86_64_call_reg(REG_ITMP1);
538 x86_64_mov_membase_reg(REG_SP, 1 * 8, argintregs[0]);
539 x86_64_mov_membase_reg(REG_SP, 2 * 8, argintregs[1]);
540 x86_64_mov_membase_reg(REG_SP, 3 * 8, argintregs[2]);
541 x86_64_mov_membase_reg(REG_SP, 4 * 8, argintregs[3]);
542 x86_64_mov_membase_reg(REG_SP, 5 * 8, argintregs[4]);
543 x86_64_mov_membase_reg(REG_SP, 6 * 8, argintregs[5]);
545 x86_64_movq_membase_reg(REG_SP, 7 * 8, argfltregs[0]);
546 x86_64_movq_membase_reg(REG_SP, 8 * 8, argfltregs[1]);
547 x86_64_movq_membase_reg(REG_SP, 9 * 8, argfltregs[2]);
548 x86_64_movq_membase_reg(REG_SP, 10 * 8, argfltregs[3]);
549 /* x86_64_movq_membase_reg(REG_SP, 11 * 8, argfltregs[4]); */
550 /* x86_64_movq_membase_reg(REG_SP, 12 * 8, argfltregs[5]); */
551 /* x86_64_movq_membase_reg(REG_SP, 13 * 8, argfltregs[6]); */
552 /* x86_64_movq_membase_reg(REG_SP, 14 * 8, argfltregs[7]); */
554 x86_64_alu_imm_reg(X86_64_ADD, (6 + 8 + 1 + 1) * 8, REG_SP);
557 /* take arguments out of register or stack frame */
559 for (p = 0, l = 0, s1 = 0, s2 = 0; p < mparamcount; p++) {
561 var = &(locals[l][t]);
563 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
566 if (IS_INT_LNG_TYPE(t)) {
574 if (IS_INT_LNG_TYPE(t)) { /* integer args */
575 if (s1 < INT_ARG_CNT) { /* register arguments */
576 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
577 M_INTMOVE(argintregs[s1], r);
579 } else { /* reg arg -> spilled */
580 x86_64_mov_reg_membase(argintregs[s1], REG_SP, r * 8);
582 } else { /* stack arguments */
583 pa = s1 - INT_ARG_CNT;
584 if (s2 >= FLT_ARG_CNT) {
585 pa += s2 - FLT_ARG_CNT;
587 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
588 x86_64_mov_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, r); /* + 8 for return address */
589 } else { /* stack arg -> spilled */
590 x86_64_mov_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1); /* + 8 for return address */
591 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, r * 8);
596 } else { /* floating args */
597 if (s2 < FLT_ARG_CNT) { /* register arguments */
598 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
599 M_FLTMOVE(argfltregs[s2], r);
601 } else { /* reg arg -> spilled */
602 x86_64_movq_reg_membase(argfltregs[s2], REG_SP, r * 8);
605 } else { /* stack arguments */
606 pa = s2 - FLT_ARG_CNT;
607 if (s1 >= INT_ARG_CNT) {
608 pa += s1 - INT_ARG_CNT;
610 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
611 x86_64_movq_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, r);
614 x86_64_movq_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
615 x86_64_movq_reg_membase(REG_FTMP1, REG_SP, r * 8);
622 /* call monitorenter function */
624 #if defined(USE_THREADS)
625 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
626 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
627 x86_64_mov_imm_reg((s8) builtin_monitorenter, REG_ITMP1);
628 x86_64_call_reg(REG_ITMP1);
633 /* end of header generation */
635 /* walk through all basic blocks */
636 for (bptr = block; bptr != NULL; bptr = bptr->next) {
638 bptr->mpc = (u4) ((u1 *) mcodeptr - mcodebase);
640 if (bptr->flags >= BBREACHED) {
642 /* branch resolving */
645 for (brefs = bptr->branchrefs; brefs != NULL; brefs = brefs->next) {
646 gen_resolvebranch((u1*) mcodebase + brefs->branchpos,
647 brefs->branchpos, bptr->mpc);
650 /* copy interface registers to their destination */
655 while (src != NULL) {
657 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
658 if (bptr->type == BBTYPE_SBR) {
659 d = reg_of_var(src, REG_ITMP1);
661 store_reg_to_var_int(src, d);
663 } else if (bptr->type == BBTYPE_EXH) {
664 d = reg_of_var(src, REG_ITMP1);
665 M_INTMOVE(REG_ITMP1, d);
666 store_reg_to_var_int(src, d);
670 d = reg_of_var(src, REG_ITMP1);
671 if ((src->varkind != STACKVAR)) {
673 if (IS_FLT_DBL_TYPE(s2)) {
674 s1 = interfaces[len][s2].regoff;
675 if (!(interfaces[len][s2].flags & INMEMORY)) {
679 x86_64_movq_membase_reg(REG_SP, s1 * 8, d);
681 store_reg_to_var_flt(src, d);
684 s1 = interfaces[len][s2].regoff;
685 if (!(interfaces[len][s2].flags & INMEMORY)) {
689 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
691 store_reg_to_var_int(src, d);
698 /* walk through all instructions */
702 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
704 MCODECHECK(64); /* an instruction usually needs < 64 words */
707 case ICMD_NOP: /* ... ==> ... */
710 case ICMD_NULLCHECKPOP: /* ..., objectref ==> ... */
711 if (src->flags & INMEMORY) {
712 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
715 x86_64_test_reg_reg(src->regoff, src->regoff);
717 x86_64_jcc(X86_64_CC_E, 0);
718 codegen_addxnullrefs(mcodeptr);
721 /* constant operations ************************************************/
723 case ICMD_ICONST: /* ... ==> ..., constant */
724 /* op1 = 0, val.i = constant */
726 d = reg_of_var(iptr->dst, REG_ITMP1);
727 if (iptr->val.i == 0) {
728 x86_64_alu_reg_reg(X86_64_XOR, d, d);
730 x86_64_movl_imm_reg(iptr->val.i, d);
732 store_reg_to_var_int(iptr->dst, d);
735 case ICMD_ACONST: /* ... ==> ..., constant */
736 /* op1 = 0, val.a = constant */
738 d = reg_of_var(iptr->dst, REG_ITMP1);
739 if (iptr->val.a == 0) {
740 x86_64_alu_reg_reg(X86_64_XOR, d, d);
742 x86_64_mov_imm_reg((s8) iptr->val.a, d);
744 store_reg_to_var_int(iptr->dst, d);
747 case ICMD_LCONST: /* ... ==> ..., constant */
748 /* op1 = 0, val.l = constant */
750 d = reg_of_var(iptr->dst, REG_ITMP1);
751 if (iptr->val.l == 0) {
752 x86_64_alu_reg_reg(X86_64_XOR, d, d);
754 x86_64_mov_imm_reg(iptr->val.l, d);
756 store_reg_to_var_int(iptr->dst, d);
759 case ICMD_FCONST: /* ... ==> ..., constant */
760 /* op1 = 0, val.f = constant */
762 d = reg_of_var(iptr->dst, REG_FTMP1);
763 a = dseg_addfloat(iptr->val.f);
764 x86_64_movdl_membase_reg(RIP, -(((s8) mcodeptr + ((d > 7) ? 9 : 8)) - (s8) mcodebase) + a, d);
765 store_reg_to_var_flt(iptr->dst, d);
768 case ICMD_DCONST: /* ... ==> ..., constant */
769 /* op1 = 0, val.d = constant */
771 d = reg_of_var(iptr->dst, REG_FTMP1);
772 a = dseg_adddouble(iptr->val.d);
773 x86_64_movd_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, d);
774 store_reg_to_var_flt(iptr->dst, d);
778 /* load/store operations **********************************************/
780 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
781 /* op1 = local variable */
783 d = reg_of_var(iptr->dst, REG_ITMP1);
784 if ((iptr->dst->varkind == LOCALVAR) &&
785 (iptr->dst->varnum == iptr->op1)) {
788 var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
789 if (var->flags & INMEMORY) {
790 x86_64_movl_membase_reg(REG_SP, var->regoff * 8, d);
791 store_reg_to_var_int(iptr->dst, d);
794 if (iptr->dst->flags & INMEMORY) {
795 x86_64_mov_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
798 M_INTMOVE(var->regoff, d);
803 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
804 case ICMD_ALOAD: /* op1 = local variable */
806 d = reg_of_var(iptr->dst, REG_ITMP1);
807 if ((iptr->dst->varkind == LOCALVAR) &&
808 (iptr->dst->varnum == iptr->op1)) {
811 var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
812 if (var->flags & INMEMORY) {
813 x86_64_mov_membase_reg(REG_SP, var->regoff * 8, d);
814 store_reg_to_var_int(iptr->dst, d);
817 if (iptr->dst->flags & INMEMORY) {
818 x86_64_mov_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
821 M_INTMOVE(var->regoff, d);
826 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
827 case ICMD_DLOAD: /* op1 = local variable */
829 d = reg_of_var(iptr->dst, REG_FTMP1);
830 if ((iptr->dst->varkind == LOCALVAR) &&
831 (iptr->dst->varnum == iptr->op1)) {
834 var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
835 if (var->flags & INMEMORY) {
836 x86_64_movq_membase_reg(REG_SP, var->regoff * 8, d);
837 store_reg_to_var_flt(iptr->dst, d);
840 if (iptr->dst->flags & INMEMORY) {
841 x86_64_movq_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
844 M_FLTMOVE(var->regoff, d);
849 case ICMD_ISTORE: /* ..., value ==> ... */
850 case ICMD_LSTORE: /* op1 = local variable */
853 if ((src->varkind == LOCALVAR) &&
854 (src->varnum == iptr->op1)) {
857 var = &(locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
858 if (var->flags & INMEMORY) {
859 var_to_reg_int(s1, src, REG_ITMP1);
860 x86_64_mov_reg_membase(s1, REG_SP, var->regoff * 8);
863 var_to_reg_int(s1, src, var->regoff);
864 M_INTMOVE(s1, var->regoff);
868 case ICMD_FSTORE: /* ..., value ==> ... */
869 case ICMD_DSTORE: /* op1 = local variable */
871 if ((src->varkind == LOCALVAR) &&
872 (src->varnum == iptr->op1)) {
875 var = &(locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
876 if (var->flags & INMEMORY) {
877 var_to_reg_flt(s1, src, REG_FTMP1);
878 x86_64_movq_reg_membase(s1, REG_SP, var->regoff * 8);
881 var_to_reg_flt(s1, src, var->regoff);
882 M_FLTMOVE(s1, var->regoff);
887 /* pop/dup/swap operations ********************************************/
889 /* attention: double and longs are only one entry in CACAO ICMDs */
891 case ICMD_POP: /* ..., value ==> ... */
892 case ICMD_POP2: /* ..., value, value ==> ... */
895 #define M_COPY(from,to) \
896 d = reg_of_var(to, REG_ITMP1); \
897 if ((from->regoff != to->regoff) || \
898 ((from->flags ^ to->flags) & INMEMORY)) { \
899 if (IS_FLT_DBL_TYPE(from->type)) { \
900 var_to_reg_flt(s1, from, d); \
902 store_reg_to_var_flt(to, d); \
904 var_to_reg_int(s1, from, d); \
906 store_reg_to_var_int(to, d); \
910 case ICMD_DUP: /* ..., a ==> ..., a, a */
911 M_COPY(src, iptr->dst);
914 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
916 M_COPY(src, iptr->dst->prev->prev);
918 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
920 M_COPY(src, iptr->dst);
921 M_COPY(src->prev, iptr->dst->prev);
924 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
926 M_COPY(src->prev, iptr->dst->prev->prev->prev);
928 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
930 M_COPY(src, iptr->dst);
931 M_COPY(src->prev, iptr->dst->prev);
932 M_COPY(src->prev->prev, iptr->dst->prev->prev);
933 M_COPY(src, iptr->dst->prev->prev->prev);
936 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
938 M_COPY(src, iptr->dst);
939 M_COPY(src->prev, iptr->dst->prev);
940 M_COPY(src->prev->prev, iptr->dst->prev->prev);
941 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
942 M_COPY(src, iptr->dst->prev->prev->prev->prev);
943 M_COPY(src->prev, iptr->dst->prev->prev->prev->prev->prev);
946 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
948 M_COPY(src, iptr->dst->prev);
949 M_COPY(src->prev, iptr->dst);
953 /* integer operations *************************************************/
955 case ICMD_INEG: /* ..., value ==> ..., - value */
957 d = reg_of_var(iptr->dst, REG_NULL);
958 if (iptr->dst->flags & INMEMORY) {
959 if (src->flags & INMEMORY) {
960 if (src->regoff == iptr->dst->regoff) {
961 x86_64_negl_membase(REG_SP, iptr->dst->regoff * 8);
964 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
965 x86_64_negl_reg(REG_ITMP1);
966 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
970 x86_64_movl_reg_membase(src->regoff, REG_SP, iptr->dst->regoff * 8);
971 x86_64_negl_membase(REG_SP, iptr->dst->regoff * 8);
975 if (src->flags & INMEMORY) {
976 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
980 M_INTMOVE(src->regoff, iptr->dst->regoff);
981 x86_64_negl_reg(iptr->dst->regoff);
986 case ICMD_LNEG: /* ..., value ==> ..., - value */
988 d = reg_of_var(iptr->dst, REG_NULL);
989 if (iptr->dst->flags & INMEMORY) {
990 if (src->flags & INMEMORY) {
991 if (src->regoff == iptr->dst->regoff) {
992 x86_64_neg_membase(REG_SP, iptr->dst->regoff * 8);
995 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
996 x86_64_neg_reg(REG_ITMP1);
997 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1001 x86_64_mov_reg_membase(src->regoff, REG_SP, iptr->dst->regoff * 8);
1002 x86_64_neg_membase(REG_SP, iptr->dst->regoff * 8);
1006 if (src->flags & INMEMORY) {
1007 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1008 x86_64_neg_reg(iptr->dst->regoff);
1011 M_INTMOVE(src->regoff, iptr->dst->regoff);
1012 x86_64_neg_reg(iptr->dst->regoff);
1017 case ICMD_I2L: /* ..., value ==> ..., value */
1019 d = reg_of_var(iptr->dst, REG_ITMP3);
1020 if (src->flags & INMEMORY) {
1021 x86_64_movslq_membase_reg(REG_SP, src->regoff * 8, d);
1024 x86_64_movslq_reg_reg(src->regoff, d);
1026 store_reg_to_var_int(iptr->dst, d);
1029 case ICMD_L2I: /* ..., value ==> ..., value */
1031 var_to_reg_int(s1, src, REG_ITMP1);
1032 d = reg_of_var(iptr->dst, REG_ITMP3);
1034 store_reg_to_var_int(iptr->dst, d);
1037 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
1039 d = reg_of_var(iptr->dst, REG_ITMP3);
1040 if (src->flags & INMEMORY) {
1041 x86_64_movsbq_membase_reg(REG_SP, src->regoff * 8, d);
1044 x86_64_movsbq_reg_reg(src->regoff, d);
1046 store_reg_to_var_int(iptr->dst, d);
1049 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
1051 d = reg_of_var(iptr->dst, REG_ITMP3);
1052 if (src->flags & INMEMORY) {
1053 x86_64_movzwq_membase_reg(REG_SP, src->regoff * 8, d);
1056 x86_64_movzwq_reg_reg(src->regoff, d);
1058 store_reg_to_var_int(iptr->dst, d);
1061 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
1063 d = reg_of_var(iptr->dst, REG_ITMP3);
1064 if (src->flags & INMEMORY) {
1065 x86_64_movswq_membase_reg(REG_SP, src->regoff * 8, d);
1068 x86_64_movswq_reg_reg(src->regoff, d);
1070 store_reg_to_var_int(iptr->dst, d);
1074 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1076 d = reg_of_var(iptr->dst, REG_NULL);
1077 x86_64_emit_ialu(X86_64_ADD, src, iptr);
1080 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
1081 /* val.i = constant */
1083 d = reg_of_var(iptr->dst, REG_NULL);
1084 x86_64_emit_ialuconst(X86_64_ADD, src, iptr);
1087 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1089 d = reg_of_var(iptr->dst, REG_NULL);
1090 x86_64_emit_lalu(X86_64_ADD, src, iptr);
1093 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
1094 /* val.l = constant */
1096 d = reg_of_var(iptr->dst, REG_NULL);
1097 x86_64_emit_laluconst(X86_64_ADD, src, iptr);
1100 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1102 d = reg_of_var(iptr->dst, REG_NULL);
1103 if (iptr->dst->flags & INMEMORY) {
1104 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1105 if (src->prev->regoff == iptr->dst->regoff) {
1106 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1107 x86_64_alul_reg_membase(X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1110 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1111 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1112 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1115 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1116 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1117 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1118 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1120 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1121 if (src->prev->regoff == iptr->dst->regoff) {
1122 x86_64_alul_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1125 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1126 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1127 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1131 x86_64_movl_reg_membase(src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1132 x86_64_alul_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1136 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1137 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, d);
1138 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1140 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1141 M_INTMOVE(src->prev->regoff, d);
1142 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1144 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1145 /* workaround for reg alloc */
1146 if (src->regoff == iptr->dst->regoff) {
1147 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1148 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1149 M_INTMOVE(REG_ITMP1, d);
1152 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, d);
1153 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, d);
1157 /* workaround for reg alloc */
1158 if (src->regoff == iptr->dst->regoff) {
1159 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1160 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1161 M_INTMOVE(REG_ITMP1, d);
1164 M_INTMOVE(src->prev->regoff, d);
/* Tail of ICMD_ISUB (the case label precedes this chunk): 32-bit
   register-register subtract into destination register d. */
1165 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, d);
/* ICMD_ISUBCONST subtracts an immediate; despite the SUB opcode the
   shared emitter handles both memory- and register-resident operands. */
1171 case ICMD_ISUBCONST: /* ..., value ==> ..., value - constant */
1172 /* val.i = constant */
1174 d = reg_of_var(iptr->dst, REG_NULL);
1175 x86_64_emit_ialuconst(X86_64_SUB, src, iptr);
/* ICMD_LSUB: 64-bit subtract.  The branches below enumerate every
   placement combination of the two source operands (stack slot vs.
   register) and of the destination, avoiding a temporary when the
   destination aliases one of the sources.  NOTE: closing braces and
   else-arms of several branches are elided from this listing. */
1178 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1180 d = reg_of_var(iptr->dst, REG_NULL);
1181 if (iptr->dst->flags & INMEMORY) {
1182 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* dst slot already holds val1: subtract val2 into it in place. */
1183 if (src->prev->regoff == iptr->dst->regoff) {
1184 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1185 x86_64_alu_reg_membase(X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1188 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1189 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1190 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1193 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1194 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1195 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1196 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1198 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1199 if (src->prev->regoff == iptr->dst->regoff) {
1200 x86_64_alu_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1203 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1204 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1205 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1209 x86_64_mov_reg_membase(src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1210 x86_64_alu_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
/* Destination lives in a register. */
1214 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1215 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, d);
1216 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1218 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1219 M_INTMOVE(src->prev->regoff, d);
1220 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1222 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* When d aliases val2 we must not clobber val2 before the
   subtract, so go through REG_ITMP1. */
1223 /* workaround for reg alloc */
1224 if (src->regoff == iptr->dst->regoff) {
1225 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1226 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1227 M_INTMOVE(REG_ITMP1, d);
1230 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, d);
1231 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, d);
1235 /* workaround for reg alloc */
1236 if (src->regoff == iptr->dst->regoff) {
1237 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1238 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1239 M_INTMOVE(REG_ITMP1, d);
1242 M_INTMOVE(src->prev->regoff, d);
1243 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, d);
/* ICMD_LSUBCONST: 64-bit subtract of a long immediate via the shared
   64-bit ALU-with-immediate emitter. */
1249 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1250 /* val.l = constant */
1252 d = reg_of_var(iptr->dst, REG_NULL);
1253 x86_64_emit_laluconst(X86_64_SUB, src, iptr);
/* ICMD_IMUL: 32-bit multiply.  Same operand-placement enumeration as the
   add/sub cases; imul is commutative so either source may be folded into
   the destination.  (else-arms/braces of some branches are elided from
   this listing.) */
1256 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1258 d = reg_of_var(iptr->dst, REG_NULL);
1259 if (iptr->dst->flags & INMEMORY) {
1260 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1261 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1262 x86_64_imull_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1263 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1265 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1266 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1267 x86_64_imull_reg_reg(src->prev->regoff, REG_ITMP1);
1268 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1270 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1271 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1272 x86_64_imull_reg_reg(src->regoff, REG_ITMP1);
1273 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1276 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1277 x86_64_imull_reg_reg(src->regoff, REG_ITMP1);
1278 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* Destination in a register: multiply directly into it. */
1282 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1283 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1284 x86_64_imull_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1286 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1287 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1288 x86_64_imull_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1290 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1291 M_INTMOVE(src->regoff, iptr->dst->regoff);
1292 x86_64_imull_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
/* d aliases val2: exploit commutativity and multiply val1 in. */
1295 if (src->regoff == iptr->dst->regoff) {
1296 x86_64_imull_reg_reg(src->prev->regoff, iptr->dst->regoff);
1299 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1300 x86_64_imull_reg_reg(src->regoff, iptr->dst->regoff);
/* ICMD_IMULCONST: 32-bit multiply by immediate using the three-operand
   imul form (imm, src, dst).  Multiplication by 2 is strength-reduced to
   an add of the value with itself. */
1306 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1307 /* val.i = constant */
1309 d = reg_of_var(iptr->dst, REG_NULL);
1310 if (iptr->dst->flags & INMEMORY) {
1311 if (src->flags & INMEMORY) {
1312 x86_64_imull_imm_membase_reg(iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1313 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1316 x86_64_imull_imm_reg_reg(iptr->val.i, src->regoff, REG_ITMP1);
1317 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1321 if (src->flags & INMEMORY) {
1322 x86_64_imull_imm_membase_reg(iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1325 if (iptr->val.i == 2) {
1326 M_INTMOVE(src->regoff, iptr->dst->regoff);
1327 x86_64_alul_reg_reg(X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1330 x86_64_imull_imm_reg_reg(iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
/* ICMD_LMUL: 64-bit multiply, mirroring ICMD_IMUL with quadword ops. */
1336 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1338 d = reg_of_var(iptr->dst, REG_NULL);
1339 if (iptr->dst->flags & INMEMORY) {
1340 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1341 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1342 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1343 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1345 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1346 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1347 x86_64_imul_reg_reg(src->prev->regoff, REG_ITMP1);
1348 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1350 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1351 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1352 x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1353 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1356 x86_64_mov_reg_reg(src->prev->regoff, REG_ITMP1);
1357 x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1358 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1362 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1363 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1364 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1366 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1367 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1368 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1370 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1371 M_INTMOVE(src->regoff, iptr->dst->regoff);
1372 x86_64_imul_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1375 if (src->regoff == iptr->dst->regoff) {
1376 x86_64_imul_reg_reg(src->prev->regoff, iptr->dst->regoff);
1379 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1380 x86_64_imul_reg_reg(src->regoff, iptr->dst->regoff);
/* ICMD_LMULCONST: 64-bit multiply by a long immediate.  The imm form of
   imul only takes a 32-bit immediate, so constants outside imm32 range
   are first materialized in a register with mov_imm_reg. */
1386 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1387 /* val.l = constant */
1389 d = reg_of_var(iptr->dst, REG_NULL);
1390 if (iptr->dst->flags & INMEMORY) {
1391 if (src->flags & INMEMORY) {
1392 if (x86_64_is_imm32(iptr->val.l)) {
1393 x86_64_imul_imm_membase_reg(iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1396 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1397 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1399 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1402 if (x86_64_is_imm32(iptr->val.l)) {
1403 x86_64_imul_imm_reg_reg(iptr->val.l, src->regoff, REG_ITMP1);
1406 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1407 x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1409 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1413 if (src->flags & INMEMORY) {
1414 if (x86_64_is_imm32(iptr->val.l)) {
1415 x86_64_imul_imm_membase_reg(iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1418 x86_64_mov_imm_reg(iptr->val.l, iptr->dst->regoff);
1419 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1423 /* should match in many cases */
1424 if (iptr->val.l == 2) {
1425 M_INTMOVE(src->regoff, iptr->dst->regoff);
1426 x86_64_alul_reg_reg(X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1429 if (x86_64_is_imm32(iptr->val.l)) {
1430 x86_64_imul_imm_reg_reg(iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1433 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1434 M_INTMOVE(src->regoff, iptr->dst->regoff);
1435 x86_64_imul_reg_reg(REG_ITMP1, iptr->dst->regoff);
/* ICMD_IDIV: 32-bit division.  idiv requires the dividend in RAX (with
   sign extension into RDX); INT_MIN / -1 would raise #DE, so the code
   detects that combination up front and skips the idiv, leaving INT_MIN
   in RAX as the JVM spec requires.  NOTE(review): the sign-extension of
   RAX into RDX (cltd) is presumably on a line elided from this listing —
   confirm against the full file. */
1442 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1444 d = reg_of_var(iptr->dst, REG_NULL);
1445 if (src->prev->flags & INMEMORY) {
1446 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, RAX);
1449 M_INTMOVE(src->prev->regoff, RAX);
1452 if (src->flags & INMEMORY) {
1453 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1456 M_INTMOVE(src->regoff, REG_ITMP3);
/* The jcc skip distances are hand-counted instruction byte sizes of the
   two following instructions — fragile, keep in sync with the emitters. */
1460 x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1461 x86_64_jcc(X86_64_CC_NE, 4 + 6);
1462 x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1463 x86_64_jcc(X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1465 x86_64_mov_reg_reg(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1467 x86_64_idivl_reg(REG_ITMP3);
1469 if (iptr->dst->flags & INMEMORY) {
1470 x86_64_mov_reg_membase(RAX, REG_SP, iptr->dst->regoff * 8);
1471 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1474 M_INTMOVE(RAX, iptr->dst->regoff);
/* Only restore RDX when the result itself does not live there. */
1476 if (iptr->dst->regoff != RDX) {
1477 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
/* ICMD_IREM: same setup as IDIV, but the remainder is taken from RDX.
   For the INT_MIN % -1 corner case RDX is zeroed, giving remainder 0. */
1482 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1484 d = reg_of_var(iptr->dst, REG_NULL);
1485 if (src->prev->flags & INMEMORY) {
1486 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, RAX);
1489 M_INTMOVE(src->prev->regoff, RAX);
1492 if (src->flags & INMEMORY) {
1493 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1496 M_INTMOVE(src->regoff, REG_ITMP3);
1500 x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1501 x86_64_jcc(X86_64_CC_NE, 2 + 4 + 6);
1502 x86_64_alul_reg_reg(X86_64_XOR, RDX, RDX); /* 2 bytes */
1503 x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1504 x86_64_jcc(X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1506 x86_64_mov_reg_reg(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1508 x86_64_idivl_reg(REG_ITMP3);
1510 if (iptr->dst->flags & INMEMORY) {
1511 x86_64_mov_reg_membase(RDX, REG_SP, iptr->dst->regoff * 8);
1512 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1515 M_INTMOVE(RDX, iptr->dst->regoff);
1517 if (iptr->dst->regoff != RDX) {
1518 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
/* ICMD_IDIVPOW2: signed division by 2^val.i via arithmetic shift.  The
   cmp/lea/cmov sequence biases negative dividends by (2^n - 1) so the
   sar rounds toward zero, matching Java semantics. */
1523 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1524 /* val.i = constant */
1526 var_to_reg_int(s1, src, REG_ITMP1);
1527 d = reg_of_var(iptr->dst, REG_ITMP3);
1528 M_INTMOVE(s1, REG_ITMP1);
1529 x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1530 x86_64_leal_membase_reg(REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1531 x86_64_cmovccl_reg_reg(X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1532 x86_64_shiftl_imm_reg(X86_64_SAR, iptr->val.i, REG_ITMP1);
1533 x86_64_mov_reg_reg(REG_ITMP1, d);
1534 store_reg_to_var_int(iptr->dst, d);
/* ICMD_IREMPOW2: remainder for a power-of-two divisor; here val.i is
   expected to already be the mask (2^n - 1).  The cmov selects a biased
   base for negative values so the masked difference keeps the sign of
   the dividend, per Java irem semantics. */
1537 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1538 /* val.i = constant */
1540 var_to_reg_int(s1, src, REG_ITMP1);
1541 d = reg_of_var(iptr->dst, REG_ITMP3);
1542 M_INTMOVE(s1, REG_ITMP1);
1543 x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1544 x86_64_leal_membase_reg(REG_ITMP1, iptr->val.i, REG_ITMP2);
1545 x86_64_cmovccl_reg_reg(X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1546 x86_64_alul_imm_reg(X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1547 x86_64_alul_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
1548 x86_64_mov_reg_reg(REG_ITMP1, d);
1549 store_reg_to_var_int(iptr->dst, d);
/* ICMD_LDIV: 64-bit division.  Mirrors ICMD_IDIV but the LONG_MIN
   sentinel does not fit an imm32, so it is materialized in REG_ITMP2
   for the corner-case compare.  NOTE(review): the dividend appears to be
   staged in REG_ITMP1 here rather than RAX; the move into RAX and the
   cqto sign-extension are presumably on lines elided from this listing —
   confirm against the full file. */
1553 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1555 d = reg_of_var(iptr->dst, REG_NULL);
1556 if (src->prev->flags & INMEMORY) {
1557 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1560 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1563 if (src->flags & INMEMORY) {
1564 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1567 M_INTMOVE(src->regoff, REG_ITMP3);
1571 x86_64_mov_imm_reg(0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1572 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
1573 x86_64_jcc(X86_64_CC_NE, 4 + 6);
1574 x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1575 x86_64_jcc(X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1577 x86_64_mov_reg_reg(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1579 x86_64_idiv_reg(REG_ITMP3);
1581 if (iptr->dst->flags & INMEMORY) {
1582 x86_64_mov_reg_membase(RAX, REG_SP, iptr->dst->regoff * 8);
1583 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1586 M_INTMOVE(RAX, iptr->dst->regoff);
1588 if (iptr->dst->regoff != RDX) {
1589 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
/* ICMD_LREM: 64-bit remainder; result comes from RDX.  RDX is zeroed on
   the LONG_MIN % -1 corner case so the remainder is 0. */
1594 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1596 d = reg_of_var(iptr->dst, REG_NULL);
1597 if (src->prev->flags & INMEMORY) {
1598 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1601 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1604 if (src->flags & INMEMORY) {
1605 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1608 M_INTMOVE(src->regoff, REG_ITMP3);
1612 x86_64_mov_imm_reg(0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1613 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
1614 x86_64_jcc(X86_64_CC_NE, 2 + 4 + 6);
1615 x86_64_alul_reg_reg(X86_64_XOR, RDX, RDX); /* 2 bytes */
1616 x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1617 x86_64_jcc(X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1619 x86_64_mov_reg_reg(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1621 x86_64_idiv_reg(REG_ITMP3);
1623 if (iptr->dst->flags & INMEMORY) {
1624 x86_64_mov_reg_membase(RDX, REG_SP, iptr->dst->regoff * 8);
1625 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1628 M_INTMOVE(RDX, iptr->dst->regoff);
1630 if (iptr->dst->regoff != RDX) {
1631 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
/* ICMD_LDIVPOW2: signed 64-bit division by 2^val.i via sar with the
   same rounding bias as IDIVPOW2.
   NOTE(review): `(1 << iptr->val.i)` shifts a 32-bit int even though
   this is the long variant — for val.i >= 31 this overflows/is UB.
   Presumably it should be `((s8) 1 << iptr->val.i)`; confirm and fix in
   the full file. */
1636 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1637 /* val.i = constant */
1639 var_to_reg_int(s1, src, REG_ITMP1)
1640 d = reg_of_var(iptr->dst, REG_ITMP3);
1641 M_INTMOVE(s1, REG_ITMP1);
1642 x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1643 x86_64_lea_membase_reg(REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1644 x86_64_cmovcc_reg_reg(X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1645 x86_64_shift_imm_reg(X86_64_SAR, iptr->val.i, REG_ITMP1);
1646 x86_64_mov_reg_reg(REG_ITMP1, d);
1647 store_reg_to_var_int(iptr->dst, d);
/* ICMD_LREMPOW2: 64-bit remainder for a power-of-two divisor.
   NOTE(review): the header comment says the constant is val.l, but the
   code reads iptr->val.i (and the lea displacement/AND immediate are
   32-bit) — masks wider than 32 bits would be truncated.  Verify
   against the instruction builder that only small masks reach here. */
1650 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1651 /* val.l = constant */
1653 var_to_reg_int(s1, src, REG_ITMP1);
1654 d = reg_of_var(iptr->dst, REG_ITMP3);
1655 M_INTMOVE(s1, REG_ITMP1);
1656 x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1657 x86_64_lea_membase_reg(REG_ITMP1, iptr->val.i, REG_ITMP2);
1658 x86_64_cmovcc_reg_reg(X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1659 x86_64_alu_imm_reg(X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1660 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
1661 x86_64_mov_reg_reg(REG_ITMP1, d);
1662 store_reg_to_var_int(iptr->dst, d);
/* Shift and bitwise-logic ICMDs.  All of these delegate to shared
   emitter helpers: x86_64_emit_ishift / _lshift for variable shift
   counts (int vs. long widths), x86_64_emit_ishiftconst / _lshiftconst
   for immediate counts, and x86_64_emit_ialu(const) / _lalu(const) for
   AND/OR/XOR.  SHL/SAR/SHR map to Java's <<, >> and >>> respectively. */
1665 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1667 d = reg_of_var(iptr->dst, REG_NULL);
1668 x86_64_emit_ishift(X86_64_SHL, src, iptr);
1671 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1672 /* val.i = constant */
1674 d = reg_of_var(iptr->dst, REG_NULL);
1675 x86_64_emit_ishiftconst(X86_64_SHL, src, iptr);
1678 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1680 d = reg_of_var(iptr->dst, REG_NULL);
1681 x86_64_emit_ishift(X86_64_SAR, src, iptr);
1684 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1685 /* val.i = constant */
1687 d = reg_of_var(iptr->dst, REG_NULL);
1688 x86_64_emit_ishiftconst(X86_64_SAR, src, iptr);
1691 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1693 d = reg_of_var(iptr->dst, REG_NULL);
1694 x86_64_emit_ishift(X86_64_SHR, src, iptr);
1697 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1698 /* val.i = constant */
1700 d = reg_of_var(iptr->dst, REG_NULL);
1701 x86_64_emit_ishiftconst(X86_64_SHR, src, iptr);
1704 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1706 d = reg_of_var(iptr->dst, REG_NULL);
1707 x86_64_emit_lshift(X86_64_SHL, src, iptr);
1710 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1711 /* val.i = constant */
1713 d = reg_of_var(iptr->dst, REG_NULL);
1714 x86_64_emit_lshiftconst(X86_64_SHL, src, iptr);
1717 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1719 d = reg_of_var(iptr->dst, REG_NULL);
1720 x86_64_emit_lshift(X86_64_SAR, src, iptr);
1723 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1724 /* val.i = constant */
1726 d = reg_of_var(iptr->dst, REG_NULL);
1727 x86_64_emit_lshiftconst(X86_64_SAR, src, iptr);
1730 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1732 d = reg_of_var(iptr->dst, REG_NULL);
1733 x86_64_emit_lshift(X86_64_SHR, src, iptr);
1736 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1737 /* val.l = constant */
1739 d = reg_of_var(iptr->dst, REG_NULL);
1740 x86_64_emit_lshiftconst(X86_64_SHR, src, iptr);
1743 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1745 d = reg_of_var(iptr->dst, REG_NULL);
1746 x86_64_emit_ialu(X86_64_AND, src, iptr);
1749 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1750 /* val.i = constant */
1752 d = reg_of_var(iptr->dst, REG_NULL);
1753 x86_64_emit_ialuconst(X86_64_AND, src, iptr);
1756 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1758 d = reg_of_var(iptr->dst, REG_NULL);
1759 x86_64_emit_lalu(X86_64_AND, src, iptr);
1762 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1763 /* val.l = constant */
1765 d = reg_of_var(iptr->dst, REG_NULL);
1766 x86_64_emit_laluconst(X86_64_AND, src, iptr);
1769 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1771 d = reg_of_var(iptr->dst, REG_NULL);
1772 x86_64_emit_ialu(X86_64_OR, src, iptr);
1775 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1776 /* val.i = constant */
1778 d = reg_of_var(iptr->dst, REG_NULL);
1779 x86_64_emit_ialuconst(X86_64_OR, src, iptr);
1782 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1784 d = reg_of_var(iptr->dst, REG_NULL);
1785 x86_64_emit_lalu(X86_64_OR, src, iptr);
1788 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1789 /* val.l = constant */
1791 d = reg_of_var(iptr->dst, REG_NULL);
1792 x86_64_emit_laluconst(X86_64_OR, src, iptr);
1795 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1797 d = reg_of_var(iptr->dst, REG_NULL);
1798 x86_64_emit_ialu(X86_64_XOR, src, iptr);
1801 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1802 /* val.i = constant */
1804 d = reg_of_var(iptr->dst, REG_NULL);
1805 x86_64_emit_ialuconst(X86_64_XOR, src, iptr);
1808 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1810 d = reg_of_var(iptr->dst, REG_NULL);
1811 x86_64_emit_lalu(X86_64_XOR, src, iptr);
1814 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1815 /* val.l = constant */
1817 d = reg_of_var(iptr->dst, REG_NULL);
1818 x86_64_emit_laluconst(X86_64_XOR, src, iptr);
/* ICMD_IINC: in-place increment of a local variable by an immediate.
   inc/dec are used for the +/-1 fast paths, otherwise an add-immediate.
   NOTE(review): `d` is used as the local's stack slot / register here but
   no assignment from `var->regoff` is visible — presumably it happens on
   a line elided from this listing; confirm against the full file. */
1822 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1823 /* op1 = variable, val.i = constant */
1825 var = &(locals[iptr->op1][TYPE_INT]);
1827 if (var->flags & INMEMORY) {
1828 if (iptr->val.i == 1) {
1829 x86_64_incl_membase(REG_SP, d * 8);
1831 } else if (iptr->val.i == -1) {
1832 x86_64_decl_membase(REG_SP, d * 8);
1835 x86_64_alul_imm_membase(X86_64_ADD, iptr->val.i, REG_SP, d * 8);
/* Register-resident local: the inc/dec emit calls for the +/-1 paths
   are elided from this listing. */
1839 if (iptr->val.i == 1) {
1842 } else if (iptr->val.i == -1) {
1846 x86_64_alul_imm_reg(X86_64_ADD, iptr->val.i, d);
1852 /* floating operations ************************************************/
/* ICMD_FNEG: negate a float by XORing its sign bit.  The 0x80000000 mask
   is placed in the data segment and loaded RIP-relative; the displacement
   `-(((s8) mcodeptr + 9) - (s8) mcodebase) + a` points back from the end
   of the 9-byte load instruction to dseg entry a.
   NOTE(review): no copy of s1 into d is visible before the xorps —
   presumably an M_FLTMOVE on an elided line; confirm. */
1854 case ICMD_FNEG: /* ..., value ==> ..., - value */
1856 var_to_reg_flt(s1, src, REG_FTMP1);
1857 d = reg_of_var(iptr->dst, REG_FTMP3);
1858 a = dseg_adds4(0x80000000);
1860 x86_64_movss_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, REG_FTMP2);
1861 x86_64_xorps_reg_reg(REG_FTMP2, d);
1862 store_reg_to_var_flt(iptr->dst, d);
/* ICMD_DNEG: same trick with the 64-bit sign mask and xorpd. */
1865 case ICMD_DNEG: /* ..., value ==> ..., - value */
1867 var_to_reg_flt(s1, src, REG_FTMP1);
1868 d = reg_of_var(iptr->dst, REG_FTMP3);
1869 a = dseg_adds8(0x8000000000000000)
1871 x86_64_movd_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, REG_FTMP2);
1872 x86_64_xorpd_reg_reg(REG_FTMP2, d);
1873 store_reg_to_var_flt(iptr->dst, d);
/* FADD/DADD: addition is commutative, so whichever source already sits
   in d is reused; the remaining branch (s1 into d first) is partially
   elided from this listing. */
1876 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1878 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1879 var_to_reg_flt(s2, src, REG_FTMP2);
1880 d = reg_of_var(iptr->dst, REG_FTMP3);
1882 x86_64_addss_reg_reg(s2, d);
1883 } else if (s2 == d) {
1884 x86_64_addss_reg_reg(s1, d);
1887 x86_64_addss_reg_reg(s2, d);
1889 store_reg_to_var_flt(iptr->dst, d);
1892 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1894 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1895 var_to_reg_flt(s2, src, REG_FTMP2);
1896 d = reg_of_var(iptr->dst, REG_FTMP3);
1898 x86_64_addsd_reg_reg(s2, d);
1899 } else if (s2 == d) {
1900 x86_64_addsd_reg_reg(s1, d);
1903 x86_64_addsd_reg_reg(s2, d);
1905 store_reg_to_var_flt(iptr->dst, d);
/* FSUB/DSUB: subtraction is not commutative, so when d would alias s2
   the subtrahend is first saved in REG_FTMP2. */
1908 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1910 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1911 var_to_reg_flt(s2, src, REG_FTMP2);
1912 d = reg_of_var(iptr->dst, REG_FTMP3);
1914 M_FLTMOVE(s2, REG_FTMP2);
1918 x86_64_subss_reg_reg(s2, d);
1919 store_reg_to_var_flt(iptr->dst, d);
1922 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1924 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1925 var_to_reg_flt(s2, src, REG_FTMP2);
1926 d = reg_of_var(iptr->dst, REG_FTMP3);
1928 M_FLTMOVE(s2, REG_FTMP2);
1932 x86_64_subsd_reg_reg(s2, d);
1933 store_reg_to_var_flt(iptr->dst, d);
/* FMUL/DMUL: commutative, same aliasing strategy as FADD/DADD. */
1936 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1938 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1939 var_to_reg_flt(s2, src, REG_FTMP2);
1940 d = reg_of_var(iptr->dst, REG_FTMP3);
1942 x86_64_mulss_reg_reg(s2, d);
1943 } else if (s2 == d) {
1944 x86_64_mulss_reg_reg(s1, d);
1947 x86_64_mulss_reg_reg(s2, d);
1949 store_reg_to_var_flt(iptr->dst, d);
1952 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1954 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1955 var_to_reg_flt(s2, src, REG_FTMP2);
1956 d = reg_of_var(iptr->dst, REG_FTMP3);
1958 x86_64_mulsd_reg_reg(s2, d);
1959 } else if (s2 == d) {
1960 x86_64_mulsd_reg_reg(s1, d);
1963 x86_64_mulsd_reg_reg(s2, d);
1965 store_reg_to_var_flt(iptr->dst, d);
/* FDIV/DDIV: like FSUB/DSUB, the divisor is protected when it aliases
   the destination. */
1968 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1970 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1971 var_to_reg_flt(s2, src, REG_FTMP2);
1972 d = reg_of_var(iptr->dst, REG_FTMP3);
1974 M_FLTMOVE(s2, REG_FTMP2);
1978 x86_64_divss_reg_reg(s2, d);
1979 store_reg_to_var_flt(iptr->dst, d);
1982 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1984 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1985 var_to_reg_flt(s2, src, REG_FTMP2);
1986 d = reg_of_var(iptr->dst, REG_FTMP3);
1988 M_FLTMOVE(s2, REG_FTMP2);
1992 x86_64_divsd_reg_reg(s2, d);
1993 store_reg_to_var_flt(iptr->dst, d);
/* Int/long -> float/double conversions: single cvtsi2ss/sd instruction
   (the ...q variants treat the source as 64-bit). */
1996 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1998 var_to_reg_int(s1, src, REG_ITMP1);
1999 d = reg_of_var(iptr->dst, REG_FTMP1);
2000 x86_64_cvtsi2ss_reg_reg(s1, d);
2001 store_reg_to_var_flt(iptr->dst, d);
2004 case ICMD_I2D: /* ..., value ==> ..., (double) value */
2006 var_to_reg_int(s1, src, REG_ITMP1);
2007 d = reg_of_var(iptr->dst, REG_FTMP1);
2008 x86_64_cvtsi2sd_reg_reg(s1, d);
2009 store_reg_to_var_flt(iptr->dst, d);
2012 case ICMD_L2F: /* ..., value ==> ..., (float) value */
2014 var_to_reg_int(s1, src, REG_ITMP1);
2015 d = reg_of_var(iptr->dst, REG_FTMP1);
2016 x86_64_cvtsi2ssq_reg_reg(s1, d);
2017 store_reg_to_var_flt(iptr->dst, d);
2020 case ICMD_L2D: /* ..., value ==> ..., (double) value */
2022 var_to_reg_int(s1, src, REG_ITMP1);
2023 d = reg_of_var(iptr->dst, REG_FTMP1);
2024 x86_64_cvtsi2sdq_reg_reg(s1, d);
2025 store_reg_to_var_flt(iptr->dst, d);
/* Float/double -> int/long: cvttss2si/cvttsd2si truncate toward zero
   but return the "integer indefinite" value (0x80000000 /
   0x8000000000000000) on overflow/NaN.  When that sentinel comes back,
   the code falls through into a call to the asm_builtin_* helper which
   computes the JVM-specified saturating result.  The jcc distance `a` is
   the hand-counted byte size of the fallback sequence (optional FLTMOVE
   + 10-byte mov imm64 + 3-byte call + optional 3-byte move) — fragile,
   keep in sync with the emitters. */
2028 case ICMD_F2I: /* ..., value ==> ..., (int) value */
2030 var_to_reg_flt(s1, src, REG_FTMP1);
2031 d = reg_of_var(iptr->dst, REG_ITMP1);
2032 x86_64_cvttss2si_reg_reg(s1, d);
2033 x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, d); /* corner cases */
2034 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2035 x86_64_jcc(X86_64_CC_NE, a);
2036 M_FLTMOVE(s1, REG_FTMP1);
2037 x86_64_mov_imm_reg((s8) asm_builtin_f2i, REG_ITMP2);
2038 x86_64_call_reg(REG_ITMP2);
2039 M_INTMOVE(REG_RESULT, d);
2040 store_reg_to_var_int(iptr->dst, d);
2043 case ICMD_D2I: /* ..., value ==> ..., (int) value */
2045 var_to_reg_flt(s1, src, REG_FTMP1);
2046 d = reg_of_var(iptr->dst, REG_ITMP1);
2047 x86_64_cvttsd2si_reg_reg(s1, d);
2048 x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, d); /* corner cases */
2049 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2050 x86_64_jcc(X86_64_CC_NE, a);
2051 M_FLTMOVE(s1, REG_FTMP1);
2052 x86_64_mov_imm_reg((s8) asm_builtin_d2i, REG_ITMP2);
2053 x86_64_call_reg(REG_ITMP2);
2054 M_INTMOVE(REG_RESULT, d);
2055 store_reg_to_var_int(iptr->dst, d);
2058 case ICMD_F2L: /* ..., value ==> ..., (long) value */
2060 var_to_reg_flt(s1, src, REG_FTMP1);
2061 d = reg_of_var(iptr->dst, REG_ITMP1);
2062 x86_64_cvttss2siq_reg_reg(s1, d);
2063 x86_64_mov_imm_reg(0x8000000000000000, REG_ITMP2);
2064 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, d); /* corner cases */
2065 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2066 x86_64_jcc(X86_64_CC_NE, a);
2067 M_FLTMOVE(s1, REG_FTMP1);
2068 x86_64_mov_imm_reg((s8) asm_builtin_f2l, REG_ITMP2);
2069 x86_64_call_reg(REG_ITMP2);
2070 M_INTMOVE(REG_RESULT, d);
2071 store_reg_to_var_int(iptr->dst, d);
2074 case ICMD_D2L: /* ..., value ==> ..., (long) value */
2076 var_to_reg_flt(s1, src, REG_FTMP1);
2077 d = reg_of_var(iptr->dst, REG_ITMP1);
2078 x86_64_cvttsd2siq_reg_reg(s1, d);
2079 x86_64_mov_imm_reg(0x8000000000000000, REG_ITMP2);
2080 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, d); /* corner cases */
2081 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2082 x86_64_jcc(X86_64_CC_NE, a);
2083 M_FLTMOVE(s1, REG_FTMP1);
2084 x86_64_mov_imm_reg((s8) asm_builtin_d2l, REG_ITMP2);
2085 x86_64_call_reg(REG_ITMP2);
2086 M_INTMOVE(REG_RESULT, d);
2087 store_reg_to_var_int(iptr->dst, d);
/* Float <-> double widening/narrowing: single cvt instruction. */
2090 case ICMD_F2D: /* ..., value ==> ..., (double) value */
2092 var_to_reg_flt(s1, src, REG_FTMP1);
2093 d = reg_of_var(iptr->dst, REG_FTMP3);
2094 x86_64_cvtss2sd_reg_reg(s1, d);
2095 store_reg_to_var_flt(iptr->dst, d);
2098 case ICMD_D2F: /* ..., value ==> ..., (float) value */
2100 var_to_reg_flt(s1, src, REG_FTMP1);
2101 d = reg_of_var(iptr->dst, REG_FTMP3);
2102 x86_64_cvtsd2ss_reg_reg(s1, d);
2103 store_reg_to_var_flt(iptr->dst, d);
/* Floating-point compares.  Branch-free sequence: d is zeroed, then
   ucomiss/ucomisd sets the flags and cmov picks 1 (CC_B) or -1 (CC_A);
   CC_P (unordered, i.e. a NaN operand) selects the value the respective
   ICMD's NaN rule requires.  The result convention used here is the one
   stated in the comments below ("== => 0, < => 1, > => -1"); see the
   ICMD definitions for how this maps onto the bytecode's fcmpl/fcmpg. */
2106 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
2107 /* == => 0, < => 1, > => -1 */
2109 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2110 var_to_reg_flt(s2, src, REG_FTMP2);
2111 d = reg_of_var(iptr->dst, REG_ITMP3);
2112 x86_64_alu_reg_reg(X86_64_XOR, d, d);
2113 x86_64_mov_imm_reg(1, REG_ITMP1);
2114 x86_64_mov_imm_reg(-1, REG_ITMP2);
2115 x86_64_ucomiss_reg_reg(s1, s2);
2116 x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2117 x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2118 x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
2119 store_reg_to_var_int(iptr->dst, d);
2122 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
2123 /* == => 0, < => 1, > => -1 */
2125 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2126 var_to_reg_flt(s2, src, REG_FTMP2);
2127 d = reg_of_var(iptr->dst, REG_ITMP3);
2128 x86_64_alu_reg_reg(X86_64_XOR, d, d);
2129 x86_64_mov_imm_reg(1, REG_ITMP1);
2130 x86_64_mov_imm_reg(-1, REG_ITMP2);
2131 x86_64_ucomiss_reg_reg(s1, s2);
2132 x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2133 x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2134 x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
2135 store_reg_to_var_int(iptr->dst, d);
/* DCMPL/DCMPG: identical to the float versions with ucomisd. */
2138 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
2139 /* == => 0, < => 1, > => -1 */
2141 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2142 var_to_reg_flt(s2, src, REG_FTMP2);
2143 d = reg_of_var(iptr->dst, REG_ITMP3);
2144 x86_64_alu_reg_reg(X86_64_XOR, d, d);
2145 x86_64_mov_imm_reg(1, REG_ITMP1);
2146 x86_64_mov_imm_reg(-1, REG_ITMP2);
2147 x86_64_ucomisd_reg_reg(s1, s2);
2148 x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2149 x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2150 x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
2151 store_reg_to_var_int(iptr->dst, d);
2154 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
2155 /* == => 0, < => 1, > => -1 */
2157 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2158 var_to_reg_flt(s2, src, REG_FTMP2);
2159 d = reg_of_var(iptr->dst, REG_ITMP3);
2160 x86_64_alu_reg_reg(X86_64_XOR, d, d);
2161 x86_64_mov_imm_reg(1, REG_ITMP1);
2162 x86_64_mov_imm_reg(-1, REG_ITMP2);
2163 x86_64_ucomisd_reg_reg(s1, s2);
2164 x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2165 x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2166 x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
2167 store_reg_to_var_int(iptr->dst, d);
2171 /* memory operations **************************************************/
2173 #define gen_bound_check \
2174 if (checkbounds) { \
2175 x86_64_alul_membase_reg(X86_64_CMP, s1, OFFSET(java_arrayheader, size), s2); \
2176 x86_64_jcc(X86_64_CC_AE, 0); \
2177 codegen_addxboundrefs(mcodeptr, s2); \
2180 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
2182 var_to_reg_int(s1, src, REG_ITMP1);
2183 d = reg_of_var(iptr->dst, REG_ITMP3);
2184 gen_nullptr_check(s1);
2185 x86_64_movl_membase_reg(s1, OFFSET(java_arrayheader, size), d);
2186 store_reg_to_var_int(iptr->dst, d);
2189 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2191 var_to_reg_int(s1, src->prev, REG_ITMP1);
2192 var_to_reg_int(s2, src, REG_ITMP2);
2193 d = reg_of_var(iptr->dst, REG_ITMP3);
2194 if (iptr->op1 == 0) {
2195 gen_nullptr_check(s1);
2198 x86_64_mov_memindex_reg(OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2199 store_reg_to_var_int(iptr->dst, d);
2202 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2204 var_to_reg_int(s1, src->prev, REG_ITMP1);
2205 var_to_reg_int(s2, src, REG_ITMP2);
2206 d = reg_of_var(iptr->dst, REG_ITMP3);
2207 if (iptr->op1 == 0) {
2208 gen_nullptr_check(s1);
2211 x86_64_mov_memindex_reg(OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2212 store_reg_to_var_int(iptr->dst, d);
/* Array load ICMDs: ..., arrayref, index ==> ..., value.
 * Pattern: arrayref -> s1, index -> s2, destination register -> d;
 * when iptr->op1 == 0 a null-pointer check is emitted, then a scaled
 * memindex load from the array's data area.  The last argument of the
 * memindex emitters is log2(element size): 3 = 8 bytes, 2 = 4 bytes,
 * 1 = 2 bytes, 0 = 1 byte.
 * NOTE(review): this listing is elided (embedded line numbers jump), so
 * the array-bound-check code that presumably follows the nullptr check
 * is not visible here — confirm against the full file. */
2215 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2217 var_to_reg_int(s1, src->prev, REG_ITMP1);
2218 var_to_reg_int(s2, src, REG_ITMP2);
2219 d = reg_of_var(iptr->dst, REG_ITMP3);
2220 if (iptr->op1 == 0) {
2221 gen_nullptr_check(s1);
/* 32-bit load, scale 2 (4-byte int elements) */
2224 x86_64_movl_memindex_reg(OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2225 store_reg_to_var_int(iptr->dst, d);
2228 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2230 var_to_reg_int(s1, src->prev, REG_ITMP1);
2231 var_to_reg_int(s2, src, REG_ITMP2);
/* float result goes to an FP temp register */
2232 d = reg_of_var(iptr->dst, REG_FTMP3);
2233 if (iptr->op1 == 0) {
2234 gen_nullptr_check(s1);
2237 x86_64_movss_memindex_reg(OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2238 store_reg_to_var_flt(iptr->dst, d);
2241 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2243 var_to_reg_int(s1, src->prev, REG_ITMP1);
2244 var_to_reg_int(s2, src, REG_ITMP2);
2245 d = reg_of_var(iptr->dst, REG_FTMP3);
2246 if (iptr->op1 == 0) {
2247 gen_nullptr_check(s1);
/* scale 3: 8-byte double elements */
2250 x86_64_movsd_memindex_reg(OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2251 store_reg_to_var_flt(iptr->dst, d);
2254 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2256 var_to_reg_int(s1, src->prev, REG_ITMP1);
2257 var_to_reg_int(s2, src, REG_ITMP2);
2258 d = reg_of_var(iptr->dst, REG_ITMP3);
2259 if (iptr->op1 == 0) {
2260 gen_nullptr_check(s1);
/* char is unsigned 16-bit: zero-extending load (movzwq) */
2263 x86_64_movzwq_memindex_reg(OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2264 store_reg_to_var_int(iptr->dst, d);
2267 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2269 var_to_reg_int(s1, src->prev, REG_ITMP1);
2270 var_to_reg_int(s2, src, REG_ITMP2);
2271 d = reg_of_var(iptr->dst, REG_ITMP3);
2272 if (iptr->op1 == 0) {
2273 gen_nullptr_check(s1);
/* short is signed 16-bit: sign-extending load (movswq) */
2276 x86_64_movswq_memindex_reg(OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2277 store_reg_to_var_int(iptr->dst, d);
2280 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2282 var_to_reg_int(s1, src->prev, REG_ITMP1);
2283 var_to_reg_int(s2, src, REG_ITMP2);
2284 d = reg_of_var(iptr->dst, REG_ITMP3);
2285 if (iptr->op1 == 0) {
2286 gen_nullptr_check(s1);
/* byte is signed 8-bit: sign-extending load (movsbq), scale 0 */
2289 x86_64_movsbq_memindex_reg(OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2290 store_reg_to_var_int(iptr->dst, d);
/* Array store ICMDs: ..., arrayref, index, value ==> ... .
 * Pattern mirrors the loads: arrayref -> s1, index -> s2, value -> s3,
 * null-pointer check when iptr->op1 == 0, then a scaled memindex store.
 * NOTE(review): bound-check code is elided from this listing.
 * NOTE(review): AASTORE emits a plain pointer store with no visible
 * array-store type check (ArrayStoreException semantics) — verify
 * whether the verifier/stack analysis guarantees this is safe here. */
2294 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2296 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2297 var_to_reg_int(s2, src->prev, REG_ITMP2);
2298 if (iptr->op1 == 0) {
2299 gen_nullptr_check(s1);
2302 var_to_reg_int(s3, src, REG_ITMP3);
/* 8-byte reference store, scale 3 */
2303 x86_64_mov_reg_memindex(s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2306 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2308 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2309 var_to_reg_int(s2, src->prev, REG_ITMP2);
2310 if (iptr->op1 == 0) {
2311 gen_nullptr_check(s1);
2314 var_to_reg_int(s3, src, REG_ITMP3);
2315 x86_64_mov_reg_memindex(s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2318 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2320 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2321 var_to_reg_int(s2, src->prev, REG_ITMP2);
2322 if (iptr->op1 == 0) {
2323 gen_nullptr_check(s1);
2326 var_to_reg_int(s3, src, REG_ITMP3);
/* 32-bit store, scale 2 */
2327 x86_64_movl_reg_memindex(s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2330 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2332 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2333 var_to_reg_int(s2, src->prev, REG_ITMP2);
2334 if (iptr->op1 == 0) {
2335 gen_nullptr_check(s1);
/* value comes from an FP register for float/double stores */
2338 var_to_reg_flt(s3, src, REG_FTMP3);
2339 x86_64_movss_reg_memindex(s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2342 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2344 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2345 var_to_reg_int(s2, src->prev, REG_ITMP2);
2346 if (iptr->op1 == 0) {
2347 gen_nullptr_check(s1);
2350 var_to_reg_flt(s3, src, REG_FTMP3);
2351 x86_64_movsd_reg_memindex(s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2354 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2356 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2357 var_to_reg_int(s2, src->prev, REG_ITMP2);
2358 if (iptr->op1 == 0) {
2359 gen_nullptr_check(s1);
2362 var_to_reg_int(s3, src, REG_ITMP3);
/* 16-bit store; char and short share the same store width */
2363 x86_64_movw_reg_memindex(s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2366 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2368 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2369 var_to_reg_int(s2, src->prev, REG_ITMP2);
2370 if (iptr->op1 == 0) {
2371 gen_nullptr_check(s1);
2374 var_to_reg_int(s3, src, REG_ITMP3);
2375 x86_64_movw_reg_memindex(s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2378 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2380 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2381 var_to_reg_int(s2, src->prev, REG_ITMP2);
2382 if (iptr->op1 == 0) {
2383 gen_nullptr_check(s1);
2386 var_to_reg_int(s3, src, REG_ITMP3);
/* 8-bit store, scale 0 */
2387 x86_64_movb_reg_memindex(s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
/* Static field access.  Both cases first emit a lazy class-initialization
 * guard: if the field's class is not yet initialized, code is generated
 * that calls asm_check_clinit with the class in REG_ITMP1; that helper
 * runs <clinit> and patches the call site so the check runs only once.
 * The field's storage address is placed in the data segment and loaded
 * RIP-relative: the displacement is computed from the current code
 * position (mcodeptr + 7 accounts for the length of the mov instruction
 * itself — the distance from its start to the next instruction). */
2391 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2392 /* op1 = type, val.a = field address */
2394 /* if class isn't yet initialized, do it */
2395 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2396 /* call helper function which patches this code */
2397 x86_64_mov_imm_reg((s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2398 x86_64_mov_imm_reg((s8) asm_check_clinit, REG_ITMP2);
2399 x86_64_call_reg(REG_ITMP2);
2402 a = dseg_addaddress(&(((fieldinfo *) iptr->val.a)->value));
2403 /* x86_64_mov_imm_reg(0, REG_ITMP2); */
2404 /* dseg_adddata(mcodeptr); */
2405 /* x86_64_mov_membase_reg(REG_ITMP2, a, REG_ITMP2); */
/* RIP-relative load of the field address into REG_ITMP2 */
2406 x86_64_mov_membase_reg(RIP, -(((s8) mcodeptr + 7) - (s8) mcodebase) + a, REG_ITMP2);
/* dispatch on the field's JavaVM type (case labels elided in this
 * listing): 32-bit int, 64-bit long/ref, float, double */
2407 switch (iptr->op1) {
2409 var_to_reg_int(s2, src, REG_ITMP1);
2410 x86_64_movl_reg_membase(s2, REG_ITMP2, 0);
2414 var_to_reg_int(s2, src, REG_ITMP1);
2415 x86_64_mov_reg_membase(s2, REG_ITMP2, 0);
2418 var_to_reg_flt(s2, src, REG_FTMP1);
2419 x86_64_movss_reg_membase(s2, REG_ITMP2, 0);
2422 var_to_reg_flt(s2, src, REG_FTMP1);
2423 x86_64_movsd_reg_membase(s2, REG_ITMP2, 0);
2425 default: panic("internal error");
2429 case ICMD_GETSTATIC: /* ... ==> ..., value */
2430 /* op1 = type, val.a = field address */
2432 /* if class isn't yet initialized, do it */
2433 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2434 /* call helper function which patches this code */
2435 x86_64_mov_imm_reg((s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2436 x86_64_mov_imm_reg((s8) asm_check_clinit, REG_ITMP2);
2437 x86_64_call_reg(REG_ITMP2);
2440 a = dseg_addaddress(&(((fieldinfo *) iptr->val.a)->value));
2441 /* x86_64_mov_imm_reg(0, REG_ITMP2); */
2442 /* dseg_adddata(mcodeptr); */
2443 /* x86_64_mov_membase_reg(REG_ITMP2, a, REG_ITMP2); */
2444 x86_64_mov_membase_reg(RIP, -(((s8) mcodeptr + 7) - (s8) mcodebase) + a, REG_ITMP2);
2445 switch (iptr->op1) {
2447 d = reg_of_var(iptr->dst, REG_ITMP1);
2448 x86_64_movl_membase_reg(REG_ITMP2, 0, d);
2449 store_reg_to_var_int(iptr->dst, d);
2453 d = reg_of_var(iptr->dst, REG_ITMP1);
2454 x86_64_mov_membase_reg(REG_ITMP2, 0, d);
2455 store_reg_to_var_int(iptr->dst, d);
2458 d = reg_of_var(iptr->dst, REG_ITMP1);
2459 x86_64_movss_membase_reg(REG_ITMP2, 0, d);
2460 store_reg_to_var_flt(iptr->dst, d);
2463 d = reg_of_var(iptr->dst, REG_ITMP1);
2464 x86_64_movsd_membase_reg(REG_ITMP2, 0, d);
2465 store_reg_to_var_flt(iptr->dst, d);
2467 default: panic("internal error");
/* Instance field access: address = objectref + field offset.
 * PUTFIELD also carries the lazy <clinit> guard (same patching helper
 * as PUTSTATIC); GETFIELD does not — presumably because an instance
 * can only exist after its class was initialized (TODO confirm).
 * gen_nullptr_check is emitted on the objectref before each access.
 * Inner switch case labels are elided from this listing; the arms are,
 * in order: 32-bit int, 64-bit long/ref, float, double. */
2471 case ICMD_PUTFIELD: /* ..., value ==> ... */
2472 /* op1 = type, val.i = field offset */
2474 /* if class isn't yet initialized, do it */
2475 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2476 /* call helper function which patches this code */
2477 x86_64_mov_imm_reg((s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2478 x86_64_mov_imm_reg((s8) asm_check_clinit, REG_ITMP2);
2479 x86_64_call_reg(REG_ITMP2);
2482 a = ((fieldinfo *)(iptr->val.a))->offset;
2483 var_to_reg_int(s1, src->prev, REG_ITMP1);
2484 switch (iptr->op1) {
2486 var_to_reg_int(s2, src, REG_ITMP2);
2487 gen_nullptr_check(s1);
2488 x86_64_movl_reg_membase(s2, s1, a);
2492 var_to_reg_int(s2, src, REG_ITMP2);
2493 gen_nullptr_check(s1);
2494 x86_64_mov_reg_membase(s2, s1, a);
2497 var_to_reg_flt(s2, src, REG_FTMP2);
2498 gen_nullptr_check(s1);
2499 x86_64_movss_reg_membase(s2, s1, a);
2502 var_to_reg_flt(s2, src, REG_FTMP2);
2503 gen_nullptr_check(s1);
2504 x86_64_movsd_reg_membase(s2, s1, a);
2506 default: panic ("internal error");
2510 case ICMD_GETFIELD: /* ... ==> ..., value */
2511 /* op1 = type, val.i = field offset */
2513 a = ((fieldinfo *)(iptr->val.a))->offset;
2514 var_to_reg_int(s1, src, REG_ITMP1);
2515 switch (iptr->op1) {
2517 d = reg_of_var(iptr->dst, REG_ITMP1);
2518 gen_nullptr_check(s1);
2519 x86_64_movl_membase_reg(s1, a, d);
2520 store_reg_to_var_int(iptr->dst, d);
2524 d = reg_of_var(iptr->dst, REG_ITMP1);
2525 gen_nullptr_check(s1);
2526 x86_64_mov_membase_reg(s1, a, d);
2527 store_reg_to_var_int(iptr->dst, d);
2530 d = reg_of_var(iptr->dst, REG_FTMP1);
2531 gen_nullptr_check(s1);
2532 x86_64_movss_membase_reg(s1, a, d);
2533 store_reg_to_var_flt(iptr->dst, d);
2536 d = reg_of_var(iptr->dst, REG_FTMP1);
2537 gen_nullptr_check(s1);
2538 x86_64_movsd_membase_reg(s1, a, d);
2539 store_reg_to_var_flt(iptr->dst, d);
2541 default: panic ("internal error");
2546 /* branch operations **************************************************/
2548 /* #define ALIGNCODENOP {if((int)((long)mcodeptr&7)){M_NOP;}} */
/* code alignment is currently disabled: expands to nothing */
2549 #define ALIGNCODENOP do {} while (0)
/* ATHROW: the exception object moves to REG_ITMP1_XPTR; call_imm(0) is
 * a call to the next instruction, used only to push the current code
 * address, which is popped into REG_ITMP2_XPC as the "exception PC"
 * before jumping to the shared asm exception handler. */
2551 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2553 var_to_reg_int(s1, src, REG_ITMP1);
2554 M_INTMOVE(s1, REG_ITMP1_XPTR);
2556 x86_64_call_imm(0); /* passing exception pointer */
2557 x86_64_pop_reg(REG_ITMP2_XPC);
2559 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
2560 x86_64_jmp_reg(REG_ITMP3);
/* Unconditional branches: the jump instruction itself is elided from
 * this listing; codegen_addreference records the forward reference so
 * the displacement is patched once the target block's address is known. */
2564 case ICMD_GOTO: /* ... ==> ... */
2565 /* op1 = target JavaVM pc */
2568 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2572 case ICMD_JSR: /* ... ==> ... */
2573 /* op1 = target JavaVM pc */
2576 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2579 case ICMD_RET: /* ... ==> ... */
2580 /* op1 = local variable */
2582 var = &(locals[iptr->op1][TYPE_ADR]);
2583 var_to_reg_int(s1, var, REG_ITMP1);
/* Null tests: compare against 0 in memory, or test reg,reg when the
 * value already lives in a register (shorter encoding than cmp $0). */
2587 case ICMD_IFNULL: /* ..., value ==> ... */
2588 /* op1 = target JavaVM pc */
2590 if (src->flags & INMEMORY) {
2591 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
2594 x86_64_test_reg_reg(src->regoff, src->regoff);
/* jcc with displacement 0 is a placeholder, patched via the
 * reference recorded on the next line */
2596 x86_64_jcc(X86_64_CC_E, 0);
2597 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2600 case ICMD_IFNONNULL: /* ..., value ==> ... */
2601 /* op1 = target JavaVM pc */
2603 if (src->flags & INMEMORY) {
2604 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
2607 x86_64_test_reg_reg(src->regoff, src->regoff);
2609 x86_64_jcc(X86_64_CC_NE, 0);
2610 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
/* compare-against-constant branches, int (32-bit) flavour; the shared
 * emitter handles operand location and branch patching */
2613 case ICMD_IFEQ: /* ..., value ==> ... */
2614 /* op1 = target JavaVM pc, val.i = constant */
2616 x86_64_emit_ifcc(X86_64_CC_E, src, iptr);
2619 case ICMD_IFLT: /* ..., value ==> ... */
2620 /* op1 = target JavaVM pc, val.i = constant */
2622 x86_64_emit_ifcc(X86_64_CC_L, src, iptr);
2625 case ICMD_IFLE: /* ..., value ==> ... */
2626 /* op1 = target JavaVM pc, val.i = constant */
2628 x86_64_emit_ifcc(X86_64_CC_LE, src, iptr);
2631 case ICMD_IFNE: /* ..., value ==> ... */
2632 /* op1 = target JavaVM pc, val.i = constant */
2634 x86_64_emit_ifcc(X86_64_CC_NE, src, iptr);
2637 case ICMD_IFGT: /* ..., value ==> ... */
2638 /* op1 = target JavaVM pc, val.i = constant */
2640 x86_64_emit_ifcc(X86_64_CC_G, src, iptr);
2643 case ICMD_IFGE: /* ..., value ==> ... */
2644 /* op1 = target JavaVM pc, val.i = constant */
2646 x86_64_emit_ifcc(X86_64_CC_GE, src, iptr);
/* same, long (64-bit) flavour */
2649 case ICMD_IF_LEQ: /* ..., value ==> ... */
2650 /* op1 = target JavaVM pc, val.l = constant */
2652 x86_64_emit_if_lcc(X86_64_CC_E, src, iptr);
2655 case ICMD_IF_LLT: /* ..., value ==> ... */
2656 /* op1 = target JavaVM pc, val.l = constant */
2658 x86_64_emit_if_lcc(X86_64_CC_L, src, iptr);
2661 case ICMD_IF_LLE: /* ..., value ==> ... */
2662 /* op1 = target JavaVM pc, val.l = constant */
2664 x86_64_emit_if_lcc(X86_64_CC_LE, src, iptr);
2667 case ICMD_IF_LNE: /* ..., value ==> ... */
2668 /* op1 = target JavaVM pc, val.l = constant */
2670 x86_64_emit_if_lcc(X86_64_CC_NE, src, iptr);
2673 case ICMD_IF_LGT: /* ..., value ==> ... */
2674 /* op1 = target JavaVM pc, val.l = constant */
2676 x86_64_emit_if_lcc(X86_64_CC_G, src, iptr);
2679 case ICMD_IF_LGE: /* ..., value ==> ... */
2680 /* op1 = target JavaVM pc, val.l = constant */
2682 x86_64_emit_if_lcc(X86_64_CC_GE, src, iptr);
/* value-vs-value compare branches; reference (ACMP) equality shares
 * the 64-bit comparison path with long compares */
2685 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2686 /* op1 = target JavaVM pc */
2688 x86_64_emit_if_icmpcc(X86_64_CC_E, src, iptr);
2691 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2692 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2694 x86_64_emit_if_lcmpcc(X86_64_CC_E, src, iptr);
2697 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2698 /* op1 = target JavaVM pc */
2700 x86_64_emit_if_icmpcc(X86_64_CC_NE, src, iptr);
2703 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2704 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2706 x86_64_emit_if_lcmpcc(X86_64_CC_NE, src, iptr);
2709 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2710 /* op1 = target JavaVM pc */
2712 x86_64_emit_if_icmpcc(X86_64_CC_L, src, iptr);
2715 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2716 /* op1 = target JavaVM pc */
2718 x86_64_emit_if_lcmpcc(X86_64_CC_L, src, iptr);
2721 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2722 /* op1 = target JavaVM pc */
2724 x86_64_emit_if_icmpcc(X86_64_CC_G, src, iptr);
2727 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2728 /* op1 = target JavaVM pc */
2730 x86_64_emit_if_lcmpcc(X86_64_CC_G, src, iptr);
2733 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2734 /* op1 = target JavaVM pc */
2736 x86_64_emit_if_icmpcc(X86_64_CC_LE, src, iptr);
2739 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2740 /* op1 = target JavaVM pc */
2742 x86_64_emit_if_lcmpcc(X86_64_CC_LE, src, iptr);
2745 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2746 /* op1 = target JavaVM pc */
2748 x86_64_emit_if_icmpcc(X86_64_CC_GE, src, iptr);
2751 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2752 /* op1 = target JavaVM pc */
2754 x86_64_emit_if_lcmpcc(X86_64_CC_GE, src, iptr);
2757 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
2759 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
/* Branchless select: when an IFxx_ICONST is immediately followed by
 * ELSE_ICONST, both constants are materialized and a conditional move
 * (cmovcc) picks the right one based on `value xx 0` — avoiding a
 * branch entirely.  The ELSE constant (iptr[1].val.i) goes into d,
 * the IF constant (elided here, presumably iptr->val.i -> s3 — TODO
 * confirm against the full file) into REG_ITMP2, then cmovcc
 * overwrites d when the condition holds.  All six cases follow the
 * identical pattern, differing only in the condition code. */
2762 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2763 /* val.i = constant */
2765 var_to_reg_int(s1, src, REG_ITMP1);
2766 d = reg_of_var(iptr->dst, REG_ITMP3);
2768 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2770 M_INTMOVE(s1, REG_ITMP1);
2773 x86_64_movl_imm_reg(iptr[1].val.i, d);
2775 x86_64_movl_imm_reg(s3, REG_ITMP2);
2776 x86_64_testl_reg_reg(s1, s1);
2777 x86_64_cmovccl_reg_reg(X86_64_CC_E, REG_ITMP2, d);
2778 store_reg_to_var_int(iptr->dst, d);
2781 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2782 /* val.i = constant */
2784 var_to_reg_int(s1, src, REG_ITMP1);
2785 d = reg_of_var(iptr->dst, REG_ITMP3);
2787 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2789 M_INTMOVE(s1, REG_ITMP1);
2792 x86_64_movl_imm_reg(iptr[1].val.i, d);
2794 x86_64_movl_imm_reg(s3, REG_ITMP2);
2795 x86_64_testl_reg_reg(s1, s1);
2796 x86_64_cmovccl_reg_reg(X86_64_CC_NE, REG_ITMP2, d);
2797 store_reg_to_var_int(iptr->dst, d);
2800 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2801 /* val.i = constant */
2803 var_to_reg_int(s1, src, REG_ITMP1);
2804 d = reg_of_var(iptr->dst, REG_ITMP3);
2806 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2808 M_INTMOVE(s1, REG_ITMP1);
2811 x86_64_movl_imm_reg(iptr[1].val.i, d);
2813 x86_64_movl_imm_reg(s3, REG_ITMP2);
2814 x86_64_testl_reg_reg(s1, s1);
2815 x86_64_cmovccl_reg_reg(X86_64_CC_L, REG_ITMP2, d);
2816 store_reg_to_var_int(iptr->dst, d);
2819 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2820 /* val.i = constant */
2822 var_to_reg_int(s1, src, REG_ITMP1);
2823 d = reg_of_var(iptr->dst, REG_ITMP3);
2825 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2827 M_INTMOVE(s1, REG_ITMP1);
2830 x86_64_movl_imm_reg(iptr[1].val.i, d);
2832 x86_64_movl_imm_reg(s3, REG_ITMP2);
2833 x86_64_testl_reg_reg(s1, s1);
2834 x86_64_cmovccl_reg_reg(X86_64_CC_GE, REG_ITMP2, d);
2835 store_reg_to_var_int(iptr->dst, d);
2838 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2839 /* val.i = constant */
2841 var_to_reg_int(s1, src, REG_ITMP1);
2842 d = reg_of_var(iptr->dst, REG_ITMP3);
2844 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2846 M_INTMOVE(s1, REG_ITMP1);
2849 x86_64_movl_imm_reg(iptr[1].val.i, d);
2851 x86_64_movl_imm_reg(s3, REG_ITMP2);
2852 x86_64_testl_reg_reg(s1, s1);
2853 x86_64_cmovccl_reg_reg(X86_64_CC_G, REG_ITMP2, d);
2854 store_reg_to_var_int(iptr->dst, d);
2857 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2858 /* val.i = constant */
2860 var_to_reg_int(s1, src, REG_ITMP1);
2861 d = reg_of_var(iptr->dst, REG_ITMP3);
2863 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2865 M_INTMOVE(s1, REG_ITMP1);
2868 x86_64_movl_imm_reg(iptr[1].val.i, d);
2870 x86_64_movl_imm_reg(s3, REG_ITMP2);
2871 x86_64_testl_reg_reg(s1, s1);
2872 x86_64_cmovccl_reg_reg(X86_64_CC_LE, REG_ITMP2, d);
2873 store_reg_to_var_int(iptr->dst, d);
/* Method returns.  The typed return cases place the return value in
 * REG_RESULT / REG_FRESULT, emit monitorexit for synchronized methods
 * (saving/restoring the return value around the builtin call, since
 * the call clobbers it), then fall to the common epilogue via
 * `goto nowperformreturn`: optional method-exit trace, restore of
 * callee-saved registers, and stack-frame deallocation.  The label and
 * the final return instruction are elided from this listing. */
2877 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2881 var_to_reg_int(s1, src, REG_RESULT);
2882 M_INTMOVE(s1, REG_RESULT);
2884 #if defined(USE_THREADS)
2885 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
/* maxmemuse*8 is the frame slot holding the monitor object;
 * the same slot is reused to preserve REG_RESULT across the call */
2886 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
2887 x86_64_mov_reg_membase(REG_RESULT, REG_SP, maxmemuse * 8);
2888 x86_64_mov_imm_reg((u8) builtin_monitorexit, REG_ITMP1);
2889 x86_64_call_reg(REG_ITMP1);
2890 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, REG_RESULT);
2894 goto nowperformreturn;
2896 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2899 var_to_reg_flt(s1, src, REG_FRESULT);
2900 M_FLTMOVE(s1, REG_FRESULT);
2902 #if defined(USE_THREADS)
2903 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2904 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
/* movq: preserve the FP return value in the integer-sized slot */
2905 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, maxmemuse * 8);
2906 x86_64_mov_imm_reg((u8) builtin_monitorexit, REG_ITMP1);
2907 x86_64_call_reg(REG_ITMP1);
2908 x86_64_movq_membase_reg(REG_SP, maxmemuse * 8, REG_FRESULT);
2912 goto nowperformreturn;
2914 case ICMD_RETURN: /* ... ==> ... */
2916 #if defined(USE_THREADS)
2917 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2918 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
2919 x86_64_mov_imm_reg((u8) builtin_monitorexit, REG_ITMP1);
2920 x86_64_call_reg(REG_ITMP1);
/* ---- common epilogue (nowperformreturn label elided) ---- */
2928 p = parentargs_base;
2930 /* call trace function */
/* spill both possible return values, call the verbose-mode trace
 * helper, then reload them (the guard around this is elided) */
2932 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
2934 x86_64_mov_reg_membase(REG_RESULT, REG_SP, 0 * 8);
2935 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, 1 * 8);
2937 x86_64_mov_imm_reg((s8) method, argintregs[0]);
2938 x86_64_mov_reg_reg(REG_RESULT, argintregs[1]);
2939 M_FLTMOVE(REG_FRESULT, argfltregs[0]);
2940 M_FLTMOVE(REG_FRESULT, argfltregs[1]);
2942 x86_64_mov_imm_reg((s8) builtin_displaymethodstop, REG_ITMP1);
2943 x86_64_call_reg(REG_ITMP1);
2945 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_RESULT);
2946 x86_64_movq_membase_reg(REG_SP, 1 * 8, REG_FRESULT);
2948 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
2951 /* restore saved registers */
2952 for (r = savintregcnt - 1; r >= maxsavintreguse; r--) {
2953 p--; x86_64_mov_membase_reg(REG_SP, p * 8, savintregs[r]);
2955 for (r = savfltregcnt - 1; r >= maxsavfltreguse; r--) {
2956 p--; x86_64_movq_membase_reg(REG_SP, p * 8, savfltregs[r]);
2959 /* deallocate stack */
2960 if (parentargs_base) {
2961 x86_64_alu_imm_reg(X86_64_ADD, parentargs_base * 8, REG_SP);
/* TABLESWITCH: range check then an indirect jump through a jump table
 * built in the data segment.  iptr->target is an array of basicblock
 * pointers (default first), iptr->val.a the raw s4 table from the
 * bytecode (low/high bounds). */
2970 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2975 tptr = (void **) iptr->target;
2977 s4ptr = iptr->val.a;
2978 l = s4ptr[1]; /* low */
2979 i = s4ptr[2]; /* high */
2981 var_to_reg_int(s1, src, REG_ITMP1);
2982 M_INTMOVE(s1, REG_ITMP1);
/* bias index so table starts at 0; the conditional skip of this
 * subtract when l == 0 appears elided from this listing */
2984 x86_64_alul_imm_reg(X86_64_SUB, l, REG_ITMP1);
/* unsigned compare covers both below-low and above-high in one test;
 * NOTE(review): the `i = i - l + 1` rescaling usually preceding this
 * is elided here — compare uses i - 1 */
2989 x86_64_alul_imm_reg(X86_64_CMP, i - 1, REG_ITMP1);
2990 x86_64_jcc(X86_64_CC_A, 0);
2992 /* codegen_addreference(BlockPtrOfPC(s4ptr[0]), mcodeptr); */
2993 codegen_addreference((basicblock *) tptr[0], mcodeptr);
2995 /* build jump table top down and use address of lowest entry */
2997 /* s4ptr += 3 + i; */
/* loop over targets is elided; each entry lands in the data segment */
3001 /* dseg_addtarget(BlockPtrOfPC(*--s4ptr)); */
3002 dseg_addtarget((basicblock *) tptr[0]);
3006 /* length of dataseg after last dseg_addtarget is used by load */
/* mov imm 0 + dseg_adddata: the immediate is patched to the data
 * segment base address; then an 8-byte scaled load fetches the
 * target and we jump indirect */
3008 x86_64_mov_imm_reg(0, REG_ITMP2);
3009 dseg_adddata(mcodeptr);
3010 x86_64_mov_memindex_reg(-dseglen, REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
3011 x86_64_jmp_reg(REG_ITMP1);
/* LOOKUPSWITCH: linear sequence of compare-and-branch, one per key
 * (register compare per the comment), ending with a jump to default. */
3017 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
3019 s4 i, l, val, *s4ptr;
3022 tptr = (void **) iptr->target;
3024 s4ptr = iptr->val.a;
3025 l = s4ptr[0]; /* default */
3026 i = s4ptr[1]; /* count */
3028 MCODECHECK((i<<2)+8);
3029 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
/* per-key loop header elided */
3035 x86_64_alul_imm_reg(X86_64_CMP, val, s1);
3036 x86_64_jcc(X86_64_CC_E, 0);
3037 /* codegen_addreference(BlockPtrOfPC(s4ptr[1]), mcodeptr); */
3038 codegen_addreference((basicblock *) tptr[0], mcodeptr);
3042 /* codegen_addreference(BlockPtrOfPC(l), mcodeptr); */
/* fall-through: unconditional jump (elided) to the default target */
3044 tptr = (void **) iptr->target;
3045 codegen_addreference((basicblock *) tptr[0], mcodeptr);
/* Builtin and method invocation ICMDs.  All five share the argument
 * marshalling loop and the result-handling tail; they differ only in
 * how the callee address is obtained.  Per the System V AMD64 calling
 * convention used here, the first INT_ARG_CNT integer and FLT_ARG_CNT
 * float arguments go in registers, the rest on the stack. */
3052 case ICMD_BUILTIN3: /* ..., arg1, arg2, arg3 ==> ... */
3053 /* op1 = return type, val.a = function pointer*/
3057 case ICMD_BUILTIN2: /* ..., arg1, arg2 ==> ... */
3058 /* op1 = return type, val.a = function pointer*/
3062 case ICMD_BUILTIN1: /* ..., arg1 ==> ... */
3063 /* op1 = return type, val.a = function pointer*/
3067 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3068 /* op1 = arg count, val.a = method pointer */
3070 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3071 /* op1 = arg count, val.a = method pointer */
3073 case ICMD_INVOKEVIRTUAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3074 /* op1 = arg count, val.a = method pointer */
3076 case ICMD_INVOKEINTERFACE:/*.., objectref, [arg1, [arg2 ...]] ==> ... */
3077 /* op1 = arg count, val.a = method pointer */
3088 MCODECHECK((s3 << 1) + 64);
3093 /* copy arguments to registers or stack location */
/* first pass: count integer vs float arguments */
3094 for (; --s3 >= 0; src = src->prev) {
3095 IS_INT_LNG_TYPE(src->type) ? iarg++ : farg++;
/* NOTE(review): suspected precedence bug.  This parses as
 *   (iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT
 *                        : ((0 + (farg > FLT_ARG_CNT)) ? farg - FLT_ARG_CNT : 0)
 * i.e. it picks ONE overflow count, whereas the apparent intent is the
 * SUM of integer and float stack-argument counts:
 *   ((iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0)
 *   + ((farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0)
 * Undercounting s2 would place stack arguments at wrong offsets when
 * both kinds overflow — confirm against the full file and upstream. */
3101 s2 = (iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0 + (farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0;
/* second pass: walk arguments right-to-left, decrementing the counters
 * so iarg/farg give each argument's 0-based position */
3103 for (; --s3 >= 0; src = src->prev) {
3104 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
3105 if (src->varkind == ARGVAR) {
/* ARGVAR operands already sit in their final slot; bookkeeping
 * for the stack-slot cursor appears elided here */
3106 if (IS_INT_LNG_TYPE(src->type)) {
3107 if (iarg >= INT_ARG_CNT) {
3111 if (farg >= FLT_ARG_CNT) {
3118 if (IS_INT_LNG_TYPE(src->type)) {
3119 if (iarg < INT_ARG_CNT) {
/* argument fits in a register: load directly into it */
3120 s1 = argintregs[iarg];
3121 var_to_reg_int(d, src, s1);
3125 var_to_reg_int(d, src, REG_ITMP1);
/* overflow argument: store to the outgoing stack area */
3127 x86_64_mov_reg_membase(d, REG_SP, s2 * 8);
3131 if (farg < FLT_ARG_CNT) {
3132 s1 = argfltregs[farg];
3133 var_to_reg_flt(d, src, s1);
3137 var_to_reg_flt(d, src, REG_FTMP1);
3139 x86_64_movq_reg_membase(d, REG_SP, s2 * 8);
/* dispatch on the invoke kind to materialize the callee address */
3145 switch (iptr->opc) {
/* BUILTINx: a is the builtin's C function pointer (setup elided) */
3153 x86_64_mov_imm_reg(a, REG_ITMP1);
3154 x86_64_call_reg(REG_ITMP1);
3157 case ICMD_INVOKESTATIC:
/* call through the method's stub routine, which compiles the
 * method on first call and backpatches */
3159 a = (s8) m->stubroutine;
3162 x86_64_mov_imm_reg(a, REG_ITMP2);
3163 x86_64_call_reg(REG_ITMP2);
3166 case ICMD_INVOKESPECIAL:
3168 a = (s8) m->stubroutine;
3171 gen_nullptr_check(argintregs[0]); /* first argument contains pointer */
3172 x86_64_mov_membase_reg(argintregs[0], 0, REG_ITMP2); /* access memory for hardware nullptr */
3173 x86_64_mov_imm_reg(a, REG_ITMP2);
3174 x86_64_call_reg(REG_ITMP2);
3177 case ICMD_INVOKEVIRTUAL:
/* load vftbl from the receiver, then the method pointer at the
 * method's fixed vtable index */
3181 gen_nullptr_check(argintregs[0]);
3182 x86_64_mov_membase_reg(argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3183 x86_64_mov_membase32_reg(REG_ITMP2, OFFSET(vftbl, table[0]) + sizeof(methodptr) * m->vftblindex, REG_ITMP1);
3184 x86_64_call_reg(REG_ITMP1);
3187 case ICMD_INVOKEINTERFACE:
/* two-level dispatch: vftbl -> per-interface table (indexed
 * negatively by interface index) -> method pointer */
3192 gen_nullptr_check(argintregs[0]);
3193 x86_64_mov_membase_reg(argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3194 x86_64_mov_membase_reg(REG_ITMP2, OFFSET(vftbl, interfacetable[0]) - sizeof(methodptr) * ci->index, REG_ITMP2);
3195 x86_64_mov_membase32_reg(REG_ITMP2, sizeof(methodptr) * (m - ci->methods), REG_ITMP1);
3196 x86_64_call_reg(REG_ITMP1);
3201 error("Unkown ICMD-Command: %d", iptr->opc);
3204 /* d contains return type */
/* move the callee's result into the destination variable, picking the
 * int or float result register by the destination's type */
3206 if (d != TYPE_VOID) {
3207 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3208 s1 = reg_of_var(iptr->dst, REG_RESULT);
3209 M_INTMOVE(REG_RESULT, s1);
3210 store_reg_to_var_int(iptr->dst, s1);
3213 s1 = reg_of_var(iptr->dst, REG_FRESULT);
3214 M_FLTMOVE(REG_FRESULT, s1);
3215 store_reg_to_var_flt(iptr->dst, s1);
/* INSTANCEOF: compute a boolean without branching to stubs.  The
 * hand-counted instruction byte sizes accumulated in `a` are used as
 * forward-jump displacements over the test sequence (so a NULL ref
 * skips straight past it with the result already zeroed).
 * CALCOFFSETBYTES / CALCIMMEDIATEBYTES add the encoding bytes a given
 * displacement/immediate will need — these counts must track the
 * emitters below exactly. */
3222 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3224 /* op1: 0 == array, 1 == class */
3225 /* val.a: (classinfo*) superclass */
3227 /* superclass is an interface:
3229 * return (sub != NULL) &&
3230 * (sub->vftbl->interfacetablelength > super->index) &&
3231 * (sub->vftbl->interfacetable[-super->index] != NULL);
3233 * superclass is a class:
3235 * return ((sub != NULL) && (0
3236 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3237 * super->vftbl->diffval));
3241 classinfo *super = (classinfo*) iptr->val.a;
3243 var_to_reg_int(s1, src, REG_ITMP1);
3244 d = reg_of_var(iptr->dst, REG_ITMP3);
3246 M_INTMOVE(s1, REG_ITMP1);
/* result defaults to 0 (false) */
3249 x86_64_alu_reg_reg(X86_64_XOR, d, d);
3250 if (iptr->op1) { /* class/interface */
3251 if (super->flags & ACC_INTERFACE) { /* interface */
3252 x86_64_test_reg_reg(s1, s1);
3254 /* TODO: clean up this calculation */
/* byte count of the interface-check sequence, for the jcc that
 * skips it when the reference is NULL */
3255 a = 3; /* mov_membase_reg */
3256 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3258 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3259 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetablelength));
3262 CALCIMMEDIATEBYTES(a, super->index);
3267 a += 3; /* mov_membase_reg */
3268 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
/* if ref == NULL, skip the whole check; d stays 0 */
3273 x86_64_jcc(X86_64_CC_E, a);
3275 x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3276 x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetablelength), REG_ITMP2);
3277 x86_64_alu_imm_reg(X86_64_SUB, super->index, REG_ITMP2);
3278 x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3280 /* TODO: clean up this calculation */
/* second skip: table too short -> result stays 0 */
3282 a += 3; /* mov_membase_reg */
3283 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3288 x86_64_jcc(X86_64_CC_LE, a);
3289 x86_64_mov_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP1);
3290 x86_64_test_reg_reg(REG_ITMP1, REG_ITMP1);
/* d = (interface table entry != NULL) */
3291 x86_64_setcc_reg(X86_64_CC_NE, d);
3293 } else { /* class */
/* subtype test via baseval/diffval range check:
 * 0 <= sub.baseval - super.baseval <= super.diffval,
 * done as one unsigned compare (setBE) */
3294 x86_64_test_reg_reg(s1, s1);
3296 /* TODO: clean up this calculation */
3297 a = 3; /* mov_membase_reg */
3298 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3300 a += 10; /* mov_imm_reg */
3302 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3303 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, baseval));
3305 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3306 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3308 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3309 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3316 x86_64_jcc(X86_64_CC_E, a);
3318 x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3319 x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3320 x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, baseval), REG_ITMP1);
3321 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP3);
3322 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3323 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP3, REG_ITMP1);
3324 x86_64_alu_reg_reg(X86_64_XOR, d, d);
3325 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
/* unsigned below-or-equal captures the 0..diffval range in one test */
3326 x86_64_setcc_reg(X86_64_CC_BE, d);
3330 panic("internal error: no inlined array instanceof");
3332 store_reg_to_var_int(iptr->dst, d);
/* CHECKCAST: same subtype tests as INSTANCEOF, but failure branches to
 * a ClassCastException stub (codegen_addxcastrefs records each jcc to
 * be resolved against the shared stub); a NULL reference passes. */
3335 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3337 /* op1: 0 == array, 1 == class */
3338 /* val.a: (classinfo*) superclass */
3340 /* superclass is an interface:
3342 * OK if ((sub == NULL) ||
3343 * (sub->vftbl->interfacetablelength > super->index) &&
3344 * (sub->vftbl->interfacetable[-super->index] != NULL));
3346 * superclass is a class:
3348 * OK if ((sub == NULL) || (0
3349 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3350 * super->vftbl->diffval));
3354 classinfo *super = (classinfo*) iptr->val.a;
3356 d = reg_of_var(iptr->dst, REG_ITMP3);
3357 var_to_reg_int(s1, src, d);
3358 if (iptr->op1) { /* class/interface */
3359 if (super->flags & ACC_INTERFACE) { /* interface */
3360 x86_64_test_reg_reg(s1, s1);
3362 /* TODO: clean up this calculation */
3363 a = 3; /* mov_membase_reg */
3364 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3366 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3367 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetablelength));
3370 CALCIMMEDIATEBYTES(a, super->index);
3375 a += 3; /* mov_membase_reg */
3376 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
/* NULL passes the cast: skip the checks entirely */
3381 x86_64_jcc(X86_64_CC_E, a);
3383 x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3384 x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetablelength), REG_ITMP2);
3385 x86_64_alu_imm_reg(X86_64_SUB, super->index, REG_ITMP2);
3386 x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
/* table too short -> throw (branch patched to the cast stub) */
3387 x86_64_jcc(X86_64_CC_LE, 0);
3388 codegen_addxcastrefs(mcodeptr);
3389 x86_64_mov_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP2);
3390 x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
/* missing entry -> throw */
3391 x86_64_jcc(X86_64_CC_E, 0);
3392 codegen_addxcastrefs(mcodeptr);
3394 } else { /* class */
3395 x86_64_test_reg_reg(s1, s1);
3397 /* TODO: clean up this calculation */
3398 a = 3; /* mov_membase_reg */
3399 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3400 a += 10; /* mov_imm_reg */
3401 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3402 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, baseval));
/* two code shapes: when d != REG_ITMP3, ITMP3 is free as scratch;
 * otherwise super->vftbl must be reloaded mid-sequence, hence the
 * different byte counts below */
3404 if (d != REG_ITMP3) {
3405 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3406 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3407 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3408 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3412 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3413 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3415 a += 10; /* mov_imm_reg */
3416 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3417 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3423 x86_64_jcc(X86_64_CC_E, a);
3425 x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3426 x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3427 x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, baseval), REG_ITMP1);
3428 if (d != REG_ITMP3) {
3429 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP3);
3430 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3431 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP3, REG_ITMP1);
/* d == REG_ITMP3 path: reload super->vftbl to get diffval since
 * ITMP3 cannot be used as scratch */
3434 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP2);
3435 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
3436 x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3437 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3439 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
3440 x86_64_jcc(X86_64_CC_A, 0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3441 codegen_addxcastrefs(mcodeptr);
3445 panic("internal error: no inlined array checkcast");
3448 store_reg_to_var_int(iptr->dst, d);
/* CHECKASIZE: trap negative array sizes.  The jcc's displacement is
 * patched later to the shared NegativeArraySizeException stub recorded
 * via codegen_addxcheckarefs. */
3451 case ICMD_CHECKASIZE: /* ..., size ==> ..., size */
3453 if (src->flags & INMEMORY) {
3454 x86_64_alul_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
3457 x86_64_testl_reg_reg(src->regoff, src->regoff);
/* size < 0 -> branch to the check stub */
3459 x86_64_jcc(X86_64_CC_L, 0);
3460 codegen_addxcheckarefs(mcodeptr);
/* CHECKEXCEPTION: after a builtin call, a NULL REG_RESULT signals a
 * pending exception -> branch to the exception stub. */
3463 case ICMD_CHECKEXCEPTION: /* ... ==> ... */
3465 x86_64_test_reg_reg(REG_RESULT, REG_RESULT);
3466 x86_64_jcc(X86_64_CC_E, 0);
3467 codegen_addxexceptionrefs(mcodeptr);
3470 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3471 /* op1 = dimension, val.a = array descriptor */
3473 /* check for negative sizes and copy sizes to stack if necessary */
3475 MCODECHECK((iptr->op1 << 1) + 64);
/* validate each dimension count and spill it to the argument area */
3477 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3478 var_to_reg_int(s2, src, REG_ITMP1);
3479 x86_64_testl_reg_reg(s2, s2);
3480 x86_64_jcc(X86_64_CC_L, 0);
3481 codegen_addxcheckarefs(mcodeptr);
3483 /* copy sizes to stack (argument numbers >= INT_ARG_CNT) */
3485 if (src->varkind != ARGVAR) {
3486 x86_64_mov_reg_membase(s2, REG_SP, (s1 + INT_ARG_CNT) * 8);
3490 /* a0 = dimension count */
3491 x86_64_mov_imm_reg(iptr->op1, argintregs[0]);
3493 /* a1 = arraydescriptor */
3494 x86_64_mov_imm_reg((s8) iptr->val.a, argintregs[1]);
3496 /* a2 = pointer to dimensions = stack pointer */
3497 x86_64_mov_reg_reg(REG_SP, argintregs[2]);
3499 x86_64_mov_imm_reg((s8) (builtin_nmultianewarray), REG_ITMP1);
3500 x86_64_call_reg(REG_ITMP1);
/* builtin returns the new array reference in REG_RESULT */
3502 s1 = reg_of_var(iptr->dst, REG_RESULT);
3503 M_INTMOVE(REG_RESULT, s1);
3504 store_reg_to_var_int(iptr->dst, s1);
3507 default: error("Unknown pseudo command: %d", iptr->opc);
3510 } /* for instruction */
3512 /* copy values to interface registers */
/* At each basic-block exit, values still on the simulated stack must be
 * moved into the interface registers/slots the successor blocks expect.
 * The walk over (src, len) with s2 = src->type is partially elided. */
3514 src = bptr->outstack;
3515 len = bptr->outdepth;
3519 if ((src->varkind != STACKVAR)) {
3521 if (IS_FLT_DBL_TYPE(s2)) {
3522 var_to_reg_flt(s1, src, REG_FTMP1);
3523 if (!(interfaces[len][s2].flags & INMEMORY)) {
3524 M_FLTMOVE(s1, interfaces[len][s2].regoff);
/* interface slot lives in memory: spill */
3527 x86_64_movq_reg_membase(s1, REG_SP, 8 * interfaces[len][s2].regoff);
3531 var_to_reg_int(s1, src, REG_ITMP1);
3532 if (!(interfaces[len][s2].flags & INMEMORY)) {
3533 M_INTMOVE(s1, interfaces[len][s2].regoff);
3536 x86_64_mov_reg_membase(s1, REG_SP, interfaces[len][s2].regoff * 8);
3542 } /* if (bptr -> flags >= BBREACHED) */
3543 } /* for basic block */
3545 /* bptr -> mpc = (int)((u1*) mcodeptr - mcodebase); */
3549 /* generate bound check stubs */
/* One out-of-line stub per distinct site (shared via xcodeptr once the
 * first stub is emitted, provided the method has no exception table).
 * Each forward branch recorded during code generation is resolved to
 * its stub here. */
3551 u1 *xcodeptr = NULL;
3553 for (; xboundrefs != NULL; xboundrefs = xboundrefs->next) {
3554 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
/* reuse the already-emitted stub; the constant 3+10+10+3 is the
 * byte length of the per-site prologue below and must match it */
3555 gen_resolvebranch(mcodebase + xboundrefs->branchpos,
3556 xboundrefs->branchpos,
3557 xcodeptr - mcodebase - (3 + 10 + 10 + 3));
3561 gen_resolvebranch(mcodebase + xboundrefs->branchpos,
3562 xboundrefs->branchpos,
3563 mcodeptr - mcodebase);
3567 /* move index register into REG_ITMP1 */
3568 x86_64_mov_reg_reg(xboundrefs->reg, REG_ITMP1); /* 3 bytes */
/* compute the faulting code address into REG_ITMP2_XPC: data-segment
 * base (patched via dseg_adddata) + branch position */
3570 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3571 dseg_adddata(mcodeptr);
3572 x86_64_mov_imm_reg(xboundrefs->branchpos - 6, REG_ITMP3); /* 10 bytes */
3573 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3575 if (xcodeptr != NULL) {
/* jump into the shared stub body (5 = length of jmp rel32) */
3576 x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3579 xcodeptr = mcodeptr;
/* stub body: build ArrayIndexOutOfBoundsException(index) and enter
 * the asm exception handler; XPC is preserved across the call */
3581 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3582 x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3583 x86_64_mov_imm_reg((s8) string_java_lang_ArrayIndexOutOfBoundsException, argintregs[0]);
3584 x86_64_mov_reg_reg(REG_ITMP1, argintregs[1]);
3585 x86_64_mov_imm_reg((s8) new_exception_int, REG_ITMP3);
3586 x86_64_call_reg(REG_ITMP3);
3587 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3588 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3590 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3591 x86_64_jmp_reg(REG_ITMP3);
3595 /* generate negative array size check stubs */
/* same sharing scheme; 10+10+3 matches this stub's shorter per-site
 * prologue (no index register move) */
3599 for (; xcheckarefs != NULL; xcheckarefs = xcheckarefs->next) {
3600 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3601 gen_resolvebranch(mcodebase + xcheckarefs->branchpos,
3602 xcheckarefs->branchpos,
3603 xcodeptr - mcodebase - (10 + 10 + 3));
3607 gen_resolvebranch(mcodebase + xcheckarefs->branchpos,
3608 xcheckarefs->branchpos,
3609 mcodeptr - mcodebase);
3613 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3614 dseg_adddata(mcodeptr);
3615 x86_64_mov_imm_reg(xcheckarefs->branchpos - 6, REG_ITMP3); /* 10 bytes */
3616 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3618 if (xcodeptr != NULL) {
3619 x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3622 xcodeptr = mcodeptr;
3624 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3625 x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3626 x86_64_mov_imm_reg((s8) string_java_lang_NegativeArraySizeException, argintregs[0]);
3627 x86_64_mov_imm_reg((s8) new_exception, REG_ITMP3);
3628 x86_64_call_reg(REG_ITMP3);
3629 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3630 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3632 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3633 x86_64_jmp_reg(REG_ITMP3);
3637 /* generate cast check stubs */
3641 for (; xcastrefs != NULL; xcastrefs = xcastrefs->next) {
3642 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3643 gen_resolvebranch(mcodebase + xcastrefs->branchpos,
3644 xcastrefs->branchpos,
3645 xcodeptr - mcodebase - (10 + 10 + 3));
3649 gen_resolvebranch(mcodebase + xcastrefs->branchpos,
3650 xcastrefs->branchpos,
3651 mcodeptr - mcodebase);
3655 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3656 dseg_adddata(mcodeptr);
3657 x86_64_mov_imm_reg(xcastrefs->branchpos - 6, REG_ITMP3); /* 10 bytes */
3658 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3660 if (xcodeptr != NULL) {
3661 x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3664 xcodeptr = mcodeptr;
3666 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3667 x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3668 x86_64_mov_imm_reg((s8) string_java_lang_ClassCastException, argintregs[0]);
3669 x86_64_mov_imm_reg((s8) new_exception, REG_ITMP3);
3670 x86_64_call_reg(REG_ITMP3);
3671 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3672 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3674 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3675 x86_64_jmp_reg(REG_ITMP3);
3679 /* generate divide by zero check stubs */
3683 for (; xdivrefs != NULL; xdivrefs = xdivrefs->next) {
3684 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3685 gen_resolvebranch(mcodebase + xdivrefs->branchpos,
3686 xdivrefs->branchpos,
3687 xcodeptr - mcodebase - (10 + 10 + 3));
3691 gen_resolvebranch(mcodebase + xdivrefs->branchpos,
3692 xdivrefs->branchpos,
3693 mcodeptr - mcodebase);
3697 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3698 dseg_adddata(mcodeptr);
3699 x86_64_mov_imm_reg(xdivrefs->branchpos - 6, REG_ITMP3); /* 10 bytes */
3700 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3702 if (xcodeptr != NULL) {
3703 x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3706 xcodeptr = mcodeptr;
3708 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3709 x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3710 x86_64_mov_imm_reg((u8) string_java_lang_ArithmeticException, argintregs[0]);
3711 x86_64_mov_imm_reg((u8) string_java_lang_ArithmeticException_message, argintregs[1]);
3712 x86_64_mov_imm_reg((u8) new_exception, REG_ITMP3);
3713 x86_64_call_reg(REG_ITMP3);
3714 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3715 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3717 x86_64_mov_imm_reg((u8) asm_handle_exception, REG_ITMP3);
3718 x86_64_jmp_reg(REG_ITMP3);
3722 /* generate exception check stubs */
3726 for (; xexceptionrefs != NULL; xexceptionrefs = xexceptionrefs->next) {
3727 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3728 gen_resolvebranch(mcodebase + xexceptionrefs->branchpos,
3729 xexceptionrefs->branchpos,
3730 xcodeptr - mcodebase - (10 + 10 + 3));
3734 gen_resolvebranch(mcodebase + xexceptionrefs->branchpos,
3735 xexceptionrefs->branchpos,
3736 mcodeptr - mcodebase);
3740 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3741 dseg_adddata(mcodeptr);
3742 x86_64_mov_imm_reg(xexceptionrefs->branchpos - 6, REG_ITMP1); /* 10 bytes */
3743 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3745 if (xcodeptr != NULL) {
3746 x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3749 xcodeptr = mcodeptr;
3751 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3752 x86_64_push_reg(REG_ITMP2_XPC);
3753 x86_64_mov_imm_reg((u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3754 x86_64_call_reg(REG_ITMP1);
3755 x86_64_mov_membase_reg(REG_RESULT, 0, REG_ITMP3);
3756 x86_64_mov_imm_membase(0, REG_RESULT, 0);
3757 x86_64_mov_reg_reg(REG_ITMP3, REG_ITMP1_XPTR);
3758 x86_64_pop_reg(REG_ITMP2_XPC);
3760 x86_64_mov_imm_reg((u8) &_exceptionptr, REG_ITMP3);
3761 x86_64_mov_membase_reg(REG_ITMP3, 0, REG_ITMP1_XPTR);
3762 x86_64_mov_imm_membase(0, REG_ITMP3, 0);
3765 x86_64_mov_imm_reg((u8) asm_handle_exception, REG_ITMP3);
3766 x86_64_jmp_reg(REG_ITMP3);
3770 /* generate null pointer check stubs */
3774 for (; xnullrefs != NULL; xnullrefs = xnullrefs->next) {
3775 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3776 gen_resolvebranch(mcodebase + xnullrefs->branchpos,
3777 xnullrefs->branchpos,
3778 xcodeptr - mcodebase - (10 + 10 + 3));
3782 gen_resolvebranch(mcodebase + xnullrefs->branchpos,
3783 xnullrefs->branchpos,
3784 mcodeptr - mcodebase);
3788 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3789 dseg_adddata(mcodeptr);
3790 x86_64_mov_imm_reg(xnullrefs->branchpos - 6, REG_ITMP1); /* 10 bytes */
3791 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3793 if (xcodeptr != NULL) {
3794 x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3797 xcodeptr = mcodeptr;
3799 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3800 x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3801 x86_64_mov_imm_reg((s8) string_java_lang_NullPointerException, argintregs[0]);
3802 x86_64_mov_imm_reg((s8) new_exception, REG_ITMP3);
3803 x86_64_call_reg(REG_ITMP3);
3804 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3805 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3807 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3808 x86_64_jmp_reg(REG_ITMP3);
3814 codegen_finish((int)((u1*) mcodeptr - mcodebase));
3818 /* function createcompilerstub *************************************************
3820 creates a stub routine which calls the compiler
3822 *******************************************************************************/
3824 #define COMPSTUBSIZE 23
/* createcompilerstub: allocate and emit a COMPSTUBSIZE-byte trampoline that
   transfers control to the JIT compiler for method m.  The stub loads the
   methodinfo pointer into REG_ITMP1 and jumps to asm_call_jit_compiler,
   which compiles m and patches callers.
   NOTE(review): this listing lost lines in extraction (braces, the
   `return s;`, and the statistics #if around count_cstub_len are missing);
   code is kept byte-identical, comments only. */
3826 u1 *createcompilerstub(methodinfo *m)
3828 u1 *s = CNEW(u1, COMPSTUBSIZE); /* memory to hold the stub */
3829 mcodeptr = s; /* code generation pointer */
3831 /* code for the stub */
3832 x86_64_mov_imm_reg((s8) m, REG_ITMP1); /* pass method pointer to compiler */
3833 x86_64_mov_imm_reg((s8) asm_call_jit_compiler, REG_ITMP3);/* load address */
3834 x86_64_jmp_reg(REG_ITMP3); /* jump to compiler */
/* statistics: account the emitted stub bytes */
3837 count_cstub_len += COMPSTUBSIZE;
3844 /* function removecompilerstub *************************************************
3846 deletes a compilerstub from memory (simply by freeing it)
3848 *******************************************************************************/
/* removecompilerstub: release a stub previously built by createcompilerstub.
   Frees exactly COMPSTUBSIZE bytes, matching the CNEW in the creator. */
3850 void removecompilerstub(u1 *stub)
3852 CFREE(stub, COMPSTUBSIZE);
3855 /* function: createnativestub **************************************************
3857 creates a stub routine which calls a native method
3859 *******************************************************************************/
3861 #define NATIVESTUBSIZE 420
/* createnativestub: build a NATIVESTUBSIZE-byte stub that bridges from the
   JIT calling convention into native function f implementing method m.
   Responsibilities visible in this listing:
     - trigger class initialization for static methods (asm_check_clinit);
     - (verbose tracing) spill all argument registers, call
       builtin_trace_args, and restore them;
     - save callee-saved XMM registers and keep the stack 16-byte aligned;
     - shift integer argument registers right to make room for the JNI
       env pointer (and the class pointer for static methods);
     - call f, then check _exceptionptr and branch to
       asm_handle_nat_exception if an exception is pending.
   NOTE(review): extraction dropped many lines here (braces, #if guards,
   the `return s;`, loop/branch closers) -- code kept byte-identical,
   comments only; verify against the full original before editing. */
3863 u1 *createnativestub(functionptr f, methodinfo *m)
3865 u1 *s = CNEW(u1, NATIVESTUBSIZE); /* memory to hold the stub */
3866 int stackframesize; /* size of stackframe if needed */
3867 mcodeptr = s; /* make macros work */
3870 descriptor2types(m); /* set paramcount and paramtypes */
3872 /* if function is static, check for initialized */
3874 if (m->flags & ACC_STATIC) {
3875 /* if class isn't yet initialized, do it */
3876 if (!m->class->initialized) {
3877 /* call helper function which patches this code */
3878 x86_64_mov_imm_reg((u8) m->class, REG_ITMP1);
3879 x86_64_mov_imm_reg((u8) asm_check_clinit, REG_ITMP2);
3880 x86_64_call_reg(REG_ITMP2);
/* --- (verbose call tracing) spill all int/float argument registers --- */
/* 6 int args + 8 float slots + 1 for the methodinfo = 15 slots */
3887 x86_64_alu_imm_reg(X86_64_SUB, (6 + 8 + 1) * 8, REG_SP);
3889 x86_64_mov_reg_membase(argintregs[0], REG_SP, 1 * 8);
3890 x86_64_mov_reg_membase(argintregs[1], REG_SP, 2 * 8);
3891 x86_64_mov_reg_membase(argintregs[2], REG_SP, 3 * 8);
3892 x86_64_mov_reg_membase(argintregs[3], REG_SP, 4 * 8);
3893 x86_64_mov_reg_membase(argintregs[4], REG_SP, 5 * 8);
3894 x86_64_mov_reg_membase(argintregs[5], REG_SP, 6 * 8);
3896 x86_64_movq_reg_membase(argfltregs[0], REG_SP, 7 * 8);
3897 x86_64_movq_reg_membase(argfltregs[1], REG_SP, 8 * 8);
3898 x86_64_movq_reg_membase(argfltregs[2], REG_SP, 9 * 8);
3899 x86_64_movq_reg_membase(argfltregs[3], REG_SP, 10 * 8);
3900 /* x86_64_movq_reg_membase(argfltregs[4], REG_SP, 11 * 8); */
3901 /* x86_64_movq_reg_membase(argfltregs[5], REG_SP, 12 * 8); */
3902 /* x86_64_movq_reg_membase(argfltregs[6], REG_SP, 13 * 8); */
3903 /* x86_64_movq_reg_membase(argfltregs[7], REG_SP, 14 * 8); */
3905 /* show integer hex code for float arguments */
3906 for (p = 0, l = 0; p < m->paramcount; p++) {
3907 if (IS_FLT_DBL_TYPE(m->paramtypes[p])) {
/* shift int arg registers up to open slot p for the float's bit pattern */
3908 for (s1 = (m->paramcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : m->paramcount - 2; s1 >= p; s1--) {
3909 x86_64_mov_reg_reg(argintregs[s1], argintregs[s1 + 1]);
/* move raw float bits into the int register so the tracer can print them */
3912 x86_64_movd_freg_reg(argfltregs[l], argintregs[p]);
/* methodinfo pointer is the first (stack slot 0) tracer argument */
3917 x86_64_mov_imm_reg((s8) m, REG_ITMP1);
3918 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, 0 * 8);
3919 x86_64_mov_imm_reg((s8) builtin_trace_args, REG_ITMP1);
3920 x86_64_call_reg(REG_ITMP1);
/* restore all argument registers after the trace call */
3922 x86_64_mov_membase_reg(REG_SP, 1 * 8, argintregs[0]);
3923 x86_64_mov_membase_reg(REG_SP, 2 * 8, argintregs[1]);
3924 x86_64_mov_membase_reg(REG_SP, 3 * 8, argintregs[2]);
3925 x86_64_mov_membase_reg(REG_SP, 4 * 8, argintregs[3]);
3926 x86_64_mov_membase_reg(REG_SP, 5 * 8, argintregs[4]);
3927 x86_64_mov_membase_reg(REG_SP, 6 * 8, argintregs[5]);
3929 x86_64_movq_membase_reg(REG_SP, 7 * 8, argfltregs[0]);
3930 x86_64_movq_membase_reg(REG_SP, 8 * 8, argfltregs[1]);
3931 x86_64_movq_membase_reg(REG_SP, 9 * 8, argfltregs[2]);
3932 x86_64_movq_membase_reg(REG_SP, 10 * 8, argfltregs[3]);
3933 /* x86_64_movq_membase_reg(REG_SP, 11 * 8, argfltregs[4]); */
3934 /* x86_64_movq_membase_reg(REG_SP, 12 * 8, argfltregs[5]); */
3935 /* x86_64_movq_membase_reg(REG_SP, 13 * 8, argfltregs[6]); */
3936 /* x86_64_movq_membase_reg(REG_SP, 14 * 8, argfltregs[7]); */
3938 x86_64_alu_imm_reg(X86_64_ADD, (6 + 8 + 1) * 8, REG_SP);
/* --- save callee-saved float registers (odd slot count keeps alignment) --- */
3942 x86_64_alu_imm_reg(X86_64_SUB, 7 * 8, REG_SP); /* keep stack 16-byte aligned */
3944 /* save callee saved float registers */
3945 x86_64_movq_reg_membase(XMM15, REG_SP, 0 * 8);
3946 x86_64_movq_reg_membase(XMM14, REG_SP, 1 * 8);
3947 x86_64_movq_reg_membase(XMM13, REG_SP, 2 * 8);
3948 x86_64_movq_reg_membase(XMM12, REG_SP, 3 * 8);
3949 x86_64_movq_reg_membase(XMM11, REG_SP, 4 * 8);
3950 x86_64_movq_reg_membase(XMM10, REG_SP, 5 * 8);
3953 /* save argument registers on stack -- if we have to */
/* static methods lose 2 int registers (env + class), others lose 1 (env) */
3954 if ((m->flags & ACC_STATIC && m->paramcount > (INT_ARG_CNT - 2)) || m->paramcount > (INT_ARG_CNT - 1)) {
3956 int paramshiftcnt = (m->flags & ACC_STATIC) ? 2 : 1;
3957 int stackparamcnt = (m->paramcount > INT_ARG_CNT) ? m->paramcount - INT_ARG_CNT : 0;
3959 stackframesize = stackparamcnt + paramshiftcnt;
3961 /* keep stack 16-byte aligned */
3962 if ((stackframesize % 2) == 0) stackframesize++;
3964 x86_64_alu_imm_reg(X86_64_SUB, stackframesize * 8, REG_SP);
3966 /* copy stack arguments into new stack frame -- if any */
3967 for (i = 0; i < stackparamcnt; i++) {
3968 x86_64_mov_membase_reg(REG_SP, (stackparamcnt + 1 + i) * 8, REG_ITMP1);
3969 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
/* spill the int arg registers displaced by env (+ class for static) */
3972 if (m->flags & ACC_STATIC) {
3973 x86_64_mov_reg_membase(argintregs[5], REG_SP, 1 * 8);
3974 x86_64_mov_reg_membase(argintregs[4], REG_SP, 0 * 8);
3977 x86_64_mov_reg_membase(argintregs[5], REG_SP, 0 * 8);
3981 /* keep stack 16-byte aligned -- this is essential for x86_64 */
3982 x86_64_alu_imm_reg(X86_64_SUB, 8, REG_SP);
/* shift int arg registers right; JNI prepends env (and class if static) */
3986 if (m->flags & ACC_STATIC) {
3987 x86_64_mov_reg_reg(argintregs[3], argintregs[5]);
3988 x86_64_mov_reg_reg(argintregs[2], argintregs[4]);
3989 x86_64_mov_reg_reg(argintregs[1], argintregs[3]);
3990 x86_64_mov_reg_reg(argintregs[0], argintregs[2]);
3992 /* put class into second argument register */
3993 x86_64_mov_imm_reg((s8) m->class, argintregs[1]);
3996 x86_64_mov_reg_reg(argintregs[4], argintregs[5]);
3997 x86_64_mov_reg_reg(argintregs[3], argintregs[4]);
3998 x86_64_mov_reg_reg(argintregs[2], argintregs[3]);
3999 x86_64_mov_reg_reg(argintregs[1], argintregs[2]);
4000 x86_64_mov_reg_reg(argintregs[0], argintregs[1]);
4003 /* put env into first argument register */
4004 x86_64_mov_imm_reg((s8) &env, argintregs[0]);
/* call the native implementation */
4006 x86_64_mov_imm_reg((s8) f, REG_ITMP1);
4007 x86_64_call_reg(REG_ITMP1);
4009 /* remove stackframe if there is one */
4010 if (stackframesize) {
4011 x86_64_alu_imm_reg(X86_64_ADD, stackframesize * 8, REG_SP);
/* --- (verbose tracing) report the return value, preserving it --- */
4015 x86_64_alu_imm_reg(X86_64_SUB, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4017 x86_64_mov_reg_membase(REG_RESULT, REG_SP, 0 * 8);
4018 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, 1 * 8);
4020 x86_64_mov_imm_reg((s8) m, argintregs[0]);
4021 x86_64_mov_reg_reg(REG_RESULT, argintregs[1]);
4022 M_FLTMOVE(REG_FRESULT, argfltregs[0]);
4023 M_FLTMOVE(REG_FRESULT, argfltregs[1]);
4025 x86_64_mov_imm_reg((s8) builtin_displaymethodstop, REG_ITMP1);
4026 x86_64_call_reg(REG_ITMP1);
4028 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_RESULT);
4029 x86_64_movq_membase_reg(REG_SP, 1 * 8, REG_FRESULT);
4031 x86_64_alu_imm_reg(X86_64_ADD, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4035 /* restore callee saved registers */
4036 x86_64_movq_membase_reg(REG_SP, 0 * 8, XMM15);
4037 x86_64_movq_membase_reg(REG_SP, 1 * 8, XMM14);
4038 x86_64_movq_membase_reg(REG_SP, 2 * 8, XMM13);
4039 x86_64_movq_membase_reg(REG_SP, 3 * 8, XMM12);
4040 x86_64_movq_membase_reg(REG_SP, 4 * 8, XMM11);
4041 x86_64_movq_membase_reg(REG_SP, 5 * 8, XMM10);
4043 x86_64_alu_imm_reg(X86_64_ADD, 7 * 8, REG_SP); /* keep stack 16-byte aligned */
/* test whether the native call left a pending exception */
4046 x86_64_mov_imm_reg((s8) &_exceptionptr, REG_ITMP3);
4047 x86_64_mov_membase_reg(REG_ITMP3, 0, REG_ITMP3);
4048 x86_64_test_reg_reg(REG_ITMP3, REG_ITMP3);
4049 x86_64_jcc(X86_64_CC_NE, 1);
/* exception path: move it to XPTR, clear _exceptionptr, compute faulting PC */
4053 x86_64_mov_reg_reg(REG_ITMP3, REG_ITMP1_XPTR);
4054 x86_64_mov_imm_reg((s8) &_exceptionptr, REG_ITMP3);
4055 x86_64_alu_reg_reg(X86_64_XOR, REG_ITMP2, REG_ITMP2);
4056 x86_64_mov_reg_membase(REG_ITMP2, REG_ITMP3, 0); /* clear exception pointer */
4058 x86_64_mov_membase_reg(REG_SP, 0, REG_ITMP2_XPC); /* get return address from stack */
4059 x86_64_alu_imm_reg(X86_64_SUB, 3, REG_ITMP2_XPC); /* callq */
4061 x86_64_mov_imm_reg((s8) asm_handle_nat_exception, REG_ITMP3);
4062 x86_64_jmp_reg(REG_ITMP3);
/* debug aid: print the generated stub size once */
4066 static int stubprinted;
4068 printf("stubsize: %d\n", ((long)mcodeptr - (long) s));
/* statistics: account the emitted stub bytes */
4074 count_nstub_len += NATIVESTUBSIZE;
4081 /* function: removenativestub **************************************************
4083 removes a previously created native-stub from memory
4085 *******************************************************************************/
/* removenativestub: release a stub previously built by createnativestub.
   Frees exactly NATIVESTUBSIZE bytes, matching the CNEW in the creator. */
4087 void removenativestub(u1 *stub)
4089 CFREE(stub, NATIVESTUBSIZE);
4093 /* code generation functions */
/* x86_64_emit_ialu: emit a 32-bit ALU operation (alu_op) computing
   dst = src->prev OP src, covering every combination of the three operands
   being in registers or stack memory (INMEMORY, slot index * 8 off REG_SP).
   Uses REG_ITMP1 as scratch when both sides are in memory.
   NOTE(review): several `} else {` lines were lost in extraction; code kept
   byte-identical, comments only. */
4095 void x86_64_emit_ialu(s4 alu_op, stackptr src, instruction *iptr)
4097 s4 s1 = src->prev->regoff;
4098 s4 s2 = src->regoff;
4099 s4 d = iptr->dst->regoff;
/* destination lives in a stack slot */
4101 if (iptr->dst->flags & INMEMORY) {
4102 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* both sources in memory; if one aliases d, operate on d in place */
4104 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4105 x86_64_alul_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4107 } else if (s1 == d) {
4108 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4109 x86_64_alul_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
/* general case: load s1, combine with s2, store to d */
4112 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4113 x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, REG_ITMP1);
4114 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
4117 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
/* s1 in register, s2 in memory */
4119 x86_64_alul_reg_membase(alu_op, s1, REG_SP, d * 8);
4122 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4123 x86_64_alul_reg_reg(alu_op, s1, REG_ITMP1);
4124 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
4127 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* s1 in memory, s2 in register */
4129 x86_64_alul_reg_membase(alu_op, s2, REG_SP, d * 8);
4132 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4133 x86_64_alul_reg_reg(alu_op, s2, REG_ITMP1);
4134 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
/* both sources in registers, destination in memory */
4138 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4139 x86_64_alul_reg_membase(alu_op, s2, REG_SP, d * 8);
/* destination lives in a register */
4143 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4144 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4145 x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, d);
4147 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4149 x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, d);
4151 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4153 x86_64_alul_membase_reg(alu_op, REG_SP, s1 * 8, d);
/* all-register case */
4157 x86_64_alul_reg_reg(alu_op, s1, d);
4161 x86_64_alul_reg_reg(alu_op, s2, d);
/* x86_64_emit_lalu: 64-bit counterpart of x86_64_emit_ialu -- emits
   dst = src->prev OP src using the quadword ALU emitters, handling all
   register/memory residence combinations; REG_ITMP1 is the scratch.
   NOTE(review): some `} else {` lines were lost in extraction; code kept
   byte-identical, comments only. */
4169 void x86_64_emit_lalu(s4 alu_op, stackptr src, instruction *iptr)
4171 s4 s1 = src->prev->regoff;
4172 s4 s2 = src->regoff;
4173 s4 d = iptr->dst->regoff;
/* destination lives in a stack slot */
4175 if (iptr->dst->flags & INMEMORY) {
4176 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* both sources in memory; operate in place when one aliases d */
4178 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4179 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4181 } else if (s1 == d) {
4182 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4183 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
/* general case: load s1, combine with s2, store to d */
4186 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4187 x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, REG_ITMP1);
4188 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4191 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
/* s1 in register, s2 in memory */
4193 x86_64_alu_reg_membase(alu_op, s1, REG_SP, d * 8);
4196 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4197 x86_64_alu_reg_reg(alu_op, s1, REG_ITMP1);
4198 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4201 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* s1 in memory, s2 in register */
4203 x86_64_alu_reg_membase(alu_op, s2, REG_SP, d * 8);
4206 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4207 x86_64_alu_reg_reg(alu_op, s2, REG_ITMP1);
4208 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
/* both sources in registers, destination in memory */
4212 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4213 x86_64_alu_reg_membase(alu_op, s2, REG_SP, d * 8);
/* destination lives in a register */
4217 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4218 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4219 x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, d);
4221 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4223 x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, d);
4225 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4227 x86_64_alu_membase_reg(alu_op, REG_SP, s1 * 8, d);
/* all-register case */
4231 x86_64_alu_reg_reg(alu_op, s1, d);
4235 x86_64_alu_reg_reg(alu_op, s2, d);
/* x86_64_emit_ialuconst: emit a 32-bit ALU operation with the immediate
   iptr->val.i: dst = src OP imm, for every register/memory placement of
   src and dst.  REG_ITMP1 is the scratch for memory-to-memory moves. */
4243 void x86_64_emit_ialuconst(s4 alu_op, stackptr src, instruction *iptr)
4245 s4 s1 = src->regoff;
4246 s4 d = iptr->dst->regoff;
4248 if (iptr->dst->flags & INMEMORY) {
4249 if (src->flags & INMEMORY) {
/* src aliases dst: apply the immediate directly to the stack slot */
4251 x86_64_alul_imm_membase(alu_op, iptr->val.i, REG_SP, d * 8);
/* distinct slots: load, operate, store */
4254 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4255 x86_64_alul_imm_reg(alu_op, iptr->val.i, REG_ITMP1);
4256 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
/* src in register, dst in memory */
4260 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4261 x86_64_alul_imm_membase(alu_op, iptr->val.i, REG_SP, d * 8);
/* destination in a register */
4265 if (src->flags & INMEMORY) {
4266 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4267 x86_64_alul_imm_reg(alu_op, iptr->val.i, d);
4271 x86_64_alul_imm_reg(alu_op, iptr->val.i, d);
/* x86_64_emit_laluconst: 64-bit ALU with long immediate iptr->val.l.
   x86-64 ALU instructions only take a sign-extended 32-bit immediate, so
   each path checks x86_64_is_imm32() and otherwise materializes the value
   in a temporary register (REG_ITMP1/REG_ITMP2) first. */
4278 void x86_64_emit_laluconst(s4 alu_op, stackptr src, instruction *iptr)
4280 s4 s1 = src->regoff;
4281 s4 d = iptr->dst->regoff;
4283 if (iptr->dst->flags & INMEMORY) {
4284 if (src->flags & INMEMORY) {
/* src aliases dst: operate on the stack slot in place */
4286 if (x86_64_is_imm32(iptr->val.l)) {
4287 x86_64_alu_imm_membase(alu_op, iptr->val.l, REG_SP, d * 8);
4290 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4291 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
/* distinct slots: load src, operate, store to dst */
4295 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4297 if (x86_64_is_imm32(iptr->val.l)) {
4298 x86_64_alu_imm_reg(alu_op, iptr->val.l, REG_ITMP1);
4301 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP2);
4302 x86_64_alu_reg_reg(alu_op, REG_ITMP2, REG_ITMP1);
4304 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
/* src in register, dst in memory */
4308 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4310 if (x86_64_is_imm32(iptr->val.l)) {
4311 x86_64_alu_imm_membase(alu_op, iptr->val.l, REG_SP, d * 8);
4314 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4315 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
/* destination in a register */
4320 if (src->flags & INMEMORY) {
4321 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4327 if (x86_64_is_imm32(iptr->val.l)) {
4328 x86_64_alu_imm_reg(alu_op, iptr->val.l, d);
4331 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4332 x86_64_alu_reg_reg(alu_op, REG_ITMP1, d);
/* x86_64_emit_ishift: emit a 32-bit variable shift (shift_op) of
   src->prev by src.  x86 requires the shift count in CL, so RCX is saved
   to REG_ITMP1 on entry and restored afterwards; the count is loaded into
   RCX for each case.
   NOTE(review): extraction dropped several branch lines (including the
   paths around 4406-4416 where s1/s2 may themselves be RCX); code kept
   byte-identical, comments only. */
4339 void x86_64_emit_ishift(s4 shift_op, stackptr src, instruction *iptr)
4341 s4 s1 = src->prev->regoff;
4342 s4 s2 = src->regoff;
4343 s4 d = iptr->dst->regoff;
4345 M_INTMOVE(RCX, REG_ITMP1); /* save RCX */
4346 if (iptr->dst->flags & INMEMORY) {
4347 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* both operands in memory; shift d's slot in place when s1 == d */
4349 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4350 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4353 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4354 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4355 x86_64_shiftl_reg(shift_op, REG_ITMP2);
4356 x86_64_movl_reg_membase(REG_ITMP2, REG_SP, d * 8);
4359 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
/* value in register, count in memory */
4360 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4361 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4362 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4364 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* value in memory, count in register */
4367 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4371 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4372 x86_64_shiftl_reg(shift_op, REG_ITMP2);
4373 x86_64_movl_reg_membase(REG_ITMP2, REG_SP, d * 8);
/* both operands in registers, destination in memory */
4378 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4379 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4381 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* destination in a register */
4388 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4389 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4390 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4391 x86_64_shiftl_reg(shift_op, d);
4393 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4394 M_INTMOVE(s1, d); /* maybe src is RCX */
4395 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4396 x86_64_shiftl_reg(shift_op, d);
4398 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4400 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4401 x86_64_shiftl_reg(shift_op, d);
4412 x86_64_shiftl_reg(shift_op, d);
4416 M_INTMOVE(REG_ITMP3, RCX);
4419 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* x86_64_emit_lshift: 64-bit counterpart of x86_64_emit_ishift -- emits a
   quadword variable shift of src->prev by src.  RCX (the mandatory shift
   count register) is saved in REG_ITMP1 and restored at the end.
   NOTE(review): extraction dropped several branch lines (including the
   paths around 4492-4502 where s1/s2 may themselves be RCX); code kept
   byte-identical, comments only. */
4426 void x86_64_emit_lshift(s4 shift_op, stackptr src, instruction *iptr)
4428 s4 s1 = src->prev->regoff;
4429 s4 s2 = src->regoff;
4430 s4 d = iptr->dst->regoff;
4432 M_INTMOVE(RCX, REG_ITMP1); /* save RCX */
4433 if (iptr->dst->flags & INMEMORY) {
4434 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* both operands in memory; shift d's slot in place when s1 == d */
4436 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4437 x86_64_shift_membase(shift_op, REG_SP, d * 8);
4440 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4441 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4442 x86_64_shift_reg(shift_op, REG_ITMP2);
4443 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, d * 8);
4446 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
/* value in register, count in memory */
4447 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4448 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4449 x86_64_shift_membase(shift_op, REG_SP, d * 8);
4451 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* value in memory, count in register */
4454 x86_64_shift_membase(shift_op, REG_SP, d * 8);
4458 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4459 x86_64_shift_reg(shift_op, REG_ITMP2);
4460 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, d * 8);
/* both operands in registers, destination in memory */
4465 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4466 x86_64_shift_membase(shift_op, REG_SP, d * 8);
4468 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* destination in a register */
4475 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4476 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4477 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4478 x86_64_shift_reg(shift_op, d);
4480 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4481 M_INTMOVE(s1, d); /* maybe src is RCX */
4482 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4483 x86_64_shift_reg(shift_op, d);
4485 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4487 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4488 x86_64_shift_reg(shift_op, d);
4498 x86_64_shift_reg(shift_op, d);
4502 M_INTMOVE(REG_ITMP3, RCX);
4505 M_INTMOVE(REG_ITMP1, RCX); /* restore RCX */
/* x86_64_emit_ishiftconst: emit a 32-bit shift of src by the constant
   count iptr->val.i, for every register/memory placement of src and dst.
   No RCX juggling is needed since the count is an immediate. */
4512 void x86_64_emit_ishiftconst(s4 shift_op, stackptr src, instruction *iptr)
4514 s4 s1 = src->regoff;
4515 s4 d = iptr->dst->regoff;
4517 if ((src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
/* src aliases dst: shift the stack slot in place */
4519 x86_64_shiftl_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
/* distinct slots: load, shift, store */
4522 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4523 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, REG_ITMP1);
4524 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
4527 } else if ((src->flags & INMEMORY) && !(iptr->dst->flags & INMEMORY)) {
4528 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4529 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, d);
4531 } else if (!(src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
4532 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4533 x86_64_shiftl_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
/* all-register case */
4537 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, d);
/* x86_64_emit_lshiftconst: 64-bit counterpart of x86_64_emit_ishiftconst --
   shift src by the constant iptr->val.i using the quadword shift emitters.
   (The count comes from val.i even for long shifts: JVM shift counts are
   always ints.) */
4543 void x86_64_emit_lshiftconst(s4 shift_op, stackptr src, instruction *iptr)
4545 s4 s1 = src->regoff;
4546 s4 d = iptr->dst->regoff;
4548 if ((src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
/* src aliases dst: shift the stack slot in place */
4550 x86_64_shift_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
/* distinct slots: load, shift, store */
4553 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4554 x86_64_shift_imm_reg(shift_op, iptr->val.i, REG_ITMP1);
4555 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4558 } else if ((src->flags & INMEMORY) && !(iptr->dst->flags & INMEMORY)) {
4559 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4560 x86_64_shift_imm_reg(shift_op, iptr->val.i, d);
4562 } else if (!(src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
4563 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4564 x86_64_shift_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
/* all-register case */
4568 x86_64_shift_imm_reg(shift_op, iptr->val.i, d);
/* x86_64_emit_ifcc: emit a 32-bit compare of src against the constant
   iptr->val.i followed by a conditional jump (if_op).  The jump target is
   unknown yet: jcc is emitted with displacement 0 and registered via
   codegen_addreference for later patching. */
4574 void x86_64_emit_ifcc(s4 if_op, stackptr src, instruction *iptr)
4576 if (src->flags & INMEMORY) {
4577 x86_64_alul_imm_membase(X86_64_CMP, iptr->val.i, REG_SP, src->regoff * 8);
4580 x86_64_alul_imm_reg(X86_64_CMP, iptr->val.i, src->regoff);
4582 x86_64_jcc(if_op, 0);
4583 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
/* x86_64_emit_if_lcc: 64-bit compare of src against the long constant
   iptr->val.l, then a forward conditional jump registered for patching.
   Values that don't fit a sign-extended 32-bit immediate are loaded into
   REG_ITMP1 first (x86-64 CMP has no 64-bit immediate form). */
4588 void x86_64_emit_if_lcc(s4 if_op, stackptr src, instruction *iptr)
4590 s4 s1 = src->regoff;
4592 if (src->flags & INMEMORY) {
4593 if (x86_64_is_imm32(iptr->val.l)) {
4594 x86_64_alu_imm_membase(X86_64_CMP, iptr->val.l, REG_SP, s1 * 8);
4597 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4598 x86_64_alu_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
4602 if (x86_64_is_imm32(iptr->val.l)) {
4603 x86_64_alu_imm_reg(X86_64_CMP, iptr->val.l, s1);
4606 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4607 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP1, s1);
4610 x86_64_jcc(if_op, 0);
4611 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
/* x86_64_emit_if_icmpcc: 32-bit compare of src->prev (s1) against src (s2)
   for every register/memory placement, then a forward conditional jump
   registered for later patching. */
4616 void x86_64_emit_if_icmpcc(s4 if_op, stackptr src, instruction *iptr)
4618 s4 s1 = src->prev->regoff;
4619 s4 s2 = src->regoff;
4621 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* both in memory: route s2 through REG_ITMP1 */
4622 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4623 x86_64_alul_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
4625 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4626 x86_64_alul_membase_reg(X86_64_CMP, REG_SP, s2 * 8, s1);
4628 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4629 x86_64_alul_reg_membase(X86_64_CMP, s2, REG_SP, s1 * 8);
4632 x86_64_alul_reg_reg(X86_64_CMP, s2, s1);
4634 x86_64_jcc(if_op, 0);
4635 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
/* x86_64_emit_if_lcmpcc: 64-bit counterpart of x86_64_emit_if_icmpcc --
   quadword compare of src->prev against src, then a forward conditional
   jump registered for later patching. */
4640 void x86_64_emit_if_lcmpcc(s4 if_op, stackptr src, instruction *iptr)
4642 s4 s1 = src->prev->regoff;
4643 s4 s2 = src->regoff;
4645 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* both in memory: route s2 through REG_ITMP1 */
4646 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4647 x86_64_alu_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
4649 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4650 x86_64_alu_membase_reg(X86_64_CMP, REG_SP, s2 * 8, s1);
4652 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4653 x86_64_alu_reg_membase(X86_64_CMP, s2, REG_SP, s1 * 8);
4656 x86_64_alu_reg_reg(X86_64_CMP, s2, s1);
4658 x86_64_jcc(if_op, 0);
4659 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
/* x86_64_mov_reg_reg: emit 64-bit register-to-register move
   (REX.W + 89 /r, i.e. MOV r/m64, r64). */
4669 void x86_64_mov_reg_reg(s8 reg, s8 dreg) {
4670 x86_64_emit_rex(1,(reg),0,(dreg));
4671 *(mcodeptr++) = 0x89;
4672 x86_64_emit_reg((reg),(dreg));
/* x86_64_mov_imm_reg: emit MOV r64, imm64 (REX.W + B8+rd with a full
   8-byte immediate) -- loads an arbitrary 64-bit constant. */
4676 void x86_64_mov_imm_reg(s8 imm, s8 reg) {
4677 x86_64_emit_rex(1,0,0,(reg));
4678 *(mcodeptr++) = 0xb8 + ((reg) & 0x07);
4679 x86_64_emit_imm64((imm));
/* x86_64_movl_imm_reg: 32-bit variant (no REX.W) -- MOV r32, imm32;
   the high 32 bits of the destination are zeroed by the CPU. */
4683 void x86_64_movl_imm_reg(s8 imm, s8 reg) {
4684 x86_64_emit_rex(0,0,0,(reg));
4685 *(mcodeptr++) = 0xb8 + ((reg) & 0x07);
4686 x86_64_emit_imm32((imm));
/* x86_64_mov_membase_reg: 64-bit load reg <- [basereg + disp]
   (REX.W + 8B /r). */
4690 void x86_64_mov_membase_reg(s8 basereg, s8 disp, s8 reg) {
4691 x86_64_emit_rex(1,(reg),0,(basereg));
4692 *(mcodeptr++) = 0x8b;
4693 x86_64_emit_membase((basereg),(disp),(reg));
4697 void x86_64_movl_membase_reg(s8 basereg, s8 disp, s8 reg) {
4698 x86_64_emit_rex(0,(reg),0,(basereg));
4699 *(mcodeptr++) = 0x8b;
4700 x86_64_emit_membase((basereg),(disp),(reg));
4705 * this one is for INVOKEVIRTUAL/INVOKEINTERFACE to have a
4706 * constant membase immediate length of 32bit
4708 void x86_64_mov_membase32_reg(s8 basereg, s8 disp, s8 reg) {
4709 x86_64_emit_rex(1,(reg),0,(basereg));
4710 *(mcodeptr++) = 0x8b;
4711 x86_64_address_byte(2, (reg), (basereg));
4712 x86_64_emit_imm32((disp));
4716 void x86_64_mov_reg_membase(s8 reg, s8 basereg, s8 disp) {
4717 x86_64_emit_rex(1,(reg),0,(basereg));
4718 *(mcodeptr++) = 0x89;
4719 x86_64_emit_membase((basereg),(disp),(reg));
4723 void x86_64_movl_reg_membase(s8 reg, s8 basereg, s8 disp) {
4724 x86_64_emit_rex(0,(reg),0,(basereg));
4725 *(mcodeptr++) = 0x89;
4726 x86_64_emit_membase((basereg),(disp),(reg));
4730 void x86_64_mov_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4731 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4732 *(mcodeptr++) = 0x8b;
4733 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4737 void x86_64_movl_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4738 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4739 *(mcodeptr++) = 0x8b;
4740 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4744 void x86_64_mov_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4745 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4746 *(mcodeptr++) = 0x89;
4747 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4751 void x86_64_movl_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4752 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4753 *(mcodeptr++) = 0x89;
4754 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4758 void x86_64_movw_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4759 *(mcodeptr++) = 0x66;
4760 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4761 *(mcodeptr++) = 0x89;
4762 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4766 void x86_64_movb_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4767 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4768 *(mcodeptr++) = 0x88;
4769 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4773 void x86_64_mov_imm_membase(s8 imm, s8 basereg, s8 disp) {
4774 x86_64_emit_rex(1,0,0,(basereg));
4775 *(mcodeptr++) = 0xc7;
4776 x86_64_emit_membase((basereg),(disp),0);
4777 x86_64_emit_imm32((imm));
4781 void x86_64_movl_imm_membase(s8 imm, s8 basereg, s8 disp) {
4782 x86_64_emit_rex(0,0,0,(basereg));
4783 *(mcodeptr++) = 0xc7;
4784 x86_64_emit_membase((basereg),(disp),0);
4785 x86_64_emit_imm32((imm));
4789 void x86_64_movsbq_reg_reg(s8 reg, s8 dreg) {
4790 x86_64_emit_rex(1,(dreg),0,(reg));
4791 *(mcodeptr++) = 0x0f;
4792 *(mcodeptr++) = 0xbe;
4793 /* XXX: why do reg and dreg have to be exchanged */
4794 x86_64_emit_reg((dreg),(reg));
4798 void x86_64_movsbq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4799 x86_64_emit_rex(1,(dreg),0,(basereg));
4800 *(mcodeptr++) = 0x0f;
4801 *(mcodeptr++) = 0xbe;
4802 x86_64_emit_membase((basereg),(disp),(dreg));
4806 void x86_64_movswq_reg_reg(s8 reg, s8 dreg) {
4807 x86_64_emit_rex(1,(dreg),0,(reg));
4808 *(mcodeptr++) = 0x0f;
4809 *(mcodeptr++) = 0xbf;
4810 /* XXX: why do reg and dreg have to be exchanged */
4811 x86_64_emit_reg((dreg),(reg));
4815 void x86_64_movswq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4816 x86_64_emit_rex(1,(dreg),0,(basereg));
4817 *(mcodeptr++) = 0x0f;
4818 *(mcodeptr++) = 0xbf;
4819 x86_64_emit_membase((basereg),(disp),(dreg));
4823 void x86_64_movslq_reg_reg(s8 reg, s8 dreg) {
4824 x86_64_emit_rex(1,(dreg),0,(reg));
4825 *(mcodeptr++) = 0x63;
4826 /* XXX: why do reg and dreg have to be exchanged */
4827 x86_64_emit_reg((dreg),(reg));
4831 void x86_64_movslq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4832 x86_64_emit_rex(1,(dreg),0,(basereg));
4833 *(mcodeptr++) = 0x63;
4834 x86_64_emit_membase((basereg),(disp),(dreg));
4838 void x86_64_movzwq_reg_reg(s8 reg, s8 dreg) {
4839 x86_64_emit_rex(1,(dreg),0,(reg));
4840 *(mcodeptr++) = 0x0f;
4841 *(mcodeptr++) = 0xb7;
4842 /* XXX: why do reg and dreg have to be exchanged */
4843 x86_64_emit_reg((dreg),(reg));
4847 void x86_64_movzwq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4848 x86_64_emit_rex(1,(dreg),0,(basereg));
4849 *(mcodeptr++) = 0x0f;
4850 *(mcodeptr++) = 0xb7;
4851 x86_64_emit_membase((basereg),(disp),(dreg));
4855 void x86_64_movswq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4856 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4857 *(mcodeptr++) = 0x0f;
4858 *(mcodeptr++) = 0xbf;
4859 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4863 void x86_64_movsbq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4864 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4865 *(mcodeptr++) = 0x0f;
4866 *(mcodeptr++) = 0xbe;
4867 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4871 void x86_64_movzwq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4872 x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4873 *(mcodeptr++) = 0x0f;
4874 *(mcodeptr++) = 0xb7;
4875 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4883 void x86_64_alu_reg_reg(s8 opc, s8 reg, s8 dreg) {
4884 x86_64_emit_rex(1,(reg),0,(dreg));
4885 *(mcodeptr++) = (((opc)) << 3) + 1;
4886 x86_64_emit_reg((reg),(dreg));
4890 void x86_64_alul_reg_reg(s8 opc, s8 reg, s8 dreg) {
4891 x86_64_emit_rex(0,(reg),0,(dreg));
4892 *(mcodeptr++) = (((opc)) << 3) + 1;
4893 x86_64_emit_reg((reg),(dreg));
4897 void x86_64_alu_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp) {
4898 x86_64_emit_rex(1,(reg),0,(basereg));
4899 *(mcodeptr++) = (((opc)) << 3) + 1;
4900 x86_64_emit_membase((basereg),(disp),(reg));
4904 void x86_64_alul_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp) {
4905 x86_64_emit_rex(0,(reg),0,(basereg));
4906 *(mcodeptr++) = (((opc)) << 3) + 1;
4907 x86_64_emit_membase((basereg),(disp),(reg));
4911 void x86_64_alu_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg) {
4912 x86_64_emit_rex(1,(reg),0,(basereg));
4913 *(mcodeptr++) = (((opc)) << 3) + 3;
4914 x86_64_emit_membase((basereg),(disp),(reg));
4918 void x86_64_alul_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg) {
4919 x86_64_emit_rex(0,(reg),0,(basereg));
4920 *(mcodeptr++) = (((opc)) << 3) + 3;
4921 x86_64_emit_membase((basereg),(disp),(reg));
4925 void x86_64_alu_imm_reg(s8 opc, s8 imm, s8 dreg) {
4926 if (x86_64_is_imm8(imm)) {
4927 x86_64_emit_rex(1,0,0,(dreg));
4928 *(mcodeptr++) = 0x83;
4929 x86_64_emit_reg((opc),(dreg));
4930 x86_64_emit_imm8((imm));
4932 x86_64_emit_rex(1,0,0,(dreg));
4933 *(mcodeptr++) = 0x81;
4934 x86_64_emit_reg((opc),(dreg));
4935 x86_64_emit_imm32((imm));
4940 void x86_64_alul_imm_reg(s8 opc, s8 imm, s8 dreg) {
4941 if (x86_64_is_imm8(imm)) {
4942 x86_64_emit_rex(0,0,0,(dreg));
4943 *(mcodeptr++) = 0x83;
4944 x86_64_emit_reg((opc),(dreg));
4945 x86_64_emit_imm8((imm));
4947 x86_64_emit_rex(0,0,0,(dreg));
4948 *(mcodeptr++) = 0x81;
4949 x86_64_emit_reg((opc),(dreg));
4950 x86_64_emit_imm32((imm));
4955 void x86_64_alu_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
4956 if (x86_64_is_imm8(imm)) {
4957 x86_64_emit_rex(1,(basereg),0,0);
4958 *(mcodeptr++) = 0x83;
4959 x86_64_emit_membase((basereg),(disp),(opc));
4960 x86_64_emit_imm8((imm));
4962 x86_64_emit_rex(1,(basereg),0,0);
4963 *(mcodeptr++) = 0x81;
4964 x86_64_emit_membase((basereg),(disp),(opc));
4965 x86_64_emit_imm32((imm));
4970 void x86_64_alul_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
4971 if (x86_64_is_imm8(imm)) {
4972 x86_64_emit_rex(0,(basereg),0,0);
4973 *(mcodeptr++) = 0x83;
4974 x86_64_emit_membase((basereg),(disp),(opc));
4975 x86_64_emit_imm8((imm));
4977 x86_64_emit_rex(0,(basereg),0,0);
4978 *(mcodeptr++) = 0x81;
4979 x86_64_emit_membase((basereg),(disp),(opc));
4980 x86_64_emit_imm32((imm));
4985 void x86_64_test_reg_reg(s8 reg, s8 dreg) {
4986 x86_64_emit_rex(1,(reg),0,(dreg));
4987 *(mcodeptr++) = 0x85;
4988 x86_64_emit_reg((reg),(dreg));
4992 void x86_64_testl_reg_reg(s8 reg, s8 dreg) {
4993 x86_64_emit_rex(0,(reg),0,(dreg));
4994 *(mcodeptr++) = 0x85;
4995 x86_64_emit_reg((reg),(dreg));
4999 void x86_64_test_imm_reg(s8 imm, s8 reg) {
5000 *(mcodeptr++) = 0xf7;
5001 x86_64_emit_reg(0,(reg));
5002 x86_64_emit_imm32((imm));
5006 void x86_64_testw_imm_reg(s8 imm, s8 reg) {
5007 *(mcodeptr++) = 0x66;
5008 *(mcodeptr++) = 0xf7;
5009 x86_64_emit_reg(0,(reg));
5010 x86_64_emit_imm16((imm));
5014 void x86_64_testb_imm_reg(s8 imm, s8 reg) {
5015 *(mcodeptr++) = 0xf6;
5016 x86_64_emit_reg(0,(reg));
5017 x86_64_emit_imm8((imm));
5021 void x86_64_lea_membase_reg(s8 basereg, s8 disp, s8 reg) {
5022 x86_64_emit_rex(1,(reg),0,(basereg));
5023 *(mcodeptr++) = 0x8d;
5024 x86_64_emit_membase((basereg),(disp),(reg));
5028 void x86_64_leal_membase_reg(s8 basereg, s8 disp, s8 reg) {
5029 x86_64_emit_rex(0,(reg),0,(basereg));
5030 *(mcodeptr++) = 0x8d;
5031 x86_64_emit_membase((basereg),(disp),(reg));
5037 * inc, dec operations
5039 void x86_64_inc_reg(s8 reg) {
5040 x86_64_emit_rex(1,0,0,(reg));
5041 *(mcodeptr++) = 0xff;
5042 x86_64_emit_reg(0,(reg));
5046 void x86_64_incl_reg(s8 reg) {
5047 x86_64_emit_rex(0,0,0,(reg));
5048 *(mcodeptr++) = 0xff;
5049 x86_64_emit_reg(0,(reg));
5053 void x86_64_inc_membase(s8 basereg, s8 disp) {
5054 x86_64_emit_rex(1,(basereg),0,0);
5055 *(mcodeptr++) = 0xff;
5056 x86_64_emit_membase((basereg),(disp),0);
5060 void x86_64_incl_membase(s8 basereg, s8 disp) {
5061 x86_64_emit_rex(0,(basereg),0,0);
5062 *(mcodeptr++) = 0xff;
5063 x86_64_emit_membase((basereg),(disp),0);
5067 void x86_64_dec_reg(s8 reg) {
5068 x86_64_emit_rex(1,0,0,(reg));
5069 *(mcodeptr++) = 0xff;
5070 x86_64_emit_reg(1,(reg));
5074 void x86_64_decl_reg(s8 reg) {
5075 x86_64_emit_rex(0,0,0,(reg));
5076 *(mcodeptr++) = 0xff;
5077 x86_64_emit_reg(1,(reg));
5081 void x86_64_dec_membase(s8 basereg, s8 disp) {
5082 x86_64_emit_rex(1,(basereg),0,0);
5083 *(mcodeptr++) = 0xff;
5084 x86_64_emit_membase((basereg),(disp),1);
5088 void x86_64_decl_membase(s8 basereg, s8 disp) {
5089 x86_64_emit_rex(0,(basereg),0,0);
5090 *(mcodeptr++) = 0xff;
5091 x86_64_emit_membase((basereg),(disp),1);
5097 void x86_64_cltd() {
5098 *(mcodeptr++) = 0x99;
5102 void x86_64_cqto() {
5103 x86_64_emit_rex(1,0,0,0);
5104 *(mcodeptr++) = 0x99;
5109 void x86_64_imul_reg_reg(s8 reg, s8 dreg) {
5110 x86_64_emit_rex(1,(dreg),0,(reg));
5111 *(mcodeptr++) = 0x0f;
5112 *(mcodeptr++) = 0xaf;
5113 x86_64_emit_reg((dreg),(reg));
5117 void x86_64_imull_reg_reg(s8 reg, s8 dreg) {
5118 x86_64_emit_rex(0,(dreg),0,(reg));
5119 *(mcodeptr++) = 0x0f;
5120 *(mcodeptr++) = 0xaf;
5121 x86_64_emit_reg((dreg),(reg));
5125 void x86_64_imul_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5126 x86_64_emit_rex(1,(dreg),0,(basereg));
5127 *(mcodeptr++) = 0x0f;
5128 *(mcodeptr++) = 0xaf;
5129 x86_64_emit_membase((basereg),(disp),(dreg));
5133 void x86_64_imull_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5134 x86_64_emit_rex(0,(dreg),0,(basereg));
5135 *(mcodeptr++) = 0x0f;
5136 *(mcodeptr++) = 0xaf;
5137 x86_64_emit_membase((basereg),(disp),(dreg));
5141 void x86_64_imul_imm_reg(s8 imm, s8 dreg) {
5142 if (x86_64_is_imm8((imm))) {
5143 x86_64_emit_rex(1,0,0,(dreg));
5144 *(mcodeptr++) = 0x6b;
5145 x86_64_emit_reg(0,(dreg));
5146 x86_64_emit_imm8((imm));
5148 x86_64_emit_rex(1,0,0,(dreg));
5149 *(mcodeptr++) = 0x69;
5150 x86_64_emit_reg(0,(dreg));
5151 x86_64_emit_imm32((imm));
5156 void x86_64_imul_imm_reg_reg(s8 imm, s8 reg, s8 dreg) {
5157 if (x86_64_is_imm8((imm))) {
5158 x86_64_emit_rex(1,(dreg),0,(reg));
5159 *(mcodeptr++) = 0x6b;
5160 x86_64_emit_reg((dreg),(reg));
5161 x86_64_emit_imm8((imm));
5163 x86_64_emit_rex(1,(dreg),0,(reg));
5164 *(mcodeptr++) = 0x69;
5165 x86_64_emit_reg((dreg),(reg));
5166 x86_64_emit_imm32((imm));
5171 void x86_64_imull_imm_reg_reg(s8 imm, s8 reg, s8 dreg) {
5172 if (x86_64_is_imm8((imm))) {
5173 x86_64_emit_rex(0,(dreg),0,(reg));
5174 *(mcodeptr++) = 0x6b;
5175 x86_64_emit_reg((dreg),(reg));
5176 x86_64_emit_imm8((imm));
5178 x86_64_emit_rex(0,(dreg),0,(reg));
5179 *(mcodeptr++) = 0x69;
5180 x86_64_emit_reg((dreg),(reg));
5181 x86_64_emit_imm32((imm));
5186 void x86_64_imul_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg) {
5187 if (x86_64_is_imm8((imm))) {
5188 x86_64_emit_rex(1,(dreg),0,(basereg));
5189 *(mcodeptr++) = 0x6b;
5190 x86_64_emit_membase((basereg),(disp),(dreg));
5191 x86_64_emit_imm8((imm));
5193 x86_64_emit_rex(1,(dreg),0,(basereg));
5194 *(mcodeptr++) = 0x69;
5195 x86_64_emit_membase((basereg),(disp),(dreg));
5196 x86_64_emit_imm32((imm));
5201 void x86_64_imull_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg) {
5202 if (x86_64_is_imm8((imm))) {
5203 x86_64_emit_rex(0,(dreg),0,(basereg));
5204 *(mcodeptr++) = 0x6b;
5205 x86_64_emit_membase((basereg),(disp),(dreg));
5206 x86_64_emit_imm8((imm));
5208 x86_64_emit_rex(0,(dreg),0,(basereg));
5209 *(mcodeptr++) = 0x69;
5210 x86_64_emit_membase((basereg),(disp),(dreg));
5211 x86_64_emit_imm32((imm));
5216 void x86_64_idiv_reg(s8 reg) {
5217 x86_64_emit_rex(1,0,0,(reg));
5218 *(mcodeptr++) = 0xf7;
5219 x86_64_emit_reg(7,(reg));
5223 void x86_64_idivl_reg(s8 reg) {
5224 x86_64_emit_rex(0,0,0,(reg));
5225 *(mcodeptr++) = 0xf7;
5226 x86_64_emit_reg(7,(reg));
5232 *(mcodeptr++) = 0xc3;
5240 void x86_64_shift_reg(s8 opc, s8 reg) {
5241 x86_64_emit_rex(1,0,0,(reg));
5242 *(mcodeptr++) = 0xd3;
5243 x86_64_emit_reg((opc),(reg));
5247 void x86_64_shiftl_reg(s8 opc, s8 reg) {
5248 x86_64_emit_rex(0,0,0,(reg));
5249 *(mcodeptr++) = 0xd3;
5250 x86_64_emit_reg((opc),(reg));
5254 void x86_64_shift_membase(s8 opc, s8 basereg, s8 disp) {
5255 x86_64_emit_rex(1,0,0,(basereg));
5256 *(mcodeptr++) = 0xd3;
5257 x86_64_emit_membase((basereg),(disp),(opc));
5261 void x86_64_shiftl_membase(s8 opc, s8 basereg, s8 disp) {
5262 x86_64_emit_rex(0,0,0,(basereg));
5263 *(mcodeptr++) = 0xd3;
5264 x86_64_emit_membase((basereg),(disp),(opc));
5268 void x86_64_shift_imm_reg(s8 opc, s8 imm, s8 dreg) {
5270 x86_64_emit_rex(1,0,0,(dreg));
5271 *(mcodeptr++) = 0xd1;
5272 x86_64_emit_reg((opc),(dreg));
5274 x86_64_emit_rex(1,0,0,(dreg));
5275 *(mcodeptr++) = 0xc1;
5276 x86_64_emit_reg((opc),(dreg));
5277 x86_64_emit_imm8((imm));
5282 void x86_64_shiftl_imm_reg(s8 opc, s8 imm, s8 dreg) {
5284 x86_64_emit_rex(0,0,0,(dreg));
5285 *(mcodeptr++) = 0xd1;
5286 x86_64_emit_reg((opc),(dreg));
5288 x86_64_emit_rex(0,0,0,(dreg));
5289 *(mcodeptr++) = 0xc1;
5290 x86_64_emit_reg((opc),(dreg));
5291 x86_64_emit_imm8((imm));
5296 void x86_64_shift_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
5298 x86_64_emit_rex(1,0,0,(basereg));
5299 *(mcodeptr++) = 0xd1;
5300 x86_64_emit_membase((basereg),(disp),(opc));
5302 x86_64_emit_rex(1,0,0,(basereg));
5303 *(mcodeptr++) = 0xc1;
5304 x86_64_emit_membase((basereg),(disp),(opc));
5305 x86_64_emit_imm8((imm));
5310 void x86_64_shiftl_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
5312 x86_64_emit_rex(0,0,0,(basereg));
5313 *(mcodeptr++) = 0xd1;
5314 x86_64_emit_membase((basereg),(disp),(opc));
5316 x86_64_emit_rex(0,0,0,(basereg));
5317 *(mcodeptr++) = 0xc1;
5318 x86_64_emit_membase((basereg),(disp),(opc));
5319 x86_64_emit_imm8((imm));
5328 void x86_64_jmp_imm(s8 imm) {
5329 *(mcodeptr++) = 0xe9;
5330 x86_64_emit_imm32((imm));
5334 void x86_64_jmp_reg(s8 reg) {
5335 x86_64_emit_rex(0,0,0,(reg));
5336 *(mcodeptr++) = 0xff;
5337 x86_64_emit_reg(4,(reg));
5341 void x86_64_jcc(s8 opc, s8 imm) {
5342 *(mcodeptr++) = 0x0f;
5343 *(mcodeptr++) = (0x80 + (opc));
5344 x86_64_emit_imm32((imm));
5350 * conditional set and move operations
5353 /* we need the rex byte to get all low bytes */
5354 void x86_64_setcc_reg(s8 opc, s8 reg) {
5355 *(mcodeptr++) = (0x40 | (((reg) >> 3) & 0x01));
5356 *(mcodeptr++) = 0x0f;
5357 *(mcodeptr++) = (0x90 + (opc));
5358 x86_64_emit_reg(0,(reg));
5362 /* we need the rex byte to get all low bytes */
5363 void x86_64_setcc_membase(s8 opc, s8 basereg, s8 disp) {
5364 *(mcodeptr++) = (0x40 | (((basereg) >> 3) & 0x01));
5365 *(mcodeptr++) = 0x0f;
5366 *(mcodeptr++) = (0x90 + (opc));
5367 x86_64_emit_membase((basereg),(disp),0);
5371 void x86_64_cmovcc_reg_reg(s8 opc, s8 reg, s8 dreg) {
5372 x86_64_emit_rex(1,(dreg),0,(reg));
5373 *(mcodeptr++) = 0x0f;
5374 *(mcodeptr++) = (0x40 + (opc));
5375 x86_64_emit_reg((dreg),(reg));
5379 void x86_64_cmovccl_reg_reg(s8 opc, s8 reg, s8 dreg) {
5380 x86_64_emit_rex(0,(dreg),0,(reg));
5381 *(mcodeptr++) = 0x0f;
5382 *(mcodeptr++) = (0x40 + (opc));
5383 x86_64_emit_reg((dreg),(reg));
5388 void x86_64_neg_reg(s8 reg) {
5389 x86_64_emit_rex(1,0,0,(reg));
5390 *(mcodeptr++) = 0xf7;
5391 x86_64_emit_reg(3,(reg));
5395 void x86_64_negl_reg(s8 reg) {
5396 x86_64_emit_rex(0,0,0,(reg));
5397 *(mcodeptr++) = 0xf7;
5398 x86_64_emit_reg(3,(reg));
5402 void x86_64_neg_membase(s8 basereg, s8 disp) {
5403 x86_64_emit_rex(1,0,0,(basereg));
5404 *(mcodeptr++) = 0xf7;
5405 x86_64_emit_membase((basereg),(disp),3);
5409 void x86_64_negl_membase(s8 basereg, s8 disp) {
5410 x86_64_emit_rex(0,0,0,(basereg));
5411 *(mcodeptr++) = 0xf7;
5412 x86_64_emit_membase((basereg),(disp),3);
5417 void x86_64_push_imm(s8 imm) {
5418 *(mcodeptr++) = 0x68;
5419 x86_64_emit_imm32((imm));
5423 void x86_64_pop_reg(s8 reg) {
5424 x86_64_emit_rex(0,0,0,(reg));
5425 *(mcodeptr++) = 0x58 + (0x07 & (reg));
5429 void x86_64_xchg_reg_reg(s8 reg, s8 dreg) {
5430 x86_64_emit_rex(1,(reg),0,(dreg));
5431 *(mcodeptr++) = 0x87;
5432 x86_64_emit_reg((reg),(dreg));
5437 *(mcodeptr++) = 0x90;
5445 void x86_64_call_reg(s8 reg) {
5446 x86_64_emit_rex(1,0,0,(reg));
5447 *(mcodeptr++) = 0xff;
5448 x86_64_emit_reg(2,(reg));
5452 void x86_64_call_imm(s8 imm) {
5453 *(mcodeptr++) = 0xe8;
5454 x86_64_emit_imm32((imm));
5460 * floating point instructions (SSE2)
5462 void x86_64_addsd_reg_reg(s8 reg, s8 dreg) {
5463 *(mcodeptr++) = 0xf2;
5464 x86_64_emit_rex(0,(dreg),0,(reg));
5465 *(mcodeptr++) = 0x0f;
5466 *(mcodeptr++) = 0x58;
5467 x86_64_emit_reg((dreg),(reg));
5471 void x86_64_addss_reg_reg(s8 reg, s8 dreg) {
5472 *(mcodeptr++) = 0xf3;
5473 x86_64_emit_rex(0,(dreg),0,(reg));
5474 *(mcodeptr++) = 0x0f;
5475 *(mcodeptr++) = 0x58;
5476 x86_64_emit_reg((dreg),(reg));
5480 void x86_64_cvtsi2ssq_reg_reg(s8 reg, s8 dreg) {
5481 *(mcodeptr++) = 0xf3;
5482 x86_64_emit_rex(1,(dreg),0,(reg));
5483 *(mcodeptr++) = 0x0f;
5484 *(mcodeptr++) = 0x2a;
5485 x86_64_emit_reg((dreg),(reg));
5489 void x86_64_cvtsi2ss_reg_reg(s8 reg, s8 dreg) {
5490 *(mcodeptr++) = 0xf3;
5491 x86_64_emit_rex(0,(dreg),0,(reg));
5492 *(mcodeptr++) = 0x0f;
5493 *(mcodeptr++) = 0x2a;
5494 x86_64_emit_reg((dreg),(reg));
5498 void x86_64_cvtsi2sdq_reg_reg(s8 reg, s8 dreg) {
5499 *(mcodeptr++) = 0xf2;
5500 x86_64_emit_rex(1,(dreg),0,(reg));
5501 *(mcodeptr++) = 0x0f;
5502 *(mcodeptr++) = 0x2a;
5503 x86_64_emit_reg((dreg),(reg));
5507 void x86_64_cvtsi2sd_reg_reg(s8 reg, s8 dreg) {
5508 *(mcodeptr++) = 0xf2;
5509 x86_64_emit_rex(0,(dreg),0,(reg));
5510 *(mcodeptr++) = 0x0f;
5511 *(mcodeptr++) = 0x2a;
5512 x86_64_emit_reg((dreg),(reg));
5516 void x86_64_cvtss2sd_reg_reg(s8 reg, s8 dreg) {
5517 *(mcodeptr++) = 0xf3;
5518 x86_64_emit_rex(0,(dreg),0,(reg));
5519 *(mcodeptr++) = 0x0f;
5520 *(mcodeptr++) = 0x5a;
5521 x86_64_emit_reg((dreg),(reg));
5525 void x86_64_cvtsd2ss_reg_reg(s8 reg, s8 dreg) {
5526 *(mcodeptr++) = 0xf2;
5527 x86_64_emit_rex(0,(dreg),0,(reg));
5528 *(mcodeptr++) = 0x0f;
5529 *(mcodeptr++) = 0x5a;
5530 x86_64_emit_reg((dreg),(reg));
5534 void x86_64_cvttss2siq_reg_reg(s8 reg, s8 dreg) {
5535 *(mcodeptr++) = 0xf3;
5536 x86_64_emit_rex(1,(dreg),0,(reg));
5537 *(mcodeptr++) = 0x0f;
5538 *(mcodeptr++) = 0x2c;
5539 x86_64_emit_reg((dreg),(reg));
5543 void x86_64_cvttss2si_reg_reg(s8 reg, s8 dreg) {
5544 *(mcodeptr++) = 0xf3;
5545 x86_64_emit_rex(0,(dreg),0,(reg));
5546 *(mcodeptr++) = 0x0f;
5547 *(mcodeptr++) = 0x2c;
5548 x86_64_emit_reg((dreg),(reg));
5552 void x86_64_cvttsd2siq_reg_reg(s8 reg, s8 dreg) {
5553 *(mcodeptr++) = 0xf2;
5554 x86_64_emit_rex(1,(dreg),0,(reg));
5555 *(mcodeptr++) = 0x0f;
5556 *(mcodeptr++) = 0x2c;
5557 x86_64_emit_reg((dreg),(reg));
5561 void x86_64_cvttsd2si_reg_reg(s8 reg, s8 dreg) {
5562 *(mcodeptr++) = 0xf2;
5563 x86_64_emit_rex(0,(dreg),0,(reg));
5564 *(mcodeptr++) = 0x0f;
5565 *(mcodeptr++) = 0x2c;
5566 x86_64_emit_reg((dreg),(reg));
5570 void x86_64_divss_reg_reg(s8 reg, s8 dreg) {
5571 *(mcodeptr++) = 0xf3;
5572 x86_64_emit_rex(0,(dreg),0,(reg));
5573 *(mcodeptr++) = 0x0f;
5574 *(mcodeptr++) = 0x5e;
5575 x86_64_emit_reg((dreg),(reg));
5579 void x86_64_divsd_reg_reg(s8 reg, s8 dreg) {
5580 *(mcodeptr++) = 0xf2;
5581 x86_64_emit_rex(0,(dreg),0,(reg));
5582 *(mcodeptr++) = 0x0f;
5583 *(mcodeptr++) = 0x5e;
5584 x86_64_emit_reg((dreg),(reg));
5588 void x86_64_movd_reg_freg(s8 reg, s8 freg) {
5589 *(mcodeptr++) = 0x66;
5590 x86_64_emit_rex(1,(freg),0,(reg));
5591 *(mcodeptr++) = 0x0f;
5592 *(mcodeptr++) = 0x6e;
5593 x86_64_emit_reg((freg),(reg));
5597 void x86_64_movd_freg_reg(s8 freg, s8 reg) {
5598 *(mcodeptr++) = 0x66;
5599 x86_64_emit_rex(1,(freg),0,(reg));
5600 *(mcodeptr++) = 0x0f;
5601 *(mcodeptr++) = 0x7e;
5602 x86_64_emit_reg((freg),(reg));
5606 void x86_64_movd_reg_membase(s8 reg, s8 basereg, s8 disp) {
5607 *(mcodeptr++) = 0x66;
5608 x86_64_emit_rex(0,(reg),0,(basereg));
5609 *(mcodeptr++) = 0x0f;
5610 *(mcodeptr++) = 0x7e;
5611 x86_64_emit_membase((basereg),(disp),(reg));
5615 void x86_64_movd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
5616 *(mcodeptr++) = 0x66;
5617 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
5618 *(mcodeptr++) = 0x0f;
5619 *(mcodeptr++) = 0x7e;
5620 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
5624 void x86_64_movd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5625 *(mcodeptr++) = 0x66;
5626 x86_64_emit_rex(1,(dreg),0,(basereg));
5627 *(mcodeptr++) = 0x0f;
5628 *(mcodeptr++) = 0x6e;
5629 x86_64_emit_membase((basereg),(disp),(dreg));
5633 void x86_64_movdl_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5634 *(mcodeptr++) = 0x66;
5635 x86_64_emit_rex(0,(dreg),0,(basereg));
5636 *(mcodeptr++) = 0x0f;
5637 *(mcodeptr++) = 0x6e;
5638 x86_64_emit_membase((basereg),(disp),(dreg));
5642 void x86_64_movd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
5643 *(mcodeptr++) = 0x66;
5644 x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
5645 *(mcodeptr++) = 0x0f;
5646 *(mcodeptr++) = 0x6e;
5647 x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
5651 void x86_64_movq_reg_reg(s8 reg, s8 dreg) {
5652 *(mcodeptr++) = 0xf3;
5653 x86_64_emit_rex(0,(dreg),0,(reg));
5654 *(mcodeptr++) = 0x0f;
5655 *(mcodeptr++) = 0x7e;
5656 x86_64_emit_reg((dreg),(reg));
5660 void x86_64_movq_reg_membase(s8 reg, s8 basereg, s8 disp) {
5661 *(mcodeptr++) = 0x66;
5662 x86_64_emit_rex(0,(reg),0,(basereg));
5663 *(mcodeptr++) = 0x0f;
5664 *(mcodeptr++) = 0xd6;
5665 x86_64_emit_membase((basereg),(disp),(reg));
5669 void x86_64_movq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5670 *(mcodeptr++) = 0xf3;
5671 x86_64_emit_rex(0,(dreg),0,(basereg));
5672 *(mcodeptr++) = 0x0f;
5673 *(mcodeptr++) = 0x7e;
5674 x86_64_emit_membase((basereg),(disp),(dreg));
5678 void x86_64_movss_reg_reg(s8 reg, s8 dreg) {
5679 *(mcodeptr++) = 0xf3;
5680 x86_64_emit_rex(0,(reg),0,(dreg));
5681 *(mcodeptr++) = 0x0f;
5682 *(mcodeptr++) = 0x10;
5683 x86_64_emit_reg((reg),(dreg));
5687 void x86_64_movsd_reg_reg(s8 reg, s8 dreg) {
5688 *(mcodeptr++) = 0xf2;
5689 x86_64_emit_rex(0,(reg),0,(dreg));
5690 *(mcodeptr++) = 0x0f;
5691 *(mcodeptr++) = 0x10;
5692 x86_64_emit_reg((reg),(dreg));
5696 void x86_64_movss_reg_membase(s8 reg, s8 basereg, s8 disp) {
5697 *(mcodeptr++) = 0xf3;
5698 x86_64_emit_rex(0,(reg),0,(basereg));
5699 *(mcodeptr++) = 0x0f;
5700 *(mcodeptr++) = 0x11;
5701 x86_64_emit_membase((basereg),(disp),(reg));
5705 void x86_64_movsd_reg_membase(s8 reg, s8 basereg, s8 disp) {
5706 *(mcodeptr++) = 0xf2;
5707 x86_64_emit_rex(0,(reg),0,(basereg));
5708 *(mcodeptr++) = 0x0f;
5709 *(mcodeptr++) = 0x11;
5710 x86_64_emit_membase((basereg),(disp),(reg));
5714 void x86_64_movss_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5715 *(mcodeptr++) = 0xf3;
5716 x86_64_emit_rex(0,(dreg),0,(basereg));
5717 *(mcodeptr++) = 0x0f;
5718 *(mcodeptr++) = 0x10;
5719 x86_64_emit_membase((basereg),(disp),(dreg));
5723 void x86_64_movlps_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5724 x86_64_emit_rex(0,(dreg),0,(basereg));
5725 *(mcodeptr++) = 0x0f;
5726 *(mcodeptr++) = 0x12;
5727 x86_64_emit_membase((basereg),(disp),(dreg));
5731 void x86_64_movsd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5732 *(mcodeptr++) = 0xf2;
5733 x86_64_emit_rex(0,(dreg),0,(basereg));
5734 *(mcodeptr++) = 0x0f;
5735 *(mcodeptr++) = 0x10;
5736 x86_64_emit_membase((basereg),(disp),(dreg));
5740 void x86_64_movlpd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5741 *(mcodeptr++) = 0x66;
5742 x86_64_emit_rex(0,(dreg),0,(basereg));
5743 *(mcodeptr++) = 0x0f;
5744 *(mcodeptr++) = 0x12;
5745 x86_64_emit_membase((basereg),(disp),(dreg));
5749 void x86_64_movss_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
5750 *(mcodeptr++) = 0xf3;
5751 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
5752 *(mcodeptr++) = 0x0f;
5753 *(mcodeptr++) = 0x11;
5754 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
5758 void x86_64_movsd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
5759 *(mcodeptr++) = 0xf2;
5760 x86_64_emit_rex(0,(reg),(indexreg),(basereg));
5761 *(mcodeptr++) = 0x0f;
5762 *(mcodeptr++) = 0x11;
5763 x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
5767 void x86_64_movss_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
5768 *(mcodeptr++) = 0xf3;
5769 x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
5770 *(mcodeptr++) = 0x0f;
5771 *(mcodeptr++) = 0x10;
5772 x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
5776 void x86_64_movsd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
5777 *(mcodeptr++) = 0xf2;
5778 x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
5779 *(mcodeptr++) = 0x0f;
5780 *(mcodeptr++) = 0x10;
5781 x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
5785 void x86_64_mulss_reg_reg(s8 reg, s8 dreg) {
5786 *(mcodeptr++) = 0xf3;
5787 x86_64_emit_rex(0,(dreg),0,(reg));
5788 *(mcodeptr++) = 0x0f;
5789 *(mcodeptr++) = 0x59;
5790 x86_64_emit_reg((dreg),(reg));
5794 void x86_64_mulsd_reg_reg(s8 reg, s8 dreg) {
5795 *(mcodeptr++) = 0xf2;
5796 x86_64_emit_rex(0,(dreg),0,(reg));
5797 *(mcodeptr++) = 0x0f;
5798 *(mcodeptr++) = 0x59;
5799 x86_64_emit_reg((dreg),(reg));
5803 void x86_64_subss_reg_reg(s8 reg, s8 dreg) {
5804 *(mcodeptr++) = 0xf3;
5805 x86_64_emit_rex(0,(dreg),0,(reg));
5806 *(mcodeptr++) = 0x0f;
5807 *(mcodeptr++) = 0x5c;
5808 x86_64_emit_reg((dreg),(reg));
5812 void x86_64_subsd_reg_reg(s8 reg, s8 dreg) {
5813 *(mcodeptr++) = 0xf2;
5814 x86_64_emit_rex(0,(dreg),0,(reg));
5815 *(mcodeptr++) = 0x0f;
5816 *(mcodeptr++) = 0x5c;
5817 x86_64_emit_reg((dreg),(reg));
5821 void x86_64_ucomiss_reg_reg(s8 reg, s8 dreg) {
5822 x86_64_emit_rex(0,(dreg),0,(reg));
5823 *(mcodeptr++) = 0x0f;
5824 *(mcodeptr++) = 0x2e;
5825 x86_64_emit_reg((dreg),(reg));
5829 void x86_64_ucomisd_reg_reg(s8 reg, s8 dreg) {
5830 *(mcodeptr++) = 0x66;
5831 x86_64_emit_rex(0,(dreg),0,(reg));
5832 *(mcodeptr++) = 0x0f;
5833 *(mcodeptr++) = 0x2e;
5834 x86_64_emit_reg((dreg),(reg));
5838 void x86_64_xorps_reg_reg(s8 reg, s8 dreg) {
5839 x86_64_emit_rex(0,(dreg),0,(reg));
5840 *(mcodeptr++) = 0x0f;
5841 *(mcodeptr++) = 0x57;
5842 x86_64_emit_reg((dreg),(reg));
5846 void x86_64_xorps_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5847 x86_64_emit_rex(0,(dreg),0,(basereg));
5848 *(mcodeptr++) = 0x0f;
5849 *(mcodeptr++) = 0x57;
5850 x86_64_emit_membase((basereg),(disp),(dreg));
5854 void x86_64_xorpd_reg_reg(s8 reg, s8 dreg) {
5855 *(mcodeptr++) = 0x66;
5856 x86_64_emit_rex(0,(dreg),0,(reg));
5857 *(mcodeptr++) = 0x0f;
5858 *(mcodeptr++) = 0x57;
5859 x86_64_emit_reg((dreg),(reg));
5863 void x86_64_xorpd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5864 *(mcodeptr++) = 0x66;
5865 x86_64_emit_rex(0,(dreg),0,(basereg));
5866 *(mcodeptr++) = 0x0f;
5867 *(mcodeptr++) = 0x57;
5868 x86_64_emit_membase((basereg),(disp),(dreg));
5874 * These are local overrides for various environment variables in Emacs.
5875 * Please do not remove this and leave it at the end of the file, where
5876 * Emacs will automagically detect them.
5877 * ---------------------------------------------------------------------
5880 * indent-tabs-mode: t