1 /* jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003
4 Institut f. Computersprachen, TU Wien
5 R. Grafl, A. Krall, C. Kruegel, C. Oates, R. Obermaisser, M. Probst,
6 S. Ring, E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich,
9 This file is part of CACAO.
11 This program is free software; you can redistribute it and/or
12 modify it under the terms of the GNU General Public License as
13 published by the Free Software Foundation; either version 2, or (at
14 your option) any later version.
16 This program is distributed in the hope that it will be useful, but
17 WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 General Public License for more details.
21 You should have received a copy of the GNU General Public License
22 along with this program; if not, write to the Free Software
23 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
26 Contact: cacao@complang.tuwien.ac.at
28 Authors: Andreas Krall
31 $Id: codegen.c 1266 2004-07-01 20:38:16Z twisti $
40 #include "jit/x86_64/codegen.h"
41 #include "jit/x86_64/emitfuncs.h"
44 #include "jit/parse.h"
52 /* include independent code generation stuff */
53 #include "jit/codegen.inc"
54 #include "jit/reg.inc"
57 /* register description - array ***********************************************/
59 /* #define REG_RES 0 reserved register for OS or code generator */
60 /* #define REG_RET 1 return value register */
61 /* #define REG_EXC 2 exception value register (only old jit) */
62 /* #define REG_SAV 3 (callee) saved register */
63 /* #define REG_TMP 4 scratch temporary register (caller saved) */
64 /* #define REG_ARG 5 argument register (caller saved) */
66 /* #define REG_END -1 last entry in tables */
69 REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
70 REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
75 int nregdescfloat[] = {
76 /* REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP, */
77 /* REG_RES, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV, REG_SAV, */
78 REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
79 REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
85 This function determines a register, to which the result of an operation
86 should go, when it is ultimately intended to store the result in
88 If v is assigned to an actual register, this register will be returned.
89 Otherwise (when v is spilled) this function returns tempregnum.
90 If not already done, regoff and flags are set in the stack location.
93 static int reg_of_var(stackptr v, int tempregnum)
/* Pick the register that should receive a result ultimately destined for
 * stack slot v.  If v is bound to a machine register, that register number
 * is returned and cached in v->regoff; if v is spilled, v is flagged
 * INMEMORY and tempregnum is presumably returned by the elided fall-through
 * path (not visible in this excerpt — confirm against the full source).
 * NOTE(review): interior lines (the varkind dispatch, closing braces, final
 * return) are elided here; comments below describe only the visible cases. */
99 	if (!(v->flags & INMEMORY))
/* Interface stack slot: inherit the allocation recorded in the
 * interfaces[varnum][type] table. */
103 			var = &(interfaces[v->varnum][v->type]);
104 			v->regoff = var->regoff;
105 			if (!(var->flags & INMEMORY))
/* Local variable: inherit the allocation recorded in the
 * locals[varnum][type] table. */
109 			var = &(locals[v->varnum][v->type]);
110 			v->regoff = var->regoff;
111 			if (!(var->flags & INMEMORY))
/* Argument slot: varnum indexes the method's parameters.  Map the first
 * FLT_ARG_CNT / INT_ARG_CNT parameters onto the argument registers;
 * later parameters fall through to a stack offset. */
115 			v->regoff = v->varnum;
116 			if (IS_FLT_DBL_TYPE(v->type)) {
117 				if (v->varnum < FLT_ARG_CNT) {
118 					v->regoff = argfltregs[v->varnum];
119 					return(argfltregs[v->varnum]);
122 				if (v->varnum < INT_ARG_CNT) {
123 					v->regoff = argintregs[v->varnum];
124 					return(argintregs[v->varnum]);
/* Stack-passed argument: rebase the offset past the register-passed
 * integer arguments. */
127 			v->regoff -= INT_ARG_CNT;
/* No register could be assigned: mark the slot as spilled to memory. */
130 	v->flags |= INMEMORY;
135 /* NullPointerException signal handler for hardware null pointer check */
137 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
/* Signal handler that converts a hardware memory fault (installed for
 * SIGSEGV/SIGBUS by init_exceptions) raised by the JIT's implicit null
 * checks into a Java NullPointerException: it builds the exception
 * object and rewrites the saved user context so that execution resumes
 * in asm_handle_exception instead of at the faulting instruction. */
141 	/* long faultaddr; */
143 	struct ucontext *_uc = (struct ucontext *) _p;
144 	struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
145 	struct sigaction act;
146 	java_objectheader *xptr;
148 	/* Reset signal handler - necessary for SysV, does no harm for BSD */
151 	/* instr = *((int*)(sigctx->rip)); */
152 	/* faultaddr = sigctx->sc_regs[(instr >> 16) & 0x1f]; */
154 	/* if (faultaddr == 0) { */
155 	act.sa_sigaction = (void *) catch_NullPointerException; /* reinstall handler */
156 	act.sa_flags = SA_SIGINFO;
157 	sigaction(sig, &act, NULL);
/* Unblock the signal so a later null-pointer fault is delivered again.
 * NOTE(review): the declaration/initialization of nsig (sigemptyset) is
 * elided in this excerpt — confirm it is set up in the full source. */
160 	sigaddset(&nsig, sig);
161 	sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
/* NOTE(review): new_exception allocates and is not async-signal-safe in
 * general; presumably acceptable here because the fault originates from
 * JIT-generated code — verify. */
163 	xptr = new_exception(string_java_lang_NullPointerException);
/* Hand the exception to the asm unwinder through the convention it
 * expects: exception pointer in rax, faulting pc in r10, then redirect
 * the resumed rip into asm_handle_exception. */
165 	sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
166 	sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
167 	sigctx->rip = (u8) asm_handle_exception;
172 	/* faultaddr += (long) ((instr << 16) >> 16); */
173 	/* fprintf(stderr, "faulting address: 0x%08x\n", faultaddr); */
174 	/* panic("Stack overflow"); */
179 /* ArithmeticException signal handler for hardware divide by zero check */
181 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
/* Signal handler that converts a hardware SIGFPE (installed by
 * init_exceptions for the JIT's hardware divide check) into a Java
 * ArithmeticException, using the same context-rewriting scheme as
 * catch_NullPointerException: build the exception object, then redirect
 * the saved rip into asm_handle_exception. */
185 	struct ucontext *_uc = (struct ucontext *) _p;
186 	struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
187 	struct sigaction act;
188 	java_objectheader *xptr;
190 	/* Reset signal handler - necessary for SysV, does no harm for BSD */
192 	act.sa_sigaction = (void *) catch_ArithmeticException; /* reinstall handler */
193 	act.sa_flags = SA_SIGINFO;
194 	sigaction(sig, &act, NULL);
/* Unblock the signal so a later arithmetic fault is delivered again.
 * NOTE(review): nsig's declaration/sigemptyset is elided in this
 * excerpt — confirm in the full source. */
197 	sigaddset(&nsig, sig);
198 	sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
/* NOTE(review): new_exception_message allocates; not async-signal-safe
 * in general — same caveat as the null-pointer handler. */
200 	xptr = new_exception_message(string_java_lang_ArithmeticException,
201 								 string_java_lang_ArithmeticException_message);
/* Exception pointer in rax, faulting pc in r10, resume in the asm
 * exception dispatcher. */
203 	sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
204 	sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
205 	sigctx->rip = (u8) asm_handle_exception;
211 void init_exceptions(void)
/* Install the hardware-fault signal handlers that turn CPU traps from
 * JIT-generated code into Java exceptions: SIGSEGV and SIGBUS map to
 * NullPointerException, SIGFPE maps to ArithmeticException.
 * NOTE(review): the sigemptyset(&act.sa_mask) calls and any guard
 * conditions (e.g. a check-disabling flag) are elided in this excerpt —
 * confirm act.sa_mask is initialized before each sigaction call. */
213 	struct sigaction act;
215 	/* install signal handlers we need to convert to exceptions */
/* Null-pointer accesses fault as SIGSEGV. */
219 	act.sa_sigaction = (void *) catch_NullPointerException;
220 	act.sa_flags = SA_SIGINFO;
221 	sigaction(SIGSEGV, &act, NULL);
/* Some platforms report the same access as SIGBUS; route it to the
 * same handler. */
225 	act.sa_sigaction = (void *) catch_NullPointerException;
226 	act.sa_flags = SA_SIGINFO;
227 	sigaction(SIGBUS, &act, NULL);
/* Integer division faults (divide by zero) arrive as SIGFPE. */
231 	act.sa_sigaction = (void *) catch_ArithmeticException;
232 	act.sa_flags = SA_SIGINFO;
233 	sigaction(SIGFPE, &act, NULL);
237 /* function gen_mcode **********************************************************
239 generates machine code
241 *******************************************************************************/
243 /* global code generation pointer */
250 int len, s1, s2, s3, d;
264 /* space to save used callee saved registers */
266 savedregs_num += (savintregcnt - maxsavintreguse);
267 savedregs_num += (savfltregcnt - maxsavfltreguse);
269 parentargs_base = maxmemuse + savedregs_num;
271 #if defined(USE_THREADS) /* space to save argument of monitor_enter */
273 if (checksync && (method->flags & ACC_SYNCHRONIZED))
278 /* keep stack 16-byte aligned for calls into libc */
280 if (!isleafmethod || runverbose) {
281 if ((parentargs_base % 2) == 0) {
286 /* create method header */
288 (void) dseg_addaddress(method); /* MethodPointer */
289 (void) dseg_adds4(parentargs_base * 8); /* FrameSize */
291 #if defined(USE_THREADS)
293 /* IsSync contains the offset relative to the stack pointer for the
294 argument of monitor_exit used in the exception handler. Since the
295 offset could be zero and give a wrong meaning of the flag it is
299 if (checksync && (method->flags & ACC_SYNCHRONIZED))
300 (void) dseg_adds4((maxmemuse + 1) * 8); /* IsSync */
305 (void) dseg_adds4(0); /* IsSync */
307 (void) dseg_adds4(isleafmethod); /* IsLeaf */
308 (void) dseg_adds4(savintregcnt - maxsavintreguse); /* IntSave */
309 (void) dseg_adds4(savfltregcnt - maxsavfltreguse); /* FltSave */
310 (void) dseg_adds4(exceptiontablelength); /* ExTableSize */
312 /* create exception table */
314 for (ex = extable; ex != NULL; ex = ex->down) {
315 dseg_addtarget(ex->start);
316 dseg_addtarget(ex->end);
317 dseg_addtarget(ex->handler);
318 (void) dseg_addaddress(ex->catchtype);
321 /* initialize mcode variables */
323 mcodeptr = (u1*) mcodebase;
324 mcodeend = (s4*) (mcodebase + mcodesize);
325 MCODECHECK(128 + mparamcount);
327 /* create stack frame (if necessary) */
329 if (parentargs_base) {
330 x86_64_alu_imm_reg(X86_64_SUB, parentargs_base * 8, REG_SP);
333 /* save return address and used callee saved registers */
336 for (r = savintregcnt - 1; r >= maxsavintreguse; r--) {
337 p--; x86_64_mov_reg_membase(savintregs[r], REG_SP, p * 8);
339 for (r = savfltregcnt - 1; r >= maxsavfltreguse; r--) {
340 p--; x86_64_movq_reg_membase(savfltregs[r], REG_SP, p * 8);
343 /* save monitorenter argument */
345 #if defined(USE_THREADS)
346 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
347 if (method->flags & ACC_STATIC) {
348 x86_64_mov_imm_reg((s8) class, REG_ITMP1);
349 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, maxmemuse * 8);
352 x86_64_mov_reg_membase(argintregs[0], REG_SP, maxmemuse * 8);
357 /* copy argument registers to stack and call trace function with pointer
358 to arguments on stack.
361 x86_64_alu_imm_reg(X86_64_SUB, (6 + 8 + 1 + 1) * 8, REG_SP);
363 x86_64_mov_reg_membase(argintregs[0], REG_SP, 1 * 8);
364 x86_64_mov_reg_membase(argintregs[1], REG_SP, 2 * 8);
365 x86_64_mov_reg_membase(argintregs[2], REG_SP, 3 * 8);
366 x86_64_mov_reg_membase(argintregs[3], REG_SP, 4 * 8);
367 x86_64_mov_reg_membase(argintregs[4], REG_SP, 5 * 8);
368 x86_64_mov_reg_membase(argintregs[5], REG_SP, 6 * 8);
370 x86_64_movq_reg_membase(argfltregs[0], REG_SP, 7 * 8);
371 x86_64_movq_reg_membase(argfltregs[1], REG_SP, 8 * 8);
372 x86_64_movq_reg_membase(argfltregs[2], REG_SP, 9 * 8);
373 x86_64_movq_reg_membase(argfltregs[3], REG_SP, 10 * 8);
374 /* x86_64_movq_reg_membase(argfltregs[4], REG_SP, 11 * 8); */
375 /* x86_64_movq_reg_membase(argfltregs[5], REG_SP, 12 * 8); */
376 /* x86_64_movq_reg_membase(argfltregs[6], REG_SP, 13 * 8); */
377 /* x86_64_movq_reg_membase(argfltregs[7], REG_SP, 14 * 8); */
379 for (p = 0, l = 0; p < mparamcount; p++) {
382 if (IS_FLT_DBL_TYPE(t)) {
383 for (s1 = (mparamcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : mparamcount - 2; s1 >= p; s1--) {
384 x86_64_mov_reg_reg(argintregs[s1], argintregs[s1 + 1]);
387 x86_64_movd_freg_reg(argfltregs[l], argintregs[p]);
392 x86_64_mov_imm_reg((s8) method, REG_ITMP2);
393 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, 0 * 8);
394 x86_64_mov_imm_reg((s8) builtin_trace_args, REG_ITMP1);
395 x86_64_call_reg(REG_ITMP1);
397 x86_64_mov_membase_reg(REG_SP, 1 * 8, argintregs[0]);
398 x86_64_mov_membase_reg(REG_SP, 2 * 8, argintregs[1]);
399 x86_64_mov_membase_reg(REG_SP, 3 * 8, argintregs[2]);
400 x86_64_mov_membase_reg(REG_SP, 4 * 8, argintregs[3]);
401 x86_64_mov_membase_reg(REG_SP, 5 * 8, argintregs[4]);
402 x86_64_mov_membase_reg(REG_SP, 6 * 8, argintregs[5]);
404 x86_64_movq_membase_reg(REG_SP, 7 * 8, argfltregs[0]);
405 x86_64_movq_membase_reg(REG_SP, 8 * 8, argfltregs[1]);
406 x86_64_movq_membase_reg(REG_SP, 9 * 8, argfltregs[2]);
407 x86_64_movq_membase_reg(REG_SP, 10 * 8, argfltregs[3]);
408 /* x86_64_movq_membase_reg(REG_SP, 11 * 8, argfltregs[4]); */
409 /* x86_64_movq_membase_reg(REG_SP, 12 * 8, argfltregs[5]); */
410 /* x86_64_movq_membase_reg(REG_SP, 13 * 8, argfltregs[6]); */
411 /* x86_64_movq_membase_reg(REG_SP, 14 * 8, argfltregs[7]); */
413 x86_64_alu_imm_reg(X86_64_ADD, (6 + 8 + 1 + 1) * 8, REG_SP);
416 /* take arguments out of register or stack frame */
418 for (p = 0, l = 0, s1 = 0, s2 = 0; p < mparamcount; p++) {
420 var = &(locals[l][t]);
422 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
425 if (IS_INT_LNG_TYPE(t)) {
433 if (IS_INT_LNG_TYPE(t)) { /* integer args */
434 if (s1 < INT_ARG_CNT) { /* register arguments */
435 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
436 M_INTMOVE(argintregs[s1], r);
438 } else { /* reg arg -> spilled */
439 x86_64_mov_reg_membase(argintregs[s1], REG_SP, r * 8);
441 } else { /* stack arguments */
442 pa = s1 - INT_ARG_CNT;
443 if (s2 >= FLT_ARG_CNT) {
444 pa += s2 - FLT_ARG_CNT;
446 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
447 x86_64_mov_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, r); /* + 8 for return address */
448 } else { /* stack arg -> spilled */
449 x86_64_mov_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1); /* + 8 for return address */
450 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, r * 8);
455 } else { /* floating args */
456 if (s2 < FLT_ARG_CNT) { /* register arguments */
457 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
458 M_FLTMOVE(argfltregs[s2], r);
460 } else { /* reg arg -> spilled */
461 x86_64_movq_reg_membase(argfltregs[s2], REG_SP, r * 8);
464 } else { /* stack arguments */
465 pa = s2 - FLT_ARG_CNT;
466 if (s1 >= INT_ARG_CNT) {
467 pa += s1 - INT_ARG_CNT;
469 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
470 x86_64_movq_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, r);
473 x86_64_movq_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
474 x86_64_movq_reg_membase(REG_FTMP1, REG_SP, r * 8);
481 /* call monitorenter function */
483 #if defined(USE_THREADS)
484 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
485 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
486 x86_64_mov_imm_reg((s8) builtin_monitorenter, REG_ITMP1);
487 x86_64_call_reg(REG_ITMP1);
492 /* end of header generation */
494 /* walk through all basic blocks */
495 for (bptr = block; bptr != NULL; bptr = bptr->next) {
497 bptr->mpc = (u4) ((u1 *) mcodeptr - mcodebase);
499 if (bptr->flags >= BBREACHED) {
501 /* branch resolving */
504 for (brefs = bptr->branchrefs; brefs != NULL; brefs = brefs->next) {
505 gen_resolvebranch((u1*) mcodebase + brefs->branchpos,
506 brefs->branchpos, bptr->mpc);
509 /* copy interface registers to their destination */
514 while (src != NULL) {
516 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
517 if (bptr->type == BBTYPE_SBR) {
518 d = reg_of_var(src, REG_ITMP1);
520 store_reg_to_var_int(src, d);
522 } else if (bptr->type == BBTYPE_EXH) {
523 d = reg_of_var(src, REG_ITMP1);
524 M_INTMOVE(REG_ITMP1, d);
525 store_reg_to_var_int(src, d);
529 d = reg_of_var(src, REG_ITMP1);
530 if ((src->varkind != STACKVAR)) {
532 if (IS_FLT_DBL_TYPE(s2)) {
533 s1 = interfaces[len][s2].regoff;
534 if (!(interfaces[len][s2].flags & INMEMORY)) {
538 x86_64_movq_membase_reg(REG_SP, s1 * 8, d);
540 store_reg_to_var_flt(src, d);
543 s1 = interfaces[len][s2].regoff;
544 if (!(interfaces[len][s2].flags & INMEMORY)) {
548 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
550 store_reg_to_var_int(src, d);
557 /* walk through all instructions */
561 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
563 MCODECHECK(64); /* an instruction usually needs < 64 words */
566 case ICMD_NOP: /* ... ==> ... */
569 case ICMD_NULLCHECKPOP: /* ..., objectref ==> ... */
570 if (src->flags & INMEMORY) {
571 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
574 x86_64_test_reg_reg(src->regoff, src->regoff);
576 x86_64_jcc(X86_64_CC_E, 0);
577 codegen_addxnullrefs(mcodeptr);
580 /* constant operations ************************************************/
582 case ICMD_ICONST: /* ... ==> ..., constant */
583 /* op1 = 0, val.i = constant */
585 d = reg_of_var(iptr->dst, REG_ITMP1);
586 if (iptr->val.i == 0) {
587 x86_64_alu_reg_reg(X86_64_XOR, d, d);
589 x86_64_movl_imm_reg(iptr->val.i, d);
591 store_reg_to_var_int(iptr->dst, d);
594 case ICMD_ACONST: /* ... ==> ..., constant */
595 /* op1 = 0, val.a = constant */
597 d = reg_of_var(iptr->dst, REG_ITMP1);
598 if (iptr->val.a == 0) {
599 x86_64_alu_reg_reg(X86_64_XOR, d, d);
601 x86_64_mov_imm_reg((s8) iptr->val.a, d);
603 store_reg_to_var_int(iptr->dst, d);
606 case ICMD_LCONST: /* ... ==> ..., constant */
607 /* op1 = 0, val.l = constant */
609 d = reg_of_var(iptr->dst, REG_ITMP1);
610 if (iptr->val.l == 0) {
611 x86_64_alu_reg_reg(X86_64_XOR, d, d);
613 x86_64_mov_imm_reg(iptr->val.l, d);
615 store_reg_to_var_int(iptr->dst, d);
618 case ICMD_FCONST: /* ... ==> ..., constant */
619 /* op1 = 0, val.f = constant */
621 d = reg_of_var(iptr->dst, REG_FTMP1);
622 a = dseg_addfloat(iptr->val.f);
623 x86_64_movdl_membase_reg(RIP, -(((s8) mcodeptr + ((d > 7) ? 9 : 8)) - (s8) mcodebase) + a, d);
624 store_reg_to_var_flt(iptr->dst, d);
627 case ICMD_DCONST: /* ... ==> ..., constant */
628 /* op1 = 0, val.d = constant */
630 d = reg_of_var(iptr->dst, REG_FTMP1);
631 a = dseg_adddouble(iptr->val.d);
632 x86_64_movd_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, d);
633 store_reg_to_var_flt(iptr->dst, d);
637 /* load/store operations **********************************************/
639 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
640 /* op1 = local variable */
642 d = reg_of_var(iptr->dst, REG_ITMP1);
643 if ((iptr->dst->varkind == LOCALVAR) &&
644 (iptr->dst->varnum == iptr->op1)) {
647 var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
648 if (var->flags & INMEMORY) {
649 x86_64_movl_membase_reg(REG_SP, var->regoff * 8, d);
650 store_reg_to_var_int(iptr->dst, d);
653 if (iptr->dst->flags & INMEMORY) {
654 x86_64_mov_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
657 M_INTMOVE(var->regoff, d);
662 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
663 case ICMD_ALOAD: /* op1 = local variable */
665 d = reg_of_var(iptr->dst, REG_ITMP1);
666 if ((iptr->dst->varkind == LOCALVAR) &&
667 (iptr->dst->varnum == iptr->op1)) {
670 var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
671 if (var->flags & INMEMORY) {
672 x86_64_mov_membase_reg(REG_SP, var->regoff * 8, d);
673 store_reg_to_var_int(iptr->dst, d);
676 if (iptr->dst->flags & INMEMORY) {
677 x86_64_mov_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
680 M_INTMOVE(var->regoff, d);
685 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
686 case ICMD_DLOAD: /* op1 = local variable */
688 d = reg_of_var(iptr->dst, REG_FTMP1);
689 if ((iptr->dst->varkind == LOCALVAR) &&
690 (iptr->dst->varnum == iptr->op1)) {
693 var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
694 if (var->flags & INMEMORY) {
695 x86_64_movq_membase_reg(REG_SP, var->regoff * 8, d);
696 store_reg_to_var_flt(iptr->dst, d);
699 if (iptr->dst->flags & INMEMORY) {
700 x86_64_movq_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
703 M_FLTMOVE(var->regoff, d);
708 case ICMD_ISTORE: /* ..., value ==> ... */
709 case ICMD_LSTORE: /* op1 = local variable */
712 if ((src->varkind == LOCALVAR) &&
713 (src->varnum == iptr->op1)) {
716 var = &(locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
717 if (var->flags & INMEMORY) {
718 var_to_reg_int(s1, src, REG_ITMP1);
719 x86_64_mov_reg_membase(s1, REG_SP, var->regoff * 8);
722 var_to_reg_int(s1, src, var->regoff);
723 M_INTMOVE(s1, var->regoff);
727 case ICMD_FSTORE: /* ..., value ==> ... */
728 case ICMD_DSTORE: /* op1 = local variable */
730 if ((src->varkind == LOCALVAR) &&
731 (src->varnum == iptr->op1)) {
734 var = &(locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
735 if (var->flags & INMEMORY) {
736 var_to_reg_flt(s1, src, REG_FTMP1);
737 x86_64_movq_reg_membase(s1, REG_SP, var->regoff * 8);
740 var_to_reg_flt(s1, src, var->regoff);
741 M_FLTMOVE(s1, var->regoff);
746 /* pop/dup/swap operations ********************************************/
748 /* attention: double and longs are only one entry in CACAO ICMDs */
750 case ICMD_POP: /* ..., value ==> ... */
751 case ICMD_POP2: /* ..., value, value ==> ... */
754 #define M_COPY(from,to) \
755 d = reg_of_var(to, REG_ITMP1); \
756 if ((from->regoff != to->regoff) || \
757 ((from->flags ^ to->flags) & INMEMORY)) { \
758 if (IS_FLT_DBL_TYPE(from->type)) { \
759 var_to_reg_flt(s1, from, d); \
761 store_reg_to_var_flt(to, d); \
763 var_to_reg_int(s1, from, d); \
765 store_reg_to_var_int(to, d); \
769 case ICMD_DUP: /* ..., a ==> ..., a, a */
770 M_COPY(src, iptr->dst);
773 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
775 M_COPY(src, iptr->dst->prev->prev);
777 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
779 M_COPY(src, iptr->dst);
780 M_COPY(src->prev, iptr->dst->prev);
783 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
785 M_COPY(src->prev, iptr->dst->prev->prev->prev);
787 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
789 M_COPY(src, iptr->dst);
790 M_COPY(src->prev, iptr->dst->prev);
791 M_COPY(src->prev->prev, iptr->dst->prev->prev);
792 M_COPY(src, iptr->dst->prev->prev->prev);
795 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
797 M_COPY(src, iptr->dst);
798 M_COPY(src->prev, iptr->dst->prev);
799 M_COPY(src->prev->prev, iptr->dst->prev->prev);
800 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
801 M_COPY(src, iptr->dst->prev->prev->prev->prev);
802 M_COPY(src->prev, iptr->dst->prev->prev->prev->prev->prev);
805 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
807 M_COPY(src, iptr->dst->prev);
808 M_COPY(src->prev, iptr->dst);
812 /* integer operations *************************************************/
814 case ICMD_INEG: /* ..., value ==> ..., - value */
816 d = reg_of_var(iptr->dst, REG_NULL);
817 if (iptr->dst->flags & INMEMORY) {
818 if (src->flags & INMEMORY) {
819 if (src->regoff == iptr->dst->regoff) {
820 x86_64_negl_membase(REG_SP, iptr->dst->regoff * 8);
823 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
824 x86_64_negl_reg(REG_ITMP1);
825 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
829 x86_64_movl_reg_membase(src->regoff, REG_SP, iptr->dst->regoff * 8);
830 x86_64_negl_membase(REG_SP, iptr->dst->regoff * 8);
834 if (src->flags & INMEMORY) {
835 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
839 M_INTMOVE(src->regoff, iptr->dst->regoff);
840 x86_64_negl_reg(iptr->dst->regoff);
845 case ICMD_LNEG: /* ..., value ==> ..., - value */
847 d = reg_of_var(iptr->dst, REG_NULL);
848 if (iptr->dst->flags & INMEMORY) {
849 if (src->flags & INMEMORY) {
850 if (src->regoff == iptr->dst->regoff) {
851 x86_64_neg_membase(REG_SP, iptr->dst->regoff * 8);
854 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
855 x86_64_neg_reg(REG_ITMP1);
856 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
860 x86_64_mov_reg_membase(src->regoff, REG_SP, iptr->dst->regoff * 8);
861 x86_64_neg_membase(REG_SP, iptr->dst->regoff * 8);
865 if (src->flags & INMEMORY) {
866 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
867 x86_64_neg_reg(iptr->dst->regoff);
870 M_INTMOVE(src->regoff, iptr->dst->regoff);
871 x86_64_neg_reg(iptr->dst->regoff);
876 case ICMD_I2L: /* ..., value ==> ..., value */
878 d = reg_of_var(iptr->dst, REG_ITMP3);
879 if (src->flags & INMEMORY) {
880 x86_64_movslq_membase_reg(REG_SP, src->regoff * 8, d);
883 x86_64_movslq_reg_reg(src->regoff, d);
885 store_reg_to_var_int(iptr->dst, d);
888 case ICMD_L2I: /* ..., value ==> ..., value */
890 var_to_reg_int(s1, src, REG_ITMP1);
891 d = reg_of_var(iptr->dst, REG_ITMP3);
893 store_reg_to_var_int(iptr->dst, d);
896 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
898 d = reg_of_var(iptr->dst, REG_ITMP3);
899 if (src->flags & INMEMORY) {
900 x86_64_movsbq_membase_reg(REG_SP, src->regoff * 8, d);
903 x86_64_movsbq_reg_reg(src->regoff, d);
905 store_reg_to_var_int(iptr->dst, d);
908 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
910 d = reg_of_var(iptr->dst, REG_ITMP3);
911 if (src->flags & INMEMORY) {
912 x86_64_movzwq_membase_reg(REG_SP, src->regoff * 8, d);
915 x86_64_movzwq_reg_reg(src->regoff, d);
917 store_reg_to_var_int(iptr->dst, d);
920 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
922 d = reg_of_var(iptr->dst, REG_ITMP3);
923 if (src->flags & INMEMORY) {
924 x86_64_movswq_membase_reg(REG_SP, src->regoff * 8, d);
927 x86_64_movswq_reg_reg(src->regoff, d);
929 store_reg_to_var_int(iptr->dst, d);
933 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
935 d = reg_of_var(iptr->dst, REG_NULL);
936 x86_64_emit_ialu(X86_64_ADD, src, iptr);
939 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
940 /* val.i = constant */
942 d = reg_of_var(iptr->dst, REG_NULL);
943 x86_64_emit_ialuconst(X86_64_ADD, src, iptr);
946 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
948 d = reg_of_var(iptr->dst, REG_NULL);
949 x86_64_emit_lalu(X86_64_ADD, src, iptr);
952 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
953 /* val.l = constant */
955 d = reg_of_var(iptr->dst, REG_NULL);
956 x86_64_emit_laluconst(X86_64_ADD, src, iptr);
959 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
961 d = reg_of_var(iptr->dst, REG_NULL);
962 if (iptr->dst->flags & INMEMORY) {
963 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
964 if (src->prev->regoff == iptr->dst->regoff) {
965 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
966 x86_64_alul_reg_membase(X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
969 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
970 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
971 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
974 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
975 M_INTMOVE(src->prev->regoff, REG_ITMP1);
976 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
977 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
979 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
980 if (src->prev->regoff == iptr->dst->regoff) {
981 x86_64_alul_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
984 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
985 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
986 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
990 x86_64_movl_reg_membase(src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
991 x86_64_alul_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
995 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
996 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, d);
997 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
999 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1000 M_INTMOVE(src->prev->regoff, d);
1001 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1003 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1004 /* workaround for reg alloc */
1005 if (src->regoff == iptr->dst->regoff) {
1006 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1007 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1008 M_INTMOVE(REG_ITMP1, d);
1011 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, d);
1012 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, d);
1016 /* workaround for reg alloc */
1017 if (src->regoff == iptr->dst->regoff) {
1018 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1019 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1020 M_INTMOVE(REG_ITMP1, d);
1023 M_INTMOVE(src->prev->regoff, d);
1024 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, d);
1030 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
1031 /* val.i = constant */
1033 d = reg_of_var(iptr->dst, REG_NULL);
1034 x86_64_emit_ialuconst(X86_64_SUB, src, iptr);
1037 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1039 d = reg_of_var(iptr->dst, REG_NULL);
1040 if (iptr->dst->flags & INMEMORY) {
1041 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1042 if (src->prev->regoff == iptr->dst->regoff) {
1043 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1044 x86_64_alu_reg_membase(X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1047 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1048 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1049 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1052 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1053 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1054 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1055 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1057 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1058 if (src->prev->regoff == iptr->dst->regoff) {
1059 x86_64_alu_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1062 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1063 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1064 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1068 x86_64_mov_reg_membase(src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1069 x86_64_alu_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1073 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1074 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, d);
1075 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1077 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1078 M_INTMOVE(src->prev->regoff, d);
1079 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1081 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1082 /* workaround for reg alloc */
1083 if (src->regoff == iptr->dst->regoff) {
1084 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1085 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1086 M_INTMOVE(REG_ITMP1, d);
1089 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, d);
1090 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, d);
1094 /* workaround for reg alloc */
1095 if (src->regoff == iptr->dst->regoff) {
1096 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1097 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1098 M_INTMOVE(REG_ITMP1, d);
1101 M_INTMOVE(src->prev->regoff, d);
1102 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, d);
1108 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1109 /* val.l = constant */
1111 d = reg_of_var(iptr->dst, REG_NULL);
1112 x86_64_emit_laluconst(X86_64_SUB, src, iptr);
1115 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1117 d = reg_of_var(iptr->dst, REG_NULL);
1118 if (iptr->dst->flags & INMEMORY) {
1119 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1120 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1121 x86_64_imull_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1122 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1124 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1125 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1126 x86_64_imull_reg_reg(src->prev->regoff, REG_ITMP1);
1127 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1129 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1130 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1131 x86_64_imull_reg_reg(src->regoff, REG_ITMP1);
1132 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1135 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1136 x86_64_imull_reg_reg(src->regoff, REG_ITMP1);
1137 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1141 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1142 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1143 x86_64_imull_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1145 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1146 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1147 x86_64_imull_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1149 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1150 M_INTMOVE(src->regoff, iptr->dst->regoff);
1151 x86_64_imull_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1154 if (src->regoff == iptr->dst->regoff) {
1155 x86_64_imull_reg_reg(src->prev->regoff, iptr->dst->regoff);
1158 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1159 x86_64_imull_reg_reg(src->regoff, iptr->dst->regoff);
1165 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1166 /* val.i = constant */
1168 d = reg_of_var(iptr->dst, REG_NULL);
1169 if (iptr->dst->flags & INMEMORY) {
1170 if (src->flags & INMEMORY) {
1171 x86_64_imull_imm_membase_reg(iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1172 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1175 x86_64_imull_imm_reg_reg(iptr->val.i, src->regoff, REG_ITMP1);
1176 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1180 if (src->flags & INMEMORY) {
1181 x86_64_imull_imm_membase_reg(iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1184 if (iptr->val.i == 2) {
1185 M_INTMOVE(src->regoff, iptr->dst->regoff);
1186 x86_64_alul_reg_reg(X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1189 x86_64_imull_imm_reg_reg(iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
1195 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1197 d = reg_of_var(iptr->dst, REG_NULL);
1198 if (iptr->dst->flags & INMEMORY) {
1199 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1200 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1201 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1202 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1204 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1205 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1206 x86_64_imul_reg_reg(src->prev->regoff, REG_ITMP1);
1207 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1209 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1210 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1211 x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1212 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1215 x86_64_mov_reg_reg(src->prev->regoff, REG_ITMP1);
1216 x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1217 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1221 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1222 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1223 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1225 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1226 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1227 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1229 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1230 M_INTMOVE(src->regoff, iptr->dst->regoff);
1231 x86_64_imul_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1234 if (src->regoff == iptr->dst->regoff) {
1235 x86_64_imul_reg_reg(src->prev->regoff, iptr->dst->regoff);
1238 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1239 x86_64_imul_reg_reg(src->regoff, iptr->dst->regoff);
1245 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1246 /* val.l = constant */
1248 d = reg_of_var(iptr->dst, REG_NULL);
1249 if (iptr->dst->flags & INMEMORY) {
1250 if (src->flags & INMEMORY) {
1251 if (x86_64_is_imm32(iptr->val.l)) {
1252 x86_64_imul_imm_membase_reg(iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1255 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1256 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1258 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1261 if (x86_64_is_imm32(iptr->val.l)) {
1262 x86_64_imul_imm_reg_reg(iptr->val.l, src->regoff, REG_ITMP1);
1265 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1266 x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1268 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1272 if (src->flags & INMEMORY) {
1273 if (x86_64_is_imm32(iptr->val.l)) {
1274 x86_64_imul_imm_membase_reg(iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1277 x86_64_mov_imm_reg(iptr->val.l, iptr->dst->regoff);
1278 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1282 /* should match in many cases */
1283 if (iptr->val.l == 2) {
1284 M_INTMOVE(src->regoff, iptr->dst->regoff);
1285 x86_64_alul_reg_reg(X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1288 if (x86_64_is_imm32(iptr->val.l)) {
1289 x86_64_imul_imm_reg_reg(iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1292 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1293 M_INTMOVE(src->regoff, iptr->dst->regoff);
1294 x86_64_imul_reg_reg(REG_ITMP1, iptr->dst->regoff);
1301 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1303 d = reg_of_var(iptr->dst, REG_NULL);
1304 if (src->prev->flags & INMEMORY) {
1305 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, RAX);
1308 M_INTMOVE(src->prev->regoff, RAX);
1311 if (src->flags & INMEMORY) {
1312 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1315 M_INTMOVE(src->regoff, REG_ITMP3);
1319 x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1320 x86_64_jcc(X86_64_CC_NE, 4 + 6);
1321 x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1322 x86_64_jcc(X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1324 x86_64_mov_reg_reg(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1326 x86_64_idivl_reg(REG_ITMP3);
1328 if (iptr->dst->flags & INMEMORY) {
1329 x86_64_mov_reg_membase(RAX, REG_SP, iptr->dst->regoff * 8);
1330 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1333 M_INTMOVE(RAX, iptr->dst->regoff);
1335 if (iptr->dst->regoff != RDX) {
1336 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1341 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1343 d = reg_of_var(iptr->dst, REG_NULL);
1344 if (src->prev->flags & INMEMORY) {
1345 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, RAX);
1348 M_INTMOVE(src->prev->regoff, RAX);
1351 if (src->flags & INMEMORY) {
1352 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1355 M_INTMOVE(src->regoff, REG_ITMP3);
1359 x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1360 x86_64_jcc(X86_64_CC_NE, 2 + 4 + 6);
1361 x86_64_alul_reg_reg(X86_64_XOR, RDX, RDX); /* 2 bytes */
1362 x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1363 x86_64_jcc(X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1365 x86_64_mov_reg_reg(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1367 x86_64_idivl_reg(REG_ITMP3);
1369 if (iptr->dst->flags & INMEMORY) {
1370 x86_64_mov_reg_membase(RDX, REG_SP, iptr->dst->regoff * 8);
1371 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1374 M_INTMOVE(RDX, iptr->dst->regoff);
1376 if (iptr->dst->regoff != RDX) {
1377 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1382 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1383 /* val.i = constant */
1385 var_to_reg_int(s1, src, REG_ITMP1);
1386 d = reg_of_var(iptr->dst, REG_ITMP3);
1387 M_INTMOVE(s1, REG_ITMP1);
1388 x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1389 x86_64_leal_membase_reg(REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1390 x86_64_cmovccl_reg_reg(X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1391 x86_64_shiftl_imm_reg(X86_64_SAR, iptr->val.i, REG_ITMP1);
1392 x86_64_mov_reg_reg(REG_ITMP1, d);
1393 store_reg_to_var_int(iptr->dst, d);
1396 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1397 /* val.i = constant */
1399 var_to_reg_int(s1, src, REG_ITMP1);
1400 d = reg_of_var(iptr->dst, REG_ITMP3);
1401 M_INTMOVE(s1, REG_ITMP1);
1402 x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1403 x86_64_leal_membase_reg(REG_ITMP1, iptr->val.i, REG_ITMP2);
1404 x86_64_cmovccl_reg_reg(X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1405 x86_64_alul_imm_reg(X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1406 x86_64_alul_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
1407 x86_64_mov_reg_reg(REG_ITMP1, d);
1408 store_reg_to_var_int(iptr->dst, d);
1412 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1414 d = reg_of_var(iptr->dst, REG_NULL);
1415 if (src->prev->flags & INMEMORY) {
1416 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1419 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1422 if (src->flags & INMEMORY) {
1423 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1426 M_INTMOVE(src->regoff, REG_ITMP3);
1430 x86_64_mov_imm_reg(0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1431 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
1432 x86_64_jcc(X86_64_CC_NE, 4 + 6);
1433 x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1434 x86_64_jcc(X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1436 x86_64_mov_reg_reg(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1438 x86_64_idiv_reg(REG_ITMP3);
1440 if (iptr->dst->flags & INMEMORY) {
1441 x86_64_mov_reg_membase(RAX, REG_SP, iptr->dst->regoff * 8);
1442 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1445 M_INTMOVE(RAX, iptr->dst->regoff);
1447 if (iptr->dst->regoff != RDX) {
1448 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1453 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1455 d = reg_of_var(iptr->dst, REG_NULL);
1456 if (src->prev->flags & INMEMORY) {
1457 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1460 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1463 if (src->flags & INMEMORY) {
1464 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1467 M_INTMOVE(src->regoff, REG_ITMP3);
1471 x86_64_mov_imm_reg(0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1472 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
1473 x86_64_jcc(X86_64_CC_NE, 2 + 4 + 6);
1474 x86_64_alul_reg_reg(X86_64_XOR, RDX, RDX); /* 2 bytes */
1475 x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1476 x86_64_jcc(X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1478 x86_64_mov_reg_reg(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1480 x86_64_idiv_reg(REG_ITMP3);
1482 if (iptr->dst->flags & INMEMORY) {
1483 x86_64_mov_reg_membase(RDX, REG_SP, iptr->dst->regoff * 8);
1484 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1487 M_INTMOVE(RDX, iptr->dst->regoff);
1489 if (iptr->dst->regoff != RDX) {
1490 x86_64_mov_reg_reg(REG_ITMP2, RDX); /* restore %rdx */
1495 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1496 /* val.i = constant */
1498 var_to_reg_int(s1, src, REG_ITMP1);
1499 d = reg_of_var(iptr->dst, REG_ITMP3);
1500 M_INTMOVE(s1, REG_ITMP1);
1501 x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1502 x86_64_lea_membase_reg(REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1503 x86_64_cmovcc_reg_reg(X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1504 x86_64_shift_imm_reg(X86_64_SAR, iptr->val.i, REG_ITMP1);
1505 x86_64_mov_reg_reg(REG_ITMP1, d);
1506 store_reg_to_var_int(iptr->dst, d);
1509 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1510 /* val.l = constant */
1512 var_to_reg_int(s1, src, REG_ITMP1);
1513 d = reg_of_var(iptr->dst, REG_ITMP3);
1514 M_INTMOVE(s1, REG_ITMP1);
1515 x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1516 x86_64_lea_membase_reg(REG_ITMP1, iptr->val.i, REG_ITMP2);
1517 x86_64_cmovcc_reg_reg(X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1518 x86_64_alu_imm_reg(X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1519 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
1520 x86_64_mov_reg_reg(REG_ITMP1, d);
1521 store_reg_to_var_int(iptr->dst, d);
1524 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1526 d = reg_of_var(iptr->dst, REG_NULL);
1527 x86_64_emit_ishift(X86_64_SHL, src, iptr);
1530 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1531 /* val.i = constant */
1533 d = reg_of_var(iptr->dst, REG_NULL);
1534 x86_64_emit_ishiftconst(X86_64_SHL, src, iptr);
1537 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1539 d = reg_of_var(iptr->dst, REG_NULL);
1540 x86_64_emit_ishift(X86_64_SAR, src, iptr);
1543 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1544 /* val.i = constant */
1546 d = reg_of_var(iptr->dst, REG_NULL);
1547 x86_64_emit_ishiftconst(X86_64_SAR, src, iptr);
1550 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1552 d = reg_of_var(iptr->dst, REG_NULL);
1553 x86_64_emit_ishift(X86_64_SHR, src, iptr);
1556 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1557 /* val.i = constant */
1559 d = reg_of_var(iptr->dst, REG_NULL);
1560 x86_64_emit_ishiftconst(X86_64_SHR, src, iptr);
1563 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1565 d = reg_of_var(iptr->dst, REG_NULL);
1566 x86_64_emit_lshift(X86_64_SHL, src, iptr);
1569 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1570 /* val.i = constant */
1572 d = reg_of_var(iptr->dst, REG_NULL);
1573 x86_64_emit_lshiftconst(X86_64_SHL, src, iptr);
1576 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1578 d = reg_of_var(iptr->dst, REG_NULL);
1579 x86_64_emit_lshift(X86_64_SAR, src, iptr);
1582 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1583 /* val.i = constant */
1585 d = reg_of_var(iptr->dst, REG_NULL);
1586 x86_64_emit_lshiftconst(X86_64_SAR, src, iptr);
1589 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1591 d = reg_of_var(iptr->dst, REG_NULL);
1592 x86_64_emit_lshift(X86_64_SHR, src, iptr);
1595 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1596 /* val.l = constant */
1598 d = reg_of_var(iptr->dst, REG_NULL);
1599 x86_64_emit_lshiftconst(X86_64_SHR, src, iptr);
1602 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1604 d = reg_of_var(iptr->dst, REG_NULL);
1605 x86_64_emit_ialu(X86_64_AND, src, iptr);
1608 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1609 /* val.i = constant */
1611 d = reg_of_var(iptr->dst, REG_NULL);
1612 x86_64_emit_ialuconst(X86_64_AND, src, iptr);
1615 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1617 d = reg_of_var(iptr->dst, REG_NULL);
1618 x86_64_emit_lalu(X86_64_AND, src, iptr);
1621 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1622 /* val.l = constant */
1624 d = reg_of_var(iptr->dst, REG_NULL);
1625 x86_64_emit_laluconst(X86_64_AND, src, iptr);
1628 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1630 d = reg_of_var(iptr->dst, REG_NULL);
1631 x86_64_emit_ialu(X86_64_OR, src, iptr);
1634 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1635 /* val.i = constant */
1637 d = reg_of_var(iptr->dst, REG_NULL);
1638 x86_64_emit_ialuconst(X86_64_OR, src, iptr);
1641 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1643 d = reg_of_var(iptr->dst, REG_NULL);
1644 x86_64_emit_lalu(X86_64_OR, src, iptr);
1647 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1648 /* val.l = constant */
1650 d = reg_of_var(iptr->dst, REG_NULL);
1651 x86_64_emit_laluconst(X86_64_OR, src, iptr);
1654 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1656 d = reg_of_var(iptr->dst, REG_NULL);
1657 x86_64_emit_ialu(X86_64_XOR, src, iptr);
1660 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1661 /* val.i = constant */
1663 d = reg_of_var(iptr->dst, REG_NULL);
1664 x86_64_emit_ialuconst(X86_64_XOR, src, iptr);
1667 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1669 d = reg_of_var(iptr->dst, REG_NULL);
1670 x86_64_emit_lalu(X86_64_XOR, src, iptr);
1673 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1674 /* val.l = constant */
1676 d = reg_of_var(iptr->dst, REG_NULL);
1677 x86_64_emit_laluconst(X86_64_XOR, src, iptr);
1681 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1682 /* op1 = variable, val.i = constant */
1684 var = &(locals[iptr->op1][TYPE_INT]);
1686 if (var->flags & INMEMORY) {
1687 if (iptr->val.i == 1) {
1688 x86_64_incl_membase(REG_SP, d * 8);
1690 } else if (iptr->val.i == -1) {
1691 x86_64_decl_membase(REG_SP, d * 8);
1694 x86_64_alul_imm_membase(X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1698 if (iptr->val.i == 1) {
1701 } else if (iptr->val.i == -1) {
1705 x86_64_alul_imm_reg(X86_64_ADD, iptr->val.i, d);
1711 /* floating operations ************************************************/
1713 case ICMD_FNEG: /* ..., value ==> ..., - value */
1715 var_to_reg_flt(s1, src, REG_FTMP1);
1716 d = reg_of_var(iptr->dst, REG_FTMP3);
1717 a = dseg_adds4(0x80000000);
1719 x86_64_movss_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, REG_FTMP2);
1720 x86_64_xorps_reg_reg(REG_FTMP2, d);
1721 store_reg_to_var_flt(iptr->dst, d);
1724 case ICMD_DNEG: /* ..., value ==> ..., - value */
1726 var_to_reg_flt(s1, src, REG_FTMP1);
1727 d = reg_of_var(iptr->dst, REG_FTMP3);
1728 a = dseg_adds8(0x8000000000000000);
1730 x86_64_movd_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, REG_FTMP2);
1731 x86_64_xorpd_reg_reg(REG_FTMP2, d);
1732 store_reg_to_var_flt(iptr->dst, d);
1735 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1737 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1738 var_to_reg_flt(s2, src, REG_FTMP2);
1739 d = reg_of_var(iptr->dst, REG_FTMP3);
1741 x86_64_addss_reg_reg(s2, d);
1742 } else if (s2 == d) {
1743 x86_64_addss_reg_reg(s1, d);
1746 x86_64_addss_reg_reg(s2, d);
1748 store_reg_to_var_flt(iptr->dst, d);
1751 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1753 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1754 var_to_reg_flt(s2, src, REG_FTMP2);
1755 d = reg_of_var(iptr->dst, REG_FTMP3);
1757 x86_64_addsd_reg_reg(s2, d);
1758 } else if (s2 == d) {
1759 x86_64_addsd_reg_reg(s1, d);
1762 x86_64_addsd_reg_reg(s2, d);
1764 store_reg_to_var_flt(iptr->dst, d);
1767 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1769 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1770 var_to_reg_flt(s2, src, REG_FTMP2);
1771 d = reg_of_var(iptr->dst, REG_FTMP3);
1773 M_FLTMOVE(s2, REG_FTMP2);
1777 x86_64_subss_reg_reg(s2, d);
1778 store_reg_to_var_flt(iptr->dst, d);
1781 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1783 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1784 var_to_reg_flt(s2, src, REG_FTMP2);
1785 d = reg_of_var(iptr->dst, REG_FTMP3);
1787 M_FLTMOVE(s2, REG_FTMP2);
1791 x86_64_subsd_reg_reg(s2, d);
1792 store_reg_to_var_flt(iptr->dst, d);
1795 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1797 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1798 var_to_reg_flt(s2, src, REG_FTMP2);
1799 d = reg_of_var(iptr->dst, REG_FTMP3);
1801 x86_64_mulss_reg_reg(s2, d);
1802 } else if (s2 == d) {
1803 x86_64_mulss_reg_reg(s1, d);
1806 x86_64_mulss_reg_reg(s2, d);
1808 store_reg_to_var_flt(iptr->dst, d);
1811 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1813 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1814 var_to_reg_flt(s2, src, REG_FTMP2);
1815 d = reg_of_var(iptr->dst, REG_FTMP3);
1817 x86_64_mulsd_reg_reg(s2, d);
1818 } else if (s2 == d) {
1819 x86_64_mulsd_reg_reg(s1, d);
1822 x86_64_mulsd_reg_reg(s2, d);
1824 store_reg_to_var_flt(iptr->dst, d);
1827 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1829 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1830 var_to_reg_flt(s2, src, REG_FTMP2);
1831 d = reg_of_var(iptr->dst, REG_FTMP3);
1833 M_FLTMOVE(s2, REG_FTMP2);
1837 x86_64_divss_reg_reg(s2, d);
1838 store_reg_to_var_flt(iptr->dst, d);
1841 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1843 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1844 var_to_reg_flt(s2, src, REG_FTMP2);
1845 d = reg_of_var(iptr->dst, REG_FTMP3);
1847 M_FLTMOVE(s2, REG_FTMP2);
1851 x86_64_divsd_reg_reg(s2, d);
1852 store_reg_to_var_flt(iptr->dst, d);
1855 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1857 var_to_reg_int(s1, src, REG_ITMP1);
1858 d = reg_of_var(iptr->dst, REG_FTMP1);
1859 x86_64_cvtsi2ss_reg_reg(s1, d);
1860 store_reg_to_var_flt(iptr->dst, d);
1863 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1865 var_to_reg_int(s1, src, REG_ITMP1);
1866 d = reg_of_var(iptr->dst, REG_FTMP1);
1867 x86_64_cvtsi2sd_reg_reg(s1, d);
1868 store_reg_to_var_flt(iptr->dst, d);
1871 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1873 var_to_reg_int(s1, src, REG_ITMP1);
1874 d = reg_of_var(iptr->dst, REG_FTMP1);
1875 x86_64_cvtsi2ssq_reg_reg(s1, d);
1876 store_reg_to_var_flt(iptr->dst, d);
1879 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1881 var_to_reg_int(s1, src, REG_ITMP1);
1882 d = reg_of_var(iptr->dst, REG_FTMP1);
1883 x86_64_cvtsi2sdq_reg_reg(s1, d);
1884 store_reg_to_var_flt(iptr->dst, d);
1887 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1889 var_to_reg_flt(s1, src, REG_FTMP1);
1890 d = reg_of_var(iptr->dst, REG_ITMP1);
1891 x86_64_cvttss2si_reg_reg(s1, d);
1892 x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, d); /* corner cases */
1893 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1894 x86_64_jcc(X86_64_CC_NE, a);
1895 M_FLTMOVE(s1, REG_FTMP1);
1896 x86_64_mov_imm_reg((s8) asm_builtin_f2i, REG_ITMP2);
1897 x86_64_call_reg(REG_ITMP2);
1898 M_INTMOVE(REG_RESULT, d);
1899 store_reg_to_var_int(iptr->dst, d);
1902 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1904 var_to_reg_flt(s1, src, REG_FTMP1);
1905 d = reg_of_var(iptr->dst, REG_ITMP1);
1906 x86_64_cvttsd2si_reg_reg(s1, d);
1907 x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, d); /* corner cases */
1908 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1909 x86_64_jcc(X86_64_CC_NE, a);
1910 M_FLTMOVE(s1, REG_FTMP1);
1911 x86_64_mov_imm_reg((s8) asm_builtin_d2i, REG_ITMP2);
1912 x86_64_call_reg(REG_ITMP2);
1913 M_INTMOVE(REG_RESULT, d);
1914 store_reg_to_var_int(iptr->dst, d);
1917 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1919 var_to_reg_flt(s1, src, REG_FTMP1);
1920 d = reg_of_var(iptr->dst, REG_ITMP1);
1921 x86_64_cvttss2siq_reg_reg(s1, d);
1922 x86_64_mov_imm_reg(0x8000000000000000, REG_ITMP2);
1923 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, d); /* corner cases */
1924 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1925 x86_64_jcc(X86_64_CC_NE, a);
1926 M_FLTMOVE(s1, REG_FTMP1);
1927 x86_64_mov_imm_reg((s8) asm_builtin_f2l, REG_ITMP2);
1928 x86_64_call_reg(REG_ITMP2);
1929 M_INTMOVE(REG_RESULT, d);
1930 store_reg_to_var_int(iptr->dst, d);
1933 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1935 var_to_reg_flt(s1, src, REG_FTMP1);
1936 d = reg_of_var(iptr->dst, REG_ITMP1);
1937 x86_64_cvttsd2siq_reg_reg(s1, d);
1938 x86_64_mov_imm_reg(0x8000000000000000, REG_ITMP2);
1939 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, d); /* corner cases */
1940 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1941 x86_64_jcc(X86_64_CC_NE, a);
1942 M_FLTMOVE(s1, REG_FTMP1);
1943 x86_64_mov_imm_reg((s8) asm_builtin_d2l, REG_ITMP2);
1944 x86_64_call_reg(REG_ITMP2);
1945 M_INTMOVE(REG_RESULT, d);
1946 store_reg_to_var_int(iptr->dst, d);
1949 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1951 var_to_reg_flt(s1, src, REG_FTMP1);
1952 d = reg_of_var(iptr->dst, REG_FTMP3);
1953 x86_64_cvtss2sd_reg_reg(s1, d);
1954 store_reg_to_var_flt(iptr->dst, d);
1957 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1959 var_to_reg_flt(s1, src, REG_FTMP1);
1960 d = reg_of_var(iptr->dst, REG_FTMP3);
1961 x86_64_cvtsd2ss_reg_reg(s1, d);
1962 store_reg_to_var_flt(iptr->dst, d);
1965 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1966 /* == => 0, < => 1, > => -1 */
1968 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1969 var_to_reg_flt(s2, src, REG_FTMP2);
1970 d = reg_of_var(iptr->dst, REG_ITMP3);
1971 x86_64_alu_reg_reg(X86_64_XOR, d, d);
1972 x86_64_mov_imm_reg(1, REG_ITMP1);
1973 x86_64_mov_imm_reg(-1, REG_ITMP2);
1974 x86_64_ucomiss_reg_reg(s1, s2);
1975 x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
1976 x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
1977 x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1978 store_reg_to_var_int(iptr->dst, d);
1981 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1982 /* == => 0, < => 1, > => -1 */
1984 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1985 var_to_reg_flt(s2, src, REG_FTMP2);
1986 d = reg_of_var(iptr->dst, REG_ITMP3);
1987 x86_64_alu_reg_reg(X86_64_XOR, d, d);
1988 x86_64_mov_imm_reg(1, REG_ITMP1);
1989 x86_64_mov_imm_reg(-1, REG_ITMP2);
1990 x86_64_ucomiss_reg_reg(s1, s2);
1991 x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
1992 x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
1993 x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1994 store_reg_to_var_int(iptr->dst, d);
1997 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1998 /* == => 0, < => 1, > => -1 */
2000 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2001 var_to_reg_flt(s2, src, REG_FTMP2);
2002 d = reg_of_var(iptr->dst, REG_ITMP3);
2003 x86_64_alu_reg_reg(X86_64_XOR, d, d);
2004 x86_64_mov_imm_reg(1, REG_ITMP1);
2005 x86_64_mov_imm_reg(-1, REG_ITMP2);
2006 x86_64_ucomisd_reg_reg(s1, s2);
2007 x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2008 x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2009 x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
2010 store_reg_to_var_int(iptr->dst, d);
2013 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
2014 /* == => 0, < => 1, > => -1 */
2016 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2017 var_to_reg_flt(s2, src, REG_FTMP2);
2018 d = reg_of_var(iptr->dst, REG_ITMP3);
2019 x86_64_alu_reg_reg(X86_64_XOR, d, d);
2020 x86_64_mov_imm_reg(1, REG_ITMP1);
2021 x86_64_mov_imm_reg(-1, REG_ITMP2);
2022 x86_64_ucomisd_reg_reg(s1, s2);
2023 x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2024 x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2025 x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
2026 store_reg_to_var_int(iptr->dst, d);
2030 /* memory operations **************************************************/
2032 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
2034 var_to_reg_int(s1, src, REG_ITMP1);
2035 d = reg_of_var(iptr->dst, REG_ITMP3);
2036 gen_nullptr_check(s1);
2037 x86_64_movl_membase_reg(s1, OFFSET(java_arrayheader, size), d);
2038 store_reg_to_var_int(iptr->dst, d);
2041 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2043 var_to_reg_int(s1, src->prev, REG_ITMP1);
2044 var_to_reg_int(s2, src, REG_ITMP2);
2045 d = reg_of_var(iptr->dst, REG_ITMP3);
2046 if (iptr->op1 == 0) {
2047 gen_nullptr_check(s1);
2050 x86_64_mov_memindex_reg(OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2051 store_reg_to_var_int(iptr->dst, d);
2054 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2056 var_to_reg_int(s1, src->prev, REG_ITMP1);
2057 var_to_reg_int(s2, src, REG_ITMP2);
2058 d = reg_of_var(iptr->dst, REG_ITMP3);
2059 if (iptr->op1 == 0) {
2060 gen_nullptr_check(s1);
2063 x86_64_mov_memindex_reg(OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2064 store_reg_to_var_int(iptr->dst, d);
2067 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2069 var_to_reg_int(s1, src->prev, REG_ITMP1);
2070 var_to_reg_int(s2, src, REG_ITMP2);
2071 d = reg_of_var(iptr->dst, REG_ITMP3);
2072 if (iptr->op1 == 0) {
2073 gen_nullptr_check(s1);
2076 x86_64_movl_memindex_reg(OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2077 store_reg_to_var_int(iptr->dst, d);
2080 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2082 var_to_reg_int(s1, src->prev, REG_ITMP1);
2083 var_to_reg_int(s2, src, REG_ITMP2);
2084 d = reg_of_var(iptr->dst, REG_FTMP3);
2085 if (iptr->op1 == 0) {
2086 gen_nullptr_check(s1);
2089 x86_64_movss_memindex_reg(OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2090 store_reg_to_var_flt(iptr->dst, d);
2093 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2095 var_to_reg_int(s1, src->prev, REG_ITMP1);
2096 var_to_reg_int(s2, src, REG_ITMP2);
2097 d = reg_of_var(iptr->dst, REG_FTMP3);
2098 if (iptr->op1 == 0) {
2099 gen_nullptr_check(s1);
2102 x86_64_movsd_memindex_reg(OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2103 store_reg_to_var_flt(iptr->dst, d);
2106 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2108 var_to_reg_int(s1, src->prev, REG_ITMP1);
2109 var_to_reg_int(s2, src, REG_ITMP2);
2110 d = reg_of_var(iptr->dst, REG_ITMP3);
2111 if (iptr->op1 == 0) {
2112 gen_nullptr_check(s1);
2115 x86_64_movzwq_memindex_reg(OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2116 store_reg_to_var_int(iptr->dst, d);
2119 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2121 var_to_reg_int(s1, src->prev, REG_ITMP1);
2122 var_to_reg_int(s2, src, REG_ITMP2);
2123 d = reg_of_var(iptr->dst, REG_ITMP3);
2124 if (iptr->op1 == 0) {
2125 gen_nullptr_check(s1);
2128 x86_64_movswq_memindex_reg(OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2129 store_reg_to_var_int(iptr->dst, d);
2132 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2134 var_to_reg_int(s1, src->prev, REG_ITMP1);
2135 var_to_reg_int(s2, src, REG_ITMP2);
2136 d = reg_of_var(iptr->dst, REG_ITMP3);
2137 if (iptr->op1 == 0) {
2138 gen_nullptr_check(s1);
2141 x86_64_movsbq_memindex_reg(OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2142 store_reg_to_var_int(iptr->dst, d);
2146 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2148 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2149 var_to_reg_int(s2, src->prev, REG_ITMP2);
2150 if (iptr->op1 == 0) {
2151 gen_nullptr_check(s1);
2154 var_to_reg_int(s3, src, REG_ITMP3);
2155 x86_64_mov_reg_memindex(s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2158 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2160 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2161 var_to_reg_int(s2, src->prev, REG_ITMP2);
2162 if (iptr->op1 == 0) {
2163 gen_nullptr_check(s1);
2166 var_to_reg_int(s3, src, REG_ITMP3);
2167 x86_64_mov_reg_memindex(s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2170 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2172 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2173 var_to_reg_int(s2, src->prev, REG_ITMP2);
2174 if (iptr->op1 == 0) {
2175 gen_nullptr_check(s1);
2178 var_to_reg_int(s3, src, REG_ITMP3);
2179 x86_64_movl_reg_memindex(s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2182 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2184 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2185 var_to_reg_int(s2, src->prev, REG_ITMP2);
2186 if (iptr->op1 == 0) {
2187 gen_nullptr_check(s1);
2190 var_to_reg_flt(s3, src, REG_FTMP3);
2191 x86_64_movss_reg_memindex(s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2194 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2196 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2197 var_to_reg_int(s2, src->prev, REG_ITMP2);
2198 if (iptr->op1 == 0) {
2199 gen_nullptr_check(s1);
2202 var_to_reg_flt(s3, src, REG_FTMP3);
2203 x86_64_movsd_reg_memindex(s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2206 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2208 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2209 var_to_reg_int(s2, src->prev, REG_ITMP2);
2210 if (iptr->op1 == 0) {
2211 gen_nullptr_check(s1);
2214 var_to_reg_int(s3, src, REG_ITMP3);
2215 x86_64_movw_reg_memindex(s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2218 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2220 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2221 var_to_reg_int(s2, src->prev, REG_ITMP2);
2222 if (iptr->op1 == 0) {
2223 gen_nullptr_check(s1);
2226 var_to_reg_int(s3, src, REG_ITMP3);
2227 x86_64_movw_reg_memindex(s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2230 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2232 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2233 var_to_reg_int(s2, src->prev, REG_ITMP2);
2234 if (iptr->op1 == 0) {
2235 gen_nullptr_check(s1);
2238 var_to_reg_int(s3, src, REG_ITMP3);
2239 x86_64_movb_reg_memindex(s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2243 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2244 /* op1 = type, val.a = field address */
2246 /* if class isn't yet initialized, do it */
2247 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2248 /* call helper function which patches this code */
2249 x86_64_mov_imm_reg((s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2250 x86_64_mov_imm_reg((s8) asm_check_clinit, REG_ITMP2);
2251 x86_64_call_reg(REG_ITMP2);
2254 a = dseg_addaddress(&(((fieldinfo *) iptr->val.a)->value));
2255 /* x86_64_mov_imm_reg(0, REG_ITMP2); */
2256 /* dseg_adddata(mcodeptr); */
2257 /* x86_64_mov_membase_reg(REG_ITMP2, a, REG_ITMP2); */
2258 x86_64_mov_membase_reg(RIP, -(((s8) mcodeptr + 7) - (s8) mcodebase) + a, REG_ITMP2);
2259 switch (iptr->op1) {
2261 var_to_reg_int(s2, src, REG_ITMP1);
2262 x86_64_movl_reg_membase(s2, REG_ITMP2, 0);
2266 var_to_reg_int(s2, src, REG_ITMP1);
2267 x86_64_mov_reg_membase(s2, REG_ITMP2, 0);
2270 var_to_reg_flt(s2, src, REG_FTMP1);
2271 x86_64_movss_reg_membase(s2, REG_ITMP2, 0);
2274 var_to_reg_flt(s2, src, REG_FTMP1);
2275 x86_64_movsd_reg_membase(s2, REG_ITMP2, 0);
2277 default: panic("internal error");
2281 case ICMD_GETSTATIC: /* ... ==> ..., value */
2282 /* op1 = type, val.a = field address */
2284 /* if class isn't yet initialized, do it */
2285 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2286 /* call helper function which patches this code */
2287 x86_64_mov_imm_reg((s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2288 x86_64_mov_imm_reg((s8) asm_check_clinit, REG_ITMP2);
2289 x86_64_call_reg(REG_ITMP2);
2292 a = dseg_addaddress(&(((fieldinfo *) iptr->val.a)->value));
2293 /* x86_64_mov_imm_reg(0, REG_ITMP2); */
2294 /* dseg_adddata(mcodeptr); */
2295 /* x86_64_mov_membase_reg(REG_ITMP2, a, REG_ITMP2); */
2296 x86_64_mov_membase_reg(RIP, -(((s8) mcodeptr + 7) - (s8) mcodebase) + a, REG_ITMP2);
2297 switch (iptr->op1) {
2299 d = reg_of_var(iptr->dst, REG_ITMP1);
2300 x86_64_movl_membase_reg(REG_ITMP2, 0, d);
2301 store_reg_to_var_int(iptr->dst, d);
2305 d = reg_of_var(iptr->dst, REG_ITMP1);
2306 x86_64_mov_membase_reg(REG_ITMP2, 0, d);
2307 store_reg_to_var_int(iptr->dst, d);
2310 d = reg_of_var(iptr->dst, REG_ITMP1);
2311 x86_64_movss_membase_reg(REG_ITMP2, 0, d);
2312 store_reg_to_var_flt(iptr->dst, d);
2315 d = reg_of_var(iptr->dst, REG_ITMP1);
2316 x86_64_movsd_membase_reg(REG_ITMP2, 0, d);
2317 store_reg_to_var_flt(iptr->dst, d);
2319 default: panic("internal error");
2323 case ICMD_PUTFIELD: /* ..., value ==> ... */
2324 /* op1 = type, val.i = field offset */
2326 /* if class isn't yet initialized, do it */
2327 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2328 /* call helper function which patches this code */
2329 x86_64_mov_imm_reg((s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2330 x86_64_mov_imm_reg((s8) asm_check_clinit, REG_ITMP2);
2331 x86_64_call_reg(REG_ITMP2);
2334 a = ((fieldinfo *)(iptr->val.a))->offset;
2335 var_to_reg_int(s1, src->prev, REG_ITMP1);
2336 switch (iptr->op1) {
2338 var_to_reg_int(s2, src, REG_ITMP2);
2339 gen_nullptr_check(s1);
2340 x86_64_movl_reg_membase(s2, s1, a);
2344 var_to_reg_int(s2, src, REG_ITMP2);
2345 gen_nullptr_check(s1);
2346 x86_64_mov_reg_membase(s2, s1, a);
2349 var_to_reg_flt(s2, src, REG_FTMP2);
2350 gen_nullptr_check(s1);
2351 x86_64_movss_reg_membase(s2, s1, a);
2354 var_to_reg_flt(s2, src, REG_FTMP2);
2355 gen_nullptr_check(s1);
2356 x86_64_movsd_reg_membase(s2, s1, a);
2358 default: panic ("internal error");
2362 case ICMD_GETFIELD: /* ... ==> ..., value */
2363 /* op1 = type, val.i = field offset */
2365 a = ((fieldinfo *)(iptr->val.a))->offset;
2366 var_to_reg_int(s1, src, REG_ITMP1);
2367 switch (iptr->op1) {
2369 d = reg_of_var(iptr->dst, REG_ITMP1);
2370 gen_nullptr_check(s1);
2371 x86_64_movl_membase_reg(s1, a, d);
2372 store_reg_to_var_int(iptr->dst, d);
2376 d = reg_of_var(iptr->dst, REG_ITMP1);
2377 gen_nullptr_check(s1);
2378 x86_64_mov_membase_reg(s1, a, d);
2379 store_reg_to_var_int(iptr->dst, d);
2382 d = reg_of_var(iptr->dst, REG_FTMP1);
2383 gen_nullptr_check(s1);
2384 x86_64_movss_membase_reg(s1, a, d);
2385 store_reg_to_var_flt(iptr->dst, d);
2388 d = reg_of_var(iptr->dst, REG_FTMP1);
2389 gen_nullptr_check(s1);
2390 x86_64_movsd_membase_reg(s1, a, d);
2391 store_reg_to_var_flt(iptr->dst, d);
2393 default: panic ("internal error");
2398 /* branch operations **************************************************/
2400 /* #define ALIGNCODENOP {if((int)((long)mcodeptr&7)){M_NOP;}} */
2401 #define ALIGNCODENOP do {} while (0)
2403 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2405 var_to_reg_int(s1, src, REG_ITMP1);
2406 M_INTMOVE(s1, REG_ITMP1_XPTR);
2408 x86_64_call_imm(0); /* passing exception pointer */
2409 x86_64_pop_reg(REG_ITMP2_XPC);
2411 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
2412 x86_64_jmp_reg(REG_ITMP3);
2416 case ICMD_GOTO: /* ... ==> ... */
2417 /* op1 = target JavaVM pc */
2420 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2424 case ICMD_JSR: /* ... ==> ... */
2425 /* op1 = target JavaVM pc */
2428 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2431 case ICMD_RET: /* ... ==> ... */
2432 /* op1 = local variable */
2434 var = &(locals[iptr->op1][TYPE_ADR]);
2435 var_to_reg_int(s1, var, REG_ITMP1);
2439 case ICMD_IFNULL: /* ..., value ==> ... */
2440 /* op1 = target JavaVM pc */
2442 if (src->flags & INMEMORY) {
2443 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
2446 x86_64_test_reg_reg(src->regoff, src->regoff);
2448 x86_64_jcc(X86_64_CC_E, 0);
2449 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2452 case ICMD_IFNONNULL: /* ..., value ==> ... */
2453 /* op1 = target JavaVM pc */
2455 if (src->flags & INMEMORY) {
2456 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
2459 x86_64_test_reg_reg(src->regoff, src->regoff);
2461 x86_64_jcc(X86_64_CC_NE, 0);
2462 codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2465 case ICMD_IFEQ: /* ..., value ==> ... */
2466 /* op1 = target JavaVM pc, val.i = constant */
2468 x86_64_emit_ifcc(X86_64_CC_E, src, iptr);
2471 case ICMD_IFLT: /* ..., value ==> ... */
2472 /* op1 = target JavaVM pc, val.i = constant */
2474 x86_64_emit_ifcc(X86_64_CC_L, src, iptr);
2477 case ICMD_IFLE: /* ..., value ==> ... */
2478 /* op1 = target JavaVM pc, val.i = constant */
2480 x86_64_emit_ifcc(X86_64_CC_LE, src, iptr);
2483 case ICMD_IFNE: /* ..., value ==> ... */
2484 /* op1 = target JavaVM pc, val.i = constant */
2486 x86_64_emit_ifcc(X86_64_CC_NE, src, iptr);
2489 case ICMD_IFGT: /* ..., value ==> ... */
2490 /* op1 = target JavaVM pc, val.i = constant */
2492 x86_64_emit_ifcc(X86_64_CC_G, src, iptr);
2495 case ICMD_IFGE: /* ..., value ==> ... */
2496 /* op1 = target JavaVM pc, val.i = constant */
2498 x86_64_emit_ifcc(X86_64_CC_GE, src, iptr);
2501 case ICMD_IF_LEQ: /* ..., value ==> ... */
2502 /* op1 = target JavaVM pc, val.l = constant */
2504 x86_64_emit_if_lcc(X86_64_CC_E, src, iptr);
2507 case ICMD_IF_LLT: /* ..., value ==> ... */
2508 /* op1 = target JavaVM pc, val.l = constant */
2510 x86_64_emit_if_lcc(X86_64_CC_L, src, iptr);
2513 case ICMD_IF_LLE: /* ..., value ==> ... */
2514 /* op1 = target JavaVM pc, val.l = constant */
2516 x86_64_emit_if_lcc(X86_64_CC_LE, src, iptr);
2519 case ICMD_IF_LNE: /* ..., value ==> ... */
2520 /* op1 = target JavaVM pc, val.l = constant */
2522 x86_64_emit_if_lcc(X86_64_CC_NE, src, iptr);
2525 case ICMD_IF_LGT: /* ..., value ==> ... */
2526 /* op1 = target JavaVM pc, val.l = constant */
2528 x86_64_emit_if_lcc(X86_64_CC_G, src, iptr);
2531 case ICMD_IF_LGE: /* ..., value ==> ... */
2532 /* op1 = target JavaVM pc, val.l = constant */
2534 x86_64_emit_if_lcc(X86_64_CC_GE, src, iptr);
2537 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2538 /* op1 = target JavaVM pc */
2540 x86_64_emit_if_icmpcc(X86_64_CC_E, src, iptr);
2543 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2544 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2546 x86_64_emit_if_lcmpcc(X86_64_CC_E, src, iptr);
2549 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2550 /* op1 = target JavaVM pc */
2552 x86_64_emit_if_icmpcc(X86_64_CC_NE, src, iptr);
2555 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2556 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2558 x86_64_emit_if_lcmpcc(X86_64_CC_NE, src, iptr);
2561 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2562 /* op1 = target JavaVM pc */
2564 x86_64_emit_if_icmpcc(X86_64_CC_L, src, iptr);
2567 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2568 /* op1 = target JavaVM pc */
2570 x86_64_emit_if_lcmpcc(X86_64_CC_L, src, iptr);
2573 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2574 /* op1 = target JavaVM pc */
2576 x86_64_emit_if_icmpcc(X86_64_CC_G, src, iptr);
2579 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2580 /* op1 = target JavaVM pc */
2582 x86_64_emit_if_lcmpcc(X86_64_CC_G, src, iptr);
2585 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2586 /* op1 = target JavaVM pc */
2588 x86_64_emit_if_icmpcc(X86_64_CC_LE, src, iptr);
2591 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2592 /* op1 = target JavaVM pc */
2594 x86_64_emit_if_lcmpcc(X86_64_CC_LE, src, iptr);
2597 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2598 /* op1 = target JavaVM pc */
2600 x86_64_emit_if_icmpcc(X86_64_CC_GE, src, iptr);
2603 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2604 /* op1 = target JavaVM pc */
2606 x86_64_emit_if_lcmpcc(X86_64_CC_GE, src, iptr);
2609 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
2611 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2614 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2615 /* val.i = constant */
2617 var_to_reg_int(s1, src, REG_ITMP1);
2618 d = reg_of_var(iptr->dst, REG_ITMP3);
2620 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2622 M_INTMOVE(s1, REG_ITMP1);
2625 x86_64_movl_imm_reg(iptr[1].val.i, d);
2627 x86_64_movl_imm_reg(s3, REG_ITMP2);
2628 x86_64_testl_reg_reg(s1, s1);
2629 x86_64_cmovccl_reg_reg(X86_64_CC_E, REG_ITMP2, d);
2630 store_reg_to_var_int(iptr->dst, d);
2633 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2634 /* val.i = constant */
2636 var_to_reg_int(s1, src, REG_ITMP1);
2637 d = reg_of_var(iptr->dst, REG_ITMP3);
2639 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2641 M_INTMOVE(s1, REG_ITMP1);
2644 x86_64_movl_imm_reg(iptr[1].val.i, d);
2646 x86_64_movl_imm_reg(s3, REG_ITMP2);
2647 x86_64_testl_reg_reg(s1, s1);
2648 x86_64_cmovccl_reg_reg(X86_64_CC_NE, REG_ITMP2, d);
2649 store_reg_to_var_int(iptr->dst, d);
2652 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2653 /* val.i = constant */
2655 var_to_reg_int(s1, src, REG_ITMP1);
2656 d = reg_of_var(iptr->dst, REG_ITMP3);
2658 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2660 M_INTMOVE(s1, REG_ITMP1);
2663 x86_64_movl_imm_reg(iptr[1].val.i, d);
2665 x86_64_movl_imm_reg(s3, REG_ITMP2);
2666 x86_64_testl_reg_reg(s1, s1);
2667 x86_64_cmovccl_reg_reg(X86_64_CC_L, REG_ITMP2, d);
2668 store_reg_to_var_int(iptr->dst, d);
2671 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2672 /* val.i = constant */
2674 var_to_reg_int(s1, src, REG_ITMP1);
2675 d = reg_of_var(iptr->dst, REG_ITMP3);
2677 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2679 M_INTMOVE(s1, REG_ITMP1);
2682 x86_64_movl_imm_reg(iptr[1].val.i, d);
2684 x86_64_movl_imm_reg(s3, REG_ITMP2);
2685 x86_64_testl_reg_reg(s1, s1);
2686 x86_64_cmovccl_reg_reg(X86_64_CC_GE, REG_ITMP2, d);
2687 store_reg_to_var_int(iptr->dst, d);
2690 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2691 /* val.i = constant */
2693 var_to_reg_int(s1, src, REG_ITMP1);
2694 d = reg_of_var(iptr->dst, REG_ITMP3);
2696 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2698 M_INTMOVE(s1, REG_ITMP1);
2701 x86_64_movl_imm_reg(iptr[1].val.i, d);
2703 x86_64_movl_imm_reg(s3, REG_ITMP2);
2704 x86_64_testl_reg_reg(s1, s1);
2705 x86_64_cmovccl_reg_reg(X86_64_CC_G, REG_ITMP2, d);
2706 store_reg_to_var_int(iptr->dst, d);
2709 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2710 /* val.i = constant */
2712 var_to_reg_int(s1, src, REG_ITMP1);
2713 d = reg_of_var(iptr->dst, REG_ITMP3);
2715 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2717 M_INTMOVE(s1, REG_ITMP1);
2720 x86_64_movl_imm_reg(iptr[1].val.i, d);
2722 x86_64_movl_imm_reg(s3, REG_ITMP2);
2723 x86_64_testl_reg_reg(s1, s1);
2724 x86_64_cmovccl_reg_reg(X86_64_CC_LE, REG_ITMP2, d);
2725 store_reg_to_var_int(iptr->dst, d);
2729 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2733 var_to_reg_int(s1, src, REG_RESULT);
2734 M_INTMOVE(s1, REG_RESULT);
2736 #if defined(USE_THREADS)
2737 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2738 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
2739 x86_64_mov_reg_membase(REG_RESULT, REG_SP, maxmemuse * 8);
2740 x86_64_mov_imm_reg((u8) builtin_monitorexit, REG_ITMP1);
2741 x86_64_call_reg(REG_ITMP1);
2742 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, REG_RESULT);
2746 goto nowperformreturn;
2748 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2751 var_to_reg_flt(s1, src, REG_FRESULT);
2752 M_FLTMOVE(s1, REG_FRESULT);
2754 #if defined(USE_THREADS)
2755 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2756 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
2757 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, maxmemuse * 8);
2758 x86_64_mov_imm_reg((u8) builtin_monitorexit, REG_ITMP1);
2759 x86_64_call_reg(REG_ITMP1);
2760 x86_64_movq_membase_reg(REG_SP, maxmemuse * 8, REG_FRESULT);
2764 goto nowperformreturn;
2766 case ICMD_RETURN: /* ... ==> ... */
2768 #if defined(USE_THREADS)
2769 if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2770 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
2771 x86_64_mov_imm_reg((u8) builtin_monitorexit, REG_ITMP1);
2772 x86_64_call_reg(REG_ITMP1);
2780 p = parentargs_base;
2782 /* call trace function */
2784 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
2786 x86_64_mov_reg_membase(REG_RESULT, REG_SP, 0 * 8);
2787 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, 1 * 8);
2789 x86_64_mov_imm_reg((s8) method, argintregs[0]);
2790 x86_64_mov_reg_reg(REG_RESULT, argintregs[1]);
2791 M_FLTMOVE(REG_FRESULT, argfltregs[0]);
2792 M_FLTMOVE(REG_FRESULT, argfltregs[1]);
2794 x86_64_mov_imm_reg((s8) builtin_displaymethodstop, REG_ITMP1);
2795 x86_64_call_reg(REG_ITMP1);
2797 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_RESULT);
2798 x86_64_movq_membase_reg(REG_SP, 1 * 8, REG_FRESULT);
2800 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
2803 /* restore saved registers */
2804 for (r = savintregcnt - 1; r >= maxsavintreguse; r--) {
2805 p--; x86_64_mov_membase_reg(REG_SP, p * 8, savintregs[r]);
2807 for (r = savfltregcnt - 1; r >= maxsavfltreguse; r--) {
2808 p--; x86_64_movq_membase_reg(REG_SP, p * 8, savfltregs[r]);
2811 /* deallocate stack */
2812 if (parentargs_base) {
2813 x86_64_alu_imm_reg(X86_64_ADD, parentargs_base * 8, REG_SP);
2822 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2827 tptr = (void **) iptr->target;
2829 s4ptr = iptr->val.a;
2830 l = s4ptr[1]; /* low */
2831 i = s4ptr[2]; /* high */
2833 var_to_reg_int(s1, src, REG_ITMP1);
2834 M_INTMOVE(s1, REG_ITMP1);
2836 x86_64_alul_imm_reg(X86_64_SUB, l, REG_ITMP1);
2841 x86_64_alul_imm_reg(X86_64_CMP, i - 1, REG_ITMP1);
2842 x86_64_jcc(X86_64_CC_A, 0);
2844 /* codegen_addreference(BlockPtrOfPC(s4ptr[0]), mcodeptr); */
2845 codegen_addreference((basicblock *) tptr[0], mcodeptr);
2847 /* build jump table top down and use address of lowest entry */
2849 /* s4ptr += 3 + i; */
2853 /* dseg_addtarget(BlockPtrOfPC(*--s4ptr)); */
2854 dseg_addtarget((basicblock *) tptr[0]);
2858 /* length of dataseg after last dseg_addtarget is used by load */
2860 x86_64_mov_imm_reg(0, REG_ITMP2);
2861 dseg_adddata(mcodeptr);
2862 x86_64_mov_memindex_reg(-dseglen, REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
2863 x86_64_jmp_reg(REG_ITMP1);
2869 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2871 s4 i, l, val, *s4ptr;
2874 tptr = (void **) iptr->target;
2876 s4ptr = iptr->val.a;
2877 l = s4ptr[0]; /* default */
2878 i = s4ptr[1]; /* count */
2880 MCODECHECK((i<<2)+8);
2881 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
2887 x86_64_alul_imm_reg(X86_64_CMP, val, s1);
2888 x86_64_jcc(X86_64_CC_E, 0);
2889 /* codegen_addreference(BlockPtrOfPC(s4ptr[1]), mcodeptr); */
2890 codegen_addreference((basicblock *) tptr[0], mcodeptr);
2894 /* codegen_addreference(BlockPtrOfPC(l), mcodeptr); */
2896 tptr = (void **) iptr->target;
2897 codegen_addreference((basicblock *) tptr[0], mcodeptr);
2904 case ICMD_BUILTIN3: /* ..., arg1, arg2, arg3 ==> ... */
2905 /* op1 = return type, val.a = function pointer */
2909 case ICMD_BUILTIN2: /* ..., arg1, arg2 ==> ... */
2910 /* op1 = return type, val.a = function pointer */
2914 case ICMD_BUILTIN1: /* ..., arg1 ==> ... */
2915 /* op1 = return type, val.a = function pointer */
2919 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2920 /* op1 = arg count, val.a = method pointer */
2922 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2923 /* op1 = arg count, val.a = method pointer */
2925 case ICMD_INVOKEVIRTUAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2926 /* op1 = arg count, val.a = method pointer */
2928 case ICMD_INVOKEINTERFACE:/*.., objectref, [arg1, [arg2 ...]] ==> ... */
2929 /* op1 = arg count, val.a = method pointer */
2940 MCODECHECK((s3 << 1) + 64);
2945 /* copy arguments to registers or stack location */
2946 for (; --s3 >= 0; src = src->prev) {
2947 IS_INT_LNG_TYPE(src->type) ? iarg++ : farg++;
2953 s2 = (iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0 + (farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0;
2955 for (; --s3 >= 0; src = src->prev) {
2956 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
2957 if (src->varkind == ARGVAR) {
2958 if (IS_INT_LNG_TYPE(src->type)) {
2959 if (iarg >= INT_ARG_CNT) {
2963 if (farg >= FLT_ARG_CNT) {
2970 if (IS_INT_LNG_TYPE(src->type)) {
2971 if (iarg < INT_ARG_CNT) {
2972 s1 = argintregs[iarg];
2973 var_to_reg_int(d, src, s1);
2977 var_to_reg_int(d, src, REG_ITMP1);
2979 x86_64_mov_reg_membase(d, REG_SP, s2 * 8);
2983 if (farg < FLT_ARG_CNT) {
2984 s1 = argfltregs[farg];
2985 var_to_reg_flt(d, src, s1);
2989 var_to_reg_flt(d, src, REG_FTMP1);
2991 x86_64_movq_reg_membase(d, REG_SP, s2 * 8);
2997 switch (iptr->opc) {
3005 x86_64_mov_imm_reg(a, REG_ITMP1);
3006 x86_64_call_reg(REG_ITMP1);
3009 case ICMD_INVOKESTATIC:
3011 a = (s8) m->stubroutine;
3014 x86_64_mov_imm_reg(a, REG_ITMP2);
3015 x86_64_call_reg(REG_ITMP2);
3018 case ICMD_INVOKESPECIAL:
3020 a = (s8) m->stubroutine;
3023 gen_nullptr_check(argintregs[0]); /* first argument contains pointer */
3024 x86_64_mov_membase_reg(argintregs[0], 0, REG_ITMP2); /* access memory for hardware nullptr */
3025 x86_64_mov_imm_reg(a, REG_ITMP2);
3026 x86_64_call_reg(REG_ITMP2);
3029 case ICMD_INVOKEVIRTUAL:
3033 gen_nullptr_check(argintregs[0]);
3034 x86_64_mov_membase_reg(argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3035 x86_64_mov_membase32_reg(REG_ITMP2, OFFSET(vftbl, table[0]) + sizeof(methodptr) * m->vftblindex, REG_ITMP1);
3036 x86_64_call_reg(REG_ITMP1);
3039 case ICMD_INVOKEINTERFACE:
3044 gen_nullptr_check(argintregs[0]);
3045 x86_64_mov_membase_reg(argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3046 x86_64_mov_membase_reg(REG_ITMP2, OFFSET(vftbl, interfacetable[0]) - sizeof(methodptr) * ci->index, REG_ITMP2);
3047 x86_64_mov_membase32_reg(REG_ITMP2, sizeof(methodptr) * (m - ci->methods), REG_ITMP1);
3048 x86_64_call_reg(REG_ITMP1);
3053 error("Unkown ICMD-Command: %d", iptr->opc);
3056 /* d contains return type */
3058 if (d != TYPE_VOID) {
3059 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3060 s1 = reg_of_var(iptr->dst, REG_RESULT);
3061 M_INTMOVE(REG_RESULT, s1);
3062 store_reg_to_var_int(iptr->dst, s1);
3065 s1 = reg_of_var(iptr->dst, REG_FRESULT);
3066 M_FLTMOVE(REG_FRESULT, s1);
3067 store_reg_to_var_flt(iptr->dst, s1);
3074 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3076 /* op1: 0 == array, 1 == class */
3077 /* val.a: (classinfo*) superclass */
3079 /* superclass is an interface:
3081 * return (sub != NULL) &&
3082 * (sub->vftbl->interfacetablelength > super->index) &&
3083 * (sub->vftbl->interfacetable[-super->index] != NULL);
3085 * superclass is a class:
3087 * return ((sub != NULL) && (0
3088 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3089 * super->vftbl->diffval));
3093 classinfo *super = (classinfo*) iptr->val.a;
3095 var_to_reg_int(s1, src, REG_ITMP1);
3096 d = reg_of_var(iptr->dst, REG_ITMP3);
3098 M_INTMOVE(s1, REG_ITMP1);
3101 x86_64_alu_reg_reg(X86_64_XOR, d, d);
3102 if (iptr->op1) { /* class/interface */
3103 if (super->flags & ACC_INTERFACE) { /* interface */
3104 x86_64_test_reg_reg(s1, s1);
3106 /* TODO: clean up this calculation */
3107 a = 3; /* mov_membase_reg */
3108 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3110 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3111 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetablelength));
3114 CALCIMMEDIATEBYTES(a, super->index);
3119 a += 3; /* mov_membase_reg */
3120 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3125 x86_64_jcc(X86_64_CC_E, a);
3127 x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3128 x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetablelength), REG_ITMP2);
3129 x86_64_alu_imm_reg(X86_64_SUB, super->index, REG_ITMP2);
3130 x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3132 /* TODO: clean up this calculation */
3134 a += 3; /* mov_membase_reg */
3135 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3140 x86_64_jcc(X86_64_CC_LE, a);
3141 x86_64_mov_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP1);
3142 x86_64_test_reg_reg(REG_ITMP1, REG_ITMP1);
3143 x86_64_setcc_reg(X86_64_CC_NE, d);
3145 } else { /* class */
3146 x86_64_test_reg_reg(s1, s1);
3148 /* TODO: clean up this calculation */
3149 a = 3; /* mov_membase_reg */
3150 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3152 a += 10; /* mov_imm_reg */
3154 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3155 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, baseval));
3157 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3158 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3160 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3161 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3168 x86_64_jcc(X86_64_CC_E, a);
3170 x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3171 x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3172 x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, baseval), REG_ITMP1);
3173 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP3);
3174 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3175 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP3, REG_ITMP1);
3176 x86_64_alu_reg_reg(X86_64_XOR, d, d);
3177 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
3178 x86_64_setcc_reg(X86_64_CC_BE, d);
3182 panic("internal error: no inlined array instanceof");
3184 store_reg_to_var_int(iptr->dst, d);
3187 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3189 /* op1: 0 == array, 1 == class */
3190 /* val.a: (classinfo*) superclass */
3192 /* superclass is an interface:
3194 * OK if ((sub == NULL) ||
3195 * (sub->vftbl->interfacetablelength > super->index) &&
3196 * (sub->vftbl->interfacetable[-super->index] != NULL));
3198 * superclass is a class:
3200 * OK if ((sub == NULL) || (0
3201 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3202 * super->vftbl->diffval));
3206 classinfo *super = (classinfo*) iptr->val.a;
3208 d = reg_of_var(iptr->dst, REG_ITMP3);
3209 var_to_reg_int(s1, src, d);
3210 if (iptr->op1) { /* class/interface */
3211 if (super->flags & ACC_INTERFACE) { /* interface */
3212 x86_64_test_reg_reg(s1, s1);
3214 /* TODO: clean up this calculation */
3215 a = 3; /* mov_membase_reg */
3216 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3218 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3219 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetablelength));
3222 CALCIMMEDIATEBYTES(a, super->index);
3227 a += 3; /* mov_membase_reg */
3228 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3233 x86_64_jcc(X86_64_CC_E, a);
3235 x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3236 x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetablelength), REG_ITMP2);
3237 x86_64_alu_imm_reg(X86_64_SUB, super->index, REG_ITMP2);
3238 x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3239 x86_64_jcc(X86_64_CC_LE, 0);
3240 codegen_addxcastrefs(mcodeptr);
3241 x86_64_mov_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP2);
3242 x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3243 x86_64_jcc(X86_64_CC_E, 0);
3244 codegen_addxcastrefs(mcodeptr);
3246 } else { /* class */
3247 x86_64_test_reg_reg(s1, s1);
3249 /* TODO: clean up this calculation */
3250 a = 3; /* mov_membase_reg */
3251 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3252 a += 10; /* mov_imm_reg */
3253 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3254 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, baseval));
3256 if (d != REG_ITMP3) {
3257 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3258 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3259 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3260 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3264 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3265 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3267 a += 10; /* mov_imm_reg */
3268 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3269 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3275 x86_64_jcc(X86_64_CC_E, a);
3277 x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3278 x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3279 x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, baseval), REG_ITMP1);
3280 if (d != REG_ITMP3) {
3281 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP3);
3282 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3283 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP3, REG_ITMP1);
3286 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP2);
3287 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
3288 x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3289 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3291 x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
3292 x86_64_jcc(X86_64_CC_A, 0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3293 codegen_addxcastrefs(mcodeptr);
3297 panic("internal error: no inlined array checkcast");
3300 store_reg_to_var_int(iptr->dst, d);
3303 case ICMD_CHECKASIZE: /* ..., size ==> ..., size */
3305 if (src->flags & INMEMORY) {
3306 x86_64_alul_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
3309 x86_64_testl_reg_reg(src->regoff, src->regoff);
3311 x86_64_jcc(X86_64_CC_L, 0);
3312 codegen_addxcheckarefs(mcodeptr);
3315 case ICMD_CHECKEXCEPTION: /* ... ==> ... */
3317 x86_64_test_reg_reg(REG_RESULT, REG_RESULT);
3318 x86_64_jcc(X86_64_CC_E, 0);
3319 codegen_addxexceptionrefs(mcodeptr);
3322 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3323 /* op1 = dimension, val.a = array descriptor */
3325 /* check for negative sizes and copy sizes to stack if necessary */
3327 MCODECHECK((iptr->op1 << 1) + 64);
3329 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3330 var_to_reg_int(s2, src, REG_ITMP1);
3331 x86_64_testl_reg_reg(s2, s2);
3332 x86_64_jcc(X86_64_CC_L, 0);
3333 codegen_addxcheckarefs(mcodeptr);
3335 /* copy sizes to stack (argument numbers >= INT_ARG_CNT) */
3337 if (src->varkind != ARGVAR) {
3338 x86_64_mov_reg_membase(s2, REG_SP, (s1 + INT_ARG_CNT) * 8);
3342 /* a0 = dimension count */
3343 x86_64_mov_imm_reg(iptr->op1, argintregs[0]);
3345 /* a1 = arraydescriptor */
3346 x86_64_mov_imm_reg((s8) iptr->val.a, argintregs[1]);
3348 /* a2 = pointer to dimensions = stack pointer */
3349 x86_64_mov_reg_reg(REG_SP, argintregs[2]);
3351 x86_64_mov_imm_reg((s8) (builtin_nmultianewarray), REG_ITMP1);
3352 x86_64_call_reg(REG_ITMP1);
3354 s1 = reg_of_var(iptr->dst, REG_RESULT);
3355 M_INTMOVE(REG_RESULT, s1);
3356 store_reg_to_var_int(iptr->dst, s1);
3359 default: error("Unknown pseudo command: %d", iptr->opc);
3362 } /* for instruction */
3364 /* copy values to interface registers */
3366 src = bptr->outstack;
3367 len = bptr->outdepth;
3371 if ((src->varkind != STACKVAR)) {
3373 if (IS_FLT_DBL_TYPE(s2)) {
3374 var_to_reg_flt(s1, src, REG_FTMP1);
3375 if (!(interfaces[len][s2].flags & INMEMORY)) {
3376 M_FLTMOVE(s1, interfaces[len][s2].regoff);
3379 x86_64_movq_reg_membase(s1, REG_SP, 8 * interfaces[len][s2].regoff);
3383 var_to_reg_int(s1, src, REG_ITMP1);
3384 if (!(interfaces[len][s2].flags & INMEMORY)) {
3385 M_INTMOVE(s1, interfaces[len][s2].regoff);
3388 x86_64_mov_reg_membase(s1, REG_SP, interfaces[len][s2].regoff * 8);
3394 } /* if (bptr -> flags >= BBREACHED) */
3395 } /* for basic block */
3397 /* bptr -> mpc = (int)((u1*) mcodeptr - mcodebase); */
3401 /* generate bound check stubs */
3403 u1 *xcodeptr = NULL;
3405 for (; xboundrefs != NULL; xboundrefs = xboundrefs->next) {
3406 gen_resolvebranch(mcodebase + xboundrefs->branchpos,
3407 xboundrefs->branchpos,
3408 mcodeptr - mcodebase);
3412 /* move index register into REG_ITMP1 */
3413 x86_64_mov_reg_reg(xboundrefs->reg, REG_ITMP1); /* 3 bytes */
3415 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3416 dseg_adddata(mcodeptr);
3417 x86_64_mov_imm_reg(xboundrefs->branchpos - 6, REG_ITMP3); /* 10 bytes */
3418 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3420 if (xcodeptr != NULL) {
3421 x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3424 xcodeptr = mcodeptr;
3426 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3427 x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3428 x86_64_mov_imm_reg((s8) string_java_lang_ArrayIndexOutOfBoundsException, argintregs[0]);
3429 x86_64_mov_reg_reg(REG_ITMP1, argintregs[1]);
3430 x86_64_mov_imm_reg((s8) new_exception_int, REG_ITMP3);
3431 x86_64_call_reg(REG_ITMP3);
3432 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3433 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3435 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3436 x86_64_jmp_reg(REG_ITMP3);
3440 /* generate negative array size check stubs */
3444 for (; xcheckarefs != NULL; xcheckarefs = xcheckarefs->next) {
3445 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3446 gen_resolvebranch(mcodebase + xcheckarefs->branchpos,
3447 xcheckarefs->branchpos,
3448 xcodeptr - mcodebase - (10 + 10 + 3));
3452 gen_resolvebranch(mcodebase + xcheckarefs->branchpos,
3453 xcheckarefs->branchpos,
3454 mcodeptr - mcodebase);
3458 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3459 dseg_adddata(mcodeptr);
3460 x86_64_mov_imm_reg(xcheckarefs->branchpos - 6, REG_ITMP3); /* 10 bytes */
3461 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3463 if (xcodeptr != NULL) {
3464 x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3467 xcodeptr = mcodeptr;
3469 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3470 x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3471 x86_64_mov_imm_reg((s8) string_java_lang_NegativeArraySizeException, argintregs[0]);
3472 x86_64_mov_imm_reg((s8) new_exception, REG_ITMP3);
3473 x86_64_call_reg(REG_ITMP3);
3474 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3475 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3477 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3478 x86_64_jmp_reg(REG_ITMP3);
3482 /* generate cast check stubs */
3486 for (; xcastrefs != NULL; xcastrefs = xcastrefs->next) {
3487 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3488 gen_resolvebranch(mcodebase + xcastrefs->branchpos,
3489 xcastrefs->branchpos,
3490 xcodeptr - mcodebase - (10 + 10 + 3));
3494 gen_resolvebranch(mcodebase + xcastrefs->branchpos,
3495 xcastrefs->branchpos,
3496 mcodeptr - mcodebase);
3500 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3501 dseg_adddata(mcodeptr);
3502 x86_64_mov_imm_reg(xcastrefs->branchpos - 6, REG_ITMP3); /* 10 bytes */
3503 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3505 if (xcodeptr != NULL) {
3506 x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3509 xcodeptr = mcodeptr;
3511 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3512 x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3513 x86_64_mov_imm_reg((s8) string_java_lang_ClassCastException, argintregs[0]);
3514 x86_64_mov_imm_reg((s8) new_exception, REG_ITMP3);
3515 x86_64_call_reg(REG_ITMP3);
3516 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3517 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3519 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3520 x86_64_jmp_reg(REG_ITMP3);
3524 /* generate divide by zero check stubs */
3528 for (; xdivrefs != NULL; xdivrefs = xdivrefs->next) {
3529 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3530 gen_resolvebranch(mcodebase + xdivrefs->branchpos,
3531 xdivrefs->branchpos,
3532 xcodeptr - mcodebase - (10 + 10 + 3));
3536 gen_resolvebranch(mcodebase + xdivrefs->branchpos,
3537 xdivrefs->branchpos,
3538 mcodeptr - mcodebase);
3542 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3543 dseg_adddata(mcodeptr);
3544 x86_64_mov_imm_reg(xdivrefs->branchpos - 6, REG_ITMP3); /* 10 bytes */
3545 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3547 if (xcodeptr != NULL) {
3548 x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3551 xcodeptr = mcodeptr;
3553 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3554 x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3555 x86_64_mov_imm_reg((u8) string_java_lang_ArithmeticException, argintregs[0]);
3556 x86_64_mov_imm_reg((u8) string_java_lang_ArithmeticException_message, argintregs[1]);
3557 x86_64_mov_imm_reg((u8) new_exception, REG_ITMP3);
3558 x86_64_call_reg(REG_ITMP3);
3559 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3560 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3562 x86_64_mov_imm_reg((u8) asm_handle_exception, REG_ITMP3);
3563 x86_64_jmp_reg(REG_ITMP3);
3567 /* generate exception check stubs */
3571 for (; xexceptionrefs != NULL; xexceptionrefs = xexceptionrefs->next) {
3572 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3573 gen_resolvebranch(mcodebase + xexceptionrefs->branchpos,
3574 xexceptionrefs->branchpos,
3575 xcodeptr - mcodebase - (10 + 10 + 3));
3579 gen_resolvebranch(mcodebase + xexceptionrefs->branchpos,
3580 xexceptionrefs->branchpos,
3581 mcodeptr - mcodebase);
3585 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3586 dseg_adddata(mcodeptr);
3587 x86_64_mov_imm_reg(xexceptionrefs->branchpos - 6, REG_ITMP1); /* 10 bytes */
3588 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3590 if (xcodeptr != NULL) {
3591 x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3594 xcodeptr = mcodeptr;
3596 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3597 x86_64_push_reg(REG_ITMP2_XPC);
3598 x86_64_mov_imm_reg((u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3599 x86_64_call_reg(REG_ITMP1);
3600 x86_64_mov_membase_reg(REG_RESULT, 0, REG_ITMP3);
3601 x86_64_mov_imm_membase(0, REG_RESULT, 0);
3602 x86_64_mov_reg_reg(REG_ITMP3, REG_ITMP1_XPTR);
3603 x86_64_pop_reg(REG_ITMP2_XPC);
3605 x86_64_mov_imm_reg((u8) &_exceptionptr, REG_ITMP3);
3606 x86_64_mov_membase_reg(REG_ITMP3, 0, REG_ITMP1_XPTR);
3607 x86_64_mov_imm_membase(0, REG_ITMP3, 0);
3610 x86_64_mov_imm_reg((u8) asm_handle_exception, REG_ITMP3);
3611 x86_64_jmp_reg(REG_ITMP3);
3615 /* generate null pointer check stubs */
3619 for (; xnullrefs != NULL; xnullrefs = xnullrefs->next) {
3620 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3621 gen_resolvebranch(mcodebase + xnullrefs->branchpos,
3622 xnullrefs->branchpos,
3623 xcodeptr - mcodebase - (10 + 10 + 3));
3627 gen_resolvebranch(mcodebase + xnullrefs->branchpos,
3628 xnullrefs->branchpos,
3629 mcodeptr - mcodebase);
3633 x86_64_mov_imm_reg(0, REG_ITMP2_XPC); /* 10 bytes */
3634 dseg_adddata(mcodeptr);
3635 x86_64_mov_imm_reg(xnullrefs->branchpos - 6, REG_ITMP1); /* 10 bytes */
3636 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3638 if (xcodeptr != NULL) {
3639 x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3642 xcodeptr = mcodeptr;
3644 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3645 x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3646 x86_64_mov_imm_reg((s8) string_java_lang_NullPointerException, argintregs[0]);
3647 x86_64_mov_imm_reg((s8) new_exception, REG_ITMP3);
3648 x86_64_call_reg(REG_ITMP3);
3649 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3650 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3652 x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3653 x86_64_jmp_reg(REG_ITMP3);
3659 codegen_finish((int)((u1*) mcodeptr - mcodebase));
3663 /* function createcompilerstub *************************************************
3665 creates a stub routine which calls the compiler
3667 *******************************************************************************/
3669 #define COMPSTUBSIZE 23
/* createcompilerstub: build a tiny trampoline for method m that loads the
   method pointer into REG_ITMP1 and jumps to asm_call_jit_compiler, which
   compiles m on first call.  The 23 bytes of COMPSTUBSIZE match the emitted
   code: two mov imm64,reg (10 bytes each, see the byte counts noted elsewhere
   in this file) + jmp reg (3 bytes).
   NOTE(review): the opening/closing braces and the 'return s;' of this
   function are elided from this listing. */
3671 u1 *createcompilerstub(methodinfo *m)
3673 u1 *s = CNEW(u1, COMPSTUBSIZE); /* memory to hold the stub */
3674 mcodeptr = s; /* code generation pointer */
3676 /* code for the stub */
3677 x86_64_mov_imm_reg((s8) m, REG_ITMP1); /* pass method pointer to compiler */
3678 x86_64_mov_imm_reg((s8) asm_call_jit_compiler, REG_ITMP3);/* load address */
3679 x86_64_jmp_reg(REG_ITMP3); /* jump to compiler */
/* statistics counter -- presumably wrapped in #if defined(STATISTICS) in
   elided lines; confirm before relying on it in non-statistics builds */
3682 count_cstub_len += COMPSTUBSIZE;
3689 /* function removecompilerstub *************************************************
3691 deletes a compilerstub from memory (simply by freeing it)
3693 *******************************************************************************/
/* removecompilerstub: free the COMPSTUBSIZE-byte trampoline previously
   allocated by createcompilerstub.  'stub' must be the exact pointer that
   createcompilerstub handed out; the size must match the CNEW allocation. */
3695 void removecompilerstub(u1 *stub)
3697 CFREE(stub, COMPSTUBSIZE);
3700 /* function: createnativestub **************************************************
3702 creates a stub routine which calls a native method
3704 *******************************************************************************/
3706 #define NATIVESTUBSIZE 420
/* createnativestub: emit a wrapper stub that lets JIT-compiled code call the
   native function f implementing method m.  The stub shifts the Java argument
   registers down to make room for the JNI env pointer (and, for static
   methods, the class pointer), keeps the stack 16-byte aligned around the
   call, invokes f, then checks _exceptionptr and dispatches pending
   exceptions to asm_handle_nat_exception.
   NOTE(review): this listing is elided -- braces, several #if/if guards and
   the declarations of p, l, s1 and i are not visible here. */
3708 u1 *createnativestub(functionptr f, methodinfo *m)
3710 u1 *s = CNEW(u1, NATIVESTUBSIZE); /* memory to hold the stub */
3711 int stackframesize; /* size of stackframe if needed */
3712 mcodeptr = s; /* make macros work */
3715 descriptor2types(m); /* set paramcount and paramtypes */
3717 /* if function is static, check for initialized */
3719 if (m->flags & ACC_STATIC) {
3720 /* if class isn't yet initialized, do it */
3721 if (!m->class->initialized) {
3722 /* call helper function which patches this code */
3723 x86_64_mov_imm_reg((u8) m->class, REG_ITMP1);
3724 x86_64_mov_imm_reg((u8) asm_check_clinit, REG_ITMP2);
3725 x86_64_call_reg(REG_ITMP2);
/* --- argument tracing block: spill all 6 integer and 4 (of 8) float
   argument registers plus one method-pointer slot, call
   builtin_trace_args, then reload everything.  Presumably guarded by an
   'if (runverbose)' in elided lines -- confirm. */
3732 x86_64_alu_imm_reg(X86_64_SUB, (6 + 8 + 1) * 8, REG_SP);
3734 x86_64_mov_reg_membase(argintregs[0], REG_SP, 1 * 8);
3735 x86_64_mov_reg_membase(argintregs[1], REG_SP, 2 * 8);
3736 x86_64_mov_reg_membase(argintregs[2], REG_SP, 3 * 8);
3737 x86_64_mov_reg_membase(argintregs[3], REG_SP, 4 * 8);
3738 x86_64_mov_reg_membase(argintregs[4], REG_SP, 5 * 8);
3739 x86_64_mov_reg_membase(argintregs[5], REG_SP, 6 * 8);
3741 x86_64_movq_reg_membase(argfltregs[0], REG_SP, 7 * 8);
3742 x86_64_movq_reg_membase(argfltregs[1], REG_SP, 8 * 8);
3743 x86_64_movq_reg_membase(argfltregs[2], REG_SP, 9 * 8);
3744 x86_64_movq_reg_membase(argfltregs[3], REG_SP, 10 * 8);
3745 /* x86_64_movq_reg_membase(argfltregs[4], REG_SP, 11 * 8); */
3746 /* x86_64_movq_reg_membase(argfltregs[5], REG_SP, 12 * 8); */
3747 /* x86_64_movq_reg_membase(argfltregs[6], REG_SP, 13 * 8); */
3748 /* x86_64_movq_reg_membase(argfltregs[7], REG_SP, 14 * 8); */
3750 /* show integer hex code for float arguments */
/* move float argument bits into integer registers so the tracer can print
   them; later int args are shifted up one register to make room.
   NOTE(review): p, l and s1 are declared in elided lines. */
3751 for (p = 0, l = 0; p < m->paramcount; p++) {
3752 if (IS_FLT_DBL_TYPE(m->paramtypes[p])) {
3753 for (s1 = (m->paramcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : m->paramcount - 2; s1 >= p; s1--) {
3754 x86_64_mov_reg_reg(argintregs[s1], argintregs[s1 + 1]);
3757 x86_64_movd_freg_reg(argfltregs[l], argintregs[p]);
3762 x86_64_mov_imm_reg((s8) m, REG_ITMP1);
3763 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, 0 * 8);
3764 x86_64_mov_imm_reg((s8) builtin_trace_args, REG_ITMP1);
3765 x86_64_call_reg(REG_ITMP1);
/* reload the spilled argument registers after the trace call */
3767 x86_64_mov_membase_reg(REG_SP, 1 * 8, argintregs[0]);
3768 x86_64_mov_membase_reg(REG_SP, 2 * 8, argintregs[1]);
3769 x86_64_mov_membase_reg(REG_SP, 3 * 8, argintregs[2]);
3770 x86_64_mov_membase_reg(REG_SP, 4 * 8, argintregs[3]);
3771 x86_64_mov_membase_reg(REG_SP, 5 * 8, argintregs[4]);
3772 x86_64_mov_membase_reg(REG_SP, 6 * 8, argintregs[5]);
3774 x86_64_movq_membase_reg(REG_SP, 7 * 8, argfltregs[0]);
3775 x86_64_movq_membase_reg(REG_SP, 8 * 8, argfltregs[1]);
3776 x86_64_movq_membase_reg(REG_SP, 9 * 8, argfltregs[2]);
3777 x86_64_movq_membase_reg(REG_SP, 10 * 8, argfltregs[3]);
3778 /* x86_64_movq_membase_reg(REG_SP, 11 * 8, argfltregs[4]); */
3779 /* x86_64_movq_membase_reg(REG_SP, 12 * 8, argfltregs[5]); */
3780 /* x86_64_movq_membase_reg(REG_SP, 13 * 8, argfltregs[6]); */
3781 /* x86_64_movq_membase_reg(REG_SP, 14 * 8, argfltregs[7]); */
3783 x86_64_alu_imm_reg(X86_64_ADD, (6 + 8 + 1) * 8, REG_SP);
3787 x86_64_alu_imm_reg(X86_64_SUB, 7 * 8, REG_SP); /* keep stack 16-byte aligned */
3789 /* save callee saved float registers */
/* NOTE(review): the SysV AMD64 ABI has no callee-saved XMM registers; these
   saves follow CACAO's internal register convention -- confirm against
   jit/x86_64/codegen.h */
3790 x86_64_movq_reg_membase(XMM15, REG_SP, 0 * 8);
3791 x86_64_movq_reg_membase(XMM14, REG_SP, 1 * 8);
3792 x86_64_movq_reg_membase(XMM13, REG_SP, 2 * 8);
3793 x86_64_movq_reg_membase(XMM12, REG_SP, 3 * 8);
3794 x86_64_movq_reg_membase(XMM11, REG_SP, 4 * 8);
3795 x86_64_movq_reg_membase(XMM10, REG_SP, 5 * 8);
3798 /* save argument registers on stack -- if we have to */
/* a static call loses 2 argument registers (env + class), an instance call
   loses 1 (env), so register overflow begins earlier for static methods */
3799 if ((m->flags & ACC_STATIC && m->paramcount > (INT_ARG_CNT - 2)) || m->paramcount > (INT_ARG_CNT - 1)) {
3801 int paramshiftcnt = (m->flags & ACC_STATIC) ? 2 : 1;
3802 int stackparamcnt = (m->paramcount > INT_ARG_CNT) ? m->paramcount - INT_ARG_CNT : 0;
3804 stackframesize = stackparamcnt + paramshiftcnt;
3806 /* keep stack 16-byte aligned */
/* frame must hold an odd number of 8-byte slots here so that SP stays
   16-byte aligned at the call */
3807 if ((stackframesize % 2) == 0) stackframesize++;
3809 x86_64_alu_imm_reg(X86_64_SUB, stackframesize * 8, REG_SP);
3811 /* copy stack arguments into new stack frame -- if any */
3812 for (i = 0; i < stackparamcnt; i++) {
3813 x86_64_mov_membase_reg(REG_SP, (stackparamcnt + 1 + i) * 8, REG_ITMP1);
3814 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
/* the last 1 or 2 register arguments overflow onto the new frame */
3817 if (m->flags & ACC_STATIC) {
3818 x86_64_mov_reg_membase(argintregs[5], REG_SP, 1 * 8);
3819 x86_64_mov_reg_membase(argintregs[4], REG_SP, 0 * 8);
3822 x86_64_mov_reg_membase(argintregs[5], REG_SP, 0 * 8);
3826 /* keep stack 16-byte aligned -- this is essential for x86_64 */
3827 x86_64_alu_imm_reg(X86_64_SUB, 8, REG_SP);
/* NOTE(review): stackframesize must be zeroed on this small-frame path --
   presumably done in elided lines 3828-3830; otherwise the
   'if (stackframesize)' below reads an uninitialized variable. Confirm. */
/* shift argument registers up by 2 (static: env + class) or 1 (env) */
3831 if (m->flags & ACC_STATIC) {
3832 x86_64_mov_reg_reg(argintregs[3], argintregs[5]);
3833 x86_64_mov_reg_reg(argintregs[2], argintregs[4]);
3834 x86_64_mov_reg_reg(argintregs[1], argintregs[3]);
3835 x86_64_mov_reg_reg(argintregs[0], argintregs[2]);
3837 /* put class into second argument register */
3838 x86_64_mov_imm_reg((s8) m->class, argintregs[1]);
3841 x86_64_mov_reg_reg(argintregs[4], argintregs[5]);
3842 x86_64_mov_reg_reg(argintregs[3], argintregs[4]);
3843 x86_64_mov_reg_reg(argintregs[2], argintregs[3]);
3844 x86_64_mov_reg_reg(argintregs[1], argintregs[2]);
3845 x86_64_mov_reg_reg(argintregs[0], argintregs[1]);
3848 /* put env into first argument register */
3849 x86_64_mov_imm_reg((s8) &env, argintregs[0]);
/* call the native function through a scratch register */
3851 x86_64_mov_imm_reg((s8) f, REG_ITMP1);
3852 x86_64_call_reg(REG_ITMP1);
3854 /* remove stackframe if there is one */
3855 if (stackframesize) {
3856 x86_64_alu_imm_reg(X86_64_ADD, stackframesize * 8, REG_SP);
/* --- result tracing block: save int/float results, report them via
   builtin_displaymethodstop, then restore.  Presumably guarded by an
   'if (runverbose)' in elided lines -- confirm. */
3860 x86_64_alu_imm_reg(X86_64_SUB, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
3862 x86_64_mov_reg_membase(REG_RESULT, REG_SP, 0 * 8);
3863 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, 1 * 8);
3865 x86_64_mov_imm_reg((s8) m, argintregs[0]);
3866 x86_64_mov_reg_reg(REG_RESULT, argintregs[1]);
3867 M_FLTMOVE(REG_FRESULT, argfltregs[0]);
3868 M_FLTMOVE(REG_FRESULT, argfltregs[1]);
3870 x86_64_mov_imm_reg((s8) builtin_displaymethodstop, REG_ITMP1);
3871 x86_64_call_reg(REG_ITMP1);
3873 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_RESULT);
3874 x86_64_movq_membase_reg(REG_SP, 1 * 8, REG_FRESULT);
3876 x86_64_alu_imm_reg(X86_64_ADD, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
3880 /* restore callee saved registers */
3881 x86_64_movq_membase_reg(REG_SP, 0 * 8, XMM15);
3882 x86_64_movq_membase_reg(REG_SP, 1 * 8, XMM14);
3883 x86_64_movq_membase_reg(REG_SP, 2 * 8, XMM13);
3884 x86_64_movq_membase_reg(REG_SP, 3 * 8, XMM12);
3885 x86_64_movq_membase_reg(REG_SP, 4 * 8, XMM11);
3886 x86_64_movq_membase_reg(REG_SP, 5 * 8, XMM10);
3888 x86_64_alu_imm_reg(X86_64_ADD, 7 * 8, REG_SP); /* keep stack 16-byte aligned */
/* check for a pending exception; the jcc skips a 1-byte instruction --
   presumably a 'ret' emitted in elided line(s) -- when none is pending */
3891 x86_64_mov_imm_reg((s8) &_exceptionptr, REG_ITMP3);
3892 x86_64_mov_membase_reg(REG_ITMP3, 0, REG_ITMP3);
3893 x86_64_test_reg_reg(REG_ITMP3, REG_ITMP3);
3894 x86_64_jcc(X86_64_CC_NE, 1);
/* exception path: hand the exception object to the handler and clear the
   global exception pointer */
3898 x86_64_mov_reg_reg(REG_ITMP3, REG_ITMP1_XPTR);
3899 x86_64_mov_imm_reg((s8) &_exceptionptr, REG_ITMP3);
3900 x86_64_alu_reg_reg(X86_64_XOR, REG_ITMP2, REG_ITMP2);
3901 x86_64_mov_reg_membase(REG_ITMP2, REG_ITMP3, 0); /* clear exception pointer */
3903 x86_64_mov_membase_reg(REG_SP, 0, REG_ITMP2_XPC); /* get return address from stack */
/* back up 3 bytes (size of the register-indirect callq) so XPC points at
   the faulting call instruction itself */
3904 x86_64_alu_imm_reg(X86_64_SUB, 3, REG_ITMP2_XPC); /* callq */
3906 x86_64_mov_imm_reg((s8) asm_handle_nat_exception, REG_ITMP3);
3907 x86_64_jmp_reg(REG_ITMP3);
/* one-shot debug print of the generated stub size (guard logic elided).
   NOTE(review): "%d" with a long-valued argument is a printf format
   mismatch; should be "%ld" (or cast to int). */
3911 static int stubprinted;
3913 printf("stubsize: %d\n", ((long)mcodeptr - (long) s));
/* statistics counter -- presumably wrapped in #if defined(STATISTICS) in
   elided lines; confirm.  The 'return s;' and closing brace are elided. */
3919 count_nstub_len += NATIVESTUBSIZE;
3926 /* function: removenativestub **************************************************
3928 removes a previously created native-stub from memory
3930 *******************************************************************************/
/* removenativestub: free the NATIVESTUBSIZE-byte wrapper previously
   allocated by createnativestub.  'stub' must be the exact pointer that
   createnativestub handed out; the size must match the CNEW allocation. */
3932 void removenativestub(u1 *stub)
3934 CFREE(stub, NATIVESTUBSIZE);
3939 * These are local overrides for various environment variables in Emacs.
3940 * Please do not remove this and leave it at the end of the file, where
3941 * Emacs will automagically detect them.
3942 * ---------------------------------------------------------------------
3945 * indent-tabs-mode: t