1 /* vm/jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996-2005 R. Grafl, A. Krall, C. Kruegel, C. Oates,
4 R. Obermaisser, M. Platter, M. Probst, S. Ring, E. Steiner,
5 C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich, J. Wenninger,
6 Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
25 Contact: cacao@complang.tuwien.ac.at
27 Authors: Andreas Krall
30 $Id: codegen.c 2352 2005-04-22 13:55:26Z twisti $
40 #include "cacao/cacao.h"
41 #include "native/native.h"
42 #include "vm/global.h"
43 #include "vm/builtin.h"
44 #include "vm/loader.h"
45 #include "vm/tables.h"
46 #include "vm/jit/asmpart.h"
47 #include "vm/jit/jit.h"
48 #include "vm/jit/reg.h"
49 #include "vm/jit/parse.h"
50 #include "vm/jit/patcher.h"
51 #include "vm/jit/x86_64/arch.h"
52 #include "vm/jit/x86_64/codegen.h"
53 #include "vm/jit/x86_64/emitfuncs.h"
54 #include "vm/jit/x86_64/types.h"
55 #include "vm/jit/x86_64/asmoffsets.h"
58 /* register description - array ***********************************************/
60 /* #define REG_RES 0 reserved register for OS or code generator */
61 /* #define REG_RET 1 return value register */
62 /* #define REG_EXC 2 exception value register (only old jit) */
63 /* #define REG_SAV 3 (callee) saved register */
64 /* #define REG_TMP 4 scratch temporary register (caller saved) */
65 /* #define REG_ARG 5 argument register (caller saved) */
67 /* #define REG_END -1 last entry in tables */
/* Integer register descriptor table.  One REG_* class per physical
   register; order presumably follows x86-64 encoding order
   (rax, rcx, rdx, rbx, rsp, rbp, rsi, rdi, r8..r15) -- TODO confirm
   against arch.h.  The terminating REG_END entry is outside this view. */
69 static int nregdescint[] = {
/* rax      rcx      rdx      rbx      rsp      rbp      rsi      rdi   */
70 REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
/* r8       r9       r10      r11     r12      r13      r14      r15   */
71 REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
/* Float (SSE) register descriptor table, presumably xmm0..xmm15 in
   encoding order -- TODO confirm against arch.h.  xmm0-xmm7 are the
   SysV float argument registers; the REG_END terminator is outside
   this view. */
76 static int nregdescfloat[] = {
/* xmm0     xmm1     xmm2     xmm3     xmm4     xmm5     xmm6     xmm7  */
77 REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG,
/* xmm8     xmm9     xmm10    xmm11    xmm12    xmm13    xmm14    xmm15 */
78 REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
83 /* Include independent code generation stuff -- include after register */
84 /* descriptions to avoid extern definitions. */
86 #include "vm/jit/codegen.inc"
87 #include "vm/jit/reg.inc"
89 #include "vm/jit/lsra.inc"
93 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* thread_restartcriticalsection *********************************************

   Called when a thread is stopped inside a JIT critical section: looks
   up the interrupted PC and, if it lies in a registered critical
   region, rewrites the saved RIP so the thread resumes at the region's
   restart point.

   NOTE(review): thread_checkcritical presumably returns NULL when the
   PC is not inside a critical section; the guard around the RIP
   rewrite is in lines elided from this view -- confirm there.

*******************************************************************************/
94 void thread_restartcriticalsection(ucontext_t *uc)
98 critical = thread_checkcritical((void *) uc->uc_mcontext.gregs[REG_RIP]);
101 uc->uc_mcontext.gregs[REG_RIP] = (u8) critical;
106 /* NullPointerException signal handler for hardware null pointer check */
/* catch_NullPointerException ************************************************

   Signal handler for the hardware null-pointer check: converts a
   SIGSEGV/SIGBUS raised by JIT code into a Java NullPointerException.
   It stores the exception object and the faulting PC into the saved
   machine context and redirects the saved RIP to asm_handle_exception,
   so that returning from the signal enters the JIT's exception
   dispatcher instead of re-executing the faulting instruction.

*******************************************************************************/
108 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
112 struct ucontext *_uc = (struct ucontext *) _p;
113 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
114 struct sigaction act;
115 java_objectheader *xptr;
117 /* Reset signal handler - necessary for SysV, does no harm for BSD */
119 act.sa_sigaction = catch_NullPointerException; /* reinstall handler */
120 act.sa_flags = SA_SIGINFO;
121 sigaction(sig, &act, NULL);
/* Unblock the signal so a later null-pointer fault can be caught again
   (the kernel blocks the delivered signal while the handler runs). */
124 sigaddset(&nsig, sig);
125 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
/* Allocate the Java exception object.  NOTE(review): allocating from a
   signal handler is only safe under this runtime's own guarantees --
   not async-signal-safe by POSIX rules. */
127 xptr = new_nullpointerexception();
/* Hand off to the asm exception handler via the saved context:
   rax = exception object, r10 = faulting PC, rip = dispatcher entry. */
129 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
130 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
131 sigctx->rip = (u8) asm_handle_exception;
137 /* ArithmeticException signal handler for hardware divide by zero check */
/* catch_ArithmeticException *************************************************

   Signal handler for the hardware divide-by-zero check: converts a
   SIGFPE raised by JIT code into a Java ArithmeticException.  Mirrors
   catch_NullPointerException: the exception object and faulting PC are
   written into the saved machine context and the saved RIP is pointed
   at asm_handle_exception, so returning from the signal enters the
   JIT's exception dispatcher.

*******************************************************************************/
139 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
143 struct ucontext *_uc = (struct ucontext *) _p;
144 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
145 struct sigaction act;
146 java_objectheader *xptr;
148 /* Reset signal handler - necessary for SysV, does no harm for BSD */
150 act.sa_sigaction = catch_ArithmeticException; /* reinstall handler */
151 act.sa_flags = SA_SIGINFO;
152 sigaction(sig, &act, NULL);
/* Unblock the delivered signal so a later SIGFPE can be caught again. */
155 sigaddset(&nsig, sig);
156 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
/* Allocate the Java exception object (runtime-specific safety; see
   note in catch_NullPointerException). */
158 xptr = new_arithmeticexception();
/* Hand off via the saved context:
   rax = exception object, r10 = faulting PC, rip = dispatcher entry. */
160 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
161 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
162 sigctx->rip = (u8) asm_handle_exception;
/* init_exceptions ***********************************************************

   Installs the SA_SIGINFO signal handlers that turn hardware traps in
   JIT-generated code into Java exceptions:

     SIGSEGV, SIGBUS -> catch_NullPointerException
     SIGFPE          -> catch_ArithmeticException

   Must run once before any JIT code that relies on hardware null or
   divide-by-zero checks is executed.

*******************************************************************************/
168 void init_exceptions(void)
170 struct sigaction act;
172 /* install signal handlers we need to convert to exceptions */
173 sigemptyset(&act.sa_mask);
/* hardware null-pointer check: SIGSEGV (and SIGBUS on some systems) */
177 act.sa_sigaction = catch_NullPointerException;
178 act.sa_flags = SA_SIGINFO;
179 sigaction(SIGSEGV, &act, NULL);
183 act.sa_sigaction = catch_NullPointerException;
184 act.sa_flags = SA_SIGINFO;
185 sigaction(SIGBUS, &act, NULL);
/* hardware divide-by-zero check */
189 act.sa_sigaction = catch_ArithmeticException;
190 act.sa_flags = SA_SIGINFO;
191 sigaction(SIGFPE, &act, NULL);
195 /* function codegen ************************************************************
197 generates machine code for one method
199 *******************************************************************************/
201 void codegen(methodinfo *m, codegendata *cd, registerdata *rd)
203 s4 len, s1, s2, s3, d;
218 /* space to save used callee saved registers */
220 savedregs_num += (rd->savintregcnt - rd->maxsavintreguse);
221 savedregs_num += (rd->savfltregcnt - rd->maxsavfltreguse);
223 parentargs_base = rd->maxmemuse + savedregs_num;
225 #if defined(USE_THREADS) /* space to save argument of monitor_enter */
227 if (checksync && (m->flags & ACC_SYNCHRONIZED))
232 /* Keep stack of non-leaf functions 16-byte aligned for calls into native */
233 /* code e.g. libc or jni (alignment problems with movaps). */
235 if (!m->isleafmethod || runverbose)
236 parentargs_base |= 0x1;
238 /* create method header */
240 (void) dseg_addaddress(cd, m); /* MethodPointer */
241 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
243 #if defined(USE_THREADS)
245 /* IsSync contains the offset relative to the stack pointer for the
246 argument of monitor_exit used in the exception handler. Since the
247 offset could be zero and give a wrong meaning of the flag it is
251 if (checksync && (m->flags & ACC_SYNCHRONIZED))
252 (void) dseg_adds4(cd, (rd->maxmemuse + 1) * 8); /* IsSync */
257 (void) dseg_adds4(cd, 0); /* IsSync */
259 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
260 (void) dseg_adds4(cd, rd->savintregcnt - rd->maxsavintreguse);/* IntSave */
261 (void) dseg_adds4(cd, rd->savfltregcnt - rd->maxsavfltreguse);/* FltSave */
262 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
264 /* create exception table */
266 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
267 dseg_addtarget(cd, ex->start);
268 dseg_addtarget(cd, ex->end);
269 dseg_addtarget(cd, ex->handler);
270 (void) dseg_addaddress(cd, ex->catchtype.cls);
273 /* initialize mcode variables */
275 cd->mcodeptr = (u1 *) cd->mcodebase;
276 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
277 MCODECHECK(128 + m->paramcount);
279 /* create stack frame (if necessary) */
281 if (parentargs_base) {
282 x86_64_alu_imm_reg(cd, X86_64_SUB, parentargs_base * 8, REG_SP);
285 /* save used callee saved registers */
288 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
289 p--; x86_64_mov_reg_membase(cd, rd->savintregs[i], REG_SP, p * 8);
291 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
292 p--; x86_64_movq_reg_membase(cd, rd->savfltregs[i], REG_SP, p * 8);
295 /* take arguments out of register or stack frame */
297 for (p = 0, l = 0, s1 = 0, s2 = 0; p < m->paramcount; p++) {
298 t = m->paramtypes[p];
299 var = &(rd->locals[l][t]);
301 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
304 if (IS_INT_LNG_TYPE(t)) {
311 if (IS_INT_LNG_TYPE(t)) { /* integer args */
312 if (s1 < INT_ARG_CNT) { /* register arguments */
313 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
314 M_INTMOVE(rd->argintregs[s1], var->regoff);
316 } else { /* reg arg -> spilled */
317 x86_64_mov_reg_membase(cd, rd->argintregs[s1], REG_SP, var->regoff * 8);
320 } else { /* stack arguments */
321 pa = s1 - INT_ARG_CNT;
322 if (s2 >= FLT_ARG_CNT) {
323 pa += s2 - FLT_ARG_CNT;
325 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
326 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff); /* + 8 for return address */
327 } else { /* stack arg -> spilled */
328 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1); /* + 8 for return address */
329 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, var->regoff * 8);
334 } else { /* floating args */
335 if (s2 < FLT_ARG_CNT) { /* register arguments */
336 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
337 M_FLTMOVE(rd->argfltregs[s2], var->regoff);
339 } else { /* reg arg -> spilled */
340 x86_64_movq_reg_membase(cd, rd->argfltregs[s2], REG_SP, var->regoff * 8);
343 } else { /* stack arguments */
344 pa = s2 - FLT_ARG_CNT;
345 if (s1 >= INT_ARG_CNT) {
346 pa += s1 - INT_ARG_CNT;
348 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
349 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff);
352 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
353 x86_64_movq_reg_membase(cd, REG_FTMP1, REG_SP, var->regoff * 8);
360 /* save monitorenter argument */
362 #if defined(USE_THREADS)
363 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
366 if (m->flags & ACC_STATIC) {
367 func_enter = (u8) builtin_staticmonitorenter;
368 x86_64_mov_imm_reg(cd, (s8) m->class, REG_ITMP1);
369 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, rd->maxmemuse * 8);
372 func_enter = (u8) builtin_monitorenter;
373 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, rd->maxmemuse * 8);
376 /* call monitorenter function */
378 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
379 x86_64_mov_imm_reg(cd, func_enter, REG_ITMP1);
380 x86_64_call_reg(cd, REG_ITMP1);
384 /* Copy argument registers to stack and call trace function with pointer */
385 /* to arguments on stack. */
388 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1 + 1) * 8, REG_SP);
390 /* save integer argument registers */
392 for (p = 0; p < INT_ARG_CNT; p++) {
393 x86_64_mov_reg_membase(cd, rd->argintregs[p], REG_SP, (1 + p) * 8);
396 /* save float argument registers */
398 for (p = 0; p < FLT_ARG_CNT; p++) {
399 x86_64_movq_reg_membase(cd, rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
402 /* show integer hex code for float arguments */
404 for (p = 0, l = 0; p < m->paramcount && p < INT_ARG_CNT; p++) {
405 t = m->paramtypes[p];
407 /* if the paramtype is a float, we have to right shift all */
408 /* following integer registers */
410 if (IS_FLT_DBL_TYPE(t)) {
411 for (s1 = INT_ARG_CNT - 2; s1 >= p; s1--) {
412 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
415 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
420 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP2);
421 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
422 x86_64_mov_imm_reg(cd, (u8) builtin_trace_args, REG_ITMP1);
423 x86_64_call_reg(cd, REG_ITMP1);
425 /* restore integer argument registers */
427 for (p = 0; p < INT_ARG_CNT; p++) {
428 x86_64_mov_membase_reg(cd, REG_SP, (1 + p) * 8, rd->argintregs[p]);
431 /* restore float argument registers */
433 for (p = 0; p < FLT_ARG_CNT; p++) {
434 x86_64_movq_membase_reg(cd, REG_SP, (1 + INT_ARG_CNT + p) * 8, rd->argfltregs[p]);
437 x86_64_alu_imm_reg(cd, X86_64_ADD, (6 + 8 + 1 + 1) * 8, REG_SP);
442 /* end of header generation */
444 /* walk through all basic blocks */
445 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
447 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
449 if (bptr->flags >= BBREACHED) {
451 /* branch resolving */
454 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
455 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
460 /* copy interface registers to their destination */
464 MCODECHECK(64 + len);
468 while (src != NULL) {
470 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
471 if (bptr->type == BBTYPE_SBR) {
472 /* d = reg_of_var(rd, src, REG_ITMP1); */
473 if (!(src->flags & INMEMORY))
477 x86_64_pop_reg(cd, d);
478 store_reg_to_var_int(src, d);
480 } else if (bptr->type == BBTYPE_EXH) {
481 /* d = reg_of_var(rd, src, REG_ITMP1); */
482 if (!(src->flags & INMEMORY))
486 M_INTMOVE(REG_ITMP1, d);
487 store_reg_to_var_int(src, d);
496 while (src != NULL) {
498 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
499 if (bptr->type == BBTYPE_SBR) {
500 d = reg_of_var(rd, src, REG_ITMP1);
501 x86_64_pop_reg(cd, d);
502 store_reg_to_var_int(src, d);
504 } else if (bptr->type == BBTYPE_EXH) {
505 d = reg_of_var(rd, src, REG_ITMP1);
506 M_INTMOVE(REG_ITMP1, d);
507 store_reg_to_var_int(src, d);
511 d = reg_of_var(rd, src, REG_ITMP1);
512 if ((src->varkind != STACKVAR)) {
514 if (IS_FLT_DBL_TYPE(s2)) {
515 s1 = rd->interfaces[len][s2].regoff;
516 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
520 x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
522 store_reg_to_var_flt(src, d);
525 s1 = rd->interfaces[len][s2].regoff;
526 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
530 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
532 store_reg_to_var_int(src, d);
541 /* walk through all instructions */
545 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
547 MCODECHECK(128); /* XXX are 128 bytes enough? */
550 case ICMD_INLINE_START: /* internal ICMDs */
551 case ICMD_INLINE_END:
554 case ICMD_NOP: /* ... ==> ... */
557 case ICMD_NULLCHECKPOP: /* ..., objectref ==> ... */
558 if (src->flags & INMEMORY) {
559 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
562 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
564 x86_64_jcc(cd, X86_64_CC_E, 0);
565 codegen_addxnullrefs(cd, cd->mcodeptr);
568 /* constant operations ************************************************/
570 case ICMD_ICONST: /* ... ==> ..., constant */
571 /* op1 = 0, val.i = constant */
573 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
574 if (iptr->val.i == 0) {
575 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
577 x86_64_movl_imm_reg(cd, iptr->val.i, d);
579 store_reg_to_var_int(iptr->dst, d);
582 case ICMD_ACONST: /* ... ==> ..., constant */
583 /* op1 = 0, val.a = constant */
585 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
586 if (iptr->val.a == 0) {
587 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
589 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, d);
591 store_reg_to_var_int(iptr->dst, d);
594 case ICMD_LCONST: /* ... ==> ..., constant */
595 /* op1 = 0, val.l = constant */
597 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
598 if (iptr->val.l == 0) {
599 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
601 x86_64_mov_imm_reg(cd, iptr->val.l, d);
603 store_reg_to_var_int(iptr->dst, d);
606 case ICMD_FCONST: /* ... ==> ..., constant */
607 /* op1 = 0, val.f = constant */
609 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
610 a = dseg_addfloat(cd, iptr->val.f);
611 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + a, d);
612 store_reg_to_var_flt(iptr->dst, d);
615 case ICMD_DCONST: /* ... ==> ..., constant */
616 /* op1 = 0, val.d = constant */
618 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
619 a = dseg_adddouble(cd, iptr->val.d);
620 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, d);
621 store_reg_to_var_flt(iptr->dst, d);
625 /* load/store operations **********************************************/
627 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
628 /* op1 = local variable */
630 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
631 if ((iptr->dst->varkind == LOCALVAR) &&
632 (iptr->dst->varnum == iptr->op1)) {
635 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
636 if (var->flags & INMEMORY) {
637 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
638 store_reg_to_var_int(iptr->dst, d);
641 if (iptr->dst->flags & INMEMORY) {
642 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
645 M_INTMOVE(var->regoff, d);
650 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
651 case ICMD_ALOAD: /* op1 = local variable */
653 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
654 if ((iptr->dst->varkind == LOCALVAR) &&
655 (iptr->dst->varnum == iptr->op1)) {
658 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
659 if (var->flags & INMEMORY) {
660 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
661 store_reg_to_var_int(iptr->dst, d);
664 if (iptr->dst->flags & INMEMORY) {
665 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
668 M_INTMOVE(var->regoff, d);
673 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
674 case ICMD_DLOAD: /* op1 = local variable */
676 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
677 if ((iptr->dst->varkind == LOCALVAR) &&
678 (iptr->dst->varnum == iptr->op1)) {
681 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
682 if (var->flags & INMEMORY) {
683 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
684 store_reg_to_var_flt(iptr->dst, d);
687 if (iptr->dst->flags & INMEMORY) {
688 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
691 M_FLTMOVE(var->regoff, d);
696 case ICMD_ISTORE: /* ..., value ==> ... */
697 case ICMD_LSTORE: /* op1 = local variable */
700 if ((src->varkind == LOCALVAR) &&
701 (src->varnum == iptr->op1)) {
704 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
705 if (var->flags & INMEMORY) {
706 var_to_reg_int(s1, src, REG_ITMP1);
707 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
710 var_to_reg_int(s1, src, var->regoff);
711 M_INTMOVE(s1, var->regoff);
715 case ICMD_FSTORE: /* ..., value ==> ... */
716 case ICMD_DSTORE: /* op1 = local variable */
718 if ((src->varkind == LOCALVAR) &&
719 (src->varnum == iptr->op1)) {
722 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
723 if (var->flags & INMEMORY) {
724 var_to_reg_flt(s1, src, REG_FTMP1);
725 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
728 var_to_reg_flt(s1, src, var->regoff);
729 M_FLTMOVE(s1, var->regoff);
734 /* pop/dup/swap operations ********************************************/
736 /* attention: double and longs are only one entry in CACAO ICMDs */
738 case ICMD_POP: /* ..., value ==> ... */
739 case ICMD_POP2: /* ..., value, value ==> ... */
742 case ICMD_DUP: /* ..., a ==> ..., a, a */
743 M_COPY(src, iptr->dst);
746 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
748 M_COPY(src, iptr->dst);
749 M_COPY(src->prev, iptr->dst->prev);
750 M_COPY(iptr->dst, iptr->dst->prev->prev);
753 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
755 M_COPY(src, iptr->dst);
756 M_COPY(src->prev, iptr->dst->prev);
757 M_COPY(src->prev->prev, iptr->dst->prev->prev);
758 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
761 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
763 M_COPY(src, iptr->dst);
764 M_COPY(src->prev, iptr->dst->prev);
767 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
769 M_COPY(src, iptr->dst);
770 M_COPY(src->prev, iptr->dst->prev);
771 M_COPY(src->prev->prev, iptr->dst->prev->prev);
772 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
773 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
776 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
778 M_COPY(src, iptr->dst);
779 M_COPY(src->prev, iptr->dst->prev);
780 M_COPY(src->prev->prev, iptr->dst->prev->prev);
781 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
782 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
783 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
786 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
788 M_COPY(src, iptr->dst->prev);
789 M_COPY(src->prev, iptr->dst);
793 /* integer operations *************************************************/
795 case ICMD_INEG: /* ..., value ==> ..., - value */
797 d = reg_of_var(rd, iptr->dst, REG_NULL);
798 if (iptr->dst->flags & INMEMORY) {
799 if (src->flags & INMEMORY) {
800 if (src->regoff == iptr->dst->regoff) {
801 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
804 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
805 x86_64_negl_reg(cd, REG_ITMP1);
806 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
810 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
811 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
815 if (src->flags & INMEMORY) {
816 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
817 x86_64_negl_reg(cd, d);
820 M_INTMOVE(src->regoff, iptr->dst->regoff);
821 x86_64_negl_reg(cd, iptr->dst->regoff);
826 case ICMD_LNEG: /* ..., value ==> ..., - value */
828 d = reg_of_var(rd, iptr->dst, REG_NULL);
829 if (iptr->dst->flags & INMEMORY) {
830 if (src->flags & INMEMORY) {
831 if (src->regoff == iptr->dst->regoff) {
832 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
835 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
836 x86_64_neg_reg(cd, REG_ITMP1);
837 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
841 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
842 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
846 if (src->flags & INMEMORY) {
847 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
848 x86_64_neg_reg(cd, iptr->dst->regoff);
851 M_INTMOVE(src->regoff, iptr->dst->regoff);
852 x86_64_neg_reg(cd, iptr->dst->regoff);
857 case ICMD_I2L: /* ..., value ==> ..., value */
859 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
860 if (src->flags & INMEMORY) {
861 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
864 x86_64_movslq_reg_reg(cd, src->regoff, d);
866 store_reg_to_var_int(iptr->dst, d);
869 case ICMD_L2I: /* ..., value ==> ..., value */
871 var_to_reg_int(s1, src, REG_ITMP1);
872 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
874 store_reg_to_var_int(iptr->dst, d);
877 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
879 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
880 if (src->flags & INMEMORY) {
881 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
884 x86_64_movsbq_reg_reg(cd, src->regoff, d);
886 store_reg_to_var_int(iptr->dst, d);
889 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
891 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
892 if (src->flags & INMEMORY) {
893 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
896 x86_64_movzwq_reg_reg(cd, src->regoff, d);
898 store_reg_to_var_int(iptr->dst, d);
901 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
903 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
904 if (src->flags & INMEMORY) {
905 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
908 x86_64_movswq_reg_reg(cd, src->regoff, d);
910 store_reg_to_var_int(iptr->dst, d);
914 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
916 d = reg_of_var(rd, iptr->dst, REG_NULL);
917 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
920 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
921 /* val.i = constant */
923 d = reg_of_var(rd, iptr->dst, REG_NULL);
924 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
927 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
929 d = reg_of_var(rd, iptr->dst, REG_NULL);
930 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
933 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
934 /* val.l = constant */
936 d = reg_of_var(rd, iptr->dst, REG_NULL);
937 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
940 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
942 d = reg_of_var(rd, iptr->dst, REG_NULL);
943 if (iptr->dst->flags & INMEMORY) {
944 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
945 if (src->prev->regoff == iptr->dst->regoff) {
946 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
947 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
950 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
951 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
952 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
955 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
956 M_INTMOVE(src->prev->regoff, REG_ITMP1);
957 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
958 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
960 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
961 if (src->prev->regoff == iptr->dst->regoff) {
962 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
965 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
966 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
967 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
971 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
972 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
976 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
977 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
978 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
980 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
981 M_INTMOVE(src->prev->regoff, d);
982 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
984 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
985 /* workaround for reg alloc */
986 if (src->regoff == iptr->dst->regoff) {
987 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
988 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
989 M_INTMOVE(REG_ITMP1, d);
992 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
993 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
997 /* workaround for reg alloc */
998 if (src->regoff == iptr->dst->regoff) {
999 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1000 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1001 M_INTMOVE(REG_ITMP1, d);
1004 M_INTMOVE(src->prev->regoff, d);
1005 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
1011 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
1012 /* val.i = constant */
1014 d = reg_of_var(rd, iptr->dst, REG_NULL);
1015 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
1018 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1020 d = reg_of_var(rd, iptr->dst, REG_NULL);
1021 if (iptr->dst->flags & INMEMORY) {
1022 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1023 if (src->prev->regoff == iptr->dst->regoff) {
1024 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1025 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1028 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1029 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1030 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1033 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1034 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1035 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1036 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1038 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1039 if (src->prev->regoff == iptr->dst->regoff) {
1040 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1043 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1044 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1045 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1049 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1050 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1054 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1055 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1056 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1058 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1059 M_INTMOVE(src->prev->regoff, d);
1060 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1062 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1063 /* workaround for reg alloc */
1064 if (src->regoff == iptr->dst->regoff) {
1065 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1066 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1067 M_INTMOVE(REG_ITMP1, d);
1070 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1071 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1075 /* workaround for reg alloc */
1076 if (src->regoff == iptr->dst->regoff) {
1077 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1078 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1079 M_INTMOVE(REG_ITMP1, d);
1082 M_INTMOVE(src->prev->regoff, d);
1083 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1089 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1090 /* val.l = constant */
1092 d = reg_of_var(rd, iptr->dst, REG_NULL);
1093 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1096 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1098 d = reg_of_var(rd, iptr->dst, REG_NULL);
1099 if (iptr->dst->flags & INMEMORY) {
1100 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1101 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1102 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1103 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* Continuation of the ICMD_IMUL case (head elided above): 32-bit integer
   multiply.  The code dispatches on whether the two operands (src = val2,
   src->prev = val1) and the destination live in a register or are spilled
   to the stack (INMEMORY => value at REG_SP + regoff*8).  Spilled results
   are computed into REG_ITMP1 and stored back. */
1105 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
/* val2 spilled, val1 in a register: load val2, multiply, spill result */
1106 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1107 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1108 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1110 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* val1 spilled, val2 in a register (imul is commutative, so order is free) */
1111 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1112 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1113 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* (elided else-arm above) both operands in registers, dst spilled */
1116 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1117 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1118 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* destination is in a register: multiply straight into dst->regoff */
1122 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1123 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1124 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1126 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1127 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1128 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1130 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1131 M_INTMOVE(src->regoff, iptr->dst->regoff);
1132 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
/* both in registers: avoid a clobbering move when val2 already sits in dst */
1135 if (src->regoff == iptr->dst->regoff) {
1136 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1139 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1140 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1146 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1147 /* val.i = constant */
1149 d = reg_of_var(rd, iptr->dst, REG_NULL);
1150 if (iptr->dst->flags & INMEMORY) {
1151 if (src->flags & INMEMORY) {
/* imul imm32, mem -> reg, then spill the 32-bit result */
1152 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1153 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1156 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1157 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1161 if (src->flags & INMEMORY) {
1162 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
/* strength reduction: x * 2 becomes add dst, dst (cheaper than imul) */
1165 if (iptr->val.i == 2) {
1166 M_INTMOVE(src->regoff, iptr->dst->regoff);
1167 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1170 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
1176 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
/* 64-bit multiply: same INMEMORY/register dispatch as ICMD_IMUL, but with
   the quad-word (non-"l") emitter variants. */
1178 d = reg_of_var(rd, iptr->dst, REG_NULL);
1179 if (iptr->dst->flags & INMEMORY) {
1180 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* both operands spilled: val1 -> ITMP1, imul by val2 from memory, spill */
1181 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1182 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1183 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1185 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1186 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1187 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1188 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1190 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1191 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1192 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1193 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* (elided else-arm) both operands in registers, dst spilled */
1196 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1197 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1198 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* destination in a register */
1202 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1203 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1204 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1206 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1207 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1208 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1210 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1211 M_INTMOVE(src->regoff, iptr->dst->regoff);
1212 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
/* both in registers: skip the move when val2 is already in dst */
1215 if (src->regoff == iptr->dst->regoff) {
1216 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1219 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1220 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
1226 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1227 /* val.l = constant */
/* x86_64 imul only takes a 32-bit immediate; constants outside IMM32 range
   must be materialized into a register first. */
1229 d = reg_of_var(rd, iptr->dst, REG_NULL);
1230 if (iptr->dst->flags & INMEMORY) {
1231 if (src->flags & INMEMORY) {
1232 if (IS_IMM32(iptr->val.l)) {
1233 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
/* (elided else) constant does not fit in imm32: load it, then multiply */
1236 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1237 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1239 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1242 if (IS_IMM32(iptr->val.l)) {
1243 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1246 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1247 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1249 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1253 if (src->flags & INMEMORY) {
1254 if (IS_IMM32(iptr->val.l)) {
1255 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1258 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1259 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1263 /* should match in many cases */
/* strength reduction: x * 2 -> add dst, dst */
1264 if (iptr->val.l == 2) {
1265 M_INTMOVE(src->regoff, iptr->dst->regoff);
1266 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1269 if (IS_IMM32(iptr->val.l)) {
1270 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1273 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1274 M_INTMOVE(src->regoff, iptr->dst->regoff);
1275 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
1282 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
/* 32-bit signed division.  x86 idiv requires the dividend in RAX (with
   RDX holding the sign extension); the emitted code first guards the one
   overflowing case mandated by the JVM spec, INT_MIN / -1, which would
   raise #DE on x86 but must yield INT_MIN in Java.  The x86_64_jcc
   forward-jump operands are hand-counted byte lengths of the skipped
   instructions and must stay in sync with the emitters. */
1284 d = reg_of_var(rd, iptr->dst, REG_NULL);
1285 if (src->prev->flags & INMEMORY) {
1286 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1289 M_INTMOVE(src->prev->regoff, RAX);
1292 if (src->flags & INMEMORY) {
1293 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1296 M_INTMOVE(src->regoff, REG_ITMP3);
/* if dividend != INT_MIN or divisor != -1, fall through to the real idiv;
   otherwise skip it (result is already INT_MIN in RAX) */
1300 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1301 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1302 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1303 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1305 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1307 x86_64_idivl_reg(cd, REG_ITMP3);
1309 if (iptr->dst->flags & INMEMORY) {
/* quotient lands in RAX */
1310 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1311 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1314 M_INTMOVE(RAX, iptr->dst->regoff);
/* only restore RDX if the result was not allocated there */
1316 if (iptr->dst->regoff != RDX) {
1317 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1322 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
/* 32-bit signed remainder: same setup as IDIV, but the INT_MIN % -1
   special case must produce 0, so RDX is zeroed before the skip, and the
   remainder is taken from RDX after idiv. */
1324 d = reg_of_var(rd, iptr->dst, REG_NULL);
1325 if (src->prev->flags & INMEMORY) {
1326 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1329 M_INTMOVE(src->prev->regoff, RAX);
1332 if (src->flags & INMEMORY) {
1333 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1336 M_INTMOVE(src->regoff, REG_ITMP3);
1340 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1342 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1343 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
/* special case: remainder is 0 (already in RDX via xor), skip idiv */
1346 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1347 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1348 x86_64_jcc(cd, X86_64_CC_E, 1 + 3); /* 6 bytes */
1351 x86_64_idivl_reg(cd, REG_ITMP3);
1353 if (iptr->dst->flags & INMEMORY) {
1354 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1355 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1358 M_INTMOVE(RDX, iptr->dst->regoff);
1360 if (iptr->dst->regoff != RDX) {
1361 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1366 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1367 /* val.i = constant */
/* signed divide by 2^val.i: the lea/cmovcc pair adds (2^n - 1) to negative
   dividends so the arithmetic shift rounds toward zero (Java semantics)
   instead of toward -infinity. */
1369 var_to_reg_int(s1, src, REG_ITMP1);
1370 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1371 M_INTMOVE(s1, REG_ITMP1);
1372 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1373 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1374 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1375 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1376 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1377 store_reg_to_var_int(iptr->dst, d);
1380 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1381 /* val.i = constant */
/* remainder by a power of two; here val.i is presumably already the mask
   (2^n - 1) rather than the exponent — TODO confirm against the parser.
   Computes x - (bias-corrected x & ~mask) so negative operands get Java's
   truncated remainder. */
1383 var_to_reg_int(s1, src, REG_ITMP1);
1384 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1385 M_INTMOVE(s1, REG_ITMP1);
1386 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1387 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1388 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1389 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1390 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1391 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1392 store_reg_to_var_int(iptr->dst, d);
1396 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
/* 64-bit signed division, mirroring ICMD_IDIV: guard LONG_MIN / -1 (which
   would trap on x86 but must return LONG_MIN per the JVM spec), save RDX
   (argument register clobbered by idiv), divide, fetch quotient from RAX.
   The jcc operands are hand-counted byte lengths of the skipped code. */
1398 d = reg_of_var(rd, iptr->dst, REG_NULL);
1399 if (src->prev->flags & INMEMORY) {
1400 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1403 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1406 if (src->flags & INMEMORY) {
1407 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1410 M_INTMOVE(src->regoff, REG_ITMP3);
/* 0x8000000000000000 does not fit an imm32 cmp, so load it first */
1414 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1415 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1416 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1417 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1418 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1420 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1422 x86_64_idiv_reg(cd, REG_ITMP3);
1424 if (iptr->dst->flags & INMEMORY) {
1425 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1426 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1429 M_INTMOVE(RAX, iptr->dst->regoff);
1431 if (iptr->dst->regoff != RDX) {
1432 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1437 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
/* 64-bit signed remainder: LONG_MIN % -1 must yield 0, so RDX is zeroed
   before the conditional skip; remainder is read from RDX afterwards.
   NOTE(review): the skip offset (2 + 3) differs from IREM's (1 + 3); it
   must equal the exact byte length of the REX-prefixed idiv — verify
   against the emitter's encoding. */
1439 d = reg_of_var(rd, iptr->dst, REG_NULL);
1440 if (src->prev->flags & INMEMORY) {
1441 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1444 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1447 if (src->flags & INMEMORY) {
1448 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1451 M_INTMOVE(src->regoff, REG_ITMP3);
1455 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1457 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1458 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1459 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1462 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1463 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1464 x86_64_jcc(cd, X86_64_CC_E, 2 + 3); /* 6 bytes */
1467 x86_64_idiv_reg(cd, REG_ITMP3);
1469 if (iptr->dst->flags & INMEMORY) {
1470 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1471 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1474 M_INTMOVE(RDX, iptr->dst->regoff);
1476 if (iptr->dst->regoff != RDX) {
1477 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1482 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1483 /* val.i = constant */
/* signed 64-bit divide by 2^val.i with round-toward-zero bias correction.
   NOTE(review): `(1 << iptr->val.i) - 1` is an *int* shift — for
   val.i >= 31 this is wrong/undefined for a long divide; it presumably
   should be a 64-bit constant (e.g. ((s8) 1 << val.i) - 1).  Verify. */
1485 var_to_reg_int(s1, src, REG_ITMP1);
1486 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1487 M_INTMOVE(s1, REG_ITMP1);
1488 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1489 x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1490 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1491 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1492 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1493 store_reg_to_var_int(iptr->dst, d);
1496 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1497 /* val.l = constant */
/* 64-bit remainder by a power of two; same mask trick as IREMPOW2.
   NOTE(review): uses iptr->val.i although the header comment says val.l —
   constants above 32 bits would be truncated; verify against the parser. */
1499 var_to_reg_int(s1, src, REG_ITMP1);
1500 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1501 M_INTMOVE(s1, REG_ITMP1);
1502 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1503 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1504 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1505 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1506 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1507 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1508 store_reg_to_var_int(iptr->dst, d);
/* Shift and bitwise ALU opcodes.  All of these delegate to the shared
   emit helpers in emitfuncs.c, which handle the INMEMORY/register operand
   dispatch; the only per-case difference is the ALU/shift opcode selector
   ("l" suffix = 32-bit, plain = 64-bit) and whether the second operand is
   a stack value or an instruction constant. */
1511 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1513 d = reg_of_var(rd, iptr->dst, REG_NULL);
1514 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1517 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1518 /* val.i = constant */
1520 d = reg_of_var(rd, iptr->dst, REG_NULL);
1521 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1524 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1526 d = reg_of_var(rd, iptr->dst, REG_NULL);
1527 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1530 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1531 /* val.i = constant */
1533 d = reg_of_var(rd, iptr->dst, REG_NULL);
1534 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1537 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1539 d = reg_of_var(rd, iptr->dst, REG_NULL);
1540 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1543 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1544 /* val.i = constant */
1546 d = reg_of_var(rd, iptr->dst, REG_NULL);
1547 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1550 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1552 d = reg_of_var(rd, iptr->dst, REG_NULL);
1553 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1556 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1557 /* val.i = constant */
1559 d = reg_of_var(rd, iptr->dst, REG_NULL);
1560 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1563 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1565 d = reg_of_var(rd, iptr->dst, REG_NULL);
1566 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1569 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1570 /* val.i = constant */
1572 d = reg_of_var(rd, iptr->dst, REG_NULL);
1573 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1576 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1578 d = reg_of_var(rd, iptr->dst, REG_NULL);
1579 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1582 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1583 /* val.l = constant */
1585 d = reg_of_var(rd, iptr->dst, REG_NULL);
1586 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1589 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1591 d = reg_of_var(rd, iptr->dst, REG_NULL);
1592 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1595 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1596 /* val.i = constant */
1598 d = reg_of_var(rd, iptr->dst, REG_NULL);
1599 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1602 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1604 d = reg_of_var(rd, iptr->dst, REG_NULL);
1605 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1608 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1609 /* val.l = constant */
1611 d = reg_of_var(rd, iptr->dst, REG_NULL);
1612 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1615 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1617 d = reg_of_var(rd, iptr->dst, REG_NULL);
1618 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1621 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1622 /* val.i = constant */
1624 d = reg_of_var(rd, iptr->dst, REG_NULL);
1625 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1628 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1630 d = reg_of_var(rd, iptr->dst, REG_NULL);
1631 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1634 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1635 /* val.l = constant */
1637 d = reg_of_var(rd, iptr->dst, REG_NULL);
1638 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1641 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1643 d = reg_of_var(rd, iptr->dst, REG_NULL);
1644 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1647 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1648 /* val.i = constant */
1650 d = reg_of_var(rd, iptr->dst, REG_NULL);
1651 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1654 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1656 d = reg_of_var(rd, iptr->dst, REG_NULL);
1657 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1660 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1661 /* val.l = constant */
1663 d = reg_of_var(rd, iptr->dst, REG_NULL);
1664 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
1668 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1669 /* op1 = variable, val.i = constant */
1671 /* using inc and dec is definitely faster than add -- tested */
/* increments the local variable in place; `d` is presumably set to the
   variable's regoff on an elided line between 1674 and 1676 — the
   INMEMORY path uses it as a stack slot index, the register path as a
   register number. */
1674 var = &(rd->locals[iptr->op1][TYPE_INT]);
1676 if (var->flags & INMEMORY) {
1677 if (iptr->val.i == 1) {
1678 x86_64_incl_membase(cd, REG_SP, d * 8);
1680 } else if (iptr->val.i == -1) {
1681 x86_64_decl_membase(cd, REG_SP, d * 8);
1684 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1688 if (iptr->val.i == 1) {
1689 x86_64_incl_reg(cd, d);
1691 } else if (iptr->val.i == -1) {
1692 x86_64_decl_reg(cd, d);
1695 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1701 /* floating operations ************************************************/
1703 case ICMD_FNEG: /* ..., value ==> ..., - value */
/* SSE has no negate: XOR the sign bit with a constant loaded RIP-relative
   from the data segment.  The -((mcodeptr + 9) - mcodebase) + a term is
   the displacement from the end of the 9-byte movss to the dseg entry. */
1705 var_to_reg_flt(s1, src, REG_FTMP1);
1706 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1707 a = dseg_adds4(cd, 0x80000000);
1709 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1710 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1711 store_reg_to_var_flt(iptr->dst, d);
1714 case ICMD_DNEG: /* ..., value ==> ..., - value */
/* double negate: same sign-bit XOR trick with a 64-bit constant */
1716 var_to_reg_flt(s1, src, REG_FTMP1);
1717 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1718 a = dseg_adds8(cd, 0x8000000000000000);
1720 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1721 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1722 store_reg_to_var_flt(iptr->dst, d);
1725 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
/* commutative: if either operand already lives in d, add the other one
   into it directly (the s1 == d arm is elided at line 1730). */
1727 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1728 var_to_reg_flt(s2, src, REG_FTMP2);
1729 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1731 x86_64_addss_reg_reg(cd, s2, d);
1732 } else if (s2 == d) {
1733 x86_64_addss_reg_reg(cd, s1, d);
1736 x86_64_addss_reg_reg(cd, s2, d);
1738 store_reg_to_var_flt(iptr->dst, d);
1741 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1743 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1744 var_to_reg_flt(s2, src, REG_FTMP2);
1745 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1747 x86_64_addsd_reg_reg(cd, s2, d);
1748 } else if (s2 == d) {
1749 x86_64_addsd_reg_reg(cd, s1, d);
1752 x86_64_addsd_reg_reg(cd, s2, d);
1754 store_reg_to_var_flt(iptr->dst, d);
1757 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
/* non-commutative: if s2 aliases d it must be copied aside first */
1759 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1760 var_to_reg_flt(s2, src, REG_FTMP2);
1761 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1763 M_FLTMOVE(s2, REG_FTMP2);
1767 x86_64_subss_reg_reg(cd, s2, d);
1768 store_reg_to_var_flt(iptr->dst, d);
1771 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1773 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1774 var_to_reg_flt(s2, src, REG_FTMP2);
1775 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1777 M_FLTMOVE(s2, REG_FTMP2);
1781 x86_64_subsd_reg_reg(cd, s2, d);
1782 store_reg_to_var_flt(iptr->dst, d);
1785 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
/* commutative, same aliasing strategy as FADD */
1787 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1788 var_to_reg_flt(s2, src, REG_FTMP2);
1789 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1791 x86_64_mulss_reg_reg(cd, s2, d);
1792 } else if (s2 == d) {
1793 x86_64_mulss_reg_reg(cd, s1, d);
1796 x86_64_mulss_reg_reg(cd, s2, d);
1798 store_reg_to_var_flt(iptr->dst, d);
1801 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1803 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1804 var_to_reg_flt(s2, src, REG_FTMP2);
1805 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1807 x86_64_mulsd_reg_reg(cd, s2, d);
1808 } else if (s2 == d) {
1809 x86_64_mulsd_reg_reg(cd, s1, d);
1812 x86_64_mulsd_reg_reg(cd, s2, d);
1814 store_reg_to_var_flt(iptr->dst, d);
1817 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
/* non-commutative, same aliasing strategy as FSUB */
1819 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1820 var_to_reg_flt(s2, src, REG_FTMP2);
1821 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1823 M_FLTMOVE(s2, REG_FTMP2);
1827 x86_64_divss_reg_reg(cd, s2, d);
1828 store_reg_to_var_flt(iptr->dst, d);
1831 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1833 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1834 var_to_reg_flt(s2, src, REG_FTMP2);
1835 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1837 M_FLTMOVE(s2, REG_FTMP2);
1841 x86_64_divsd_reg_reg(cd, s2, d);
1842 store_reg_to_var_flt(iptr->dst, d);
/* int <-> float conversion opcodes.  The int->float directions are single
   cvtsi2* instructions.  The float->int directions use cvtt* (truncating)
   and then check for the x86 "integer indefinite" result (0x80000000 /
   0x8000000000000000), which cvtt produces for NaN and out-of-range
   inputs; in that case an asm builtin is called to produce the saturated
   value the JVM spec requires.  The jcc distance `a` is the byte length
   of the skipped fixup sequence, computed from which moves are needed. */
1845 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1847 var_to_reg_int(s1, src, REG_ITMP1);
1848 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1849 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1850 store_reg_to_var_flt(iptr->dst, d);
1853 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1855 var_to_reg_int(s1, src, REG_ITMP1);
1856 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1857 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1858 store_reg_to_var_flt(iptr->dst, d);
1861 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1863 var_to_reg_int(s1, src, REG_ITMP1);
1864 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1865 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1866 store_reg_to_var_flt(iptr->dst, d);
1869 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1871 var_to_reg_int(s1, src, REG_ITMP1);
1872 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1873 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1874 store_reg_to_var_flt(iptr->dst, d);
1877 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1879 var_to_reg_flt(s1, src, REG_FTMP1);
1880 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1881 x86_64_cvttss2si_reg_reg(cd, s1, d);
1882 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
/* a = optional FLTMOVE (5) + mov imm64 (10) + call (3) + optional INTMOVE (3) */
1883 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1884 x86_64_jcc(cd, X86_64_CC_NE, a);
1885 M_FLTMOVE(s1, REG_FTMP1);
1886 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1887 x86_64_call_reg(cd, REG_ITMP2);
1888 M_INTMOVE(REG_RESULT, d);
1889 store_reg_to_var_int(iptr->dst, d);
1892 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1894 var_to_reg_flt(s1, src, REG_FTMP1);
1895 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1896 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1897 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1898 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1899 x86_64_jcc(cd, X86_64_CC_NE, a);
1900 M_FLTMOVE(s1, REG_FTMP1);
1901 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1902 x86_64_call_reg(cd, REG_ITMP2);
1903 M_INTMOVE(REG_RESULT, d);
1904 store_reg_to_var_int(iptr->dst, d);
1907 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1909 var_to_reg_flt(s1, src, REG_FTMP1);
1910 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1911 x86_64_cvttss2siq_reg_reg(cd, s1, d);
/* 64-bit indefinite value doesn't fit imm32: compare via REG_ITMP2 */
1912 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1913 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1914 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1915 x86_64_jcc(cd, X86_64_CC_NE, a);
1916 M_FLTMOVE(s1, REG_FTMP1);
1917 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
1918 x86_64_call_reg(cd, REG_ITMP2);
1919 M_INTMOVE(REG_RESULT, d);
1920 store_reg_to_var_int(iptr->dst, d);
1923 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1925 var_to_reg_flt(s1, src, REG_FTMP1);
1926 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1927 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1928 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1929 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1930 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1931 x86_64_jcc(cd, X86_64_CC_NE, a);
1932 M_FLTMOVE(s1, REG_FTMP1);
1933 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
1934 x86_64_call_reg(cd, REG_ITMP2);
1935 M_INTMOVE(REG_RESULT, d);
1936 store_reg_to_var_int(iptr->dst, d);
1939 case ICMD_F2D: /* ..., value ==> ..., (double) value */
/* float<->double widening/narrowing are single SSE conversions */
1941 var_to_reg_flt(s1, src, REG_FTMP1);
1942 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1943 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1944 store_reg_to_var_flt(iptr->dst, d);
1947 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1949 var_to_reg_flt(s1, src, REG_FTMP1);
1950 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1951 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1952 store_reg_to_var_flt(iptr->dst, d);
/* Floating-point compare opcodes.  All four start with result 0 (equal),
   then use ucomiss/ucomisd + cmovcc to select +1 (val2 below val1) or -1
   (val2 above val1).  The PF (parity) cmov handles the unordered (NaN)
   result: FCMPL/DCMPL treat NaN as greater (-1), FCMPG/DCMPG as less (+1),
   as the JVM spec requires. */
1955 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1956 /* == => 0, < => 1, > => -1 */
1958 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1959 var_to_reg_flt(s2, src, REG_FTMP2);
1960 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1961 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1962 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1963 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1964 x86_64_ucomiss_reg_reg(cd, s1, s2);
1965 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1966 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1967 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1968 store_reg_to_var_int(iptr->dst, d);
1971 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1972 /* == => 0, < => 1, > => -1 */
1974 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1975 var_to_reg_flt(s2, src, REG_FTMP2);
1976 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1977 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1978 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1979 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1980 x86_64_ucomiss_reg_reg(cd, s1, s2);
1981 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1982 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1983 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1984 store_reg_to_var_int(iptr->dst, d);
1987 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1988 /* == => 0, < => 1, > => -1 */
1990 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1991 var_to_reg_flt(s2, src, REG_FTMP2);
1992 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1993 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1994 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1995 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1996 x86_64_ucomisd_reg_reg(cd, s1, s2);
1997 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1998 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1999 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
2000 store_reg_to_var_int(iptr->dst, d);
2003 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
2004 /* == => 0, < => 1, > => -1 */
2006 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2007 var_to_reg_flt(s2, src, REG_FTMP2);
2008 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2009 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
2010 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
2011 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
2012 x86_64_ucomisd_reg_reg(cd, s1, s2);
2013 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
2014 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
2015 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
2016 store_reg_to_var_int(iptr->dst, d);
2020 /* memory operations **************************************************/
2022 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
/* reads the 32-bit size field from the array header */
2024 var_to_reg_int(s1, src, REG_ITMP1);
2025 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2026 gen_nullptr_check(s1);
2027 x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
2028 store_reg_to_var_int(iptr->dst, d);
/* Array load opcodes: s1 = arrayref, s2 = index, d = destination.
   iptr->op1 == 0 means the verifier could not prove the reference
   non-null, so a null-pointer check (and, on the elided lines, a bounds
   check) is emitted.  The scale operand of the memindex addressing mode
   is log2 of the element size: 3 = 8-byte, 2 = 4-byte, 1 = 2-byte,
   0 = 1-byte elements. */
2031 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2033 var_to_reg_int(s1, src->prev, REG_ITMP1);
2034 var_to_reg_int(s2, src, REG_ITMP2);
2035 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2036 if (iptr->op1 == 0) {
2037 gen_nullptr_check(s1);
2040 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2041 store_reg_to_var_int(iptr->dst, d);
2044 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2046 var_to_reg_int(s1, src->prev, REG_ITMP1);
2047 var_to_reg_int(s2, src, REG_ITMP2);
2048 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2049 if (iptr->op1 == 0) {
2050 gen_nullptr_check(s1);
2053 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2054 store_reg_to_var_int(iptr->dst, d);
2057 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2059 var_to_reg_int(s1, src->prev, REG_ITMP1);
2060 var_to_reg_int(s2, src, REG_ITMP2);
2061 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2062 if (iptr->op1 == 0) {
2063 gen_nullptr_check(s1);
2066 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2067 store_reg_to_var_int(iptr->dst, d);
2070 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2072 var_to_reg_int(s1, src->prev, REG_ITMP1);
2073 var_to_reg_int(s2, src, REG_ITMP2);
2074 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2075 if (iptr->op1 == 0) {
2076 gen_nullptr_check(s1);
2079 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2080 store_reg_to_var_flt(iptr->dst, d);
2083 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2085 var_to_reg_int(s1, src->prev, REG_ITMP1);
2086 var_to_reg_int(s2, src, REG_ITMP2);
2087 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2088 if (iptr->op1 == 0) {
2089 gen_nullptr_check(s1);
2092 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2093 store_reg_to_var_flt(iptr->dst, d);
2096 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
/* char is unsigned 16-bit: zero-extend */
2098 var_to_reg_int(s1, src->prev, REG_ITMP1);
2099 var_to_reg_int(s2, src, REG_ITMP2);
2100 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2101 if (iptr->op1 == 0) {
2102 gen_nullptr_check(s1);
2105 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2106 store_reg_to_var_int(iptr->dst, d);
2109 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
/* short is signed 16-bit: sign-extend */
2111 var_to_reg_int(s1, src->prev, REG_ITMP1);
2112 var_to_reg_int(s2, src, REG_ITMP2);
2113 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2114 if (iptr->op1 == 0) {
2115 gen_nullptr_check(s1);
2118 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2119 store_reg_to_var_int(iptr->dst, d);
2122 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
/* byte is signed 8-bit: sign-extend */
2124 var_to_reg_int(s1, src->prev, REG_ITMP1);
2125 var_to_reg_int(s2, src, REG_ITMP2);
2126 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2127 if (iptr->op1 == 0) {
2128 gen_nullptr_check(s1);
2131 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2132 store_reg_to_var_int(iptr->dst, d);
/* Array store opcodes: s1 = arrayref, s2 = index, s3 = value.
   Same null-check convention and memindex scale factors as the loads. */
2136 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2138 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2139 var_to_reg_int(s2, src->prev, REG_ITMP2);
2140 if (iptr->op1 == 0) {
2141 gen_nullptr_check(s1);
2144 var_to_reg_int(s3, src, REG_ITMP3);
2145 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2148 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2150 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2151 var_to_reg_int(s2, src->prev, REG_ITMP2);
2152 if (iptr->op1 == 0) {
2153 gen_nullptr_check(s1);
2156 var_to_reg_int(s3, src, REG_ITMP3);
2157 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2160 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2162 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2163 var_to_reg_int(s2, src->prev, REG_ITMP2);
2164 if (iptr->op1 == 0) {
2165 gen_nullptr_check(s1);
2168 var_to_reg_int(s3, src, REG_ITMP3);
2169 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2172 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2174 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2175 var_to_reg_int(s2, src->prev, REG_ITMP2);
2176 if (iptr->op1 == 0) {
2177 gen_nullptr_check(s1);
2180 var_to_reg_flt(s3, src, REG_FTMP3);
2181 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2184 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2186 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2187 var_to_reg_int(s2, src->prev, REG_ITMP2);
2188 if (iptr->op1 == 0) {
2189 gen_nullptr_check(s1);
2192 var_to_reg_flt(s3, src, REG_FTMP3);
2193 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2196 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2198 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2199 var_to_reg_int(s2, src->prev, REG_ITMP2);
2200 if (iptr->op1 == 0) {
2201 gen_nullptr_check(s1);
2204 var_to_reg_int(s3, src, REG_ITMP3);
2205 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2208 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2210 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2211 var_to_reg_int(s2, src->prev, REG_ITMP2);
2212 if (iptr->op1 == 0) {
2213 gen_nullptr_check(s1);
2216 var_to_reg_int(s3, src, REG_ITMP3);
2217 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2220 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2222 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2223 var_to_reg_int(s2, src->prev, REG_ITMP2);
2224 if (iptr->op1 == 0) {
2225 gen_nullptr_check(s1);
2228 var_to_reg_int(s3, src, REG_ITMP3);
2229 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
/* Store-constant variants: the value comes from iptr->val instead of the
   stack, saving a register. */
2232 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2234 var_to_reg_int(s1, src->prev, REG_ITMP1);
2235 var_to_reg_int(s2, src, REG_ITMP2);
2236 if (iptr->op1 == 0) {
2237 gen_nullptr_check(s1);
2240 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2243 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2245 var_to_reg_int(s1, src->prev, REG_ITMP1);
2246 var_to_reg_int(s2, src, REG_ITMP2);
2247 if (iptr->op1 == 0) {
2248 gen_nullptr_check(s1);
/* mov imm-to-mem only takes a sign-extended imm32; a full 64-bit constant
   is written as two 32-bit halves */
2252 if (IS_IMM32(iptr->val.l)) {
2253 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2256 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2257 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2261 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
/* the only constant reference is null, so always store 0 */
2263 var_to_reg_int(s1, src->prev, REG_ITMP1);
2264 var_to_reg_int(s2, src, REG_ITMP2);
2265 if (iptr->op1 == 0) {
2266 gen_nullptr_check(s1);
2269 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2272 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2274 var_to_reg_int(s1, src->prev, REG_ITMP1);
2275 var_to_reg_int(s2, src, REG_ITMP2);
2276 if (iptr->op1 == 0) {
2277 gen_nullptr_check(s1);
2280 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2283 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2285 var_to_reg_int(s1, src->prev, REG_ITMP1);
2286 var_to_reg_int(s2, src, REG_ITMP2);
2287 if (iptr->op1 == 0) {
2288 gen_nullptr_check(s1);
2291 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2294 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2296 var_to_reg_int(s1, src->prev, REG_ITMP1);
2297 var_to_reg_int(s2, src, REG_ITMP2);
2298 if (iptr->op1 == 0) {
2299 gen_nullptr_check(s1);
2302 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2306 case ICMD_GETSTATIC: /* ... ==> ..., value */
2307 /* op1 = type, val.a = field address */
2310 codegen_addpatchref(cd, cd->mcodeptr,
2311 PATCHER_get_putstatic,
2312 (unresolved_field *) iptr->target);
2316 fieldinfo *fi = iptr->val.a;
2318 if (!fi->class->initialized) {
2319 codegen_addpatchref(cd, cd->mcodeptr,
2320 PATCHER_clinit, fi->class);
2323 a = (ptrint) &(fi->value);
2326 /* This approach is much faster than moving the field address */
2327 /* inline into a register. */
2328 a = dseg_addaddress(cd, a);
2329 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP2);
2330 switch (iptr->op1) {
2332 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2333 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2334 store_reg_to_var_int(iptr->dst, d);
2338 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2339 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2340 store_reg_to_var_int(iptr->dst, d);
2343 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2344 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2345 store_reg_to_var_flt(iptr->dst, d);
2348 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2349 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2350 store_reg_to_var_flt(iptr->dst, d);
2355 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2356 /* op1 = type, val.a = field address */
2359 codegen_addpatchref(cd, cd->mcodeptr,
2360 PATCHER_get_putstatic,
2361 (unresolved_field *) iptr->target);
2365 fieldinfo *fi = iptr->val.a;
2367 if (!fi->class->initialized) {
2368 codegen_addpatchref(cd, cd->mcodeptr,
2369 PATCHER_clinit, fi->class);
2371 if (showdisassemble) {
2380 a = (ptrint) &(fi->value);
2383 /* This approach is much faster than moving the field address */
2384 /* inline into a register. */
2385 a = dseg_addaddress(cd, a);
2386 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP2);
2387 switch (iptr->op1) {
2389 var_to_reg_int(s2, src, REG_ITMP1);
2390 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2394 var_to_reg_int(s2, src, REG_ITMP1);
2395 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2398 var_to_reg_flt(s2, src, REG_FTMP1);
2399 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2402 var_to_reg_flt(s2, src, REG_FTMP1);
2403 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
2408 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2409 /* val = value (in current instruction) */
2410 /* op1 = type, val.a = field address (in */
2411 /* following NOP) */
2413 if (!iptr[1].val.a) {
2414 codegen_addpatchref(cd, cd->mcodeptr,
2415 PATCHER_get_putstatic,
2416 (unresolved_field *) iptr[1].target);
2420 fieldinfo *fi = iptr[1].val.a;
2422 if (!fi->class->initialized) {
2423 codegen_addpatchref(cd, cd->mcodeptr,
2424 PATCHER_clinit, fi->class);
2427 a = (ptrint) &(fi->value);
2430 /* This approach is much faster than moving the field address */
2431 /* inline into a register. */
2432 a = dseg_addaddress(cd, a);
2433 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP1);
2434 switch (iptr->op1) {
2437 x86_64_movl_imm_membase(cd, iptr->val.i, REG_ITMP1, 0);
2442 if (IS_IMM32(iptr->val.l)) {
2443 x86_64_mov_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2445 x86_64_movl_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2446 x86_64_movl_imm_membase(cd, iptr->val.l >> 32, REG_ITMP1, 4);
2452 case ICMD_GETFIELD: /* ... ==> ..., value */
2453 /* op1 = type, val.i = field offset */
2455 var_to_reg_int(s1, src, REG_ITMP1);
2456 gen_nullptr_check(s1);
2459 codegen_addpatchref(cd, cd->mcodeptr,
2460 PATCHER_get_putfield,
2461 (unresolved_field *) iptr->target);
2464 a = ((fieldinfo *) (iptr->val.a))->offset;
2466 switch (iptr->op1) {
2468 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2469 x86_64_movl_membase32_reg(cd, s1, a, d);
2470 store_reg_to_var_int(iptr->dst, d);
2474 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2475 x86_64_mov_membase32_reg(cd, s1, a, d);
2476 store_reg_to_var_int(iptr->dst, d);
2479 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2480 x86_64_movss_membase32_reg(cd, s1, a, d);
2481 store_reg_to_var_flt(iptr->dst, d);
2484 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2485 x86_64_movsd_membase32_reg(cd, s1, a, d);
2486 store_reg_to_var_flt(iptr->dst, d);
2491 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2492 /* op1 = type, val.i = field offset */
2494 var_to_reg_int(s1, src->prev, REG_ITMP1);
2495 gen_nullptr_check(s1);
2496 if (IS_INT_LNG_TYPE(iptr->op1)) {
2497 var_to_reg_int(s2, src, REG_ITMP2);
2499 var_to_reg_flt(s2, src, REG_FTMP2);
2503 codegen_addpatchref(cd, cd->mcodeptr,
2504 PATCHER_get_putfield,
2505 (unresolved_field *) iptr->target);
2508 a = ((fieldinfo *) (iptr->val.a))->offset;
2510 switch (iptr->op1) {
2512 x86_64_movl_reg_membase32(cd, s2, s1, a);
2516 x86_64_mov_reg_membase32(cd, s2, s1, a);
2519 x86_64_movss_reg_membase32(cd, s2, s1, a);
2522 x86_64_movsd_reg_membase32(cd, s2, s1, a);
2527 case ICMD_PUTFIELDCONST: /* ..., objectref, value ==> ... */
2528 /* val = value (in current instruction) */
2529 /* op1 = type, val.a = field address (in */
2530 /* following NOP) */
2532 var_to_reg_int(s1, src, REG_ITMP1);
2533 gen_nullptr_check(s1);
2535 if (!iptr[1].val.a) {
2536 codegen_addpatchref(cd, cd->mcodeptr,
2537 PATCHER_get_putfield,
2538 (unresolved_field *) iptr[1].target);
2541 a = ((fieldinfo *) (iptr[1].val.a))->offset;
2543 switch (iptr->op1) {
2546 x86_64_movl_imm_membase32(cd, iptr->val.i, s1, a);
2551 if (IS_IMM32(iptr->val.l)) {
2552 x86_64_mov_imm_membase32(cd, iptr->val.l, s1, a);
2554 x86_64_movl_imm_membase32(cd, iptr->val.l, s1, a);
2555 x86_64_movl_imm_membase32(cd, iptr->val.l >> 32, s1, a + 4);
2562 /* branch operations **************************************************/
2564 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2566 var_to_reg_int(s1, src, REG_ITMP1);
2567 M_INTMOVE(s1, REG_ITMP1_XPTR);
2569 x86_64_call_imm(cd, 0); /* passing exception pointer */
2570 x86_64_pop_reg(cd, REG_ITMP2_XPC);
2572 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
2573 x86_64_jmp_reg(cd, REG_ITMP3);
2576 case ICMD_GOTO: /* ... ==> ... */
2577 /* op1 = target JavaVM pc */
2579 x86_64_jmp_imm(cd, 0);
2580 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2583 case ICMD_JSR: /* ... ==> ... */
2584 /* op1 = target JavaVM pc */
2586 x86_64_call_imm(cd, 0);
2587 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2590 case ICMD_RET: /* ... ==> ... */
2591 /* op1 = local variable */
2593 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2594 var_to_reg_int(s1, var, REG_ITMP1);
2595 x86_64_jmp_reg(cd, s1);
2598 case ICMD_IFNULL: /* ..., value ==> ... */
2599 /* op1 = target JavaVM pc */
2601 if (src->flags & INMEMORY) {
2602 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2605 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2607 x86_64_jcc(cd, X86_64_CC_E, 0);
2608 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2611 case ICMD_IFNONNULL: /* ..., value ==> ... */
2612 /* op1 = target JavaVM pc */
2614 if (src->flags & INMEMORY) {
2615 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2618 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2620 x86_64_jcc(cd, X86_64_CC_NE, 0);
2621 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2624 case ICMD_IFEQ: /* ..., value ==> ... */
2625 /* op1 = target JavaVM pc, val.i = constant */
2627 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2630 case ICMD_IFLT: /* ..., value ==> ... */
2631 /* op1 = target JavaVM pc, val.i = constant */
2633 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2636 case ICMD_IFLE: /* ..., value ==> ... */
2637 /* op1 = target JavaVM pc, val.i = constant */
2639 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2642 case ICMD_IFNE: /* ..., value ==> ... */
2643 /* op1 = target JavaVM pc, val.i = constant */
2645 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2648 case ICMD_IFGT: /* ..., value ==> ... */
2649 /* op1 = target JavaVM pc, val.i = constant */
2651 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2654 case ICMD_IFGE: /* ..., value ==> ... */
2655 /* op1 = target JavaVM pc, val.i = constant */
2657 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
2660 case ICMD_IF_LEQ: /* ..., value ==> ... */
2661 /* op1 = target JavaVM pc, val.l = constant */
2663 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2666 case ICMD_IF_LLT: /* ..., value ==> ... */
2667 /* op1 = target JavaVM pc, val.l = constant */
2669 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2672 case ICMD_IF_LLE: /* ..., value ==> ... */
2673 /* op1 = target JavaVM pc, val.l = constant */
2675 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2678 case ICMD_IF_LNE: /* ..., value ==> ... */
2679 /* op1 = target JavaVM pc, val.l = constant */
2681 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2684 case ICMD_IF_LGT: /* ..., value ==> ... */
2685 /* op1 = target JavaVM pc, val.l = constant */
2687 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2690 case ICMD_IF_LGE: /* ..., value ==> ... */
2691 /* op1 = target JavaVM pc, val.l = constant */
2693 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
2696 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2697 /* op1 = target JavaVM pc */
2699 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2702 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2703 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2705 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2708 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2709 /* op1 = target JavaVM pc */
2711 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2714 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2715 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2717 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2720 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2721 /* op1 = target JavaVM pc */
2723 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2726 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2727 /* op1 = target JavaVM pc */
2729 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2732 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2733 /* op1 = target JavaVM pc */
2735 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2738 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2739 /* op1 = target JavaVM pc */
2741 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2744 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2745 /* op1 = target JavaVM pc */
2747 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2750 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2751 /* op1 = target JavaVM pc */
2753 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2756 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2757 /* op1 = target JavaVM pc */
2759 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2762 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2763 /* op1 = target JavaVM pc */
2765 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2768 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
2770 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2773 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2774 /* val.i = constant */
2776 var_to_reg_int(s1, src, REG_ITMP1);
2777 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2778 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2780 M_INTMOVE(s1, REG_ITMP1);
2783 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2785 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2786 x86_64_testl_reg_reg(cd, s1, s1);
2787 x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2788 store_reg_to_var_int(iptr->dst, d);
2791 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2792 /* val.i = constant */
2794 var_to_reg_int(s1, src, REG_ITMP1);
2795 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2796 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2798 M_INTMOVE(s1, REG_ITMP1);
2801 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2803 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2804 x86_64_testl_reg_reg(cd, s1, s1);
2805 x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2806 store_reg_to_var_int(iptr->dst, d);
2809 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2810 /* val.i = constant */
2812 var_to_reg_int(s1, src, REG_ITMP1);
2813 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2814 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2816 M_INTMOVE(s1, REG_ITMP1);
2819 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2821 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2822 x86_64_testl_reg_reg(cd, s1, s1);
2823 x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2824 store_reg_to_var_int(iptr->dst, d);
2827 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2828 /* val.i = constant */
2830 var_to_reg_int(s1, src, REG_ITMP1);
2831 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2832 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2834 M_INTMOVE(s1, REG_ITMP1);
2837 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2839 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2840 x86_64_testl_reg_reg(cd, s1, s1);
2841 x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2842 store_reg_to_var_int(iptr->dst, d);
2845 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2846 /* val.i = constant */
2848 var_to_reg_int(s1, src, REG_ITMP1);
2849 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2850 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2852 M_INTMOVE(s1, REG_ITMP1);
2855 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2857 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2858 x86_64_testl_reg_reg(cd, s1, s1);
2859 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2860 store_reg_to_var_int(iptr->dst, d);
2863 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2864 /* val.i = constant */
2866 var_to_reg_int(s1, src, REG_ITMP1);
2867 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2868 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2870 M_INTMOVE(s1, REG_ITMP1);
2873 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2875 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2876 x86_64_testl_reg_reg(cd, s1, s1);
2877 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2878 store_reg_to_var_int(iptr->dst, d);
2882 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2886 var_to_reg_int(s1, src, REG_RESULT);
2887 M_INTMOVE(s1, REG_RESULT);
2889 goto nowperformreturn;
2891 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2894 var_to_reg_flt(s1, src, REG_FRESULT);
2895 M_FLTMOVE(s1, REG_FRESULT);
2897 goto nowperformreturn;
2899 case ICMD_RETURN: /* ... ==> ... */
2905 p = parentargs_base;
2907 /* call trace function */
2909 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2911 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2912 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2914 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
2915 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
2916 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2917 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2919 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
2920 x86_64_call_reg(cd, REG_ITMP1);
2922 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2923 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2925 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2928 #if defined(USE_THREADS)
2929 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2930 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2932 /* we need to save the proper return value */
2933 switch (iptr->opc) {
2937 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, rd->maxmemuse * 8);
2941 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, rd->maxmemuse * 8);
2945 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2946 x86_64_call_reg(cd, REG_ITMP1);
2948 /* and now restore the proper return value */
2949 switch (iptr->opc) {
2953 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_RESULT);
2957 x86_64_movq_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_FRESULT);
2963 /* restore saved registers */
2964 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
2965 p--; x86_64_mov_membase_reg(cd, REG_SP, p * 8, rd->savintregs[i]);
2967 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
2968 p--; x86_64_movq_membase_reg(cd, REG_SP, p * 8, rd->savfltregs[i]);
2971 /* deallocate stack */
2972 if (parentargs_base) {
2973 x86_64_alu_imm_reg(cd, X86_64_ADD, parentargs_base * 8, REG_SP);
2981 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2986 tptr = (void **) iptr->target;
2988 s4ptr = iptr->val.a;
2989 l = s4ptr[1]; /* low */
2990 i = s4ptr[2]; /* high */
2992 var_to_reg_int(s1, src, REG_ITMP1);
2993 M_INTMOVE(s1, REG_ITMP1);
2995 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
3000 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
3001 x86_64_jcc(cd, X86_64_CC_A, 0);
3003 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
3004 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3006 /* build jump table top down and use address of lowest entry */
3008 /* s4ptr += 3 + i; */
3012 /* dseg_addtarget(cd, BlockPtrOfPC(*--s4ptr)); */
3013 dseg_addtarget(cd, (basicblock *) tptr[0]);
3017 /* length of dataseg after last dseg_addtarget is used by load */
3019 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
3020 dseg_adddata(cd, cd->mcodeptr);
3021 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
3022 x86_64_jmp_reg(cd, REG_ITMP1);
3027 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
3029 s4 i, l, val, *s4ptr;
3032 tptr = (void **) iptr->target;
3034 s4ptr = iptr->val.a;
3035 l = s4ptr[0]; /* default */
3036 i = s4ptr[1]; /* count */
3038 MCODECHECK((i<<2)+8);
3039 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
3045 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
3046 x86_64_jcc(cd, X86_64_CC_E, 0);
3047 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[1]), cd->mcodeptr); */
3048 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3051 x86_64_jmp_imm(cd, 0);
3052 /* codegen_addreference(cd, BlockPtrOfPC(l), cd->mcodeptr); */
3054 tptr = (void **) iptr->target;
3055 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3060 case ICMD_BUILTIN3: /* ..., arg1, arg2, arg3 ==> ... */
3061 /* op1 = return type, val.a = function pointer*/
3065 case ICMD_BUILTIN2: /* ..., arg1, arg2 ==> ... */
3066 /* op1 = return type, val.a = function pointer*/
3070 case ICMD_BUILTIN1: /* ..., arg1 ==> ... */
3071 /* op1 = return type, val.a = function pointer*/
3075 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3076 /* op1 = arg count, val.a = method pointer */
3078 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3079 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
3080 case ICMD_INVOKEINTERFACE:
3090 MCODECHECK((s3 << 1) + 64);
3092 /* copy arguments to registers or stack location ******************/
3094 /* count integer and float arguments */
3099 for (s2 = s3, tmpsrc = src; --s2 >= 0; tmpsrc = tmpsrc->prev) {
3100 IS_INT_LNG_TYPE(tmpsrc->type) ? iarg++ : farg++;
3103 /* calculate amount of arguments to be on stack */
3105 s2 = (iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0 +
3106 (farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0;
3108 for (; --s3 >= 0; src = src->prev) {
3109 /* decrement the current argument type */
3110 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
3112 if (src->varkind == ARGVAR) {
3113 if (IS_INT_LNG_TYPE(src->type)) {
3114 if (iarg >= INT_ARG_CNT) {
3118 if (farg >= FLT_ARG_CNT) {
3125 if (IS_INT_LNG_TYPE(src->type)) {
3126 if (iarg < INT_ARG_CNT) {
3127 s1 = rd->argintregs[iarg];
3128 var_to_reg_int(d, src, s1);
3132 var_to_reg_int(d, src, REG_ITMP1);
3134 x86_64_mov_reg_membase(cd, d, REG_SP, s2 * 8);
3138 if (farg < FLT_ARG_CNT) {
3139 s1 = rd->argfltregs[farg];
3140 var_to_reg_flt(d, src, s1);
3144 var_to_reg_flt(d, src, REG_FTMP1);
3146 x86_64_movq_reg_membase(cd, d, REG_SP, s2 * 8);
3152 switch (iptr->opc) {
3159 x86_64_mov_imm_reg(cd, a, REG_ITMP1);
3160 x86_64_call_reg(cd, REG_ITMP1);
3163 case ICMD_INVOKESPECIAL:
3164 /* first argument contains pointer */
3165 gen_nullptr_check(rd->argintregs[0]);
3167 /* access memory for hardware nullptr */
3168 x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2);
3172 case ICMD_INVOKESTATIC:
3174 unresolved_method *um = iptr->target;
3176 codegen_addpatchref(cd, cd->mcodeptr,
3177 PATCHER_invokestatic_special, um);
3180 d = um->methodref->parseddesc.md->returntype.type;
3183 a = (ptrint) lm->stubroutine;
3184 d = lm->parseddesc->returntype.type;
3187 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3188 x86_64_call_reg(cd, REG_ITMP2);
3191 case ICMD_INVOKEVIRTUAL:
3192 gen_nullptr_check(rd->argintregs[0]);
3195 unresolved_method *um = iptr->target;
3197 codegen_addpatchref(cd, cd->mcodeptr,
3198 PATCHER_invokevirtual, um);
3201 d = um->methodref->parseddesc.md->returntype.type;
3204 s1 = OFFSET(vftbl_t, table[0]) +
3205 sizeof(methodptr) * lm->vftblindex;
3206 d = lm->parseddesc->returntype.type;
3209 x86_64_mov_membase_reg(cd, rd->argintregs[0],
3210 OFFSET(java_objectheader, vftbl),
3212 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP1);
3213 x86_64_call_reg(cd, REG_ITMP1);
3216 case ICMD_INVOKEINTERFACE:
3217 gen_nullptr_check(rd->argintregs[0]);
3220 unresolved_method *um = iptr->target;
3222 codegen_addpatchref(cd, cd->mcodeptr,
3223 PATCHER_invokeinterface, um);
3226 d = um->methodref->parseddesc.md->returntype.type;
3229 classinfo * ci = lm->class;
3231 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3232 sizeof(methodptr) * ci->index;
3234 s2 = sizeof(methodptr) * (lm - ci->methods);
3236 d = lm->parseddesc->returntype.type;
3239 x86_64_mov_membase_reg(cd, rd->argintregs[0],
3240 OFFSET(java_objectheader, vftbl),
3242 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP2);
3243 x86_64_mov_membase32_reg(cd, REG_ITMP2, s2, REG_ITMP1);
3244 x86_64_call_reg(cd, REG_ITMP1);
3248 /* d contains return type */
3250 if (d != TYPE_VOID) {
3251 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3252 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3253 M_INTMOVE(REG_RESULT, s1);
3254 store_reg_to_var_int(iptr->dst, s1);
3257 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3258 M_FLTMOVE(REG_FRESULT, s1);
3259 store_reg_to_var_flt(iptr->dst, s1);
3266 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3268 /* op1: 0 == array, 1 == class */
3269 /* val.a: (classinfo *) superclass */
3271 /* superclass is an interface:
3273 * OK if ((sub == NULL) ||
3274 * (sub->vftbl->interfacetablelength > super->index) &&
3275 * (sub->vftbl->interfacetable[-super->index] != NULL));
3277 * superclass is a class:
3279 * OK if ((sub == NULL) || (0
3280 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3281 * super->vftbl->diffval));
3286 vftbl_t *supervftbl;
3289 super = (classinfo *) iptr->val.a;
3296 superindex = super->index;
3297 supervftbl = super->vftbl;
3300 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3301 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3303 var_to_reg_int(s1, src, REG_ITMP1);
3305 /* calculate interface checkcast code size */
3307 s2 = 3; /* mov_membase_reg */
3308 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3310 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub imm32 */ +
3311 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3312 3 /* test */ + 6 /* jcc */;
3314 /* calculate class checkcast code size */
3316 s3 = 3; /* mov_membase_reg */
3317 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3318 s3 += 10 /* mov_imm_reg */ + 3 + 4 /* movl_membase32_reg */;
3321 if (s1 != REG_ITMP1) {
3322 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3323 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3324 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3325 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3331 s3 += 3 + 4 /* movl_membase32_reg */ + 3 /* sub */ +
3332 10 /* mov_imm_reg */ + 3 /* movl_membase_reg */;
3333 CALCOFFSETBYTES(s3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3336 s3 += 3 /* cmp */ + 6 /* jcc */;
3338 /* if class is not resolved, check which code to call */
3341 x86_64_test_reg_reg(cd, s1, s1);
3342 x86_64_jcc(cd, X86_64_CC_Z, 6 + 7 + 6 + s2 + 5 + s3);
3344 codegen_addpatchref(cd, cd->mcodeptr,
3345 PATCHER_checkcast_instanceof_flags,
3346 (constant_classref *) iptr->target);
3348 x86_64_movl_imm_reg(cd, 0, REG_ITMP2); /* super->flags */
3349 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP2);
3350 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3353 /* interface checkcast code */
3355 if (!super || (super->flags & ACC_INTERFACE)) {
3357 x86_64_test_reg_reg(cd, s1, s1);
3358 x86_64_jcc(cd, X86_64_CC_Z, s2);
3361 x86_64_mov_membase_reg(cd, s1,
3362 OFFSET(java_objectheader, vftbl),
3366 codegen_addpatchref(cd, cd->mcodeptr,
3367 PATCHER_checkcast_instanceof_interface,
3368 (constant_classref *) iptr->target);
3370 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3371 OFFSET(vftbl_t, interfacetablelength),
3373 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3374 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3375 x86_64_jcc(cd, X86_64_CC_LE, 0);
3376 codegen_addxcastrefs(cd, cd->mcodeptr);
3377 x86_64_mov_membase32_reg(cd, REG_ITMP2,
3378 OFFSET(vftbl_t, interfacetable[0]) -
3379 superindex * sizeof(methodptr*),
3381 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3382 x86_64_jcc(cd, X86_64_CC_E, 0);
3383 codegen_addxcastrefs(cd, cd->mcodeptr);
3386 x86_64_jmp_imm(cd, s3);
3389 /* class checkcast code */
3391 if (!super || !(super->flags & ACC_INTERFACE)) {
3393 x86_64_test_reg_reg(cd, s1, s1);
3394 x86_64_jcc(cd, X86_64_CC_Z, s3);
3397 x86_64_mov_membase_reg(cd, s1,
3398 OFFSET(java_objectheader, vftbl),
3402 codegen_addpatchref(cd, cd->mcodeptr,
3403 PATCHER_checkcast_class,
3404 (constant_classref *) iptr->target);
3406 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP3);
3407 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3408 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3410 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3411 OFFSET(vftbl_t, baseval),
3413 /* if (s1 != REG_ITMP1) { */
3414 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3415 /* OFFSET(vftbl_t, baseval), */
3417 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3418 /* OFFSET(vftbl_t, diffval), */
3420 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3421 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3423 /* x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP1, REG_ITMP2); */
3426 x86_64_movl_membase32_reg(cd, REG_ITMP3,
3427 OFFSET(vftbl_t, baseval),
3429 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP2);
3430 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP3);
3431 x86_64_movl_membase_reg(cd, REG_ITMP3,
3432 OFFSET(vftbl_t, diffval),
3435 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3436 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3438 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP2);
3439 x86_64_jcc(cd, X86_64_CC_A, 0); /* (u) REG_ITMP2 > (u) REG_ITMP3 -> jump */
3440 codegen_addxcastrefs(cd, cd->mcodeptr);
3442 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3444 store_reg_to_var_int(iptr->dst, d);
3445 /* if (iptr->dst->flags & INMEMORY) { */
3446 /* x86_64_mov_reg_membase(cd, s1, REG_SP, iptr->dst->regoff * 8); */
3448 /* M_INTMOVE(s1, iptr->dst->regoff); */
3453 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3455 /* op1: 0 == array, 1 == class */
3456 /* val.a: (classinfo *) superclass */
3458 /* superclass is an interface:
3460 * return (sub != NULL) &&
3461 * (sub->vftbl->interfacetablelength > super->index) &&
3462 * (sub->vftbl->interfacetable[-super->index] != NULL);
3464 * superclass is a class:
3466 * return ((sub != NULL) && (0
3467 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3468 * super->vftbl->diffval));
3473 vftbl_t *supervftbl;
3476 super = (classinfo *) iptr->val.a;
3483 superindex = super->index;
3484 supervftbl = super->vftbl;
3487 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3488 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3491 var_to_reg_int(s1, src, REG_ITMP1);
3492 d = reg_of_var(rd, iptr->dst, REG_ITMP2);
3494 M_INTMOVE(s1, REG_ITMP1);
3498 /* calculate interface instanceof code size */
3500 s2 = 3; /* mov_membase_reg */
3501 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3502 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub_imm32 */ +
3503 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3504 3 /* test */ + 4 /* setcc */;
3506 /* calculate class instanceof code size */
3508 s3 = 3; /* mov_membase_reg */
3509 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3510 s3 += 10; /* mov_imm_reg */
3511 s3 += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3512 CALCOFFSETBYTES(s3, REG_ITMP1, OFFSET(vftbl_t, baseval));
3513 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3514 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, baseval));
3515 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3516 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3517 s3 += 3 /* sub */ + 3 /* xor */ + 3 /* cmp */ + 4 /* setcc */;
3519 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3521 /* if class is not resolved, check which code to call */
3524 x86_64_test_reg_reg(cd, s1, s1);
3525 x86_64_jcc(cd, X86_64_CC_Z, 6 + 7 + 6 + s2 + 5 + s3);
3527 codegen_addpatchref(cd, cd->mcodeptr,
3528 PATCHER_checkcast_instanceof_flags,
3529 (constant_classref *) iptr->target);
3531 x86_64_movl_imm_reg(cd, 0, REG_ITMP3); /* super->flags */
3532 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP3);
3533 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3536 /* interface instanceof code */
3538 if (!super || (super->flags & ACC_INTERFACE)) {
3540 x86_64_test_reg_reg(cd, s1, s1);
3541 x86_64_jcc(cd, X86_64_CC_Z, s2);
3544 x86_64_mov_membase_reg(cd, s1,
3545 OFFSET(java_objectheader, vftbl),
3548 codegen_addpatchref(cd, cd->mcodeptr,
3549 PATCHER_checkcast_instanceof_interface,
3550 (constant_classref *) iptr->target);
3552 x86_64_movl_membase32_reg(cd, REG_ITMP1,
3553 OFFSET(vftbl_t, interfacetablelength),
3555 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3556 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3558 a = 3 + 4 /* mov_membase32_reg */ + 3 /* test */ + 4 /* setcc */;
3560 x86_64_jcc(cd, X86_64_CC_LE, a);
3561 x86_64_mov_membase32_reg(cd, REG_ITMP1,
3562 OFFSET(vftbl_t, interfacetable[0]) -
3563 superindex * sizeof(methodptr*),
3565 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3566 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3569 x86_64_jmp_imm(cd, s3);
3572 /* class instanceof code */
3574 if (!super || !(super->flags & ACC_INTERFACE)) {
3576 x86_64_test_reg_reg(cd, s1, s1);
3577 x86_64_jcc(cd, X86_64_CC_E, s3);
3580 x86_64_mov_membase_reg(cd, s1,
3581 OFFSET(java_objectheader, vftbl),
3585 codegen_addpatchref(cd, cd->mcodeptr,
3586 PATCHER_instanceof_class,
3587 (constant_classref *) iptr->target);
3589 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP2);
3590 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3591 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3593 x86_64_movl_membase_reg(cd, REG_ITMP1,
3594 OFFSET(vftbl_t, baseval),
3596 x86_64_movl_membase_reg(cd, REG_ITMP2,
3597 OFFSET(vftbl_t, diffval),
3599 x86_64_movl_membase_reg(cd, REG_ITMP2,
3600 OFFSET(vftbl_t, baseval),
3602 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3603 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3605 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
3606 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d); /* may be REG_ITMP2 */
3607 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP1);
3608 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3610 store_reg_to_var_int(iptr->dst, d);
3614 case ICMD_CHECKASIZE: /* ..., size ==> ..., size */
3616 if (src->flags & INMEMORY) {
3617 x86_64_alul_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
3620 x86_64_testl_reg_reg(cd, src->regoff, src->regoff);
3622 x86_64_jcc(cd, X86_64_CC_L, 0);
3623 codegen_addxcheckarefs(cd, cd->mcodeptr);
3626 case ICMD_CHECKEXCEPTION: /* ... ==> ... */
3628 x86_64_test_reg_reg(cd, REG_RESULT, REG_RESULT);
3629 x86_64_jcc(cd, X86_64_CC_E, 0);
3630 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3633 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3634 /* op1 = dimension, val.a = array descriptor */
3636 /* check for negative sizes and copy sizes to stack if necessary */
3638 MCODECHECK((iptr->op1 << 1) + 64);
3640 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3641 var_to_reg_int(s2, src, REG_ITMP1);
3642 x86_64_testl_reg_reg(cd, s2, s2);
3643 x86_64_jcc(cd, X86_64_CC_L, 0);
3644 codegen_addxcheckarefs(cd, cd->mcodeptr);
3646 /* copy SAVEDVAR sizes to stack */
3648 if (src->varkind != ARGVAR) {
3649 x86_64_mov_reg_membase(cd, s2, REG_SP, s1 * 8);
3653 /* a0 = dimension count */
3654 x86_64_mov_imm_reg(cd, iptr->op1, rd->argintregs[0]);
3656 /* a1 = arrayvftbl */
3657 x86_64_mov_imm_reg(cd, (ptrint) iptr->val.a, rd->argintregs[1]);
3659 /* a2 = pointer to dimensions = stack pointer */
3660 x86_64_mov_reg_reg(cd, REG_SP, rd->argintregs[2]);
3662 /* contains the correct function to call (from parse.c) */
3663 x86_64_mov_imm_reg(cd, (ptrint) iptr->target, REG_ITMP1);
3664 x86_64_call_reg(cd, REG_ITMP1);
3666 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3667 M_INTMOVE(REG_RESULT, s1);
3668 store_reg_to_var_int(iptr->dst, s1);
3672 throw_cacao_exception_exit(string_java_lang_InternalError,
3673 "Unknown ICMD %d", iptr->opc);
3676 } /* for instruction */
3678 /* copy values to interface registers */
3680 src = bptr->outstack;
3681 len = bptr->outdepth;
3682 MCODECHECK(64 + len);
3688 if ((src->varkind != STACKVAR)) {
3690 if (IS_FLT_DBL_TYPE(s2)) {
3691 var_to_reg_flt(s1, src, REG_FTMP1);
3692 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3693 M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3696 x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3700 var_to_reg_int(s1, src, REG_ITMP1);
3701 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3702 M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3705 x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3711 } /* if (bptr -> flags >= BBREACHED) */
3712 } /* for basic block */
3716 /* generate bound check stubs */
3718 u1 *xcodeptr = NULL;
3721 for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3722 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3724 cd->mcodeptr - cd->mcodebase);
3728 /* move index register into REG_ITMP1 */
3729 x86_64_mov_reg_reg(cd, bref->reg, REG_ITMP1); /* 3 bytes */
3731 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3732 dseg_adddata(cd, cd->mcodeptr);
3733 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3734 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3736 if (xcodeptr != NULL) {
3737 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3740 xcodeptr = cd->mcodeptr;
3742 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3743 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3745 x86_64_mov_reg_reg(cd, REG_ITMP1, rd->argintregs[0]);
3746 x86_64_mov_imm_reg(cd, (ptrint) new_arrayindexoutofboundsexception, REG_ITMP3);
3747 x86_64_call_reg(cd, REG_ITMP3);
3749 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3750 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3752 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
3753 x86_64_jmp_reg(cd, REG_ITMP3);
3757 /* generate negative array size check stubs */
3761 for (bref = cd->xcheckarefs; bref != NULL; bref = bref->next) {
3762 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3763 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3765 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3769 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3771 cd->mcodeptr - cd->mcodebase);
3775 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3776 dseg_adddata(cd, cd->mcodeptr);
3777 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3778 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3780 if (xcodeptr != NULL) {
3781 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3784 xcodeptr = cd->mcodeptr;
3786 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3787 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3789 x86_64_mov_imm_reg(cd, (u8) new_negativearraysizeexception, REG_ITMP3);
3790 x86_64_call_reg(cd, REG_ITMP3);
3792 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3793 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3795 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3796 x86_64_jmp_reg(cd, REG_ITMP3);
3800 /* generate cast check stubs */
3804 for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3805 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3806 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3808 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3812 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3814 cd->mcodeptr - cd->mcodebase);
3818 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3819 dseg_adddata(cd, cd->mcodeptr);
3820 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3821 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3823 if (xcodeptr != NULL) {
3824 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3827 xcodeptr = cd->mcodeptr;
3829 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3830 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3832 x86_64_mov_imm_reg(cd, (u8) new_classcastexception, REG_ITMP3);
3833 x86_64_call_reg(cd, REG_ITMP3);
3835 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3836 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3838 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3839 x86_64_jmp_reg(cd, REG_ITMP3);
3843 /* generate divide by zero check stubs */
3847 for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
3848 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3849 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3851 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3855 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3857 cd->mcodeptr - cd->mcodebase);
3861 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3862 dseg_adddata(cd, cd->mcodeptr);
3863 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3864 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3866 if (xcodeptr != NULL) {
3867 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3870 xcodeptr = cd->mcodeptr;
3872 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3873 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3875 x86_64_mov_imm_reg(cd, (u8) new_arithmeticexception, REG_ITMP3);
3876 x86_64_call_reg(cd, REG_ITMP3);
3878 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3879 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3881 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3882 x86_64_jmp_reg(cd, REG_ITMP3);
3886 /* generate exception check stubs */
3890 for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
3891 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3892 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3894 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3898 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3900 cd->mcodeptr - cd->mcodebase);
3904 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3905 dseg_adddata(cd, cd->mcodeptr);
3906 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3907 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3909 if (xcodeptr != NULL) {
3910 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3913 xcodeptr = cd->mcodeptr;
3915 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3916 x86_64_alu_imm_reg(cd, X86_64_SUB, 8, REG_SP);
3917 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0);
3918 x86_64_mov_imm_reg(cd, (u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3919 x86_64_call_reg(cd, REG_ITMP1);
3920 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
3921 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
3922 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
3923 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);
3924 x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
3926 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
3927 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP1_XPTR);
3928 x86_64_mov_imm_membase(cd, 0, REG_ITMP3, 0);
3931 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3932 x86_64_jmp_reg(cd, REG_ITMP3);
3936 /* generate null pointer check stubs */
3940 for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
3941 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3942 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3944 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3948 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3950 cd->mcodeptr - cd->mcodebase);
3954 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3955 dseg_adddata(cd, cd->mcodeptr);
3956 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3957 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3959 if (xcodeptr != NULL) {
3960 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3963 xcodeptr = cd->mcodeptr;
3965 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3966 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3968 x86_64_mov_imm_reg(cd, (ptrint) new_nullpointerexception, REG_ITMP3);
3969 x86_64_call_reg(cd, REG_ITMP3);
3971 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3972 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3974 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
3975 x86_64_jmp_reg(cd, REG_ITMP3);
3979 /* generate code patching stub call code */
3986 tmpcd = DNEW(codegendata);
3988 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
3991 /* Get machine code which is patched back in later. A */
3992 /* `call rel32' is 5 bytes long (but read 8 bytes). */
3993 xcodeptr = cd->mcodebase + pref->branchpos;
3994 mcode = *((ptrint *) xcodeptr);
3996 /* patch in `call rel32' to call the following code */
3997 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
3998 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4000 /* move machine code bytes and classinfo pointer into registers */
4001 x86_64_mov_imm_reg(cd, (ptrint) mcode, REG_ITMP3);
4002 x86_64_push_reg(cd, REG_ITMP3);
4003 x86_64_mov_imm_reg(cd, (ptrint) pref->ref, REG_ITMP3);
4004 x86_64_push_reg(cd, REG_ITMP3);
4006 x86_64_mov_imm_reg(cd, (ptrint) pref->patcher, REG_ITMP3);
4007 x86_64_push_reg(cd, REG_ITMP3);
4009 x86_64_mov_imm_reg(cd, (ptrint) asm_wrapper_patcher, REG_ITMP3);
4010 x86_64_jmp_reg(cd, REG_ITMP3);
4015 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
4019 /* function createcompilerstub *************************************************
4021 creates a stub routine which calls the compiler
4023 *******************************************************************************/
4025 #define COMPSTUBSIZE 23
4027 u1 *createcompilerstub(methodinfo *m)
/* Build a COMPSTUBSIZE-byte trampoline for method `m': the first call   */
/* lands here, loads `m' into REG_ITMP1 and jumps into the JIT           */
/* (asm_call_jit_compiler).  Freed again by removecompilerstub().        */
/* NOTE(review): the code that points cd->mcodeptr into `s' is not       */
/* visible in this chunk -- presumably done between DNEW and emission.   */
4029 u1 *s = CNEW(u1, COMPSTUBSIZE); /* memory to hold the stub */
4033 /* mark start of dump memory area */
4035 dumpsize = dump_size();
/* throwaway codegendata, allocated on the dump area and released below */
4037 cd = DNEW(codegendata);
4040 /* code for the stub */
4041 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1); /* pass method to compiler */
4042 x86_64_mov_imm_reg(cd, (u8) asm_call_jit_compiler, REG_ITMP3);/* load address */
4043 x86_64_jmp_reg(cd, REG_ITMP3); /* jump to compiler */
4045 #if defined(STATISTICS)
4047 count_cstub_len += COMPSTUBSIZE;
4050 /* release dump area */
4052 dump_release(dumpsize);
4058 /* function removecompilerstub *************************************************
4060 deletes a compilerstub from memory (simply by freeing it)
4062 *******************************************************************************/
4064 void removecompilerstub(u1 *stub)
/* Free a compiler stub previously allocated by createcompilerstub(). */
4066 CFREE(stub, COMPSTUBSIZE);
4070 /* function: createnativestub **************************************************
4072 creates a stub routine which calls a native method
4074 *******************************************************************************/
4076 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
4077 /* static java_objectheader **(*callgetexceptionptrptr)() = builtin_get_exceptionptrptr; */
4080 #define NATIVESTUBSIZE 700 /* keep this size high enough! */
4082 u1 *createnativestub(functionptr f, methodinfo *m)
/* Generate a NATIVESTUBSIZE-byte stub that wraps the native function    */
/* `f' for Java method `m': shifts the Java argument registers to make   */
/* room for the JNI `env' (and `class' for static methods) arguments,    */
/* spills overflow arguments to a 16-byte-aligned stack frame, calls     */
/* `f', and finally checks for a pending exception.  With               */
/* !STATIC_CLASSPATH the native address can additionally be resolved    */
/* and patched in lazily at run time.                                   */
4084 u1 *s; /* pointer to stub memory */
4087 t_inlining_globals *id;
4089 s4 stackframesize; /* size of stackframe if needed */
4091 s4 iargs; /* count of integer arguments */
4092 s4 fargs; /* count of float arguments */
/* patch bookkeeping for lazy native resolution (see codegen_resolve_native) */
4095 void **callAddrPatchPos=0; /* where the native address immediate lives */
4097 void **jmpInstrPatchPos=0; /* where the skip-jump displacement is patched */
4099 /* initialize variables */
4104 /* mark start of dump memory area */
4106 dumpsize = dump_size();
4108 cd = DNEW(codegendata);
4109 rd = DNEW(registerdata);
4110 id = DNEW(t_inlining_globals);
4112 /* setup registers before using it */
4114 inlining_setup(m, id);
4115 reg_setup(m, rd, id);
4117 /* set paramcount and paramtypes */
4119 method_descriptor2types(m);
4121 /* count integer and float arguments */
4123 tptr = m->paramtypes;
4124 for (i = 0; i < m->paramcount; i++) {
4125 IS_INT_LNG_TYPE(*tptr++) ? iargs++ : fargs++;
4128 s = CNEW(u1, NATIVESTUBSIZE); /* memory to hold the stub */
4130 /* set some required variables which are normally set by codegen_setup */
4133 cd->patchrefs = NULL;
4135 /* if function is static, check for initialized */
4137 if ((m->flags & ACC_STATIC) && !m->class->initialized) {
4138 codegen_addpatchref(cd, cd->mcodeptr, PATCHER_clinit, m->class);
/* reserve a stack area so all argument registers survive the call to */
/* builtin_trace_args below (restored again before the ADD)           */
4144 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4146 /* save integer and float argument registers */
4148 for (i = 0; i < INT_ARG_CNT; i++) {
4149 x86_64_mov_reg_membase(cd, rd->argintregs[i], REG_SP, (1 + i) * 8);
4152 for (i = 0; i < FLT_ARG_CNT; i++) {
4153 x86_64_movq_reg_membase(cd, rd->argfltregs[i], REG_SP, (1 + INT_ARG_CNT + i) * 8);
4156 /* show integer hex code for float arguments */
4158 for (i = 0, l = 0; i < m->paramcount && i < INT_ARG_CNT; i++) {
4159 /* if the paramtype is a float, we have to right shift all */
4160 /* following integer registers */
4162 if (IS_FLT_DBL_TYPE(m->paramtypes[i])) {
4163 for (s1 = INT_ARG_CNT - 2; s1 >= i; s1--) {
4164 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
4167 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[i]);
4172 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1);
4173 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, 0 * 8);
4174 x86_64_mov_imm_reg(cd, (u8) builtin_trace_args, REG_ITMP1);
4175 x86_64_call_reg(cd, REG_ITMP1);
4177 /* restore integer and float argument registers */
4179 for (i = 0; i < INT_ARG_CNT; i++) {
4180 x86_64_mov_membase_reg(cd, REG_SP, (1 + i) * 8, rd->argintregs[i]);
4183 for (i = 0; i < FLT_ARG_CNT; i++) {
4184 x86_64_movq_membase_reg(cd, REG_SP, (1 + INT_ARG_CNT + i) * 8, rd->argfltregs[i]);
4187 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4190 #if !defined(STATIC_CLASSPATH)
4191 /* call method to resolve native function if needed */
/* again save all argument registers around the resolver call */
4193 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4195 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 1 * 8);
4196 x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 2 * 8);
4197 x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 3 * 8);
4198 x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 4 * 8);
4199 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 5 * 8);
4200 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 6 * 8);
4202 x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 7 * 8);
4203 x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 8 * 8);
4204 x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 9 * 8);
4205 x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 10 * 8);
4206 x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 11 * 8);
4207 x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 12 * 8);
4208 x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 13 * 8);
4209 x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 14 * 8);
4211 /* needed to patch a jump over this block */
4212 x86_64_jmp_imm(cd, 0);
4213 jmpInstrPos = cd->mcodeptr - 4; /* address of the rel32 just emitted */
4215 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
4217 x86_64_mov_imm_reg(cd, 0, rd->argintregs[1]);
4218 callAddrPatchPos = cd->mcodeptr - 8; /* position where the resolved native function address gets patched in */
4220 x86_64_mov_imm_reg(cd, 0, rd->argintregs[2]);
4221 jmpInstrPatchPos = cd->mcodeptr - 8;
4223 x86_64_mov_imm_reg(cd, jmpInstrPos, rd->argintregs[3]);
4225 x86_64_mov_imm_reg(cd, (u8) codegen_resolve_native, REG_ITMP1);
4226 x86_64_call_reg(cd, REG_ITMP1);
/* write the jump displacement (distance past this resolver block) into */
/* the immediate reserved above; resolver patches it into the jmp rel32 */
/* NOTE(review): stores an offset through a void** -- intentional but   */
/* type-unclean; verify against codegen_resolve_native's expectations   */
4228 *(jmpInstrPatchPos) = cd->mcodeptr - jmpInstrPos - 1; /*=opcode jmp_imm size*/
4230 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[0]);
4231 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[1]);
4232 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[2]);
4233 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[3]);
4234 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[4]);
4235 x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, rd->argintregs[5]);
4237 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[0]);
4238 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[1]);
4239 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[2]);
4240 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[3]);
4241 x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[4]);
4242 x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[5]);
4243 x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[6]);
4244 x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, rd->argfltregs[7]);
4246 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4250 /* save argument registers on stack -- if we have to */
/* static methods need 2 free registers (env + class), others only 1 (env) */
4252 if ((((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) || iargs > (INT_ARG_CNT - 1)) ||
4253 (fargs > FLT_ARG_CNT)) {
4260 /* do we need to shift integer argument register onto stack? */
4262 if ((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) {
4263 /* do we need to shift 2 arguments? */
4264 if (iargs > (INT_ARG_CNT - 1)) {
4271 } else if (iargs > (INT_ARG_CNT - 1)) {
4275 /* calculate required stack space */
4277 stackparamcnt += (iargs > INT_ARG_CNT) ? iargs - INT_ARG_CNT : 0;
4278 stackparamcnt += (fargs > FLT_ARG_CNT) ? fargs - FLT_ARG_CNT : 0;
4280 stackframesize = stackparamcnt + paramshiftcnt;
4282 /* keep stack 16-byte aligned */
4283 if (!(stackframesize & 0x1))
4286 x86_64_alu_imm_reg(cd, X86_64_SUB, stackframesize * 8, REG_SP);
4288 /* shift integer arguments if required */
4290 if ((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) {
4291 /* do we need to shift 2 arguments? */
4292 if (iargs > (INT_ARG_CNT - 1))
4293 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 1 * 8);
4295 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 0 * 8);
4297 } else if (iargs > (INT_ARG_CNT - 1)) {
4298 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 0 * 8);
4301 /* copy stack arguments into new stack frame -- if any */
4302 for (i = 0; i < stackparamcnt; i++) {
4303 x86_64_mov_membase_reg(cd, REG_SP, (stackframesize + 1 + i) * 8, REG_ITMP1);
4304 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
4308 /* keep stack 16-byte aligned */
4309 x86_64_alu_imm_reg(cd, X86_64_SUB, 1 * 8, REG_SP);
4313 /* shift integer arguments for `env' and `class' arguments */
4315 if (m->flags & ACC_STATIC) {
4316 /* shift iargs count if less than INT_ARG_CNT, or all */
4317 for (i = (iargs < (INT_ARG_CNT - 2)) ? iargs : (INT_ARG_CNT - 2); i >= 0; i--) {
4318 x86_64_mov_reg_reg(cd, rd->argintregs[i], rd->argintregs[i + 2]);
4321 /* put class into second argument register */
4322 x86_64_mov_imm_reg(cd, (u8) m->class, rd->argintregs[1]);
4325 /* shift iargs count if less than INT_ARG_CNT, or all */
4326 for (i = (iargs < (INT_ARG_CNT - 1)) ? iargs : (INT_ARG_CNT - 1); i >= 0; i--) {
4327 x86_64_mov_reg_reg(cd, rd->argintregs[i], rd->argintregs[i + 1]);
4331 /* put env into first argument register */
4332 x86_64_mov_imm_reg(cd, (u8) &env, rd->argintregs[0]);
4334 /* do the native function call */
4335 x86_64_mov_imm_reg(cd, (u8) f, REG_ITMP1);
4336 #if !defined(STATIC_CLASSPATH)
/* tell the resolver where `f's 8-byte immediate lives inside the stub */
4338 (*callAddrPatchPos) = cd->mcodeptr - 8;
4340 x86_64_call_reg(cd, REG_ITMP1);
4342 /* remove stackframe if there is one */
4343 if (stackframesize) {
4344 x86_64_alu_imm_reg(cd, X86_64_ADD, stackframesize * 8, REG_SP);
4348 x86_64_alu_imm_reg(cd, X86_64_SUB, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
/* preserve the native return values across builtin_displaymethodstop */
4350 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
4351 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
4353 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
4354 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
4355 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
4356 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
4358 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
4359 x86_64_call_reg(cd, REG_ITMP1);
4361 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
4362 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
4364 x86_64_alu_imm_reg(cd, X86_64_ADD, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4367 /* check for exception */
4369 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* threaded build: exception pointer lives behind a per-thread accessor */
4370 x86_64_push_reg(cd, REG_RESULT);
4371 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4372 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4373 x86_64_call_reg(cd, REG_ITMP3);
4374 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
4375 x86_64_pop_reg(cd, REG_RESULT);
4377 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4378 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP3);
4380 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
4381 x86_64_jcc(cd, X86_64_CC_NE, 1);
4385 /* handle exception */
4387 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4388 x86_64_push_reg(cd, REG_ITMP3);
4389 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4390 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4391 x86_64_call_reg(cd, REG_ITMP3);
4392 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
4393 x86_64_pop_reg(cd, REG_ITMP1_XPTR);
4395 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
4396 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4397 x86_64_alu_reg_reg(cd, X86_64_XOR, REG_ITMP2, REG_ITMP2);
4398 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_ITMP3, 0); /* clear exception pointer */
4401 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC); /* get return address from stack */
/* back XPC up so it points at the call instruction itself; the `3' is */
/* the assumed call-through-register encoding size -- TODO confirm     */
4402 x86_64_alu_imm_reg(cd, X86_64_SUB, 3, REG_ITMP2_XPC); /* callq */
4404 x86_64_mov_imm_reg(cd, (u8) asm_handle_nat_exception, REG_ITMP3);
4405 x86_64_jmp_reg(cd, REG_ITMP3);
4408 /* patch in a <clinit> call if required ***********************************/
4416 tmpcd = DNEW(codegendata);
4418 /* there can only be one patch ref entry */
4419 pref = cd->patchrefs;
4422 /* Get machine code which is patched back in later. A */
4423 /* `call rel32' is 5 bytes long (but read 8 bytes). */
4424 xcodeptr = cd->mcodebase + pref->branchpos;
4425 mcode = *((ptrint *) xcodeptr);
4427 /* patch in `call rel32' to call the following code */
4428 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4429 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4431 /* move machine code bytes and classinfo pointer into registers */
4432 x86_64_mov_imm_reg(cd, (ptrint) mcode, REG_ITMP3);
4433 x86_64_push_reg(cd, REG_ITMP3);
4434 x86_64_mov_imm_reg(cd, (ptrint) pref->ref, REG_ITMP3);
4435 x86_64_push_reg(cd, REG_ITMP3);
4437 x86_64_mov_imm_reg(cd, (ptrint) pref->patcher, REG_ITMP3);
4438 x86_64_push_reg(cd, REG_ITMP3);
4440 x86_64_mov_imm_reg(cd, (ptrint) asm_wrapper_patcher, REG_ITMP3);
4441 x86_64_jmp_reg(cd, REG_ITMP3);
4445 /* Check if the stub size is big enough to hold the whole stub generated. */
4446 /* If not, this can lead into unpredictable crashes, because of heap */
4448 if ((s4) (cd->mcodeptr - s) > NATIVESTUBSIZE) {
4449 throw_cacao_exception_exit(string_java_lang_InternalError,
/* NOTE(review): message typo -- "is to small" should be "is too small" */
4450 "Native stub size %d is to small for current stub size %d",
4451 NATIVESTUBSIZE, (s4) (cd->mcodeptr - s));
4454 #if defined(STATISTICS)
4456 count_nstub_len += NATIVESTUBSIZE;
4459 /* release dump area */
4461 dump_release(dumpsize);
4467 /* function: removenativestub **************************************************
4469 removes a previously created native-stub from memory
4471 *******************************************************************************/
4073 void removenativestub(u1 *stub)
/* Free a native stub previously allocated by createnativestub(). */
4075 CFREE(stub, NATIVESTUBSIZE);
4480 * These are local overrides for various environment variables in Emacs.
4481 * Please do not remove this and leave it at the end of the file, where
4482 * Emacs will automagically detect them.
4483 * ---------------------------------------------------------------------
4486 * indent-tabs-mode: t