1 /* vm/jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996-2005 R. Grafl, A. Krall, C. Kruegel, C. Oates,
4 R. Obermaisser, M. Platter, M. Probst, S. Ring, E. Steiner,
5 C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich, J. Wenninger,
6 Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
25 Contact: cacao@complang.tuwien.ac.at
27 Authors: Andreas Krall
30 $Id: codegen.c 2272 2005-04-11 15:49:51Z twisti $
40 #include "cacao/cacao.h"
41 #include "native/native.h"
42 #include "vm/global.h"
43 #include "vm/builtin.h"
44 #include "vm/loader.h"
45 #include "vm/tables.h"
46 #include "vm/jit/asmpart.h"
47 #include "vm/jit/jit.h"
48 #include "vm/jit/reg.h"
49 #include "vm/jit/parse.h"
50 #include "vm/jit/x86_64/arch.h"
51 #include "vm/jit/x86_64/codegen.h"
52 #include "vm/jit/x86_64/emitfuncs.h"
53 #include "vm/jit/x86_64/types.h"
54 #include "vm/jit/x86_64/asmoffsets.h"
57 /* register description - array *********************************************/
59 /* #define REG_RES 0 reserved register for OS or code generator */
60 /* #define REG_RET 1 return value register */
61 /* #define REG_EXC 2 exception value register (only old jit) */
62 /* #define REG_SAV 3 (callee) saved register */
63 /* #define REG_TMP 4 scratch temporary register (caller saved) */
64 /* #define REG_ARG 5 argument register (caller saved) */
66 /* #define REG_END -1 last entry in tables */
/* Integer register usage classes, indexed by x86_64 machine register
   number (RAX..R15 in encoding order).  Entry values are the REG_*
   classes documented above; index 4 is REG_RES -- presumably RSP,
   reserved as the stack pointer (TODO confirm against arch.h).
   NOTE(review): this listing is truncated -- the REG_END terminator
   and closing brace of the initializer are not visible in this view. */
68 static int nregdescint[] = {
69 REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
70 REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
/* Floating-point register usage classes, indexed by machine register
   number -- presumably XMM0..XMM15 (TODO confirm against arch.h).
   The first eight are argument registers, matching the x86_64 SysV
   calling convention for float args.  NOTE(review): the listing is
   truncated -- the REG_END terminator and closing brace are not
   visible in this view. */
75 static int nregdescfloat[] = {
76 REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG,
77 REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
82 /* Include independent code generation stuff -- include after register */
83 /* descriptions to avoid extern definitions. */
85 #include "vm/jit/codegen.inc"
86 #include "vm/jit/reg.inc"
88 #include "vm/jit/lsra.inc"
92 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* Restart a thread that was interrupted inside a JIT critical section:
   look up the saved instruction pointer in the critical-section table
   and, if it falls inside one, rewrite the saved RIP so the thread
   re-executes from the section's restart point.
   NOTE(review): this view of the listing is truncated -- the function
   braces, the declaration of 'critical', and any guard for a NULL
   result from thread_checkcritical() are not visible here. */
93 void thread_restartcriticalsection(ucontext_t *uc)
/* query the critical-section table with the interrupted RIP */
97 critical = thread_checkcritical((void *) uc->uc_mcontext.gregs[REG_RIP]);
/* redirect the interrupted thread to the restart address */
100 uc->uc_mcontext.gregs[REG_RIP] = (u8) critical;
105 /* NullPointerException signal handler for hardware null pointer check */
/* SIGSEGV/SIGBUS handler: converts a hardware null-pointer fault into a
   Java NullPointerException.  It allocates the exception object, then
   rewrites the saved machine context so that, on return from the
   handler, execution resumes in asm_handle_exception with the exception
   pointer in RAX and the faulting PC in R10.
   NOTE(review): the listing is truncated here -- the function braces
   and the declaration/initialization of 'nsig' (and any sigemptyset of
   act.sa_mask) are not visible in this view. */
107 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
111 struct ucontext *_uc = (struct ucontext *) _p;
112 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
113 struct sigaction act;
114 java_objectheader *xptr;
116 /* Reset signal handler - necessary for SysV, does no harm for BSD */
118 act.sa_sigaction = catch_NullPointerException; /* reinstall handler */
119 act.sa_flags = SA_SIGINFO;
120 sigaction(sig, &act, NULL);
/* unblock the signal so the next fault is delivered again
   (presumably 'nsig' is emptied in lines not visible here -- verify) */
123 sigaddset(&nsig, sig);
124 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
/* build the Java exception object to throw */
126 xptr = new_nullpointerexception();
/* hand off to the exception-handling stub: exception pointer in RAX,
   faulting PC in R10, then resume at asm_handle_exception */
128 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
129 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
130 sigctx->rip = (u8) asm_handle_exception;
136 /* ArithmeticException signal handler for hardware divide by zero check */
/* SIGFPE handler: converts a hardware divide-by-zero fault into a Java
   ArithmeticException.  Mirrors catch_NullPointerException: reinstall
   the handler, unblock the signal, allocate the exception object, and
   rewrite the saved context so execution resumes in
   asm_handle_exception with the exception in RAX and the faulting PC
   in R10.
   NOTE(review): the listing is truncated here -- the function braces
   and the declaration/initialization of 'nsig' are not visible. */
138 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
142 struct ucontext *_uc = (struct ucontext *) _p;
143 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
144 struct sigaction act;
145 java_objectheader *xptr;
147 /* Reset signal handler - necessary for SysV, does no harm for BSD */
149 act.sa_sigaction = catch_ArithmeticException; /* reinstall handler */
150 act.sa_flags = SA_SIGINFO;
151 sigaction(sig, &act, NULL);
/* unblock the signal so subsequent faults are delivered again */
154 sigaddset(&nsig, sig);
155 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
/* build the Java exception object to throw */
157 xptr = new_arithmeticexception();
/* hand off to the exception-handling stub (see register comments) */
159 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
160 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
161 sigctx->rip = (u8) asm_handle_exception;
/* Install the signal handlers that translate hardware faults into Java
   exceptions: SIGSEGV and SIGBUS both map to NullPointerException,
   SIGFPE maps to ArithmeticException.  All handlers are registered
   with SA_SIGINFO so they receive the machine context.
   NOTE(review): the listing is truncated -- the function braces (and
   any conditional guards around the individual sigaction calls, e.g.
   a checknull/checksync flag) are not visible in this view. */
167 void init_exceptions(void)
169 struct sigaction act;
171 /* install signal handlers we need to convert to exceptions */
172 sigemptyset(&act.sa_mask);
/* null-pointer check via SIGSEGV (page-fault on low addresses) */
176 act.sa_sigaction = catch_NullPointerException;
177 act.sa_flags = SA_SIGINFO;
178 sigaction(SIGSEGV, &act, NULL);
/* some platforms report the same fault as SIGBUS instead */
182 act.sa_sigaction = catch_NullPointerException;
183 act.sa_flags = SA_SIGINFO;
184 sigaction(SIGBUS, &act, NULL);
/* hardware divide-by-zero check via SIGFPE */
188 act.sa_sigaction = catch_ArithmeticException;
189 act.sa_flags = SA_SIGINFO;
190 sigaction(SIGFPE, &act, NULL);
194 /* function gen_mcode **********************************************************
196 generates machine code
198 *******************************************************************************/
200 void codegen(methodinfo *m, codegendata *cd, registerdata *rd)
202 s4 len, s1, s2, s3, d;
217 /* space to save used callee saved registers */
219 savedregs_num += (rd->savintregcnt - rd->maxsavintreguse);
220 savedregs_num += (rd->savfltregcnt - rd->maxsavfltreguse);
222 parentargs_base = rd->maxmemuse + savedregs_num;
224 #if defined(USE_THREADS) /* space to save argument of monitor_enter */
226 if (checksync && (m->flags & ACC_SYNCHRONIZED))
231 /* Keep stack of non-leaf functions 16-byte aligned for calls into native */
232 /* code e.g. libc or jni (alignment problems with movaps). */
234 if (!m->isleafmethod || runverbose)
235 parentargs_base |= 0x1;
237 /* create method header */
239 (void) dseg_addaddress(cd, m); /* MethodPointer */
240 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
242 #if defined(USE_THREADS)
244 /* IsSync contains the offset relative to the stack pointer for the
245 argument of monitor_exit used in the exception handler. Since the
246 offset could be zero and give a wrong meaning of the flag it is
250 if (checksync && (m->flags & ACC_SYNCHRONIZED))
251 (void) dseg_adds4(cd, (rd->maxmemuse + 1) * 8); /* IsSync */
256 (void) dseg_adds4(cd, 0); /* IsSync */
258 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
259 (void) dseg_adds4(cd, rd->savintregcnt - rd->maxsavintreguse);/* IntSave */
260 (void) dseg_adds4(cd, rd->savfltregcnt - rd->maxsavfltreguse);/* FltSave */
261 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
263 /* create exception table */
265 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
266 dseg_addtarget(cd, ex->start);
267 dseg_addtarget(cd, ex->end);
268 dseg_addtarget(cd, ex->handler);
269 (void) dseg_addaddress(cd, ex->catchtype.cls);
272 /* initialize mcode variables */
274 cd->mcodeptr = (u1 *) cd->mcodebase;
275 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
276 MCODECHECK(128 + m->paramcount);
278 /* create stack frame (if necessary) */
280 if (parentargs_base) {
281 x86_64_alu_imm_reg(cd, X86_64_SUB, parentargs_base * 8, REG_SP);
284 /* save used callee saved registers */
287 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
288 p--; x86_64_mov_reg_membase(cd, rd->savintregs[i], REG_SP, p * 8);
290 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
291 p--; x86_64_movq_reg_membase(cd, rd->savfltregs[i], REG_SP, p * 8);
294 /* take arguments out of register or stack frame */
296 for (p = 0, l = 0, s1 = 0, s2 = 0; p < m->paramcount; p++) {
297 t = m->paramtypes[p];
298 var = &(rd->locals[l][t]);
300 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
303 if (IS_INT_LNG_TYPE(t)) {
310 if (IS_INT_LNG_TYPE(t)) { /* integer args */
311 if (s1 < INT_ARG_CNT) { /* register arguments */
312 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
313 M_INTMOVE(rd->argintregs[s1], var->regoff);
315 } else { /* reg arg -> spilled */
316 x86_64_mov_reg_membase(cd, rd->argintregs[s1], REG_SP, var->regoff * 8);
319 } else { /* stack arguments */
320 pa = s1 - INT_ARG_CNT;
321 if (s2 >= FLT_ARG_CNT) {
322 pa += s2 - FLT_ARG_CNT;
324 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
325 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff); /* + 8 for return address */
326 } else { /* stack arg -> spilled */
327 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1); /* + 8 for return address */
328 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, var->regoff * 8);
333 } else { /* floating args */
334 if (s2 < FLT_ARG_CNT) { /* register arguments */
335 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
336 M_FLTMOVE(rd->argfltregs[s2], var->regoff);
338 } else { /* reg arg -> spilled */
339 x86_64_movq_reg_membase(cd, rd->argfltregs[s2], REG_SP, var->regoff * 8);
342 } else { /* stack arguments */
343 pa = s2 - FLT_ARG_CNT;
344 if (s1 >= INT_ARG_CNT) {
345 pa += s1 - INT_ARG_CNT;
347 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
348 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff);
351 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
352 x86_64_movq_reg_membase(cd, REG_FTMP1, REG_SP, var->regoff * 8);
359 /* save monitorenter argument */
361 #if defined(USE_THREADS)
362 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
365 if (m->flags & ACC_STATIC) {
366 func_enter = (u8) builtin_staticmonitorenter;
367 x86_64_mov_imm_reg(cd, (s8) m->class, REG_ITMP1);
368 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, rd->maxmemuse * 8);
371 func_enter = (u8) builtin_monitorenter;
372 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, rd->maxmemuse * 8);
375 /* call monitorenter function */
377 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
378 x86_64_mov_imm_reg(cd, func_enter, REG_ITMP1);
379 x86_64_call_reg(cd, REG_ITMP1);
383 /* Copy argument registers to stack and call trace function with pointer */
384 /* to arguments on stack. */
387 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1 + 1) * 8, REG_SP);
389 /* save integer argument registers */
391 for (p = 0; p < INT_ARG_CNT; p++) {
392 x86_64_mov_reg_membase(cd, rd->argintregs[p], REG_SP, (1 + p) * 8);
395 /* save float argument registers */
397 for (p = 0; p < FLT_ARG_CNT; p++) {
398 x86_64_movq_reg_membase(cd, rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
401 /* show integer hex code for float arguments */
403 for (p = 0, l = 0; p < m->paramcount && p < INT_ARG_CNT; p++) {
404 t = m->paramtypes[p];
406 /* if the paramtype is a float, we have to right shift all */
407 /* following integer registers */
409 if (IS_FLT_DBL_TYPE(t)) {
410 for (s1 = INT_ARG_CNT - 2; s1 >= p; s1--) {
411 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
414 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
419 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP2);
420 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
421 x86_64_mov_imm_reg(cd, (u8) builtin_trace_args, REG_ITMP1);
422 x86_64_call_reg(cd, REG_ITMP1);
424 /* restore integer argument registers */
426 for (p = 0; p < INT_ARG_CNT; p++) {
427 x86_64_mov_membase_reg(cd, REG_SP, (1 + p) * 8, rd->argintregs[p]);
430 /* restore float argument registers */
432 for (p = 0; p < FLT_ARG_CNT; p++) {
433 x86_64_movq_membase_reg(cd, REG_SP, (1 + INT_ARG_CNT + p) * 8, rd->argfltregs[p]);
436 x86_64_alu_imm_reg(cd, X86_64_ADD, (6 + 8 + 1 + 1) * 8, REG_SP);
441 /* end of header generation */
443 /* walk through all basic blocks */
444 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
446 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
448 if (bptr->flags >= BBREACHED) {
450 /* branch resolving */
453 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
454 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
459 /* copy interface registers to their destination */
463 MCODECHECK(64 + len);
467 while (src != NULL) {
469 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
470 if (bptr->type == BBTYPE_SBR) {
471 /* d = reg_of_var(rd, src, REG_ITMP1); */
472 if (!(src->flags & INMEMORY))
476 x86_64_pop_reg(cd, d);
477 store_reg_to_var_int(src, d);
479 } else if (bptr->type == BBTYPE_EXH) {
480 /* d = reg_of_var(rd, src, REG_ITMP1); */
481 if (!(src->flags & INMEMORY))
485 M_INTMOVE(REG_ITMP1, d);
486 store_reg_to_var_int(src, d);
495 while (src != NULL) {
497 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
498 if (bptr->type == BBTYPE_SBR) {
499 d = reg_of_var(rd, src, REG_ITMP1);
500 x86_64_pop_reg(cd, d);
501 store_reg_to_var_int(src, d);
503 } else if (bptr->type == BBTYPE_EXH) {
504 d = reg_of_var(rd, src, REG_ITMP1);
505 M_INTMOVE(REG_ITMP1, d);
506 store_reg_to_var_int(src, d);
510 d = reg_of_var(rd, src, REG_ITMP1);
511 if ((src->varkind != STACKVAR)) {
513 if (IS_FLT_DBL_TYPE(s2)) {
514 s1 = rd->interfaces[len][s2].regoff;
515 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
519 x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
521 store_reg_to_var_flt(src, d);
524 s1 = rd->interfaces[len][s2].regoff;
525 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
529 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
531 store_reg_to_var_int(src, d);
540 /* walk through all instructions */
544 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
546 MCODECHECK(64); /* an instruction usually needs < 64 words */
548 case ICMD_INLINE_START: /* internal ICMDs */
549 case ICMD_INLINE_END:
552 case ICMD_NOP: /* ... ==> ... */
555 case ICMD_NULLCHECKPOP: /* ..., objectref ==> ... */
556 if (src->flags & INMEMORY) {
557 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
560 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
562 x86_64_jcc(cd, X86_64_CC_E, 0);
563 codegen_addxnullrefs(cd, cd->mcodeptr);
566 /* constant operations ************************************************/
568 case ICMD_ICONST: /* ... ==> ..., constant */
569 /* op1 = 0, val.i = constant */
571 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
572 if (iptr->val.i == 0) {
573 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
575 x86_64_movl_imm_reg(cd, iptr->val.i, d);
577 store_reg_to_var_int(iptr->dst, d);
580 case ICMD_ACONST: /* ... ==> ..., constant */
581 /* op1 = 0, val.a = constant */
583 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
584 if (iptr->val.a == 0) {
585 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
587 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, d);
589 store_reg_to_var_int(iptr->dst, d);
592 case ICMD_LCONST: /* ... ==> ..., constant */
593 /* op1 = 0, val.l = constant */
595 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
596 if (iptr->val.l == 0) {
597 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
599 x86_64_mov_imm_reg(cd, iptr->val.l, d);
601 store_reg_to_var_int(iptr->dst, d);
604 case ICMD_FCONST: /* ... ==> ..., constant */
605 /* op1 = 0, val.f = constant */
607 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
608 a = dseg_addfloat(cd, iptr->val.f);
609 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + a, d);
610 store_reg_to_var_flt(iptr->dst, d);
613 case ICMD_DCONST: /* ... ==> ..., constant */
614 /* op1 = 0, val.d = constant */
616 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
617 a = dseg_adddouble(cd, iptr->val.d);
618 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, d);
619 store_reg_to_var_flt(iptr->dst, d);
623 /* load/store operations **********************************************/
625 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
626 /* op1 = local variable */
628 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
629 if ((iptr->dst->varkind == LOCALVAR) &&
630 (iptr->dst->varnum == iptr->op1)) {
633 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
634 if (var->flags & INMEMORY) {
635 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
636 store_reg_to_var_int(iptr->dst, d);
639 if (iptr->dst->flags & INMEMORY) {
640 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
643 M_INTMOVE(var->regoff, d);
648 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
649 case ICMD_ALOAD: /* op1 = local variable */
651 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
652 if ((iptr->dst->varkind == LOCALVAR) &&
653 (iptr->dst->varnum == iptr->op1)) {
656 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
657 if (var->flags & INMEMORY) {
658 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
659 store_reg_to_var_int(iptr->dst, d);
662 if (iptr->dst->flags & INMEMORY) {
663 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
666 M_INTMOVE(var->regoff, d);
671 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
672 case ICMD_DLOAD: /* op1 = local variable */
674 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
675 if ((iptr->dst->varkind == LOCALVAR) &&
676 (iptr->dst->varnum == iptr->op1)) {
679 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
680 if (var->flags & INMEMORY) {
681 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
682 store_reg_to_var_flt(iptr->dst, d);
685 if (iptr->dst->flags & INMEMORY) {
686 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
689 M_FLTMOVE(var->regoff, d);
694 case ICMD_ISTORE: /* ..., value ==> ... */
695 case ICMD_LSTORE: /* op1 = local variable */
698 if ((src->varkind == LOCALVAR) &&
699 (src->varnum == iptr->op1)) {
702 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
703 if (var->flags & INMEMORY) {
704 var_to_reg_int(s1, src, REG_ITMP1);
705 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
708 var_to_reg_int(s1, src, var->regoff);
709 M_INTMOVE(s1, var->regoff);
713 case ICMD_FSTORE: /* ..., value ==> ... */
714 case ICMD_DSTORE: /* op1 = local variable */
716 if ((src->varkind == LOCALVAR) &&
717 (src->varnum == iptr->op1)) {
720 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
721 if (var->flags & INMEMORY) {
722 var_to_reg_flt(s1, src, REG_FTMP1);
723 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
726 var_to_reg_flt(s1, src, var->regoff);
727 M_FLTMOVE(s1, var->regoff);
732 /* pop/dup/swap operations ********************************************/
734 /* attention: double and longs are only one entry in CACAO ICMDs */
736 case ICMD_POP: /* ..., value ==> ... */
737 case ICMD_POP2: /* ..., value, value ==> ... */
740 case ICMD_DUP: /* ..., a ==> ..., a, a */
741 M_COPY(src, iptr->dst);
744 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
746 M_COPY(src, iptr->dst);
747 M_COPY(src->prev, iptr->dst->prev);
748 M_COPY(iptr->dst, iptr->dst->prev->prev);
751 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
753 M_COPY(src, iptr->dst);
754 M_COPY(src->prev, iptr->dst->prev);
755 M_COPY(src->prev->prev, iptr->dst->prev->prev);
756 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
759 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
761 M_COPY(src, iptr->dst);
762 M_COPY(src->prev, iptr->dst->prev);
765 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
767 M_COPY(src, iptr->dst);
768 M_COPY(src->prev, iptr->dst->prev);
769 M_COPY(src->prev->prev, iptr->dst->prev->prev);
770 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
771 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
774 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
776 M_COPY(src, iptr->dst);
777 M_COPY(src->prev, iptr->dst->prev);
778 M_COPY(src->prev->prev, iptr->dst->prev->prev);
779 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
780 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
781 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
784 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
786 M_COPY(src, iptr->dst->prev);
787 M_COPY(src->prev, iptr->dst);
791 /* integer operations *************************************************/
793 case ICMD_INEG: /* ..., value ==> ..., - value */
795 d = reg_of_var(rd, iptr->dst, REG_NULL);
796 if (iptr->dst->flags & INMEMORY) {
797 if (src->flags & INMEMORY) {
798 if (src->regoff == iptr->dst->regoff) {
799 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
802 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
803 x86_64_negl_reg(cd, REG_ITMP1);
804 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
808 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
809 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
813 if (src->flags & INMEMORY) {
814 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
815 x86_64_negl_reg(cd, d);
818 M_INTMOVE(src->regoff, iptr->dst->regoff);
819 x86_64_negl_reg(cd, iptr->dst->regoff);
824 case ICMD_LNEG: /* ..., value ==> ..., - value */
826 d = reg_of_var(rd, iptr->dst, REG_NULL);
827 if (iptr->dst->flags & INMEMORY) {
828 if (src->flags & INMEMORY) {
829 if (src->regoff == iptr->dst->regoff) {
830 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
833 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
834 x86_64_neg_reg(cd, REG_ITMP1);
835 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
839 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
840 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
844 if (src->flags & INMEMORY) {
845 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
846 x86_64_neg_reg(cd, iptr->dst->regoff);
849 M_INTMOVE(src->regoff, iptr->dst->regoff);
850 x86_64_neg_reg(cd, iptr->dst->regoff);
855 case ICMD_I2L: /* ..., value ==> ..., value */
857 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
858 if (src->flags & INMEMORY) {
859 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
862 x86_64_movslq_reg_reg(cd, src->regoff, d);
864 store_reg_to_var_int(iptr->dst, d);
867 case ICMD_L2I: /* ..., value ==> ..., value */
869 var_to_reg_int(s1, src, REG_ITMP1);
870 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
872 store_reg_to_var_int(iptr->dst, d);
875 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
877 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
878 if (src->flags & INMEMORY) {
879 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
882 x86_64_movsbq_reg_reg(cd, src->regoff, d);
884 store_reg_to_var_int(iptr->dst, d);
887 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
889 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
890 if (src->flags & INMEMORY) {
891 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
894 x86_64_movzwq_reg_reg(cd, src->regoff, d);
896 store_reg_to_var_int(iptr->dst, d);
899 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
901 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
902 if (src->flags & INMEMORY) {
903 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
906 x86_64_movswq_reg_reg(cd, src->regoff, d);
908 store_reg_to_var_int(iptr->dst, d);
912 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
914 d = reg_of_var(rd, iptr->dst, REG_NULL);
915 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
918 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
919 /* val.i = constant */
921 d = reg_of_var(rd, iptr->dst, REG_NULL);
922 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
925 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
927 d = reg_of_var(rd, iptr->dst, REG_NULL);
928 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
931 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
932 /* val.l = constant */
934 d = reg_of_var(rd, iptr->dst, REG_NULL);
935 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
938 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
940 d = reg_of_var(rd, iptr->dst, REG_NULL);
941 if (iptr->dst->flags & INMEMORY) {
942 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
943 if (src->prev->regoff == iptr->dst->regoff) {
944 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
945 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
948 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
949 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
950 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
953 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
954 M_INTMOVE(src->prev->regoff, REG_ITMP1);
955 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
956 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
958 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
959 if (src->prev->regoff == iptr->dst->regoff) {
960 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
963 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
964 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
965 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
969 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
970 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
974 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
975 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
976 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
978 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
979 M_INTMOVE(src->prev->regoff, d);
980 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
982 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
983 /* workaround for reg alloc */
984 if (src->regoff == iptr->dst->regoff) {
985 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
986 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
987 M_INTMOVE(REG_ITMP1, d);
990 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
991 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
995 /* workaround for reg alloc */
996 if (src->regoff == iptr->dst->regoff) {
997 M_INTMOVE(src->prev->regoff, REG_ITMP1);
998 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
999 M_INTMOVE(REG_ITMP1, d);
1002 M_INTMOVE(src->prev->regoff, d);
1003 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
1009 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
1010 /* val.i = constant */
1012 d = reg_of_var(rd, iptr->dst, REG_NULL);
1013 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
1016 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1018 d = reg_of_var(rd, iptr->dst, REG_NULL);
1019 if (iptr->dst->flags & INMEMORY) {
1020 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1021 if (src->prev->regoff == iptr->dst->regoff) {
1022 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1023 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1026 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1027 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1028 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1031 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1032 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1033 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1034 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1036 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1037 if (src->prev->regoff == iptr->dst->regoff) {
1038 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1041 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1042 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1043 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1047 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1048 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1052 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1053 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1054 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1056 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1057 M_INTMOVE(src->prev->regoff, d);
1058 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1060 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1061 /* workaround for reg alloc */
1062 if (src->regoff == iptr->dst->regoff) {
1063 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1064 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1065 M_INTMOVE(REG_ITMP1, d);
1068 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1069 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1073 /* workaround for reg alloc */
1074 if (src->regoff == iptr->dst->regoff) {
1075 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1076 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1077 M_INTMOVE(REG_ITMP1, d);
1080 M_INTMOVE(src->prev->regoff, d);
1081 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1087 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1088 /* val.l = constant */
1090 d = reg_of_var(rd, iptr->dst, REG_NULL);
1091 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1094 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1096 d = reg_of_var(rd, iptr->dst, REG_NULL);
1097 if (iptr->dst->flags & INMEMORY) {
1098 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1099 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1100 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1101 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1103 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1104 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1105 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1106 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1108 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1109 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1110 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1111 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1114 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1115 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1116 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1120 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1121 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1122 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1124 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1125 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1126 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1128 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1129 M_INTMOVE(src->regoff, iptr->dst->regoff);
1130 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1133 if (src->regoff == iptr->dst->regoff) {
1134 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1137 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1138 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1144 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1145 /* val.i = constant */
1147 d = reg_of_var(rd, iptr->dst, REG_NULL);
1148 if (iptr->dst->flags & INMEMORY) {
1149 if (src->flags & INMEMORY) {
1150 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1151 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1154 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1155 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1159 if (src->flags & INMEMORY) {
1160 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1163 if (iptr->val.i == 2) {
1164 M_INTMOVE(src->regoff, iptr->dst->regoff);
1165 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1168 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
1174 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1176 d = reg_of_var(rd, iptr->dst, REG_NULL);
1177 if (iptr->dst->flags & INMEMORY) {
1178 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1179 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1180 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1181 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1183 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1184 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1185 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1186 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1188 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1189 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1190 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1191 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1194 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1195 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1196 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1200 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1201 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1202 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1204 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1205 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1206 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1208 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1209 M_INTMOVE(src->regoff, iptr->dst->regoff);
1210 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1213 if (src->regoff == iptr->dst->regoff) {
1214 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1217 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1218 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
1224 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1225 /* val.l = constant */
1227 d = reg_of_var(rd, iptr->dst, REG_NULL);
1228 if (iptr->dst->flags & INMEMORY) {
1229 if (src->flags & INMEMORY) {
1230 if (IS_IMM32(iptr->val.l)) {
1231 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1234 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1235 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1237 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1240 if (IS_IMM32(iptr->val.l)) {
1241 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1244 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1245 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1247 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1251 if (src->flags & INMEMORY) {
1252 if (IS_IMM32(iptr->val.l)) {
1253 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1256 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1257 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1261 /* should match in many cases */
1262 if (iptr->val.l == 2) {
1263 M_INTMOVE(src->regoff, iptr->dst->regoff);
1264 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1267 if (IS_IMM32(iptr->val.l)) {
1268 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1271 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1272 M_INTMOVE(src->regoff, iptr->dst->regoff);
1273 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
1280 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1282 d = reg_of_var(rd, iptr->dst, REG_NULL);
1283 if (src->prev->flags & INMEMORY) {
1284 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1287 M_INTMOVE(src->prev->regoff, RAX);
1290 if (src->flags & INMEMORY) {
1291 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1294 M_INTMOVE(src->regoff, REG_ITMP3);
1298 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1299 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1300 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1301 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1303 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1305 x86_64_idivl_reg(cd, REG_ITMP3);
1307 if (iptr->dst->flags & INMEMORY) {
1308 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1309 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1312 M_INTMOVE(RAX, iptr->dst->regoff);
1314 if (iptr->dst->regoff != RDX) {
1315 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1320 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1322 d = reg_of_var(rd, iptr->dst, REG_NULL);
1323 if (src->prev->flags & INMEMORY) {
1324 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1327 M_INTMOVE(src->prev->regoff, RAX);
1330 if (src->flags & INMEMORY) {
1331 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1334 M_INTMOVE(src->regoff, REG_ITMP3);
1338 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1340 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1341 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1344 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1345 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1346 x86_64_jcc(cd, X86_64_CC_E, 1 + 3); /* 6 bytes */
1349 x86_64_idivl_reg(cd, REG_ITMP3);
1351 if (iptr->dst->flags & INMEMORY) {
1352 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1353 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1356 M_INTMOVE(RDX, iptr->dst->regoff);
1358 if (iptr->dst->regoff != RDX) {
1359 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1364 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1365 /* val.i = constant */
1367 var_to_reg_int(s1, src, REG_ITMP1);
1368 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1369 M_INTMOVE(s1, REG_ITMP1);
1370 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1371 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1372 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1373 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1374 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1375 store_reg_to_var_int(iptr->dst, d);
1378 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1379 /* val.i = constant */
1381 var_to_reg_int(s1, src, REG_ITMP1);
1382 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1383 M_INTMOVE(s1, REG_ITMP1);
1384 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1385 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1386 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1387 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1388 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1389 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1390 store_reg_to_var_int(iptr->dst, d);
1394 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1396 d = reg_of_var(rd, iptr->dst, REG_NULL);
1397 if (src->prev->flags & INMEMORY) {
1398 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1401 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1404 if (src->flags & INMEMORY) {
1405 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1408 M_INTMOVE(src->regoff, REG_ITMP3);
1412 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1413 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1414 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1415 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1416 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1418 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1420 x86_64_idiv_reg(cd, REG_ITMP3);
1422 if (iptr->dst->flags & INMEMORY) {
1423 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1424 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1427 M_INTMOVE(RAX, iptr->dst->regoff);
1429 if (iptr->dst->regoff != RDX) {
1430 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1435 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1437 d = reg_of_var(rd, iptr->dst, REG_NULL);
1438 if (src->prev->flags & INMEMORY) {
1439 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1442 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1445 if (src->flags & INMEMORY) {
1446 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1449 M_INTMOVE(src->regoff, REG_ITMP3);
1453 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1455 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1456 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1457 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1460 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1461 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1462 x86_64_jcc(cd, X86_64_CC_E, 2 + 3); /* 6 bytes */
1465 x86_64_idiv_reg(cd, REG_ITMP3);
1467 if (iptr->dst->flags & INMEMORY) {
1468 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1469 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1472 M_INTMOVE(RDX, iptr->dst->regoff);
1474 if (iptr->dst->regoff != RDX) {
1475 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1480 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1481 /* val.i = constant */
1483 var_to_reg_int(s1, src, REG_ITMP1);
1484 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1485 M_INTMOVE(s1, REG_ITMP1);
1486 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1487 x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1488 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1489 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1490 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1491 store_reg_to_var_int(iptr->dst, d);
1494 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1495 /* val.l = constant */
1497 var_to_reg_int(s1, src, REG_ITMP1);
1498 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1499 M_INTMOVE(s1, REG_ITMP1);
1500 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1501 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1502 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1503 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1504 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1505 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1506 store_reg_to_var_int(iptr->dst, d);
1509 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1511 d = reg_of_var(rd, iptr->dst, REG_NULL);
1512 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1515 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1516 /* val.i = constant */
1518 d = reg_of_var(rd, iptr->dst, REG_NULL);
1519 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1522 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1524 d = reg_of_var(rd, iptr->dst, REG_NULL);
1525 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1528 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1529 /* val.i = constant */
1531 d = reg_of_var(rd, iptr->dst, REG_NULL);
1532 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1535 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1537 d = reg_of_var(rd, iptr->dst, REG_NULL);
1538 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1541 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1542 /* val.i = constant */
1544 d = reg_of_var(rd, iptr->dst, REG_NULL);
1545 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1548 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1550 d = reg_of_var(rd, iptr->dst, REG_NULL);
1551 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1554 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1555 /* val.i = constant */
1557 d = reg_of_var(rd, iptr->dst, REG_NULL);
1558 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1561 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1563 d = reg_of_var(rd, iptr->dst, REG_NULL);
1564 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1567 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1568 /* val.i = constant */
1570 d = reg_of_var(rd, iptr->dst, REG_NULL);
1571 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1574 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1576 d = reg_of_var(rd, iptr->dst, REG_NULL);
1577 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1580 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1581 /* val.l = constant */
1583 d = reg_of_var(rd, iptr->dst, REG_NULL);
1584 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1587 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1589 d = reg_of_var(rd, iptr->dst, REG_NULL);
1590 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1593 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1594 /* val.i = constant */
1596 d = reg_of_var(rd, iptr->dst, REG_NULL);
1597 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1600 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1602 d = reg_of_var(rd, iptr->dst, REG_NULL);
1603 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1606 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1607 /* val.l = constant */
1609 d = reg_of_var(rd, iptr->dst, REG_NULL);
1610 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1613 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1615 d = reg_of_var(rd, iptr->dst, REG_NULL);
1616 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1619 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1620 /* val.i = constant */
1622 d = reg_of_var(rd, iptr->dst, REG_NULL);
1623 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1626 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1628 d = reg_of_var(rd, iptr->dst, REG_NULL);
1629 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1632 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1633 /* val.l = constant */
1635 d = reg_of_var(rd, iptr->dst, REG_NULL);
1636 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1639 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1641 d = reg_of_var(rd, iptr->dst, REG_NULL);
1642 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1645 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1646 /* val.i = constant */
1648 d = reg_of_var(rd, iptr->dst, REG_NULL);
1649 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1652 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1654 d = reg_of_var(rd, iptr->dst, REG_NULL);
1655 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1658 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1659 /* val.l = constant */
1661 d = reg_of_var(rd, iptr->dst, REG_NULL);
1662 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
1666 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1667 /* op1 = variable, val.i = constant */
1669 /* using inc and dec is definitely faster than add -- tested */
1672 var = &(rd->locals[iptr->op1][TYPE_INT]);
1674 if (var->flags & INMEMORY) {
1675 if (iptr->val.i == 1) {
1676 x86_64_incl_membase(cd, REG_SP, d * 8);
1678 } else if (iptr->val.i == -1) {
1679 x86_64_decl_membase(cd, REG_SP, d * 8);
1682 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1686 if (iptr->val.i == 1) {
1687 x86_64_incl_reg(cd, d);
1689 } else if (iptr->val.i == -1) {
1690 x86_64_decl_reg(cd, d);
1693 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1699 /* floating operations ************************************************/
1701 case ICMD_FNEG: /* ..., value ==> ..., - value */
1703 var_to_reg_flt(s1, src, REG_FTMP1);
1704 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1705 a = dseg_adds4(cd, 0x80000000);
1707 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1708 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1709 store_reg_to_var_flt(iptr->dst, d);
1712 case ICMD_DNEG: /* ..., value ==> ..., - value */
1714 var_to_reg_flt(s1, src, REG_FTMP1);
1715 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1716 a = dseg_adds8(cd, 0x8000000000000000);
1718 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1719 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1720 store_reg_to_var_flt(iptr->dst, d);
1723 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1725 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1726 var_to_reg_flt(s2, src, REG_FTMP2);
1727 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1729 x86_64_addss_reg_reg(cd, s2, d);
1730 } else if (s2 == d) {
1731 x86_64_addss_reg_reg(cd, s1, d);
1734 x86_64_addss_reg_reg(cd, s2, d);
1736 store_reg_to_var_flt(iptr->dst, d);
1739 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1741 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1742 var_to_reg_flt(s2, src, REG_FTMP2);
1743 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1745 x86_64_addsd_reg_reg(cd, s2, d);
1746 } else if (s2 == d) {
1747 x86_64_addsd_reg_reg(cd, s1, d);
1750 x86_64_addsd_reg_reg(cd, s2, d);
1752 store_reg_to_var_flt(iptr->dst, d);
1755 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1757 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1758 var_to_reg_flt(s2, src, REG_FTMP2);
1759 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1761 M_FLTMOVE(s2, REG_FTMP2);
1765 x86_64_subss_reg_reg(cd, s2, d);
1766 store_reg_to_var_flt(iptr->dst, d);
1769 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1771 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1772 var_to_reg_flt(s2, src, REG_FTMP2);
1773 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1775 M_FLTMOVE(s2, REG_FTMP2);
1779 x86_64_subsd_reg_reg(cd, s2, d);
1780 store_reg_to_var_flt(iptr->dst, d);
1783 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1785 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1786 var_to_reg_flt(s2, src, REG_FTMP2);
1787 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1789 x86_64_mulss_reg_reg(cd, s2, d);
1790 } else if (s2 == d) {
1791 x86_64_mulss_reg_reg(cd, s1, d);
1794 x86_64_mulss_reg_reg(cd, s2, d);
1796 store_reg_to_var_flt(iptr->dst, d);
1799 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1801 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1802 var_to_reg_flt(s2, src, REG_FTMP2);
1803 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1805 x86_64_mulsd_reg_reg(cd, s2, d);
1806 } else if (s2 == d) {
1807 x86_64_mulsd_reg_reg(cd, s1, d);
1810 x86_64_mulsd_reg_reg(cd, s2, d);
1812 store_reg_to_var_flt(iptr->dst, d);
1815 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1817 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1818 var_to_reg_flt(s2, src, REG_FTMP2);
1819 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1821 M_FLTMOVE(s2, REG_FTMP2);
1825 x86_64_divss_reg_reg(cd, s2, d);
1826 store_reg_to_var_flt(iptr->dst, d);
1829 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1831 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1832 var_to_reg_flt(s2, src, REG_FTMP2);
1833 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1835 M_FLTMOVE(s2, REG_FTMP2);
1839 x86_64_divsd_reg_reg(cd, s2, d);
1840 store_reg_to_var_flt(iptr->dst, d);
1843 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1845 var_to_reg_int(s1, src, REG_ITMP1);
1846 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1847 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1848 store_reg_to_var_flt(iptr->dst, d);
1851 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1853 var_to_reg_int(s1, src, REG_ITMP1);
1854 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1855 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1856 store_reg_to_var_flt(iptr->dst, d);
1859 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1861 var_to_reg_int(s1, src, REG_ITMP1);
1862 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1863 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1864 store_reg_to_var_flt(iptr->dst, d);
1867 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1869 var_to_reg_int(s1, src, REG_ITMP1);
1870 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1871 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1872 store_reg_to_var_flt(iptr->dst, d);
1875 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1877 var_to_reg_flt(s1, src, REG_FTMP1);
1878 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1879 x86_64_cvttss2si_reg_reg(cd, s1, d);
1880 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1881 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1882 x86_64_jcc(cd, X86_64_CC_NE, a);
1883 M_FLTMOVE(s1, REG_FTMP1);
1884 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1885 x86_64_call_reg(cd, REG_ITMP2);
1886 M_INTMOVE(REG_RESULT, d);
1887 store_reg_to_var_int(iptr->dst, d);
1890 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1892 var_to_reg_flt(s1, src, REG_FTMP1);
1893 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1894 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1895 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1896 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1897 x86_64_jcc(cd, X86_64_CC_NE, a);
1898 M_FLTMOVE(s1, REG_FTMP1);
1899 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1900 x86_64_call_reg(cd, REG_ITMP2);
1901 M_INTMOVE(REG_RESULT, d);
1902 store_reg_to_var_int(iptr->dst, d);
1905 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1907 var_to_reg_flt(s1, src, REG_FTMP1);
1908 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1909 x86_64_cvttss2siq_reg_reg(cd, s1, d);
1910 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1911 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1912 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1913 x86_64_jcc(cd, X86_64_CC_NE, a);
1914 M_FLTMOVE(s1, REG_FTMP1);
1915 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
1916 x86_64_call_reg(cd, REG_ITMP2);
1917 M_INTMOVE(REG_RESULT, d);
1918 store_reg_to_var_int(iptr->dst, d);
1921 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1923 var_to_reg_flt(s1, src, REG_FTMP1);
1924 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1925 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1926 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1927 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1928 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1929 x86_64_jcc(cd, X86_64_CC_NE, a);
1930 M_FLTMOVE(s1, REG_FTMP1);
1931 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
1932 x86_64_call_reg(cd, REG_ITMP2);
1933 M_INTMOVE(REG_RESULT, d);
1934 store_reg_to_var_int(iptr->dst, d);
1937 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1939 var_to_reg_flt(s1, src, REG_FTMP1);
1940 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1941 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1942 store_reg_to_var_flt(iptr->dst, d);
1945 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1947 var_to_reg_flt(s1, src, REG_FTMP1);
1948 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1949 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1950 store_reg_to_var_flt(iptr->dst, d);
1953 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1954 /* == => 0, < => 1, > => -1 */
1956 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1957 var_to_reg_flt(s2, src, REG_FTMP2);
1958 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1959 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1960 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1961 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1962 x86_64_ucomiss_reg_reg(cd, s1, s2);
1963 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1964 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1965 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1966 store_reg_to_var_int(iptr->dst, d);
1969 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1970 /* == => 0, < => 1, > => -1 */
1972 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1973 var_to_reg_flt(s2, src, REG_FTMP2);
1974 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1975 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1976 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1977 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1978 x86_64_ucomiss_reg_reg(cd, s1, s2);
1979 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1980 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1981 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1982 store_reg_to_var_int(iptr->dst, d);
1985 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1986 /* == => 0, < => 1, > => -1 */
1988 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1989 var_to_reg_flt(s2, src, REG_FTMP2);
1990 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1991 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1992 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1993 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1994 x86_64_ucomisd_reg_reg(cd, s1, s2);
1995 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1996 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1997 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1998 store_reg_to_var_int(iptr->dst, d);
2001 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
2002 /* == => 0, < => 1, > => -1 */
2004 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2005 var_to_reg_flt(s2, src, REG_FTMP2);
2006 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2007 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
2008 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
2009 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
2010 x86_64_ucomisd_reg_reg(cd, s1, s2);
2011 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
2012 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
2013 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
2014 store_reg_to_var_int(iptr->dst, d);
2018 /* memory operations **************************************************/
2020 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
2022 var_to_reg_int(s1, src, REG_ITMP1);
2023 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2024 gen_nullptr_check(s1);
2025 x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
2026 store_reg_to_var_int(iptr->dst, d);
2029 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2031 var_to_reg_int(s1, src->prev, REG_ITMP1);
2032 var_to_reg_int(s2, src, REG_ITMP2);
2033 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2034 if (iptr->op1 == 0) {
2035 gen_nullptr_check(s1);
2038 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2039 store_reg_to_var_int(iptr->dst, d);
2042 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2044 var_to_reg_int(s1, src->prev, REG_ITMP1);
2045 var_to_reg_int(s2, src, REG_ITMP2);
2046 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2047 if (iptr->op1 == 0) {
2048 gen_nullptr_check(s1);
2051 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2052 store_reg_to_var_int(iptr->dst, d);
2055 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2057 var_to_reg_int(s1, src->prev, REG_ITMP1);
2058 var_to_reg_int(s2, src, REG_ITMP2);
2059 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2060 if (iptr->op1 == 0) {
2061 gen_nullptr_check(s1);
2064 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2065 store_reg_to_var_int(iptr->dst, d);
2068 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2070 var_to_reg_int(s1, src->prev, REG_ITMP1);
2071 var_to_reg_int(s2, src, REG_ITMP2);
2072 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2073 if (iptr->op1 == 0) {
2074 gen_nullptr_check(s1);
2077 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2078 store_reg_to_var_flt(iptr->dst, d);
2081 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2083 var_to_reg_int(s1, src->prev, REG_ITMP1);
2084 var_to_reg_int(s2, src, REG_ITMP2);
2085 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2086 if (iptr->op1 == 0) {
2087 gen_nullptr_check(s1);
2090 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2091 store_reg_to_var_flt(iptr->dst, d);
2094 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2096 var_to_reg_int(s1, src->prev, REG_ITMP1);
2097 var_to_reg_int(s2, src, REG_ITMP2);
2098 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2099 if (iptr->op1 == 0) {
2100 gen_nullptr_check(s1);
2103 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2104 store_reg_to_var_int(iptr->dst, d);
2107 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2109 var_to_reg_int(s1, src->prev, REG_ITMP1);
2110 var_to_reg_int(s2, src, REG_ITMP2);
2111 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2112 if (iptr->op1 == 0) {
2113 gen_nullptr_check(s1);
2116 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2117 store_reg_to_var_int(iptr->dst, d);
2120 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2122 var_to_reg_int(s1, src->prev, REG_ITMP1);
2123 var_to_reg_int(s2, src, REG_ITMP2);
2124 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2125 if (iptr->op1 == 0) {
2126 gen_nullptr_check(s1);
2129 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2130 store_reg_to_var_int(iptr->dst, d);
2134 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2136 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2137 var_to_reg_int(s2, src->prev, REG_ITMP2);
2138 if (iptr->op1 == 0) {
2139 gen_nullptr_check(s1);
2142 var_to_reg_int(s3, src, REG_ITMP3);
/* Array store opcodes: stack is (..., arrayref, index, value ==> ...).
   s1 = arrayref, s2 = index, s3 = value.  iptr->op1 == 0 means the
   null-pointer check on the array reference has NOT been proven
   redundant, so one is emitted.  The store addresses the element as
   data[0] + index * (1 << scale): scale 3 = 8-byte (ref/long/double),
   2 = 4-byte (int/float), 1 = 2-byte (char/short), 0 = 1-byte (byte).
   NOTE(review): this extract omits interleaved lines (case labels,
   bound checks, closing braces); the leading numbers are original
   file line numbers. */
2143 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2146 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2148 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2149 var_to_reg_int(s2, src->prev, REG_ITMP2);
2150 if (iptr->op1 == 0) {
2151 gen_nullptr_check(s1);
2154 var_to_reg_int(s3, src, REG_ITMP3);
2155 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2158 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2160 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2161 var_to_reg_int(s2, src->prev, REG_ITMP2);
2162 if (iptr->op1 == 0) {
2163 gen_nullptr_check(s1);
2166 var_to_reg_int(s3, src, REG_ITMP3);
2167 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2170 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2172 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2173 var_to_reg_int(s2, src->prev, REG_ITMP2);
2174 if (iptr->op1 == 0) {
2175 gen_nullptr_check(s1);
/* float value travels in an SSE register (movss = 32-bit scalar) */
2178 var_to_reg_flt(s3, src, REG_FTMP3);
2179 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2182 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2184 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2185 var_to_reg_int(s2, src->prev, REG_ITMP2);
2186 if (iptr->op1 == 0) {
2187 gen_nullptr_check(s1);
2190 var_to_reg_flt(s3, src, REG_FTMP3);
2191 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2194 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2196 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2197 var_to_reg_int(s2, src->prev, REG_ITMP2);
2198 if (iptr->op1 == 0) {
2199 gen_nullptr_check(s1);
2202 var_to_reg_int(s3, src, REG_ITMP3);
2203 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2206 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2208 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2209 var_to_reg_int(s2, src->prev, REG_ITMP2);
2210 if (iptr->op1 == 0) {
2211 gen_nullptr_check(s1);
2214 var_to_reg_int(s3, src, REG_ITMP3);
2215 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2218 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2220 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2221 var_to_reg_int(s2, src->prev, REG_ITMP2);
2222 if (iptr->op1 == 0) {
2223 gen_nullptr_check(s1);
2226 var_to_reg_int(s3, src, REG_ITMP3);
2227 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
/* Array store-constant opcodes: stack is (..., arrayref, index ==> ...)
   and the value to store is an immediate from iptr->val.  Only two
   stack operands are popped here, so s1 = arrayref comes from
   src->prev and s2 = index from src. */
2230 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2232 var_to_reg_int(s1, src->prev, REG_ITMP1);
2233 var_to_reg_int(s2, src, REG_ITMP2);
2234 if (iptr->op1 == 0) {
2235 gen_nullptr_check(s1);
2238 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2241 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2243 var_to_reg_int(s1, src->prev, REG_ITMP1);
2244 var_to_reg_int(s2, src, REG_ITMP2);
2245 if (iptr->op1 == 0) {
2246 gen_nullptr_check(s1);
/* a 64-bit store only takes a sign-extended 32-bit immediate, so a
   long constant outside imm32 range is written as two 32-bit halves
   (low half at data[0], high half at data[0] + 4) */
2250 if (IS_IMM32(iptr->val.l)) {
2251 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2254 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2255 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2259 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2261 var_to_reg_int(s1, src->prev, REG_ITMP1);
2262 var_to_reg_int(s2, src, REG_ITMP2);
2263 if (iptr->op1 == 0) {
2264 gen_nullptr_check(s1);
/* only the NULL reference can be stored as a constant */
2267 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2270 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2272 var_to_reg_int(s1, src->prev, REG_ITMP1);
2273 var_to_reg_int(s2, src, REG_ITMP2);
2274 if (iptr->op1 == 0) {
2275 gen_nullptr_check(s1);
2278 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2281 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2283 var_to_reg_int(s1, src->prev, REG_ITMP1);
2284 var_to_reg_int(s2, src, REG_ITMP2);
2285 if (iptr->op1 == 0) {
2286 gen_nullptr_check(s1);
2289 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2292 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2294 var_to_reg_int(s1, src->prev, REG_ITMP1);
2295 var_to_reg_int(s2, src, REG_ITMP2);
2296 if (iptr->op1 == 0) {
2297 gen_nullptr_check(s1);
2300 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
/* Static field access.  Two resolution states are handled:
   - unresolved field (iptr->target is an unresolved_field *): a patch
     reference to asm_get_putstatic is recorded so the patcher resolves
     the field at first execution;
   - resolved field (iptr->val.a is a fieldinfo *): if the declaring
     class is not yet initialized, a patch reference to
     asm_check_clinit triggers class initialization first.
   The absolute address of the field's value is placed in the data
   segment and loaded RIP-relative; the displacement is negative
   because the dseg lies below mcodebase and the 7 accounts for the
   length of the mov instruction itself (RIP points past it). */
2304 case ICMD_GETSTATIC: /* ... ==> ..., value */
2305 /* op1 = type, val.a = field address */
2308 unresolved_field *uf = iptr->target;
2309 codegen_addpatchref(cd, cd->mcodeptr, asm_get_putstatic, uf);
2313 fieldinfo *fi = iptr->val.a;
2315 if (!fi->class->initialized) {
2316 codegen_addpatchref(cd, cd->mcodeptr, asm_check_clinit, fi->class);
2319 a = (ptrint) &(fi->value);
2322 /* This approach is much faster than moving the field address */
2323 /* inline into a register. */
2324 a = dseg_addaddress(cd, a);
2325 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP2);
/* dispatch on the JavaVM type of the field: 32-bit int, 64-bit
   long/ref, float (movss) or double (movsd) load through ITMP2 */
2326 switch (iptr->op1) {
2328 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2329 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2330 store_reg_to_var_int(iptr->dst, d);
2334 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2335 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2336 store_reg_to_var_int(iptr->dst, d);
2339 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2340 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2341 store_reg_to_var_flt(iptr->dst, d);
2344 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2345 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2346 store_reg_to_var_flt(iptr->dst, d);
2351 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2352 /* op1 = type, val.a = field address */
2355 unresolved_field *uf = iptr->target;
2356 codegen_addpatchref(cd, cd->mcodeptr, asm_get_putstatic, uf);
2360 fieldinfo *fi = iptr->val.a;
2362 if (!fi->class->initialized) {
2363 codegen_addpatchref(cd, cd->mcodeptr, asm_check_clinit, fi->class);
2365 if (showdisassemble) {
2374 a = (ptrint) &(fi->value);
2377 /* This approach is much faster than moving the field address */
2378 /* inline into a register. */
2379 a = dseg_addaddress(cd, a);
2380 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP2);
/* store path: value comes from the stack slot 'src' */
2381 switch (iptr->op1) {
2383 var_to_reg_int(s2, src, REG_ITMP1);
2384 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2388 var_to_reg_int(s2, src, REG_ITMP1);
2389 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2392 var_to_reg_flt(s2, src, REG_FTMP1);
2393 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2396 var_to_reg_flt(s2, src, REG_FTMP1);
2397 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
/* PUTSTATICCONST: like PUTSTATIC, but the value is an immediate in
   the current instruction and the field reference lives in the
   FOLLOWING NOP instruction (iptr[1]) */
2402 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2403 /* val = value (in current instruction) */
2404 /* op1 = type, val.a = field address (in */
2405 /* following NOP) */
2407 if (!iptr[1].val.a) {
2408 unresolved_field *uf = iptr[1].target;
2409 codegen_addpatchref(cd, cd->mcodeptr, asm_get_putstatic, uf);
2413 fieldinfo *fi = iptr[1].val.a;
2415 if (!fi->class->initialized) {
2416 codegen_addpatchref(cd, cd->mcodeptr, asm_check_clinit, fi->class);
2419 a = (ptrint) &(fi->value);
2422 /* This approach is much faster than moving the field address */
2423 /* inline into a register. */
2424 a = dseg_addaddress(cd, a);
2425 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP1);
2426 switch (iptr->op1) {
2429 x86_64_movl_imm_membase(cd, iptr->val.i, REG_ITMP1, 0);
/* long constant: single 64-bit store if imm32-representable,
   otherwise two 32-bit half-stores (low, then high at offset 4) */
2434 if (IS_IMM32(iptr->val.l)) {
2435 x86_64_mov_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2437 x86_64_movl_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2438 x86_64_movl_imm_membase(cd, iptr->val.l >> 32, REG_ITMP1, 4);
/* Instance field access.  The object reference is null-checked, then
   the field is addressed base+offset.  For unresolved fields a patch
   reference to asm_get_putfield is recorded; the 32-bit displacement
   forms of the move emitters (…membase32…) are used so the patcher
   always has 4 bytes of displacement to rewrite. */
2444 case ICMD_GETFIELD: /* ... ==> ..., value */
2445 /* op1 = type, val.i = field offset */
2447 var_to_reg_int(s1, src, REG_ITMP1);
2448 gen_nullptr_check(s1);
2451 codegen_addpatchref(cd, cd->mcodeptr, asm_get_putfield,
2452 (unresolved_field *) iptr->target);
2455 a = ((fieldinfo *) (iptr->val.a))->offset;
2457 switch (iptr->op1) {
2459 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2460 x86_64_movl_membase32_reg(cd, s1, a, d);
2461 store_reg_to_var_int(iptr->dst, d);
2465 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2466 x86_64_mov_membase32_reg(cd, s1, a, d);
2467 store_reg_to_var_int(iptr->dst, d);
2470 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2471 x86_64_movss_membase32_reg(cd, s1, a, d);
2472 store_reg_to_var_flt(iptr->dst, d);
2475 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2476 x86_64_movsd_membase32_reg(cd, s1, a, d);
2477 store_reg_to_var_flt(iptr->dst, d);
2482 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2483 /* op1 = type, val.i = field offset */
2485 var_to_reg_int(s1, src->prev, REG_ITMP1);
2486 gen_nullptr_check(s1);
/* fetch the value into an int or float register depending on type
   BEFORE recording the patch ref, so the patch site is the store */
2487 if (IS_INT_LNG_TYPE(iptr->op1)) {
2488 var_to_reg_int(s2, src, REG_ITMP2);
2490 var_to_reg_flt(s2, src, REG_FTMP2);
2494 codegen_addpatchref(cd, cd->mcodeptr, asm_get_putfield,
2495 (unresolved_field *) iptr->target);
2498 a = ((fieldinfo *) (iptr->val.a))->offset;
2500 switch (iptr->op1) {
2502 x86_64_movl_reg_membase32(cd, s2, s1, a);
2506 x86_64_mov_reg_membase32(cd, s2, s1, a);
2509 x86_64_movss_reg_membase32(cd, s2, s1, a);
2512 x86_64_movsd_reg_membase32(cd, s2, s1, a);
/* PUTFIELDCONST: immediate value in this instruction, field reference
   in the following NOP (iptr[1]) -- same layout as PUTSTATICCONST */
2517 case ICMD_PUTFIELDCONST: /* ..., objectref, value ==> ... */
2518 /* val = value (in current instruction) */
2519 /* op1 = type, val.a = field address (in */
2520 /* following NOP) */
2522 var_to_reg_int(s1, src, REG_ITMP1);
2523 gen_nullptr_check(s1);
2525 if (!iptr[1].val.a) {
2526 unresolved_field *uf = iptr[1].target;
2527 codegen_addpatchref(cd, cd->mcodeptr, asm_get_putfield, uf);
2530 a = ((fieldinfo *) (iptr[1].val.a))->offset;
2532 switch (iptr->op1) {
2535 x86_64_movl_imm_membase32(cd, iptr->val.i, s1, a);
2540 if (IS_IMM32(iptr->val.l)) {
2541 x86_64_mov_imm_membase32(cd, iptr->val.l, s1, a);
2543 x86_64_movl_imm_membase32(cd, iptr->val.l, s1, a);
2544 x86_64_movl_imm_membase32(cd, iptr->val.l >> 32, s1, a + 4);
2551 /* branch operations **************************************************/
/* ATHROW: move the exception object into the dedicated exception
   pointer register, use call/pop to obtain the current PC into the
   XPC register, then jump to the common exception handler stub. */
2553 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2555 var_to_reg_int(s1, src, REG_ITMP1);
2556 M_INTMOVE(s1, REG_ITMP1_XPTR);
2558 x86_64_call_imm(cd, 0); /* passing exception pointer */
2559 x86_64_pop_reg(cd, REG_ITMP2_XPC);
2561 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
2562 x86_64_jmp_reg(cd, REG_ITMP3);
/* GOTO/JSR: emit the jump/call with a 0 displacement and record a
   forward reference so it is patched to the target basic block later */
2565 case ICMD_GOTO: /* ... ==> ... */
2566 /* op1 = target JavaVM pc */
2568 x86_64_jmp_imm(cd, 0);
2569 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2572 case ICMD_JSR: /* ... ==> ... */
2573 /* op1 = target JavaVM pc */
2575 x86_64_call_imm(cd, 0);
2576 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
/* RET: indirect jump through the return address stored in the local
   variable of address type named by op1 */
2579 case ICMD_RET: /* ... ==> ... */
2580 /* op1 = local variable */
2582 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2583 var_to_reg_int(s1, var, REG_ITMP1);
2584 x86_64_jmp_reg(cd, s1);
/* IFNULL/IFNONNULL: compare the reference against 0 -- via cmp-imm on
   its stack slot when spilled, or test reg,reg when in a register --
   then branch with a forward reference to the target block */
2587 case ICMD_IFNULL: /* ..., value ==> ... */
2588 /* op1 = target JavaVM pc */
2590 if (src->flags & INMEMORY) {
2591 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2594 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2596 x86_64_jcc(cd, X86_64_CC_E, 0);
2597 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2600 case ICMD_IFNONNULL: /* ..., value ==> ... */
2601 /* op1 = target JavaVM pc */
2603 if (src->flags & INMEMORY) {
2604 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2607 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2609 x86_64_jcc(cd, X86_64_CC_NE, 0);
2610 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
/* Conditional branch opcodes.  All delegate to the emitfuncs helpers:
   - x86_64_emit_ifcc:     32-bit compare of value against val.i
   - x86_64_emit_if_lcc:   64-bit compare of value against val.l
   - x86_64_emit_if_icmpcc: 32-bit compare of two stack values
   - x86_64_emit_if_lcmpcc: 64-bit compare of two stack values
   each followed by a jcc with the given signed condition code.
   ACMPEQ/ACMPNE (reference compares) share the 64-bit lcmp path. */
2613 case ICMD_IFEQ: /* ..., value ==> ... */
2614 /* op1 = target JavaVM pc, val.i = constant */
2616 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2619 case ICMD_IFLT: /* ..., value ==> ... */
2620 /* op1 = target JavaVM pc, val.i = constant */
2622 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2625 case ICMD_IFLE: /* ..., value ==> ... */
2626 /* op1 = target JavaVM pc, val.i = constant */
2628 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2631 case ICMD_IFNE: /* ..., value ==> ... */
2632 /* op1 = target JavaVM pc, val.i = constant */
2634 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2637 case ICMD_IFGT: /* ..., value ==> ... */
2638 /* op1 = target JavaVM pc, val.i = constant */
2640 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2643 case ICMD_IFGE: /* ..., value ==> ... */
2644 /* op1 = target JavaVM pc, val.i = constant */
2646 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
2649 case ICMD_IF_LEQ: /* ..., value ==> ... */
2650 /* op1 = target JavaVM pc, val.l = constant */
2652 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2655 case ICMD_IF_LLT: /* ..., value ==> ... */
2656 /* op1 = target JavaVM pc, val.l = constant */
2658 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2661 case ICMD_IF_LLE: /* ..., value ==> ... */
2662 /* op1 = target JavaVM pc, val.l = constant */
2664 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2667 case ICMD_IF_LNE: /* ..., value ==> ... */
2668 /* op1 = target JavaVM pc, val.l = constant */
2670 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2673 case ICMD_IF_LGT: /* ..., value ==> ... */
2674 /* op1 = target JavaVM pc, val.l = constant */
2676 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2679 case ICMD_IF_LGE: /* ..., value ==> ... */
2680 /* op1 = target JavaVM pc, val.l = constant */
2682 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
2685 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2686 /* op1 = target JavaVM pc */
2688 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2691 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2692 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2694 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2697 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2698 /* op1 = target JavaVM pc */
2700 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2703 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2704 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2706 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2709 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2710 /* op1 = target JavaVM pc */
2712 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2715 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2716 /* op1 = target JavaVM pc */
2718 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2721 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2722 /* op1 = target JavaVM pc */
2724 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2727 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2728 /* op1 = target JavaVM pc */
2730 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2733 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2734 /* op1 = target JavaVM pc */
2736 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2739 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2740 /* op1 = target JavaVM pc */
2742 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2745 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2746 /* op1 = target JavaVM pc */
2748 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2751 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2752 /* op1 = target JavaVM pc */
2754 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2757 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
/* Branch-free select: loads the ELSE constant (from the following
   ELSE_ICONST instruction, iptr[1]) into d, the IF constant into
   ITMP2, tests the value against zero, and uses a conditional move
   to pick the IF constant when the condition holds.  The six cases
   differ only in the condition code passed to cmovccl. */
2759 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2762 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2763 /* val.i = constant */
2765 var_to_reg_int(s1, src, REG_ITMP1)
2766 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2767 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2769 M_INTMOVE(s1, REG_ITMP1);
2772 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2774 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2775 x86_64_testl_reg_reg(cd, s1, s1);
2776 x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2777 store_reg_to_var_int(iptr->dst, d);
2780 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2781 /* val.i = constant */
2783 var_to_reg_int(s1, src, REG_ITMP1);
2784 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2785 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2787 M_INTMOVE(s1, REG_ITMP1);
2790 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2792 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2793 x86_64_testl_reg_reg(cd, s1, s1);
2794 x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2795 store_reg_to_var_int(iptr->dst, d);
2798 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2799 /* val.i = constant */
2801 var_to_reg_int(s1, src, REG_ITMP1);
2802 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2803 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2805 M_INTMOVE(s1, REG_ITMP1);
2808 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2810 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2811 x86_64_testl_reg_reg(cd, s1, s1);
2812 x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2813 store_reg_to_var_int(iptr->dst, d);
2816 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2817 /* val.i = constant */
2819 var_to_reg_int(s1, src, REG_ITMP1);
2820 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2821 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2823 M_INTMOVE(s1, REG_ITMP1);
2826 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2828 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2829 x86_64_testl_reg_reg(cd, s1, s1);
2830 x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2831 store_reg_to_var_int(iptr->dst, d);
2834 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2835 /* val.i = constant */
2837 var_to_reg_int(s1, src, REG_ITMP1);
2838 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2839 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2841 M_INTMOVE(s1, REG_ITMP1);
2844 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2846 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2847 x86_64_testl_reg_reg(cd, s1, s1);
2848 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2849 store_reg_to_var_int(iptr->dst, d);
2852 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2853 /* val.i = constant */
2855 var_to_reg_int(s1, src, REG_ITMP1);
2856 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2857 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2859 M_INTMOVE(s1, REG_ITMP1);
2862 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2864 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2865 x86_64_testl_reg_reg(cd, s1, s1);
2866 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2867 store_reg_to_var_int(iptr->dst, d);
/* Return opcodes.  IRETURN/FRETURN move the result into the ABI
   result register and fall into the shared epilogue, which:
   1. optionally calls the method-exit trace function (saving and
      restoring both result registers around the call),
   2. for synchronized methods, releases the monitor via
      builtin_monitorexit (again preserving the return value in the
      stack slot rd->maxmemuse * 8),
   3. restores callee-saved integer and float registers,
   4. deallocates the stack frame. */
2871 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2875 var_to_reg_int(s1, src, REG_RESULT);
2876 M_INTMOVE(s1, REG_RESULT);
2878 goto nowperformreturn;
2880 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2883 var_to_reg_flt(s1, src, REG_FRESULT);
2884 M_FLTMOVE(s1, REG_FRESULT);
2886 goto nowperformreturn;
2888 case ICMD_RETURN: /* ... ==> ... */
2894 p = parentargs_base;
2896 /* call trace function */
2898 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2900 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2901 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2903 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
2904 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
/* NOTE(review): FRESULT is copied into BOTH float argument registers;
   presumably builtin_displaymethodstop takes the value twice (float
   and double view) -- confirm against its prototype */
2905 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2906 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2908 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
2909 x86_64_call_reg(cd, REG_ITMP1);
2911 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2912 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2914 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2917 #if defined(USE_THREADS)
2918 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2919 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2921 /* we need to save the proper return value */
2922 switch (iptr->opc) {
2926 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, rd->maxmemuse * 8);
2930 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, rd->maxmemuse * 8);
2934 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2935 x86_64_call_reg(cd, REG_ITMP1);
2937 /* and now restore the proper return value */
2938 switch (iptr->opc) {
2942 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_RESULT);
2946 x86_64_movq_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_FRESULT);
2952 /* restore saved registers */
2953 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
2954 p--; x86_64_mov_membase_reg(cd, REG_SP, p * 8, rd->savintregs[i]);
2956 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
2957 p--; x86_64_movq_membase_reg(cd, REG_SP, p * 8, rd->savfltregs[i]);
2960 /* deallocate stack */
2961 if (parentargs_base) {
2962 x86_64_alu_imm_reg(cd, X86_64_ADD, parentargs_base * 8, REG_SP);
/* TABLESWITCH: subtract the low bound from the index, emit an
   unsigned compare against (high - low); indices out of range take
   the default target.  In-range indices load the target address from
   a jump table built top-down in the data segment (addressed
   relative to the data-segment pointer materialized into ITMP2) and
   jump indirectly. */
2970 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2975 tptr = (void **) iptr->target;
2977 s4ptr = iptr->val.a;
2978 l = s4ptr[1]; /* low */
2979 i = s4ptr[2]; /* high */
2981 var_to_reg_int(s1, src, REG_ITMP1);
2982 M_INTMOVE(s1, REG_ITMP1);
2984 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
/* unsigned compare: a negative (index - low) wraps above the range,
   so CC_A catches both below-low and above-high in one branch */
2989 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
2990 x86_64_jcc(cd, X86_64_CC_A, 0);
2992 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
2993 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2995 /* build jump table top down and use address of lowest entry */
2997 /* s4ptr += 3 + i; */
3001 /* dseg_addtarget(cd, BlockPtrOfPC(*--s4ptr)); */
3002 dseg_addtarget(cd, (basicblock *) tptr[0]);
3006 /* length of dataseg after last dseg_addtarget is used by load */
3008 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
3009 dseg_adddata(cd, cd->mcodeptr);
3010 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
3011 x86_64_jmp_reg(cd, REG_ITMP1);
/* LOOKUPSWITCH: linear sequence of compare-and-branch pairs, one per
   match value, followed by an unconditional jump to the default
   target.  Register compares are used as the fastest form. */
3016 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
3018 s4 i, l, val, *s4ptr;
3021 tptr = (void **) iptr->target;
3023 s4ptr = iptr->val.a;
3024 l = s4ptr[0]; /* default */
3025 i = s4ptr[1]; /* count */
3027 MCODECHECK((i<<2)+8);
3028 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
3034 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
3035 x86_64_jcc(cd, X86_64_CC_E, 0);
3036 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[1]), cd->mcodeptr); */
3037 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3040 x86_64_jmp_imm(cd, 0);
3041 /* codegen_addreference(cd, BlockPtrOfPC(l), cd->mcodeptr); */
3043 tptr = (void **) iptr->target;
3044 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
/* Method invocation opcodes (BUILTINn and the four INVOKE variants).
   Arguments are marshalled from JIT stack slots into the ABI's
   integer/float argument registers, with overflow going to stack
   slots; then the callee is reached through one of: a direct builtin
   address, the method's stubroutine (static/special), the vtable
   (virtual), or the interface table (interface).  Unresolved targets
   record a patch reference instead of a resolved address. */
3049 case ICMD_BUILTIN3: /* ..., arg1, arg2, arg3 ==> ... */
3050 /* op1 = return type, val.a = function pointer*/
3054 case ICMD_BUILTIN2: /* ..., arg1, arg2 ==> ... */
3055 /* op1 = return type, val.a = function pointer*/
3059 case ICMD_BUILTIN1: /* ..., arg1 ==> ... */
3060 /* op1 = return type, val.a = function pointer*/
3064 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3065 /* op1 = arg count, val.a = method pointer */
3067 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3068 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
3069 case ICMD_INVOKEINTERFACE:
3079 MCODECHECK((s3 << 1) + 64);
3081 /* copy arguments to registers or stack location ******************/
3083 /* count integer and float arguments */
3088 for (s2 = s3, tmpsrc = src; --s2 >= 0; tmpsrc = tmpsrc->prev) {
3089 IS_INT_LNG_TYPE(tmpsrc->type) ? iarg++ : farg++;
3092 /* calculate amount of arguments to be on stack */
/* NOTE(review): intent appears to be
     s2 = max(iarg - INT_ARG_CNT, 0) + max(farg - FLT_ARG_CNT, 0),
   but ?: precedence makes this parse as
     (iarg > INT_ARG_CNT) ? (iarg - INT_ARG_CNT)
       : ((0 + (farg > FLT_ARG_CNT)) ? (farg - FLT_ARG_CNT) : 0),
   so the float overflow count is dropped whenever integer arguments
   overflow -- compare against a later upstream CACAO revision */
3094 s2 = (iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0 +
3095 (farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0;
/* walk the argument list outermost-first, decrementing the per-class
   counters so each argument knows its register index */
3097 for (; --s3 >= 0; src = src->prev) {
3098 /* decrement the current argument type */
3099 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
3101 if (src->varkind == ARGVAR) {
3102 if (IS_INT_LNG_TYPE(src->type)) {
3103 if (iarg >= INT_ARG_CNT) {
3107 if (farg >= FLT_ARG_CNT) {
3114 if (IS_INT_LNG_TYPE(src->type)) {
3115 if (iarg < INT_ARG_CNT) {
3116 s1 = rd->argintregs[iarg];
3117 var_to_reg_int(d, src, s1);
3121 var_to_reg_int(d, src, REG_ITMP1);
3123 x86_64_mov_reg_membase(cd, d, REG_SP, s2 * 8);
3127 if (farg < FLT_ARG_CNT) {
3128 s1 = rd->argfltregs[farg];
3129 var_to_reg_flt(d, src, s1);
3133 var_to_reg_flt(d, src, REG_FTMP1);
3135 x86_64_movq_reg_membase(cd, d, REG_SP, s2 * 8);
3141 switch (iptr->opc) {
/* builtins: direct call through the function pointer in 'a' */
3148 x86_64_mov_imm_reg(cd, a, REG_ITMP1);
3149 x86_64_call_reg(cd, REG_ITMP1);
3152 case ICMD_INVOKESPECIAL:
3153 /* first argument contains pointer */
3154 gen_nullptr_check(rd->argintregs[0]);
3156 /* access memory for hardware nullptr */
3157 x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2);
3161 case ICMD_INVOKESTATIC:
3163 unresolved_method *um = iptr->target;
3165 codegen_addpatchref(cd, cd->mcodeptr,
3166 asm_invokestatic_special, um);
3169 d = um->methodref->parseddesc.md->returntype.type;
/* resolved: call the method's stub routine directly */
3172 a = (ptrint) lm->stubroutine;
3173 d = lm->parseddesc->returntype.type;
3176 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3177 x86_64_call_reg(cd, REG_ITMP2);
3180 case ICMD_INVOKEVIRTUAL:
3181 gen_nullptr_check(rd->argintregs[0]);
3184 unresolved_method *um = iptr->target;
3186 codegen_addpatchref(cd, cd->mcodeptr,
3187 asm_invokevirtual, um);
3190 d = um->methodref->parseddesc.md->returntype.type;
/* resolved: s1 = byte offset of the method slot in the vtable */
3193 s1 = OFFSET(vftbl_t, table[0]) +
3194 sizeof(methodptr) * lm->vftblindex;
3195 d = lm->parseddesc->returntype.type;
/* load vftbl from the receiver, then the method pointer, then call */
3198 x86_64_mov_membase_reg(cd, rd->argintregs[0],
3199 OFFSET(java_objectheader, vftbl),
3201 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP1);
3202 x86_64_call_reg(cd, REG_ITMP1);
3205 case ICMD_INVOKEINTERFACE:
3206 gen_nullptr_check(rd->argintregs[0]);
3209 unresolved_method *um = iptr->target;
3211 codegen_addpatchref(cd, cd->mcodeptr,
3212 asm_invokeinterface, um);
3215 d = um->methodref->parseddesc.md->returntype.type;
3218 classinfo * ci = lm->class;
/* interface tables grow downward from interfacetable[0], indexed by
   the interface's class index; s2 indexes the method in that table */
3220 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3221 sizeof(methodptr) * ci->index;
3223 s2 = sizeof(methodptr) * (lm - ci->methods);
3225 d = lm->parseddesc->returntype.type;
3228 x86_64_mov_membase_reg(cd, rd->argintregs[0],
3229 OFFSET(java_objectheader, vftbl),
3231 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP2);
3232 x86_64_mov_membase32_reg(cd, REG_ITMP2, s2, REG_ITMP1);
3233 x86_64_call_reg(cd, REG_ITMP1);
3237 /* d contains return type */
/* move the ABI result register into the destination stack slot,
   choosing the int or float path from the destination's type */
3239 if (d != TYPE_VOID) {
3240 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3241 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3242 M_INTMOVE(REG_RESULT, s1);
3243 store_reg_to_var_int(iptr->dst, s1);
3246 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3247 M_FLTMOVE(REG_FRESULT, s1);
3248 store_reg_to_var_flt(iptr->dst, s1);
/* CHECKCAST: throws ClassCastException (via codegen_addxcastrefs
   forward references) unless the object is NULL or an instance of
   the superclass.  Because the interface-check and class-check code
   paths are skipped over with hard-coded relative jumps, their exact
   byte sizes (s2 and s3) are computed up front from the emitter
   instruction lengths; CALCOFFSETBYTES adds displacement bytes that
   depend on the concrete offsets.  When the superclass is still
   unresolved, runtime code tests its ACC_INTERFACE flag (patched in
   by asm_checkcast_instanceof_flags) to pick the path. */
3255 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3257 /* op1: 0 == array, 1 == class */
3258 /* val.a: (classinfo *) superclass */
3260 /* superclass is an interface:
3262 * OK if ((sub == NULL) ||
3263 * (sub->vftbl->interfacetablelength > super->index) &&
3264 * (sub->vftbl->interfacetable[-super->index] != NULL));
3266 * superclass is a class:
3268 * OK if ((sub == NULL) || (0
3269 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3270 * super->vftbl->diffval));
3275 vftbl_t *supervftbl;
3278 super = (classinfo *) iptr->val.a;
3285 superindex = super->index;
3286 supervftbl = super->vftbl;
3289 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* baseval/diffval reads must be consistent under concurrent class
   loading; restart the thread-critical region here */
3290 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3292 var_to_reg_int(s1, src, REG_ITMP1);
3294 /* calculate interface checkcast code size */
3296 s2 = 3; /* mov_membase_reg */
3297 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3299 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub imm32 */ +
3300 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3301 3 /* test */ + 6 /* jcc */;
3303 /* calculate class checkcast code size */
3305 s3 = 3; /* mov_membase_reg */
3306 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3307 s3 += 10 /* mov_imm_reg */ + 3 + 4 /* movl_membase32_reg */;
3310 if (s1 != REG_ITMP1) {
3311 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3312 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3313 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3314 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3320 s3 += 3 + 4 /* movl_membase32_reg */ + 3 /* sub */ +
3321 10 /* mov_imm_reg */ + 3 /* movl_membase_reg */;
3322 CALCOFFSETBYTES(s3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3325 s3 += 3 /* cmp */ + 6 /* jcc */;
3327 /* if class is not resolved, check which code to call */
/* NULL passes the cast: jump over flag test (6+7+6 bytes) plus both
   code paths (s2 + 5-byte jmp + s3) */
3330 x86_64_test_reg_reg(cd, s1, s1);
3331 x86_64_jcc(cd, X86_64_CC_Z, 6 + 7 + 6 + s2 + 5 + s3);
3333 codegen_addpatchref(cd, cd->mcodeptr,
3334 asm_checkcast_instanceof_flags,
3335 (constant_classref *) iptr->target);
3337 x86_64_movl_imm_reg(cd, 0, REG_ITMP2); /* super->flags */
3338 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP2);
3339 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3342 /* interface checkcast code */
3344 if (!super || (super->flags & ACC_INTERFACE)) {
3346 x86_64_test_reg_reg(cd, s1, s1);
3347 x86_64_jcc(cd, X86_64_CC_Z, s2);
3350 x86_64_mov_membase_reg(cd, s1,
3351 OFFSET(java_objectheader, vftbl),
3355 codegen_addpatchref(cd, cd->mcodeptr,
3356 asm_checkcast_instanceof_interface,
3357 (constant_classref *) iptr->target);
/* fail if interfacetablelength <= superindex ... */
3359 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3360 OFFSET(vftbl_t, interfacetablelength),
3362 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3363 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3364 x86_64_jcc(cd, X86_64_CC_LE, 0);
3365 codegen_addxcastrefs(cd, cd->mcodeptr);
/* ... or if the interface's table entry is NULL */
3366 x86_64_mov_membase32_reg(cd, REG_ITMP2,
3367 OFFSET(vftbl_t, interfacetable[0]) -
3368 superindex * sizeof(methodptr*),
3370 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3371 x86_64_jcc(cd, X86_64_CC_E, 0);
3372 codegen_addxcastrefs(cd, cd->mcodeptr);
3375 x86_64_jmp_imm(cd, s3);
3378 /* class checkcast code */
3380 if (!super || !(super->flags & ACC_INTERFACE)) {
3382 x86_64_test_reg_reg(cd, s1, s1);
3383 x86_64_jcc(cd, X86_64_CC_Z, s3);
3386 x86_64_mov_membase_reg(cd, s1,
3387 OFFSET(java_objectheader, vftbl),
3391 codegen_addpatchref(cd, cd->mcodeptr,
3392 asm_checkcast_class,
3393 (constant_classref *) iptr->target);
3395 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP3);
3396 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3397 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3399 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3400 OFFSET(vftbl_t, baseval),
3402 /* if (s1 != REG_ITMP1) { */
3403 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3404 /* OFFSET(vftbl_t, baseval), */
3406 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3407 /* OFFSET(vftbl_t, diffval), */
3409 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3410 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3412 /* x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP1, REG_ITMP2); */
/* subtype test via relative numbering:
   0 <= (sub.baseval - super.baseval) <= super.diffval,
   done as an unsigned compare so the negative case also fails */
3415 x86_64_movl_membase32_reg(cd, REG_ITMP3,
3416 OFFSET(vftbl_t, baseval),
3418 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP2);
3419 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP3);
3420 x86_64_movl_membase_reg(cd, REG_ITMP3,
3421 OFFSET(vftbl_t, diffval),
3424 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3425 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3427 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP2);
3428 x86_64_jcc(cd, X86_64_CC_A, 0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3429 codegen_addxcastrefs(cd, cd->mcodeptr);
3431 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3433 store_reg_to_var_int(iptr->dst, d);
3434 /* if (iptr->dst->flags & INMEMORY) { */
3435 /* x86_64_mov_reg_membase(cd, s1, REG_SP, iptr->dst->regoff * 8); */
3437 /* M_INTMOVE(s1, iptr->dst->regoff); */
/* INSTANCEOF: like CHECKCAST but produces a 0/1 int result in d via
   setcc instead of throwing.  Again the interface-path (s2) and
   class-path (s3) byte sizes are precomputed so hard-coded relative
   jumps can skip the unused path; the result register is cleared
   with xor up front so all fall-through cases yield 0. */
3442 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3444 /* op1: 0 == array, 1 == class */
3445 /* val.a: (classinfo *) superclass */
3447 /* superclass is an interface:
3449 * return (sub != NULL) &&
3450 * (sub->vftbl->interfacetablelength > super->index) &&
3451 * (sub->vftbl->interfacetable[-super->index] != NULL);
3453 * superclass is a class:
3455 * return ((sub != NULL) && (0
3456 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3457 * super->vftbl->diffvall));
3462 vftbl_t *supervftbl;
3465 super = (classinfo *) iptr->val.a;
3472 superindex = super->index;
3473 supervftbl = super->vftbl;
3476 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3477 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3480 var_to_reg_int(s1, src, REG_ITMP1);
3482 /* calculate interface instanceof code size */
3484 s2 = 3; /* mov_membase_reg */
3485 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3486 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub_imm32 */ +
3487 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3488 3 /* test */ + 4 /* setcc */;
3490 /* calculate class instanceof code size */
3492 s3 = 3; /* mov_membase_reg */
3493 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3494 s3 += 10; /* mov_imm_reg */
3495 s3 += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3496 CALCOFFSETBYTES(s3, REG_ITMP1, OFFSET(vftbl_t, baseval));
3497 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3498 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, baseval));
3499 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3500 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3501 s3 += 3 /* sub */ + 3 /* xor */ + 3 /* cmp */ + 4 /* setcc */;
3503 d = reg_of_var(rd, iptr->dst, REG_ITMP2);
3505 M_INTMOVE(s1, REG_ITMP1);
/* default result is 0 (false) */
3508 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3510 /* if class is not resolved, check which code to call */
3513 x86_64_test_reg_reg(cd, s1, s1);
3514 x86_64_jcc(cd, X86_64_CC_Z, 6 + 7 + 6 + s2 + 5 + s3);
3516 codegen_addpatchref(cd, cd->mcodeptr,
3517 asm_checkcast_instanceof_flags,
3518 (constant_classref *) iptr->target);
3520 x86_64_movl_imm_reg(cd, 0, REG_ITMP3); /* super->flags */
3521 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP3);
3522 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3525 /* interface instanceof code */
3527 if (!super || (super->flags & ACC_INTERFACE)) {
3529 x86_64_test_reg_reg(cd, s1, s1);
3530 x86_64_jcc(cd, X86_64_CC_Z, s2);
3533 x86_64_mov_membase_reg(cd, s1,
3534 OFFSET(java_objectheader, vftbl),
3537 codegen_addpatchref(cd, cd->mcodeptr,
3538 asm_checkcast_instanceof_interface,
3539 (constant_classref *) iptr->target);
3541 x86_64_movl_membase32_reg(cd, REG_ITMP1,
3542 OFFSET(vftbl_t, interfacetablelength),
3544 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3545 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
/* a = bytes of the remaining true-path code, skipped when the
   table is too short (result stays 0) */
3547 a = 3 + 4 /* mov_membase32_reg */ + 3 /* test */ + 4 /* setcc */;
3549 x86_64_jcc(cd, X86_64_CC_LE, a);
3550 x86_64_mov_membase32_reg(cd, REG_ITMP1,
3551 OFFSET(vftbl_t, interfacetable[0]) -
3552 superindex * sizeof(methodptr*),
3554 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3555 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3558 x86_64_jmp_imm(cd, s3);
3561 /* class instanceof code */
3563 if (!super || !(super->flags & ACC_INTERFACE)) {
3565 x86_64_test_reg_reg(cd, s1, s1);
3566 x86_64_jcc(cd, X86_64_CC_E, s3);
3569 x86_64_mov_membase_reg(cd, s1,
3570 OFFSET(java_objectheader, vftbl),
3574 codegen_addpatchref(cd, cd->mcodeptr,
3575 asm_instanceof_class,
3576 (constant_classref *) iptr->target);
3578 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP2);
3579 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3580 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3582 x86_64_movl_membase_reg(cd, REG_ITMP1,
3583 OFFSET(vftbl_t, baseval),
3585 x86_64_movl_membase_reg(cd, REG_ITMP2,
3586 OFFSET(vftbl_t, baseval),
3588 x86_64_movl_membase_reg(cd, REG_ITMP2,
3589 OFFSET(vftbl_t, diffval),
3591 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3592 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
/* (sub.baseval - super.baseval) <= super.diffval, unsigned (CC_BE),
   sets d = 1 on success; the xor re-clears d before the setcc */
3594 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP1);
3595 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3596 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
3597 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3599 store_reg_to_var_int(iptr->dst, d);
/* ICMD_CHECKASIZE: branch to the negative-array-size stub when the
   size operand (in memory or in a register) is < 0 */
3603 case ICMD_CHECKASIZE: /* ..., size ==> ..., size */
3605 if (src->flags & INMEMORY) {
3606 x86_64_alul_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
3609 x86_64_testl_reg_reg(cd, src->regoff, src->regoff);
/* displacement 0 is a placeholder; resolved by gen_resolvebranch in
   the stub-generation pass below */
3611 x86_64_jcc(cd, X86_64_CC_L, 0);
3612 codegen_addxcheckarefs(cd, cd->mcodeptr);
/* ICMD_CHECKEXCEPTION: after a builtin call, branch to the exception
   stub when REG_RESULT is zero */
3615 case ICMD_CHECKEXCEPTION: /* ... ==> ... */
3617 x86_64_test_reg_reg(cd, REG_RESULT, REG_RESULT);
3618 x86_64_jcc(cd, X86_64_CC_E, 0);
3619 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3622 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3623 /* op1 = dimension, val.a = array descriptor */
3625 /* check for negative sizes and copy sizes to stack if necessary */
3627 MCODECHECK((iptr->op1 << 1) + 64);
3629 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3630 var_to_reg_int(s2, src, REG_ITMP1);
3631 x86_64_testl_reg_reg(cd, s2, s2);
3632 x86_64_jcc(cd, X86_64_CC_L, 0);
3633 codegen_addxcheckarefs(cd, cd->mcodeptr);
3635 /* copy SAVEDVAR sizes to stack */
3637 if (src->varkind != ARGVAR) {
3638 x86_64_mov_reg_membase(cd, s2, REG_SP, s1 * 8);
3642 /* a0 = dimension count */
3643 x86_64_mov_imm_reg(cd, iptr->op1, rd->argintregs[0]);
3645 /* a1 = arrayvftbl */
3646 x86_64_mov_imm_reg(cd, (ptrint) iptr->val.a, rd->argintregs[1]);
3648 /* a2 = pointer to dimensions = stack pointer */
3649 x86_64_mov_reg_reg(cd, REG_SP, rd->argintregs[2]);
3651 /* contains the correct function to call (from parse.c) */
3652 x86_64_mov_imm_reg(cd, (ptrint) iptr->target, REG_ITMP1);
3653 x86_64_call_reg(cd, REG_ITMP1);
/* move the returned array reference into the destination variable */
3655 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3656 M_INTMOVE(REG_RESULT, s1);
3657 store_reg_to_var_int(iptr->dst, s1);
/* default: an unknown intermediate-code opcode is a JIT-internal error */
3661 throw_cacao_exception_exit(string_java_lang_InternalError,
3662 "Unknown ICMD %d", iptr->opc);
3665 } /* for instruction */
3667 /* copy values to interface registers */
3669 src = bptr->outstack;
3670 len = bptr->outdepth;
3671 MCODECHECK(64 + len);
/* only non-stack variables must be moved into their assigned
   interface register or memory slot; stack variables already live in
   the interface location */
3677 if ((src->varkind != STACKVAR)) {
3679 if (IS_FLT_DBL_TYPE(s2)) {
3680 var_to_reg_flt(s1, src, REG_FTMP1);
3681 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3682 M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3685 x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3689 var_to_reg_int(s1, src, REG_ITMP1);
3690 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3691 M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3694 x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3700 } /* if (bptr -> flags >= BBREACHED) */
3701 } /* for basic block */
/* Exception stub generators.  Each branch recorded during the main
   code-generation loop above is resolved to point at a stub which
   (1) reconstructs the faulting PC in REG_ITMP2_XPC, (2) builds the
   exception object via a C helper, and (3) jumps to
   asm_handle_exception.  Within each kind, later references reuse the
   first stub by jumping to `xcodeptr'. */
3705 /* generate bound check stubs */
3707 u1 *xcodeptr = NULL;
3710 for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3711 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3713 cd->mcodeptr - cd->mcodebase);
3717 /* move index register into REG_ITMP1 */
3718 x86_64_mov_reg_reg(cd, bref->reg, REG_ITMP1); /* 3 bytes */
/* absolute faulting PC = mcodebase (patched in via the data segment
   by dseg_adddata) + branch position */
3720 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3721 dseg_adddata(cd, cd->mcodeptr);
3722 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3723 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3725 if (xcodeptr != NULL) {
3726 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3729 xcodeptr = cd->mcodeptr;
/* save XPC across the C call; 2 slots keep REG_SP 16-byte aligned */
3731 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3732 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3734 x86_64_mov_reg_reg(cd, REG_ITMP1, rd->argintregs[0]);
3735 x86_64_mov_imm_reg(cd, (ptrint) new_arrayindexoutofboundsexception, REG_ITMP3);
3736 x86_64_call_reg(cd, REG_ITMP3);
3738 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3739 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3741 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
3742 x86_64_jmp_reg(cd, REG_ITMP3);
3746 /* generate negative array size check stubs */
3750 for (bref = cd->xcheckarefs; bref != NULL; bref = bref->next) {
/* without an exception table all stubs of one kind are identical, so
   later refs can branch straight to the shared stub body (skipping
   the 10+10+3 byte XPC-reconstruction prologue) */
3751 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3752 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3754 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3758 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3760 cd->mcodeptr - cd->mcodebase);
3764 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3765 dseg_adddata(cd, cd->mcodeptr);
3766 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3767 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3769 if (xcodeptr != NULL) {
3770 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3773 xcodeptr = cd->mcodeptr;
3775 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3776 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3778 x86_64_mov_imm_reg(cd, (u8) new_negativearraysizeexception, REG_ITMP3);
3779 x86_64_call_reg(cd, REG_ITMP3);
3781 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3782 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3784 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3785 x86_64_jmp_reg(cd, REG_ITMP3);
3789 /* generate cast check stubs */
3793 for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3794 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3795 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3797 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3801 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3803 cd->mcodeptr - cd->mcodebase);
3807 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3808 dseg_adddata(cd, cd->mcodeptr);
3809 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3810 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3812 if (xcodeptr != NULL) {
3813 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3816 xcodeptr = cd->mcodeptr;
3818 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3819 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3821 x86_64_mov_imm_reg(cd, (u8) new_classcastexception, REG_ITMP3);
3822 x86_64_call_reg(cd, REG_ITMP3);
3824 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3825 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3827 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3828 x86_64_jmp_reg(cd, REG_ITMP3);
3832 /* generate divide by zero check stubs */
3836 for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
3837 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3838 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3840 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3844 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3846 cd->mcodeptr - cd->mcodebase);
3850 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3851 dseg_adddata(cd, cd->mcodeptr);
3852 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3853 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3855 if (xcodeptr != NULL) {
3856 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3859 xcodeptr = cd->mcodeptr;
3861 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3862 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3864 x86_64_mov_imm_reg(cd, (u8) new_arithmeticexception, REG_ITMP3);
3865 x86_64_call_reg(cd, REG_ITMP3);
3867 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3868 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3870 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3871 x86_64_jmp_reg(cd, REG_ITMP3);
3875 /* generate exception check stubs */
3879 for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
3880 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3881 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3883 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3887 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3889 cd->mcodeptr - cd->mcodebase);
3893 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3894 dseg_adddata(cd, cd->mcodeptr);
3895 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3896 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3898 if (xcodeptr != NULL) {
3899 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3902 xcodeptr = cd->mcodeptr;
/* fetch the pending exception pointer and clear its slot: per-thread
   via builtin_get_exceptionptrptr, otherwise the global _exceptionptr */
3904 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3905 x86_64_alu_imm_reg(cd, X86_64_SUB, 8, REG_SP);
3906 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0);
3907 x86_64_mov_imm_reg(cd, (u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3908 x86_64_call_reg(cd, REG_ITMP1);
3909 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
3910 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
3911 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
3912 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);
3913 x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
3915 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
3916 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP1_XPTR);
3917 x86_64_mov_imm_membase(cd, 0, REG_ITMP3, 0);
3920 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3921 x86_64_jmp_reg(cd, REG_ITMP3);
3925 /* generate null pointer check stubs */
3929 for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
3930 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3931 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3933 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3937 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3939 cd->mcodeptr - cd->mcodebase);
3943 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3944 dseg_adddata(cd, cd->mcodeptr);
3945 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3946 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3948 if (xcodeptr != NULL) {
3949 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3952 xcodeptr = cd->mcodeptr;
3954 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3955 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3957 x86_64_mov_imm_reg(cd, (ptrint) new_nullpointerexception, REG_ITMP3);
3958 x86_64_call_reg(cd, REG_ITMP3);
3960 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3961 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3963 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
3964 x86_64_jmp_reg(cd, REG_ITMP3);
3968 /* generate code patching stub call code */
3975 tmpcd = DNEW(codegendata);
3977 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
3980 /* Get machine code which is patched back in later. A */
3981 /* `call rel32' is 5 bytes long (but read 8 bytes). */
3982 xcodeptr = cd->mcodebase + pref->branchpos;
3983 mcode = *((ptrint *) xcodeptr);
3985 /* patch in `call rel32' to call the following code */
3986 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
3987 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
/* the stub pushes the saved original code bytes and the class
   reference, then tail-jumps to the assembler patcher wrapper */
3989 /* move machine code bytes and classinfo pointer into registers */
3990 x86_64_mov_imm_reg(cd, (ptrint) mcode, REG_ITMP3);
3991 x86_64_push_reg(cd, REG_ITMP3);
3992 x86_64_mov_imm_reg(cd, (ptrint) pref->ref, REG_ITMP3);
3993 x86_64_push_reg(cd, REG_ITMP3);
3995 x86_64_mov_imm_reg(cd, (ptrint) pref->asmwrapper, REG_ITMP3);
3996 x86_64_jmp_reg(cd, REG_ITMP3);
/* relocate the finished code and data segment to their final place */
4001 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
4005 /* function createcompilerstub *************************************************
4007 creates a stub routine which calls the compiler
4009 *******************************************************************************/
4011 #define COMPSTUBSIZE 23
/* createcompilerstub: allocate a COMPSTUBSIZE-byte trampoline that
   loads `m' into REG_ITMP1 and jumps to asm_call_jit_compiler, which
   compiles the method on first invocation.  Returns the stub memory
   (freed later by removecompilerstub). */
4013 u1 *createcompilerstub(methodinfo *m)
4015 u1 *s = CNEW(u1, COMPSTUBSIZE); /* memory to hold the stub */
4019 /* mark start of dump memory area */
4021 dumpsize = dump_size();
4023 cd = DNEW(codegendata);
4026 /* code for the stub */
4027 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1); /* pass method to compiler */
4028 x86_64_mov_imm_reg(cd, (u8) asm_call_jit_compiler, REG_ITMP3);/* load address */
4029 x86_64_jmp_reg(cd, REG_ITMP3); /* jump to compiler */
4031 #if defined(STATISTICS)
4033 count_cstub_len += COMPSTUBSIZE;
4036 /* release dump area */
4038 dump_release(dumpsize);
4044 /* function removecompilerstub *************************************************
4046 deletes a compiler stub from memory (simply by freeing it)
4048 *******************************************************************************/
/* removecompilerstub: free the fixed-size stub memory allocated by
   createcompilerstub() */
4050 void removecompilerstub(u1 *stub)
4052 CFREE(stub, COMPSTUBSIZE);
4056 /* function: createnativestub **************************************************
4058 creates a stub routine which calls a native method
4060 *******************************************************************************/
4062 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
4063 /* static java_objectheader **(*callgetexceptionptrptr)() = builtin_get_exceptionptrptr; */
4066 #define NATIVESTUBSIZE 700 /* keep this size high enough! */
/* createnativestub: build the stub that adapts a Java-convention call
   of method `m' into a call of the native function `f': shuffles
   arguments for env/class, optionally traces the call, invokes `f',
   and propagates a pending exception to asm_handle_nat_exception. */
4068 u1 *createnativestub(functionptr f, methodinfo *m)
4070 u1 *s; /* pointer to stub memory */
4073 t_inlining_globals *id;
4075 s4 stackframesize; /* size of stackframe if needed */
4077 s4 iargs; /* count of integer arguments */
4078 s4 fargs; /* count of float arguments */
/* patch positions for late resolution of the native function */
4081 void **callAddrPatchPos=0;
4083 void **jmpInstrPatchPos=0;
4085 /* initialize variables */
4090 /* mark start of dump memory area */
4092 dumpsize = dump_size();
4094 cd = DNEW(codegendata);
4095 rd = DNEW(registerdata);
4096 id = DNEW(t_inlining_globals);
4098 /* setup registers before using it */
4100 inlining_setup(m, id);
4101 reg_setup(m, rd, id);
4103 /* set paramcount and paramtypes */
4105 method_descriptor2types(m);
4107 /* count integer and float arguments */
4109 tptr = m->paramtypes;
4110 for (i = 0; i < m->paramcount; i++) {
4111 IS_INT_LNG_TYPE(*tptr++) ? iargs++ : fargs++;
4114 s = CNEW(u1, NATIVESTUBSIZE); /* memory to hold the stub */
4116 /* set some required variables which are normally set by codegen_setup */
4119 cd->patchrefs = NULL;
4121 /* if function is static, check for initialized */
4123 if ((m->flags & ACC_STATIC) && !m->class->initialized) {
4124 codegen_addpatchref(cd, cd->mcodeptr, asm_check_clinit, m->class);
/* save all argument registers so the trace call below cannot
   clobber them (1 extra slot holds the methodinfo pointer) */
4130 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4132 /* save integer and float argument registers */
4134 for (i = 0; i < INT_ARG_CNT; i++) {
4135 x86_64_mov_reg_membase(cd, rd->argintregs[i], REG_SP, (1 + i) * 8);
4138 for (i = 0; i < FLT_ARG_CNT; i++) {
4139 x86_64_movq_reg_membase(cd, rd->argfltregs[i], REG_SP, (1 + INT_ARG_CNT + i) * 8);
4142 /* show integer hex code for float arguments */
4144 for (i = 0, l = 0; i < m->paramcount && i < INT_ARG_CNT; i++) {
4145 /* if the paramtype is a float, we have to right shift all */
4146 /* following integer registers */
4148 if (IS_FLT_DBL_TYPE(m->paramtypes[i])) {
4149 for (s1 = INT_ARG_CNT - 2; s1 >= i; s1--) {
4150 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
4153 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[i]);
/* call builtin_trace_args with `m' stored in the first stack slot */
4158 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1);
4159 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, 0 * 8);
4160 x86_64_mov_imm_reg(cd, (u8) builtin_trace_args, REG_ITMP1);
4161 x86_64_call_reg(cd, REG_ITMP1);
4163 /* restore integer and float argument registers */
4165 for (i = 0; i < INT_ARG_CNT; i++) {
4166 x86_64_mov_membase_reg(cd, REG_SP, (1 + i) * 8, rd->argintregs[i]);
4169 for (i = 0; i < FLT_ARG_CNT; i++) {
4170 x86_64_movq_membase_reg(cd, REG_SP, (1 + INT_ARG_CNT + i) * 8, rd->argfltregs[i]);
4173 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4176 #if !defined(STATIC_CLASSPATH)
4177 /* call method to resolve native function if needed */
/* save all argument registers across the codegen_resolve_native call */
4179 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4181 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 1 * 8);
4182 x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 2 * 8);
4183 x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 3 * 8);
4184 x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 4 * 8);
4185 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 5 * 8);
4186 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 6 * 8);
4188 x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 7 * 8);
4189 x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 8 * 8);
4190 x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 9 * 8);
4191 x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 10 * 8);
4192 x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 11 * 8);
4193 x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 12 * 8);
4194 x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 13 * 8);
4195 x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 14 * 8);
4197 /* needed to patch a jump over this block */
4198 x86_64_jmp_imm(cd, 0);
4199 jmpInstrPos = cd->mcodeptr - 4;
4201 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
4203 x86_64_mov_imm_reg(cd, 0, rd->argintregs[1]);
4204 callAddrPatchPos = cd->mcodeptr - 8; /* here the resolved native function address is patched in */
4206 x86_64_mov_imm_reg(cd, 0, rd->argintregs[2]);
4207 jmpInstrPatchPos = cd->mcodeptr - 8;
4209 x86_64_mov_imm_reg(cd, jmpInstrPos, rd->argintregs[3]);
4211 x86_64_mov_imm_reg(cd, (u8) codegen_resolve_native, REG_ITMP1);
4212 x86_64_call_reg(cd, REG_ITMP1);
/* activate the skip-jump emitted above, so resolution runs only once */
4214 *(jmpInstrPatchPos) = cd->mcodeptr - jmpInstrPos - 1; /*=opcode jmp_imm size*/
4216 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[0]);
4217 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[1]);
4218 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[2]);
4219 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[3]);
4220 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[4]);
4221 x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, rd->argintregs[5]);
4223 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[0]);
4224 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[1]);
4225 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[2]);
4226 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[3]);
4227 x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[4]);
4228 x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[5]);
4229 x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[6]);
4230 x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, rd->argfltregs[7]);
4232 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4236 /* save argument registers on stack -- if we have to */
/* a stack frame is needed when the env/class shift pushes arguments
   out of registers, or when there are stack-passed arguments */
4238 if ((((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) || iargs > (INT_ARG_CNT - 1)) ||
4239 (fargs > FLT_ARG_CNT)) {
4246 /* do we need to shift integer argument register onto stack? */
4248 if ((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) {
4249 /* do we need to shift 2 arguments? */
4250 if (iargs > (INT_ARG_CNT - 1)) {
4257 } else if (iargs > (INT_ARG_CNT - 1)) {
4261 /* calculate required stack space */
4263 stackparamcnt += (iargs > INT_ARG_CNT) ? iargs - INT_ARG_CNT : 0;
4264 stackparamcnt += (fargs > FLT_ARG_CNT) ? fargs - FLT_ARG_CNT : 0;
4266 stackframesize = stackparamcnt + paramshiftcnt;
4268 /* keep stack 16-byte aligned */
/* NOTE(review): the adjustment on an even slot count is elided here;
   presumably stackframesize is incremented -- confirm upstream */
4269 if (!(stackframesize & 0x1))
4272 x86_64_alu_imm_reg(cd, X86_64_SUB, stackframesize * 8, REG_SP);
4274 /* shift integer arguments if required */
4276 if ((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) {
4277 /* do we need to shift 2 arguments? */
4278 if (iargs > (INT_ARG_CNT - 1))
4279 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 1 * 8);
4281 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 0 * 8);
4283 } else if (iargs > (INT_ARG_CNT - 1)) {
4284 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 0 * 8);
4287 /* copy stack arguments into new stack frame -- if any */
4288 for (i = 0; i < stackparamcnt; i++) {
4289 x86_64_mov_membase_reg(cd, REG_SP, (stackframesize + 1 + i) * 8, REG_ITMP1);
4290 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
4294 /* keep stack 16-byte aligned */
4295 x86_64_alu_imm_reg(cd, X86_64_SUB, 1 * 8, REG_SP);
4299 /* shift integer arguments for `env' and `class' arguments */
4301 if (m->flags & ACC_STATIC) {
4302 /* shift iargs count if less than INT_ARG_CNT, or all */
4303 for (i = (iargs < (INT_ARG_CNT - 2)) ? iargs : (INT_ARG_CNT - 2); i >= 0; i--) {
4304 x86_64_mov_reg_reg(cd, rd->argintregs[i], rd->argintregs[i + 2]);
4307 /* put class into second argument register */
4308 x86_64_mov_imm_reg(cd, (u8) m->class, rd->argintregs[1]);
4311 /* shift iargs count if less than INT_ARG_CNT, or all */
4312 for (i = (iargs < (INT_ARG_CNT - 1)) ? iargs : (INT_ARG_CNT - 1); i >= 0; i--) {
4313 x86_64_mov_reg_reg(cd, rd->argintregs[i], rd->argintregs[i + 1]);
4317 /* put env into first argument register */
4318 x86_64_mov_imm_reg(cd, (u8) &env, rd->argintregs[0]);
4320 /* do the native function call */
4321 x86_64_mov_imm_reg(cd, (u8) f, REG_ITMP1);
4322 #if !defined(STATIC_CLASSPATH)
/* remember where `f' is embedded so the resolver can patch it later */
4324 (*callAddrPatchPos) = cd->mcodeptr - 8;
4326 x86_64_call_reg(cd, REG_ITMP1);
4328 /* remove stackframe if there is one */
4329 if (stackframesize) {
4330 x86_64_alu_imm_reg(cd, X86_64_ADD, stackframesize * 8, REG_SP);
/* preserve the native return values across builtin_displaymethodstop */
4334 x86_64_alu_imm_reg(cd, X86_64_SUB, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4336 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
4337 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
4339 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
4340 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
4341 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
4342 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
4344 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
4345 x86_64_call_reg(cd, REG_ITMP1);
4347 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
4348 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
4350 x86_64_alu_imm_reg(cd, X86_64_ADD, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4353 /* check for exception */
/* load the pending exception pointer: per-thread via
   builtin_get_exceptionptrptr, otherwise the global _exceptionptr */
4355 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4356 x86_64_push_reg(cd, REG_RESULT);
4357 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4358 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4359 x86_64_call_reg(cd, REG_ITMP3);
4360 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
4361 x86_64_pop_reg(cd, REG_RESULT);
4363 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4364 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP3);
/* no exception pending -> skip the handler path (the elided `ret') */
4366 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
4367 x86_64_jcc(cd, X86_64_CC_NE, 1);
4371 /* handle exception */
/* move the exception into REG_ITMP1_XPTR and clear the pending slot */
4373 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4374 x86_64_push_reg(cd, REG_ITMP3);
4375 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4376 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4377 x86_64_call_reg(cd, REG_ITMP3);
4378 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
4379 x86_64_pop_reg(cd, REG_ITMP1_XPTR);
4381 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
4382 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4383 x86_64_alu_reg_reg(cd, X86_64_XOR, REG_ITMP2, REG_ITMP2);
4384 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_ITMP3, 0); /* clear exception pointer */
4387 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC); /* get return address from stack */
4388 x86_64_alu_imm_reg(cd, X86_64_SUB, 3, REG_ITMP2_XPC); /* callq */
4390 x86_64_mov_imm_reg(cd, (u8) asm_handle_nat_exception, REG_ITMP3);
4391 x86_64_jmp_reg(cd, REG_ITMP3);
4394 /* patch in a <clinit> call if required ***********************************/
4402 tmpcd = DNEW(codegendata);
4404 /* there can only be one patch ref entry */
4405 pref = cd->patchrefs;
4408 /* Get machine code which is patched back in later. A */
4409 /* `call rel32' is 5 bytes long (but read 8 bytes). */
4410 xcodeptr = cd->mcodebase + pref->branchpos;
4411 mcode = *((ptrint *) xcodeptr);
4413 /* patch in `call rel32' to call the following code */
4414 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4415 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4417 /* move machine code bytes and classinfo pointer into registers */
4418 x86_64_mov_imm_reg(cd, (ptrint) mcode, REG_ITMP3);
4419 x86_64_push_reg(cd, REG_ITMP3);
4420 x86_64_mov_imm_reg(cd, (ptrint) pref->ref, REG_ITMP3);
4421 x86_64_push_reg(cd, REG_ITMP3);
4423 x86_64_mov_imm_reg(cd, (ptrint) pref->asmwrapper, REG_ITMP3);
4424 x86_64_jmp_reg(cd, REG_ITMP3);
4428 /* Check if the stub size is big enough to hold the whole stub generated. */
4429 /* If not, this can lead into unpredictable crashes, because of heap */
/* NOTE(review): message typo -- "is to small" should read "is too
   small"; runtime string left untouched by this documentation pass */
4431 if ((s4) (cd->mcodeptr - s) > NATIVESTUBSIZE) {
4432 throw_cacao_exception_exit(string_java_lang_InternalError,
4433 "Native stub size %d is to small for current stub size %d",
4434 NATIVESTUBSIZE, (s4) (cd->mcodeptr - s));
4437 #if defined(STATISTICS)
4439 count_nstub_len += NATIVESTUBSIZE;
4442 /* release dump area */
4444 dump_release(dumpsize);
4450 /* function: removenativestub **************************************************
4452 removes a previously created native-stub from memory
4454 *******************************************************************************/
/* removenativestub: free the NATIVESTUBSIZE bytes allocated by
   createnativestub() */
4456 void removenativestub(u1 *stub)
4458 CFREE(stub, NATIVESTUBSIZE);
4463 * These are local overrides for various environment variables in Emacs.
4464 * Please do not remove this and leave it at the end of the file, where
4465 * Emacs will automagically detect them.
4466 * ---------------------------------------------------------------------
4469 * indent-tabs-mode: t