1 /* vm/jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996-2005 R. Grafl, A. Krall, C. Kruegel, C. Oates,
4 R. Obermaisser, M. Platter, M. Probst, S. Ring, E. Steiner,
5 C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich, J. Wenninger,
6 Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
25 Contact: cacao@complang.tuwien.ac.at
27 Authors: Andreas Krall
30 $Id: codegen.c 1757 2004-12-13 16:58:37Z twisti $
40 #include "native/native.h"
41 #include "vm/global.h"
42 #include "vm/builtin.h"
43 #include "vm/loader.h"
44 #include "vm/tables.h"
45 #include "vm/jit/asmpart.h"
46 #include "vm/jit/jit.h"
47 #include "vm/jit/reg.h"
48 #include "vm/jit/parse.h"
49 #include "vm/jit/x86_64/arch.h"
50 #include "vm/jit/x86_64/codegen.h"
51 #include "vm/jit/x86_64/emitfuncs.h"
52 #include "vm/jit/x86_64/types.h"
53 #include "vm/jit/x86_64/asmoffsets.h"
56 /* register description - array ***********************************************/
58 /* #define REG_RES 0 reserved register for OS or code generator */
59 /* #define REG_RET 1 return value register */
60 /* #define REG_EXC 2 exception value register (only old jit) */
61 /* #define REG_SAV 3 (callee) saved register */
62 /* #define REG_TMP 4 scratch temporary register (caller saved) */
63 /* #define REG_ARG 5 argument register (caller saved) */
65 /* #define REG_END -1 last entry in tables */
67 static int nregdescint[] = {
/* Integer register usage table, one entry per machine register in
   hardware encoding order (presumably rax..r15 on x86_64 -- confirm
   against arch.h).  Roles follow the REG_* legend above:
   RET / ARG / TMP / RES / SAV.
   NOTE(review): the terminating REG_END entry and the closing brace
   are not visible in this listing. */
68 REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
69 REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
74 static int nregdescfloat[] = {
/* Floating point register usage table, one entry per register
   (presumably xmm0..xmm15 in encoding order -- confirm against
   arch.h): the first eight are argument registers, the next three are
   reserved, the remaining five are scratch temporaries.
   NOTE(review): the terminating REG_END entry and the closing brace
   are not visible in this listing. */
75 REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG,
76 REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
81 /* Include independent code generation stuff -- include after register */
82 /* descriptions to avoid extern definitions. */
84 #include "vm/jit/codegen.inc"
85 #include "vm/jit/reg.inc"
87 #include "vm/jit/lsra.inc"
91 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* Restart a critical section for a suspended thread: look up the
   thread's saved instruction pointer (RIP) in the critical-section
   table and, if it lies inside a registered region, rewrite RIP in the
   saved context to the value thread_checkcritical() returned
   (presumably the region's restart address -- confirm).
   NOTE(review): braces, the declaration of 'critical' and any guard
   against a NULL lookup result are not visible in this listing. */
92 void thread_restartcriticalsection(ucontext_t *uc)
96 critical = thread_checkcritical((void *) uc->uc_mcontext.gregs[REG_RIP]);
99 uc->uc_mcontext.gregs[REG_RIP] = (u8) critical;
104 /* NullPointerException signal handler for hardware null pointer check */
/* Converts a hardware fault (SIGSEGV/SIGBUS, installed by
   init_exceptions) into a Java NullPointerException: allocates the
   exception object and redirects the interrupted context so execution
   resumes in asm_handle_exception.
   NOTE(review): braces and the declaration of 'nsig' (presumably a
   sigset_t) are not visible in this listing. */
106 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
110 /* long faultaddr; */
112 struct ucontext *_uc = (struct ucontext *) _p;
113 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
114 struct sigaction act;
115 java_objectheader *xptr;
117 /* Reset signal handler - necessary for SysV, does no harm for BSD */
120 /* instr = *((int*)(sigctx->rip)); */
121 /* faultaddr = sigctx->sc_regs[(instr >> 16) & 0x1f]; */
123 /* if (faultaddr == 0) { */
124 act.sa_sigaction = (functionptr) catch_NullPointerException; /* reinstall handler */
125 act.sa_flags = SA_SIGINFO;
126 sigaction(sig, &act, NULL);
/* the signal is blocked while its handler runs; unblock it so a
   later null pointer fault is delivered again */
129 sigaddset(&nsig, sig);
130 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
/* allocate the Java exception object to be thrown */
132 xptr = new_nullpointerexception();
/* hand exception object and faulting PC to the exception handling
   stub in its conventional registers, then resume there */
134 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
135 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
136 sigctx->rip = (u8) asm_handle_exception;
141 /* faultaddr += (long) ((instr << 16) >> 16); */
142 /* fprintf(stderr, "faulting address: 0x%08x\n", faultaddr); */
143 /* panic("Stack overflow"); */
148 /* ArithmeticException signal handler for hardware divide by zero check */
/* Converts a SIGFPE (installed by init_exceptions) into a Java
   ArithmeticException, analogous to catch_NullPointerException above.
   NOTE(review): braces and the declaration of 'nsig' are not visible
   in this listing. */
150 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
154 struct ucontext *_uc = (struct ucontext *) _p;
155 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
156 struct sigaction act;
157 java_objectheader *xptr;
159 /* Reset signal handler - necessary for SysV, does no harm for BSD */
161 act.sa_sigaction = (functionptr) catch_ArithmeticException; /* reinstall handler */
162 act.sa_flags = SA_SIGINFO;
163 sigaction(sig, &act, NULL);
/* re-enable delivery of this signal for subsequent faults */
166 sigaddset(&nsig, sig);
167 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
/* allocate the Java exception object to be thrown */
169 xptr = new_arithmeticexception();
/* hand exception object and faulting PC to the exception handling
   stub in its conventional registers, then resume there */
171 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
172 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
173 sigctx->rip = (u8) asm_handle_exception;
/* init_exceptions: install the signal handlers that convert hardware
   faults into Java exceptions -- SIGSEGV/SIGBUS ->
   catch_NullPointerException, SIGFPE -> catch_ArithmeticException.
   NOTE(review): braces are not visible in this listing. */
179 void init_exceptions(void)
181 struct sigaction act;
183 /* install signal handlers we need to convert to exceptions */
184 sigemptyset(&act.sa_mask);
/* null pointer faults may surface as SIGSEGV or SIGBUS depending on
   the platform; install the same handler for both */
188 act.sa_sigaction = (functionptr) catch_NullPointerException;
189 act.sa_flags = SA_SIGINFO;
190 sigaction(SIGSEGV, &act, NULL);
194 act.sa_sigaction = (functionptr) catch_NullPointerException;
195 act.sa_flags = SA_SIGINFO;
196 sigaction(SIGBUS, &act, NULL);
/* hardware divide-by-zero check */
200 act.sa_sigaction = (functionptr) catch_ArithmeticException;
201 act.sa_flags = SA_SIGINFO;
202 sigaction(SIGFPE, &act, NULL);
206 /* function codegen ************************************************************
208 generates machine code
210 *******************************************************************************/
212 void codegen(methodinfo *m, codegendata *cd, registerdata *rd)
214 s4 len, s1, s2, s3, d;
229 /* space to save used callee saved registers */
231 savedregs_num += (rd->savintregcnt - rd->maxsavintreguse);
232 savedregs_num += (rd->savfltregcnt - rd->maxsavfltreguse);
234 parentargs_base = rd->maxmemuse + savedregs_num;
236 #if defined(USE_THREADS) /* space to save argument of monitor_enter */
238 if (checksync && (m->flags & ACC_SYNCHRONIZED))
243 /* keep stack 16-byte aligned for calls into native code e.g. libc or jni */
244 /* (alignment problems with movaps) */
246 if (!(parentargs_base & 0x1)) {
250 /* create method header */
252 (void) dseg_addaddress(cd, m); /* MethodPointer */
253 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
255 #if defined(USE_THREADS)
257 /* IsSync contains the offset relative to the stack pointer for the
258 argument of monitor_exit used in the exception handler. Since the
259 offset could be zero and give a wrong meaning of the flag it is
263 if (checksync && (m->flags & ACC_SYNCHRONIZED))
264 (void) dseg_adds4(cd, (rd->maxmemuse + 1) * 8); /* IsSync */
269 (void) dseg_adds4(cd, 0); /* IsSync */
271 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
272 (void) dseg_adds4(cd, rd->savintregcnt - rd->maxsavintreguse);/* IntSave */
273 (void) dseg_adds4(cd, rd->savfltregcnt - rd->maxsavfltreguse);/* FltSave */
274 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
276 /* create exception table */
278 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
279 dseg_addtarget(cd, ex->start);
280 dseg_addtarget(cd, ex->end);
281 dseg_addtarget(cd, ex->handler);
282 (void) dseg_addaddress(cd, ex->catchtype);
285 /* initialize mcode variables */
287 cd->mcodeptr = (u1 *) cd->mcodebase;
288 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
289 MCODECHECK(128 + m->paramcount);
291 /* create stack frame (if necessary) */
293 if (parentargs_base) {
294 x86_64_alu_imm_reg(cd, X86_64_SUB, parentargs_base * 8, REG_SP);
297 /* save return address and used callee saved registers */
300 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
301 p--; x86_64_mov_reg_membase(cd, rd->savintregs[i], REG_SP, p * 8);
303 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
304 p--; x86_64_movq_reg_membase(cd, rd->savfltregs[i], REG_SP, p * 8);
307 /* take arguments out of register or stack frame */
309 for (p = 0, l = 0, s1 = 0, s2 = 0; p < m->paramcount; p++) {
310 t = m->paramtypes[p];
311 var = &(rd->locals[l][t]);
313 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
316 if (IS_INT_LNG_TYPE(t)) {
323 if (IS_INT_LNG_TYPE(t)) { /* integer args */
324 if (s1 < INT_ARG_CNT) { /* register arguments */
325 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
326 M_INTMOVE(rd->argintregs[s1], var->regoff);
328 } else { /* reg arg -> spilled */
329 x86_64_mov_reg_membase(cd, rd->argintregs[s1], REG_SP, var->regoff * 8);
332 } else { /* stack arguments */
333 pa = s1 - INT_ARG_CNT;
334 if (s2 >= FLT_ARG_CNT) {
335 pa += s2 - FLT_ARG_CNT;
337 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
338 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff); /* + 8 for return address */
339 } else { /* stack arg -> spilled */
340 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1); /* + 8 for return address */
341 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, var->regoff * 8);
346 } else { /* floating args */
347 if (s2 < FLT_ARG_CNT) { /* register arguments */
348 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
349 M_FLTMOVE(rd->argfltregs[s2], var->regoff);
351 } else { /* reg arg -> spilled */
352 x86_64_movq_reg_membase(cd, rd->argfltregs[s2], REG_SP, var->regoff * 8);
355 } else { /* stack arguments */
356 pa = s2 - FLT_ARG_CNT;
357 if (s1 >= INT_ARG_CNT) {
358 pa += s1 - INT_ARG_CNT;
360 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
361 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff);
364 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
365 x86_64_movq_reg_membase(cd, REG_FTMP1, REG_SP, var->regoff * 8);
372 /* save monitorenter argument */
374 #if defined(USE_THREADS)
375 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
378 if (m->flags & ACC_STATIC) {
379 func_enter = (u8) builtin_staticmonitorenter;
380 x86_64_mov_imm_reg(cd, (s8) m->class, REG_ITMP1);
381 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, rd->maxmemuse * 8);
384 func_enter = (u8) builtin_monitorenter;
385 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, rd->maxmemuse * 8);
388 /* call monitorenter function */
390 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
391 x86_64_mov_imm_reg(cd, func_enter, REG_ITMP1);
392 x86_64_call_reg(cd, REG_ITMP1);
396 /* copy argument registers to stack and call trace function with pointer
397 to arguments on stack.
400 x86_64_alu_imm_reg(cd, X86_64_SUB, (6 + 8 + 1 + 1) * 8, REG_SP);
402 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 1 * 8);
403 x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 2 * 8);
404 x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 3 * 8);
405 x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 4 * 8);
406 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 5 * 8);
407 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 6 * 8);
409 x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 7 * 8);
410 x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 8 * 8);
411 x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 9 * 8);
412 x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 10 * 8);
413 x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 11 * 8);
414 x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 12 * 8);
415 x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 13 * 8);
416 x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 14 * 8);
418 /* show integer hex code for float arguments */
420 for (p = 0, l = 0; p < m->paramcount; p++) {
421 t = m->paramtypes[p];
423 if (IS_FLT_DBL_TYPE(t)) {
424 for (s1 = (m->paramcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : m->paramcount - 2; s1 >= p; s1--) {
425 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
428 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
433 x86_64_mov_imm_reg(cd, (s8) m, REG_ITMP2);
434 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
435 x86_64_mov_imm_reg(cd, (s8) builtin_trace_args, REG_ITMP1);
436 x86_64_call_reg(cd, REG_ITMP1);
438 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[0]);
439 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[1]);
440 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[2]);
441 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[3]);
442 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[4]);
443 x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, rd->argintregs[5]);
445 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[0]);
446 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[1]);
447 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[2]);
448 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[3]);
449 x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[4]);
450 x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[5]);
451 x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[6]);
452 x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, rd->argfltregs[7]);
454 x86_64_alu_imm_reg(cd, X86_64_ADD, (6 + 8 + 1 + 1) * 8, REG_SP);
459 /* end of header generation */
461 /* walk through all basic blocks */
462 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
464 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
466 if (bptr->flags >= BBREACHED) {
468 /* branch resolving */
471 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
472 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
477 /* copy interface registers to their destination */
481 MCODECHECK(64 + len);
482 while (src != NULL) {
484 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
485 if (bptr->type == BBTYPE_SBR) {
486 d = reg_of_var(rd, src, REG_ITMP1);
487 x86_64_pop_reg(cd, d);
488 store_reg_to_var_int(src, d);
490 } else if (bptr->type == BBTYPE_EXH) {
491 d = reg_of_var(rd, src, REG_ITMP1);
492 M_INTMOVE(REG_ITMP1, d);
493 store_reg_to_var_int(src, d);
497 d = reg_of_var(rd, src, REG_ITMP1);
498 if ((src->varkind != STACKVAR)) {
500 if (IS_FLT_DBL_TYPE(s2)) {
501 s1 = rd->interfaces[len][s2].regoff;
502 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
506 x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
508 store_reg_to_var_flt(src, d);
511 s1 = rd->interfaces[len][s2].regoff;
512 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
516 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
518 store_reg_to_var_int(src, d);
525 /* walk through all instructions */
529 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
531 MCODECHECK(64); /* an instruction usually needs < 64 words */
534 case ICMD_NOP: /* ... ==> ... */
537 case ICMD_NULLCHECKPOP: /* ..., objectref ==> ... */
538 if (src->flags & INMEMORY) {
539 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
542 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
544 x86_64_jcc(cd, X86_64_CC_E, 0);
545 codegen_addxnullrefs(cd, cd->mcodeptr);
548 /* constant operations ************************************************/
550 case ICMD_ICONST: /* ... ==> ..., constant */
551 /* op1 = 0, val.i = constant */
553 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
554 if (iptr->val.i == 0) {
555 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
557 x86_64_movl_imm_reg(cd, iptr->val.i, d);
559 store_reg_to_var_int(iptr->dst, d);
562 case ICMD_ACONST: /* ... ==> ..., constant */
563 /* op1 = 0, val.a = constant */
565 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
566 if (iptr->val.a == 0) {
567 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
569 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, d);
571 store_reg_to_var_int(iptr->dst, d);
574 case ICMD_LCONST: /* ... ==> ..., constant */
575 /* op1 = 0, val.l = constant */
577 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
578 if (iptr->val.l == 0) {
579 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
581 x86_64_mov_imm_reg(cd, iptr->val.l, d);
583 store_reg_to_var_int(iptr->dst, d);
586 case ICMD_FCONST: /* ... ==> ..., constant */
587 /* op1 = 0, val.f = constant */
589 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
590 a = dseg_addfloat(cd, iptr->val.f);
591 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + a, d);
592 store_reg_to_var_flt(iptr->dst, d);
595 case ICMD_DCONST: /* ... ==> ..., constant */
596 /* op1 = 0, val.d = constant */
598 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
599 a = dseg_adddouble(cd, iptr->val.d);
600 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, d);
601 store_reg_to_var_flt(iptr->dst, d);
605 /* load/store operations **********************************************/
607 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
608 /* op1 = local variable */
610 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
611 if ((iptr->dst->varkind == LOCALVAR) &&
612 (iptr->dst->varnum == iptr->op1)) {
615 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
616 if (var->flags & INMEMORY) {
617 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
618 store_reg_to_var_int(iptr->dst, d);
621 if (iptr->dst->flags & INMEMORY) {
622 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
625 M_INTMOVE(var->regoff, d);
630 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
631 case ICMD_ALOAD: /* op1 = local variable */
633 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
634 if ((iptr->dst->varkind == LOCALVAR) &&
635 (iptr->dst->varnum == iptr->op1)) {
638 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
639 if (var->flags & INMEMORY) {
640 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
641 store_reg_to_var_int(iptr->dst, d);
644 if (iptr->dst->flags & INMEMORY) {
645 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
648 M_INTMOVE(var->regoff, d);
653 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
654 case ICMD_DLOAD: /* op1 = local variable */
656 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
657 if ((iptr->dst->varkind == LOCALVAR) &&
658 (iptr->dst->varnum == iptr->op1)) {
661 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
662 if (var->flags & INMEMORY) {
663 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
664 store_reg_to_var_flt(iptr->dst, d);
667 if (iptr->dst->flags & INMEMORY) {
668 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
671 M_FLTMOVE(var->regoff, d);
676 case ICMD_ISTORE: /* ..., value ==> ... */
677 case ICMD_LSTORE: /* op1 = local variable */
680 if ((src->varkind == LOCALVAR) &&
681 (src->varnum == iptr->op1)) {
684 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
685 if (var->flags & INMEMORY) {
686 var_to_reg_int(s1, src, REG_ITMP1);
687 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
690 var_to_reg_int(s1, src, var->regoff);
691 M_INTMOVE(s1, var->regoff);
695 case ICMD_FSTORE: /* ..., value ==> ... */
696 case ICMD_DSTORE: /* op1 = local variable */
698 if ((src->varkind == LOCALVAR) &&
699 (src->varnum == iptr->op1)) {
702 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
703 if (var->flags & INMEMORY) {
704 var_to_reg_flt(s1, src, REG_FTMP1);
705 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
708 var_to_reg_flt(s1, src, var->regoff);
709 M_FLTMOVE(s1, var->regoff);
714 /* pop/dup/swap operations ********************************************/
716 /* attention: double and longs are only one entry in CACAO ICMDs */
718 case ICMD_POP: /* ..., value ==> ... */
719 case ICMD_POP2: /* ..., value, value ==> ... */
722 case ICMD_DUP: /* ..., a ==> ..., a, a */
723 M_COPY(src, iptr->dst);
726 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
728 M_COPY(src, iptr->dst);
729 M_COPY(src->prev, iptr->dst->prev);
730 M_COPY(iptr->dst, iptr->dst->prev->prev);
733 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
735 M_COPY(src, iptr->dst);
736 M_COPY(src->prev, iptr->dst->prev);
737 M_COPY(src->prev->prev, iptr->dst->prev->prev);
738 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
741 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
743 M_COPY(src, iptr->dst);
744 M_COPY(src->prev, iptr->dst->prev);
747 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
749 M_COPY(src, iptr->dst);
750 M_COPY(src->prev, iptr->dst->prev);
751 M_COPY(src->prev->prev, iptr->dst->prev->prev);
752 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
753 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
756 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
758 M_COPY(src, iptr->dst);
759 M_COPY(src->prev, iptr->dst->prev);
760 M_COPY(src->prev->prev, iptr->dst->prev->prev);
761 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
762 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
763 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
766 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
768 M_COPY(src, iptr->dst->prev);
769 M_COPY(src->prev, iptr->dst);
773 /* integer operations *************************************************/
775 case ICMD_INEG: /* ..., value ==> ..., - value */
777 d = reg_of_var(rd, iptr->dst, REG_NULL);
778 if (iptr->dst->flags & INMEMORY) {
779 if (src->flags & INMEMORY) {
780 if (src->regoff == iptr->dst->regoff) {
781 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
784 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
785 x86_64_negl_reg(cd, REG_ITMP1);
786 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
790 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
791 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
795 if (src->flags & INMEMORY) {
796 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
797 x86_64_negl_reg(cd, d);
800 M_INTMOVE(src->regoff, iptr->dst->regoff);
801 x86_64_negl_reg(cd, iptr->dst->regoff);
806 case ICMD_LNEG: /* ..., value ==> ..., - value */
808 d = reg_of_var(rd, iptr->dst, REG_NULL);
809 if (iptr->dst->flags & INMEMORY) {
810 if (src->flags & INMEMORY) {
811 if (src->regoff == iptr->dst->regoff) {
812 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
815 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
816 x86_64_neg_reg(cd, REG_ITMP1);
817 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
821 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
822 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
826 if (src->flags & INMEMORY) {
827 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
828 x86_64_neg_reg(cd, iptr->dst->regoff);
831 M_INTMOVE(src->regoff, iptr->dst->regoff);
832 x86_64_neg_reg(cd, iptr->dst->regoff);
837 case ICMD_I2L: /* ..., value ==> ..., value */
839 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
840 if (src->flags & INMEMORY) {
841 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
844 x86_64_movslq_reg_reg(cd, src->regoff, d);
846 store_reg_to_var_int(iptr->dst, d);
849 case ICMD_L2I: /* ..., value ==> ..., value */
851 var_to_reg_int(s1, src, REG_ITMP1);
852 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
854 store_reg_to_var_int(iptr->dst, d);
857 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
859 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
860 if (src->flags & INMEMORY) {
861 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
864 x86_64_movsbq_reg_reg(cd, src->regoff, d);
866 store_reg_to_var_int(iptr->dst, d);
869 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
871 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
872 if (src->flags & INMEMORY) {
873 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
876 x86_64_movzwq_reg_reg(cd, src->regoff, d);
878 store_reg_to_var_int(iptr->dst, d);
881 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
883 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
884 if (src->flags & INMEMORY) {
885 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
888 x86_64_movswq_reg_reg(cd, src->regoff, d);
890 store_reg_to_var_int(iptr->dst, d);
894 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
896 d = reg_of_var(rd, iptr->dst, REG_NULL);
897 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
900 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
901 /* val.i = constant */
903 d = reg_of_var(rd, iptr->dst, REG_NULL);
904 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
907 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
909 d = reg_of_var(rd, iptr->dst, REG_NULL);
910 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
913 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
914 /* val.l = constant */
916 d = reg_of_var(rd, iptr->dst, REG_NULL);
917 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
920 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
922 d = reg_of_var(rd, iptr->dst, REG_NULL);
923 if (iptr->dst->flags & INMEMORY) {
924 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
925 if (src->prev->regoff == iptr->dst->regoff) {
926 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
927 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
930 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
931 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
932 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
935 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
936 M_INTMOVE(src->prev->regoff, REG_ITMP1);
937 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
938 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
940 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
941 if (src->prev->regoff == iptr->dst->regoff) {
942 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
945 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
946 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
947 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
951 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
952 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
956 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
957 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
958 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
960 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
961 M_INTMOVE(src->prev->regoff, d);
962 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
964 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
965 /* workaround for reg alloc */
966 if (src->regoff == iptr->dst->regoff) {
967 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
968 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
969 M_INTMOVE(REG_ITMP1, d);
972 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
973 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
977 /* workaround for reg alloc */
978 if (src->regoff == iptr->dst->regoff) {
979 M_INTMOVE(src->prev->regoff, REG_ITMP1);
980 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
981 M_INTMOVE(REG_ITMP1, d);
984 M_INTMOVE(src->prev->regoff, d);
985 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
991 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
992 /* val.i = constant */
994 d = reg_of_var(rd, iptr->dst, REG_NULL);
995 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
998 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1000 d = reg_of_var(rd, iptr->dst, REG_NULL);
1001 if (iptr->dst->flags & INMEMORY) {
1002 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1003 if (src->prev->regoff == iptr->dst->regoff) {
1004 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1005 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1008 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1009 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1010 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1013 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1014 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1015 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1016 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1018 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1019 if (src->prev->regoff == iptr->dst->regoff) {
1020 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1023 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1024 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1025 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1029 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1030 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1034 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1035 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1036 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1038 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1039 M_INTMOVE(src->prev->regoff, d);
1040 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1042 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1043 /* workaround for reg alloc */
1044 if (src->regoff == iptr->dst->regoff) {
1045 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1046 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1047 M_INTMOVE(REG_ITMP1, d);
1050 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1051 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1055 /* workaround for reg alloc */
1056 if (src->regoff == iptr->dst->regoff) {
1057 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1058 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1059 M_INTMOVE(REG_ITMP1, d);
1062 M_INTMOVE(src->prev->regoff, d);
1063 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1069 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1070 /* val.l = constant */
1072 d = reg_of_var(rd, iptr->dst, REG_NULL);
1073 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1076 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1078 d = reg_of_var(rd, iptr->dst, REG_NULL);
1079 if (iptr->dst->flags & INMEMORY) {
1080 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1081 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1082 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1083 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1085 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1086 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1087 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1088 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1090 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1091 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1092 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1093 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1096 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1097 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1098 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1102 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1103 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1104 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1106 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1107 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1108 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1110 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1111 M_INTMOVE(src->regoff, iptr->dst->regoff);
1112 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1115 if (src->regoff == iptr->dst->regoff) {
1116 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1119 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1120 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1126 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1127 /* val.i = constant */
1129 d = reg_of_var(rd, iptr->dst, REG_NULL);
1130 if (iptr->dst->flags & INMEMORY) {
1131 if (src->flags & INMEMORY) {
1132 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1133 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1136 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1137 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1141 if (src->flags & INMEMORY) {
1142 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1145 if (iptr->val.i == 2) {
1146 M_INTMOVE(src->regoff, iptr->dst->regoff);
1147 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1150 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
1156 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1158 d = reg_of_var(rd, iptr->dst, REG_NULL);
1159 if (iptr->dst->flags & INMEMORY) {
1160 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1161 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1162 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1163 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1165 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1166 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1167 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1168 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1170 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1171 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1172 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1173 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1176 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1177 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1178 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1182 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1183 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1184 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1186 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1187 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1188 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1190 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1191 M_INTMOVE(src->regoff, iptr->dst->regoff);
1192 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1195 if (src->regoff == iptr->dst->regoff) {
1196 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1199 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1200 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
1206 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1207 /* val.l = constant */
1209 d = reg_of_var(rd, iptr->dst, REG_NULL);
1210 if (iptr->dst->flags & INMEMORY) {
1211 if (src->flags & INMEMORY) {
1212 if (x86_64_is_imm32(iptr->val.l)) {
1213 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1216 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1217 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1219 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1222 if (x86_64_is_imm32(iptr->val.l)) {
1223 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1226 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1227 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1229 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1233 if (src->flags & INMEMORY) {
1234 if (x86_64_is_imm32(iptr->val.l)) {
1235 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1238 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1239 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1243 /* should match in many cases */
1244 if (iptr->val.l == 2) {
1245 M_INTMOVE(src->regoff, iptr->dst->regoff);
1246 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1249 if (x86_64_is_imm32(iptr->val.l)) {
1250 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1253 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1254 M_INTMOVE(src->regoff, iptr->dst->regoff);
1255 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
1262 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1264 d = reg_of_var(rd, iptr->dst, REG_NULL);
1265 if (src->prev->flags & INMEMORY) {
1266 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1269 M_INTMOVE(src->prev->regoff, RAX);
1272 if (src->flags & INMEMORY) {
1273 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1276 M_INTMOVE(src->regoff, REG_ITMP3);
1280 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1281 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1282 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1283 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1285 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1287 x86_64_idivl_reg(cd, REG_ITMP3);
1289 if (iptr->dst->flags & INMEMORY) {
1290 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1291 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1294 M_INTMOVE(RAX, iptr->dst->regoff);
1296 if (iptr->dst->regoff != RDX) {
1297 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1302 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1304 d = reg_of_var(rd, iptr->dst, REG_NULL);
1305 if (src->prev->flags & INMEMORY) {
1306 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1309 M_INTMOVE(src->prev->regoff, RAX);
1312 if (src->flags & INMEMORY) {
1313 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1316 M_INTMOVE(src->regoff, REG_ITMP3);
1320 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1321 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1322 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1323 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1324 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1326 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1328 x86_64_idivl_reg(cd, REG_ITMP3);
1330 if (iptr->dst->flags & INMEMORY) {
1331 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1332 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1335 M_INTMOVE(RDX, iptr->dst->regoff);
1337 if (iptr->dst->regoff != RDX) {
1338 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1343 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1344 /* val.i = constant */
1346 var_to_reg_int(s1, src, REG_ITMP1);
1347 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1348 M_INTMOVE(s1, REG_ITMP1);
1349 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1350 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1351 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1352 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1353 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1354 store_reg_to_var_int(iptr->dst, d);
1357 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1358 /* val.i = constant */
1360 var_to_reg_int(s1, src, REG_ITMP1);
1361 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1362 M_INTMOVE(s1, REG_ITMP1);
1363 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1364 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1365 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1366 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1367 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1368 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1369 store_reg_to_var_int(iptr->dst, d);
1373 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1375 d = reg_of_var(rd, iptr->dst, REG_NULL);
1376 if (src->prev->flags & INMEMORY) {
1377 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1380 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1383 if (src->flags & INMEMORY) {
1384 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1387 M_INTMOVE(src->regoff, REG_ITMP3);
1391 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1392 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1393 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1394 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1395 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1397 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1399 x86_64_idiv_reg(cd, REG_ITMP3);
1401 if (iptr->dst->flags & INMEMORY) {
1402 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1403 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1406 M_INTMOVE(RAX, iptr->dst->regoff);
1408 if (iptr->dst->regoff != RDX) {
1409 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1414 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1416 d = reg_of_var(rd, iptr->dst, REG_NULL);
1417 if (src->prev->flags & INMEMORY) {
1418 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1421 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1424 if (src->flags & INMEMORY) {
1425 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1428 M_INTMOVE(src->regoff, REG_ITMP3);
1432 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1433 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1434 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1435 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1436 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1437 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1439 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1441 x86_64_idiv_reg(cd, REG_ITMP3);
1443 if (iptr->dst->flags & INMEMORY) {
1444 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1445 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1448 M_INTMOVE(RDX, iptr->dst->regoff);
1450 if (iptr->dst->regoff != RDX) {
1451 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1456 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1457 /* val.i = constant */
1459 var_to_reg_int(s1, src, REG_ITMP1);
1460 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1461 M_INTMOVE(s1, REG_ITMP1);
1462 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1463 x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1464 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1465 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1466 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1467 store_reg_to_var_int(iptr->dst, d);
1470 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1471 /* val.l = constant */
1473 var_to_reg_int(s1, src, REG_ITMP1);
1474 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1475 M_INTMOVE(s1, REG_ITMP1);
1476 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1477 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1478 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1479 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1480 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1481 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1482 store_reg_to_var_int(iptr->dst, d);
1485 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1487 d = reg_of_var(rd, iptr->dst, REG_NULL);
1488 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1491 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1492 /* val.i = constant */
1494 d = reg_of_var(rd, iptr->dst, REG_NULL);
1495 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1498 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1500 d = reg_of_var(rd, iptr->dst, REG_NULL);
1501 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1504 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1505 /* val.i = constant */
1507 d = reg_of_var(rd, iptr->dst, REG_NULL);
1508 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1511 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1513 d = reg_of_var(rd, iptr->dst, REG_NULL);
1514 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1517 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1518 /* val.i = constant */
1520 d = reg_of_var(rd, iptr->dst, REG_NULL);
1521 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1524 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1526 d = reg_of_var(rd, iptr->dst, REG_NULL);
1527 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1530 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1531 /* val.i = constant */
1533 d = reg_of_var(rd, iptr->dst, REG_NULL);
1534 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1537 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1539 d = reg_of_var(rd, iptr->dst, REG_NULL);
1540 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1543 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1544 /* val.i = constant */
1546 d = reg_of_var(rd, iptr->dst, REG_NULL);
1547 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1550 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1552 d = reg_of_var(rd, iptr->dst, REG_NULL);
1553 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1556 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1557 /* val.l = constant */
1559 d = reg_of_var(rd, iptr->dst, REG_NULL);
1560 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1563 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1565 d = reg_of_var(rd, iptr->dst, REG_NULL);
1566 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1569 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1570 /* val.i = constant */
1572 d = reg_of_var(rd, iptr->dst, REG_NULL);
1573 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1576 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1578 d = reg_of_var(rd, iptr->dst, REG_NULL);
1579 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1582 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1583 /* val.l = constant */
1585 d = reg_of_var(rd, iptr->dst, REG_NULL);
1586 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1589 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1591 d = reg_of_var(rd, iptr->dst, REG_NULL);
1592 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1595 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1596 /* val.i = constant */
1598 d = reg_of_var(rd, iptr->dst, REG_NULL);
1599 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1602 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1604 d = reg_of_var(rd, iptr->dst, REG_NULL);
1605 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1608 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1609 /* val.l = constant */
1611 d = reg_of_var(rd, iptr->dst, REG_NULL);
1612 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1615 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1617 d = reg_of_var(rd, iptr->dst, REG_NULL);
1618 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1621 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1622 /* val.i = constant */
1624 d = reg_of_var(rd, iptr->dst, REG_NULL);
1625 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1628 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1630 d = reg_of_var(rd, iptr->dst, REG_NULL);
1631 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1634 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1635 /* val.l = constant */
1637 d = reg_of_var(rd, iptr->dst, REG_NULL);
1638 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
1642 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1643 /* op1 = variable, val.i = constant */
1645 /* using inc and dec is definitely faster than add -- tested */
1648 var = &(rd->locals[iptr->op1][TYPE_INT]);
1650 if (var->flags & INMEMORY) {
1651 if (iptr->val.i == 1) {
1652 x86_64_incl_membase(cd, REG_SP, d * 8);
1654 } else if (iptr->val.i == -1) {
1655 x86_64_decl_membase(cd, REG_SP, d * 8);
1658 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1662 if (iptr->val.i == 1) {
1663 x86_64_incl_reg(cd, d);
1665 } else if (iptr->val.i == -1) {
1666 x86_64_decl_reg(cd, d);
1669 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1675 /* floating operations ************************************************/
1677 case ICMD_FNEG: /* ..., value ==> ..., - value */
1679 var_to_reg_flt(s1, src, REG_FTMP1);
1680 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1681 a = dseg_adds4(cd, 0x80000000);
1683 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1684 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1685 store_reg_to_var_flt(iptr->dst, d);
1688 case ICMD_DNEG: /* ..., value ==> ..., - value */
1690 var_to_reg_flt(s1, src, REG_FTMP1);
1691 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1692 a = dseg_adds8(cd, 0x8000000000000000);
1694 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1695 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1696 store_reg_to_var_flt(iptr->dst, d);
1699 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1701 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1702 var_to_reg_flt(s2, src, REG_FTMP2);
1703 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1705 x86_64_addss_reg_reg(cd, s2, d);
1706 } else if (s2 == d) {
1707 x86_64_addss_reg_reg(cd, s1, d);
1710 x86_64_addss_reg_reg(cd, s2, d);
1712 store_reg_to_var_flt(iptr->dst, d);
1715 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1717 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1718 var_to_reg_flt(s2, src, REG_FTMP2);
1719 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1721 x86_64_addsd_reg_reg(cd, s2, d);
1722 } else if (s2 == d) {
1723 x86_64_addsd_reg_reg(cd, s1, d);
1726 x86_64_addsd_reg_reg(cd, s2, d);
1728 store_reg_to_var_flt(iptr->dst, d);
1731 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1733 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1734 var_to_reg_flt(s2, src, REG_FTMP2);
1735 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1737 M_FLTMOVE(s2, REG_FTMP2);
1741 x86_64_subss_reg_reg(cd, s2, d);
1742 store_reg_to_var_flt(iptr->dst, d);
1745 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1747 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1748 var_to_reg_flt(s2, src, REG_FTMP2);
1749 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1751 M_FLTMOVE(s2, REG_FTMP2);
1755 x86_64_subsd_reg_reg(cd, s2, d);
1756 store_reg_to_var_flt(iptr->dst, d);
1759 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1761 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1762 var_to_reg_flt(s2, src, REG_FTMP2);
1763 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1765 x86_64_mulss_reg_reg(cd, s2, d);
1766 } else if (s2 == d) {
1767 x86_64_mulss_reg_reg(cd, s1, d);
1770 x86_64_mulss_reg_reg(cd, s2, d);
1772 store_reg_to_var_flt(iptr->dst, d);
1775 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1777 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1778 var_to_reg_flt(s2, src, REG_FTMP2);
1779 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1781 x86_64_mulsd_reg_reg(cd, s2, d);
1782 } else if (s2 == d) {
1783 x86_64_mulsd_reg_reg(cd, s1, d);
1786 x86_64_mulsd_reg_reg(cd, s2, d);
1788 store_reg_to_var_flt(iptr->dst, d);
1791 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1793 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1794 var_to_reg_flt(s2, src, REG_FTMP2);
1795 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1797 M_FLTMOVE(s2, REG_FTMP2);
1801 x86_64_divss_reg_reg(cd, s2, d);
1802 store_reg_to_var_flt(iptr->dst, d);
1805 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1807 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1808 var_to_reg_flt(s2, src, REG_FTMP2);
1809 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1811 M_FLTMOVE(s2, REG_FTMP2);
1815 x86_64_divsd_reg_reg(cd, s2, d);
1816 store_reg_to_var_flt(iptr->dst, d);
1819 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1821 var_to_reg_int(s1, src, REG_ITMP1);
1822 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1823 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1824 store_reg_to_var_flt(iptr->dst, d);
1827 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1829 var_to_reg_int(s1, src, REG_ITMP1);
1830 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1831 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1832 store_reg_to_var_flt(iptr->dst, d);
1835 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1837 var_to_reg_int(s1, src, REG_ITMP1);
1838 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1839 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1840 store_reg_to_var_flt(iptr->dst, d);
1843 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1845 var_to_reg_int(s1, src, REG_ITMP1);
1846 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1847 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1848 store_reg_to_var_flt(iptr->dst, d);
1851 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1853 var_to_reg_flt(s1, src, REG_FTMP1);
1854 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1855 x86_64_cvttss2si_reg_reg(cd, s1, d);
1856 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1857 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1858 x86_64_jcc(cd, X86_64_CC_NE, a);
1859 M_FLTMOVE(s1, REG_FTMP1);
1860 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1861 x86_64_call_reg(cd, REG_ITMP2);
1862 M_INTMOVE(REG_RESULT, d);
1863 store_reg_to_var_int(iptr->dst, d);
1866 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1868 var_to_reg_flt(s1, src, REG_FTMP1);
1869 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1870 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1871 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1872 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1873 x86_64_jcc(cd, X86_64_CC_NE, a);
1874 M_FLTMOVE(s1, REG_FTMP1);
1875 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1876 x86_64_call_reg(cd, REG_ITMP2);
1877 M_INTMOVE(REG_RESULT, d);
1878 store_reg_to_var_int(iptr->dst, d);
1881 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1883 var_to_reg_flt(s1, src, REG_FTMP1);
1884 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1885 x86_64_cvttss2siq_reg_reg(cd, s1, d);
1886 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1887 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1888 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1889 x86_64_jcc(cd, X86_64_CC_NE, a);
1890 M_FLTMOVE(s1, REG_FTMP1);
1891 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
1892 x86_64_call_reg(cd, REG_ITMP2);
1893 M_INTMOVE(REG_RESULT, d);
1894 store_reg_to_var_int(iptr->dst, d);
1897 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1899 var_to_reg_flt(s1, src, REG_FTMP1);
1900 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1901 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1902 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1903 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1904 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1905 x86_64_jcc(cd, X86_64_CC_NE, a);
1906 M_FLTMOVE(s1, REG_FTMP1);
1907 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
1908 x86_64_call_reg(cd, REG_ITMP2);
1909 M_INTMOVE(REG_RESULT, d);
1910 store_reg_to_var_int(iptr->dst, d);
1913 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1915 var_to_reg_flt(s1, src, REG_FTMP1);
1916 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1917 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1918 store_reg_to_var_flt(iptr->dst, d);
1921 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1923 var_to_reg_flt(s1, src, REG_FTMP1);
1924 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1925 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1926 store_reg_to_var_flt(iptr->dst, d);
1929 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1930 /* == => 0, < => 1, > => -1 */
1932 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1933 var_to_reg_flt(s2, src, REG_FTMP2);
1934 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1935 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1936 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1937 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1938 x86_64_ucomiss_reg_reg(cd, s1, s2);
1939 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1940 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1941 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1942 store_reg_to_var_int(iptr->dst, d);
1945 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1946 /* == => 0, < => 1, > => -1 */
1948 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1949 var_to_reg_flt(s2, src, REG_FTMP2);
1950 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1951 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1952 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1953 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1954 x86_64_ucomiss_reg_reg(cd, s1, s2);
1955 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1956 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1957 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1958 store_reg_to_var_int(iptr->dst, d);
1961 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1962 /* == => 0, < => 1, > => -1 */
1964 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1965 var_to_reg_flt(s2, src, REG_FTMP2);
1966 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1967 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1968 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1969 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1970 x86_64_ucomisd_reg_reg(cd, s1, s2);
1971 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1972 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1973 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1974 store_reg_to_var_int(iptr->dst, d);
1977 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1978 /* == => 0, < => 1, > => -1 */
1980 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1981 var_to_reg_flt(s2, src, REG_FTMP2);
1982 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1983 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1984 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1985 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1986 x86_64_ucomisd_reg_reg(cd, s1, s2);
1987 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1988 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1989 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1990 store_reg_to_var_int(iptr->dst, d);
1994 /* memory operations **************************************************/
1996 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
1998 var_to_reg_int(s1, src, REG_ITMP1);
1999 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2000 gen_nullptr_check(s1);
2001 x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
2002 store_reg_to_var_int(iptr->dst, d);
2005 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2007 var_to_reg_int(s1, src->prev, REG_ITMP1);
2008 var_to_reg_int(s2, src, REG_ITMP2);
2009 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2010 if (iptr->op1 == 0) {
2011 gen_nullptr_check(s1);
2014 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2015 store_reg_to_var_int(iptr->dst, d);
2018 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2020 var_to_reg_int(s1, src->prev, REG_ITMP1);
2021 var_to_reg_int(s2, src, REG_ITMP2);
2022 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2023 if (iptr->op1 == 0) {
2024 gen_nullptr_check(s1);
2027 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2028 store_reg_to_var_int(iptr->dst, d);
2031 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2033 var_to_reg_int(s1, src->prev, REG_ITMP1);
2034 var_to_reg_int(s2, src, REG_ITMP2);
2035 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2036 if (iptr->op1 == 0) {
2037 gen_nullptr_check(s1);
2040 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2041 store_reg_to_var_int(iptr->dst, d);
2044 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2046 var_to_reg_int(s1, src->prev, REG_ITMP1);
2047 var_to_reg_int(s2, src, REG_ITMP2);
2048 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2049 if (iptr->op1 == 0) {
2050 gen_nullptr_check(s1);
2053 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2054 store_reg_to_var_flt(iptr->dst, d);
2057 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2059 var_to_reg_int(s1, src->prev, REG_ITMP1);
2060 var_to_reg_int(s2, src, REG_ITMP2);
2061 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2062 if (iptr->op1 == 0) {
2063 gen_nullptr_check(s1);
2066 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2067 store_reg_to_var_flt(iptr->dst, d);
2070 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2072 var_to_reg_int(s1, src->prev, REG_ITMP1);
2073 var_to_reg_int(s2, src, REG_ITMP2);
2074 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2075 if (iptr->op1 == 0) {
2076 gen_nullptr_check(s1);
2079 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2080 store_reg_to_var_int(iptr->dst, d);
2083 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2085 var_to_reg_int(s1, src->prev, REG_ITMP1);
2086 var_to_reg_int(s2, src, REG_ITMP2);
2087 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2088 if (iptr->op1 == 0) {
2089 gen_nullptr_check(s1);
2092 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2093 store_reg_to_var_int(iptr->dst, d);
2096 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2098 var_to_reg_int(s1, src->prev, REG_ITMP1);
2099 var_to_reg_int(s2, src, REG_ITMP2);
2100 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2101 if (iptr->op1 == 0) {
2102 gen_nullptr_check(s1);
2105 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2106 store_reg_to_var_int(iptr->dst, d);
2110 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2112 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2113 var_to_reg_int(s2, src->prev, REG_ITMP2);
2114 if (iptr->op1 == 0) {
2115 gen_nullptr_check(s1);
2118 var_to_reg_int(s3, src, REG_ITMP3);
2119 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2122 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2124 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2125 var_to_reg_int(s2, src->prev, REG_ITMP2);
2126 if (iptr->op1 == 0) {
2127 gen_nullptr_check(s1);
2130 var_to_reg_int(s3, src, REG_ITMP3);
2131 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2134 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2136 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2137 var_to_reg_int(s2, src->prev, REG_ITMP2);
2138 if (iptr->op1 == 0) {
2139 gen_nullptr_check(s1);
2142 var_to_reg_int(s3, src, REG_ITMP3);
2143 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2146 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2148 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2149 var_to_reg_int(s2, src->prev, REG_ITMP2);
2150 if (iptr->op1 == 0) {
2151 gen_nullptr_check(s1);
2154 var_to_reg_flt(s3, src, REG_FTMP3);
2155 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2158 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2160 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2161 var_to_reg_int(s2, src->prev, REG_ITMP2);
2162 if (iptr->op1 == 0) {
2163 gen_nullptr_check(s1);
2166 var_to_reg_flt(s3, src, REG_FTMP3);
2167 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2170 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2172 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2173 var_to_reg_int(s2, src->prev, REG_ITMP2);
2174 if (iptr->op1 == 0) {
2175 gen_nullptr_check(s1);
2178 var_to_reg_int(s3, src, REG_ITMP3);
2179 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2182 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2184 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2185 var_to_reg_int(s2, src->prev, REG_ITMP2);
2186 if (iptr->op1 == 0) {
2187 gen_nullptr_check(s1);
2190 var_to_reg_int(s3, src, REG_ITMP3);
2191 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2194 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2196 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2197 var_to_reg_int(s2, src->prev, REG_ITMP2);
2198 if (iptr->op1 == 0) {
2199 gen_nullptr_check(s1);
2202 var_to_reg_int(s3, src, REG_ITMP3);
2203 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2206 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2208 var_to_reg_int(s1, src->prev, REG_ITMP1);
2209 var_to_reg_int(s2, src, REG_ITMP2);
2210 if (iptr->op1 == 0) {
2211 gen_nullptr_check(s1);
2214 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2217 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2219 var_to_reg_int(s1, src->prev, REG_ITMP1);
2220 var_to_reg_int(s2, src, REG_ITMP2);
2221 if (iptr->op1 == 0) {
2222 gen_nullptr_check(s1);
2226 if (x86_64_is_imm32(iptr->val.l)) {
2227 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2230 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2231 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2235 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2237 var_to_reg_int(s1, src->prev, REG_ITMP1);
2238 var_to_reg_int(s2, src, REG_ITMP2);
2239 if (iptr->op1 == 0) {
2240 gen_nullptr_check(s1);
2243 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2246 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2248 var_to_reg_int(s1, src->prev, REG_ITMP1);
2249 var_to_reg_int(s2, src, REG_ITMP2);
2250 if (iptr->op1 == 0) {
2251 gen_nullptr_check(s1);
2254 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2257 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2259 var_to_reg_int(s1, src->prev, REG_ITMP1);
2260 var_to_reg_int(s2, src, REG_ITMP2);
2261 if (iptr->op1 == 0) {
2262 gen_nullptr_check(s1);
2265 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2268 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2270 var_to_reg_int(s1, src->prev, REG_ITMP1);
2271 var_to_reg_int(s2, src, REG_ITMP2);
2272 if (iptr->op1 == 0) {
2273 gen_nullptr_check(s1);
2276 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2280 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2281 /* op1 = type, val.a = field address */
2283 /* If the static fields' class is not yet initialized, we do it */
2284 /* now. The call code is generated later. */
2285 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2286 codegen_addclinitref(cd, cd->mcodeptr, ((fieldinfo *) iptr->val.a)->class);
2288 /* This is just for debugging purposes. Is very difficult to */
2289 /* read patched code. Here we patch the following 5 nop's */
2290 /* so that the real code keeps untouched. */
2291 if (showdisassemble) {
2300 /* This approach is much faster than moving the field address */
2301 /* inline into a register. */
2302 a = dseg_addaddress(cd, &(((fieldinfo *) iptr->val.a)->value));
2303 x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
2304 switch (iptr->op1) {
2306 var_to_reg_int(s2, src, REG_ITMP1);
2307 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2311 var_to_reg_int(s2, src, REG_ITMP1);
2312 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2315 var_to_reg_flt(s2, src, REG_FTMP1);
2316 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2319 var_to_reg_flt(s2, src, REG_FTMP1);
2320 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
2323 throw_cacao_exception_exit(string_java_lang_InternalError,
2324 "Unknown PUTSTATIC operand type %d",
2329 case ICMD_GETSTATIC: /* ... ==> ..., value */
2330 /* op1 = type, val.a = field address */
2332 /* If the static fields' class is not yet initialized, we do it */
2333 /* now. The call code is generated later. */
2334 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2335 codegen_addclinitref(cd, cd->mcodeptr, ((fieldinfo *) iptr->val.a)->class);
2337 /* This is just for debugging purposes. Is very difficult to */
2338 /* read patched code. Here we patch the following 5 nop's */
2339 /* so that the real code keeps untouched. */
2340 if (showdisassemble) {
2349 /* This approach is much faster than moving the field address */
2350 /* inline into a register. */
2351 a = dseg_addaddress(cd, &(((fieldinfo *) iptr->val.a)->value));
2352 x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
2353 switch (iptr->op1) {
2355 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2356 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2357 store_reg_to_var_int(iptr->dst, d);
2361 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2362 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2363 store_reg_to_var_int(iptr->dst, d);
2366 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2367 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2368 store_reg_to_var_flt(iptr->dst, d);
2371 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2372 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2373 store_reg_to_var_flt(iptr->dst, d);
2376 throw_cacao_exception_exit(string_java_lang_InternalError,
2377 "Unknown GETSTATIC operand type %d",
2382 case ICMD_PUTFIELD: /* ..., value ==> ... */
2383 /* op1 = type, val.i = field offset */
2385 a = ((fieldinfo *)(iptr->val.a))->offset;
2386 var_to_reg_int(s1, src->prev, REG_ITMP1);
2387 switch (iptr->op1) {
2389 var_to_reg_int(s2, src, REG_ITMP2);
2390 gen_nullptr_check(s1);
2391 x86_64_movl_reg_membase(cd, s2, s1, a);
2395 var_to_reg_int(s2, src, REG_ITMP2);
2396 gen_nullptr_check(s1);
2397 x86_64_mov_reg_membase(cd, s2, s1, a);
2400 var_to_reg_flt(s2, src, REG_FTMP2);
2401 gen_nullptr_check(s1);
2402 x86_64_movss_reg_membase(cd, s2, s1, a);
2405 var_to_reg_flt(s2, src, REG_FTMP2);
2406 gen_nullptr_check(s1);
2407 x86_64_movsd_reg_membase(cd, s2, s1, a);
2410 throw_cacao_exception_exit(string_java_lang_InternalError,
2411 "Unknown PUTFIELD operand type %d",
2416 case ICMD_GETFIELD: /* ... ==> ..., value */
2417 /* op1 = type, val.i = field offset */
2419 a = ((fieldinfo *)(iptr->val.a))->offset;
2420 var_to_reg_int(s1, src, REG_ITMP1);
2421 switch (iptr->op1) {
2423 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2424 gen_nullptr_check(s1);
2425 x86_64_movl_membase_reg(cd, s1, a, d);
2426 store_reg_to_var_int(iptr->dst, d);
2430 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2431 gen_nullptr_check(s1);
2432 x86_64_mov_membase_reg(cd, s1, a, d);
2433 store_reg_to_var_int(iptr->dst, d);
2436 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2437 gen_nullptr_check(s1);
2438 x86_64_movss_membase_reg(cd, s1, a, d);
2439 store_reg_to_var_flt(iptr->dst, d);
2442 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2443 gen_nullptr_check(s1);
2444 x86_64_movsd_membase_reg(cd, s1, a, d);
2445 store_reg_to_var_flt(iptr->dst, d);
2448 throw_cacao_exception_exit(string_java_lang_InternalError,
2449 "Unknown GETFIELD operand type %d",
2455 /* branch operations **************************************************/
2457 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2459 var_to_reg_int(s1, src, REG_ITMP1);
2460 M_INTMOVE(s1, REG_ITMP1_XPTR);
2462 x86_64_call_imm(cd, 0); /* passing exception pointer */
2463 x86_64_pop_reg(cd, REG_ITMP2_XPC);
2465 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
2466 x86_64_jmp_reg(cd, REG_ITMP3);
2470 case ICMD_GOTO: /* ... ==> ... */
2471 /* op1 = target JavaVM pc */
2473 x86_64_jmp_imm(cd, 0);
2474 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2478 case ICMD_JSR: /* ... ==> ... */
2479 /* op1 = target JavaVM pc */
2481 x86_64_call_imm(cd, 0);
2482 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2485 case ICMD_RET: /* ... ==> ... */
2486 /* op1 = local variable */
2488 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2489 var_to_reg_int(s1, var, REG_ITMP1);
2490 x86_64_jmp_reg(cd, s1);
2493 case ICMD_IFNULL: /* ..., value ==> ... */
2494 /* op1 = target JavaVM pc */
2496 if (src->flags & INMEMORY) {
2497 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2500 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2502 x86_64_jcc(cd, X86_64_CC_E, 0);
2503 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2506 case ICMD_IFNONNULL: /* ..., value ==> ... */
2507 /* op1 = target JavaVM pc */
2509 if (src->flags & INMEMORY) {
2510 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2513 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2515 x86_64_jcc(cd, X86_64_CC_NE, 0);
2516 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2519 case ICMD_IFEQ: /* ..., value ==> ... */
2520 /* op1 = target JavaVM pc, val.i = constant */
2522 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2525 case ICMD_IFLT: /* ..., value ==> ... */
2526 /* op1 = target JavaVM pc, val.i = constant */
2528 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2531 case ICMD_IFLE: /* ..., value ==> ... */
2532 /* op1 = target JavaVM pc, val.i = constant */
2534 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2537 case ICMD_IFNE: /* ..., value ==> ... */
2538 /* op1 = target JavaVM pc, val.i = constant */
2540 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2543 case ICMD_IFGT: /* ..., value ==> ... */
2544 /* op1 = target JavaVM pc, val.i = constant */
2546 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2549 case ICMD_IFGE: /* ..., value ==> ... */
2550 /* op1 = target JavaVM pc, val.i = constant */
2552 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
2555 case ICMD_IF_LEQ: /* ..., value ==> ... */
2556 /* op1 = target JavaVM pc, val.l = constant */
2558 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2561 case ICMD_IF_LLT: /* ..., value ==> ... */
2562 /* op1 = target JavaVM pc, val.l = constant */
2564 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2567 case ICMD_IF_LLE: /* ..., value ==> ... */
2568 /* op1 = target JavaVM pc, val.l = constant */
2570 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2573 case ICMD_IF_LNE: /* ..., value ==> ... */
2574 /* op1 = target JavaVM pc, val.l = constant */
2576 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2579 case ICMD_IF_LGT: /* ..., value ==> ... */
2580 /* op1 = target JavaVM pc, val.l = constant */
2582 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2585 case ICMD_IF_LGE: /* ..., value ==> ... */
2586 /* op1 = target JavaVM pc, val.l = constant */
2588 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
2591 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2592 /* op1 = target JavaVM pc */
2594 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2597 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2598 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2600 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2603 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2604 /* op1 = target JavaVM pc */
2606 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2609 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2610 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2612 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2615 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2616 /* op1 = target JavaVM pc */
2618 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2621 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2622 /* op1 = target JavaVM pc */
2624 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2627 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2628 /* op1 = target JavaVM pc */
2630 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2633 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2634 /* op1 = target JavaVM pc */
2636 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2639 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2640 /* op1 = target JavaVM pc */
2642 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2645 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2646 /* op1 = target JavaVM pc */
2648 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2651 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2652 /* op1 = target JavaVM pc */
2654 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2657 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2658 /* op1 = target JavaVM pc */
2660 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2663 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
2665 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2668 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2669 /* val.i = constant */
2671 var_to_reg_int(s1, src, REG_ITMP1);
2672 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2674 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2676 M_INTMOVE(s1, REG_ITMP1);
2679 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2681 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2682 x86_64_testl_reg_reg(cd, s1, s1);
2683 x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2684 store_reg_to_var_int(iptr->dst, d);
2687 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2688 /* val.i = constant */
2690 var_to_reg_int(s1, src, REG_ITMP1);
2691 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2693 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2695 M_INTMOVE(s1, REG_ITMP1);
2698 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2700 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2701 x86_64_testl_reg_reg(cd, s1, s1);
2702 x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2703 store_reg_to_var_int(iptr->dst, d);
2706 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2707 /* val.i = constant */
2709 var_to_reg_int(s1, src, REG_ITMP1);
2710 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2712 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2714 M_INTMOVE(s1, REG_ITMP1);
2717 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2719 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2720 x86_64_testl_reg_reg(cd, s1, s1);
2721 x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2722 store_reg_to_var_int(iptr->dst, d);
2725 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2726 /* val.i = constant */
2728 var_to_reg_int(s1, src, REG_ITMP1);
2729 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2731 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2733 M_INTMOVE(s1, REG_ITMP1);
2736 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2738 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2739 x86_64_testl_reg_reg(cd, s1, s1);
2740 x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2741 store_reg_to_var_int(iptr->dst, d);
2744 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2745 /* val.i = constant */
2747 var_to_reg_int(s1, src, REG_ITMP1);
2748 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2750 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2752 M_INTMOVE(s1, REG_ITMP1);
2755 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2757 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2758 x86_64_testl_reg_reg(cd, s1, s1);
2759 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2760 store_reg_to_var_int(iptr->dst, d);
2763 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2764 /* val.i = constant */
2766 var_to_reg_int(s1, src, REG_ITMP1);
2767 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2769 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2771 M_INTMOVE(s1, REG_ITMP1);
2774 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2776 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2777 x86_64_testl_reg_reg(cd, s1, s1);
2778 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2779 store_reg_to_var_int(iptr->dst, d);
2783 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2787 var_to_reg_int(s1, src, REG_RESULT);
2788 M_INTMOVE(s1, REG_RESULT);
2790 goto nowperformreturn;
2792 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2795 var_to_reg_flt(s1, src, REG_FRESULT);
2796 M_FLTMOVE(s1, REG_FRESULT);
2798 goto nowperformreturn;
2800 case ICMD_RETURN: /* ... ==> ... */
2806 p = parentargs_base;
2808 /* call trace function */
2810 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2812 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2813 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2815 x86_64_mov_imm_reg(cd, (s8) m, rd->argintregs[0]);
2816 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
2817 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2818 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2820 x86_64_mov_imm_reg(cd, (s8) builtin_displaymethodstop, REG_ITMP1);
2821 x86_64_call_reg(cd, REG_ITMP1);
2823 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2824 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2826 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2829 #if defined(USE_THREADS)
2830 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2831 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2833 /* we need to save the proper return value */
2834 switch (iptr->opc) {
2838 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, rd->maxmemuse * 8);
2842 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, rd->maxmemuse * 8);
2846 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2847 x86_64_call_reg(cd, REG_ITMP1);
2849 /* and now restore the proper return value */
2850 switch (iptr->opc) {
2854 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_RESULT);
2858 x86_64_movq_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_FRESULT);
2864 /* restore saved registers */
2865 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
2866 p--; x86_64_mov_membase_reg(cd, REG_SP, p * 8, rd->savintregs[i]);
2868 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
2869 p--; x86_64_movq_membase_reg(cd, REG_SP, p * 8, rd->savfltregs[i]);
2872 /* deallocate stack */
2873 if (parentargs_base) {
2874 x86_64_alu_imm_reg(cd, X86_64_ADD, parentargs_base * 8, REG_SP);
2883 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2888 tptr = (void **) iptr->target;
2890 s4ptr = iptr->val.a;
2891 l = s4ptr[1]; /* low */
2892 i = s4ptr[2]; /* high */
2894 var_to_reg_int(s1, src, REG_ITMP1);
2895 M_INTMOVE(s1, REG_ITMP1);
2897 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
2902 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
2903 x86_64_jcc(cd, X86_64_CC_A, 0);
2905 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
2906 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2908 /* build jump table top down and use address of lowest entry */
2910 /* s4ptr += 3 + i; */
2914 /* dseg_addtarget(cd, BlockPtrOfPC(*--s4ptr)); */
2915 dseg_addtarget(cd, (basicblock *) tptr[0]);
2919 /* length of dataseg after last dseg_addtarget is used by load */
2921 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
2922 dseg_adddata(cd, cd->mcodeptr);
2923 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
2924 x86_64_jmp_reg(cd, REG_ITMP1);
2930 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2932 s4 i, l, val, *s4ptr;
2935 tptr = (void **) iptr->target;
2937 s4ptr = iptr->val.a;
2938 l = s4ptr[0]; /* default */
2939 i = s4ptr[1]; /* count */
2941 MCODECHECK((i<<2)+8);
2942 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
2948 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
2949 x86_64_jcc(cd, X86_64_CC_E, 0);
2950 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[1]), cd->mcodeptr); */
2951 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2954 x86_64_jmp_imm(cd, 0);
2955 /* codegen_addreference(cd, BlockPtrOfPC(l), cd->mcodeptr); */
2957 tptr = (void **) iptr->target;
2958 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2965 case ICMD_BUILTIN3: /* ..., arg1, arg2, arg3 ==> ... */
2966 /* op1 = return type, val.a = function pointer*/
2970 case ICMD_BUILTIN2: /* ..., arg1, arg2 ==> ... */
2971 /* op1 = return type, val.a = function pointer*/
2975 case ICMD_BUILTIN1: /* ..., arg1 ==> ... */
2976 /* op1 = return type, val.a = function pointer*/
2980 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2981 /* op1 = arg count, val.a = method pointer */
2983 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2984 /* op1 = arg count, val.a = method pointer */
2986 case ICMD_INVOKEVIRTUAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2987 /* op1 = arg count, val.a = method pointer */
2989 case ICMD_INVOKEINTERFACE:/*.., objectref, [arg1, [arg2 ...]] ==> ... */
2990 /* op1 = arg count, val.a = method pointer */
3000 MCODECHECK((s3 << 1) + 64);
3007 /* copy arguments to registers or stack location ******************/
3009 /* count integer and float arguments */
3011 for (; --s3 >= 0; src = src->prev) {
3012 IS_INT_LNG_TYPE(src->type) ? iarg++ : farg++;
3018 /* calculate amount of arguments to be on stack */
3020 s2 = (iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0 +
3021 (farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0;
3023 for (; --s3 >= 0; src = src->prev) {
3024 /* decrement the current argument type */
3025 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
3027 if (src->varkind == ARGVAR) {
3028 if (IS_INT_LNG_TYPE(src->type)) {
3029 if (iarg >= INT_ARG_CNT) {
3033 if (farg >= FLT_ARG_CNT) {
3040 if (IS_INT_LNG_TYPE(src->type)) {
3041 if (iarg < INT_ARG_CNT) {
3042 s1 = rd->argintregs[iarg];
3043 var_to_reg_int(d, src, s1);
3047 var_to_reg_int(d, src, REG_ITMP1);
3049 x86_64_mov_reg_membase(cd, d, REG_SP, s2 * 8);
3053 if (farg < FLT_ARG_CNT) {
3054 s1 = rd->argfltregs[farg];
3055 var_to_reg_flt(d, src, s1);
3059 var_to_reg_flt(d, src, REG_FTMP1);
3061 x86_64_movq_reg_membase(cd, d, REG_SP, s2 * 8);
3067 switch (iptr->opc) {
3075 x86_64_mov_imm_reg(cd, a, REG_ITMP1);
3076 x86_64_call_reg(cd, REG_ITMP1);
3079 case ICMD_INVOKESTATIC:
3081 a = (s8) lm->stubroutine;
3084 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3085 x86_64_call_reg(cd, REG_ITMP2);
3088 case ICMD_INVOKESPECIAL:
3090 a = (s8) lm->stubroutine;
3093 gen_nullptr_check(rd->argintregs[0]); /* first argument contains pointer */
3094 x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); /* access memory for hardware nullptr */
3095 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3096 x86_64_call_reg(cd, REG_ITMP2);
3099 case ICMD_INVOKEVIRTUAL:
3103 gen_nullptr_check(rd->argintregs[0]);
3104 x86_64_mov_membase_reg(cd, rd->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3105 x86_64_mov_membase32_reg(cd, REG_ITMP2, OFFSET(vftbl_t, table[0]) + sizeof(methodptr) * lm->vftblindex, REG_ITMP1);
3106 x86_64_call_reg(cd, REG_ITMP1);
3109 case ICMD_INVOKEINTERFACE:
3114 gen_nullptr_check(rd->argintregs[0]);
3115 x86_64_mov_membase_reg(cd, rd->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3116 x86_64_mov_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, interfacetable[0]) - sizeof(methodptr) * ci->index, REG_ITMP2);
3117 x86_64_mov_membase32_reg(cd, REG_ITMP2, sizeof(methodptr) * (lm - ci->methods), REG_ITMP1);
3118 x86_64_call_reg(cd, REG_ITMP1);
3123 error("Unkown ICMD-Command: %d", iptr->opc);
3126 /* d contains return type */
3128 if (d != TYPE_VOID) {
3129 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3130 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3131 M_INTMOVE(REG_RESULT, s1);
3132 store_reg_to_var_int(iptr->dst, s1);
3135 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3136 M_FLTMOVE(REG_FRESULT, s1);
3137 store_reg_to_var_flt(iptr->dst, s1);
3144 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3146 /* op1: 0 == array, 1 == class */
3147 /* val.a: (classinfo*) superclass */
3149 /* superclass is an interface:
3151 * return (sub != NULL) &&
3152 * (sub->vftbl->interfacetablelength > super->index) &&
3153 * (sub->vftbl->interfacetable[-super->index] != NULL);
3155 * superclass is a class:
3157 * return ((sub != NULL) && (0
3158 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3159 * super->vftbl->diffvall));
3163 classinfo *super = (classinfo*) iptr->val.a;
3165 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3166 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3169 var_to_reg_int(s1, src, REG_ITMP1);
3170 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3172 M_INTMOVE(s1, REG_ITMP1);
3175 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3176 if (iptr->op1) { /* class/interface */
3177 if (super->flags & ACC_INTERFACE) { /* interface */
3178 x86_64_test_reg_reg(cd, s1, s1);
3180 /* TODO: clean up this calculation */
3181 a = 3; /* mov_membase_reg */
3182 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3184 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3185 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3188 CALCIMMEDIATEBYTES(a, super->index);
3193 a += 3; /* mov_membase_reg */
3194 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3199 x86_64_jcc(cd, X86_64_CC_E, a);
3201 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3202 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength), REG_ITMP2);
3203 x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP2);
3204 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3206 /* TODO: clean up this calculation */
3208 a += 3; /* mov_membase_reg */
3209 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3214 x86_64_jcc(cd, X86_64_CC_LE, a);
3215 x86_64_mov_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP1);
3216 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3217 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3219 } else { /* class */
3220 x86_64_test_reg_reg(cd, s1, s1);
3222 /* TODO: clean up this calculation */
3223 a = 3; /* mov_membase_reg */
3224 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3226 a += 10; /* mov_imm_reg */
3228 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3229 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, baseval));
3231 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3232 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3234 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3235 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3242 x86_64_jcc(cd, X86_64_CC_E, a);
3244 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3245 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3246 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3247 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3249 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, baseval), REG_ITMP1);
3250 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP3);
3251 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3252 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3253 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3255 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP1);
3256 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3257 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
3258 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3262 panic("internal error: no inlined array instanceof");
3264 store_reg_to_var_int(iptr->dst, d);
3267 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3269 /* op1: 0 == array, 1 == class */
3270 /* val.a: (classinfo*) superclass */
3272 /* superclass is an interface:
3274 * OK if ((sub == NULL) ||
3275 * (sub->vftbl->interfacetablelength > super->index) &&
3276 * (sub->vftbl->interfacetable[-super->index] != NULL));
3278 * superclass is a class:
3280 * OK if ((sub == NULL) || (0
3281 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3282 * super->vftbl->diffvall));
3286 classinfo *super = (classinfo*) iptr->val.a;
3288 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3289 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3291 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3292 var_to_reg_int(s1, src, d);
3293 if (iptr->op1) { /* class/interface */
3294 if (super->flags & ACC_INTERFACE) { /* interface */
3295 x86_64_test_reg_reg(cd, s1, s1);
3297 /* TODO: clean up this calculation */
3298 a = 3; /* mov_membase_reg */
3299 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3301 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3302 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3305 CALCIMMEDIATEBYTES(a, super->index);
3310 a += 3; /* mov_membase_reg */
3311 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3316 x86_64_jcc(cd, X86_64_CC_E, a);
3318 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3319 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength), REG_ITMP2);
3320 x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP2);
3321 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3322 x86_64_jcc(cd, X86_64_CC_LE, 0);
3323 codegen_addxcastrefs(cd, cd->mcodeptr);
3324 x86_64_mov_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP2);
3325 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3326 x86_64_jcc(cd, X86_64_CC_E, 0);
3327 codegen_addxcastrefs(cd, cd->mcodeptr);
3329 } else { /* class */
3330 x86_64_test_reg_reg(cd, s1, s1);
3332 /* TODO: clean up this calculation */
3333 a = 3; /* mov_membase_reg */
3334 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3335 a += 10; /* mov_imm_reg */
3336 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3337 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, baseval));
3339 if (d != REG_ITMP3) {
3340 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3341 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3342 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3343 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3347 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3348 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3350 a += 10; /* mov_imm_reg */
3351 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3352 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3358 x86_64_jcc(cd, X86_64_CC_E, a);
3360 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3361 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3362 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3363 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3365 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, baseval), REG_ITMP1);
3366 if (d != REG_ITMP3) {
3367 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP3);
3368 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3369 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3370 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3372 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP1);
3375 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP2);
3376 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
3377 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3378 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3379 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3380 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3383 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
3384 x86_64_jcc(cd, X86_64_CC_A, 0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3385 codegen_addxcastrefs(cd, cd->mcodeptr);
3389 panic("internal error: no inlined array checkcast");
3392 store_reg_to_var_int(iptr->dst, d);
3395 case ICMD_CHECKASIZE: /* ..., size ==> ..., size */
3397 if (src->flags & INMEMORY) {
3398 x86_64_alul_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
3401 x86_64_testl_reg_reg(cd, src->regoff, src->regoff);
3403 x86_64_jcc(cd, X86_64_CC_L, 0);
3404 codegen_addxcheckarefs(cd, cd->mcodeptr);
3407 case ICMD_CHECKEXCEPTION: /* ... ==> ... */
3409 x86_64_test_reg_reg(cd, REG_RESULT, REG_RESULT);
3410 x86_64_jcc(cd, X86_64_CC_E, 0);
3411 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3414 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3415 /* op1 = dimension, val.a = array descriptor */
3417 /* check for negative sizes and copy sizes to stack if necessary */
3419 MCODECHECK((iptr->op1 << 1) + 64);
3421 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3422 var_to_reg_int(s2, src, REG_ITMP1);
3423 x86_64_testl_reg_reg(cd, s2, s2);
3424 x86_64_jcc(cd, X86_64_CC_L, 0);
3425 codegen_addxcheckarefs(cd, cd->mcodeptr);
3427 /* copy sizes to stack (argument numbers >= INT_ARG_CNT) */
3429 if (src->varkind != ARGVAR) {
3430 x86_64_mov_reg_membase(cd, s2, REG_SP, (s1 + INT_ARG_CNT) * 8);
3434 /* a0 = dimension count */
3435 x86_64_mov_imm_reg(cd, iptr->op1, rd->argintregs[0]);
3437 /* a1 = arraydescriptor */
3438 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, rd->argintregs[1]);
3440 /* a2 = pointer to dimensions = stack pointer */
3441 x86_64_mov_reg_reg(cd, REG_SP, rd->argintregs[2]);
3443 x86_64_mov_imm_reg(cd, (s8) builtin_nmultianewarray, REG_ITMP1);
3444 x86_64_call_reg(cd, REG_ITMP1);
3446 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3447 M_INTMOVE(REG_RESULT, s1);
3448 store_reg_to_var_int(iptr->dst, s1);
3452 throw_cacao_exception_exit(string_java_lang_InternalError,
3453 "Unknown ICMD %d", iptr->opc);
3456 } /* for instruction */
3458 /* copy values to interface registers */
3460 src = bptr->outstack;
3461 len = bptr->outdepth;
3462 MCODECHECK(64 + len);
3465 if ((src->varkind != STACKVAR)) {
3467 if (IS_FLT_DBL_TYPE(s2)) {
3468 var_to_reg_flt(s1, src, REG_FTMP1);
3469 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3470 M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3473 x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3477 var_to_reg_int(s1, src, REG_ITMP1);
3478 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3479 M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3482 x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3488 } /* if (bptr -> flags >= BBREACHED) */
3489 } /* for basic block */
3493 /* generate bound check stubs */
3495 u1 *xcodeptr = NULL;
3498 for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3499 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3501 cd->mcodeptr - cd->mcodebase);
3505 /* move index register into REG_ITMP1 */
3506 x86_64_mov_reg_reg(cd, bref->reg, REG_ITMP1); /* 3 bytes */
3508 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3509 dseg_adddata(cd, cd->mcodeptr);
3510 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3511 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3513 if (xcodeptr != NULL) {
3514 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3517 xcodeptr = cd->mcodeptr;
3519 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3520 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3522 x86_64_mov_reg_reg(cd, REG_ITMP1, rd->argintregs[0]);
3523 x86_64_mov_imm_reg(cd, (s8) new_arrayindexoutofboundsexception, REG_ITMP3);
3524 x86_64_call_reg(cd, REG_ITMP3);
3526 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3527 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3529 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3530 x86_64_jmp_reg(cd, REG_ITMP3);
3534 /* generate negative array size check stubs */
3538 for (bref = cd->xcheckarefs; bref != NULL; bref = bref->next) {
3539 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3540 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3542 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3546 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3548 cd->mcodeptr - cd->mcodebase);
3552 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3553 dseg_adddata(cd, cd->mcodeptr);
3554 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3555 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3557 if (xcodeptr != NULL) {
3558 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3561 xcodeptr = cd->mcodeptr;
3563 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3564 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3566 x86_64_mov_imm_reg(cd, (s8) new_negativearraysizeexception, REG_ITMP3);
3567 x86_64_call_reg(cd, REG_ITMP3);
3569 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3570 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3572 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3573 x86_64_jmp_reg(cd, REG_ITMP3);
3577 /* generate cast check stubs */
3581 for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3582 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3583 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3585 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3589 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3591 cd->mcodeptr - cd->mcodebase);
3595 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3596 dseg_adddata(cd, cd->mcodeptr);
3597 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3598 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3600 if (xcodeptr != NULL) {
3601 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3604 xcodeptr = cd->mcodeptr;
3606 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3607 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3609 x86_64_mov_imm_reg(cd, (s8) new_classcastexception, REG_ITMP3);
3610 x86_64_call_reg(cd, REG_ITMP3);
3612 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3613 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3615 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3616 x86_64_jmp_reg(cd, REG_ITMP3);
3620 /* generate divide by zero check stubs */
3624 for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
3625 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3626 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3628 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3632 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3634 cd->mcodeptr - cd->mcodebase);
3638 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3639 dseg_adddata(cd, cd->mcodeptr);
3640 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3641 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3643 if (xcodeptr != NULL) {
3644 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3647 xcodeptr = cd->mcodeptr;
3649 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3650 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3652 x86_64_mov_imm_reg(cd, (u8) new_arithmeticexception, REG_ITMP3);
3653 x86_64_call_reg(cd, REG_ITMP3);
3655 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3656 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3658 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3659 x86_64_jmp_reg(cd, REG_ITMP3);
3663 /* generate exception check stubs */
3667 for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
3668 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3669 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3671 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3675 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3677 cd->mcodeptr - cd->mcodebase);
3681 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3682 dseg_adddata(cd, cd->mcodeptr);
3683 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3684 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3686 if (xcodeptr != NULL) {
3687 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3690 xcodeptr = cd->mcodeptr;
3692 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3693 x86_64_alu_imm_reg(cd, X86_64_SUB, 8, REG_SP);
3694 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0);
3695 x86_64_mov_imm_reg(cd, (u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3696 x86_64_call_reg(cd, REG_ITMP1);
3697 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
3698 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
3699 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
3700 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);
3701 x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
3703 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
3704 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP1_XPTR);
3705 x86_64_mov_imm_membase(cd, 0, REG_ITMP3, 0);
3708 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3709 x86_64_jmp_reg(cd, REG_ITMP3);
3713 /* generate null pointer check stubs */
3717 for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
3718 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3719 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3721 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3725 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3727 cd->mcodeptr - cd->mcodebase);
3731 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3732 dseg_adddata(cd, cd->mcodeptr);
3733 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3734 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3736 if (xcodeptr != NULL) {
3737 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3740 xcodeptr = cd->mcodeptr;
3742 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3743 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3745 x86_64_mov_imm_reg(cd, (s8) new_nullpointerexception, REG_ITMP3);
3746 x86_64_call_reg(cd, REG_ITMP3);
3748 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3749 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3751 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3752 x86_64_jmp_reg(cd, REG_ITMP3);
3756 /* generate put/getstatic stub call code */
3764 tmpcd = DNEW(codegendata);
3766 for (cref = cd->clinitrefs; cref != NULL; cref = cref->next) {
3767 /* Get machine code which is patched back in later. A */
3768 /* `call rel32' is 5 bytes long. */
3769 xcodeptr = cd->mcodebase + cref->branchpos;
3771 mcode = *((u4 *) (xcodeptr + 1));
3775 /* patch in `call rel32' to call the following code */
3776 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
3777 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
3779 /* Save current stack pointer into a temporary register. */
3780 x86_64_mov_reg_reg(cd, REG_SP, REG_ITMP1);
3782 /* Push machine code bytes to patch onto the stack. */
3783 x86_64_push_imm(cd, (u1) xmcode);
3784 x86_64_push_imm(cd, (u4) mcode);
3786 x86_64_push_imm(cd, (u8) cref->class);
3788 /* Push previously saved stack pointer onto stack. */
3789 x86_64_push_reg(cd, REG_ITMP1);
3791 x86_64_mov_imm_reg(cd, (u8) asm_check_clinit, REG_ITMP1);
3792 x86_64_jmp_reg(cd, REG_ITMP1);
3797 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
3801 /* function createcompilerstub *************************************************
3803 creates a stub routine which calls the compiler
3805 *******************************************************************************/
3807 #define COMPSTUBSIZE 23

/* createcompilerstub: build a tiny (COMPSTUBSIZE = 23 byte) trampoline
   for a method that has not been JIT-compiled yet.  The stub loads the
   methodinfo pointer into REG_ITMP1 and jumps to asm_call_jit_compiler,
   which compiles the method and transfers control to the generated code.
   NOTE(review): this listing is elided -- the opening brace, the
   codegendata/mcodeptr initialization, the #endif for STATISTICS and the
   final `return s;' are missing from this excerpt; do not compile as-is. */
3809 u1 *createcompilerstub(methodinfo *m)
3811 u1 *s = CNEW(u1, COMPSTUBSIZE); /* memory to hold the stub */
3815 /* mark start of dump memory area */
3817 dumpsize = dump_size();
3819 cd = DNEW(codegendata);
3822 /* code for the stub */
3823 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1); /* pass method to compiler */
3824 x86_64_mov_imm_reg(cd, (u8) asm_call_jit_compiler, REG_ITMP3);/* load address */
3825 x86_64_jmp_reg(cd, REG_ITMP3); /* jump to compiler */
3827 #if defined(STATISTICS)
3829 count_cstub_len += COMPSTUBSIZE;
/* release dump area: the codegendata above lives in dump memory,
   only the stub bytes themselves (CNEW) survive this call */
3832 /* release dump area */
3834 dump_release(dumpsize);
3840 /* function removecompilerstub *************************************************
3842 deletes a compilerstub from memory (simply by freeing it)
3844 *******************************************************************************/
/* removecompilerstub: release the COMPSTUBSIZE bytes allocated with
   CNEW in createcompilerstub (CNEW/CFREE are the matching allocator
   pair used by this file).
   NOTE(review): the function's braces are elided from this excerpt. */
3846 void removecompilerstub(u1 *stub)
3848 CFREE(stub, COMPSTUBSIZE);
3852 /* function: createnativestub **************************************************
3854 creates a stub routine which calls a native method
3856 *******************************************************************************/
3858 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3859 /* static java_objectheader **(*callgetexceptionptrptr)() = builtin_get_exceptionptrptr; */
/* NATIVESTUBSIZE is only an upper bound; actual size is verified against
   it near the end of createnativestub (original line 4244). */
3862 #define NATIVESTUBSIZE 700 /* keep this size high enough! */

/* createnativestub: generate a stub that bridges from the JIT calling
   convention to the native function f implementing method m.  Visible
   responsibilities in this excerpt: count int/float parameters, emit an
   optional clinit patch site for uninitialized static classes, spill and
   reload all argument registers around builtin_trace_args and around
   codegen_resolve_native (non-STATIC_CLASSPATH builds), shift integer
   argument registers to make room for the `env' (and, for static
   methods, `class') JNI-style leading arguments, call f, trace the
   result, then test the thread's exception pointer and dispatch to
   asm_handle_nat_exception if one is pending.
   NOTE(review): this listing is elided -- braces, #endif lines, the
   `if (runverbose)' style guards around the trace blocks, and the final
   `return s;' are missing from this excerpt; do not compile as-is. */
3864 u1 *createnativestub(functionptr f, methodinfo *m)
3866 u1 *s; /* pointer to stub memory */
3869 t_inlining_globals *id;
3871 s4 stackframesize; /* size of stackframe if needed */
3873 s4 iargs; /* count of integer arguments */
3874 s4 fargs; /* count of float arguments */
/* patch positions filled in below; only used when STATIC_CLASSPATH
   is not defined (lazy native resolution) */
3877 void **callAddrPatchPos=0;
3879 void **jmpInstrPatchPos=0;
3881 /* initialize variables */
3886 /* mark start of dump memory area */
3888 dumpsize = dump_size();
/* codegendata/registerdata/inlining globals live in dump memory and
   are released before returning; only the stub bytes (CNEW) persist */
3890 cd = DNEW(codegendata);
3891 rd = DNEW(registerdata);
3892 id = DNEW(t_inlining_globals);
3894 /* setup registers before using it */
3896 inlining_setup(m, id);
3897 reg_setup(m, rd, id);
3899 /* set paramcount and paramtypes */
3901 descriptor2types(m);
3903 /* count integer and float arguments */
3905 tptr = m->paramtypes;
3906 for (i = 0; i < m->paramcount; i++) {
3907 IS_INT_LNG_TYPE(*tptr++) ? iargs++ : fargs++;
3910 s = CNEW(u1, NATIVESTUBSIZE); /* memory to hold the stub */
3912 /* set some required variables which are normally set by codegen_setup */
3915 cd->clinitrefs = NULL;
3917 /* if function is static, check for initialized */
3919 if ((m->flags & ACC_STATIC) && !m->class->initialized) {
/* records a patch site; the `call rel32' is patched in at the
   "patch in a clinit call" section near the end of this function */
3920 codegen_addclinitref(cd, cd->mcodeptr, m->class);
/* --- argument tracing (presumably guarded by an elided runverbose
   check -- TODO confirm against the full source): save all 6 integer
   and 8 float argument registers plus one methodinfo slot, call
   builtin_trace_args, then restore everything --- */
3926 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
3928 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 1 * 8);
3929 x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 2 * 8);
3930 x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 3 * 8);
3931 x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 4 * 8);
3932 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 5 * 8);
3933 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 6 * 8);
3935 x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 7 * 8);
3936 x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 8 * 8);
3937 x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 9 * 8);
3938 x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 10 * 8);
3939 x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 11 * 8);
3940 x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 12 * 8);
3941 x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 13 * 8);
3942 x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 14 * 8);
3944 /* show integer hex code for float arguments */
3946 for (i = 0, l = 0; i < m->paramcount; i++) {
3947 if (IS_FLT_DBL_TYPE(m->paramtypes[i])) {
/* make room in the integer register file, then copy the float
   bit pattern into an integer register so builtin_trace_args can
   print it */
3948 for (s1 = (m->paramcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : m->paramcount - 2; s1 >= i; s1--) {
3949 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
3952 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[i]);
3957 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1);
3958 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, 0 * 8);
3959 x86_64_mov_imm_reg(cd, (u8) builtin_trace_args, REG_ITMP1);
3960 x86_64_call_reg(cd, REG_ITMP1);
3962 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[0]);
3963 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[1]);
3964 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[2]);
3965 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[3]);
3966 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[4]);
3967 x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, rd->argintregs[5]);
3969 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[0]);
3970 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[1]);
3971 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[2]);
3972 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[3]);
3973 x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[4]);
3974 x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[5]);
3975 x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[6]);
3976 x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, rd->argfltregs[7]);
3978 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
3981 #if !defined(STATIC_CLASSPATH)
3982 /* call method to resolve native function if needed */
/* same spill/restore dance as above, this time around
   codegen_resolve_native, which looks up the native symbol lazily and
   patches both the function-address immediate and the skip-jump below */
3984 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
3986 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 1 * 8);
3987 x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 2 * 8);
3988 x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 3 * 8);
3989 x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 4 * 8);
3990 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 5 * 8);
3991 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 6 * 8);
3993 x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 7 * 8);
3994 x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 8 * 8);
3995 x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 9 * 8);
3996 x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 10 * 8);
3997 x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 11 * 8);
3998 x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 12 * 8);
3999 x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 13 * 8);
4000 x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 14 * 8);
4002 /* needed to patch a jump over this block */
/* jmp_imm emits a 5-byte `jmp rel32'; mcodeptr - 4 addresses its
   32-bit displacement, which is patched at line 4019 below */
4003 x86_64_jmp_imm(cd, 0);
4004 jmpInstrPos = cd->mcodeptr - 4;
4006 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
4008 x86_64_mov_imm_reg(cd, 0, rd->argintregs[1]);
4009 callAddrPatchPos = cd->mcodeptr - 8; /* at this position the place is specified where the native function address should be patched into*/
4011 x86_64_mov_imm_reg(cd, 0, rd->argintregs[2]);
4012 jmpInstrPatchPos = cd->mcodeptr - 8;
4014 x86_64_mov_imm_reg(cd, jmpInstrPos, rd->argintregs[3]);
4016 x86_64_mov_imm_reg(cd, (u8) codegen_resolve_native, REG_ITMP1);
4017 x86_64_call_reg(cd, REG_ITMP1);
/* retro-patch the rel32 of the jmp emitted at 4003 so that, once the
   native function is resolved, the resolver block is skipped */
4019 *(jmpInstrPatchPos) = cd->mcodeptr - jmpInstrPos - 1; /*=opcode jmp_imm size*/
4021 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[0]);
4022 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[1]);
4023 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[2]);
4024 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[3]);
4025 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[4]);
4026 x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, rd->argintregs[5]);
4028 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[0]);
4029 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[1]);
4030 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[2]);
4031 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[3]);
4032 x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[4]);
4033 x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[5]);
4034 x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[6]);
4035 x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, rd->argfltregs[7]);
4037 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4041 /* save argument registers on stack -- if we have to */
/* the extra leading env/class arguments displace 1 (non-static) or
   2 (static) integer argument registers, hence the -1 / -2 below */
4043 if ((((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) || iargs > (INT_ARG_CNT - 1)) ||
4044 (fargs > FLT_ARG_CNT)) {
4051 /* do we need to shift integer argument register onto stack? */
4053 if ((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) {
4054 /* do we need to shift 2 arguments? */
4055 if (iargs > (INT_ARG_CNT - 1)) {
4062 } else if (iargs > (INT_ARG_CNT - 1)) {
4066 /* calculate required stack space */
4068 stackparamcnt += (iargs > INT_ARG_CNT) ? iargs - INT_ARG_CNT : 0;
4069 stackparamcnt += (fargs > FLT_ARG_CNT) ? fargs - FLT_ARG_CNT : 0;
4071 stackframesize = stackparamcnt + paramshiftcnt;
4073 /* keep stack 16-byte aligned */
/* an even slot count is padded to odd so that SP stays 16-byte
   aligned after the frame (NOTE(review): increment elided here) */
4074 if (!(stackframesize & 0x1))
4077 x86_64_alu_imm_reg(cd, X86_64_SUB, stackframesize * 8, REG_SP);
4079 /* shift integer arguments if required */
4081 if ((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) {
4082 /* do we need to shift 2 arguments? */
4083 if (iargs > (INT_ARG_CNT - 1))
4084 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 1 * 8);
4086 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 0 * 8);
4088 } else if (iargs > (INT_ARG_CNT - 1)) {
4089 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 0 * 8);
4092 /* copy stack arguments into new stack frame -- if any */
4093 for (i = 0; i < stackparamcnt; i++) {
4094 x86_64_mov_membase_reg(cd, REG_SP, (stackframesize + 1 + i) * 8, REG_ITMP1);
4095 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
/* else branch (no frame needed): still drop SP by one slot for
   16-byte alignment before the call */
4099 /* keep stack 16-byte aligned */
4100 x86_64_alu_imm_reg(cd, X86_64_SUB, 1 * 8, REG_SP);
4104 /* shift integer arguments for `env' and `class' arguments */
4106 if (m->flags & ACC_STATIC) {
4107 /* shift iargs count, if less than INT_ARG_CNT, or all */
4108 for (i = (iargs < (INT_ARG_CNT - 2)) ? iargs : (INT_ARG_CNT - 2); i >= 0; i--) {
4109 x86_64_mov_reg_reg(cd, rd->argintregs[i], rd->argintregs[i + 2]);
4112 /* put class into second argument register */
4113 x86_64_mov_imm_reg(cd, (u8) m->class, rd->argintregs[1]);
4116 /* shift iargs count, if less than INT_ARG_CNT, or all */
4117 for (i = (iargs < (INT_ARG_CNT - 1)) ? iargs : (INT_ARG_CNT - 1); i >= 0; i--) {
4118 x86_64_mov_reg_reg(cd, rd->argintregs[i], rd->argintregs[i + 1]);
4122 /* put env into first argument register */
4123 x86_64_mov_imm_reg(cd, (u8) &env, rd->argintregs[0]);
4125 /* do the native function call */
4126 x86_64_mov_imm_reg(cd, (u8) f, REG_ITMP1);
4127 #if !defined(STATIC_CLASSPATH)
/* remember where the 8-byte immediate of the mov above sits, so the
   resolver (codegen_resolve_native) can patch the real address in */
4129 (*callAddrPatchPos) = cd->mcodeptr - 8;
4131 x86_64_call_reg(cd, REG_ITMP1);
4133 /* remove stackframe if there is one */
4134 if (stackframesize) {
4135 x86_64_alu_imm_reg(cd, X86_64_ADD, stackframesize * 8, REG_SP);
/* --- result tracing (presumably inside an elided runverbose guard --
   TODO confirm): preserve REG_RESULT/REG_FRESULT across the call to
   builtin_displaymethodstop --- */
4139 x86_64_alu_imm_reg(cd, X86_64_SUB, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4141 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
4142 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
4144 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
4145 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
4146 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
4147 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
4149 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
4150 x86_64_call_reg(cd, REG_ITMP1);
4152 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
4153 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
4155 x86_64_alu_imm_reg(cd, X86_64_ADD, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4158 /* check for exception */
4160 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* threaded build: the exception slot is per-thread, fetched via
   builtin_get_exceptionptrptr; REG_RESULT is preserved around it */
4161 x86_64_push_reg(cd, REG_RESULT);
4162 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4163 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4164 x86_64_call_reg(cd, REG_ITMP3);
4165 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
4166 x86_64_pop_reg(cd, REG_RESULT);
/* non-threaded build: load from the global _exceptionptr cell */
4168 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4169 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP3);
/* no pending exception: short forward jcc skips into the plain
   return path (NOTE(review): the `ret'/return path lines are elided) */
4171 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
4172 x86_64_jcc(cd, X86_64_CC_NE, 1);
4176 /* handle exception */
4178 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4179 x86_64_push_reg(cd, REG_ITMP3);
4180 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4181 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4182 x86_64_call_reg(cd, REG_ITMP3);
4183 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
4184 x86_64_pop_reg(cd, REG_ITMP1_XPTR);
4186 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
4187 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4188 x86_64_alu_reg_reg(cd, X86_64_XOR, REG_ITMP2, REG_ITMP2);
4189 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_ITMP3, 0); /* clear exception pointer */
/* fault PC = return address minus the 3-byte `call *reg' encoding,
   so the exception appears to originate at the native call site */
4192 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC); /* get return address from stack */
4193 x86_64_alu_imm_reg(cd, X86_64_SUB, 3, REG_ITMP2_XPC); /* callq */
4195 x86_64_mov_imm_reg(cd, (u8) asm_handle_nat_exception, REG_ITMP3);
4196 x86_64_jmp_reg(cd, REG_ITMP3);
4199 /* patch in a clinit call if required *************************************/
4208 tmpcd = DNEW(codegendata);
4210 /* there can only be one clinit ref entry */
4211 cref = cd->clinitrefs;
4214 /* Get machine code which is patched back in later. A */
4215 /* `call rel32' is 5 bytes long. */
4216 xcodeptr = cd->mcodebase + cref->branchpos;
4218 mcode = *((u4 *) (xcodeptr + 1));
4220 /* patch in `call rel32' to call the following code */
4221 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4222 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4224 /* Save current stack pointer into a temporary register. */
4225 x86_64_mov_reg_reg(cd, REG_SP, REG_ITMP1);
4227 /* Push machine code bytes to patch onto the stack. */
4228 x86_64_push_imm(cd, (u1) xmcode);
4229 x86_64_push_imm(cd, (u4) mcode);
4231 x86_64_push_imm(cd, (u8) cref->class);
4233 /* Push previously saved stack pointer onto stack. */
4234 x86_64_push_reg(cd, REG_ITMP1);
/* asm_check_clinit initializes the class, restores the saved bytes
   and re-executes the patched code */
4236 x86_64_mov_imm_reg(cd, (u8) asm_check_clinit, REG_ITMP1);
4237 x86_64_jmp_reg(cd, REG_ITMP1);
4241 /* Check if the stub size is big enough to hold the whole stub generated. */
4242 /* If not, this can lead into unpredictable crashes, because of heap */
/* NOTE(review): runtime message below reads "is to small" -- typo for
   "too small"; left untouched here because it is a runtime string, not
   a comment (fixable only in a behavior-level change) */
4244 if ((s4) (cd->mcodeptr - s) > NATIVESTUBSIZE) {
4245 throw_cacao_exception_exit(string_java_lang_InternalError,
4246 "Native stub size %d is to small for current stub size %d",
4247 NATIVESTUBSIZE, (s4) (cd->mcodeptr - s));
4250 #if defined(STATISTICS)
4252 count_nstub_len += NATIVESTUBSIZE;
4255 /* release dump area */
4257 dump_release(dumpsize);
4263 /* function: removenativestub **************************************************
4265 removes a previously created native-stub from memory
4267 *******************************************************************************/
/* removenativestub: release the NATIVESTUBSIZE bytes allocated with
   CNEW in createnativestub (CNEW/CFREE are the matching allocator pair).
   NOTE(review): the function's braces are elided from this excerpt. */
4269 void removenativestub(u1 *stub)
4271 CFREE(stub, NATIVESTUBSIZE);
4276 * These are local overrides for various environment variables in Emacs.
4277 * Please do not remove this and leave it at the end of the file, where
4278 * Emacs will automagically detect them.
4279 * ---------------------------------------------------------------------
4282 * indent-tabs-mode: t