1 /* vm/jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996-2005 R. Grafl, A. Krall, C. Kruegel, C. Oates,
4 R. Obermaisser, M. Platter, M. Probst, S. Ring, E. Steiner,
5 C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich, J. Wenninger,
6 Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
25 Contact: cacao@complang.tuwien.ac.at
27 Authors: Andreas Krall
30 $Id: codegen.c 2179 2005-04-01 13:28:16Z twisti $
40 #include "cacao/cacao.h"
41 #include "native/native.h"
42 #include "vm/global.h"
43 #include "vm/builtin.h"
44 #include "vm/loader.h"
45 #include "vm/tables.h"
46 #include "vm/jit/asmpart.h"
47 #include "vm/jit/jit.h"
48 #include "vm/jit/reg.h"
49 #include "vm/jit/parse.h"
50 #include "vm/jit/x86_64/arch.h"
51 #include "vm/jit/x86_64/codegen.h"
52 #include "vm/jit/x86_64/emitfuncs.h"
53 #include "vm/jit/x86_64/types.h"
54 #include "vm/jit/x86_64/asmoffsets.h"
57 /* register description - array *********************************************/
59 /* #define REG_RES 0 reserved register for OS or code generator */
60 /* #define REG_RET 1 return value register */
61 /* #define REG_EXC 2 exception value register (only old jit) */
62 /* #define REG_SAV 3 (callee) saved register */
63 /* #define REG_TMP 4 scratch temporary register (caller saved) */
64 /* #define REG_ARG 5 argument register (caller saved) */
66 /* #define REG_END -1 last entry in tables */
/* Integer register usage descriptor table for the register allocator.
   NOTE(review): presumably indexed by x86_64 GPR encoding (RAX..R15) —
   confirm against vm/jit/x86_64/arch.h. Entry meanings are given by the
   REG_* comment block above (RET = return value, ARG = argument,
   TMP = caller-saved scratch, SAV = callee-saved, RES = reserved).
   The terminating REG_END entry and closing brace are on lines outside
   this view. */
68 static int nregdescint[] = {
69 REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
70 REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
/* Floating-point register usage descriptor table.
   NOTE(review): presumably indexed by XMM register number (XMM0..XMM15)
   — confirm against vm/jit/x86_64/arch.h. The x86_64 SysV calling
   convention passes float args in XMM0-XMM7, which matches the eight
   leading REG_ARG entries here. Terminating REG_END and closing brace
   are on lines outside this view. */
75 static int nregdescfloat[] = {
76 REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG,
77 REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
82 /* Include independent code generation stuff -- include after register */
83 /* descriptions to avoid extern definitions. */
85 #include "vm/jit/codegen.inc"
86 #include "vm/jit/reg.inc"
88 #include "vm/jit/lsra.inc"
92 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* thread_restartcriticalsection *********************************************
   If the thread was stopped while executing JIT code inside a registered
   critical section, rewrite the saved instruction pointer so execution
   resumes at the section's restart address.
   Parameter: uc - saved user context of the stopped thread.
   NOTE(review): the declaration of `critical` and the surrounding braces
   are on lines elided from this view; presumably
   thread_checkcritical() returns NULL when the RIP is not inside a
   critical section, guarding the RIP rewrite — confirm in threads code. */
93 void thread_restartcriticalsection(ucontext_t *uc)
97 critical = thread_checkcritical((void *) uc->uc_mcontext.gregs[REG_RIP]);
/* Redirect the thread to the critical section's restart point. */
100 uc->uc_mcontext.gregs[REG_RIP] = (u8) critical;
105 /* NullPointerException signal handler for hardware null pointer check */
/* catch_NullPointerException ************************************************
   SIGSEGV/SIGBUS handler: converts a hardware null-pointer fault in JIT
   code into a Java NullPointerException. Builds the exception object,
   loads it into the exception-pointer register (RAX), saves the faulting
   PC in R10, and redirects the interrupted context to
   asm_handle_exception, which unwinds to the matching Java handler.
   NOTE(review): the declaration of `nsig` and the enclosing braces are
   on lines elided from this view. */
107 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
111 struct ucontext *_uc = (struct ucontext *) _p;
112 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
113 struct sigaction act;
114 java_objectheader *xptr;
116 /* Reset signal handler - necessary for SysV, does no harm for BSD */
118 act.sa_sigaction = catch_NullPointerException; /* reinstall handler */
119 act.sa_flags = SA_SIGINFO;
120 sigaction(sig, &act, NULL);
/* The signal is blocked while its handler runs; unblock it so a later
   null-pointer fault can be caught again after this handler returns
   via asm_handle_exception instead of a normal sigreturn. */
123 sigaddset(&nsig, sig);
124 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
/* Allocate the Java exception object to hand to the JIT unwinder. */
126 xptr = new_nullpointerexception();
/* Stage the exception for the asm unwinder: RAX holds the exception
   object, R10 the faulting PC; then resume at asm_handle_exception. */
128 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
129 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
130 sigctx->rip = (u8) asm_handle_exception;
136 /* ArithmeticException signal handler for hardware divide by zero check */
/* catch_ArithmeticException *************************************************
   SIGFPE handler: converts a hardware divide-by-zero fault in JIT code
   into a Java ArithmeticException. Mirrors catch_NullPointerException:
   exception object in RAX, faulting PC in R10, control transferred to
   asm_handle_exception.
   NOTE(review): the declaration of `nsig` and the enclosing braces are
   on lines elided from this view. */
138 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
142 struct ucontext *_uc = (struct ucontext *) _p;
143 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
144 struct sigaction act;
145 java_objectheader *xptr;
147 /* Reset signal handler - necessary for SysV, does no harm for BSD */
149 act.sa_sigaction = catch_ArithmeticException; /* reinstall handler */
150 act.sa_flags = SA_SIGINFO;
151 sigaction(sig, &act, NULL);
/* Unblock the signal: the handler exits through asm_handle_exception,
   not a normal sigreturn, so the kernel's automatic unblocking would
   never happen. */
154 sigaddset(&nsig, sig);
155 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
/* Allocate the Java exception object to hand to the JIT unwinder. */
157 xptr = new_arithmeticexception();
/* Stage exception pointer (RAX) and faulting PC (R10), then redirect
   the interrupted context into the exception-handling assembly stub. */
159 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
160 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
161 sigctx->rip = (u8) asm_handle_exception;
/* init_exceptions ***********************************************************
   Install the signal handlers that turn hardware faults into Java
   exceptions: SIGSEGV and SIGBUS -> NullPointerException, SIGFPE ->
   ArithmeticException. Enclosing braces are on lines elided from this
   view. */
167 void init_exceptions(void)
169 struct sigaction act;
171 /* install signal handlers we need to convert to exceptions */
172 sigemptyset(&act.sa_mask);
/* SIGSEGV: hardware null-pointer check (implicit load through null). */
176 act.sa_sigaction = catch_NullPointerException;
177 act.sa_flags = SA_SIGINFO;
178 sigaction(SIGSEGV, &act, NULL);
/* SIGBUS: some platforms report the null-pointer fault as a bus error,
   so route it to the same handler. */
182 act.sa_sigaction = catch_NullPointerException;
183 act.sa_flags = SA_SIGINFO;
184 sigaction(SIGBUS, &act, NULL);
/* SIGFPE: hardware divide-by-zero check. */
188 act.sa_sigaction = catch_ArithmeticException;
189 act.sa_flags = SA_SIGINFO;
190 sigaction(SIGFPE, &act, NULL);
194 /* function gen_mcode **********************************************************
196 generates machine code
198 *******************************************************************************/
200 void codegen(methodinfo *m, codegendata *cd, registerdata *rd)
202 s4 len, s1, s2, s3, d;
217 /* space to save used callee saved registers */
219 savedregs_num += (rd->savintregcnt - rd->maxsavintreguse);
220 savedregs_num += (rd->savfltregcnt - rd->maxsavfltreguse);
222 parentargs_base = rd->maxmemuse + savedregs_num;
224 #if defined(USE_THREADS) /* space to save argument of monitor_enter */
226 if (checksync && (m->flags & ACC_SYNCHRONIZED))
231 /* Keep stack of non-leaf functions 16-byte aligned for calls into native */
232 /* code e.g. libc or jni (alignment problems with movaps). */
234 if (!m->isleafmethod || runverbose) {
235 parentargs_base |= 0x1;
238 /* create method header */
240 (void) dseg_addaddress(cd, m); /* MethodPointer */
241 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
243 #if defined(USE_THREADS)
245 /* IsSync contains the offset relative to the stack pointer for the
246 argument of monitor_exit used in the exception handler. Since the
247 offset could be zero and give a wrong meaning of the flag it is
251 if (checksync && (m->flags & ACC_SYNCHRONIZED))
252 (void) dseg_adds4(cd, (rd->maxmemuse + 1) * 8); /* IsSync */
257 (void) dseg_adds4(cd, 0); /* IsSync */
259 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
260 (void) dseg_adds4(cd, rd->savintregcnt - rd->maxsavintreguse);/* IntSave */
261 (void) dseg_adds4(cd, rd->savfltregcnt - rd->maxsavfltreguse);/* FltSave */
262 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
264 /* create exception table */
266 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
267 dseg_addtarget(cd, ex->start);
268 dseg_addtarget(cd, ex->end);
269 dseg_addtarget(cd, ex->handler);
270 (void) dseg_addaddress(cd, ex->catchtype);
273 /* initialize mcode variables */
275 cd->mcodeptr = (u1 *) cd->mcodebase;
276 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
277 MCODECHECK(128 + m->paramcount);
279 /* create stack frame (if necessary) */
281 if (parentargs_base) {
282 x86_64_alu_imm_reg(cd, X86_64_SUB, parentargs_base * 8, REG_SP);
285 /* save used callee saved registers */
288 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
289 p--; x86_64_mov_reg_membase(cd, rd->savintregs[i], REG_SP, p * 8);
291 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
292 p--; x86_64_movq_reg_membase(cd, rd->savfltregs[i], REG_SP, p * 8);
295 /* take arguments out of register or stack frame */
297 for (p = 0, l = 0, s1 = 0, s2 = 0; p < m->paramcount; p++) {
298 t = m->paramtypes[p];
299 var = &(rd->locals[l][t]);
301 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
304 if (IS_INT_LNG_TYPE(t)) {
311 if (IS_INT_LNG_TYPE(t)) { /* integer args */
312 if (s1 < INT_ARG_CNT) { /* register arguments */
313 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
314 M_INTMOVE(rd->argintregs[s1], var->regoff);
316 } else { /* reg arg -> spilled */
317 x86_64_mov_reg_membase(cd, rd->argintregs[s1], REG_SP, var->regoff * 8);
320 } else { /* stack arguments */
321 pa = s1 - INT_ARG_CNT;
322 if (s2 >= FLT_ARG_CNT) {
323 pa += s2 - FLT_ARG_CNT;
325 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
326 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff); /* + 8 for return address */
327 } else { /* stack arg -> spilled */
328 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1); /* + 8 for return address */
329 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, var->regoff * 8);
334 } else { /* floating args */
335 if (s2 < FLT_ARG_CNT) { /* register arguments */
336 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
337 M_FLTMOVE(rd->argfltregs[s2], var->regoff);
339 } else { /* reg arg -> spilled */
340 x86_64_movq_reg_membase(cd, rd->argfltregs[s2], REG_SP, var->regoff * 8);
343 } else { /* stack arguments */
344 pa = s2 - FLT_ARG_CNT;
345 if (s1 >= INT_ARG_CNT) {
346 pa += s1 - INT_ARG_CNT;
348 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
349 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff);
352 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
353 x86_64_movq_reg_membase(cd, REG_FTMP1, REG_SP, var->regoff * 8);
360 /* save monitorenter argument */
362 #if defined(USE_THREADS)
363 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
366 if (m->flags & ACC_STATIC) {
367 func_enter = (u8) builtin_staticmonitorenter;
368 x86_64_mov_imm_reg(cd, (s8) m->class, REG_ITMP1);
369 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, rd->maxmemuse * 8);
372 func_enter = (u8) builtin_monitorenter;
373 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, rd->maxmemuse * 8);
376 /* call monitorenter function */
378 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
379 x86_64_mov_imm_reg(cd, func_enter, REG_ITMP1);
380 x86_64_call_reg(cd, REG_ITMP1);
384 /* Copy argument registers to stack and call trace function with pointer */
385 /* to arguments on stack. */
388 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1 + 1) * 8, REG_SP);
390 /* save integer argument registers */
392 for (p = 0; p < INT_ARG_CNT; p++) {
393 x86_64_mov_reg_membase(cd, rd->argintregs[p], REG_SP, (1 + p) * 8);
396 /* save float argument registers */
398 for (p = 0; p < FLT_ARG_CNT; p++) {
399 x86_64_movq_reg_membase(cd, rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
402 /* show integer hex code for float arguments */
404 for (p = 0, l = 0; p < m->paramcount && p < INT_ARG_CNT; p++) {
405 t = m->paramtypes[p];
407 /* if the paramtype is a float, we have to right shift all */
408 /* following integer registers */
410 if (IS_FLT_DBL_TYPE(t)) {
411 for (s1 = INT_ARG_CNT - 2; s1 >= p; s1--) {
412 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
415 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
420 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP2);
421 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
422 x86_64_mov_imm_reg(cd, (u8) builtin_trace_args, REG_ITMP1);
423 x86_64_call_reg(cd, REG_ITMP1);
425 /* restore integer argument registers */
427 for (p = 0; p < INT_ARG_CNT; p++) {
428 x86_64_mov_membase_reg(cd, REG_SP, (1 + p) * 8, rd->argintregs[p]);
431 /* restore float argument registers */
433 for (p = 0; p < FLT_ARG_CNT; p++) {
434 x86_64_movq_membase_reg(cd, REG_SP, (1 + INT_ARG_CNT + p) * 8, rd->argfltregs[p]);
437 x86_64_alu_imm_reg(cd, X86_64_ADD, (6 + 8 + 1 + 1) * 8, REG_SP);
442 /* end of header generation */
444 /* walk through all basic blocks */
445 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
447 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
449 if (bptr->flags >= BBREACHED) {
451 /* branch resolving */
454 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
455 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
460 /* copy interface registers to their destination */
464 MCODECHECK(64 + len);
468 while (src != NULL) {
470 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
471 if (bptr->type == BBTYPE_SBR) {
472 /* d = reg_of_var(rd, src, REG_ITMP1); */
473 if (!(src->flags & INMEMORY))
477 x86_64_pop_reg(cd, d);
478 store_reg_to_var_int(src, d);
480 } else if (bptr->type == BBTYPE_EXH) {
481 /* d = reg_of_var(rd, src, REG_ITMP1); */
482 if (!(src->flags & INMEMORY))
486 M_INTMOVE(REG_ITMP1, d);
487 store_reg_to_var_int(src, d);
496 while (src != NULL) {
498 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
499 if (bptr->type == BBTYPE_SBR) {
500 d = reg_of_var(rd, src, REG_ITMP1);
501 x86_64_pop_reg(cd, d);
502 store_reg_to_var_int(src, d);
504 } else if (bptr->type == BBTYPE_EXH) {
505 d = reg_of_var(rd, src, REG_ITMP1);
506 M_INTMOVE(REG_ITMP1, d);
507 store_reg_to_var_int(src, d);
511 d = reg_of_var(rd, src, REG_ITMP1);
512 if ((src->varkind != STACKVAR)) {
514 if (IS_FLT_DBL_TYPE(s2)) {
515 s1 = rd->interfaces[len][s2].regoff;
516 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
520 x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
522 store_reg_to_var_flt(src, d);
525 s1 = rd->interfaces[len][s2].regoff;
526 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
530 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
532 store_reg_to_var_int(src, d);
541 /* walk through all instructions */
545 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
547 MCODECHECK(64); /* an instruction usually needs < 64 words */
549 case ICMD_INLINE_START: /* internal ICMDs */
550 case ICMD_INLINE_END:
553 case ICMD_NOP: /* ... ==> ... */
556 case ICMD_NULLCHECKPOP: /* ..., objectref ==> ... */
557 if (src->flags & INMEMORY) {
558 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
561 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
563 x86_64_jcc(cd, X86_64_CC_E, 0);
564 codegen_addxnullrefs(cd, cd->mcodeptr);
567 /* constant operations ************************************************/
569 case ICMD_ICONST: /* ... ==> ..., constant */
570 /* op1 = 0, val.i = constant */
572 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
573 if (iptr->val.i == 0) {
574 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
576 x86_64_movl_imm_reg(cd, iptr->val.i, d);
578 store_reg_to_var_int(iptr->dst, d);
581 case ICMD_ACONST: /* ... ==> ..., constant */
582 /* op1 = 0, val.a = constant */
584 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
585 if (iptr->val.a == 0) {
586 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
588 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, d);
590 store_reg_to_var_int(iptr->dst, d);
593 case ICMD_LCONST: /* ... ==> ..., constant */
594 /* op1 = 0, val.l = constant */
596 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
597 if (iptr->val.l == 0) {
598 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
600 x86_64_mov_imm_reg(cd, iptr->val.l, d);
602 store_reg_to_var_int(iptr->dst, d);
605 case ICMD_FCONST: /* ... ==> ..., constant */
606 /* op1 = 0, val.f = constant */
608 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
609 a = dseg_addfloat(cd, iptr->val.f);
610 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + a, d);
611 store_reg_to_var_flt(iptr->dst, d);
614 case ICMD_DCONST: /* ... ==> ..., constant */
615 /* op1 = 0, val.d = constant */
617 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
618 a = dseg_adddouble(cd, iptr->val.d);
619 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, d);
620 store_reg_to_var_flt(iptr->dst, d);
624 /* load/store operations **********************************************/
626 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
627 /* op1 = local variable */
629 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
630 if ((iptr->dst->varkind == LOCALVAR) &&
631 (iptr->dst->varnum == iptr->op1)) {
634 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
635 if (var->flags & INMEMORY) {
636 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
637 store_reg_to_var_int(iptr->dst, d);
640 if (iptr->dst->flags & INMEMORY) {
641 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
644 M_INTMOVE(var->regoff, d);
649 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
650 case ICMD_ALOAD: /* op1 = local variable */
652 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
653 if ((iptr->dst->varkind == LOCALVAR) &&
654 (iptr->dst->varnum == iptr->op1)) {
657 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
658 if (var->flags & INMEMORY) {
659 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
660 store_reg_to_var_int(iptr->dst, d);
663 if (iptr->dst->flags & INMEMORY) {
664 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
667 M_INTMOVE(var->regoff, d);
672 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
673 case ICMD_DLOAD: /* op1 = local variable */
675 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
676 if ((iptr->dst->varkind == LOCALVAR) &&
677 (iptr->dst->varnum == iptr->op1)) {
680 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
681 if (var->flags & INMEMORY) {
682 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
683 store_reg_to_var_flt(iptr->dst, d);
686 if (iptr->dst->flags & INMEMORY) {
687 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
690 M_FLTMOVE(var->regoff, d);
695 case ICMD_ISTORE: /* ..., value ==> ... */
696 case ICMD_LSTORE: /* op1 = local variable */
699 if ((src->varkind == LOCALVAR) &&
700 (src->varnum == iptr->op1)) {
703 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
704 if (var->flags & INMEMORY) {
705 var_to_reg_int(s1, src, REG_ITMP1);
706 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
709 var_to_reg_int(s1, src, var->regoff);
710 M_INTMOVE(s1, var->regoff);
714 case ICMD_FSTORE: /* ..., value ==> ... */
715 case ICMD_DSTORE: /* op1 = local variable */
717 if ((src->varkind == LOCALVAR) &&
718 (src->varnum == iptr->op1)) {
721 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
722 if (var->flags & INMEMORY) {
723 var_to_reg_flt(s1, src, REG_FTMP1);
724 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
727 var_to_reg_flt(s1, src, var->regoff);
728 M_FLTMOVE(s1, var->regoff);
733 /* pop/dup/swap operations ********************************************/
735 /* attention: double and longs are only one entry in CACAO ICMDs */
737 case ICMD_POP: /* ..., value ==> ... */
738 case ICMD_POP2: /* ..., value, value ==> ... */
741 case ICMD_DUP: /* ..., a ==> ..., a, a */
742 M_COPY(src, iptr->dst);
745 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
747 M_COPY(src, iptr->dst);
748 M_COPY(src->prev, iptr->dst->prev);
749 M_COPY(iptr->dst, iptr->dst->prev->prev);
752 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
754 M_COPY(src, iptr->dst);
755 M_COPY(src->prev, iptr->dst->prev);
756 M_COPY(src->prev->prev, iptr->dst->prev->prev);
757 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
760 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
762 M_COPY(src, iptr->dst);
763 M_COPY(src->prev, iptr->dst->prev);
766 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
768 M_COPY(src, iptr->dst);
769 M_COPY(src->prev, iptr->dst->prev);
770 M_COPY(src->prev->prev, iptr->dst->prev->prev);
771 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
772 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
775 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
777 M_COPY(src, iptr->dst);
778 M_COPY(src->prev, iptr->dst->prev);
779 M_COPY(src->prev->prev, iptr->dst->prev->prev);
780 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
781 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
782 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
785 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
787 M_COPY(src, iptr->dst->prev);
788 M_COPY(src->prev, iptr->dst);
792 /* integer operations *************************************************/
794 case ICMD_INEG: /* ..., value ==> ..., - value */
796 d = reg_of_var(rd, iptr->dst, REG_NULL);
797 if (iptr->dst->flags & INMEMORY) {
798 if (src->flags & INMEMORY) {
799 if (src->regoff == iptr->dst->regoff) {
800 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
803 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
804 x86_64_negl_reg(cd, REG_ITMP1);
805 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
809 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
810 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
814 if (src->flags & INMEMORY) {
815 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
816 x86_64_negl_reg(cd, d);
819 M_INTMOVE(src->regoff, iptr->dst->regoff);
820 x86_64_negl_reg(cd, iptr->dst->regoff);
825 case ICMD_LNEG: /* ..., value ==> ..., - value */
827 d = reg_of_var(rd, iptr->dst, REG_NULL);
828 if (iptr->dst->flags & INMEMORY) {
829 if (src->flags & INMEMORY) {
830 if (src->regoff == iptr->dst->regoff) {
831 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
834 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
835 x86_64_neg_reg(cd, REG_ITMP1);
836 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
840 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
841 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
845 if (src->flags & INMEMORY) {
846 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
847 x86_64_neg_reg(cd, iptr->dst->regoff);
850 M_INTMOVE(src->regoff, iptr->dst->regoff);
851 x86_64_neg_reg(cd, iptr->dst->regoff);
856 case ICMD_I2L: /* ..., value ==> ..., value */
858 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
859 if (src->flags & INMEMORY) {
860 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
863 x86_64_movslq_reg_reg(cd, src->regoff, d);
865 store_reg_to_var_int(iptr->dst, d);
868 case ICMD_L2I: /* ..., value ==> ..., value */
870 var_to_reg_int(s1, src, REG_ITMP1);
871 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
873 store_reg_to_var_int(iptr->dst, d);
876 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
878 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
879 if (src->flags & INMEMORY) {
880 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
883 x86_64_movsbq_reg_reg(cd, src->regoff, d);
885 store_reg_to_var_int(iptr->dst, d);
888 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
890 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
891 if (src->flags & INMEMORY) {
892 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
895 x86_64_movzwq_reg_reg(cd, src->regoff, d);
897 store_reg_to_var_int(iptr->dst, d);
900 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
902 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
903 if (src->flags & INMEMORY) {
904 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
907 x86_64_movswq_reg_reg(cd, src->regoff, d);
909 store_reg_to_var_int(iptr->dst, d);
913 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
915 d = reg_of_var(rd, iptr->dst, REG_NULL);
916 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
919 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
920 /* val.i = constant */
922 d = reg_of_var(rd, iptr->dst, REG_NULL);
923 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
926 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
928 d = reg_of_var(rd, iptr->dst, REG_NULL);
929 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
932 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
933 /* val.l = constant */
935 d = reg_of_var(rd, iptr->dst, REG_NULL);
936 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
939 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
941 d = reg_of_var(rd, iptr->dst, REG_NULL);
942 if (iptr->dst->flags & INMEMORY) {
943 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
944 if (src->prev->regoff == iptr->dst->regoff) {
945 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
946 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
949 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
950 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
951 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
954 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
955 M_INTMOVE(src->prev->regoff, REG_ITMP1);
956 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
957 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
959 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
960 if (src->prev->regoff == iptr->dst->regoff) {
961 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
964 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
965 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
966 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
970 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
971 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
975 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
976 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
977 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
979 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
980 M_INTMOVE(src->prev->regoff, d);
981 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
983 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
984 /* workaround for reg alloc */
985 if (src->regoff == iptr->dst->regoff) {
986 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
987 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
988 M_INTMOVE(REG_ITMP1, d);
991 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
992 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
996 /* workaround for reg alloc */
997 if (src->regoff == iptr->dst->regoff) {
998 M_INTMOVE(src->prev->regoff, REG_ITMP1);
999 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1000 M_INTMOVE(REG_ITMP1, d);
1003 M_INTMOVE(src->prev->regoff, d);
1004 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
1010 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
1011 /* val.i = constant */
1013 d = reg_of_var(rd, iptr->dst, REG_NULL);
1014 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
1017 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1019 d = reg_of_var(rd, iptr->dst, REG_NULL);
1020 if (iptr->dst->flags & INMEMORY) {
1021 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1022 if (src->prev->regoff == iptr->dst->regoff) {
1023 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1024 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1027 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1028 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1029 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1032 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1033 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1034 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1035 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1037 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1038 if (src->prev->regoff == iptr->dst->regoff) {
1039 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1042 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1043 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1044 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1048 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1049 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1053 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1054 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1055 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1057 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1058 M_INTMOVE(src->prev->regoff, d);
1059 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1061 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1062 /* workaround for reg alloc */
1063 if (src->regoff == iptr->dst->regoff) {
1064 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1065 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1066 M_INTMOVE(REG_ITMP1, d);
1069 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1070 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1074 /* workaround for reg alloc */
1075 if (src->regoff == iptr->dst->regoff) {
1076 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1077 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1078 M_INTMOVE(REG_ITMP1, d);
1081 M_INTMOVE(src->prev->regoff, d);
1082 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1088 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1089 /* val.l = constant */
1091 d = reg_of_var(rd, iptr->dst, REG_NULL);
1092 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1095 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1097 d = reg_of_var(rd, iptr->dst, REG_NULL);
1098 if (iptr->dst->flags & INMEMORY) {
1099 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1100 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1101 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1102 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1104 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
/* (tail of ICMD_IMUL) 32-bit multiply: operand placement cases.
 * Stack slots live at REG_SP + regoff*8; register operands use regoff
 * directly.  REG_ITMP1 is the scratch for memory destinations. */
1105 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1106 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1107 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1109 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1110 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1111 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1112 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* both operands in registers, destination in memory */
1115 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1116 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1117 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* destination in a register: imul can take a membase source directly */
1121 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1122 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1123 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1125 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1126 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1127 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1129 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1130 M_INTMOVE(src->regoff, iptr->dst->regoff);
1131 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
/* all-register case: avoid a redundant move when src already is dst */
1134 if (src->regoff == iptr->dst->regoff) {
1135 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1138 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1139 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
/* ICMD_IMULCONST: 32-bit multiply by an immediate; uses the three-operand
 * imul form so source and destination may differ. */
1145 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1146 /* val.i = constant */
1148 d = reg_of_var(rd, iptr->dst, REG_NULL);
1149 if (iptr->dst->flags & INMEMORY) {
1150 if (src->flags & INMEMORY) {
1151 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1152 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1155 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1156 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1160 if (src->flags & INMEMORY) {
1161 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
/* strength reduction: *2 becomes a self-add (cheaper than imul) */
1164 if (iptr->val.i == 2) {
1165 M_INTMOVE(src->regoff, iptr->dst->regoff);
1166 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1169 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
/* ICMD_LMUL: 64-bit multiply; same operand-placement case analysis as
 * ICMD_IMUL above, but with the quadword imul/mov forms. */
1175 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1177 d = reg_of_var(rd, iptr->dst, REG_NULL);
1178 if (iptr->dst->flags & INMEMORY) {
1179 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1180 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1181 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1182 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1184 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1185 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1186 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1187 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1189 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1190 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1191 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1192 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1195 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1196 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1197 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* destination in a register */
1201 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1202 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1203 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1205 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1206 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1207 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1209 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1210 M_INTMOVE(src->regoff, iptr->dst->regoff);
1211 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1214 if (src->regoff == iptr->dst->regoff) {
1215 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1218 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1219 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
/* ICMD_LMULCONST: 64-bit multiply by constant.  imul's immediate form
 * only takes a sign-extended 32-bit operand, hence the IS_IMM32 split:
 * larger constants are first materialized with a 64-bit mov. */
1225 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1226 /* val.l = constant */
1228 d = reg_of_var(rd, iptr->dst, REG_NULL);
1229 if (iptr->dst->flags & INMEMORY) {
1230 if (src->flags & INMEMORY) {
1231 if (IS_IMM32(iptr->val.l)) {
1232 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1235 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1236 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1238 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1241 if (IS_IMM32(iptr->val.l)) {
1242 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1245 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1246 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1248 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1252 if (src->flags & INMEMORY) {
1253 if (IS_IMM32(iptr->val.l)) {
1254 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1257 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1258 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1262 /* should match in many cases */
1263 if (iptr->val.l == 2) {
1264 M_INTMOVE(src->regoff, iptr->dst->regoff);
/* NOTE(review): this is the 32-bit add form (alul) applied to a long
 * value; it would zero the upper 32 bits of the product.  Presumably
 * the 64-bit alu form was intended -- verify against the emitter. */
1265 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1268 if (IS_IMM32(iptr->val.l)) {
1269 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1272 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1273 M_INTMOVE(src->regoff, iptr->dst->regoff);
1274 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
/* ICMD_IDIV: 32-bit signed division.  Dividend is forced into RAX
 * (required by idiv); divisor goes to REG_ITMP3.  The CMP/jcc pair
 * special-cases INT_MIN / -1, which would raise #DE in hardware but must
 * yield INT_MIN per the JVM spec.  The jcc skip distances are hand-counted
 * instruction byte lengths -- do not reorder these emits. */
1281 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1283 d = reg_of_var(rd, iptr->dst, REG_NULL);
1284 if (src->prev->flags & INMEMORY) {
1285 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1288 M_INTMOVE(src->prev->regoff, RAX);
1291 if (src->flags & INMEMORY) {
1292 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1295 M_INTMOVE(src->regoff, REG_ITMP3);
1299 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1300 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1301 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1302 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1304 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1306 x86_64_idivl_reg(cd, REG_ITMP3);
/* quotient lands in RAX; restore the caller's %rdx afterwards */
1308 if (iptr->dst->flags & INMEMORY) {
1309 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1310 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1313 M_INTMOVE(RAX, iptr->dst->regoff);
1315 if (iptr->dst->regoff != RDX) {
1316 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
/* ICMD_IREM: 32-bit remainder -- same setup as IDIV, but the result is
 * taken from RDX, and the INT_MIN % -1 corner case zeroes RDX (result 0). */
1321 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1323 d = reg_of_var(rd, iptr->dst, REG_NULL);
1324 if (src->prev->flags & INMEMORY) {
1325 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1328 M_INTMOVE(src->prev->regoff, RAX);
1331 if (src->flags & INMEMORY) {
1332 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1335 M_INTMOVE(src->regoff, REG_ITMP3);
1339 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1341 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1342 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1345 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1346 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1347 x86_64_jcc(cd, X86_64_CC_E, 1 + 3); /* 6 bytes */
1350 x86_64_idivl_reg(cd, REG_ITMP3);
1352 if (iptr->dst->flags & INMEMORY) {
1353 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1354 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1357 M_INTMOVE(RDX, iptr->dst->regoff);
1359 if (iptr->dst->regoff != RDX) {
1360 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
/* ICMD_IDIVPOW2: int division by 2^val.i via arithmetic shift.  A bias
 * of (2^val.i - 1) is conditionally applied (cmov on value <= -1) so the
 * shift rounds toward zero, as the JVM spec requires for idiv. */
1365 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1366 /* val.i = constant */
1368 var_to_reg_int(s1, src, REG_ITMP1);
1369 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1370 M_INTMOVE(s1, REG_ITMP1);
1371 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1372 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1373 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1374 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1375 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1376 store_reg_to_var_int(iptr->dst, d);
/* ICMD_IREMPOW2: int remainder by a power of two, branch-free.
 * NOTE(review): despite the header comment, val.i here appears to be
 * used as the mask (constant - 1): the AND with -1 - val.i is ~val.i,
 * and the lea adds val.i as the bias -- confirm what the optimizer
 * stores in val.i for this opcode. */
1379 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1380 /* val.i = constant */
1382 var_to_reg_int(s1, src, REG_ITMP1);
1383 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1384 M_INTMOVE(s1, REG_ITMP1);
1385 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1386 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1387 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1388 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1389 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1390 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1391 store_reg_to_var_int(iptr->dst, d);
/* ICMD_LDIV: 64-bit signed division.  Mirrors ICMD_IDIV but with
 * quadword ops; the corner case is LONG_MIN / -1, whose constant does
 * not fit an imm32 and is therefore materialized in REG_ITMP2 first.
 * The jcc skip distances are hand-counted byte lengths. */
1395 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1397 d = reg_of_var(rd, iptr->dst, REG_NULL);
1398 if (src->prev->flags & INMEMORY) {
1399 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1402 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1405 if (src->flags & INMEMORY) {
1406 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1409 M_INTMOVE(src->regoff, REG_ITMP3);
1413 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1414 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1415 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1416 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1417 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1419 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1421 x86_64_idiv_reg(cd, REG_ITMP3);
1423 if (iptr->dst->flags & INMEMORY) {
1424 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1425 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1428 M_INTMOVE(RAX, iptr->dst->regoff);
1430 if (iptr->dst->regoff != RDX) {
1431 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
/* ICMD_LREM: 64-bit remainder; result in RDX.
 * NOTE(review): %rdx is saved into REG_ITMP2 at 1454 and REG_ITMP2 is
 * then immediately overwritten with the 0x8000000000000000 constant at
 * 1456, so the later "restore %rdx" moves would restore a clobbered
 * value -- verify against the upstream source whether this ordering is
 * a known defect in this revision. */
1436 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1438 d = reg_of_var(rd, iptr->dst, REG_NULL);
1439 if (src->prev->flags & INMEMORY) {
1440 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1443 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1446 if (src->flags & INMEMORY) {
1447 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1450 M_INTMOVE(src->regoff, REG_ITMP3);
1454 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1456 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1457 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1458 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1461 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1462 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1463 x86_64_jcc(cd, X86_64_CC_E, 2 + 3); /* 6 bytes */
1466 x86_64_idiv_reg(cd, REG_ITMP3);
1468 if (iptr->dst->flags & INMEMORY) {
1469 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1470 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1473 M_INTMOVE(RDX, iptr->dst->regoff);
1475 if (iptr->dst->regoff != RDX) {
1476 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
/* ICMD_LDIVPOW2: long division by 2^val.i via arithmetic shift, with a
 * conditional bias of (2^val.i - 1) applied to negative dividends (cmov
 * on value <= -1) so the shift rounds toward zero per the JVM spec. */
1481 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1482 /* val.i = constant */
1484 var_to_reg_int(s1, src, REG_ITMP1);
1485 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1486 M_INTMOVE(s1, REG_ITMP1);
1487 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
/* Bias must be computed in 64 bits: the previous `(1 << iptr->val.i) - 1`
 * shifted a plain int, which is undefined behavior in the JIT itself for
 * shift counts >= 31 even though this is a long (64-bit) division.
 * NOTE(review): the lea displacement is still emitted as 32 bits, so
 * shift counts >= 31 would additionally need a different code path --
 * confirm against the displacement width in the emitter. */
1488 x86_64_lea_membase_reg(cd, REG_ITMP1, ((s8) 1 << iptr->val.i) - 1, REG_ITMP2);
1489 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1490 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1491 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1492 store_reg_to_var_int(iptr->dst, d);
/* ICMD_LREMPOW2: long remainder by a power of two, branch-free
 * (bias-and-mask, then subtract).
 * NOTE(review): both the lea bias and the AND mask use iptr->val.i even
 * though the header says val.l, and the imm32 forms limit the usable
 * mask width -- confirm the optimizer only emits this opcode for small
 * power-of-two divisors. */
1495 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1496 /* val.l = constant */
1498 var_to_reg_int(s1, src, REG_ITMP1);
1499 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1500 M_INTMOVE(s1, REG_ITMP1);
1501 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1502 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1503 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1504 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1505 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1506 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1507 store_reg_to_var_int(iptr->dst, d);
/* Shift and bitwise opcodes.  All of these delegate the operand-placement
 * case analysis to the shared emit helpers in emitfuncs.c: *_ishift /
 * *_lshift for variable shift counts (int vs. long widths), the *const
 * variants for immediate counts, and *_ialu / *_lalu (+const) for
 * AND/OR/XOR.  SHL/SAR/SHR map to Java's <<, >> and >>>. */
1510 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1512 d = reg_of_var(rd, iptr->dst, REG_NULL);
1513 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1516 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1517 /* val.i = constant */
1519 d = reg_of_var(rd, iptr->dst, REG_NULL);
1520 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1523 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1525 d = reg_of_var(rd, iptr->dst, REG_NULL);
1526 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1529 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1530 /* val.i = constant */
1532 d = reg_of_var(rd, iptr->dst, REG_NULL);
1533 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1536 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1538 d = reg_of_var(rd, iptr->dst, REG_NULL);
1539 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1542 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1543 /* val.i = constant */
1545 d = reg_of_var(rd, iptr->dst, REG_NULL);
1546 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1549 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1551 d = reg_of_var(rd, iptr->dst, REG_NULL);
1552 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1555 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1556 /* val.i = constant */
1558 d = reg_of_var(rd, iptr->dst, REG_NULL);
1559 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1562 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1564 d = reg_of_var(rd, iptr->dst, REG_NULL);
1565 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1568 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1569 /* val.i = constant */
1571 d = reg_of_var(rd, iptr->dst, REG_NULL);
1572 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1575 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1577 d = reg_of_var(rd, iptr->dst, REG_NULL);
1578 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1581 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1582 /* val.l = constant */
1584 d = reg_of_var(rd, iptr->dst, REG_NULL);
1585 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1588 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1590 d = reg_of_var(rd, iptr->dst, REG_NULL);
1591 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1594 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1595 /* val.i = constant */
1597 d = reg_of_var(rd, iptr->dst, REG_NULL);
1598 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1601 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1603 d = reg_of_var(rd, iptr->dst, REG_NULL);
1604 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1607 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1608 /* val.l = constant */
1610 d = reg_of_var(rd, iptr->dst, REG_NULL);
1611 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1614 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1616 d = reg_of_var(rd, iptr->dst, REG_NULL);
1617 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1620 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1621 /* val.i = constant */
1623 d = reg_of_var(rd, iptr->dst, REG_NULL);
1624 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1627 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1629 d = reg_of_var(rd, iptr->dst, REG_NULL);
1630 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1633 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1634 /* val.l = constant */
1636 d = reg_of_var(rd, iptr->dst, REG_NULL);
1637 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1640 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1642 d = reg_of_var(rd, iptr->dst, REG_NULL);
1643 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1646 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1647 /* val.i = constant */
1649 d = reg_of_var(rd, iptr->dst, REG_NULL);
1650 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1653 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1655 d = reg_of_var(rd, iptr->dst, REG_NULL);
1656 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1659 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1660 /* val.l = constant */
1662 d = reg_of_var(rd, iptr->dst, REG_NULL);
1663 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
/* ICMD_IINC: add a constant to a local int variable in place.  Uses
 * inc/dec for +/-1, else an add-immediate, on either the stack slot or
 * the allocated register.
 * NOTE(review): `d` is read here without a visible assignment -- it is
 * presumably set to the local's regoff on a line not shown; verify. */
1667 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1668 /* op1 = variable, val.i = constant */
1670 /* using inc and dec is definitely faster than add -- tested */
1673 var = &(rd->locals[iptr->op1][TYPE_INT]);
1675 if (var->flags & INMEMORY) {
1676 if (iptr->val.i == 1) {
1677 x86_64_incl_membase(cd, REG_SP, d * 8);
1679 } else if (iptr->val.i == -1) {
1680 x86_64_decl_membase(cd, REG_SP, d * 8);
1683 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1687 if (iptr->val.i == 1) {
1688 x86_64_incl_reg(cd, d);
1690 } else if (iptr->val.i == -1) {
1691 x86_64_decl_reg(cd, d);
1694 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1700 /* floating operations ************************************************/
/* ICMD_FNEG / ICMD_DNEG: negate by XORing the sign bit.  The sign mask
 * is placed in the data segment (dseg_adds4/adds8 return its offset `a`)
 * and loaded RIP-relative; the displacement expression compensates for
 * the 9-byte length of the load instruction itself. */
1702 case ICMD_FNEG: /* ..., value ==> ..., - value */
1704 var_to_reg_flt(s1, src, REG_FTMP1);
1705 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1706 a = dseg_adds4(cd, 0x80000000);
1708 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1709 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1710 store_reg_to_var_flt(iptr->dst, d);
1713 case ICMD_DNEG: /* ..., value ==> ..., - value */
1715 var_to_reg_flt(s1, src, REG_FTMP1);
1716 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1717 a = dseg_adds8(cd, 0x8000000000000000);
1719 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1720 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1721 store_reg_to_var_flt(iptr->dst, d);
/* Commutative SSE arithmetic (add/mul): pick the addss/addsd operand
 * order so that no extra move is needed when either source already sits
 * in the destination register. */
1724 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1726 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1727 var_to_reg_flt(s2, src, REG_FTMP2);
1728 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1730 x86_64_addss_reg_reg(cd, s2, d);
1731 } else if (s2 == d) {
1732 x86_64_addss_reg_reg(cd, s1, d);
1735 x86_64_addss_reg_reg(cd, s2, d);
1737 store_reg_to_var_flt(iptr->dst, d);
1740 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1742 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1743 var_to_reg_flt(s2, src, REG_FTMP2);
1744 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1746 x86_64_addsd_reg_reg(cd, s2, d);
1747 } else if (s2 == d) {
1748 x86_64_addsd_reg_reg(cd, s1, d);
1751 x86_64_addsd_reg_reg(cd, s2, d);
1753 store_reg_to_var_flt(iptr->dst, d);
/* Non-commutative sub/div: if s2 aliases d it must first be parked in
 * REG_FTMP2 so subtracting/dividing into d does not destroy it. */
1756 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1758 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1759 var_to_reg_flt(s2, src, REG_FTMP2);
1760 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1762 M_FLTMOVE(s2, REG_FTMP2);
1766 x86_64_subss_reg_reg(cd, s2, d);
1767 store_reg_to_var_flt(iptr->dst, d);
1770 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1772 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1773 var_to_reg_flt(s2, src, REG_FTMP2);
1774 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1776 M_FLTMOVE(s2, REG_FTMP2);
1780 x86_64_subsd_reg_reg(cd, s2, d);
1781 store_reg_to_var_flt(iptr->dst, d);
1784 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1786 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1787 var_to_reg_flt(s2, src, REG_FTMP2);
1788 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1790 x86_64_mulss_reg_reg(cd, s2, d);
1791 } else if (s2 == d) {
1792 x86_64_mulss_reg_reg(cd, s1, d);
1795 x86_64_mulss_reg_reg(cd, s2, d);
1797 store_reg_to_var_flt(iptr->dst, d);
1800 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1802 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1803 var_to_reg_flt(s2, src, REG_FTMP2);
1804 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1806 x86_64_mulsd_reg_reg(cd, s2, d);
1807 } else if (s2 == d) {
1808 x86_64_mulsd_reg_reg(cd, s1, d);
1811 x86_64_mulsd_reg_reg(cd, s2, d);
1813 store_reg_to_var_flt(iptr->dst, d);
1816 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1818 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1819 var_to_reg_flt(s2, src, REG_FTMP2);
1820 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1822 M_FLTMOVE(s2, REG_FTMP2);
1826 x86_64_divss_reg_reg(cd, s2, d);
1827 store_reg_to_var_flt(iptr->dst, d);
1830 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1832 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1833 var_to_reg_flt(s2, src, REG_FTMP2);
1834 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1836 M_FLTMOVE(s2, REG_FTMP2);
1840 x86_64_divsd_reg_reg(cd, s2, d);
1841 store_reg_to_var_flt(iptr->dst, d);
/* Integer -> float conversions: cvtsi2ss/sd take a 32-bit source, the
 * *q variants a 64-bit (long) source. */
1844 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1846 var_to_reg_int(s1, src, REG_ITMP1);
1847 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1848 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1849 store_reg_to_var_flt(iptr->dst, d);
1852 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1854 var_to_reg_int(s1, src, REG_ITMP1);
1855 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1856 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1857 store_reg_to_var_flt(iptr->dst, d);
1860 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1862 var_to_reg_int(s1, src, REG_ITMP1);
1863 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1864 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1865 store_reg_to_var_flt(iptr->dst, d);
1868 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1870 var_to_reg_int(s1, src, REG_ITMP1);
1871 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1872 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1873 store_reg_to_var_flt(iptr->dst, d);
/* Float -> integer conversions.  cvttss2si/cvttsd2si truncate; on
 * overflow/NaN they produce the "integer indefinite" value (0x80000000
 * resp. 0x8000000000000000).  The CMP detects that sentinel and, if hit,
 * falls through into a call to the asm_builtin_* helper which computes
 * the JVM-correct saturating result.  `a` is the byte length of the
 * slow-path code the jcc skips -- keep the emits and their sizes in sync. */
1876 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1878 var_to_reg_flt(s1, src, REG_FTMP1);
1879 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1880 x86_64_cvttss2si_reg_reg(cd, s1, d);
1881 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1882 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1883 x86_64_jcc(cd, X86_64_CC_NE, a);
1884 M_FLTMOVE(s1, REG_FTMP1);
1885 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1886 x86_64_call_reg(cd, REG_ITMP2);
1887 M_INTMOVE(REG_RESULT, d);
1888 store_reg_to_var_int(iptr->dst, d);
1891 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1893 var_to_reg_flt(s1, src, REG_FTMP1);
1894 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1895 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1896 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1897 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1898 x86_64_jcc(cd, X86_64_CC_NE, a);
1899 M_FLTMOVE(s1, REG_FTMP1);
1900 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1901 x86_64_call_reg(cd, REG_ITMP2);
1902 M_INTMOVE(REG_RESULT, d);
1903 store_reg_to_var_int(iptr->dst, d);
/* long results: the 64-bit sentinel does not fit an imm32, so it is
 * materialized in REG_ITMP2 for the compare */
1906 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1908 var_to_reg_flt(s1, src, REG_FTMP1);
1909 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1910 x86_64_cvttss2siq_reg_reg(cd, s1, d);
1911 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1912 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1913 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1914 x86_64_jcc(cd, X86_64_CC_NE, a);
1915 M_FLTMOVE(s1, REG_FTMP1);
1916 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
1917 x86_64_call_reg(cd, REG_ITMP2);
1918 M_INTMOVE(REG_RESULT, d);
1919 store_reg_to_var_int(iptr->dst, d);
1922 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1924 var_to_reg_flt(s1, src, REG_FTMP1);
1925 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1926 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1927 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1928 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1929 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1930 x86_64_jcc(cd, X86_64_CC_NE, a);
1931 M_FLTMOVE(s1, REG_FTMP1);
1932 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
1933 x86_64_call_reg(cd, REG_ITMP2);
1934 M_INTMOVE(REG_RESULT, d);
1935 store_reg_to_var_int(iptr->dst, d);
/* Float <-> double widen/narrow: single SSE conversion instruction. */
1938 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1940 var_to_reg_flt(s1, src, REG_FTMP1);
1941 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1942 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1943 store_reg_to_var_flt(iptr->dst, d);
1946 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1948 var_to_reg_flt(s1, src, REG_FTMP1);
1949 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1950 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1951 store_reg_to_var_flt(iptr->dst, d);
/* Floating-point compares.  Pattern: zero d, keep +1 in ITMP1 and -1 in
 * ITMP2, do an unordered compare (ucomiss/ucomisd of s1 against s2), then
 * cmov the result in: below -> +1, above -> -1.  The parity flag marks an
 * unordered (NaN) result; FCMPL/DCMPL map it to -1 (GT side), FCMPG/DCMPG
 * to +1 (LT side), matching the JVM's fcmpl/fcmpg distinction. */
1954 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1955 /* == => 0, < => 1, > => -1 */
1957 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1958 var_to_reg_flt(s2, src, REG_FTMP2);
1959 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1960 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1961 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1962 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1963 x86_64_ucomiss_reg_reg(cd, s1, s2);
1964 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1965 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1966 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1967 store_reg_to_var_int(iptr->dst, d);
1970 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1971 /* == => 0, < => 1, > => -1 */
1973 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1974 var_to_reg_flt(s2, src, REG_FTMP2);
1975 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1976 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1977 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1978 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1979 x86_64_ucomiss_reg_reg(cd, s1, s2);
1980 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1981 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1982 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1983 store_reg_to_var_int(iptr->dst, d);
1986 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1987 /* == => 0, < => 1, > => -1 */
1989 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1990 var_to_reg_flt(s2, src, REG_FTMP2);
1991 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1992 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1993 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1994 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1995 x86_64_ucomisd_reg_reg(cd, s1, s2);
1996 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1997 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1998 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1999 store_reg_to_var_int(iptr->dst, d);
2002 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
2003 /* == => 0, < => 1, > => -1 */
2005 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2006 var_to_reg_flt(s2, src, REG_FTMP2);
2007 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2008 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
2009 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
2010 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
2011 x86_64_ucomisd_reg_reg(cd, s1, s2);
2012 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
2013 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
2014 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
2015 store_reg_to_var_int(iptr->dst, d);
2019 /* memory operations **************************************************/
/* ICMD_ARRAYLENGTH: load the length field from the array header. */
2021 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
2023 var_to_reg_int(s1, src, REG_ITMP1);
2024 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2025 gen_nullptr_check(s1);
2026 x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
2027 store_reg_to_var_int(iptr->dst, d);
/* Array load opcodes: s1 = arrayref, s2 = index; the element is read at
 * data[0] + (index << scale), scale matching the element size (3 = 8
 * bytes, 2 = 4, 1 = 2, 0 = 1).  iptr->op1 == 0 means the verifier could
 * not prove the reference non-null, so a null check is emitted (the
 * bounds check presumably sits on lines not shown here). */
2030 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2032 var_to_reg_int(s1, src->prev, REG_ITMP1);
2033 var_to_reg_int(s2, src, REG_ITMP2);
2034 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2035 if (iptr->op1 == 0) {
2036 gen_nullptr_check(s1);
2039 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2040 store_reg_to_var_int(iptr->dst, d);
2043 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2045 var_to_reg_int(s1, src->prev, REG_ITMP1);
2046 var_to_reg_int(s2, src, REG_ITMP2);
2047 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2048 if (iptr->op1 == 0) {
2049 gen_nullptr_check(s1);
2052 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2053 store_reg_to_var_int(iptr->dst, d);
2056 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2058 var_to_reg_int(s1, src->prev, REG_ITMP1);
2059 var_to_reg_int(s2, src, REG_ITMP2);
2060 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2061 if (iptr->op1 == 0) {
2062 gen_nullptr_check(s1);
2065 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2066 store_reg_to_var_int(iptr->dst, d);
2069 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2071 var_to_reg_int(s1, src->prev, REG_ITMP1);
2072 var_to_reg_int(s2, src, REG_ITMP2);
2073 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2074 if (iptr->op1 == 0) {
2075 gen_nullptr_check(s1);
2078 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2079 store_reg_to_var_flt(iptr->dst, d);
2082 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2084 var_to_reg_int(s1, src->prev, REG_ITMP1);
2085 var_to_reg_int(s2, src, REG_ITMP2);
2086 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2087 if (iptr->op1 == 0) {
2088 gen_nullptr_check(s1);
2091 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2092 store_reg_to_var_flt(iptr->dst, d);
/* char is zero-extended (movzwq), short sign-extended (movswq), byte
 * sign-extended (movsbq) -- matching Java's widening rules */
2095 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2097 var_to_reg_int(s1, src->prev, REG_ITMP1);
2098 var_to_reg_int(s2, src, REG_ITMP2);
2099 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2100 if (iptr->op1 == 0) {
2101 gen_nullptr_check(s1);
2104 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2105 store_reg_to_var_int(iptr->dst, d);
2108 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2110 var_to_reg_int(s1, src->prev, REG_ITMP1);
2111 var_to_reg_int(s2, src, REG_ITMP2);
2112 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2113 if (iptr->op1 == 0) {
2114 gen_nullptr_check(s1);
2117 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2118 store_reg_to_var_int(iptr->dst, d);
2121 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2123 var_to_reg_int(s1, src->prev, REG_ITMP1);
2124 var_to_reg_int(s2, src, REG_ITMP2);
2125 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2126 if (iptr->op1 == 0) {
2127 gen_nullptr_check(s1);
2130 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2131 store_reg_to_var_int(iptr->dst, d);
2135 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2137 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2138 var_to_reg_int(s2, src->prev, REG_ITMP2);
2139 if (iptr->op1 == 0) {
2140 gen_nullptr_check(s1);
2143 var_to_reg_int(s3, src, REG_ITMP3);
/* ---- array store operations ------------------------------------------ */
/* Common pattern for ICMD_xASTORE: s1 = arrayref, s2 = index, s3 = value.*/
/* When iptr->op1 == 0 the checks were not proven redundant, so a runtime */
/* null-pointer check is emitted (the matching bound check is on lines    */
/* not shown in this view — NOTE(review): confirm against full source).   */
/* The last argument of the memindex emitters is the scale factor:        */
/* 3 = 8-byte, 2 = 4-byte, 1 = 2-byte, 0 = 1-byte elements.               */

/* tail of ICMD_AASTORE: 64-bit store of the reference into the array */
2144 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2147 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2149 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2150 var_to_reg_int(s2, src->prev, REG_ITMP2);
2151 if (iptr->op1 == 0) {
2152 gen_nullptr_check(s1);
2155 var_to_reg_int(s3, src, REG_ITMP3);
/* long: full 64-bit store, index scaled by 8 */
2156 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2159 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2161 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2162 var_to_reg_int(s2, src->prev, REG_ITMP2);
2163 if (iptr->op1 == 0) {
2164 gen_nullptr_check(s1);
2167 var_to_reg_int(s3, src, REG_ITMP3);
/* int: 32-bit store (movl), index scaled by 4 */
2168 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2171 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2173 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2174 var_to_reg_int(s2, src->prev, REG_ITMP2);
2175 if (iptr->op1 == 0) {
2176 gen_nullptr_check(s1);
/* float value travels through an SSE temporary register */
2179 var_to_reg_flt(s3, src, REG_FTMP3);
2180 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2183 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2185 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2186 var_to_reg_int(s2, src->prev, REG_ITMP2);
2187 if (iptr->op1 == 0) {
2188 gen_nullptr_check(s1);
2191 var_to_reg_flt(s3, src, REG_FTMP3);
/* double: 64-bit SSE store, index scaled by 8 */
2192 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2195 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2197 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2198 var_to_reg_int(s2, src->prev, REG_ITMP2);
2199 if (iptr->op1 == 0) {
2200 gen_nullptr_check(s1);
2203 var_to_reg_int(s3, src, REG_ITMP3);
/* char: 16-bit store (movw), index scaled by 2 */
2204 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2207 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2209 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2210 var_to_reg_int(s2, src->prev, REG_ITMP2);
2211 if (iptr->op1 == 0) {
2212 gen_nullptr_check(s1);
2215 var_to_reg_int(s3, src, REG_ITMP3);
/* short: 16-bit store, same encoding as char */
2216 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2219 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2221 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2222 var_to_reg_int(s2, src->prev, REG_ITMP2);
2223 if (iptr->op1 == 0) {
2224 gen_nullptr_check(s1);
2227 var_to_reg_int(s3, src, REG_ITMP3);
/* byte/boolean: 8-bit store (movb), unscaled index */
2228 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
/* ---- array store of constants ---------------------------------------- */
/* ICMD_xASTORECONST: the value is an immediate from the instruction      */
/* (iptr->val), so only arrayref (s1) and index (s2) come off the stack.  */
2231 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2233 var_to_reg_int(s1, src->prev, REG_ITMP1);
2234 var_to_reg_int(s2, src, REG_ITMP2);
2235 if (iptr->op1 == 0) {
2236 gen_nullptr_check(s1);
2239 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2242 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2244 var_to_reg_int(s1, src->prev, REG_ITMP1);
2245 var_to_reg_int(s2, src, REG_ITMP2);
2246 if (iptr->op1 == 0) {
2247 gen_nullptr_check(s1);
/* x86_64 mov has no 64-bit immediate-to-memory form: a value that fits  */
/* in a sign-extended imm32 uses one 64-bit store; otherwise the long is */
/* split into two 32-bit immediate stores (low half, then high at +4).   */
2251 if (IS_IMM32(iptr->val.l)) {
2252 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2255 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2256 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2260 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2262 var_to_reg_int(s1, src->prev, REG_ITMP1);
2263 var_to_reg_int(s2, src, REG_ITMP2);
2264 if (iptr->op1 == 0) {
2265 gen_nullptr_check(s1);
/* the only constant reference is null: store 0 into the slot */
2268 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2271 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2273 var_to_reg_int(s1, src->prev, REG_ITMP1);
2274 var_to_reg_int(s2, src, REG_ITMP2);
2275 if (iptr->op1 == 0) {
2276 gen_nullptr_check(s1);
2279 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2282 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2284 var_to_reg_int(s1, src->prev, REG_ITMP1);
2285 var_to_reg_int(s2, src, REG_ITMP2);
2286 if (iptr->op1 == 0) {
2287 gen_nullptr_check(s1);
2290 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2293 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2295 var_to_reg_int(s1, src->prev, REG_ITMP1);
2296 var_to_reg_int(s2, src, REG_ITMP2);
2297 if (iptr->op1 == 0) {
2298 gen_nullptr_check(s1);
2301 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
/* ---- static field stores --------------------------------------------- */
2305 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2306 /* op1 = type, val.a = field address */
2308 /* If the static fields' class is not yet initialized, we do it */
2309 /* now. The call code is generated later. */
/* codegen_addclinitref records this code position so a <clinit> call    */
/* stub can be patched in before the method is first executed.           */
2310 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2311 codegen_addclinitref(cd, cd->mcodeptr, ((fieldinfo *) iptr->val.a)->class);
2313 /* This is just for debugging purposes. Is very difficult to */
2314 /* read patched code. Here we patch the following 5 nop's */
2315 /* so that the real code keeps untouched. */
2316 if (showdisassemble) {
2317 x86_64_nop(cd); x86_64_nop(cd); x86_64_nop(cd);
2318 x86_64_nop(cd); x86_64_nop(cd);
2322 /* This approach is much faster than moving the field address */
2323 /* inline into a register. */
/* Put the address of the field's value slot into the data segment and  */
/* load it RIP-relative; the +7 presumably is the length of the emitted */
/* mov instruction (displacement is from the next instruction) — TODO   */
/* confirm against the emitter.                                         */
2324 a = dseg_addaddress(cd, &(((fieldinfo *) iptr->val.a)->value));
2325 x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
/* dispatch on the field type: 32-bit int, 64-bit long/ref, float, double */
2326 switch (iptr->op1) {
2328 var_to_reg_int(s2, src, REG_ITMP1);
2329 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2333 var_to_reg_int(s2, src, REG_ITMP1);
2334 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2337 var_to_reg_flt(s2, src, REG_FTMP1);
2338 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2341 var_to_reg_flt(s2, src, REG_FTMP1);
2342 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
2347 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2348 /* val = value (in current instruction) */
2349 /* op1 = type, val.a = field address (in */
2350 /* following NOP) */
/* Same as PUTSTATIC, but the field metadata lives in the following NOP */
/* instruction (iptr[1]) and the stored value is an immediate.          */
2352 /* If the static fields' class is not yet initialized, we do it */
2353 /* now. The call code is generated later. */
2354 if (!((fieldinfo *) iptr[1].val.a)->class->initialized) {
2355 codegen_addclinitref(cd, cd->mcodeptr, ((fieldinfo *) iptr[1].val.a)->class);
2357 /* This is just for debugging purposes. Is very difficult to */
2358 /* read patched code. Here we patch the following 5 nop's */
2359 /* so that the real code keeps untouched. */
2360 if (showdisassemble) {
2361 x86_64_nop(cd); x86_64_nop(cd); x86_64_nop(cd);
2362 x86_64_nop(cd); x86_64_nop(cd);
2366 /* This approach is much faster than moving the field address */
2367 /* inline into a register. */
/* NOTE(review): the displacement cast here uses ptrint while the       */
/* PUTSTATIC/GETSTATIC variants use s8 — same arithmetic, inconsistent  */
/* style.                                                               */
2368 a = dseg_addaddress(cd, &(((fieldinfo *) iptr[1].val.a)->value));
2369 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP1);
2370 switch (iptr->op1) {
2373 x86_64_movl_imm_membase(cd, iptr->val.i, REG_ITMP1, 0);
/* 64-bit constant: one store if it fits imm32, otherwise two 32-bit halves */
2378 if (IS_IMM32(iptr->val.l)) {
2379 x86_64_mov_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2381 x86_64_movl_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2382 x86_64_movl_imm_membase(cd, iptr->val.l >> 32, REG_ITMP1, 4);
/* ---- static field loads ---------------------------------------------- */
2388 case ICMD_GETSTATIC: /* ... ==> ..., value */
2389 /* op1 = type, val.a = field address */
2391 /* If the static fields' class is not yet initialized, we do it */
2392 /* now. The call code is generated later. */
2393 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2394 codegen_addclinitref(cd, cd->mcodeptr, ((fieldinfo *) iptr->val.a)->class);
2396 /* This is just for debugging purposes. Is very difficult to */
2397 /* read patched code. Here we patch the following 5 nop's */
2398 /* so that the real code keeps untouched. */
2399 if (showdisassemble) {
2400 x86_64_nop(cd); x86_64_nop(cd); x86_64_nop(cd);
2401 x86_64_nop(cd); x86_64_nop(cd);
2405 /* This approach is much faster than moving the field address */
2406 /* inline into a register. */
/* Load the address of the field's value slot RIP-relative from the     */
/* data segment (mirror of PUTSTATIC above), then load the value with   */
/* a width chosen by the field type.                                    */
2407 a = dseg_addaddress(cd, &(((fieldinfo *) iptr->val.a)->value));
2408 x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
2409 switch (iptr->op1) {
2411 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2412 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2413 store_reg_to_var_int(iptr->dst, d);
2417 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2418 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2419 store_reg_to_var_int(iptr->dst, d);
2422 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2423 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2424 store_reg_to_var_flt(iptr->dst, d);
2427 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2428 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2429 store_reg_to_var_flt(iptr->dst, d);
/* ---- instance field access ------------------------------------------- */
/* a = field offset inside the object; s1 = objectref, always null-      */
/* checked before the access.                                            */
2434 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2435 /* op1 = type, val.i = field offset */
2437 a = ((fieldinfo *)(iptr->val.a))->offset;
2438 var_to_reg_int(s1, src->prev, REG_ITMP1);
2439 gen_nullptr_check(s1);
2440 switch (iptr->op1) {
2442 var_to_reg_int(s2, src, REG_ITMP2);
2443 x86_64_movl_reg_membase(cd, s2, s1, a);
2447 var_to_reg_int(s2, src, REG_ITMP2);
2448 x86_64_mov_reg_membase(cd, s2, s1, a);
2451 var_to_reg_flt(s2, src, REG_FTMP2);
2452 x86_64_movss_reg_membase(cd, s2, s1, a);
2455 var_to_reg_flt(s2, src, REG_FTMP2);
2456 x86_64_movsd_reg_membase(cd, s2, s1, a);
2461 case ICMD_PUTFIELDCONST: /* ..., objectref, value ==> ... */
2462 /* val = value (in current instruction) */
2463 /* op1 = type, val.a = field address (in */
2464 /* following NOP) */
/* constant store: field metadata comes from the following NOP (iptr[1]) */
2466 a = ((fieldinfo *) iptr[1].val.a)->offset;
2467 var_to_reg_int(s1, src, REG_ITMP1);
2468 gen_nullptr_check(s1);
2469 switch (iptr->op1) {
2472 x86_64_movl_imm_membase(cd, iptr->val.i, s1, a);
/* 64-bit constant split into two 32-bit stores when it exceeds imm32 */
2477 if (IS_IMM32(iptr->val.l)) {
2478 x86_64_mov_imm_membase(cd, iptr->val.l, s1, a);
2480 x86_64_movl_imm_membase(cd, iptr->val.l, s1, a);
2481 x86_64_movl_imm_membase(cd, iptr->val.l >> 32, s1, a + 4);
2487 case ICMD_GETFIELD: /* ... ==> ..., value */
2488 /* op1 = type, val.i = field offset */
2490 a = ((fieldinfo *)(iptr->val.a))->offset;
2491 var_to_reg_int(s1, src, REG_ITMP1);
2492 gen_nullptr_check(s1);
2493 switch (iptr->op1) {
2495 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2496 x86_64_movl_membase_reg(cd, s1, a, d);
2497 store_reg_to_var_int(iptr->dst, d);
2501 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2502 x86_64_mov_membase_reg(cd, s1, a, d);
2503 store_reg_to_var_int(iptr->dst, d);
2506 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2507 x86_64_movss_membase_reg(cd, s1, a, d);
2508 store_reg_to_var_flt(iptr->dst, d);
2511 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2512 x86_64_movsd_membase_reg(cd, s1, a, d);
2513 store_reg_to_var_flt(iptr->dst, d);
2519 /* branch operations **************************************************/
2521 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2523 var_to_reg_int(s1, src, REG_ITMP1);
/* exception object goes into the dedicated exception-pointer register */
2524 M_INTMOVE(s1, REG_ITMP1_XPTR);
/* call-with-0-displacement followed by pop: captures the address of   */
/* the faulting site (XPC) off the stack, then jump to the common      */
/* assembler exception handler                                         */
2526 x86_64_call_imm(cd, 0); /* passing exception pointer */
2527 x86_64_pop_reg(cd, REG_ITMP2_XPC);
2529 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
2530 x86_64_jmp_reg(cd, REG_ITMP3);
2533 case ICMD_GOTO: /* ... ==> ... */
2534 /* op1 = target JavaVM pc */
/* emit jump with placeholder displacement; codegen_addreference       */
/* records it for back-patching once the target block's address is     */
/* known                                                               */
2536 x86_64_jmp_imm(cd, 0);
2537 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2540 case ICMD_JSR: /* ... ==> ... */
2541 /* op1 = target JavaVM pc */
/* jsr pushes the return address via call; displacement patched later */
2543 x86_64_call_imm(cd, 0);
2544 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2547 case ICMD_RET: /* ... ==> ... */
2548 /* op1 = local variable */
/* indirect jump through the local variable holding the jsr return address */
2550 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2551 var_to_reg_int(s1, var, REG_ITMP1);
2552 x86_64_jmp_reg(cd, s1);
/* ---- conditional branches -------------------------------------------- */
/* All jcc's are emitted with a 0 displacement and registered with       */
/* codegen_addreference for later patching.  Null tests compare against  */
/* 0 in memory for spilled operands, or use test reg,reg when the        */
/* operand is in a register.                                             */
2555 case ICMD_IFNULL: /* ..., value ==> ... */
2556 /* op1 = target JavaVM pc */
2558 if (src->flags & INMEMORY) {
2559 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2562 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2564 x86_64_jcc(cd, X86_64_CC_E, 0);
2565 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2568 case ICMD_IFNONNULL: /* ..., value ==> ... */
2569 /* op1 = target JavaVM pc */
2571 if (src->flags & INMEMORY) {
2572 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2575 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2577 x86_64_jcc(cd, X86_64_CC_NE, 0);
2578 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
/* IFxx: 32-bit compare of value against val.i, branch on condition code */
2581 case ICMD_IFEQ: /* ..., value ==> ... */
2582 /* op1 = target JavaVM pc, val.i = constant */
2584 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2587 case ICMD_IFLT: /* ..., value ==> ... */
2588 /* op1 = target JavaVM pc, val.i = constant */
2590 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2593 case ICMD_IFLE: /* ..., value ==> ... */
2594 /* op1 = target JavaVM pc, val.i = constant */
2596 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2599 case ICMD_IFNE: /* ..., value ==> ... */
2600 /* op1 = target JavaVM pc, val.i = constant */
2602 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2605 case ICMD_IFGT: /* ..., value ==> ... */
2606 /* op1 = target JavaVM pc, val.i = constant */
2608 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2611 case ICMD_IFGE: /* ..., value ==> ... */
2612 /* op1 = target JavaVM pc, val.i = constant */
2614 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
/* IF_Lxx: same, but 64-bit compare against val.l */
2617 case ICMD_IF_LEQ: /* ..., value ==> ... */
2618 /* op1 = target JavaVM pc, val.l = constant */
2620 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2623 case ICMD_IF_LLT: /* ..., value ==> ... */
2624 /* op1 = target JavaVM pc, val.l = constant */
2626 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2629 case ICMD_IF_LLE: /* ..., value ==> ... */
2630 /* op1 = target JavaVM pc, val.l = constant */
2632 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2635 case ICMD_IF_LNE: /* ..., value ==> ... */
2636 /* op1 = target JavaVM pc, val.l = constant */
2638 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2641 case ICMD_IF_LGT: /* ..., value ==> ... */
2642 /* op1 = target JavaVM pc, val.l = constant */
2644 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2647 case ICMD_IF_LGE: /* ..., value ==> ... */
2648 /* op1 = target JavaVM pc, val.l = constant */
2650 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
/* IF_ICMPxx / IF_LCMPxx / IF_ACMPxx: compare two stack values.          */
/* Reference compares share the 64-bit (lcmp) emitters.                  */
2653 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2654 /* op1 = target JavaVM pc */
2656 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2659 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2660 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2662 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2665 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2666 /* op1 = target JavaVM pc */
2668 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2671 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2672 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2674 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2677 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2678 /* op1 = target JavaVM pc */
2680 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2683 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2684 /* op1 = target JavaVM pc */
2686 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2689 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2690 /* op1 = target JavaVM pc */
2692 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2695 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2696 /* op1 = target JavaVM pc */
2698 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2701 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2702 /* op1 = target JavaVM pc */
2704 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2707 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2708 /* op1 = target JavaVM pc */
2710 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2713 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2714 /* op1 = target JavaVM pc */
2716 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2719 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2720 /* op1 = target JavaVM pc */
2722 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2725 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
/* Branch-free select: preload the destination with the "else" constant */
/* (from the following ELSE_ICONST instruction, iptr[1]), then use      */
/* test + cmovcc to overwrite it with iptr->val.i when the condition    */
/* on the tested value holds.                                           */
2727 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2730 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2731 /* val.i = constant */
2733 var_to_reg_int(s1, src, REG_ITMP1);
2734 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2735 if (iptr[1].opc == ICMD_ELSE_ICONST) {
/* keep the tested value safe before d is overwritten (d may alias s1) */
2737 M_INTMOVE(s1, REG_ITMP1);
2740 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2742 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2743 x86_64_testl_reg_reg(cd, s1, s1);
2744 x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2745 store_reg_to_var_int(iptr->dst, d);
2748 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2749 /* val.i = constant */
2751 var_to_reg_int(s1, src, REG_ITMP1);
2752 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2753 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2755 M_INTMOVE(s1, REG_ITMP1);
2758 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2760 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2761 x86_64_testl_reg_reg(cd, s1, s1);
2762 x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2763 store_reg_to_var_int(iptr->dst, d);
2766 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2767 /* val.i = constant */
2769 var_to_reg_int(s1, src, REG_ITMP1);
2770 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2771 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2773 M_INTMOVE(s1, REG_ITMP1);
2776 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2778 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2779 x86_64_testl_reg_reg(cd, s1, s1);
2780 x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2781 store_reg_to_var_int(iptr->dst, d);
2784 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2785 /* val.i = constant */
2787 var_to_reg_int(s1, src, REG_ITMP1);
2788 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2789 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2791 M_INTMOVE(s1, REG_ITMP1);
2794 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2796 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2797 x86_64_testl_reg_reg(cd, s1, s1);
2798 x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2799 store_reg_to_var_int(iptr->dst, d);
2802 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2803 /* val.i = constant */
2805 var_to_reg_int(s1, src, REG_ITMP1);
2806 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2807 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2809 M_INTMOVE(s1, REG_ITMP1);
2812 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2814 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2815 x86_64_testl_reg_reg(cd, s1, s1);
2816 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2817 store_reg_to_var_int(iptr->dst, d);
2820 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2821 /* val.i = constant */
2823 var_to_reg_int(s1, src, REG_ITMP1);
2824 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2825 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2827 M_INTMOVE(s1, REG_ITMP1);
2830 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2832 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2833 x86_64_testl_reg_reg(cd, s1, s1);
2834 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2835 store_reg_to_var_int(iptr->dst, d);
/* ---- method return --------------------------------------------------- */
/* Move the return value into the ABI result register, then fall into   */
/* the shared epilogue (nowperformreturn): optional call trace, monitor  */
/* exit for synchronized methods, callee-saved register restore, stack   */
/* frame teardown.                                                       */
2839 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2843 var_to_reg_int(s1, src, REG_RESULT);
2844 M_INTMOVE(s1, REG_RESULT);
2846 goto nowperformreturn;
2848 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2851 var_to_reg_flt(s1, src, REG_FRESULT);
2852 M_FLTMOVE(s1, REG_FRESULT);
2854 goto nowperformreturn;
2856 case ICMD_RETURN: /* ... ==> ... */
/* p counts remaining stack slots of the frame during register restore */
2862 p = parentargs_base;
2864 /* call trace function */
/* save both result registers around the builtin_displaymethodstop call */
2866 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2868 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2869 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2871 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
2872 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
/* the float result is passed in both float arg registers (trace helper */
/* takes it once as float, once as double — TODO confirm signature)     */
2873 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2874 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2876 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
2877 x86_64_call_reg(cd, REG_ITMP1);
2879 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2880 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2882 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2885 #if defined(USE_THREADS)
/* synchronized method: release the monitor saved at rd->maxmemuse * 8, */
/* preserving the live return value across the builtin_monitorexit call */
2886 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2887 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2889 /* we need to save the proper return value */
2890 switch (iptr->opc) {
2894 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, rd->maxmemuse * 8);
2898 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, rd->maxmemuse * 8);
2902 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2903 x86_64_call_reg(cd, REG_ITMP1);
2905 /* and now restore the proper return value */
2906 switch (iptr->opc) {
2910 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_RESULT);
2914 x86_64_movq_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_FRESULT);
2920 /* restore saved registers */
2921 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
2922 p--; x86_64_mov_membase_reg(cd, REG_SP, p * 8, rd->savintregs[i]);
2924 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
2925 p--; x86_64_movq_membase_reg(cd, REG_SP, p * 8, rd->savfltregs[i]);
2928 /* deallocate stack */
2929 if (parentargs_base) {
2930 x86_64_alu_imm_reg(cd, X86_64_ADD, parentargs_base * 8, REG_SP);
/* ---- switch instructions --------------------------------------------- */
2938 case ICMD_TABLESWITCH: /* ..., index ==> ... */
/* table layout in val.a: [0]=default pc, [1]=low, [2]=high, then the   */
/* per-entry targets; iptr->target carries the resolved basicblock ptrs */
2943 tptr = (void **) iptr->target;
2945 s4ptr = iptr->val.a;
2946 l = s4ptr[1]; /* low */
2947 i = s4ptr[2]; /* high */
2949 var_to_reg_int(s1, src, REG_ITMP1);
2950 M_INTMOVE(s1, REG_ITMP1);
/* normalize index to 0..(high-low); a single UNSIGNED compare (ja)     */
/* then catches both below-low and above-high in one branch to default  */
2952 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
2957 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
2958 x86_64_jcc(cd, X86_64_CC_A, 0);
2960 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
2961 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2963 /* build jump table top down and use address of lowest entry */
2965 /* s4ptr += 3 + i; */
2969 /* dseg_addtarget(cd, BlockPtrOfPC(*--s4ptr)); */
2970 dseg_addtarget(cd, (basicblock *) tptr[0]);
2974 /* length of dataseg after last dseg_addtarget is used by load */
/* mov imm 0 is patched by dseg_adddata with the data-segment address;  */
/* the indexed load fetches the target from the table, then jump        */
2976 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
2977 dseg_adddata(cd, cd->mcodeptr);
2978 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
2979 x86_64_jmp_reg(cd, REG_ITMP1);
2984 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2986 s4 i, l, val, *s4ptr;
2989 tptr = (void **) iptr->target;
2991 s4ptr = iptr->val.a;
2992 l = s4ptr[0]; /* default */
2993 i = s4ptr[1]; /* count */
2995 MCODECHECK((i<<2)+8);
/* linear chain of cmp/je, one per (match, target) pair */
2996 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
3002 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
3003 x86_64_jcc(cd, X86_64_CC_E, 0);
3004 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[1]), cd->mcodeptr); */
3005 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
/* no match: unconditional jump to the default target */
3008 x86_64_jmp_imm(cd, 0);
3009 /* codegen_addreference(cd, BlockPtrOfPC(l), cd->mcodeptr); */
3011 tptr = (void **) iptr->target;
3012 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
/* ---- method invocation ----------------------------------------------- */
3017 case ICMD_BUILTIN3: /* ..., arg1, arg2, arg3 ==> ... */
3018 /* op1 = return type, val.a = function pointer*/
3022 case ICMD_BUILTIN2: /* ..., arg1, arg2 ==> ... */
3023 /* op1 = return type, val.a = function pointer*/
3027 case ICMD_BUILTIN1: /* ..., arg1 ==> ... */
3028 /* op1 = return type, val.a = function pointer*/
3032 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3033 /* op1 = arg count, val.a = method pointer */
3035 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3036 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
3037 case ICMD_INVOKEINTERFACE:
3047 MCODECHECK((s3 << 1) + 64);
3054 /* copy arguments to registers or stack location ******************/
3056 /* count integer and float arguments */
3058 for (; --s3 >= 0; src = src->prev) {
3059 IS_INT_LNG_TYPE(src->type) ? iarg++ : farg++;
3065 /* calculate amount of arguments to be on stack */
/* NOTE(review): operator-precedence bug. '?:' binds looser than '+',   */
/* so this parses as                                                    */
/*   iarg > INT_ARG_CNT ? iarg - INT_ARG_CNT                            */
/*                      : ((0 + (farg > FLT_ARG_CNT)) ? farg - ... : 0) */
/* i.e. the float overflow count is silently DROPPED whenever the int   */
/* count overflows the registers.  Intended is the sum of both excess   */
/* counts; the whole of each conditional needs parentheses.             */
3067 s2 = (iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0 +
3068 (farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0;
/* second pass: walk the args again, decrementing the per-class counts  */
/* so each arg knows its register index / stack slot                    */
3070 for (; --s3 >= 0; src = src->prev) {
3071 /* decrement the current argument type */
3072 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
3074 if (src->varkind == ARGVAR) {
3075 if (IS_INT_LNG_TYPE(src->type)) {
3076 if (iarg >= INT_ARG_CNT) {
3080 if (farg >= FLT_ARG_CNT) {
/* non-ARGVAR: materialize into a register or spill to a stack slot */
3087 if (IS_INT_LNG_TYPE(src->type)) {
3088 if (iarg < INT_ARG_CNT) {
3089 s1 = rd->argintregs[iarg];
3090 var_to_reg_int(d, src, s1);
3094 var_to_reg_int(d, src, REG_ITMP1);
3096 x86_64_mov_reg_membase(cd, d, REG_SP, s2 * 8);
3100 if (farg < FLT_ARG_CNT) {
3101 s1 = rd->argfltregs[farg];
3102 var_to_reg_flt(d, src, s1);
3106 var_to_reg_flt(d, src, REG_FTMP1);
3108 x86_64_movq_reg_membase(cd, d, REG_SP, s2 * 8);
/* ---- emit the actual call, per opcode -------------------------------- */
3114 switch (iptr->opc) {
/* builtins: direct call through an absolute function pointer */
3121 x86_64_mov_imm_reg(cd, a, REG_ITMP1);
3122 x86_64_call_reg(cd, REG_ITMP1);
3125 case ICMD_INVOKESTATIC:
3126 a = (s8) lm->stubroutine;
3129 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3130 x86_64_call_reg(cd, REG_ITMP2);
3133 case ICMD_INVOKESPECIAL:
3134 a = (s8) lm->stubroutine;
/* dereference the receiver so a null receiver traps in hardware */
3137 gen_nullptr_check(rd->argintregs[0]); /* first argument contains pointer */
3138 x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); /* access memory for hardware nullptr */
3139 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3140 x86_64_call_reg(cd, REG_ITMP2);
3143 case ICMD_INVOKEVIRTUAL:
/* load vftbl from the receiver, then the method entry at its vtable index */
3146 gen_nullptr_check(rd->argintregs[0]);
3147 x86_64_mov_membase_reg(cd, rd->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3148 x86_64_mov_membase32_reg(cd, REG_ITMP2, OFFSET(vftbl_t, table[0]) + sizeof(methodptr) * lm->vftblindex, REG_ITMP1);
3149 x86_64_call_reg(cd, REG_ITMP1);
3152 case ICMD_INVOKEINTERFACE:
/* double indirection: vftbl -> per-interface table (negative index)    */
/* -> method slot within that interface's table                         */
3156 gen_nullptr_check(rd->argintregs[0]);
3157 x86_64_mov_membase_reg(cd, rd->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3158 x86_64_mov_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, interfacetable[0]) - sizeof(methodptr) * ci->index, REG_ITMP2);
3159 x86_64_mov_membase32_reg(cd, REG_ITMP2, sizeof(methodptr) * (lm - ci->methods), REG_ITMP1);
3160 x86_64_call_reg(cd, REG_ITMP1);
3164 /* d contains return type */
/* move the ABI result register into the destination variable */
3166 if (d != TYPE_VOID) {
3167 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3168 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3169 M_INTMOVE(REG_RESULT, s1);
3170 store_reg_to_var_int(iptr->dst, s1);
3173 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3174 M_FLTMOVE(REG_FRESULT, s1);
3175 store_reg_to_var_flt(iptr->dst, s1);
3182 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3184 /* op1: 0 == array, 1 == class */
3185 /* val.a: (classinfo*) superclass */
3187 /* superclass is an interface:
3189 * return (sub != NULL) &&
3190 * (sub->vftbl->interfacetablelength > super->index) &&
3191 * (sub->vftbl->interfacetable[-super->index] != NULL);
3193 * superclass is a class:
3195 * return ((sub != NULL) && (0
3196 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3197 * super->vftbl->diffvall));
3201 classinfo *super = (classinfo *) iptr->val.a;
3203 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3204 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3207 var_to_reg_int(s1, src, REG_ITMP1);
3208 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3210 M_INTMOVE(s1, REG_ITMP1);
/* result defaults to 0 (false); set to 1 by setcc only on success */
3213 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3214 if (iptr->op1) { /* class/interface */
3215 if (super->flags & ACC_INTERFACE) { /* interface */
3216 x86_64_test_reg_reg(cd, s1, s1);
/* 'a' accumulates the byte length of the upcoming instruction          */
/* sequence so the null-test jcc below can skip it with a fixed         */
/* forward displacement (no label/patching mechanism for short skips).  */
/* The per-instruction sizes must be kept in sync with the emitters.    */
3218 /* TODO: clean up this calculation */
3219 a = 3; /* mov_membase_reg */
3220 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3222 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3223 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3226 CALCIMMEDIATEBYTES(a, super->index);
3231 a += 3; /* mov_membase_reg */
3232 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
/* null reference: jump over the whole check, leaving d == 0 */
3237 x86_64_jcc(cd, X86_64_CC_E, a);
/* interfacetablelength must exceed super->index and the table entry    */
/* must be non-null for the instance test to succeed                    */
3239 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3240 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength), REG_ITMP2);
3241 x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP2);
3242 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3244 /* TODO: clean up this calculation */
3246 a += 3; /* mov_membase_reg */
3247 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3252 x86_64_jcc(cd, X86_64_CC_LE, a);
3253 x86_64_mov_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP1);
3254 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3255 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3257 } else { /* class */
/* subtype test via baseval/diffval range check (see comment above):    */
/* 0 <= sub.baseval - super.baseval <= super.diffval, done with one     */
/* unsigned compare (CC_BE)                                             */
3258 x86_64_test_reg_reg(cd, s1, s1);
3260 /* TODO: clean up this calculation */
3261 a = 3; /* mov_membase_reg */
3262 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3264 a += 10; /* mov_imm_reg */
3266 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3267 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, baseval));
3269 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3270 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3272 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3273 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3280 x86_64_jcc(cd, X86_64_CC_E, a);
3282 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3283 x86_64_mov_imm_reg(cd, (ptrint) super->vftbl, REG_ITMP2);
/* baseval/diffval reads are guarded against concurrent vtbl updates */
3284 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3285 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3287 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, baseval), REG_ITMP1);
3288 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP3);
3289 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3290 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3291 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3293 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP1);
3294 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3295 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
3296 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
/* array instanceof must have been rewritten to a builtin earlier */
3300 panic("internal error: no inlined array instanceof");
3302 store_reg_to_var_int(iptr->dst, d);
3305 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3307 /* op1: 0 == array, 1 == class */
3308 /* val.a: (classinfo*) superclass */
3310 /* superclass is an interface:
3312 * OK if ((sub == NULL) ||
3313 * (sub->vftbl->interfacetablelength > super->index) &&
3314 * (sub->vftbl->interfacetable[-super->index] != NULL));
3316 * superclass is a class:
3318 * OK if ((sub == NULL) || (0
3319 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3320 * super->vftbl->diffval));
3324 classinfo *super = (classinfo *) iptr->val.a;
3326 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3327 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3329 var_to_reg_int(s1, src, REG_ITMP1);
3330 if (iptr->op1) { /* class/interface */
3331 if (super->flags & ACC_INTERFACE) { /* interface */
/* Same test as INSTANCEOF, but failure branches to a               */
/* ClassCastException stub (codegen_addxcastrefs) instead of        */
/* producing a boolean.  A null reference always passes.            */
3332 x86_64_test_reg_reg(cd, s1, s1);
/* 'a' = byte length of the check sequence, used as the fixed       */
/* forward displacement for the null-skip jcc (sizes must match     */
/* the emitters below)                                              */
3334 /* TODO: clean up this calculation */
3335 a = 3; /* mov_membase_reg */
3336 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3338 a += 3; /* movl_membase_reg - if REG_ITMP3 == R10 */
3339 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3342 CALCIMMEDIATEBYTES(a, super->index);
3347 a += 3; /* mov_membase_reg */
3348 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3353 x86_64_jcc(cd, X86_64_CC_E, a);
3355 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP2);
3356 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, interfacetablelength), REG_ITMP3);
3357 x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP3);
3358 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
/* table too short -> cast fails */
3359 x86_64_jcc(cd, X86_64_CC_LE, 0);
3360 codegen_addxcastrefs(cd, cd->mcodeptr);
3361 x86_64_mov_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP3);
3362 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
/* empty interface slot -> cast fails */
3363 x86_64_jcc(cd, X86_64_CC_E, 0);
3364 codegen_addxcastrefs(cd, cd->mcodeptr);
3366 } else { /* class */
/* class cast via baseval/diffval range check; the two branches      */
/* below differ only in register assignment, depending on whether    */
/* s1 occupies REG_ITMP1                                             */
3367 x86_64_test_reg_reg(cd, s1, s1);
3369 /* TODO: clean up this calculation */
3370 a = 3; /* mov_membase_reg */
3371 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3372 a += 10; /* mov_imm_reg */
3373 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3374 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3376 if (s1 != REG_ITMP1) {
3377 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3378 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3379 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3380 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3384 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3385 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3387 a += 10; /* mov_imm_reg */
3388 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3389 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
/* null reference: skip the whole check */
3395 x86_64_jcc(cd, X86_64_CC_E, a);
3397 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP2);
3398 x86_64_mov_imm_reg(cd, (ptrint) super->vftbl, REG_ITMP3);
3399 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3400 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3402 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP2);
3403 if (s1 != REG_ITMP1) {
3404 x86_64_movl_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1);
3405 x86_64_movl_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3);
3406 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3407 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3409 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP1, REG_ITMP2);
/* s1 == REG_ITMP1: REG_ITMP3 must be reloaded with super->vftbl for    */
/* the diffval read because it was consumed for baseval                 */
3412 x86_64_movl_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP3);
3413 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP2);
3414 x86_64_mov_imm_reg(cd, (ptrint) super->vftbl, REG_ITMP3);
3415 x86_64_movl_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3);
3417 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3418 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3420 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP2);
/* unsigned above => baseval difference out of [0, diffval] => fail */
3421 x86_64_jcc(cd, X86_64_CC_A, 0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3422 codegen_addxcastrefs(cd, cd->mcodeptr);
/* array checkcast must have been rewritten to a builtin earlier */
3426 panic("internal error: no inlined array checkcast");
/* checkcast leaves the (unchanged) reference as its result */
3428 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3430 store_reg_to_var_int(iptr->dst, d);
3431 /* if (iptr->dst->flags & INMEMORY) { */
3432 /* x86_64_mov_reg_membase(cd, s1, REG_SP, iptr->dst->regoff * 8); */
3434 /* M_INTMOVE(s1, iptr->dst->regoff); */
3438 case ICMD_CHECKASIZE: /* ..., size ==> ..., size */
/* trap if the proposed array size is negative; branch is resolved    */
/* later to the negative-array-size stub                              */
3440 if (src->flags & INMEMORY) {
3441 x86_64_alul_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
3444 x86_64_testl_reg_reg(cd, src->regoff, src->regoff);
3446 x86_64_jcc(cd, X86_64_CC_L, 0);
3447 codegen_addxcheckarefs(cd, cd->mcodeptr);
3450 case ICMD_CHECKEXCEPTION: /* ... ==> ... */
/* after a builtin call: REG_RESULT == 0 signals a pending exception; */
/* branch to the exception-check stub in that case                    */
3452 x86_64_test_reg_reg(cd, REG_RESULT, REG_RESULT);
3453 x86_64_jcc(cd, X86_64_CC_E, 0);
3454 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3457 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3458 /* op1 = dimension, val.a = array descriptor */
3460 /* check for negative sizes and copy sizes to stack if necessary */
3462 MCODECHECK((iptr->op1 << 1) + 64);
/* walk the dimension counts from last to first */
3464 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3465 var_to_reg_int(s2, src, REG_ITMP1);
3466 x86_64_testl_reg_reg(cd, s2, s2);
3467 x86_64_jcc(cd, X86_64_CC_L, 0);
3468 codegen_addxcheckarefs(cd, cd->mcodeptr);
3470 /* copy SAVEDVAR sizes to stack */
3472 if (src->varkind != ARGVAR) {
3473 x86_64_mov_reg_membase(cd, s2, REG_SP, s1 * 8);
3477 /* a0 = dimension count */
3478 x86_64_mov_imm_reg(cd, iptr->op1, rd->argintregs[0]);
3480 /* a1 = arraydescriptor */
3481 x86_64_mov_imm_reg(cd, (u8) iptr->val.a, rd->argintregs[1]);
3483 /* a2 = pointer to dimensions = stack pointer */
3484 x86_64_mov_reg_reg(cd, REG_SP, rd->argintregs[2]);
/* call the runtime helper; it returns the new array reference */
3486 x86_64_mov_imm_reg(cd, (u8) builtin_nmultianewarray, REG_ITMP1);
3487 x86_64_call_reg(cd, REG_ITMP1);
3489 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3490 M_INTMOVE(REG_RESULT, s1);
3491 store_reg_to_var_int(iptr->dst, s1);
/* default case of the opcode switch: unknown intermediate opcode */
3495 throw_cacao_exception_exit(string_java_lang_InternalError,
3496 "Unknown ICMD %d", iptr->opc);
3499 } /* for instruction */
3501 /* copy values to interface registers */
/* At basic-block exit, move every live stack slot that is not already */
/* a STACKVAR into its assigned interface register or stack location.  */
3503 src = bptr->outstack;
3504 len = bptr->outdepth;
3505 MCODECHECK(64 + len);
3511 if ((src->varkind != STACKVAR)) {
3513 if (IS_FLT_DBL_TYPE(s2)) {
3514 var_to_reg_flt(s1, src, REG_FTMP1);
3515 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3516 M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
/* interface slot lives in memory: spill to the stack frame */
3519 x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3523 var_to_reg_int(s1, src, REG_ITMP1);
3524 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3525 M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3528 x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3534 } /* if (bptr -> flags >= BBREACHED) */
3535 } /* for basic block */
3539 /* generate bound check stubs */
/* Stub pattern (shared by all the exception stubs below): resolve each  */
/* recorded branch to point here, materialize the faulting PC in         */
/* REG_ITMP2_XPC, call the exception-object constructor, then jump to    */
/* asm_handle_exception.  The first stub emits the full body; later      */
/* refs of the same kind just jump to it (xcodeptr caches its address).  */
/* The literal byte sizes in the comments must match the emitters.       */
3541 u1 *xcodeptr = NULL;
3544 for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3545 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3547 cd->mcodeptr - cd->mcodebase);
3551 /* move index register into REG_ITMP1 */
3552 x86_64_mov_reg_reg(cd, bref->reg, REG_ITMP1); /* 3 bytes */
/* XPC = data-segment base (patched by dseg_adddata) + branchpos - 6 */
3554 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3555 dseg_adddata(cd, cd->mcodeptr);
3556 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3557 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3559 if (xcodeptr != NULL) {
3560 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3563 xcodeptr = cd->mcodeptr;
/* save XPC across the call; 2*8 keeps the stack 16-byte aligned */
3565 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3566 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3568 x86_64_mov_reg_reg(cd, REG_ITMP1, rd->argintregs[0]);
3569 x86_64_mov_imm_reg(cd, (u8) new_arrayindexoutofboundsexception, REG_ITMP3);
3570 x86_64_call_reg(cd, REG_ITMP3);
3572 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3573 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3575 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3576 x86_64_jmp_reg(cd, REG_ITMP3);
3580 /* generate negative array size check stubs */
3584 for (bref = cd->xcheckarefs; bref != NULL; bref = bref->next) {
/* no handlers in this method: branch straight into the existing     */
/* stub body, skipping the (10 + 10 + 3)-byte XPC setup prologue     */
3585 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3586 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3588 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3592 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3594 cd->mcodeptr - cd->mcodebase);
3598 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3599 dseg_adddata(cd, cd->mcodeptr);
3600 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3601 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3603 if (xcodeptr != NULL) {
3604 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3607 xcodeptr = cd->mcodeptr;
3609 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3610 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3612 x86_64_mov_imm_reg(cd, (u8) new_negativearraysizeexception, REG_ITMP3);
3613 x86_64_call_reg(cd, REG_ITMP3);
3615 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3616 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3618 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3619 x86_64_jmp_reg(cd, REG_ITMP3);
3623 /* generate cast check stubs */
3627 for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3628 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3629 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3631 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3635 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3637 cd->mcodeptr - cd->mcodebase);
3641 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3642 dseg_adddata(cd, cd->mcodeptr);
3643 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3644 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3646 if (xcodeptr != NULL) {
3647 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3650 xcodeptr = cd->mcodeptr;
3652 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3653 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3655 x86_64_mov_imm_reg(cd, (u8) new_classcastexception, REG_ITMP3);
3656 x86_64_call_reg(cd, REG_ITMP3);
3658 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3659 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3661 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3662 x86_64_jmp_reg(cd, REG_ITMP3);
3666 /* generate divide by zero check stubs */
3670 for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
3671 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3672 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3674 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3678 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3680 cd->mcodeptr - cd->mcodebase);
3684 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3685 dseg_adddata(cd, cd->mcodeptr);
3686 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3687 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3689 if (xcodeptr != NULL) {
3690 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3693 xcodeptr = cd->mcodeptr;
3695 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3696 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3698 x86_64_mov_imm_reg(cd, (u8) new_arithmeticexception, REG_ITMP3);
3699 x86_64_call_reg(cd, REG_ITMP3);
3701 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3702 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3704 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3705 x86_64_jmp_reg(cd, REG_ITMP3);
3709 /* generate exception check stubs */
/* Same stub pattern as above; here the exception object is fetched      */
/* from the (per-thread) exception pointer instead of being constructed. */
3713 for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
3714 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3715 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3717 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3721 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3723 cd->mcodeptr - cd->mcodebase);
3727 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3728 dseg_adddata(cd, cd->mcodeptr);
3729 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3730 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3732 if (xcodeptr != NULL) {
3733 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3736 xcodeptr = cd->mcodeptr;
3738 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* threaded build: get the exception-pointer slot of this thread, */
/* load the exception object, and clear the slot                  */
3739 x86_64_alu_imm_reg(cd, X86_64_SUB, 8, REG_SP);
3740 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0);
3741 x86_64_mov_imm_reg(cd, (u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3742 x86_64_call_reg(cd, REG_ITMP1);
3743 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
3744 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
3745 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
3746 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);
3747 x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
/* single-threaded build: use the global _exceptionptr directly */
3749 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
3750 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP1_XPTR);
3751 x86_64_mov_imm_membase(cd, 0, REG_ITMP3, 0);
3754 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3755 x86_64_jmp_reg(cd, REG_ITMP3);
3759 /* generate null pointer check stubs */
3763 for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
3764 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3765 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3767 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3771 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3773 cd->mcodeptr - cd->mcodebase);
3777 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3778 dseg_adddata(cd, cd->mcodeptr);
3779 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3780 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3782 if (xcodeptr != NULL) {
3783 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3786 xcodeptr = cd->mcodeptr;
3788 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3789 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3791 x86_64_mov_imm_reg(cd, (u8) new_nullpointerexception, REG_ITMP3);
3792 x86_64_call_reg(cd, REG_ITMP3);
3794 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3795 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3797 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3798 x86_64_jmp_reg(cd, REG_ITMP3);
3802 /* generate put/getstatic stub call code */
/* For each pending <clinit> dependency: overwrite the original call   */
/* site with a `call rel32' into the code below, preserving the bytes  */
/* being patched so asm_check_clinit can restore them afterwards.      */
3810 tmpcd = DNEW(codegendata);
3812 for (cref = cd->clinitrefs; cref != NULL; cref = cref->next) {
3813 /* Get machine code which is patched back in later. A */
3814 /* `call rel32' is 5 bytes long. */
3815 xcodeptr = cd->mcodebase + cref->branchpos;
3817 mcode = *((u4 *) (xcodeptr + 1));
3821 /* patch in `call rel32' to call the following code */
3822 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
3823 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
3825 /* Push machine code bytes to patch onto the stack. */
3826 x86_64_push_imm(cd, (u1) xmcode);
3827 x86_64_push_imm(cd, (u4) mcode);
3829 x86_64_push_imm(cd, (u8) cref->class);
3831 x86_64_mov_imm_reg(cd, (u8) asm_check_clinit, REG_ITMP1);
3832 x86_64_jmp_reg(cd, REG_ITMP1);
3837 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
3841 /* function createcompilerstub *************************************************
3843 creates a stub routine which calls the compiler
3845 *******************************************************************************/
/* COMPSTUBSIZE: byte size of the stub below (mov_imm_reg is 10 bytes,   */
/* jmp_reg is 3; 23 covers the sequence — keep in sync with the code)    */
3847 #define COMPSTUBSIZE 23
3849 u1 *createcompilerstub(methodinfo *m)
3851 u1 *s = CNEW(u1, COMPSTUBSIZE); /* memory to hold the stub */
3855 /* mark start of dump memory area */
3857 dumpsize = dump_size();
3859 cd = DNEW(codegendata);
3862 /* code for the stub */
3863 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1); /* pass method to compiler */
3864 x86_64_mov_imm_reg(cd, (u8) asm_call_jit_compiler, REG_ITMP3);/* load address */
3865 x86_64_jmp_reg(cd, REG_ITMP3); /* jump to compiler */
3867 #if defined(STATISTICS)
3869 count_cstub_len += COMPSTUBSIZE;
3872 /* release dump area */
3874 dump_release(dumpsize);
3880 /* function removecompilerstub *************************************************
3882 deletes a compilerstub from memory (simply by freeing it)
3884 *******************************************************************************/
3886 void removecompilerstub(u1 *stub)
/* size must match the CNEW(u1, COMPSTUBSIZE) in createcompilerstub */
3888 CFREE(stub, COMPSTUBSIZE);
3892 /* function: createnativestub **************************************************
3894 creates a stub routine which calls a native method
3896 *******************************************************************************/
3898 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3899 /* static java_objectheader **(*callgetexceptionptrptr)() = builtin_get_exceptionptrptr; */
3902 #define NATIVESTUBSIZE 700 /* keep this size high enough! */
/* Build the JNI transition stub for native method m with target f:      */
/* optional <clinit> check, optional call tracing, optional lazy         */
/* resolution of f, argument shuffling for the hidden env/class          */
/* parameters, the native call itself, and the pending-exception check.  */
3904 u1 *createnativestub(functionptr f, methodinfo *m)
3906 u1 *s; /* pointer to stub memory */
3909 t_inlining_globals *id;
3911 s4 stackframesize; /* size of stackframe if needed */
3913 s4 iargs; /* count of integer arguments */
3914 s4 fargs; /* count of float arguments */
3917 void **callAddrPatchPos=0;
3919 void **jmpInstrPatchPos=0;
3921 /* initialize variables */
3926 /* mark start of dump memory area */
3928 dumpsize = dump_size();
3930 cd = DNEW(codegendata);
3931 rd = DNEW(registerdata);
3932 id = DNEW(t_inlining_globals);
3934 /* setup registers before using it */
3936 inlining_setup(m, id);
3937 reg_setup(m, rd, id);
3939 /* set paramcount and paramtypes */
3941 descriptor2types(m);
3943 /* count integer and float arguments */
3945 tptr = m->paramtypes;
3946 for (i = 0; i < m->paramcount; i++) {
3947 IS_INT_LNG_TYPE(*tptr++) ? iargs++ : fargs++;
3950 s = CNEW(u1, NATIVESTUBSIZE); /* memory to hold the stub */
3952 /* set some required variables which are normally set by codegen_setup */
3955 cd->clinitrefs = NULL;
3957 /* if function is static, check for initialized */
3959 if ((m->flags & ACC_STATIC) && !m->class->initialized) {
3960 codegen_addclinitref(cd, cd->mcodeptr, m->class);
/* verbose-call tracing: save all argument registers, call           */
/* builtin_trace_args, then restore them (slot 0 holds the method)   */
3966 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
3968 /* save integer and float argument registers */
3970 for (i = 0; i < INT_ARG_CNT; i++) {
3971 x86_64_mov_reg_membase(cd, rd->argintregs[i], REG_SP, (1 + i) * 8);
3974 for (i = 0; i < FLT_ARG_CNT; i++) {
3975 x86_64_movq_reg_membase(cd, rd->argfltregs[i], REG_SP, (1 + INT_ARG_CNT + i) * 8);
3978 /* show integer hex code for float arguments */
3980 for (i = 0, l = 0; i < m->paramcount && i < INT_ARG_CNT; i++) {
3981 /* if the paramtype is a float, we have to right shift all */
3982 /* following integer registers */
3984 if (IS_FLT_DBL_TYPE(m->paramtypes[i])) {
3985 for (s1 = INT_ARG_CNT - 2; s1 >= i; s1--) {
3986 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
3989 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[i]);
3994 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1);
3995 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, 0 * 8);
3996 x86_64_mov_imm_reg(cd, (u8) builtin_trace_args, REG_ITMP1);
3997 x86_64_call_reg(cd, REG_ITMP1);
3999 /* restore integer and float argument registers */
4001 for (i = 0; i < INT_ARG_CNT; i++) {
4002 x86_64_mov_membase_reg(cd, REG_SP, (1 + i) * 8, rd->argintregs[i]);
4005 for (i = 0; i < FLT_ARG_CNT; i++) {
4006 x86_64_movq_membase_reg(cd, REG_SP, (1 + INT_ARG_CNT + i) * 8, rd->argfltregs[i]);
4009 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4012 #if !defined(STATIC_CLASSPATH)
4013 /* call method to resolve native function if needed */
/* Lazy resolution: save all argument registers, then emit a jump    */
/* that codegen_resolve_native later patches to skip this block once */
/* the native address has been filled in at callAddrPatchPos.        */
4015 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4017 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 1 * 8);
4018 x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 2 * 8);
4019 x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 3 * 8);
4020 x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 4 * 8);
4021 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 5 * 8);
4022 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 6 * 8);
4024 x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 7 * 8);
4025 x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 8 * 8);
4026 x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 9 * 8);
4027 x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 10 * 8);
4028 x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 11 * 8);
4029 x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 12 * 8);
4030 x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 13 * 8);
4031 x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 14 * 8);
4033 /* needed to patch a jump over this block */
4034 x86_64_jmp_imm(cd, 0);
4035 jmpInstrPos = cd->mcodeptr - 4;
4037 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
4039 x86_64_mov_imm_reg(cd, 0, rd->argintregs[1]);
4040 callAddrPatchPos = cd->mcodeptr - 8; /* at this position the place is specified where the native function address should be patched into */
4042 x86_64_mov_imm_reg(cd, 0, rd->argintregs[2]);
4043 jmpInstrPatchPos = cd->mcodeptr - 8;
4045 x86_64_mov_imm_reg(cd, jmpInstrPos, rd->argintregs[3]);
4047 x86_64_mov_imm_reg(cd, (u8) codegen_resolve_native, REG_ITMP1);
4048 x86_64_call_reg(cd, REG_ITMP1);
4050 *(jmpInstrPatchPos) = cd->mcodeptr - jmpInstrPos - 1; /*=opcode jmp_imm size*/
4052 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[0]);
4053 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[1]);
4054 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[2]);
4055 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[3]);
4056 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[4]);
4057 x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, rd->argintregs[5]);
4059 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[0]);
4060 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[1]);
4061 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[2]);
4062 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[3]);
4063 x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[4]);
4064 x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[5]);
4065 x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[6]);
4066 x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, rd->argfltregs[7]);
4068 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4072 /* save argument registers on stack -- if we have to */
/* a static method needs 2 extra int args (env, class), an instance  */
/* method needs 1 (env) -- that shift may spill trailing int args    */
4074 if ((((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) || iargs > (INT_ARG_CNT - 1)) ||
4075 (fargs > FLT_ARG_CNT)) {
4082 /* do we need to shift integer argument register onto stack? */
4084 if ((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) {
4085 /* do we need to shift 2 arguments? */
4086 if (iargs > (INT_ARG_CNT - 1)) {
4093 } else if (iargs > (INT_ARG_CNT - 1)) {
4097 /* calculate required stack space */
4099 stackparamcnt += (iargs > INT_ARG_CNT) ? iargs - INT_ARG_CNT : 0;
4100 stackparamcnt += (fargs > FLT_ARG_CNT) ? fargs - FLT_ARG_CNT : 0;
4102 stackframesize = stackparamcnt + paramshiftcnt;
4104 /* keep stack 16-byte aligned */
4105 if (!(stackframesize & 0x1))
4108 x86_64_alu_imm_reg(cd, X86_64_SUB, stackframesize * 8, REG_SP);
4110 /* shift integer arguments if required */
4112 if ((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) {
4113 /* do we need to shift 2 arguments? */
4114 if (iargs > (INT_ARG_CNT - 1))
4115 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 1 * 8);
4117 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 0 * 8);
4119 } else if (iargs > (INT_ARG_CNT - 1)) {
4120 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 0 * 8);
4123 /* copy stack arguments into new stack frame -- if any */
4124 for (i = 0; i < stackparamcnt; i++) {
4125 x86_64_mov_membase_reg(cd, REG_SP, (stackframesize + 1 + i) * 8, REG_ITMP1);
4126 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
4130 /* keep stack 16-byte aligned */
4131 x86_64_alu_imm_reg(cd, X86_64_SUB, 1 * 8, REG_SP);
4135 /* shift integer arguments for `env' and `class' arguments */
4137 if (m->flags & ACC_STATIC) {
4138 /* shift iargs count if less than INT_ARG_CNT, or all */
4139 for (i = (iargs < (INT_ARG_CNT - 2)) ? iargs : (INT_ARG_CNT - 2); i >= 0; i--) {
4140 x86_64_mov_reg_reg(cd, rd->argintregs[i], rd->argintregs[i + 2]);
4143 /* put class into second argument register */
4144 x86_64_mov_imm_reg(cd, (u8) m->class, rd->argintregs[1]);
4147 /* shift iargs count if less than INT_ARG_CNT, or all */
4148 for (i = (iargs < (INT_ARG_CNT - 1)) ? iargs : (INT_ARG_CNT - 1); i >= 0; i--) {
4149 x86_64_mov_reg_reg(cd, rd->argintregs[i], rd->argintregs[i + 1]);
4153 /* put env into first argument register */
4154 x86_64_mov_imm_reg(cd, (u8) &env, rd->argintregs[0]);
4156 /* do the native function call */
4157 x86_64_mov_imm_reg(cd, (u8) f, REG_ITMP1);
4158 #if !defined(STATIC_CLASSPATH)
/* remember where the 8-byte immediate of the mov above sits so the  */
/* resolver can patch the real native address in later               */
4160 (*callAddrPatchPos) = cd->mcodeptr - 8;
4162 x86_64_call_reg(cd, REG_ITMP1);
4164 /* remove stackframe if there is one */
4165 if (stackframesize) {
4166 x86_64_alu_imm_reg(cd, X86_64_ADD, stackframesize * 8, REG_SP);
/* verbose-call tracing of the return value(s) */
4170 x86_64_alu_imm_reg(cd, X86_64_SUB, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4172 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
4173 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
4175 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
4176 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
/* FRESULT is passed twice: once as float, once as double slot */
4177 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
4178 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
4180 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
4181 x86_64_call_reg(cd, REG_ITMP1);
4183 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
4184 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
4186 x86_64_alu_imm_reg(cd, X86_64_ADD, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4189 /* check for exception */
4191 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4192 x86_64_push_reg(cd, REG_RESULT);
4193 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4194 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4195 x86_64_call_reg(cd, REG_ITMP3);
4196 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
4197 x86_64_pop_reg(cd, REG_RESULT);
4199 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4200 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP3);
/* no pending exception: jump over the 1-byte `ret' that follows */
4202 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
4203 x86_64_jcc(cd, X86_64_CC_NE, 1);
4207 /* handle exception */
4209 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* re-fetch the per-thread exception slot and clear it */
4210 x86_64_push_reg(cd, REG_ITMP3);
4211 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4212 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4213 x86_64_call_reg(cd, REG_ITMP3);
4214 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
4215 x86_64_pop_reg(cd, REG_ITMP1_XPTR);
4217 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
4218 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4219 x86_64_alu_reg_reg(cd, X86_64_XOR, REG_ITMP2, REG_ITMP2);
4220 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_ITMP3, 0); /* clear exception pointer */
4223 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC); /* get return address from stack */
4224 x86_64_alu_imm_reg(cd, X86_64_SUB, 3, REG_ITMP2_XPC); /* callq */
4226 x86_64_mov_imm_reg(cd, (u8) asm_handle_nat_exception, REG_ITMP3);
4227 x86_64_jmp_reg(cd, REG_ITMP3);
4230 /* patch in a clinit call if required *************************************/
/* same patching scheme as at the end of codegen() */
4239 tmpcd = DNEW(codegendata);
4241 /* there can only be one clinit ref entry */
4242 cref = cd->clinitrefs;
4245 /* Get machine code which is patched back in later. A */
4246 /* `call rel32' is 5 bytes long. */
4247 xcodeptr = cd->mcodebase + cref->branchpos;
4249 mcode = *((u4 *) (xcodeptr + 1));
4251 /* patch in `call rel32' to call the following code */
4252 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4253 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4255 /* Push machine code bytes to patch onto the stack. */
4256 x86_64_push_imm(cd, (u1) xmcode);
4257 x86_64_push_imm(cd, (u4) mcode);
4259 x86_64_push_imm(cd, (u8) cref->class);
4261 x86_64_mov_imm_reg(cd, (u8) asm_check_clinit, REG_ITMP1);
4262 x86_64_jmp_reg(cd, REG_ITMP1);
4266 /* Check if the stub size is big enough to hold the whole stub generated. */
4267 /* If not, this can lead into unpredictable crashes, because of heap */
/* NOTE(review): the message below says "is to small" -- should read */
/* "is too small" (runtime string, left unchanged here)              */
4269 if ((s4) (cd->mcodeptr - s) > NATIVESTUBSIZE) {
4270 throw_cacao_exception_exit(string_java_lang_InternalError,
4271 "Native stub size %d is to small for current stub size %d",
4272 NATIVESTUBSIZE, (s4) (cd->mcodeptr - s));
4275 #if defined(STATISTICS)
4277 count_nstub_len += NATIVESTUBSIZE;
4280 /* release dump area */
4282 dump_release(dumpsize);
4288 /* function: removenativestub **************************************************
4290 removes a previously created native-stub from memory
4292 *******************************************************************************/
4294 void removenativestub(u1 *stub)
/* size must match the CNEW(u1, NATIVESTUBSIZE) in createnativestub */
4296 CFREE(stub, NATIVESTUBSIZE);
4301 * These are local overrides for various environment variables in Emacs.
4302 * Please do not remove this and leave it at the end of the file, where
4303 * Emacs will automagically detect them.
4304 * ---------------------------------------------------------------------
4307 * indent-tabs-mode: t