1 /* vm/jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996-2005 R. Grafl, A. Krall, C. Kruegel, C. Oates,
4 R. Obermaisser, M. Platter, M. Probst, S. Ring, E. Steiner,
5 C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich, J. Wenninger,
6 Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
25 Contact: cacao@complang.tuwien.ac.at
27 Authors: Andreas Krall
30 $Id: codegen.c 1988 2005-03-05 15:55:51Z twisti $
40 #include "cacao/cacao.h"
41 #include "native/native.h"
42 #include "vm/global.h"
43 #include "vm/builtin.h"
44 #include "vm/loader.h"
45 #include "vm/tables.h"
46 #include "vm/jit/asmpart.h"
47 #include "vm/jit/jit.h"
48 #include "vm/jit/reg.h"
49 #include "vm/jit/parse.h"
50 #include "vm/jit/x86_64/arch.h"
51 #include "vm/jit/x86_64/codegen.h"
52 #include "vm/jit/x86_64/emitfuncs.h"
53 #include "vm/jit/x86_64/types.h"
54 #include "vm/jit/x86_64/asmoffsets.h"
57 /* register description - array ***********************************************/
59 /* #define REG_RES 0 reserved register for OS or code generator */
60 /* #define REG_RET 1 return value register */
61 /* #define REG_EXC 2 exception value register (only old jit) */
62 /* #define REG_SAV 3 (callee) saved register */
63 /* #define REG_TMP 4 scratch temporary register (caller saved) */
64 /* #define REG_ARG 5 argument register (caller saved) */
66 /* #define REG_END -1 last entry in tables */
/* Usage classes of the integer registers, indexed by x86_64 register      */
/* number, consumed by the register allocator in vm/jit/reg.inc.           */
/* Six REG_ARG entries match the SysV AMD64 integer argument registers.    */
/* NOTE(review): the table's closing "};" / REG_END terminator is not      */
/* visible in this chunk - confirm against the full file.                  */
68 static int nregdescint[] = {
69 REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
70 REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
/* Usage classes of the floating point registers (xmm0-xmm15), indexed by  */
/* register number. xmm0-xmm7 are the SysV AMD64 float argument registers. */
/* NOTE(review): the table's closing "};" / REG_END terminator is not      */
/* visible in this chunk - confirm against the full file.                  */
75 static int nregdescfloat[] = {
76 REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG,
77 REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
82 /* Include independent code generation stuff -- include after register */
83 /* descriptions to avoid extern definitions. */
85 #include "vm/jit/codegen.inc"
86 #include "vm/jit/reg.inc"
88 #include "vm/jit/lsra.inc"
92 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* thread_restartcriticalsection **********************************************

   Asks the threading layer whether the interrupted instruction pointer
   (RIP from the passed ucontext) lies inside a registered critical
   section; the returned address is written back into RIP, apparently to
   restart the section from its beginning.

   NOTE(review): lines are missing from this view (the declaration of
   `critical`, the surrounding braces, and presumably a non-NULL guard
   around the RIP assignment) - confirm against the full file.

*******************************************************************************/
93 void thread_restartcriticalsection(ucontext_t *uc)
97 critical = thread_checkcritical((void *) uc->uc_mcontext.gregs[REG_RIP]);
100 uc->uc_mcontext.gregs[REG_RIP] = (u8) critical;
105 /* NullPointerException signal handler for hardware null pointer check */
/* Converts a SIGSEGV raised by the JIT's implicit hardware null-pointer   */
/* check into a Java NullPointerException: the exception object and the    */
/* faulting PC are stored into the registers the asm exception handler     */
/* expects, and execution is redirected to asm_handle_exception.           */
107 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
111 struct ucontext *_uc = (struct ucontext *) _p;
112 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
113 struct sigaction act;
114 java_objectheader *xptr;
116 /* Reset signal handler - necessary for SysV, does no harm for BSD */
118 act.sa_sigaction = catch_NullPointerException; /* reinstall handler */
119 act.sa_flags = SA_SIGINFO;
120 sigaction(sig, &act, NULL);
/* NOTE(review): the sigset `nsig` is used below but its declaration and   */
/* initialization (sigemptyset) are not visible in this chunk, nor is the  */
/* initialization of act.sa_mask - confirm against the full file.          */
123 sigaddset(&nsig, sig);
124 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
/* allocate the NullPointerException object that will be thrown */
126 xptr = new_nullpointerexception();
/* hand control to the JIT exception machinery: exception pointer in rax,  */
/* faulting address in r10, then resume execution at asm_handle_exception  */
128 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
129 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
130 sigctx->rip = (u8) asm_handle_exception;
136 /* ArithmeticException signal handler for hardware divide by zero check */
/* Converts a SIGFPE raised by a hardware divide-by-zero into a Java       */
/* ArithmeticException, using the same register protocol as the            */
/* NullPointerException handler above: exception object in rax, faulting   */
/* PC in r10, then redirect to asm_handle_exception.                       */
138 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
142 struct ucontext *_uc = (struct ucontext *) _p;
143 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
144 struct sigaction act;
145 java_objectheader *xptr;
147 /* Reset signal handler - necessary for SysV, does no harm for BSD */
149 act.sa_sigaction = catch_ArithmeticException; /* reinstall handler */
150 act.sa_flags = SA_SIGINFO;
151 sigaction(sig, &act, NULL);
/* NOTE(review): the sigset `nsig` is used below but its declaration and   */
/* initialization (sigemptyset) are not visible in this chunk, nor is the  */
/* initialization of act.sa_mask - confirm against the full file.          */
154 sigaddset(&nsig, sig);
155 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
/* allocate the ArithmeticException object that will be thrown */
157 xptr = new_arithmeticexception();
/* hand control to the JIT exception machinery */
159 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
160 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
161 sigctx->rip = (u8) asm_handle_exception;
/* init_exceptions *************************************************************

   Installs the signal handlers that turn hardware traps into Java
   exceptions: SIGSEGV and SIGBUS are routed to catch_NullPointerException,
   SIGFPE (hardware divide-by-zero check) to catch_ArithmeticException.
   All handlers are installed with SA_SIGINFO so they receive a ucontext.

*******************************************************************************/
167 void init_exceptions(void)
169 struct sigaction act;
171 /* install signal handlers we need to convert to exceptions */
172 sigemptyset(&act.sa_mask);
/* SIGSEGV: raised by the implicit hardware null-pointer check */
176 act.sa_sigaction = catch_NullPointerException;
177 act.sa_flags = SA_SIGINFO;
178 sigaction(SIGSEGV, &act, NULL);
/* SIGBUS: also mapped to NullPointerException */
182 act.sa_sigaction = catch_NullPointerException;
183 act.sa_flags = SA_SIGINFO;
184 sigaction(SIGBUS, &act, NULL);
/* SIGFPE: hardware divide-by-zero check */
188 act.sa_sigaction = catch_ArithmeticException;
189 act.sa_flags = SA_SIGINFO;
190 sigaction(SIGFPE, &act, NULL);
194 /* function codegen ***********************************************************
196 generates machine code
198 *******************************************************************************/
200 void codegen(methodinfo *m, codegendata *cd, registerdata *rd)
202 s4 len, s1, s2, s3, d;
217 /* space to save used callee saved registers */
219 savedregs_num += (rd->savintregcnt - rd->maxsavintreguse);
220 savedregs_num += (rd->savfltregcnt - rd->maxsavfltreguse);
222 parentargs_base = rd->maxmemuse + savedregs_num;
224 #if defined(USE_THREADS) /* space to save argument of monitor_enter */
226 if (checksync && (m->flags & ACC_SYNCHRONIZED))
231 /* keep stack 16-byte aligned for calls into native code e.g. libc or jni */
232 /* (alignment problems with movaps) */
234 if (!(parentargs_base & 0x1)) {
238 /* create method header */
240 (void) dseg_addaddress(cd, m); /* MethodPointer */
241 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
243 #if defined(USE_THREADS)
245 /* IsSync contains the offset relative to the stack pointer for the
246 argument of monitor_exit used in the exception handler. Since the
247 offset could be zero and give a wrong meaning of the flag it is
251 if (checksync && (m->flags & ACC_SYNCHRONIZED))
252 (void) dseg_adds4(cd, (rd->maxmemuse + 1) * 8); /* IsSync */
257 (void) dseg_adds4(cd, 0); /* IsSync */
259 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
260 (void) dseg_adds4(cd, rd->savintregcnt - rd->maxsavintreguse);/* IntSave */
261 (void) dseg_adds4(cd, rd->savfltregcnt - rd->maxsavfltreguse);/* FltSave */
262 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
264 /* create exception table */
266 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
267 dseg_addtarget(cd, ex->start);
268 dseg_addtarget(cd, ex->end);
269 dseg_addtarget(cd, ex->handler);
270 (void) dseg_addaddress(cd, ex->catchtype);
273 /* initialize mcode variables */
275 cd->mcodeptr = (u1 *) cd->mcodebase;
276 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
277 MCODECHECK(128 + m->paramcount);
279 /* create stack frame (if necessary) */
281 if (parentargs_base) {
282 x86_64_alu_imm_reg(cd, X86_64_SUB, parentargs_base * 8, REG_SP);
285 /* save return address and used callee saved registers */
288 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
289 p--; x86_64_mov_reg_membase(cd, rd->savintregs[i], REG_SP, p * 8);
291 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
292 p--; x86_64_movq_reg_membase(cd, rd->savfltregs[i], REG_SP, p * 8);
295 /* take arguments out of register or stack frame */
297 for (p = 0, l = 0, s1 = 0, s2 = 0; p < m->paramcount; p++) {
298 t = m->paramtypes[p];
299 var = &(rd->locals[l][t]);
301 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
304 if (IS_INT_LNG_TYPE(t)) {
311 if (IS_INT_LNG_TYPE(t)) { /* integer args */
312 if (s1 < INT_ARG_CNT) { /* register arguments */
313 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
314 M_INTMOVE(rd->argintregs[s1], var->regoff);
316 } else { /* reg arg -> spilled */
317 x86_64_mov_reg_membase(cd, rd->argintregs[s1], REG_SP, var->regoff * 8);
320 } else { /* stack arguments */
321 pa = s1 - INT_ARG_CNT;
322 if (s2 >= FLT_ARG_CNT) {
323 pa += s2 - FLT_ARG_CNT;
325 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
326 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff); /* + 8 for return address */
327 } else { /* stack arg -> spilled */
328 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1); /* + 8 for return address */
329 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, var->regoff * 8);
334 } else { /* floating args */
335 if (s2 < FLT_ARG_CNT) { /* register arguments */
336 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
337 M_FLTMOVE(rd->argfltregs[s2], var->regoff);
339 } else { /* reg arg -> spilled */
340 x86_64_movq_reg_membase(cd, rd->argfltregs[s2], REG_SP, var->regoff * 8);
343 } else { /* stack arguments */
344 pa = s2 - FLT_ARG_CNT;
345 if (s1 >= INT_ARG_CNT) {
346 pa += s1 - INT_ARG_CNT;
348 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
349 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff);
352 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
353 x86_64_movq_reg_membase(cd, REG_FTMP1, REG_SP, var->regoff * 8);
360 /* save monitorenter argument */
362 #if defined(USE_THREADS)
363 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
366 if (m->flags & ACC_STATIC) {
367 func_enter = (u8) builtin_staticmonitorenter;
368 x86_64_mov_imm_reg(cd, (s8) m->class, REG_ITMP1);
369 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, rd->maxmemuse * 8);
372 func_enter = (u8) builtin_monitorenter;
373 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, rd->maxmemuse * 8);
376 /* call monitorenter function */
378 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
379 x86_64_mov_imm_reg(cd, func_enter, REG_ITMP1);
380 x86_64_call_reg(cd, REG_ITMP1);
384 /* Copy argument registers to stack and call trace function with pointer */
385 /* to arguments on stack. */
388 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1 + 1) * 8, REG_SP);
390 /* save integer argument registers */
392 for (p = 0; p < INT_ARG_CNT; p++) {
393 x86_64_mov_reg_membase(cd, rd->argintregs[p], REG_SP, (1 + p) * 8);
396 /* save float argument registers */
398 for (p = 0; p < FLT_ARG_CNT; p++) {
399 x86_64_movq_reg_membase(cd, rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
402 /* show integer hex code for float arguments */
404 for (p = 0, l = 0; p < m->paramcount && p < INT_ARG_CNT; p++) {
405 t = m->paramtypes[p];
407 /* if the paramtype is a float, we have to right shift all */
408 /* following integer registers */
410 if (IS_FLT_DBL_TYPE(t)) {
411 for (s1 = INT_ARG_CNT - 2; s1 >= p; s1--) {
412 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
415 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
420 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP2);
421 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
422 x86_64_mov_imm_reg(cd, (u8) builtin_trace_args, REG_ITMP1);
423 x86_64_call_reg(cd, REG_ITMP1);
425 /* restore integer argument registers */
427 for (p = 0; p < INT_ARG_CNT; p++) {
428 x86_64_mov_membase_reg(cd, REG_SP, (1 + p) * 8, rd->argintregs[p]);
431 /* restore float argument registers */
433 for (p = 0; p < FLT_ARG_CNT; p++) {
434 x86_64_movq_membase_reg(cd, REG_SP, (1 + INT_ARG_CNT + p) * 8, rd->argfltregs[p]);
437 x86_64_alu_imm_reg(cd, X86_64_ADD, (6 + 8 + 1 + 1) * 8, REG_SP);
442 /* end of header generation */
444 /* walk through all basic blocks */
445 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
447 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
449 if (bptr->flags >= BBREACHED) {
451 /* branch resolving */
454 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
455 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
460 /* copy interface registers to their destination */
464 MCODECHECK(64 + len);
465 while (src != NULL) {
467 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
468 if (bptr->type == BBTYPE_SBR) {
469 d = reg_of_var(rd, src, REG_ITMP1);
470 x86_64_pop_reg(cd, d);
471 store_reg_to_var_int(src, d);
473 } else if (bptr->type == BBTYPE_EXH) {
474 d = reg_of_var(rd, src, REG_ITMP1);
475 M_INTMOVE(REG_ITMP1, d);
476 store_reg_to_var_int(src, d);
480 d = reg_of_var(rd, src, REG_ITMP1);
481 if ((src->varkind != STACKVAR)) {
483 if (IS_FLT_DBL_TYPE(s2)) {
484 s1 = rd->interfaces[len][s2].regoff;
485 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
489 x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
491 store_reg_to_var_flt(src, d);
494 s1 = rd->interfaces[len][s2].regoff;
495 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
499 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
501 store_reg_to_var_int(src, d);
508 /* walk through all instructions */
512 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
514 MCODECHECK(64); /* an instruction usually needs < 64 words */
516 case ICMD_INLINE_START: /* internal ICMDs */
517 case ICMD_INLINE_END:
520 case ICMD_NOP: /* ... ==> ... */
523 case ICMD_NULLCHECKPOP: /* ..., objectref ==> ... */
524 if (src->flags & INMEMORY) {
525 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
528 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
530 x86_64_jcc(cd, X86_64_CC_E, 0);
531 codegen_addxnullrefs(cd, cd->mcodeptr);
534 /* constant operations ************************************************/
536 case ICMD_ICONST: /* ... ==> ..., constant */
537 /* op1 = 0, val.i = constant */
539 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
540 if (iptr->val.i == 0) {
541 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
543 x86_64_movl_imm_reg(cd, iptr->val.i, d);
545 store_reg_to_var_int(iptr->dst, d);
548 case ICMD_ACONST: /* ... ==> ..., constant */
549 /* op1 = 0, val.a = constant */
551 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
552 if (iptr->val.a == 0) {
553 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
555 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, d);
557 store_reg_to_var_int(iptr->dst, d);
560 case ICMD_LCONST: /* ... ==> ..., constant */
561 /* op1 = 0, val.l = constant */
563 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
564 if (iptr->val.l == 0) {
565 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
567 x86_64_mov_imm_reg(cd, iptr->val.l, d);
569 store_reg_to_var_int(iptr->dst, d);
572 case ICMD_FCONST: /* ... ==> ..., constant */
573 /* op1 = 0, val.f = constant */
575 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
576 a = dseg_addfloat(cd, iptr->val.f);
577 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + a, d);
578 store_reg_to_var_flt(iptr->dst, d);
581 case ICMD_DCONST: /* ... ==> ..., constant */
582 /* op1 = 0, val.d = constant */
584 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
585 a = dseg_adddouble(cd, iptr->val.d);
586 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, d);
587 store_reg_to_var_flt(iptr->dst, d);
591 /* load/store operations **********************************************/
593 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
594 /* op1 = local variable */
596 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
597 if ((iptr->dst->varkind == LOCALVAR) &&
598 (iptr->dst->varnum == iptr->op1)) {
601 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
602 if (var->flags & INMEMORY) {
603 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
604 store_reg_to_var_int(iptr->dst, d);
607 if (iptr->dst->flags & INMEMORY) {
608 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
611 M_INTMOVE(var->regoff, d);
616 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
617 case ICMD_ALOAD: /* op1 = local variable */
619 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
620 if ((iptr->dst->varkind == LOCALVAR) &&
621 (iptr->dst->varnum == iptr->op1)) {
624 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
625 if (var->flags & INMEMORY) {
626 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
627 store_reg_to_var_int(iptr->dst, d);
630 if (iptr->dst->flags & INMEMORY) {
631 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
634 M_INTMOVE(var->regoff, d);
639 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
640 case ICMD_DLOAD: /* op1 = local variable */
642 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
643 if ((iptr->dst->varkind == LOCALVAR) &&
644 (iptr->dst->varnum == iptr->op1)) {
647 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
648 if (var->flags & INMEMORY) {
649 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
650 store_reg_to_var_flt(iptr->dst, d);
653 if (iptr->dst->flags & INMEMORY) {
654 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
657 M_FLTMOVE(var->regoff, d);
662 case ICMD_ISTORE: /* ..., value ==> ... */
663 case ICMD_LSTORE: /* op1 = local variable */
666 if ((src->varkind == LOCALVAR) &&
667 (src->varnum == iptr->op1)) {
670 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
671 if (var->flags & INMEMORY) {
672 var_to_reg_int(s1, src, REG_ITMP1);
673 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
676 var_to_reg_int(s1, src, var->regoff);
677 M_INTMOVE(s1, var->regoff);
681 case ICMD_FSTORE: /* ..., value ==> ... */
682 case ICMD_DSTORE: /* op1 = local variable */
684 if ((src->varkind == LOCALVAR) &&
685 (src->varnum == iptr->op1)) {
688 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
689 if (var->flags & INMEMORY) {
690 var_to_reg_flt(s1, src, REG_FTMP1);
691 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
694 var_to_reg_flt(s1, src, var->regoff);
695 M_FLTMOVE(s1, var->regoff);
700 /* pop/dup/swap operations ********************************************/
702 /* attention: double and longs are only one entry in CACAO ICMDs */
704 case ICMD_POP: /* ..., value ==> ... */
705 case ICMD_POP2: /* ..., value, value ==> ... */
708 case ICMD_DUP: /* ..., a ==> ..., a, a */
709 M_COPY(src, iptr->dst);
712 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
714 M_COPY(src, iptr->dst);
715 M_COPY(src->prev, iptr->dst->prev);
716 M_COPY(iptr->dst, iptr->dst->prev->prev);
719 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
721 M_COPY(src, iptr->dst);
722 M_COPY(src->prev, iptr->dst->prev);
723 M_COPY(src->prev->prev, iptr->dst->prev->prev);
724 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
727 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
729 M_COPY(src, iptr->dst);
730 M_COPY(src->prev, iptr->dst->prev);
733 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
735 M_COPY(src, iptr->dst);
736 M_COPY(src->prev, iptr->dst->prev);
737 M_COPY(src->prev->prev, iptr->dst->prev->prev);
738 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
739 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
742 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
744 M_COPY(src, iptr->dst);
745 M_COPY(src->prev, iptr->dst->prev);
746 M_COPY(src->prev->prev, iptr->dst->prev->prev);
747 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
748 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
749 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
752 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
754 M_COPY(src, iptr->dst->prev);
755 M_COPY(src->prev, iptr->dst);
759 /* integer operations *************************************************/
761 case ICMD_INEG: /* ..., value ==> ..., - value */
763 d = reg_of_var(rd, iptr->dst, REG_NULL);
764 if (iptr->dst->flags & INMEMORY) {
765 if (src->flags & INMEMORY) {
766 if (src->regoff == iptr->dst->regoff) {
767 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
770 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
771 x86_64_negl_reg(cd, REG_ITMP1);
772 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
776 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
777 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
781 if (src->flags & INMEMORY) {
782 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
783 x86_64_negl_reg(cd, d);
786 M_INTMOVE(src->regoff, iptr->dst->regoff);
787 x86_64_negl_reg(cd, iptr->dst->regoff);
792 case ICMD_LNEG: /* ..., value ==> ..., - value */
794 d = reg_of_var(rd, iptr->dst, REG_NULL);
795 if (iptr->dst->flags & INMEMORY) {
796 if (src->flags & INMEMORY) {
797 if (src->regoff == iptr->dst->regoff) {
798 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
801 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
802 x86_64_neg_reg(cd, REG_ITMP1);
803 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
807 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
808 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
812 if (src->flags & INMEMORY) {
813 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
814 x86_64_neg_reg(cd, iptr->dst->regoff);
817 M_INTMOVE(src->regoff, iptr->dst->regoff);
818 x86_64_neg_reg(cd, iptr->dst->regoff);
823 case ICMD_I2L: /* ..., value ==> ..., value */
825 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
826 if (src->flags & INMEMORY) {
827 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
830 x86_64_movslq_reg_reg(cd, src->regoff, d);
832 store_reg_to_var_int(iptr->dst, d);
835 case ICMD_L2I: /* ..., value ==> ..., value */
837 var_to_reg_int(s1, src, REG_ITMP1);
838 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
840 store_reg_to_var_int(iptr->dst, d);
843 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
845 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
846 if (src->flags & INMEMORY) {
847 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
850 x86_64_movsbq_reg_reg(cd, src->regoff, d);
852 store_reg_to_var_int(iptr->dst, d);
855 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
857 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
858 if (src->flags & INMEMORY) {
859 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
862 x86_64_movzwq_reg_reg(cd, src->regoff, d);
864 store_reg_to_var_int(iptr->dst, d);
867 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
869 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
870 if (src->flags & INMEMORY) {
871 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
874 x86_64_movswq_reg_reg(cd, src->regoff, d);
876 store_reg_to_var_int(iptr->dst, d);
880 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
882 d = reg_of_var(rd, iptr->dst, REG_NULL);
883 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
886 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
887 /* val.i = constant */
889 d = reg_of_var(rd, iptr->dst, REG_NULL);
890 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
893 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
895 d = reg_of_var(rd, iptr->dst, REG_NULL);
896 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
899 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
900 /* val.l = constant */
902 d = reg_of_var(rd, iptr->dst, REG_NULL);
903 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
906 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
908 d = reg_of_var(rd, iptr->dst, REG_NULL);
909 if (iptr->dst->flags & INMEMORY) {
910 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
911 if (src->prev->regoff == iptr->dst->regoff) {
912 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
913 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
916 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
917 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
918 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
921 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
922 M_INTMOVE(src->prev->regoff, REG_ITMP1);
923 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
924 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
926 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
927 if (src->prev->regoff == iptr->dst->regoff) {
928 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
931 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
932 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
933 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
937 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
938 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
942 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
943 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
944 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
946 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
947 M_INTMOVE(src->prev->regoff, d);
948 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
950 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
951 /* workaround for reg alloc */
952 if (src->regoff == iptr->dst->regoff) {
953 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
954 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
955 M_INTMOVE(REG_ITMP1, d);
958 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
959 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
963 /* workaround for reg alloc */
964 if (src->regoff == iptr->dst->regoff) {
965 M_INTMOVE(src->prev->regoff, REG_ITMP1);
966 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
967 M_INTMOVE(REG_ITMP1, d);
970 M_INTMOVE(src->prev->regoff, d);
971 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
977 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
978 /* val.i = constant */
980 d = reg_of_var(rd, iptr->dst, REG_NULL);
981 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
984 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
986 d = reg_of_var(rd, iptr->dst, REG_NULL);
987 if (iptr->dst->flags & INMEMORY) {
988 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
989 if (src->prev->regoff == iptr->dst->regoff) {
990 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
991 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
994 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
995 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
996 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
999 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1000 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1001 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1002 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1004 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1005 if (src->prev->regoff == iptr->dst->regoff) {
1006 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1009 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1010 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1011 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1015 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1016 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1020 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1021 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1022 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1024 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1025 M_INTMOVE(src->prev->regoff, d);
1026 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1028 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1029 /* workaround for reg alloc */
1030 if (src->regoff == iptr->dst->regoff) {
1031 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1032 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1033 M_INTMOVE(REG_ITMP1, d);
1036 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1037 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1041 /* workaround for reg alloc */
1042 if (src->regoff == iptr->dst->regoff) {
1043 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1044 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1045 M_INTMOVE(REG_ITMP1, d);
1048 M_INTMOVE(src->prev->regoff, d);
1049 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1055 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1056 /* val.l = constant */
1058 d = reg_of_var(rd, iptr->dst, REG_NULL);
1059 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1062 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1064 d = reg_of_var(rd, iptr->dst, REG_NULL);
1065 if (iptr->dst->flags & INMEMORY) {
1066 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1067 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1068 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1069 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1071 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1072 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1073 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1074 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1076 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1077 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1078 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
/* NOTE(review): lossy extract of the interior of codegen()'s ICMD switch.
   The embedded leading numbers are original-file line numbers; gaps in them
   show that else-branches, closing braces and break statements are missing
   from this view.  Comments only were added here -- code is byte-identical. */
/* tail of ICMD_IMUL: 32-bit integer multiply of the two topmost stack
   elements.  INMEMORY flag tests select stack-slot (membase) vs. register
   forms of imull; REG_ITMP1 is the scratch when dst lives in memory. */
1079 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1082 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1083 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1084 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* dst in a register: the four src / src->prev location combinations */
1088 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1089 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1090 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1092 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1093 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1094 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1096 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1097 M_INTMOVE(src->regoff, iptr->dst->regoff);
1098 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
/* both operands in registers; avoid clobbering dst when it aliases src */
1101 if (src->regoff == iptr->dst->regoff) {
1102 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1105 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1106 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
/* ICMD_IMULCONST: 32-bit multiply by immediate iptr->val.i.  The val.i == 2
   special case below uses add reg,reg (cheaper than imul). */
1112 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1113 /* val.i = constant */
1115 d = reg_of_var(rd, iptr->dst, REG_NULL);
1116 if (iptr->dst->flags & INMEMORY) {
1117 if (src->flags & INMEMORY) {
1118 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1119 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1122 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1123 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1127 if (src->flags & INMEMORY) {
1128 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1131 if (iptr->val.i == 2) {
1132 M_INTMOVE(src->regoff, iptr->dst->regoff);
1133 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1136 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
/* ICMD_LMUL: 64-bit multiply; same INMEMORY case analysis as ICMD_IMUL but
   with the quadword imul/mov emitters. */
1142 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1144 d = reg_of_var(rd, iptr->dst, REG_NULL);
1145 if (iptr->dst->flags & INMEMORY) {
1146 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1147 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1148 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1149 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1151 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1152 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1153 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1154 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1156 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1157 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1158 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1159 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1162 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1163 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1164 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1168 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1169 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1170 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1172 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1173 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1174 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1176 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1177 M_INTMOVE(src->regoff, iptr->dst->regoff);
1178 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1181 if (src->regoff == iptr->dst->regoff) {
1182 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1185 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1186 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
/* ICMD_LMULCONST: 64-bit multiply by immediate iptr->val.l.  imul's
   immediate form only takes a sign-extended 32-bit value, hence the
   x86_64_is_imm32() tests with a mov-imm-to-register fallback. */
1192 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1193 /* val.l = constant */
1195 d = reg_of_var(rd, iptr->dst, REG_NULL);
1196 if (iptr->dst->flags & INMEMORY) {
1197 if (src->flags & INMEMORY) {
1198 if (x86_64_is_imm32(iptr->val.l)) {
1199 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1202 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1203 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1205 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1208 if (x86_64_is_imm32(iptr->val.l)) {
1209 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1212 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1213 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1215 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1219 if (src->flags & INMEMORY) {
1220 if (x86_64_is_imm32(iptr->val.l)) {
1221 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1224 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1225 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1229 /* should match in many cases */
/* NOTE(review): uses the 32-bit add (alul) for the *2 case of a long
   multiply -- looks suspicious for 64-bit operands; verify against the
   full source before relying on it. */
1230 if (iptr->val.l == 2) {
1231 M_INTMOVE(src->regoff, iptr->dst->regoff);
1232 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1235 if (x86_64_is_imm32(iptr->val.l)) {
1236 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1239 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1240 M_INTMOVE(src->regoff, iptr->dst->regoff);
1241 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
/* NOTE(review): lossy extract of the interior of codegen()'s ICMD switch;
   gaps in the embedded original line numbers show missing else/break lines.
   Comments only were added -- every code line is byte-identical. */
/* ICMD_IDIV: 32-bit division.  Dividend is forced into RAX (idiv's fixed
   input); divisor goes to REG_ITMP3.  The CMP/jcc pair skips the idiv for
   the INT_MIN / -1 overflow corner case required by the JVM spec; the
   hard-coded byte counts in the jcc displacements must match the emitted
   instruction sizes exactly.  RDX is saved/restored around idiv because it
   is an argument register in the x86-64 calling convention. */
1248 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1250 d = reg_of_var(rd, iptr->dst, REG_NULL);
1251 if (src->prev->flags & INMEMORY) {
1252 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1255 M_INTMOVE(src->prev->regoff, RAX);
1258 if (src->flags & INMEMORY) {
1259 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1262 M_INTMOVE(src->regoff, REG_ITMP3);
1266 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1267 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1268 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1269 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1271 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1273 x86_64_idivl_reg(cd, REG_ITMP3);
1275 if (iptr->dst->flags & INMEMORY) {
1276 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1277 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1280 M_INTMOVE(RAX, iptr->dst->regoff);
1282 if (iptr->dst->regoff != RDX) {
1283 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
/* ICMD_IREM: same setup as IDIV, but the result is the remainder in RDX;
   the extra xor RDX,RDX on the skip path yields remainder 0 for
   INT_MIN % -1. */
1288 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1290 d = reg_of_var(rd, iptr->dst, REG_NULL);
1291 if (src->prev->flags & INMEMORY) {
1292 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1295 M_INTMOVE(src->prev->regoff, RAX);
1298 if (src->flags & INMEMORY) {
1299 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1302 M_INTMOVE(src->regoff, REG_ITMP3);
1306 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1307 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1308 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1309 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1310 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1312 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1314 x86_64_idivl_reg(cd, REG_ITMP3);
1316 if (iptr->dst->flags & INMEMORY) {
1317 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1318 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1321 M_INTMOVE(RDX, iptr->dst->regoff);
1323 if (iptr->dst->regoff != RDX) {
1324 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
/* ICMD_IDIVPOW2: signed division by 2^val.i via arithmetic shift; the
   lea/cmovcc pair adds (2^n - 1) to negative dividends first so the shift
   rounds toward zero like idiv does. */
1329 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1330 /* val.i = constant */
1332 var_to_reg_int(s1, src, REG_ITMP1);
1333 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1334 M_INTMOVE(s1, REG_ITMP1);
1335 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1336 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1337 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1338 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1339 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1340 store_reg_to_var_int(iptr->dst, d);
/* ICMD_IREMPOW2: signed remainder by a power-of-two mask; here val.i is
   presumably already the mask (2^n - 1) -- TODO confirm against the
   instruction-selection pass. */
1343 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1344 /* val.i = constant */
1346 var_to_reg_int(s1, src, REG_ITMP1);
1347 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1348 M_INTMOVE(s1, REG_ITMP1);
1349 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1350 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1351 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1352 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1353 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1354 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1355 store_reg_to_var_int(iptr->dst, d);
/* ICMD_LDIV: 64-bit variant of IDIV.  LONG_MIN cannot be a cmp immediate,
   so it is materialized in REG_ITMP2 for the overflow check.
   NOTE(review): the dividend is loaded into REG_ITMP1 here rather than RAX
   as in IDIV; the mov into RAX presumably sits on one of the elided lines
   (1374 ff.) -- confirm against the full source. */
1359 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1361 d = reg_of_var(rd, iptr->dst, REG_NULL);
1362 if (src->prev->flags & INMEMORY) {
1363 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1366 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1369 if (src->flags & INMEMORY) {
1370 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1373 M_INTMOVE(src->regoff, REG_ITMP3);
1377 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1378 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1379 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1380 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1381 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1383 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1385 x86_64_idiv_reg(cd, REG_ITMP3);
1387 if (iptr->dst->flags & INMEMORY) {
1388 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1389 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1392 M_INTMOVE(RAX, iptr->dst->regoff);
1394 if (iptr->dst->regoff != RDX) {
1395 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
/* ICMD_LREM: 64-bit remainder; result comes out of RDX. */
1400 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1402 d = reg_of_var(rd, iptr->dst, REG_NULL);
1403 if (src->prev->flags & INMEMORY) {
1404 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1407 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1410 if (src->flags & INMEMORY) {
1411 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1414 M_INTMOVE(src->regoff, REG_ITMP3);
1418 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1419 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1420 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1421 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1422 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1423 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1425 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1427 x86_64_idiv_reg(cd, REG_ITMP3);
1429 if (iptr->dst->flags & INMEMORY) {
1430 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1431 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1434 M_INTMOVE(RDX, iptr->dst->regoff);
1436 if (iptr->dst->regoff != RDX) {
1437 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
/* ICMD_LDIVPOW2: 64-bit divide by 2^val.i via sar, with the same
   round-toward-zero correction as IDIVPOW2.  NOTE(review): the bias uses
   (1 << iptr->val.i), a 32-bit int shift -- UB/overflow for val.i >= 31;
   upstream presumably guarantees a small shift count, verify. */
1442 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1443 /* val.i = constant */
1445 var_to_reg_int(s1, src, REG_ITMP1);
1446 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1447 M_INTMOVE(s1, REG_ITMP1);
1448 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1449 x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1450 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1451 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1452 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1453 store_reg_to_var_int(iptr->dst, d);
/* ICMD_LREMPOW2: 64-bit remainder by power-of-two mask (mirrors
   IREMPOW2; reads val.i even though the header says val.l). */
1456 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1457 /* val.l = constant */
1459 var_to_reg_int(s1, src, REG_ITMP1);
1460 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1461 M_INTMOVE(s1, REG_ITMP1);
1462 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1463 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1464 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1465 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1466 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1467 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1468 store_reg_to_var_int(iptr->dst, d);
/* NOTE(review): lossy extract of codegen()'s ICMD switch (break statements
   between cases are on elided lines).  Comments only were added. */
/* Shift and bitwise-logic bytecodes.  These all delegate the operand
   location analysis to the shared helpers in emitfuncs
   (x86_64_emit_ishift / lshift / ialu / lalu and their *const variants);
   only the ALU/shift opcode selector differs per case. */
1471 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1473 d = reg_of_var(rd, iptr->dst, REG_NULL);
1474 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1477 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1478 /* val.i = constant */
1480 d = reg_of_var(rd, iptr->dst, REG_NULL);
1481 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1484 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1486 d = reg_of_var(rd, iptr->dst, REG_NULL);
1487 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1490 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1491 /* val.i = constant */
1493 d = reg_of_var(rd, iptr->dst, REG_NULL);
1494 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1497 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1499 d = reg_of_var(rd, iptr->dst, REG_NULL);
1500 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1503 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1504 /* val.i = constant */
1506 d = reg_of_var(rd, iptr->dst, REG_NULL);
1507 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1510 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1512 d = reg_of_var(rd, iptr->dst, REG_NULL);
1513 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1516 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1517 /* val.i = constant */
1519 d = reg_of_var(rd, iptr->dst, REG_NULL);
1520 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1523 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1525 d = reg_of_var(rd, iptr->dst, REG_NULL);
1526 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1529 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1530 /* val.i = constant */
1532 d = reg_of_var(rd, iptr->dst, REG_NULL);
1533 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1536 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1538 d = reg_of_var(rd, iptr->dst, REG_NULL);
1539 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1542 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1543 /* val.l = constant */
1545 d = reg_of_var(rd, iptr->dst, REG_NULL);
1546 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1549 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1551 d = reg_of_var(rd, iptr->dst, REG_NULL);
1552 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1555 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1556 /* val.i = constant */
1558 d = reg_of_var(rd, iptr->dst, REG_NULL);
1559 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1562 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1564 d = reg_of_var(rd, iptr->dst, REG_NULL);
1565 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1568 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1569 /* val.l = constant */
1571 d = reg_of_var(rd, iptr->dst, REG_NULL);
1572 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1575 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1577 d = reg_of_var(rd, iptr->dst, REG_NULL);
1578 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1581 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1582 /* val.i = constant */
1584 d = reg_of_var(rd, iptr->dst, REG_NULL);
1585 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1588 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1590 d = reg_of_var(rd, iptr->dst, REG_NULL);
1591 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1594 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1595 /* val.l = constant */
1597 d = reg_of_var(rd, iptr->dst, REG_NULL);
1598 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1601 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1603 d = reg_of_var(rd, iptr->dst, REG_NULL);
1604 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1607 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1608 /* val.i = constant */
1610 d = reg_of_var(rd, iptr->dst, REG_NULL);
1611 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1614 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1616 d = reg_of_var(rd, iptr->dst, REG_NULL);
1617 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1620 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1621 /* val.l = constant */
1623 d = reg_of_var(rd, iptr->dst, REG_NULL);
1624 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
/* ICMD_IINC: add a constant to a local variable in place.  Uses inc/dec
   for +1/-1, addl immediate otherwise; `d` here is presumably the local's
   regoff, assigned on an elided line -- TODO confirm. */
1628 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1629 /* op1 = variable, val.i = constant */
1631 /* using inc and dec is definitely faster than add -- tested */
1634 var = &(rd->locals[iptr->op1][TYPE_INT]);
1636 if (var->flags & INMEMORY) {
1637 if (iptr->val.i == 1) {
1638 x86_64_incl_membase(cd, REG_SP, d * 8);
1640 } else if (iptr->val.i == -1) {
1641 x86_64_decl_membase(cd, REG_SP, d * 8);
1644 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1648 if (iptr->val.i == 1) {
1649 x86_64_incl_reg(cd, d);
1651 } else if (iptr->val.i == -1) {
1652 x86_64_decl_reg(cd, d);
1655 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
/* NOTE(review): lossy extract of codegen()'s ICMD switch (some else/break
   lines are elided).  Comments only were added below. */
1661 /* floating operations ************************************************/
/* ICMD_FNEG: negate a float by XORing the sign bit.  The sign-bit constant
   is placed in the data segment (dseg_adds4) and loaded RIP-relative; the
   "+ 9" is the length of the load instruction itself so the displacement
   is computed from the end of the instruction. */
1663 case ICMD_FNEG: /* ..., value ==> ..., - value */
1665 var_to_reg_flt(s1, src, REG_FTMP1);
1666 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1667 a = dseg_adds4(cd, 0x80000000);
1669 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1670 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1671 store_reg_to_var_flt(iptr->dst, d);
/* ICMD_DNEG: same trick with the 64-bit sign bit and xorpd. */
1674 case ICMD_DNEG: /* ..., value ==> ..., - value */
1676 var_to_reg_flt(s1, src, REG_FTMP1);
1677 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1678 a = dseg_adds8(cd, 0x8000000000000000);
1680 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1681 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1682 store_reg_to_var_flt(iptr->dst, d);
/* Commutative SSE arithmetic (FADD/DADD/FMUL/DMUL): when s1 or s2 already
   equals d the other operand is folded in directly (the s1 == d / s2 == d
   tests sit on elided lines); otherwise move then operate. */
1685 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1687 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1688 var_to_reg_flt(s2, src, REG_FTMP2);
1689 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1691 x86_64_addss_reg_reg(cd, s2, d);
1692 } else if (s2 == d) {
1693 x86_64_addss_reg_reg(cd, s1, d);
1696 x86_64_addss_reg_reg(cd, s2, d);
1698 store_reg_to_var_flt(iptr->dst, d);
1701 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1703 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1704 var_to_reg_flt(s2, src, REG_FTMP2);
1705 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1707 x86_64_addsd_reg_reg(cd, s2, d);
1708 } else if (s2 == d) {
1709 x86_64_addsd_reg_reg(cd, s1, d);
1712 x86_64_addsd_reg_reg(cd, s2, d);
1714 store_reg_to_var_flt(iptr->dst, d);
/* Non-commutative SSE arithmetic (FSUB/DSUB/FDIV/DDIV): s2 is first
   rescued into REG_FTMP2 when d would clobber it. */
1717 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1719 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1720 var_to_reg_flt(s2, src, REG_FTMP2);
1721 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1723 M_FLTMOVE(s2, REG_FTMP2);
1727 x86_64_subss_reg_reg(cd, s2, d);
1728 store_reg_to_var_flt(iptr->dst, d);
1731 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1733 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1734 var_to_reg_flt(s2, src, REG_FTMP2);
1735 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1737 M_FLTMOVE(s2, REG_FTMP2);
1741 x86_64_subsd_reg_reg(cd, s2, d);
1742 store_reg_to_var_flt(iptr->dst, d);
1745 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1747 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1748 var_to_reg_flt(s2, src, REG_FTMP2);
1749 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1751 x86_64_mulss_reg_reg(cd, s2, d);
1752 } else if (s2 == d) {
1753 x86_64_mulss_reg_reg(cd, s1, d);
1756 x86_64_mulss_reg_reg(cd, s2, d);
1758 store_reg_to_var_flt(iptr->dst, d);
1761 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1763 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1764 var_to_reg_flt(s2, src, REG_FTMP2);
1765 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1767 x86_64_mulsd_reg_reg(cd, s2, d);
1768 } else if (s2 == d) {
1769 x86_64_mulsd_reg_reg(cd, s1, d);
1772 x86_64_mulsd_reg_reg(cd, s2, d);
1774 store_reg_to_var_flt(iptr->dst, d);
1777 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1779 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1780 var_to_reg_flt(s2, src, REG_FTMP2);
1781 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1783 M_FLTMOVE(s2, REG_FTMP2);
1787 x86_64_divss_reg_reg(cd, s2, d);
1788 store_reg_to_var_flt(iptr->dst, d);
1791 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1793 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1794 var_to_reg_flt(s2, src, REG_FTMP2);
1795 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1797 M_FLTMOVE(s2, REG_FTMP2);
1801 x86_64_divsd_reg_reg(cd, s2, d);
1802 store_reg_to_var_flt(iptr->dst, d);
/* int/long -> float/double conversions via cvtsi2ss/sd (q = 64-bit src) */
1805 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1807 var_to_reg_int(s1, src, REG_ITMP1);
1808 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1809 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1810 store_reg_to_var_flt(iptr->dst, d);
1813 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1815 var_to_reg_int(s1, src, REG_ITMP1);
1816 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1817 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1818 store_reg_to_var_flt(iptr->dst, d);
1821 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1823 var_to_reg_int(s1, src, REG_ITMP1);
1824 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1825 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1826 store_reg_to_var_flt(iptr->dst, d);
1829 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1831 var_to_reg_int(s1, src, REG_ITMP1);
1832 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1833 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1834 store_reg_to_var_flt(iptr->dst, d);
/* float/double -> int/long: cvttss/sd2si truncates; on overflow/NaN it
   yields the sentinel 0x80000000 (or 0x8000000000000000 for long), which
   the CMP detects, and an asm_builtin_* helper is called to produce the
   JVM-specified saturated result.  `a` is the byte count of the slow path
   to skip; it must match the emitted instruction sizes. */
1837 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1839 var_to_reg_flt(s1, src, REG_FTMP1);
1840 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1841 x86_64_cvttss2si_reg_reg(cd, s1, d);
1842 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1843 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1844 x86_64_jcc(cd, X86_64_CC_NE, a);
1845 M_FLTMOVE(s1, REG_FTMP1);
1846 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1847 x86_64_call_reg(cd, REG_ITMP2);
1848 M_INTMOVE(REG_RESULT, d);
1849 store_reg_to_var_int(iptr->dst, d);
1852 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1854 var_to_reg_flt(s1, src, REG_FTMP1);
1855 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1856 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1857 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1858 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1859 x86_64_jcc(cd, X86_64_CC_NE, a);
1860 M_FLTMOVE(s1, REG_FTMP1);
1861 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1862 x86_64_call_reg(cd, REG_ITMP2);
1863 M_INTMOVE(REG_RESULT, d);
1864 store_reg_to_var_int(iptr->dst, d);
1867 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1869 var_to_reg_flt(s1, src, REG_FTMP1);
1870 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1871 x86_64_cvttss2siq_reg_reg(cd, s1, d);
1872 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1873 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1874 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1875 x86_64_jcc(cd, X86_64_CC_NE, a);
1876 M_FLTMOVE(s1, REG_FTMP1);
1877 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
1878 x86_64_call_reg(cd, REG_ITMP2);
1879 M_INTMOVE(REG_RESULT, d);
1880 store_reg_to_var_int(iptr->dst, d);
1883 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1885 var_to_reg_flt(s1, src, REG_FTMP1);
1886 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1887 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1888 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1889 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1890 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1891 x86_64_jcc(cd, X86_64_CC_NE, a);
1892 M_FLTMOVE(s1, REG_FTMP1);
1893 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
1894 x86_64_call_reg(cd, REG_ITMP2);
1895 M_INTMOVE(REG_RESULT, d);
1896 store_reg_to_var_int(iptr->dst, d);
/* float <-> double widen/narrow */
1899 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1901 var_to_reg_flt(s1, src, REG_FTMP1);
1902 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1903 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1904 store_reg_to_var_flt(iptr->dst, d);
1907 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1909 var_to_reg_flt(s1, src, REG_FTMP1);
1910 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1911 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1912 store_reg_to_var_flt(iptr->dst, d);
/* FCMPL/FCMPG/DCMPL/DCMPG: branch-free three-way compare via ucomiss/sd
   plus cmovcc; the parity (CC_P) cmov decides how NaN (unordered) maps --
   to -1 for the *CMPL forms, to 1 for the *CMPG forms.
   NOTE(review): result sense (< => 1, > => -1) follows the comment at the
   case head, which is inverted w.r.t. the usual JVM fcmp convention;
   presumably the ucomiss operand order compensates -- verify. */
1915 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1916 /* == => 0, < => 1, > => -1 */
1918 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1919 var_to_reg_flt(s2, src, REG_FTMP2);
1920 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1921 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1922 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1923 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1924 x86_64_ucomiss_reg_reg(cd, s1, s2);
1925 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1926 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1927 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1928 store_reg_to_var_int(iptr->dst, d);
1931 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1932 /* == => 0, < => 1, > => -1 */
1934 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1935 var_to_reg_flt(s2, src, REG_FTMP2);
1936 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1937 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1938 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1939 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1940 x86_64_ucomiss_reg_reg(cd, s1, s2);
1941 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1942 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1943 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1944 store_reg_to_var_int(iptr->dst, d);
1947 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1948 /* == => 0, < => 1, > => -1 */
1950 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1951 var_to_reg_flt(s2, src, REG_FTMP2);
1952 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1953 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1954 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1955 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1956 x86_64_ucomisd_reg_reg(cd, s1, s2);
1957 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1958 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1959 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1960 store_reg_to_var_int(iptr->dst, d);
1963 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1964 /* == => 0, < => 1, > => -1 */
1966 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1967 var_to_reg_flt(s2, src, REG_FTMP2);
1968 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1969 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1970 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1971 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1972 x86_64_ucomisd_reg_reg(cd, s1, s2);
1973 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1974 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1975 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1976 store_reg_to_var_int(iptr->dst, d);
/* NOTE(review): lossy extract of codegen()'s ICMD switch (bounds-check and
   break lines between cases are elided; the final case is cut off at the
   end of the extract).  Comments only were added below. */
1980 /* memory operations **************************************************/
/* ICMD_ARRAYLENGTH: load the 32-bit size field from the array header */
1982 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
1984 var_to_reg_int(s1, src, REG_ITMP1);
1985 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1986 gen_nullptr_check(s1);
1987 x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
1988 store_reg_to_var_int(iptr->dst, d);
/* *ALOAD family: s1 = arrayref, s2 = index.  iptr->op1 == 0 means the
   verifier could not prove non-nullness, so a null check is emitted (the
   array-bound check presumably sits on the elided lines after it).  The
   scale argument (0..3) is log2 of the element size. */
1991 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
1993 var_to_reg_int(s1, src->prev, REG_ITMP1);
1994 var_to_reg_int(s2, src, REG_ITMP2);
1995 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1996 if (iptr->op1 == 0) {
1997 gen_nullptr_check(s1);
2000 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2001 store_reg_to_var_int(iptr->dst, d);
2004 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2006 var_to_reg_int(s1, src->prev, REG_ITMP1);
2007 var_to_reg_int(s2, src, REG_ITMP2);
2008 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2009 if (iptr->op1 == 0) {
2010 gen_nullptr_check(s1);
2013 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2014 store_reg_to_var_int(iptr->dst, d);
2017 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2019 var_to_reg_int(s1, src->prev, REG_ITMP1);
2020 var_to_reg_int(s2, src, REG_ITMP2);
2021 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2022 if (iptr->op1 == 0) {
2023 gen_nullptr_check(s1);
2026 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2027 store_reg_to_var_int(iptr->dst, d);
2030 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2032 var_to_reg_int(s1, src->prev, REG_ITMP1);
2033 var_to_reg_int(s2, src, REG_ITMP2);
2034 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2035 if (iptr->op1 == 0) {
2036 gen_nullptr_check(s1);
2039 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2040 store_reg_to_var_flt(iptr->dst, d);
2043 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2045 var_to_reg_int(s1, src->prev, REG_ITMP1);
2046 var_to_reg_int(s2, src, REG_ITMP2);
2047 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2048 if (iptr->op1 == 0) {
2049 gen_nullptr_check(s1);
2052 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2053 store_reg_to_var_flt(iptr->dst, d);
/* char is zero-extended (movzwq), short/byte are sign-extended
   (movswq/movsbq), matching Java's unsigned char vs. signed short/byte. */
2056 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2058 var_to_reg_int(s1, src->prev, REG_ITMP1);
2059 var_to_reg_int(s2, src, REG_ITMP2);
2060 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2061 if (iptr->op1 == 0) {
2062 gen_nullptr_check(s1);
2065 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2066 store_reg_to_var_int(iptr->dst, d);
2069 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2071 var_to_reg_int(s1, src->prev, REG_ITMP1);
2072 var_to_reg_int(s2, src, REG_ITMP2);
2073 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2074 if (iptr->op1 == 0) {
2075 gen_nullptr_check(s1);
2078 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2079 store_reg_to_var_int(iptr->dst, d);
2082 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2084 var_to_reg_int(s1, src->prev, REG_ITMP1);
2085 var_to_reg_int(s2, src, REG_ITMP2);
2086 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2087 if (iptr->op1 == 0) {
2088 gen_nullptr_check(s1);
2091 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2092 store_reg_to_var_int(iptr->dst, d);
/* *ASTORE family: s1 = arrayref, s2 = index, s3 = value.
   NOTE(review): AASTORE emits a plain store with no array-store
   (subtype) check visible here -- it may live on elided lines or be
   handled elsewhere in this era of CACAO; verify. */
2096 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2098 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2099 var_to_reg_int(s2, src->prev, REG_ITMP2);
2100 if (iptr->op1 == 0) {
2101 gen_nullptr_check(s1);
2104 var_to_reg_int(s3, src, REG_ITMP3);
2105 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2108 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2110 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2111 var_to_reg_int(s2, src->prev, REG_ITMP2);
2112 if (iptr->op1 == 0) {
2113 gen_nullptr_check(s1);
2116 var_to_reg_int(s3, src, REG_ITMP3);
2117 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2120 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2122 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2123 var_to_reg_int(s2, src->prev, REG_ITMP2);
2124 if (iptr->op1 == 0) {
2125 gen_nullptr_check(s1);
2128 var_to_reg_int(s3, src, REG_ITMP3);
2129 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2132 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2134 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2135 var_to_reg_int(s2, src->prev, REG_ITMP2);
2136 if (iptr->op1 == 0) {
2137 gen_nullptr_check(s1);
2140 var_to_reg_flt(s3, src, REG_FTMP3);
2141 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2144 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2146 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2147 var_to_reg_int(s2, src->prev, REG_ITMP2);
2148 if (iptr->op1 == 0) {
2149 gen_nullptr_check(s1);
2152 var_to_reg_flt(s3, src, REG_FTMP3);
2153 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2156 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2158 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2159 var_to_reg_int(s2, src->prev, REG_ITMP2);
2160 if (iptr->op1 == 0) {
2161 gen_nullptr_check(s1);
2164 var_to_reg_int(s3, src, REG_ITMP3);
2165 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2168 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2170 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2171 var_to_reg_int(s2, src->prev, REG_ITMP2);
2172 if (iptr->op1 == 0) {
2173 gen_nullptr_check(s1);
2176 var_to_reg_int(s3, src, REG_ITMP3);
2177 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2180 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2182 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2183 var_to_reg_int(s2, src->prev, REG_ITMP2);
2184 if (iptr->op1 == 0) {
2185 gen_nullptr_check(s1);
2188 var_to_reg_int(s3, src, REG_ITMP3);
2189 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
/* *ASTORECONST family: store an immediate constant element (value comes
   from iptr->val, not the stack). */
2192 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2194 var_to_reg_int(s1, src->prev, REG_ITMP1);
2195 var_to_reg_int(s2, src, REG_ITMP2);
2196 if (iptr->op1 == 0) {
2197 gen_nullptr_check(s1);
2200 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
/* LASTORECONST: a 64-bit immediate store does not exist on x86-64, so a
   non-imm32 constant is written as two 32-bit halves. */
2203 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2205 var_to_reg_int(s1, src->prev, REG_ITMP1);
2206 var_to_reg_int(s2, src, REG_ITMP2);
2207 if (iptr->op1 == 0) {
2208 gen_nullptr_check(s1);
2212 if (x86_64_is_imm32(iptr->val.l)) {
2213 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2216 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2217 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
/* AASTORECONST: only the null reference can be stored as a constant */
2221 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2223 var_to_reg_int(s1, src->prev, REG_ITMP1);
2224 var_to_reg_int(s2, src, REG_ITMP2);
2225 if (iptr->op1 == 0) {
2226 gen_nullptr_check(s1);
2229 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2232 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2234 var_to_reg_int(s1, src->prev, REG_ITMP1);
2235 var_to_reg_int(s2, src, REG_ITMP2);
2236 if (iptr->op1 == 0) {
2237 gen_nullptr_check(s1);
2240 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2243 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2245 var_to_reg_int(s1, src->prev, REG_ITMP1);
2246 var_to_reg_int(s2, src, REG_ITMP2);
2247 if (iptr->op1 == 0) {
2248 gen_nullptr_check(s1);
2251 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
/* SASTORECONST: case continues past the end of this extract */
2254 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2256 var_to_reg_int(s1, src->prev, REG_ITMP1);
2257 var_to_reg_int(s2, src, REG_ITMP2);
2258 if (iptr->op1 == 0) {
2259 gen_nullptr_check(s1);
2262 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2266 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2267 /* op1 = type, val.a = field address */
2269 /* If the static fields' class is not yet initialized, we do it */
2270 /* now. The call code is generated later. */
2271 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2272 codegen_addclinitref(cd, cd->mcodeptr, ((fieldinfo *) iptr->val.a)->class);
2274 /* This is just for debugging purposes. Is very difficult to */
2275 /* read patched code. Here we patch the following 5 nop's */
2276 /* so that the real code keeps untouched. */
2277 if (showdisassemble) {
2286 /* This approach is much faster than moving the field address */
2287 /* inline into a register. */
2288 a = dseg_addaddress(cd, &(((fieldinfo *) iptr->val.a)->value));
2289 x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
2290 switch (iptr->op1) {
2292 var_to_reg_int(s2, src, REG_ITMP1);
2293 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2297 var_to_reg_int(s2, src, REG_ITMP1);
2298 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2301 var_to_reg_flt(s2, src, REG_FTMP1);
2302 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2305 var_to_reg_flt(s2, src, REG_FTMP1);
2306 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
2309 throw_cacao_exception_exit(string_java_lang_InternalError,
2310 "Unknown PUTSTATIC operand type %d",
2315 case ICMD_GETSTATIC: /* ... ==> ..., value */
2316 /* op1 = type, val.a = field address */
2318 /* If the static fields' class is not yet initialized, we do it */
2319 /* now. The call code is generated later. */
2320 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2321 codegen_addclinitref(cd, cd->mcodeptr, ((fieldinfo *) iptr->val.a)->class);
2323 /* This is just for debugging purposes. Is very difficult to */
2324 /* read patched code. Here we patch the following 5 nop's */
2325 /* so that the real code keeps untouched. */
2326 if (showdisassemble) {
2335 /* This approach is much faster than moving the field address */
2336 /* inline into a register. */
2337 a = dseg_addaddress(cd, &(((fieldinfo *) iptr->val.a)->value));
2338 x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
2339 switch (iptr->op1) {
2341 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2342 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2343 store_reg_to_var_int(iptr->dst, d);
2347 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2348 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2349 store_reg_to_var_int(iptr->dst, d);
2352 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2353 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2354 store_reg_to_var_flt(iptr->dst, d);
2357 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2358 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2359 store_reg_to_var_flt(iptr->dst, d);
2362 throw_cacao_exception_exit(string_java_lang_InternalError,
2363 "Unknown GETSTATIC operand type %d",
2368 case ICMD_PUTFIELD: /* ..., value ==> ... */
2369 /* op1 = type, val.i = field offset */
2371 a = ((fieldinfo *)(iptr->val.a))->offset;
2372 var_to_reg_int(s1, src->prev, REG_ITMP1);
2373 switch (iptr->op1) {
2375 var_to_reg_int(s2, src, REG_ITMP2);
2376 gen_nullptr_check(s1);
2377 x86_64_movl_reg_membase(cd, s2, s1, a);
2381 var_to_reg_int(s2, src, REG_ITMP2);
2382 gen_nullptr_check(s1);
2383 x86_64_mov_reg_membase(cd, s2, s1, a);
2386 var_to_reg_flt(s2, src, REG_FTMP2);
2387 gen_nullptr_check(s1);
2388 x86_64_movss_reg_membase(cd, s2, s1, a);
2391 var_to_reg_flt(s2, src, REG_FTMP2);
2392 gen_nullptr_check(s1);
2393 x86_64_movsd_reg_membase(cd, s2, s1, a);
2396 throw_cacao_exception_exit(string_java_lang_InternalError,
2397 "Unknown PUTFIELD operand type %d",
2402 case ICMD_GETFIELD: /* ... ==> ..., value */
2403 /* op1 = type, val.i = field offset */
2405 a = ((fieldinfo *)(iptr->val.a))->offset;
2406 var_to_reg_int(s1, src, REG_ITMP1);
2407 switch (iptr->op1) {
2409 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2410 gen_nullptr_check(s1);
2411 x86_64_movl_membase_reg(cd, s1, a, d);
2412 store_reg_to_var_int(iptr->dst, d);
2416 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2417 gen_nullptr_check(s1);
2418 x86_64_mov_membase_reg(cd, s1, a, d);
2419 store_reg_to_var_int(iptr->dst, d);
2422 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2423 gen_nullptr_check(s1);
2424 x86_64_movss_membase_reg(cd, s1, a, d);
2425 store_reg_to_var_flt(iptr->dst, d);
2428 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2429 gen_nullptr_check(s1);
2430 x86_64_movsd_membase_reg(cd, s1, a, d);
2431 store_reg_to_var_flt(iptr->dst, d);
2434 throw_cacao_exception_exit(string_java_lang_InternalError,
2435 "Unknown GETFIELD operand type %d",
2441 /* branch operations **************************************************/
2443 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2445 var_to_reg_int(s1, src, REG_ITMP1);
2446 M_INTMOVE(s1, REG_ITMP1_XPTR);
2448 x86_64_call_imm(cd, 0); /* passing exception pointer */
2449 x86_64_pop_reg(cd, REG_ITMP2_XPC);
2451 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
2452 x86_64_jmp_reg(cd, REG_ITMP3);
2455 case ICMD_GOTO: /* ... ==> ... */
2456 /* op1 = target JavaVM pc */
2458 x86_64_jmp_imm(cd, 0);
2459 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2462 case ICMD_JSR: /* ... ==> ... */
2463 /* op1 = target JavaVM pc */
2465 x86_64_call_imm(cd, 0);
2466 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2469 case ICMD_RET: /* ... ==> ... */
2470 /* op1 = local variable */
2472 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2473 var_to_reg_int(s1, var, REG_ITMP1);
2474 x86_64_jmp_reg(cd, s1);
2477 case ICMD_IFNULL: /* ..., value ==> ... */
2478 /* op1 = target JavaVM pc */
2480 if (src->flags & INMEMORY) {
2481 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2484 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2486 x86_64_jcc(cd, X86_64_CC_E, 0);
2487 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2490 case ICMD_IFNONNULL: /* ..., value ==> ... */
2491 /* op1 = target JavaVM pc */
2493 if (src->flags & INMEMORY) {
2494 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2497 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2499 x86_64_jcc(cd, X86_64_CC_NE, 0);
2500 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2503 case ICMD_IFEQ: /* ..., value ==> ... */
2504 /* op1 = target JavaVM pc, val.i = constant */
2506 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2509 case ICMD_IFLT: /* ..., value ==> ... */
2510 /* op1 = target JavaVM pc, val.i = constant */
2512 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2515 case ICMD_IFLE: /* ..., value ==> ... */
2516 /* op1 = target JavaVM pc, val.i = constant */
2518 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2521 case ICMD_IFNE: /* ..., value ==> ... */
2522 /* op1 = target JavaVM pc, val.i = constant */
2524 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2527 case ICMD_IFGT: /* ..., value ==> ... */
2528 /* op1 = target JavaVM pc, val.i = constant */
2530 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2533 case ICMD_IFGE: /* ..., value ==> ... */
2534 /* op1 = target JavaVM pc, val.i = constant */
2536 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
2539 case ICMD_IF_LEQ: /* ..., value ==> ... */
2540 /* op1 = target JavaVM pc, val.l = constant */
2542 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2545 case ICMD_IF_LLT: /* ..., value ==> ... */
2546 /* op1 = target JavaVM pc, val.l = constant */
2548 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2551 case ICMD_IF_LLE: /* ..., value ==> ... */
2552 /* op1 = target JavaVM pc, val.l = constant */
2554 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2557 case ICMD_IF_LNE: /* ..., value ==> ... */
2558 /* op1 = target JavaVM pc, val.l = constant */
2560 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2563 case ICMD_IF_LGT: /* ..., value ==> ... */
2564 /* op1 = target JavaVM pc, val.l = constant */
2566 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2569 case ICMD_IF_LGE: /* ..., value ==> ... */
2570 /* op1 = target JavaVM pc, val.l = constant */
2572 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
2575 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2576 /* op1 = target JavaVM pc */
2578 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2581 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2582 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2584 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2587 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2588 /* op1 = target JavaVM pc */
2590 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2593 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2594 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2596 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2599 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2600 /* op1 = target JavaVM pc */
2602 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2605 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2606 /* op1 = target JavaVM pc */
2608 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2611 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2612 /* op1 = target JavaVM pc */
2614 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2617 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2618 /* op1 = target JavaVM pc */
2620 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2623 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2624 /* op1 = target JavaVM pc */
2626 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2629 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2630 /* op1 = target JavaVM pc */
2632 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2635 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2636 /* op1 = target JavaVM pc */
2638 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2641 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2642 /* op1 = target JavaVM pc */
2644 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2647 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
2649 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2652 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2653 /* val.i = constant */
2655 var_to_reg_int(s1, src, REG_ITMP1);
2656 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2657 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2659 M_INTMOVE(s1, REG_ITMP1);
2662 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2664 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2665 x86_64_testl_reg_reg(cd, s1, s1);
2666 x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2667 store_reg_to_var_int(iptr->dst, d);
2670 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2671 /* val.i = constant */
2673 var_to_reg_int(s1, src, REG_ITMP1);
2674 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2675 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2677 M_INTMOVE(s1, REG_ITMP1);
2680 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2682 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2683 x86_64_testl_reg_reg(cd, s1, s1);
2684 x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2685 store_reg_to_var_int(iptr->dst, d);
2688 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2689 /* val.i = constant */
2691 var_to_reg_int(s1, src, REG_ITMP1);
2692 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2693 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2695 M_INTMOVE(s1, REG_ITMP1);
2698 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2700 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2701 x86_64_testl_reg_reg(cd, s1, s1);
2702 x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2703 store_reg_to_var_int(iptr->dst, d);
2706 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2707 /* val.i = constant */
2709 var_to_reg_int(s1, src, REG_ITMP1);
2710 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2711 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2713 M_INTMOVE(s1, REG_ITMP1);
2716 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2718 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2719 x86_64_testl_reg_reg(cd, s1, s1);
2720 x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2721 store_reg_to_var_int(iptr->dst, d);
2724 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2725 /* val.i = constant */
2727 var_to_reg_int(s1, src, REG_ITMP1);
2728 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2729 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2731 M_INTMOVE(s1, REG_ITMP1);
2734 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2736 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2737 x86_64_testl_reg_reg(cd, s1, s1);
2738 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2739 store_reg_to_var_int(iptr->dst, d);
2742 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2743 /* val.i = constant */
2745 var_to_reg_int(s1, src, REG_ITMP1);
2746 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2747 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2749 M_INTMOVE(s1, REG_ITMP1);
2752 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2754 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2755 x86_64_testl_reg_reg(cd, s1, s1);
2756 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2757 store_reg_to_var_int(iptr->dst, d);
2761 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2765 var_to_reg_int(s1, src, REG_RESULT);
2766 M_INTMOVE(s1, REG_RESULT);
2768 goto nowperformreturn;
2770 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2773 var_to_reg_flt(s1, src, REG_FRESULT);
2774 M_FLTMOVE(s1, REG_FRESULT);
2776 goto nowperformreturn;
2778 case ICMD_RETURN: /* ... ==> ... */
2784 p = parentargs_base;
2786 /* call trace function */
2788 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2790 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2791 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2793 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
2794 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
2795 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2796 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2798 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
2799 x86_64_call_reg(cd, REG_ITMP1);
2801 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2802 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2804 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2807 #if defined(USE_THREADS)
2808 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2809 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2811 /* we need to save the proper return value */
2812 switch (iptr->opc) {
2816 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, rd->maxmemuse * 8);
2820 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, rd->maxmemuse * 8);
2824 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2825 x86_64_call_reg(cd, REG_ITMP1);
2827 /* and now restore the proper return value */
2828 switch (iptr->opc) {
2832 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_RESULT);
2836 x86_64_movq_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_FRESULT);
2842 /* restore saved registers */
2843 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
2844 p--; x86_64_mov_membase_reg(cd, REG_SP, p * 8, rd->savintregs[i]);
2846 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
2847 p--; x86_64_movq_membase_reg(cd, REG_SP, p * 8, rd->savfltregs[i]);
2850 /* deallocate stack */
2851 if (parentargs_base) {
2852 x86_64_alu_imm_reg(cd, X86_64_ADD, parentargs_base * 8, REG_SP);
2860 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2865 tptr = (void **) iptr->target;
2867 s4ptr = iptr->val.a;
2868 l = s4ptr[1]; /* low */
2869 i = s4ptr[2]; /* high */
2871 var_to_reg_int(s1, src, REG_ITMP1);
2872 M_INTMOVE(s1, REG_ITMP1);
2874 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
2879 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
2880 x86_64_jcc(cd, X86_64_CC_A, 0);
2882 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
2883 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2885 /* build jump table top down and use address of lowest entry */
2887 /* s4ptr += 3 + i; */
2891 /* dseg_addtarget(cd, BlockPtrOfPC(*--s4ptr)); */
2892 dseg_addtarget(cd, (basicblock *) tptr[0]);
2896 /* length of dataseg after last dseg_addtarget is used by load */
2898 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
2899 dseg_adddata(cd, cd->mcodeptr);
2900 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
2901 x86_64_jmp_reg(cd, REG_ITMP1);
2906 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2908 s4 i, l, val, *s4ptr;
2911 tptr = (void **) iptr->target;
2913 s4ptr = iptr->val.a;
2914 l = s4ptr[0]; /* default */
2915 i = s4ptr[1]; /* count */
2917 MCODECHECK((i<<2)+8);
2918 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
2924 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
2925 x86_64_jcc(cd, X86_64_CC_E, 0);
2926 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[1]), cd->mcodeptr); */
2927 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2930 x86_64_jmp_imm(cd, 0);
2931 /* codegen_addreference(cd, BlockPtrOfPC(l), cd->mcodeptr); */
2933 tptr = (void **) iptr->target;
2934 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2939 case ICMD_BUILTIN3: /* ..., arg1, arg2, arg3 ==> ... */
2940 /* op1 = return type, val.a = function pointer*/
2944 case ICMD_BUILTIN2: /* ..., arg1, arg2 ==> ... */
2945 /* op1 = return type, val.a = function pointer*/
2949 case ICMD_BUILTIN1: /* ..., arg1 ==> ... */
2950 /* op1 = return type, val.a = function pointer*/
2954 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2955 /* op1 = arg count, val.a = method pointer */
2957 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2958 /* op1 = arg count, val.a = method pointer */
2960 case ICMD_INVOKEVIRTUAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2961 /* op1 = arg count, val.a = method pointer */
2963 case ICMD_INVOKEINTERFACE:/*.., objectref, [arg1, [arg2 ...]] ==> ... */
2964 /* op1 = arg count, val.a = method pointer */
2974 MCODECHECK((s3 << 1) + 64);
2981 /* copy arguments to registers or stack location ******************/
2983 /* count integer and float arguments */
2985 for (; --s3 >= 0; src = src->prev) {
2986 IS_INT_LNG_TYPE(src->type) ? iarg++ : farg++;
2992 /* calculate amount of arguments to be on stack */
2994 s2 = (iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0 +
2995 (farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0;
2997 for (; --s3 >= 0; src = src->prev) {
2998 /* decrement the current argument type */
2999 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
3001 if (src->varkind == ARGVAR) {
3002 if (IS_INT_LNG_TYPE(src->type)) {
3003 if (iarg >= INT_ARG_CNT) {
3007 if (farg >= FLT_ARG_CNT) {
3014 if (IS_INT_LNG_TYPE(src->type)) {
3015 if (iarg < INT_ARG_CNT) {
3016 s1 = rd->argintregs[iarg];
3017 var_to_reg_int(d, src, s1);
3021 var_to_reg_int(d, src, REG_ITMP1);
3023 x86_64_mov_reg_membase(cd, d, REG_SP, s2 * 8);
3027 if (farg < FLT_ARG_CNT) {
3028 s1 = rd->argfltregs[farg];
3029 var_to_reg_flt(d, src, s1);
3033 var_to_reg_flt(d, src, REG_FTMP1);
3035 x86_64_movq_reg_membase(cd, d, REG_SP, s2 * 8);
3041 switch (iptr->opc) {
3049 x86_64_mov_imm_reg(cd, a, REG_ITMP1);
3050 x86_64_call_reg(cd, REG_ITMP1);
3053 case ICMD_INVOKESTATIC:
3055 a = (s8) lm->stubroutine;
3058 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3059 x86_64_call_reg(cd, REG_ITMP2);
3062 case ICMD_INVOKESPECIAL:
3064 a = (s8) lm->stubroutine;
3067 gen_nullptr_check(rd->argintregs[0]); /* first argument contains pointer */
3068 x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); /* access memory for hardware nullptr */
3069 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3070 x86_64_call_reg(cd, REG_ITMP2);
3073 case ICMD_INVOKEVIRTUAL:
3077 gen_nullptr_check(rd->argintregs[0]);
3078 x86_64_mov_membase_reg(cd, rd->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3079 x86_64_mov_membase32_reg(cd, REG_ITMP2, OFFSET(vftbl_t, table[0]) + sizeof(methodptr) * lm->vftblindex, REG_ITMP1);
3080 x86_64_call_reg(cd, REG_ITMP1);
3083 case ICMD_INVOKEINTERFACE:
3088 gen_nullptr_check(rd->argintregs[0]);
3089 x86_64_mov_membase_reg(cd, rd->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3090 x86_64_mov_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, interfacetable[0]) - sizeof(methodptr) * ci->index, REG_ITMP2);
3091 x86_64_mov_membase32_reg(cd, REG_ITMP2, sizeof(methodptr) * (lm - ci->methods), REG_ITMP1);
3092 x86_64_call_reg(cd, REG_ITMP1);
3097 error("Unkown ICMD-Command: %d", iptr->opc);
3100 /* d contains return type */
3102 if (d != TYPE_VOID) {
3103 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3104 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3105 M_INTMOVE(REG_RESULT, s1);
3106 store_reg_to_var_int(iptr->dst, s1);
3109 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3110 M_FLTMOVE(REG_FRESULT, s1);
3111 store_reg_to_var_flt(iptr->dst, s1);
3118 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3120 /* op1: 0 == array, 1 == class */
3121 /* val.a: (classinfo*) superclass */
3123 /* superclass is an interface:
3125 * return (sub != NULL) &&
3126 * (sub->vftbl->interfacetablelength > super->index) &&
3127 * (sub->vftbl->interfacetable[-super->index] != NULL);
3129 * superclass is a class:
3131 * return ((sub != NULL) && (0
3132 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3133 * super->vftbl->diffvall));
3137 classinfo *super = (classinfo*) iptr->val.a;
3139 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3140 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3143 var_to_reg_int(s1, src, REG_ITMP1);
3144 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3146 M_INTMOVE(s1, REG_ITMP1);
3149 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3150 if (iptr->op1) { /* class/interface */
3151 if (super->flags & ACC_INTERFACE) { /* interface */
3152 x86_64_test_reg_reg(cd, s1, s1);
3154 /* TODO: clean up this calculation */
3155 a = 3; /* mov_membase_reg */
3156 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3158 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3159 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3162 CALCIMMEDIATEBYTES(a, super->index);
3167 a += 3; /* mov_membase_reg */
3168 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3173 x86_64_jcc(cd, X86_64_CC_E, a);
3175 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3176 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength), REG_ITMP2);
3177 x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP2);
3178 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3180 /* TODO: clean up this calculation */
3182 a += 3; /* mov_membase_reg */
3183 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3188 x86_64_jcc(cd, X86_64_CC_LE, a);
3189 x86_64_mov_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP1);
3190 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3191 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3193 } else { /* class */
3194 x86_64_test_reg_reg(cd, s1, s1);
3196 /* TODO: clean up this calculation */
3197 a = 3; /* mov_membase_reg */
3198 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3200 a += 10; /* mov_imm_reg */
3202 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3203 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, baseval));
3205 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3206 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3208 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3209 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3216 x86_64_jcc(cd, X86_64_CC_E, a);
3218 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3219 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3220 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3221 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3223 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, baseval), REG_ITMP1);
3224 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP3);
3225 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3226 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3227 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3229 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP1);
3230 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3231 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
3232 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3236 panic("internal error: no inlined array instanceof");
3238 store_reg_to_var_int(iptr->dst, d);
3241 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3243 /* op1: 0 == array, 1 == class */
3244 /* val.a: (classinfo*) superclass */
3246 /* superclass is an interface:
3248 * OK if ((sub == NULL) ||
3249 * (sub->vftbl->interfacetablelength > super->index) &&
3250 * (sub->vftbl->interfacetable[-super->index] != NULL));
3252 * superclass is a class:
3254 * OK if ((sub == NULL) || (0
3255 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3256 * super->vftbl->diffvall));
3260 classinfo *super = (classinfo*) iptr->val.a;
3262 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3263 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3265 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3266 var_to_reg_int(s1, src, d);
3267 if (iptr->op1) { /* class/interface */
3268 if (super->flags & ACC_INTERFACE) { /* interface */
3269 x86_64_test_reg_reg(cd, s1, s1);
3271 /* TODO: clean up this calculation */
3272 a = 3; /* mov_membase_reg */
3273 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3275 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3276 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3279 CALCIMMEDIATEBYTES(a, super->index);
3284 a += 3; /* mov_membase_reg */
3285 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3290 x86_64_jcc(cd, X86_64_CC_E, a);
3292 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3293 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength), REG_ITMP2);
3294 x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP2);
3295 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3296 x86_64_jcc(cd, X86_64_CC_LE, 0);
3297 codegen_addxcastrefs(cd, cd->mcodeptr);
3298 x86_64_mov_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP2);
3299 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3300 x86_64_jcc(cd, X86_64_CC_E, 0);
3301 codegen_addxcastrefs(cd, cd->mcodeptr);
3303 } else { /* class */
3304 x86_64_test_reg_reg(cd, s1, s1);
3306 /* TODO: clean up this calculation */
3307 a = 3; /* mov_membase_reg */
3308 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3309 a += 10; /* mov_imm_reg */
3310 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3311 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, baseval));
3313 if (d != REG_ITMP3) {
3314 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3315 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3316 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3317 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3321 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3322 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3324 a += 10; /* mov_imm_reg */
3325 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3326 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3332 x86_64_jcc(cd, X86_64_CC_E, a);
3334 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3335 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3336 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3337 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3339 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, baseval), REG_ITMP1);
3340 if (d != REG_ITMP3) {
3341 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP3);
3342 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3343 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3344 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3346 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP1);
3349 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP2);
3350 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
3351 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3352 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3353 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3354 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3357 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
3358 x86_64_jcc(cd, X86_64_CC_A, 0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3359 codegen_addxcastrefs(cd, cd->mcodeptr);
3363 panic("internal error: no inlined array checkcast");
3366 store_reg_to_var_int(iptr->dst, d);
3369 case ICMD_CHECKASIZE: /* ..., size ==> ..., size */
3371 if (src->flags & INMEMORY) {
3372 x86_64_alul_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
3375 x86_64_testl_reg_reg(cd, src->regoff, src->regoff);
3377 x86_64_jcc(cd, X86_64_CC_L, 0);
3378 codegen_addxcheckarefs(cd, cd->mcodeptr);
3381 case ICMD_CHECKEXCEPTION: /* ... ==> ... */
3383 x86_64_test_reg_reg(cd, REG_RESULT, REG_RESULT);
3384 x86_64_jcc(cd, X86_64_CC_E, 0);
3385 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3388 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3389 /* op1 = dimension, val.a = array descriptor */
3391 /* check for negative sizes and copy sizes to stack if necessary */
3393 MCODECHECK((iptr->op1 << 1) + 64);
3395 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3396 var_to_reg_int(s2, src, REG_ITMP1);
3397 x86_64_testl_reg_reg(cd, s2, s2);
3398 x86_64_jcc(cd, X86_64_CC_L, 0);
3399 codegen_addxcheckarefs(cd, cd->mcodeptr);
3401 /* copy SAVEDVAR sizes to stack */
3403 if (src->varkind != ARGVAR) {
3404 x86_64_mov_reg_membase(cd, s2, REG_SP, s1 * 8);
3408 /* a0 = dimension count */
3409 x86_64_mov_imm_reg(cd, iptr->op1, rd->argintregs[0]);
3411 /* a1 = arraydescriptor */
3412 x86_64_mov_imm_reg(cd, (u8) iptr->val.a, rd->argintregs[1]);
3414 /* a2 = pointer to dimensions = stack pointer */
3415 x86_64_mov_reg_reg(cd, REG_SP, rd->argintregs[2]);
3417 x86_64_mov_imm_reg(cd, (u8) builtin_nmultianewarray, REG_ITMP1);
3418 x86_64_call_reg(cd, REG_ITMP1);
3420 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3421 M_INTMOVE(REG_RESULT, s1);
3422 store_reg_to_var_int(iptr->dst, s1);
3426 throw_cacao_exception_exit(string_java_lang_InternalError,
3427 "Unknown ICMD %d", iptr->opc);
3430 } /* for instruction */
3432 /* copy values to interface registers */
3434 src = bptr->outstack;
3435 len = bptr->outdepth;
3436 MCODECHECK(64 + len);
3439 if ((src->varkind != STACKVAR)) {
3441 if (IS_FLT_DBL_TYPE(s2)) {
3442 var_to_reg_flt(s1, src, REG_FTMP1);
3443 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3444 M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3447 x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3451 var_to_reg_int(s1, src, REG_ITMP1);
3452 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3453 M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3456 x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3462 } /* if (bptr -> flags >= BBREACHED) */
3463 } /* for basic block */
3467 /* generate bound check stubs */
3469 u1 *xcodeptr = NULL;
3472 for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3473 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3475 cd->mcodeptr - cd->mcodebase);
3479 /* move index register into REG_ITMP1 */
3480 x86_64_mov_reg_reg(cd, bref->reg, REG_ITMP1); /* 3 bytes */
3482 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3483 dseg_adddata(cd, cd->mcodeptr);
3484 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3485 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3487 if (xcodeptr != NULL) {
3488 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3491 xcodeptr = cd->mcodeptr;
3493 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3494 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3496 x86_64_mov_reg_reg(cd, REG_ITMP1, rd->argintregs[0]);
3497 x86_64_mov_imm_reg(cd, (u8) new_arrayindexoutofboundsexception, REG_ITMP3);
3498 x86_64_call_reg(cd, REG_ITMP3);
3500 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3501 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3503 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3504 x86_64_jmp_reg(cd, REG_ITMP3);
3508 /* generate negative array size check stubs */
3512 for (bref = cd->xcheckarefs; bref != NULL; bref = bref->next) {
3513 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3514 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3516 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3520 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3522 cd->mcodeptr - cd->mcodebase);
3526 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3527 dseg_adddata(cd, cd->mcodeptr);
3528 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3529 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3531 if (xcodeptr != NULL) {
3532 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3535 xcodeptr = cd->mcodeptr;
3537 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3538 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3540 x86_64_mov_imm_reg(cd, (u8) new_negativearraysizeexception, REG_ITMP3);
3541 x86_64_call_reg(cd, REG_ITMP3);
3543 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3544 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3546 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3547 x86_64_jmp_reg(cd, REG_ITMP3);
3551 /* generate cast check stubs */
3555 for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3556 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3557 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3559 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3563 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3565 cd->mcodeptr - cd->mcodebase);
3569 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3570 dseg_adddata(cd, cd->mcodeptr);
3571 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3572 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3574 if (xcodeptr != NULL) {
3575 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3578 xcodeptr = cd->mcodeptr;
3580 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3581 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3583 x86_64_mov_imm_reg(cd, (u8) new_classcastexception, REG_ITMP3);
3584 x86_64_call_reg(cd, REG_ITMP3);
3586 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3587 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3589 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3590 x86_64_jmp_reg(cd, REG_ITMP3);
3594 /* generate divide by zero check stubs */
3598 for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
3599 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3600 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3602 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3606 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3608 cd->mcodeptr - cd->mcodebase);
3612 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3613 dseg_adddata(cd, cd->mcodeptr);
3614 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3615 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3617 if (xcodeptr != NULL) {
3618 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3621 xcodeptr = cd->mcodeptr;
3623 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3624 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3626 x86_64_mov_imm_reg(cd, (u8) new_arithmeticexception, REG_ITMP3);
3627 x86_64_call_reg(cd, REG_ITMP3);
3629 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3630 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3632 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3633 x86_64_jmp_reg(cd, REG_ITMP3);
3637 /* generate exception check stubs */
3641 for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
3642 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3643 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3645 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3649 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3651 cd->mcodeptr - cd->mcodebase);
3655 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3656 dseg_adddata(cd, cd->mcodeptr);
3657 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3658 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3660 if (xcodeptr != NULL) {
3661 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3664 xcodeptr = cd->mcodeptr;
3666 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3667 x86_64_alu_imm_reg(cd, X86_64_SUB, 8, REG_SP);
3668 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0);
3669 x86_64_mov_imm_reg(cd, (u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3670 x86_64_call_reg(cd, REG_ITMP1);
3671 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
3672 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
3673 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
3674 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);
3675 x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
3677 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
3678 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP1_XPTR);
3679 x86_64_mov_imm_membase(cd, 0, REG_ITMP3, 0);
3682 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3683 x86_64_jmp_reg(cd, REG_ITMP3);
3687 /* generate null pointer check stubs */
3691 for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
3692 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3693 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3695 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3699 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3701 cd->mcodeptr - cd->mcodebase);
3705 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3706 dseg_adddata(cd, cd->mcodeptr);
3707 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3708 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3710 if (xcodeptr != NULL) {
3711 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3714 xcodeptr = cd->mcodeptr;
3716 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3717 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3719 x86_64_mov_imm_reg(cd, (u8) new_nullpointerexception, REG_ITMP3);
3720 x86_64_call_reg(cd, REG_ITMP3);
3722 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3723 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3725 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3726 x86_64_jmp_reg(cd, REG_ITMP3);
3730 /* generate put/getstatic stub call code */
3738 tmpcd = DNEW(codegendata);
3740 for (cref = cd->clinitrefs; cref != NULL; cref = cref->next) {
3741 /* Get machine code which is patched back in later. A */
3742 /* `call rel32' is 5 bytes long. */
3743 xcodeptr = cd->mcodebase + cref->branchpos;
3745 mcode = *((u4 *) (xcodeptr + 1));
3749 /* patch in `call rel32' to call the following code */
3750 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
3751 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
3753 /* Save current stack pointer into a temporary register. */
3754 x86_64_mov_reg_reg(cd, REG_SP, REG_ITMP1);
3756 /* Push machine code bytes to patch onto the stack. */
3757 x86_64_push_imm(cd, (u1) xmcode);
3758 x86_64_push_imm(cd, (u4) mcode);
3760 x86_64_push_imm(cd, (u8) cref->class);
3762 /* Push previously saved stack pointer onto stack. */
3763 x86_64_push_reg(cd, REG_ITMP1);
3765 x86_64_mov_imm_reg(cd, (u8) asm_check_clinit, REG_ITMP1);
3766 x86_64_jmp_reg(cd, REG_ITMP1);
3771 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
3775 /* function createcompilerstub *************************************************
3777 creates a stub routine which calls the compiler
3779 *******************************************************************************/
3781 #define COMPSTUBSIZE 23
/* createcompilerstub: allocate COMPSTUBSIZE bytes and emit a tiny trampoline */
/* that loads the methodinfo pointer of `m` into REG_ITMP1 and jumps to       */
/* asm_call_jit_compiler, which compiles the method on first invocation.      */
/* Returns the stub address; the caller frees it via removecompilerstub().    */
/* NOTE(review): the setup lines tying cd->mcodeptr to `s` are elided from    */
/* this view -- confirm against the full source before relying on details.    */
3783 u1 *createcompilerstub(methodinfo *m)
3785 u1 *s = CNEW(u1, COMPSTUBSIZE); /* memory to hold the stub */
3789 /* mark start of dump memory area */
3791 dumpsize = dump_size();
/* temporary codegendata used only to drive the emitter macros below */
3793 cd = DNEW(codegendata);
3796 /* code for the stub */
3797 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1); /* pass method to compiler */
3798 x86_64_mov_imm_reg(cd, (u8) asm_call_jit_compiler, REG_ITMP3);/* load address */
3799 x86_64_jmp_reg(cd, REG_ITMP3); /* jump to compiler */
3801 #if defined(STATISTICS)
/* account for the stub in the JIT statistics counters */
3803 count_cstub_len += COMPSTUBSIZE;
3806 /* release dump area */
3808 dump_release(dumpsize);
3814 /* function removecompilerstub *************************************************
3816 deletes a compilerstub from memory (simply by freeing it)
3818 *******************************************************************************/
/* removecompilerstub: release the COMPSTUBSIZE bytes that were allocated    */
/* for this stub by createcompilerstub().                                    */
3820 void removecompilerstub(u1 *stub)
3822 CFREE(stub, COMPSTUBSIZE);
3826 /* function: createnativestub **************************************************
3828 creates a stub routine which calls a native method
3830 *******************************************************************************/
3832 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3833 /* static java_objectheader **(*callgetexceptionptrptr)() = builtin_get_exceptionptrptr; */
3836 #define NATIVESTUBSIZE 700 /* keep this size high enough! */
/* createnativestub: build a stub bridging the JIT calling convention to the  */
/* native function `f` implementing method `m`.  The visible code:            */
/*   - counts integer/float parameters from the method descriptor,            */
/*   - records a clinit patch ref for uninitialized static classes,           */
/*   - (verbose mode) saves/restores argument registers around a trace call,  */
/*   - (!STATIC_CLASSPATH) emits a lazily-patched native-address resolver,    */
/*   - shifts arguments to make room for the env (and class) parameters,      */
/*   - calls the native function and branches to the exception handler if     */
/*     an exception is pending.                                               */
/* Returns the stub address; the caller frees it via removenativestub().      */
/* NOTE(review): many declarations, braces and the return statement are       */
/* elided from this view -- comments describe only what the visible lines do. */
3838 u1 *createnativestub(functionptr f, methodinfo *m)
3840 u1 *s; /* pointer to stub memory */
3843 t_inlining_globals *id;
3845 s4 stackframesize; /* size of stackframe if needed */
3847 s4 iargs; /* count of integer arguments */
3848 s4 fargs; /* count of float arguments */
/* positions inside the emitted code that get patched once the native */
/* address is resolved (see the !STATIC_CLASSPATH block below) */
3851 void **callAddrPatchPos=0;
3853 void **jmpInstrPatchPos=0;
3855 /* initialize variables */
3860 /* mark start of dump memory area */
3862 dumpsize = dump_size();
3864 cd = DNEW(codegendata);
3865 rd = DNEW(registerdata);
3866 id = DNEW(t_inlining_globals);
3868 /* setup registers before using them */
3870 inlining_setup(m, id);
3871 reg_setup(m, rd, id);
3873 /* set paramcount and paramtypes */
3875 descriptor2types(m);
3877 /* count integer and float arguments */
3879 tptr = m->paramtypes;
3880 for (i = 0; i < m->paramcount; i++) {
3881 IS_INT_LNG_TYPE(*tptr++) ? iargs++ : fargs++;
3884 s = CNEW(u1, NATIVESTUBSIZE); /* memory to hold the stub */
3886 /* set some required variables which are normally set by codegen_setup */
3889 cd->clinitrefs = NULL;
3891 /* if function is static, check for initialized */
3893 if ((m->flags & ACC_STATIC) && !m->class->initialized) {
/* remember current position; the clinit call is patched in at the end */
3894 codegen_addclinitref(cd, cd->mcodeptr, m->class);
/* reserve one slot for the methodinfo plus one per argument register */
3900 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
3902 /* save integer and float argument registers */
3904 for (i = 0; i < INT_ARG_CNT; i++) {
3905 x86_64_mov_reg_membase(cd, rd->argintregs[i], REG_SP, (1 + i) * 8);
3908 for (i = 0; i < FLT_ARG_CNT; i++) {
3909 x86_64_movq_reg_membase(cd, rd->argfltregs[i], REG_SP, (1 + INT_ARG_CNT + i) * 8);
3912 /* show integer hex code for float arguments */
3914 for (i = 0, l = 0; i < m->paramcount && i < INT_ARG_CNT; i++) {
3915 /* if the paramtype is a float, we have to right shift all */
3916 /* following integer registers */
3918 if (IS_FLT_DBL_TYPE(m->paramtypes[i])) {
3919 for (s1 = INT_ARG_CNT - 2; s1 >= i; s1--) {
3920 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
/* move the float's bit pattern into the vacated integer register */
3923 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[i]);
/* pass the methodinfo in the reserved stack slot and trace the call */
3928 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1);
3929 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, 0 * 8);
3930 x86_64_mov_imm_reg(cd, (u8) builtin_trace_args, REG_ITMP1);
3931 x86_64_call_reg(cd, REG_ITMP1);
3933 /* restore integer and float argument registers */
3935 for (i = 0; i < INT_ARG_CNT; i++) {
3936 x86_64_mov_membase_reg(cd, REG_SP, (1 + i) * 8, rd->argintregs[i]);
3939 for (i = 0; i < FLT_ARG_CNT; i++) {
3940 x86_64_movq_membase_reg(cd, REG_SP, (1 + INT_ARG_CNT + i) * 8, rd->argfltregs[i]);
3943 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
3946 #if !defined(STATIC_CLASSPATH)
3947 /* call method to resolve native function if needed */
/* preserve all argument registers across codegen_resolve_native */
3949 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
3951 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 1 * 8);
3952 x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 2 * 8);
3953 x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 3 * 8);
3954 x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 4 * 8);
3955 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 5 * 8);
3956 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 6 * 8);
3958 x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 7 * 8);
3959 x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 8 * 8);
3960 x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 9 * 8);
3961 x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 10 * 8);
3962 x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 11 * 8);
3963 x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 12 * 8);
3964 x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 13 * 8);
3965 x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 14 * 8);
3967 /* needed to patch a jump over this block */
/* jmp rel32 with a 0 displacement; the displacement (last 4 bytes of */
/* the instruction) is patched once the native address is resolved */
3968 x86_64_jmp_imm(cd, 0);
3969 jmpInstrPos = cd->mcodeptr - 4;
/* arguments for codegen_resolve_native: method, &call-address slot, */
/* &jump-displacement slot, address of the jump instruction itself */
3971 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
3973 x86_64_mov_imm_reg(cd, 0, rd->argintregs[1]);
3974 callAddrPatchPos = cd->mcodeptr - 8; /* at this position the place is specified where the native function address should be patched into*/
3976 x86_64_mov_imm_reg(cd, 0, rd->argintregs[2]);
3977 jmpInstrPatchPos = cd->mcodeptr - 8;
3979 x86_64_mov_imm_reg(cd, jmpInstrPos, rd->argintregs[3]);
3981 x86_64_mov_imm_reg(cd, (u8) codegen_resolve_native, REG_ITMP1);
3982 x86_64_call_reg(cd, REG_ITMP1);
/* backpatch the jump displacement so later calls skip this resolver */
3984 *(jmpInstrPatchPos) = cd->mcodeptr - jmpInstrPos - 1; /*=opcode jmp_imm size*/
3986 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[0]);
3987 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[1]);
3988 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[2]);
3989 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[3]);
3990 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[4]);
3991 x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, rd->argintregs[5]);
3993 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[0]);
3994 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[1]);
3995 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[2]);
3996 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[3]);
3997 x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[4]);
3998 x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[5]);
3999 x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[6]);
4000 x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, rd->argfltregs[7]);
4002 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4006 /* save argument registers on stack -- if we have to */
/* static methods need 2 extra leading args (env, class), others only 1 */
/* (env), hence the differing INT_ARG_CNT thresholds below */
4008 if ((((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) || iargs > (INT_ARG_CNT - 1)) ||
4009 (fargs > FLT_ARG_CNT)) {
4016 /* do we need to shift integer argument register onto stack? */
4018 if ((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) {
4019 /* do we need to shift 2 arguments? */
4020 if (iargs > (INT_ARG_CNT - 1)) {
4027 } else if (iargs > (INT_ARG_CNT - 1)) {
4031 /* calculate required stack space */
4033 stackparamcnt += (iargs > INT_ARG_CNT) ? iargs - INT_ARG_CNT : 0;
4034 stackparamcnt += (fargs > FLT_ARG_CNT) ? fargs - FLT_ARG_CNT : 0;
4036 stackframesize = stackparamcnt + paramshiftcnt;
4038 /* keep stack 16-byte aligned */
/* pad to an odd slot count so SP stays 16-byte aligned after the push */
4039 if (!(stackframesize & 0x1))
4042 x86_64_alu_imm_reg(cd, X86_64_SUB, stackframesize * 8, REG_SP);
4044 /* shift integer arguments if required */
4046 if ((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) {
4047 /* do we need to shift 2 arguments? */
4048 if (iargs > (INT_ARG_CNT - 1))
4049 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 1 * 8);
4051 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 0 * 8);
4053 } else if (iargs > (INT_ARG_CNT - 1)) {
4054 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 0 * 8);
4057 /* copy stack arguments into new stack frame -- if any */
4058 for (i = 0; i < stackparamcnt; i++) {
4059 x86_64_mov_membase_reg(cd, REG_SP, (stackframesize + 1 + i) * 8, REG_ITMP1);
4060 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
4064 /* keep stack 16-byte aligned */
4065 x86_64_alu_imm_reg(cd, X86_64_SUB, 1 * 8, REG_SP);
4069 /* shift integer arguments for `env' and `class' arguments */
4071 if (m->flags & ACC_STATIC) {
4072 /* shift iargs count if less than INT_ARG_CNT, or all */
4073 for (i = (iargs < (INT_ARG_CNT - 2)) ? iargs : (INT_ARG_CNT - 2); i >= 0; i--) {
4074 x86_64_mov_reg_reg(cd, rd->argintregs[i], rd->argintregs[i + 2]);
4077 /* put class into second argument register */
4078 x86_64_mov_imm_reg(cd, (u8) m->class, rd->argintregs[1]);
4081 /* shift iargs count if less than INT_ARG_CNT, or all */
4082 for (i = (iargs < (INT_ARG_CNT - 1)) ? iargs : (INT_ARG_CNT - 1); i >= 0; i--) {
4083 x86_64_mov_reg_reg(cd, rd->argintregs[i], rd->argintregs[i + 1]);
4087 /* put env into first argument register */
4088 x86_64_mov_imm_reg(cd, (u8) &env, rd->argintregs[0]);
4090 /* do the native function call */
4091 x86_64_mov_imm_reg(cd, (u8) f, REG_ITMP1);
4092 #if !defined(STATIC_CLASSPATH)
/* remember where the immediate above sits so the resolver can patch */
/* the real native address into it (mov_imm_reg ends with an 8-byte imm) */
4094 (*callAddrPatchPos) = cd->mcodeptr - 8;
4096 x86_64_call_reg(cd, REG_ITMP1);
4098 /* remove stackframe if there is one */
4099 if (stackframesize) {
4100 x86_64_alu_imm_reg(cd, X86_64_ADD, stackframesize * 8, REG_SP);
/* save native return values, trace the method exit, then restore them */
4104 x86_64_alu_imm_reg(cd, X86_64_SUB, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4106 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
4107 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
4109 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
4110 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
4111 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
4112 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
4114 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
4115 x86_64_call_reg(cd, REG_ITMP1);
4117 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
4118 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
4120 x86_64_alu_imm_reg(cd, X86_64_ADD, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4123 /* check for exception */
4125 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* threaded build: fetch the per-thread exception pointer via a builtin */
4126 x86_64_push_reg(cd, REG_RESULT);
4127 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4128 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4129 x86_64_call_reg(cd, REG_ITMP3);
4130 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
4131 x86_64_pop_reg(cd, REG_RESULT);
/* non-threaded build: read the global exception pointer directly */
4133 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4134 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP3);
/* no pending exception -> skip the handler block below */
4136 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
4137 x86_64_jcc(cd, X86_64_CC_NE, 1);
4141 /* handle exception */
4143 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4144 x86_64_push_reg(cd, REG_ITMP3);
4145 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4146 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4147 x86_64_call_reg(cd, REG_ITMP3);
4148 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
4149 x86_64_pop_reg(cd, REG_ITMP1_XPTR);
4151 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
4152 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4153 x86_64_alu_reg_reg(cd, X86_64_XOR, REG_ITMP2, REG_ITMP2);
4154 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_ITMP3, 0); /* clear exception pointer */
4157 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC); /* get return address from stack */
/* rewind XPC into the call instruction so the handler sees the call site */
4158 x86_64_alu_imm_reg(cd, X86_64_SUB, 3, REG_ITMP2_XPC); /* callq */
4160 x86_64_mov_imm_reg(cd, (u8) asm_handle_nat_exception, REG_ITMP3);
4161 x86_64_jmp_reg(cd, REG_ITMP3);
4164 /* patch in a clinit call if required *************************************/
4173 tmpcd = DNEW(codegendata);
4175 /* there can only be one clinit ref entry */
4176 cref = cd->clinitrefs;
4179 /* Get machine code which is patched back in later. A */
4180 /* `call rel32' is 5 bytes long. */
4181 xcodeptr = cd->mcodebase + cref->branchpos;
4183 mcode = *((u4 *) (xcodeptr + 1));
4185 /* patch in `call rel32' to call the following code */
4186 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4187 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4189 /* Save current stack pointer into a temporary register. */
4190 x86_64_mov_reg_reg(cd, REG_SP, REG_ITMP1);
4192 /* Push machine code bytes to patch onto the stack. */
4193 x86_64_push_imm(cd, (u1) xmcode);
4194 x86_64_push_imm(cd, (u4) mcode);
4196 x86_64_push_imm(cd, (u8) cref->class);
4198 /* Push previously saved stack pointer onto stack. */
4199 x86_64_push_reg(cd, REG_ITMP1);
/* asm_check_clinit runs <clinit> if needed and restores the patched code */
4201 x86_64_mov_imm_reg(cd, (u8) asm_check_clinit, REG_ITMP1);
4202 x86_64_jmp_reg(cd, REG_ITMP1);
4206 /* Check if the stub size is big enough to hold the whole stub generated. */
4207 /* If not, this can lead into unpredictable crashes, because of heap */
/* NOTE(review): error string says "is to small" -- should read "is too */
/* small"; left untouched here because it is runtime output, not a comment */
4209 if ((s4) (cd->mcodeptr - s) > NATIVESTUBSIZE) {
4210 throw_cacao_exception_exit(string_java_lang_InternalError,
4211 "Native stub size %d is to small for current stub size %d",
4212 NATIVESTUBSIZE, (s4) (cd->mcodeptr - s));
4215 #if defined(STATISTICS)
4217 count_nstub_len += NATIVESTUBSIZE;
4220 /* release dump area */
4222 dump_release(dumpsize);
4228 /* function: removenativestub **************************************************
4230 removes a previously created native-stub from memory
4232 *******************************************************************************/
/* removenativestub: release the NATIVESTUBSIZE bytes that were allocated    */
/* for this stub by createnativestub().                                      */
4234 void removenativestub(u1 *stub)
4236 CFREE(stub, NATIVESTUBSIZE);
4241 * These are local overrides for various environment variables in Emacs.
4242 * Please do not remove this and leave it at the end of the file, where
4243 * Emacs will automagically detect them.
4244 * ---------------------------------------------------------------------
4247 * indent-tabs-mode: t