1 /* vm/jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996-2005 R. Grafl, A. Krall, C. Kruegel, C. Oates,
4 R. Obermaisser, M. Platter, M. Probst, S. Ring, E. Steiner,
5 C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich, J. Wenninger,
6 Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
25 Contact: cacao@complang.tuwien.ac.at
27 Authors: Andreas Krall
30 $Id: codegen.c 2406 2005-04-28 12:19:06Z jowenn $
40 #include "cacao/cacao.h"
41 #include "native/native.h"
42 #include "vm/global.h"
43 #include "vm/builtin.h"
44 #include "vm/loader.h"
45 #include "vm/tables.h"
46 #include "vm/jit/asmpart.h"
47 #include "vm/jit/jit.h"
48 #include "vm/jit/reg.h"
49 #include "vm/jit/parse.h"
50 #include "vm/jit/patcher.h"
51 #include "vm/jit/x86_64/arch.h"
52 #include "vm/jit/x86_64/codegen.h"
53 #include "vm/jit/x86_64/emitfuncs.h"
54 #include "vm/jit/x86_64/types.h"
55 #include "vm/jit/x86_64/asmoffsets.h"
56 #include "vm/jit/helper.h"
58 /* register description - array ***********************************************/
60 /* #define REG_RES 0 reserved register for OS or code generator */
61 /* #define REG_RET 1 return value register */
62 /* #define REG_EXC 2 exception value register (only old jit) */
63 /* #define REG_SAV 3 (callee) saved register */
64 /* #define REG_TMP 4 scratch temporary register (caller saved) */
65 /* #define REG_ARG 5 argument register (caller saved) */
67 /* #define REG_END -1 last entry in tables */
/* Integer register descriptor table, indexed by x86_64 register number
   (RAX, RCX, RDX, RBX, RSP, RBP, RSI, RDI, R8..R15).  The REG_ARG slots
   line up with the SysV AMD64 calling convention's argument registers
   (RDI, RSI, RDX, RCX, R8, R9).
   NOTE(review): this listing is truncated -- the terminating entries and
   the closing "};" of both arrays are not visible here; confirm against
   the complete file before editing. */
69 static int nregdescint[] = {
70 REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
71 REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
/* Floating-point register descriptor table, indexed by XMM register
   number.  XMM0-XMM7 are the SysV float argument registers (REG_ARG);
   the remainder are scratch/temporary from the JIT's point of view. */
76 static int nregdescfloat[] = {
77 REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG,
78 REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
83 /* Include independent code generation stuff -- include after register */
84 /* descriptions to avoid extern definitions. */
86 #include "vm/jit/codegen.inc"
87 #include "vm/jit/reg.inc"
89 #include "vm/jit/lsra.inc"
/* codegen_dummy_func: stub that merely logs its own name via the
   project's log_text() facility -- a placeholder/debug aid. */
void codegen_dummy_func()
{
	log_text("codegen_dummy_func");
}
95 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* Restart handling for a thread interrupted inside a JIT critical
   section: query thread_checkcritical() with the saved instruction
   pointer and, if it yields a restart address, redirect the saved RIP
   there so the thread re-enters at a safe point.
   NOTE(review): several lines of this function are missing from this
   listing (the declaration of `critical`, the guard that presumably
   checks it for NULL before the assignment below, and the closing
   brace) -- confirm against the complete file. */
96 void thread_restartcriticalsection(ucontext_t *uc)
100 critical = thread_checkcritical((void *) uc->uc_mcontext.gregs[REG_RIP]);
/* Rewrite the saved program counter to the critical-section restart
   address; execution resumes there when the context is restored. */
103 uc->uc_mcontext.gregs[REG_RIP] = (u8) critical;
108 /* NullPointerException signal handler for hardware null pointer check */
/* Converts a hardware fault (SIGSEGV/SIGBUS from a null dereference)
   into a Java NullPointerException: it re-arms the handler, unblocks
   the signal, then rewrites the saved register context so that, on
   return from the handler, the thread resumes in the JIT's exception
   dispatch code instead of re-executing the faulting instruction.
   NOTE(review): lines are missing from this listing (sigset_t nsig
   declaration, sigemptyset(&nsig), the conditional that selects between
   the two dispatch variants below, and the closing brace). */
110 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
114 struct ucontext *_uc = (struct ucontext *) _p;
115 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
116 struct sigaction act;
117 java_objectheader *xptr;
119 /* Reset signal handler - necessary for SysV, does no harm for BSD */
121 act.sa_sigaction = catch_NullPointerException; /* reinstall handler */
122 act.sa_flags = SA_SIGINFO;
123 sigaction(sig, &act, NULL);
/* Unblock the signal so a subsequent fault is delivered again
   (the kernel blocks the signal while its handler runs). */
126 sigaddset(&nsig, sig);
127 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
/* Variant 1: allocate the exception object here and hand it to the
   generic exception dispatcher via the ITMP registers. */
130 xptr = new_nullpointerexception();
132 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
133 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
134 sigctx->rip = (u8) asm_handle_exception;
/* Variant 2: pass only the exception class name and let the asm stub
   build and throw the object.  Presumably selected by a preprocessor
   or runtime condition not visible in this listing -- confirm. */
137 sigctx->rax = (u8) string_java_lang_NullPointerException;
138 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
139 sigctx->rip = (u8) asm_throw_and_handle_exception;
145 /* ArithmeticException signal handler for hardware divide by zero check */
/* Converts SIGFPE (hardware integer divide-by-zero) into a Java
   ArithmeticException, mirroring catch_NullPointerException: re-arm
   the handler, unblock the signal, then redirect the saved context
   into the arithmetic-exception asm stub.
   NOTE(review): lines are missing from this listing (sigset_t nsig
   declaration, sigemptyset(&nsig), and the closing brace). */
147 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
151 struct ucontext *_uc = (struct ucontext *) _p;
152 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
153 struct sigaction act;
154 java_objectheader *xptr;
156 /* Reset signal handler - necessary for SysV, does no harm for BSD */
158 act.sa_sigaction = catch_ArithmeticException; /* reinstall handler */
159 act.sa_flags = SA_SIGINFO;
160 sigaction(sig, &act, NULL);
/* Unblock the signal so the next hardware fault is delivered. */
163 sigaddset(&nsig, sig);
164 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
/* Build the exception object and stage it in the ITMP registers for
   the asm dispatch stub; resume execution in that stub. */
167 xptr = new_arithmeticexception();
169 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
171 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
172 sigctx->rip = (u8) asm_throw_and_handle_hardware_arithmetic_exception;
/* Install the signal handlers that translate hardware faults into
   Java exceptions: SIGSEGV and SIGBUS -> NullPointerException,
   SIGFPE -> ArithmeticException.
   NOTE(review): the function's braces (and possibly surrounding
   conditional compilation) are missing from this listing. */
178 void init_exceptions(void)
180 struct sigaction act;
182 /* install signal handlers we need to convert to exceptions */
/* Empty mask: no additional signals blocked while a handler runs. */
183 sigemptyset(&act.sa_mask);
/* Null-pointer check via page fault. */
187 act.sa_sigaction = catch_NullPointerException;
188 act.sa_flags = SA_SIGINFO;
189 sigaction(SIGSEGV, &act, NULL);
/* Some platforms report the same fault as SIGBUS. */
193 act.sa_sigaction = catch_NullPointerException;
194 act.sa_flags = SA_SIGINFO;
195 sigaction(SIGBUS, &act, NULL);
/* Hardware divide-by-zero check. */
199 act.sa_sigaction = catch_ArithmeticException;
200 act.sa_flags = SA_SIGINFO;
201 sigaction(SIGFPE, &act, NULL);
205 /* function codegen ************************************************************
207 generates machine code
209 *******************************************************************************/
211 void codegen(methodinfo *m, codegendata *cd, registerdata *rd)
213 s4 len, s1, s2, s3, d;
229 /* space to save used callee saved registers */
231 savedregs_num += (rd->savintregcnt - rd->maxsavintreguse);
232 savedregs_num += (rd->savfltregcnt - rd->maxsavfltreguse);
234 parentargs_base = rd->maxmemuse + savedregs_num;
236 #if defined(USE_THREADS) /* space to save argument of monitor_enter */
238 if (checksync && (m->flags & ACC_SYNCHRONIZED))
243 /* Keep stack of non-leaf functions 16-byte aligned for calls into native */
244 /* code e.g. libc or jni (alignment problems with movaps). */
246 if (!m->isleafmethod || runverbose)
247 parentargs_base |= 0x1;
249 /* create method header */
251 (void) dseg_addaddress(cd, m); /* MethodPointer */
252 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
254 #if defined(USE_THREADS)
256 /* IsSync contains the offset relative to the stack pointer for the
257 argument of monitor_exit used in the exception handler. Since the
258 offset could be zero and give a wrong meaning of the flag it is
262 if (checksync && (m->flags & ACC_SYNCHRONIZED))
263 (void) dseg_adds4(cd, (rd->maxmemuse + 1) * 8); /* IsSync */
268 (void) dseg_adds4(cd, 0); /* IsSync */
270 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
271 (void) dseg_adds4(cd, rd->savintregcnt - rd->maxsavintreguse);/* IntSave */
272 (void) dseg_adds4(cd, rd->savfltregcnt - rd->maxsavfltreguse);/* FltSave */
274 (void) dseg_addlinenumbertablesize(cd);
276 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
278 /* create exception table */
280 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
281 dseg_addtarget(cd, ex->start);
282 dseg_addtarget(cd, ex->end);
283 dseg_addtarget(cd, ex->handler);
284 (void) dseg_addaddress(cd, ex->catchtype.cls);
287 /* initialize mcode variables */
289 cd->mcodeptr = (u1 *) cd->mcodebase;
290 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
291 MCODECHECK(128 + m->paramcount);
293 /* create stack frame (if necessary) */
295 if (parentargs_base) {
296 x86_64_alu_imm_reg(cd, X86_64_SUB, parentargs_base * 8, REG_SP);
299 /* save used callee saved registers */
302 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
303 p--; x86_64_mov_reg_membase(cd, rd->savintregs[i], REG_SP, p * 8);
305 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
306 p--; x86_64_movq_reg_membase(cd, rd->savfltregs[i], REG_SP, p * 8);
309 /* take arguments out of register or stack frame */
311 for (p = 0, l = 0, s1 = 0, s2 = 0; p < m->paramcount; p++) {
312 t = m->paramtypes[p];
313 var = &(rd->locals[l][t]);
315 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
318 if (IS_INT_LNG_TYPE(t)) {
325 if (IS_INT_LNG_TYPE(t)) { /* integer args */
326 if (s1 < INT_ARG_CNT) { /* register arguments */
327 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
328 M_INTMOVE(rd->argintregs[s1], var->regoff);
330 } else { /* reg arg -> spilled */
331 x86_64_mov_reg_membase(cd, rd->argintregs[s1], REG_SP, var->regoff * 8);
334 } else { /* stack arguments */
335 pa = s1 - INT_ARG_CNT;
336 if (s2 >= FLT_ARG_CNT) {
337 pa += s2 - FLT_ARG_CNT;
339 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
340 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff); /* + 8 for return address */
341 } else { /* stack arg -> spilled */
342 /* x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1); */ /* + 8 for return address */
343 /* x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, var->regoff * 8); */
344 var->regoff = parentargs_base + pa + 1;
349 } else { /* floating args */
350 if (s2 < FLT_ARG_CNT) { /* register arguments */
351 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
352 M_FLTMOVE(rd->argfltregs[s2], var->regoff);
354 } else { /* reg arg -> spilled */
355 x86_64_movq_reg_membase(cd, rd->argfltregs[s2], REG_SP, var->regoff * 8);
358 } else { /* stack arguments */
359 pa = s2 - FLT_ARG_CNT;
360 if (s1 >= INT_ARG_CNT) {
361 pa += s1 - INT_ARG_CNT;
363 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
364 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff);
367 /* x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1); */
368 /* x86_64_movq_reg_membase(cd, REG_FTMP1, REG_SP, var->regoff * 8); */
369 var->regoff = parentargs_base + pa + 1;
376 /* save monitorenter argument */
378 #if defined(USE_THREADS)
379 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
380 if (m->flags & ACC_STATIC) {
381 x86_64_mov_imm_reg(cd, (ptrint) m->class, REG_ITMP1);
382 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, rd->maxmemuse * 8);
383 M_INTMOVE(REG_ITMP1, rd->argintregs[0]);
384 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_staticmonitorenter, REG_ITMP1);
385 x86_64_call_reg(cd, REG_ITMP1);
388 x86_64_test_reg_reg(cd, rd->argintregs[0], rd->argintregs[0]);
389 x86_64_jcc(cd, X86_64_CC_Z, 0);
390 codegen_addxnullrefs(cd, cd->mcodeptr);
391 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, rd->maxmemuse * 8);
392 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_monitorenter, REG_ITMP1);
393 x86_64_call_reg(cd, REG_ITMP1);
398 /* Copy argument registers to stack and call trace function with pointer */
399 /* to arguments on stack. */
402 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1 + 1) * 8, REG_SP);
404 /* save integer argument registers */
406 for (p = 0; p < INT_ARG_CNT; p++) {
407 x86_64_mov_reg_membase(cd, rd->argintregs[p], REG_SP, (1 + p) * 8);
410 /* save float argument registers */
412 for (p = 0; p < FLT_ARG_CNT; p++) {
413 x86_64_movq_reg_membase(cd, rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
416 /* show integer hex code for float arguments */
418 for (p = 0, l = 0; p < m->paramcount && p < INT_ARG_CNT; p++) {
419 t = m->paramtypes[p];
421 /* if the paramtype is a float, we have to right shift all */
422 /* following integer registers */
424 if (IS_FLT_DBL_TYPE(t)) {
425 for (s1 = INT_ARG_CNT - 2; s1 >= p; s1--) {
426 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
429 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
434 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP2);
435 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
436 x86_64_mov_imm_reg(cd, (u8) builtin_trace_args, REG_ITMP1);
437 x86_64_call_reg(cd, REG_ITMP1);
439 /* restore integer argument registers */
441 for (p = 0; p < INT_ARG_CNT; p++) {
442 x86_64_mov_membase_reg(cd, REG_SP, (1 + p) * 8, rd->argintregs[p]);
445 /* restore float argument registers */
447 for (p = 0; p < FLT_ARG_CNT; p++) {
448 x86_64_movq_membase_reg(cd, REG_SP, (1 + INT_ARG_CNT + p) * 8, rd->argfltregs[p]);
451 x86_64_alu_imm_reg(cd, X86_64_ADD, (6 + 8 + 1 + 1) * 8, REG_SP);
456 /* end of header generation */
458 /* walk through all basic blocks */
459 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
461 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
463 if (bptr->flags >= BBREACHED) {
465 /* branch resolving */
468 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
469 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
474 /* copy interface registers to their destination */
478 MCODECHECK(64 + len);
482 while (src != NULL) {
484 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
485 if (bptr->type == BBTYPE_SBR) {
486 /* d = reg_of_var(rd, src, REG_ITMP1); */
487 if (!(src->flags & INMEMORY))
491 x86_64_pop_reg(cd, d);
492 store_reg_to_var_int(src, d);
494 } else if (bptr->type == BBTYPE_EXH) {
495 /* d = reg_of_var(rd, src, REG_ITMP1); */
496 if (!(src->flags & INMEMORY))
500 M_INTMOVE(REG_ITMP1, d);
501 store_reg_to_var_int(src, d);
510 while (src != NULL) {
512 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
513 if (bptr->type == BBTYPE_SBR) {
514 d = reg_of_var(rd, src, REG_ITMP1);
515 x86_64_pop_reg(cd, d);
516 store_reg_to_var_int(src, d);
518 } else if (bptr->type == BBTYPE_EXH) {
519 d = reg_of_var(rd, src, REG_ITMP1);
520 M_INTMOVE(REG_ITMP1, d);
521 store_reg_to_var_int(src, d);
525 d = reg_of_var(rd, src, REG_ITMP1);
526 if ((src->varkind != STACKVAR)) {
528 if (IS_FLT_DBL_TYPE(s2)) {
529 s1 = rd->interfaces[len][s2].regoff;
530 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
534 x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
536 store_reg_to_var_flt(src, d);
539 s1 = rd->interfaces[len][s2].regoff;
540 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
544 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
546 store_reg_to_var_int(src, d);
555 /* walk through all instructions */
560 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
561 if (iptr->line != currentline) {
562 dseg_addlinenumber(cd, iptr->line, cd->mcodeptr);
563 /*printf("%s : %d\n",m->name->text,iptr->line);*/
564 currentline = iptr->line;
567 MCODECHECK(128); /* XXX are 128 bytes enough? */
570 case ICMD_INLINE_START: /* internal ICMDs */
571 case ICMD_INLINE_END:
574 case ICMD_NOP: /* ... ==> ... */
577 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
578 if (src->flags & INMEMORY) {
579 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
582 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
584 x86_64_jcc(cd, X86_64_CC_Z, 0);
585 codegen_addxnullrefs(cd, cd->mcodeptr);
588 /* constant operations ************************************************/
590 case ICMD_ICONST: /* ... ==> ..., constant */
591 /* op1 = 0, val.i = constant */
593 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
594 if (iptr->val.i == 0) {
595 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
597 x86_64_movl_imm_reg(cd, iptr->val.i, d);
599 store_reg_to_var_int(iptr->dst, d);
602 case ICMD_ACONST: /* ... ==> ..., constant */
603 /* op1 = 0, val.a = constant */
605 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
606 if (iptr->val.a == 0) {
607 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
609 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, d);
611 store_reg_to_var_int(iptr->dst, d);
614 case ICMD_LCONST: /* ... ==> ..., constant */
615 /* op1 = 0, val.l = constant */
617 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
618 if (iptr->val.l == 0) {
619 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
621 x86_64_mov_imm_reg(cd, iptr->val.l, d);
623 store_reg_to_var_int(iptr->dst, d);
626 case ICMD_FCONST: /* ... ==> ..., constant */
627 /* op1 = 0, val.f = constant */
629 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
630 a = dseg_addfloat(cd, iptr->val.f);
631 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + a, d);
632 store_reg_to_var_flt(iptr->dst, d);
635 case ICMD_DCONST: /* ... ==> ..., constant */
636 /* op1 = 0, val.d = constant */
638 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
639 a = dseg_adddouble(cd, iptr->val.d);
640 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, d);
641 store_reg_to_var_flt(iptr->dst, d);
645 /* load/store operations **********************************************/
647 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
648 /* op1 = local variable */
650 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
651 if ((iptr->dst->varkind == LOCALVAR) &&
652 (iptr->dst->varnum == iptr->op1)) {
655 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
656 if (var->flags & INMEMORY) {
657 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
658 store_reg_to_var_int(iptr->dst, d);
661 if (iptr->dst->flags & INMEMORY) {
662 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
665 M_INTMOVE(var->regoff, d);
670 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
671 case ICMD_ALOAD: /* op1 = local variable */
673 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
674 if ((iptr->dst->varkind == LOCALVAR) &&
675 (iptr->dst->varnum == iptr->op1)) {
678 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
679 if (var->flags & INMEMORY) {
680 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
681 store_reg_to_var_int(iptr->dst, d);
684 if (iptr->dst->flags & INMEMORY) {
685 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
688 M_INTMOVE(var->regoff, d);
693 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
694 case ICMD_DLOAD: /* op1 = local variable */
696 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
697 if ((iptr->dst->varkind == LOCALVAR) &&
698 (iptr->dst->varnum == iptr->op1)) {
701 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
702 if (var->flags & INMEMORY) {
703 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
704 store_reg_to_var_flt(iptr->dst, d);
707 if (iptr->dst->flags & INMEMORY) {
708 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
711 M_FLTMOVE(var->regoff, d);
716 case ICMD_ISTORE: /* ..., value ==> ... */
717 case ICMD_LSTORE: /* op1 = local variable */
720 if ((src->varkind == LOCALVAR) &&
721 (src->varnum == iptr->op1)) {
724 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
725 if (var->flags & INMEMORY) {
726 var_to_reg_int(s1, src, REG_ITMP1);
727 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
730 var_to_reg_int(s1, src, var->regoff);
731 M_INTMOVE(s1, var->regoff);
735 case ICMD_FSTORE: /* ..., value ==> ... */
736 case ICMD_DSTORE: /* op1 = local variable */
738 if ((src->varkind == LOCALVAR) &&
739 (src->varnum == iptr->op1)) {
742 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
743 if (var->flags & INMEMORY) {
744 var_to_reg_flt(s1, src, REG_FTMP1);
745 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
748 var_to_reg_flt(s1, src, var->regoff);
749 M_FLTMOVE(s1, var->regoff);
754 /* pop/dup/swap operations ********************************************/
756 /* attention: double and longs are only one entry in CACAO ICMDs */
758 case ICMD_POP: /* ..., value ==> ... */
759 case ICMD_POP2: /* ..., value, value ==> ... */
762 case ICMD_DUP: /* ..., a ==> ..., a, a */
763 M_COPY(src, iptr->dst);
766 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
768 M_COPY(src, iptr->dst);
769 M_COPY(src->prev, iptr->dst->prev);
770 M_COPY(iptr->dst, iptr->dst->prev->prev);
773 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
775 M_COPY(src, iptr->dst);
776 M_COPY(src->prev, iptr->dst->prev);
777 M_COPY(src->prev->prev, iptr->dst->prev->prev);
778 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
781 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
783 M_COPY(src, iptr->dst);
784 M_COPY(src->prev, iptr->dst->prev);
787 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
789 M_COPY(src, iptr->dst);
790 M_COPY(src->prev, iptr->dst->prev);
791 M_COPY(src->prev->prev, iptr->dst->prev->prev);
792 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
793 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
796 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
798 M_COPY(src, iptr->dst);
799 M_COPY(src->prev, iptr->dst->prev);
800 M_COPY(src->prev->prev, iptr->dst->prev->prev);
801 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
802 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
803 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
806 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
808 M_COPY(src, iptr->dst->prev);
809 M_COPY(src->prev, iptr->dst);
813 /* integer operations *************************************************/
815 case ICMD_INEG: /* ..., value ==> ..., - value */
817 d = reg_of_var(rd, iptr->dst, REG_NULL);
818 if (iptr->dst->flags & INMEMORY) {
819 if (src->flags & INMEMORY) {
820 if (src->regoff == iptr->dst->regoff) {
821 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
824 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
825 x86_64_negl_reg(cd, REG_ITMP1);
826 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
830 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
831 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
835 if (src->flags & INMEMORY) {
836 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
837 x86_64_negl_reg(cd, d);
840 M_INTMOVE(src->regoff, iptr->dst->regoff);
841 x86_64_negl_reg(cd, iptr->dst->regoff);
846 case ICMD_LNEG: /* ..., value ==> ..., - value */
848 d = reg_of_var(rd, iptr->dst, REG_NULL);
849 if (iptr->dst->flags & INMEMORY) {
850 if (src->flags & INMEMORY) {
851 if (src->regoff == iptr->dst->regoff) {
852 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
855 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
856 x86_64_neg_reg(cd, REG_ITMP1);
857 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
861 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
862 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
866 if (src->flags & INMEMORY) {
867 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
868 x86_64_neg_reg(cd, iptr->dst->regoff);
871 M_INTMOVE(src->regoff, iptr->dst->regoff);
872 x86_64_neg_reg(cd, iptr->dst->regoff);
877 case ICMD_I2L: /* ..., value ==> ..., value */
879 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
880 if (src->flags & INMEMORY) {
881 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
884 x86_64_movslq_reg_reg(cd, src->regoff, d);
886 store_reg_to_var_int(iptr->dst, d);
889 case ICMD_L2I: /* ..., value ==> ..., value */
891 var_to_reg_int(s1, src, REG_ITMP1);
892 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
894 store_reg_to_var_int(iptr->dst, d);
897 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
899 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
900 if (src->flags & INMEMORY) {
901 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
904 x86_64_movsbq_reg_reg(cd, src->regoff, d);
906 store_reg_to_var_int(iptr->dst, d);
909 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
911 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
912 if (src->flags & INMEMORY) {
913 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
916 x86_64_movzwq_reg_reg(cd, src->regoff, d);
918 store_reg_to_var_int(iptr->dst, d);
921 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
923 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
924 if (src->flags & INMEMORY) {
925 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
928 x86_64_movswq_reg_reg(cd, src->regoff, d);
930 store_reg_to_var_int(iptr->dst, d);
934 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
936 d = reg_of_var(rd, iptr->dst, REG_NULL);
937 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
940 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
941 /* val.i = constant */
943 d = reg_of_var(rd, iptr->dst, REG_NULL);
944 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
947 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
949 d = reg_of_var(rd, iptr->dst, REG_NULL);
950 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
953 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
954 /* val.l = constant */
956 d = reg_of_var(rd, iptr->dst, REG_NULL);
957 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
960 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
962 d = reg_of_var(rd, iptr->dst, REG_NULL);
963 if (iptr->dst->flags & INMEMORY) {
964 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
965 if (src->prev->regoff == iptr->dst->regoff) {
966 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
967 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
970 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
971 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
972 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
975 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
976 M_INTMOVE(src->prev->regoff, REG_ITMP1);
977 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
978 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
980 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
981 if (src->prev->regoff == iptr->dst->regoff) {
982 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
985 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
986 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
987 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
991 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
992 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
996 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
997 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
998 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1000 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1001 M_INTMOVE(src->prev->regoff, d);
1002 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1004 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1005 /* workaround for reg alloc */
1006 if (src->regoff == iptr->dst->regoff) {
1007 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1008 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1009 M_INTMOVE(REG_ITMP1, d);
1012 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1013 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
1017 /* workaround for reg alloc */
1018 if (src->regoff == iptr->dst->regoff) {
1019 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1020 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1021 M_INTMOVE(REG_ITMP1, d);
1024 M_INTMOVE(src->prev->regoff, d);
1025 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
1031 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
1032 /* val.i = constant */
1034 d = reg_of_var(rd, iptr->dst, REG_NULL);
1035 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
1038 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1040 d = reg_of_var(rd, iptr->dst, REG_NULL);
1041 if (iptr->dst->flags & INMEMORY) {
1042 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1043 if (src->prev->regoff == iptr->dst->regoff) {
1044 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1045 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1048 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1049 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1050 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1053 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1054 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1055 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1056 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1058 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1059 if (src->prev->regoff == iptr->dst->regoff) {
1060 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1063 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1064 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1065 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1069 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1070 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1074 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1075 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1076 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1078 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1079 M_INTMOVE(src->prev->regoff, d);
1080 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1082 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1083 /* workaround for reg alloc */
1084 if (src->regoff == iptr->dst->regoff) {
1085 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1086 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1087 M_INTMOVE(REG_ITMP1, d);
1090 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1091 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1095 /* workaround for reg alloc */
1096 if (src->regoff == iptr->dst->regoff) {
1097 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1098 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1099 M_INTMOVE(REG_ITMP1, d);
1102 M_INTMOVE(src->prev->regoff, d);
1103 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1109 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1110 /* val.l = constant */
1112 d = reg_of_var(rd, iptr->dst, REG_NULL);
1113 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1116 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1118 d = reg_of_var(rd, iptr->dst, REG_NULL);
1119 if (iptr->dst->flags & INMEMORY) {
1120 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1121 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1122 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1123 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1125 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1126 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1127 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1128 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1130 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1131 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1132 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1133 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1136 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1137 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1138 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1142 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1143 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1144 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1146 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1147 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1148 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1150 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1151 M_INTMOVE(src->regoff, iptr->dst->regoff);
1152 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1155 if (src->regoff == iptr->dst->regoff) {
1156 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1159 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1160 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1166 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1167 /* val.i = constant */
1169 d = reg_of_var(rd, iptr->dst, REG_NULL);
1170 if (iptr->dst->flags & INMEMORY) {
1171 if (src->flags & INMEMORY) {
1172 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1173 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1176 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1177 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1181 if (src->flags & INMEMORY) {
1182 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1185 if (iptr->val.i == 2) {
1186 M_INTMOVE(src->regoff, iptr->dst->regoff);
1187 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1190 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
1196 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1198 d = reg_of_var(rd, iptr->dst, REG_NULL);
1199 if (iptr->dst->flags & INMEMORY) {
1200 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1201 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1202 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1203 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1205 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1206 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1207 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1208 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1210 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1211 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1212 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1213 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1216 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1217 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1218 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1222 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1223 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1224 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1226 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1227 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1228 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1230 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1231 M_INTMOVE(src->regoff, iptr->dst->regoff);
1232 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1235 if (src->regoff == iptr->dst->regoff) {
1236 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1239 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1240 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
1246 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1247 /* val.l = constant */
1249 d = reg_of_var(rd, iptr->dst, REG_NULL);
1250 if (iptr->dst->flags & INMEMORY) {
1251 if (src->flags & INMEMORY) {
1252 if (IS_IMM32(iptr->val.l)) {
1253 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1256 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1257 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1259 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1262 if (IS_IMM32(iptr->val.l)) {
1263 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1266 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1267 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1269 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1273 if (src->flags & INMEMORY) {
1274 if (IS_IMM32(iptr->val.l)) {
1275 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1278 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1279 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1283 /* should match in many cases */
1284 if (iptr->val.l == 2) {
1285 M_INTMOVE(src->regoff, iptr->dst->regoff);
1286 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1289 if (IS_IMM32(iptr->val.l)) {
1290 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1293 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1294 M_INTMOVE(src->regoff, iptr->dst->regoff);
1295 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
1302 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1304 d = reg_of_var(rd, iptr->dst, REG_NULL);
1305 if (src->prev->flags & INMEMORY) {
1306 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1309 M_INTMOVE(src->prev->regoff, RAX);
1312 if (src->flags & INMEMORY) {
1313 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1316 M_INTMOVE(src->regoff, REG_ITMP3);
1320 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1321 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1322 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1323 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1325 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1327 x86_64_idivl_reg(cd, REG_ITMP3);
1329 if (iptr->dst->flags & INMEMORY) {
1330 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1331 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1334 M_INTMOVE(RAX, iptr->dst->regoff);
1336 if (iptr->dst->regoff != RDX) {
1337 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1342 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1343 log_text("Emitting ICMD_IREM");
1344 d = reg_of_var(rd, iptr->dst, REG_NULL);
1345 if (src->prev->flags & INMEMORY) {
1346 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1349 M_INTMOVE(src->prev->regoff, RAX);
1352 if (src->flags & INMEMORY) {
1353 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1356 M_INTMOVE(src->regoff, REG_ITMP3);
1360 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1362 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1363 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1366 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1367 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1368 x86_64_jcc(cd, X86_64_CC_E, 1 + 3); /* 6 bytes */
1371 x86_64_idivl_reg(cd, REG_ITMP3);
1373 if (iptr->dst->flags & INMEMORY) {
1374 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1375 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1378 M_INTMOVE(RDX, iptr->dst->regoff);
1380 if (iptr->dst->regoff != RDX) {
1381 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1386 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1387 /* val.i = constant */
1389 var_to_reg_int(s1, src, REG_ITMP1);
1390 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1391 M_INTMOVE(s1, REG_ITMP1);
1392 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1393 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1394 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1395 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1396 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1397 store_reg_to_var_int(iptr->dst, d);
1400 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1401 /* val.i = constant */
1403 var_to_reg_int(s1, src, REG_ITMP1);
1404 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1405 M_INTMOVE(s1, REG_ITMP1);
1406 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1407 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1408 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1409 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1410 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1411 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1412 store_reg_to_var_int(iptr->dst, d);
1416 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1418 d = reg_of_var(rd, iptr->dst, REG_NULL);
1419 if (src->prev->flags & INMEMORY) {
1420 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1423 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1426 if (src->flags & INMEMORY) {
1427 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1430 M_INTMOVE(src->regoff, REG_ITMP3);
1434 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1435 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1436 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1437 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1438 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1440 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1442 x86_64_idiv_reg(cd, REG_ITMP3);
1444 if (iptr->dst->flags & INMEMORY) {
1445 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1446 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1449 M_INTMOVE(RAX, iptr->dst->regoff);
1451 if (iptr->dst->regoff != RDX) {
1452 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1457 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1459 d = reg_of_var(rd, iptr->dst, REG_NULL);
1460 if (src->prev->flags & INMEMORY) {
1461 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1464 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1467 if (src->flags & INMEMORY) {
1468 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1471 M_INTMOVE(src->regoff, REG_ITMP3);
1475 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1477 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1478 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1479 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1482 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1483 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1484 x86_64_jcc(cd, X86_64_CC_E, 2 + 3); /* 6 bytes */
1487 x86_64_idiv_reg(cd, REG_ITMP3);
1489 if (iptr->dst->flags & INMEMORY) {
1490 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1491 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1494 M_INTMOVE(RDX, iptr->dst->regoff);
1496 if (iptr->dst->regoff != RDX) {
1497 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1502 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1503 /* val.i = constant */
1505 var_to_reg_int(s1, src, REG_ITMP1);
1506 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1507 M_INTMOVE(s1, REG_ITMP1);
1508 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1509 x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1510 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1511 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1512 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1513 store_reg_to_var_int(iptr->dst, d);
1516 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1517 /* val.l = constant */
1519 var_to_reg_int(s1, src, REG_ITMP1);
1520 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1521 M_INTMOVE(s1, REG_ITMP1);
1522 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1523 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1524 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1525 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1526 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1527 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1528 store_reg_to_var_int(iptr->dst, d);
1531 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1533 d = reg_of_var(rd, iptr->dst, REG_NULL);
1534 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1537 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1538 /* val.i = constant */
1540 d = reg_of_var(rd, iptr->dst, REG_NULL);
1541 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1544 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1546 d = reg_of_var(rd, iptr->dst, REG_NULL);
1547 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1550 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1551 /* val.i = constant */
1553 d = reg_of_var(rd, iptr->dst, REG_NULL);
1554 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1557 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1559 d = reg_of_var(rd, iptr->dst, REG_NULL);
1560 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1563 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1564 /* val.i = constant */
1566 d = reg_of_var(rd, iptr->dst, REG_NULL);
1567 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1570 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1572 d = reg_of_var(rd, iptr->dst, REG_NULL);
1573 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1576 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1577 /* val.i = constant */
1579 d = reg_of_var(rd, iptr->dst, REG_NULL);
1580 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1583 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1585 d = reg_of_var(rd, iptr->dst, REG_NULL);
1586 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1589 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1590 /* val.i = constant */
1592 d = reg_of_var(rd, iptr->dst, REG_NULL);
1593 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1596 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1598 d = reg_of_var(rd, iptr->dst, REG_NULL);
1599 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1602 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1603 /* val.l = constant */
1605 d = reg_of_var(rd, iptr->dst, REG_NULL);
1606 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1609 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1611 d = reg_of_var(rd, iptr->dst, REG_NULL);
1612 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1615 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1616 /* val.i = constant */
1618 d = reg_of_var(rd, iptr->dst, REG_NULL);
1619 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1622 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1624 d = reg_of_var(rd, iptr->dst, REG_NULL);
1625 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1628 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1629 /* val.l = constant */
1631 d = reg_of_var(rd, iptr->dst, REG_NULL);
1632 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1635 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1637 d = reg_of_var(rd, iptr->dst, REG_NULL);
1638 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1641 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1642 /* val.i = constant */
1644 d = reg_of_var(rd, iptr->dst, REG_NULL);
1645 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1648 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1650 d = reg_of_var(rd, iptr->dst, REG_NULL);
1651 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1654 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1655 /* val.l = constant */
1657 d = reg_of_var(rd, iptr->dst, REG_NULL);
1658 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1661 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1663 d = reg_of_var(rd, iptr->dst, REG_NULL);
1664 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1667 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1668 /* val.i = constant */
1670 d = reg_of_var(rd, iptr->dst, REG_NULL);
1671 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1674 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1676 d = reg_of_var(rd, iptr->dst, REG_NULL);
1677 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1680 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1681 /* val.l = constant */
1683 d = reg_of_var(rd, iptr->dst, REG_NULL);
1684 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
1688 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1689 /* op1 = variable, val.i = constant */
1691 /* using inc and dec is definitely faster than add -- tested */
1694 var = &(rd->locals[iptr->op1][TYPE_INT]);
1696 if (var->flags & INMEMORY) {
1697 if (iptr->val.i == 1) {
1698 x86_64_incl_membase(cd, REG_SP, d * 8);
1700 } else if (iptr->val.i == -1) {
1701 x86_64_decl_membase(cd, REG_SP, d * 8);
1704 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1708 if (iptr->val.i == 1) {
1709 x86_64_incl_reg(cd, d);
1711 } else if (iptr->val.i == -1) {
1712 x86_64_decl_reg(cd, d);
1715 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1721 /* floating operations ************************************************/
1723 case ICMD_FNEG: /* ..., value ==> ..., - value */
1725 var_to_reg_flt(s1, src, REG_FTMP1);
1726 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1727 a = dseg_adds4(cd, 0x80000000);
1729 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1730 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1731 store_reg_to_var_flt(iptr->dst, d);
1734 case ICMD_DNEG: /* ..., value ==> ..., - value */
1736 var_to_reg_flt(s1, src, REG_FTMP1);
1737 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1738 a = dseg_adds8(cd, 0x8000000000000000);
1740 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1741 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1742 store_reg_to_var_flt(iptr->dst, d);
1745 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1747 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1748 var_to_reg_flt(s2, src, REG_FTMP2);
1749 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1751 x86_64_addss_reg_reg(cd, s2, d);
1752 } else if (s2 == d) {
1753 x86_64_addss_reg_reg(cd, s1, d);
1756 x86_64_addss_reg_reg(cd, s2, d);
1758 store_reg_to_var_flt(iptr->dst, d);
1761 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1763 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1764 var_to_reg_flt(s2, src, REG_FTMP2);
1765 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1767 x86_64_addsd_reg_reg(cd, s2, d);
1768 } else if (s2 == d) {
1769 x86_64_addsd_reg_reg(cd, s1, d);
1772 x86_64_addsd_reg_reg(cd, s2, d);
1774 store_reg_to_var_flt(iptr->dst, d);
1777 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1779 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1780 var_to_reg_flt(s2, src, REG_FTMP2);
1781 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1783 M_FLTMOVE(s2, REG_FTMP2);
1787 x86_64_subss_reg_reg(cd, s2, d);
1788 store_reg_to_var_flt(iptr->dst, d);
1791 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1793 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1794 var_to_reg_flt(s2, src, REG_FTMP2);
1795 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1797 M_FLTMOVE(s2, REG_FTMP2);
1801 x86_64_subsd_reg_reg(cd, s2, d);
1802 store_reg_to_var_flt(iptr->dst, d);
1805 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1807 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1808 var_to_reg_flt(s2, src, REG_FTMP2);
1809 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1811 x86_64_mulss_reg_reg(cd, s2, d);
1812 } else if (s2 == d) {
1813 x86_64_mulss_reg_reg(cd, s1, d);
1816 x86_64_mulss_reg_reg(cd, s2, d);
1818 store_reg_to_var_flt(iptr->dst, d);
1821 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1823 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1824 var_to_reg_flt(s2, src, REG_FTMP2);
1825 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1827 x86_64_mulsd_reg_reg(cd, s2, d);
1828 } else if (s2 == d) {
1829 x86_64_mulsd_reg_reg(cd, s1, d);
1832 x86_64_mulsd_reg_reg(cd, s2, d);
1834 store_reg_to_var_flt(iptr->dst, d);
1837 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1839 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1840 var_to_reg_flt(s2, src, REG_FTMP2);
1841 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1843 M_FLTMOVE(s2, REG_FTMP2);
1847 x86_64_divss_reg_reg(cd, s2, d);
1848 store_reg_to_var_flt(iptr->dst, d);
1851 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1853 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1854 var_to_reg_flt(s2, src, REG_FTMP2);
1855 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1857 M_FLTMOVE(s2, REG_FTMP2);
1861 x86_64_divsd_reg_reg(cd, s2, d);
1862 store_reg_to_var_flt(iptr->dst, d);
1865 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1867 var_to_reg_int(s1, src, REG_ITMP1);
1868 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1869 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1870 store_reg_to_var_flt(iptr->dst, d);
1873 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1875 var_to_reg_int(s1, src, REG_ITMP1);
1876 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1877 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1878 store_reg_to_var_flt(iptr->dst, d);
1881 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1883 var_to_reg_int(s1, src, REG_ITMP1);
1884 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1885 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1886 store_reg_to_var_flt(iptr->dst, d);
1889 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1891 var_to_reg_int(s1, src, REG_ITMP1);
1892 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1893 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1894 store_reg_to_var_flt(iptr->dst, d);
1897 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1899 var_to_reg_flt(s1, src, REG_FTMP1);
1900 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1901 x86_64_cvttss2si_reg_reg(cd, s1, d);
1902 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1903 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1904 x86_64_jcc(cd, X86_64_CC_NE, a);
1905 M_FLTMOVE(s1, REG_FTMP1);
1906 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1907 x86_64_call_reg(cd, REG_ITMP2);
1908 M_INTMOVE(REG_RESULT, d);
1909 store_reg_to_var_int(iptr->dst, d);
1912 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1914 var_to_reg_flt(s1, src, REG_FTMP1);
1915 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1916 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1917 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1918 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1919 x86_64_jcc(cd, X86_64_CC_NE, a);
1920 M_FLTMOVE(s1, REG_FTMP1);
1921 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1922 x86_64_call_reg(cd, REG_ITMP2);
1923 M_INTMOVE(REG_RESULT, d);
1924 store_reg_to_var_int(iptr->dst, d);
1927 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1929 var_to_reg_flt(s1, src, REG_FTMP1);
1930 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1931 x86_64_cvttss2siq_reg_reg(cd, s1, d);
1932 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1933 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1934 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1935 x86_64_jcc(cd, X86_64_CC_NE, a);
1936 M_FLTMOVE(s1, REG_FTMP1);
1937 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
1938 x86_64_call_reg(cd, REG_ITMP2);
1939 M_INTMOVE(REG_RESULT, d);
1940 store_reg_to_var_int(iptr->dst, d);
1943 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1945 var_to_reg_flt(s1, src, REG_FTMP1);
1946 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1947 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1948 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1949 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1950 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1951 x86_64_jcc(cd, X86_64_CC_NE, a);
1952 M_FLTMOVE(s1, REG_FTMP1);
1953 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
1954 x86_64_call_reg(cd, REG_ITMP2);
1955 M_INTMOVE(REG_RESULT, d);
1956 store_reg_to_var_int(iptr->dst, d);
1959 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1961 var_to_reg_flt(s1, src, REG_FTMP1);
1962 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1963 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1964 store_reg_to_var_flt(iptr->dst, d);
1967 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1969 var_to_reg_flt(s1, src, REG_FTMP1);
1970 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1971 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1972 store_reg_to_var_flt(iptr->dst, d);
1975 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1976 /* == => 0, < => 1, > => -1 */
1978 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1979 var_to_reg_flt(s2, src, REG_FTMP2);
1980 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1981 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1982 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1983 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1984 x86_64_ucomiss_reg_reg(cd, s1, s2);
1985 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1986 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1987 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1988 store_reg_to_var_int(iptr->dst, d);
1991 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1992 /* == => 0, < => 1, > => -1 */
1994 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1995 var_to_reg_flt(s2, src, REG_FTMP2);
1996 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1997 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1998 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1999 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
2000 x86_64_ucomiss_reg_reg(cd, s1, s2);
2001 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
2002 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
2003 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
2004 store_reg_to_var_int(iptr->dst, d);
2007 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
2008 /* == => 0, < => 1, > => -1 */
2010 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2011 var_to_reg_flt(s2, src, REG_FTMP2);
2012 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2013 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
2014 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
2015 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
2016 x86_64_ucomisd_reg_reg(cd, s1, s2);
2017 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
2018 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
2019 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
2020 store_reg_to_var_int(iptr->dst, d);
2023 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
2024 /* == => 0, < => 1, > => -1 */
2026 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2027 var_to_reg_flt(s2, src, REG_FTMP2);
2028 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2029 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
2030 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
2031 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
2032 x86_64_ucomisd_reg_reg(cd, s1, s2);
2033 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
2034 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
2035 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
2036 store_reg_to_var_int(iptr->dst, d);
2040 /* memory operations **************************************************/
2042 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
2044 var_to_reg_int(s1, src, REG_ITMP1);
2045 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2046 gen_nullptr_check(s1);
2047 x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
2048 store_reg_to_var_int(iptr->dst, d);
2051 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2053 var_to_reg_int(s1, src->prev, REG_ITMP1);
2054 var_to_reg_int(s2, src, REG_ITMP2);
2055 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2056 if (iptr->op1 == 0) {
2057 gen_nullptr_check(s1);
2060 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2061 store_reg_to_var_int(iptr->dst, d);
2064 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2066 var_to_reg_int(s1, src->prev, REG_ITMP1);
2067 var_to_reg_int(s2, src, REG_ITMP2);
2068 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2069 if (iptr->op1 == 0) {
2070 gen_nullptr_check(s1);
2073 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2074 store_reg_to_var_int(iptr->dst, d);
2077 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2079 var_to_reg_int(s1, src->prev, REG_ITMP1);
2080 var_to_reg_int(s2, src, REG_ITMP2);
2081 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2082 if (iptr->op1 == 0) {
2083 gen_nullptr_check(s1);
2086 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2087 store_reg_to_var_int(iptr->dst, d);
2090 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2092 var_to_reg_int(s1, src->prev, REG_ITMP1);
2093 var_to_reg_int(s2, src, REG_ITMP2);
2094 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2095 if (iptr->op1 == 0) {
2096 gen_nullptr_check(s1);
2099 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2100 store_reg_to_var_flt(iptr->dst, d);
2103 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2105 var_to_reg_int(s1, src->prev, REG_ITMP1);
2106 var_to_reg_int(s2, src, REG_ITMP2);
2107 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2108 if (iptr->op1 == 0) {
2109 gen_nullptr_check(s1);
2112 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2113 store_reg_to_var_flt(iptr->dst, d);
2116 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2118 var_to_reg_int(s1, src->prev, REG_ITMP1);
2119 var_to_reg_int(s2, src, REG_ITMP2);
2120 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2121 if (iptr->op1 == 0) {
2122 gen_nullptr_check(s1);
2125 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2126 store_reg_to_var_int(iptr->dst, d);
2129 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2131 var_to_reg_int(s1, src->prev, REG_ITMP1);
2132 var_to_reg_int(s2, src, REG_ITMP2);
2133 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2134 if (iptr->op1 == 0) {
2135 gen_nullptr_check(s1);
2138 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2139 store_reg_to_var_int(iptr->dst, d);
2142 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2144 var_to_reg_int(s1, src->prev, REG_ITMP1);
2145 var_to_reg_int(s2, src, REG_ITMP2);
2146 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2147 if (iptr->op1 == 0) {
2148 gen_nullptr_check(s1);
2151 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2152 store_reg_to_var_int(iptr->dst, d);
/* Array store cases. Common shape: arrayref in s1, index in s2, value in  */
/* s3; emit a reg->memindex store scaled by the element size (log2 scale:  */
/* 3 = 8 bytes, 2 = 4 bytes, 1 = 2 bytes, 0 = 1 byte). The listing elides  */
/* the closing braces / break statements between cases.                    */
/* NOTE(review): no array-store (subtype) check is visible for AASTORE in  */
/* this excerpt -- confirm whether it lives in the elided lines.           */
2156 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2158 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2159 var_to_reg_int(s2, src->prev, REG_ITMP2);
2160 if (iptr->op1 == 0) {
2161 gen_nullptr_check(s1);
2164 var_to_reg_int(s3, src, REG_ITMP3);
2165 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2168 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2170 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2171 var_to_reg_int(s2, src->prev, REG_ITMP2);
2172 if (iptr->op1 == 0) {
2173 gen_nullptr_check(s1);
2176 var_to_reg_int(s3, src, REG_ITMP3);
2177 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2180 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2182 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2183 var_to_reg_int(s2, src->prev, REG_ITMP2);
2184 if (iptr->op1 == 0) {
2185 gen_nullptr_check(s1);
2188 var_to_reg_int(s3, src, REG_ITMP3);
2189 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
/* float/double stores use an XMM temp and movss/movsd */
2192 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2194 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2195 var_to_reg_int(s2, src->prev, REG_ITMP2);
2196 if (iptr->op1 == 0) {
2197 gen_nullptr_check(s1);
2200 var_to_reg_flt(s3, src, REG_FTMP3);
2201 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2204 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2206 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2207 var_to_reg_int(s2, src->prev, REG_ITMP2);
2208 if (iptr->op1 == 0) {
2209 gen_nullptr_check(s1);
2212 var_to_reg_flt(s3, src, REG_FTMP3);
2213 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
/* char and short both store 16 bits (movw); byte stores 8 bits (movb) */
2216 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2218 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2219 var_to_reg_int(s2, src->prev, REG_ITMP2);
2220 if (iptr->op1 == 0) {
2221 gen_nullptr_check(s1);
2224 var_to_reg_int(s3, src, REG_ITMP3);
2225 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2228 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2230 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2231 var_to_reg_int(s2, src->prev, REG_ITMP2);
2232 if (iptr->op1 == 0) {
2233 gen_nullptr_check(s1);
2236 var_to_reg_int(s3, src, REG_ITMP3);
2237 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2240 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2242 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2243 var_to_reg_int(s2, src->prev, REG_ITMP2);
2244 if (iptr->op1 == 0) {
2245 gen_nullptr_check(s1);
2248 var_to_reg_int(s3, src, REG_ITMP3);
2249 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
/* Array store-constant cases: like the stores above, but the value comes  */
/* from the instruction immediate (iptr->val) instead of a stack slot, so  */
/* only arrayref (s1) and index (s2) are loaded into registers.            */
2252 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2254 var_to_reg_int(s1, src->prev, REG_ITMP1);
2255 var_to_reg_int(s2, src, REG_ITMP2);
2256 if (iptr->op1 == 0) {
2257 gen_nullptr_check(s1);
2260 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2263 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2265 var_to_reg_int(s1, src->prev, REG_ITMP1);
2266 var_to_reg_int(s2, src, REG_ITMP2);
2267 if (iptr->op1 == 0) {
2268 gen_nullptr_check(s1);
/* a 64-bit immediate store needs two 32-bit stores unless the value fits  */
/* in a sign-extended imm32 (low dword first, then high dword at +4)       */
2272 if (IS_IMM32(iptr->val.l)) {
2273 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2276 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2277 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
/* AASTORECONST only ever stores null (constant 0), so no subtype check */
2281 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2283 var_to_reg_int(s1, src->prev, REG_ITMP1);
2284 var_to_reg_int(s2, src, REG_ITMP2);
2285 if (iptr->op1 == 0) {
2286 gen_nullptr_check(s1);
2289 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2292 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2294 var_to_reg_int(s1, src->prev, REG_ITMP1);
2295 var_to_reg_int(s2, src, REG_ITMP2);
2296 if (iptr->op1 == 0) {
2297 gen_nullptr_check(s1);
2300 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2303 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2305 var_to_reg_int(s1, src->prev, REG_ITMP1);
2306 var_to_reg_int(s2, src, REG_ITMP2);
2307 if (iptr->op1 == 0) {
2308 gen_nullptr_check(s1);
2311 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2314 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2316 var_to_reg_int(s1, src->prev, REG_ITMP1);
2317 var_to_reg_int(s2, src, REG_ITMP2);
2318 if (iptr->op1 == 0) {
2319 gen_nullptr_check(s1);
2322 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
/* GETSTATIC: load a static field. Two paths (the guarding if/else lines   */
/* are elided from this listing): unresolved field -> emit a patcher ref   */
/* (PATCHER_get_putstatic) so the address is filled in at run time;        */
/* resolved field -> possibly emit a class-init patcher, then take the     */
/* field's address directly. The address is parked in the data segment and */
/* loaded RIP-relative into REG_ITMP2; the final load is typed on op1.     */
2326 case ICMD_GETSTATIC: /* ... ==> ..., value */
2327 /* op1 = type, val.a = field address */
2330 codegen_addpatchref(cd, cd->mcodeptr,
2331 PATCHER_get_putstatic,
2332 (unresolved_field *) iptr->target);
/* keep patch site large enough when disassembly padding is on */
2334 if (showdisassemble) {
2335 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2341 fieldinfo *fi = iptr->val.a;
/* class not yet initialized: patch in a <clinit> trigger */
2343 if (!fi->class->initialized) {
2344 codegen_addpatchref(cd, cd->mcodeptr,
2345 PATCHER_clinit, fi->class);
2347 if (showdisassemble) {
2348 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2352 a = (ptrint) &(fi->value);
2355 /* This approach is much faster than moving the field address */
2356 /* inline into a register. */
2357 a = dseg_addaddress(cd, a);
/* RIP-relative load of the field address from the data segment; the "+7"  */
/* accounts for the length of this mov instruction itself                  */
2358 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP2);
2359 switch (iptr->op1) {
2361 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2362 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2363 store_reg_to_var_int(iptr->dst, d);
2367 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2368 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2369 store_reg_to_var_int(iptr->dst, d);
2372 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2373 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2374 store_reg_to_var_flt(iptr->dst, d);
2377 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2378 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2379 store_reg_to_var_flt(iptr->dst, d);
/* PUTSTATIC: mirror of GETSTATIC -- same patcher/clinit handling and      */
/* RIP-relative address load into REG_ITMP2, but stores the source value   */
/* (s2) through it, switched on the field type in op1.                     */
2384 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2385 /* op1 = type, val.a = field address */
2388 codegen_addpatchref(cd, cd->mcodeptr,
2389 PATCHER_get_putstatic,
2390 (unresolved_field *) iptr->target);
2392 if (showdisassemble) {
2393 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2399 fieldinfo *fi = iptr->val.a;
2401 if (!fi->class->initialized) {
2402 codegen_addpatchref(cd, cd->mcodeptr,
2403 PATCHER_clinit, fi->class);
2405 if (showdisassemble) {
2406 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2410 a = (ptrint) &(fi->value);
2413 /* This approach is much faster than moving the field address */
2414 /* inline into a register. */
2415 a = dseg_addaddress(cd, a);
2416 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP2);
2417 switch (iptr->op1) {
2419 var_to_reg_int(s2, src, REG_ITMP1);
2420 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2424 var_to_reg_int(s2, src, REG_ITMP1);
2425 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2428 var_to_reg_flt(s2, src, REG_FTMP1);
2429 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2432 var_to_reg_flt(s2, src, REG_FTMP1);
2433 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
/* PUTSTATICCONST: store a compile-time constant into a static field. The  */
/* field info rides in the FOLLOWING instruction (iptr[1]); the constant   */
/* itself is in this instruction's val. Address handling matches           */
/* GET/PUTSTATIC above; uses REG_ITMP1 for the field address.              */
2438 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2439 /* val = value (in current instruction) */
2440 /* op1 = type, val.a = field address (in */
2441 /* following NOP) */
2443 if (!iptr[1].val.a) {
2444 codegen_addpatchref(cd, cd->mcodeptr,
2445 PATCHER_get_putstatic,
2446 (unresolved_field *) iptr[1].target);
2448 if (showdisassemble) {
2449 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2455 fieldinfo *fi = iptr[1].val.a;
2457 if (!fi->class->initialized) {
2458 codegen_addpatchref(cd, cd->mcodeptr,
2459 PATCHER_clinit, fi->class);
2461 if (showdisassemble) {
2462 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2466 a = (ptrint) &(fi->value);
2469 /* This approach is much faster than moving the field address */
2470 /* inline into a register. */
2471 a = dseg_addaddress(cd, a);
2472 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP1);
2473 switch (iptr->op1) {
2476 x86_64_movl_imm_membase(cd, iptr->val.i, REG_ITMP1, 0);
/* 64-bit constant: single store if imm32-representable, else two 32-bit   */
/* halves (low at 0, high at +4)                                           */
2481 if (IS_IMM32(iptr->val.l)) {
2482 x86_64_mov_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2484 x86_64_movl_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2485 x86_64_movl_imm_membase(cd, iptr->val.l >> 32, REG_ITMP1, 4);
/* GETFIELD: load an instance field. Object ref in s1 (null-checked);      */
/* the field offset 'a' is either patched in (unresolved, membase32 forms  */
/* leave a 32-bit displacement for the patcher to fill) or taken from the  */
/* resolved fieldinfo. Load is typed on op1.                               */
2491 case ICMD_GETFIELD: /* ... ==> ..., value */
2492 /* op1 = type, val.i = field offset */
2494 var_to_reg_int(s1, src, REG_ITMP1);
2495 gen_nullptr_check(s1);
2498 codegen_addpatchref(cd, cd->mcodeptr,
2499 PATCHER_get_putfield,
2500 (unresolved_field *) iptr->target);
2502 if (showdisassemble) {
2503 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2509 a = ((fieldinfo *) (iptr->val.a))->offset;
2512 switch (iptr->op1) {
2514 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2515 x86_64_movl_membase32_reg(cd, s1, a, d);
2516 store_reg_to_var_int(iptr->dst, d);
2520 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2521 x86_64_mov_membase32_reg(cd, s1, a, d);
2522 store_reg_to_var_int(iptr->dst, d);
2525 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2526 x86_64_movss_membase32_reg(cd, s1, a, d);
2527 store_reg_to_var_flt(iptr->dst, d);
2530 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2531 x86_64_movsd_membase32_reg(cd, s1, a, d);
2532 store_reg_to_var_flt(iptr->dst, d);
/* PUTFIELD: store to an instance field. Loads the value into an int or    */
/* float temp BEFORE emitting the patcher ref, so the patch site starts    */
/* exactly at the (membase32) store instruction.                           */
2537 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2538 /* op1 = type, val.i = field offset */
2540 var_to_reg_int(s1, src->prev, REG_ITMP1);
2541 gen_nullptr_check(s1);
2542 if (IS_INT_LNG_TYPE(iptr->op1)) {
2543 var_to_reg_int(s2, src, REG_ITMP2);
2545 var_to_reg_flt(s2, src, REG_FTMP2);
2549 codegen_addpatchref(cd, cd->mcodeptr,
2550 PATCHER_get_putfield,
2551 (unresolved_field *) iptr->target);
2553 if (showdisassemble) {
2554 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2560 a = ((fieldinfo *) (iptr->val.a))->offset;
2563 switch (iptr->op1) {
2565 x86_64_movl_reg_membase32(cd, s2, s1, a);
2569 x86_64_mov_reg_membase32(cd, s2, s1, a);
2572 x86_64_movss_reg_membase32(cd, s2, s1, a);
2575 x86_64_movsd_reg_membase32(cd, s2, s1, a);
/* PUTFIELDCONST: store a constant to an instance field. Field info is in  */
/* the following instruction (iptr[1]) as with PUTSTATICCONST; membase32   */
/* immediate stores leave room for the offset patch.                       */
2580 case ICMD_PUTFIELDCONST: /* ..., objectref, value ==> ... */
2581 /* val = value (in current instruction) */
2582 /* op1 = type, val.a = field address (in */
2583 /* following NOP) */
2585 var_to_reg_int(s1, src, REG_ITMP1);
2586 gen_nullptr_check(s1);
2588 if (!iptr[1].val.a) {
2589 codegen_addpatchref(cd, cd->mcodeptr,
2590 PATCHER_get_putfield,
2591 (unresolved_field *) iptr[1].target);
2593 if (showdisassemble) {
2594 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2600 a = ((fieldinfo *) (iptr[1].val.a))->offset;
2603 switch (iptr->op1) {
2606 x86_64_movl_imm_membase32(cd, iptr->val.i, s1, a);
/* 64-bit constant: one store if imm32, else low/high 32-bit halves */
2611 if (IS_IMM32(iptr->val.l)) {
2612 x86_64_mov_imm_membase32(cd, iptr->val.l, s1, a);
2614 x86_64_movl_imm_membase32(cd, iptr->val.l, s1, a);
2615 x86_64_movl_imm_membase32(cd, iptr->val.l >> 32, s1, a + 4);
2622 /* branch operations **************************************************/
/* ATHROW: exception object -> XPTR reg; the call/pop pair materializes    */
/* the current PC into XPC, then control transfers to the assembler        */
/* exception handler.                                                      */
2624 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2626 var_to_reg_int(s1, src, REG_ITMP1);
2627 M_INTMOVE(s1, REG_ITMP1_XPTR);
2629 x86_64_call_imm(cd, 0); /* passing exception pointer */
2630 x86_64_pop_reg(cd, REG_ITMP2_XPC);
2632 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
2633 x86_64_jmp_reg(cd, REG_ITMP3);
/* GOTO/JSR: emit jmp/call with placeholder displacement 0; the branch is  */
/* recorded via codegen_addreference and fixed up when targets are known.  */
2636 case ICMD_GOTO: /* ... ==> ... */
2637 /* op1 = target JavaVM pc */
2639 x86_64_jmp_imm(cd, 0);
2640 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2643 case ICMD_JSR: /* ... ==> ... */
2644 /* op1 = target JavaVM pc */
2646 x86_64_call_imm(cd, 0);
2647 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
/* RET: indirect jump through the return address held in a local variable */
2650 case ICMD_RET: /* ... ==> ... */
2651 /* op1 = local variable */
2653 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2654 var_to_reg_int(s1, var, REG_ITMP1);
2655 x86_64_jmp_reg(cd, s1);
/* Null tests: compare against 0 in memory, or test reg,reg if the value   */
/* is register-resident; jcc displacement 0 is patched via addreference.   */
2658 case ICMD_IFNULL: /* ..., value ==> ... */
2659 /* op1 = target JavaVM pc */
2661 if (src->flags & INMEMORY) {
2662 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2665 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2667 x86_64_jcc(cd, X86_64_CC_E, 0);
2668 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2671 case ICMD_IFNONNULL: /* ..., value ==> ... */
2672 /* op1 = target JavaVM pc */
2674 if (src->flags & INMEMORY) {
2675 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2678 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2680 x86_64_jcc(cd, X86_64_CC_NE, 0);
2681 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
/* Int compare-vs-constant branches delegate to the shared emit helper;    */
/* only the condition code differs per opcode.                             */
2684 case ICMD_IFEQ: /* ..., value ==> ... */
2685 /* op1 = target JavaVM pc, val.i = constant */
2687 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2690 case ICMD_IFLT: /* ..., value ==> ... */
2691 /* op1 = target JavaVM pc, val.i = constant */
2693 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2696 case ICMD_IFLE: /* ..., value ==> ... */
2697 /* op1 = target JavaVM pc, val.i = constant */
2699 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2702 case ICMD_IFNE: /* ..., value ==> ... */
2703 /* op1 = target JavaVM pc, val.i = constant */
2705 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2708 case ICMD_IFGT: /* ..., value ==> ... */
2709 /* op1 = target JavaVM pc, val.i = constant */
2711 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2714 case ICMD_IFGE: /* ..., value ==> ... */
2715 /* op1 = target JavaVM pc, val.i = constant */
2717 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
/* Long compare-vs-constant branches (64-bit helper variant) */
2720 case ICMD_IF_LEQ: /* ..., value ==> ... */
2721 /* op1 = target JavaVM pc, val.l = constant */
2723 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2726 case ICMD_IF_LLT: /* ..., value ==> ... */
2727 /* op1 = target JavaVM pc, val.l = constant */
2729 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2732 case ICMD_IF_LLE: /* ..., value ==> ... */
2733 /* op1 = target JavaVM pc, val.l = constant */
2735 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2738 case ICMD_IF_LNE: /* ..., value ==> ... */
2739 /* op1 = target JavaVM pc, val.l = constant */
2741 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2744 case ICMD_IF_LGT: /* ..., value ==> ... */
2745 /* op1 = target JavaVM pc, val.l = constant */
2747 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2750 case ICMD_IF_LGE: /* ..., value ==> ... */
2751 /* op1 = target JavaVM pc, val.l = constant */
2753 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
/* Two-operand compare branches: int (icmpcc) vs long/ref (lcmpcc) forms. */
2756 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2757 /* op1 = target JavaVM pc */
2759 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2762 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2763 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2765 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2768 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2769 /* op1 = target JavaVM pc */
2771 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2774 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2775 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2777 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2780 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2781 /* op1 = target JavaVM pc */
2783 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2786 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2787 /* op1 = target JavaVM pc */
2789 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2792 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2793 /* op1 = target JavaVM pc */
2795 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2798 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2799 /* op1 = target JavaVM pc */
2801 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2804 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2805 /* op1 = target JavaVM pc */
2807 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2810 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2811 /* op1 = target JavaVM pc */
2813 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2816 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2817 /* op1 = target JavaVM pc */
2819 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2822 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2823 /* op1 = target JavaVM pc */
2825 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2828 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
/* Branchless select: when paired with ELSE_ICONST, load the "else"        */
/* constant into d, load the "then" constant into ITMP2, test the value    */
/* and cmovcc the "then" constant in when the condition holds. Six         */
/* identical cases differing only in the condition code. NOTE(review):     */
/* the no-ELSE path lines are partially elided in this listing.            */
2830 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2833 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2834 /* val.i = constant */
2836 var_to_reg_int(s1, src, REG_ITMP1);
2837 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2838 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2840 M_INTMOVE(s1, REG_ITMP1);
2843 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2845 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2846 x86_64_testl_reg_reg(cd, s1, s1);
2847 x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2848 store_reg_to_var_int(iptr->dst, d);
2851 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2852 /* val.i = constant */
2854 var_to_reg_int(s1, src, REG_ITMP1);
2855 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2856 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2858 M_INTMOVE(s1, REG_ITMP1);
2861 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2863 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2864 x86_64_testl_reg_reg(cd, s1, s1);
2865 x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2866 store_reg_to_var_int(iptr->dst, d);
2869 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2870 /* val.i = constant */
2872 var_to_reg_int(s1, src, REG_ITMP1);
2873 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2874 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2876 M_INTMOVE(s1, REG_ITMP1);
2879 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2881 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2882 x86_64_testl_reg_reg(cd, s1, s1);
2883 x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2884 store_reg_to_var_int(iptr->dst, d);
2887 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2888 /* val.i = constant */
2890 var_to_reg_int(s1, src, REG_ITMP1);
2891 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2892 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2894 M_INTMOVE(s1, REG_ITMP1);
2897 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2899 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2900 x86_64_testl_reg_reg(cd, s1, s1);
2901 x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2902 store_reg_to_var_int(iptr->dst, d);
2905 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2906 /* val.i = constant */
2908 var_to_reg_int(s1, src, REG_ITMP1);
2909 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2910 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2912 M_INTMOVE(s1, REG_ITMP1);
2915 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2917 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2918 x86_64_testl_reg_reg(cd, s1, s1);
2919 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2920 store_reg_to_var_int(iptr->dst, d);
2923 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2924 /* val.i = constant */
2926 var_to_reg_int(s1, src, REG_ITMP1);
2927 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2928 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2930 M_INTMOVE(s1, REG_ITMP1);
2933 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2935 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2936 x86_64_testl_reg_reg(cd, s1, s1);
2937 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2938 store_reg_to_var_int(iptr->dst, d);
/* Return cases: move the return value into REG_RESULT / REG_FRESULT,      */
/* then fall into the shared 'nowperformreturn' epilogue: optional method  */
/* exit trace, optional monitorexit for synchronized methods (saving and   */
/* restoring the live return value around the call), callee-saved          */
/* register restore, and stack-frame teardown. The listing elides the      */
/* 'nowperformreturn:' label and several case labels inside the epilogue   */
/* switches.                                                               */
2942 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2946 var_to_reg_int(s1, src, REG_RESULT);
2947 M_INTMOVE(s1, REG_RESULT);
2949 goto nowperformreturn;
2951 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2954 var_to_reg_flt(s1, src, REG_FRESULT);
2955 M_FLTMOVE(s1, REG_FRESULT);
2957 goto nowperformreturn;
2959 case ICMD_RETURN: /* ... ==> ... */
2965 p = parentargs_base;
2967 /* call trace function */
/* spill both result registers across the trace call */
2969 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2971 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2972 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2974 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
2975 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
/* the float result is passed in both float arg regs (float and double slot) */
2976 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2977 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2979 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
2980 x86_64_call_reg(cd, REG_ITMP1);
2982 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2983 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2985 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2988 #if defined(USE_THREADS)
/* synchronized method: reload the saved monitor object and call           */
/* builtin_monitorexit, preserving the return value in the same stack slot */
2989 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2990 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2992 /* we need to save the proper return value */
2993 switch (iptr->opc) {
2997 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, rd->maxmemuse * 8);
3001 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, rd->maxmemuse * 8);
3005 x86_64_mov_imm_reg(cd, (ptrint) builtin_monitorexit, REG_ITMP1);
3006 x86_64_call_reg(cd, REG_ITMP1);
3008 /* and now restore the proper return value */
3009 switch (iptr->opc) {
3013 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_RESULT);
3017 x86_64_movq_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_FRESULT);
3023 /* restore saved registers */
3024 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
3025 p--; x86_64_mov_membase_reg(cd, REG_SP, p * 8, rd->savintregs[i]);
3027 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
3028 p--; x86_64_movq_membase_reg(cd, REG_SP, p * 8, rd->savfltregs[i]);
3031 /* deallocate stack */
3032 if (parentargs_base) {
3033 x86_64_alu_imm_reg(cd, X86_64_ADD, parentargs_base * 8, REG_SP);
/* TABLESWITCH: range-check the (index - low) value with an unsigned       */
/* compare (CC_A catches both below-low and above-high in one branch),     */
/* then do an indirect jump through a jump table built in the data         */
/* segment. The table-build loop header is elided from this listing.       */
3041 case ICMD_TABLESWITCH: /* ..., index ==> ... */
3046 tptr = (void **) iptr->target;
3048 s4ptr = iptr->val.a;
3049 l = s4ptr[1]; /* low */
3050 i = s4ptr[2]; /* high */
3052 var_to_reg_int(s1, src, REG_ITMP1);
3053 M_INTMOVE(s1, REG_ITMP1);
3055 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
/* NOTE(review): 'i' is presumably high-low+1 here (recomputed in elided   */
/* lines); the unsigned CC_A compare against i-1 implements the range test */
3060 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
3061 x86_64_jcc(cd, X86_64_CC_A, 0);
3063 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
3064 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3066 /* build jump table top down and use address of lowest entry */
3068 /* s4ptr += 3 + i; */
3072 /* dseg_addtarget(cd, BlockPtrOfPC(*--s4ptr)); */
3073 dseg_addtarget(cd, (basicblock *) tptr[0]);
3077 /* length of dataseg after last dseg_addtarget is used by load */
/* mov imm 0 is patched to the data segment base by dseg_adddata */
3079 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
3080 dseg_adddata(cd, cd->mcodeptr);
3081 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
3082 x86_64_jmp_reg(cd, REG_ITMP1);
/* LOOKUPSWITCH: linear chain of compare-and-branch pairs, ending with an  */
/* unconditional jump to the default block. The per-pair loop header is    */
/* elided from this listing.                                               */
3087 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
3089 s4 i, l, val, *s4ptr;
3092 tptr = (void **) iptr->target;
3094 s4ptr = iptr->val.a;
3095 l = s4ptr[0]; /* default */
3096 i = s4ptr[1]; /* count */
3098 MCODECHECK((i<<2)+8);
3099 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
3105 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
3106 x86_64_jcc(cd, X86_64_CC_E, 0);
3107 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[1]), cd->mcodeptr); */
3108 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3111 x86_64_jmp_imm(cd, 0);
3112 /* codegen_addreference(cd, BlockPtrOfPC(l), cd->mcodeptr); */
3114 tptr = (void **) iptr->target;
3115 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
/* Builtin and invoke cases. Shared argument-marshalling loop copies the   */
/* stack operands into the AMD64 argument registers (INT_ARG_CNT /         */
/* FLT_ARG_CNT of them) and spills the rest to stack slots, then the       */
/* inner switch emits the actual call per invoke kind. 'd' ends up         */
/* holding the return type so the common tail can store the result.        */
3120 case ICMD_BUILTIN3: /* ..., arg1, arg2, arg3 ==> ... */
3121 /* op1 = return type, val.a = function pointer*/
3125 case ICMD_BUILTIN2: /* ..., arg1, arg2 ==> ... */
3126 /* op1 = return type, val.a = function pointer*/
3130 case ICMD_BUILTIN1: /* ..., arg1 ==> ... */
3131 /* op1 = return type, val.a = function pointer*/
3135 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3136 /* op1 = arg count, val.a = method pointer */
3138 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3139 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
3140 case ICMD_INVOKEINTERFACE:
3150 MCODECHECK((s3 << 1) + 64);
3152 /* copy arguments to registers or stack location ******************/
3154 /* count integer and float arguments */
3159 for (s2 = s3, tmpsrc = src; --s2 >= 0; tmpsrc = tmpsrc->prev) {
3160 IS_INT_LNG_TYPE(tmpsrc->type) ? iarg++ : farg++;
3163 /* calculate amount of arguments to be on stack */
/* NOTE(review): as written, ?: binds tighter than intended around the     */
/* '0 +' -- looks like a precedence bug inherited from the original;       */
/* confirm against upstream before relying on s2 here.                     */
3165 s2 = (iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0 +
3166 (farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0;
/* walk arguments right-to-left, decrementing the per-kind counter so      */
/* iarg/farg index the register for the argument being placed              */
3168 for (; --s3 >= 0; src = src->prev) {
3169 /* decrement the current argument type */
3170 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
3172 if (src->varkind == ARGVAR) {
3173 if (IS_INT_LNG_TYPE(src->type)) {
3174 if (iarg >= INT_ARG_CNT) {
3178 if (farg >= FLT_ARG_CNT) {
3185 if (IS_INT_LNG_TYPE(src->type)) {
3186 if (iarg < INT_ARG_CNT) {
3187 s1 = rd->argintregs[iarg];
3188 var_to_reg_int(d, src, s1);
3192 var_to_reg_int(d, src, REG_ITMP1);
3194 x86_64_mov_reg_membase(cd, d, REG_SP, s2 * 8);
3198 if (farg < FLT_ARG_CNT) {
3199 s1 = rd->argfltregs[farg];
3200 var_to_reg_flt(d, src, s1);
3204 var_to_reg_flt(d, src, REG_FTMP1);
3206 x86_64_movq_reg_membase(cd, d, REG_SP, s2 * 8);
3212 switch (iptr->opc) {
/* builtin call: possibly patch the function address, then call through    */
/* REG_ITMP1 (case labels for the BUILTINn opcodes are elided here)        */
3219 codegen_addpatchref(cd, cd->mcodeptr,
3220 (functionptr) lm, iptr->target);
3222 if (showdisassemble) {
3223 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3232 x86_64_mov_imm_reg(cd, a, REG_ITMP1);
3233 x86_64_call_reg(cd, REG_ITMP1);
/* INVOKESPECIAL: explicit null check on the receiver (software, branching */
/* to the null-pointer exception stubs), then falls through to the static  */
/* call path                                                               */
3236 case ICMD_INVOKESPECIAL:
3237 x86_64_test_reg_reg(cd, rd->argintregs[0], rd->argintregs[0]);
3238 x86_64_jcc(cd, X86_64_CC_Z, 0);
3239 codegen_addxnullrefs(cd, cd->mcodeptr);
3241 /* first argument contains pointer */
3242 /* gen_nullptr_check(rd->argintregs[0]); */
3244 /* access memory for hardware nullptr */
3245 /* x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); */
3249 case ICMD_INVOKESTATIC:
/* unresolved: patcher fills in the stub address; resolved: call the       */
/* method's stubroutine directly                                           */
3251 unresolved_method *um = iptr->target;
3253 codegen_addpatchref(cd, cd->mcodeptr,
3254 PATCHER_invokestatic_special, um);
3256 if (showdisassemble) {
3257 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3261 d = um->methodref->parseddesc.md->returntype.type;
3264 a = (ptrint) lm->stubroutine;
3265 d = lm->parseddesc->returntype.type;
3268 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3269 x86_64_call_reg(cd, REG_ITMP2);
/* INVOKEVIRTUAL: load vftbl from the receiver, then the method pointer at */
/* its vftbl index (membase32 form leaves room for the patcher)            */
3272 case ICMD_INVOKEVIRTUAL:
3273 gen_nullptr_check(rd->argintregs[0]);
3276 unresolved_method *um = iptr->target;
3278 codegen_addpatchref(cd, cd->mcodeptr,
3279 PATCHER_invokevirtual, um);
3281 if (showdisassemble) {
3282 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3286 d = um->methodref->parseddesc.md->returntype.type;
3289 s1 = OFFSET(vftbl_t, table[0]) +
3290 sizeof(methodptr) * lm->vftblindex;
3291 d = lm->parseddesc->returntype.type;
3294 x86_64_mov_membase_reg(cd, rd->argintregs[0],
3295 OFFSET(java_objectheader, vftbl),
3297 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP1);
3298 x86_64_call_reg(cd, REG_ITMP1);
/* INVOKEINTERFACE: double indirection -- vftbl, then the per-interface    */
/* table (negative offset by interface index), then the method slot        */
3301 case ICMD_INVOKEINTERFACE:
3302 gen_nullptr_check(rd->argintregs[0]);
3305 unresolved_method *um = iptr->target;
3307 codegen_addpatchref(cd, cd->mcodeptr,
3308 PATCHER_invokeinterface, um);
3310 if (showdisassemble) {
3311 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3316 d = um->methodref->parseddesc.md->returntype.type;
3319 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3320 sizeof(methodptr) * lm->class->index;
3322 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3324 d = lm->parseddesc->returntype.type;
3327 x86_64_mov_membase_reg(cd, rd->argintregs[0],
3328 OFFSET(java_objectheader, vftbl),
3330 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP2);
3331 x86_64_mov_membase32_reg(cd, REG_ITMP2, s2, REG_ITMP1);
3332 x86_64_call_reg(cd, REG_ITMP1);
/* common tail: store the call result (if any) into the destination var */
3336 /* d contains return type */
3338 if (d != TYPE_VOID) {
3339 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3340 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3341 M_INTMOVE(REG_RESULT, s1);
3342 store_reg_to_var_int(iptr->dst, s1);
3345 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3346 M_FLTMOVE(REG_FRESULT, s1);
3347 store_reg_to_var_flt(iptr->dst, s1);
/* CHECKCAST: emits both an interface-check and a class-check sequence     */
/* when the superclass is unresolved (the runtime-patched flags test then  */
/* selects which one runs); when resolved, only the applicable sequence is */
/* emitted. s2/s3 pre-compute the byte sizes of the two sequences so the   */
/* selection jcc/jmp displacements can be encoded before the code exists.  */
3354 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3356 /* op1: 0 == array, 1 == class */
3357 /* val.a: (classinfo *) superclass */
3359 /* superclass is an interface:
3361 * OK if ((sub == NULL) ||
3362 * (sub->vftbl->interfacetablelength > super->index) &&
3363 * (sub->vftbl->interfacetable[-super->index] != NULL));
3365 * superclass is a class:
3367 * OK if ((sub == NULL) || (0
3368 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3369 * super->vftbl->diffval));
3374 vftbl_t *supervftbl;
3377 super = (classinfo *) iptr->val.a;
3384 superindex = super->index;
3385 supervftbl = super->vftbl;
3388 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3389 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3391 var_to_reg_int(s1, src, REG_ITMP1);
3393 /* calculate interface checkcast code size */
3395 s2 = 3; /* mov_membase_reg */
3396 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3398 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub imm32 */ +
3399 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3400 3 /* test */ + 6 /* jcc */;
3403 s2 += (showdisassemble ? 5 : 0);
3405 /* calculate class checkcast code size */
3407 s3 = 3; /* mov_membase_reg */
3408 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3409 s3 += 10 /* mov_imm_reg */ + 3 + 4 /* movl_membase32_reg */;
3412 if (s1 != REG_ITMP1) {
3413 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3414 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3415 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3416 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3422 s3 += 3 + 4 /* movl_membase32_reg */ + 3 /* sub */ +
3423 10 /* mov_imm_reg */ + 3 /* movl_membase_reg */;
3424 CALCOFFSETBYTES(s3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3427 s3 += 3 /* cmp */ + 6 /* jcc */;
3430 s3 += (showdisassemble ? 5 : 0);
3432 /* if class is not resolved, check which code to call */
/* null object passes checkcast: skip over everything */
3435 x86_64_test_reg_reg(cd, s1, s1);
3436 x86_64_jcc(cd, X86_64_CC_Z, 6 + (showdisassemble ? 5 : 0) + 7 + 6 + s2 + 5 + s3);
3438 codegen_addpatchref(cd, cd->mcodeptr,
3439 PATCHER_checkcast_instanceof_flags,
3440 (constant_classref *) iptr->target);
3442 if (showdisassemble) {
3443 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* the imm 0 below is patched to the real super->flags at resolve time */
3446 x86_64_movl_imm_reg(cd, 0, REG_ITMP2); /* super->flags */
3447 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP2);
3448 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3451 /* interface checkcast code */
3453 if (!super || (super->flags & ACC_INTERFACE)) {
3455 x86_64_test_reg_reg(cd, s1, s1);
3456 x86_64_jcc(cd, X86_64_CC_Z, s2);
3459 x86_64_mov_membase_reg(cd, s1,
3460 OFFSET(java_objectheader, vftbl),
3464 codegen_addpatchref(cd, cd->mcodeptr,
3465 PATCHER_checkcast_instanceof_interface,
3466 (constant_classref *) iptr->target);
3468 if (showdisassemble) {
3469 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* interfacetablelength must exceed superindex, and the table entry must   */
/* be non-NULL; either failure branches to the ClassCastException stubs    */
3473 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3474 OFFSET(vftbl_t, interfacetablelength),
3476 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3477 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3478 x86_64_jcc(cd, X86_64_CC_LE, 0);
3479 codegen_addxcastrefs(cd, cd->mcodeptr);
3480 x86_64_mov_membase32_reg(cd, REG_ITMP2,
3481 OFFSET(vftbl_t, interfacetable[0]) -
3482 superindex * sizeof(methodptr*),
3484 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3485 x86_64_jcc(cd, X86_64_CC_E, 0);
3486 codegen_addxcastrefs(cd, cd->mcodeptr);
/* skip the class-check sequence that follows (unresolved case only) */
3489 x86_64_jmp_imm(cd, s3);
3492 /* class checkcast code */
3494 if (!super || !(super->flags & ACC_INTERFACE)) {
3496 x86_64_test_reg_reg(cd, s1, s1);
3497 x86_64_jcc(cd, X86_64_CC_Z, s3);
3500 x86_64_mov_membase_reg(cd, s1,
3501 OFFSET(java_objectheader, vftbl),
3505 codegen_addpatchref(cd, cd->mcodeptr,
3506 PATCHER_checkcast_class,
3507 (constant_classref *) iptr->target);
3509 if (showdisassemble) {
3510 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3514 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP3);
3515 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3516 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
/* subtype test: (sub->baseval - super->baseval) must be unsigned <=       */
/* super->diffval; done inside the thread-critical region because the      */
/* baseval/diffval pair can be renumbered concurrently                     */
3518 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3519 OFFSET(vftbl_t, baseval),
3521 /* if (s1 != REG_ITMP1) { */
3522 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3523 /* OFFSET(vftbl_t, baseval), */
3525 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3526 /* OFFSET(vftbl_t, diffval), */
3528 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3529 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3531 /* x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP1, REG_ITMP2); */
3534 x86_64_movl_membase32_reg(cd, REG_ITMP3,
3535 OFFSET(vftbl_t, baseval),
3537 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP2);
3538 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP3);
3539 x86_64_movl_membase_reg(cd, REG_ITMP3,
3540 OFFSET(vftbl_t, diffval),
3543 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3544 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3546 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP2);
3547 x86_64_jcc(cd, X86_64_CC_A, 0); /* (u) REG_ITMP2 > (u) REG_ITMP3 -> jump */
3548 codegen_addxcastrefs(cd, cd->mcodeptr);
3550 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3552 store_reg_to_var_int(iptr->dst, d);
3553 /* if (iptr->dst->flags & INMEMORY) { */
3554 /* x86_64_mov_reg_membase(cd, s1, REG_SP, iptr->dst->regoff * 8); */
3556 /* M_INTMOVE(s1, iptr->dst->regoff); */
3561 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3563 /* op1: 0 == array, 1 == class */
3564 /* val.a: (classinfo *) superclass */
3566 /* superclass is an interface:
3568 * return (sub != NULL) &&
3569 * (sub->vftbl->interfacetablelength > super->index) &&
3570 * (sub->vftbl->interfacetable[-super->index] != NULL);
3572 * superclass is a class:
3574 * return ((sub != NULL) && (0
3575 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3576 	 *         super->vftbl->diffval));
3581 vftbl_t *supervftbl;
3584 super = (classinfo *) iptr->val.a;
3591 superindex = super->index;
3592 supervftbl = super->vftbl;
3595 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3596 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3599 var_to_reg_int(s1, src, REG_ITMP1);
3600 d = reg_of_var(rd, iptr->dst, REG_ITMP2);
3602 M_INTMOVE(s1, REG_ITMP1);
3606 /* calculate interface instanceof code size */
3608 s2 = 3; /* mov_membase_reg */
3609 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3610 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub_imm32 */ +
3611 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3612 3 /* test */ + 4 /* setcc */;
3615 s2 += (showdisassemble ? 5 : 0);
3617 /* calculate class instanceof code size */
3619 s3 = 3; /* mov_membase_reg */
3620 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3621 s3 += 10; /* mov_imm_reg */
3622 s3 += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3623 CALCOFFSETBYTES(s3, REG_ITMP1, OFFSET(vftbl_t, baseval));
3624 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3625 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, baseval));
3626 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3627 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3628 s3 += 3 /* sub */ + 3 /* xor */ + 3 /* cmp */ + 4 /* setcc */;
3631 s3 += (showdisassemble ? 5 : 0);
3633 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3635 /* if class is not resolved, check which code to call */
3638 x86_64_test_reg_reg(cd, s1, s1);
3639 x86_64_jcc(cd, X86_64_CC_Z, (6 + (showdisassemble ? 5 : 0) +
3640 7 + 6 + s2 + 5 + s3));
3642 codegen_addpatchref(cd, cd->mcodeptr,
3643 PATCHER_checkcast_instanceof_flags,
3644 (constant_classref *) iptr->target);
3646 if (showdisassemble) {
3647 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3650 x86_64_movl_imm_reg(cd, 0, REG_ITMP3); /* super->flags */
3651 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP3);
3652 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3655 /* interface instanceof code */
3657 if (!super || (super->flags & ACC_INTERFACE)) {
3659 x86_64_test_reg_reg(cd, s1, s1);
3660 x86_64_jcc(cd, X86_64_CC_Z, s2);
3663 x86_64_mov_membase_reg(cd, s1,
3664 OFFSET(java_objectheader, vftbl),
3667 codegen_addpatchref(cd, cd->mcodeptr,
3668 PATCHER_checkcast_instanceof_interface,
3669 (constant_classref *) iptr->target);
3671 if (showdisassemble) {
3672 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3676 x86_64_movl_membase32_reg(cd, REG_ITMP1,
3677 OFFSET(vftbl_t, interfacetablelength),
3679 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3680 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3682 a = 3 + 4 /* mov_membase32_reg */ + 3 /* test */ + 4 /* setcc */;
3684 x86_64_jcc(cd, X86_64_CC_LE, a);
3685 x86_64_mov_membase32_reg(cd, REG_ITMP1,
3686 OFFSET(vftbl_t, interfacetable[0]) -
3687 superindex * sizeof(methodptr*),
3689 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3690 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3693 x86_64_jmp_imm(cd, s3);
3696 /* class instanceof code */
3698 if (!super || !(super->flags & ACC_INTERFACE)) {
3700 x86_64_test_reg_reg(cd, s1, s1);
3701 x86_64_jcc(cd, X86_64_CC_E, s3);
3704 x86_64_mov_membase_reg(cd, s1,
3705 OFFSET(java_objectheader, vftbl),
3709 codegen_addpatchref(cd, cd->mcodeptr,
3710 PATCHER_instanceof_class,
3711 (constant_classref *) iptr->target);
3713 if (showdisassemble) {
3714 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3718 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP2);
3719 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3720 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3722 x86_64_movl_membase_reg(cd, REG_ITMP1,
3723 OFFSET(vftbl_t, baseval),
3725 x86_64_movl_membase_reg(cd, REG_ITMP2,
3726 OFFSET(vftbl_t, diffval),
3728 x86_64_movl_membase_reg(cd, REG_ITMP2,
3729 OFFSET(vftbl_t, baseval),
3731 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3732 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3734 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
3735 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d); /* may be REG_ITMP2 */
3736 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP1);
3737 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3739 store_reg_to_var_int(iptr->dst, d);
3743 case ICMD_CHECKASIZE: /* ..., size ==> ..., size */
3745 if (src->flags & INMEMORY) {
3746 x86_64_alul_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
3749 x86_64_testl_reg_reg(cd, src->regoff, src->regoff);
3751 x86_64_jcc(cd, X86_64_CC_L, 0);
3752 codegen_addxcheckarefs(cd, cd->mcodeptr);
3755 case ICMD_CHECKEXCEPTION: /* ... ==> ... */
3757 x86_64_test_reg_reg(cd, REG_RESULT, REG_RESULT);
3758 x86_64_jcc(cd, X86_64_CC_E, 0);
3759 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3762 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3763 /* op1 = dimension, val.a = array descriptor */
3765 /* check for negative sizes and copy sizes to stack if necessary */
3767 MCODECHECK((iptr->op1 << 1) + 64);
3769 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3770 var_to_reg_int(s2, src, REG_ITMP1);
3771 x86_64_testl_reg_reg(cd, s2, s2);
3772 x86_64_jcc(cd, X86_64_CC_L, 0);
3773 codegen_addxcheckarefs(cd, cd->mcodeptr);
3775 /* copy SAVEDVAR sizes to stack */
3777 if (src->varkind != ARGVAR) {
3778 x86_64_mov_reg_membase(cd, s2, REG_SP, s1 * 8);
3782 /* is a patcher function set? */
3785 codegen_addpatchref(cd, cd->mcodeptr,
3786 (functionptr) iptr->target, iptr->val.a);
3788 if (showdisassemble) {
3789 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3795 a = (ptrint) iptr->val.a;
3798 /* a0 = dimension count */
3800 x86_64_mov_imm_reg(cd, iptr->op1, rd->argintregs[0]);
3802 /* a1 = arrayvftbl */
3804 x86_64_mov_imm_reg(cd, (ptrint) iptr->val.a, rd->argintregs[1]);
3806 /* a2 = pointer to dimensions = stack pointer */
3808 x86_64_mov_reg_reg(cd, REG_SP, rd->argintregs[2]);
3810 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_multianewarray, REG_ITMP1);
3811 x86_64_call_reg(cd, REG_ITMP1);
3813 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3814 M_INTMOVE(REG_RESULT, s1);
3815 store_reg_to_var_int(iptr->dst, s1);
3819 throw_cacao_exception_exit(string_java_lang_InternalError,
3820 "Unknown ICMD %d", iptr->opc);
3823 } /* for instruction */
3825 /* copy values to interface registers */
3827 src = bptr->outstack;
3828 len = bptr->outdepth;
3829 MCODECHECK(64 + len);
3835 if ((src->varkind != STACKVAR)) {
3837 if (IS_FLT_DBL_TYPE(s2)) {
3838 var_to_reg_flt(s1, src, REG_FTMP1);
3839 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3840 M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3843 x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3847 var_to_reg_int(s1, src, REG_ITMP1);
3848 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3849 M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3852 x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3858 } /* if (bptr -> flags >= BBREACHED) */
3859 } /* for basic block */
3861 codegen_createlinenumbertable(cd);
3865 /* generate bound check stubs */
3867 u1 *xcodeptr = NULL;
3870 for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3871 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3873 cd->mcodeptr - cd->mcodebase);
3877 /* move index register into REG_ITMP1 */
3878 x86_64_mov_reg_reg(cd, bref->reg, REG_ITMP1); /* 3 bytes */
3880 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3881 dseg_adddata(cd, cd->mcodeptr);
3882 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3883 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3885 if (xcodeptr != NULL) {
3886 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3889 xcodeptr = cd->mcodeptr;
3892 /*create stackinfo -- begin*/
3893 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
3894 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
3895 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
3896 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
3897 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
3898 x86_64_call_reg(cd,REG_ITMP3);
3899 /*create stackinfo -- end*/
3901 x86_64_mov_reg_reg(cd, REG_ITMP1, rd->argintregs[0]);
3902 x86_64_mov_imm_reg(cd, (ptrint) new_arrayindexoutofboundsexception, REG_ITMP3);
3903 x86_64_call_reg(cd, REG_ITMP3);
3905 /*remove stackinfo -- begin*/
3906 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
3907 x86_64_call_reg(cd,REG_ITMP3);
3908 /*remove stackinfo -- end*/
3910 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
3911 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3913 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
3914 x86_64_jmp_reg(cd, REG_ITMP3);
3918 /* generate negative array size check stubs */
3922 for (bref = cd->xcheckarefs; bref != NULL; bref = bref->next) {
3923 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3924 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3926 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3930 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3932 cd->mcodeptr - cd->mcodebase);
3936 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3937 dseg_adddata(cd, cd->mcodeptr);
3938 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3939 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3941 if (xcodeptr != NULL) {
3942 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3945 xcodeptr = cd->mcodeptr;
3948 /*create stackinfo -- begin*/
3949 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
3950 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
3951 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
3952 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
3953 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
3954 x86_64_call_reg(cd,REG_ITMP3);
3955 /*create stackinfo -- end*/
3957 x86_64_mov_imm_reg(cd, (u8) new_negativearraysizeexception, REG_ITMP3);
3958 x86_64_call_reg(cd, REG_ITMP3);
3960 /*remove stackinfo -- begin*/
3961 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
3962 x86_64_call_reg(cd,REG_ITMP3);
3963 /*remove stackinfo -- end*/
3965 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
3966 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3968 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3969 x86_64_jmp_reg(cd, REG_ITMP3);
3973 /* generate cast check stubs */
3977 for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3978 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3979 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3981 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3985 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3987 cd->mcodeptr - cd->mcodebase);
3991 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3992 dseg_adddata(cd, cd->mcodeptr);
3993 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3994 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3996 if (xcodeptr != NULL) {
3997 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
4000 xcodeptr = cd->mcodeptr;
4002 /*create stackinfo -- begin*/
4003 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
4004 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
4005 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
4006 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
4007 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
4008 x86_64_call_reg(cd,REG_ITMP3);
4009 /*create stackinfo -- end*/
4012 x86_64_mov_imm_reg(cd, (u8) new_classcastexception, REG_ITMP3);
4013 x86_64_call_reg(cd, REG_ITMP3);
4015 /*remove stackinfo -- begin*/
4016 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
4017 x86_64_call_reg(cd,REG_ITMP3);
4018 /*remove stackinfo -- end*/
4020 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
4021 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
4023 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
4024 x86_64_jmp_reg(cd, REG_ITMP3);
4028 /* generate divide by zero check stubs */
4032 for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
4033 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
4034 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4036 xcodeptr - cd->mcodebase - (10 + 10 + 3));
4040 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4042 cd->mcodeptr - cd->mcodebase);
4046 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
4047 dseg_adddata(cd, cd->mcodeptr);
4048 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
4049 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
4051 if (xcodeptr != NULL) {
4052 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
4055 xcodeptr = cd->mcodeptr;
4057 /*create stackinfo -- begin*/
4058 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
4059 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
4060 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
4061 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
4062 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
4063 x86_64_call_reg(cd,REG_ITMP3);
4064 /*create stackinfo -- end*/
4066 x86_64_mov_imm_reg(cd, (u8) new_arithmeticexception, REG_ITMP3);
4067 x86_64_call_reg(cd, REG_ITMP3);
4069 /*remove stackinfo -- begin*/
4070 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
4071 x86_64_call_reg(cd,REG_ITMP3);
4072 /*remove stackinfo -- end*/
4074 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
4075 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
4077 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
4078 x86_64_jmp_reg(cd, REG_ITMP3);
4082 /* generate exception check stubs */
4086 for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
4087 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
4088 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4090 xcodeptr - cd->mcodebase - (10 + 10 + 3));
4094 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4096 cd->mcodeptr - cd->mcodebase);
4100 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
4101 dseg_adddata(cd, cd->mcodeptr);
4102 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
4103 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
4105 if (xcodeptr != NULL) {
4106 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
4109 xcodeptr = cd->mcodeptr;
4113 x86_64_alu_imm_reg(cd, X86_64_SUB, 4*8, REG_SP);
4114 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3*8);
4115 x86_64_mov_imm_membase(cd, 0, REG_SP, 2*8);
4116 x86_64_mov_imm_membase(cd, 0, REG_SP, 1*8);
4117 x86_64_mov_imm_membase(cd, 0, REG_SP, 0*8);
4118 x86_64_mov_imm_reg(cd,(u8) asm_prepare_native_stackinfo,REG_ITMP1);
4119 x86_64_call_reg(cd,REG_ITMP1);
4122 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4123 x86_64_mov_imm_reg(cd, (u8) &builtin_get_exceptionptrptr, REG_ITMP1);
4124 x86_64_call_reg(cd, REG_ITMP1);
4125 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
4126 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
4127 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
4129 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4130 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP1_XPTR);
4131 x86_64_mov_imm_membase(cd, 0, REG_ITMP3, 0);
4133 x86_64_mov_reg_reg(cd,REG_ITMP1_XPTR,RDI);
4134 x86_64_mov_imm_reg(cd,(u8) helper_fillin_stacktrace_always,REG_ITMP1);
4135 x86_64_call_reg(cd,REG_ITMP1);
4136 x86_64_mov_reg_reg(cd,REG_RESULT,REG_ITMP1_XPTR);
4138 x86_64_mov_imm_reg(cd,(u8) asm_remove_native_stackinfo,REG_ITMP2);
4139 x86_64_call_reg(cd,REG_ITMP2);
4141 x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
4142 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);
4143 x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
4146 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
4147 x86_64_jmp_reg(cd, REG_ITMP3);
4151 /* generate null pointer check stubs */
4155 for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
4156 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
4157 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4159 xcodeptr - cd->mcodebase - (10 + 10 + 3));
4163 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4165 cd->mcodeptr - cd->mcodebase);
4169 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
4170 dseg_adddata(cd, cd->mcodeptr);
4171 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
4172 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
4174 if (xcodeptr != NULL) {
4175 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
4178 xcodeptr = cd->mcodeptr;
4180 /*create stackinfo -- begin*/
4181 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
4182 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
4183 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
4184 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
4185 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
4186 x86_64_call_reg(cd,REG_ITMP3);
4187 /*create stackinfo -- end*/
4190 x86_64_mov_imm_reg(cd, (ptrint) new_nullpointerexception, REG_ITMP3);
4191 x86_64_call_reg(cd, REG_ITMP3);
4193 /*remove stackinfo -- begin*/
4194 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
4195 x86_64_call_reg(cd,REG_ITMP3);
4196 /*remove stackinfo -- end*/
4198 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
4199 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
4201 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
4202 x86_64_jmp_reg(cd, REG_ITMP3);
4206 /* generate code patching stub call code */
4213 tmpcd = DNEW(codegendata);
4215 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
4218 /* Get machine code which is patched back in later. A */
4219 /* `call rel32' is 5 bytes long (but read 8 bytes). */
4220 xcodeptr = cd->mcodebase + pref->branchpos;
4221 mcode = *((ptrint *) xcodeptr);
4223 /* patch in `call rel32' to call the following code */
4224 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4225 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4227 /* move machine code bytes and classinfo pointer into registers */
4228 x86_64_mov_imm_reg(cd, (ptrint) mcode, REG_ITMP3);
4229 x86_64_push_reg(cd, REG_ITMP3);
4230 x86_64_mov_imm_reg(cd, (ptrint) pref->ref, REG_ITMP3);
4231 x86_64_push_reg(cd, REG_ITMP3);
4233 x86_64_mov_imm_reg(cd, (ptrint) pref->patcher, REG_ITMP3);
4234 x86_64_push_reg(cd, REG_ITMP3);
4236 x86_64_mov_imm_reg(cd, (ptrint) asm_wrapper_patcher, REG_ITMP3);
4237 x86_64_jmp_reg(cd, REG_ITMP3);
4242 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
4246 /* function createcompilerstub *************************************************
4248 creates a stub routine which calls the compiler
4250 *******************************************************************************/
4252 #define COMPSTUBSIZE 23
/* createcompilerstub: build a tiny trampoline for a not-yet-compiled method.
   The stub loads the methodinfo pointer into REG_ITMP1 and jumps to
   asm_call_jit_compiler, which JIT-compiles the method on first invocation.
   NOTE(review): this listing is elided — the opening brace, the codegendata
   initialisation after DNEW, the #endif of the STATISTICS block and the
   final `return s;` are not visible in this excerpt; confirm against the
   full file before relying on control flow here. */
4254 u1 *createcompilerstub(methodinfo *m)
4256 	u1 *s = CNEW(u1, COMPSTUBSIZE);     /* memory to hold the stub       */
4260 	/* mark start of dump memory area */
4262 	dumpsize = dump_size();
4264 	cd = DNEW(codegendata);
4267 	/* code for the stub */
	/* emit: mov $m, REG_ITMP1; mov $asm_call_jit_compiler, REG_ITMP3; jmp *REG_ITMP3 */
4268 	x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1);  /* pass method to compiler     */
4269 	x86_64_mov_imm_reg(cd, (u8) asm_call_jit_compiler, REG_ITMP3);/* load address */
4270 	x86_64_jmp_reg(cd, REG_ITMP3);      /* jump to compiler              */
4272 #if defined(STATISTICS)
4274 		count_cstub_len += COMPSTUBSIZE;
4277 	/* release dump area */
4279 	dump_release(dumpsize);
4285 /* function removecompilerstub *************************************************
4287 deletes a compilerstub from memory (simply by freeing it)
4289 *******************************************************************************/
/* removecompilerstub: release a stub allocated by createcompilerstub.
   CFREE must be given the same size (COMPSTUBSIZE) that CNEW allocated.
   NOTE(review): surrounding braces are elided in this excerpt. */
4291 void removecompilerstub(u1 *stub)
4293 	CFREE(stub, COMPSTUBSIZE);
4297 /* function: createnativestub **************************************************
4299 creates a stub routine which calls a native method
4301 *******************************************************************************/
4303 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
4304 /* static java_objectheader **(*callgetexceptionptrptr)() = builtin_get_exceptionptrptr; */
4307 #define NATIVESTUBSIZE 800 /* keep this size high enough! */
4308 #define NATIVESTUB_DATA_SIZE (7*8)
4310 u1 *createnativestub(functionptr f, methodinfo *m)
4312 u1 *s; /* pointer to stub memory */
4315 t_inlining_globals *id;
4317 s4 stackframesize; /* size of stackframe if needed */
4319 s4 iargs; /* count of integer arguments */
4320 s4 fargs; /* count of float arguments */
4324 bool require_clinit_call;
4326 void **callAddrPatchPos=0;
4328 void **jmpInstrPatchPos=0;
4330 /* initialize variables */
4335 /* mark start of dump memory area */
4337 dumpsize = dump_size();
4339 cd = DNEW(codegendata);
4340 rd = DNEW(registerdata);
4341 id = DNEW(t_inlining_globals);
4343 /* setup registers before using it */
4345 inlining_setup(m, id);
4346 reg_setup(m, rd, id);
4348 /* set paramcount and paramtypes */
4350 method_descriptor2types(m);
4352 /* count integer and float arguments */
4354 tptr = m->paramtypes;
4355 for (i = 0; i < m->paramcount; i++) {
4356 IS_INT_LNG_TYPE(*tptr++) ? iargs++ : fargs++;
4359 stubsize=NATIVESTUBSIZE;
4360 require_clinit_call= ((m->flags & ACC_STATIC) && !m->class->initialized);
4361 if (require_clinit_call) stubsize+=NATIVESTUB_DATA_SIZE;
4362 s = CNEW(u1, stubsize); /* memory to hold the stub */
4364 if (require_clinit_call) {
4365 cs = (u8*) (s+NATIVESTUB_DATA_SIZE);
4366 *(cs - 7) = 0; /* extable size,padding */
4367 *(cs - 6) = 0; /* line number table start */
4368 *(cs - 5) = 0; /* line number table size */
4369 *(cs - 4) = 0; /* padding,fltsave */
4370 *(cs - 3) = 0; /* intsave=0,isleaf=0 */
4371 *(cs - 2) = 0x0000000000000000; /* frame size=0 (stack misalignment) issync=0 */
4373 *(cs - 2) = 0x0000000100000000; /* frame size=1 issync=0 */
4375 *(cs - 1) = (u8) m; /* method pointer */
4380 	/* set some required variables which are normally set by codegen_setup */
4381 cd->mcodebase = (u1*)cs;
4382 cd->mcodeptr = (u1*)cs;
4383 cd->patchrefs = NULL;
4385 /* if function is static, check for initialized */
4387 if (require_clinit_call) {
4388 codegen_addpatchref(cd, cd->mcodeptr, PATCHER_clinit, m->class);
4394 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4396 /* save integer and float argument registers */
4398 for (i = 0; i < INT_ARG_CNT; i++) {
4399 x86_64_mov_reg_membase(cd, rd->argintregs[i], REG_SP, (1 + i) * 8);
4402 for (i = 0; i < FLT_ARG_CNT; i++) {
4403 x86_64_movq_reg_membase(cd, rd->argfltregs[i], REG_SP, (1 + INT_ARG_CNT + i) * 8);
4406 /* show integer hex code for float arguments */
4408 for (i = 0, l = 0; i < m->paramcount && i < INT_ARG_CNT; i++) {
4409 /* if the paramtype is a float, we have to right shift all */
4410 /* following integer registers */
4412 if (IS_FLT_DBL_TYPE(m->paramtypes[i])) {
4413 for (s1 = INT_ARG_CNT - 2; s1 >= i; s1--) {
4414 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
4417 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[i]);
4422 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1);
4423 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, 0 * 8);
4424 x86_64_mov_imm_reg(cd, (u8) builtin_trace_args, REG_ITMP1);
4425 x86_64_call_reg(cd, REG_ITMP1);
4427 /* restore integer and float argument registers */
4429 for (i = 0; i < INT_ARG_CNT; i++) {
4430 x86_64_mov_membase_reg(cd, REG_SP, (1 + i) * 8, rd->argintregs[i]);
4433 for (i = 0; i < FLT_ARG_CNT; i++) {
4434 x86_64_movq_membase_reg(cd, REG_SP, (1 + INT_ARG_CNT + i) * 8, rd->argfltregs[i]);
4437 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4440 /* 4 == additional size needed for native stack frame information*/
4441 x86_64_alu_imm_reg(cd, X86_64_SUB, (4+INT_ARG_CNT + FLT_ARG_CNT+1) * 8, REG_SP);
4443 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 0 * 8);
4444 x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 1 * 8);
4445 x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 2 * 8);
4446 x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 3 * 8);
4447 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 4 * 8);
4448 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 5 * 8);
4450 x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 6 * 8);
4451 x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 7 * 8);
4452 x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 8 * 8);
4453 x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 9 * 8);
4454 x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 10 * 8);
4455 x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 11 * 8);
4456 x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 12 * 8);
4457 x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 13 * 8);
4461 0*8 void *oldThreadspecificHeadValue;
4462 1*8 void **addressOfThreadspecificHead;
4463 2*8 methodinfo *method;
4464 3*8 void *beginOfJavaStackframe; only used if != 0
4465 4*8 void *returnToFromNative;
4468 /* CREATE DYNAMIC STACK INFO -- BEGIN offsets:15,16,17,18*/
4469 x86_64_mov_imm_membase(cd, 0, REG_SP, 18*8);
4470 x86_64_mov_imm_membase(cd, (u8)m, REG_SP, 17*8);
4472 x86_64_mov_imm_reg(cd, (u8) builtin_asm_get_stackframeinfo,REG_ITMP1);
4473 x86_64_call_reg(cd,REG_ITMP1);
4475 x86_64_mov_reg_membase(cd,REG_RESULT,REG_SP,16*8);
4476 x86_64_mov_membase_reg(cd,REG_RESULT,0,REG_ITMP2);
4477 x86_64_mov_reg_membase(cd,REG_ITMP2,REG_SP,15*8);
4478 x86_64_mov_reg_reg(cd,REG_SP,REG_ITMP2);
4479 x86_64_alu_imm_reg(cd, X86_64_ADD, (1+INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_ITMP2);
4480 x86_64_mov_reg_membase(cd,REG_ITMP2,REG_RESULT,0);
4484 i386_mov_imm_membase(cd,0,REG_SP,stackframesize-4);
4485 i386_mov_imm_membase(cd, (s4) m, REG_SP,stackframesize-8);
4486 i386_mov_imm_reg(cd, (s4) builtin_asm_get_stackframeinfo, REG_ITMP1);
4487 i386_call_reg(cd, REG_ITMP1);
4488 i386_mov_reg_membase(cd, REG_RESULT,REG_SP,stackframesize-12); /*save thread specific pointer*/
4489 i386_mov_membase_reg(cd, REG_RESULT,0,REG_ITMP2);
4490 i386_mov_reg_membase(cd, REG_ITMP2,REG_SP,stackframesize-16); /*save previous value of memory adress pointed to by thread specific pointer*/
4491 i386_mov_reg_reg(cd, REG_SP,REG_ITMP2);
4492 i386_alu_imm_reg(cd, I386_ADD,stackframesize-16,REG_ITMP2);
4493 i386_mov_reg_membase(cd, REG_ITMP2,REG_RESULT,0);
4495 /* CREATE DYNAMIC STACK INFO -- END*/
4498 #if !defined(STATIC_CLASSPATH)
4499 /* call method to resolve native function if needed */
4501 /* needed to patch a jump over this block */
4502 x86_64_jmp_imm(cd, 0);
4503 jmpInstrPos = cd->mcodeptr - 4;
4506 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
4508 x86_64_mov_imm_reg(cd, 0, rd->argintregs[1]);
4509 callAddrPatchPos = cd->mcodeptr - 8; /* at this position the place is specified where the native function adress should be patched into*/
4511 x86_64_mov_imm_reg(cd, 0, rd->argintregs[2]);
4512 jmpInstrPatchPos = cd->mcodeptr - 8;
4514 x86_64_mov_imm_reg(cd, jmpInstrPos, rd->argintregs[3]);
4516 x86_64_mov_imm_reg(cd, (u8) codegen_resolve_native, REG_ITMP1);
4517 x86_64_call_reg(cd, REG_ITMP1);
4519 *(jmpInstrPatchPos) = cd->mcodeptr - jmpInstrPos - 1-3; /*=opcode jmp_imm size*/
4526 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, rd->argintregs[0]);
4527 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[1]);
4528 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[2]);
4529 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[3]);
4530 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[4]);
4531 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[5]);
4533 x86_64_movq_membase_reg(cd, REG_SP, 6 * 8, rd->argfltregs[0]);
4534 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[1]);
4535 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[2]);
4536 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[3]);
4537 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[4]);
4538 x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[5]);
4539 x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[6]);
4540 x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[7]);
4542 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT+1) * 8, REG_SP);
4544 /* save argument registers on stack -- if we have to */
4546 if ((((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) || iargs > (INT_ARG_CNT - 1)) ||
4547 (fargs > FLT_ARG_CNT)) {
4554 /* do we need to shift integer argument register onto stack? */
4556 if ((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) {
4557 /* do we need to shift 2 arguments? */
4558 if (iargs > (INT_ARG_CNT - 1)) {
4565 } else if (iargs > (INT_ARG_CNT - 1)) {
4569 /* calculate required stack space */
4571 stackparamcnt += (iargs > INT_ARG_CNT) ? iargs - INT_ARG_CNT : 0;
4572 stackparamcnt += (fargs > FLT_ARG_CNT) ? fargs - FLT_ARG_CNT : 0;
4575 stackframesize = stackparamcnt + paramshiftcnt;
4577 /* keep stack 16-byte aligned */
4578 if (!(stackframesize & 0x1))
4581 x86_64_alu_imm_reg(cd, X86_64_SUB, stackframesize * 8, REG_SP);
4583 /* shift integer arguments if required */
4585 if ((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) {
4586 /* do we need to shift 2 arguments? */
4587 if (iargs > (INT_ARG_CNT - 1))
4588 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 1 * 8);
4590 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 0 * 8);
4592 } else if (iargs > (INT_ARG_CNT - 1)) {
4593 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 0 * 8);
4596 /* copy stack arguments into new stack frame -- if any */
4597 for (i = 0; i < stackparamcnt; i++) {
4598 x86_64_mov_membase_reg(cd, REG_SP, (stackframesize + 1 + i+4) * 8, REG_ITMP1); /* 4==additional size for stackrace data*/
4599 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
4603 /* keep stack 16-byte aligned */
4604 x86_64_alu_imm_reg(cd, X86_64_SUB, 1 * 8, REG_SP);
4608 /* shift integer arguments for `env' and `class' arguments */
4610 if (m->flags & ACC_STATIC) {
4611 /* shift iargs count if less than INT_ARG_CNT, or all */
4612 for (i = (iargs < (INT_ARG_CNT - 2)) ? iargs : (INT_ARG_CNT - 2); i >= 0; i--) {
4613 x86_64_mov_reg_reg(cd, rd->argintregs[i], rd->argintregs[i + 2]);
4616 /* put class into second argument register */
4617 x86_64_mov_imm_reg(cd, (u8) m->class, rd->argintregs[1]);
4620 /* shift iargs count if less than INT_ARG_CNT, or all */
4621 for (i = (iargs < (INT_ARG_CNT - 1)) ? iargs : (INT_ARG_CNT - 1); i >= 0; i--) {
4622 x86_64_mov_reg_reg(cd, rd->argintregs[i], rd->argintregs[i + 1]);
4626 /* put env into first argument register */
4627 x86_64_mov_imm_reg(cd, (u8) &env, rd->argintregs[0]);
4629 /* do the native function call */
4630 x86_64_mov_imm_reg(cd, (u8) f, REG_ITMP1);
4631 #if !defined(STATIC_CLASSPATH)
4633 (*callAddrPatchPos) = cd->mcodeptr - 8;
4635 x86_64_call_reg(cd, REG_ITMP1);
4637 /* remove stackframe if there is one */
4638 if (stackframesize) {
4639 x86_64_alu_imm_reg(cd, X86_64_ADD, stackframesize * 8, REG_SP);
4642 /*REMOVE DYNAMIC STACK INFO -BEGIN */
4643 x86_64_mov_reg_membase(cd,REG_RESULT,REG_SP,2*8);
4644 x86_64_mov_membase_reg(cd,REG_SP,0*8,REG_ITMP2);
4645 x86_64_mov_membase_reg(cd,REG_SP,1*8,REG_RESULT);
4646 x86_64_mov_reg_membase(cd,REG_ITMP2,REG_RESULT,0);
4647 x86_64_mov_membase_reg(cd,REG_SP,2*8,REG_RESULT);
4649 i386_push_reg(cd, REG_RESULT2);
4650 i386_mov_membase_reg(cd, REG_SP,stackframesize-12,REG_ITMP2); /*old value*/
4651 i386_mov_membase_reg(cd, REG_SP,stackframesize-8,REG_RESULT2); /*pointer*/
4652 i386_mov_reg_membase(cd, REG_ITMP2,REG_RESULT2,0);
4653 i386_pop_reg(cd, REG_RESULT2);
4655 /*REMOVE DYNAMIC STACK INFO -END */
4657 x86_64_alu_imm_reg(cd, X86_64_ADD, 4 * 8, REG_SP);
4660 x86_64_alu_imm_reg(cd, X86_64_SUB, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4662 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
4663 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
4665 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
4666 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
4667 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
4668 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
4670 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
4671 x86_64_call_reg(cd, REG_ITMP1);
4673 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
4674 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
4676 x86_64_alu_imm_reg(cd, X86_64_ADD, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4679 /* check for exception */
4681 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4682 x86_64_push_reg(cd, REG_RESULT);
4683 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4684 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4685 x86_64_call_reg(cd, REG_ITMP3);
4686 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
4687 x86_64_pop_reg(cd, REG_RESULT);
4689 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4690 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP3);
4692 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
4693 x86_64_jcc(cd, X86_64_CC_NE, 1);
4697 /* handle exception */
4699 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4700 x86_64_push_reg(cd, REG_ITMP3);
4701 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4702 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4703 x86_64_call_reg(cd, REG_ITMP3);
4704 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
4705 x86_64_pop_reg(cd, REG_ITMP1_XPTR);
4707 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
4708 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4709 x86_64_alu_reg_reg(cd, X86_64_XOR, REG_ITMP2, REG_ITMP2);
4710 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_ITMP3, 0); /* clear exception pointer */
4713 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC); /* get return address from stack */
4714 x86_64_alu_imm_reg(cd, X86_64_SUB, 3, REG_ITMP2_XPC); /* callq */
4716 x86_64_mov_imm_reg(cd, (u8) asm_handle_nat_exception, REG_ITMP3);
4717 x86_64_jmp_reg(cd, REG_ITMP3);
4720 /* patch in a <clinit> call if required ***********************************/
4728 tmpcd = DNEW(codegendata);
4730 /* there can only be one patch ref entry */
4731 pref = cd->patchrefs;
4734 /* Get machine code which is patched back in later. A */
4735 /* `call rel32' is 5 bytes long (but read 8 bytes). */
4736 xcodeptr = cd->mcodebase + pref->branchpos;
4737 mcode = *((ptrint *) xcodeptr);
4739 /* patch in `call rel32' to call the following code */
4740 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4741 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4743 /* move machine code bytes and classinfo pointer into registers */
4744 x86_64_mov_imm_reg(cd, (ptrint) mcode, REG_ITMP3);
4745 x86_64_push_reg(cd, REG_ITMP3);
4746 x86_64_mov_imm_reg(cd, (ptrint) pref->ref, REG_ITMP3);
4747 x86_64_push_reg(cd, REG_ITMP3);
4749 x86_64_mov_imm_reg(cd, (ptrint) pref->patcher, REG_ITMP3);
4750 x86_64_push_reg(cd, REG_ITMP3);
4752 x86_64_mov_imm_reg(cd, (ptrint) asm_wrapper_patcher, REG_ITMP3);
4753 x86_64_jmp_reg(cd, REG_ITMP3);
4755 codegen_insertmethod((functionptr) cs, (functionptr) cd->mcodeptr);
4756 printf("codegen_insertmethod (nativestub) %p - %p\n",cs,cd->mcodeptr);
4760 printf("(nativestub) %s: %p - %p\n",m->name->text,cs,cd->mcodeptr);
4762 /* Check if the stub size is big enough to hold the whole stub generated. */
4763 /* If not, this can lead into unpredictable crashes, because of heap */
4765 if ((s4) (cd->mcodeptr - s) > stubsize) {
4766 throw_cacao_exception_exit(string_java_lang_InternalError,
4767 "Native stub size %d is to small for current stub size %d",
4768 stubsize, (s4) (cd->mcodeptr - s));
4772 #if defined(STATISTICS)
4774 count_nstub_len += stubsize;
4777 /* release dump area */
4779 dump_release(dumpsize);
4785 /* function: removenativestub **************************************************
   Removes a previously created native stub from memory.
4789 *******************************************************************************/
4791 void removenativestub(u1 *stub)
4793 CFREE(stub, NATIVESTUBSIZE);
4798 * These are local overrides for various environment variables in Emacs.
4799 * Please do not remove this and leave it at the end of the file, where
4800 * Emacs will automagically detect them.
4801 * ---------------------------------------------------------------------
4804 * indent-tabs-mode: t