1 /* src/vm/jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996-2005, 2006 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
25 Contact: cacao@cacaojvm.org
27 Authors: Andreas Krall
30 Changes: Christian Ullrich
32 $Id: codegen.c 4508 2006-02-14 00:41:57Z twisti $
46 #include "vm/jit/x86_64/arch.h"
47 #include "vm/jit/x86_64/codegen.h"
48 #include "vm/jit/x86_64/emitfuncs.h"
50 #include "cacao/cacao.h"
51 #include "native/native.h"
52 #include "vm/builtin.h"
53 #include "vm/exceptions.h"
54 #include "vm/global.h"
55 #include "vm/loader.h"
56 #include "vm/options.h"
57 #include "vm/statistics.h"
58 #include "vm/stringlocal.h"
59 #include "vm/jit/asmpart.h"
60 #include "vm/jit/codegen-common.h"
61 #include "vm/jit/dseg.h"
62 #include "vm/jit/jit.h"
63 #include "vm/jit/methodheader.h"
64 #include "vm/jit/parse.h"
65 #include "vm/jit/patcher.h"
66 #include "vm/jit/reg.h"
68 #if defined(ENABLE_LSRA)
69 # include "vm/jit/allocator/lsra.h"
75 /* codegen *********************************************************************
77 Generates machine code.
79 *******************************************************************************/
81 bool codegen(methodinfo *m, codegendata *cd, registerdata *rd)
83 s4 len, s1, s2, s3, d, disp;
92 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
93 builtintable_entry *bte;
96 /* prevent compiler warnings */
108 /* space to save used callee saved registers */
110 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
111 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
113 parentargs_base = rd->memuse + savedregs_num;
115 #if defined(USE_THREADS)
116 /* space to save argument of monitor_enter */
118 if (checksync && (m->flags & ACC_SYNCHRONIZED))
122 /* Keep stack of non-leaf functions 16-byte aligned for calls into native */
123 /* code e.g. libc or jni (alignment problems with movaps). */
125 if (!m->isleafmethod || runverbose)
126 parentargs_base |= 0x1;
128 /* create method header */
130 (void) dseg_addaddress(cd, m); /* MethodPointer */
131 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
133 #if defined(USE_THREADS)
134 /* IsSync contains the offset relative to the stack pointer for the
135 argument of monitor_exit used in the exception handler. Since the
136 offset could be zero and give a wrong meaning of the flag it is
140 if (checksync && (m->flags & ACC_SYNCHRONIZED))
141 (void) dseg_adds4(cd, (rd->memuse + 1) * 8); /* IsSync */
144 (void) dseg_adds4(cd, 0); /* IsSync */
146 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
147 (void) dseg_adds4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
148 (void) dseg_adds4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
150 (void) dseg_addlinenumbertablesize(cd);
152 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
154 /* create exception table */
156 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
157 dseg_addtarget(cd, ex->start);
158 dseg_addtarget(cd, ex->end);
159 dseg_addtarget(cd, ex->handler);
160 (void) dseg_addaddress(cd, ex->catchtype.cls);
163 /* initialize mcode variables */
165 cd->mcodeptr = (u1 *) cd->mcodebase;
166 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
168 /* initialize the last patcher pointer */
170 cd->lastmcodeptr = cd->mcodeptr;
172 /* generate method profiling code */
175 /* count frequency */
177 M_MOV_IMM((ptrint) m, REG_ITMP3);
178 M_IINC_MEMBASE(REG_ITMP3, OFFSET(methodinfo, frequency));
183 /* create stack frame (if necessary) */
186 M_ASUB_IMM(parentargs_base * 8, REG_SP);
188 /* save used callee saved registers */
191 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
192 p--; M_LST(rd->savintregs[i], REG_SP, p * 8);
194 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
195 p--; M_DST(rd->savfltregs[i], REG_SP, p * 8);
198 /* take arguments out of register or stack frame */
202 for (p = 0, l = 0; p < md->paramcount; p++) {
203 t = md->paramtypes[p].type;
204 var = &(rd->locals[l][t]);
206 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
210 s1 = md->params[p].regoff;
211 if (IS_INT_LNG_TYPE(t)) { /* integer args */
212 s2 = rd->argintregs[s1];
213 if (!md->params[p].inmemory) { /* register arguments */
214 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
215 M_INTMOVE(s2, var->regoff);
217 } else { /* reg arg -> spilled */
218 M_LST(s2, REG_SP, var->regoff * 8);
221 } else { /* stack arguments */
222 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
223 /* + 8 for return address */
224 M_LLD(var->regoff, REG_SP, (parentargs_base + s1) * 8 + 8);
226 } else { /* stack arg -> spilled */
227 var->regoff = parentargs_base + s1 + 1;
231 } else { /* floating args */
232 if (!md->params[p].inmemory) { /* register arguments */
233 s2 = rd->argfltregs[s1];
234 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
235 M_FLTMOVE(s2, var->regoff);
237 } else { /* reg arg -> spilled */
238 M_DST(s2, REG_SP, var->regoff * 8);
241 } else { /* stack arguments */
242 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
243 M_DLD(var->regoff, REG_SP, (parentargs_base + s1) * 8 + 8);
246 var->regoff = parentargs_base + s1 + 1;
252 /* save monitorenter argument */
254 #if defined(USE_THREADS)
255 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
256 /* stack offset for monitor argument */
261 M_LSUB_IMM((INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_SP);
263 for (p = 0; p < INT_ARG_CNT; p++)
264 M_LST(rd->argintregs[p], REG_SP, p * 8);
266 for (p = 0; p < FLT_ARG_CNT; p++)
267 M_DST(rd->argfltregs[p], REG_SP, (INT_ARG_CNT + p) * 8);
269 s1 += INT_ARG_CNT + FLT_ARG_CNT;
272 /* decide which monitor enter function to call */
274 if (m->flags & ACC_STATIC) {
275 M_MOV_IMM((ptrint) m->class, REG_ITMP1);
276 M_AST(REG_ITMP1, REG_SP, s1 * 8);
277 M_INTMOVE(REG_ITMP1, rd->argintregs[0]);
278 M_MOV_IMM((ptrint) BUILTIN_staticmonitorenter, REG_ITMP1);
282 M_TEST(rd->argintregs[0]);
283 x86_64_jcc(cd, X86_64_CC_Z, 0);
284 codegen_addxnullrefs(cd, cd->mcodeptr);
285 M_AST(rd->argintregs[0], REG_SP, s1 * 8);
286 M_MOV_IMM((ptrint) BUILTIN_monitorenter, REG_ITMP1);
291 for (p = 0; p < INT_ARG_CNT; p++)
292 M_LLD(rd->argintregs[p], REG_SP, p * 8);
294 for (p = 0; p < FLT_ARG_CNT; p++)
295 M_DLD(rd->argfltregs[p], REG_SP, (INT_ARG_CNT + p) * 8);
297 M_LADD_IMM((INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_SP);
302 /* Copy argument registers to stack and call trace function with
303 pointer to arguments on stack. */
305 if (runverbose || opt_stat) {
306 M_LSUB_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT + 1 + 1) * 8, REG_SP);
308 /* save integer argument registers */
310 for (p = 0; p < INT_ARG_CNT; p++)
311 M_LST(rd->argintregs[p], REG_SP, (1 + p) * 8);
313 /* save float argument registers */
315 for (p = 0; p < FLT_ARG_CNT; p++)
316 M_DST(rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
318 /* save temporary registers for leaf methods */
320 if (m->isleafmethod) {
321 for (p = 0; p < INT_TMP_CNT; p++)
322 M_LST(rd->tmpintregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
324 for (p = 0; p < FLT_TMP_CNT; p++)
325 M_DST(rd->tmpfltregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
329 /* show integer hex code for float arguments */
331 for (p = 0, l = 0; p < md->paramcount && p < INT_ARG_CNT; p++) {
332 /* if the paramtype is a float, we have to right shift all */
333 /* following integer registers */
335 if (IS_FLT_DBL_TYPE(md->paramtypes[p].type)) {
336 for (s1 = INT_ARG_CNT - 2; s1 >= p; s1--) {
337 M_MOV(rd->argintregs[s1], rd->argintregs[s1 + 1]);
340 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
345 x86_64_mov_imm_reg(cd, (ptrint) m, REG_ITMP2);
346 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
347 x86_64_mov_imm_reg(cd, (ptrint) builtin_trace_args, REG_ITMP1);
348 x86_64_call_reg(cd, REG_ITMP1);
351 /* restore integer argument registers */
353 for (p = 0; p < INT_ARG_CNT; p++)
354 M_LLD(rd->argintregs[p], REG_SP, (1 + p) * 8);
356 /* restore float argument registers */
358 for (p = 0; p < FLT_ARG_CNT; p++)
359 M_DLD(rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
361 /* restore temporary registers for leaf methods */
363 if (m->isleafmethod) {
364 for (p = 0; p < INT_TMP_CNT; p++)
365 M_LLD(rd->tmpintregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
367 for (p = 0; p < FLT_TMP_CNT; p++)
368 M_DLD(rd->tmpfltregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
371 M_LADD_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT + 1 + 1) * 8, REG_SP);
376 /* end of header generation */
378 /* walk through all basic blocks */
380 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
382 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
384 if (bptr->flags >= BBREACHED) {
386 /* branch resolving */
389 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
390 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
395 /* copy interface registers to their destination */
401 /* generate basicblock profiling code */
404 /* count frequency */
406 M_MOV_IMM((ptrint) m->bbfrequency, REG_ITMP2);
407 M_IINC_MEMBASE(REG_ITMP2, bptr->debug_nr * 4);
409 /* if this is an exception handler, start profiling again */
411 if (bptr->type == BBTYPE_EXH)
415 #if defined(ENABLE_LSRA)
417 while (src != NULL) {
419 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
420 if (bptr->type == BBTYPE_SBR) {
421 /* d = reg_of_var(rd, src, REG_ITMP1); */
422 if (!(src->flags & INMEMORY))
426 x86_64_pop_reg(cd, d);
427 store_reg_to_var_int(src, d);
429 } else if (bptr->type == BBTYPE_EXH) {
430 /* d = reg_of_var(rd, src, REG_ITMP1); */
431 if (!(src->flags & INMEMORY))
435 M_INTMOVE(REG_ITMP1, d);
436 store_reg_to_var_int(src, d);
445 while (src != NULL) {
447 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
448 if (bptr->type == BBTYPE_SBR) {
449 d = reg_of_var(rd, src, REG_ITMP1);
451 store_reg_to_var_int(src, d);
453 } else if (bptr->type == BBTYPE_EXH) {
454 d = reg_of_var(rd, src, REG_ITMP1);
455 M_INTMOVE(REG_ITMP1, d);
456 store_reg_to_var_int(src, d);
460 d = reg_of_var(rd, src, REG_ITMP1);
461 if ((src->varkind != STACKVAR)) {
463 if (IS_FLT_DBL_TYPE(s2)) {
464 s1 = rd->interfaces[len][s2].regoff;
466 if (!(rd->interfaces[len][s2].flags & INMEMORY))
469 M_DLD(d, REG_SP, s1 * 8);
471 store_reg_to_var_flt(src, d);
474 s1 = rd->interfaces[len][s2].regoff;
476 if (!(rd->interfaces[len][s2].flags & INMEMORY))
479 M_LLD(d, REG_SP, s1 * 8);
481 store_reg_to_var_int(src, d);
487 #if defined(ENABLE_LSRA)
490 /* walk through all instructions */
496 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
497 if (iptr->line != currentline) {
498 dseg_addlinenumber(cd, iptr->line, cd->mcodeptr);
499 currentline = iptr->line;
502 MCODECHECK(1024); /* 1KB should be enough */
505 case ICMD_INLINE_START: /* internal ICMDs */
506 case ICMD_INLINE_END:
509 case ICMD_NOP: /* ... ==> ... */
512 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
514 if (src->flags & INMEMORY)
515 M_CMP_IMM_MEMBASE(0, REG_SP, src->regoff * 8);
519 codegen_addxnullrefs(cd, cd->mcodeptr);
522 /* constant operations ************************************************/
524 case ICMD_ICONST: /* ... ==> ..., constant */
525 /* op1 = 0, val.i = constant */
527 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
528 if (iptr->val.i == 0)
531 M_IMOV_IMM(iptr->val.i, d);
532 store_reg_to_var_int(iptr->dst, d);
535 case ICMD_LCONST: /* ... ==> ..., constant */
536 /* op1 = 0, val.l = constant */
538 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
539 if (iptr->val.l == 0)
542 M_MOV_IMM(iptr->val.l, d);
543 store_reg_to_var_int(iptr->dst, d);
546 case ICMD_FCONST: /* ... ==> ..., constant */
547 /* op1 = 0, val.f = constant */
549 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
550 disp = dseg_addfloat(cd, iptr->val.f);
551 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + disp, d);
552 store_reg_to_var_flt(iptr->dst, d);
555 case ICMD_DCONST: /* ... ==> ..., constant */
556 /* op1 = 0, val.d = constant */
558 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
559 disp = dseg_adddouble(cd, iptr->val.d);
560 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + disp, d);
561 store_reg_to_var_flt(iptr->dst, d);
564 case ICMD_ACONST: /* ... ==> ..., constant */
565 /* op1 = 0, val.a = constant */
567 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
569 if ((iptr->target != NULL) && (iptr->val.a == NULL)) {
570 /* PROFILE_CYCLE_STOP; */
572 codegen_addpatchref(cd, cd->mcodeptr,
574 (unresolved_class *) iptr->target, 0);
576 if (opt_showdisassemble) {
577 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
580 /* PROFILE_CYCLE_START; */
582 M_MOV_IMM((ptrint) iptr->val.a, d);
585 if (iptr->val.a == 0)
588 M_MOV_IMM((ptrint) iptr->val.a, d);
590 store_reg_to_var_int(iptr->dst, d);
594 /* load/store operations **********************************************/
596 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
597 /* op1 = local variable */
599 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
600 if ((iptr->dst->varkind == LOCALVAR) &&
601 (iptr->dst->varnum == iptr->op1)) {
604 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
605 if (var->flags & INMEMORY) {
606 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
607 store_reg_to_var_int(iptr->dst, d);
610 if (iptr->dst->flags & INMEMORY) {
611 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
614 M_INTMOVE(var->regoff, d);
619 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
620 case ICMD_ALOAD: /* op1 = local variable */
622 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
623 if ((iptr->dst->varkind == LOCALVAR) &&
624 (iptr->dst->varnum == iptr->op1)) {
627 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
628 if (var->flags & INMEMORY) {
629 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
630 store_reg_to_var_int(iptr->dst, d);
633 if (iptr->dst->flags & INMEMORY) {
634 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
637 M_INTMOVE(var->regoff, d);
642 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
643 case ICMD_DLOAD: /* op1 = local variable */
645 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
646 if ((iptr->dst->varkind == LOCALVAR) &&
647 (iptr->dst->varnum == iptr->op1)) {
650 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
651 if (var->flags & INMEMORY) {
652 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
653 store_reg_to_var_flt(iptr->dst, d);
656 if (iptr->dst->flags & INMEMORY) {
657 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
660 M_FLTMOVE(var->regoff, d);
665 case ICMD_ISTORE: /* ..., value ==> ... */
666 case ICMD_LSTORE: /* op1 = local variable */
669 if ((src->varkind == LOCALVAR) &&
670 (src->varnum == iptr->op1)) {
673 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
674 if (var->flags & INMEMORY) {
675 var_to_reg_int(s1, src, REG_ITMP1);
676 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
679 var_to_reg_int(s1, src, var->regoff);
680 M_INTMOVE(s1, var->regoff);
684 case ICMD_FSTORE: /* ..., value ==> ... */
685 case ICMD_DSTORE: /* op1 = local variable */
687 if ((src->varkind == LOCALVAR) &&
688 (src->varnum == iptr->op1)) {
691 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
692 if (var->flags & INMEMORY) {
693 var_to_reg_flt(s1, src, REG_FTMP1);
694 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
697 var_to_reg_flt(s1, src, var->regoff);
698 M_FLTMOVE(s1, var->regoff);
703 /* pop/dup/swap operations ********************************************/
705 /* attention: double and longs are only one entry in CACAO ICMDs */
707 case ICMD_POP: /* ..., value ==> ... */
708 case ICMD_POP2: /* ..., value, value ==> ... */
711 case ICMD_DUP: /* ..., a ==> ..., a, a */
712 M_COPY(src, iptr->dst);
715 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
717 M_COPY(src, iptr->dst);
718 M_COPY(src->prev, iptr->dst->prev);
719 M_COPY(iptr->dst, iptr->dst->prev->prev);
722 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
724 M_COPY(src, iptr->dst);
725 M_COPY(src->prev, iptr->dst->prev);
726 M_COPY(src->prev->prev, iptr->dst->prev->prev);
727 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
730 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
732 M_COPY(src, iptr->dst);
733 M_COPY(src->prev, iptr->dst->prev);
736 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
738 M_COPY(src, iptr->dst);
739 M_COPY(src->prev, iptr->dst->prev);
740 M_COPY(src->prev->prev, iptr->dst->prev->prev);
741 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
742 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
745 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
747 M_COPY(src, iptr->dst);
748 M_COPY(src->prev, iptr->dst->prev);
749 M_COPY(src->prev->prev, iptr->dst->prev->prev);
750 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
751 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
752 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
755 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
757 M_COPY(src, iptr->dst->prev);
758 M_COPY(src->prev, iptr->dst);
762 /* integer operations *************************************************/
764 case ICMD_INEG: /* ..., value ==> ..., - value */
766 d = reg_of_var(rd, iptr->dst, REG_NULL);
767 if (iptr->dst->flags & INMEMORY) {
768 if (src->flags & INMEMORY) {
769 if (src->regoff == iptr->dst->regoff) {
770 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
773 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
774 x86_64_negl_reg(cd, REG_ITMP1);
775 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
779 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
780 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
784 if (src->flags & INMEMORY) {
785 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
786 x86_64_negl_reg(cd, d);
789 M_INTMOVE(src->regoff, iptr->dst->regoff);
790 x86_64_negl_reg(cd, iptr->dst->regoff);
795 case ICMD_LNEG: /* ..., value ==> ..., - value */
797 d = reg_of_var(rd, iptr->dst, REG_NULL);
798 if (iptr->dst->flags & INMEMORY) {
799 if (src->flags & INMEMORY) {
800 if (src->regoff == iptr->dst->regoff) {
801 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
804 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
805 x86_64_neg_reg(cd, REG_ITMP1);
806 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
810 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
811 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
815 if (src->flags & INMEMORY) {
816 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
817 x86_64_neg_reg(cd, iptr->dst->regoff);
820 M_INTMOVE(src->regoff, iptr->dst->regoff);
821 x86_64_neg_reg(cd, iptr->dst->regoff);
826 case ICMD_I2L: /* ..., value ==> ..., value */
828 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
829 if (src->flags & INMEMORY) {
830 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
833 x86_64_movslq_reg_reg(cd, src->regoff, d);
835 store_reg_to_var_int(iptr->dst, d);
838 case ICMD_L2I: /* ..., value ==> ..., value */
840 var_to_reg_int(s1, src, REG_ITMP1);
841 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
843 store_reg_to_var_int(iptr->dst, d);
846 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
848 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
849 if (src->flags & INMEMORY) {
850 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
853 x86_64_movsbq_reg_reg(cd, src->regoff, d);
855 store_reg_to_var_int(iptr->dst, d);
858 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
860 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
861 if (src->flags & INMEMORY) {
862 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
865 x86_64_movzwq_reg_reg(cd, src->regoff, d);
867 store_reg_to_var_int(iptr->dst, d);
870 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
872 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
873 if (src->flags & INMEMORY) {
874 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
877 x86_64_movswq_reg_reg(cd, src->regoff, d);
879 store_reg_to_var_int(iptr->dst, d);
883 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
885 d = reg_of_var(rd, iptr->dst, REG_NULL);
886 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
889 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
890 /* val.i = constant */
892 d = reg_of_var(rd, iptr->dst, REG_NULL);
893 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
896 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
898 d = reg_of_var(rd, iptr->dst, REG_NULL);
899 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
902 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
903 /* val.l = constant */
905 d = reg_of_var(rd, iptr->dst, REG_NULL);
906 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
909 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
911 d = reg_of_var(rd, iptr->dst, REG_NULL);
912 if (iptr->dst->flags & INMEMORY) {
913 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
914 if (src->prev->regoff == iptr->dst->regoff) {
915 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
916 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
919 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
920 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
921 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
924 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
925 M_INTMOVE(src->prev->regoff, REG_ITMP1);
926 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
927 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
929 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
930 if (src->prev->regoff == iptr->dst->regoff) {
931 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
934 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
935 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
936 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
940 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
941 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
945 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
946 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
947 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
949 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
950 M_INTMOVE(src->prev->regoff, d);
951 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
953 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
954 /* workaround for reg alloc */
955 if (src->regoff == iptr->dst->regoff) {
956 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
957 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
958 M_INTMOVE(REG_ITMP1, d);
961 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
962 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
966 /* workaround for reg alloc */
967 if (src->regoff == iptr->dst->regoff) {
968 M_INTMOVE(src->prev->regoff, REG_ITMP1);
969 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
970 M_INTMOVE(REG_ITMP1, d);
973 M_INTMOVE(src->prev->regoff, d);
974 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
980 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
981 /* val.i = constant */
983 d = reg_of_var(rd, iptr->dst, REG_NULL);
984 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
987 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
989 d = reg_of_var(rd, iptr->dst, REG_NULL);
990 if (iptr->dst->flags & INMEMORY) {
991 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
992 if (src->prev->regoff == iptr->dst->regoff) {
993 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
994 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
997 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
998 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
999 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1002 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1003 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1004 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1005 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1007 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1008 if (src->prev->regoff == iptr->dst->regoff) {
1009 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1012 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1013 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1014 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1018 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1019 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1023 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1024 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1025 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1027 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1028 M_INTMOVE(src->prev->regoff, d);
1029 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1031 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1032 /* workaround for reg alloc */
1033 if (src->regoff == iptr->dst->regoff) {
1034 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1035 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1036 M_INTMOVE(REG_ITMP1, d);
1039 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1040 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1044 /* workaround for reg alloc */
1045 if (src->regoff == iptr->dst->regoff) {
1046 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1047 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1048 M_INTMOVE(REG_ITMP1, d);
1051 M_INTMOVE(src->prev->regoff, d);
1052 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1058 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1059 /* val.l = constant */
1061 d = reg_of_var(rd, iptr->dst, REG_NULL);
1062 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1065 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1067 d = reg_of_var(rd, iptr->dst, REG_NULL);
1068 if (iptr->dst->flags & INMEMORY) {
1069 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1070 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1071 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1072 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1074 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1075 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1076 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1077 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1079 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1080 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1081 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1082 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1085 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1086 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1087 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1091 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1092 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1093 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1095 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1096 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1097 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1099 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1100 M_INTMOVE(src->regoff, iptr->dst->regoff);
1101 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1104 if (src->regoff == iptr->dst->regoff) {
1105 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1108 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1109 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1115 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1116 /* val.i = constant */
1118 d = reg_of_var(rd, iptr->dst, REG_NULL);
1119 if (iptr->dst->flags & INMEMORY) {
1120 if (src->flags & INMEMORY) {
1121 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1122 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* tail of ICMD_IMULCONST: 32-bit multiply by constant; result goes via
   REG_ITMP1 into the stack slot (regoff * 8 off REG_SP) when dst is
   spilled */
1125 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1126 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1130 if (src->flags & INMEMORY) {
1131 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
/* strength-reduce multiply by 2 into dst = src + src */
1134 if (iptr->val.i == 2) {
1135 M_INTMOVE(src->regoff, iptr->dst->regoff);
1136 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1139 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */

/* 64-bit multiply: each operand may live in a register or in a spill
   slot (REG_SP + regoff * 8); REG_ITMP1 is the scratch whenever the
   destination itself is spilled */
1145 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1147 d = reg_of_var(rd, iptr->dst, REG_NULL);
1148 if (iptr->dst->flags & INMEMORY) {
1149 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1150 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1151 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1152 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1154 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1155 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1156 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1157 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1159 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1160 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1161 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1162 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1165 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1166 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1167 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1171 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1172 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1173 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1175 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1176 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1177 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1179 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1180 M_INTMOVE(src->regoff, iptr->dst->regoff);
1181 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
/* both operands in registers: when src aliases dst, multiply in place
   to avoid clobbering the second operand before it is read */
1184 if (src->regoff == iptr->dst->regoff) {
1185 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1188 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1189 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);

/* 64-bit multiply by constant: use imul with a sign-extended 32-bit
   immediate when IS_IMM32 holds, otherwise materialize the full 64-bit
   constant in a register first */
1195 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1196 /* val.l = constant */
1198 d = reg_of_var(rd, iptr->dst, REG_NULL);
1199 if (iptr->dst->flags & INMEMORY) {
1200 if (src->flags & INMEMORY) {
1201 if (IS_IMM32(iptr->val.l)) {
1202 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1205 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1206 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1208 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1211 if (IS_IMM32(iptr->val.l)) {
1212 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1215 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1216 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1218 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1222 if (src->flags & INMEMORY) {
1223 if (IS_IMM32(iptr->val.l)) {
1224 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1227 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1228 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1232 /* should match in many cases */
1233 if (iptr->val.l == 2) {
1234 M_INTMOVE(src->regoff, iptr->dst->regoff);
/* NOTE(review): this is the 32-bit add form (alul) applied to a long
   (64-bit) multiply-by-2 -- the upper 32 bits of the result would be
   lost; presumably the 64-bit alu form was intended. Verify. */
1235 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1238 if (IS_IMM32(iptr->val.l)) {
1239 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1242 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1243 M_INTMOVE(src->regoff, iptr->dst->regoff);
1244 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
/* 32-bit division: dividend is forced into RAX (idivl requires
   EDX:EAX), divisor into REG_ITMP3; RDX is saved/restored around the
   division because it doubles as an argument register in the x86_64
   calling convention. The INT_MIN / -1 overflow case mandated by the
   JVM spec is skipped over with hand-counted byte offsets in the jcc
   displacements -- fragile if any emitted instruction changes size. */
1251 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1253 d = reg_of_var(rd, iptr->dst, REG_NULL);
1254 if (src->prev->flags & INMEMORY) {
1255 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1258 M_INTMOVE(src->prev->regoff, RAX);
1261 if (src->flags & INMEMORY) {
1262 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1265 M_INTMOVE(src->regoff, REG_ITMP3);
1269 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1270 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1271 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1272 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1274 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1276 x86_64_idivl_reg(cd, REG_ITMP3);
/* quotient is in RAX; restore RDX unless it is itself the
   destination register */
1278 if (iptr->dst->flags & INMEMORY) {
1279 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1280 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1283 M_INTMOVE(RAX, iptr->dst->regoff);
1285 if (iptr->dst->regoff != RDX) {
1286 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */

/* 32-bit remainder: same setup as IDIV, but the result is taken from
   RDX (idivl leaves the remainder there); for the INT_MIN % -1 case
   the remainder is forced to 0 by xor'ing RDX */
1291 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1292 d = reg_of_var(rd, iptr->dst, REG_NULL);
1293 if (src->prev->flags & INMEMORY) {
1294 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1297 M_INTMOVE(src->prev->regoff, RAX);
1300 if (src->flags & INMEMORY) {
1301 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1304 M_INTMOVE(src->regoff, REG_ITMP3);
1308 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1310 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1311 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1314 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1315 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1316 x86_64_jcc(cd, X86_64_CC_E, 1 + 3); /* 6 bytes */
1319 x86_64_idivl_reg(cd, REG_ITMP3);
1321 if (iptr->dst->flags & INMEMORY) {
1322 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1323 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1326 M_INTMOVE(RDX, iptr->dst->regoff);
1328 if (iptr->dst->regoff != RDX) {
1329 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */

/* int division by a power of two: branchless round-toward-zero --
   for negative dividends add (2^c - 1) before the arithmetic shift
   (lea computes the biased value, cmov selects it when value <= -1) */
1334 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1335 /* val.i = constant */
1337 var_to_reg_int(s1, src, REG_ITMP1);
1338 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1339 M_INTMOVE(s1, REG_ITMP1);
1340 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1341 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1342 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1343 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1344 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1345 store_reg_to_var_int(iptr->dst, d);

/* int remainder by a power-of-two mask: sign-corrected masking so the
   result has the sign of the dividend (val.i here is the mask
   2^c - 1, not the exponent) */
1348 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1349 /* val.i = constant */
1351 var_to_reg_int(s1, src, REG_ITMP1);
1352 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1353 M_INTMOVE(s1, REG_ITMP1);
1354 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1355 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1356 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1357 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1358 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1359 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1360 store_reg_to_var_int(iptr->dst, d);
/* 64-bit division: same RAX/RDX protocol as IDIV but with the 64-bit
   idiv; the LONG_MIN sentinel cannot be encoded as an immediate, so it
   is placed in the data segment and compared RIP-relative (the +7 is
   the byte length of the emitted cmp instruction, used to compute the
   displacement back to the dseg entry) */
1364 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1366 d = reg_of_var(rd, iptr->dst, REG_NULL);
1368 if (src->prev->flags & INMEMORY) {
1369 M_LLD(RAX, REG_SP, src->prev->regoff * 8);
1372 M_INTMOVE(src->prev->regoff, RAX);
1375 if (src->flags & INMEMORY) {
1376 M_LLD(REG_ITMP3, REG_SP, src->regoff * 8);
1379 M_INTMOVE(src->regoff, REG_ITMP3);
1383 /* check as described in jvm spec */
1384 disp = dseg_adds8(cd, 0x8000000000000000LL);
1385 M_CMP_MEMBASE(RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, RAX);
1387 M_CMP_IMM(-1, REG_ITMP3); /* 4 bytes */
1388 M_BEQ(3 + 2 + 3); /* 6 bytes */
1390 M_MOV(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1392 x86_64_idiv_reg(cd, REG_ITMP3);
1394 if (iptr->dst->flags & INMEMORY) {
1395 M_LST(RAX, REG_SP, iptr->dst->regoff * 8);
1396 M_MOV(REG_ITMP2, RDX); /* restore %rdx */
1399 M_INTMOVE(RAX, iptr->dst->regoff);
1401 if (iptr->dst->regoff != RDX) {
1402 M_MOV(REG_ITMP2, RDX); /* restore %rdx */

/* 64-bit remainder: result comes back in RDX; LONG_MIN % -1 is
   special-cased to 0 by clearing RDX before skipping the idiv */
1407 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1409 d = reg_of_var(rd, iptr->dst, REG_NULL);
1410 if (src->prev->flags & INMEMORY) {
1411 M_LLD(REG_ITMP1, REG_SP, src->prev->regoff * 8);
1414 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1417 if (src->flags & INMEMORY) {
1418 M_LLD(REG_ITMP3, REG_SP, src->regoff * 8);
1421 M_INTMOVE(src->regoff, REG_ITMP3);
1425 M_MOV(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1427 /* check as described in jvm spec */
1428 disp = dseg_adds8(cd, 0x8000000000000000LL);
1429 M_CMP_MEMBASE(RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP1);
1433 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1435 M_XOR(RDX, RDX); /* 3 bytes */
1436 M_CMP_IMM(-1, REG_ITMP3); /* 4 bytes */
1437 M_BEQ(2 + 3); /* 6 bytes */
1440 x86_64_idiv_reg(cd, REG_ITMP3);
1442 if (iptr->dst->flags & INMEMORY) {
1443 M_LST(RDX, REG_SP, iptr->dst->regoff * 8);
1444 M_MOV(REG_ITMP2, RDX); /* restore %rdx */
1447 M_INTMOVE(RDX, iptr->dst->regoff);
1449 if (iptr->dst->regoff != RDX) {
1450 M_MOV(REG_ITMP2, RDX); /* restore %rdx */

/* long division by a power of two (64-bit lea/cmov/sar variant of
   IDIVPOW2) */
1455 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1456 /* val.i = constant */
1458 var_to_reg_int(s1, src, REG_ITMP1);
1459 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1460 M_INTMOVE(s1, REG_ITMP1);
1461 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
/* NOTE(review): (1 << iptr->val.i) shifts a plain int -- for a long
   divide the shift count may exceed 31, which is undefined behavior
   and yields a wrong bias; presumably (1LL << iptr->val.i) was
   intended. Verify against upstream. */
1462 x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1463 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1464 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1465 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1466 store_reg_to_var_int(iptr->dst, d);

/* long remainder by a power-of-two mask (64-bit variant of
   IREMPOW2) */
1469 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1470 /* val.l = constant */
1472 var_to_reg_int(s1, src, REG_ITMP1);
1473 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1474 M_INTMOVE(s1, REG_ITMP1);
1475 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
/* NOTE(review): the header says val.l = constant, but val.i (the low
   32 bits) is read here and below -- masks wider than 32 bits would
   be truncated. Confirm whether the front end guarantees a 32-bit
   mask for this ICMD. */
1476 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1477 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1478 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1479 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1480 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1481 store_reg_to_var_int(iptr->dst, d);
/* shift and bitwise ICMDs: all delegate to the shared emit helpers in
   emitfuncs.c; the i-prefixed helpers emit 32-bit forms, the
   l-prefixed ones 64-bit forms, parameterized by the ALU/shift opcode
   constant */
1484 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1486 d = reg_of_var(rd, iptr->dst, REG_NULL);
1487 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1490 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1491 /* val.i = constant */
1493 d = reg_of_var(rd, iptr->dst, REG_NULL);
1494 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1497 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1499 d = reg_of_var(rd, iptr->dst, REG_NULL);
1500 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1503 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1504 /* val.i = constant */
1506 d = reg_of_var(rd, iptr->dst, REG_NULL);
1507 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1510 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1512 d = reg_of_var(rd, iptr->dst, REG_NULL);
1513 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1516 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1517 /* val.i = constant */
1519 d = reg_of_var(rd, iptr->dst, REG_NULL);
1520 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1523 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1525 d = reg_of_var(rd, iptr->dst, REG_NULL);
1526 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1529 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1530 /* val.i = constant */
1532 d = reg_of_var(rd, iptr->dst, REG_NULL);
1533 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1536 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1538 d = reg_of_var(rd, iptr->dst, REG_NULL);
1539 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1542 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1543 /* val.i = constant */
1545 d = reg_of_var(rd, iptr->dst, REG_NULL);
1546 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1549 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1551 d = reg_of_var(rd, iptr->dst, REG_NULL);
1552 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1555 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1556 /* val.l = constant */
1558 d = reg_of_var(rd, iptr->dst, REG_NULL);
1559 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1562 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1564 d = reg_of_var(rd, iptr->dst, REG_NULL);
1565 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1568 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1569 /* val.i = constant */
1571 d = reg_of_var(rd, iptr->dst, REG_NULL);
1572 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1575 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1577 d = reg_of_var(rd, iptr->dst, REG_NULL);
1578 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1581 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1582 /* val.l = constant */
1584 d = reg_of_var(rd, iptr->dst, REG_NULL);
1585 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1588 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1590 d = reg_of_var(rd, iptr->dst, REG_NULL);
1591 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1594 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1595 /* val.i = constant */
1597 d = reg_of_var(rd, iptr->dst, REG_NULL);
1598 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1601 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1603 d = reg_of_var(rd, iptr->dst, REG_NULL);
1604 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1607 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1608 /* val.l = constant */
1610 d = reg_of_var(rd, iptr->dst, REG_NULL);
1611 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1614 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1616 d = reg_of_var(rd, iptr->dst, REG_NULL);
1617 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1620 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1621 /* val.i = constant */
1623 d = reg_of_var(rd, iptr->dst, REG_NULL);
1624 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1627 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1629 d = reg_of_var(rd, iptr->dst, REG_NULL);
1630 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1633 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1634 /* val.l = constant */
1636 d = reg_of_var(rd, iptr->dst, REG_NULL);
1637 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);

/* IINC on a local variable: +-1 is specialized to inc/dec, anything
   else to an add-immediate, against either the spill slot or the
   register holding the local. NOTE(review): d is used below as the
   slot/register of the local; its assignment is not visible here --
   presumably d = var->regoff in elided code. Confirm. */
1641 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1642 /* op1 = variable, val.i = constant */
1644 /* using inc and dec is definitely faster than add -- tested */
1647 var = &(rd->locals[iptr->op1][TYPE_INT]);
1649 if (var->flags & INMEMORY) {
1650 if (iptr->val.i == 1) {
1651 x86_64_incl_membase(cd, REG_SP, d * 8);
1653 } else if (iptr->val.i == -1) {
1654 x86_64_decl_membase(cd, REG_SP, d * 8);
1657 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1661 if (iptr->val.i == 1) {
1662 x86_64_incl_reg(cd, d);
1664 } else if (iptr->val.i == -1) {
1665 x86_64_decl_reg(cd, d);
1668 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1674 /* floating operations ************************************************/

/* FNEG/DNEG: flip the IEEE sign bit by xor'ing with a sign-mask
   constant stored in the data segment and loaded RIP-relative (the
   +9 is the byte length of the emitted load, used to compute the
   displacement back to the dseg entry) */
1676 case ICMD_FNEG: /* ..., value ==> ..., - value */
1678 var_to_reg_flt(s1, src, REG_FTMP1);
1679 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1680 disp = dseg_adds4(cd, 0x80000000);
1682 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + disp, REG_FTMP2);
1683 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1684 store_reg_to_var_flt(iptr->dst, d);
1687 case ICMD_DNEG: /* ..., value ==> ..., - value */
1689 var_to_reg_flt(s1, src, REG_FTMP1);
1690 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
/* NOTE(review): constant lacks the LL suffix used at line 1384 for
   the same value -- harmless on most compilers but inconsistent */
1691 disp = dseg_adds8(cd, 0x8000000000000000);
1693 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + disp, REG_FTMP2);
1694 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1695 store_reg_to_var_flt(iptr->dst, d);

/* FADD/DADD/FMUL/DMUL are commutative: when dst aliases one of the
   sources the other operand is folded in place; FSUB/DSUB/FDIV/DDIV
   are not, so s2 is first rescued into REG_FTMP2 to keep it from
   being clobbered by the move into d */
1698 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1700 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1701 var_to_reg_flt(s2, src, REG_FTMP2);
1702 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1704 x86_64_addss_reg_reg(cd, s2, d);
1705 } else if (s2 == d) {
1706 x86_64_addss_reg_reg(cd, s1, d);
1709 x86_64_addss_reg_reg(cd, s2, d);
1711 store_reg_to_var_flt(iptr->dst, d);
1714 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1716 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1717 var_to_reg_flt(s2, src, REG_FTMP2);
1718 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1720 x86_64_addsd_reg_reg(cd, s2, d);
1721 } else if (s2 == d) {
1722 x86_64_addsd_reg_reg(cd, s1, d);
1725 x86_64_addsd_reg_reg(cd, s2, d);
1727 store_reg_to_var_flt(iptr->dst, d);
1730 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1732 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1733 var_to_reg_flt(s2, src, REG_FTMP2);
1734 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1736 M_FLTMOVE(s2, REG_FTMP2);
1740 x86_64_subss_reg_reg(cd, s2, d);
1741 store_reg_to_var_flt(iptr->dst, d);
1744 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1746 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1747 var_to_reg_flt(s2, src, REG_FTMP2);
1748 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1750 M_FLTMOVE(s2, REG_FTMP2);
1754 x86_64_subsd_reg_reg(cd, s2, d);
1755 store_reg_to_var_flt(iptr->dst, d);
1758 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1760 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1761 var_to_reg_flt(s2, src, REG_FTMP2);
1762 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1764 x86_64_mulss_reg_reg(cd, s2, d);
1765 } else if (s2 == d) {
1766 x86_64_mulss_reg_reg(cd, s1, d);
1769 x86_64_mulss_reg_reg(cd, s2, d);
1771 store_reg_to_var_flt(iptr->dst, d);
1774 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1776 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1777 var_to_reg_flt(s2, src, REG_FTMP2);
1778 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1780 x86_64_mulsd_reg_reg(cd, s2, d);
1781 } else if (s2 == d) {
1782 x86_64_mulsd_reg_reg(cd, s1, d);
1785 x86_64_mulsd_reg_reg(cd, s2, d);
1787 store_reg_to_var_flt(iptr->dst, d);
1790 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1792 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1793 var_to_reg_flt(s2, src, REG_FTMP2);
1794 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1796 M_FLTMOVE(s2, REG_FTMP2);
1800 x86_64_divss_reg_reg(cd, s2, d);
1801 store_reg_to_var_flt(iptr->dst, d);
1804 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1806 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1807 var_to_reg_flt(s2, src, REG_FTMP2);
1808 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1810 M_FLTMOVE(s2, REG_FTMP2);
1814 x86_64_divsd_reg_reg(cd, s2, d);
1815 store_reg_to_var_flt(iptr->dst, d);

/* integer -> float conversions map directly onto cvtsi2ss/sd (the
   q-suffixed forms take a 64-bit source) */
1818 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1820 var_to_reg_int(s1, src, REG_ITMP1);
1821 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1822 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1823 store_reg_to_var_flt(iptr->dst, d);
1826 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1828 var_to_reg_int(s1, src, REG_ITMP1);
1829 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1830 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1831 store_reg_to_var_flt(iptr->dst, d);
1834 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1836 var_to_reg_int(s1, src, REG_ITMP1);
1837 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1838 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1839 store_reg_to_var_flt(iptr->dst, d);
1842 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1844 var_to_reg_int(s1, src, REG_ITMP1);
1845 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1846 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1847 store_reg_to_var_flt(iptr->dst, d);

/* float -> integer conversions: cvttss/sd2si returns the integer
   indefinite value (0x80000000 / 0x8000...) on overflow or NaN; that
   sentinel is detected and the slow-path asm builtin is called to
   produce the JVM-mandated saturating result. The jcc displacement is
   the hand-summed byte size of the skipped instructions. */
1850 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1852 var_to_reg_flt(s1, src, REG_FTMP1);
1853 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1854 x86_64_cvttss2si_reg_reg(cd, s1, d);
1855 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1856 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1857 x86_64_jcc(cd, X86_64_CC_NE, a);
1858 M_FLTMOVE(s1, REG_FTMP1);
1859 x86_64_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP2);
1860 x86_64_call_reg(cd, REG_ITMP2);
1861 M_INTMOVE(REG_RESULT, d);
1862 store_reg_to_var_int(iptr->dst, d);
1865 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1867 var_to_reg_flt(s1, src, REG_FTMP1);
1868 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1869 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1870 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1871 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1872 x86_64_jcc(cd, X86_64_CC_NE, a);
1873 M_FLTMOVE(s1, REG_FTMP1);
1874 x86_64_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP2);
1875 x86_64_call_reg(cd, REG_ITMP2);
1876 M_INTMOVE(REG_RESULT, d);
1877 store_reg_to_var_int(iptr->dst, d);
1880 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1882 var_to_reg_flt(s1, src, REG_FTMP1);
1883 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1884 x86_64_cvttss2siq_reg_reg(cd, s1, d);
1885 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1886 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1887 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1888 x86_64_jcc(cd, X86_64_CC_NE, a);
1889 M_FLTMOVE(s1, REG_FTMP1);
1890 x86_64_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP2);
1891 x86_64_call_reg(cd, REG_ITMP2);
1892 M_INTMOVE(REG_RESULT, d);
1893 store_reg_to_var_int(iptr->dst, d);
1896 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1898 var_to_reg_flt(s1, src, REG_FTMP1);
1899 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1900 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1901 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1902 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1903 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1904 x86_64_jcc(cd, X86_64_CC_NE, a);
1905 M_FLTMOVE(s1, REG_FTMP1);
1906 x86_64_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP2);
1907 x86_64_call_reg(cd, REG_ITMP2);
1908 M_INTMOVE(REG_RESULT, d);
1909 store_reg_to_var_int(iptr->dst, d);

/* float <-> double widening/narrowing: single cvt instruction */
1912 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1914 var_to_reg_flt(s1, src, REG_FTMP1);
1915 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1916 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1917 store_reg_to_var_flt(iptr->dst, d);
1920 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1922 var_to_reg_flt(s1, src, REG_FTMP1);
1923 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1924 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1925 store_reg_to_var_flt(iptr->dst, d);
/* floating-point compares: ucomiss/ucomisd sets CF/ZF/PF; the result
   encoding noted in the header comments (== => 0, < => 1, > => -1) is
   selected branch-free with cmov from pre-loaded +1/-1 registers.
   NaN raises PF (unordered), which the final CMOVP resolves -- the L
   variants treat unordered as GT, the G variants as LT, matching the
   JVM fcmpl/fcmpg distinction. The initial clearing of d is in elided
   code above the cmovs. */
1928 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1929 /* == => 0, < => 1, > => -1 */
1931 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1932 var_to_reg_flt(s2, src, REG_FTMP2);
1933 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1935 M_MOV_IMM(1, REG_ITMP1);
1936 M_MOV_IMM(-1, REG_ITMP2);
1937 x86_64_ucomiss_reg_reg(cd, s1, s2);
1938 M_CMOVB(REG_ITMP1, d);
1939 M_CMOVA(REG_ITMP2, d);
1940 M_CMOVP(REG_ITMP2, d); /* treat unordered as GT */
1941 store_reg_to_var_int(iptr->dst, d);
1944 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1945 /* == => 0, < => 1, > => -1 */
1947 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1948 var_to_reg_flt(s2, src, REG_FTMP2);
1949 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1951 M_MOV_IMM(1, REG_ITMP1);
1952 M_MOV_IMM(-1, REG_ITMP2);
1953 x86_64_ucomiss_reg_reg(cd, s1, s2);
1954 M_CMOVB(REG_ITMP1, d);
1955 M_CMOVA(REG_ITMP2, d);
1956 M_CMOVP(REG_ITMP1, d); /* treat unordered as LT */
1957 store_reg_to_var_int(iptr->dst, d);
1960 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1961 /* == => 0, < => 1, > => -1 */
1963 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1964 var_to_reg_flt(s2, src, REG_FTMP2);
1965 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1967 M_MOV_IMM(1, REG_ITMP1);
1968 M_MOV_IMM(-1, REG_ITMP2);
1969 x86_64_ucomisd_reg_reg(cd, s1, s2);
1970 M_CMOVB(REG_ITMP1, d);
1971 M_CMOVA(REG_ITMP2, d);
1972 M_CMOVP(REG_ITMP2, d); /* treat unordered as GT */
1973 store_reg_to_var_int(iptr->dst, d);
1976 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1977 /* == => 0, < => 1, > => -1 */
1979 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1980 var_to_reg_flt(s2, src, REG_FTMP2);
1981 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1983 M_MOV_IMM(1, REG_ITMP1);
1984 M_MOV_IMM(-1, REG_ITMP2);
1985 x86_64_ucomisd_reg_reg(cd, s1, s2);
1986 M_CMOVB(REG_ITMP1, d);
1987 M_CMOVA(REG_ITMP2, d);
1988 M_CMOVP(REG_ITMP1, d); /* treat unordered as LT */
1989 store_reg_to_var_int(iptr->dst, d);
1993 /* memory operations **************************************************/

/* ARRAYLENGTH: the length lives at a fixed offset in the array
   header; a null check guards the load */
1995 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
1997 var_to_reg_int(s1, src, REG_ITMP1);
1998 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1999 gen_nullptr_check(s1);
2000 M_ILD(d, s1, OFFSET(java_arrayheader, size));
2001 store_reg_to_var_int(iptr->dst, d);

/* array loads: s1 = arrayref, s2 = index; the memindex addressing
   uses scale log2(element size). NOTE(review): op1 == 0 appears to
   gate emission of the null (and, in elided lines, the bound) check
   -- presumably op1 != 0 means the optimizer proved the checks
   redundant; confirm against the ICMD documentation. */
2004 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2006 var_to_reg_int(s1, src->prev, REG_ITMP1);
2007 var_to_reg_int(s2, src, REG_ITMP2);
2008 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2009 if (iptr->op1 == 0) {
2010 gen_nullptr_check(s1);
2013 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2014 store_reg_to_var_int(iptr->dst, d);
2017 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2019 var_to_reg_int(s1, src->prev, REG_ITMP1);
2020 var_to_reg_int(s2, src, REG_ITMP2);
2021 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2022 if (iptr->op1 == 0) {
2023 gen_nullptr_check(s1);
2026 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2027 store_reg_to_var_int(iptr->dst, d);
2030 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2032 var_to_reg_int(s1, src->prev, REG_ITMP1);
2033 var_to_reg_int(s2, src, REG_ITMP2);
2034 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2035 if (iptr->op1 == 0) {
2036 gen_nullptr_check(s1);
2039 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2040 store_reg_to_var_int(iptr->dst, d);
2043 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2045 var_to_reg_int(s1, src->prev, REG_ITMP1);
2046 var_to_reg_int(s2, src, REG_ITMP2);
2047 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2048 if (iptr->op1 == 0) {
2049 gen_nullptr_check(s1);
2052 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2053 store_reg_to_var_int(iptr->dst, d);
2056 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2058 var_to_reg_int(s1, src->prev, REG_ITMP1);
2059 var_to_reg_int(s2, src, REG_ITMP2);
2060 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2061 if (iptr->op1 == 0) {
2062 gen_nullptr_check(s1);
2065 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2066 store_reg_to_var_int(iptr->dst, d);
2069 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2071 var_to_reg_int(s1, src->prev, REG_ITMP1);
2072 var_to_reg_int(s2, src, REG_ITMP2);
2073 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2074 if (iptr->op1 == 0) {
2075 gen_nullptr_check(s1);
2078 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2079 store_reg_to_var_flt(iptr->dst, d);
2082 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2084 var_to_reg_int(s1, src->prev, REG_ITMP1);
2085 var_to_reg_int(s2, src, REG_ITMP2);
2086 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2087 if (iptr->op1 == 0) {
2088 gen_nullptr_check(s1);
2091 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2092 store_reg_to_var_flt(iptr->dst, d);
2095 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2097 var_to_reg_int(s1, src->prev, REG_ITMP1);
2098 var_to_reg_int(s2, src, REG_ITMP2);
2099 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2100 if (iptr->op1 == 0) {
2101 gen_nullptr_check(s1);
2104 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2105 store_reg_to_var_int(iptr->dst, d);
/* array stores: s1 = arrayref, s2 = index, s3 = value; addressing
   mirrors the loads (memindex with log2-element-size scale) */
2109 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2111 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2112 var_to_reg_int(s2, src->prev, REG_ITMP2);
2113 if (iptr->op1 == 0) {
2114 gen_nullptr_check(s1);
2117 var_to_reg_int(s3, src, REG_ITMP3);
2118 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2121 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2123 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2124 var_to_reg_int(s2, src->prev, REG_ITMP2);
2125 if (iptr->op1 == 0) {
2126 gen_nullptr_check(s1);
2129 var_to_reg_int(s3, src, REG_ITMP3);
2130 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2133 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2135 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2136 var_to_reg_int(s2, src->prev, REG_ITMP2);
2137 if (iptr->op1 == 0) {
2138 gen_nullptr_check(s1);
2141 var_to_reg_int(s3, src, REG_ITMP3);
2142 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2145 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2147 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2148 var_to_reg_int(s2, src->prev, REG_ITMP2);
2149 if (iptr->op1 == 0) {
2150 gen_nullptr_check(s1);
2153 var_to_reg_int(s3, src, REG_ITMP3);
2154 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2157 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2159 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2160 var_to_reg_int(s2, src->prev, REG_ITMP2);
2161 if (iptr->op1 == 0) {
2162 gen_nullptr_check(s1);
2165 var_to_reg_int(s3, src, REG_ITMP3);
2166 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2169 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2171 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2172 var_to_reg_int(s2, src->prev, REG_ITMP2);
2173 if (iptr->op1 == 0) {
2174 gen_nullptr_check(s1);
2177 var_to_reg_flt(s3, src, REG_FTMP3);
2178 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2181 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2183 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2184 var_to_reg_int(s2, src->prev, REG_ITMP2);
2185 if (iptr->op1 == 0) {
2186 gen_nullptr_check(s1);
2189 var_to_reg_flt(s3, src, REG_FTMP3);
2190 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);

/* AASTORE additionally performs the JVM-mandated array store check:
   BUILTIN_canstore(arrayref, value) is called (arguments in the first
   two integer argument registers), an ArrayStoreException path is
   registered via codegen_addxstorerefs, and the operands are
   re-materialized afterwards because the call clobbers the temps */
2193 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2195 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2196 var_to_reg_int(s2, src->prev, REG_ITMP2);
2197 if (iptr->op1 == 0) {
2198 gen_nullptr_check(s1);
2201 var_to_reg_int(s3, src, REG_ITMP3);
2203 M_MOV(s1, rd->argintregs[0]);
2204 M_MOV(s3, rd->argintregs[1]);
2205 M_MOV_IMM((ptrint) BUILTIN_canstore, REG_ITMP1);
2209 codegen_addxstorerefs(cd, cd->mcodeptr);
2211 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2212 var_to_reg_int(s2, src->prev, REG_ITMP2);
2213 var_to_reg_int(s3, src, REG_ITMP3);
2214 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);

/* store-constant variants: the value comes from iptr->val instead of
   the stack, saving a register */
2218 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2220 var_to_reg_int(s1, src->prev, REG_ITMP1);
2221 var_to_reg_int(s2, src, REG_ITMP2);
2222 if (iptr->op1 == 0) {
2223 gen_nullptr_check(s1);
2226 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2229 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2231 var_to_reg_int(s1, src->prev, REG_ITMP1);
2232 var_to_reg_int(s2, src, REG_ITMP2);
2233 if (iptr->op1 == 0) {
2234 gen_nullptr_check(s1);
2237 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2240 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2242 var_to_reg_int(s1, src->prev, REG_ITMP1);
2243 var_to_reg_int(s2, src, REG_ITMP2);
2244 if (iptr->op1 == 0) {
2245 gen_nullptr_check(s1);
2248 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2251 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2253 var_to_reg_int(s1, src->prev, REG_ITMP1);
2254 var_to_reg_int(s2, src, REG_ITMP2);
2255 if (iptr->op1 == 0) {
2256 gen_nullptr_check(s1);
2259 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);

/* LASTORECONST: a 64-bit immediate store exists only for sign-
   extended 32-bit values; larger constants are written as two 32-bit
   halves (low word, then high word at offset + 4) */
2262 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2264 var_to_reg_int(s1, src->prev, REG_ITMP1);
2265 var_to_reg_int(s2, src, REG_ITMP2);
2266 if (iptr->op1 == 0) {
2267 gen_nullptr_check(s1);
2271 if (IS_IMM32(iptr->val.l)) {
2272 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2274 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2275 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2279 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2281 var_to_reg_int(s1, src->prev, REG_ITMP1);
2282 var_to_reg_int(s2, src, REG_ITMP2);
2283 if (iptr->op1 == 0) {
2284 gen_nullptr_check(s1);
/* only the null reference can be stored without a store check */
2287 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
/* GETSTATIC: the field's value address is kept in the data segment
   and loaded RIP-relative (+7 = byte length of the load used for the
   displacement). An unresolved field (val.a == NULL) registers a
   PATCHER_get_putstatic patch ref against a placeholder dseg slot; a
   resolved but uninitialized class registers a PATCHER_clinit ref so
   <clinit> runs before first access. The 5 NOPs reserve room for the
   patcher trap when disassembly output is enabled. */
2291 case ICMD_GETSTATIC: /* ... ==> ..., value */
2292 /* op1 = type, val.a = field address */
2294 if (iptr->val.a == NULL) {
2295 disp = dseg_addaddress(cd, NULL);
2297 /* PROFILE_CYCLE_STOP; */
2299 codegen_addpatchref(cd, cd->mcodeptr,
2300 PATCHER_get_putstatic,
2301 (unresolved_field *) iptr->target, disp);
2303 if (opt_showdisassemble) {
2304 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2307 /* PROFILE_CYCLE_START; */
2310 fieldinfo *fi = iptr->val.a;
2312 disp = dseg_addaddress(cd, &(fi->value));
2314 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class)) {
2317 codegen_addpatchref(cd, cd->mcodeptr,
2318 PATCHER_clinit, fi->class, 0);
2320 if (opt_showdisassemble) {
2321 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2324 PROFILE_CYCLE_START;
2328 /* This approach is much faster than moving the field
2329 address inline into a register. */
2331 M_ALD(REG_ITMP2, RIP, -(((ptrint) cd->mcodeptr + 7) -
2332 (ptrint) cd->mcodebase) + disp);
/* dispatch on the field's JVM type (op1): load through the address
   in REG_ITMP2 with the width/unit matching the type */
2334 switch (iptr->op1) {
2336 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2337 M_ILD(d, REG_ITMP2, 0);
2338 store_reg_to_var_int(iptr->dst, d);
2342 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2343 M_LLD(d, REG_ITMP2, 0);
2344 store_reg_to_var_int(iptr->dst, d);
2347 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2348 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2349 store_reg_to_var_flt(iptr->dst, d);
2352 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2353 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2354 store_reg_to_var_flt(iptr->dst, d);

/* PUTSTATIC: mirror image of GETSTATIC (same unresolved-field /
   uninitialized-class patching protocol); the store paths follow
   below this chunk */
2359 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2360 /* op1 = type, val.a = field address */
2362 if (iptr->val.a == NULL) {
2363 disp = dseg_addaddress(cd, NULL);
2365 /* PROFILE_CYCLE_STOP; */
2367 codegen_addpatchref(cd, cd->mcodeptr,
2368 PATCHER_get_putstatic,
2369 (unresolved_field *) iptr->target, disp);
2371 if (opt_showdisassemble) {
2372 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2375 /* PROFILE_CYCLE_START; */
2378 fieldinfo *fi = iptr->val.a;
2380 disp = dseg_addaddress(cd, &(fi->value));
2382 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class)) {
2385 codegen_addpatchref(cd, cd->mcodeptr,
2386 PATCHER_clinit, fi->class, 0);
2388 if (opt_showdisassemble) {
2389 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2392 PROFILE_CYCLE_START;
2396 /* This approach is much faster than moving the field
2397 address inline into a register. */
2399 M_ALD(REG_ITMP2, RIP, -(((ptrint) cd->mcodeptr + 7) -
2400 (ptrint) cd->mcodebase) + disp);
2402 switch (iptr->op1) {
2404 var_to_reg_int(s2, src, REG_ITMP1);
2405 M_IST(s2, REG_ITMP2, 0);
2409 var_to_reg_int(s2, src, REG_ITMP1);
2410 M_LST(s2, REG_ITMP2, 0);
2413 var_to_reg_flt(s2, src, REG_FTMP1);
2414 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2417 var_to_reg_flt(s2, src, REG_FTMP1);
2418 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
2423 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2424 /* val = value (in current instruction) */
2425 /* op1 = type, val.a = field address (in */
2426 /* following NOP) */
2428 if (iptr[1].val.a == NULL) {
2429 disp = dseg_addaddress(cd, NULL);
2431 /* PROFILE_CYCLE_STOP; */
2433 codegen_addpatchref(cd, cd->mcodeptr,
2434 PATCHER_get_putstatic,
2435 (unresolved_field *) iptr[1].target, disp);
2437 if (opt_showdisassemble) {
2438 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2441 /* PROFILE_CYCLE_START; */
2444 fieldinfo *fi = iptr[1].val.a;
2446 disp = dseg_addaddress(cd, &(fi->value));
2448 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class)) {
2451 codegen_addpatchref(cd, cd->mcodeptr,
2452 PATCHER_clinit, fi->class, 0);
2454 if (opt_showdisassemble) {
2455 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2458 PROFILE_CYCLE_START;
2462 /* This approach is much faster than moving the field
2463 address inline into a register. */
2465 M_ALD(REG_ITMP1, RIP, -(((ptrint) cd->mcodeptr + 7) -
2466 (ptrint) cd->mcodebase) + disp);
2468 switch (iptr->op1) {
2471 M_IST_IMM(iptr->val.i, REG_ITMP1, 0);
2476 if (IS_IMM32(iptr->val.l)) {
2477 M_LST_IMM32(iptr->val.l, REG_ITMP1, 0);
2479 M_IST_IMM(iptr->val.l, REG_ITMP1, 0);
2480 M_IST_IMM(iptr->val.l >> 32, REG_ITMP1, 4);
2486 case ICMD_GETFIELD: /* ... ==> ..., value */
2487 /* op1 = type, val.i = field offset */
2489 var_to_reg_int(s1, src, REG_ITMP1);
2490 gen_nullptr_check(s1);
2492 if (iptr->val.a == NULL) {
2493 /* PROFILE_CYCLE_STOP; */
2495 codegen_addpatchref(cd, cd->mcodeptr,
2496 PATCHER_get_putfield,
2497 (unresolved_field *) iptr->target, 0);
2499 if (opt_showdisassemble) {
2500 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2503 /* PROFILE_CYCLE_START; */
2508 disp = ((fieldinfo *) (iptr->val.a))->offset;
2511 switch (iptr->op1) {
2513 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2514 if (iptr->val.a == NULL)
2515 M_ILD32(d, s1, disp);
2518 store_reg_to_var_int(iptr->dst, d);
2522 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2523 if (iptr->val.a == NULL)
2524 M_LLD32(d, s1, disp);
2527 store_reg_to_var_int(iptr->dst, d);
2530 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2531 x86_64_movss_membase32_reg(cd, s1, disp, d);
2532 store_reg_to_var_flt(iptr->dst, d);
2535 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2536 x86_64_movsd_membase32_reg(cd, s1, disp, d);
2537 store_reg_to_var_flt(iptr->dst, d);
2542 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2543 /* op1 = type, val.i = field offset */
2545 var_to_reg_int(s1, src->prev, REG_ITMP1);
2546 gen_nullptr_check(s1);
2548 if (IS_INT_LNG_TYPE(iptr->op1)) {
2549 var_to_reg_int(s2, src, REG_ITMP2);
2551 var_to_reg_flt(s2, src, REG_FTMP2);
2554 if (iptr->val.a == NULL) {
2555 /* PROFILE_CYCLE_STOP; */
2557 codegen_addpatchref(cd, cd->mcodeptr,
2558 PATCHER_get_putfield,
2559 (unresolved_field *) iptr->target, 0);
2561 if (opt_showdisassemble) {
2562 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2565 /* PROFILE_CYCLE_START; */
2570 disp = ((fieldinfo *) (iptr->val.a))->offset;
2573 switch (iptr->op1) {
2575 if (iptr->val.a == NULL)
2576 M_IST32(s2, s1, disp);
2578 M_IST(s2, s1, disp);
2582 if (iptr->val.a == NULL)
2583 M_LST32(s2, s1, disp);
2585 M_LST(s2, s1, disp);
2588 x86_64_movss_reg_membase32(cd, s2, s1, disp);
2591 x86_64_movsd_reg_membase32(cd, s2, s1, disp);
2596 case ICMD_PUTFIELDCONST: /* ..., objectref, value ==> ... */
2597 /* val = value (in current instruction) */
2598 /* op1 = type, val.a = field address (in */
2599 /* following NOP) */
2601 var_to_reg_int(s1, src, REG_ITMP1);
2602 gen_nullptr_check(s1);
2604 if (iptr[1].val.a == NULL) {
2605 /* PROFILE_CYCLE_STOP; */
2607 codegen_addpatchref(cd, cd->mcodeptr,
2608 PATCHER_putfieldconst,
2609 (unresolved_field *) iptr[1].target, 0);
2611 if (opt_showdisassemble) {
2612 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2615 /* PROFILE_CYCLE_START; */
2620 disp = ((fieldinfo *) (iptr[1].val.a))->offset;
2623 switch (iptr->op1) {
2626 if (iptr[1].val.a == NULL)
2627 M_IST32_IMM(iptr->val.i, s1, disp);
2629 M_IST_IMM(iptr->val.i, s1, disp);
2634 /* We can only optimize the move, if the class is
2635 resolved. Otherwise we don't know what to patch. */
2636 if (iptr[1].val.a == NULL) {
2637 M_IST32_IMM(iptr->val.l, s1, disp);
2638 M_IST32_IMM(iptr->val.l >> 32, s1, disp + 4);
2640 if (IS_IMM32(iptr->val.l)) {
2641 M_LST_IMM32(iptr->val.l, s1, disp);
2643 M_IST_IMM(iptr->val.l, s1, disp);
2644 M_IST_IMM(iptr->val.l >> 32, s1, disp + 4);
2652 /* branch operations **************************************************/
2654 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2656 var_to_reg_int(s1, src, REG_ITMP1);
2657 M_INTMOVE(s1, REG_ITMP1_XPTR);
2661 #ifdef ENABLE_VERIFIER
2663 codegen_addpatchref(cd, cd->mcodeptr,
2664 PATCHER_athrow_areturn,
2665 (unresolved_class *) iptr->val.a, 0);
2667 if (opt_showdisassemble) {
2668 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2671 #endif /* ENABLE_VERIFIER */
2673 M_CALL_IMM(0); /* passing exception pc */
2674 M_POP(REG_ITMP2_XPC);
2676 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
2680 case ICMD_GOTO: /* ... ==> ... */
2681 /* op1 = target JavaVM pc */
2684 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2687 case ICMD_JSR: /* ... ==> ... */
2688 /* op1 = target JavaVM pc */
2691 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2694 case ICMD_RET: /* ... ==> ... */
2695 /* op1 = local variable */
2697 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2698 var_to_reg_int(s1, var, REG_ITMP1);
2702 case ICMD_IFNULL: /* ..., value ==> ... */
2703 /* op1 = target JavaVM pc */
2705 if (src->flags & INMEMORY)
2706 M_CMP_IMM_MEMBASE(0, REG_SP, src->regoff * 8);
2708 M_TEST(src->regoff);
2710 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2713 case ICMD_IFNONNULL: /* ..., value ==> ... */
2714 /* op1 = target JavaVM pc */
2716 if (src->flags & INMEMORY)
2717 M_CMP_IMM_MEMBASE(0, REG_SP, src->regoff * 8);
2719 M_TEST(src->regoff);
2721 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2724 case ICMD_IFEQ: /* ..., value ==> ... */
2725 /* op1 = target JavaVM pc, val.i = constant */
2727 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2730 case ICMD_IFLT: /* ..., value ==> ... */
2731 /* op1 = target JavaVM pc, val.i = constant */
2733 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2736 case ICMD_IFLE: /* ..., value ==> ... */
2737 /* op1 = target JavaVM pc, val.i = constant */
2739 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2742 case ICMD_IFNE: /* ..., value ==> ... */
2743 /* op1 = target JavaVM pc, val.i = constant */
2745 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2748 case ICMD_IFGT: /* ..., value ==> ... */
2749 /* op1 = target JavaVM pc, val.i = constant */
2751 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2754 case ICMD_IFGE: /* ..., value ==> ... */
2755 /* op1 = target JavaVM pc, val.i = constant */
2757 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
2760 case ICMD_IF_LEQ: /* ..., value ==> ... */
2761 /* op1 = target JavaVM pc, val.l = constant */
2763 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2766 case ICMD_IF_LLT: /* ..., value ==> ... */
2767 /* op1 = target JavaVM pc, val.l = constant */
2769 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2772 case ICMD_IF_LLE: /* ..., value ==> ... */
2773 /* op1 = target JavaVM pc, val.l = constant */
2775 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2778 case ICMD_IF_LNE: /* ..., value ==> ... */
2779 /* op1 = target JavaVM pc, val.l = constant */
2781 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2784 case ICMD_IF_LGT: /* ..., value ==> ... */
2785 /* op1 = target JavaVM pc, val.l = constant */
2787 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2790 case ICMD_IF_LGE: /* ..., value ==> ... */
2791 /* op1 = target JavaVM pc, val.l = constant */
2793 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
2796 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2797 /* op1 = target JavaVM pc */
2799 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2802 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2803 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2805 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2808 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2809 /* op1 = target JavaVM pc */
2811 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2814 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2815 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2817 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2820 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2821 /* op1 = target JavaVM pc */
2823 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2826 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2827 /* op1 = target JavaVM pc */
2829 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2832 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2833 /* op1 = target JavaVM pc */
2835 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2838 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2839 /* op1 = target JavaVM pc */
2841 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2844 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2845 /* op1 = target JavaVM pc */
2847 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2850 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2851 /* op1 = target JavaVM pc */
2853 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2856 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2857 /* op1 = target JavaVM pc */
2859 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2862 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2863 /* op1 = target JavaVM pc */
2865 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2868 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
2870 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2873 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2874 case ICMD_IFNE_ICONST: /* val.i = constant */
2875 case ICMD_IFLT_ICONST:
2876 case ICMD_IFGE_ICONST:
2877 case ICMD_IFGT_ICONST:
2878 case ICMD_IFLE_ICONST:
2880 var_to_reg_int(s1, src, REG_ITMP1);
2881 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2882 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2884 M_INTMOVE(s1, REG_ITMP1);
2887 if (iptr[1].val.i == 0)
2890 M_IMOV_IMM(iptr[1].val.i, d);
2892 if (iptr->val.i == 0)
2895 M_IMOV_IMM(iptr->val.i, REG_ITMP2);
2898 switch (iptr->opc) {
2899 case ICMD_IFEQ_ICONST:
2900 M_CMOVEQ(REG_ITMP2, d);
2902 case ICMD_IFNE_ICONST:
2903 M_CMOVNE(REG_ITMP2, d);
2905 case ICMD_IFLT_ICONST:
2906 M_CMOVLT(REG_ITMP2, d);
2908 case ICMD_IFGE_ICONST:
2909 M_CMOVGE(REG_ITMP2, d);
2911 case ICMD_IFGT_ICONST:
2912 M_CMOVGT(REG_ITMP2, d);
2914 case ICMD_IFLE_ICONST:
2915 M_CMOVLE(REG_ITMP2, d);
2919 store_reg_to_var_int(iptr->dst, d);
2923 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2926 var_to_reg_int(s1, src, REG_RESULT);
2927 M_INTMOVE(s1, REG_RESULT);
2928 goto nowperformreturn;
2930 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2932 var_to_reg_int(s1, src, REG_RESULT);
2933 M_INTMOVE(s1, REG_RESULT);
2935 #ifdef ENABLE_VERIFIER
2939 codegen_addpatchref(cd, cd->mcodeptr,
2940 PATCHER_athrow_areturn,
2941 (unresolved_class *) iptr->val.a, 0);
2943 if (opt_showdisassemble) {
2944 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2947 PROFILE_CYCLE_START;
2949 #endif /* ENABLE_VERIFIER */
2950 goto nowperformreturn;
2952 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2955 var_to_reg_flt(s1, src, REG_FRESULT);
2956 M_FLTMOVE(s1, REG_FRESULT);
2957 goto nowperformreturn;
2959 case ICMD_RETURN: /* ... ==> ... */
2965 p = parentargs_base;
2967 /* call trace function */
2969 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2971 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2972 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2974 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
2975 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
2976 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2977 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2979 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
2980 x86_64_call_reg(cd, REG_ITMP1);
2982 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2983 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2985 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2988 #if defined(USE_THREADS)
2989 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2990 M_ALD(rd->argintregs[0], REG_SP, rd->memuse * 8);
2992 /* we need to save the proper return value */
2993 switch (iptr->opc) {
2997 M_LST(REG_RESULT, REG_SP, rd->memuse * 8);
3001 M_DST(REG_FRESULT, REG_SP, rd->memuse * 8);
3005 M_MOV_IMM((ptrint) builtin_monitorexit, REG_ITMP1);
3008 /* and now restore the proper return value */
3009 switch (iptr->opc) {
3013 M_LLD(REG_RESULT, REG_SP, rd->memuse * 8);
3017 M_DLD(REG_FRESULT, REG_SP, rd->memuse * 8);
3023 /* restore saved registers */
3025 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
3026 p--; M_LLD(rd->savintregs[i], REG_SP, p * 8);
3028 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
3029 p--; M_DLD(rd->savfltregs[i], REG_SP, p * 8);
3032 /* deallocate stack */
3034 if (parentargs_base)
3035 M_AADD_IMM(parentargs_base * 8, REG_SP);
3037 /* generate method profiling code */
3046 case ICMD_TABLESWITCH: /* ..., index ==> ... */
3051 tptr = (void **) iptr->target;
3053 s4ptr = iptr->val.a;
3054 l = s4ptr[1]; /* low */
3055 i = s4ptr[2]; /* high */
3057 var_to_reg_int(s1, src, REG_ITMP1);
3058 M_INTMOVE(s1, REG_ITMP1);
3060 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
3065 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
3066 x86_64_jcc(cd, X86_64_CC_A, 0);
3068 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
3069 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3071 /* build jump table top down and use address of lowest entry */
3073 /* s4ptr += 3 + i; */
3077 dseg_addtarget(cd, (basicblock *) tptr[0]);
3081 /* length of dataseg after last dseg_addtarget is used by load */
3083 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
3084 dseg_adddata(cd, cd->mcodeptr);
3085 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
3086 x86_64_jmp_reg(cd, REG_ITMP1);
3091 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
3093 s4 i, l, val, *s4ptr;
3096 tptr = (void **) iptr->target;
3098 s4ptr = iptr->val.a;
3099 l = s4ptr[0]; /* default */
3100 i = s4ptr[1]; /* count */
3102 MCODECHECK(8 + ((7 + 6) * i) + 5);
3103 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
3109 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
3110 x86_64_jcc(cd, X86_64_CC_E, 0);
3111 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3114 x86_64_jmp_imm(cd, 0);
3116 tptr = (void **) iptr->target;
3117 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3122 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
3123 /* op1 = arg count val.a = builtintable entry */
3129 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3130 /* op1 = arg count, val.a = method pointer */
3132 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3133 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
3134 case ICMD_INVOKEINTERFACE:
3139 unresolved_method *um = iptr->target;
3140 md = um->methodref->parseddesc.md;
3142 md = lm->parseddesc;
3146 s3 = md->paramcount;
3148 MCODECHECK((20 * s3) + 128);
3150 /* copy arguments to registers or stack location */
3152 for (s3 = s3 - 1; s3 >= 0; s3--, src = src->prev) {
3153 if (src->varkind == ARGVAR)
3155 if (IS_INT_LNG_TYPE(src->type)) {
3156 if (!md->params[s3].inmemory) {
3157 s1 = rd->argintregs[md->params[s3].regoff];
3158 var_to_reg_int(d, src, s1);
3161 var_to_reg_int(d, src, REG_ITMP1);
3162 M_LST(d, REG_SP, md->params[s3].regoff * 8);
3166 if (!md->params[s3].inmemory) {
3167 s1 = rd->argfltregs[md->params[s3].regoff];
3168 var_to_reg_flt(d, src, s1);
3171 var_to_reg_flt(d, src, REG_FTMP1);
3172 M_DST(d, REG_SP, md->params[s3].regoff * 8);
3177 /* generate method profiling code */
3181 switch (iptr->opc) {
3183 a = (ptrint) bte->fp;
3184 d = md->returntype.type;
3186 M_MOV_IMM(a, REG_ITMP1);
3189 /* if op1 == true, we need to check for an exception */
3191 if (iptr->op1 == true) {
3194 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3198 case ICMD_INVOKESPECIAL:
3199 M_TEST(rd->argintregs[0]);
3201 codegen_addxnullrefs(cd, cd->mcodeptr);
3203 /* first argument contains pointer */
3204 /* gen_nullptr_check(rd->argintregs[0]); */
3206 /* access memory for hardware nullptr */
3207 /* x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); */
3211 case ICMD_INVOKESTATIC:
3213 unresolved_method *um = iptr->target;
3215 codegen_addpatchref(cd, cd->mcodeptr,
3216 PATCHER_invokestatic_special, um, 0);
3218 if (opt_showdisassemble) {
3219 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3223 d = um->methodref->parseddesc.md->returntype.type;
3226 a = (ptrint) lm->stubroutine;
3227 d = lm->parseddesc->returntype.type;
3230 M_MOV_IMM(a, REG_ITMP2);
3234 case ICMD_INVOKEVIRTUAL:
3235 gen_nullptr_check(rd->argintregs[0]);
3238 unresolved_method *um = iptr->target;
3240 codegen_addpatchref(cd, cd->mcodeptr,
3241 PATCHER_invokevirtual, um, 0);
3243 if (opt_showdisassemble) {
3244 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3248 d = um->methodref->parseddesc.md->returntype.type;
3251 s1 = OFFSET(vftbl_t, table[0]) +
3252 sizeof(methodptr) * lm->vftblindex;
3253 d = lm->parseddesc->returntype.type;
3256 x86_64_mov_membase_reg(cd, rd->argintregs[0],
3257 OFFSET(java_objectheader, vftbl),
3259 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP1);
3263 case ICMD_INVOKEINTERFACE:
3264 gen_nullptr_check(rd->argintregs[0]);
3267 unresolved_method *um = iptr->target;
3269 codegen_addpatchref(cd, cd->mcodeptr,
3270 PATCHER_invokeinterface, um, 0);
3272 if (opt_showdisassemble) {
3273 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3278 d = um->methodref->parseddesc.md->returntype.type;
3281 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3282 sizeof(methodptr) * lm->class->index;
3284 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3286 d = lm->parseddesc->returntype.type;
3289 M_ALD(REG_ITMP2, rd->argintregs[0],
3290 OFFSET(java_objectheader, vftbl));
3291 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP2);
3292 x86_64_mov_membase32_reg(cd, REG_ITMP2, s2, REG_ITMP1);
3297 /* generate method profiling code */
3299 PROFILE_CYCLE_START;
3301 /* d contains return type */
3303 if (d != TYPE_VOID) {
3304 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3305 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3306 M_INTMOVE(REG_RESULT, s1);
3307 store_reg_to_var_int(iptr->dst, s1);
3309 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3310 M_FLTMOVE(REG_FRESULT, s1);
3311 store_reg_to_var_flt(iptr->dst, s1);
3317 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3319 /* op1: 0 == array, 1 == class */
3320 /* val.a: (classinfo *) superclass */
3322 /* superclass is an interface:
3324 * OK if ((sub == NULL) ||
3325 * (sub->vftbl->interfacetablelength > super->index) &&
3326 * (sub->vftbl->interfacetable[-super->index] != NULL));
3328 * superclass is a class:
3330 * OK if ((sub == NULL) || (0
3331 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3332 * super->vftbl->diffval));
3335 if (iptr->op1 == 1) {
3336 /* object type cast-check */
3339 vftbl_t *supervftbl;
3342 super = (classinfo *) iptr->val.a;
3349 superindex = super->index;
3350 supervftbl = super->vftbl;
3353 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3354 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3356 var_to_reg_int(s1, src, REG_ITMP1);
3358 /* calculate interface checkcast code size */
3360 s2 = 3; /* mov_membase_reg */
3361 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3363 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub imm32 */ +
3364 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3365 3 /* test */ + 6 /* jcc */;
3368 s2 += (opt_showdisassemble ? 5 : 0);
3370 /* calculate class checkcast code size */
3372 s3 = 3; /* mov_membase_reg */
3373 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3374 s3 += 10 /* mov_imm_reg */ + 3 + 4 /* movl_membase32_reg */;
3377 if (s1 != REG_ITMP1) {
3378 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3379 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3380 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3381 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3387 s3 += 3 + 4 /* movl_membase32_reg */ + 3 /* sub */ +
3388 10 /* mov_imm_reg */ + 3 /* movl_membase_reg */;
3389 CALCOFFSETBYTES(s3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3392 s3 += 3 /* cmp */ + 6 /* jcc */;
3395 s3 += (opt_showdisassemble ? 5 : 0);
3397 /* if class is not resolved, check which code to call */
3401 M_BEQ(6 + (opt_showdisassemble ? 5 : 0) + 7 + 6 + s2 + 5 + s3);
3403 codegen_addpatchref(cd, cd->mcodeptr,
3404 PATCHER_checkcast_instanceof_flags,
3405 (constant_classref *) iptr->target, 0);
3407 if (opt_showdisassemble) {
3408 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3411 M_IMOV_IMM(0, REG_ITMP2); /* super->flags */
3412 M_IAND_IMM(ACC_INTERFACE, REG_ITMP2);
3416 /* interface checkcast code */
3418 if (!super || (super->flags & ACC_INTERFACE)) {
3424 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3427 codegen_addpatchref(cd, cd->mcodeptr,
3428 PATCHER_checkcast_instanceof_interface,
3429 (constant_classref *) iptr->target, 0);
3431 if (opt_showdisassemble) {
3432 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3436 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3437 OFFSET(vftbl_t, interfacetablelength),
3439 /* XXX TWISTI: should this be int arithmetic? */
3440 M_LSUB_IMM32(superindex, REG_ITMP3);
3443 codegen_addxcastrefs(cd, cd->mcodeptr);
3444 x86_64_mov_membase32_reg(cd, REG_ITMP2,
3445 OFFSET(vftbl_t, interfacetable[0]) -
3446 superindex * sizeof(methodptr*),
3450 codegen_addxcastrefs(cd, cd->mcodeptr);
3456 /* class checkcast code */
3458 if (!super || !(super->flags & ACC_INTERFACE)) {
3464 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3467 codegen_addpatchref(cd, cd->mcodeptr,
3468 PATCHER_checkcast_class,
3469 (constant_classref *) iptr->target,
3472 if (opt_showdisassemble) {
3473 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3477 M_MOV_IMM((ptrint) supervftbl, REG_ITMP3);
3478 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3479 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3481 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3482 OFFSET(vftbl_t, baseval),
3484 /* if (s1 != REG_ITMP1) { */
3485 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3486 /* OFFSET(vftbl_t, baseval), */
3488 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3489 /* OFFSET(vftbl_t, diffval), */
3491 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3492 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3494 /* x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP1, REG_ITMP2); */
3497 x86_64_movl_membase32_reg(cd, REG_ITMP3,
3498 OFFSET(vftbl_t, baseval),
3500 M_LSUB(REG_ITMP3, REG_ITMP2);
3501 M_MOV_IMM((ptrint) supervftbl, REG_ITMP3);
3502 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3504 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3505 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3507 M_CMP(REG_ITMP3, REG_ITMP2);
3508 M_BA(0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3509 codegen_addxcastrefs(cd, cd->mcodeptr);
3511 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3514 /* array type cast-check */
3516 var_to_reg_int(s1, src, REG_ITMP1);
3517 M_INTMOVE(s1, rd->argintregs[0]);
3519 if (iptr->val.a == NULL) {
3520 codegen_addpatchref(cd, cd->mcodeptr,
3521 PATCHER_builtin_arraycheckcast,
3522 (constant_classref *) iptr->target, 0);
3524 if (opt_showdisassemble) {
3525 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3529 M_MOV_IMM((ptrint) iptr->val.a, rd->argintregs[1]);
3530 M_MOV_IMM((ptrint) BUILTIN_arraycheckcast, REG_ITMP1);
3534 codegen_addxcastrefs(cd, cd->mcodeptr);
3536 var_to_reg_int(s1, src, REG_ITMP1);
3537 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
3540 store_reg_to_var_int(iptr->dst, d);
3543 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3545 /* op1: 0 == array, 1 == class */
3546 /* val.a: (classinfo *) superclass */
3548 /* superclass is an interface:
3550 * return (sub != NULL) &&
3551 * (sub->vftbl->interfacetablelength > super->index) &&
3552 * (sub->vftbl->interfacetable[-super->index] != NULL);
3554 * superclass is a class:
3556 * return ((sub != NULL) && (0
3557 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3558 * super->vftbl->diffvall));
3563 vftbl_t *supervftbl;
3566 super = (classinfo *) iptr->val.a;
3573 superindex = super->index;
3574 supervftbl = super->vftbl;
3577 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3578 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3581 var_to_reg_int(s1, src, REG_ITMP1);
3582 d = reg_of_var(rd, iptr->dst, REG_ITMP2);
3584 M_INTMOVE(s1, REG_ITMP1);
3588 /* calculate interface instanceof code size */
3590 s2 = 3; /* mov_membase_reg */
3591 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3592 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub_imm32 */ +
3593 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3594 3 /* test */ + 4 /* setcc */;
3597 s2 += (opt_showdisassemble ? 5 : 0);
3599 /* calculate class instanceof code size */
3601 s3 = 3; /* mov_membase_reg */
3602 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3603 s3 += 10; /* mov_imm_reg */
3604 s3 += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3605 CALCOFFSETBYTES(s3, REG_ITMP1, OFFSET(vftbl_t, baseval));
3606 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3607 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, baseval));
3608 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3609 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3610 s3 += 3 /* sub */ + 3 /* xor */ + 3 /* cmp */ + 4 /* setcc */;
3613 s3 += (opt_showdisassemble ? 5 : 0);
3615 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3617 /* if class is not resolved, check which code to call */
3620 x86_64_test_reg_reg(cd, s1, s1);
3621 x86_64_jcc(cd, X86_64_CC_Z, (6 + (opt_showdisassemble ? 5 : 0) +
3622 7 + 6 + s2 + 5 + s3));
3624 codegen_addpatchref(cd, cd->mcodeptr,
3625 PATCHER_checkcast_instanceof_flags,
3626 (constant_classref *) iptr->target, 0);
3628 if (opt_showdisassemble) {
3629 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3632 x86_64_movl_imm_reg(cd, 0, REG_ITMP3); /* super->flags */
3633 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP3);
3634 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3637 /* interface instanceof code */
3639 if (!super || (super->flags & ACC_INTERFACE)) {
3641 x86_64_test_reg_reg(cd, s1, s1);
3642 x86_64_jcc(cd, X86_64_CC_Z, s2);
3645 x86_64_mov_membase_reg(cd, s1,
3646 OFFSET(java_objectheader, vftbl),
3649 codegen_addpatchref(cd, cd->mcodeptr,
3650 PATCHER_checkcast_instanceof_interface,
3651 (constant_classref *) iptr->target, 0);
3653 if (opt_showdisassemble) {
3654 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3658 x86_64_movl_membase32_reg(cd, REG_ITMP1,
3659 OFFSET(vftbl_t, interfacetablelength),
3661 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3662 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3664 a = 3 + 4 /* mov_membase32_reg */ + 3 /* test */ + 4 /* setcc */;
3666 x86_64_jcc(cd, X86_64_CC_LE, a);
3667 x86_64_mov_membase32_reg(cd, REG_ITMP1,
3668 OFFSET(vftbl_t, interfacetable[0]) -
3669 superindex * sizeof(methodptr*),
3671 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3672 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3675 x86_64_jmp_imm(cd, s3);
3678 /* class instanceof code */
3680 if (!super || !(super->flags & ACC_INTERFACE)) {
3682 x86_64_test_reg_reg(cd, s1, s1);
3683 x86_64_jcc(cd, X86_64_CC_E, s3);
3686 x86_64_mov_membase_reg(cd, s1,
3687 OFFSET(java_objectheader, vftbl),
3691 codegen_addpatchref(cd, cd->mcodeptr,
3692 PATCHER_instanceof_class,
3693 (constant_classref *) iptr->target, 0);
3695 if (opt_showdisassemble) {
3696 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3700 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP2);
3701 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3702 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3704 x86_64_movl_membase_reg(cd, REG_ITMP1,
3705 OFFSET(vftbl_t, baseval),
3707 x86_64_movl_membase_reg(cd, REG_ITMP2,
3708 OFFSET(vftbl_t, diffval),
3710 x86_64_movl_membase_reg(cd, REG_ITMP2,
3711 OFFSET(vftbl_t, baseval),
3713 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3714 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3716 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
3717 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d); /* may be REG_ITMP2 */
3718 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP1);
3719 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3721 store_reg_to_var_int(iptr->dst, d);
3725 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3726 /* op1 = dimension, val.a = class */
3728 /* check for negative sizes and copy sizes to stack if necessary */
3730 MCODECHECK((10 * 4 * iptr->op1) + 5 + 10 * 8);
/* walk the op1 dimension-size stack slots from top to bottom so the
   builtin can read them as a contiguous array at SP */
3732 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3733 /* copy SAVEDVAR sizes to stack */
3735 if (src->varkind != ARGVAR) {
3736 var_to_reg_int(s2, src, REG_ITMP1);
3737 M_LST(s2, REG_SP, s1 * 8);
3741 /* is a patcher function set? */
/* val.a == NULL means the array class is unresolved: the patcher
   resolves it before the builtin is ever reached */
3743 if (iptr->val.a == NULL) {
3744 codegen_addpatchref(cd, cd->mcodeptr,
3745 PATCHER_builtin_multianewarray,
3746 (constant_classref *) iptr->target, 0);
3748 if (opt_showdisassemble) {
3749 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3755 a = (ptrint) iptr->val.a;
3758 /* a0 = dimension count */
3760 M_MOV_IMM(iptr->op1, rd->argintregs[0]);
3762 /* a1 = arrayvftbl */
3764 M_MOV_IMM((ptrint) iptr->val.a, rd->argintregs[1]);
3766 /* a2 = pointer to dimensions = stack pointer */
3768 M_MOV(REG_SP, rd->argintregs[2]);
3770 M_MOV_IMM((ptrint) BUILTIN_multianewarray, REG_ITMP1);
3773 /* check for exception before result assignment */
3777 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3779 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3780 M_INTMOVE(REG_RESULT, s1);
3781 store_reg_to_var_int(iptr->dst, s1);
/* unknown opcode: report an InternalError instead of emitting garbage */
3785 *exceptionptr = new_internalerror("Unknown ICMD %d", iptr->opc);
3789 } /* for instruction */
3791 /* copy values to interface registers */
3793 src = bptr->outstack;
3794 len = bptr->outdepth;
3796 #if defined(ENABLE_LSRA)
3801 if ((src->varkind != STACKVAR)) {
/* float/double outstack values go to the interface float register,
   or to the stack slot when the interface var lives in memory */
3803 if (IS_FLT_DBL_TYPE(s2)) {
3804 var_to_reg_flt(s1, src, REG_FTMP1);
3805 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3806 M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3809 x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
/* int/long/address values: same scheme with integer moves */
3813 var_to_reg_int(s1, src, REG_ITMP1);
3814 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3815 M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3818 x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3825 /* At the end of a basic block we may have to append some nops,
3826 because the patcher stub calling code might be longer than the
3827 actual instruction. So codepatching does not change the
3828 following block unintentionally. */
3830 if (cd->mcodeptr < cd->lastmcodeptr) {
3831 while (cd->mcodeptr < cd->lastmcodeptr) {
3836 } /* if (bptr -> flags >= BBREACHED) */
3837 } /* for basic block */
/* emit the collected line number info into the data segment so stack
   traces can map PCs back to Java source lines */
3839 dseg_createlinenumbertable(cd);
3846 /* generate ArithmeticException stubs */
/* All six stub sections below share one pattern: each branch reference
   is resolved to jump here; the XPC (exception PC) is reconstructed from
   the data-segment base plus the branch position; the first reference
   emits the actual stub body (xcodeptr == NULL), later references merely
   jump to the already-emitted stub. */
3850 for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
3851 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3853 cd->mcodeptr - cd->mcodebase);
/* materialize XPC: dseg_adddata patches the 0 immediate with the data
   segment address; the add rebases it to the faulting instruction */
3857 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
3858 dseg_adddata(cd, cd->mcodeptr);
3859 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
/* stub body already emitted? just jump to it */
3861 if (xcodeptr != NULL) {
3862 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
3865 xcodeptr = cd->mcodeptr;
/* a0 = PV (method entry), a1 = SP, a2 = RA, a3 = XPC */
3867 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3868 M_MOV(REG_SP, rd->argintregs[1]);
3869 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3870 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
/* preserve XPC across the helper call (2*8 keeps SP 16-aligned) */
3872 M_ASUB_IMM(2 * 8, REG_SP);
3873 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
3875 M_MOV_IMM((ptrint) stacktrace_inline_arithmeticexception,
3879 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
3880 M_AADD_IMM(2 * 8, REG_SP);
/* hand the created exception to the asm exception dispatcher */
3882 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
3887 /* generate ArrayIndexOutOfBoundsException stubs */
3891 for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3892 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3894 cd->mcodeptr - cd->mcodebase);
3898 /* move index register into REG_ITMP1 */
/* the offending index is passed as an extra (5th) argument below */
3900 M_MOV(bref->reg, REG_ITMP1); /* 3 bytes */
3902 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
3903 dseg_adddata(cd, cd->mcodeptr);
3904 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
3906 if (xcodeptr != NULL) {
3907 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
3910 xcodeptr = cd->mcodeptr;
3912 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3913 M_MOV(REG_SP, rd->argintregs[1]);
3914 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3915 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
3916 M_MOV(REG_ITMP1, rd->argintregs[4]);
3918 M_ASUB_IMM(2 * 8, REG_SP);
3919 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
3921 M_MOV_IMM((ptrint) stacktrace_inline_arrayindexoutofboundsexception,
3925 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
3926 M_AADD_IMM(2 * 8, REG_SP);
3928 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
3933 /* generate ArrayStoreException stubs */
3937 for (bref = cd->xstorerefs; bref != NULL; bref = bref->next) {
3938 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3940 cd->mcodeptr - cd->mcodebase);
3944 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
3945 dseg_adddata(cd, cd->mcodeptr);
3946 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
3948 if (xcodeptr != NULL) {
3949 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
3952 xcodeptr = cd->mcodeptr;
3954 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3955 M_MOV(REG_SP, rd->argintregs[1]);
3956 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3957 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
3959 M_ASUB_IMM(2 * 8, REG_SP);
3960 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
3962 M_MOV_IMM((ptrint) stacktrace_inline_arraystoreexception,
3966 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
3967 M_AADD_IMM(2 * 8, REG_SP);
3969 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
3974 /* generate ClassCastException stubs */
3978 for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3979 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3981 cd->mcodeptr - cd->mcodebase);
3985 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
3986 dseg_adddata(cd, cd->mcodeptr);
3987 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
3989 if (xcodeptr != NULL) {
3990 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
3993 xcodeptr = cd->mcodeptr;
3995 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3996 M_MOV(REG_SP, rd->argintregs[1]);
3997 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3998 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
4000 M_ASUB_IMM(2 * 8, REG_SP);
4001 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
4003 M_MOV_IMM((ptrint) stacktrace_inline_classcastexception, REG_ITMP3);
4006 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
4007 M_AADD_IMM(2 * 8, REG_SP);
4009 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
4014 /* generate NullpointerException stubs */
4018 for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
4019 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4021 cd->mcodeptr - cd->mcodebase);
4025 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
4026 dseg_adddata(cd, cd->mcodeptr);
4027 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
4029 if (xcodeptr != NULL) {
4030 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
4033 xcodeptr = cd->mcodeptr;
4035 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
4036 M_MOV(REG_SP, rd->argintregs[1]);
4037 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
4038 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
4040 M_ASUB_IMM(2 * 8, REG_SP);
4041 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
4043 M_MOV_IMM((ptrint) stacktrace_inline_nullpointerexception,
4047 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
4048 M_AADD_IMM(2 * 8, REG_SP);
4050 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
4055 /* generate exception check stubs */
/* these handle exceptions set by builtins/natives: the pending
   exception's stack trace is filled in before dispatching */
4059 for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
4060 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4062 cd->mcodeptr - cd->mcodebase);
4066 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
4067 dseg_adddata(cd, cd->mcodeptr);
4068 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
4070 if (xcodeptr != NULL) {
4071 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
4074 xcodeptr = cd->mcodeptr;
4076 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
4077 M_MOV(REG_SP, rd->argintregs[1]);
4078 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
4079 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
4081 M_ASUB_IMM(2 * 8, REG_SP);
4082 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
4084 M_MOV_IMM((ptrint) stacktrace_inline_fillInStackTrace, REG_ITMP3);
4087 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
4088 M_AADD_IMM(2 * 8, REG_SP);
4090 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
4095 /* generate code patching stub call code */
/* tmpcd is a throwaway codegendata used only to emit the `call rel32'
   back-patch at the original code position without disturbing cd */
4102 tmpcd = DNEW(codegendata);
4104 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
4105 /* check size of code segment */
4109 /* Get machine code which is patched back in later. A */
4110 /* `call rel32' is 5 bytes long (but read 8 bytes). */
4112 xcodeptr = cd->mcodebase + pref->branchpos;
4113 mcode = *((ptrint *) xcodeptr);
4115 /* patch in `call rel32' to call the following code */
4117 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4118 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4120 /* move pointer to java_objectheader onto stack */
4122 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4123 /* create a virtual java_objectheader */
/* the fake object header lets the patcher synchronize on a per-site
   monitor while patching */
4125 (void) dseg_addaddress(cd, get_dummyLR()); /* monitorPtr */
4126 a = dseg_addaddress(cd, NULL); /* vftbl */
4128 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP3);
4134 /* move machine code bytes and classinfo pointer into registers */
/* saved original 8 bytes, the unresolved reference, displacement and
   patcher function are passed through to asm_wrapper_patcher */
4136 M_MOV_IMM((ptrint) mcode, REG_ITMP3);
4138 M_MOV_IMM((ptrint) pref->ref, REG_ITMP3);
4140 M_MOV_IMM((ptrint) pref->disp, REG_ITMP3);
4143 M_MOV_IMM((ptrint) pref->patcher, REG_ITMP3);
4146 M_MOV_IMM((ptrint) asm_wrapper_patcher, REG_ITMP3);
/* finalize: flush/copy the generated code and set entry points */
4152 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
4154 /* everything's ok */
4160 /* createcompilerstub **********************************************************
4162 Creates a stub routine which calls the compiler.
4164 *******************************************************************************/
/* stub layout: two data words (methodinfo*, compiler entry) followed by
   the code; the code reads both via negative RIP-relative offsets */
4166 #define COMPILERSTUB_DATASIZE 2 * SIZEOF_VOID_P
4167 #define COMPILERSTUB_CODESIZE 7 + 7 + 3
4169 #define COMPILERSTUB_SIZE COMPILERSTUB_DATASIZE + COMPILERSTUB_CODESIZE
4172 u1 *createcompilerstub(methodinfo *m)
4174 u1 *s; /* memory to hold the stub */
4179 s = CNEW(u1, COMPILERSTUB_SIZE);
4181 /* set data pointer and code pointer */
/* advance past the data words so s now points at the code start */
4184 s = s + COMPILERSTUB_DATASIZE;
4186 /* mark start of dump memory area */
4188 dumpsize = dump_size();
4190 cd = DNEW(codegendata);
4193 /* Store the methodinfo* in the same place as in the methodheader
4194 for compiled methods. */
4196 d[0] = (ptrint) asm_call_jit_compiler;
4199 /* code for the stub */
/* RIP-relative loads reach back into the data area emitted above */
4201 M_ALD(REG_ITMP1, RIP, -(7 * 1 + 1 * SIZEOF_VOID_P)); /* methodinfo */
4202 M_ALD(REG_ITMP3, RIP, -(7 * 2 + 2 * SIZEOF_VOID_P)); /* compiler pointer */
4205 #if defined(ENABLE_STATISTICS)
4207 count_cstub_len += COMPILERSTUB_SIZE;
4210 /* release dump area */
4212 dump_release(dumpsize);
4218 /* createnativestub ************************************************************
4220 Creates a stub routine which calls a native method.
4222 *******************************************************************************/
/* Build a stub that bridges from JIT calling convention to a native
   (JNI) function: shifts arguments, inserts env/class, creates the
   stackframeinfo and handles the pending-exception check on return. */
4224 u1 *createnativestub(functionptr f, methodinfo *m, codegendata *cd,
4225 registerdata *rd, methoddesc *nmd)
4228 s4 stackframesize; /* size of stackframe if needed */
4230 s4 i, j; /* count variables */
4234 /* initialize variables */
/* static natives get 2 extra leading params (env, class), others 1 (env) */
4237 nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;
4239 /* calculate stack frame size */
4242 sizeof(stackframeinfo) / SIZEOF_VOID_P +
4243 sizeof(localref_table) / SIZEOF_VOID_P +
4244 INT_ARG_CNT + FLT_ARG_CNT + 1 + /* + 1 for function address */
4247 if (!(stackframesize & 0x1)) /* keep stack 16-byte aligned */
4250 /* create method header */
4252 (void) dseg_addaddress(cd, m); /* MethodPointer */
4253 (void) dseg_adds4(cd, stackframesize * 8); /* FrameSize */
4254 (void) dseg_adds4(cd, 0); /* IsSync */
4255 (void) dseg_adds4(cd, 0); /* IsLeaf */
4256 (void) dseg_adds4(cd, 0); /* IntSave */
4257 (void) dseg_adds4(cd, 0); /* FltSave */
4258 (void) dseg_addlinenumbertablesize(cd);
4259 (void) dseg_adds4(cd, 0); /* ExTableSize */
4261 /* initialize mcode variables */
4263 cd->mcodeptr = (u1 *) cd->mcodebase;
4264 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
4266 /* generate native method profiling code */
4269 /* count frequency */
/* bump methodinfo.frequency in place via an inc on memory */
4271 M_MOV_IMM((ptrint) m, REG_ITMP2);
4272 M_IINC_MEMBASE(REG_ITMP2, OFFSET(methodinfo, frequency));
4275 /* generate stub code */
4277 M_ASUB_IMM(stackframesize * 8, REG_SP);
4280 /* save integer and float argument registers */
/* spill argument registers before the call-trace helper clobbers them */
4282 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++)
4283 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4284 M_LST(rd->argintregs[j++], REG_SP, (1 + i) * 8);
4286 for (i = 0, j = 0; i < md->paramcount && j < FLT_ARG_CNT; i++)
4287 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4288 M_DST(rd->argfltregs[j++], REG_SP, (1 + INT_ARG_CNT + i) * 8);
4290 /* show integer hex code for float arguments */
4292 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++) {
4293 /* if the paramtype is a float, we have to right shift all
4294 following integer registers */
4296 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type)) {
4297 for (s1 = INT_ARG_CNT - 2; s1 >= i; s1--)
4298 M_MOV(rd->argintregs[s1], rd->argintregs[s1 + 1]);
/* move the raw float bits into the integer register for tracing */
4300 x86_64_movd_freg_reg(cd, rd->argfltregs[j], rd->argintregs[i]);
4305 M_MOV_IMM((ptrint) m, REG_ITMP1);
4306 M_AST(REG_ITMP1, REG_SP, 0 * 8);
4307 M_MOV_IMM((ptrint) builtin_trace_args, REG_ITMP1);
4310 /* restore integer and float argument registers */
4312 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++)
4313 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4314 M_LLD(rd->argintregs[j++], REG_SP, (1 + i) * 8);
4316 for (i = 0, j = 0; i < md->paramcount && j < FLT_ARG_CNT; i++)
4317 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4318 M_DLD(rd->argfltregs[j++], REG_SP, (1 + INT_ARG_CNT + i) * 8);
4322 /* get function address (this must happen before the stackframeinfo) */
4324 #if !defined(ENABLE_STATICVM)
/* native not yet linked: the patcher resolves the function address */
4326 codegen_addpatchref(cd, cd->mcodeptr, PATCHER_resolve_native, m, 0);
4328 if (opt_showdisassemble) {
4329 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
4334 M_MOV_IMM((ptrint) f, REG_ITMP3);
4337 /* save integer and float argument registers */
/* second spill: codegen_start_native_call clobbers argument registers */
4339 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++)
4340 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4341 M_LST(rd->argintregs[j++], REG_SP, i * 8);
4343 for (i = 0, j = 0; i < md->paramcount && j < FLT_ARG_CNT; i++)
4344 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4345 M_DST(rd->argfltregs[j++], REG_SP, (INT_ARG_CNT + i) * 8);
4347 M_AST(REG_ITMP3, REG_SP, (INT_ARG_CNT + FLT_ARG_CNT) * 8);
4349 /* create dynamic stack info */
/* args: sfi location, PV (method entry), SP of caller frame, RA */
4351 M_ALEA(REG_SP, stackframesize * 8, rd->argintregs[0]);
4352 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[1]);
4353 M_ALEA(REG_SP, stackframesize * 8 + SIZEOF_VOID_P, rd->argintregs[2]);
4354 M_ALD(rd->argintregs[3], REG_SP, stackframesize * 8);
4355 M_MOV_IMM((ptrint) codegen_start_native_call, REG_ITMP1);
4358 /* restore integer and float argument registers */
4360 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++)
4361 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4362 M_LLD(rd->argintregs[j++], REG_SP, i * 8);
4364 for (i = 0, j = 0; i < md->paramcount && j < FLT_ARG_CNT; i++)
4365 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4366 M_DLD(rd->argfltregs[j++], REG_SP, (INT_ARG_CNT + i) * 8);
4368 M_ALD(REG_ITMP3, REG_SP, (INT_ARG_CNT + FLT_ARG_CNT) * 8);
4371 /* copy or spill arguments to new locations */
/* iterate from the last argument backwards so registers are not
   overwritten before they are read; j is the index in the native
   descriptor, shifted by the extra env/class parameters */
4373 for (i = md->paramcount - 1, j = i + nativeparams; i >= 0; i--, j--) {
4374 t = md->paramtypes[i].type;
4376 if (IS_INT_LNG_TYPE(t)) {
4377 if (!md->params[i].inmemory) {
4378 s1 = rd->argintregs[md->params[i].regoff];
4380 if (!nmd->params[j].inmemory) {
4381 s2 = rd->argintregs[nmd->params[j].regoff];
4385 s2 = nmd->params[j].regoff;
4386 M_LST(s1, REG_SP, s2 * 8);
/* stack-to-stack copy through REG_ITMP1 */
4390 s1 = md->params[i].regoff + stackframesize + 1; /* + 1 (RA) */
4391 s2 = nmd->params[j].regoff;
4392 M_LLD(REG_ITMP1, REG_SP, s1 * 8);
4393 M_LST(REG_ITMP1, REG_SP, s2 * 8);
4397 /* We only copy spilled float arguments, as the float argument */
4398 /* registers keep unchanged. */
4400 if (md->params[i].inmemory) {
4401 s1 = md->params[i].regoff + stackframesize + 1; /* + 1 (RA) */
4402 s2 = nmd->params[j].regoff;
4403 M_DLD(REG_FTMP1, REG_SP, s1 * 8);
4404 M_DST(REG_FTMP1, REG_SP, s2 * 8);
4409 /* put class into second argument register */
4411 if (m->flags & ACC_STATIC)
4412 M_MOV_IMM((ptrint) m->class, rd->argintregs[1]);
4414 /* put env into first argument register */
4416 M_MOV_IMM((ptrint) &env, rd->argintregs[0]);
4418 /* do the native function call */
4422 /* save return value */
/* park the result while the cleanup/trace helpers run */
4424 if (md->returntype.type != TYPE_VOID) {
4425 if (IS_INT_LNG_TYPE(md->returntype.type))
4426 M_LST(REG_RESULT, REG_SP, 0 * 8);
4428 M_DST(REG_FRESULT, REG_SP, 0 * 8);
4431 /* remove native stackframe info */
4433 M_ALEA(REG_SP, stackframesize * 8, rd->argintregs[0]);
4434 M_MOV_IMM((ptrint) codegen_finish_native_call, REG_ITMP1);
4437 /* generate call trace */
4440 /* just restore the value we need, don't care about the other */
4442 if (md->returntype.type != TYPE_VOID) {
4443 if (IS_INT_LNG_TYPE(md->returntype.type))
4444 M_LLD(REG_RESULT, REG_SP, 0 * 8);
4446 M_DLD(REG_FRESULT, REG_SP, 0 * 8);
/* trace helper receives methodinfo plus both int and float results */
4449 M_MOV_IMM((ptrint) m, rd->argintregs[0]);
4450 M_MOV(REG_RESULT, rd->argintregs[1]);
4451 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
4452 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
4454 M_MOV_IMM((ptrint) builtin_displaymethodstop, REG_ITMP1);
4458 /* check for exception */
/* fetch the thread-local (or global) exception pointer location */
4460 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4461 M_MOV_IMM((ptrint) builtin_get_exceptionptrptr, REG_ITMP3);
4464 M_MOV_IMM((ptrint) &_no_threads_exceptionptr, REG_RESULT);
4466 M_ALD(REG_ITMP2, REG_RESULT, 0);
4468 /* restore return value */
4470 if (md->returntype.type != TYPE_VOID) {
4471 if (IS_INT_LNG_TYPE(md->returntype.type))
4472 M_LLD(REG_RESULT, REG_SP, 0 * 8);
4474 M_DLD(REG_FRESULT, REG_SP, 0 * 8);
4477 /* test for exception */
4482 /* remove stackframe */
4484 M_AADD_IMM(stackframesize * 8, REG_SP);
4488 /* handle exception */
/* an exception is pending: clear the pointer, load it into XPTR and
   dispatch through the asm native-exception handler */
4490 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4491 M_LST(REG_ITMP2, REG_SP, 0 * 8);
4492 M_MOV_IMM((ptrint) builtin_get_exceptionptrptr, REG_ITMP3);
4494 M_AST_IMM32(0, REG_RESULT, 0); /* clear exception pointer */
4495 M_LLD(REG_ITMP1_XPTR, REG_SP, 0 * 8);
4497 M_MOV(REG_ITMP3, REG_ITMP1_XPTR);
4498 M_MOV_IMM((ptrint) &_no_threads_exceptionptr, REG_ITMP3);
4499 M_AST_IMM32(0, REG_ITMP3, 0); /* clear exception pointer */
4502 /* remove stackframe */
4504 M_AADD_IMM(stackframesize * 8, REG_SP);
4506 M_LLD(REG_ITMP2_XPC, REG_SP, 0 * 8); /* get return address from stack */
/* rewind XPC to point at the call instruction itself */
4507 M_ASUB_IMM(3, REG_ITMP2_XPC); /* callq */
4509 M_MOV_IMM((ptrint) asm_handle_nat_exception, REG_ITMP3);
4513 /* process patcher calls **************************************************/
/* same patcher-stub scheme as in codegen(): back-patch a `call rel32'
   at each patch position and pass the saved code bytes, reference,
   displacement and patcher function to asm_wrapper_patcher */
4520 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4524 tmpcd = DNEW(codegendata);
4526 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
4527 /* Get machine code which is patched back in later. A */
4528 /* `call rel32' is 5 bytes long (but read 8 bytes). */
4530 xcodeptr = cd->mcodebase + pref->branchpos;
4531 mcode = *((ptrint *) xcodeptr);
4533 /* patch in `call rel32' to call the following code */
4535 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4536 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4538 /* move pointer to java_objectheader onto stack */
4540 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4541 /* create a virtual java_objectheader */
4543 (void) dseg_addaddress(cd, get_dummyLR()); /* monitorPtr */
4544 disp = dseg_addaddress(cd, NULL); /* vftbl */
4546 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP3);
4552 /* move machine code bytes and classinfo pointer into registers */
4554 M_MOV_IMM((ptrint) mcode, REG_ITMP3);
4556 M_MOV_IMM((ptrint) pref->ref, REG_ITMP3);
4558 M_MOV_IMM((ptrint) pref->disp, REG_ITMP3);
4561 M_MOV_IMM((ptrint) pref->patcher, REG_ITMP3);
4564 M_MOV_IMM((ptrint) asm_wrapper_patcher, REG_ITMP3);
/* finalize the stub code and return its entry point to the caller */
4569 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
4571 return m->entrypoint;
4576 * These are local overrides for various environment variables in Emacs.
4577 * Please do not remove this and leave it at the end of the file, where
4578 * Emacs will automagically detect them.
4579 * ---------------------------------------------------------------------
4582 * indent-tabs-mode: t