1 /* src/vm/jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996-2005, 2006 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
25 Contact: cacao@cacaojvm.org
27 Authors: Andreas Krall
30 Changes: Christian Ullrich
33 $Id: codegen.c 4615 2006-03-15 16:36:43Z twisti $
47 #include "vm/jit/x86_64/arch.h"
48 #include "vm/jit/x86_64/codegen.h"
49 #include "vm/jit/x86_64/emitfuncs.h"
51 #include "native/native.h"
52 #include "vm/builtin.h"
53 #include "vm/exceptions.h"
54 #include "vm/global.h"
55 #include "vm/loader.h"
56 #include "vm/options.h"
57 #include "vm/statistics.h"
58 #include "vm/stringlocal.h"
60 #include "vm/jit/asmpart.h"
61 #include "vm/jit/codegen-common.h"
62 #include "vm/jit/dseg.h"
63 #include "vm/jit/jit.h"
64 #include "vm/jit/methodheader.h"
65 #include "vm/jit/parse.h"
66 #include "vm/jit/patcher.h"
67 #include "vm/jit/reg.h"
69 #if defined(ENABLE_LSRA)
70 # include "vm/jit/allocator/lsra.h"
76 /* codegen *********************************************************************
78 Generates machine code.
80 *******************************************************************************/
82 bool codegen(methodinfo *m, codegendata *cd, registerdata *rd)
84 s4 len, s1, s2, s3, d, disp;
93 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
94 builtintable_entry *bte;
97 /* prevent compiler warnings */
109 /* space to save used callee saved registers */
111 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
112 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
114 parentargs_base = rd->memuse + savedregs_num;
116 #if defined(USE_THREADS)
117 /* space to save argument of monitor_enter */
119 if (checksync && (m->flags & ACC_SYNCHRONIZED))
123 /* Keep stack of non-leaf functions 16-byte aligned for calls into native */
124 /* code e.g. libc or jni (alignment problems with movaps). */
126 if (!m->isleafmethod || opt_verbosecall)
127 parentargs_base |= 0x1;
129 /* create method header */
131 (void) dseg_addaddress(cd, m); /* MethodPointer */
132 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
134 #if defined(USE_THREADS)
135 /* IsSync contains the offset relative to the stack pointer for the
136 argument of monitor_exit used in the exception handler. Since the
137 offset could be zero and give a wrong meaning of the flag it is
141 if (checksync && (m->flags & ACC_SYNCHRONIZED))
142 (void) dseg_adds4(cd, (rd->memuse + 1) * 8); /* IsSync */
145 (void) dseg_adds4(cd, 0); /* IsSync */
147 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
148 (void) dseg_adds4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
149 (void) dseg_adds4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
151 (void) dseg_addlinenumbertablesize(cd);
153 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
155 /* create exception table */
157 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
158 dseg_addtarget(cd, ex->start);
159 dseg_addtarget(cd, ex->end);
160 dseg_addtarget(cd, ex->handler);
161 (void) dseg_addaddress(cd, ex->catchtype.cls);
164 /* initialize mcode variables */
166 cd->mcodeptr = (u1 *) cd->mcodebase;
167 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
169 /* initialize the last patcher pointer */
171 cd->lastmcodeptr = cd->mcodeptr;
173 /* generate method profiling code */
176 /* count frequency */
178 M_MOV_IMM(m, REG_ITMP3);
179 M_IINC_MEMBASE(REG_ITMP3, OFFSET(methodinfo, frequency));
184 /* create stack frame (if necessary) */
187 M_ASUB_IMM(parentargs_base * 8, REG_SP);
189 /* save used callee saved registers */
192 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
193 p--; M_LST(rd->savintregs[i], REG_SP, p * 8);
195 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
196 p--; M_DST(rd->savfltregs[i], REG_SP, p * 8);
199 /* take arguments out of register or stack frame */
203 for (p = 0, l = 0; p < md->paramcount; p++) {
204 t = md->paramtypes[p].type;
205 var = &(rd->locals[l][t]);
207 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
211 s1 = md->params[p].regoff;
212 if (IS_INT_LNG_TYPE(t)) { /* integer args */
213 s2 = rd->argintregs[s1];
214 if (!md->params[p].inmemory) { /* register arguments */
215 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
216 M_INTMOVE(s2, var->regoff);
218 } else { /* reg arg -> spilled */
219 M_LST(s2, REG_SP, var->regoff * 8);
222 } else { /* stack arguments */
223 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
224 /* + 8 for return address */
225 M_LLD(var->regoff, REG_SP, (parentargs_base + s1) * 8 + 8);
227 } else { /* stack arg -> spilled */
228 var->regoff = parentargs_base + s1 + 1;
232 } else { /* floating args */
233 if (!md->params[p].inmemory) { /* register arguments */
234 s2 = rd->argfltregs[s1];
235 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
236 M_FLTMOVE(s2, var->regoff);
238 } else { /* reg arg -> spilled */
239 M_DST(s2, REG_SP, var->regoff * 8);
242 } else { /* stack arguments */
243 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
244 M_DLD(var->regoff, REG_SP, (parentargs_base + s1) * 8 + 8);
247 var->regoff = parentargs_base + s1 + 1;
253 /* save monitorenter argument */
255 #if defined(USE_THREADS)
256 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
257 /* stack offset for monitor argument */
261 if (opt_verbosecall) {
262 M_LSUB_IMM((INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_SP);
264 for (p = 0; p < INT_ARG_CNT; p++)
265 M_LST(rd->argintregs[p], REG_SP, p * 8);
267 for (p = 0; p < FLT_ARG_CNT; p++)
268 M_DST(rd->argfltregs[p], REG_SP, (INT_ARG_CNT + p) * 8);
270 s1 += INT_ARG_CNT + FLT_ARG_CNT;
273 /* decide which monitor enter function to call */
275 if (m->flags & ACC_STATIC) {
276 M_MOV_IMM(m->class, REG_ITMP1);
277 M_AST(REG_ITMP1, REG_SP, s1 * 8);
278 M_INTMOVE(REG_ITMP1, rd->argintregs[0]);
279 M_MOV_IMM(BUILTIN_staticmonitorenter, REG_ITMP1);
283 M_TEST(rd->argintregs[0]);
285 codegen_add_nullpointerexception_ref(cd, cd->mcodeptr);
286 M_AST(rd->argintregs[0], REG_SP, s1 * 8);
287 M_MOV_IMM(BUILTIN_monitorenter, REG_ITMP1);
291 if (opt_verbosecall) {
292 for (p = 0; p < INT_ARG_CNT; p++)
293 M_LLD(rd->argintregs[p], REG_SP, p * 8);
295 for (p = 0; p < FLT_ARG_CNT; p++)
296 M_DLD(rd->argfltregs[p], REG_SP, (INT_ARG_CNT + p) * 8);
298 M_LADD_IMM((INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_SP);
303 /* Copy argument registers to stack and call trace function with
304 pointer to arguments on stack. */
306 if (opt_verbosecall) {
307 M_LSUB_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT + 1 + 1) * 8, REG_SP);
309 /* save integer argument registers */
311 for (p = 0; p < INT_ARG_CNT; p++)
312 M_LST(rd->argintregs[p], REG_SP, (1 + p) * 8);
314 /* save float argument registers */
316 for (p = 0; p < FLT_ARG_CNT; p++)
317 M_DST(rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
319 /* save temporary registers for leaf methods */
321 if (m->isleafmethod) {
322 for (p = 0; p < INT_TMP_CNT; p++)
323 M_LST(rd->tmpintregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
325 for (p = 0; p < FLT_TMP_CNT; p++)
326 M_DST(rd->tmpfltregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
329 /* show integer hex code for float arguments */
331 for (p = 0, l = 0; p < md->paramcount && p < INT_ARG_CNT; p++) {
332 /* If the paramtype is a float, we have to right shift all
333 following integer registers. */
335 if (IS_FLT_DBL_TYPE(md->paramtypes[p].type)) {
336 for (s1 = INT_ARG_CNT - 2; s1 >= p; s1--) {
337 M_MOV(rd->argintregs[s1], rd->argintregs[s1 + 1]);
340 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
345 M_MOV_IMM(m, REG_ITMP2);
346 M_AST(REG_ITMP2, REG_SP, 0 * 8);
347 M_MOV_IMM(builtin_trace_args, REG_ITMP1);
350 /* restore integer argument registers */
352 for (p = 0; p < INT_ARG_CNT; p++)
353 M_LLD(rd->argintregs[p], REG_SP, (1 + p) * 8);
355 /* restore float argument registers */
357 for (p = 0; p < FLT_ARG_CNT; p++)
358 M_DLD(rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
360 /* restore temporary registers for leaf methods */
362 if (m->isleafmethod) {
363 for (p = 0; p < INT_TMP_CNT; p++)
364 M_LLD(rd->tmpintregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
366 for (p = 0; p < FLT_TMP_CNT; p++)
367 M_DLD(rd->tmpfltregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
370 M_LADD_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT + 1 + 1) * 8, REG_SP);
375 /* end of header generation */
377 /* walk through all basic blocks */
379 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
381 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
383 if (bptr->flags >= BBREACHED) {
385 /* branch resolving */
388 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
389 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
394 /* copy interface registers to their destination */
400 /* generate basicblock profiling code */
403 /* count frequency */
405 M_MOV_IMM(m->bbfrequency, REG_ITMP2);
406 M_IINC_MEMBASE(REG_ITMP2, bptr->debug_nr * 4);
408 /* if this is an exception handler, start profiling again */
410 if (bptr->type == BBTYPE_EXH)
414 #if defined(ENABLE_LSRA)
416 while (src != NULL) {
418 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
419 if (bptr->type == BBTYPE_SBR) {
420 /* d = reg_of_var(rd, src, REG_ITMP1); */
421 if (!(src->flags & INMEMORY))
425 x86_64_pop_reg(cd, d);
426 store_reg_to_var_int(src, d);
428 } else if (bptr->type == BBTYPE_EXH) {
429 /* d = reg_of_var(rd, src, REG_ITMP1); */
430 if (!(src->flags & INMEMORY))
434 M_INTMOVE(REG_ITMP1, d);
435 store_reg_to_var_int(src, d);
444 while (src != NULL) {
446 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
447 if (bptr->type == BBTYPE_SBR) {
448 d = reg_of_var(rd, src, REG_ITMP1);
450 store_reg_to_var_int(src, d);
452 } else if (bptr->type == BBTYPE_EXH) {
453 d = reg_of_var(rd, src, REG_ITMP1);
454 M_INTMOVE(REG_ITMP1, d);
455 store_reg_to_var_int(src, d);
459 d = reg_of_var(rd, src, REG_ITMP1);
460 if ((src->varkind != STACKVAR)) {
462 if (IS_FLT_DBL_TYPE(s2)) {
463 s1 = rd->interfaces[len][s2].regoff;
465 if (!(rd->interfaces[len][s2].flags & INMEMORY))
468 M_DLD(d, REG_SP, s1 * 8);
470 store_reg_to_var_flt(src, d);
473 s1 = rd->interfaces[len][s2].regoff;
475 if (!(rd->interfaces[len][s2].flags & INMEMORY))
478 M_LLD(d, REG_SP, s1 * 8);
480 store_reg_to_var_int(src, d);
486 #if defined(ENABLE_LSRA)
489 /* walk through all instructions */
495 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
496 if (iptr->line != currentline) {
497 dseg_addlinenumber(cd, iptr->line, cd->mcodeptr);
498 currentline = iptr->line;
501 MCODECHECK(1024); /* 1KB should be enough */
504 case ICMD_INLINE_START: /* internal ICMDs */
505 case ICMD_INLINE_END:
508 case ICMD_NOP: /* ... ==> ... */
511 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
513 if (src->flags & INMEMORY)
514 M_CMP_IMM_MEMBASE(0, REG_SP, src->regoff * 8);
518 codegen_add_nullpointerexception_ref(cd, cd->mcodeptr);
521 /* constant operations ************************************************/
523 case ICMD_ICONST: /* ... ==> ..., constant */
524 /* op1 = 0, val.i = constant */
526 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
527 if (iptr->val.i == 0)
530 M_IMOV_IMM(iptr->val.i, d);
531 store_reg_to_var_int(iptr->dst, d);
534 case ICMD_LCONST: /* ... ==> ..., constant */
535 /* op1 = 0, val.l = constant */
537 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
538 if (iptr->val.l == 0)
541 M_MOV_IMM(iptr->val.l, d);
542 store_reg_to_var_int(iptr->dst, d);
545 case ICMD_FCONST: /* ... ==> ..., constant */
546 /* op1 = 0, val.f = constant */
548 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
549 disp = dseg_addfloat(cd, iptr->val.f);
550 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + disp, d);
551 store_reg_to_var_flt(iptr->dst, d);
554 case ICMD_DCONST: /* ... ==> ..., constant */
555 /* op1 = 0, val.d = constant */
557 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
558 disp = dseg_adddouble(cd, iptr->val.d);
559 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + disp, d);
560 store_reg_to_var_flt(iptr->dst, d);
563 case ICMD_ACONST: /* ... ==> ..., constant */
564 /* op1 = 0, val.a = constant */
566 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
568 if ((iptr->target != NULL) && (iptr->val.a == NULL)) {
569 /* PROFILE_CYCLE_STOP; */
571 codegen_addpatchref(cd, cd->mcodeptr,
573 (unresolved_class *) iptr->target, 0);
575 if (opt_showdisassemble) {
576 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
579 /* PROFILE_CYCLE_START; */
581 M_MOV_IMM(iptr->val.a, d);
584 if (iptr->val.a == 0)
587 M_MOV_IMM(iptr->val.a, d);
589 store_reg_to_var_int(iptr->dst, d);
593 /* load/store operations **********************************************/
595 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
596 /* op1 = local variable */
598 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
599 if ((iptr->dst->varkind == LOCALVAR) &&
600 (iptr->dst->varnum == iptr->op1)) {
603 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
604 if (var->flags & INMEMORY) {
605 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
606 store_reg_to_var_int(iptr->dst, d);
609 if (iptr->dst->flags & INMEMORY) {
610 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
613 M_INTMOVE(var->regoff, d);
618 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
619 case ICMD_ALOAD: /* op1 = local variable */
621 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
622 if ((iptr->dst->varkind == LOCALVAR) &&
623 (iptr->dst->varnum == iptr->op1)) {
626 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
627 if (var->flags & INMEMORY) {
628 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
629 store_reg_to_var_int(iptr->dst, d);
632 if (iptr->dst->flags & INMEMORY) {
633 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
636 M_INTMOVE(var->regoff, d);
641 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
642 case ICMD_DLOAD: /* op1 = local variable */
644 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
645 if ((iptr->dst->varkind == LOCALVAR) &&
646 (iptr->dst->varnum == iptr->op1)) {
649 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
650 if (var->flags & INMEMORY) {
651 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
652 store_reg_to_var_flt(iptr->dst, d);
655 if (iptr->dst->flags & INMEMORY) {
656 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
659 M_FLTMOVE(var->regoff, d);
664 case ICMD_ISTORE: /* ..., value ==> ... */
665 case ICMD_LSTORE: /* op1 = local variable */
668 if ((src->varkind == LOCALVAR) &&
669 (src->varnum == iptr->op1)) {
672 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
673 if (var->flags & INMEMORY) {
674 var_to_reg_int(s1, src, REG_ITMP1);
675 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
678 var_to_reg_int(s1, src, var->regoff);
679 M_INTMOVE(s1, var->regoff);
683 case ICMD_FSTORE: /* ..., value ==> ... */
684 case ICMD_DSTORE: /* op1 = local variable */
686 if ((src->varkind == LOCALVAR) &&
687 (src->varnum == iptr->op1)) {
690 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
691 if (var->flags & INMEMORY) {
692 var_to_reg_flt(s1, src, REG_FTMP1);
693 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
696 var_to_reg_flt(s1, src, var->regoff);
697 M_FLTMOVE(s1, var->regoff);
702 /* pop/dup/swap operations ********************************************/
704 /* attention: doubles and longs occupy only one stack entry in CACAO ICMDs */
706 case ICMD_POP: /* ..., value ==> ... */
707 case ICMD_POP2: /* ..., value, value ==> ... */
710 case ICMD_DUP: /* ..., a ==> ..., a, a */
711 M_COPY(src, iptr->dst);
714 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
716 M_COPY(src, iptr->dst);
717 M_COPY(src->prev, iptr->dst->prev);
718 M_COPY(iptr->dst, iptr->dst->prev->prev);
721 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
723 M_COPY(src, iptr->dst);
724 M_COPY(src->prev, iptr->dst->prev);
725 M_COPY(src->prev->prev, iptr->dst->prev->prev);
726 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
729 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
731 M_COPY(src, iptr->dst);
732 M_COPY(src->prev, iptr->dst->prev);
735 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
737 M_COPY(src, iptr->dst);
738 M_COPY(src->prev, iptr->dst->prev);
739 M_COPY(src->prev->prev, iptr->dst->prev->prev);
740 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
741 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
744 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
746 M_COPY(src, iptr->dst);
747 M_COPY(src->prev, iptr->dst->prev);
748 M_COPY(src->prev->prev, iptr->dst->prev->prev);
749 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
750 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
751 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
754 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
756 M_COPY(src, iptr->dst->prev);
757 M_COPY(src->prev, iptr->dst);
761 /* integer operations *************************************************/
763 case ICMD_INEG: /* ..., value ==> ..., - value */
765 d = reg_of_var(rd, iptr->dst, REG_NULL);
766 if (iptr->dst->flags & INMEMORY) {
767 if (src->flags & INMEMORY) {
768 if (src->regoff == iptr->dst->regoff) {
769 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
772 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
773 x86_64_negl_reg(cd, REG_ITMP1);
774 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
778 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
779 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
783 if (src->flags & INMEMORY) {
784 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
785 x86_64_negl_reg(cd, d);
788 M_INTMOVE(src->regoff, iptr->dst->regoff);
789 x86_64_negl_reg(cd, iptr->dst->regoff);
794 case ICMD_LNEG: /* ..., value ==> ..., - value */
796 d = reg_of_var(rd, iptr->dst, REG_NULL);
797 if (iptr->dst->flags & INMEMORY) {
798 if (src->flags & INMEMORY) {
799 if (src->regoff == iptr->dst->regoff) {
800 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
803 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
804 x86_64_neg_reg(cd, REG_ITMP1);
805 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
809 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
810 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
814 if (src->flags & INMEMORY) {
815 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
816 x86_64_neg_reg(cd, iptr->dst->regoff);
819 M_INTMOVE(src->regoff, iptr->dst->regoff);
820 x86_64_neg_reg(cd, iptr->dst->regoff);
825 case ICMD_I2L: /* ..., value ==> ..., value */
827 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
828 if (src->flags & INMEMORY) {
829 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
832 x86_64_movslq_reg_reg(cd, src->regoff, d);
834 store_reg_to_var_int(iptr->dst, d);
837 case ICMD_L2I: /* ..., value ==> ..., value */
839 var_to_reg_int(s1, src, REG_ITMP1);
840 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
842 store_reg_to_var_int(iptr->dst, d);
845 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
847 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
848 if (src->flags & INMEMORY) {
849 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
852 x86_64_movsbq_reg_reg(cd, src->regoff, d);
854 store_reg_to_var_int(iptr->dst, d);
857 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
859 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
860 if (src->flags & INMEMORY) {
861 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
864 x86_64_movzwq_reg_reg(cd, src->regoff, d);
866 store_reg_to_var_int(iptr->dst, d);
869 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
871 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
872 if (src->flags & INMEMORY) {
873 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
876 x86_64_movswq_reg_reg(cd, src->regoff, d);
878 store_reg_to_var_int(iptr->dst, d);
882 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
884 d = reg_of_var(rd, iptr->dst, REG_NULL);
885 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
888 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
889 /* val.i = constant */
891 d = reg_of_var(rd, iptr->dst, REG_NULL);
892 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
895 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
897 d = reg_of_var(rd, iptr->dst, REG_NULL);
898 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
901 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
902 /* val.l = constant */
904 d = reg_of_var(rd, iptr->dst, REG_NULL);
905 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
908 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
910 d = reg_of_var(rd, iptr->dst, REG_NULL);
911 if (iptr->dst->flags & INMEMORY) {
912 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
913 if (src->prev->regoff == iptr->dst->regoff) {
914 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
915 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
918 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
919 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
920 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
923 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
924 M_INTMOVE(src->prev->regoff, REG_ITMP1);
925 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
926 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
928 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
929 if (src->prev->regoff == iptr->dst->regoff) {
930 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
933 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
934 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
935 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
939 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
940 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
944 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
945 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
946 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
948 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
949 M_INTMOVE(src->prev->regoff, d);
950 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
952 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
953 /* workaround for reg alloc */
954 if (src->regoff == iptr->dst->regoff) {
955 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
956 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
957 M_INTMOVE(REG_ITMP1, d);
960 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
961 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
965 /* workaround for reg alloc */
966 if (src->regoff == iptr->dst->regoff) {
967 M_INTMOVE(src->prev->regoff, REG_ITMP1);
968 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
969 M_INTMOVE(REG_ITMP1, d);
972 M_INTMOVE(src->prev->regoff, d);
973 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
979 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
980 /* val.i = constant */
982 d = reg_of_var(rd, iptr->dst, REG_NULL);
983 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
986 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
988 d = reg_of_var(rd, iptr->dst, REG_NULL);
989 if (iptr->dst->flags & INMEMORY) {
990 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
991 if (src->prev->regoff == iptr->dst->regoff) {
992 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
993 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
996 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
997 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
998 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1001 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1002 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1003 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1004 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1006 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1007 if (src->prev->regoff == iptr->dst->regoff) {
1008 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1011 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1012 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1013 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1017 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1018 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1022 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1023 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1024 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1026 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1027 M_INTMOVE(src->prev->regoff, d);
1028 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1030 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1031 /* workaround for reg alloc */
1032 if (src->regoff == iptr->dst->regoff) {
1033 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1034 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1035 M_INTMOVE(REG_ITMP1, d);
1038 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1039 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1043 /* workaround for reg alloc */
1044 if (src->regoff == iptr->dst->regoff) {
1045 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1046 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1047 M_INTMOVE(REG_ITMP1, d);
1050 M_INTMOVE(src->prev->regoff, d);
1051 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1057 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1058 /* val.l = constant */
1060 d = reg_of_var(rd, iptr->dst, REG_NULL);
1061 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1064 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1066 d = reg_of_var(rd, iptr->dst, REG_NULL);
1067 if (iptr->dst->flags & INMEMORY) {
1068 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1069 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1070 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1071 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1073 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1074 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1075 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1076 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1078 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1079 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1080 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1081 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1084 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1085 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1086 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1090 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1091 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1092 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1094 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1095 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1096 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1098 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1099 M_INTMOVE(src->regoff, iptr->dst->regoff);
1100 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1103 if (src->regoff == iptr->dst->regoff) {
1104 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1107 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1108 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1114 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1115 /* val.i = constant */
1117 d = reg_of_var(rd, iptr->dst, REG_NULL);
1118 if (iptr->dst->flags & INMEMORY) {
1119 if (src->flags & INMEMORY) {
1120 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1121 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1124 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1125 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1129 if (src->flags & INMEMORY) {
1130 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1133 if (iptr->val.i == 2) {
1134 M_INTMOVE(src->regoff, iptr->dst->regoff);
1135 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1138 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
1144 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1146 d = reg_of_var(rd, iptr->dst, REG_NULL);
1147 if (iptr->dst->flags & INMEMORY) {
1148 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1149 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1150 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1151 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1153 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1154 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1155 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1156 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1158 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1159 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1160 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1161 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1164 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1165 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1166 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1170 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1171 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1172 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1174 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1175 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1176 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1178 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1179 M_INTMOVE(src->regoff, iptr->dst->regoff);
1180 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1183 if (src->regoff == iptr->dst->regoff) {
1184 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1187 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1188 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
1194 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1195 /* val.l = constant */
1197 d = reg_of_var(rd, iptr->dst, REG_NULL);
1198 if (iptr->dst->flags & INMEMORY) {
1199 if (src->flags & INMEMORY) {
1200 if (IS_IMM32(iptr->val.l)) {
1201 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1204 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1205 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1207 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1210 if (IS_IMM32(iptr->val.l)) {
1211 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1214 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1215 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1217 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1221 if (src->flags & INMEMORY) {
1222 if (IS_IMM32(iptr->val.l)) {
1223 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1226 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1227 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1231 /* should match in many cases */
1232 if (iptr->val.l == 2) {
1233 M_INTMOVE(src->regoff, iptr->dst->regoff);
1234 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1237 if (IS_IMM32(iptr->val.l)) {
1238 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1241 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1242 M_INTMOVE(src->regoff, iptr->dst->regoff);
1243 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
1250 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
/* NOTE(review): this listing elides lines (else-branches, cltd, break);
   code kept byte-identical, comments only. */
1252 d = reg_of_var(rd, iptr->dst, REG_NULL);
/* Load the dividend into RAX, from its stack slot or its register. */
1253 if (src->prev->flags & INMEMORY) {
1254 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1257 M_INTMOVE(src->prev->regoff, RAX);
/* Load the divisor into REG_ITMP3 (idiv needs it in a register). */
1260 if (src->flags & INMEMORY) {
1261 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1264 M_INTMOVE(src->regoff, REG_ITMP3);
/* JVM-spec corner case: INT_MIN / -1 must yield INT_MIN instead of
   raising #DE; the hard-coded jcc distances are the byte lengths of
   the instructions being skipped. */
1268 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1269 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1270 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1271 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1273 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1275 x86_64_idivl_reg(cd, REG_ITMP3);
/* Quotient is in RAX; store it, then restore RDX (clobbered by idiv). */
1277 if (iptr->dst->flags & INMEMORY) {
1278 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1279 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1282 M_INTMOVE(RAX, iptr->dst->regoff);
/* Only restore RDX if it is not itself the destination register. */
1284 if (iptr->dst->regoff != RDX) {
1285 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1290 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
/* NOTE(review): elided listing -- comments only, code untouched. */
1291 d = reg_of_var(rd, iptr->dst, REG_NULL);
/* Dividend into RAX. */
1292 if (src->prev->flags & INMEMORY) {
1293 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1296 M_INTMOVE(src->prev->regoff, RAX);
/* Divisor into REG_ITMP3. */
1299 if (src->flags & INMEMORY) {
1300 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1303 M_INTMOVE(src->regoff, REG_ITMP3);
1307 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
/* JVM-spec corner case: INT_MIN % -1 must be 0, so RDX is cleared and
   the idiv skipped; jcc distances are byte counts of skipped code. */
1309 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1310 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1313 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1314 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1315 x86_64_jcc(cd, X86_64_CC_E, 1 + 3); /* 6 bytes */
1318 x86_64_idivl_reg(cd, REG_ITMP3);
/* Remainder is in RDX; store it, then restore the saved RDX. */
1320 if (iptr->dst->flags & INMEMORY) {
1321 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1322 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1325 M_INTMOVE(RDX, iptr->dst->regoff);
/* Only restore RDX if it is not itself the destination register. */
1327 if (iptr->dst->regoff != RDX) {
1328 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1333 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1334 /* val.i = constant */
/* Signed division by 2^val.i: negative values are biased by
   (2^val.i - 1) before the arithmetic shift so the quotient rounds
   toward zero, as the JVM spec requires.
   NOTE(review): (1 << iptr->val.i) is undefined behavior for
   val.i == 31 -- confirm the optimizer never emits that count. */
1336 var_to_reg_int(s1, src, REG_ITMP1);
1337 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1338 M_INTMOVE(s1, REG_ITMP1);
1339 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
/* lea computes value + bias without disturbing the flags set above. */
1340 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
/* value <= -1: take the biased copy. */
1341 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1342 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1343 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1344 store_reg_to_var_int(iptr->dst, d);
1347 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1348 /* val.i = constant */
/* Remainder modulo a power of two; val.i holds the mask 2^k - 1
   (presumably prepared by the optimizer -- TODO confirm). */
1350 var_to_reg_int(s1, src, REG_ITMP1);
1351 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1352 M_INTMOVE(s1, REG_ITMP1);
1353 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
/* ITMP2 = value + mask (bias for negative values; lea keeps flags). */
1354 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
/* value > -1: use the unbiased value instead. */
1355 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
/* Round down to a multiple of 2^k ... */
1356 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
/* ... and subtract to obtain the sign-correct remainder. */
1357 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1358 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1359 store_reg_to_var_int(iptr->dst, d);
1363 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
/* NOTE(review): elided listing -- comments only, code untouched. */
1365 d = reg_of_var(rd, iptr->dst, REG_NULL);
/* Dividend into RAX. */
1367 if (src->prev->flags & INMEMORY) {
1368 M_LLD(RAX, REG_SP, src->prev->regoff * 8);
1371 M_INTMOVE(src->prev->regoff, RAX);
/* Divisor into REG_ITMP3. */
1374 if (src->flags & INMEMORY) {
1375 M_LLD(REG_ITMP3, REG_SP, src->regoff * 8);
1378 M_INTMOVE(src->regoff, REG_ITMP3);
/* LONG_MIN / -1 corner case: compare RAX against 0x8000000000000000
   kept in the data segment (RIP-relative load, no scratch register);
   the +7 is the encoded length of the compare instruction itself. */
1382 /* check as described in jvm spec */
1383 disp = dseg_adds8(cd, 0x8000000000000000LL);
1384 M_CMP_MEMBASE(RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, RAX);
1386 M_CMP_IMM(-1, REG_ITMP3); /* 4 bytes */
/* NOTE(review): the operand 3 + 2 + 3 = 8 disagrees with the
   "6 bytes" remark; elided lines prevent confirming which is right. */
1387 M_BEQ(3 + 2 + 3); /* 6 bytes */
1389 M_MOV(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1391 x86_64_idiv_reg(cd, REG_ITMP3);
/* Quotient in RAX; store and restore RDX. */
1393 if (iptr->dst->flags & INMEMORY) {
1394 M_LST(RAX, REG_SP, iptr->dst->regoff * 8);
1395 M_MOV(REG_ITMP2, RDX); /* restore %rdx */
1398 M_INTMOVE(RAX, iptr->dst->regoff);
1400 if (iptr->dst->regoff != RDX) {
1401 M_MOV(REG_ITMP2, RDX); /* restore %rdx */
1406 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
/* NOTE(review): elided listing -- comments only, code untouched. */
1408 d = reg_of_var(rd, iptr->dst, REG_NULL);
/* Dividend into REG_ITMP1; the move into RAX required by idiv is in
   an elided line -- TODO confirm against the full source. */
1409 if (src->prev->flags & INMEMORY) {
1410 M_LLD(REG_ITMP1, REG_SP, src->prev->regoff * 8);
1413 M_INTMOVE(src->prev->regoff, REG_ITMP1);
/* Divisor into REG_ITMP3. */
1416 if (src->flags & INMEMORY) {
1417 M_LLD(REG_ITMP3, REG_SP, src->regoff * 8);
1420 M_INTMOVE(src->regoff, REG_ITMP3);
1424 M_MOV(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
/* LONG_MIN % -1 corner case: result must be 0, so RDX is cleared and
   the idiv skipped (constant read RIP-relative from the data seg). */
1426 /* check as described in jvm spec */
1427 disp = dseg_adds8(cd, 0x8000000000000000LL);
1428 M_CMP_MEMBASE(RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP1);
/* NOTE(review): both a 32-bit alul XOR and a 64-bit M_XOR of RDX
   appear below; the branch structure between them is elided, so
   which one is live cannot be confirmed from this excerpt. */
1432 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1434 M_XOR(RDX, RDX); /* 3 bytes */
1435 M_CMP_IMM(-1, REG_ITMP3); /* 4 bytes */
1436 M_BEQ(2 + 3); /* 6 bytes */
1439 x86_64_idiv_reg(cd, REG_ITMP3);
/* Remainder in RDX; store and restore the saved RDX. */
1441 if (iptr->dst->flags & INMEMORY) {
1442 M_LST(RDX, REG_SP, iptr->dst->regoff * 8);
1443 M_MOV(REG_ITMP2, RDX); /* restore %rdx */
1446 M_INTMOVE(RDX, iptr->dst->regoff);
1448 if (iptr->dst->regoff != RDX) {
1449 M_MOV(REG_ITMP2, RDX); /* restore %rdx */
1454 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1455 /* val.i = constant */
1457 var_to_reg_int(s1, src, REG_ITMP1);
1458 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1459 M_INTMOVE(s1, REG_ITMP1);
1460 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1461 x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1462 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1463 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1464 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1465 store_reg_to_var_int(iptr->dst, d);
1468 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1469 /* val.l = constant */
/* 64-bit remainder modulo a power of two; the constant is the mask
   2^k - 1.  NOTE(review): although the header says val.l, the code
   reads iptr->val.i and relies on imm32-only lea/and encodings, so
   masks wider than 31 bits are silently mishandled -- this case needs
   the same 64-bit treatment as LDIVPOW2. */
1471 var_to_reg_int(s1, src, REG_ITMP1);
1472 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1473 M_INTMOVE(s1, REG_ITMP1);
1474 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
/* ITMP2 = value + mask (bias for negative values; lea keeps flags). */
1475 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
/* value > -1: use the unbiased value instead. */
1476 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
/* Round down to a multiple of 2^k, then subtract for the remainder. */
1477 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1478 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1479 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1480 store_reg_to_var_int(iptr->dst, d);
1483 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
/* All shift variants delegate to the emit helpers declared in
   emitfuncs.h; only the shift opcode (SHL/SAR/SHR), the operand
   width (i vs. l) and variable vs. constant count differ. */
1485 d = reg_of_var(rd, iptr->dst, REG_NULL);
1486 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1489 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1490 /* val.i = constant */
1492 d = reg_of_var(rd, iptr->dst, REG_NULL);
1493 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1496 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1498 d = reg_of_var(rd, iptr->dst, REG_NULL);
1499 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1502 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1503 /* val.i = constant */
1505 d = reg_of_var(rd, iptr->dst, REG_NULL);
1506 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1509 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1511 d = reg_of_var(rd, iptr->dst, REG_NULL);
1512 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1515 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1516 /* val.i = constant */
1518 d = reg_of_var(rd, iptr->dst, REG_NULL);
1519 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1522 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1524 d = reg_of_var(rd, iptr->dst, REG_NULL);
1525 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1528 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1529 /* val.i = constant */
1531 d = reg_of_var(rd, iptr->dst, REG_NULL);
1532 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1535 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1537 d = reg_of_var(rd, iptr->dst, REG_NULL);
1538 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1541 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1542 /* val.i = constant */
1544 d = reg_of_var(rd, iptr->dst, REG_NULL);
1545 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1548 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1550 d = reg_of_var(rd, iptr->dst, REG_NULL);
1551 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1554 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1555 /* val.l = constant */
1557 d = reg_of_var(rd, iptr->dst, REG_NULL);
1558 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1561 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
/* The bitwise ops likewise delegate to the generic ALU emit helpers;
   only the ALU opcode (AND/OR/XOR), width and reg/const variant
   change from case to case. */
1563 d = reg_of_var(rd, iptr->dst, REG_NULL);
1564 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1567 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1568 /* val.i = constant */
1570 d = reg_of_var(rd, iptr->dst, REG_NULL);
1571 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1574 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1576 d = reg_of_var(rd, iptr->dst, REG_NULL);
1577 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1580 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1581 /* val.l = constant */
1583 d = reg_of_var(rd, iptr->dst, REG_NULL);
1584 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1587 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1589 d = reg_of_var(rd, iptr->dst, REG_NULL);
1590 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1593 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1594 /* val.i = constant */
1596 d = reg_of_var(rd, iptr->dst, REG_NULL);
1597 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1600 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1602 d = reg_of_var(rd, iptr->dst, REG_NULL);
1603 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1606 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1607 /* val.l = constant */
1609 d = reg_of_var(rd, iptr->dst, REG_NULL);
1610 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1613 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1615 d = reg_of_var(rd, iptr->dst, REG_NULL);
1616 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1619 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1620 /* val.i = constant */
1622 d = reg_of_var(rd, iptr->dst, REG_NULL);
1623 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1626 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1628 d = reg_of_var(rd, iptr->dst, REG_NULL);
1629 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1632 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1633 /* val.l = constant */
1635 d = reg_of_var(rd, iptr->dst, REG_NULL);
1636 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
1640 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1641 /* op1 = variable, val.i = constant */
1643 /* using inc and dec is definitely faster than add -- tested */
/* NOTE(review): the line assigning d (the local's register number or
   stack offset, taken from var->regoff presumably) is elided in this
   excerpt; comments only. */
1646 var = &(rd->locals[iptr->op1][TYPE_INT]);
/* Local lives on the stack: operate on its memory slot in place. */
1648 if (var->flags & INMEMORY) {
1649 if (iptr->val.i == 1) {
1650 x86_64_incl_membase(cd, REG_SP, d * 8);
1652 } else if (iptr->val.i == -1) {
1653 x86_64_decl_membase(cd, REG_SP, d * 8);
1656 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
/* Local is register-allocated: inc/dec/add the register directly. */
1660 if (iptr->val.i == 1) {
1661 x86_64_incl_reg(cd, d);
1663 } else if (iptr->val.i == -1) {
1664 x86_64_decl_reg(cd, d);
1667 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1673 /* floating operations ************************************************/
1675 case ICMD_FNEG: /* ..., value ==> ..., - value */
/* Negate by XORing the sign bit; the 0x80000000 mask is loaded
   RIP-relative from the data segment (the +9 is the byte length of
   the movss instruction, making the displacement PC-correct).
   NOTE(review): the move of s1 into d is in an elided line of this
   excerpt -- TODO confirm. */
1677 var_to_reg_flt(s1, src, REG_FTMP1);
1678 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1679 disp = dseg_adds4(cd, 0x80000000);
1681 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + disp, REG_FTMP2);
1682 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1683 store_reg_to_var_flt(iptr->dst, d);
1686 case ICMD_DNEG: /* ..., value ==> ..., - value */
/* Same as FNEG but with the 64-bit sign mask and xorpd. */
1688 var_to_reg_flt(s1, src, REG_FTMP1);
1689 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1690 disp = dseg_adds8(cd, 0x8000000000000000);
1692 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + disp, REG_FTMP2);
1693 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1694 store_reg_to_var_flt(iptr->dst, d);
1697 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1699 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1700 var_to_reg_flt(s2, src, REG_FTMP2);
1701 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1703 x86_64_addss_reg_reg(cd, s2, d);
1704 } else if (s2 == d) {
1705 x86_64_addss_reg_reg(cd, s1, d);
1708 x86_64_addss_reg_reg(cd, s2, d);
1710 store_reg_to_var_flt(iptr->dst, d);
1713 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1715 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1716 var_to_reg_flt(s2, src, REG_FTMP2);
1717 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1719 x86_64_addsd_reg_reg(cd, s2, d);
1720 } else if (s2 == d) {
1721 x86_64_addsd_reg_reg(cd, s1, d);
1724 x86_64_addsd_reg_reg(cd, s2, d);
1726 store_reg_to_var_flt(iptr->dst, d);
1729 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1731 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1732 var_to_reg_flt(s2, src, REG_FTMP2);
1733 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1735 M_FLTMOVE(s2, REG_FTMP2);
1739 x86_64_subss_reg_reg(cd, s2, d);
1740 store_reg_to_var_flt(iptr->dst, d);
1743 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1745 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1746 var_to_reg_flt(s2, src, REG_FTMP2);
1747 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1749 M_FLTMOVE(s2, REG_FTMP2);
1753 x86_64_subsd_reg_reg(cd, s2, d);
1754 store_reg_to_var_flt(iptr->dst, d);
1757 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1759 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1760 var_to_reg_flt(s2, src, REG_FTMP2);
1761 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1763 x86_64_mulss_reg_reg(cd, s2, d);
1764 } else if (s2 == d) {
1765 x86_64_mulss_reg_reg(cd, s1, d);
1768 x86_64_mulss_reg_reg(cd, s2, d);
1770 store_reg_to_var_flt(iptr->dst, d);
1773 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1775 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1776 var_to_reg_flt(s2, src, REG_FTMP2);
1777 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1779 x86_64_mulsd_reg_reg(cd, s2, d);
1780 } else if (s2 == d) {
1781 x86_64_mulsd_reg_reg(cd, s1, d);
1784 x86_64_mulsd_reg_reg(cd, s2, d);
1786 store_reg_to_var_flt(iptr->dst, d);
1789 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1791 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1792 var_to_reg_flt(s2, src, REG_FTMP2);
1793 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1795 M_FLTMOVE(s2, REG_FTMP2);
1799 x86_64_divss_reg_reg(cd, s2, d);
1800 store_reg_to_var_flt(iptr->dst, d);
1803 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1805 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1806 var_to_reg_flt(s2, src, REG_FTMP2);
1807 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1809 M_FLTMOVE(s2, REG_FTMP2);
1813 x86_64_divsd_reg_reg(cd, s2, d);
1814 store_reg_to_var_flt(iptr->dst, d);
1817 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1819 var_to_reg_int(s1, src, REG_ITMP1);
1820 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1821 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1822 store_reg_to_var_flt(iptr->dst, d);
1825 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1827 var_to_reg_int(s1, src, REG_ITMP1);
1828 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1829 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1830 store_reg_to_var_flt(iptr->dst, d);
1833 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1835 var_to_reg_int(s1, src, REG_ITMP1);
1836 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1837 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1838 store_reg_to_var_flt(iptr->dst, d);
1841 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1843 var_to_reg_int(s1, src, REG_ITMP1);
1844 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1845 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1846 store_reg_to_var_flt(iptr->dst, d);
1849 case ICMD_F2I: /* ..., value ==> ..., (int) value */
/* cvttss2si truncates toward zero; on overflow/NaN it yields the
   x86 "integer indefinite" 0x80000000, in which case we fall through
   into a call to asm_builtin_f2i to produce the JVM-mandated
   saturating result.  'a' is the byte length of that slow path and
   is used as the jcc skip distance. */
1851 var_to_reg_flt(s1, src, REG_FTMP1);
1852 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1853 x86_64_cvttss2si_reg_reg(cd, s1, d);
1854 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1855 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1856 x86_64_jcc(cd, X86_64_CC_NE, a);
/* Slow path: argument in FTMP1, builtin result in REG_RESULT. */
1857 M_FLTMOVE(s1, REG_FTMP1);
1858 x86_64_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP2);
1859 x86_64_call_reg(cd, REG_ITMP2);
1860 M_INTMOVE(REG_RESULT, d);
1861 store_reg_to_var_int(iptr->dst, d);
1864 case ICMD_D2I: /* ..., value ==> ..., (int) value */
/* Same pattern as F2I with cvttsd2si / asm_builtin_d2i. */
1866 var_to_reg_flt(s1, src, REG_FTMP1);
1867 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1868 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1869 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1870 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1871 x86_64_jcc(cd, X86_64_CC_NE, a);
1872 M_FLTMOVE(s1, REG_FTMP1);
1873 x86_64_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP2);
1874 x86_64_call_reg(cd, REG_ITMP2);
1875 M_INTMOVE(REG_RESULT, d);
1876 store_reg_to_var_int(iptr->dst, d);
1879 case ICMD_F2L: /* ..., value ==> ..., (long) value */
/* 64-bit variant: the indefinite value 0x8000000000000000 does not
   fit an imm32 compare, so it goes through scratch ITMP2. */
1881 var_to_reg_flt(s1, src, REG_FTMP1);
1882 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1883 x86_64_cvttss2siq_reg_reg(cd, s1, d);
1884 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1885 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1886 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1887 x86_64_jcc(cd, X86_64_CC_NE, a);
1888 M_FLTMOVE(s1, REG_FTMP1);
1889 x86_64_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP2);
1890 x86_64_call_reg(cd, REG_ITMP2);
1891 M_INTMOVE(REG_RESULT, d);
1892 store_reg_to_var_int(iptr->dst, d);
1895 case ICMD_D2L: /* ..., value ==> ..., (long) value */
/* Same pattern as F2L with cvttsd2siq / asm_builtin_d2l. */
1897 var_to_reg_flt(s1, src, REG_FTMP1);
1898 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1899 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1900 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1901 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1902 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1903 x86_64_jcc(cd, X86_64_CC_NE, a);
1904 M_FLTMOVE(s1, REG_FTMP1);
1905 x86_64_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP2);
1906 x86_64_call_reg(cd, REG_ITMP2);
1907 M_INTMOVE(REG_RESULT, d);
1908 store_reg_to_var_int(iptr->dst, d);
1911 case ICMD_F2D: /* ..., value ==> ..., (double) value */
/* Widening float -> double conversion (always exact). */
1913 var_to_reg_flt(s1, src, REG_FTMP1);
1914 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1915 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1916 store_reg_to_var_flt(iptr->dst, d);
1919 case ICMD_D2F: /* ..., value ==> ..., (float) value */
/* Narrowing double -> float conversion. */
1921 var_to_reg_flt(s1, src, REG_FTMP1);
1922 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1923 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1924 store_reg_to_var_flt(iptr->dst, d);
1927 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1928 /* == => 0, < => 1, > => -1 */
/* ucomiss sets CF/ZF/PF; the conditional moves then pick the 1 / -1
   constants prepared in ITMP1/ITMP2.  NOTE(review): the instruction
   that zeroes d for the equal case is in an elided line of this
   excerpt -- TODO confirm. */
1930 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1931 var_to_reg_flt(s2, src, REG_FTMP2);
1932 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1934 M_MOV_IMM(1, REG_ITMP1);
1935 M_MOV_IMM(-1, REG_ITMP2);
1936 x86_64_ucomiss_reg_reg(cd, s1, s2);
1937 M_CMOVB(REG_ITMP1, d);
1938 M_CMOVA(REG_ITMP2, d);
1939 M_CMOVP(REG_ITMP2, d); /* treat unordered as GT */
1940 store_reg_to_var_int(iptr->dst, d);
1943 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1944 /* == => 0, < => 1, > => -1 */
/* Same as FCMPL except a NaN operand (parity flag set) selects the
   other constant, per fcmpg semantics. */
1946 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1947 var_to_reg_flt(s2, src, REG_FTMP2);
1948 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1950 M_MOV_IMM(1, REG_ITMP1);
1951 M_MOV_IMM(-1, REG_ITMP2);
1952 x86_64_ucomiss_reg_reg(cd, s1, s2);
1953 M_CMOVB(REG_ITMP1, d);
1954 M_CMOVA(REG_ITMP2, d);
1955 M_CMOVP(REG_ITMP1, d); /* treat unordered as LT */
1956 store_reg_to_var_int(iptr->dst, d);
1959 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1960 /* == => 0, < => 1, > => -1 */
/* Double-precision variant of FCMPL (ucomisd). */
1962 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1963 var_to_reg_flt(s2, src, REG_FTMP2);
1964 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1966 M_MOV_IMM(1, REG_ITMP1);
1967 M_MOV_IMM(-1, REG_ITMP2);
1968 x86_64_ucomisd_reg_reg(cd, s1, s2);
1969 M_CMOVB(REG_ITMP1, d);
1970 M_CMOVA(REG_ITMP2, d);
1971 M_CMOVP(REG_ITMP2, d); /* treat unordered as GT */
1972 store_reg_to_var_int(iptr->dst, d);
1975 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1976 /* == => 0, < => 1, > => -1 */
/* Double-precision variant of FCMPG (ucomisd, NaN maps to LT). */
1978 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1979 var_to_reg_flt(s2, src, REG_FTMP2);
1980 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1982 M_MOV_IMM(1, REG_ITMP1);
1983 M_MOV_IMM(-1, REG_ITMP2);
1984 x86_64_ucomisd_reg_reg(cd, s1, s2);
1985 M_CMOVB(REG_ITMP1, d);
1986 M_CMOVA(REG_ITMP2, d);
1987 M_CMOVP(REG_ITMP1, d); /* treat unordered as LT */
1988 store_reg_to_var_int(iptr->dst, d);
1992 /* memory operations **************************************************/
1994 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
/* Load the 32-bit size field from the array header after the
   null-pointer check on the reference. */
1996 var_to_reg_int(s1, src, REG_ITMP1);
1997 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1998 gen_nullptr_check(s1);
1999 M_ILD(d, s1, OFFSET(java_arrayheader, size));
2000 store_reg_to_var_int(iptr->dst, d);
2003 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2005 var_to_reg_int(s1, src->prev, REG_ITMP1);
2006 var_to_reg_int(s2, src, REG_ITMP2);
2007 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2008 if (iptr->op1 == 0) {
2009 gen_nullptr_check(s1);
2012 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2013 store_reg_to_var_int(iptr->dst, d);
2016 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2018 var_to_reg_int(s1, src->prev, REG_ITMP1);
2019 var_to_reg_int(s2, src, REG_ITMP2);
2020 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2021 if (iptr->op1 == 0) {
2022 gen_nullptr_check(s1);
2025 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2026 store_reg_to_var_int(iptr->dst, d);
2029 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2031 var_to_reg_int(s1, src->prev, REG_ITMP1);
2032 var_to_reg_int(s2, src, REG_ITMP2);
2033 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2034 if (iptr->op1 == 0) {
2035 gen_nullptr_check(s1);
2038 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2039 store_reg_to_var_int(iptr->dst, d);
2042 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2044 var_to_reg_int(s1, src->prev, REG_ITMP1);
2045 var_to_reg_int(s2, src, REG_ITMP2);
2046 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2047 if (iptr->op1 == 0) {
2048 gen_nullptr_check(s1);
2051 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2052 store_reg_to_var_int(iptr->dst, d);
2055 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2057 var_to_reg_int(s1, src->prev, REG_ITMP1);
2058 var_to_reg_int(s2, src, REG_ITMP2);
2059 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2060 if (iptr->op1 == 0) {
2061 gen_nullptr_check(s1);
2064 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2065 store_reg_to_var_int(iptr->dst, d);
2068 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2070 var_to_reg_int(s1, src->prev, REG_ITMP1);
2071 var_to_reg_int(s2, src, REG_ITMP2);
2072 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2073 if (iptr->op1 == 0) {
2074 gen_nullptr_check(s1);
2077 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2078 store_reg_to_var_flt(iptr->dst, d);
2081 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2083 var_to_reg_int(s1, src->prev, REG_ITMP1);
2084 var_to_reg_int(s2, src, REG_ITMP2);
2085 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2086 if (iptr->op1 == 0) {
2087 gen_nullptr_check(s1);
2090 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2091 store_reg_to_var_flt(iptr->dst, d);
2094 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2096 var_to_reg_int(s1, src->prev, REG_ITMP1);
2097 var_to_reg_int(s2, src, REG_ITMP2);
2098 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2099 if (iptr->op1 == 0) {
2100 gen_nullptr_check(s1);
2103 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2104 store_reg_to_var_int(iptr->dst, d);
2108 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2110 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2111 var_to_reg_int(s2, src->prev, REG_ITMP2);
2112 if (iptr->op1 == 0) {
2113 gen_nullptr_check(s1);
2116 var_to_reg_int(s3, src, REG_ITMP3);
2117 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2120 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2122 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2123 var_to_reg_int(s2, src->prev, REG_ITMP2);
2124 if (iptr->op1 == 0) {
2125 gen_nullptr_check(s1);
2128 var_to_reg_int(s3, src, REG_ITMP3);
2129 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2132 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2134 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2135 var_to_reg_int(s2, src->prev, REG_ITMP2);
2136 if (iptr->op1 == 0) {
2137 gen_nullptr_check(s1);
2140 var_to_reg_int(s3, src, REG_ITMP3);
2141 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2144 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2146 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2147 var_to_reg_int(s2, src->prev, REG_ITMP2);
2148 if (iptr->op1 == 0) {
2149 gen_nullptr_check(s1);
2152 var_to_reg_int(s3, src, REG_ITMP3);
2153 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2156 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2158 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2159 var_to_reg_int(s2, src->prev, REG_ITMP2);
2160 if (iptr->op1 == 0) {
2161 gen_nullptr_check(s1);
2164 var_to_reg_int(s3, src, REG_ITMP3);
2165 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2168 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2170 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2171 var_to_reg_int(s2, src->prev, REG_ITMP2);
2172 if (iptr->op1 == 0) {
2173 gen_nullptr_check(s1);
2176 var_to_reg_flt(s3, src, REG_FTMP3);
2177 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2180 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2182 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2183 var_to_reg_int(s2, src->prev, REG_ITMP2);
2184 if (iptr->op1 == 0) {
2185 gen_nullptr_check(s1);
2188 var_to_reg_flt(s3, src, REG_FTMP3);
2189 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2192 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2194 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2195 var_to_reg_int(s2, src->prev, REG_ITMP2);
2196 if (iptr->op1 == 0) {
2197 gen_nullptr_check(s1);
2200 var_to_reg_int(s3, src, REG_ITMP3);
2202 M_MOV(s1, rd->argintregs[0]);
2203 M_MOV(s3, rd->argintregs[1]);
2204 M_MOV_IMM(BUILTIN_canstore, REG_ITMP1);
2208 codegen_add_arraystoreexception_ref(cd, cd->mcodeptr);
2210 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2211 var_to_reg_int(s2, src->prev, REG_ITMP2);
2212 var_to_reg_int(s3, src, REG_ITMP3);
2213 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2217 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2219 var_to_reg_int(s1, src->prev, REG_ITMP1);
2220 var_to_reg_int(s2, src, REG_ITMP2);
2221 if (iptr->op1 == 0) {
2222 gen_nullptr_check(s1);
2225 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2228 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2230 var_to_reg_int(s1, src->prev, REG_ITMP1);
2231 var_to_reg_int(s2, src, REG_ITMP2);
2232 if (iptr->op1 == 0) {
2233 gen_nullptr_check(s1);
2236 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2239 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2241 var_to_reg_int(s1, src->prev, REG_ITMP1);
2242 var_to_reg_int(s2, src, REG_ITMP2);
2243 if (iptr->op1 == 0) {
2244 gen_nullptr_check(s1);
2247 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2250 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2252 var_to_reg_int(s1, src->prev, REG_ITMP1);
2253 var_to_reg_int(s2, src, REG_ITMP2);
2254 if (iptr->op1 == 0) {
2255 gen_nullptr_check(s1);
2258 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2261 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2263 var_to_reg_int(s1, src->prev, REG_ITMP1);
2264 var_to_reg_int(s2, src, REG_ITMP2);
2265 if (iptr->op1 == 0) {
2266 gen_nullptr_check(s1);
2270 if (IS_IMM32(iptr->val.l)) {
2271 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2273 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2274 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2278 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2280 var_to_reg_int(s1, src->prev, REG_ITMP1);
2281 var_to_reg_int(s2, src, REG_ITMP2);
2282 if (iptr->op1 == 0) {
2283 gen_nullptr_check(s1);
2286 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2290 case ICMD_GETSTATIC: /* ... ==> ..., value */
2291 /* op1 = type, val.a = field address */
2293 if (iptr->val.a == NULL) {
2294 disp = dseg_addaddress(cd, NULL);
2296 /* PROFILE_CYCLE_STOP; */
2298 codegen_addpatchref(cd, cd->mcodeptr,
2299 PATCHER_get_putstatic,
2300 (unresolved_field *) iptr->target, disp);
2302 if (opt_showdisassemble) {
2303 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2306 /* PROFILE_CYCLE_START; */
2309 fieldinfo *fi = iptr->val.a;
2311 disp = dseg_addaddress(cd, &(fi->value));
2313 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class)) {
2316 codegen_addpatchref(cd, cd->mcodeptr,
2317 PATCHER_clinit, fi->class, 0);
2319 if (opt_showdisassemble) {
2320 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2323 PROFILE_CYCLE_START;
2327 /* This approach is much faster than moving the field
2328 address inline into a register. */
2330 M_ALD(REG_ITMP2, RIP, -(((ptrint) cd->mcodeptr + 7) -
2331 (ptrint) cd->mcodebase) + disp);
2333 switch (iptr->op1) {
2335 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2336 M_ILD(d, REG_ITMP2, 0);
2337 store_reg_to_var_int(iptr->dst, d);
2341 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2342 M_LLD(d, REG_ITMP2, 0);
2343 store_reg_to_var_int(iptr->dst, d);
2346 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2347 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2348 store_reg_to_var_flt(iptr->dst, d);
2351 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2352 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2353 store_reg_to_var_flt(iptr->dst, d);
2358 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2359 /* op1 = type, val.a = field address */
2361 if (iptr->val.a == NULL) {
2362 disp = dseg_addaddress(cd, NULL);
2364 /* PROFILE_CYCLE_STOP; */
2366 codegen_addpatchref(cd, cd->mcodeptr,
2367 PATCHER_get_putstatic,
2368 (unresolved_field *) iptr->target, disp);
2370 if (opt_showdisassemble) {
2371 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2374 /* PROFILE_CYCLE_START; */
2377 fieldinfo *fi = iptr->val.a;
2379 disp = dseg_addaddress(cd, &(fi->value));
2381 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class)) {
2384 codegen_addpatchref(cd, cd->mcodeptr,
2385 PATCHER_clinit, fi->class, 0);
2387 if (opt_showdisassemble) {
2388 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2391 PROFILE_CYCLE_START;
2395 /* This approach is much faster than moving the field
2396 address inline into a register. */
2398 M_ALD(REG_ITMP2, RIP, -(((ptrint) cd->mcodeptr + 7) -
2399 (ptrint) cd->mcodebase) + disp);
2401 switch (iptr->op1) {
2403 var_to_reg_int(s2, src, REG_ITMP1);
2404 M_IST(s2, REG_ITMP2, 0);
2408 var_to_reg_int(s2, src, REG_ITMP1);
2409 M_LST(s2, REG_ITMP2, 0);
2412 var_to_reg_flt(s2, src, REG_FTMP1);
2413 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2416 var_to_reg_flt(s2, src, REG_FTMP1);
2417 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
2422 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2423 /* val = value (in current instruction) */
2424 /* op1 = type, val.a = field address (in */
2425 /* following NOP) */
2427 if (iptr[1].val.a == NULL) {
2428 disp = dseg_addaddress(cd, NULL);
2430 /* PROFILE_CYCLE_STOP; */
2432 codegen_addpatchref(cd, cd->mcodeptr,
2433 PATCHER_get_putstatic,
2434 (unresolved_field *) iptr[1].target, disp);
2436 if (opt_showdisassemble) {
2437 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2440 /* PROFILE_CYCLE_START; */
2443 fieldinfo *fi = iptr[1].val.a;
2445 disp = dseg_addaddress(cd, &(fi->value));
2447 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class)) {
2450 codegen_addpatchref(cd, cd->mcodeptr,
2451 PATCHER_clinit, fi->class, 0);
2453 if (opt_showdisassemble) {
2454 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2457 PROFILE_CYCLE_START;
2461 /* This approach is much faster than moving the field
2462 address inline into a register. */
2464 M_ALD(REG_ITMP1, RIP, -(((ptrint) cd->mcodeptr + 7) -
2465 (ptrint) cd->mcodebase) + disp);
2467 switch (iptr->op1) {
2470 M_IST_IMM(iptr->val.i, REG_ITMP1, 0);
2475 if (IS_IMM32(iptr->val.l)) {
2476 M_LST_IMM32(iptr->val.l, REG_ITMP1, 0);
2478 M_IST_IMM(iptr->val.l, REG_ITMP1, 0);
2479 M_IST_IMM(iptr->val.l >> 32, REG_ITMP1, 4);
2485 case ICMD_GETFIELD: /* ... ==> ..., value */
2486 /* op1 = type, val.i = field offset */
2488 var_to_reg_int(s1, src, REG_ITMP1);
2489 gen_nullptr_check(s1);
2491 if (iptr->val.a == NULL) {
2492 /* PROFILE_CYCLE_STOP; */
2494 codegen_addpatchref(cd, cd->mcodeptr,
2495 PATCHER_get_putfield,
2496 (unresolved_field *) iptr->target, 0);
2498 if (opt_showdisassemble) {
2499 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2502 /* PROFILE_CYCLE_START; */
2507 disp = ((fieldinfo *) (iptr->val.a))->offset;
2510 switch (iptr->op1) {
2512 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2513 if (iptr->val.a == NULL)
2514 M_ILD32(d, s1, disp);
2517 store_reg_to_var_int(iptr->dst, d);
2521 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2522 if (iptr->val.a == NULL)
2523 M_LLD32(d, s1, disp);
2526 store_reg_to_var_int(iptr->dst, d);
2529 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2530 x86_64_movss_membase32_reg(cd, s1, disp, d);
2531 store_reg_to_var_flt(iptr->dst, d);
2534 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2535 x86_64_movsd_membase32_reg(cd, s1, disp, d);
2536 store_reg_to_var_flt(iptr->dst, d);
2541 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2542 /* op1 = type, val.i = field offset */
2544 var_to_reg_int(s1, src->prev, REG_ITMP1);
2545 gen_nullptr_check(s1);
2547 if (IS_INT_LNG_TYPE(iptr->op1)) {
2548 var_to_reg_int(s2, src, REG_ITMP2);
2550 var_to_reg_flt(s2, src, REG_FTMP2);
2553 if (iptr->val.a == NULL) {
2554 /* PROFILE_CYCLE_STOP; */
2556 codegen_addpatchref(cd, cd->mcodeptr,
2557 PATCHER_get_putfield,
2558 (unresolved_field *) iptr->target, 0);
2560 if (opt_showdisassemble) {
2561 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2564 /* PROFILE_CYCLE_START; */
2569 disp = ((fieldinfo *) (iptr->val.a))->offset;
2572 switch (iptr->op1) {
2574 if (iptr->val.a == NULL)
2575 M_IST32(s2, s1, disp);
2577 M_IST(s2, s1, disp);
2581 if (iptr->val.a == NULL)
2582 M_LST32(s2, s1, disp);
2584 M_LST(s2, s1, disp);
2587 x86_64_movss_reg_membase32(cd, s2, s1, disp);
2590 x86_64_movsd_reg_membase32(cd, s2, s1, disp);
2595 case ICMD_PUTFIELDCONST: /* ..., objectref, value ==> ... */
2596 /* val = value (in current instruction) */
2597 /* op1 = type, val.a = field address (in */
2598 /* following NOP) */
2600 var_to_reg_int(s1, src, REG_ITMP1);
2601 gen_nullptr_check(s1);
2603 if (iptr[1].val.a == NULL) {
2604 /* PROFILE_CYCLE_STOP; */
2606 codegen_addpatchref(cd, cd->mcodeptr,
2607 PATCHER_putfieldconst,
2608 (unresolved_field *) iptr[1].target, 0);
2610 if (opt_showdisassemble) {
2611 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2614 /* PROFILE_CYCLE_START; */
2619 disp = ((fieldinfo *) (iptr[1].val.a))->offset;
2622 switch (iptr->op1) {
2625 if (iptr[1].val.a == NULL)
2626 M_IST32_IMM(iptr->val.i, s1, disp);
2628 M_IST_IMM(iptr->val.i, s1, disp);
2633 /* We can only optimize the move, if the class is
2634 resolved. Otherwise we don't know what to patch. */
2635 if (iptr[1].val.a == NULL) {
2636 M_IST32_IMM(iptr->val.l, s1, disp);
2637 M_IST32_IMM(iptr->val.l >> 32, s1, disp + 4);
2639 if (IS_IMM32(iptr->val.l)) {
2640 M_LST_IMM32(iptr->val.l, s1, disp);
2642 M_IST_IMM(iptr->val.l, s1, disp);
2643 M_IST_IMM(iptr->val.l >> 32, s1, disp + 4);
2651 /* branch operations **************************************************/
2653 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2655 var_to_reg_int(s1, src, REG_ITMP1);
2656 M_INTMOVE(s1, REG_ITMP1_XPTR);
2660 #ifdef ENABLE_VERIFIER
2662 codegen_addpatchref(cd, cd->mcodeptr,
2663 PATCHER_athrow_areturn,
2664 (unresolved_class *) iptr->val.a, 0);
2666 if (opt_showdisassemble) {
2667 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2670 #endif /* ENABLE_VERIFIER */
2672 M_CALL_IMM(0); /* passing exception pc */
2673 M_POP(REG_ITMP2_XPC);
2675 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2679 case ICMD_GOTO: /* ... ==> ... */
2680 /* op1 = target JavaVM pc */
2683 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2686 case ICMD_JSR: /* ... ==> ... */
2687 /* op1 = target JavaVM pc */
2690 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2693 case ICMD_RET: /* ... ==> ... */
2694 /* op1 = local variable */
2696 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2697 var_to_reg_int(s1, var, REG_ITMP1);
2701 case ICMD_IFNULL: /* ..., value ==> ... */
2702 /* op1 = target JavaVM pc */
2704 if (src->flags & INMEMORY)
2705 M_CMP_IMM_MEMBASE(0, REG_SP, src->regoff * 8);
2707 M_TEST(src->regoff);
2709 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2712 case ICMD_IFNONNULL: /* ..., value ==> ... */
2713 /* op1 = target JavaVM pc */
2715 if (src->flags & INMEMORY)
2716 M_CMP_IMM_MEMBASE(0, REG_SP, src->regoff * 8);
2718 M_TEST(src->regoff);
2720 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2723 case ICMD_IFEQ: /* ..., value ==> ... */
2724 /* op1 = target JavaVM pc, val.i = constant */
2726 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2729 case ICMD_IFLT: /* ..., value ==> ... */
2730 /* op1 = target JavaVM pc, val.i = constant */
2732 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2735 case ICMD_IFLE: /* ..., value ==> ... */
2736 /* op1 = target JavaVM pc, val.i = constant */
2738 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2741 case ICMD_IFNE: /* ..., value ==> ... */
2742 /* op1 = target JavaVM pc, val.i = constant */
2744 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2747 case ICMD_IFGT: /* ..., value ==> ... */
2748 /* op1 = target JavaVM pc, val.i = constant */
2750 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2753 case ICMD_IFGE: /* ..., value ==> ... */
2754 /* op1 = target JavaVM pc, val.i = constant */
2756 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
2759 case ICMD_IF_LEQ: /* ..., value ==> ... */
2760 /* op1 = target JavaVM pc, val.l = constant */
2762 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2765 case ICMD_IF_LLT: /* ..., value ==> ... */
2766 /* op1 = target JavaVM pc, val.l = constant */
2768 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2771 case ICMD_IF_LLE: /* ..., value ==> ... */
2772 /* op1 = target JavaVM pc, val.l = constant */
2774 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2777 case ICMD_IF_LNE: /* ..., value ==> ... */
2778 /* op1 = target JavaVM pc, val.l = constant */
2780 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2783 case ICMD_IF_LGT: /* ..., value ==> ... */
2784 /* op1 = target JavaVM pc, val.l = constant */
2786 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2789 case ICMD_IF_LGE: /* ..., value ==> ... */
2790 /* op1 = target JavaVM pc, val.l = constant */
2792 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
2795 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2796 /* op1 = target JavaVM pc */
2798 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2801 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2802 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2804 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2807 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2808 /* op1 = target JavaVM pc */
2810 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2813 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2814 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2816 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2819 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2820 /* op1 = target JavaVM pc */
2822 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2825 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2826 /* op1 = target JavaVM pc */
2828 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2831 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2832 /* op1 = target JavaVM pc */
2834 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2837 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2838 /* op1 = target JavaVM pc */
2840 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2843 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2844 /* op1 = target JavaVM pc */
2846 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2849 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2850 /* op1 = target JavaVM pc */
2852 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2855 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2856 /* op1 = target JavaVM pc */
2858 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2861 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2862 /* op1 = target JavaVM pc */
2864 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2867 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
2869 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2872 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2873 case ICMD_IFNE_ICONST: /* val.i = constant */
2874 case ICMD_IFLT_ICONST:
2875 case ICMD_IFGE_ICONST:
2876 case ICMD_IFGT_ICONST:
2877 case ICMD_IFLE_ICONST:
2879 var_to_reg_int(s1, src, REG_ITMP1);
2880 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2881 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2883 M_INTMOVE(s1, REG_ITMP1);
2886 if (iptr[1].val.i == 0)
2889 M_IMOV_IMM(iptr[1].val.i, d);
2891 if (iptr->val.i == 0)
2894 M_IMOV_IMM(iptr->val.i, REG_ITMP2);
2897 switch (iptr->opc) {
2898 case ICMD_IFEQ_ICONST:
2899 M_CMOVEQ(REG_ITMP2, d);
2901 case ICMD_IFNE_ICONST:
2902 M_CMOVNE(REG_ITMP2, d);
2904 case ICMD_IFLT_ICONST:
2905 M_CMOVLT(REG_ITMP2, d);
2907 case ICMD_IFGE_ICONST:
2908 M_CMOVGE(REG_ITMP2, d);
2910 case ICMD_IFGT_ICONST:
2911 M_CMOVGT(REG_ITMP2, d);
2913 case ICMD_IFLE_ICONST:
2914 M_CMOVLE(REG_ITMP2, d);
2918 store_reg_to_var_int(iptr->dst, d);
2922 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2925 var_to_reg_int(s1, src, REG_RESULT);
2926 M_INTMOVE(s1, REG_RESULT);
2927 goto nowperformreturn;
2929 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2931 var_to_reg_int(s1, src, REG_RESULT);
2932 M_INTMOVE(s1, REG_RESULT);
2934 #ifdef ENABLE_VERIFIER
2938 codegen_addpatchref(cd, cd->mcodeptr,
2939 PATCHER_athrow_areturn,
2940 (unresolved_class *) iptr->val.a, 0);
2942 if (opt_showdisassemble) {
2943 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2946 PROFILE_CYCLE_START;
2948 #endif /* ENABLE_VERIFIER */
2949 goto nowperformreturn;
2951 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2954 var_to_reg_flt(s1, src, REG_FRESULT);
2955 M_FLTMOVE(s1, REG_FRESULT);
2956 goto nowperformreturn;
2958 case ICMD_RETURN: /* ... ==> ... */
2964 p = parentargs_base;
2966 /* call trace function */
2967 if (opt_verbosecall) {
2968 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2970 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2971 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2973 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
2974 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
2975 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2976 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2978 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
2979 x86_64_call_reg(cd, REG_ITMP1);
2981 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2982 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2984 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2987 #if defined(USE_THREADS)
2988 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2989 M_ALD(rd->argintregs[0], REG_SP, rd->memuse * 8);
2991 /* we need to save the proper return value */
2992 switch (iptr->opc) {
2996 M_LST(REG_RESULT, REG_SP, rd->memuse * 8);
3000 M_DST(REG_FRESULT, REG_SP, rd->memuse * 8);
3004 M_MOV_IMM(builtin_monitorexit, REG_ITMP1);
3007 /* and now restore the proper return value */
3008 switch (iptr->opc) {
3012 M_LLD(REG_RESULT, REG_SP, rd->memuse * 8);
3016 M_DLD(REG_FRESULT, REG_SP, rd->memuse * 8);
3022 /* restore saved registers */
3024 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
3025 p--; M_LLD(rd->savintregs[i], REG_SP, p * 8);
3027 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
3028 p--; M_DLD(rd->savfltregs[i], REG_SP, p * 8);
3031 /* deallocate stack */
3033 if (parentargs_base)
3034 M_AADD_IMM(parentargs_base * 8, REG_SP);
3036 /* generate method profiling code */
3045 case ICMD_TABLESWITCH: /* ..., index ==> ... */
3050 tptr = (void **) iptr->target;
3052 s4ptr = iptr->val.a;
3053 l = s4ptr[1]; /* low */
3054 i = s4ptr[2]; /* high */
3056 var_to_reg_int(s1, src, REG_ITMP1);
3057 M_INTMOVE(s1, REG_ITMP1);
3059 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
3064 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
3065 x86_64_jcc(cd, X86_64_CC_A, 0);
3067 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
3068 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3070 /* build jump table top down and use address of lowest entry */
3072 /* s4ptr += 3 + i; */
3076 dseg_addtarget(cd, (basicblock *) tptr[0]);
3080 /* length of dataseg after last dseg_addtarget is used by load */
3082 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
3083 dseg_adddata(cd, cd->mcodeptr);
3084 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
3085 x86_64_jmp_reg(cd, REG_ITMP1);
3090 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
3092 s4 i, l, val, *s4ptr;
3095 tptr = (void **) iptr->target;
3097 s4ptr = iptr->val.a;
3098 l = s4ptr[0]; /* default */
3099 i = s4ptr[1]; /* count */
3101 MCODECHECK(8 + ((7 + 6) * i) + 5);
3102 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
3108 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
3109 x86_64_jcc(cd, X86_64_CC_E, 0);
3110 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3113 x86_64_jmp_imm(cd, 0);
3115 tptr = (void **) iptr->target;
3116 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3121 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
3122 /* op1 = arg count val.a = builtintable entry */
3128 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3129 /* op1 = arg count, val.a = method pointer */
3131 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3132 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
3133 case ICMD_INVOKEINTERFACE:
3138 unresolved_method *um = iptr->target;
3139 md = um->methodref->parseddesc.md;
3141 md = lm->parseddesc;
3145 s3 = md->paramcount;
3147 MCODECHECK((20 * s3) + 128);
3149 /* copy arguments to registers or stack location */
3151 for (s3 = s3 - 1; s3 >= 0; s3--, src = src->prev) {
3152 if (src->varkind == ARGVAR)
3154 if (IS_INT_LNG_TYPE(src->type)) {
3155 if (!md->params[s3].inmemory) {
3156 s1 = rd->argintregs[md->params[s3].regoff];
3157 var_to_reg_int(d, src, s1);
3160 var_to_reg_int(d, src, REG_ITMP1);
3161 M_LST(d, REG_SP, md->params[s3].regoff * 8);
3165 if (!md->params[s3].inmemory) {
3166 s1 = rd->argfltregs[md->params[s3].regoff];
3167 var_to_reg_flt(d, src, s1);
3170 var_to_reg_flt(d, src, REG_FTMP1);
3171 M_DST(d, REG_SP, md->params[s3].regoff * 8);
3176 /* generate method profiling code */
3180 switch (iptr->opc) {
3182 a = (ptrint) bte->fp;
3183 d = md->returntype.type;
3185 M_MOV_IMM(a, REG_ITMP1);
3188 /* if op1 == true, we need to check for an exception */
3190 if (iptr->op1 == true) {
3193 codegen_add_fillinstacktrace_ref(cd, cd->mcodeptr);
3197 case ICMD_INVOKESPECIAL:
3198 M_TEST(rd->argintregs[0]);
3200 codegen_add_nullpointerexception_ref(cd, cd->mcodeptr);
3202 /* first argument contains pointer */
3203 /* gen_nullptr_check(rd->argintregs[0]); */
3205 /* access memory for hardware nullptr */
3206 /* x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); */
3210 case ICMD_INVOKESTATIC:
3212 unresolved_method *um = iptr->target;
3214 codegen_addpatchref(cd, cd->mcodeptr,
3215 PATCHER_invokestatic_special, um, 0);
3217 if (opt_showdisassemble) {
3218 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3222 d = um->methodref->parseddesc.md->returntype.type;
3225 a = (ptrint) lm->stubroutine;
3226 d = lm->parseddesc->returntype.type;
3229 M_MOV_IMM(a, REG_ITMP2);
3233 case ICMD_INVOKEVIRTUAL:
3234 gen_nullptr_check(rd->argintregs[0]);
3237 unresolved_method *um = iptr->target;
3239 codegen_addpatchref(cd, cd->mcodeptr,
3240 PATCHER_invokevirtual, um, 0);
3242 if (opt_showdisassemble) {
3243 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3247 d = um->methodref->parseddesc.md->returntype.type;
3250 s1 = OFFSET(vftbl_t, table[0]) +
3251 sizeof(methodptr) * lm->vftblindex;
3252 d = lm->parseddesc->returntype.type;
3255 x86_64_mov_membase_reg(cd, rd->argintregs[0],
3256 OFFSET(java_objectheader, vftbl),
3258 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP1);
3262 case ICMD_INVOKEINTERFACE:
3263 gen_nullptr_check(rd->argintregs[0]);
3266 unresolved_method *um = iptr->target;
3268 codegen_addpatchref(cd, cd->mcodeptr,
3269 PATCHER_invokeinterface, um, 0);
3271 if (opt_showdisassemble) {
3272 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3277 d = um->methodref->parseddesc.md->returntype.type;
3280 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3281 sizeof(methodptr) * lm->class->index;
3283 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3285 d = lm->parseddesc->returntype.type;
3288 M_ALD(REG_ITMP2, rd->argintregs[0],
3289 OFFSET(java_objectheader, vftbl));
3290 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP2);
3291 x86_64_mov_membase32_reg(cd, REG_ITMP2, s2, REG_ITMP1);
3296 /* generate method profiling code */
3298 PROFILE_CYCLE_START;
3300 /* d contains return type */
3302 if (d != TYPE_VOID) {
3303 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3304 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3305 M_INTMOVE(REG_RESULT, s1);
3306 store_reg_to_var_int(iptr->dst, s1);
3308 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3309 M_FLTMOVE(REG_FRESULT, s1);
3310 store_reg_to_var_flt(iptr->dst, s1);
3316 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3318 /* op1: 0 == array, 1 == class */
3319 /* val.a: (classinfo *) superclass */
3321 /* superclass is an interface:
3323 * OK if ((sub == NULL) ||
3324 * (sub->vftbl->interfacetablelength > super->index) &&
3325 * (sub->vftbl->interfacetable[-super->index] != NULL));
3327 * superclass is a class:
3329 * OK if ((sub == NULL) || (0
3330 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3331 * super->vftbl->diffval));
3334 if (iptr->op1 == 1) {
3335 /* object type cast-check */
3338 vftbl_t *supervftbl;
3341 super = (classinfo *) iptr->val.a;
3348 superindex = super->index;
3349 supervftbl = super->vftbl;
3352 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3353 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3355 var_to_reg_int(s1, src, REG_ITMP1);
3357 /* calculate interface checkcast code size */
3359 s2 = 3; /* mov_membase_reg */
3360 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3362 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub imm32 */ +
3363 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3364 3 /* test */ + 6 /* jcc */;
3367 s2 += (opt_showdisassemble ? 5 : 0);
3369 /* calculate class checkcast code size */
3371 s3 = 3; /* mov_membase_reg */
3372 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3373 s3 += 10 /* mov_imm_reg */ + 3 + 4 /* movl_membase32_reg */;
3376 if (s1 != REG_ITMP1) {
3377 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3378 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3379 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3380 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3386 s3 += 3 + 4 /* movl_membase32_reg */ + 3 /* sub */ +
3387 10 /* mov_imm_reg */ + 3 /* movl_membase_reg */;
3388 CALCOFFSETBYTES(s3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3391 s3 += 3 /* cmp */ + 6 /* jcc */;
3394 s3 += (opt_showdisassemble ? 5 : 0);
3396 /* if class is not resolved, check which code to call */
3400 M_BEQ(6 + (opt_showdisassemble ? 5 : 0) + 7 + 6 + s2 + 5 + s3);
3402 codegen_addpatchref(cd, cd->mcodeptr,
3403 PATCHER_checkcast_instanceof_flags,
3404 (constant_classref *) iptr->target, 0);
3406 if (opt_showdisassemble) {
3407 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3410 M_IMOV_IMM(0, REG_ITMP2); /* super->flags */
3411 M_IAND_IMM(ACC_INTERFACE, REG_ITMP2);
3415 /* interface checkcast code */
3417 if (!super || (super->flags & ACC_INTERFACE)) {
3423 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3426 codegen_addpatchref(cd, cd->mcodeptr,
3427 PATCHER_checkcast_instanceof_interface,
3428 (constant_classref *) iptr->target, 0);
3430 if (opt_showdisassemble) {
3431 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3435 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3436 OFFSET(vftbl_t, interfacetablelength),
3438 /* XXX TWISTI: should this be int arithmetic? */
3439 M_LSUB_IMM32(superindex, REG_ITMP3);
3442 codegen_add_classcastexception_ref(cd, cd->mcodeptr);
3443 x86_64_mov_membase32_reg(cd, REG_ITMP2,
3444 OFFSET(vftbl_t, interfacetable[0]) -
3445 superindex * sizeof(methodptr*),
3449 codegen_add_classcastexception_ref(cd, cd->mcodeptr);
3455 /* class checkcast code */
3457 if (!super || !(super->flags & ACC_INTERFACE)) {
3463 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3466 codegen_addpatchref(cd, cd->mcodeptr,
3467 PATCHER_checkcast_class,
3468 (constant_classref *) iptr->target,
3471 if (opt_showdisassemble) {
3472 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3476 M_MOV_IMM(supervftbl, REG_ITMP3);
3477 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3478 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3480 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3481 OFFSET(vftbl_t, baseval),
3483 /* if (s1 != REG_ITMP1) { */
3484 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3485 /* OFFSET(vftbl_t, baseval), */
3487 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3488 /* OFFSET(vftbl_t, diffval), */
3490 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3491 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3493 /* x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP1, REG_ITMP2); */
3496 x86_64_movl_membase32_reg(cd, REG_ITMP3,
3497 OFFSET(vftbl_t, baseval),
3499 M_LSUB(REG_ITMP3, REG_ITMP2);
3500 M_MOV_IMM(supervftbl, REG_ITMP3);
3501 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3503 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3504 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3506 M_CMP(REG_ITMP3, REG_ITMP2);
3507 M_BA(0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3508 codegen_add_classcastexception_ref(cd, cd->mcodeptr);
3510 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3513 /* array type cast-check */
3515 var_to_reg_int(s1, src, REG_ITMP1);
3516 M_INTMOVE(s1, rd->argintregs[0]);
3518 if (iptr->val.a == NULL) {
3519 codegen_addpatchref(cd, cd->mcodeptr,
3520 PATCHER_builtin_arraycheckcast,
3521 (constant_classref *) iptr->target, 0);
3523 if (opt_showdisassemble) {
3524 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3528 M_MOV_IMM(iptr->val.a, rd->argintregs[1]);
3529 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP1);
3533 codegen_add_classcastexception_ref(cd, cd->mcodeptr);
3535 var_to_reg_int(s1, src, REG_ITMP1);
3536 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
3539 store_reg_to_var_int(iptr->dst, d);
3542 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3544 /* op1: 0 == array, 1 == class */
3545 /* val.a: (classinfo *) superclass */
3547 /* superclass is an interface:
3549 * return (sub != NULL) &&
3550 * (sub->vftbl->interfacetablelength > super->index) &&
3551 * (sub->vftbl->interfacetable[-super->index] != NULL);
3553 * superclass is a class:
3555 * return ((sub != NULL) && (0
3556 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3557 * super->vftbl->diffvall));
3562 vftbl_t *supervftbl;
3565 super = (classinfo *) iptr->val.a;
3572 superindex = super->index;
3573 supervftbl = super->vftbl;
3576 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3577 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3580 var_to_reg_int(s1, src, REG_ITMP1);
3581 d = reg_of_var(rd, iptr->dst, REG_ITMP2);
3583 M_INTMOVE(s1, REG_ITMP1);
3587 /* calculate interface instanceof code size */
3589 s2 = 3; /* mov_membase_reg */
3590 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3591 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub_imm32 */ +
3592 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3593 3 /* test */ + 4 /* setcc */;
3596 s2 += (opt_showdisassemble ? 5 : 0);
3598 /* calculate class instanceof code size */
3600 s3 = 3; /* mov_membase_reg */
3601 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3602 s3 += 10; /* mov_imm_reg */
3603 s3 += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3604 CALCOFFSETBYTES(s3, REG_ITMP1, OFFSET(vftbl_t, baseval));
3605 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3606 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, baseval));
3607 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3608 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3609 s3 += 3 /* sub */ + 3 /* xor */ + 3 /* cmp */ + 4 /* setcc */;
3612 s3 += (opt_showdisassemble ? 5 : 0);
3614 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3616 /* if class is not resolved, check which code to call */
3619 x86_64_test_reg_reg(cd, s1, s1);
3620 x86_64_jcc(cd, X86_64_CC_Z, (6 + (opt_showdisassemble ? 5 : 0) +
3621 7 + 6 + s2 + 5 + s3));
3623 codegen_addpatchref(cd, cd->mcodeptr,
3624 PATCHER_checkcast_instanceof_flags,
3625 (constant_classref *) iptr->target, 0);
3627 if (opt_showdisassemble) {
3628 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3631 x86_64_movl_imm_reg(cd, 0, REG_ITMP3); /* super->flags */
3632 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP3);
3633 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3636 /* interface instanceof code */
3638 if (!super || (super->flags & ACC_INTERFACE)) {
3640 x86_64_test_reg_reg(cd, s1, s1);
3641 x86_64_jcc(cd, X86_64_CC_Z, s2);
3644 x86_64_mov_membase_reg(cd, s1,
3645 OFFSET(java_objectheader, vftbl),
3648 codegen_addpatchref(cd, cd->mcodeptr,
3649 PATCHER_checkcast_instanceof_interface,
3650 (constant_classref *) iptr->target, 0);
3652 if (opt_showdisassemble) {
3653 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3657 x86_64_movl_membase32_reg(cd, REG_ITMP1,
3658 OFFSET(vftbl_t, interfacetablelength),
3660 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3661 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3663 a = 3 + 4 /* mov_membase32_reg */ + 3 /* test */ + 4 /* setcc */;
3665 x86_64_jcc(cd, X86_64_CC_LE, a);
3666 x86_64_mov_membase32_reg(cd, REG_ITMP1,
3667 OFFSET(vftbl_t, interfacetable[0]) -
3668 superindex * sizeof(methodptr*),
3670 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3671 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3674 x86_64_jmp_imm(cd, s3);
3677 /* class instanceof code */
3679 if (!super || !(super->flags & ACC_INTERFACE)) {
3681 x86_64_test_reg_reg(cd, s1, s1);
3682 x86_64_jcc(cd, X86_64_CC_E, s3);
3685 x86_64_mov_membase_reg(cd, s1,
3686 OFFSET(java_objectheader, vftbl),
3690 codegen_addpatchref(cd, cd->mcodeptr,
3691 PATCHER_instanceof_class,
3692 (constant_classref *) iptr->target, 0);
3694 if (opt_showdisassemble) {
3695 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3699 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP2);
3700 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3701 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3703 x86_64_movl_membase_reg(cd, REG_ITMP1,
3704 OFFSET(vftbl_t, baseval),
3706 x86_64_movl_membase_reg(cd, REG_ITMP2,
3707 OFFSET(vftbl_t, diffval),
3709 x86_64_movl_membase_reg(cd, REG_ITMP2,
3710 OFFSET(vftbl_t, baseval),
3712 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3713 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3715 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
3716 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d); /* may be REG_ITMP2 */
3717 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP1);
3718 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3720 store_reg_to_var_int(iptr->dst, d);
3724 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3725 /* op1 = dimension, val.a = class */
3727 /* check for negative sizes and copy sizes to stack if necessary */
3729 MCODECHECK((10 * 4 * iptr->op1) + 5 + 10 * 8);
3731 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3732 /* copy SAVEDVAR sizes to stack */
3734 if (src->varkind != ARGVAR) {
3735 var_to_reg_int(s2, src, REG_ITMP1);
3736 M_LST(s2, REG_SP, s1 * 8);
3740 /* is a patcher function set? */
3742 if (iptr->val.a == NULL) {
3743 codegen_addpatchref(cd, cd->mcodeptr,
3744 PATCHER_builtin_multianewarray,
3745 (constant_classref *) iptr->target, 0);
3747 if (opt_showdisassemble) {
3748 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3754 a = (ptrint) iptr->val.a;
3757 /* a0 = dimension count */
3759 M_MOV_IMM(iptr->op1, rd->argintregs[0]);
3761 /* a1 = arrayvftbl */
3763 M_MOV_IMM(iptr->val.a, rd->argintregs[1]);
3765 /* a2 = pointer to dimensions = stack pointer */
3767 M_MOV(REG_SP, rd->argintregs[2]);
3769 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3772 /* check for exception before result assignment */
3776 codegen_add_fillinstacktrace_ref(cd, cd->mcodeptr);
3778 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3779 M_INTMOVE(REG_RESULT, s1);
3780 store_reg_to_var_int(iptr->dst, s1);
3784 *exceptionptr = new_internalerror("Unknown ICMD %d", iptr->opc);
3788 } /* for instruction */
3790 /* copy values to interface registers */
3792 src = bptr->outstack;
3793 len = bptr->outdepth;
3795 #if defined(ENABLE_LSRA)
3800 if ((src->varkind != STACKVAR)) {
3802 if (IS_FLT_DBL_TYPE(s2)) {
3803 var_to_reg_flt(s1, src, REG_FTMP1);
3804 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3805 M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3808 x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3812 var_to_reg_int(s1, src, REG_ITMP1);
3813 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3814 M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3817 x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3824 /* At the end of a basic block we may have to append some nops,
3825 because the patcher stub calling code might be longer than the
3826 actual instruction. So codepatching does not change the
3827 following block unintentionally. */
3829 if (cd->mcodeptr < cd->lastmcodeptr) {
3830 while (cd->mcodeptr < cd->lastmcodeptr) {
3835 } /* if (bptr -> flags >= BBREACHED) */
3836 } /* for basic block */
3838 dseg_createlinenumbertable(cd);
3841 /* generate exception and patcher stubs */
3850 savedmcodeptr = NULL;
3852 /* generate exception stubs */
3854 for (eref = cd->exceptionrefs; eref != NULL; eref = eref->next) {
3855 gen_resolvebranch(cd->mcodebase + eref->branchpos,
3857 cd->mcodeptr - cd->mcodebase);
3861 /* Check if the exception is an
3862 ArrayIndexOutOfBoundsException. If so, move index register
3865 if (eref->reg != -1)
3866 M_MOV(eref->reg, REG_ITMP1);
3868 /* calculate exception address */
3870 M_MOV_IMM(0, REG_ITMP2_XPC);
3871 dseg_adddata(cd, cd->mcodeptr);
3872 M_AADD_IMM32(eref->branchpos - 6, REG_ITMP2_XPC);
3874 /* move function to call into REG_ITMP3 */
3876 M_MOV_IMM(eref->function, REG_ITMP3);
3878 if (savedmcodeptr != NULL) {
3879 M_JMP_IMM(savedmcodeptr - cd->mcodeptr - 5);
3882 savedmcodeptr = cd->mcodeptr;
3884 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3885 M_MOV(REG_SP, rd->argintregs[1]);
3886 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3887 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
3888 M_MOV(REG_ITMP1, rd->argintregs[4]); /* for AIOOBE */
3890 M_ASUB_IMM(2 * 8, REG_SP);
3891 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
3895 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
3896 M_AADD_IMM(2 * 8, REG_SP);
3898 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
3904 /* generate code patching stub call code */
3906 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
3907 /* check size of code segment */
3911 /* Get machine code which is patched back in later. A
3912 `call rel32' is 5 bytes long (but read 8 bytes). */
3914 savedmcodeptr = cd->mcodebase + pref->branchpos;
3915 mcode = *((ptrint *) savedmcodeptr);
3917 /* patch in `call rel32' to call the following code */
3919 tmpmcodeptr = cd->mcodeptr; /* save current mcodeptr */
3920 cd->mcodeptr = savedmcodeptr; /* set mcodeptr to patch position */
3922 M_CALL_IMM(tmpmcodeptr - (savedmcodeptr + PATCHER_CALL_SIZE));
3924 cd->mcodeptr = tmpmcodeptr; /* restore the current mcodeptr */
3926 /* move pointer to java_objectheader onto stack */
3928 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3929 /* create a virtual java_objectheader */
3931 (void) dseg_addaddress(cd, get_dummyLR()); /* monitorPtr */
3932 a = dseg_addaddress(cd, NULL); /* vftbl */
3934 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP3);
3940 /* move machine code bytes and classinfo pointer into registers */
3942 M_MOV_IMM(mcode, REG_ITMP3);
3944 M_MOV_IMM(pref->ref, REG_ITMP3);
3946 M_MOV_IMM(pref->disp, REG_ITMP3);
3949 M_MOV_IMM(pref->patcher, REG_ITMP3);
3952 M_MOV_IMM(asm_wrapper_patcher, REG_ITMP3);
3957 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
3959 /* everything's ok */
3965 /* createcompilerstub **********************************************************
3967 Creates a stub routine which calls the compiler.
3969 *******************************************************************************/
3971 #define COMPILERSTUB_DATASIZE 2 * SIZEOF_VOID_P
3972 #define COMPILERSTUB_CODESIZE 7 + 7 + 3
3974 #define COMPILERSTUB_SIZE COMPILERSTUB_DATASIZE + COMPILERSTUB_CODESIZE
3977 u1 *createcompilerstub(methodinfo *m)
3979 u1 *s; /* memory to hold the stub */
3984 s = CNEW(u1, COMPILERSTUB_SIZE);
3986 /* set data pointer and code pointer */
3989 s = s + COMPILERSTUB_DATASIZE;
3991 /* mark start of dump memory area */
3993 dumpsize = dump_size();
3995 cd = DNEW(codegendata);
3998 /* Store the methodinfo* in the same place as in the methodheader
3999 for compiled methods. */
4001 d[0] = (ptrint) asm_call_jit_compiler;
4004 /* code for the stub */
4006 M_ALD(REG_ITMP1, RIP, -(7 * 1 + 1 * SIZEOF_VOID_P)); /* methodinfo */
4007 M_ALD(REG_ITMP3, RIP, -(7 * 2 + 2 * SIZEOF_VOID_P)); /* compiler pointer */
4010 #if defined(ENABLE_STATISTICS)
4012 count_cstub_len += COMPILERSTUB_SIZE;
4015 /* release dump area */
4017 dump_release(dumpsize);
4023 /* createnativestub ************************************************************
4025 Creates a stub routine which calls a native method.
4027 *******************************************************************************/
4029 u1 *createnativestub(functionptr f, methodinfo *m, codegendata *cd,
4030 registerdata *rd, methoddesc *nmd)
4033 s4 stackframesize; /* size of stackframe if needed */
4035 s4 i, j; /* count variables */
4039 /* initialize variables */
4042 nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;
4044 /* calculate stack frame size */
4047 sizeof(stackframeinfo) / SIZEOF_VOID_P +
4048 sizeof(localref_table) / SIZEOF_VOID_P +
4049 INT_ARG_CNT + FLT_ARG_CNT + 1 + /* + 1 for function address */
4052 if (!(stackframesize & 0x1)) /* keep stack 16-byte aligned */
4055 /* create method header */
4057 (void) dseg_addaddress(cd, m); /* MethodPointer */
4058 (void) dseg_adds4(cd, stackframesize * 8); /* FrameSize */
4059 (void) dseg_adds4(cd, 0); /* IsSync */
4060 (void) dseg_adds4(cd, 0); /* IsLeaf */
4061 (void) dseg_adds4(cd, 0); /* IntSave */
4062 (void) dseg_adds4(cd, 0); /* FltSave */
4063 (void) dseg_addlinenumbertablesize(cd);
4064 (void) dseg_adds4(cd, 0); /* ExTableSize */
4066 /* initialize mcode variables */
4068 cd->mcodeptr = (u1 *) cd->mcodebase;
4069 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
4071 /* generate native method profiling code */
4074 /* count frequency */
4076 M_MOV_IMM(m, REG_ITMP2);
4077 M_IINC_MEMBASE(REG_ITMP2, OFFSET(methodinfo, frequency));
4080 /* generate stub code */
4082 M_ASUB_IMM(stackframesize * 8, REG_SP);
4084 if (opt_verbosecall) {
4085 /* save integer and float argument registers */
4087 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++)
4088 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4089 M_LST(rd->argintregs[j++], REG_SP, (1 + i) * 8);
4091 for (i = 0, j = 0; i < md->paramcount && j < FLT_ARG_CNT; i++)
4092 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4093 M_DST(rd->argfltregs[j++], REG_SP, (1 + INT_ARG_CNT + i) * 8);
4095 /* show integer hex code for float arguments */
4097 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++) {
4098 /* if the paramtype is a float, we have to right shift all
4099 following integer registers */
4101 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type)) {
4102 for (s1 = INT_ARG_CNT - 2; s1 >= i; s1--)
4103 M_MOV(rd->argintregs[s1], rd->argintregs[s1 + 1]);
4105 x86_64_movd_freg_reg(cd, rd->argfltregs[j], rd->argintregs[i]);
4110 M_MOV_IMM(m, REG_ITMP1);
4111 M_AST(REG_ITMP1, REG_SP, 0 * 8);
4112 M_MOV_IMM(builtin_trace_args, REG_ITMP1);
4115 /* restore integer and float argument registers */
4117 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++)
4118 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4119 M_LLD(rd->argintregs[j++], REG_SP, (1 + i) * 8);
4121 for (i = 0, j = 0; i < md->paramcount && j < FLT_ARG_CNT; i++)
4122 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4123 M_DLD(rd->argfltregs[j++], REG_SP, (1 + INT_ARG_CNT + i) * 8);
4127 /* get function address (this must happen before the stackframeinfo) */
4129 #if !defined(WITH_STATIC_CLASSPATH)
4131 codegen_addpatchref(cd, cd->mcodeptr, PATCHER_resolve_native, m, 0);
4133 if (opt_showdisassemble) {
4134 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
4139 M_MOV_IMM(f, REG_ITMP3);
4142 /* save integer and float argument registers */
4144 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++)
4145 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4146 M_LST(rd->argintregs[j++], REG_SP, i * 8);
4148 for (i = 0, j = 0; i < md->paramcount && j < FLT_ARG_CNT; i++)
4149 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4150 M_DST(rd->argfltregs[j++], REG_SP, (INT_ARG_CNT + i) * 8);
4152 M_AST(REG_ITMP3, REG_SP, (INT_ARG_CNT + FLT_ARG_CNT) * 8);
4154 /* create dynamic stack info */
4156 M_ALEA(REG_SP, stackframesize * 8, rd->argintregs[0]);
4157 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[1]);
4158 M_ALEA(REG_SP, stackframesize * 8 + SIZEOF_VOID_P, rd->argintregs[2]);
4159 M_ALD(rd->argintregs[3], REG_SP, stackframesize * 8);
4160 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
4163 /* restore integer and float argument registers */
4165 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++)
4166 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4167 M_LLD(rd->argintregs[j++], REG_SP, i * 8);
4169 for (i = 0, j = 0; i < md->paramcount && j < FLT_ARG_CNT; i++)
4170 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4171 M_DLD(rd->argfltregs[j++], REG_SP, (INT_ARG_CNT + i) * 8);
4173 M_ALD(REG_ITMP3, REG_SP, (INT_ARG_CNT + FLT_ARG_CNT) * 8);
4176 /* copy or spill arguments to new locations */
4178 for (i = md->paramcount - 1, j = i + nativeparams; i >= 0; i--, j--) {
4179 t = md->paramtypes[i].type;
4181 if (IS_INT_LNG_TYPE(t)) {
4182 if (!md->params[i].inmemory) {
4183 s1 = rd->argintregs[md->params[i].regoff];
4185 if (!nmd->params[j].inmemory) {
4186 s2 = rd->argintregs[nmd->params[j].regoff];
4190 s2 = nmd->params[j].regoff;
4191 M_LST(s1, REG_SP, s2 * 8);
4195 s1 = md->params[i].regoff + stackframesize + 1; /* + 1 (RA) */
4196 s2 = nmd->params[j].regoff;
4197 M_LLD(REG_ITMP1, REG_SP, s1 * 8);
4198 M_LST(REG_ITMP1, REG_SP, s2 * 8);
4202 /* We only copy spilled float arguments, as the float argument */
4203 /* registers keep unchanged. */
4205 if (md->params[i].inmemory) {
4206 s1 = md->params[i].regoff + stackframesize + 1; /* + 1 (RA) */
4207 s2 = nmd->params[j].regoff;
4208 M_DLD(REG_FTMP1, REG_SP, s1 * 8);
4209 M_DST(REG_FTMP1, REG_SP, s2 * 8);
4214 /* put class into second argument register */
4216 if (m->flags & ACC_STATIC)
4217 M_MOV_IMM(m->class, rd->argintregs[1]);
4219 /* put env into first argument register */
4221 M_MOV_IMM(_Jv_env, rd->argintregs[0]);
4223 /* do the native function call */
4227 /* save return value */
4229 if (md->returntype.type != TYPE_VOID) {
4230 if (IS_INT_LNG_TYPE(md->returntype.type))
4231 M_LST(REG_RESULT, REG_SP, 0 * 8);
4233 M_DST(REG_FRESULT, REG_SP, 0 * 8);
4236 /* remove native stackframe info */
4238 M_ALEA(REG_SP, stackframesize * 8, rd->argintregs[0]);
4239 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
4242 /* generate call trace */
4244 if (opt_verbosecall) {
4245 /* just restore the value we need, don't care about the other */
4247 if (md->returntype.type != TYPE_VOID) {
4248 if (IS_INT_LNG_TYPE(md->returntype.type))
4249 M_LLD(REG_RESULT, REG_SP, 0 * 8);
4251 M_DLD(REG_FRESULT, REG_SP, 0 * 8);
4254 M_MOV_IMM(m, rd->argintregs[0]);
4255 M_MOV(REG_RESULT, rd->argintregs[1]);
4256 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
4257 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
4259 M_MOV_IMM(builtin_displaymethodstop, REG_ITMP1);
4263 /* check for exception */
4265 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4266 M_MOV_IMM(builtin_get_exceptionptrptr, REG_ITMP3);
4269 M_MOV_IMM(&_no_threads_exceptionptr, REG_RESULT);
4271 M_ALD(REG_ITMP2, REG_RESULT, 0);
4273 /* restore return value */
4275 if (md->returntype.type != TYPE_VOID) {
4276 if (IS_INT_LNG_TYPE(md->returntype.type))
4277 M_LLD(REG_RESULT, REG_SP, 0 * 8);
4279 M_DLD(REG_FRESULT, REG_SP, 0 * 8);
4282 /* test for exception */
4287 /* remove stackframe */
4289 M_AADD_IMM(stackframesize * 8, REG_SP);
4293 /* handle exception */
4295 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4296 M_LST(REG_ITMP2, REG_SP, 0 * 8);
4297 M_MOV_IMM(builtin_get_exceptionptrptr, REG_ITMP3);
4299 M_AST_IMM32(0, REG_RESULT, 0); /* clear exception pointer */
4300 M_LLD(REG_ITMP1_XPTR, REG_SP, 0 * 8);
4302 M_MOV(REG_ITMP3, REG_ITMP1_XPTR);
4303 M_MOV_IMM(&_no_threads_exceptionptr, REG_ITMP3);
4304 M_AST_IMM32(0, REG_ITMP3, 0); /* clear exception pointer */
4307 /* remove stackframe */
4309 M_AADD_IMM(stackframesize * 8, REG_SP);
4311 M_LLD(REG_ITMP2_XPC, REG_SP, 0 * 8); /* get return address from stack */
4312 M_ASUB_IMM(3, REG_ITMP2_XPC); /* callq */
4314 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
4318 /* process patcher calls **************************************************/
4325 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4329 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
4330 /* Get machine code which is patched back in later. A
4331 `call rel32' is 5 bytes long (but read 8 bytes). */
4333 savedmcodeptr = cd->mcodebase + pref->branchpos;
4334 mcode = *((ptrint *) savedmcodeptr);
4336 /* patch in `call rel32' to call the following code */
4338 tmpmcodeptr = cd->mcodeptr; /* save current mcodeptr */
4339 cd->mcodeptr = savedmcodeptr; /* set mcodeptr to patch position */
4341 M_CALL_IMM(tmpmcodeptr - (savedmcodeptr + PATCHER_CALL_SIZE));
4343 cd->mcodeptr = tmpmcodeptr; /* restore the current mcodeptr */
4345 /* move pointer to java_objectheader onto stack */
4347 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4348 /* create a virtual java_objectheader */
4350 (void) dseg_addaddress(cd, get_dummyLR()); /* monitorPtr */
4351 disp = dseg_addaddress(cd, NULL); /* vftbl */
4353 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP3);
4359 /* move machine code bytes and classinfo pointer into registers */
4361 M_MOV_IMM(mcode, REG_ITMP3);
4363 M_MOV_IMM(pref->ref, REG_ITMP3);
4365 M_MOV_IMM(pref->disp, REG_ITMP3);
4368 M_MOV_IMM(pref->patcher, REG_ITMP3);
4371 M_MOV_IMM(asm_wrapper_patcher, REG_ITMP3);
4376 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
4378 return cd->code->entrypoint;
4383 * These are local overrides for various environment variables in Emacs.
4384 * Please do not remove this and leave it at the end of the file, where
4385 * Emacs will automagically detect them.
4386 * ---------------------------------------------------------------------
4389 * indent-tabs-mode: t
4393 * vim:noexpandtab:sw=4:ts=4: