1 /* src/vm/jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996-2005 R. Grafl, A. Krall, C. Kruegel, C. Oates,
4 R. Obermaisser, M. Platter, M. Probst, S. Ring, E. Steiner,
5 C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich, J. Wenninger,
6 Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
25 Contact: cacao@complang.tuwien.ac.at
27 Authors: Andreas Krall
32 $Id: codegen.c 2767 2005-06-21 11:47:05Z twisti $
45 #include "vm/jit/x86_64/arch.h"
46 #include "vm/jit/x86_64/codegen.h"
47 #include "vm/jit/x86_64/emitfuncs.h"
48 #include "vm/jit/x86_64/types.h"
49 #include "vm/jit/x86_64/asmoffsets.h"
51 #include "cacao/cacao.h"
52 #include "native/native.h"
53 #include "vm/global.h"
54 #include "vm/builtin.h"
55 #include "vm/loader.h"
56 #include "vm/statistics.h"
57 #include "vm/stringlocal.h"
58 #include "vm/tables.h"
59 #include "vm/jit/asmpart.h"
60 #include "vm/jit/codegen.inc"
61 #include "vm/jit/helper.h"
62 #include "vm/jit/jit.h"
65 # include "vm/jit/lsra.inc"
68 #include "vm/jit/parse.h"
69 #include "vm/jit/patcher.h"
70 #include "vm/jit/reg.h"
71 #include "vm/jit/reg.inc"
74 /* codegen *********************************************************************
76 Generates machine code.
78 *******************************************************************************/
80 void codegen(methodinfo *m, codegendata *cd, registerdata *rd)
82 s4 len, s1, s2, s3, d;
91 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
92 builtintable_entry *bte;
101 /* space to save used callee saved registers */
103 savedregs_num += (rd->savintregcnt - rd->maxsavintreguse);
104 savedregs_num += (rd->savfltregcnt - rd->maxsavfltreguse);
106 parentargs_base = rd->maxmemuse + savedregs_num;
108 #if defined(USE_THREADS) /* space to save argument of monitor_enter */
110 if (checksync && (m->flags & ACC_SYNCHRONIZED))
115 /* Keep stack of non-leaf functions 16-byte aligned for calls into native */
116 /* code e.g. libc or jni (alignment problems with movaps). */
118 if (!m->isleafmethod || runverbose)
119 parentargs_base |= 0x1;
121 /* create method header */
123 (void) dseg_addaddress(cd, m); /* MethodPointer */
124 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
126 #if defined(USE_THREADS)
128 /* IsSync contains the offset relative to the stack pointer for the
129 argument of monitor_exit used in the exception handler. Since the
130 offset could be zero and give a wrong meaning of the flag it is
134 if (checksync && (m->flags & ACC_SYNCHRONIZED))
135 (void) dseg_adds4(cd, (rd->maxmemuse + 1) * 8); /* IsSync */
140 (void) dseg_adds4(cd, 0); /* IsSync */
142 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
143 (void) dseg_adds4(cd, rd->savintregcnt - rd->maxsavintreguse);/* IntSave */
144 (void) dseg_adds4(cd, rd->savfltregcnt - rd->maxsavfltreguse);/* FltSave */
146 (void) dseg_addlinenumbertablesize(cd);
148 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
150 /* create exception table */
152 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
153 dseg_addtarget(cd, ex->start);
154 dseg_addtarget(cd, ex->end);
155 dseg_addtarget(cd, ex->handler);
156 (void) dseg_addaddress(cd, ex->catchtype.cls);
159 /* initialize mcode variables */
161 cd->mcodeptr = (u1 *) cd->mcodebase;
162 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
163 MCODECHECK(128 + m->paramcount);
165 /* create stack frame (if necessary) */
167 if (parentargs_base) {
168 x86_64_alu_imm_reg(cd, X86_64_SUB, parentargs_base * 8, REG_SP);
171 /* save used callee saved registers */
174 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
175 p--; x86_64_mov_reg_membase(cd, rd->savintregs[i], REG_SP, p * 8);
177 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
178 p--; x86_64_movq_reg_membase(cd, rd->savfltregs[i], REG_SP, p * 8);
181 /* take arguments out of register or stack frame */
185 for (p = 0, l = 0; p < md->paramcount; p++) {
186 t = md->paramtypes[p].type;
187 var = &(rd->locals[l][t]);
189 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
193 s1 = md->params[p].regoff;
194 if (IS_INT_LNG_TYPE(t)) { /* integer args */
195 s2 = rd->argintregs[s1];
196 if (!md->params[p].inmemory) { /* register arguments */
197 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
198 M_INTMOVE(s2, var->regoff);
200 } else { /* reg arg -> spilled */
201 M_LST(s2, REG_SP, var->regoff * 8);
204 } else { /* stack arguments */
205 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
206 /* + 8 for return address */
207 M_LLD(var->regoff, REG_SP, (parentargs_base + s1) * 8 + 8);
209 } else { /* stack arg -> spilled */
210 var->regoff = parentargs_base + s1 + 1;
214 } else { /* floating args */
215 if (!md->params[p].inmemory) { /* register arguments */
216 s2 = rd->argfltregs[s1];
217 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
218 M_FLTMOVE(s2, var->regoff);
220 } else { /* reg arg -> spilled */
221 M_DST(s2, REG_SP, var->regoff * 8);
224 } else { /* stack arguments */
225 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
226 M_DLD(var->regoff, REG_SP, (parentargs_base + s1) * 8 + 8);
229 var->regoff = parentargs_base + s1 + 1;
235 /* save monitorenter argument */
237 #if defined(USE_THREADS)
238 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
239 /* stack offset for monitor argument */
244 M_LSUB_IMM((INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_SP);
246 for (p = 0; p < INT_ARG_CNT; p++)
247 M_LST(rd->argintregs[p], REG_SP, p * 8);
249 for (p = 0; p < FLT_ARG_CNT; p++)
250 M_DST(rd->argfltregs[p], REG_SP, (INT_ARG_CNT + p) * 8);
252 s1 += INT_ARG_CNT + FLT_ARG_CNT;
255 /* decide which monitor enter function to call */
257 if (m->flags & ACC_STATIC) {
258 x86_64_mov_imm_reg(cd, (ptrint) m->class, REG_ITMP1);
259 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, s1 * 8);
260 M_INTMOVE(REG_ITMP1, rd->argintregs[0]);
261 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_staticmonitorenter, REG_ITMP1);
262 x86_64_call_reg(cd, REG_ITMP1);
265 x86_64_test_reg_reg(cd, rd->argintregs[0], rd->argintregs[0]);
266 x86_64_jcc(cd, X86_64_CC_Z, 0);
267 codegen_addxnullrefs(cd, cd->mcodeptr);
268 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, s1 * 8);
269 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_monitorenter, REG_ITMP1);
270 x86_64_call_reg(cd, REG_ITMP1);
274 for (p = 0; p < INT_ARG_CNT; p++)
275 M_LLD(rd->argintregs[p], REG_SP, p * 8);
277 for (p = 0; p < FLT_ARG_CNT; p++)
278 M_DLD(rd->argfltregs[p], REG_SP, (INT_ARG_CNT + p) * 8);
280 M_LADD_IMM((INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_SP);
285 /* Copy argument registers to stack and call trace function with pointer */
286 /* to arguments on stack. */
288 if (runverbose || opt_stat) {
289 M_LSUB_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT + 1 + 1) * 8, REG_SP);
291 /* save integer argument registers */
293 for (p = 0; p < INT_ARG_CNT; p++)
294 M_LST(rd->argintregs[p], REG_SP, (1 + p) * 8);
296 /* save float argument registers */
298 for (p = 0; p < FLT_ARG_CNT; p++)
299 M_DST(rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
301 /* save temporary registers for leaf methods */
303 if (m->isleafmethod) {
304 for (p = 0; p < INT_TMP_CNT; p++)
305 M_LST(rd->tmpintregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
307 for (p = 0; p < FLT_TMP_CNT; p++)
308 M_DST(rd->tmpfltregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
312 /* show integer hex code for float arguments */
314 for (p = 0, l = 0; p < md->paramcount && p < INT_ARG_CNT; p++) {
315 /* if the paramtype is a float, we have to right shift all */
316 /* following integer registers */
318 if (IS_FLT_DBL_TYPE(md->paramtypes[p].type)) {
319 for (s1 = INT_ARG_CNT - 2; s1 >= p; s1--) {
320 M_MOV(rd->argintregs[s1], rd->argintregs[s1 + 1]);
323 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
328 x86_64_mov_imm_reg(cd, (ptrint) m, REG_ITMP2);
329 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
330 x86_64_mov_imm_reg(cd, (ptrint) builtin_trace_args, REG_ITMP1);
331 x86_64_call_reg(cd, REG_ITMP1);
334 x86_64_mov_imm_reg(cd, (ptrint) compiledinvokation, REG_ITMP1);
335 x86_64_call_reg(cd, REG_ITMP1);
338 /* restore integer argument registers */
340 for (p = 0; p < INT_ARG_CNT; p++)
341 M_LLD(rd->argintregs[p], REG_SP, (1 + p) * 8);
343 /* restore float argument registers */
345 for (p = 0; p < FLT_ARG_CNT; p++)
346 M_DLD(rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
348 /* restore temporary registers for leaf methods */
350 if (m->isleafmethod) {
351 for (p = 0; p < INT_TMP_CNT; p++)
352 M_LLD(rd->tmpintregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
354 for (p = 0; p < FLT_TMP_CNT; p++)
355 M_DLD(rd->tmpfltregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
358 M_LADD_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT + 1 + 1) * 8, REG_SP);
363 /* end of header generation */
365 /* walk through all basic blocks */
366 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
368 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
370 if (bptr->flags >= BBREACHED) {
372 /* branch resolving */
375 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
376 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
381 /* copy interface registers to their destination */
385 MCODECHECK(64 + len);
389 while (src != NULL) {
391 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
392 if (bptr->type == BBTYPE_SBR) {
393 /* d = reg_of_var(rd, src, REG_ITMP1); */
394 if (!(src->flags & INMEMORY))
398 x86_64_pop_reg(cd, d);
399 store_reg_to_var_int(src, d);
401 } else if (bptr->type == BBTYPE_EXH) {
402 /* d = reg_of_var(rd, src, REG_ITMP1); */
403 if (!(src->flags & INMEMORY))
407 M_INTMOVE(REG_ITMP1, d);
408 store_reg_to_var_int(src, d);
417 while (src != NULL) {
419 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
420 if (bptr->type == BBTYPE_SBR) {
421 d = reg_of_var(rd, src, REG_ITMP1);
422 x86_64_pop_reg(cd, d);
423 store_reg_to_var_int(src, d);
425 } else if (bptr->type == BBTYPE_EXH) {
426 d = reg_of_var(rd, src, REG_ITMP1);
427 M_INTMOVE(REG_ITMP1, d);
428 store_reg_to_var_int(src, d);
432 d = reg_of_var(rd, src, REG_ITMP1);
433 if ((src->varkind != STACKVAR)) {
435 if (IS_FLT_DBL_TYPE(s2)) {
436 s1 = rd->interfaces[len][s2].regoff;
437 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
441 x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
443 store_reg_to_var_flt(src, d);
446 s1 = rd->interfaces[len][s2].regoff;
447 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
451 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
453 store_reg_to_var_int(src, d);
462 /* walk through all instructions */
467 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
468 if (iptr->line != currentline) {
469 dseg_addlinenumber(cd, iptr->line, cd->mcodeptr);
470 /*printf("%s : %d\n",m->name->text,iptr->line);*/
471 currentline = iptr->line;
474 MCODECHECK(128); /* XXX are 128 bytes enough? */
477 case ICMD_INLINE_START: /* internal ICMDs */
478 case ICMD_INLINE_END:
481 case ICMD_NOP: /* ... ==> ... */
484 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
485 if (src->flags & INMEMORY) {
486 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
489 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
491 x86_64_jcc(cd, X86_64_CC_Z, 0);
492 codegen_addxnullrefs(cd, cd->mcodeptr);
495 /* constant operations ************************************************/
497 case ICMD_ICONST: /* ... ==> ..., constant */
498 /* op1 = 0, val.i = constant */
500 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
501 if (iptr->val.i == 0) {
502 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
504 x86_64_movl_imm_reg(cd, iptr->val.i, d);
506 store_reg_to_var_int(iptr->dst, d);
509 case ICMD_ACONST: /* ... ==> ..., constant */
510 /* op1 = 0, val.a = constant */
512 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
513 if (iptr->val.a == 0) {
514 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
516 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, d);
518 store_reg_to_var_int(iptr->dst, d);
521 case ICMD_LCONST: /* ... ==> ..., constant */
522 /* op1 = 0, val.l = constant */
524 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
525 if (iptr->val.l == 0) {
526 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
528 x86_64_mov_imm_reg(cd, iptr->val.l, d);
530 store_reg_to_var_int(iptr->dst, d);
533 case ICMD_FCONST: /* ... ==> ..., constant */
534 /* op1 = 0, val.f = constant */
536 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
537 a = dseg_addfloat(cd, iptr->val.f);
538 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + a, d);
539 store_reg_to_var_flt(iptr->dst, d);
542 case ICMD_DCONST: /* ... ==> ..., constant */
543 /* op1 = 0, val.d = constant */
545 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
546 a = dseg_adddouble(cd, iptr->val.d);
547 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, d);
548 store_reg_to_var_flt(iptr->dst, d);
552 /* load/store operations **********************************************/
554 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
555 /* op1 = local variable */
557 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
558 if ((iptr->dst->varkind == LOCALVAR) &&
559 (iptr->dst->varnum == iptr->op1)) {
562 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
563 if (var->flags & INMEMORY) {
564 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
565 store_reg_to_var_int(iptr->dst, d);
568 if (iptr->dst->flags & INMEMORY) {
569 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
572 M_INTMOVE(var->regoff, d);
577 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
578 case ICMD_ALOAD: /* op1 = local variable */
580 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
581 if ((iptr->dst->varkind == LOCALVAR) &&
582 (iptr->dst->varnum == iptr->op1)) {
585 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
586 if (var->flags & INMEMORY) {
587 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
588 store_reg_to_var_int(iptr->dst, d);
591 if (iptr->dst->flags & INMEMORY) {
592 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
595 M_INTMOVE(var->regoff, d);
600 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
601 case ICMD_DLOAD: /* op1 = local variable */
603 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
604 if ((iptr->dst->varkind == LOCALVAR) &&
605 (iptr->dst->varnum == iptr->op1)) {
608 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
609 if (var->flags & INMEMORY) {
610 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
611 store_reg_to_var_flt(iptr->dst, d);
614 if (iptr->dst->flags & INMEMORY) {
615 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
618 M_FLTMOVE(var->regoff, d);
623 case ICMD_ISTORE: /* ..., value ==> ... */
624 case ICMD_LSTORE: /* op1 = local variable */
627 if ((src->varkind == LOCALVAR) &&
628 (src->varnum == iptr->op1)) {
631 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
632 if (var->flags & INMEMORY) {
633 var_to_reg_int(s1, src, REG_ITMP1);
634 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
637 var_to_reg_int(s1, src, var->regoff);
638 M_INTMOVE(s1, var->regoff);
642 case ICMD_FSTORE: /* ..., value ==> ... */
643 case ICMD_DSTORE: /* op1 = local variable */
645 if ((src->varkind == LOCALVAR) &&
646 (src->varnum == iptr->op1)) {
649 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
650 if (var->flags & INMEMORY) {
651 var_to_reg_flt(s1, src, REG_FTMP1);
652 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
655 var_to_reg_flt(s1, src, var->regoff);
656 M_FLTMOVE(s1, var->regoff);
661 /* pop/dup/swap operations ********************************************/
663 /* attention: double and longs are only one entry in CACAO ICMDs */
665 case ICMD_POP: /* ..., value ==> ... */
666 case ICMD_POP2: /* ..., value, value ==> ... */
669 case ICMD_DUP: /* ..., a ==> ..., a, a */
670 M_COPY(src, iptr->dst);
673 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
675 M_COPY(src, iptr->dst);
676 M_COPY(src->prev, iptr->dst->prev);
677 M_COPY(iptr->dst, iptr->dst->prev->prev);
680 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
682 M_COPY(src, iptr->dst);
683 M_COPY(src->prev, iptr->dst->prev);
684 M_COPY(src->prev->prev, iptr->dst->prev->prev);
685 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
688 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
690 M_COPY(src, iptr->dst);
691 M_COPY(src->prev, iptr->dst->prev);
694 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
696 M_COPY(src, iptr->dst);
697 M_COPY(src->prev, iptr->dst->prev);
698 M_COPY(src->prev->prev, iptr->dst->prev->prev);
699 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
700 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
703 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
705 M_COPY(src, iptr->dst);
706 M_COPY(src->prev, iptr->dst->prev);
707 M_COPY(src->prev->prev, iptr->dst->prev->prev);
708 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
709 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
710 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
713 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
715 M_COPY(src, iptr->dst->prev);
716 M_COPY(src->prev, iptr->dst);
720 /* integer operations *************************************************/
722 case ICMD_INEG: /* ..., value ==> ..., - value */
724 d = reg_of_var(rd, iptr->dst, REG_NULL);
725 if (iptr->dst->flags & INMEMORY) {
726 if (src->flags & INMEMORY) {
727 if (src->regoff == iptr->dst->regoff) {
728 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
731 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
732 x86_64_negl_reg(cd, REG_ITMP1);
733 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
737 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
738 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
742 if (src->flags & INMEMORY) {
743 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
744 x86_64_negl_reg(cd, d);
747 M_INTMOVE(src->regoff, iptr->dst->regoff);
748 x86_64_negl_reg(cd, iptr->dst->regoff);
753 case ICMD_LNEG: /* ..., value ==> ..., - value */
755 d = reg_of_var(rd, iptr->dst, REG_NULL);
756 if (iptr->dst->flags & INMEMORY) {
757 if (src->flags & INMEMORY) {
758 if (src->regoff == iptr->dst->regoff) {
759 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
762 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
763 x86_64_neg_reg(cd, REG_ITMP1);
764 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
768 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
769 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
773 if (src->flags & INMEMORY) {
774 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
775 x86_64_neg_reg(cd, iptr->dst->regoff);
778 M_INTMOVE(src->regoff, iptr->dst->regoff);
779 x86_64_neg_reg(cd, iptr->dst->regoff);
784 case ICMD_I2L: /* ..., value ==> ..., value */
786 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
787 if (src->flags & INMEMORY) {
788 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
791 x86_64_movslq_reg_reg(cd, src->regoff, d);
793 store_reg_to_var_int(iptr->dst, d);
796 case ICMD_L2I: /* ..., value ==> ..., value */
798 var_to_reg_int(s1, src, REG_ITMP1);
799 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
801 store_reg_to_var_int(iptr->dst, d);
804 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
806 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
807 if (src->flags & INMEMORY) {
808 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
811 x86_64_movsbq_reg_reg(cd, src->regoff, d);
813 store_reg_to_var_int(iptr->dst, d);
816 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
818 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
819 if (src->flags & INMEMORY) {
820 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
823 x86_64_movzwq_reg_reg(cd, src->regoff, d);
825 store_reg_to_var_int(iptr->dst, d);
828 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
830 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
831 if (src->flags & INMEMORY) {
832 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
835 x86_64_movswq_reg_reg(cd, src->regoff, d);
837 store_reg_to_var_int(iptr->dst, d);
841 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
843 d = reg_of_var(rd, iptr->dst, REG_NULL);
844 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
847 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
848 /* val.i = constant */
850 d = reg_of_var(rd, iptr->dst, REG_NULL);
851 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
854 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
856 d = reg_of_var(rd, iptr->dst, REG_NULL);
857 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
860 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
861 /* val.l = constant */
863 d = reg_of_var(rd, iptr->dst, REG_NULL);
864 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
867 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
869 d = reg_of_var(rd, iptr->dst, REG_NULL);
870 if (iptr->dst->flags & INMEMORY) {
871 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
872 if (src->prev->regoff == iptr->dst->regoff) {
873 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
874 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
877 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
878 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
879 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
882 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
883 M_INTMOVE(src->prev->regoff, REG_ITMP1);
884 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
885 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
887 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
888 if (src->prev->regoff == iptr->dst->regoff) {
889 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
892 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
893 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
894 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
898 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
899 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
903 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
904 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
905 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
907 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
908 M_INTMOVE(src->prev->regoff, d);
909 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
911 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
912 /* workaround for reg alloc */
913 if (src->regoff == iptr->dst->regoff) {
914 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
915 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
916 M_INTMOVE(REG_ITMP1, d);
919 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
920 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
924 /* workaround for reg alloc */
925 if (src->regoff == iptr->dst->regoff) {
926 M_INTMOVE(src->prev->regoff, REG_ITMP1);
927 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
928 M_INTMOVE(REG_ITMP1, d);
931 M_INTMOVE(src->prev->regoff, d);
932 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
938 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
939 /* val.i = constant */
941 d = reg_of_var(rd, iptr->dst, REG_NULL);
942 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
945 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
947 d = reg_of_var(rd, iptr->dst, REG_NULL);
948 if (iptr->dst->flags & INMEMORY) {
949 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
950 if (src->prev->regoff == iptr->dst->regoff) {
951 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
952 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
955 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
956 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
957 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
960 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
961 M_INTMOVE(src->prev->regoff, REG_ITMP1);
962 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
963 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
965 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
966 if (src->prev->regoff == iptr->dst->regoff) {
967 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
970 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
971 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
972 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
976 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
977 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
981 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
982 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
983 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
985 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
986 M_INTMOVE(src->prev->regoff, d);
987 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
989 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
990 /* workaround for reg alloc */
991 if (src->regoff == iptr->dst->regoff) {
992 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
993 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
994 M_INTMOVE(REG_ITMP1, d);
997 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
998 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1002 /* workaround for reg alloc */
1003 if (src->regoff == iptr->dst->regoff) {
1004 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1005 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1006 M_INTMOVE(REG_ITMP1, d);
1009 M_INTMOVE(src->prev->regoff, d);
1010 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1016 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1017 /* val.l = constant */
1019 d = reg_of_var(rd, iptr->dst, REG_NULL);
1020 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1023 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1025 d = reg_of_var(rd, iptr->dst, REG_NULL);
1026 if (iptr->dst->flags & INMEMORY) {
1027 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1028 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1029 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1030 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1032 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1033 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1034 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1035 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1037 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1038 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1039 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1040 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1043 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1044 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1045 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1049 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1050 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1051 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1053 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1054 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1055 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1057 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1058 M_INTMOVE(src->regoff, iptr->dst->regoff);
1059 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1062 if (src->regoff == iptr->dst->regoff) {
1063 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1066 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1067 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1073 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1074 /* val.i = constant */
1076 d = reg_of_var(rd, iptr->dst, REG_NULL);
1077 if (iptr->dst->flags & INMEMORY) {
1078 if (src->flags & INMEMORY) {
1079 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1080 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1083 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1084 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1088 if (src->flags & INMEMORY) {
1089 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1092 if (iptr->val.i == 2) {
1093 M_INTMOVE(src->regoff, iptr->dst->regoff);
1094 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1097 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
1103 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1105 d = reg_of_var(rd, iptr->dst, REG_NULL);
1106 if (iptr->dst->flags & INMEMORY) {
1107 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1108 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1109 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1110 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1112 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1113 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1114 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1115 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1117 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1118 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1119 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1120 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1123 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1124 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1125 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1129 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1130 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1131 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1133 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1134 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1135 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1137 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1138 M_INTMOVE(src->regoff, iptr->dst->regoff);
1139 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1142 if (src->regoff == iptr->dst->regoff) {
1143 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1146 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1147 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
1153 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1154 /* val.l = constant */
1156 d = reg_of_var(rd, iptr->dst, REG_NULL);
1157 if (iptr->dst->flags & INMEMORY) {
1158 if (src->flags & INMEMORY) {
1159 if (IS_IMM32(iptr->val.l)) {
1160 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1163 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1164 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1166 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1169 if (IS_IMM32(iptr->val.l)) {
1170 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1173 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1174 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1176 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1180 if (src->flags & INMEMORY) {
1181 if (IS_IMM32(iptr->val.l)) {
1182 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1185 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1186 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1190 /* should match in many cases */
1191 if (iptr->val.l == 2) {
1192 M_INTMOVE(src->regoff, iptr->dst->regoff);
1193 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1196 if (IS_IMM32(iptr->val.l)) {
1197 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1200 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1201 M_INTMOVE(src->regoff, iptr->dst->regoff);
1202 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
1209 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1211 d = reg_of_var(rd, iptr->dst, REG_NULL);
1212 if (src->prev->flags & INMEMORY) {
1213 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1216 M_INTMOVE(src->prev->regoff, RAX);
1219 if (src->flags & INMEMORY) {
1220 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1223 M_INTMOVE(src->regoff, REG_ITMP3);
1227 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1228 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1229 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1230 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1232 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1234 x86_64_idivl_reg(cd, REG_ITMP3);
1236 if (iptr->dst->flags & INMEMORY) {
1237 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1238 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1241 M_INTMOVE(RAX, iptr->dst->regoff);
1243 if (iptr->dst->regoff != RDX) {
1244 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1249 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1250 d = reg_of_var(rd, iptr->dst, REG_NULL);
1251 if (src->prev->flags & INMEMORY) {
1252 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1255 M_INTMOVE(src->prev->regoff, RAX);
1258 if (src->flags & INMEMORY) {
1259 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1262 M_INTMOVE(src->regoff, REG_ITMP3);
1266 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1268 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1269 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1272 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1273 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1274 x86_64_jcc(cd, X86_64_CC_E, 1 + 3); /* 6 bytes */
1277 x86_64_idivl_reg(cd, REG_ITMP3);
1279 if (iptr->dst->flags & INMEMORY) {
1280 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1281 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1284 M_INTMOVE(RDX, iptr->dst->regoff);
1286 if (iptr->dst->regoff != RDX) {
1287 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1292 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1293 /* val.i = constant */
1295 var_to_reg_int(s1, src, REG_ITMP1);
1296 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1297 M_INTMOVE(s1, REG_ITMP1);
1298 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1299 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1300 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1301 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1302 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1303 store_reg_to_var_int(iptr->dst, d);
1306 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1307 /* val.i = constant */
1309 var_to_reg_int(s1, src, REG_ITMP1);
1310 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1311 M_INTMOVE(s1, REG_ITMP1);
1312 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1313 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1314 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1315 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1316 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1317 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1318 store_reg_to_var_int(iptr->dst, d);
1322 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1324 d = reg_of_var(rd, iptr->dst, REG_NULL);
1325 if (src->prev->flags & INMEMORY) {
1326 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1329 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1332 if (src->flags & INMEMORY) {
1333 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1336 M_INTMOVE(src->regoff, REG_ITMP3);
1340 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1341 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1342 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1343 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1344 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1346 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1348 x86_64_idiv_reg(cd, REG_ITMP3);
1350 if (iptr->dst->flags & INMEMORY) {
1351 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1352 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1355 M_INTMOVE(RAX, iptr->dst->regoff);
1357 if (iptr->dst->regoff != RDX) {
1358 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1363 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1365 d = reg_of_var(rd, iptr->dst, REG_NULL);
1366 if (src->prev->flags & INMEMORY) {
1367 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1370 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1373 if (src->flags & INMEMORY) {
1374 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1377 M_INTMOVE(src->regoff, REG_ITMP3);
1381 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1383 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1384 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1385 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1388 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1389 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1390 x86_64_jcc(cd, X86_64_CC_E, 2 + 3); /* 6 bytes */
1393 x86_64_idiv_reg(cd, REG_ITMP3);
1395 if (iptr->dst->flags & INMEMORY) {
1396 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1397 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1400 M_INTMOVE(RDX, iptr->dst->regoff);
1402 if (iptr->dst->regoff != RDX) {
1403 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1408 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1409 /* val.i = constant */
1411 var_to_reg_int(s1, src, REG_ITMP1);
1412 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1413 M_INTMOVE(s1, REG_ITMP1);
1414 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1415 x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1416 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1417 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1418 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1419 store_reg_to_var_int(iptr->dst, d);
1422 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1423 /* val.l = constant */
1425 var_to_reg_int(s1, src, REG_ITMP1);
1426 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1427 M_INTMOVE(s1, REG_ITMP1);
1428 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1429 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1430 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1431 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1432 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1433 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1434 store_reg_to_var_int(iptr->dst, d);
1437 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1439 d = reg_of_var(rd, iptr->dst, REG_NULL);
1440 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1443 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1444 /* val.i = constant */
1446 d = reg_of_var(rd, iptr->dst, REG_NULL);
1447 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1450 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1452 d = reg_of_var(rd, iptr->dst, REG_NULL);
1453 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1456 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1457 /* val.i = constant */
1459 d = reg_of_var(rd, iptr->dst, REG_NULL);
1460 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1463 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1465 d = reg_of_var(rd, iptr->dst, REG_NULL);
1466 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1469 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1470 /* val.i = constant */
1472 d = reg_of_var(rd, iptr->dst, REG_NULL);
1473 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1476 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1478 d = reg_of_var(rd, iptr->dst, REG_NULL);
1479 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1482 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1483 /* val.i = constant */
1485 d = reg_of_var(rd, iptr->dst, REG_NULL);
1486 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1489 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1491 d = reg_of_var(rd, iptr->dst, REG_NULL);
1492 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1495 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1496 /* val.i = constant */
1498 d = reg_of_var(rd, iptr->dst, REG_NULL);
1499 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1502 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1504 d = reg_of_var(rd, iptr->dst, REG_NULL);
1505 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1508 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1509 /* val.l = constant */
1511 d = reg_of_var(rd, iptr->dst, REG_NULL);
1512 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1515 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1517 d = reg_of_var(rd, iptr->dst, REG_NULL);
1518 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1521 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1522 /* val.i = constant */
1524 d = reg_of_var(rd, iptr->dst, REG_NULL);
1525 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1528 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1530 d = reg_of_var(rd, iptr->dst, REG_NULL);
1531 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1534 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1535 /* val.l = constant */
1537 d = reg_of_var(rd, iptr->dst, REG_NULL);
1538 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1541 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1543 d = reg_of_var(rd, iptr->dst, REG_NULL);
1544 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1547 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1548 /* val.i = constant */
1550 d = reg_of_var(rd, iptr->dst, REG_NULL);
1551 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1554 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1556 d = reg_of_var(rd, iptr->dst, REG_NULL);
1557 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1560 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1561 /* val.l = constant */
1563 d = reg_of_var(rd, iptr->dst, REG_NULL);
1564 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1567 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1569 d = reg_of_var(rd, iptr->dst, REG_NULL);
1570 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1573 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1574 /* val.i = constant */
1576 d = reg_of_var(rd, iptr->dst, REG_NULL);
1577 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1580 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1582 d = reg_of_var(rd, iptr->dst, REG_NULL);
1583 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1586 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1587 /* val.l = constant */
1589 d = reg_of_var(rd, iptr->dst, REG_NULL);
1590 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
1594 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1595 /* op1 = variable, val.i = constant */
1597 /* using inc and dec is definitely faster than add -- tested */
1600 var = &(rd->locals[iptr->op1][TYPE_INT]);
1602 if (var->flags & INMEMORY) {
1603 if (iptr->val.i == 1) {
1604 x86_64_incl_membase(cd, REG_SP, d * 8);
1606 } else if (iptr->val.i == -1) {
1607 x86_64_decl_membase(cd, REG_SP, d * 8);
1610 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1614 if (iptr->val.i == 1) {
1615 x86_64_incl_reg(cd, d);
1617 } else if (iptr->val.i == -1) {
1618 x86_64_decl_reg(cd, d);
1621 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1627 /* floating operations ************************************************/
1629 case ICMD_FNEG: /* ..., value ==> ..., - value */
1631 var_to_reg_flt(s1, src, REG_FTMP1);
1632 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1633 a = dseg_adds4(cd, 0x80000000);
1635 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1636 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1637 store_reg_to_var_flt(iptr->dst, d);
1640 case ICMD_DNEG: /* ..., value ==> ..., - value */
1642 var_to_reg_flt(s1, src, REG_FTMP1);
1643 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1644 a = dseg_adds8(cd, 0x8000000000000000);
1646 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1647 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1648 store_reg_to_var_flt(iptr->dst, d);
1651 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1653 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1654 var_to_reg_flt(s2, src, REG_FTMP2);
1655 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1657 x86_64_addss_reg_reg(cd, s2, d);
1658 } else if (s2 == d) {
1659 x86_64_addss_reg_reg(cd, s1, d);
1662 x86_64_addss_reg_reg(cd, s2, d);
1664 store_reg_to_var_flt(iptr->dst, d);
1667 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1669 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1670 var_to_reg_flt(s2, src, REG_FTMP2);
1671 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1673 x86_64_addsd_reg_reg(cd, s2, d);
1674 } else if (s2 == d) {
1675 x86_64_addsd_reg_reg(cd, s1, d);
1678 x86_64_addsd_reg_reg(cd, s2, d);
1680 store_reg_to_var_flt(iptr->dst, d);
1683 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1685 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1686 var_to_reg_flt(s2, src, REG_FTMP2);
1687 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1689 M_FLTMOVE(s2, REG_FTMP2);
1693 x86_64_subss_reg_reg(cd, s2, d);
1694 store_reg_to_var_flt(iptr->dst, d);
1697 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1699 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1700 var_to_reg_flt(s2, src, REG_FTMP2);
1701 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1703 M_FLTMOVE(s2, REG_FTMP2);
1707 x86_64_subsd_reg_reg(cd, s2, d);
1708 store_reg_to_var_flt(iptr->dst, d);
1711 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1713 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1714 var_to_reg_flt(s2, src, REG_FTMP2);
1715 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1717 x86_64_mulss_reg_reg(cd, s2, d);
1718 } else if (s2 == d) {
1719 x86_64_mulss_reg_reg(cd, s1, d);
1722 x86_64_mulss_reg_reg(cd, s2, d);
1724 store_reg_to_var_flt(iptr->dst, d);
1727 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1729 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1730 var_to_reg_flt(s2, src, REG_FTMP2);
1731 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1733 x86_64_mulsd_reg_reg(cd, s2, d);
1734 } else if (s2 == d) {
1735 x86_64_mulsd_reg_reg(cd, s1, d);
1738 x86_64_mulsd_reg_reg(cd, s2, d);
1740 store_reg_to_var_flt(iptr->dst, d);
1743 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1745 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1746 var_to_reg_flt(s2, src, REG_FTMP2);
1747 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1749 M_FLTMOVE(s2, REG_FTMP2);
1753 x86_64_divss_reg_reg(cd, s2, d);
1754 store_reg_to_var_flt(iptr->dst, d);
1757 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1759 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1760 var_to_reg_flt(s2, src, REG_FTMP2);
1761 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1763 M_FLTMOVE(s2, REG_FTMP2);
1767 x86_64_divsd_reg_reg(cd, s2, d);
1768 store_reg_to_var_flt(iptr->dst, d);
1771 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1773 var_to_reg_int(s1, src, REG_ITMP1);
1774 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1775 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1776 store_reg_to_var_flt(iptr->dst, d);
1779 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1781 var_to_reg_int(s1, src, REG_ITMP1);
1782 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1783 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1784 store_reg_to_var_flt(iptr->dst, d);
1787 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1789 var_to_reg_int(s1, src, REG_ITMP1);
1790 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1791 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1792 store_reg_to_var_flt(iptr->dst, d);
1795 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1797 var_to_reg_int(s1, src, REG_ITMP1);
1798 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1799 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1800 store_reg_to_var_flt(iptr->dst, d);
1803 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1805 var_to_reg_flt(s1, src, REG_FTMP1);
1806 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1807 x86_64_cvttss2si_reg_reg(cd, s1, d);
1808 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1809 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1810 x86_64_jcc(cd, X86_64_CC_NE, a);
1811 M_FLTMOVE(s1, REG_FTMP1);
1812 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1813 x86_64_call_reg(cd, REG_ITMP2);
1814 M_INTMOVE(REG_RESULT, d);
1815 store_reg_to_var_int(iptr->dst, d);
1818 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1820 var_to_reg_flt(s1, src, REG_FTMP1);
1821 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1822 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1823 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1824 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1825 x86_64_jcc(cd, X86_64_CC_NE, a);
1826 M_FLTMOVE(s1, REG_FTMP1);
1827 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1828 x86_64_call_reg(cd, REG_ITMP2);
1829 M_INTMOVE(REG_RESULT, d);
1830 store_reg_to_var_int(iptr->dst, d);
1833 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1835 var_to_reg_flt(s1, src, REG_FTMP1);
1836 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1837 x86_64_cvttss2siq_reg_reg(cd, s1, d);
1838 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1839 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1840 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1841 x86_64_jcc(cd, X86_64_CC_NE, a);
1842 M_FLTMOVE(s1, REG_FTMP1);
1843 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
1844 x86_64_call_reg(cd, REG_ITMP2);
1845 M_INTMOVE(REG_RESULT, d);
1846 store_reg_to_var_int(iptr->dst, d);
1849 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1851 var_to_reg_flt(s1, src, REG_FTMP1);
1852 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1853 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1854 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1855 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1856 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1857 x86_64_jcc(cd, X86_64_CC_NE, a);
1858 M_FLTMOVE(s1, REG_FTMP1);
1859 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
1860 x86_64_call_reg(cd, REG_ITMP2);
1861 M_INTMOVE(REG_RESULT, d);
1862 store_reg_to_var_int(iptr->dst, d);
1865 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1867 var_to_reg_flt(s1, src, REG_FTMP1);
1868 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1869 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1870 store_reg_to_var_flt(iptr->dst, d);
1873 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1875 var_to_reg_flt(s1, src, REG_FTMP1);
1876 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1877 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1878 store_reg_to_var_flt(iptr->dst, d);
1881 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1882 /* == => 0, < => 1, > => -1 */
1884 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1885 var_to_reg_flt(s2, src, REG_FTMP2);
1886 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1887 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1888 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1889 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1890 x86_64_ucomiss_reg_reg(cd, s1, s2);
1891 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1892 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1893 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1894 store_reg_to_var_int(iptr->dst, d);
1897 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1898 /* == => 0, < => 1, > => -1 */
1900 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1901 var_to_reg_flt(s2, src, REG_FTMP2);
1902 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1903 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1904 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1905 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1906 x86_64_ucomiss_reg_reg(cd, s1, s2);
1907 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1908 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1909 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1910 store_reg_to_var_int(iptr->dst, d);
1913 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1914 /* == => 0, < => 1, > => -1 */
1916 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1917 var_to_reg_flt(s2, src, REG_FTMP2);
1918 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1919 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1920 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1921 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1922 x86_64_ucomisd_reg_reg(cd, s1, s2);
1923 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1924 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1925 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1926 store_reg_to_var_int(iptr->dst, d);
1929 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1930 /* == => 0, < => 1, > => -1 */
1932 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1933 var_to_reg_flt(s2, src, REG_FTMP2);
1934 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1935 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1936 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1937 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1938 x86_64_ucomisd_reg_reg(cd, s1, s2);
1939 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1940 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1941 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1942 store_reg_to_var_int(iptr->dst, d);
1946 /* memory operations **************************************************/
1948 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
1950 var_to_reg_int(s1, src, REG_ITMP1);
1951 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1952 gen_nullptr_check(s1);
1953 x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
1954 store_reg_to_var_int(iptr->dst, d);
1957 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
1959 var_to_reg_int(s1, src->prev, REG_ITMP1);
1960 var_to_reg_int(s2, src, REG_ITMP2);
1961 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1962 if (iptr->op1 == 0) {
1963 gen_nullptr_check(s1);
1966 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
1967 store_reg_to_var_int(iptr->dst, d);
1970 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1972 var_to_reg_int(s1, src->prev, REG_ITMP1);
1973 var_to_reg_int(s2, src, REG_ITMP2);
1974 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1975 if (iptr->op1 == 0) {
1976 gen_nullptr_check(s1);
1979 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
1980 store_reg_to_var_int(iptr->dst, d);
1983 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1985 var_to_reg_int(s1, src->prev, REG_ITMP1);
1986 var_to_reg_int(s2, src, REG_ITMP2);
1987 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1988 if (iptr->op1 == 0) {
1989 gen_nullptr_check(s1);
1992 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
1993 store_reg_to_var_int(iptr->dst, d);
1996 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1998 var_to_reg_int(s1, src->prev, REG_ITMP1);
1999 var_to_reg_int(s2, src, REG_ITMP2);
2000 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2001 if (iptr->op1 == 0) {
2002 gen_nullptr_check(s1);
2005 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2006 store_reg_to_var_flt(iptr->dst, d);
2009 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2011 var_to_reg_int(s1, src->prev, REG_ITMP1);
2012 var_to_reg_int(s2, src, REG_ITMP2);
2013 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2014 if (iptr->op1 == 0) {
2015 gen_nullptr_check(s1);
2018 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2019 store_reg_to_var_flt(iptr->dst, d);
2022 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2024 var_to_reg_int(s1, src->prev, REG_ITMP1);
2025 var_to_reg_int(s2, src, REG_ITMP2);
2026 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2027 if (iptr->op1 == 0) {
2028 gen_nullptr_check(s1);
2031 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2032 store_reg_to_var_int(iptr->dst, d);
2035 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2037 var_to_reg_int(s1, src->prev, REG_ITMP1);
2038 var_to_reg_int(s2, src, REG_ITMP2);
2039 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2040 if (iptr->op1 == 0) {
2041 gen_nullptr_check(s1);
2044 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2045 store_reg_to_var_int(iptr->dst, d);
2048 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2050 var_to_reg_int(s1, src->prev, REG_ITMP1);
2051 var_to_reg_int(s2, src, REG_ITMP2);
2052 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2053 if (iptr->op1 == 0) {
2054 gen_nullptr_check(s1);
2057 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2058 store_reg_to_var_int(iptr->dst, d);
2062 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2064 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2065 var_to_reg_int(s2, src->prev, REG_ITMP2);
2066 if (iptr->op1 == 0) {
2067 gen_nullptr_check(s1);
2070 var_to_reg_int(s3, src, REG_ITMP3);
2071 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2074 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2076 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2077 var_to_reg_int(s2, src->prev, REG_ITMP2);
2078 if (iptr->op1 == 0) {
2079 gen_nullptr_check(s1);
2082 var_to_reg_int(s3, src, REG_ITMP3);
2083 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2086 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2088 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2089 var_to_reg_int(s2, src->prev, REG_ITMP2);
2090 if (iptr->op1 == 0) {
2091 gen_nullptr_check(s1);
2094 var_to_reg_int(s3, src, REG_ITMP3);
2095 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2098 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2100 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2101 var_to_reg_int(s2, src->prev, REG_ITMP2);
2102 if (iptr->op1 == 0) {
2103 gen_nullptr_check(s1);
2106 var_to_reg_flt(s3, src, REG_FTMP3);
2107 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2110 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2112 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2113 var_to_reg_int(s2, src->prev, REG_ITMP2);
2114 if (iptr->op1 == 0) {
2115 gen_nullptr_check(s1);
2118 var_to_reg_flt(s3, src, REG_FTMP3);
2119 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2122 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2124 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2125 var_to_reg_int(s2, src->prev, REG_ITMP2);
2126 if (iptr->op1 == 0) {
2127 gen_nullptr_check(s1);
2130 var_to_reg_int(s3, src, REG_ITMP3);
2131 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2134 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2136 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2137 var_to_reg_int(s2, src->prev, REG_ITMP2);
2138 if (iptr->op1 == 0) {
2139 gen_nullptr_check(s1);
/* Array store cases.  Convention throughout: s1 = arrayref, s2 = index,   */
/* s3 = value.  gen_nullptr_check() is emitted only when iptr->op1 == 0,   */
/* i.e. when the verifier could not prove the reference non-null.          */
/* NOTE(review): this excerpt is sampled -- bounds-check and break lines   */
/* between the numbered lines are not visible here.                        */
2142 var_to_reg_int(s3, src, REG_ITMP3);
/* scale 1 (element size 2): 16-bit store into a short[] */
2143 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2146 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2148 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2149 var_to_reg_int(s2, src->prev, REG_ITMP2);
2150 if (iptr->op1 == 0) {
2151 gen_nullptr_check(s1);
2154 var_to_reg_int(s3, src, REG_ITMP3);
/* scale 0 (element size 1): 8-bit store into a byte[] */
2155 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
/* The *ASTORECONST variants store an immediate taken from iptr->val       */
/* instead of a stack value, so only arrayref (s1) and index (s2) are      */
/* loaded from the stack.                                                  */
2158 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2160 var_to_reg_int(s1, src->prev, REG_ITMP1);
2161 var_to_reg_int(s2, src, REG_ITMP2);
2162 if (iptr->op1 == 0) {
2163 gen_nullptr_check(s1);
2166 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2169 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2171 var_to_reg_int(s1, src->prev, REG_ITMP1);
2172 var_to_reg_int(s2, src, REG_ITMP2);
2173 if (iptr->op1 == 0) {
2174 gen_nullptr_check(s1);
/* A 64-bit immediate store only exists for sign-extended 32-bit values;   */
/* otherwise the long constant is written as two 32-bit halves.            */
2178 if (IS_IMM32(iptr->val.l)) {
2179 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2182 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2183 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
/* AASTORECONST stores only the constant NULL reference (immediate 0);     */
/* no GC write barrier / arraystore check is needed for null.              */
2187 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2189 var_to_reg_int(s1, src->prev, REG_ITMP1);
2190 var_to_reg_int(s2, src, REG_ITMP2);
2191 if (iptr->op1 == 0) {
2192 gen_nullptr_check(s1);
2195 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2198 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2200 var_to_reg_int(s1, src->prev, REG_ITMP1);
2201 var_to_reg_int(s2, src, REG_ITMP2);
2202 if (iptr->op1 == 0) {
2203 gen_nullptr_check(s1);
2206 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2209 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2211 var_to_reg_int(s1, src->prev, REG_ITMP1);
2212 var_to_reg_int(s2, src, REG_ITMP2);
2213 if (iptr->op1 == 0) {
2214 gen_nullptr_check(s1);
2217 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2220 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2222 var_to_reg_int(s1, src->prev, REG_ITMP1);
2223 var_to_reg_int(s2, src, REG_ITMP2);
2224 if (iptr->op1 == 0) {
2225 gen_nullptr_check(s1);
2228 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
/* Static field access.  If the field is unresolved at compile time a      */
/* patcher reference is recorded so the runtime patcher can fix up the     */
/* address; if resolved but the declaring class is not yet initialized, a  */
/* PATCHER_clinit reference triggers class initialization on first use.    */
/* The field address is stored in the data segment and loaded RIP-relative */
/* (the +7 accounts for the length of the emitted mov instruction).        */
2232 case ICMD_GETSTATIC: /* ... ==> ..., value */
2233 /* op1 = type, val.a = field address */
2236 codegen_addpatchref(cd, cd->mcodeptr,
2237 PATCHER_get_putstatic,
2238 (unresolved_field *) iptr->target);
/* keep 5 bytes of NOP padding so the disassembler output stays aligned */
2240 if (showdisassemble) {
2241 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2247 fieldinfo *fi = iptr->val.a;
2249 if (!fi->class->initialized) {
2250 codegen_addpatchref(cd, cd->mcodeptr,
2251 PATCHER_clinit, fi->class);
2253 if (showdisassemble) {
2254 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2258 a = (ptrint) &(fi->value);
2261 /* This approach is much faster than moving the field address */
2262 /* inline into a register. */
2263 a = dseg_addaddress(cd, a);
2264 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP2);
/* dispatch on the JVM field type: 32-bit int, 64-bit long/ref, float, double */
2265 switch (iptr->op1) {
2267 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2268 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2269 store_reg_to_var_int(iptr->dst, d);
2273 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2274 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2275 store_reg_to_var_int(iptr->dst, d);
2278 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2279 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2280 store_reg_to_var_flt(iptr->dst, d);
2283 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2284 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2285 store_reg_to_var_flt(iptr->dst, d);
/* PUTSTATIC mirrors GETSTATIC: same patching scheme, but the stack value  */
/* (s2) is written through the field address held in REG_ITMP2.            */
2290 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2291 /* op1 = type, val.a = field address */
2294 codegen_addpatchref(cd, cd->mcodeptr,
2295 PATCHER_get_putstatic,
2296 (unresolved_field *) iptr->target);
2298 if (showdisassemble) {
2299 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2305 fieldinfo *fi = iptr->val.a;
2307 if (!fi->class->initialized) {
2308 codegen_addpatchref(cd, cd->mcodeptr,
2309 PATCHER_clinit, fi->class);
2311 if (showdisassemble) {
2312 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2316 a = (ptrint) &(fi->value);
2319 /* This approach is much faster than moving the field address */
2320 /* inline into a register. */
2321 a = dseg_addaddress(cd, a);
2322 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP2);
2323 switch (iptr->op1) {
2325 var_to_reg_int(s2, src, REG_ITMP1);
2326 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2330 var_to_reg_int(s2, src, REG_ITMP1);
2331 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2334 var_to_reg_flt(s2, src, REG_FTMP1);
2335 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2338 var_to_reg_flt(s2, src, REG_FTMP1);
2339 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
/* PUTSTATICCONST: like PUTSTATIC but the value is an immediate carried in */
/* the current instruction; the field metadata lives in the following NOP  */
/* instruction (iptr[1]).                                                  */
2344 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2345 /* val = value (in current instruction) */
2346 /* op1 = type, val.a = field address (in */
2347 /* following NOP) */
2349 if (!iptr[1].val.a) {
2350 codegen_addpatchref(cd, cd->mcodeptr,
2351 PATCHER_get_putstatic,
2352 (unresolved_field *) iptr[1].target);
2354 if (showdisassemble) {
2355 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2361 fieldinfo *fi = iptr[1].val.a;
2363 if (!fi->class->initialized) {
2364 codegen_addpatchref(cd, cd->mcodeptr,
2365 PATCHER_clinit, fi->class);
2367 if (showdisassemble) {
2368 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2372 a = (ptrint) &(fi->value);
2375 /* This approach is much faster than moving the field address */
2376 /* inline into a register. */
2377 a = dseg_addaddress(cd, a);
2378 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP1);
2379 switch (iptr->op1) {
2382 x86_64_movl_imm_membase(cd, iptr->val.i, REG_ITMP1, 0);
/* long immediate: single store if it fits in a sign-extended imm32, else  */
/* two 32-bit halves (low at offset 0, high at offset 4)                   */
2387 if (IS_IMM32(iptr->val.l)) {
2388 x86_64_mov_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2390 x86_64_movl_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2391 x86_64_movl_imm_membase(cd, iptr->val.l >> 32, REG_ITMP1, 4);
/* Instance field access.  The object reference is null-checked, then the  */
/* field is addressed as base+offset.  For unresolved fields the offset is */
/* unknown, so the 32-bit-displacement (membase32) instruction forms are   */
/* used -- presumably so the patcher always has 4 bytes of displacement to */
/* rewrite (TODO confirm against patcher.c).                               */
2397 case ICMD_GETFIELD: /* ... ==> ..., value */
2398 /* op1 = type, val.i = field offset */
2400 var_to_reg_int(s1, src, REG_ITMP1);
2401 gen_nullptr_check(s1);
2404 codegen_addpatchref(cd, cd->mcodeptr,
2405 PATCHER_get_putfield,
2406 (unresolved_field *) iptr->target);
2408 if (showdisassemble) {
2409 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2415 a = ((fieldinfo *) (iptr->val.a))->offset;
2418 switch (iptr->op1) {
2420 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2421 x86_64_movl_membase32_reg(cd, s1, a, d);
2422 store_reg_to_var_int(iptr->dst, d);
2426 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2427 x86_64_mov_membase32_reg(cd, s1, a, d);
2428 store_reg_to_var_int(iptr->dst, d);
2431 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2432 x86_64_movss_membase32_reg(cd, s1, a, d);
2433 store_reg_to_var_flt(iptr->dst, d);
2436 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2437 x86_64_movsd_membase32_reg(cd, s1, a, d);
2438 store_reg_to_var_flt(iptr->dst, d);
/* PUTFIELD: s1 = objectref, s2 = value.  The value is fetched into an int */
/* or float register depending on the declared field type (op1).           */
2443 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2444 /* op1 = type, val.i = field offset */
2446 var_to_reg_int(s1, src->prev, REG_ITMP1);
2447 gen_nullptr_check(s1);
2448 if (IS_INT_LNG_TYPE(iptr->op1)) {
2449 var_to_reg_int(s2, src, REG_ITMP2);
2451 var_to_reg_flt(s2, src, REG_FTMP2);
2455 codegen_addpatchref(cd, cd->mcodeptr,
2456 PATCHER_get_putfield,
2457 (unresolved_field *) iptr->target);
2459 if (showdisassemble) {
2460 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2466 a = ((fieldinfo *) (iptr->val.a))->offset;
2469 switch (iptr->op1) {
2471 x86_64_movl_reg_membase32(cd, s2, s1, a);
2475 x86_64_mov_reg_membase32(cd, s2, s1, a);
2478 x86_64_movss_reg_membase32(cd, s2, s1, a);
2481 x86_64_movsd_reg_membase32(cd, s2, s1, a);
/* PUTFIELDCONST: immediate store into an instance field; metadata is in   */
/* the following NOP instruction (iptr[1]) as with PUTSTATICCONST.         */
2486 case ICMD_PUTFIELDCONST: /* ..., objectref, value ==> ... */
2487 /* val = value (in current instruction) */
2488 /* op1 = type, val.a = field address (in */
2489 /* following NOP) */
2491 var_to_reg_int(s1, src, REG_ITMP1);
2492 gen_nullptr_check(s1);
2494 if (!iptr[1].val.a) {
2495 codegen_addpatchref(cd, cd->mcodeptr,
2496 PATCHER_putfieldconst,
2497 (unresolved_field *) iptr[1].target);
2499 if (showdisassemble) {
2500 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2506 a = ((fieldinfo *) (iptr[1].val.a))->offset;
2509 switch (iptr->op1) {
2512 x86_64_movl_imm_membase32(cd, iptr->val.i, s1, a);
2517 /* We can only optimize the move, if the class is resolved. */
2518 /* Otherwise we don't know what to patch. */
2519 if (iptr[1].val.a && IS_IMM32(iptr->val.l)) {
2520 x86_64_mov_imm_membase32(cd, iptr->val.l, s1, a);
2522 x86_64_movl_imm_membase32(cd, iptr->val.l, s1, a);
2523 x86_64_movl_imm_membase32(cd, iptr->val.l >> 32, s1, a + 4);
2530 /* branch operations **************************************************/
/* ATHROW: put the exception object in REG_ITMP1_XPTR, use a call with     */
/* immediate 0 purely to push the current PC (popped into REG_ITMP2_XPC),  */
/* then jump to the assembler exception handler.                           */
2532 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2534 var_to_reg_int(s1, src, REG_ITMP1);
2535 M_INTMOVE(s1, REG_ITMP1_XPTR);
2537 x86_64_call_imm(cd, 0); /* passing exception pointer */
2538 x86_64_pop_reg(cd, REG_ITMP2_XPC);
2540 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
2541 x86_64_jmp_reg(cd, REG_ITMP3);
/* GOTO/JSR: emit the jump/call with a 0 placeholder displacement and      */
/* record a forward reference; the real target is filled in later.         */
2544 case ICMD_GOTO: /* ... ==> ... */
2545 /* op1 = target JavaVM pc */
2547 x86_64_jmp_imm(cd, 0);
2548 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2551 case ICMD_JSR: /* ... ==> ... */
2552 /* op1 = target JavaVM pc */
2554 x86_64_call_imm(cd, 0);
2555 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
/* RET: indirect jump through the return address held in the local         */
/* variable numbered op1.                                                  */
2558 case ICMD_RET: /* ... ==> ... */
2559 /* op1 = local variable */
2561 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2562 var_to_reg_int(s1, var, REG_ITMP1);
2563 x86_64_jmp_reg(cd, s1);
/* Conditional branches.  IFNULL/IFNONNULL compare the operand against 0   */
/* either in memory (cmp imm,[rsp+off]) or in a register (test reg,reg),   */
/* then emit jcc with a 0 placeholder and a forward reference.  All the    */
/* IFxx / IF_xCMPxx families delegate to the shared emit helpers in        */
/* emitfuncs (x86_64_emit_ifcc / if_lcc / if_icmpcc / if_lcmpcc).          */
2566 case ICMD_IFNULL: /* ..., value ==> ... */
2567 /* op1 = target JavaVM pc */
2569 if (src->flags & INMEMORY) {
2570 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2573 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2575 x86_64_jcc(cd, X86_64_CC_E, 0);
2576 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2579 case ICMD_IFNONNULL: /* ..., value ==> ... */
2580 /* op1 = target JavaVM pc */
2582 if (src->flags & INMEMORY) {
2583 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2586 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2588 x86_64_jcc(cd, X86_64_CC_NE, 0);
2589 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
/* 32-bit int compares against the constant in val.i */
2592 case ICMD_IFEQ: /* ..., value ==> ... */
2593 /* op1 = target JavaVM pc, val.i = constant */
2595 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2598 case ICMD_IFLT: /* ..., value ==> ... */
2599 /* op1 = target JavaVM pc, val.i = constant */
2601 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2604 case ICMD_IFLE: /* ..., value ==> ... */
2605 /* op1 = target JavaVM pc, val.i = constant */
2607 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2610 case ICMD_IFNE: /* ..., value ==> ... */
2611 /* op1 = target JavaVM pc, val.i = constant */
2613 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2616 case ICMD_IFGT: /* ..., value ==> ... */
2617 /* op1 = target JavaVM pc, val.i = constant */
2619 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2622 case ICMD_IFGE: /* ..., value ==> ... */
2623 /* op1 = target JavaVM pc, val.i = constant */
2625 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
/* 64-bit long compares against the constant in val.l */
2628 case ICMD_IF_LEQ: /* ..., value ==> ... */
2629 /* op1 = target JavaVM pc, val.l = constant */
2631 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2634 case ICMD_IF_LLT: /* ..., value ==> ... */
2635 /* op1 = target JavaVM pc, val.l = constant */
2637 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2640 case ICMD_IF_LLE: /* ..., value ==> ... */
2641 /* op1 = target JavaVM pc, val.l = constant */
2643 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2646 case ICMD_IF_LNE: /* ..., value ==> ... */
2647 /* op1 = target JavaVM pc, val.l = constant */
2649 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2652 case ICMD_IF_LGT: /* ..., value ==> ... */
2653 /* op1 = target JavaVM pc, val.l = constant */
2655 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2658 case ICMD_IF_LGE: /* ..., value ==> ... */
2659 /* op1 = target JavaVM pc, val.l = constant */
2661 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
/* two-operand compares; reference compares (ACMP) share the 64-bit path   */
2664 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2665 /* op1 = target JavaVM pc */
2667 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2670 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2671 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2673 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2676 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2677 /* op1 = target JavaVM pc */
2679 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2682 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2683 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2685 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2688 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2689 /* op1 = target JavaVM pc */
2691 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2694 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2695 /* op1 = target JavaVM pc */
2697 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2700 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2701 /* op1 = target JavaVM pc */
2703 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2706 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2707 /* op1 = target JavaVM pc */
2709 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2712 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2713 /* op1 = target JavaVM pc */
2715 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2718 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2719 /* op1 = target JavaVM pc */
2721 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2724 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2725 /* op1 = target JavaVM pc */
2727 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2730 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2731 /* op1 = target JavaVM pc */
2733 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2736 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
/* Branchless select: d is preloaded with the ELSE constant (from the      */
/* following ELSE_ICONST instruction, iptr[1]), the THEN constant goes to  */
/* REG_ITMP2, and a test + cmovcc picks the right one.  All six cases      */
/* differ only in the condition code.                                      */
2738 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2741 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2742 /* val.i = constant */
2744 var_to_reg_int(s1, src, REG_ITMP1);
2745 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2746 if (iptr[1].opc == ICMD_ELSE_ICONST) {
/* d may alias s1; keep the tested value safe in REG_ITMP1 first */
2748 M_INTMOVE(s1, REG_ITMP1);
2751 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2753 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2754 x86_64_testl_reg_reg(cd, s1, s1);
2755 x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2756 store_reg_to_var_int(iptr->dst, d);
2759 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2760 /* val.i = constant */
2762 var_to_reg_int(s1, src, REG_ITMP1);
2763 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2764 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2766 M_INTMOVE(s1, REG_ITMP1);
2769 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2771 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2772 x86_64_testl_reg_reg(cd, s1, s1);
2773 x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2774 store_reg_to_var_int(iptr->dst, d);
2777 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2778 /* val.i = constant */
2780 var_to_reg_int(s1, src, REG_ITMP1);
2781 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2782 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2784 M_INTMOVE(s1, REG_ITMP1);
2787 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2789 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2790 x86_64_testl_reg_reg(cd, s1, s1);
2791 x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2792 store_reg_to_var_int(iptr->dst, d);
2795 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2796 /* val.i = constant */
2798 var_to_reg_int(s1, src, REG_ITMP1);
2799 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2800 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2802 M_INTMOVE(s1, REG_ITMP1);
2805 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2807 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2808 x86_64_testl_reg_reg(cd, s1, s1);
2809 x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2810 store_reg_to_var_int(iptr->dst, d);
2813 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2814 /* val.i = constant */
2816 var_to_reg_int(s1, src, REG_ITMP1);
2817 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2818 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2820 M_INTMOVE(s1, REG_ITMP1);
2823 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2825 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2826 x86_64_testl_reg_reg(cd, s1, s1);
2827 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2828 store_reg_to_var_int(iptr->dst, d);
2831 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2832 /* val.i = constant */
2834 var_to_reg_int(s1, src, REG_ITMP1);
2835 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2836 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2838 M_INTMOVE(s1, REG_ITMP1);
2841 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2843 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2844 x86_64_testl_reg_reg(cd, s1, s1);
2845 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2846 store_reg_to_var_int(iptr->dst, d);
/* Method return.  The value-returning cases move the result into the ABI  */
/* result register and fall into the shared epilogue (nowperformreturn):   */
/* optional call-trace output, optional monitorexit for synchronized       */
/* methods, restore of callee-saved registers, stack deallocation.         */
2850 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2854 var_to_reg_int(s1, src, REG_RESULT);
2855 M_INTMOVE(s1, REG_RESULT);
2857 goto nowperformreturn;
2859 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2862 var_to_reg_flt(s1, src, REG_FRESULT);
2863 M_FLTMOVE(s1, REG_FRESULT);
2865 goto nowperformreturn;
2867 case ICMD_RETURN: /* ... ==> ... */
2873 p = parentargs_base;
2875 /* call trace function */
/* save both result registers around the builtin_displaymethodstop call,   */
/* since it may clobber them                                               */
2877 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2879 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2880 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2882 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
2883 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
2884 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2885 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2887 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
2888 x86_64_call_reg(cd, REG_ITMP1);
2890 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2891 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2893 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2896 #if defined(USE_THREADS)
/* synchronized method: reload the saved monitor object and call           */
/* builtin_monitorexit, preserving the return value across the call in the */
/* monitor's stack slot (rd->maxmemuse * 8)                                */
2897 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2898 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2900 /* we need to save the proper return value */
2901 switch (iptr->opc) {
2905 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, rd->maxmemuse * 8);
2909 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, rd->maxmemuse * 8);
2913 x86_64_mov_imm_reg(cd, (ptrint) builtin_monitorexit, REG_ITMP1);
2914 x86_64_call_reg(cd, REG_ITMP1);
2916 /* and now restore the proper return value */
2917 switch (iptr->opc) {
2921 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_RESULT);
2925 x86_64_movq_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_FRESULT);
2931 /* restore saved registers */
2932 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
2933 p--; x86_64_mov_membase_reg(cd, REG_SP, p * 8, rd->savintregs[i]);
2935 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
2936 p--; x86_64_movq_membase_reg(cd, REG_SP, p * 8, rd->savfltregs[i]);
2939 /* deallocate stack */
2940 if (parentargs_base) {
2941 x86_64_alu_imm_reg(cd, X86_64_ADD, parentargs_base * 8, REG_SP);
/* TABLESWITCH: subtract the low bound, do an unsigned range check         */
/* (CC_A catches both above-high and below-low after the subtract), then   */
/* jump indirect through a jump table built top-down in the data segment.  */
2949 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2954 tptr = (void **) iptr->target;
2956 s4ptr = iptr->val.a;
2957 l = s4ptr[1]; /* low */
2958 i = s4ptr[2]; /* high */
2960 var_to_reg_int(s1, src, REG_ITMP1);
2961 M_INTMOVE(s1, REG_ITMP1);
2963 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
2968 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
2969 x86_64_jcc(cd, X86_64_CC_A, 0);
2971 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
2972 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2974 /* build jump table top down and use address of lowest entry */
2976 /* s4ptr += 3 + i; */
2980 /* dseg_addtarget(cd, BlockPtrOfPC(*--s4ptr)); */
2981 dseg_addtarget(cd, (basicblock *) tptr[0]);
2985 /* length of dataseg after last dseg_addtarget is used by load */
/* mov imm,reg is a placeholder patched by dseg_adddata with the data      */
/* segment base; scale 3 (<<3) indexes 8-byte table entries                */
2987 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
2988 dseg_adddata(cd, cd->mcodeptr);
2989 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
2990 x86_64_jmp_reg(cd, REG_ITMP1);
/* LOOKUPSWITCH: linear sequence of compare-and-branch pairs, followed by  */
/* an unconditional jump to the default target.                            */
2995 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2997 s4 i, l, val, *s4ptr;
3000 tptr = (void **) iptr->target;
3002 s4ptr = iptr->val.a;
3003 l = s4ptr[0]; /* default */
3004 i = s4ptr[1]; /* count */
3006 MCODECHECK((i<<2)+8);
3007 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
3013 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
3014 x86_64_jcc(cd, X86_64_CC_E, 0);
3015 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[1]), cd->mcodeptr); */
3016 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3019 x86_64_jmp_imm(cd, 0);
3020 /* codegen_addreference(cd, BlockPtrOfPC(l), cd->mcodeptr); */
3022 tptr = (void **) iptr->target;
3023 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
/* Method invocation.  All invoke variants share the argument-marshalling  */
/* loop (registers first, overflow to stack slots per the method           */
/* descriptor), then diverge on how the callee address is obtained, and    */
/* finally share the result-handling tail.                                 */
3028 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
3029 /* op1 = arg count val.a = builtintable entry */
3035 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3036 /* op1 = arg count, val.a = method pointer */
3038 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3039 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
3040 case ICMD_INVOKEINTERFACE:
/* for unresolved methods the descriptor comes from the unresolved_method  */
/* in iptr->target rather than from the methodinfo lm                      */
3045 md = lm->parseddesc;
3047 unresolved_method *um = iptr->target;
3048 md = um->methodref->parseddesc.md;
3054 MCODECHECK((s3 << 1) + 64);
3056 /* copy arguments to registers or stack location */
3058 for (s3 = s3 - 1; s3 >= 0; s3--, src = src->prev) {
3059 if (src->varkind == ARGVAR)
3061 if (IS_INT_LNG_TYPE(src->type)) {
3062 if (!md->params[s3].inmemory) {
3063 s1 = rd->argintregs[md->params[s3].regoff];
3064 var_to_reg_int(d, src, s1);
3067 var_to_reg_int(d, src, REG_ITMP1);
3068 M_LST(d, REG_SP, md->params[s3].regoff * 8);
3072 if (!md->params[s3].inmemory) {
3073 s1 = rd->argfltregs[md->params[s3].regoff];
3074 var_to_reg_flt(d, src, s1);
3077 var_to_reg_flt(d, src, REG_FTMP1);
3078 M_DST(d, REG_SP, md->params[s3].regoff * 8);
3083 switch (iptr->opc) {
/* BUILTIN: call the builtin-table function pointer directly (patched if   */
/* unresolved)                                                             */
3086 codegen_addpatchref(cd, cd->mcodeptr,
3087 bte->fp, iptr->target);
3089 if (showdisassemble) {
3090 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3096 a = (ptrint) bte->fp;
3099 d = md->returntype.type;
3101 x86_64_mov_imm_reg(cd, a, REG_ITMP1);
3102 x86_64_call_reg(cd, REG_ITMP1);
/* INVOKESPECIAL: explicit null check on the receiver (first int arg) via  */
/* test + jz to the null-pointer exception stubs                           */
3105 case ICMD_INVOKESPECIAL:
3106 x86_64_test_reg_reg(cd, rd->argintregs[0], rd->argintregs[0]);
3107 x86_64_jcc(cd, X86_64_CC_Z, 0);
3108 codegen_addxnullrefs(cd, cd->mcodeptr);
3110 /* first argument contains pointer */
3111 /* gen_nullptr_check(rd->argintregs[0]); */
3113 /* access memory for hardware nullptr */
3114 /* x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); */
/* INVOKESTATIC/-SPECIAL: call the method's stub routine; address patched  */
/* for unresolved methods                                                  */
3118 case ICMD_INVOKESTATIC:
3120 unresolved_method *um = iptr->target;
3122 codegen_addpatchref(cd, cd->mcodeptr,
3123 PATCHER_invokestatic_special, um);
3125 if (showdisassemble) {
3126 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3130 d = um->methodref->parseddesc.md->returntype.type;
3133 a = (ptrint) lm->stubroutine;
3134 d = lm->parseddesc->returntype.type;
3137 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3138 x86_64_call_reg(cd, REG_ITMP2);
/* INVOKEVIRTUAL: load vftbl from the receiver, then the method pointer at */
/* table[vftblindex]                                                       */
3141 case ICMD_INVOKEVIRTUAL:
3142 gen_nullptr_check(rd->argintregs[0]);
3145 unresolved_method *um = iptr->target;
3147 codegen_addpatchref(cd, cd->mcodeptr,
3148 PATCHER_invokevirtual, um);
3150 if (showdisassemble) {
3151 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3155 d = um->methodref->parseddesc.md->returntype.type;
3158 s1 = OFFSET(vftbl_t, table[0]) +
3159 sizeof(methodptr) * lm->vftblindex;
3160 d = lm->parseddesc->returntype.type;
3163 x86_64_mov_membase_reg(cd, rd->argintregs[0],
3164 OFFSET(java_objectheader, vftbl),
3166 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP1);
3167 x86_64_call_reg(cd, REG_ITMP1);
/* INVOKEINTERFACE: double indirection -- interface table entry (negative  */
/* offset from vftbl), then the method slot within it                      */
3170 case ICMD_INVOKEINTERFACE:
3171 gen_nullptr_check(rd->argintregs[0]);
3174 unresolved_method *um = iptr->target;
3176 codegen_addpatchref(cd, cd->mcodeptr,
3177 PATCHER_invokeinterface, um);
3179 if (showdisassemble) {
3180 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3185 d = um->methodref->parseddesc.md->returntype.type;
3188 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3189 sizeof(methodptr) * lm->class->index;
3191 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3193 d = lm->parseddesc->returntype.type;
3196 x86_64_mov_membase_reg(cd, rd->argintregs[0],
3197 OFFSET(java_objectheader, vftbl),
3199 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP2);
3200 x86_64_mov_membase32_reg(cd, REG_ITMP2, s2, REG_ITMP1);
3201 x86_64_call_reg(cd, REG_ITMP1);
3205 /* d contains return type */
/* common tail: move the ABI result register into the destination variable */
3207 if (d != TYPE_VOID) {
3208 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3209 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3210 M_INTMOVE(REG_RESULT, s1);
3211 store_reg_to_var_int(iptr->dst, s1);
3213 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3214 M_FLTMOVE(REG_FRESULT, s1);
3215 store_reg_to_var_flt(iptr->dst, s1);
/* CHECKCAST.  Two code shapes are emitted -- an interface check (via the  */
/* interfacetable) and a class check (via baseval/diffval ranges).  When   */
/* the superclass is unresolved, BOTH shapes are emitted and a runtime     */
/* test on super->flags & ACC_INTERFACE (patched in) selects one; the      */
/* byte sizes s2/s3 are hand-computed so the selecting jcc/jmp can skip    */
/* over the unused shape.  NOTE(review): these size computations must be   */
/* kept in sync with the exact instruction encodings below.                */
3221 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3223 /* op1: 0 == array, 1 == class */
3224 /* val.a: (classinfo *) superclass */
3226 /* superclass is an interface:
3228 * OK if ((sub == NULL) ||
3229 * (sub->vftbl->interfacetablelength > super->index) &&
3230 * (sub->vftbl->interfacetable[-super->index] != NULL));
3232 * superclass is a class:
3234 * OK if ((sub == NULL) || (0
3235 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3236 * super->vftbl->diffval));
3241 vftbl_t *supervftbl;
3244 super = (classinfo *) iptr->val.a;
3251 superindex = super->index;
3252 supervftbl = super->vftbl;
3255 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3256 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3258 var_to_reg_int(s1, src, REG_ITMP1);
3260 /* calculate interface checkcast code size */
3262 s2 = 3; /* mov_membase_reg */
3263 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3265 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub imm32 */ +
3266 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3267 3 /* test */ + 6 /* jcc */;
3270 s2 += (showdisassemble ? 5 : 0);
3272 /* calculate class checkcast code size */
3274 s3 = 3; /* mov_membase_reg */
3275 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3276 s3 += 10 /* mov_imm_reg */ + 3 + 4 /* movl_membase32_reg */;
3279 if (s1 != REG_ITMP1) {
3280 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3281 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3282 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3283 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3289 s3 += 3 + 4 /* movl_membase32_reg */ + 3 /* sub */ +
3290 10 /* mov_imm_reg */ + 3 /* movl_membase_reg */;
3291 CALCOFFSETBYTES(s3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3294 s3 += 3 /* cmp */ + 6 /* jcc */;
3297 s3 += (showdisassemble ? 5 : 0);
3299 /* if class is not resolved, check which code to call */
/* null objectref: skip the whole checkcast (null always passes) */
3302 x86_64_test_reg_reg(cd, s1, s1);
3303 x86_64_jcc(cd, X86_64_CC_Z, 6 + (showdisassemble ? 5 : 0) + 7 + 6 + s2 + 5 + s3);
3305 codegen_addpatchref(cd, cd->mcodeptr,
3306 PATCHER_checkcast_instanceof_flags,
3307 (constant_classref *) iptr->target);
3309 if (showdisassemble) {
3310 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* the 0 immediate is patched to super->flags at resolution time */
3313 x86_64_movl_imm_reg(cd, 0, REG_ITMP2); /* super->flags */
3314 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP2);
3315 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3318 /* interface checkcast code */
3320 if (!super || (super->flags & ACC_INTERFACE)) {
3322 x86_64_test_reg_reg(cd, s1, s1);
3323 x86_64_jcc(cd, X86_64_CC_Z, s2);
3326 x86_64_mov_membase_reg(cd, s1,
3327 OFFSET(java_objectheader, vftbl),
3331 codegen_addpatchref(cd, cd->mcodeptr,
3332 PATCHER_checkcast_instanceof_interface,
3333 (constant_classref *) iptr->target);
3335 if (showdisassemble) {
3336 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* fail (throw ClassCastException via the xcast stubs) if the interface    */
/* table is too short or the entry is NULL                                 */
3340 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3341 OFFSET(vftbl_t, interfacetablelength),
3343 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3344 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3345 x86_64_jcc(cd, X86_64_CC_LE, 0);
3346 codegen_addxcastrefs(cd, cd->mcodeptr);
3347 x86_64_mov_membase32_reg(cd, REG_ITMP2,
3348 OFFSET(vftbl_t, interfacetable[0]) -
3349 superindex * sizeof(methodptr*),
3351 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3352 x86_64_jcc(cd, X86_64_CC_E, 0);
3353 codegen_addxcastrefs(cd, cd->mcodeptr);
/* unresolved case: skip over the class-check shape that follows */
3356 x86_64_jmp_imm(cd, s3);
3359 /* class checkcast code */
3361 if (!super || !(super->flags & ACC_INTERFACE)) {
3363 x86_64_test_reg_reg(cd, s1, s1);
3364 x86_64_jcc(cd, X86_64_CC_Z, s3);
3367 x86_64_mov_membase_reg(cd, s1,
3368 OFFSET(java_objectheader, vftbl),
3372 codegen_addpatchref(cd, cd->mcodeptr,
3373 PATCHER_checkcast_class,
3374 (constant_classref *) iptr->target);
3376 if (showdisassemble) {
3377 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3381 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP3);
/* baseval/diffval reads are guarded by the thread critical section so the */
/* hierarchy cannot be re-numbered mid-check                               */
3382 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3383 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3385 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3386 OFFSET(vftbl_t, baseval),
3388 /* if (s1 != REG_ITMP1) { */
3389 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3390 /* OFFSET(vftbl_t, baseval), */
3392 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3393 /* OFFSET(vftbl_t, diffval), */
3395 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3396 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3398 /* x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP1, REG_ITMP2); */
3401 x86_64_movl_membase32_reg(cd, REG_ITMP3,
3402 OFFSET(vftbl_t, baseval),
3404 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP2);
3405 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP3);
3406 x86_64_movl_membase_reg(cd, REG_ITMP3,
3407 OFFSET(vftbl_t, diffval),
3410 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3411 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
/* unsigned compare implements 0 <= (base_sub - base_super) <= diffval */
3413 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP2);
3414 x86_64_jcc(cd, X86_64_CC_A, 0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3415 codegen_addxcastrefs(cd, cd->mcodeptr);
3417 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3419 store_reg_to_var_int(iptr->dst, d);
3420 /* if (iptr->dst->flags & INMEMORY) { */
3421 /* x86_64_mov_reg_membase(cd, s1, REG_SP, iptr->dst->regoff * 8); */
3423 /* M_INTMOVE(s1, iptr->dst->regoff); */
/* INSTANCEOF.  Structurally parallels CHECKCAST, but instead of throwing  */
/* on failure it materializes a 0/1 result in d via setcc; d is zeroed up  */
/* front so the fall-through (null or failed check) yields false.          */
3428 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3430 /* op1: 0 == array, 1 == class */
3431 /* val.a: (classinfo *) superclass */
3433 /* superclass is an interface:
3435 * return (sub != NULL) &&
3436 * (sub->vftbl->interfacetablelength > super->index) &&
3437 * (sub->vftbl->interfacetable[-super->index] != NULL);
3439 * superclass is a class:
3441 * return ((sub != NULL) && (0
3442 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3443 * super->vftbl->diffvall));
3448 vftbl_t *supervftbl;
3451 super = (classinfo *) iptr->val.a;
3458 superindex = super->index;
3459 supervftbl = super->vftbl;
3462 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3463 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3466 var_to_reg_int(s1, src, REG_ITMP1);
3467 d = reg_of_var(rd, iptr->dst, REG_ITMP2);
/* if d aliases the source, keep the tested value safe in REG_ITMP1 */
3469 M_INTMOVE(s1, REG_ITMP1);
3473 /* calculate interface instanceof code size */
/* as in CHECKCAST, s2/s3 are hand-computed encoding sizes used as jump    */
/* distances -- keep in sync with the emitted instructions below           */
3475 s2 = 3; /* mov_membase_reg */
3476 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3477 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub_imm32 */ +
3478 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3479 3 /* test */ + 4 /* setcc */;
3482 s2 += (showdisassemble ? 5 : 0);
3484 /* calculate class instanceof code size */
3486 s3 = 3; /* mov_membase_reg */
3487 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3488 s3 += 10; /* mov_imm_reg */
3489 s3 += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3490 CALCOFFSETBYTES(s3, REG_ITMP1, OFFSET(vftbl_t, baseval));
3491 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3492 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, baseval));
3493 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3494 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3495 s3 += 3 /* sub */ + 3 /* xor */ + 3 /* cmp */ + 4 /* setcc */;
3498 s3 += (showdisassemble ? 5 : 0);
/* default result: false */
3500 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3502 /* if class is not resolved, check which code to call */
3505 x86_64_test_reg_reg(cd, s1, s1);
3506 x86_64_jcc(cd, X86_64_CC_Z, (6 + (showdisassemble ? 5 : 0) +
3507 7 + 6 + s2 + 5 + s3));
3509 codegen_addpatchref(cd, cd->mcodeptr,
3510 PATCHER_checkcast_instanceof_flags,
3511 (constant_classref *) iptr->target);
3513 if (showdisassemble) {
3514 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* the 0 immediate is patched to super->flags at resolution time */
3517 x86_64_movl_imm_reg(cd, 0, REG_ITMP3); /* super->flags */
3518 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP3);
3519 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3522 /* interface instanceof code */
3524 if (!super || (super->flags & ACC_INTERFACE)) {
3526 x86_64_test_reg_reg(cd, s1, s1);
3527 x86_64_jcc(cd, X86_64_CC_Z, s2);
3530 x86_64_mov_membase_reg(cd, s1,
3531 OFFSET(java_objectheader, vftbl),
3534 codegen_addpatchref(cd, cd->mcodeptr,
3535 PATCHER_checkcast_instanceof_interface,
3536 (constant_classref *) iptr->target);
3538 if (showdisassemble) {
3539 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3543 x86_64_movl_membase32_reg(cd, REG_ITMP1,
3544 OFFSET(vftbl_t, interfacetablelength),
3546 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3547 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
/* a = bytes to skip when the table is too short (result stays false) */
3549 a = 3 + 4 /* mov_membase32_reg */ + 3 /* test */ + 4 /* setcc */;
3551 x86_64_jcc(cd, X86_64_CC_LE, a);
3552 x86_64_mov_membase32_reg(cd, REG_ITMP1,
3553 OFFSET(vftbl_t, interfacetable[0]) -
3554 superindex * sizeof(methodptr*),
3556 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3557 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3560 x86_64_jmp_imm(cd, s3);
3563 /* class instanceof code */
3565 if (!super || !(super->flags & ACC_INTERFACE)) {
3567 x86_64_test_reg_reg(cd, s1, s1);
3568 x86_64_jcc(cd, X86_64_CC_E, s3);
3571 x86_64_mov_membase_reg(cd, s1,
3572 OFFSET(java_objectheader, vftbl),
3576 codegen_addpatchref(cd, cd->mcodeptr,
3577 PATCHER_instanceof_class,
3578 (constant_classref *) iptr->target);
3580 if (showdisassemble) {
3581 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3585 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP2);
3586 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3587 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3589 x86_64_movl_membase_reg(cd, REG_ITMP1,
3590 OFFSET(vftbl_t, baseval),
3592 x86_64_movl_membase_reg(cd, REG_ITMP2,
3593 OFFSET(vftbl_t, diffval),
3595 x86_64_movl_membase_reg(cd, REG_ITMP2,
3596 OFFSET(vftbl_t, baseval),
3598 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3599 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
/* unsigned BE compare yields 0 <= (base_sub - base_super) <= diffval */
3601 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
3602 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d); /* may be REG_ITMP2 */
3603 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP1);
3604 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3606 store_reg_to_var_int(iptr->dst, d);
3610 case ICMD_CHECKASIZE: /* ..., size ==> ..., size */
3612 if (src->flags & INMEMORY) {
3613 x86_64_alul_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
3616 x86_64_testl_reg_reg(cd, src->regoff, src->regoff);
3618 x86_64_jcc(cd, X86_64_CC_L, 0);
3619 codegen_addxcheckarefs(cd, cd->mcodeptr);
3622 case ICMD_CHECKEXCEPTION: /* ... ==> ... */
3624 x86_64_test_reg_reg(cd, REG_RESULT, REG_RESULT);
3625 x86_64_jcc(cd, X86_64_CC_E, 0);
3626 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3629 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3630 /* op1 = dimension, val.a = array descriptor */
3632 /* check for negative sizes and copy sizes to stack if necessary */
3634 MCODECHECK((iptr->op1 << 1) + 64);
3636 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3637 var_to_reg_int(s2, src, REG_ITMP1);
3638 x86_64_testl_reg_reg(cd, s2, s2);
3639 x86_64_jcc(cd, X86_64_CC_L, 0);
3640 codegen_addxcheckarefs(cd, cd->mcodeptr);
3642 /* copy SAVEDVAR sizes to stack */
3644 if (src->varkind != ARGVAR) {
3645 x86_64_mov_reg_membase(cd, s2, REG_SP, s1 * 8);
3649 /* is a patcher function set? */
3652 codegen_addpatchref(cd, cd->mcodeptr,
3653 (functionptr) (ptrint) iptr->target,
3656 if (showdisassemble) {
3657 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3663 a = (ptrint) iptr->val.a;
3666 /* a0 = dimension count */
3668 x86_64_mov_imm_reg(cd, iptr->op1, rd->argintregs[0]);
3670 /* a1 = arrayvftbl */
3672 x86_64_mov_imm_reg(cd, (ptrint) iptr->val.a, rd->argintregs[1]);
3674 /* a2 = pointer to dimensions = stack pointer */
3676 x86_64_mov_reg_reg(cd, REG_SP, rd->argintregs[2]);
3678 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_multianewarray, REG_ITMP1);
3679 x86_64_call_reg(cd, REG_ITMP1);
3681 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3682 M_INTMOVE(REG_RESULT, s1);
3683 store_reg_to_var_int(iptr->dst, s1);
3687 throw_cacao_exception_exit(string_java_lang_InternalError,
3688 "Unknown ICMD %d", iptr->opc);
3691 } /* for instruction */
3693 /* copy values to interface registers */
3695 src = bptr->outstack;
3696 len = bptr->outdepth;
3697 MCODECHECK(64 + len);
3703 if ((src->varkind != STACKVAR)) {
3705 if (IS_FLT_DBL_TYPE(s2)) {
3706 var_to_reg_flt(s1, src, REG_FTMP1);
3707 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3708 M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3711 x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3715 var_to_reg_int(s1, src, REG_ITMP1);
3716 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3717 M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3720 x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3726 } /* if (bptr -> flags >= BBREACHED) */
3727 } /* for basic block */
3729 codegen_createlinenumbertable(cd);
3733 /* generate ArrayIndexOutOfBoundsException stubs */
3735 u1 *xcodeptr = NULL;
3738 for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3739 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3741 cd->mcodeptr - cd->mcodebase);
3745 /* move index register into REG_ITMP1 */
3747 M_MOV(bref->reg, REG_ITMP1); /* 3 bytes */
3749 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3750 dseg_adddata(cd, cd->mcodeptr);
3751 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3752 M_AADD(REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3754 if (xcodeptr != NULL) {
3755 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3758 xcodeptr = cd->mcodeptr;
3761 /*create stackinfo -- begin*/
3762 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
3763 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
3764 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
3765 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
3766 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
3767 x86_64_call_reg(cd,REG_ITMP3);
3768 /*create stackinfo -- end*/
3770 x86_64_mov_reg_reg(cd, REG_ITMP1, rd->argintregs[0]);
3771 x86_64_mov_imm_reg(cd, (ptrint) new_arrayindexoutofboundsexception, REG_ITMP3);
3772 x86_64_call_reg(cd, REG_ITMP3);
3774 /*remove stackinfo -- begin*/
3775 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
3776 x86_64_call_reg(cd,REG_ITMP3);
3777 /*remove stackinfo -- end*/
3779 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
3780 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3782 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
3783 x86_64_jmp_reg(cd, REG_ITMP3);
3787 /* generate NegativeArraySizeException stubs */
3791 for (bref = cd->xcheckarefs; bref != NULL; bref = bref->next) {
3792 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3793 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3795 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3799 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3801 cd->mcodeptr - cd->mcodebase);
3805 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3806 dseg_adddata(cd, cd->mcodeptr);
3807 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3808 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3810 if (xcodeptr != NULL) {
3811 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3814 xcodeptr = cd->mcodeptr;
3817 /*create stackinfo -- begin*/
3818 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
3819 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
3820 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
3821 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
3822 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
3823 x86_64_call_reg(cd,REG_ITMP3);
3824 /*create stackinfo -- end*/
3826 x86_64_mov_imm_reg(cd, (u8) new_negativearraysizeexception, REG_ITMP3);
3827 x86_64_call_reg(cd, REG_ITMP3);
3829 /*remove stackinfo -- begin*/
3830 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
3831 x86_64_call_reg(cd,REG_ITMP3);
3832 /*remove stackinfo -- end*/
3834 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
3835 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3837 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3838 x86_64_jmp_reg(cd, REG_ITMP3);
3842 /* generate cast check stubs */
3846 for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3847 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3848 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3850 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3854 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3856 cd->mcodeptr - cd->mcodebase);
3860 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3861 dseg_adddata(cd, cd->mcodeptr);
3862 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3863 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3865 if (xcodeptr != NULL) {
3866 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3869 xcodeptr = cd->mcodeptr;
3871 /*create stackinfo -- begin*/
3872 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
3873 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
3874 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
3875 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
3876 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
3877 x86_64_call_reg(cd,REG_ITMP3);
3878 /*create stackinfo -- end*/
3881 x86_64_mov_imm_reg(cd, (u8) new_classcastexception, REG_ITMP3);
3882 x86_64_call_reg(cd, REG_ITMP3);
3884 /*remove stackinfo -- begin*/
3885 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
3886 x86_64_call_reg(cd,REG_ITMP3);
3887 /*remove stackinfo -- end*/
3889 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
3890 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3892 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3893 x86_64_jmp_reg(cd, REG_ITMP3);
3897 /* generate divide by zero check stubs */
3901 for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
3902 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3903 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3905 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3909 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3911 cd->mcodeptr - cd->mcodebase);
3915 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3916 dseg_adddata(cd, cd->mcodeptr);
3917 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3918 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3920 if (xcodeptr != NULL) {
3921 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3924 xcodeptr = cd->mcodeptr;
3926 /*create stackinfo -- begin*/
3927 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
3928 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
3929 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
3930 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
3931 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
3932 x86_64_call_reg(cd,REG_ITMP3);
3933 /*create stackinfo -- end*/
3935 x86_64_mov_imm_reg(cd, (u8) new_arithmeticexception, REG_ITMP3);
3936 x86_64_call_reg(cd, REG_ITMP3);
3938 /*remove stackinfo -- begin*/
3939 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
3940 x86_64_call_reg(cd,REG_ITMP3);
3941 /*remove stackinfo -- end*/
3943 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
3944 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3946 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3947 x86_64_jmp_reg(cd, REG_ITMP3);
3951 /* generate exception check stubs */
3955 for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
3956 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3957 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3959 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3963 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3965 cd->mcodeptr - cd->mcodebase);
3969 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3970 dseg_adddata(cd, cd->mcodeptr);
3971 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3972 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3974 if (xcodeptr != NULL) {
3975 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3978 xcodeptr = cd->mcodeptr;
3980 x86_64_alu_imm_reg(cd, X86_64_SUB, 4*8, REG_SP);
3981 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3*8);
3982 x86_64_mov_imm_membase(cd, 0, REG_SP, 2*8);
3983 x86_64_mov_imm_membase(cd, 0, REG_SP, 1*8);
3984 x86_64_mov_imm_membase(cd, 0, REG_SP, 0*8);
3985 x86_64_mov_imm_reg(cd,(u8) asm_prepare_native_stackinfo,REG_ITMP1);
3986 x86_64_call_reg(cd,REG_ITMP1);
3989 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3990 x86_64_mov_imm_reg(cd, (u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3991 x86_64_call_reg(cd, REG_ITMP1);
3992 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
3993 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
3994 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
3996 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
3997 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP1_XPTR);
3998 x86_64_mov_imm_membase(cd, 0, REG_ITMP3, 0);
4000 x86_64_mov_reg_reg(cd,REG_ITMP1_XPTR,RDI);
4001 x86_64_mov_imm_reg(cd,(u8) helper_fillin_stacktrace_always,REG_ITMP1);
4002 x86_64_call_reg(cd,REG_ITMP1);
4003 x86_64_mov_reg_reg(cd,REG_RESULT,REG_ITMP1_XPTR);
4005 x86_64_mov_imm_reg(cd,(u8) asm_remove_native_stackinfo,REG_ITMP2);
4006 x86_64_call_reg(cd,REG_ITMP2);
4008 x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
4009 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);
4010 x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
4013 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
4014 x86_64_jmp_reg(cd, REG_ITMP3);
4018 /* generate NullpointerException stubs */
4022 for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
4023 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
4024 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4026 xcodeptr - cd->mcodebase - (10 + 10 + 3));
4030 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4032 cd->mcodeptr - cd->mcodebase);
4036 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
4037 dseg_adddata(cd, cd->mcodeptr);
4038 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
4039 M_AADD(REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
4041 if (xcodeptr != NULL) {
4042 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
4045 xcodeptr = cd->mcodeptr;
4047 /*create stackinfo -- begin*/
4048 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
4049 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
4050 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
4051 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
4052 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
4053 x86_64_call_reg(cd,REG_ITMP3);
4054 /*create stackinfo -- end*/
4057 x86_64_mov_imm_reg(cd, (ptrint) new_nullpointerexception, REG_ITMP3);
4058 x86_64_call_reg(cd, REG_ITMP3);
4060 /*remove stackinfo -- begin*/
4061 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
4062 x86_64_call_reg(cd,REG_ITMP3);
4063 /*remove stackinfo -- end*/
4065 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
4066 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
4068 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
4069 x86_64_jmp_reg(cd, REG_ITMP3);
4073 /* generate code patching stub call code */
4080 tmpcd = DNEW(codegendata);
4082 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
4083 /* check size of code segment */
4087 /* Get machine code which is patched back in later. A */
4088 /* `call rel32' is 5 bytes long (but read 8 bytes). */
4090 xcodeptr = cd->mcodebase + pref->branchpos;
4091 mcode = *((ptrint *) xcodeptr);
4093 /* patch in `call rel32' to call the following code */
4095 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4096 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4098 /* move pointer to java_objectheader onto stack */
4100 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4101 /* create a virtual java_objectheader */
4103 (void) dseg_addaddress(cd, get_dummyLR()); /* monitorPtr */
4104 a = dseg_addaddress(cd, NULL); /* vftbl */
4106 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP3);
4107 x86_64_push_reg(cd, REG_ITMP3);
4109 x86_64_push_imm(cd, 0);
4112 /* move machine code bytes and classinfo pointer into registers */
4114 x86_64_mov_imm_reg(cd, (ptrint) mcode, REG_ITMP3);
4115 x86_64_push_reg(cd, REG_ITMP3);
4116 x86_64_mov_imm_reg(cd, (ptrint) pref->ref, REG_ITMP3);
4117 x86_64_push_reg(cd, REG_ITMP3);
4119 x86_64_mov_imm_reg(cd, (ptrint) pref->patcher, REG_ITMP3);
4120 x86_64_push_reg(cd, REG_ITMP3);
4122 x86_64_mov_imm_reg(cd, (ptrint) asm_wrapper_patcher, REG_ITMP3);
4123 x86_64_jmp_reg(cd, REG_ITMP3);
4128 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
4132 /* createcompilerstub **********************************************************
4134 Creates a stub routine which calls the compiler.
4136 *******************************************************************************/
4138 #define COMPILERSTUB_SIZE 23
4140 functionptr createcompilerstub(methodinfo *m)
/* Creates a tiny trampoline (COMPILERSTUB_SIZE bytes) for a not-yet-compiled
   method: it loads the methodinfo pointer into REG_ITMP1 and tail-jumps to
   asm_call_jit_compiler, which compiles m on first call.  Returns a pointer
   to the generated stub code.
   NOTE(review): this listing elides several lines (numbering gaps, e.g.
   4141, 4143-4145, 4153-4154) -- braces and the codegendata setup between
   DNEW and the first emit call are not visible here; confirm in full file. */
4142 u1 *s; /* memory to hold the stub */
4146 s = CNEW(u1, COMPILERSTUB_SIZE); /* permanent memory for the stub code */
4148 /* mark start of dump memory area */
4150 dumpsize = dump_size();
4152 cd = DNEW(codegendata); /* temporary codegendata; lives in the dump area */
4155 /* code for the stub */
4157 x86_64_mov_imm_reg(cd, (ptrint) m, REG_ITMP1); /* pass method to compiler */
4158 x86_64_mov_imm_reg(cd, (ptrint) asm_call_jit_compiler, REG_ITMP3);
4159 x86_64_jmp_reg(cd, REG_ITMP3); /* tail-jump; compiler later patches callers */
4161 #if defined(STATISTICS)
4163 count_cstub_len += COMPILERSTUB_SIZE;
4166 /* release dump area */
4168 dump_release(dumpsize);
4170 return (functionptr) (ptrint) s;
4174 /* createnativestub ************************************************************
4176 Creates a stub routine which calls a native method.
4178 *******************************************************************************/
4180 functionptr createnativestub(functionptr f, methodinfo *m, codegendata *cd,
4181                              registerdata *rd, methoddesc *nmd)
/* Generates a wrapper stub that calls the native (JNI) implementation f of
   method m.  The stub: allocates a 16-byte-aligned frame; saves argument
   registers; optionally traces the call (verbose mode); records a dynamic
   native-stackframe-info block; reshuffles the Java arguments into the
   native ABI positions (the native call gets JNIEnv* as argument 0 and,
   for static methods, the class as argument 1, hence nativeparams); calls
   f; removes the stackframe info; checks for a pending exception and either
   returns or forwards to asm_handle_nat_exception; finally emits the
   patcher-call stubs and returns m->entrypoint.
   NOTE(review): numbering gaps show elided lines (braces, #else/#endif,
   the stackframesize++ body at 4201, the 'ret' near 4436, the closing
   brace) -- statements below may belong to conditionals whose openings are
   not visible here; confirm against the full file. */
4184 s4 stackframesize; /* size of stackframe if needed */
4186 s4 i, j; /* count variables */
4190 /* initialize variables */
4193 nativeparams = (m->flags & ACC_STATIC) ? 2 : 1; /* extra native args: env (+class) */
4196 /* calculate stack frame size */
/* 4 slots for stackframe info + space to spill all argument registers
   + memory arguments of the native descriptor */
4198 stackframesize = 4 + INT_ARG_CNT + FLT_ARG_CNT + nmd->memuse;
4200 if (!(stackframesize & 0x1)) /* keep stack 16-byte aligned */
4204 /* create method header */
4206 (void) dseg_addaddress(cd, m); /* MethodPointer */
4207 (void) dseg_adds4(cd, stackframesize * 8); /* FrameSize */
4208 (void) dseg_adds4(cd, 0); /* IsSync */
4209 (void) dseg_adds4(cd, 0); /* IsLeaf */
4210 (void) dseg_adds4(cd, 0); /* IntSave */
4211 (void) dseg_adds4(cd, 0); /* FltSave */
4212 (void) dseg_addlinenumbertablesize(cd);
4213 (void) dseg_adds4(cd, 0); /* ExTableSize */
4216 /* initialize mcode variables */
4218 cd->mcodeptr = (u1 *) cd->mcodebase;
4219 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
4222 /* generate stub code */
4224 M_ASUB_IMM(stackframesize * 8, REG_SP); /* allocate the stub's frame */
4226 /* if function is static, check for initialized */
/* class not initialized yet: let the patcher run <clinit> on first call */
4228 if ((m->flags & ACC_STATIC) && !m->class->initialized) {
4229 codegen_addpatchref(cd, cd->mcodeptr, PATCHER_clinit, m->class);
4231 if (showdisassemble) {
4232 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP; /* room so disassembly stays aligned */
4237 /* save integer and float argument registers */
/* NOTE(review): the memory slot is indexed by i while the register is
   indexed by j -- verify this is intended for mixed int/float signatures */
4239 for (i = 0, j = 0; i < md->paramcount && i < INT_ARG_CNT; i++)
4240 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4241 M_LST(rd->argintregs[j++], REG_SP, (1 + i) * 8);
4243 for (i = 0, j = 0; i < md->paramcount && i < FLT_ARG_CNT; i++)
4244 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4245 M_DST(rd->argfltregs[j++], REG_SP, (1 + INT_ARG_CNT + i) * 8);
4247 /* move float arguments into integer registers so the tracer can print them */
4249 for (i = 0, j = 0; i < md->paramcount && i < INT_ARG_CNT; i++) {
4250 /* if the paramtype is a float, we have to right shift all */
4251 /* following integer registers */
4253 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type)) {
4254 for (s1 = INT_ARG_CNT - 2; s1 >= i; s1--)
4255 M_MOV(rd->argintregs[s1], rd->argintregs[s1 + 1]);
4257 x86_64_movd_freg_reg(cd, rd->argfltregs[j], rd->argintregs[i]); /* raw bits */
/* verbose call trace: pass m plus the (possibly shifted) argument registers */
4262 x86_64_mov_imm_reg(cd, (ptrint) m, REG_ITMP1);
4263 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, 0 * 8);
4264 x86_64_mov_imm_reg(cd, (ptrint) builtin_trace_args, REG_ITMP1);
4265 x86_64_call_reg(cd, REG_ITMP1);
4267 /* restore integer and float argument registers */
4269 for (i = 0, j = 0; i < md->paramcount && i < INT_ARG_CNT; i++)
4270 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4271 M_LLD(rd->argintregs[j++], REG_SP, (1 + i) * 8);
4273 for (i = 0, j = 0; i < md->paramcount && i < FLT_ARG_CNT; i++)
4274 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4275 M_DLD(rd->argfltregs[j++], REG_SP, (1 + INT_ARG_CNT + i) * 8);
4279 /* save integer and float argument registers */
/* second save (offsets without the +1 tracer slot) around the
   stackframeinfo/nativeinvokation helper calls below */
4281 for (i = 0, j = 0; i < md->paramcount && i < INT_ARG_CNT; i++)
4282 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4283 M_LST(rd->argintregs[j++], REG_SP, i * 8);
4285 for (i = 0, j = 0; i < md->paramcount && i < FLT_ARG_CNT; i++)
4286 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4287 M_DST(rd->argfltregs[j++], REG_SP, (INT_ARG_CNT + i) * 8);
4291 0*8 void *oldThreadspecificHeadValue;
4292 1*8 void **addressOfThreadspecificHead;
4293 2*8 methodinfo *method;
4294 3*8 void *beginOfJavaStackframe; only used if != 0
4295 4*8 void *returnToFromNative;
4298 /* create dynamic stack info */
/* fill the 4 stackframe-info slots at the top of the frame and link the
   block into the thread-specific stackframeinfo chain */
4300 x86_64_mov_imm_membase(cd, 0, REG_SP, (stackframesize - 1) * 8);
4301 x86_64_mov_imm_membase(cd, (ptrint) m, REG_SP, (stackframesize - 2) * 8);
4303 x86_64_mov_imm_reg(cd, (ptrint) builtin_asm_get_stackframeinfo, REG_ITMP1);
4304 x86_64_call_reg(cd, REG_ITMP1);
4306 M_LST(REG_RESULT, REG_SP, (stackframesize - 3) * 8); /* addressOfThreadspecificHead */
4307 M_LLD(REG_ITMP2, REG_RESULT, 0);
4308 M_LST(REG_ITMP2, REG_SP, (stackframesize - 4) * 8); /* oldThreadspecificHeadValue */
4309 M_MOV(REG_SP, REG_ITMP2);
4310 M_LADD_IMM((stackframesize - 4) * 8, REG_ITMP2);
4311 M_LST(REG_ITMP2, REG_RESULT, 0); /* head now points at our info block */
4314 x86_64_mov_imm_reg(cd, (ptrint) nativeinvokation, REG_ITMP1);
4315 x86_64_call_reg(cd, REG_ITMP1);
4318 /* restore integer and float argument registers */
4320 for (i = 0, j = 0; i < md->paramcount && i < INT_ARG_CNT; i++)
4321 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4322 M_LLD(rd->argintregs[j++], REG_SP, i * 8);
4324 for (i = 0, j = 0; i < md->paramcount && i < FLT_ARG_CNT; i++)
4325 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4326 M_DLD(rd->argfltregs[j++], REG_SP, (INT_ARG_CNT + i) * 8);
4329 /* copy or spill arguments to new locations */
/* walk backwards so already-moved registers are not clobbered; j is the
   position in the native descriptor (shifted by env/class) */
4331 for (i = md->paramcount - 1, j = i + nativeparams; i >= 0; i--, j--) {
4332 t = md->paramtypes[i].type;
4334 if (IS_INT_LNG_TYPE(t)) {
4335 if (!md->params[i].inmemory) {
4336 s1 = rd->argintregs[md->params[i].regoff];
4338 if (!nmd->params[j].inmemory) {
4339 s2 = rd->argintregs[nmd->params[j].regoff];
4343 s2 = nmd->params[j].regoff;
4344 M_LST(s1, REG_SP, s2 * 8); /* register arg spills to native stack slot */
/* stack -> stack copy of a Java memory argument */
4348 s1 = md->params[i].regoff + stackframesize + 1; /* + 1 (RA) */
4349 s2 = nmd->params[j].regoff;
4350 M_LLD(REG_ITMP1, REG_SP, s1 * 8);
4351 M_LST(REG_ITMP1, REG_SP, s2 * 8);
4355 /* We only copy spilled float arguments, as the float argument */
4356 /* registers keep unchanged. */
4358 if (md->params[i].inmemory) {
4359 s1 = md->params[i].regoff + stackframesize + 1; /* + 1 (RA) */
4360 s2 = nmd->params[j].regoff;
4361 M_DLD(REG_FTMP1, REG_SP, s1 * 8);
4362 M_DST(REG_FTMP1, REG_SP, s2 * 8);
4367 /* put class into second argument register */
4369 if (m->flags & ACC_STATIC)
4370 x86_64_mov_imm_reg(cd, (ptrint) m->class, rd->argintregs[1]);
4372 /* put env into first argument register */
4374 x86_64_mov_imm_reg(cd, (ptrint) &env, rd->argintregs[0]);
4376 /* do the native function call */
4378 #if !defined(STATIC_CLASSPATH)
/* native code address not known yet: the patcher resolves it lazily */
4380 codegen_addpatchref(cd, cd->mcodeptr, PATCHER_resolve_native, m);
4382 if (showdisassemble) {
4383 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
4388 x86_64_mov_imm_reg(cd, (ptrint) f, REG_ITMP1);
4389 x86_64_call_reg(cd, REG_ITMP1); /* the actual native call */
4392 /* remove dynamic stack info */
/* restore the old thread-specific head value saved above */
4394 M_LLD(REG_ITMP2, REG_SP, (stackframesize - 4) * 8);
4395 M_LLD(REG_ITMP3, REG_SP, (stackframesize - 3) * 8);
4396 M_LST(REG_ITMP2, REG_ITMP3, 0);
4399 /* generate call trace */
4402 M_LST(REG_RESULT, REG_SP, 0 * 8); /* preserve return values over trace call */
4403 M_DST(REG_FRESULT, REG_SP, 1 * 8);
4405 x86_64_mov_imm_reg(cd, (ptrint) m, rd->argintregs[0]);
4406 M_MOV(REG_RESULT, rd->argintregs[1]);
/* NOTE(review): FRESULT is copied into both float arg registers --
   presumably so the tracer can print it as float and double; confirm
   against builtin_displaymethodstop's signature */
4407 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
4408 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
4410 x86_64_mov_imm_reg(cd, (ptrint) builtin_displaymethodstop, REG_ITMP1);
4411 x86_64_call_reg(cd, REG_ITMP1);
4413 M_LLD(REG_RESULT, REG_SP, 0 * 8);
4414 M_DLD(REG_FRESULT, REG_SP, 1 * 8);
4417 /* check for exception */
4419 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* threaded build: exception slot lives behind builtin_get_exceptionptrptr */
4420 M_LST(REG_RESULT, REG_SP, 0 * 8); /* native result must survive the helper call */
4421 x86_64_mov_imm_reg(cd, (ptrint) builtin_get_exceptionptrptr, REG_ITMP3);
4422 x86_64_call_reg(cd, REG_ITMP3);
4423 M_LLD(REG_ITMP3, REG_RESULT, 0);
4424 M_LLD(REG_RESULT, REG_SP, 0 * 8);
/* non-threaded build: read the global _exceptionptr directly */
4426 x86_64_mov_imm_reg(cd, (ptrint) &_exceptionptr, REG_ITMP3);
4427 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP3);
4429 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
/* jump over frame removal + return (7-byte add + 1-byte ret; the ret line
   is elided from this listing) when an exception is pending */
4430 x86_64_jcc(cd, X86_64_CC_NE, 7 + 1);
4432 /* remove stackframe */
4434 M_AADD_IMM(stackframesize * 8, REG_SP);
4439 /* handle exception */
4441 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* fetch and clear the thread-local exception pointer */
4442 M_LST(REG_ITMP3, REG_SP, 0 * 8);
4443 x86_64_mov_imm_reg(cd, (ptrint) builtin_get_exceptionptrptr, REG_ITMP3);
4444 x86_64_call_reg(cd, REG_ITMP3);
4445 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
4446 M_LLD(REG_ITMP1_XPTR, REG_SP, 0 * 8);
/* non-threaded variant: move pointer and clear the global slot */
4448 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
4449 x86_64_mov_imm_reg(cd, (ptrint) &_exceptionptr, REG_ITMP3);
4450 x86_64_alu_reg_reg(cd, X86_64_XOR, REG_ITMP2, REG_ITMP2);
4451 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_ITMP3, 0); /* clear exception pointer */
4454 /* remove stackframe */
4456 M_AADD_IMM(stackframesize * 8, REG_SP);
4458 M_LLD(REG_ITMP2_XPC, REG_SP, 0 * 8); /* get return address from stack */
4459 M_ASUB_IMM(3, REG_ITMP2_XPC); /* callq */
4461 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_nat_exception, REG_ITMP3);
4462 x86_64_jmp_reg(cd, REG_ITMP3);
4465 /* process patcher calls **************************************************/
4473 tmpcd = DNEW(codegendata); /* dummy codegendata to emit into existing code */
4475 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
4476 /* Get machine code which is patched back in later. A */
4477 /* `call rel32' is 5 bytes long (but read 8 bytes). */
4479 xcodeptr = cd->mcodebase + pref->branchpos;
4480 mcode = *((ptrint *) xcodeptr);
4482 /* patch in `call rel32' to call the following code */
4484 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4485 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4487 /* move pointer to java_objectheader onto stack */
4489 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4490 /* create a virtual java_objectheader */
4492 (void) dseg_addaddress(cd, get_dummyLR()); /* monitorPtr */
4493 off = dseg_addaddress(cd, NULL); /* vftbl */
/* RIP-relative address of the fake object header in the data segment */
4495 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + off, REG_ITMP3);
4496 x86_64_push_reg(cd, REG_ITMP3);
4498 x86_64_push_imm(cd, 0);
4501 /* move machine code bytes and classinfo pointer into registers */
4503 x86_64_mov_imm_reg(cd, (ptrint) mcode, REG_ITMP3); /* overwritten bytes */
4504 x86_64_push_reg(cd, REG_ITMP3);
4505 x86_64_mov_imm_reg(cd, (ptrint) pref->ref, REG_ITMP3);
4506 x86_64_push_reg(cd, REG_ITMP3);
4508 x86_64_mov_imm_reg(cd, (ptrint) pref->patcher, REG_ITMP3);
4509 x86_64_push_reg(cd, REG_ITMP3);
4511 x86_64_mov_imm_reg(cd, (ptrint) asm_wrapper_patcher, REG_ITMP3);
4512 x86_64_jmp_reg(cd, REG_ITMP3); /* wrapper calls pref->patcher, then retries */
4516 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
4518 return m->entrypoint;
4523 * These are local overrides for various environment variables in Emacs.
4524 * Please do not remove this and leave it at the end of the file, where
4525 * Emacs will automagically detect them.
4526 * ---------------------------------------------------------------------
4529 * indent-tabs-mode: t