1 /* src/vm/jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996-2005 R. Grafl, A. Krall, C. Kruegel, C. Oates,
4 R. Obermaisser, M. Platter, M. Probst, S. Ring, E. Steiner,
5 C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich, J. Wenninger,
6 Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
25 Contact: cacao@complang.tuwien.ac.at
27 Authors: Andreas Krall
32 $Id: codegen.c 2656 2005-06-13 14:14:24Z twisti $
45 #include "vm/jit/x86_64/arch.h"
46 #include "vm/jit/x86_64/codegen.h"
47 #include "vm/jit/x86_64/emitfuncs.h"
48 #include "vm/jit/x86_64/types.h"
49 #include "vm/jit/x86_64/asmoffsets.h"
51 #include "cacao/cacao.h"
52 #include "native/native.h"
53 #include "vm/global.h"
54 #include "vm/builtin.h"
55 #include "vm/loader.h"
56 #include "vm/statistics.h"
57 #include "vm/stringlocal.h"
58 #include "vm/tables.h"
59 #include "vm/jit/asmpart.h"
60 #include "vm/jit/codegen.inc"
61 #include "vm/jit/helper.h"
62 #include "vm/jit/jit.h"
65 # include "vm/jit/lsra.inc"
68 #include "vm/jit/parse.h"
69 #include "vm/jit/patcher.h"
70 #include "vm/jit/reg.h"
71 #include "vm/jit/reg.inc"
74 /* codegen *********************************************************************
76 Generates machine code.
78 *******************************************************************************/
80 void codegen(methodinfo *m, codegendata *cd, registerdata *rd)
82 s4 len, s1, s2, s3, d;
91 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
92 builtintable_entry *bte;
101 /* space to save used callee saved registers */
103 savedregs_num += (rd->savintregcnt - rd->maxsavintreguse);
104 savedregs_num += (rd->savfltregcnt - rd->maxsavfltreguse);
106 parentargs_base = rd->maxmemuse + savedregs_num;
108 #if defined(USE_THREADS) /* space to save argument of monitor_enter */
110 if (checksync && (m->flags & ACC_SYNCHRONIZED))
115 /* Keep stack of non-leaf functions 16-byte aligned for calls into native */
116 /* code e.g. libc or jni (alignment problems with movaps). */
118 if (!m->isleafmethod || runverbose)
119 parentargs_base |= 0x1;
121 /* create method header */
123 (void) dseg_addaddress(cd, m); /* MethodPointer */
124 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
126 #if defined(USE_THREADS)
128 /* IsSync contains the offset relative to the stack pointer for the
129 argument of monitor_exit used in the exception handler. Since the
130 offset could be zero and give a wrong meaning of the flag it is
134 if (checksync && (m->flags & ACC_SYNCHRONIZED))
135 (void) dseg_adds4(cd, (rd->maxmemuse + 1) * 8); /* IsSync */
140 (void) dseg_adds4(cd, 0); /* IsSync */
142 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
143 (void) dseg_adds4(cd, rd->savintregcnt - rd->maxsavintreguse);/* IntSave */
144 (void) dseg_adds4(cd, rd->savfltregcnt - rd->maxsavfltreguse);/* FltSave */
146 (void) dseg_addlinenumbertablesize(cd);
148 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
150 /* create exception table */
152 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
153 dseg_addtarget(cd, ex->start);
154 dseg_addtarget(cd, ex->end);
155 dseg_addtarget(cd, ex->handler);
156 (void) dseg_addaddress(cd, ex->catchtype.cls);
159 /* initialize mcode variables */
161 cd->mcodeptr = (u1 *) cd->mcodebase;
162 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
163 MCODECHECK(128 + m->paramcount);
165 /* create stack frame (if necessary) */
167 if (parentargs_base) {
168 x86_64_alu_imm_reg(cd, X86_64_SUB, parentargs_base * 8, REG_SP);
171 /* save used callee saved registers */
174 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
175 p--; x86_64_mov_reg_membase(cd, rd->savintregs[i], REG_SP, p * 8);
177 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
178 p--; x86_64_movq_reg_membase(cd, rd->savfltregs[i], REG_SP, p * 8);
181 /* take arguments out of register or stack frame */
185 for (p = 0, l = 0; p < md->paramcount; p++) {
186 t = md->paramtypes[p].type;
187 var = &(rd->locals[l][t]);
189 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
193 s1 = md->params[p].regoff;
194 if (IS_INT_LNG_TYPE(t)) { /* integer args */
195 s2 = rd->argintregs[s1];
196 if (!md->params[p].inmemory) { /* register arguments */
197 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
198 M_INTMOVE(s2, var->regoff);
200 } else { /* reg arg -> spilled */
201 M_LST(s2, REG_SP, var->regoff * 8);
204 } else { /* stack arguments */
205 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
206 /* + 8 for return address */
207 M_LLD(var->regoff, REG_SP, (parentargs_base + s1) * 8 + 8);
209 } else { /* stack arg -> spilled */
210 var->regoff = parentargs_base + s1 + 1;
214 } else { /* floating args */
215 if (!md->params[p].inmemory) { /* register arguments */
216 s2 = rd->argfltregs[s1];
217 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
218 M_FLTMOVE(s2, var->regoff);
220 } else { /* reg arg -> spilled */
221 M_DST(s2, REG_SP, var->regoff * 8);
224 } else { /* stack arguments */
225 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
226 M_DLD(var->regoff, REG_SP, (parentargs_base + s1) * 8 + 8);
229 var->regoff = parentargs_base + s1 + 1;
235 /* save monitorenter argument */
237 #if defined(USE_THREADS)
238 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
239 /* stack offset for monitor argument */
244 M_LSUB_IMM((INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_SP);
246 for (p = 0; p < INT_ARG_CNT; p++)
247 M_LST(rd->argintregs[p], REG_SP, p * 8);
249 for (p = 0; p < FLT_ARG_CNT; p++)
250 M_DST(rd->argfltregs[p], REG_SP, (INT_ARG_CNT + p) * 8);
252 s1 += INT_ARG_CNT + FLT_ARG_CNT;
255 /* decide which monitor enter function to call */
257 if (m->flags & ACC_STATIC) {
258 x86_64_mov_imm_reg(cd, (ptrint) m->class, REG_ITMP1);
259 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, s1 * 8);
260 M_INTMOVE(REG_ITMP1, rd->argintregs[0]);
261 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_staticmonitorenter, REG_ITMP1);
262 x86_64_call_reg(cd, REG_ITMP1);
265 x86_64_test_reg_reg(cd, rd->argintregs[0], rd->argintregs[0]);
266 x86_64_jcc(cd, X86_64_CC_Z, 0);
267 codegen_addxnullrefs(cd, cd->mcodeptr);
268 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, s1 * 8);
269 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_monitorenter, REG_ITMP1);
270 x86_64_call_reg(cd, REG_ITMP1);
274 for (p = 0; p < INT_ARG_CNT; p++)
275 M_LLD(rd->argintregs[p], REG_SP, p * 8);
277 for (p = 0; p < FLT_ARG_CNT; p++)
278 M_DLD(rd->argfltregs[p], REG_SP, (INT_ARG_CNT + p) * 8);
280 M_LADD_IMM((INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_SP);
285 /* Copy argument registers to stack and call trace function with pointer */
286 /* to arguments on stack. */
288 if (runverbose || opt_stat) {
289 M_LSUB_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT + 1 + 1) * 8, REG_SP);
291 /* save integer argument registers */
293 for (p = 0; p < INT_ARG_CNT; p++)
294 M_LST(rd->argintregs[p], REG_SP, (1 + p) * 8);
296 /* save float argument registers */
298 for (p = 0; p < FLT_ARG_CNT; p++)
299 M_DST(rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
301 /* save temporary registers for leaf methods */
303 if (m->isleafmethod) {
304 for (p = 0; p < INT_TMP_CNT; p++)
305 M_LST(rd->tmpintregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
307 for (p = 0; p < FLT_TMP_CNT; p++)
308 M_DST(rd->tmpfltregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
312 /* show integer hex code for float arguments */
314 for (p = 0, l = 0; p < m->paramcount && p < INT_ARG_CNT; p++) {
315 t = m->paramtypes[p];
317 /* if the paramtype is a float, we have to right shift all */
318 /* following integer registers */
320 if (IS_FLT_DBL_TYPE(t)) {
321 for (s1 = INT_ARG_CNT - 2; s1 >= p; s1--) {
322 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
325 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
330 x86_64_mov_imm_reg(cd, (ptrint) m, REG_ITMP2);
331 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
332 x86_64_mov_imm_reg(cd, (ptrint) builtin_trace_args, REG_ITMP1);
333 x86_64_call_reg(cd, REG_ITMP1);
336 x86_64_mov_imm_reg(cd,(u8)compiledinvokation,REG_ITMP1);
337 x86_64_call_reg(cd,REG_ITMP1);
340 /* restore integer argument registers */
342 for (p = 0; p < INT_ARG_CNT; p++)
343 M_LLD(rd->argintregs[p], REG_SP, (1 + p) * 8);
345 /* restore float argument registers */
347 for (p = 0; p < FLT_ARG_CNT; p++)
348 M_DLD(rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
350 /* restore temporary registers for leaf methods */
352 if (m->isleafmethod) {
353 for (p = 0; p < INT_TMP_CNT; p++)
354 M_LLD(rd->tmpintregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
356 for (p = 0; p < FLT_TMP_CNT; p++)
357 M_DLD(rd->tmpfltregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
360 M_LADD_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT + 1 + 1) * 8, REG_SP);
365 /* end of header generation */
367 /* walk through all basic blocks */
368 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
370 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
372 if (bptr->flags >= BBREACHED) {
374 /* branch resolving */
377 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
378 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
383 /* copy interface registers to their destination */
387 MCODECHECK(64 + len);
391 while (src != NULL) {
393 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
394 if (bptr->type == BBTYPE_SBR) {
395 /* d = reg_of_var(rd, src, REG_ITMP1); */
396 if (!(src->flags & INMEMORY))
400 x86_64_pop_reg(cd, d);
401 store_reg_to_var_int(src, d);
403 } else if (bptr->type == BBTYPE_EXH) {
404 /* d = reg_of_var(rd, src, REG_ITMP1); */
405 if (!(src->flags & INMEMORY))
409 M_INTMOVE(REG_ITMP1, d);
410 store_reg_to_var_int(src, d);
419 while (src != NULL) {
421 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
422 if (bptr->type == BBTYPE_SBR) {
423 d = reg_of_var(rd, src, REG_ITMP1);
424 x86_64_pop_reg(cd, d);
425 store_reg_to_var_int(src, d);
427 } else if (bptr->type == BBTYPE_EXH) {
428 d = reg_of_var(rd, src, REG_ITMP1);
429 M_INTMOVE(REG_ITMP1, d);
430 store_reg_to_var_int(src, d);
434 d = reg_of_var(rd, src, REG_ITMP1);
435 if ((src->varkind != STACKVAR)) {
437 if (IS_FLT_DBL_TYPE(s2)) {
438 s1 = rd->interfaces[len][s2].regoff;
439 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
443 x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
445 store_reg_to_var_flt(src, d);
448 s1 = rd->interfaces[len][s2].regoff;
449 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
453 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
455 store_reg_to_var_int(src, d);
464 /* walk through all instructions */
469 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
470 if (iptr->line != currentline) {
471 dseg_addlinenumber(cd, iptr->line, cd->mcodeptr);
472 /*printf("%s : %d\n",m->name->text,iptr->line);*/
473 currentline = iptr->line;
476 MCODECHECK(128); /* XXX are 128 bytes enough? */
479 case ICMD_INLINE_START: /* internal ICMDs */
480 case ICMD_INLINE_END:
483 case ICMD_NOP: /* ... ==> ... */
486 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
487 if (src->flags & INMEMORY) {
488 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
491 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
493 x86_64_jcc(cd, X86_64_CC_Z, 0);
494 codegen_addxnullrefs(cd, cd->mcodeptr);
497 /* constant operations ************************************************/
499 case ICMD_ICONST: /* ... ==> ..., constant */
500 /* op1 = 0, val.i = constant */
502 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
503 if (iptr->val.i == 0) {
504 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
506 x86_64_movl_imm_reg(cd, iptr->val.i, d);
508 store_reg_to_var_int(iptr->dst, d);
511 case ICMD_ACONST: /* ... ==> ..., constant */
512 /* op1 = 0, val.a = constant */
514 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
515 if (iptr->val.a == 0) {
516 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
518 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, d);
520 store_reg_to_var_int(iptr->dst, d);
523 case ICMD_LCONST: /* ... ==> ..., constant */
524 /* op1 = 0, val.l = constant */
526 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
527 if (iptr->val.l == 0) {
528 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
530 x86_64_mov_imm_reg(cd, iptr->val.l, d);
532 store_reg_to_var_int(iptr->dst, d);
535 case ICMD_FCONST: /* ... ==> ..., constant */
536 /* op1 = 0, val.f = constant */
538 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
539 a = dseg_addfloat(cd, iptr->val.f);
540 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + a, d);
541 store_reg_to_var_flt(iptr->dst, d);
544 case ICMD_DCONST: /* ... ==> ..., constant */
545 /* op1 = 0, val.d = constant */
547 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
548 a = dseg_adddouble(cd, iptr->val.d);
549 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, d);
550 store_reg_to_var_flt(iptr->dst, d);
554 /* load/store operations **********************************************/
556 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
557 /* op1 = local variable */
559 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
560 if ((iptr->dst->varkind == LOCALVAR) &&
561 (iptr->dst->varnum == iptr->op1)) {
564 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
565 if (var->flags & INMEMORY) {
566 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
567 store_reg_to_var_int(iptr->dst, d);
570 if (iptr->dst->flags & INMEMORY) {
571 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
574 M_INTMOVE(var->regoff, d);
579 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
580 case ICMD_ALOAD: /* op1 = local variable */
582 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
583 if ((iptr->dst->varkind == LOCALVAR) &&
584 (iptr->dst->varnum == iptr->op1)) {
587 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
588 if (var->flags & INMEMORY) {
589 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
590 store_reg_to_var_int(iptr->dst, d);
593 if (iptr->dst->flags & INMEMORY) {
594 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
597 M_INTMOVE(var->regoff, d);
602 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
603 case ICMD_DLOAD: /* op1 = local variable */
605 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
606 if ((iptr->dst->varkind == LOCALVAR) &&
607 (iptr->dst->varnum == iptr->op1)) {
610 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
611 if (var->flags & INMEMORY) {
612 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
613 store_reg_to_var_flt(iptr->dst, d);
616 if (iptr->dst->flags & INMEMORY) {
617 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
620 M_FLTMOVE(var->regoff, d);
625 case ICMD_ISTORE: /* ..., value ==> ... */
626 case ICMD_LSTORE: /* op1 = local variable */
629 if ((src->varkind == LOCALVAR) &&
630 (src->varnum == iptr->op1)) {
633 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
634 if (var->flags & INMEMORY) {
635 var_to_reg_int(s1, src, REG_ITMP1);
636 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
639 var_to_reg_int(s1, src, var->regoff);
640 M_INTMOVE(s1, var->regoff);
644 case ICMD_FSTORE: /* ..., value ==> ... */
645 case ICMD_DSTORE: /* op1 = local variable */
647 if ((src->varkind == LOCALVAR) &&
648 (src->varnum == iptr->op1)) {
651 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
652 if (var->flags & INMEMORY) {
653 var_to_reg_flt(s1, src, REG_FTMP1);
654 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
657 var_to_reg_flt(s1, src, var->regoff);
658 M_FLTMOVE(s1, var->regoff);
663 /* pop/dup/swap operations ********************************************/
665 /* attention: double and longs are only one entry in CACAO ICMDs */
667 case ICMD_POP: /* ..., value ==> ... */
668 case ICMD_POP2: /* ..., value, value ==> ... */
671 case ICMD_DUP: /* ..., a ==> ..., a, a */
672 M_COPY(src, iptr->dst);
675 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
677 M_COPY(src, iptr->dst);
678 M_COPY(src->prev, iptr->dst->prev);
679 M_COPY(iptr->dst, iptr->dst->prev->prev);
682 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
684 M_COPY(src, iptr->dst);
685 M_COPY(src->prev, iptr->dst->prev);
686 M_COPY(src->prev->prev, iptr->dst->prev->prev);
687 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
690 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
692 M_COPY(src, iptr->dst);
693 M_COPY(src->prev, iptr->dst->prev);
696 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
698 M_COPY(src, iptr->dst);
699 M_COPY(src->prev, iptr->dst->prev);
700 M_COPY(src->prev->prev, iptr->dst->prev->prev);
701 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
702 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
705 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
707 M_COPY(src, iptr->dst);
708 M_COPY(src->prev, iptr->dst->prev);
709 M_COPY(src->prev->prev, iptr->dst->prev->prev);
710 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
711 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
712 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
715 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
717 M_COPY(src, iptr->dst->prev);
718 M_COPY(src->prev, iptr->dst);
722 /* integer operations *************************************************/
724 case ICMD_INEG: /* ..., value ==> ..., - value */
726 d = reg_of_var(rd, iptr->dst, REG_NULL);
727 if (iptr->dst->flags & INMEMORY) {
728 if (src->flags & INMEMORY) {
729 if (src->regoff == iptr->dst->regoff) {
730 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
733 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
734 x86_64_negl_reg(cd, REG_ITMP1);
735 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
739 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
740 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
744 if (src->flags & INMEMORY) {
745 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
746 x86_64_negl_reg(cd, d);
749 M_INTMOVE(src->regoff, iptr->dst->regoff);
750 x86_64_negl_reg(cd, iptr->dst->regoff);
755 case ICMD_LNEG: /* ..., value ==> ..., - value */
757 d = reg_of_var(rd, iptr->dst, REG_NULL);
758 if (iptr->dst->flags & INMEMORY) {
759 if (src->flags & INMEMORY) {
760 if (src->regoff == iptr->dst->regoff) {
761 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
764 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
765 x86_64_neg_reg(cd, REG_ITMP1);
766 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
770 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
771 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
775 if (src->flags & INMEMORY) {
776 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
777 x86_64_neg_reg(cd, iptr->dst->regoff);
780 M_INTMOVE(src->regoff, iptr->dst->regoff);
781 x86_64_neg_reg(cd, iptr->dst->regoff);
786 case ICMD_I2L: /* ..., value ==> ..., value */
788 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
789 if (src->flags & INMEMORY) {
790 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
793 x86_64_movslq_reg_reg(cd, src->regoff, d);
795 store_reg_to_var_int(iptr->dst, d);
798 case ICMD_L2I: /* ..., value ==> ..., value */
800 var_to_reg_int(s1, src, REG_ITMP1);
801 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
803 store_reg_to_var_int(iptr->dst, d);
806 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
808 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
809 if (src->flags & INMEMORY) {
810 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
813 x86_64_movsbq_reg_reg(cd, src->regoff, d);
815 store_reg_to_var_int(iptr->dst, d);
818 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
820 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
821 if (src->flags & INMEMORY) {
822 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
825 x86_64_movzwq_reg_reg(cd, src->regoff, d);
827 store_reg_to_var_int(iptr->dst, d);
830 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
832 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
833 if (src->flags & INMEMORY) {
834 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
837 x86_64_movswq_reg_reg(cd, src->regoff, d);
839 store_reg_to_var_int(iptr->dst, d);
843 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
845 d = reg_of_var(rd, iptr->dst, REG_NULL);
846 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
849 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
850 /* val.i = constant */
852 d = reg_of_var(rd, iptr->dst, REG_NULL);
853 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
856 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
858 d = reg_of_var(rd, iptr->dst, REG_NULL);
859 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
862 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
863 /* val.l = constant */
865 d = reg_of_var(rd, iptr->dst, REG_NULL);
866 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
869 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
871 d = reg_of_var(rd, iptr->dst, REG_NULL);
872 if (iptr->dst->flags & INMEMORY) {
873 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
874 if (src->prev->regoff == iptr->dst->regoff) {
875 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
876 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
879 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
880 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
881 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
884 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
885 M_INTMOVE(src->prev->regoff, REG_ITMP1);
886 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
887 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
889 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
890 if (src->prev->regoff == iptr->dst->regoff) {
891 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
894 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
895 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
896 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
900 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
901 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
905 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
906 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
907 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
909 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
910 M_INTMOVE(src->prev->regoff, d);
911 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
913 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
914 /* workaround for reg alloc */
915 if (src->regoff == iptr->dst->regoff) {
916 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
917 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
918 M_INTMOVE(REG_ITMP1, d);
921 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
922 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
926 /* workaround for reg alloc */
927 if (src->regoff == iptr->dst->regoff) {
928 M_INTMOVE(src->prev->regoff, REG_ITMP1);
929 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
930 M_INTMOVE(REG_ITMP1, d);
933 M_INTMOVE(src->prev->regoff, d);
934 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
940 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
941 /* val.i = constant */
943 d = reg_of_var(rd, iptr->dst, REG_NULL);
944 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
947 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
949 d = reg_of_var(rd, iptr->dst, REG_NULL);
950 if (iptr->dst->flags & INMEMORY) {
951 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
952 if (src->prev->regoff == iptr->dst->regoff) {
953 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
954 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
957 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
958 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
959 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
962 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
963 M_INTMOVE(src->prev->regoff, REG_ITMP1);
964 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
965 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
967 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
968 if (src->prev->regoff == iptr->dst->regoff) {
969 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
972 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
973 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
974 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
978 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
979 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
983 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
984 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
985 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
987 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
988 M_INTMOVE(src->prev->regoff, d);
989 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
991 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
992 /* workaround for reg alloc */
993 if (src->regoff == iptr->dst->regoff) {
994 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
995 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
996 M_INTMOVE(REG_ITMP1, d);
999 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1000 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1004 /* workaround for reg alloc */
1005 if (src->regoff == iptr->dst->regoff) {
1006 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1007 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1008 M_INTMOVE(REG_ITMP1, d);
1011 M_INTMOVE(src->prev->regoff, d);
1012 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1018 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1019 /* val.l = constant */
1021 d = reg_of_var(rd, iptr->dst, REG_NULL);
1022 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1025 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1027 d = reg_of_var(rd, iptr->dst, REG_NULL);
1028 if (iptr->dst->flags & INMEMORY) {
1029 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1030 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1031 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1032 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1034 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1035 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1036 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1037 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1039 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1040 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1041 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1042 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1045 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1046 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1047 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1051 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1052 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1053 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1055 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1056 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1057 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1059 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1060 M_INTMOVE(src->regoff, iptr->dst->regoff);
1061 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1064 if (src->regoff == iptr->dst->regoff) {
1065 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1068 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1069 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1075 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1076 /* val.i = constant */
1078 d = reg_of_var(rd, iptr->dst, REG_NULL);
1079 if (iptr->dst->flags & INMEMORY) {
1080 if (src->flags & INMEMORY) {
1081 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1082 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1085 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1086 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1090 if (src->flags & INMEMORY) {
1091 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1094 if (iptr->val.i == 2) {
1095 M_INTMOVE(src->regoff, iptr->dst->regoff);
1096 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1099 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
/* ICMD_LMUL: 64-bit signed multiply of the two topmost stack values.     */
/* The four branches per destination kind cover every in-memory /         */
/* in-register combination of the two operands; REG_ITMP1 is the scratch  */
/* accumulator when the result must be spilled to the stack slot.         */
1105 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1107 d = reg_of_var(rd, iptr->dst, REG_NULL);
1108 if (iptr->dst->flags & INMEMORY) {
/* destination lives on the stack: compute in REG_ITMP1, then store */
1109 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1110 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1111 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1112 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1114 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1115 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1116 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1117 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1119 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1120 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1121 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1122 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1125 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1126 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1127 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* destination is a register: multiply directly into it */
1131 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1132 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1133 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1135 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1136 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1137 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1139 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1140 M_INTMOVE(src->regoff, iptr->dst->regoff);
1141 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
/* both in registers: avoid a clobbering move when src == dst */
1144 if (src->regoff == iptr->dst->regoff) {
1145 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1148 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1149 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
/* ICMD_LMULCONST: 64-bit multiply by an immediate constant (iptr->val.l). */
/* x86-64 imul only takes a 32-bit immediate, so IS_IMM32 selects the      */
/* one-instruction form; otherwise the constant is materialized with a     */
/* 64-bit mov first.                                                       */
1155 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1156 /* val.l = constant */
1158 d = reg_of_var(rd, iptr->dst, REG_NULL);
1159 if (iptr->dst->flags & INMEMORY) {
1160 if (src->flags & INMEMORY) {
1161 if (IS_IMM32(iptr->val.l)) {
1162 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1165 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1166 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1168 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1171 if (IS_IMM32(iptr->val.l)) {
1172 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1175 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1176 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1178 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1182 if (src->flags & INMEMORY) {
1183 if (IS_IMM32(iptr->val.l)) {
1184 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1187 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1188 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
/* strength reduction: *2 becomes an add of the value to itself */
1192 /* should match in many cases */
1193 if (iptr->val.l == 2) {
1194 M_INTMOVE(src->regoff, iptr->dst->regoff);
1195 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1198 if (IS_IMM32(iptr->val.l)) {
1199 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1202 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1203 M_INTMOVE(src->regoff, iptr->dst->regoff);
1204 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
/* ICMD_IDIV: 32-bit signed division. Dividend goes to RAX (idivl's       */
/* implicit operand), divisor to REG_ITMP3. The CMP/jcc pair guards the   */
/* JVM corner case INT_MIN / -1, whose hardware result would trap; the    */
/* jump distances are hand-counted instruction byte lengths, so the       */
/* emitted sequence must not change size.                                 */
1211 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1213 d = reg_of_var(rd, iptr->dst, REG_NULL);
1214 if (src->prev->flags & INMEMORY) {
1215 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1218 M_INTMOVE(src->prev->regoff, RAX);
1221 if (src->flags & INMEMORY) {
1222 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1225 M_INTMOVE(src->regoff, REG_ITMP3);
1229 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1230 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1231 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1232 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1234 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1236 x86_64_idivl_reg(cd, REG_ITMP3);
/* quotient is in RAX; RDX (clobbered by idiv) is restored afterwards */
1238 if (iptr->dst->flags & INMEMORY) {
1239 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1240 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1243 M_INTMOVE(RAX, iptr->dst->regoff);
1245 if (iptr->dst->regoff != RDX) {
1246 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
/* ICMD_IREM: 32-bit signed remainder. Same setup as ICMD_IDIV, but the   */
/* result is taken from RDX, and for the INT_MIN % -1 corner case RDX is  */
/* explicitly zeroed (the JVM defines that remainder as 0). Jump offsets  */
/* again encode exact instruction byte counts.                            */
1251 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1252 d = reg_of_var(rd, iptr->dst, REG_NULL);
1253 if (src->prev->flags & INMEMORY) {
1254 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1257 M_INTMOVE(src->prev->regoff, RAX);
1260 if (src->flags & INMEMORY) {
1261 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1264 M_INTMOVE(src->regoff, REG_ITMP3);
1268 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1270 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1271 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1274 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1275 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1276 x86_64_jcc(cd, X86_64_CC_E, 1 + 3); /* 6 bytes */
1279 x86_64_idivl_reg(cd, REG_ITMP3);
/* remainder is in RDX; restore caller's RDX once it has been consumed */
1281 if (iptr->dst->flags & INMEMORY) {
1282 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1283 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1286 M_INTMOVE(RDX, iptr->dst->regoff);
1288 if (iptr->dst->regoff != RDX) {
1289 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
/* ICMD_IDIVPOW2 / ICMD_IREMPOW2: division/remainder by a power of two,   */
/* lowered to an arithmetic shift / mask. The lea + cmovcc pair biases    */
/* negative dividends so the shift rounds toward zero as Java requires    */
/* (plain SAR alone would round toward negative infinity).                */
/* NOTE(review): for IDIVPOW2 val.i is the shift amount; for IREMPOW2     */
/* val.i appears to be the mask (divisor-1) — confirm against the ICMD    */
/* optimizer that produces these opcodes.                                 */
1294 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1295 /* val.i = constant */
1297 var_to_reg_int(s1, src, REG_ITMP1);
1298 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1299 M_INTMOVE(s1, REG_ITMP1);
1300 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1301 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1302 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1303 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1304 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1305 store_reg_to_var_int(iptr->dst, d);
1308 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1309 /* val.i = constant */
1311 var_to_reg_int(s1, src, REG_ITMP1);
1312 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1313 M_INTMOVE(s1, REG_ITMP1);
1314 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1315 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1316 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1317 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1318 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1319 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1320 store_reg_to_var_int(iptr->dst, d);
/* ICMD_LDIV / ICMD_LREM: 64-bit signed division and remainder.           */
/* Structure mirrors the 32-bit IDIV/IREM cases, with the LONG_MIN        */
/* corner-case constant (0x8000...0) materialized via mov since it does   */
/* not fit a 32-bit immediate. Dividend ends up in RAX via REG_ITMP1,     */
/* divisor in REG_ITMP3; RDX is saved/restored around idiv because it is  */
/* an argument register in the ABI. Jump distances are byte-exact.        */
1324 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1326 d = reg_of_var(rd, iptr->dst, REG_NULL);
1327 if (src->prev->flags & INMEMORY) {
1328 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1331 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1334 if (src->flags & INMEMORY) {
1335 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1338 M_INTMOVE(src->regoff, REG_ITMP3);
1342 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1343 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1344 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1345 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1346 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1348 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1350 x86_64_idiv_reg(cd, REG_ITMP3);
1352 if (iptr->dst->flags & INMEMORY) {
1353 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1354 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1357 M_INTMOVE(RAX, iptr->dst->regoff);
1359 if (iptr->dst->regoff != RDX) {
1360 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
/* LREM: identical guard, but LONG_MIN % -1 yields 0 (RDX zeroed) and the */
/* remainder is read from RDX instead of RAX.                             */
1365 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1367 d = reg_of_var(rd, iptr->dst, REG_NULL);
1368 if (src->prev->flags & INMEMORY) {
1369 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1372 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1375 if (src->flags & INMEMORY) {
1376 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1379 M_INTMOVE(src->regoff, REG_ITMP3);
1383 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1385 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1386 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1387 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1390 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1391 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1392 x86_64_jcc(cd, X86_64_CC_E, 2 + 3); /* 6 bytes */
1395 x86_64_idiv_reg(cd, REG_ITMP3);
1397 if (iptr->dst->flags & INMEMORY) {
1398 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1399 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1402 M_INTMOVE(RDX, iptr->dst->regoff);
1404 if (iptr->dst->regoff != RDX) {
1405 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
/* ICMD_LDIVPOW2 / ICMD_LREMPOW2: 64-bit power-of-two division/remainder  */
/* via biased arithmetic shift / mask, same rounding trick as the 32-bit  */
/* variants but using the quad-word instruction forms.                    */
/* NOTE(review): the bias uses (1 << iptr->val.i) - 1 computed in int     */
/* arithmetic — for shift counts >= 31 this would overflow/misbehave;     */
/* confirm the optimizer never emits such counts for LDIVPOW2.            */
1410 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1411 /* val.i = constant */
1413 var_to_reg_int(s1, src, REG_ITMP1);
1414 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1415 M_INTMOVE(s1, REG_ITMP1);
1416 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1417 x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1418 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1419 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1420 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1421 store_reg_to_var_int(iptr->dst, d);
1424 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1425 /* val.l = constant */
1427 var_to_reg_int(s1, src, REG_ITMP1);
1428 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1429 M_INTMOVE(s1, REG_ITMP1);
1430 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1431 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1432 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1433 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1434 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1435 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1436 store_reg_to_var_int(iptr->dst, d);
/* Shift opcodes: all delegate to the shared emit helpers. The *l forms   */
/* (ishift) operate on 32-bit values, the plain forms (lshift) on 64-bit. */
/* SHL = logical left, SAR = arithmetic right (sign-propagating, Java     */
/* >>), SHR = logical right (zero-fill, Java >>>).                        */
1439 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1441 d = reg_of_var(rd, iptr->dst, REG_NULL);
1442 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1445 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1446 /* val.i = constant */
1448 d = reg_of_var(rd, iptr->dst, REG_NULL);
1449 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1452 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1454 d = reg_of_var(rd, iptr->dst, REG_NULL);
1455 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1458 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1459 /* val.i = constant */
1461 d = reg_of_var(rd, iptr->dst, REG_NULL);
1462 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1465 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1467 d = reg_of_var(rd, iptr->dst, REG_NULL);
1468 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1471 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1472 /* val.i = constant */
1474 d = reg_of_var(rd, iptr->dst, REG_NULL);
1475 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1478 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1480 d = reg_of_var(rd, iptr->dst, REG_NULL);
1481 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1484 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1485 /* val.i = constant */
1487 d = reg_of_var(rd, iptr->dst, REG_NULL);
1488 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1491 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1493 d = reg_of_var(rd, iptr->dst, REG_NULL);
1494 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1497 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1498 /* val.i = constant */
1500 d = reg_of_var(rd, iptr->dst, REG_NULL);
1501 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1504 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1506 d = reg_of_var(rd, iptr->dst, REG_NULL);
1507 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1510 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1511 /* val.l = constant */
1513 d = reg_of_var(rd, iptr->dst, REG_NULL);
1514 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
/* Bitwise opcodes: delegated to shared ALU emit helpers. ialu/ialuconst  */
/* are the 32-bit forms, lalu/laluconst the 64-bit forms; the *CONST      */
/* variants take the immediate from iptr->val.                            */
1517 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1519 d = reg_of_var(rd, iptr->dst, REG_NULL);
1520 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1523 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1524 /* val.i = constant */
1526 d = reg_of_var(rd, iptr->dst, REG_NULL);
1527 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1530 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1532 d = reg_of_var(rd, iptr->dst, REG_NULL);
1533 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1536 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1537 /* val.l = constant */
1539 d = reg_of_var(rd, iptr->dst, REG_NULL);
1540 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1543 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1545 d = reg_of_var(rd, iptr->dst, REG_NULL);
1546 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1549 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1550 /* val.i = constant */
1552 d = reg_of_var(rd, iptr->dst, REG_NULL);
1553 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1556 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1558 d = reg_of_var(rd, iptr->dst, REG_NULL);
1559 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1562 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1563 /* val.l = constant */
1565 d = reg_of_var(rd, iptr->dst, REG_NULL);
1566 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1569 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1571 d = reg_of_var(rd, iptr->dst, REG_NULL);
1572 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1575 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1576 /* val.i = constant */
1578 d = reg_of_var(rd, iptr->dst, REG_NULL);
1579 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1582 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1584 d = reg_of_var(rd, iptr->dst, REG_NULL);
1585 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1588 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1589 /* val.l = constant */
1591 d = reg_of_var(rd, iptr->dst, REG_NULL);
1592 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
/* ICMD_IINC: add a constant to a local int variable in place.            */
/* +1 / -1 are special-cased to inc/dec (the comment below records this   */
/* was measured to be faster than add). The variable lives either in its  */
/* stack slot (membase forms) or in a register (reg forms); here d is     */
/* the local's location, not a freshly allocated destination.             */
1596 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1597 /* op1 = variable, val.i = constant */
1599 /* using inc and dec is definitely faster than add -- tested */
1602 var = &(rd->locals[iptr->op1][TYPE_INT]);
1604 if (var->flags & INMEMORY) {
1605 if (iptr->val.i == 1) {
1606 x86_64_incl_membase(cd, REG_SP, d * 8);
1608 } else if (iptr->val.i == -1) {
1609 x86_64_decl_membase(cd, REG_SP, d * 8);
1612 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1616 if (iptr->val.i == 1) {
1617 x86_64_incl_reg(cd, d);
1619 } else if (iptr->val.i == -1) {
1620 x86_64_decl_reg(cd, d);
1623 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1629 /* floating operations ************************************************/
/* ICMD_FNEG / ICMD_DNEG: negate by XOR-ing the sign bit. The sign-bit    */
/* mask constant is placed in the data segment (dseg_adds4/adds8) and     */
/* loaded RIP-relative; the "+ 9" is the byte length of the load          */
/* instruction itself, so the displacement is computed from the address   */
/* of the NEXT instruction as x86-64 RIP addressing requires.             */
1631 case ICMD_FNEG: /* ..., value ==> ..., - value */
1633 var_to_reg_flt(s1, src, REG_FTMP1);
1634 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1635 a = dseg_adds4(cd, 0x80000000);
1637 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1638 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1639 store_reg_to_var_flt(iptr->dst, d);
1642 case ICMD_DNEG: /* ..., value ==> ..., - value */
1644 var_to_reg_flt(s1, src, REG_FTMP1);
1645 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1646 a = dseg_adds8(cd, 0x8000000000000000);
1648 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1649 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1650 store_reg_to_var_flt(iptr->dst, d);
/* SSE scalar float/double arithmetic. For the commutative ops (add,      */
/* mul) the s1==d / s2==d checks pick whichever operand already sits in   */
/* the destination register to save a move; sub and div are not           */
/* commutative, so the second operand is parked in REG_FTMP2 first.       */
1653 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1655 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1656 var_to_reg_flt(s2, src, REG_FTMP2);
1657 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1659 x86_64_addss_reg_reg(cd, s2, d);
1660 } else if (s2 == d) {
1661 x86_64_addss_reg_reg(cd, s1, d);
1664 x86_64_addss_reg_reg(cd, s2, d);
1666 store_reg_to_var_flt(iptr->dst, d);
1669 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1671 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1672 var_to_reg_flt(s2, src, REG_FTMP2);
1673 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1675 x86_64_addsd_reg_reg(cd, s2, d);
1676 } else if (s2 == d) {
1677 x86_64_addsd_reg_reg(cd, s1, d);
1680 x86_64_addsd_reg_reg(cd, s2, d);
1682 store_reg_to_var_flt(iptr->dst, d);
1685 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1687 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1688 var_to_reg_flt(s2, src, REG_FTMP2);
1689 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1691 M_FLTMOVE(s2, REG_FTMP2);
1695 x86_64_subss_reg_reg(cd, s2, d);
1696 store_reg_to_var_flt(iptr->dst, d);
1699 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1701 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1702 var_to_reg_flt(s2, src, REG_FTMP2);
1703 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1705 M_FLTMOVE(s2, REG_FTMP2);
1709 x86_64_subsd_reg_reg(cd, s2, d);
1710 store_reg_to_var_flt(iptr->dst, d);
1713 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1715 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1716 var_to_reg_flt(s2, src, REG_FTMP2);
1717 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1719 x86_64_mulss_reg_reg(cd, s2, d);
1720 } else if (s2 == d) {
1721 x86_64_mulss_reg_reg(cd, s1, d);
1724 x86_64_mulss_reg_reg(cd, s2, d);
1726 store_reg_to_var_flt(iptr->dst, d);
1729 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1731 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1732 var_to_reg_flt(s2, src, REG_FTMP2);
1733 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1735 x86_64_mulsd_reg_reg(cd, s2, d);
1736 } else if (s2 == d) {
1737 x86_64_mulsd_reg_reg(cd, s1, d);
1740 x86_64_mulsd_reg_reg(cd, s2, d);
1742 store_reg_to_var_flt(iptr->dst, d);
1745 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1747 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1748 var_to_reg_flt(s2, src, REG_FTMP2);
1749 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1751 M_FLTMOVE(s2, REG_FTMP2);
1755 x86_64_divss_reg_reg(cd, s2, d);
1756 store_reg_to_var_flt(iptr->dst, d);
1759 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1761 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1762 var_to_reg_flt(s2, src, REG_FTMP2);
1763 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1765 M_FLTMOVE(s2, REG_FTMP2);
1769 x86_64_divsd_reg_reg(cd, s2, d);
1770 store_reg_to_var_flt(iptr->dst, d);
/* Numeric conversion opcodes. int/long -> float/double use the SSE       */
/* cvtsi2* family (the q-suffixed forms take a 64-bit source).            */
1773 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1775 var_to_reg_int(s1, src, REG_ITMP1);
1776 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1777 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1778 store_reg_to_var_flt(iptr->dst, d);
1781 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1783 var_to_reg_int(s1, src, REG_ITMP1);
1784 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1785 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1786 store_reg_to_var_flt(iptr->dst, d);
1789 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1791 var_to_reg_int(s1, src, REG_ITMP1);
1792 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1793 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1794 store_reg_to_var_flt(iptr->dst, d);
1797 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1799 var_to_reg_int(s1, src, REG_ITMP1);
1800 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1801 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1802 store_reg_to_var_flt(iptr->dst, d);
/* float/double -> int/long: cvttss2si/cvttsd2si truncate toward zero,    */
/* but on overflow/NaN they return the "integer indefinite" value         */
/* (0x80000000 / LONG_MIN). When that sentinel is seen, control falls     */
/* through to an asm_builtin_* helper that produces the exact saturation  */
/* result the JVM spec requires. The computation of 'a' sums hand-counted */
/* instruction byte lengths to skip the slow path.                        */
1805 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1807 var_to_reg_flt(s1, src, REG_FTMP1);
1808 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1809 x86_64_cvttss2si_reg_reg(cd, s1, d);
1810 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1811 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1812 x86_64_jcc(cd, X86_64_CC_NE, a);
1813 M_FLTMOVE(s1, REG_FTMP1);
1814 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1815 x86_64_call_reg(cd, REG_ITMP2);
1816 M_INTMOVE(REG_RESULT, d);
1817 store_reg_to_var_int(iptr->dst, d);
1820 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1822 var_to_reg_flt(s1, src, REG_FTMP1);
1823 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1824 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1825 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1826 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1827 x86_64_jcc(cd, X86_64_CC_NE, a);
1828 M_FLTMOVE(s1, REG_FTMP1);
1829 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1830 x86_64_call_reg(cd, REG_ITMP2);
1831 M_INTMOVE(REG_RESULT, d);
1832 store_reg_to_var_int(iptr->dst, d);
1835 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1837 var_to_reg_flt(s1, src, REG_FTMP1);
1838 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1839 x86_64_cvttss2siq_reg_reg(cd, s1, d);
1840 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1841 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1842 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1843 x86_64_jcc(cd, X86_64_CC_NE, a);
1844 M_FLTMOVE(s1, REG_FTMP1);
1845 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
1846 x86_64_call_reg(cd, REG_ITMP2);
1847 M_INTMOVE(REG_RESULT, d);
1848 store_reg_to_var_int(iptr->dst, d);
1851 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1853 var_to_reg_flt(s1, src, REG_FTMP1);
1854 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1855 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1856 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1857 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1858 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1859 x86_64_jcc(cd, X86_64_CC_NE, a);
1860 M_FLTMOVE(s1, REG_FTMP1);
1861 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
1862 x86_64_call_reg(cd, REG_ITMP2);
1863 M_INTMOVE(REG_RESULT, d);
1864 store_reg_to_var_int(iptr->dst, d);
/* float <-> double widening/narrowing: single SSE instruction each */
1867 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1869 var_to_reg_flt(s1, src, REG_FTMP1);
1870 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1871 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1872 store_reg_to_var_flt(iptr->dst, d);
1875 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1877 var_to_reg_flt(s1, src, REG_FTMP1);
1878 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1879 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1880 store_reg_to_var_flt(iptr->dst, d);
/* Float/double comparison opcodes. Result register is seeded with 0      */
/* (equal), then cmovcc overwrites it with 1 (below) or -1 (above) from   */
/* the ucomis* flags. The parity flag (CC_P) is set when either operand   */
/* is NaN (unordered); FCMPL/DCMPL map unordered to -1 (GT side) and      */
/* FCMPG/DCMPG map it to 1 (LT side), matching the JVM fcmpl/fcmpg pair.  */
/* NOTE(review): the stack-transition comment "< => 1, > => -1" reads as  */
/* the reverse of the JVM's fcmp result convention — verify against how   */
/* the branch opcodes consume this value before trusting the signs.       */
1883 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1884 /* == => 0, < => 1, > => -1 */
1886 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1887 var_to_reg_flt(s2, src, REG_FTMP2);
1888 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1889 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1890 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1891 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1892 x86_64_ucomiss_reg_reg(cd, s1, s2);
1893 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1894 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1895 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1896 store_reg_to_var_int(iptr->dst, d);
1899 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1900 /* == => 0, < => 1, > => -1 */
1902 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1903 var_to_reg_flt(s2, src, REG_FTMP2);
1904 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1905 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1906 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1907 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1908 x86_64_ucomiss_reg_reg(cd, s1, s2);
1909 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1910 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1911 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1912 store_reg_to_var_int(iptr->dst, d);
1915 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1916 /* == => 0, < => 1, > => -1 */
1918 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1919 var_to_reg_flt(s2, src, REG_FTMP2);
1920 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1921 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1922 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1923 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1924 x86_64_ucomisd_reg_reg(cd, s1, s2);
1925 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1926 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1927 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1928 store_reg_to_var_int(iptr->dst, d);
1931 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1932 /* == => 0, < => 1, > => -1 */
1934 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1935 var_to_reg_flt(s2, src, REG_FTMP2);
1936 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1937 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1938 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1939 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1940 x86_64_ucomisd_reg_reg(cd, s1, s2);
1941 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1942 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1943 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1944 store_reg_to_var_int(iptr->dst, d);
1948 /* memory operations **************************************************/
/* ARRAYLENGTH and the array-load opcodes. Each load is a single scaled   */
/* memindex access: base = array ref, index = element index, scale = log2 */
/* of the element size (3=8B, 2=4B, 1=2B, 0=1B). iptr->op1 == 0 means the */
/* verifier could not prove the reference non-null, so a null check is    */
/* emitted. char loads zero-extend (movzwq), short/byte loads sign-extend */
/* (movswq/movsbq), matching Java's element-type semantics.               */
1950 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
1952 var_to_reg_int(s1, src, REG_ITMP1);
1953 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1954 gen_nullptr_check(s1);
1955 x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
1956 store_reg_to_var_int(iptr->dst, d);
1959 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
1961 var_to_reg_int(s1, src->prev, REG_ITMP1);
1962 var_to_reg_int(s2, src, REG_ITMP2);
1963 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1964 if (iptr->op1 == 0) {
1965 gen_nullptr_check(s1);
1968 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
1969 store_reg_to_var_int(iptr->dst, d);
1972 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1974 var_to_reg_int(s1, src->prev, REG_ITMP1);
1975 var_to_reg_int(s2, src, REG_ITMP2);
1976 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1977 if (iptr->op1 == 0) {
1978 gen_nullptr_check(s1);
1981 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
1982 store_reg_to_var_int(iptr->dst, d);
1985 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1987 var_to_reg_int(s1, src->prev, REG_ITMP1);
1988 var_to_reg_int(s2, src, REG_ITMP2);
1989 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1990 if (iptr->op1 == 0) {
1991 gen_nullptr_check(s1);
1994 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
1995 store_reg_to_var_int(iptr->dst, d);
1998 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2000 var_to_reg_int(s1, src->prev, REG_ITMP1);
2001 var_to_reg_int(s2, src, REG_ITMP2);
2002 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2003 if (iptr->op1 == 0) {
2004 gen_nullptr_check(s1);
2007 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2008 store_reg_to_var_flt(iptr->dst, d);
2011 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2013 var_to_reg_int(s1, src->prev, REG_ITMP1);
2014 var_to_reg_int(s2, src, REG_ITMP2);
2015 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2016 if (iptr->op1 == 0) {
2017 gen_nullptr_check(s1);
2020 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2021 store_reg_to_var_flt(iptr->dst, d);
2024 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2026 var_to_reg_int(s1, src->prev, REG_ITMP1);
2027 var_to_reg_int(s2, src, REG_ITMP2);
2028 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2029 if (iptr->op1 == 0) {
2030 gen_nullptr_check(s1);
2033 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2034 store_reg_to_var_int(iptr->dst, d);
2037 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2039 var_to_reg_int(s1, src->prev, REG_ITMP1);
2040 var_to_reg_int(s2, src, REG_ITMP2);
2041 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2042 if (iptr->op1 == 0) {
2043 gen_nullptr_check(s1);
2046 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2047 store_reg_to_var_int(iptr->dst, d);
2050 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2052 var_to_reg_int(s1, src->prev, REG_ITMP1);
2053 var_to_reg_int(s2, src, REG_ITMP2);
2054 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2055 if (iptr->op1 == 0) {
2056 gen_nullptr_check(s1);
2059 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2060 store_reg_to_var_int(iptr->dst, d);
/* Array-store opcodes. Stack layout is (arrayref, index, value), so the  */
/* reference is src->prev->prev and the index src->prev. Same scaled      */
/* memindex addressing and conditional null check as the loads. char and  */
/* short both store 16 bits (movw); byte stores 8 (movb).                 */
/* NOTE(review): AASTORE here emits no dynamic array-store type check —   */
/* presumably handled elsewhere (or a known limitation of this version);  */
/* confirm before relying on it.                                          */
2064 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2066 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2067 var_to_reg_int(s2, src->prev, REG_ITMP2);
2068 if (iptr->op1 == 0) {
2069 gen_nullptr_check(s1);
2072 var_to_reg_int(s3, src, REG_ITMP3);
2073 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2076 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2078 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2079 var_to_reg_int(s2, src->prev, REG_ITMP2);
2080 if (iptr->op1 == 0) {
2081 gen_nullptr_check(s1);
2084 var_to_reg_int(s3, src, REG_ITMP3);
2085 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2088 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2090 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2091 var_to_reg_int(s2, src->prev, REG_ITMP2);
2092 if (iptr->op1 == 0) {
2093 gen_nullptr_check(s1);
2096 var_to_reg_int(s3, src, REG_ITMP3);
2097 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2100 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2102 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2103 var_to_reg_int(s2, src->prev, REG_ITMP2);
2104 if (iptr->op1 == 0) {
2105 gen_nullptr_check(s1);
2108 var_to_reg_flt(s3, src, REG_FTMP3);
2109 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2112 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2114 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2115 var_to_reg_int(s2, src->prev, REG_ITMP2);
2116 if (iptr->op1 == 0) {
2117 gen_nullptr_check(s1);
2120 var_to_reg_flt(s3, src, REG_FTMP3);
2121 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2124 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2126 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2127 var_to_reg_int(s2, src->prev, REG_ITMP2);
2128 if (iptr->op1 == 0) {
2129 gen_nullptr_check(s1);
2132 var_to_reg_int(s3, src, REG_ITMP3);
2133 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2136 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2138 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2139 var_to_reg_int(s2, src->prev, REG_ITMP2);
2140 if (iptr->op1 == 0) {
2141 gen_nullptr_check(s1);
2144 var_to_reg_int(s3, src, REG_ITMP3);
2145 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2148 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2150 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2151 var_to_reg_int(s2, src->prev, REG_ITMP2);
2152 if (iptr->op1 == 0) {
2153 gen_nullptr_check(s1);
2156 var_to_reg_int(s3, src, REG_ITMP3);
2157 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
/* Store-constant-into-array opcodes: the value is an immediate from      */
/* iptr->val, so no value register is needed. For LASTORECONST a 64-bit   */
/* immediate that fits 32 bits goes out as one store; otherwise it is     */
/* split into two 32-bit halves at offsets +0 and +4.                     */
/* AASTORECONST only ever stores null (immediate 0), which needs no       */
/* array-store type check.                                                */
2160 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2162 var_to_reg_int(s1, src->prev, REG_ITMP1);
2163 var_to_reg_int(s2, src, REG_ITMP2);
2164 if (iptr->op1 == 0) {
2165 gen_nullptr_check(s1);
2168 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2171 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2173 var_to_reg_int(s1, src->prev, REG_ITMP1);
2174 var_to_reg_int(s2, src, REG_ITMP2);
2175 if (iptr->op1 == 0) {
2176 gen_nullptr_check(s1);
2180 if (IS_IMM32(iptr->val.l)) {
2181 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2184 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2185 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2189 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2191 var_to_reg_int(s1, src->prev, REG_ITMP1);
2192 var_to_reg_int(s2, src, REG_ITMP2);
2193 if (iptr->op1 == 0) {
2194 gen_nullptr_check(s1);
2197 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2200 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2202 var_to_reg_int(s1, src->prev, REG_ITMP1);
2203 var_to_reg_int(s2, src, REG_ITMP2);
2204 if (iptr->op1 == 0) {
2205 gen_nullptr_check(s1);
2208 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2211 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2213 var_to_reg_int(s1, src->prev, REG_ITMP1);
2214 var_to_reg_int(s2, src, REG_ITMP2);
2215 if (iptr->op1 == 0) {
2216 gen_nullptr_check(s1);
2219 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2222 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2224 var_to_reg_int(s1, src->prev, REG_ITMP1);
2225 var_to_reg_int(s2, src, REG_ITMP2);
2226 if (iptr->op1 == 0) {
2227 gen_nullptr_check(s1);
2230 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
/* Static field access: GETSTATIC / PUTSTATIC / PUTSTATICCONST.
   Common scheme: the field-value address is placed in the data segment
   and loaded RIP-relative into a temp register, then the value is
   moved through that register.  Unresolved fields and uninitialized
   classes are handled by emitting patcher references that the runtime
   fixes up lazily; the extra NOPs keep the disassembly readable when
   showdisassemble is on.
   NOTE(review): excerpt is non-contiguous (case labels inside the
   op1-type switches and several braces are elided). */
2234 case ICMD_GETSTATIC: /* ... ==> ..., value */
2235 /* op1 = type, val.a = field address */
/* Unresolved field: register a get/putstatic patcher at the current
   code position (guarded by an elided condition). */
2238 codegen_addpatchref(cd, cd->mcodeptr,
2239 PATCHER_get_putstatic,
2240 (unresolved_field *) iptr->target);
2242 if (showdisassemble) {
2243 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2249 fieldinfo *fi = iptr->val.a;
/* Resolved field but class not yet initialized: a clinit patcher
   triggers static initialization on first execution. */
2251 if (!fi->class->initialized) {
2252 codegen_addpatchref(cd, cd->mcodeptr,
2253 PATCHER_clinit, fi->class);
2255 if (showdisassemble) {
2256 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2260 a = (ptrint) &(fi->value);
2263 /* This approach is much faster than moving the field address */
2264 /* inline into a register. */
/* Load the field address from the data segment, RIP-relative:
   the displacement is (dseg slot) minus (address of next insn). */
2265 a = dseg_addaddress(cd, a);
2266 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP2);
/* Dispatch on field type (case labels elided): 32-bit int load,
   64-bit long/ref load, float load, double load. */
2267 switch (iptr->op1) {
2269 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2270 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2271 store_reg_to_var_int(iptr->dst, d);
2275 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2276 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2277 store_reg_to_var_int(iptr->dst, d);
2280 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2281 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2282 store_reg_to_var_flt(iptr->dst, d);
2285 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2286 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2287 store_reg_to_var_flt(iptr->dst, d);
/* PUTSTATIC mirrors GETSTATIC but moves from the source variable
   into the field through REG_ITMP2. */
2292 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2293 /* op1 = type, val.a = field address */
2296 codegen_addpatchref(cd, cd->mcodeptr,
2297 PATCHER_get_putstatic,
2298 (unresolved_field *) iptr->target);
2300 if (showdisassemble) {
2301 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2307 fieldinfo *fi = iptr->val.a;
2309 if (!fi->class->initialized) {
2310 codegen_addpatchref(cd, cd->mcodeptr,
2311 PATCHER_clinit, fi->class);
2313 if (showdisassemble) {
2314 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2318 a = (ptrint) &(fi->value);
2321 /* This approach is much faster than moving the field address */
2322 /* inline into a register. */
2323 a = dseg_addaddress(cd, a);
2324 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP2);
2325 switch (iptr->op1) {
2327 var_to_reg_int(s2, src, REG_ITMP1);
2328 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2332 var_to_reg_int(s2, src, REG_ITMP1);
2333 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2336 var_to_reg_flt(s2, src, REG_FTMP1);
2337 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2340 var_to_reg_flt(s2, src, REG_FTMP1);
2341 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
/* PUTSTATICCONST: the constant value lives in the current
   instruction, the field reference in the following NOP slot
   (iptr[1]). */
2346 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2347 /* val = value (in current instruction) */
2348 /* op1 = type, val.a = field address (in */
2349 /* following NOP) */
2351 if (!iptr[1].val.a) {
2352 codegen_addpatchref(cd, cd->mcodeptr,
2353 PATCHER_get_putstatic,
2354 (unresolved_field *) iptr[1].target);
2356 if (showdisassemble) {
2357 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2363 fieldinfo *fi = iptr[1].val.a;
2365 if (!fi->class->initialized) {
2366 codegen_addpatchref(cd, cd->mcodeptr,
2367 PATCHER_clinit, fi->class);
2369 if (showdisassemble) {
2370 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2374 a = (ptrint) &(fi->value);
2377 /* This approach is much faster than moving the field address */
2378 /* inline into a register. */
2379 a = dseg_addaddress(cd, a);
2380 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP1);
2381 switch (iptr->op1) {
2384 x86_64_movl_imm_membase(cd, iptr->val.i, REG_ITMP1, 0);
/* 64-bit constant: single mov if it fits in a sign-extended imm32,
   otherwise two 32-bit halves at offsets 0 and 4. */
2389 if (IS_IMM32(iptr->val.l)) {
2390 x86_64_mov_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2392 x86_64_movl_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2393 x86_64_movl_imm_membase(cd, iptr->val.l >> 32, REG_ITMP1, 4);
/* Instance field access: GETFIELD / PUTFIELD / PUTFIELDCONST.
   The object reference is null-checked, then the field is addressed
   base+offset.  The *membase32* emitters force a 32-bit displacement
   encoding so an unresolved field offset can be patched in later
   without changing the instruction length.
   NOTE(review): excerpt is non-contiguous; type-switch case labels
   and closing braces are elided. */
2399 case ICMD_GETFIELD: /* ... ==> ..., value */
2400 /* op1 = type, val.i = field offset */
2402 var_to_reg_int(s1, src, REG_ITMP1);
2403 gen_nullptr_check(s1);
/* Unresolved field: patcher fills in the real offset (guard elided). */
2406 codegen_addpatchref(cd, cd->mcodeptr,
2407 PATCHER_get_putfield,
2408 (unresolved_field *) iptr->target);
2410 if (showdisassemble) {
2411 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2417 a = ((fieldinfo *) (iptr->val.a))->offset;
2420 switch (iptr->op1) {
2422 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2423 x86_64_movl_membase32_reg(cd, s1, a, d);
2424 store_reg_to_var_int(iptr->dst, d);
2428 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2429 x86_64_mov_membase32_reg(cd, s1, a, d);
2430 store_reg_to_var_int(iptr->dst, d);
2433 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2434 x86_64_movss_membase32_reg(cd, s1, a, d);
2435 store_reg_to_var_flt(iptr->dst, d);
2438 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2439 x86_64_movsd_membase32_reg(cd, s1, a, d);
2440 store_reg_to_var_flt(iptr->dst, d);
2445 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2446 /* op1 = type, val.i = field offset */
2448 var_to_reg_int(s1, src->prev, REG_ITMP1);
2449 gen_nullptr_check(s1);
/* Load the value into an int or float temp depending on type. */
2450 if (IS_INT_LNG_TYPE(iptr->op1)) {
2451 var_to_reg_int(s2, src, REG_ITMP2);
2453 var_to_reg_flt(s2, src, REG_FTMP2);
2457 codegen_addpatchref(cd, cd->mcodeptr,
2458 PATCHER_get_putfield,
2459 (unresolved_field *) iptr->target);
2461 if (showdisassemble) {
2462 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2468 a = ((fieldinfo *) (iptr->val.a))->offset;
2471 switch (iptr->op1) {
2473 x86_64_movl_reg_membase32(cd, s2, s1, a);
2477 x86_64_mov_reg_membase32(cd, s2, s1, a);
2480 x86_64_movss_reg_membase32(cd, s2, s1, a);
2483 x86_64_movsd_reg_membase32(cd, s2, s1, a);
2488 case ICMD_PUTFIELDCONST: /* ..., objectref, value ==> ... */
2489 /* val = value (in current instruction) */
2490 /* op1 = type, val.a = field address (in */
2491 /* following NOP) */
2493 var_to_reg_int(s1, src, REG_ITMP1);
2494 gen_nullptr_check(s1);
2496 if (!iptr[1].val.a) {
2497 codegen_addpatchref(cd, cd->mcodeptr,
2498 PATCHER_putfieldconst,
2499 (unresolved_field *) iptr[1].target);
2501 if (showdisassemble) {
2502 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2508 a = ((fieldinfo *) (iptr[1].val.a))->offset;
2511 switch (iptr->op1) {
2514 x86_64_movl_imm_membase32(cd, iptr->val.i, s1, a);
2519 /* We can only optimize the move, if the class is resolved. */
2520 /* Otherwise we don't know what to patch. */
2521 if (iptr[1].val.a && IS_IMM32(iptr->val.l)) {
2522 x86_64_mov_imm_membase32(cd, iptr->val.l, s1, a);
2524 x86_64_movl_imm_membase32(cd, iptr->val.l, s1, a);
2525 x86_64_movl_imm_membase32(cd, iptr->val.l >> 32, s1, a + 4);
/* Branch operations.  Forward branch targets are emitted with a zero
   displacement and recorded via codegen_addreference() for later
   back-patching once the target block address is known. */
2532 /* branch operations **************************************************/
2534 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2536 var_to_reg_int(s1, src, REG_ITMP1);
2537 M_INTMOVE(s1, REG_ITMP1_XPTR);
/* call imm 0 / pop: trick to load the current PC into
   REG_ITMP2_XPC for the exception handler. */
2539 x86_64_call_imm(cd, 0); /* passing exception pointer */
2540 x86_64_pop_reg(cd, REG_ITMP2_XPC);
2542 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
2543 x86_64_jmp_reg(cd, REG_ITMP3);
2546 case ICMD_GOTO: /* ... ==> ... */
2547 /* op1 = target JavaVM pc */
2549 x86_64_jmp_imm(cd, 0);
2550 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2553 case ICMD_JSR: /* ... ==> ... */
2554 /* op1 = target JavaVM pc */
/* JSR pushes the return address via call; RET below jumps back
   through the local variable. */
2556 x86_64_call_imm(cd, 0);
2557 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2560 case ICMD_RET: /* ... ==> ... */
2561 /* op1 = local variable */
2563 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2564 var_to_reg_int(s1, var, REG_ITMP1);
2565 x86_64_jmp_reg(cd, s1);
/* IFNULL/IFNONNULL: compare against 0 either in memory (spilled
   variable) or with test reg,reg, then conditional jump. */
2568 case ICMD_IFNULL: /* ..., value ==> ... */
2569 /* op1 = target JavaVM pc */
2571 if (src->flags & INMEMORY) {
2572 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2575 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2577 x86_64_jcc(cd, X86_64_CC_E, 0);
2578 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2581 case ICMD_IFNONNULL: /* ..., value ==> ... */
2582 /* op1 = target JavaVM pc */
2584 if (src->flags & INMEMORY) {
2585 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2588 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2590 x86_64_jcc(cd, X86_64_CC_NE, 0);
2591 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
/* Integer compare-with-constant branches delegate to a shared
   emitter, parameterized only by the condition code. */
2594 case ICMD_IFEQ: /* ..., value ==> ... */
2595 /* op1 = target JavaVM pc, val.i = constant */
2597 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2600 case ICMD_IFLT: /* ..., value ==> ... */
2601 /* op1 = target JavaVM pc, val.i = constant */
2603 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2606 case ICMD_IFLE: /* ..., value ==> ... */
2607 /* op1 = target JavaVM pc, val.i = constant */
2609 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2612 case ICMD_IFNE: /* ..., value ==> ... */
2613 /* op1 = target JavaVM pc, val.i = constant */
2615 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2618 case ICMD_IFGT: /* ..., value ==> ... */
2619 /* op1 = target JavaVM pc, val.i = constant */
2621 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2624 case ICMD_IFGE: /* ..., value ==> ... */
2625 /* op1 = target JavaVM pc, val.i = constant */
2627 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
/* Long compare-with-constant branches: same pattern, 64-bit emitter. */
2630 case ICMD_IF_LEQ: /* ..., value ==> ... */
2631 /* op1 = target JavaVM pc, val.l = constant */
2633 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2636 case ICMD_IF_LLT: /* ..., value ==> ... */
2637 /* op1 = target JavaVM pc, val.l = constant */
2639 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2642 case ICMD_IF_LLE: /* ..., value ==> ... */
2643 /* op1 = target JavaVM pc, val.l = constant */
2645 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2648 case ICMD_IF_LNE: /* ..., value ==> ... */
2649 /* op1 = target JavaVM pc, val.l = constant */
2651 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2654 case ICMD_IF_LGT: /* ..., value ==> ... */
2655 /* op1 = target JavaVM pc, val.l = constant */
2657 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2660 case ICMD_IF_LGE: /* ..., value ==> ... */
2661 /* op1 = target JavaVM pc, val.l = constant */
2663 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
/* Two-operand compare branches.  Note ACMPEQ/ACMPNE (references)
   share the 64-bit compare emitter with the long variants. */
2666 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2667 /* op1 = target JavaVM pc */
2669 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2672 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2673 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2675 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2678 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2679 /* op1 = target JavaVM pc */
2681 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2684 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2685 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2687 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2690 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2691 /* op1 = target JavaVM pc */
2693 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2696 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2697 /* op1 = target JavaVM pc */
2699 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2702 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2703 /* op1 = target JavaVM pc */
2705 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2708 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2709 /* op1 = target JavaVM pc */
2711 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2714 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2715 /* op1 = target JavaVM pc */
2717 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2720 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2721 /* op1 = target JavaVM pc */
2723 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2726 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2727 /* op1 = target JavaVM pc */
2729 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2732 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2733 /* op1 = target JavaVM pc */
2735 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
/* Branch-free conditional constants: IFxx_ICONST selects between two
   int constants based on a comparison with zero, compiled to a
   test + cmovcc sequence instead of a branch.  The optional
   ELSE_ICONST in iptr[1] supplies the "else" constant.
   NOTE(review): excerpt is non-contiguous — the else-branch of the
   ELSE_ICONST check and the break statements are elided. */
2738 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
2740 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2743 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2744 /* val.i = constant */
2746 var_to_reg_int(s1, src, REG_ITMP1);
2747 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2748 if (iptr[1].opc == ICMD_ELSE_ICONST) {
/* Copy source aside so loading the else-constant into d cannot
   clobber the value still being tested (guard elided). */
2750 M_INTMOVE(s1, REG_ITMP1);
2753 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2755 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2756 x86_64_testl_reg_reg(cd, s1, s1);
2757 x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2758 store_reg_to_var_int(iptr->dst, d);
2761 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2762 /* val.i = constant */
2764 var_to_reg_int(s1, src, REG_ITMP1);
2765 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2766 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2768 M_INTMOVE(s1, REG_ITMP1);
2771 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2773 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2774 x86_64_testl_reg_reg(cd, s1, s1);
2775 x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2776 store_reg_to_var_int(iptr->dst, d);
2779 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2780 /* val.i = constant */
2782 var_to_reg_int(s1, src, REG_ITMP1);
2783 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2784 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2786 M_INTMOVE(s1, REG_ITMP1);
2789 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2791 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2792 x86_64_testl_reg_reg(cd, s1, s1);
2793 x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2794 store_reg_to_var_int(iptr->dst, d);
2797 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2798 /* val.i = constant */
2800 var_to_reg_int(s1, src, REG_ITMP1);
2801 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2802 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2804 M_INTMOVE(s1, REG_ITMP1);
2807 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2809 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2810 x86_64_testl_reg_reg(cd, s1, s1);
2811 x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2812 store_reg_to_var_int(iptr->dst, d);
2815 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2816 /* val.i = constant */
2818 var_to_reg_int(s1, src, REG_ITMP1);
2819 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2820 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2822 M_INTMOVE(s1, REG_ITMP1);
2825 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2827 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2828 x86_64_testl_reg_reg(cd, s1, s1);
2829 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2830 store_reg_to_var_int(iptr->dst, d);
2833 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2834 /* val.i = constant */
2836 var_to_reg_int(s1, src, REG_ITMP1);
2837 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2838 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2840 M_INTMOVE(s1, REG_ITMP1);
2843 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2845 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2846 x86_64_testl_reg_reg(cd, s1, s1);
2847 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2848 store_reg_to_var_int(iptr->dst, d);
/* Method return: move the return value into the ABI result register,
   then fall through to the shared epilogue (nowperformreturn): optional
   exit trace, monitor exit for synchronized methods, callee-saved
   register restore, and stack deallocation. */
2852 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2856 var_to_reg_int(s1, src, REG_RESULT);
2857 M_INTMOVE(s1, REG_RESULT);
2859 goto nowperformreturn;
2861 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2864 var_to_reg_flt(s1, src, REG_FRESULT);
2865 M_FLTMOVE(s1, REG_FRESULT);
2867 goto nowperformreturn;
2869 case ICMD_RETURN: /* ... ==> ... */
2875 p = parentargs_base;
2877 /* call trace function */
/* Save both result registers around the displaymethodstop call
   (the call would clobber them).  Guarded by a verbose/trace flag
   on an elided line — TODO confirm. */
2879 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2881 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2882 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2884 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
2885 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
/* FRESULT is passed twice (float and double argument slots). */
2886 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2887 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2889 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
2890 x86_64_call_reg(cd, REG_ITMP1);
2892 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2893 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2895 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2898 #if defined(USE_THREADS)
/* Synchronized method: call builtin_monitorexit on the saved monitor
   object, preserving the return value across the call. */
2899 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2900 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2902 /* we need to save the proper return value */
2903 switch (iptr->opc) {
2907 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, rd->maxmemuse * 8);
2911 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, rd->maxmemuse * 8);
2915 x86_64_mov_imm_reg(cd, (ptrint) builtin_monitorexit, REG_ITMP1);
2916 x86_64_call_reg(cd, REG_ITMP1);
2918 /* and now restore the proper return value */
2919 switch (iptr->opc) {
2923 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_RESULT);
2927 x86_64_movq_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_FRESULT);
2933 /* restore saved registers */
2934 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
2935 p--; x86_64_mov_membase_reg(cd, REG_SP, p * 8, rd->savintregs[i]);
2937 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
2938 p--; x86_64_movq_membase_reg(cd, REG_SP, p * 8, rd->savfltregs[i]);
2941 /* deallocate stack */
2942 if (parentargs_base) {
2943 x86_64_alu_imm_reg(cd, X86_64_ADD, parentargs_base * 8, REG_SP);
/* TABLESWITCH: range check the (index - low) value, fall to the
   default block if out of range, otherwise jump indirectly through a
   jump table built in the data segment.
   LOOKUPSWITCH: linear sequence of compare-and-branch pairs followed
   by an unconditional jump to the default block. */
2951 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2956 tptr = (void **) iptr->target;
2958 s4ptr = iptr->val.a;
2959 l = s4ptr[1]; /* low */
2960 i = s4ptr[2]; /* high */
2962 var_to_reg_int(s1, src, REG_ITMP1);
2963 M_INTMOVE(s1, REG_ITMP1);
/* Normalize: index -= low, so the table is 0-based. */
2965 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
/* Unsigned "above" compare catches both <low and >high in one jump
   (i here appears to already hold high - low + 1 from elided code —
   TODO confirm). */
2970 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
2971 x86_64_jcc(cd, X86_64_CC_A, 0);
2973 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
2974 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2976 /* build jump table top down and use address of lowest entry */
2978 /* s4ptr += 3 + i; */
2982 /* dseg_addtarget(cd, BlockPtrOfPC(*--s4ptr)); */
2983 dseg_addtarget(cd, (basicblock *) tptr[0]);
2987 /* length of dataseg after last dseg_addtarget is used by load */
/* mov imm 0 is a placeholder patched by dseg_adddata with the data
   segment base; the indexed load then fetches the target address. */
2989 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
2990 dseg_adddata(cd, cd->mcodeptr);
2991 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
2992 x86_64_jmp_reg(cd, REG_ITMP1);
2997 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2999 s4 i, l, val, *s4ptr;
3002 tptr = (void **) iptr->target;
3004 s4ptr = iptr->val.a;
3005 l = s4ptr[0]; /* default */
3006 i = s4ptr[1]; /* count */
3008 MCODECHECK((i<<2)+8);
3009 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
/* Per-entry compare/branch (loop header elided). */
3015 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
3016 x86_64_jcc(cd, X86_64_CC_E, 0);
3017 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[1]), cd->mcodeptr); */
3018 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
/* No match: jump to the default block. */
3021 x86_64_jmp_imm(cd, 0);
3022 /* codegen_addreference(cd, BlockPtrOfPC(l), cd->mcodeptr); */
3024 tptr = (void **) iptr->target;
3025 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
/* Method invocation: BUILTIN and the four INVOKE* opcodes share
   argument marshalling (registers first, stack for overflow per the
   parsed descriptor), then differ in how the callee address is
   obtained: immediate (builtin/static/special), vftbl dispatch
   (virtual), or interfacetable dispatch (interface).  Finally the
   return value, if any, is stored to the destination variable.
   NOTE(review): excerpt is non-contiguous — resolved/unresolved
   branching (if (!lm) { ... } else { ... }) is partly elided. */
3030 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
3031 /* op1 = arg count val.a = builtintable entry */
3037 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3038 /* op1 = arg count, val.a = method pointer */
3040 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3041 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
3042 case ICMD_INVOKEINTERFACE:
/* Resolved path: descriptor from the methodinfo; unresolved path:
   descriptor from the unresolved_method reference. */
3047 md = lm->parseddesc;
3049 unresolved_method *um = iptr->target;
3050 md = um->methodref->parseddesc.md;
3056 MCODECHECK((s3 << 1) + 64);
3058 /* copy arguments to registers or stack location */
/* Walk the stack operands right-to-left; ARGVAR operands are already
   in place and are skipped. */
3060 for (s3 = s3 - 1; s3 >= 0; s3--, src = src->prev) {
3061 if (src->varkind == ARGVAR)
3063 if (IS_INT_LNG_TYPE(src->type)) {
3064 if (!md->params[s3].inmemory) {
3065 s1 = rd->argintregs[md->params[s3].regoff];
3066 var_to_reg_int(d, src, s1);
3069 var_to_reg_int(d, src, REG_ITMP1);
3070 M_LST(d, REG_SP, md->params[s3].regoff * 8);
3074 if (!md->params[s3].inmemory) {
3075 s1 = rd->argfltregs[md->params[s3].regoff];
3076 var_to_reg_flt(d, src, s1);
3079 var_to_reg_flt(d, src, REG_FTMP1);
3080 M_DST(d, REG_SP, md->params[s3].regoff * 8);
3085 switch (iptr->opc) {
/* BUILTIN: unresolved builtins get a patcher keyed by bte->fp. */
3088 codegen_addpatchref(cd, cd->mcodeptr,
3089 bte->fp, iptr->target);
3091 if (showdisassemble) {
3092 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3098 a = (ptrint) bte->fp;
3101 d = md->returntype.type;
3103 x86_64_mov_imm_reg(cd, a, REG_ITMP1);
3104 x86_64_call_reg(cd, REG_ITMP1);
3107 case ICMD_INVOKESPECIAL:
/* Explicit null check on the receiver, then fall through to the
   INVOKESTATIC call sequence. */
3108 x86_64_test_reg_reg(cd, rd->argintregs[0], rd->argintregs[0]);
3109 x86_64_jcc(cd, X86_64_CC_Z, 0);
3110 codegen_addxnullrefs(cd, cd->mcodeptr);
3112 /* first argument contains pointer */
3113 /* gen_nullptr_check(rd->argintregs[0]); */
3115 /* access memory for hardware nullptr */
3116 /* x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); */
3120 case ICMD_INVOKESTATIC:
3122 unresolved_method *um = iptr->target;
3124 codegen_addpatchref(cd, cd->mcodeptr,
3125 PATCHER_invokestatic_special, um);
3127 if (showdisassemble) {
3128 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3132 d = um->methodref->parseddesc.md->returntype.type;
/* Resolved: call the method's stub routine directly. */
3135 a = (ptrint) lm->stubroutine;
3136 d = lm->parseddesc->returntype.type;
3139 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3140 x86_64_call_reg(cd, REG_ITMP2);
3143 case ICMD_INVOKEVIRTUAL:
3144 gen_nullptr_check(rd->argintregs[0]);
3147 unresolved_method *um = iptr->target;
3149 codegen_addpatchref(cd, cd->mcodeptr,
3150 PATCHER_invokevirtual, um);
3152 if (showdisassemble) {
3153 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3157 d = um->methodref->parseddesc.md->returntype.type;
/* Virtual dispatch: load vftbl from the object header, then the
   method pointer at its vftbl index. */
3160 s1 = OFFSET(vftbl_t, table[0]) +
3161 sizeof(methodptr) * lm->vftblindex;
3162 d = lm->parseddesc->returntype.type;
3165 x86_64_mov_membase_reg(cd, rd->argintregs[0],
3166 OFFSET(java_objectheader, vftbl),
3168 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP1);
3169 x86_64_call_reg(cd, REG_ITMP1);
3172 case ICMD_INVOKEINTERFACE:
3173 gen_nullptr_check(rd->argintregs[0]);
3176 unresolved_method *um = iptr->target;
3178 codegen_addpatchref(cd, cd->mcodeptr,
3179 PATCHER_invokeinterface, um);
3181 if (showdisassemble) {
3182 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3187 d = um->methodref->parseddesc.md->returntype.type;
/* Interface dispatch: two loads — interface table entry (negative
   offset by class index), then the method slot within it. */
3190 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3191 sizeof(methodptr) * lm->class->index;
3193 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3195 d = lm->parseddesc->returntype.type;
3198 x86_64_mov_membase_reg(cd, rd->argintregs[0],
3199 OFFSET(java_objectheader, vftbl),
3201 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP2);
3202 x86_64_mov_membase32_reg(cd, REG_ITMP2, s2, REG_ITMP1);
3203 x86_64_call_reg(cd, REG_ITMP1);
3207 /* d contains return type */
/* Store the result, typed by the callee's return descriptor. */
3209 if (d != TYPE_VOID) {
3210 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3211 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3212 M_INTMOVE(REG_RESULT, s1);
3213 store_reg_to_var_int(iptr->dst, s1);
3215 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3216 M_FLTMOVE(REG_FRESULT, s1);
3217 store_reg_to_var_flt(iptr->dst, s1);
/* CHECKCAST: type test that throws on failure.  Two code shapes are
   emitted — one for interface supertypes (interfacetable probe) and
   one for class supertypes (baseval/diffval range check).  When the
   superclass is unresolved, both shapes are emitted plus a runtime
   flags test that selects between them; s2/s3 pre-compute the byte
   sizes of the two sequences so the selection jumps can skip the
   unused one.
   NOTE(review): excerpt is non-contiguous (several braces and guard
   conditions are elided). */
3223 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3225 /* op1: 0 == array, 1 == class */
3226 /* val.a: (classinfo *) superclass */
3228 /* superclass is an interface:
3230 * OK if ((sub == NULL) ||
3231 * (sub->vftbl->interfacetablelength > super->index) &&
3232 * (sub->vftbl->interfacetable[-super->index] != NULL));
3234 * superclass is a class:
3236 * OK if ((sub == NULL) || (0
3237 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3238 * super->vftbl->diffval));
3243 vftbl_t *supervftbl;
3246 super = (classinfo *) iptr->val.a;
3253 superindex = super->index;
3254 supervftbl = super->vftbl;
/* baseval/diffval are mutated by class loading; the threadcrit
   calls bracket the reads for consistency with concurrent loaders. */
3257 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3258 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3260 var_to_reg_int(s1, src, REG_ITMP1);
3262 /* calculate interface checkcast code size */
3264 s2 = 3; /* mov_membase_reg */
3265 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3267 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub imm32 */ +
3268 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3269 3 /* test */ + 6 /* jcc */;
3272 s2 += (showdisassemble ? 5 : 0);
3274 /* calculate class checkcast code size */
3276 s3 = 3; /* mov_membase_reg */
3277 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3278 s3 += 10 /* mov_imm_reg */ + 3 + 4 /* movl_membase32_reg */;
3281 if (s1 != REG_ITMP1) {
3282 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3283 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3284 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3285 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3291 s3 += 3 + 4 /* movl_membase32_reg */ + 3 /* sub */ +
3292 10 /* mov_imm_reg */ + 3 /* movl_membase_reg */;
3293 CALCOFFSETBYTES(s3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3296 s3 += 3 /* cmp */ + 6 /* jcc */;
3299 s3 += (showdisassemble ? 5 : 0);
3301 /* if class is not resolved, check which code to call */
/* null reference: skip everything (cast of null always succeeds);
   the literal byte counts in the jcc distance mirror the emitted
   selection sequence below. */
3304 x86_64_test_reg_reg(cd, s1, s1);
3305 x86_64_jcc(cd, X86_64_CC_Z, 6 + (showdisassemble ? 5 : 0) + 7 + 6 + s2 + 5 + s3);
3307 codegen_addpatchref(cd, cd->mcodeptr,
3308 PATCHER_checkcast_instanceof_flags,
3309 (constant_classref *) iptr->target);
3311 if (showdisassemble) {
3312 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* Immediate 0 is patched with super->flags at resolution time. */
3315 x86_64_movl_imm_reg(cd, 0, REG_ITMP2); /* super->flags */
3316 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP2);
3317 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3320 /* interface checkcast code */
3322 if (!super || (super->flags & ACC_INTERFACE)) {
3324 x86_64_test_reg_reg(cd, s1, s1);
3325 x86_64_jcc(cd, X86_64_CC_Z, s2);
3328 x86_64_mov_membase_reg(cd, s1,
3329 OFFSET(java_objectheader, vftbl),
3333 codegen_addpatchref(cd, cd->mcodeptr,
3334 PATCHER_checkcast_instanceof_interface,
3335 (constant_classref *) iptr->target);
3337 if (showdisassemble) {
3338 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* Fail (throw ClassCastException) if the interface table is too
   short or the slot is null; xcastrefs records the throw site. */
3342 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3343 OFFSET(vftbl_t, interfacetablelength),
3345 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3346 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3347 x86_64_jcc(cd, X86_64_CC_LE, 0);
3348 codegen_addxcastrefs(cd, cd->mcodeptr);
3349 x86_64_mov_membase32_reg(cd, REG_ITMP2,
3350 OFFSET(vftbl_t, interfacetable[0]) -
3351 superindex * sizeof(methodptr*),
3353 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3354 x86_64_jcc(cd, X86_64_CC_E, 0);
3355 codegen_addxcastrefs(cd, cd->mcodeptr);
3358 x86_64_jmp_imm(cd, s3);
3361 /* class checkcast code */
3363 if (!super || !(super->flags & ACC_INTERFACE)) {
3365 x86_64_test_reg_reg(cd, s1, s1);
3366 x86_64_jcc(cd, X86_64_CC_Z, s3);
3369 x86_64_mov_membase_reg(cd, s1,
3370 OFFSET(java_objectheader, vftbl),
3374 codegen_addpatchref(cd, cd->mcodeptr,
3375 PATCHER_checkcast_class,
3376 (constant_classref *) iptr->target);
3378 if (showdisassemble) {
3379 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3383 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP3);
3384 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3385 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3387 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3388 OFFSET(vftbl_t, baseval),
3390 /* if (s1 != REG_ITMP1) { */
3391 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3392 /* OFFSET(vftbl_t, baseval), */
3394 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3395 /* OFFSET(vftbl_t, diffval), */
3397 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3398 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3400 /* x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP1, REG_ITMP2); */
/* Range check: (sub.baseval - super.baseval) must be <= diffval,
   compared unsigned so negative differences also fail. */
3403 x86_64_movl_membase32_reg(cd, REG_ITMP3,
3404 OFFSET(vftbl_t, baseval),
3406 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP2);
3407 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP3);
3408 x86_64_movl_membase_reg(cd, REG_ITMP3,
3409 OFFSET(vftbl_t, diffval),
3412 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3413 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3415 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP2);
3416 x86_64_jcc(cd, X86_64_CC_A, 0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3417 codegen_addxcastrefs(cd, cd->mcodeptr);
/* On success the (unchanged) reference is the result. */
3419 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3421 store_reg_to_var_int(iptr->dst, d);
3422 /* if (iptr->dst->flags & INMEMORY) { */
3423 /* x86_64_mov_reg_membase(cd, s1, REG_SP, iptr->dst->regoff * 8); */
3425 /* M_INTMOVE(s1, iptr->dst->regoff); */
/* INSTANCEOF: same type-test machinery as CHECKCAST, but instead of
   throwing it produces a 0/1 int via setcc.  Interface and class
   shapes are both emitted when the supertype is unresolved, selected
   at runtime by the patched flags test; s2/s3 hold the byte sizes of
   the two sequences for the skip jumps.
   NOTE(review): excerpt is non-contiguous; some guards and braces
   are elided. */
3430 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3432 /* op1: 0 == array, 1 == class */
3433 /* val.a: (classinfo *) superclass */
3435 /* superclass is an interface:
3437 * return (sub != NULL) &&
3438 * (sub->vftbl->interfacetablelength > super->index) &&
3439 * (sub->vftbl->interfacetable[-super->index] != NULL);
3441 * superclass is a class:
3443 * return ((sub != NULL) && (0
3444 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3445 * super->vftbl->diffvall));
3450 vftbl_t *supervftbl;
3453 super = (classinfo *) iptr->val.a;
3460 superindex = super->index;
3461 supervftbl = super->vftbl;
3464 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3465 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3468 var_to_reg_int(s1, src, REG_ITMP1);
3469 d = reg_of_var(rd, iptr->dst, REG_ITMP2);
/* If d would alias s1 (guard elided), move the source aside first. */
3471 M_INTMOVE(s1, REG_ITMP1);
3475 /* calculate interface instanceof code size */
3477 s2 = 3; /* mov_membase_reg */
3478 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3479 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub_imm32 */ +
3480 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3481 3 /* test */ + 4 /* setcc */;
3484 s2 += (showdisassemble ? 5 : 0);
3486 /* calculate class instanceof code size */
3488 s3 = 3; /* mov_membase_reg */
3489 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3490 s3 += 10; /* mov_imm_reg */
3491 s3 += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3492 CALCOFFSETBYTES(s3, REG_ITMP1, OFFSET(vftbl_t, baseval));
3493 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3494 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, baseval));
3495 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3496 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3497 s3 += 3 /* sub */ + 3 /* xor */ + 3 /* cmp */ + 4 /* setcc */;
3500 s3 += (showdisassemble ? 5 : 0);
/* Result defaults to 0 (false); a null reference stays false. */
3502 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3504 /* if class is not resolved, check which code to call */
3507 x86_64_test_reg_reg(cd, s1, s1);
3508 x86_64_jcc(cd, X86_64_CC_Z, (6 + (showdisassemble ? 5 : 0) +
3509 7 + 6 + s2 + 5 + s3));
3511 codegen_addpatchref(cd, cd->mcodeptr,
3512 PATCHER_checkcast_instanceof_flags,
3513 (constant_classref *) iptr->target);
3515 if (showdisassemble) {
3516 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* Immediate 0 patched with super->flags at resolution time. */
3519 x86_64_movl_imm_reg(cd, 0, REG_ITMP3); /* super->flags */
3520 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP3);
3521 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3524 /* interface instanceof code */
3526 if (!super || (super->flags & ACC_INTERFACE)) {
3528 x86_64_test_reg_reg(cd, s1, s1);
3529 x86_64_jcc(cd, X86_64_CC_Z, s2);
3532 x86_64_mov_membase_reg(cd, s1,
3533 OFFSET(java_objectheader, vftbl),
3536 codegen_addpatchref(cd, cd->mcodeptr,
3537 PATCHER_checkcast_instanceof_interface,
3538 (constant_classref *) iptr->target);
3540 if (showdisassemble) {
3541 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3545 x86_64_movl_membase32_reg(cd, REG_ITMP1,
3546 OFFSET(vftbl_t, interfacetablelength),
3548 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3549 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
/* Table too short -> skip the probe, leaving d = 0 (false). */
3551 a = 3 + 4 /* mov_membase32_reg */ + 3 /* test */ + 4 /* setcc */;
3553 x86_64_jcc(cd, X86_64_CC_LE, a);
3554 x86_64_mov_membase32_reg(cd, REG_ITMP1,
3555 OFFSET(vftbl_t, interfacetable[0]) -
3556 superindex * sizeof(methodptr*),
3558 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3559 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3562 x86_64_jmp_imm(cd, s3);
3565 /* class instanceof code */
3567 if (!super || !(super->flags & ACC_INTERFACE)) {
3569 x86_64_test_reg_reg(cd, s1, s1);
3570 x86_64_jcc(cd, X86_64_CC_E, s3);
3573 x86_64_mov_membase_reg(cd, s1,
3574 OFFSET(java_objectheader, vftbl),
3578 codegen_addpatchref(cd, cd->mcodeptr,
3579 PATCHER_instanceof_class,
3580 (constant_classref *) iptr->target);
3582 if (showdisassemble) {
3583 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3587 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP2);
3588 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3589 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3591 x86_64_movl_membase_reg(cd, REG_ITMP1,
3592 OFFSET(vftbl_t, baseval),
3594 x86_64_movl_membase_reg(cd, REG_ITMP2,
3595 OFFSET(vftbl_t, diffval),
3597 x86_64_movl_membase_reg(cd, REG_ITMP2,
3598 OFFSET(vftbl_t, baseval),
3600 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3601 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
/* Unsigned below-or-equal: 0 <= (sub.base - super.base) <= diffval. */
3603 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
3604 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d); /* may be REG_ITMP2 */
3605 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP1);
3606 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3608 store_reg_to_var_int(iptr->dst, d);
3612 case ICMD_CHECKASIZE: /* ..., size ==> ..., size */
/* compare the array size (in memory or in a register) against 0 and
   record a forward branch to the negative-array-size stub */
3614 if (src->flags & INMEMORY) {
3615 x86_64_alul_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
3618 x86_64_testl_reg_reg(cd, src->regoff, src->regoff);
/* displacement 0 is a placeholder; gen_resolvebranch fixes it when the
   stub is emitted */
3620 x86_64_jcc(cd, X86_64_CC_L, 0);
3621 codegen_addxcheckarefs(cd, cd->mcodeptr);
3624 case ICMD_CHECKEXCEPTION: /* ... ==> ... */
/* a zero result from the preceding builtin call signals a pending
   exception; branch to the exception-check stub */
3626 x86_64_test_reg_reg(cd, REG_RESULT, REG_RESULT);
3627 x86_64_jcc(cd, X86_64_CC_E, 0);
3628 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3631 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3632 /* op1 = dimension, val.a = array descriptor */
3634 /* check for negative sizes and copy sizes to stack if necessary */
3636 MCODECHECK((iptr->op1 << 1) + 64);
/* walk the op1 dimension sizes from the top of the stack downwards */
3638 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3639 var_to_reg_int(s2, src, REG_ITMP1);
3640 x86_64_testl_reg_reg(cd, s2, s2);
3641 x86_64_jcc(cd, X86_64_CC_L, 0);
3642 codegen_addxcheckarefs(cd, cd->mcodeptr);
3644 /* copy SAVEDVAR sizes to stack */
/* argument variables already live at their stack slots; everything
   else is stored so the builtin sees a contiguous size array */
3646 if (src->varkind != ARGVAR) {
3647 x86_64_mov_reg_membase(cd, s2, REG_SP, s1 * 8);
3651 /* is a patcher function set? */
3654 codegen_addpatchref(cd, cd->mcodeptr,
3655 (functionptr) (ptrint) iptr->target,
3658 if (showdisassemble) {
3659 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3665 a = (ptrint) iptr->val.a;
3668 /* a0 = dimension count */
3670 x86_64_mov_imm_reg(cd, iptr->op1, rd->argintregs[0]);
3672 /* a1 = arrayvftbl */
3674 x86_64_mov_imm_reg(cd, (ptrint) iptr->val.a, rd->argintregs[1]);
3676 /* a2 = pointer to dimensions = stack pointer */
3678 x86_64_mov_reg_reg(cd, REG_SP, rd->argintregs[2]);
3680 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_multianewarray, REG_ITMP1);
3681 x86_64_call_reg(cd, REG_ITMP1);
/* move the returned array reference into the destination variable */
3683 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3684 M_INTMOVE(REG_RESULT, s1);
3685 store_reg_to_var_int(iptr->dst, s1);
/* default: an opcode we do not know how to translate is fatal */
3689 throw_cacao_exception_exit(string_java_lang_InternalError,
3690 "Unknown ICMD %d", iptr->opc);
3693 } /* for instruction */
3695 /* copy values to interface registers */
/* at the end of a basic block, move the remaining stack values into
   the interface registers/slots agreed on with the successor blocks */
3697 src = bptr->outstack;
3698 len = bptr->outdepth;
3699 MCODECHECK(64 + len);
/* stack variables are already in place; only non-STACKVAR values move */
3705 if ((src->varkind != STACKVAR)) {
3707 if (IS_FLT_DBL_TYPE(s2)) {
3708 var_to_reg_flt(s1, src, REG_FTMP1);
3709 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3710 M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3713 x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3717 var_to_reg_int(s1, src, REG_ITMP1);
3718 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3719 M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3722 x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3728 } /* if (bptr -> flags >= BBREACHED) */
3729 } /* for basic block */
3731 codegen_createlinenumbertable(cd);
3735 /* generate bound check stubs */
/* one small per-site prologue (recover faulting PC and index), then a
   shared tail that builds the ArrayIndexOutOfBoundsException and jumps
   into asm_handle_exception */
3737 u1 *xcodeptr = NULL;
3740 for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
/* resolve the in-line forward branch so it targets this stub */
3741 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3743 cd->mcodeptr - cd->mcodebase);
3747 /* move index register into REG_ITMP1 */
3748 x86_64_mov_reg_reg(cd, bref->reg, REG_ITMP1); /* 3 bytes */
/* the 0 immediate is patched to the code base by dseg_adddata; adding
   branchpos-6 yields the faulting PC in REG_ITMP2_XPC.  The byte-size
   comments must match the actual encodings. */
3750 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3751 dseg_adddata(cd, cd->mcodeptr);
3752 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3753 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
/* every stub after the first jumps to the shared tail (5 = jmp size) */
3755 if (xcodeptr != NULL) {
3756 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3759 xcodeptr = cd->mcodeptr;
3762 /*create stackinfo -- begin*/
3763 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
3764 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
3765 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
3766 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
3767 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
3768 x86_64_call_reg(cd,REG_ITMP3);
3769 /*create stackinfo -- end*/
/* pass the faulting index and build the exception object */
3771 x86_64_mov_reg_reg(cd, REG_ITMP1, rd->argintregs[0]);
3772 x86_64_mov_imm_reg(cd, (ptrint) new_arrayindexoutofboundsexception, REG_ITMP3);
3773 x86_64_call_reg(cd, REG_ITMP3);
3775 /*remove stackinfo -- begin*/
3776 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
3777 x86_64_call_reg(cd,REG_ITMP3);
3778 /*remove stackinfo -- end*/
/* restore XPC, drop the scratch area, enter the exception handler */
3780 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
3781 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3783 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
3784 x86_64_jmp_reg(cd, REG_ITMP3);
3788 /* generate negative array size check stubs */
/* same structure as the bound-check stubs: per-site PC recovery plus a
   shared NegativeArraySizeException tail.  (10 + 10 + 3) is the byte
   length of the per-site prologue skipped when branching straight to
   an existing shared tail. */
3792 for (bref = cd->xcheckarefs; bref != NULL; bref = bref->next) {
3793 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3794 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3796 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3800 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3802 cd->mcodeptr - cd->mcodebase);
3806 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3807 dseg_adddata(cd, cd->mcodeptr);
3808 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3809 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3811 if (xcodeptr != NULL) {
3812 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3815 xcodeptr = cd->mcodeptr;
3818 /*create stackinfo -- begin*/
3819 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
3820 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
3821 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
3822 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
3823 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
3824 x86_64_call_reg(cd,REG_ITMP3);
3825 /*create stackinfo -- end*/
3827 x86_64_mov_imm_reg(cd, (u8) new_negativearraysizeexception, REG_ITMP3);
3828 x86_64_call_reg(cd, REG_ITMP3);
3830 /*remove stackinfo -- begin*/
3831 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
3832 x86_64_call_reg(cd,REG_ITMP3);
3833 /*remove stackinfo -- end*/
3835 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
3836 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3838 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3839 x86_64_jmp_reg(cd, REG_ITMP3);
3843 /* generate cast check stubs */
/* per-site PC recovery plus a shared ClassCastException tail; layout
   identical to the negative-array-size stubs above */
3847 for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3848 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3849 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3851 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3855 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3857 cd->mcodeptr - cd->mcodebase);
3861 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3862 dseg_adddata(cd, cd->mcodeptr);
3863 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3864 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3866 if (xcodeptr != NULL) {
3867 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3870 xcodeptr = cd->mcodeptr;
3872 /*create stackinfo -- begin*/
3873 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
3874 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
3875 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
3876 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
3877 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
3878 x86_64_call_reg(cd,REG_ITMP3);
3879 /*create stackinfo -- end*/
3882 x86_64_mov_imm_reg(cd, (u8) new_classcastexception, REG_ITMP3);
3883 x86_64_call_reg(cd, REG_ITMP3);
3885 /*remove stackinfo -- begin*/
3886 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
3887 x86_64_call_reg(cd,REG_ITMP3);
3888 /*remove stackinfo -- end*/
3890 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
3891 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3893 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3894 x86_64_jmp_reg(cd, REG_ITMP3);
3898 /* generate divide by zero check stubs */
/* per-site PC recovery plus a shared ArithmeticException tail; layout
   identical to the cast-check stubs above */
3902 for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
3903 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3904 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3906 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3910 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3912 cd->mcodeptr - cd->mcodebase);
3916 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3917 dseg_adddata(cd, cd->mcodeptr);
3918 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3919 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3921 if (xcodeptr != NULL) {
3922 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3925 xcodeptr = cd->mcodeptr;
3927 /*create stackinfo -- begin*/
3928 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
3929 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
3930 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
3931 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
3932 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
3933 x86_64_call_reg(cd,REG_ITMP3);
3934 /*create stackinfo -- end*/
3936 x86_64_mov_imm_reg(cd, (u8) new_arithmeticexception, REG_ITMP3);
3937 x86_64_call_reg(cd, REG_ITMP3);
3939 /*remove stackinfo -- begin*/
3940 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
3941 x86_64_call_reg(cd,REG_ITMP3);
3942 /*remove stackinfo -- end*/
3944 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
3945 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3947 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3948 x86_64_jmp_reg(cd, REG_ITMP3);
3952 /* generate exception check stubs */
/* unlike the other stubs, the exception object already exists: fetch
   it from the (per-thread) exception pointer, clear the pointer, fill
   in the stack trace, then enter asm_handle_exception */
3956 for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
3957 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3958 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3960 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3964 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3966 cd->mcodeptr - cd->mcodebase);
3970 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3971 dseg_adddata(cd, cd->mcodeptr);
3972 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3973 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3975 if (xcodeptr != NULL) {
3976 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3979 xcodeptr = cd->mcodeptr;
/* NOTE(review): this variant zeroes slot 0*8 too and later pops with
   two separate 8-byte adds — the layout differs from the other stubs;
   presumably matched to asm_prepare/remove_native_stackinfo here */
3983 x86_64_alu_imm_reg(cd, X86_64_SUB, 4*8, REG_SP);
3984 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3*8);
3985 x86_64_mov_imm_membase(cd, 0, REG_SP, 2*8);
3986 x86_64_mov_imm_membase(cd, 0, REG_SP, 1*8);
3987 x86_64_mov_imm_membase(cd, 0, REG_SP, 0*8);
3988 x86_64_mov_imm_reg(cd,(u8) asm_prepare_native_stackinfo,REG_ITMP1);
3989 x86_64_call_reg(cd,REG_ITMP1);
/* load and clear the pending exception pointer */
3992 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3993 x86_64_mov_imm_reg(cd, (u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3994 x86_64_call_reg(cd, REG_ITMP1);
3995 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
3996 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
3997 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
3999 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4000 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP1_XPTR);
4001 x86_64_mov_imm_membase(cd, 0, REG_ITMP3, 0);
/* attach a stack trace to the exception object */
4003 x86_64_mov_reg_reg(cd,REG_ITMP1_XPTR,RDI);
4004 x86_64_mov_imm_reg(cd,(u8) helper_fillin_stacktrace_always,REG_ITMP1);
4005 x86_64_call_reg(cd,REG_ITMP1);
4006 x86_64_mov_reg_reg(cd,REG_RESULT,REG_ITMP1_XPTR);
4008 x86_64_mov_imm_reg(cd,(u8) asm_remove_native_stackinfo,REG_ITMP2);
4009 x86_64_call_reg(cd,REG_ITMP2);
4011 x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
4012 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);
4013 x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
4016 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
4017 x86_64_jmp_reg(cd, REG_ITMP3);
4021 /* generate null pointer check stubs */
/* per-site PC recovery plus a shared NullPointerException tail; layout
   identical to the cast-check stubs above */
4025 for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
4026 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
4027 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4029 xcodeptr - cd->mcodebase - (10 + 10 + 3));
4033 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4035 cd->mcodeptr - cd->mcodebase);
4039 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
4040 dseg_adddata(cd, cd->mcodeptr);
4041 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
4042 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
4044 if (xcodeptr != NULL) {
4045 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
4048 xcodeptr = cd->mcodeptr;
4050 /*create stackinfo -- begin*/
4051 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
4052 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
4053 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
4054 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
4055 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
4056 x86_64_call_reg(cd,REG_ITMP3);
4057 /*create stackinfo -- end*/
4060 x86_64_mov_imm_reg(cd, (ptrint) new_nullpointerexception, REG_ITMP3);
4061 x86_64_call_reg(cd, REG_ITMP3);
4063 /*remove stackinfo -- begin*/
4064 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
4065 x86_64_call_reg(cd,REG_ITMP3);
4066 /*remove stackinfo -- end*/
4068 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
4069 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
4071 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
4072 x86_64_jmp_reg(cd, REG_ITMP3);
4076 /* generate code patching stub call code */
/* for every recorded patch site, overwrite the site with a `call
   rel32' into a stub that pushes the saved machine code, the reference
   and the patcher function, then tail-jumps to asm_wrapper_patcher */
4083 tmpcd = DNEW(codegendata);
4085 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
4086 /* check size of code segment */
4090 /* Get machine code which is patched back in later. A */
4091 /* `call rel32' is 5 bytes long (but read 8 bytes). */
4093 xcodeptr = cd->mcodebase + pref->branchpos;
4094 mcode = *((ptrint *) xcodeptr);
4096 /* patch in `call rel32' to call the following code */
4098 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4099 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4101 /* move pointer to java_objectheader onto stack */
4103 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4104 /* create a virtual java_objectheader */
4106 (void) dseg_addaddress(cd, get_dummyLR()); /* monitorPtr */
4107 a = dseg_addaddress(cd, NULL); /* vftbl */
/* +7 = length of the lea itself; RIP-relative address of dseg entry */
4109 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP3);
4110 x86_64_push_reg(cd, REG_ITMP3);
4112 x86_64_push_imm(cd, 0);
4115 /* move machine code bytes and classinfo pointer into registers */
4117 x86_64_mov_imm_reg(cd, (ptrint) mcode, REG_ITMP3);
4118 x86_64_push_reg(cd, REG_ITMP3);
4119 x86_64_mov_imm_reg(cd, (ptrint) pref->ref, REG_ITMP3);
4120 x86_64_push_reg(cd, REG_ITMP3);
4122 x86_64_mov_imm_reg(cd, (ptrint) pref->patcher, REG_ITMP3);
4123 x86_64_push_reg(cd, REG_ITMP3);
4125 x86_64_mov_imm_reg(cd, (ptrint) asm_wrapper_patcher, REG_ITMP3);
4126 x86_64_jmp_reg(cd, REG_ITMP3);
/* finalize: copy code to its final location, resolve data segment */
4131 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
4135 /* createcompilerstub **********************************************************
4137 Creates a stub routine which calls the compiler.
4139 *******************************************************************************/
/* size in bytes of the stub: 10 (mov imm,reg) + 10 (mov imm,reg) + 3 (jmp reg) */
4141 #define COMPILERSTUB_SIZE 23
/* createcompilerstub: build a tiny trampoline that passes the
   methodinfo to asm_call_jit_compiler; it is installed as the method's
   entry point until the method is JIT-compiled.  Returns the stub
   address; the caller owns the CNEW'd memory. */
4143 functionptr createcompilerstub(methodinfo *m)
4145 u1 *s; /* memory to hold the stub */
4149 s = CNEW(u1, COMPILERSTUB_SIZE);
4151 /* mark start of dump memory area */
4153 dumpsize = dump_size();
4155 cd = DNEW(codegendata);
4158 /* code for the stub */
4160 x86_64_mov_imm_reg(cd, (ptrint) m, REG_ITMP1); /* pass method to compiler */
4161 x86_64_mov_imm_reg(cd, (ptrint) asm_call_jit_compiler, REG_ITMP3);
4162 x86_64_jmp_reg(cd, REG_ITMP3);
4164 #if defined(STATISTICS)
4166 count_cstub_len += COMPILERSTUB_SIZE;
4169 /* release dump area */
4171 dump_release(dumpsize);
4173 return (functionptr) (ptrint) s;
4177 /* createnativestub ************************************************************
4179 Creates a stub routine which calls a native method.
4181 *******************************************************************************/
/* createnativestub: build the wrapper that adapts the JIT calling
   convention to a native (JNI-style) function f for method m.  The
   wrapper shifts the Java arguments to make room for the JNI env
   pointer (and class pointer for static methods), maintains the
   dynamic stackframe info, calls f, traces if requested and forwards
   any pending exception.  Returns m->entrypoint. */
4183 functionptr createnativestub(functionptr f, methodinfo *m, codegendata *cd, registerdata *rd)
4187 s4 stackframesize; /* size of stackframe if needed */
4189 s4 i, j; /* count variables */
4193 /* initialize variables */
/* natives take 1 extra hidden arg (env), statics take 2 (env, class) */
4195 nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;
4197 /* create new method descriptor with additional native parameters */
4201 nmd = (methoddesc *) DMNEW(u1, sizeof(methoddesc) - sizeof(typedesc) +
4202 md->paramcount * sizeof(typedesc) +
4203 nativeparams * sizeof(typedesc));
4205 nmd->paramcount = md->paramcount + nativeparams;
4207 nmd->params = DMNEW(paramdesc, nmd->paramcount);
4209 nmd->paramtypes[0].type = TYPE_ADR; /* add environment pointer */
4211 if (m->flags & ACC_STATIC)
4212 nmd->paramtypes[1].type = TYPE_ADR; /* add class pointer */
4214 MCOPY(nmd->paramtypes + nativeparams, md->paramtypes, typedesc,
/* allocate registers/stack slots for the shifted parameter list */
4217 md_param_alloc(nmd);
4220 /* create method header */
4222 (void) dseg_addaddress(cd, m); /* MethodPointer */
4223 (void) dseg_adds4(cd, 0 * 8); /* FrameSize */
4224 (void) dseg_adds4(cd, 0); /* IsSync */
4225 (void) dseg_adds4(cd, 0); /* IsLeaf */
4226 (void) dseg_adds4(cd, 0); /* IntSave */
4227 (void) dseg_adds4(cd, 0); /* FltSave */
4228 (void) dseg_addlinenumbertablesize(cd);
4229 (void) dseg_adds4(cd, 0); /* ExTableSize */
4232 /* initialize mcode variables */
4234 cd->mcodeptr = (u1 *) cd->mcodebase;
4235 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
4238 /* if function is static, check for initialized */
4240 if ((m->flags & ACC_STATIC) && !m->class->initialized) {
4241 codegen_addpatchref(cd, cd->mcodeptr, PATCHER_clinit, m->class);
4243 if (showdisassemble) {
4244 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* call-trace prologue: spill all argument registers, call
   builtin_trace_args, restore them */
4251 M_ASUB_IMM((INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4253 /* save integer and float argument registers */
4255 for (i = 0; i < md->paramcount && i < INT_ARG_CNT; i++)
4256 M_LST(rd->argintregs[i], REG_SP, (1 + i) * 8);
4258 for (i = 0; i < md->paramcount && i < FLT_ARG_CNT; i++)
4259 M_DST(rd->argfltregs[i], REG_SP, (1 + INT_ARG_CNT + i) * 8);
4261 /* show integer hex code for float arguments */
4263 for (i = 0, l = 0; i < md->paramcount && i < INT_ARG_CNT; i++) {
4264 /* if the paramtype is a float, we have to right shift all */
4265 /* following integer registers */
4267 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type)) {
4268 for (s1 = INT_ARG_CNT - 2; s1 >= i; s1--)
4269 M_MOV(rd->argintregs[s1], rd->argintregs[s1 + 1]);
4271 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[i]);
4276 x86_64_mov_imm_reg(cd, (ptrint) m, REG_ITMP1);
4277 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, 0 * 8);
4278 x86_64_mov_imm_reg(cd, (ptrint) builtin_trace_args, REG_ITMP1);
4279 x86_64_call_reg(cd, REG_ITMP1);
4281 /* restore integer and float argument registers */
4283 for (i = 0; i < md->paramcount && i < INT_ARG_CNT; i++)
4284 M_LLD(rd->argintregs[i], REG_SP, (1 + i) * 8);
4286 for (i = 0; i < md->paramcount && i < FLT_ARG_CNT; i++)
4287 M_DLD(rd->argfltregs[i], REG_SP, (1 + INT_ARG_CNT + i) * 8);
4289 M_AADD_IMM((INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4292 /* 4 == additional size needed for native stack frame information*/
4294 M_ASUB_IMM((4 + INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4296 /* save integer and float argument registers */
4298 for (i = 0; i < md->paramcount && i < INT_ARG_CNT; i++)
4299 M_LST(rd->argintregs[i], REG_SP, i * 8);
4301 for (i = 0; i < md->paramcount && i < FLT_ARG_CNT; i++)
4302 M_DST(rd->argfltregs[i], REG_SP, (INT_ARG_CNT + i) * 8);
/* layout of the native stackframe-info record built below: */
4306 0*8 void *oldThreadspecificHeadValue;
4307 1*8 void **addressOfThreadspecificHead;
4308 2*8 methodinfo *method;
4309 3*8 void *beginOfJavaStackframe; only used if != 0
4310 4*8 void *returnToFromNative;
4313 /* CREATE DYNAMIC STACK INFO -- BEGIN offsets:15,16,17,18*/
4315 x86_64_mov_imm_membase(cd, 0, REG_SP, 18 * 8);
4316 x86_64_mov_imm_membase(cd, (ptrint) m, REG_SP, 17 * 8);
4318 x86_64_mov_imm_reg(cd, (ptrint) builtin_asm_get_stackframeinfo, REG_ITMP1);
4319 x86_64_call_reg(cd, REG_ITMP1);
/* link the new record into the thread-specific stackframe-info chain */
4321 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 16 * 8);
4322 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP2);
4323 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 15 * 8);
4324 x86_64_mov_reg_reg(cd, REG_SP, REG_ITMP2);
4325 x86_64_alu_imm_reg(cd, X86_64_ADD, (1 + INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_ITMP2);
4326 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_RESULT, 0);
4328 /* CREATE DYNAMIC STACK INFO -- END*/
4331 x86_64_mov_imm_reg(cd,(u8)nativeinvokation,REG_ITMP1);
4332 x86_64_call_reg(cd,REG_ITMP1);
4335 /* restore integer and float argument registers */
4337 for (i = 0; i < md->paramcount && i < INT_ARG_CNT; i++)
4338 M_LLD(rd->argintregs[i], REG_SP, i * 8);
4340 for (i = 0; i < md->paramcount && i < FLT_ARG_CNT; i++)
4341 M_DLD(rd->argfltregs[i], REG_SP, (INT_ARG_CNT + i) * 8);
4343 M_AADD_IMM((INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4346 /* calculate stack frame size */
4348 stackframesize = nmd->memuse;
4350 /* keep stack 16-byte aligned */
4351 if (!(stackframesize & 0x1))
4354 M_ASUB_IMM(stackframesize * 8, REG_SP);
4356 /* copy or spill arguments to new locations */
/* walk backwards: j = i + nativeparams maps a Java parameter to its
   slot in the shifted native parameter list without clobbering */
4358 for (i = md->paramcount - 1, j = i + nativeparams; i >= 0; i--, j--) {
4359 t = md->paramtypes[i].type;
4361 if (IS_INT_LNG_TYPE(t)) {
4362 if (!md->params[i].inmemory) {
4363 s1 = rd->argintregs[md->params[i].regoff];
4365 if (!nmd->params[j].inmemory) {
4366 s2 = rd->argintregs[nmd->params[j].regoff];
4370 s2 = nmd->params[j].regoff;
4371 M_LST(s1, REG_SP, s2 * 8);
/* stack -> stack: old slot is above the freshly reserved frame */
4375 s1 = md->params[i].regoff + stackframesize;
4376 s2 = nmd->params[j].regoff;
4377 M_LLD(REG_ITMP1, REG_SP, s1 * 8);
4378 M_LST(REG_ITMP1, REG_SP, s2 * 8);
4382 /* We only copy spilled float arguments, as the float argument */
4383 /* registers keep unchanged. */
4385 if (md->params[i].inmemory) {
4386 s1 = md->params[i].regoff + stackframesize;
4387 s2 = nmd->params[j].regoff;
4388 M_DLD(REG_FTMP1, REG_SP, s1 * 8);
4389 M_DST(REG_FTMP1, REG_SP, s2 * 8);
4394 /* put class into second argument register */
4396 if (m->flags & ACC_STATIC)
4397 x86_64_mov_imm_reg(cd, (ptrint) m->class, rd->argintregs[1]);
4399 /* put env into first argument register */
4401 x86_64_mov_imm_reg(cd, (ptrint) &env, rd->argintregs[0]);
4403 /* do the native function call */
4405 #if !defined(STATIC_CLASSPATH)
/* with dynamic classpath the native address is resolved lazily */
4407 codegen_addpatchref(cd, cd->mcodeptr, PATCHER_resolve_native, m);
4409 if (showdisassemble) {
4410 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
4415 x86_64_mov_imm_reg(cd, (ptrint) f, REG_ITMP1);
4416 x86_64_call_reg(cd, REG_ITMP1);
4419 /* remove stackframe if there is one */
4422 M_AADD_IMM(stackframesize * 8, REG_SP);
4424 /* remove dynamic stack info */
/* unlink the record; REG_RESULT is parked in slot 2*8 around the
   pointer shuffle so the native return value survives */
4426 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 2 * 8);
4427 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2);
4428 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_RESULT);
4429 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_RESULT, 0);
4430 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, REG_RESULT);
4432 M_AADD_IMM(4 * 8, REG_SP);
4434 /* generate call trace */
4437 M_ASUB_IMM(3 * 8, REG_SP); /* keep stack 16-byte aligned */
4439 M_LST(REG_RESULT, REG_SP, 0 * 8);
4440 M_DST(REG_FRESULT, REG_SP, 1 * 8);
4442 x86_64_mov_imm_reg(cd, (ptrint) m, rd->argintregs[0]);
4443 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
4444 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
4445 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
4447 x86_64_mov_imm_reg(cd, (ptrint) builtin_displaymethodstop, REG_ITMP1);
4448 x86_64_call_reg(cd, REG_ITMP1);
4450 M_LLD(REG_RESULT, REG_SP, 0 * 8);
4451 M_DLD(REG_FRESULT, REG_SP, 1 * 8);
4453 M_AADD_IMM(3 * 8, REG_SP);
4456 /* check for exception */
4458 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* preserve the native return value across the helper call */
4459 x86_64_push_reg(cd, REG_RESULT);
4460 x86_64_mov_imm_reg(cd, (ptrint) builtin_get_exceptionptrptr, REG_ITMP3);
4461 x86_64_call_reg(cd, REG_ITMP3);
4462 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
4463 x86_64_pop_reg(cd, REG_RESULT);
4465 x86_64_mov_imm_reg(cd, (ptrint) &_exceptionptr, REG_ITMP3);
4466 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP3);
/* no pending exception: skip the 1-byte `ret' and fall into the
   exception-forwarding code below */
4468 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
4469 x86_64_jcc(cd, X86_64_CC_NE, 1);
4473 /* handle exception */
4475 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4476 x86_64_push_reg(cd, REG_ITMP3);
4477 x86_64_mov_imm_reg(cd, (ptrint) builtin_get_exceptionptrptr, REG_ITMP3);
4478 x86_64_call_reg(cd, REG_ITMP3);
4479 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
4480 x86_64_pop_reg(cd, REG_ITMP1_XPTR);
4482 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
4483 x86_64_mov_imm_reg(cd, (ptrint) &_exceptionptr, REG_ITMP3);
4484 x86_64_alu_reg_reg(cd, X86_64_XOR, REG_ITMP2, REG_ITMP2);
4485 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_ITMP3, 0); /* clear exception pointer */
4488 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC); /* get return address from stack */
/* back up over the 3-byte indirect call so XPC points at the call */
4489 x86_64_alu_imm_reg(cd, X86_64_SUB, 3, REG_ITMP2_XPC); /* callq */
4491 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_nat_exception, REG_ITMP3);
4492 x86_64_jmp_reg(cd, REG_ITMP3);
4495 /* process patcher calls **************************************************/
/* same patcher-stub scheme as at the end of codegen() above */
4503 tmpcd = DNEW(codegendata);
4505 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
4506 /* Get machine code which is patched back in later. A */
4507 /* `call rel32' is 5 bytes long (but read 8 bytes). */
4509 xcodeptr = cd->mcodebase + pref->branchpos;
4510 mcode = *((ptrint *) xcodeptr);
4512 /* patch in `call rel32' to call the following code */
4514 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4515 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4517 /* move pointer to java_objectheader onto stack */
4519 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4520 /* create a virtual java_objectheader */
4522 (void) dseg_addaddress(cd, get_dummyLR()); /* monitorPtr */
4523 off = dseg_addaddress(cd, NULL); /* vftbl */
4525 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + off, REG_ITMP3);
4526 x86_64_push_reg(cd, REG_ITMP3);
4528 x86_64_push_imm(cd, 0);
4531 /* move machine code bytes and classinfo pointer into registers */
4533 x86_64_mov_imm_reg(cd, (ptrint) mcode, REG_ITMP3);
4534 x86_64_push_reg(cd, REG_ITMP3);
4535 x86_64_mov_imm_reg(cd, (ptrint) pref->ref, REG_ITMP3);
4536 x86_64_push_reg(cd, REG_ITMP3);
4538 x86_64_mov_imm_reg(cd, (ptrint) pref->patcher, REG_ITMP3);
4539 x86_64_push_reg(cd, REG_ITMP3);
4541 x86_64_mov_imm_reg(cd, (ptrint) asm_wrapper_patcher, REG_ITMP3);
4542 x86_64_jmp_reg(cd, REG_ITMP3);
4546 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
4548 return m->entrypoint;
4553 * These are local overrides for various environment variables in Emacs.
4554 * Please do not remove this and leave it at the end of the file, where
4555 * Emacs will automagically detect them.
4556 * ---------------------------------------------------------------------
4559 * indent-tabs-mode: t