1 /* src/vm/jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996-2005 R. Grafl, A. Krall, C. Kruegel, C. Oates,
4 R. Obermaisser, M. Platter, M. Probst, S. Ring, E. Steiner,
5 C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich, J. Wenninger,
6 Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
25 Contact: cacao@complang.tuwien.ac.at
27 Authors: Andreas Krall
30 Changes: Christian Ullrich
32 $Id: codegen.c 3473 2005-10-21 11:44:26Z twisti $
46 #include "vm/jit/x86_64/arch.h"
47 #include "vm/jit/x86_64/codegen.h"
48 #include "vm/jit/x86_64/emitfuncs.h"
50 #include "cacao/cacao.h"
51 #include "native/native.h"
52 #include "vm/global.h"
53 #include "vm/builtin.h"
54 #include "vm/loader.h"
55 #include "vm/statistics.h"
56 #include "vm/stringlocal.h"
57 #include "vm/tables.h"
58 #include "vm/jit/asmpart.h"
59 #include "vm/jit/codegen.inc"
60 #include "vm/jit/jit.h"
63 # include "vm/jit/lsra.inc"
66 #include "vm/jit/methodheader.h"
67 #include "vm/jit/parse.h"
68 #include "vm/jit/patcher.h"
69 #include "vm/jit/reg.h"
70 #include "vm/jit/reg.inc"
73 /* codegen *********************************************************************
75 Generates machine code.
77 *******************************************************************************/
79 void codegen(methodinfo *m, codegendata *cd, registerdata *rd)
81 s4 len, s1, s2, s3, d, disp;
90 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
91 builtintable_entry *bte;
94 /* prevent compiler warnings */
106 /* space to save used callee saved registers */
108 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
109 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
111 parentargs_base = rd->memuse + savedregs_num;
113 #if defined(USE_THREADS)
114 /* space to save argument of monitor_enter */
116 if (checksync && (m->flags & ACC_SYNCHRONIZED))
120 /* Keep stack of non-leaf functions 16-byte aligned for calls into native */
121 /* code e.g. libc or jni (alignment problems with movaps). */
123 if (!m->isleafmethod || runverbose)
124 parentargs_base |= 0x1;
126 /* create method header */
128 (void) dseg_addaddress(cd, m); /* MethodPointer */
129 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
131 #if defined(USE_THREADS)
132 /* IsSync contains the offset relative to the stack pointer for the
133 argument of monitor_exit used in the exception handler. Since the
134 offset could be zero and give a wrong meaning of the flag it is
138 if (checksync && (m->flags & ACC_SYNCHRONIZED))
139 (void) dseg_adds4(cd, (rd->memuse + 1) * 8); /* IsSync */
142 (void) dseg_adds4(cd, 0); /* IsSync */
144 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
145 (void) dseg_adds4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
146 (void) dseg_adds4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
148 (void) dseg_addlinenumbertablesize(cd);
150 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
152 /* create exception table */
154 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
155 dseg_addtarget(cd, ex->start);
156 dseg_addtarget(cd, ex->end);
157 dseg_addtarget(cd, ex->handler);
158 (void) dseg_addaddress(cd, ex->catchtype.cls);
161 /* initialize mcode variables */
163 cd->mcodeptr = (u1 *) cd->mcodebase;
164 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
167 /* initialize the last patcher pointer */
169 cd->lastmcodeptr = cd->mcodeptr;
171 /* create stack frame (if necessary) */
174 M_ASUB_IMM(parentargs_base * 8, REG_SP);
176 /* save used callee saved registers */
179 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
180 p--; M_LST(rd->savintregs[i], REG_SP, p * 8);
182 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
183 p--; M_DST(rd->savfltregs[i], REG_SP, p * 8);
186 /* take arguments out of register or stack frame */
190 for (p = 0, l = 0; p < md->paramcount; p++) {
191 t = md->paramtypes[p].type;
192 var = &(rd->locals[l][t]);
194 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
198 s1 = md->params[p].regoff;
199 if (IS_INT_LNG_TYPE(t)) { /* integer args */
200 s2 = rd->argintregs[s1];
201 if (!md->params[p].inmemory) { /* register arguments */
202 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
203 M_INTMOVE(s2, var->regoff);
205 } else { /* reg arg -> spilled */
206 M_LST(s2, REG_SP, var->regoff * 8);
209 } else { /* stack arguments */
210 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
211 /* + 8 for return address */
212 M_LLD(var->regoff, REG_SP, (parentargs_base + s1) * 8 + 8);
214 } else { /* stack arg -> spilled */
215 var->regoff = parentargs_base + s1 + 1;
219 } else { /* floating args */
220 if (!md->params[p].inmemory) { /* register arguments */
221 s2 = rd->argfltregs[s1];
222 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
223 M_FLTMOVE(s2, var->regoff);
225 } else { /* reg arg -> spilled */
226 M_DST(s2, REG_SP, var->regoff * 8);
229 } else { /* stack arguments */
230 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
231 M_DLD(var->regoff, REG_SP, (parentargs_base + s1) * 8 + 8);
234 var->regoff = parentargs_base + s1 + 1;
240 /* save monitorenter argument */
242 #if defined(USE_THREADS)
243 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
244 /* stack offset for monitor argument */
249 M_LSUB_IMM((INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_SP);
251 for (p = 0; p < INT_ARG_CNT; p++)
252 M_LST(rd->argintregs[p], REG_SP, p * 8);
254 for (p = 0; p < FLT_ARG_CNT; p++)
255 M_DST(rd->argfltregs[p], REG_SP, (INT_ARG_CNT + p) * 8);
257 s1 += INT_ARG_CNT + FLT_ARG_CNT;
260 /* decide which monitor enter function to call */
262 if (m->flags & ACC_STATIC) {
263 x86_64_mov_imm_reg(cd, (ptrint) m->class, REG_ITMP1);
264 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, s1 * 8);
265 M_INTMOVE(REG_ITMP1, rd->argintregs[0]);
266 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_staticmonitorenter, REG_ITMP1);
267 x86_64_call_reg(cd, REG_ITMP1);
270 x86_64_test_reg_reg(cd, rd->argintregs[0], rd->argintregs[0]);
271 x86_64_jcc(cd, X86_64_CC_Z, 0);
272 codegen_addxnullrefs(cd, cd->mcodeptr);
273 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, s1 * 8);
274 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_monitorenter, REG_ITMP1);
275 x86_64_call_reg(cd, REG_ITMP1);
279 for (p = 0; p < INT_ARG_CNT; p++)
280 M_LLD(rd->argintregs[p], REG_SP, p * 8);
282 for (p = 0; p < FLT_ARG_CNT; p++)
283 M_DLD(rd->argfltregs[p], REG_SP, (INT_ARG_CNT + p) * 8);
285 M_LADD_IMM((INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_SP);
290 /* Copy argument registers to stack and call trace function with pointer */
291 /* to arguments on stack. */
293 if (runverbose || opt_stat) {
294 M_LSUB_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT + 1 + 1) * 8, REG_SP);
296 /* save integer argument registers */
298 for (p = 0; p < INT_ARG_CNT; p++)
299 M_LST(rd->argintregs[p], REG_SP, (1 + p) * 8);
301 /* save float argument registers */
303 for (p = 0; p < FLT_ARG_CNT; p++)
304 M_DST(rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
306 /* save temporary registers for leaf methods */
308 if (m->isleafmethod) {
309 for (p = 0; p < INT_TMP_CNT; p++)
310 M_LST(rd->tmpintregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
312 for (p = 0; p < FLT_TMP_CNT; p++)
313 M_DST(rd->tmpfltregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
317 /* show integer hex code for float arguments */
319 for (p = 0, l = 0; p < md->paramcount && p < INT_ARG_CNT; p++) {
320 /* if the paramtype is a float, we have to right shift all */
321 /* following integer registers */
323 if (IS_FLT_DBL_TYPE(md->paramtypes[p].type)) {
324 for (s1 = INT_ARG_CNT - 2; s1 >= p; s1--) {
325 M_MOV(rd->argintregs[s1], rd->argintregs[s1 + 1]);
328 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
333 x86_64_mov_imm_reg(cd, (ptrint) m, REG_ITMP2);
334 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
335 x86_64_mov_imm_reg(cd, (ptrint) builtin_trace_args, REG_ITMP1);
336 x86_64_call_reg(cd, REG_ITMP1);
339 x86_64_mov_imm_reg(cd, (ptrint) compiledinvokation, REG_ITMP1);
340 x86_64_call_reg(cd, REG_ITMP1);
343 /* restore integer argument registers */
345 for (p = 0; p < INT_ARG_CNT; p++)
346 M_LLD(rd->argintregs[p], REG_SP, (1 + p) * 8);
348 /* restore float argument registers */
350 for (p = 0; p < FLT_ARG_CNT; p++)
351 M_DLD(rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
353 /* restore temporary registers for leaf methods */
355 if (m->isleafmethod) {
356 for (p = 0; p < INT_TMP_CNT; p++)
357 M_LLD(rd->tmpintregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
359 for (p = 0; p < FLT_TMP_CNT; p++)
360 M_DLD(rd->tmpfltregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
363 M_LADD_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT + 1 + 1) * 8, REG_SP);
368 /* end of header generation */
370 /* walk through all basic blocks */
371 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
373 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
375 if (bptr->flags >= BBREACHED) {
377 /* branch resolving */
380 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
381 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
386 /* copy interface registers to their destination */
394 while (src != NULL) {
396 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
397 if (bptr->type == BBTYPE_SBR) {
398 /* d = reg_of_var(rd, src, REG_ITMP1); */
399 if (!(src->flags & INMEMORY))
403 x86_64_pop_reg(cd, d);
404 store_reg_to_var_int(src, d);
406 } else if (bptr->type == BBTYPE_EXH) {
407 /* d = reg_of_var(rd, src, REG_ITMP1); */
408 if (!(src->flags & INMEMORY))
412 M_INTMOVE(REG_ITMP1, d);
413 store_reg_to_var_int(src, d);
422 while (src != NULL) {
424 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
425 if (bptr->type == BBTYPE_SBR) {
426 d = reg_of_var(rd, src, REG_ITMP1);
427 x86_64_pop_reg(cd, d);
428 store_reg_to_var_int(src, d);
430 } else if (bptr->type == BBTYPE_EXH) {
431 d = reg_of_var(rd, src, REG_ITMP1);
432 M_INTMOVE(REG_ITMP1, d);
433 store_reg_to_var_int(src, d);
437 d = reg_of_var(rd, src, REG_ITMP1);
438 if ((src->varkind != STACKVAR)) {
440 if (IS_FLT_DBL_TYPE(s2)) {
441 s1 = rd->interfaces[len][s2].regoff;
442 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
446 x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
448 store_reg_to_var_flt(src, d);
451 s1 = rd->interfaces[len][s2].regoff;
452 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
456 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
458 store_reg_to_var_int(src, d);
467 /* walk through all instructions */
473 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
474 if (iptr->line != currentline) {
475 dseg_addlinenumber(cd, iptr->line, cd->mcodeptr);
476 currentline = iptr->line;
479 MCODECHECK(1024); /* 1KB should be enough */
482 case ICMD_INLINE_START: /* internal ICMDs */
483 case ICMD_INLINE_END:
486 case ICMD_NOP: /* ... ==> ... */
489 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
490 if (src->flags & INMEMORY) {
491 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
494 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
496 x86_64_jcc(cd, X86_64_CC_Z, 0);
497 codegen_addxnullrefs(cd, cd->mcodeptr);
500 /* constant operations ************************************************/
502 case ICMD_ICONST: /* ... ==> ..., constant */
503 /* op1 = 0, val.i = constant */
505 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
506 if (iptr->val.i == 0) {
507 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
509 x86_64_movl_imm_reg(cd, iptr->val.i, d);
511 store_reg_to_var_int(iptr->dst, d);
514 case ICMD_ACONST: /* ... ==> ..., constant */
515 /* op1 = 0, val.a = constant */
517 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
518 if (iptr->val.a == 0) {
519 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
521 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, d);
523 store_reg_to_var_int(iptr->dst, d);
526 case ICMD_LCONST: /* ... ==> ..., constant */
527 /* op1 = 0, val.l = constant */
529 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
530 if (iptr->val.l == 0) {
531 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
533 x86_64_mov_imm_reg(cd, iptr->val.l, d);
535 store_reg_to_var_int(iptr->dst, d);
538 case ICMD_FCONST: /* ... ==> ..., constant */
539 /* op1 = 0, val.f = constant */
541 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
542 disp = dseg_addfloat(cd, iptr->val.f);
543 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + disp, d);
544 store_reg_to_var_flt(iptr->dst, d);
547 case ICMD_DCONST: /* ... ==> ..., constant */
548 /* op1 = 0, val.d = constant */
550 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
551 disp = dseg_adddouble(cd, iptr->val.d);
552 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + disp, d);
553 store_reg_to_var_flt(iptr->dst, d);
557 /* load/store operations **********************************************/
559 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
560 /* op1 = local variable */
562 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
563 if ((iptr->dst->varkind == LOCALVAR) &&
564 (iptr->dst->varnum == iptr->op1)) {
567 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
568 if (var->flags & INMEMORY) {
569 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
570 store_reg_to_var_int(iptr->dst, d);
573 if (iptr->dst->flags & INMEMORY) {
574 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
577 M_INTMOVE(var->regoff, d);
582 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
583 case ICMD_ALOAD: /* op1 = local variable */
585 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
586 if ((iptr->dst->varkind == LOCALVAR) &&
587 (iptr->dst->varnum == iptr->op1)) {
590 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
591 if (var->flags & INMEMORY) {
592 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
593 store_reg_to_var_int(iptr->dst, d);
596 if (iptr->dst->flags & INMEMORY) {
597 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
600 M_INTMOVE(var->regoff, d);
605 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
606 case ICMD_DLOAD: /* op1 = local variable */
608 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
609 if ((iptr->dst->varkind == LOCALVAR) &&
610 (iptr->dst->varnum == iptr->op1)) {
613 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
614 if (var->flags & INMEMORY) {
615 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
616 store_reg_to_var_flt(iptr->dst, d);
619 if (iptr->dst->flags & INMEMORY) {
620 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
623 M_FLTMOVE(var->regoff, d);
628 case ICMD_ISTORE: /* ..., value ==> ... */
629 case ICMD_LSTORE: /* op1 = local variable */
632 if ((src->varkind == LOCALVAR) &&
633 (src->varnum == iptr->op1)) {
636 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
637 if (var->flags & INMEMORY) {
638 var_to_reg_int(s1, src, REG_ITMP1);
639 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
642 var_to_reg_int(s1, src, var->regoff);
643 M_INTMOVE(s1, var->regoff);
647 case ICMD_FSTORE: /* ..., value ==> ... */
648 case ICMD_DSTORE: /* op1 = local variable */
650 if ((src->varkind == LOCALVAR) &&
651 (src->varnum == iptr->op1)) {
654 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
655 if (var->flags & INMEMORY) {
656 var_to_reg_flt(s1, src, REG_FTMP1);
657 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
660 var_to_reg_flt(s1, src, var->regoff);
661 M_FLTMOVE(s1, var->regoff);
666 /* pop/dup/swap operations ********************************************/
668 /* attention: double and longs are only one entry in CACAO ICMDs */
670 case ICMD_POP: /* ..., value ==> ... */
671 case ICMD_POP2: /* ..., value, value ==> ... */
674 case ICMD_DUP: /* ..., a ==> ..., a, a */
675 M_COPY(src, iptr->dst);
678 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
680 M_COPY(src, iptr->dst);
681 M_COPY(src->prev, iptr->dst->prev);
682 M_COPY(iptr->dst, iptr->dst->prev->prev);
685 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
687 M_COPY(src, iptr->dst);
688 M_COPY(src->prev, iptr->dst->prev);
689 M_COPY(src->prev->prev, iptr->dst->prev->prev);
690 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
693 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
695 M_COPY(src, iptr->dst);
696 M_COPY(src->prev, iptr->dst->prev);
699 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
701 M_COPY(src, iptr->dst);
702 M_COPY(src->prev, iptr->dst->prev);
703 M_COPY(src->prev->prev, iptr->dst->prev->prev);
704 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
705 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
708 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
710 M_COPY(src, iptr->dst);
711 M_COPY(src->prev, iptr->dst->prev);
712 M_COPY(src->prev->prev, iptr->dst->prev->prev);
713 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
714 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
715 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
718 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
720 M_COPY(src, iptr->dst->prev);
721 M_COPY(src->prev, iptr->dst);
725 /* integer operations *************************************************/
727 case ICMD_INEG: /* ..., value ==> ..., - value */
729 d = reg_of_var(rd, iptr->dst, REG_NULL);
730 if (iptr->dst->flags & INMEMORY) {
731 if (src->flags & INMEMORY) {
732 if (src->regoff == iptr->dst->regoff) {
733 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
736 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
737 x86_64_negl_reg(cd, REG_ITMP1);
738 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
742 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
743 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
747 if (src->flags & INMEMORY) {
748 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
749 x86_64_negl_reg(cd, d);
752 M_INTMOVE(src->regoff, iptr->dst->regoff);
753 x86_64_negl_reg(cd, iptr->dst->regoff);
758 case ICMD_LNEG: /* ..., value ==> ..., - value */
760 d = reg_of_var(rd, iptr->dst, REG_NULL);
761 if (iptr->dst->flags & INMEMORY) {
762 if (src->flags & INMEMORY) {
763 if (src->regoff == iptr->dst->regoff) {
764 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
767 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
768 x86_64_neg_reg(cd, REG_ITMP1);
769 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
773 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
774 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
778 if (src->flags & INMEMORY) {
779 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
780 x86_64_neg_reg(cd, iptr->dst->regoff);
783 M_INTMOVE(src->regoff, iptr->dst->regoff);
784 x86_64_neg_reg(cd, iptr->dst->regoff);
789 case ICMD_I2L: /* ..., value ==> ..., value */
791 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
792 if (src->flags & INMEMORY) {
793 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
796 x86_64_movslq_reg_reg(cd, src->regoff, d);
798 store_reg_to_var_int(iptr->dst, d);
801 case ICMD_L2I: /* ..., value ==> ..., value */
803 var_to_reg_int(s1, src, REG_ITMP1);
804 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
806 store_reg_to_var_int(iptr->dst, d);
809 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
811 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
812 if (src->flags & INMEMORY) {
813 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
816 x86_64_movsbq_reg_reg(cd, src->regoff, d);
818 store_reg_to_var_int(iptr->dst, d);
821 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
823 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
824 if (src->flags & INMEMORY) {
825 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
828 x86_64_movzwq_reg_reg(cd, src->regoff, d);
830 store_reg_to_var_int(iptr->dst, d);
833 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
835 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
836 if (src->flags & INMEMORY) {
837 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
840 x86_64_movswq_reg_reg(cd, src->regoff, d);
842 store_reg_to_var_int(iptr->dst, d);
846 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
848 d = reg_of_var(rd, iptr->dst, REG_NULL);
849 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
852 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
853 /* val.i = constant */
855 d = reg_of_var(rd, iptr->dst, REG_NULL);
856 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
859 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
861 d = reg_of_var(rd, iptr->dst, REG_NULL);
862 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
865 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
866 /* val.l = constant */
868 d = reg_of_var(rd, iptr->dst, REG_NULL);
869 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
872 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
874 d = reg_of_var(rd, iptr->dst, REG_NULL);
875 if (iptr->dst->flags & INMEMORY) {
876 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
877 if (src->prev->regoff == iptr->dst->regoff) {
878 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
879 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
882 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
883 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
884 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
887 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
888 M_INTMOVE(src->prev->regoff, REG_ITMP1);
889 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
890 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
892 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
893 if (src->prev->regoff == iptr->dst->regoff) {
894 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
897 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
898 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
899 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
903 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
904 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
908 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
909 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
910 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
912 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
913 M_INTMOVE(src->prev->regoff, d);
914 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
916 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
917 /* workaround for reg alloc */
918 if (src->regoff == iptr->dst->regoff) {
919 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
920 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
921 M_INTMOVE(REG_ITMP1, d);
924 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
925 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
929 /* workaround for reg alloc */
930 if (src->regoff == iptr->dst->regoff) {
931 M_INTMOVE(src->prev->regoff, REG_ITMP1);
932 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
933 M_INTMOVE(REG_ITMP1, d);
936 M_INTMOVE(src->prev->regoff, d);
937 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
943 case ICMD_ISUBCONST: /* ..., value ==> ..., value - constant */
944 /* val.i = constant */
946 d = reg_of_var(rd, iptr->dst, REG_NULL);
947 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
950 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
952 d = reg_of_var(rd, iptr->dst, REG_NULL);
953 if (iptr->dst->flags & INMEMORY) {
954 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
955 if (src->prev->regoff == iptr->dst->regoff) {
956 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
957 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
960 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
961 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
962 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
965 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
966 M_INTMOVE(src->prev->regoff, REG_ITMP1);
967 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
968 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
970 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
971 if (src->prev->regoff == iptr->dst->regoff) {
972 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
975 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
976 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
977 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
981 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
982 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
986 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
987 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
988 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
990 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
991 M_INTMOVE(src->prev->regoff, d);
992 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
994 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
995 /* workaround for reg alloc */
996 if (src->regoff == iptr->dst->regoff) {
997 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
998 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
999 M_INTMOVE(REG_ITMP1, d);
1002 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1003 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1007 /* workaround for reg alloc */
1008 if (src->regoff == iptr->dst->regoff) {
1009 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1010 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1011 M_INTMOVE(REG_ITMP1, d);
1014 M_INTMOVE(src->prev->regoff, d);
1015 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1021 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1022 /* val.l = constant */
1024 d = reg_of_var(rd, iptr->dst, REG_NULL);
1025 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1028 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1030 d = reg_of_var(rd, iptr->dst, REG_NULL);
1031 if (iptr->dst->flags & INMEMORY) {
1032 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1033 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1034 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1035 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1037 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1038 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1039 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1040 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1042 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1043 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1044 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1045 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1048 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1049 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1050 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1054 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1055 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1056 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1058 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1059 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1060 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1062 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1063 M_INTMOVE(src->regoff, iptr->dst->regoff);
1064 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1067 if (src->regoff == iptr->dst->regoff) {
1068 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1071 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1072 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1078 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1079 /* val.i = constant */
1081 d = reg_of_var(rd, iptr->dst, REG_NULL);
1082 if (iptr->dst->flags & INMEMORY) {
1083 if (src->flags & INMEMORY) {
1084 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1085 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1088 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1089 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1093 if (src->flags & INMEMORY) {
1094 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1097 if (iptr->val.i == 2) {
1098 M_INTMOVE(src->regoff, iptr->dst->regoff);
1099 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1102 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
1108 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1110 d = reg_of_var(rd, iptr->dst, REG_NULL);
1111 if (iptr->dst->flags & INMEMORY) {
1112 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1113 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1114 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1115 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1117 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1118 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1119 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1120 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1122 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1123 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1124 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1125 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1128 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1129 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1130 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1134 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1135 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1136 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1138 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1139 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1140 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1142 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1143 M_INTMOVE(src->regoff, iptr->dst->regoff);
1144 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1147 if (src->regoff == iptr->dst->regoff) {
1148 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1151 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1152 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
1158 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1159 /* val.l = constant */
1161 d = reg_of_var(rd, iptr->dst, REG_NULL);
1162 if (iptr->dst->flags & INMEMORY) {
1163 if (src->flags & INMEMORY) {
1164 if (IS_IMM32(iptr->val.l)) {
1165 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1168 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1169 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1171 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1174 if (IS_IMM32(iptr->val.l)) {
1175 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1178 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1179 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1181 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1185 if (src->flags & INMEMORY) {
1186 if (IS_IMM32(iptr->val.l)) {
1187 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1190 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1191 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1195 /* should match in many cases */
1196 if (iptr->val.l == 2) {
1197 M_INTMOVE(src->regoff, iptr->dst->regoff);
1198 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1201 if (IS_IMM32(iptr->val.l)) {
1202 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1205 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1206 M_INTMOVE(src->regoff, iptr->dst->regoff);
1207 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
1214 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1216 d = reg_of_var(rd, iptr->dst, REG_NULL);
1217 if (src->prev->flags & INMEMORY) {
1218 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1221 M_INTMOVE(src->prev->regoff, RAX);
1224 if (src->flags & INMEMORY) {
1225 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1228 M_INTMOVE(src->regoff, REG_ITMP3);
1232 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1233 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1234 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1235 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1237 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1239 x86_64_idivl_reg(cd, REG_ITMP3);
1241 if (iptr->dst->flags & INMEMORY) {
1242 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1243 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1246 M_INTMOVE(RAX, iptr->dst->regoff);
1248 if (iptr->dst->regoff != RDX) {
1249 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1254 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1255 d = reg_of_var(rd, iptr->dst, REG_NULL);
1256 if (src->prev->flags & INMEMORY) {
1257 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1260 M_INTMOVE(src->prev->regoff, RAX);
1263 if (src->flags & INMEMORY) {
1264 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1267 M_INTMOVE(src->regoff, REG_ITMP3);
1271 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1273 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1274 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1277 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1278 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1279 x86_64_jcc(cd, X86_64_CC_E, 1 + 3); /* 6 bytes */
1282 x86_64_idivl_reg(cd, REG_ITMP3);
1284 if (iptr->dst->flags & INMEMORY) {
1285 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1286 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1289 M_INTMOVE(RDX, iptr->dst->regoff);
1291 if (iptr->dst->regoff != RDX) {
1292 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1297 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1298 /* val.i = constant */
1300 var_to_reg_int(s1, src, REG_ITMP1);
1301 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1302 M_INTMOVE(s1, REG_ITMP1);
1303 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1304 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1305 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1306 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1307 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1308 store_reg_to_var_int(iptr->dst, d);
1311 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1312 /* val.i = constant */
1314 var_to_reg_int(s1, src, REG_ITMP1);
1315 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1316 M_INTMOVE(s1, REG_ITMP1);
1317 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1318 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1319 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1320 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1321 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1322 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1323 store_reg_to_var_int(iptr->dst, d);
1327 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1329 d = reg_of_var(rd, iptr->dst, REG_NULL);
1330 if (src->prev->flags & INMEMORY) {
1331 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1334 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1337 if (src->flags & INMEMORY) {
1338 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1341 M_INTMOVE(src->regoff, REG_ITMP3);
1345 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1346 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1347 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1348 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1349 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1351 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1353 x86_64_idiv_reg(cd, REG_ITMP3);
1355 if (iptr->dst->flags & INMEMORY) {
1356 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1357 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1360 M_INTMOVE(RAX, iptr->dst->regoff);
1362 if (iptr->dst->regoff != RDX) {
1363 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1368 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1370 d = reg_of_var(rd, iptr->dst, REG_NULL);
1371 if (src->prev->flags & INMEMORY) {
1372 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1375 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1378 if (src->flags & INMEMORY) {
1379 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1382 M_INTMOVE(src->regoff, REG_ITMP3);
1386 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1388 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1389 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1390 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1393 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1394 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1395 x86_64_jcc(cd, X86_64_CC_E, 2 + 3); /* 6 bytes */
1398 x86_64_idiv_reg(cd, REG_ITMP3);
1400 if (iptr->dst->flags & INMEMORY) {
1401 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1402 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1405 M_INTMOVE(RDX, iptr->dst->regoff);
1407 if (iptr->dst->regoff != RDX) {
1408 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1413 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1414 /* val.i = constant */
1416 var_to_reg_int(s1, src, REG_ITMP1);
1417 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1418 M_INTMOVE(s1, REG_ITMP1);
1419 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1420 x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1421 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1422 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1423 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1424 store_reg_to_var_int(iptr->dst, d);
1427 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1428 /* val.l = constant */
1430 var_to_reg_int(s1, src, REG_ITMP1);
1431 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1432 M_INTMOVE(s1, REG_ITMP1);
1433 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1434 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1435 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1436 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1437 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1438 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1439 store_reg_to_var_int(iptr->dst, d);
1442 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1444 d = reg_of_var(rd, iptr->dst, REG_NULL);
1445 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1448 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1449 /* val.i = constant */
1451 d = reg_of_var(rd, iptr->dst, REG_NULL);
1452 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1455 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1457 d = reg_of_var(rd, iptr->dst, REG_NULL);
1458 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1461 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1462 /* val.i = constant */
1464 d = reg_of_var(rd, iptr->dst, REG_NULL);
1465 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1468 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1470 d = reg_of_var(rd, iptr->dst, REG_NULL);
1471 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1474 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1475 /* val.i = constant */
1477 d = reg_of_var(rd, iptr->dst, REG_NULL);
1478 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1481 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1483 d = reg_of_var(rd, iptr->dst, REG_NULL);
1484 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1487 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1488 /* val.i = constant */
1490 d = reg_of_var(rd, iptr->dst, REG_NULL);
1491 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1494 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1496 d = reg_of_var(rd, iptr->dst, REG_NULL);
1497 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1500 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1501 /* val.i = constant */
1503 d = reg_of_var(rd, iptr->dst, REG_NULL);
1504 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1507 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1509 d = reg_of_var(rd, iptr->dst, REG_NULL);
1510 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1513 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1514 /* val.l = constant */
1516 d = reg_of_var(rd, iptr->dst, REG_NULL);
1517 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1520 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1522 d = reg_of_var(rd, iptr->dst, REG_NULL);
1523 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1526 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1527 /* val.i = constant */
1529 d = reg_of_var(rd, iptr->dst, REG_NULL);
1530 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1533 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1535 d = reg_of_var(rd, iptr->dst, REG_NULL);
1536 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1539 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1540 /* val.l = constant */
1542 d = reg_of_var(rd, iptr->dst, REG_NULL);
1543 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1546 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1548 d = reg_of_var(rd, iptr->dst, REG_NULL);
1549 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1552 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1553 /* val.i = constant */
1555 d = reg_of_var(rd, iptr->dst, REG_NULL);
1556 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1559 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1561 d = reg_of_var(rd, iptr->dst, REG_NULL);
1562 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1565 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1566 /* val.l = constant */
1568 d = reg_of_var(rd, iptr->dst, REG_NULL);
1569 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1572 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1574 d = reg_of_var(rd, iptr->dst, REG_NULL);
1575 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1578 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1579 /* val.i = constant */
1581 d = reg_of_var(rd, iptr->dst, REG_NULL);
1582 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1585 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1587 d = reg_of_var(rd, iptr->dst, REG_NULL);
1588 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1591 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1592 /* val.l = constant */
1594 d = reg_of_var(rd, iptr->dst, REG_NULL);
1595 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
1599 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1600 /* op1 = variable, val.i = constant */
1602 /* using inc and dec is definitely faster than add -- tested */
1605 var = &(rd->locals[iptr->op1][TYPE_INT]);
1607 if (var->flags & INMEMORY) {
1608 if (iptr->val.i == 1) {
1609 x86_64_incl_membase(cd, REG_SP, d * 8);
1611 } else if (iptr->val.i == -1) {
1612 x86_64_decl_membase(cd, REG_SP, d * 8);
1615 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1619 if (iptr->val.i == 1) {
1620 x86_64_incl_reg(cd, d);
1622 } else if (iptr->val.i == -1) {
1623 x86_64_decl_reg(cd, d);
1626 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1632 /* floating operations ************************************************/
1634 case ICMD_FNEG: /* ..., value ==> ..., - value */
1636 var_to_reg_flt(s1, src, REG_FTMP1);
1637 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1638 disp = dseg_adds4(cd, 0x80000000);
1640 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + disp, REG_FTMP2);
1641 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1642 store_reg_to_var_flt(iptr->dst, d);
1645 case ICMD_DNEG: /* ..., value ==> ..., - value */
1647 var_to_reg_flt(s1, src, REG_FTMP1);
1648 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1649 disp = dseg_adds8(cd, 0x8000000000000000);
1651 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + disp, REG_FTMP2);
1652 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1653 store_reg_to_var_flt(iptr->dst, d);
1656 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1658 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1659 var_to_reg_flt(s2, src, REG_FTMP2);
1660 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1662 x86_64_addss_reg_reg(cd, s2, d);
1663 } else if (s2 == d) {
1664 x86_64_addss_reg_reg(cd, s1, d);
1667 x86_64_addss_reg_reg(cd, s2, d);
1669 store_reg_to_var_flt(iptr->dst, d);
1672 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1674 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1675 var_to_reg_flt(s2, src, REG_FTMP2);
1676 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1678 x86_64_addsd_reg_reg(cd, s2, d);
1679 } else if (s2 == d) {
1680 x86_64_addsd_reg_reg(cd, s1, d);
1683 x86_64_addsd_reg_reg(cd, s2, d);
1685 store_reg_to_var_flt(iptr->dst, d);
1688 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1690 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1691 var_to_reg_flt(s2, src, REG_FTMP2);
1692 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1694 M_FLTMOVE(s2, REG_FTMP2);
1698 x86_64_subss_reg_reg(cd, s2, d);
1699 store_reg_to_var_flt(iptr->dst, d);
1702 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1704 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1705 var_to_reg_flt(s2, src, REG_FTMP2);
1706 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1708 M_FLTMOVE(s2, REG_FTMP2);
1712 x86_64_subsd_reg_reg(cd, s2, d);
1713 store_reg_to_var_flt(iptr->dst, d);
1716 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1718 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1719 var_to_reg_flt(s2, src, REG_FTMP2);
1720 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1722 x86_64_mulss_reg_reg(cd, s2, d);
1723 } else if (s2 == d) {
1724 x86_64_mulss_reg_reg(cd, s1, d);
1727 x86_64_mulss_reg_reg(cd, s2, d);
1729 store_reg_to_var_flt(iptr->dst, d);
1732 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1734 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1735 var_to_reg_flt(s2, src, REG_FTMP2);
1736 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1738 x86_64_mulsd_reg_reg(cd, s2, d);
1739 } else if (s2 == d) {
1740 x86_64_mulsd_reg_reg(cd, s1, d);
1743 x86_64_mulsd_reg_reg(cd, s2, d);
1745 store_reg_to_var_flt(iptr->dst, d);
1748 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1750 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1751 var_to_reg_flt(s2, src, REG_FTMP2);
1752 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1754 M_FLTMOVE(s2, REG_FTMP2);
1758 x86_64_divss_reg_reg(cd, s2, d);
1759 store_reg_to_var_flt(iptr->dst, d);
1762 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1764 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1765 var_to_reg_flt(s2, src, REG_FTMP2);
1766 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1768 M_FLTMOVE(s2, REG_FTMP2);
1772 x86_64_divsd_reg_reg(cd, s2, d);
1773 store_reg_to_var_flt(iptr->dst, d);
1776 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1778 var_to_reg_int(s1, src, REG_ITMP1);
1779 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1780 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1781 store_reg_to_var_flt(iptr->dst, d);
1784 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1786 var_to_reg_int(s1, src, REG_ITMP1);
1787 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1788 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1789 store_reg_to_var_flt(iptr->dst, d);
1792 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1794 var_to_reg_int(s1, src, REG_ITMP1);
1795 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1796 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1797 store_reg_to_var_flt(iptr->dst, d);
1800 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1802 var_to_reg_int(s1, src, REG_ITMP1);
1803 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1804 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1805 store_reg_to_var_flt(iptr->dst, d);
1808 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1810 var_to_reg_flt(s1, src, REG_FTMP1);
1811 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1812 x86_64_cvttss2si_reg_reg(cd, s1, d);
1813 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1814 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1815 x86_64_jcc(cd, X86_64_CC_NE, a);
1816 M_FLTMOVE(s1, REG_FTMP1);
1817 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1818 x86_64_call_reg(cd, REG_ITMP2);
1819 M_INTMOVE(REG_RESULT, d);
1820 store_reg_to_var_int(iptr->dst, d);
1823 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1825 var_to_reg_flt(s1, src, REG_FTMP1);
1826 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1827 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1828 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1829 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1830 x86_64_jcc(cd, X86_64_CC_NE, a);
1831 M_FLTMOVE(s1, REG_FTMP1);
1832 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1833 x86_64_call_reg(cd, REG_ITMP2);
1834 M_INTMOVE(REG_RESULT, d);
1835 store_reg_to_var_int(iptr->dst, d);
1838 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1840 var_to_reg_flt(s1, src, REG_FTMP1);
1841 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1842 x86_64_cvttss2siq_reg_reg(cd, s1, d);
1843 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1844 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1845 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1846 x86_64_jcc(cd, X86_64_CC_NE, a);
1847 M_FLTMOVE(s1, REG_FTMP1);
1848 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
1849 x86_64_call_reg(cd, REG_ITMP2);
1850 M_INTMOVE(REG_RESULT, d);
1851 store_reg_to_var_int(iptr->dst, d);
1854 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1856 var_to_reg_flt(s1, src, REG_FTMP1);
1857 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1858 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1859 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1860 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1861 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1862 x86_64_jcc(cd, X86_64_CC_NE, a);
1863 M_FLTMOVE(s1, REG_FTMP1);
1864 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
1865 x86_64_call_reg(cd, REG_ITMP2);
1866 M_INTMOVE(REG_RESULT, d);
1867 store_reg_to_var_int(iptr->dst, d);
1870 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1872 var_to_reg_flt(s1, src, REG_FTMP1);
1873 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1874 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1875 store_reg_to_var_flt(iptr->dst, d);
1878 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1880 var_to_reg_flt(s1, src, REG_FTMP1);
1881 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1882 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1883 store_reg_to_var_flt(iptr->dst, d);
1886 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1887 /* == => 0, < => 1, > => -1 */
1889 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1890 var_to_reg_flt(s2, src, REG_FTMP2);
1891 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1892 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1893 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1894 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1895 x86_64_ucomiss_reg_reg(cd, s1, s2);
1896 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1897 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1898 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1899 store_reg_to_var_int(iptr->dst, d);
1902 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1903 /* == => 0, < => 1, > => -1 */
1905 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1906 var_to_reg_flt(s2, src, REG_FTMP2);
1907 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1908 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1909 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1910 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1911 x86_64_ucomiss_reg_reg(cd, s1, s2);
1912 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1913 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1914 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1915 store_reg_to_var_int(iptr->dst, d);
1918 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1919 /* == => 0, < => 1, > => -1 */
1921 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1922 var_to_reg_flt(s2, src, REG_FTMP2);
1923 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1924 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1925 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1926 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1927 x86_64_ucomisd_reg_reg(cd, s1, s2);
1928 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1929 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1930 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1931 store_reg_to_var_int(iptr->dst, d);
1934 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1935 /* == => 0, < => 1, > => -1 */
1937 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1938 var_to_reg_flt(s2, src, REG_FTMP2);
1939 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1940 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1941 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1942 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1943 x86_64_ucomisd_reg_reg(cd, s1, s2);
1944 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1945 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1946 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1947 store_reg_to_var_int(iptr->dst, d);
1951 /* memory operations **************************************************/
1953 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
1955 var_to_reg_int(s1, src, REG_ITMP1);
1956 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1957 gen_nullptr_check(s1);
1958 x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
1959 store_reg_to_var_int(iptr->dst, d);
1962 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
1964 var_to_reg_int(s1, src->prev, REG_ITMP1);
1965 var_to_reg_int(s2, src, REG_ITMP2);
1966 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1967 if (iptr->op1 == 0) {
1968 gen_nullptr_check(s1);
1971 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
1972 store_reg_to_var_int(iptr->dst, d);
1975 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1977 var_to_reg_int(s1, src->prev, REG_ITMP1);
1978 var_to_reg_int(s2, src, REG_ITMP2);
1979 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1980 if (iptr->op1 == 0) {
1981 gen_nullptr_check(s1);
1984 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
1985 store_reg_to_var_int(iptr->dst, d);
1988 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1990 var_to_reg_int(s1, src->prev, REG_ITMP1);
1991 var_to_reg_int(s2, src, REG_ITMP2);
1992 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1993 if (iptr->op1 == 0) {
1994 gen_nullptr_check(s1);
1997 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
1998 store_reg_to_var_int(iptr->dst, d);
2001 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2003 var_to_reg_int(s1, src->prev, REG_ITMP1);
2004 var_to_reg_int(s2, src, REG_ITMP2);
2005 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2006 if (iptr->op1 == 0) {
2007 gen_nullptr_check(s1);
2010 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2011 store_reg_to_var_flt(iptr->dst, d);
2014 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2016 var_to_reg_int(s1, src->prev, REG_ITMP1);
2017 var_to_reg_int(s2, src, REG_ITMP2);
2018 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2019 if (iptr->op1 == 0) {
2020 gen_nullptr_check(s1);
2023 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2024 store_reg_to_var_flt(iptr->dst, d);
2027 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2029 var_to_reg_int(s1, src->prev, REG_ITMP1);
2030 var_to_reg_int(s2, src, REG_ITMP2);
2031 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2032 if (iptr->op1 == 0) {
2033 gen_nullptr_check(s1);
2036 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2037 store_reg_to_var_int(iptr->dst, d);
2040 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2042 var_to_reg_int(s1, src->prev, REG_ITMP1);
2043 var_to_reg_int(s2, src, REG_ITMP2);
2044 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2045 if (iptr->op1 == 0) {
2046 gen_nullptr_check(s1);
2049 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2050 store_reg_to_var_int(iptr->dst, d);
2053 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2055 var_to_reg_int(s1, src->prev, REG_ITMP1);
2056 var_to_reg_int(s2, src, REG_ITMP2);
2057 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2058 if (iptr->op1 == 0) {
2059 gen_nullptr_check(s1);
2062 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2063 store_reg_to_var_int(iptr->dst, d);
2067 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2069 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2070 var_to_reg_int(s2, src->prev, REG_ITMP2);
2071 if (iptr->op1 == 0) {
2072 gen_nullptr_check(s1);
2075 var_to_reg_int(s3, src, REG_ITMP3);
2076 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2079 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2081 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2082 var_to_reg_int(s2, src->prev, REG_ITMP2);
2083 if (iptr->op1 == 0) {
2084 gen_nullptr_check(s1);
2087 var_to_reg_int(s3, src, REG_ITMP3);
2088 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2091 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2093 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2094 var_to_reg_int(s2, src->prev, REG_ITMP2);
2095 if (iptr->op1 == 0) {
2096 gen_nullptr_check(s1);
2099 var_to_reg_flt(s3, src, REG_FTMP3);
2100 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2103 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2105 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2106 var_to_reg_int(s2, src->prev, REG_ITMP2);
2107 if (iptr->op1 == 0) {
2108 gen_nullptr_check(s1);
2111 var_to_reg_flt(s3, src, REG_FTMP3);
2112 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2115 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2117 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2118 var_to_reg_int(s2, src->prev, REG_ITMP2);
2119 if (iptr->op1 == 0) {
2120 gen_nullptr_check(s1);
2123 var_to_reg_int(s3, src, REG_ITMP3);
2124 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2127 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2129 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2130 var_to_reg_int(s2, src->prev, REG_ITMP2);
2131 if (iptr->op1 == 0) {
2132 gen_nullptr_check(s1);
2135 var_to_reg_int(s3, src, REG_ITMP3);
2136 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2139 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2141 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2142 var_to_reg_int(s2, src->prev, REG_ITMP2);
2143 if (iptr->op1 == 0) {
2144 gen_nullptr_check(s1);
2147 var_to_reg_int(s3, src, REG_ITMP3);
2148 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2151 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2153 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2154 var_to_reg_int(s2, src->prev, REG_ITMP2);
2155 /* if (iptr->op1 == 0) { */
2156 gen_nullptr_check(s1);
2159 var_to_reg_int(s3, src, REG_ITMP3);
2161 M_MOV(s1, rd->argintregs[0]);
2162 M_MOV(s3, rd->argintregs[1]);
2164 x86_64_mov_imm_reg(cd, (ptrint) bte->fp, REG_ITMP1);
2165 x86_64_call_reg(cd, REG_ITMP1);
2168 codegen_addxstorerefs(cd, cd->mcodeptr);
2170 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2171 var_to_reg_int(s2, src->prev, REG_ITMP2);
2172 var_to_reg_int(s3, src, REG_ITMP3);
2173 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2177 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2179 var_to_reg_int(s1, src->prev, REG_ITMP1);
2180 var_to_reg_int(s2, src, REG_ITMP2);
2181 if (iptr->op1 == 0) {
2182 gen_nullptr_check(s1);
2185 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2188 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2190 var_to_reg_int(s1, src->prev, REG_ITMP1);
2191 var_to_reg_int(s2, src, REG_ITMP2);
2192 if (iptr->op1 == 0) {
2193 gen_nullptr_check(s1);
2197 if (IS_IMM32(iptr->val.l)) {
2198 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2201 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2202 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2206 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2208 var_to_reg_int(s1, src->prev, REG_ITMP1);
2209 var_to_reg_int(s2, src, REG_ITMP2);
2210 if (iptr->op1 == 0) {
2211 gen_nullptr_check(s1);
2214 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2217 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2219 var_to_reg_int(s1, src->prev, REG_ITMP1);
2220 var_to_reg_int(s2, src, REG_ITMP2);
2221 if (iptr->op1 == 0) {
2222 gen_nullptr_check(s1);
2225 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2228 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2230 var_to_reg_int(s1, src->prev, REG_ITMP1);
2231 var_to_reg_int(s2, src, REG_ITMP2);
2232 if (iptr->op1 == 0) {
2233 gen_nullptr_check(s1);
2236 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2239 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2241 var_to_reg_int(s1, src->prev, REG_ITMP1);
2242 var_to_reg_int(s2, src, REG_ITMP2);
2243 if (iptr->op1 == 0) {
2244 gen_nullptr_check(s1);
2247 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2251 case ICMD_GETSTATIC: /* ... ==> ..., value */
2252 /* op1 = type, val.a = field address */
2255 disp = dseg_addaddress(cd, NULL);
2257 codegen_addpatchref(cd, cd->mcodeptr,
2258 PATCHER_get_putstatic,
2259 (unresolved_field *) iptr->target, disp);
2261 if (opt_showdisassemble) {
2262 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2266 fieldinfo *fi = iptr->val.a;
2268 disp = dseg_addaddress(cd, &(fi->value));
2270 if (!fi->class->initialized) {
2271 codegen_addpatchref(cd, cd->mcodeptr,
2272 PATCHER_clinit, fi->class, 0);
2274 if (opt_showdisassemble) {
2275 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2280 /* This approach is much faster than moving the field address */
2281 /* inline into a register. */
2282 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP2);
2283 switch (iptr->op1) {
2285 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2286 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2287 store_reg_to_var_int(iptr->dst, d);
2291 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2292 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2293 store_reg_to_var_int(iptr->dst, d);
2296 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2297 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2298 store_reg_to_var_flt(iptr->dst, d);
2301 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2302 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2303 store_reg_to_var_flt(iptr->dst, d);
2308 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2309 /* op1 = type, val.a = field address */
2312 disp = dseg_addaddress(cd, NULL);
2314 codegen_addpatchref(cd, cd->mcodeptr,
2315 PATCHER_get_putstatic,
2316 (unresolved_field *) iptr->target, disp);
2318 if (opt_showdisassemble) {
2319 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2323 fieldinfo *fi = iptr->val.a;
2325 disp = dseg_addaddress(cd, &(fi->value));
2327 if (!fi->class->initialized) {
2328 codegen_addpatchref(cd, cd->mcodeptr,
2329 PATCHER_clinit, fi->class, 0);
2331 if (opt_showdisassemble) {
2332 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2337 /* This approach is much faster than moving the field address */
2338 /* inline into a register. */
2339 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP2);
2340 switch (iptr->op1) {
2342 var_to_reg_int(s2, src, REG_ITMP1);
2343 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2347 var_to_reg_int(s2, src, REG_ITMP1);
2348 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2351 var_to_reg_flt(s2, src, REG_FTMP1);
2352 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2355 var_to_reg_flt(s2, src, REG_FTMP1);
2356 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
2361 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2362 /* val = value (in current instruction) */
2363 /* op1 = type, val.a = field address (in */
2364 /* following NOP) */
2366 if (!iptr[1].val.a) {
2367 disp = dseg_addaddress(cd, NULL);
2369 codegen_addpatchref(cd, cd->mcodeptr,
2370 PATCHER_get_putstatic,
2371 (unresolved_field *) iptr[1].target, disp);
2373 if (opt_showdisassemble) {
2374 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2378 fieldinfo *fi = iptr[1].val.a;
2380 disp = dseg_addaddress(cd, &(fi->value));
2382 if (!fi->class->initialized) {
2383 codegen_addpatchref(cd, cd->mcodeptr,
2384 PATCHER_clinit, fi->class, 0);
2386 if (opt_showdisassemble) {
2387 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2392 /* This approach is much faster than moving the field address */
2393 /* inline into a register. */
2394 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP1);
2395 switch (iptr->op1) {
2398 x86_64_movl_imm_membase(cd, iptr->val.i, REG_ITMP1, 0);
2403 if (IS_IMM32(iptr->val.l)) {
2404 x86_64_mov_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2406 x86_64_movl_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2407 x86_64_movl_imm_membase(cd, iptr->val.l >> 32, REG_ITMP1, 4);
2413 case ICMD_GETFIELD: /* ... ==> ..., value */
2414 /* op1 = type, val.i = field offset */
2416 var_to_reg_int(s1, src, REG_ITMP1);
2417 gen_nullptr_check(s1);
2420 codegen_addpatchref(cd, cd->mcodeptr,
2421 PATCHER_get_putfield,
2422 (unresolved_field *) iptr->target, 0);
2424 if (opt_showdisassemble) {
2425 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2431 a = ((fieldinfo *) (iptr->val.a))->offset;
2434 switch (iptr->op1) {
2436 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2437 x86_64_movl_membase32_reg(cd, s1, a, d);
2438 store_reg_to_var_int(iptr->dst, d);
2442 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2443 x86_64_mov_membase32_reg(cd, s1, a, d);
2444 store_reg_to_var_int(iptr->dst, d);
2447 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2448 x86_64_movss_membase32_reg(cd, s1, a, d);
2449 store_reg_to_var_flt(iptr->dst, d);
2452 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2453 x86_64_movsd_membase32_reg(cd, s1, a, d);
2454 store_reg_to_var_flt(iptr->dst, d);
2459 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2460 /* op1 = type, val.i = field offset */
2462 var_to_reg_int(s1, src->prev, REG_ITMP1);
2463 gen_nullptr_check(s1);
2464 if (IS_INT_LNG_TYPE(iptr->op1)) {
2465 var_to_reg_int(s2, src, REG_ITMP2);
2467 var_to_reg_flt(s2, src, REG_FTMP2);
2471 codegen_addpatchref(cd, cd->mcodeptr,
2472 PATCHER_get_putfield,
2473 (unresolved_field *) iptr->target, 0);
2475 if (opt_showdisassemble) {
2476 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2482 a = ((fieldinfo *) (iptr->val.a))->offset;
2485 switch (iptr->op1) {
2487 x86_64_movl_reg_membase32(cd, s2, s1, a);
2491 x86_64_mov_reg_membase32(cd, s2, s1, a);
2494 x86_64_movss_reg_membase32(cd, s2, s1, a);
2497 x86_64_movsd_reg_membase32(cd, s2, s1, a);
2502 case ICMD_PUTFIELDCONST: /* ..., objectref, value ==> ... */
2503 /* val = value (in current instruction) */
2504 /* op1 = type, val.a = field address (in */
2505 /* following NOP) */
2507 var_to_reg_int(s1, src, REG_ITMP1);
2508 gen_nullptr_check(s1);
2510 if (!iptr[1].val.a) {
2511 codegen_addpatchref(cd, cd->mcodeptr,
2512 PATCHER_putfieldconst,
2513 (unresolved_field *) iptr[1].target, 0);
2515 if (opt_showdisassemble) {
2516 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2522 a = ((fieldinfo *) (iptr[1].val.a))->offset;
2525 switch (iptr->op1) {
2528 x86_64_movl_imm_membase32(cd, iptr->val.i, s1, a);
2533 /* We can only optimize the move, if the class is resolved. */
2534 /* Otherwise we don't know what to patch. */
2535 if (iptr[1].val.a && IS_IMM32(iptr->val.l)) {
2536 x86_64_mov_imm_membase32(cd, iptr->val.l, s1, a);
2538 x86_64_movl_imm_membase32(cd, iptr->val.l, s1, a);
2539 x86_64_movl_imm_membase32(cd, iptr->val.l >> 32, s1, a + 4);
2546 /* branch operations **************************************************/
2548 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2550 var_to_reg_int(s1, src, REG_ITMP1);
2551 M_INTMOVE(s1, REG_ITMP1_XPTR);
2554 codegen_addpatchref(cd, cd->mcodeptr,
2555 PATCHER_athrow_areturn,
2556 (unresolved_class *) iptr->val.a, 0);
2558 if (opt_showdisassemble) {
2559 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2563 M_CALL_IMM(0); /* passing exception pc */
2564 M_POP(REG_ITMP2_XPC);
2566 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
2570 case ICMD_GOTO: /* ... ==> ... */
2571 /* op1 = target JavaVM pc */
2574 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2577 case ICMD_JSR: /* ... ==> ... */
2578 /* op1 = target JavaVM pc */
2581 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2584 case ICMD_RET: /* ... ==> ... */
2585 /* op1 = local variable */
2587 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2588 var_to_reg_int(s1, var, REG_ITMP1);
2592 case ICMD_IFNULL: /* ..., value ==> ... */
2593 /* op1 = target JavaVM pc */
2595 if (src->flags & INMEMORY) {
2596 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2599 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2601 x86_64_jcc(cd, X86_64_CC_E, 0);
2602 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2605 case ICMD_IFNONNULL: /* ..., value ==> ... */
2606 /* op1 = target JavaVM pc */
2608 if (src->flags & INMEMORY) {
2609 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2612 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2614 x86_64_jcc(cd, X86_64_CC_NE, 0);
2615 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2618 case ICMD_IFEQ: /* ..., value ==> ... */
2619 /* op1 = target JavaVM pc, val.i = constant */
2621 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2624 case ICMD_IFLT: /* ..., value ==> ... */
2625 /* op1 = target JavaVM pc, val.i = constant */
2627 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2630 case ICMD_IFLE: /* ..., value ==> ... */
2631 /* op1 = target JavaVM pc, val.i = constant */
2633 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2636 case ICMD_IFNE: /* ..., value ==> ... */
2637 /* op1 = target JavaVM pc, val.i = constant */
2639 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2642 case ICMD_IFGT: /* ..., value ==> ... */
2643 /* op1 = target JavaVM pc, val.i = constant */
2645 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2648 case ICMD_IFGE: /* ..., value ==> ... */
2649 /* op1 = target JavaVM pc, val.i = constant */
2651 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
2654 case ICMD_IF_LEQ: /* ..., value ==> ... */
2655 /* op1 = target JavaVM pc, val.l = constant */
2657 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2660 case ICMD_IF_LLT: /* ..., value ==> ... */
2661 /* op1 = target JavaVM pc, val.l = constant */
2663 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2666 case ICMD_IF_LLE: /* ..., value ==> ... */
2667 /* op1 = target JavaVM pc, val.l = constant */
2669 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2672 case ICMD_IF_LNE: /* ..., value ==> ... */
2673 /* op1 = target JavaVM pc, val.l = constant */
2675 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2678 case ICMD_IF_LGT: /* ..., value ==> ... */
2679 /* op1 = target JavaVM pc, val.l = constant */
2681 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2684 case ICMD_IF_LGE: /* ..., value ==> ... */
2685 /* op1 = target JavaVM pc, val.l = constant */
2687 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
2690 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2691 /* op1 = target JavaVM pc */
2693 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2696 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2697 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2699 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2702 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2703 /* op1 = target JavaVM pc */
2705 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2708 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2709 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2711 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2714 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2715 /* op1 = target JavaVM pc */
2717 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2720 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2721 /* op1 = target JavaVM pc */
2723 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2726 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2727 /* op1 = target JavaVM pc */
2729 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2732 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2733 /* op1 = target JavaVM pc */
2735 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2738 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2739 /* op1 = target JavaVM pc */
2741 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2744 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2745 /* op1 = target JavaVM pc */
2747 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2750 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2751 /* op1 = target JavaVM pc */
2753 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2756 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2757 /* op1 = target JavaVM pc */
2759 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2762 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
2764 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2767 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2768 /* val.i = constant */
2770 var_to_reg_int(s1, src, REG_ITMP1);
2771 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2772 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2774 M_INTMOVE(s1, REG_ITMP1);
2777 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2779 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2780 x86_64_testl_reg_reg(cd, s1, s1);
2781 x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2782 store_reg_to_var_int(iptr->dst, d);
2785 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2786 /* val.i = constant */
2788 var_to_reg_int(s1, src, REG_ITMP1);
2789 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2790 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2792 M_INTMOVE(s1, REG_ITMP1);
2795 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2797 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2798 x86_64_testl_reg_reg(cd, s1, s1);
2799 x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2800 store_reg_to_var_int(iptr->dst, d);
2803 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2804 /* val.i = constant */
2806 var_to_reg_int(s1, src, REG_ITMP1);
2807 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2808 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2810 M_INTMOVE(s1, REG_ITMP1);
2813 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2815 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2816 x86_64_testl_reg_reg(cd, s1, s1);
2817 x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2818 store_reg_to_var_int(iptr->dst, d);
2821 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2822 /* val.i = constant */
2824 var_to_reg_int(s1, src, REG_ITMP1);
2825 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2826 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2828 M_INTMOVE(s1, REG_ITMP1);
2831 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2833 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2834 x86_64_testl_reg_reg(cd, s1, s1);
2835 x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2836 store_reg_to_var_int(iptr->dst, d);
2839 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2840 /* val.i = constant */
2842 var_to_reg_int(s1, src, REG_ITMP1);
2843 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2844 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2846 M_INTMOVE(s1, REG_ITMP1);
2849 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2851 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2852 x86_64_testl_reg_reg(cd, s1, s1);
2853 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2854 store_reg_to_var_int(iptr->dst, d);
2857 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2858 /* val.i = constant */
2860 var_to_reg_int(s1, src, REG_ITMP1);
2861 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2862 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2864 M_INTMOVE(s1, REG_ITMP1);
2867 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2869 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2870 x86_64_testl_reg_reg(cd, s1, s1);
2871 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2872 store_reg_to_var_int(iptr->dst, d);
2876 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2879 var_to_reg_int(s1, src, REG_RESULT);
2880 M_INTMOVE(s1, REG_RESULT);
2881 goto nowperformreturn;
2883 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2885 var_to_reg_int(s1, src, REG_RESULT);
2886 M_INTMOVE(s1, REG_RESULT);
2889 codegen_addpatchref(cd, cd->mcodeptr,
2890 PATCHER_athrow_areturn,
2891 (unresolved_class *) iptr->val.a, 0);
2893 if (opt_showdisassemble) {
2894 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2897 goto nowperformreturn;
2899 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2902 var_to_reg_flt(s1, src, REG_FRESULT);
2903 M_FLTMOVE(s1, REG_FRESULT);
2904 goto nowperformreturn;
2906 case ICMD_RETURN: /* ... ==> ... */
2912 p = parentargs_base;
2914 /* call trace function */
2916 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2918 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2919 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2921 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
2922 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
2923 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2924 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2926 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
2927 x86_64_call_reg(cd, REG_ITMP1);
2929 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2930 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2932 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2935 #if defined(USE_THREADS)
2936 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2937 M_ALD(rd->argintregs[0], REG_SP, rd->memuse * 8);
2939 /* we need to save the proper return value */
2940 switch (iptr->opc) {
2944 M_LST(REG_RESULT, REG_SP, rd->memuse * 8);
2948 M_DST(REG_FRESULT, REG_SP, rd->memuse * 8);
2952 M_MOV_IMM((ptrint) builtin_monitorexit, REG_ITMP1);
2955 /* and now restore the proper return value */
2956 switch (iptr->opc) {
2960 M_LLD(REG_RESULT, REG_SP, rd->memuse * 8);
2964 M_DLD(REG_FRESULT, REG_SP, rd->memuse * 8);
2970 /* restore saved registers */
2972 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2973 p--; M_LLD(rd->savintregs[i], REG_SP, p * 8);
2975 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2976 p--; M_DLD(rd->savfltregs[i], REG_SP, p * 8);
2979 /* deallocate stack */
2981 if (parentargs_base)
2982 M_AADD_IMM(parentargs_base * 8, REG_SP);
2989 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2994 tptr = (void **) iptr->target;
2996 s4ptr = iptr->val.a;
2997 l = s4ptr[1]; /* low */
2998 i = s4ptr[2]; /* high */
3000 var_to_reg_int(s1, src, REG_ITMP1);
3001 M_INTMOVE(s1, REG_ITMP1);
3003 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
3008 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
3009 x86_64_jcc(cd, X86_64_CC_A, 0);
3011 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
3012 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3014 /* build jump table top down and use address of lowest entry */
3016 /* s4ptr += 3 + i; */
3020 dseg_addtarget(cd, (basicblock *) tptr[0]);
3024 /* length of dataseg after last dseg_addtarget is used by load */
3026 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
3027 dseg_adddata(cd, cd->mcodeptr);
3028 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
3029 x86_64_jmp_reg(cd, REG_ITMP1);
3034 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
3036 s4 i, l, val, *s4ptr;
3039 tptr = (void **) iptr->target;
3041 s4ptr = iptr->val.a;
3042 l = s4ptr[0]; /* default */
3043 i = s4ptr[1]; /* count */
3045 MCODECHECK(8 + ((7 + 6) * i) + 5);
3046 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
3052 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
3053 x86_64_jcc(cd, X86_64_CC_E, 0);
3054 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3057 x86_64_jmp_imm(cd, 0);
3059 tptr = (void **) iptr->target;
3060 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3065 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
3066 /* op1 = arg count val.a = builtintable entry */
3072 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3073 /* op1 = arg count, val.a = method pointer */
3075 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3076 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
3077 case ICMD_INVOKEINTERFACE:
3082 md = lm->parseddesc;
3084 unresolved_method *um = iptr->target;
3085 md = um->methodref->parseddesc.md;
3089 s3 = md->paramcount;
3091 MCODECHECK((20 * s3) + 128);
3093 /* copy arguments to registers or stack location */
3095 for (s3 = s3 - 1; s3 >= 0; s3--, src = src->prev) {
3096 if (src->varkind == ARGVAR)
3098 if (IS_INT_LNG_TYPE(src->type)) {
3099 if (!md->params[s3].inmemory) {
3100 s1 = rd->argintregs[md->params[s3].regoff];
3101 var_to_reg_int(d, src, s1);
3104 var_to_reg_int(d, src, REG_ITMP1);
3105 M_LST(d, REG_SP, md->params[s3].regoff * 8);
3109 if (!md->params[s3].inmemory) {
3110 s1 = rd->argfltregs[md->params[s3].regoff];
3111 var_to_reg_flt(d, src, s1);
3114 var_to_reg_flt(d, src, REG_FTMP1);
3115 M_DST(d, REG_SP, md->params[s3].regoff * 8);
3120 switch (iptr->opc) {
3123 codegen_addpatchref(cd, cd->mcodeptr,
3124 bte->fp, iptr->target, 0);
3126 if (opt_showdisassemble) {
3127 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3133 a = (ptrint) bte->fp;
3136 d = md->returntype.type;
3138 M_MOV_IMM(a, REG_ITMP1);
3141 /* if op1 == true, we need to check for an exception */
3143 if (iptr->op1 == true) {
3146 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3150 case ICMD_INVOKESPECIAL:
3151 M_TEST(rd->argintregs[0]);
3153 codegen_addxnullrefs(cd, cd->mcodeptr);
3155 /* first argument contains pointer */
3156 /* gen_nullptr_check(rd->argintregs[0]); */
3158 /* access memory for hardware nullptr */
3159 /* x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); */
3163 case ICMD_INVOKESTATIC:
3165 unresolved_method *um = iptr->target;
3167 codegen_addpatchref(cd, cd->mcodeptr,
3168 PATCHER_invokestatic_special, um, 0);
3170 if (opt_showdisassemble) {
3171 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3175 d = um->methodref->parseddesc.md->returntype.type;
3178 a = (ptrint) lm->stubroutine;
3179 d = lm->parseddesc->returntype.type;
3182 M_MOV_IMM(a, REG_ITMP2);
3186 case ICMD_INVOKEVIRTUAL:
3187 gen_nullptr_check(rd->argintregs[0]);
3190 unresolved_method *um = iptr->target;
3192 codegen_addpatchref(cd, cd->mcodeptr,
3193 PATCHER_invokevirtual, um, 0);
3195 if (opt_showdisassemble) {
3196 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3200 d = um->methodref->parseddesc.md->returntype.type;
3203 s1 = OFFSET(vftbl_t, table[0]) +
3204 sizeof(methodptr) * lm->vftblindex;
3205 d = lm->parseddesc->returntype.type;
3208 x86_64_mov_membase_reg(cd, rd->argintregs[0],
3209 OFFSET(java_objectheader, vftbl),
3211 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP1);
3215 case ICMD_INVOKEINTERFACE:
3216 gen_nullptr_check(rd->argintregs[0]);
3219 unresolved_method *um = iptr->target;
3221 codegen_addpatchref(cd, cd->mcodeptr,
3222 PATCHER_invokeinterface, um, 0);
3224 if (opt_showdisassemble) {
3225 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3230 d = um->methodref->parseddesc.md->returntype.type;
3233 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3234 sizeof(methodptr) * lm->class->index;
3236 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3238 d = lm->parseddesc->returntype.type;
3241 M_ALD(REG_ITMP2, rd->argintregs[0],
3242 OFFSET(java_objectheader, vftbl));
3243 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP2);
3244 x86_64_mov_membase32_reg(cd, REG_ITMP2, s2, REG_ITMP1);
3249 /* d contains return type */
3251 if (d != TYPE_VOID) {
3252 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3253 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3254 M_INTMOVE(REG_RESULT, s1);
3255 store_reg_to_var_int(iptr->dst, s1);
3257 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3258 M_FLTMOVE(REG_FRESULT, s1);
3259 store_reg_to_var_flt(iptr->dst, s1);
3265 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3267 /* op1: 0 == array, 1 == class */
3268 /* val.a: (classinfo *) superclass */
3270 /* superclass is an interface:
3272 * OK if ((sub == NULL) ||
3273 * (sub->vftbl->interfacetablelength > super->index) &&
3274 * (sub->vftbl->interfacetable[-super->index] != NULL));
3276 * superclass is a class:
3278 * OK if ((sub == NULL) || (0
3279 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3280 * super->vftbl->diffval));
3285 vftbl_t *supervftbl;
3288 super = (classinfo *) iptr->val.a;
3295 superindex = super->index;
3296 supervftbl = super->vftbl;
3299 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3300 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3302 var_to_reg_int(s1, src, REG_ITMP1);
3304 /* calculate interface checkcast code size */
3306 s2 = 3; /* mov_membase_reg */
3307 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3309 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub imm32 */ +
3310 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3311 3 /* test */ + 6 /* jcc */;
3314 s2 += (opt_showdisassemble ? 5 : 0);
3316 /* calculate class checkcast code size */
3318 s3 = 3; /* mov_membase_reg */
3319 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3320 s3 += 10 /* mov_imm_reg */ + 3 + 4 /* movl_membase32_reg */;
3323 if (s1 != REG_ITMP1) {
3324 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3325 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3326 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3327 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3333 s3 += 3 + 4 /* movl_membase32_reg */ + 3 /* sub */ +
3334 10 /* mov_imm_reg */ + 3 /* movl_membase_reg */;
3335 CALCOFFSETBYTES(s3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3338 s3 += 3 /* cmp */ + 6 /* jcc */;
3341 s3 += (opt_showdisassemble ? 5 : 0);
3343 /* if class is not resolved, check which code to call */
3346 x86_64_test_reg_reg(cd, s1, s1);
3347 x86_64_jcc(cd, X86_64_CC_Z, 6 + (opt_showdisassemble ? 5 : 0) + 7 + 6 + s2 + 5 + s3);
3349 codegen_addpatchref(cd, cd->mcodeptr,
3350 PATCHER_checkcast_instanceof_flags,
3351 (constant_classref *) iptr->target, 0);
3353 if (opt_showdisassemble) {
3354 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3357 x86_64_movl_imm_reg(cd, 0, REG_ITMP2); /* super->flags */
3358 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP2);
3359 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3362 /* interface checkcast code */
3364 if (!super || (super->flags & ACC_INTERFACE)) {
3366 x86_64_test_reg_reg(cd, s1, s1);
3367 x86_64_jcc(cd, X86_64_CC_Z, s2);
3370 x86_64_mov_membase_reg(cd, s1,
3371 OFFSET(java_objectheader, vftbl),
3375 codegen_addpatchref(cd, cd->mcodeptr,
3376 PATCHER_checkcast_instanceof_interface,
3377 (constant_classref *) iptr->target, 0);
3379 if (opt_showdisassemble) {
3380 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3384 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3385 OFFSET(vftbl_t, interfacetablelength),
3387 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3388 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3389 x86_64_jcc(cd, X86_64_CC_LE, 0);
3390 codegen_addxcastrefs(cd, cd->mcodeptr);
3391 x86_64_mov_membase32_reg(cd, REG_ITMP2,
3392 OFFSET(vftbl_t, interfacetable[0]) -
3393 superindex * sizeof(methodptr*),
3395 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3396 x86_64_jcc(cd, X86_64_CC_E, 0);
3397 codegen_addxcastrefs(cd, cd->mcodeptr);
3400 x86_64_jmp_imm(cd, s3);
3403 /* class checkcast code */
3405 if (!super || !(super->flags & ACC_INTERFACE)) {
3407 x86_64_test_reg_reg(cd, s1, s1);
3408 x86_64_jcc(cd, X86_64_CC_Z, s3);
3411 x86_64_mov_membase_reg(cd, s1,
3412 OFFSET(java_objectheader, vftbl),
3416 codegen_addpatchref(cd, cd->mcodeptr,
3417 PATCHER_checkcast_class,
3418 (constant_classref *) iptr->target, 0);
3420 if (opt_showdisassemble) {
3421 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3425 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP3);
3426 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3427 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3429 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3430 OFFSET(vftbl_t, baseval),
3432 /* if (s1 != REG_ITMP1) { */
3433 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3434 /* OFFSET(vftbl_t, baseval), */
3436 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3437 /* OFFSET(vftbl_t, diffval), */
3439 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3440 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3442 /* x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP1, REG_ITMP2); */
3445 x86_64_movl_membase32_reg(cd, REG_ITMP3,
3446 OFFSET(vftbl_t, baseval),
3448 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP2);
3449 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP3);
3450 x86_64_movl_membase_reg(cd, REG_ITMP3,
3451 OFFSET(vftbl_t, diffval),
3454 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3455 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3457 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP2);
3458 x86_64_jcc(cd, X86_64_CC_A, 0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3459 codegen_addxcastrefs(cd, cd->mcodeptr);
3461 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3463 store_reg_to_var_int(iptr->dst, d);
3467 case ICMD_ARRAYCHECKCAST: /* ..., objectref ==> ..., objectref */
3468 /* op1: 1... resolved, 0... not resolved */
3470 var_to_reg_int(s1, src, REG_ITMP1);
3471 M_INTMOVE(s1, rd->argintregs[0]);
3476 codegen_addpatchref(cd, cd->mcodeptr, bte->fp, iptr->target, 0);
3478 if (opt_showdisassemble) {
3479 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3485 a = (ptrint) bte->fp;
3488 x86_64_mov_imm_reg(cd, (ptrint) iptr->target, rd->argintregs[1]);
3489 x86_64_mov_imm_reg(cd, (ptrint) a, REG_ITMP1);
3490 x86_64_call_reg(cd, REG_ITMP1);
3493 codegen_addxcastrefs(cd, cd->mcodeptr);
3495 var_to_reg_int(s1, src, REG_ITMP1);
3496 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
3498 store_reg_to_var_int(iptr->dst, d);
3501 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3503 /* op1: 0 == array, 1 == class */
3504 /* val.a: (classinfo *) superclass */
3506 /* superclass is an interface:
3508 * return (sub != NULL) &&
3509 * (sub->vftbl->interfacetablelength > super->index) &&
3510 * (sub->vftbl->interfacetable[-super->index] != NULL);
3512 * superclass is a class:
3514 * return ((sub != NULL) && (0
3515 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3516 * super->vftbl->diffvall));
3521 vftbl_t *supervftbl;
3524 super = (classinfo *) iptr->val.a;
3531 superindex = super->index;
3532 supervftbl = super->vftbl;
3535 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3536 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3539 var_to_reg_int(s1, src, REG_ITMP1);
3540 d = reg_of_var(rd, iptr->dst, REG_ITMP2);
3542 M_INTMOVE(s1, REG_ITMP1);
3546 /* calculate interface instanceof code size */
3548 s2 = 3; /* mov_membase_reg */
3549 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3550 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub_imm32 */ +
3551 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3552 3 /* test */ + 4 /* setcc */;
3555 s2 += (opt_showdisassemble ? 5 : 0);
3557 /* calculate class instanceof code size */
3559 s3 = 3; /* mov_membase_reg */
3560 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3561 s3 += 10; /* mov_imm_reg */
3562 s3 += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3563 CALCOFFSETBYTES(s3, REG_ITMP1, OFFSET(vftbl_t, baseval));
3564 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3565 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, baseval));
3566 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3567 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3568 s3 += 3 /* sub */ + 3 /* xor */ + 3 /* cmp */ + 4 /* setcc */;
3571 s3 += (opt_showdisassemble ? 5 : 0);
3573 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3575 /* if class is not resolved, check which code to call */
3578 x86_64_test_reg_reg(cd, s1, s1);
3579 x86_64_jcc(cd, X86_64_CC_Z, (6 + (opt_showdisassemble ? 5 : 0) +
3580 7 + 6 + s2 + 5 + s3));
3582 codegen_addpatchref(cd, cd->mcodeptr,
3583 PATCHER_checkcast_instanceof_flags,
3584 (constant_classref *) iptr->target, 0);
3586 if (opt_showdisassemble) {
3587 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3590 x86_64_movl_imm_reg(cd, 0, REG_ITMP3); /* super->flags */
3591 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP3);
3592 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3595 /* interface instanceof code */
3597 if (!super || (super->flags & ACC_INTERFACE)) {
3599 x86_64_test_reg_reg(cd, s1, s1);
3600 x86_64_jcc(cd, X86_64_CC_Z, s2);
3603 x86_64_mov_membase_reg(cd, s1,
3604 OFFSET(java_objectheader, vftbl),
3607 codegen_addpatchref(cd, cd->mcodeptr,
3608 PATCHER_checkcast_instanceof_interface,
3609 (constant_classref *) iptr->target, 0);
3611 if (opt_showdisassemble) {
3612 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3616 x86_64_movl_membase32_reg(cd, REG_ITMP1,
3617 OFFSET(vftbl_t, interfacetablelength),
3619 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3620 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3622 a = 3 + 4 /* mov_membase32_reg */ + 3 /* test */ + 4 /* setcc */;
3624 x86_64_jcc(cd, X86_64_CC_LE, a);
3625 x86_64_mov_membase32_reg(cd, REG_ITMP1,
3626 OFFSET(vftbl_t, interfacetable[0]) -
3627 superindex * sizeof(methodptr*),
3629 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3630 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3633 x86_64_jmp_imm(cd, s3);
3636 /* class instanceof code */
3638 if (!super || !(super->flags & ACC_INTERFACE)) {
3640 x86_64_test_reg_reg(cd, s1, s1);
3641 x86_64_jcc(cd, X86_64_CC_E, s3);
3644 x86_64_mov_membase_reg(cd, s1,
3645 OFFSET(java_objectheader, vftbl),
3649 codegen_addpatchref(cd, cd->mcodeptr,
3650 PATCHER_instanceof_class,
3651 (constant_classref *) iptr->target, 0);
3653 if (opt_showdisassemble) {
3654 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3658 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP2);
3659 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3660 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3662 x86_64_movl_membase_reg(cd, REG_ITMP1,
3663 OFFSET(vftbl_t, baseval),
3665 x86_64_movl_membase_reg(cd, REG_ITMP2,
3666 OFFSET(vftbl_t, diffval),
3668 x86_64_movl_membase_reg(cd, REG_ITMP2,
3669 OFFSET(vftbl_t, baseval),
3671 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3672 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3674 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
3675 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d); /* may be REG_ITMP2 */
3676 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP1);
3677 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3679 store_reg_to_var_int(iptr->dst, d);
3683 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3684 /* op1 = dimension, val.a = array descriptor */
3686 /* check for negative sizes and copy sizes to stack if necessary */
3688 MCODECHECK((10 * 4 * iptr->op1) + 5 + 10 * 8);
3690 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3691 /* copy SAVEDVAR sizes to stack */
3693 if (src->varkind != ARGVAR) {
3694 var_to_reg_int(s2, src, REG_ITMP1);
3695 M_LST(s2, REG_SP, s1 * 8);
3699 /* is a patcher function set? */
3702 codegen_addpatchref(cd, cd->mcodeptr,
3703 (functionptr) (ptrint) iptr->target,
3706 if (opt_showdisassemble) {
3707 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3713 a = (ptrint) iptr->val.a;
3716 /* a0 = dimension count */
3718 M_MOV_IMM(iptr->op1, rd->argintregs[0]);
3720 /* a1 = arrayvftbl */
3722 M_MOV_IMM((ptrint) iptr->val.a, rd->argintregs[1]);
3724 /* a2 = pointer to dimensions = stack pointer */
3726 M_MOV(REG_SP, rd->argintregs[2]);
3728 M_MOV_IMM((ptrint) BUILTIN_multianewarray, REG_ITMP1);
3731 /* check for exception before result assignment */
3735 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3737 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3738 M_INTMOVE(REG_RESULT, s1);
3739 store_reg_to_var_int(iptr->dst, s1);
3743 throw_cacao_exception_exit(string_java_lang_InternalError,
3744 "Unknown ICMD %d", iptr->opc);
3747 } /* for instruction */
3749 /* copy values to interface registers */
3751 src = bptr->outstack;
3752 len = bptr->outdepth;
3759 if ((src->varkind != STACKVAR)) {
3761 if (IS_FLT_DBL_TYPE(s2)) {
3762 var_to_reg_flt(s1, src, REG_FTMP1);
3763 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3764 M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3767 x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3771 var_to_reg_int(s1, src, REG_ITMP1);
3772 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3773 M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3776 x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3783 /* At the end of a basic block we may have to append some nops,
3784 because the patcher stub calling code might be longer than the
3785 actual instruction. So codepatching does not change the
3786 following block unintentionally. */
3788 if (cd->mcodeptr < cd->lastmcodeptr) {
3789 while (cd->mcodeptr < cd->lastmcodeptr) {
3794 } /* if (bptr -> flags >= BBREACHED) */
3795 } /* for basic block */
3797 codegen_createlinenumbertable(cd);
3804 /* generate ArithmeticException stubs */
3808 for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
3809 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3810 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3812 xcodeptr - cd->mcodebase - (10 + 7));
3816 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3818 cd->mcodeptr - cd->mcodebase);
3822 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
3823 dseg_adddata(cd, cd->mcodeptr);
3824 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
3826 if (xcodeptr != NULL) {
3827 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
3830 xcodeptr = cd->mcodeptr;
3832 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3833 M_MOV(REG_SP, rd->argintregs[1]);
3834 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3835 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
3837 M_ASUB_IMM(2 * 8, REG_SP);
3838 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
3840 M_MOV_IMM((ptrint) stacktrace_inline_arithmeticexception,
3844 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
3845 M_AADD_IMM(2 * 8, REG_SP);
3847 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
3852 /* generate ArrayIndexOutOfBoundsException stubs */
3856 for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3857 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3859 cd->mcodeptr - cd->mcodebase);
3863 /* move index register into REG_ITMP1 */
3865 M_MOV(bref->reg, REG_ITMP1); /* 3 bytes */
3867 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
3868 dseg_adddata(cd, cd->mcodeptr);
3869 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
3871 if (xcodeptr != NULL) {
3872 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
3875 xcodeptr = cd->mcodeptr;
3877 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3878 M_MOV(REG_SP, rd->argintregs[1]);
3879 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3880 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
3881 M_MOV(REG_ITMP1, rd->argintregs[4]);
3883 M_ASUB_IMM(2 * 8, REG_SP);
3884 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
3886 M_MOV_IMM((ptrint) stacktrace_inline_arrayindexoutofboundsexception,
3890 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
3891 M_AADD_IMM(2 * 8, REG_SP);
3893 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
3898 /* generate ArrayStoreException stubs */
3902 for (bref = cd->xstorerefs; bref != NULL; bref = bref->next) {
3903 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3904 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3906 xcodeptr - cd->mcodebase - (10 + 7));
3910 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3912 cd->mcodeptr - cd->mcodebase);
3916 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
3917 dseg_adddata(cd, cd->mcodeptr);
3918 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
3920 if (xcodeptr != NULL) {
3921 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
3924 xcodeptr = cd->mcodeptr;
3926 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3927 M_MOV(REG_SP, rd->argintregs[1]);
3928 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3929 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
3931 M_ASUB_IMM(2 * 8, REG_SP);
3932 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
3934 M_MOV_IMM((ptrint) stacktrace_inline_arraystoreexception,
3938 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
3939 M_AADD_IMM(2 * 8, REG_SP);
3941 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
3946 /* generate ClassCastException stubs */
3950 for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3951 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3952 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3954 xcodeptr - cd->mcodebase - (10 + 7));
3958 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3960 cd->mcodeptr - cd->mcodebase);
3964 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
3965 dseg_adddata(cd, cd->mcodeptr);
3966 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
3968 if (xcodeptr != NULL) {
3969 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
3972 xcodeptr = cd->mcodeptr;
3974 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3975 M_MOV(REG_SP, rd->argintregs[1]);
3976 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3977 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
3979 M_ASUB_IMM(2 * 8, REG_SP);
3980 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
3982 M_MOV_IMM((ptrint) stacktrace_inline_classcastexception, REG_ITMP3);
3985 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
3986 M_AADD_IMM(2 * 8, REG_SP);
3988 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
3993 /* generate NullpointerException stubs */
3997 for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
3998 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3999 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4001 xcodeptr - cd->mcodebase - (10 + 7));
4005 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4007 cd->mcodeptr - cd->mcodebase);
4011 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
4012 dseg_adddata(cd, cd->mcodeptr);
4013 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
4015 if (xcodeptr != NULL) {
4016 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
4019 xcodeptr = cd->mcodeptr;
4021 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
4022 M_MOV(REG_SP, rd->argintregs[1]);
4023 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
4024 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
4026 M_ASUB_IMM(2 * 8, REG_SP);
4027 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
4029 M_MOV_IMM((ptrint) stacktrace_inline_nullpointerexception,
4033 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
4034 M_AADD_IMM(2 * 8, REG_SP);
4036 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
4041 /* generate exception check stubs */
4045 for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
4046 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
4047 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4049 xcodeptr - cd->mcodebase - (10 + 7));
4053 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4055 cd->mcodeptr - cd->mcodebase);
4059 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
4060 dseg_adddata(cd, cd->mcodeptr);
4061 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
4063 if (xcodeptr != NULL) {
4064 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
4067 xcodeptr = cd->mcodeptr;
4069 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
4070 M_MOV(REG_SP, rd->argintregs[1]);
4071 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
4072 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
4074 M_ASUB_IMM(2 * 8, REG_SP);
4075 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
4077 M_MOV_IMM((ptrint) stacktrace_inline_fillInStackTrace, REG_ITMP3);
4080 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
4081 M_AADD_IMM(2 * 8, REG_SP);
4083 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
4088 /* generate code patching stub call code */
4095 tmpcd = DNEW(codegendata);
4097 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
4098 /* check size of code segment */
4102 /* Get machine code which is patched back in later. A */
4103 /* `call rel32' is 5 bytes long (but read 8 bytes). */
4105 xcodeptr = cd->mcodebase + pref->branchpos;
4106 mcode = *((ptrint *) xcodeptr);
4108 /* patch in `call rel32' to call the following code */
4110 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4111 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4113 /* move pointer to java_objectheader onto stack */
4115 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4116 /* create a virtual java_objectheader */
4118 (void) dseg_addaddress(cd, get_dummyLR()); /* monitorPtr */
4119 a = dseg_addaddress(cd, NULL); /* vftbl */
4121 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP3);
4127 /* move machine code bytes and classinfo pointer into registers */
4129 M_MOV_IMM((ptrint) mcode, REG_ITMP3);
4131 M_MOV_IMM((ptrint) pref->ref, REG_ITMP3);
4133 M_MOV_IMM((ptrint) pref->disp, REG_ITMP3);
4136 M_MOV_IMM((ptrint) pref->patcher, REG_ITMP3);
4139 M_MOV_IMM((ptrint) asm_wrapper_patcher, REG_ITMP3);
4145 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
4149 /* createcompilerstub **********************************************************
4151 Creates a stub routine which calls the compiler.
4153 *******************************************************************************/
4155 #define COMPILERSTUB_SIZE 23
/* createcompilerstub: build a tiny trampoline for method m that hands
   control to the JIT compiler.  The emitted code loads m into
   REG_ITMP1 (so the compiler knows which method to compile) and jumps
   through REG_ITMP3 to asm_call_jit_compiler.  The stub memory is
   allocated with CNEW and returned; the codegendata used while
   emitting lives in dump memory and is released before returning.
   NOTE(review): this excerpt is elided -- lines between the numbered
   statements below are missing from this view (e.g. the code that
   points cd at the stub buffer s); do not infer behavior beyond what
   is shown. */
4157 functionptr createcompilerstub(methodinfo *m)
4159 	u1 *s;                              /* memory to hold the stub           */
/* permanent allocation: the stub must outlive this function */
4163 	s = CNEW(u1, COMPILERSTUB_SIZE);
4165 	/* mark start of dump memory area */
4167 	dumpsize = dump_size();
/* temporary codegendata in dump memory, freed via dump_release below */
4169 	cd = DNEW(codegendata);
4172 	/* code for the stub */
4174 	x86_64_mov_imm_reg(cd, (ptrint) m, REG_ITMP1);	/* pass method to compiler */
4175 	x86_64_mov_imm_reg(cd, (ptrint) asm_call_jit_compiler, REG_ITMP3);
/* indirect jump: asm_call_jit_compiler compiles m and re-enters it */
4176 	x86_64_jmp_reg(cd, REG_ITMP3);
4178 #if defined(STATISTICS)
4180 		count_cstub_len += COMPILERSTUB_SIZE;
4183 	/* release dump area */
4185 	dump_release(dumpsize);
/* double cast: functionptr and u1* differ, go through ptrint */
4187 	return (functionptr) (ptrint) s;
4191 /* createnativestub ************************************************************
4193 Creates a stub routine which calls a native method.
4195 *******************************************************************************/
/* createnativestub: generate the wrapper stub that adapts the JIT
   calling convention to the native function f for method m.  Visible
   responsibilities: emit the method header into the data segment,
   save/restore the argument registers around helper calls, remap the
   Java arguments into the native argument slots (shifted by
   nativeparams to make room for the env pointer and, for static
   methods, the class pointer), call the native function, check for a
   pending exception and either return normally or forward to
   asm_handle_nat_exception, and finally emit the patcher stub call
   code.  Returns m->entrypoint.
   NOTE(review): this excerpt is elided -- original lines are missing
   between the numbered statements; comments below describe only what
   the visible code shows. */
4197 functionptr createnativestub(functionptr f, methodinfo *m, codegendata *cd,
4198 registerdata *rd, methoddesc *nmd)
4201 s4 stackframesize; /* size of stackframe if needed */
4203 s4 i, j; /* count variables */
4207 /* initialize variables */
/* native convention prepends env (+ class for static): 2 vs 1 extra slots */
4210 nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;
4213 /* calculate stack frame size */
/* frame holds stackframeinfo, local ref table, all arg registers and
   one slot for the resolved function address (sizes in words) */
4216 sizeof(stackframeinfo) / SIZEOF_VOID_P +
4217 sizeof(localref_table) / SIZEOF_VOID_P +
4218 INT_ARG_CNT + FLT_ARG_CNT + 1 + /* + 1 for function address */
/* an odd word count plus the return address keeps RSP 16-byte aligned */
4221 if (!(stackframesize & 0x1)) /* keep stack 16-byte aligned */
4225 /* create method header */
4227 (void) dseg_addaddress(cd, m); /* MethodPointer */
4228 (void) dseg_adds4(cd, stackframesize * 8); /* FrameSize */
4229 (void) dseg_adds4(cd, 0); /* IsSync */
4230 (void) dseg_adds4(cd, 0); /* IsLeaf */
4231 (void) dseg_adds4(cd, 0); /* IntSave */
4232 (void) dseg_adds4(cd, 0); /* FltSave */
4233 (void) dseg_addlinenumbertablesize(cd);
4234 (void) dseg_adds4(cd, 0); /* ExTableSize */
4237 /* initialize mcode variables */
4239 cd->mcodeptr = (u1 *) cd->mcodebase;
4240 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
4243 /* generate stub code */
/* allocate the stub's stack frame */
4245 M_ASUB_IMM(stackframesize * 8, REG_SP);
4248 /* save integer and float argument registers */
/* spill live arg registers so the helper call below cannot clobber them;
   offsets start at slot 1 (slot 0 is used for the methodinfo below) */
4250 for (i = 0, j = 0; i < md->paramcount && i < INT_ARG_CNT; i++)
4251 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4252 M_LST(rd->argintregs[j++], REG_SP, (1 + i) * 8);
4254 for (i = 0, j = 0; i < md->paramcount && i < FLT_ARG_CNT; i++)
4255 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4256 M_DST(rd->argfltregs[j++], REG_SP, (1 + INT_ARG_CNT + i) * 8);
4258 /* show integer hex code for float arguments */
/* NOTE(review): this tracing section is presumably conditional on a
   verbose flag in the elided lines -- confirm against full source */
4260 for (i = 0, j = 0; i < md->paramcount && i < INT_ARG_CNT; i++) {
4261 /* if the paramtype is a float, we have to right shift all */
4262 /* following integer registers */
4264 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type)) {
4265 for (s1 = INT_ARG_CNT - 2; s1 >= i; s1--)
4266 M_MOV(rd->argintregs[s1], rd->argintregs[s1 + 1]);
/* movd: copy raw float bits into the integer register for tracing */
4268 x86_64_movd_freg_reg(cd, rd->argfltregs[j], rd->argintregs[i]);
/* pass the methodinfo via stack slot 0 and call the tracer */
4273 x86_64_mov_imm_reg(cd, (ptrint) m, REG_ITMP1);
4274 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, 0 * 8);
4275 x86_64_mov_imm_reg(cd, (ptrint) builtin_trace_args, REG_ITMP1);
4276 x86_64_call_reg(cd, REG_ITMP1);
4278 /* restore integer and float argument registers */
4280 for (i = 0, j = 0; i < md->paramcount && i < INT_ARG_CNT; i++)
4281 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4282 M_LLD(rd->argintregs[j++], REG_SP, (1 + i) * 8);
4284 for (i = 0, j = 0; i < md->paramcount && i < FLT_ARG_CNT; i++)
4285 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4286 M_DLD(rd->argfltregs[j++], REG_SP, (1 + INT_ARG_CNT + i) * 8);
4290 /* get function address (this must happen before the stackframeinfo) */
4292 #if !defined(ENABLE_STATICVM)
/* lazy resolution: the patcher fills in the real native address later */
4294 codegen_addpatchref(cd, cd->mcodeptr, PATCHER_resolve_native, m, 0);
/* showdisassemble pads with NOPs so the patcher call site stays visible */
4296 if (opt_showdisassemble) {
4297 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
4302 M_MOV_IMM((ptrint) f, REG_ITMP3);
4305 /* save integer and float argument registers */
/* second spill, this time from slot 0 (no methodinfo slot needed here) */
4307 for (i = 0, j = 0; i < md->paramcount && i < INT_ARG_CNT; i++)
4308 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4309 M_LST(rd->argintregs[j++], REG_SP, i * 8);
4311 for (i = 0, j = 0; i < md->paramcount && i < FLT_ARG_CNT; i++)
4312 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4313 M_DST(rd->argfltregs[j++], REG_SP, (INT_ARG_CNT + i) * 8);
/* keep the resolved function address across the helper call */
4315 M_AST(REG_ITMP3, REG_SP, (INT_ARG_CNT + FLT_ARG_CNT) * 8);
4317 /* create dynamic stack info */
/* args: datasp (caller frame top), pc (RIP-relative to mcodebase),
   pv, and the return address loaded from the caller frame */
4319 M_ALEA(REG_SP, stackframesize * 8, rd->argintregs[0]);
4320 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[1]);
4321 M_ALEA(REG_SP, stackframesize * 8 + SIZEOF_VOID_P, rd->argintregs[2]);
4322 M_ALD(rd->argintregs[3], REG_SP, stackframesize * 8);
4323 M_MOV_IMM((ptrint) codegen_start_native_call, REG_ITMP1);
4327 x86_64_mov_imm_reg(cd, (ptrint) nativeinvokation, REG_ITMP1);
4328 x86_64_call_reg(cd, REG_ITMP1);
4331 /* restore integer and float argument registers */
4333 for (i = 0, j = 0; i < md->paramcount && i < INT_ARG_CNT; i++)
4334 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4335 M_LLD(rd->argintregs[j++], REG_SP, i * 8);
4337 for (i = 0, j = 0; i < md->paramcount && i < FLT_ARG_CNT; i++)
4338 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4339 M_DLD(rd->argfltregs[j++], REG_SP, (INT_ARG_CNT + i) * 8);
4341 M_ALD(REG_ITMP3, REG_SP, (INT_ARG_CNT + FLT_ARG_CNT) * 8);
4344 /* copy or spill arguments to new locations */
/* walk backwards so later (shifted) native slots are filled before the
   earlier Java registers they come from are overwritten; j is the
   native index, offset by nativeparams (env [+ class]) */
4346 for (i = md->paramcount - 1, j = i + nativeparams; i >= 0; i--, j--) {
4347 t = md->paramtypes[i].type;
4349 if (IS_INT_LNG_TYPE(t)) {
4350 if (!md->params[i].inmemory) {
/* source is a Java argument register */
4351 s1 = rd->argintregs[md->params[i].regoff];
4353 if (!nmd->params[j].inmemory) {
4354 s2 = rd->argintregs[nmd->params[j].regoff];
/* register -> native stack slot */
4358 s2 = nmd->params[j].regoff;
4359 M_LST(s1, REG_SP, s2 * 8);
/* caller stack slot -> native stack slot, via REG_ITMP1 */
4363 s1 = md->params[i].regoff + stackframesize + 1; /* + 1 (RA) */
4364 s2 = nmd->params[j].regoff;
4365 M_LLD(REG_ITMP1, REG_SP, s1 * 8);
4366 M_LST(REG_ITMP1, REG_SP, s2 * 8);
4370 /* We only copy spilled float arguments, as the float argument */
4371 /* registers keep unchanged. */
4373 if (md->params[i].inmemory) {
4374 s1 = md->params[i].regoff + stackframesize + 1; /* + 1 (RA) */
4375 s2 = nmd->params[j].regoff;
4376 M_DLD(REG_FTMP1, REG_SP, s1 * 8);
4377 M_DST(REG_FTMP1, REG_SP, s2 * 8);
4382 /* put class into second argument register */
4384 if (m->flags & ACC_STATIC)
4385 M_MOV_IMM((ptrint) m->class, rd->argintregs[1]);
4387 /* put env into first argument register */
4389 M_MOV_IMM((ptrint) &env, rd->argintregs[0]);
4391 /* do the native function call */
4395 /* save return value */
/* stash the result in slot 0 so the cleanup helper can't clobber it */
4397 if (IS_INT_LNG_TYPE(md->returntype.type))
4398 M_LST(REG_RESULT, REG_SP, 0 * 8);
4400 M_DST(REG_FRESULT, REG_SP, 0 * 8);
4402 /* remove native stackframe info */
4404 M_ALEA(REG_SP, stackframesize * 8, rd->argintregs[0]);
4405 M_MOV_IMM((ptrint) codegen_finish_native_call, REG_ITMP1);
4408 /* generate call trace */
4411 /* just restore the value we need, don't care about the other */
4413 if (IS_INT_LNG_TYPE(md->returntype.type))
4414 M_LLD(REG_RESULT, REG_SP, 0 * 8);
4416 M_DLD(REG_FRESULT, REG_SP, 0 * 8);
/* pass both int and float result forms; the tracer picks by type */
4418 M_MOV_IMM((ptrint) m, rd->argintregs[0]);
4419 M_MOV(REG_RESULT, rd->argintregs[1]);
4420 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
4421 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
4423 M_MOV_IMM((ptrint) builtin_displaymethodstop, REG_ITMP1);
4427 /* check for exception */
/* threads build: fetch per-thread exception-pointer address via helper;
   non-threads build: use the global _no_threads_exceptionptr */
4429 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4430 M_MOV_IMM((ptrint) builtin_get_exceptionptrptr, REG_ITMP3);
4433 M_MOV_IMM((ptrint) &_no_threads_exceptionptr, REG_RESULT);
4435 M_ALD(REG_ITMP3, REG_RESULT, 0);
4438 /* restore return value */
4440 if (IS_INT_LNG_TYPE(md->returntype.type))
4441 M_LLD(REG_RESULT, REG_SP, 0 * 8);
4443 M_DLD(REG_FRESULT, REG_SP, 0 * 8);
4445 /* test for exception */
4450 /* remove stackframe */
4452 M_AADD_IMM(stackframesize * 8, REG_SP);
4456 /* handle exception */
4458 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4459 M_LST(REG_ITMP3, REG_SP, 0 * 8);
4460 M_MOV_IMM((ptrint) builtin_get_exceptionptrptr, REG_ITMP3);
/* clear the pending exception slot, then load the exception object */
4462 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
4463 M_LLD(REG_ITMP1_XPTR, REG_SP, 0 * 8);
4465 M_MOV(REG_ITMP3, REG_ITMP1_XPTR);
4466 M_MOV_IMM((ptrint) &_no_threads_exceptionptr, REG_ITMP3);
4467 M_XOR(REG_ITMP2, REG_ITMP2);
4468 M_AST(REG_ITMP2, REG_ITMP3, 0); /* clear exception pointer */
4471 /* remove stackframe */
4473 M_AADD_IMM(stackframesize * 8, REG_SP);
/* rewind XPC past the call instruction so the handler sees the call site;
   NOTE(review): 3 bytes here vs 5-byte call rel32 elsewhere -- confirm
   which call encoding this return address follows */
4475 M_LLD(REG_ITMP2_XPC, REG_SP, 0 * 8); /* get return address from stack */
4476 M_ASUB_IMM(3, REG_ITMP2_XPC); /* callq */
4478 M_MOV_IMM((ptrint) asm_handle_nat_exception, REG_ITMP3);
4482 /* process patcher calls **************************************************/
4489 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* dummy codegendata: only its mcodeptr is used to emit the call patch */
4493 tmpcd = DNEW(codegendata);
4495 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
4496 /* Get machine code which is patched back in later. A */
4497 /* `call rel32' is 5 bytes long (but read 8 bytes). */
4499 xcodeptr = cd->mcodebase + pref->branchpos;
4500 mcode = *((ptrint *) xcodeptr);
4502 /* patch in `call rel32' to call the following code */
4504 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4505 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4507 /* move pointer to java_objectheader onto stack */
4509 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4510 /* create a virtual java_objectheader */
4512 (void) dseg_addaddress(cd, get_dummyLR()); /* monitorPtr */
4513 disp = dseg_addaddress(cd, NULL); /* vftbl */
/* RIP-relative address of the fake object header in the data segment */
4515 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP3);
4521 /* move machine code bytes and classinfo pointer into registers */
/* hand the saved bytes, reference, displacement and patcher function
   to asm_wrapper_patcher (REG_ITMP3 reloaded for each immediate) */
4523 M_MOV_IMM((ptrint) mcode, REG_ITMP3);
4525 M_MOV_IMM((ptrint) pref->ref, REG_ITMP3);
4527 M_MOV_IMM((ptrint) pref->disp, REG_ITMP3);
4530 M_MOV_IMM((ptrint) pref->patcher, REG_ITMP3);
4533 M_MOV_IMM((ptrint) asm_wrapper_patcher, REG_ITMP3);
4538 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
4540 return m->entrypoint;
4545 * These are local overrides for various environment variables in Emacs.
4546 * Please do not remove this and leave it at the end of the file, where
4547 * Emacs will automagically detect them.
4548 * ---------------------------------------------------------------------
4551 * indent-tabs-mode: t