1 /* src/vm/jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996-2005 R. Grafl, A. Krall, C. Kruegel, C. Oates,
4 R. Obermaisser, M. Platter, M. Probst, S. Ring, E. Steiner,
5 C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich, J. Wenninger,
6 Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
25 Contact: cacao@complang.tuwien.ac.at
27 Authors: Andreas Krall
30 Changes: Christian Ullrich
32 $Id: codegen.c 3621 2005-11-07 18:48:16Z twisti $
46 #include "vm/jit/x86_64/arch.h"
47 #include "vm/jit/x86_64/codegen.h"
48 #include "vm/jit/x86_64/emitfuncs.h"
50 #include "cacao/cacao.h"
51 #include "native/native.h"
52 #include "vm/global.h"
53 #include "vm/builtin.h"
54 #include "vm/loader.h"
55 #include "vm/statistics.h"
56 #include "vm/stringlocal.h"
57 #include "vm/tables.h"
58 #include "vm/jit/asmpart.h"
59 #include "vm/jit/codegen.inc"
60 #include "vm/jit/jit.h"
63 # include "vm/jit/lsra.inc"
66 #include "vm/jit/methodheader.h"
67 #include "vm/jit/parse.h"
68 #include "vm/jit/patcher.h"
69 #include "vm/jit/reg.h"
70 #include "vm/jit/reg.inc"
73 /* codegen *********************************************************************
75 Generates machine code.
77 *******************************************************************************/
79 void codegen(methodinfo *m, codegendata *cd, registerdata *rd)
81 s4 len, s1, s2, s3, d, disp;
90 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
91 builtintable_entry *bte;
94 /* prevent compiler warnings */
106 /* space to save used callee saved registers */
108 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
109 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
111 parentargs_base = rd->memuse + savedregs_num;
113 #if defined(USE_THREADS)
114 /* space to save argument of monitor_enter */
116 if (checksync && (m->flags & ACC_SYNCHRONIZED))
120 /* Keep stack of non-leaf functions 16-byte aligned for calls into native */
121 /* code e.g. libc or jni (alignment problems with movaps). */
123 if (!m->isleafmethod || runverbose)
124 parentargs_base |= 0x1;
126 /* create method header */
128 (void) dseg_addaddress(cd, m); /* MethodPointer */
129 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
131 #if defined(USE_THREADS)
132 /* IsSync contains the offset relative to the stack pointer for the
133 argument of monitor_exit used in the exception handler. Since the
134 offset could be zero and give a wrong meaning of the flag it is
138 if (checksync && (m->flags & ACC_SYNCHRONIZED))
139 (void) dseg_adds4(cd, (rd->memuse + 1) * 8); /* IsSync */
142 (void) dseg_adds4(cd, 0); /* IsSync */
144 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
145 (void) dseg_adds4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
146 (void) dseg_adds4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
148 (void) dseg_addlinenumbertablesize(cd);
150 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
152 /* create exception table */
154 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
155 dseg_addtarget(cd, ex->start);
156 dseg_addtarget(cd, ex->end);
157 dseg_addtarget(cd, ex->handler);
158 (void) dseg_addaddress(cd, ex->catchtype.cls);
161 /* initialize mcode variables */
163 cd->mcodeptr = (u1 *) cd->mcodebase;
164 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
167 /* initialize the last patcher pointer */
169 cd->lastmcodeptr = cd->mcodeptr;
171 /* create stack frame (if necessary) */
174 M_ASUB_IMM(parentargs_base * 8, REG_SP);
176 /* save used callee saved registers */
179 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
180 p--; M_LST(rd->savintregs[i], REG_SP, p * 8);
182 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
183 p--; M_DST(rd->savfltregs[i], REG_SP, p * 8);
186 /* take arguments out of register or stack frame */
190 for (p = 0, l = 0; p < md->paramcount; p++) {
191 t = md->paramtypes[p].type;
192 var = &(rd->locals[l][t]);
194 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
198 s1 = md->params[p].regoff;
199 if (IS_INT_LNG_TYPE(t)) { /* integer args */
200 s2 = rd->argintregs[s1];
201 if (!md->params[p].inmemory) { /* register arguments */
202 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
203 M_INTMOVE(s2, var->regoff);
205 } else { /* reg arg -> spilled */
206 M_LST(s2, REG_SP, var->regoff * 8);
209 } else { /* stack arguments */
210 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
211 /* + 8 for return address */
212 M_LLD(var->regoff, REG_SP, (parentargs_base + s1) * 8 + 8);
214 } else { /* stack arg -> spilled */
215 var->regoff = parentargs_base + s1 + 1;
219 } else { /* floating args */
220 if (!md->params[p].inmemory) { /* register arguments */
221 s2 = rd->argfltregs[s1];
222 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
223 M_FLTMOVE(s2, var->regoff);
225 } else { /* reg arg -> spilled */
226 M_DST(s2, REG_SP, var->regoff * 8);
229 } else { /* stack arguments */
230 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
231 M_DLD(var->regoff, REG_SP, (parentargs_base + s1) * 8 + 8);
234 var->regoff = parentargs_base + s1 + 1;
240 /* save monitorenter argument */
242 #if defined(USE_THREADS)
243 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
244 /* stack offset for monitor argument */
249 M_LSUB_IMM((INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_SP);
251 for (p = 0; p < INT_ARG_CNT; p++)
252 M_LST(rd->argintregs[p], REG_SP, p * 8);
254 for (p = 0; p < FLT_ARG_CNT; p++)
255 M_DST(rd->argfltregs[p], REG_SP, (INT_ARG_CNT + p) * 8);
257 s1 += INT_ARG_CNT + FLT_ARG_CNT;
260 /* decide which monitor enter function to call */
262 if (m->flags & ACC_STATIC) {
263 x86_64_mov_imm_reg(cd, (ptrint) m->class, REG_ITMP1);
264 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, s1 * 8);
265 M_INTMOVE(REG_ITMP1, rd->argintregs[0]);
266 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_staticmonitorenter, REG_ITMP1);
267 x86_64_call_reg(cd, REG_ITMP1);
270 x86_64_test_reg_reg(cd, rd->argintregs[0], rd->argintregs[0]);
271 x86_64_jcc(cd, X86_64_CC_Z, 0);
272 codegen_addxnullrefs(cd, cd->mcodeptr);
273 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, s1 * 8);
274 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_monitorenter, REG_ITMP1);
275 x86_64_call_reg(cd, REG_ITMP1);
279 for (p = 0; p < INT_ARG_CNT; p++)
280 M_LLD(rd->argintregs[p], REG_SP, p * 8);
282 for (p = 0; p < FLT_ARG_CNT; p++)
283 M_DLD(rd->argfltregs[p], REG_SP, (INT_ARG_CNT + p) * 8);
285 M_LADD_IMM((INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_SP);
290 /* Copy argument registers to stack and call trace function with pointer */
291 /* to arguments on stack. */
293 if (runverbose || opt_stat) {
294 M_LSUB_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT + 1 + 1) * 8, REG_SP);
296 /* save integer argument registers */
298 for (p = 0; p < INT_ARG_CNT; p++)
299 M_LST(rd->argintregs[p], REG_SP, (1 + p) * 8);
301 /* save float argument registers */
303 for (p = 0; p < FLT_ARG_CNT; p++)
304 M_DST(rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
306 /* save temporary registers for leaf methods */
308 if (m->isleafmethod) {
309 for (p = 0; p < INT_TMP_CNT; p++)
310 M_LST(rd->tmpintregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
312 for (p = 0; p < FLT_TMP_CNT; p++)
313 M_DST(rd->tmpfltregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
317 /* show integer hex code for float arguments */
319 for (p = 0, l = 0; p < md->paramcount && p < INT_ARG_CNT; p++) {
320 /* if the paramtype is a float, we have to right shift all */
321 /* following integer registers */
323 if (IS_FLT_DBL_TYPE(md->paramtypes[p].type)) {
324 for (s1 = INT_ARG_CNT - 2; s1 >= p; s1--) {
325 M_MOV(rd->argintregs[s1], rd->argintregs[s1 + 1]);
328 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
333 x86_64_mov_imm_reg(cd, (ptrint) m, REG_ITMP2);
334 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
335 x86_64_mov_imm_reg(cd, (ptrint) builtin_trace_args, REG_ITMP1);
336 x86_64_call_reg(cd, REG_ITMP1);
339 x86_64_mov_imm_reg(cd, (ptrint) compiledinvokation, REG_ITMP1);
340 x86_64_call_reg(cd, REG_ITMP1);
343 /* restore integer argument registers */
345 for (p = 0; p < INT_ARG_CNT; p++)
346 M_LLD(rd->argintregs[p], REG_SP, (1 + p) * 8);
348 /* restore float argument registers */
350 for (p = 0; p < FLT_ARG_CNT; p++)
351 M_DLD(rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
353 /* restore temporary registers for leaf methods */
355 if (m->isleafmethod) {
356 for (p = 0; p < INT_TMP_CNT; p++)
357 M_LLD(rd->tmpintregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
359 for (p = 0; p < FLT_TMP_CNT; p++)
360 M_DLD(rd->tmpfltregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
363 M_LADD_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT + 1 + 1) * 8, REG_SP);
368 /* end of header generation */
370 /* walk through all basic blocks */
371 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
373 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
375 if (bptr->flags >= BBREACHED) {
377 /* branch resolving */
380 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
381 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
386 /* copy interface registers to their destination */
394 while (src != NULL) {
396 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
397 if (bptr->type == BBTYPE_SBR) {
398 /* d = reg_of_var(rd, src, REG_ITMP1); */
399 if (!(src->flags & INMEMORY))
403 x86_64_pop_reg(cd, d);
404 store_reg_to_var_int(src, d);
406 } else if (bptr->type == BBTYPE_EXH) {
407 /* d = reg_of_var(rd, src, REG_ITMP1); */
408 if (!(src->flags & INMEMORY))
412 M_INTMOVE(REG_ITMP1, d);
413 store_reg_to_var_int(src, d);
422 while (src != NULL) {
424 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
425 if (bptr->type == BBTYPE_SBR) {
426 d = reg_of_var(rd, src, REG_ITMP1);
427 x86_64_pop_reg(cd, d);
428 store_reg_to_var_int(src, d);
430 } else if (bptr->type == BBTYPE_EXH) {
431 d = reg_of_var(rd, src, REG_ITMP1);
432 M_INTMOVE(REG_ITMP1, d);
433 store_reg_to_var_int(src, d);
437 d = reg_of_var(rd, src, REG_ITMP1);
438 if ((src->varkind != STACKVAR)) {
440 if (IS_FLT_DBL_TYPE(s2)) {
441 s1 = rd->interfaces[len][s2].regoff;
442 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
446 x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
448 store_reg_to_var_flt(src, d);
451 s1 = rd->interfaces[len][s2].regoff;
452 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
456 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
458 store_reg_to_var_int(src, d);
467 /* walk through all instructions */
473 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
474 if (iptr->line != currentline) {
475 dseg_addlinenumber(cd, iptr->line, cd->mcodeptr);
476 currentline = iptr->line;
479 MCODECHECK(1024); /* 1KB should be enough */
482 case ICMD_INLINE_START: /* internal ICMDs */
483 case ICMD_INLINE_END:
486 case ICMD_NOP: /* ... ==> ... */
489 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
490 if (src->flags & INMEMORY) {
491 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
494 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
496 x86_64_jcc(cd, X86_64_CC_Z, 0);
497 codegen_addxnullrefs(cd, cd->mcodeptr);
500 /* constant operations ************************************************/
502 case ICMD_ICONST: /* ... ==> ..., constant */
503 /* op1 = 0, val.i = constant */
505 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
506 if (iptr->val.i == 0) {
509 M_IMOV_IMM(iptr->val.i, d);
511 store_reg_to_var_int(iptr->dst, d);
514 case ICMD_LCONST: /* ... ==> ..., constant */
515 /* op1 = 0, val.l = constant */
517 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
518 if (iptr->val.l == 0) {
521 M_MOV_IMM(iptr->val.l, d);
523 store_reg_to_var_int(iptr->dst, d);
526 case ICMD_FCONST: /* ... ==> ..., constant */
527 /* op1 = 0, val.f = constant */
529 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
530 disp = dseg_addfloat(cd, iptr->val.f);
531 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + disp, d);
532 store_reg_to_var_flt(iptr->dst, d);
535 case ICMD_DCONST: /* ... ==> ..., constant */
536 /* op1 = 0, val.d = constant */
538 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
539 disp = dseg_adddouble(cd, iptr->val.d);
540 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + disp, d);
541 store_reg_to_var_flt(iptr->dst, d);
544 case ICMD_ACONST: /* ... ==> ..., constant */
545 /* op1 = 0, val.a = constant */
547 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
549 if ((iptr->target != NULL) && (iptr->val.a == NULL)) {
550 codegen_addpatchref(cd, cd->mcodeptr,
552 (unresolved_class *) iptr->target, 0);
554 if (opt_showdisassemble) {
555 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
558 M_MOV_IMM((ptrint) iptr->val.a, d);
561 if (iptr->val.a == 0) {
564 M_MOV_IMM((ptrint) iptr->val.a, d);
567 store_reg_to_var_int(iptr->dst, d);
571 /* load/store operations **********************************************/
573 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
574 /* op1 = local variable */
576 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
577 if ((iptr->dst->varkind == LOCALVAR) &&
578 (iptr->dst->varnum == iptr->op1)) {
581 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
582 if (var->flags & INMEMORY) {
583 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
584 store_reg_to_var_int(iptr->dst, d);
587 if (iptr->dst->flags & INMEMORY) {
588 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
591 M_INTMOVE(var->regoff, d);
596 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
597 case ICMD_ALOAD: /* op1 = local variable */
599 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
600 if ((iptr->dst->varkind == LOCALVAR) &&
601 (iptr->dst->varnum == iptr->op1)) {
604 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
605 if (var->flags & INMEMORY) {
606 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
607 store_reg_to_var_int(iptr->dst, d);
610 if (iptr->dst->flags & INMEMORY) {
611 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
614 M_INTMOVE(var->regoff, d);
619 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
620 case ICMD_DLOAD: /* op1 = local variable */
622 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
623 if ((iptr->dst->varkind == LOCALVAR) &&
624 (iptr->dst->varnum == iptr->op1)) {
627 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
628 if (var->flags & INMEMORY) {
629 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
630 store_reg_to_var_flt(iptr->dst, d);
633 if (iptr->dst->flags & INMEMORY) {
634 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
637 M_FLTMOVE(var->regoff, d);
642 case ICMD_ISTORE: /* ..., value ==> ... */
643 case ICMD_LSTORE: /* op1 = local variable */
646 if ((src->varkind == LOCALVAR) &&
647 (src->varnum == iptr->op1)) {
650 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
651 if (var->flags & INMEMORY) {
652 var_to_reg_int(s1, src, REG_ITMP1);
653 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
656 var_to_reg_int(s1, src, var->regoff);
657 M_INTMOVE(s1, var->regoff);
661 case ICMD_FSTORE: /* ..., value ==> ... */
662 case ICMD_DSTORE: /* op1 = local variable */
664 if ((src->varkind == LOCALVAR) &&
665 (src->varnum == iptr->op1)) {
668 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
669 if (var->flags & INMEMORY) {
670 var_to_reg_flt(s1, src, REG_FTMP1);
671 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
674 var_to_reg_flt(s1, src, var->regoff);
675 M_FLTMOVE(s1, var->regoff);
680 /* pop/dup/swap operations ********************************************/
682 /* attention: double and longs are only one entry in CACAO ICMDs */
684 case ICMD_POP: /* ..., value ==> ... */
685 case ICMD_POP2: /* ..., value, value ==> ... */
688 case ICMD_DUP: /* ..., a ==> ..., a, a */
689 M_COPY(src, iptr->dst);
692 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
694 M_COPY(src, iptr->dst);
695 M_COPY(src->prev, iptr->dst->prev);
696 M_COPY(iptr->dst, iptr->dst->prev->prev);
699 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
701 M_COPY(src, iptr->dst);
702 M_COPY(src->prev, iptr->dst->prev);
703 M_COPY(src->prev->prev, iptr->dst->prev->prev);
704 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
707 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
709 M_COPY(src, iptr->dst);
710 M_COPY(src->prev, iptr->dst->prev);
713 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
715 M_COPY(src, iptr->dst);
716 M_COPY(src->prev, iptr->dst->prev);
717 M_COPY(src->prev->prev, iptr->dst->prev->prev);
718 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
719 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
722 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
724 M_COPY(src, iptr->dst);
725 M_COPY(src->prev, iptr->dst->prev);
726 M_COPY(src->prev->prev, iptr->dst->prev->prev);
727 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
728 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
729 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
732 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
734 M_COPY(src, iptr->dst->prev);
735 M_COPY(src->prev, iptr->dst);
739 /* integer operations *************************************************/
741 case ICMD_INEG: /* ..., value ==> ..., - value */
743 d = reg_of_var(rd, iptr->dst, REG_NULL);
744 if (iptr->dst->flags & INMEMORY) {
745 if (src->flags & INMEMORY) {
746 if (src->regoff == iptr->dst->regoff) {
747 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
750 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
751 x86_64_negl_reg(cd, REG_ITMP1);
752 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
756 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
757 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
761 if (src->flags & INMEMORY) {
762 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
763 x86_64_negl_reg(cd, d);
766 M_INTMOVE(src->regoff, iptr->dst->regoff);
767 x86_64_negl_reg(cd, iptr->dst->regoff);
772 case ICMD_LNEG: /* ..., value ==> ..., - value */
774 d = reg_of_var(rd, iptr->dst, REG_NULL);
775 if (iptr->dst->flags & INMEMORY) {
776 if (src->flags & INMEMORY) {
777 if (src->regoff == iptr->dst->regoff) {
778 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
781 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
782 x86_64_neg_reg(cd, REG_ITMP1);
783 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
787 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
788 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
792 if (src->flags & INMEMORY) {
793 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
794 x86_64_neg_reg(cd, iptr->dst->regoff);
797 M_INTMOVE(src->regoff, iptr->dst->regoff);
798 x86_64_neg_reg(cd, iptr->dst->regoff);
803 case ICMD_I2L: /* ..., value ==> ..., value */
805 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
806 if (src->flags & INMEMORY) {
807 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
810 x86_64_movslq_reg_reg(cd, src->regoff, d);
812 store_reg_to_var_int(iptr->dst, d);
815 case ICMD_L2I: /* ..., value ==> ..., value */
817 var_to_reg_int(s1, src, REG_ITMP1);
818 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
820 store_reg_to_var_int(iptr->dst, d);
823 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
825 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
826 if (src->flags & INMEMORY) {
827 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
830 x86_64_movsbq_reg_reg(cd, src->regoff, d);
832 store_reg_to_var_int(iptr->dst, d);
835 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
837 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
838 if (src->flags & INMEMORY) {
839 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
842 x86_64_movzwq_reg_reg(cd, src->regoff, d);
844 store_reg_to_var_int(iptr->dst, d);
847 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
849 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
850 if (src->flags & INMEMORY) {
851 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
854 x86_64_movswq_reg_reg(cd, src->regoff, d);
856 store_reg_to_var_int(iptr->dst, d);
860 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
862 d = reg_of_var(rd, iptr->dst, REG_NULL);
863 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
866 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
867 /* val.i = constant */
869 d = reg_of_var(rd, iptr->dst, REG_NULL);
870 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
873 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
875 d = reg_of_var(rd, iptr->dst, REG_NULL);
876 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
879 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
880 /* val.l = constant */
882 d = reg_of_var(rd, iptr->dst, REG_NULL);
883 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
886 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
888 d = reg_of_var(rd, iptr->dst, REG_NULL);
889 if (iptr->dst->flags & INMEMORY) {
890 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
891 if (src->prev->regoff == iptr->dst->regoff) {
892 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
893 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
896 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
897 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
898 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
901 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
902 M_INTMOVE(src->prev->regoff, REG_ITMP1);
903 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
904 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
906 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
907 if (src->prev->regoff == iptr->dst->regoff) {
908 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
911 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
912 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
913 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
917 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
918 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
922 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
923 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
924 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
926 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
927 M_INTMOVE(src->prev->regoff, d);
928 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
930 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
931 /* workaround for reg alloc */
932 if (src->regoff == iptr->dst->regoff) {
933 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
934 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
935 M_INTMOVE(REG_ITMP1, d);
938 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
939 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
943 /* workaround for reg alloc */
944 if (src->regoff == iptr->dst->regoff) {
945 M_INTMOVE(src->prev->regoff, REG_ITMP1);
946 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
947 M_INTMOVE(REG_ITMP1, d);
950 M_INTMOVE(src->prev->regoff, d);
951 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
957 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
958 /* val.i = constant */
960 d = reg_of_var(rd, iptr->dst, REG_NULL);
961 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
964 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
966 d = reg_of_var(rd, iptr->dst, REG_NULL);
967 if (iptr->dst->flags & INMEMORY) {
968 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
969 if (src->prev->regoff == iptr->dst->regoff) {
970 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
971 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
974 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
975 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
976 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
979 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
980 M_INTMOVE(src->prev->regoff, REG_ITMP1);
981 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
982 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
984 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
985 if (src->prev->regoff == iptr->dst->regoff) {
986 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
989 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
990 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
991 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
995 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
996 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1000 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1001 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1002 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1004 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1005 M_INTMOVE(src->prev->regoff, d);
1006 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1008 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1009 /* workaround for reg alloc */
1010 if (src->regoff == iptr->dst->regoff) {
1011 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1012 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1013 M_INTMOVE(REG_ITMP1, d);
1016 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1017 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1021 /* workaround for reg alloc */
1022 if (src->regoff == iptr->dst->regoff) {
1023 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1024 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1025 M_INTMOVE(REG_ITMP1, d);
1028 M_INTMOVE(src->prev->regoff, d);
1029 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1035 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1036 /* val.l = constant */
1038 d = reg_of_var(rd, iptr->dst, REG_NULL);
1039 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1042 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1044 d = reg_of_var(rd, iptr->dst, REG_NULL);
1045 if (iptr->dst->flags & INMEMORY) {
1046 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1047 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1048 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1049 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1051 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1052 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1053 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1054 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1056 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1057 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1058 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1059 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1062 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1063 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1064 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1068 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1069 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1070 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1072 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1073 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1074 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1076 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1077 M_INTMOVE(src->regoff, iptr->dst->regoff);
1078 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1081 if (src->regoff == iptr->dst->regoff) {
1082 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1085 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1086 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1092 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1093 /* val.i = constant */
1095 d = reg_of_var(rd, iptr->dst, REG_NULL);
1096 if (iptr->dst->flags & INMEMORY) {
1097 if (src->flags & INMEMORY) {
1098 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1099 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1102 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1103 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1107 if (src->flags & INMEMORY) {
1108 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1111 if (iptr->val.i == 2) {
1112 M_INTMOVE(src->regoff, iptr->dst->regoff);
1113 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1116 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
1122 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1124 d = reg_of_var(rd, iptr->dst, REG_NULL);
1125 if (iptr->dst->flags & INMEMORY) {
1126 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1127 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1128 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1129 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1131 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1132 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1133 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1134 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1136 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1137 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1138 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1139 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1142 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1143 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1144 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1148 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1149 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1150 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1152 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1153 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1154 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1156 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1157 M_INTMOVE(src->regoff, iptr->dst->regoff);
1158 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1161 if (src->regoff == iptr->dst->regoff) {
1162 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1165 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1166 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
1172 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1173 /* val.l = constant */
1175 d = reg_of_var(rd, iptr->dst, REG_NULL);
1176 if (iptr->dst->flags & INMEMORY) {
1177 if (src->flags & INMEMORY) {
1178 if (IS_IMM32(iptr->val.l)) {
1179 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1182 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1183 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1185 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1188 if (IS_IMM32(iptr->val.l)) {
1189 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1192 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1193 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1195 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1199 if (src->flags & INMEMORY) {
1200 if (IS_IMM32(iptr->val.l)) {
1201 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1204 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1205 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1209 /* should match in many cases */
1210 if (iptr->val.l == 2) {
1211 M_INTMOVE(src->regoff, iptr->dst->regoff);
1212 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1215 if (IS_IMM32(iptr->val.l)) {
1216 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1219 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1220 M_INTMOVE(src->regoff, iptr->dst->regoff);
1221 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
1228 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1230 d = reg_of_var(rd, iptr->dst, REG_NULL);
1231 if (src->prev->flags & INMEMORY) {
1232 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1235 M_INTMOVE(src->prev->regoff, RAX);
1238 if (src->flags & INMEMORY) {
1239 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1242 M_INTMOVE(src->regoff, REG_ITMP3);
1246 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1247 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1248 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1249 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1251 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1253 x86_64_idivl_reg(cd, REG_ITMP3);
1255 if (iptr->dst->flags & INMEMORY) {
1256 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1257 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1260 M_INTMOVE(RAX, iptr->dst->regoff);
1262 if (iptr->dst->regoff != RDX) {
1263 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1268 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1269 d = reg_of_var(rd, iptr->dst, REG_NULL);
1270 if (src->prev->flags & INMEMORY) {
1271 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1274 M_INTMOVE(src->prev->regoff, RAX);
1277 if (src->flags & INMEMORY) {
1278 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1281 M_INTMOVE(src->regoff, REG_ITMP3);
1285 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1287 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1288 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1291 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1292 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1293 x86_64_jcc(cd, X86_64_CC_E, 1 + 3); /* 6 bytes */
1296 x86_64_idivl_reg(cd, REG_ITMP3);
1298 if (iptr->dst->flags & INMEMORY) {
1299 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1300 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1303 M_INTMOVE(RDX, iptr->dst->regoff);
1305 if (iptr->dst->regoff != RDX) {
1306 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1311 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1312 /* val.i = constant */
1314 var_to_reg_int(s1, src, REG_ITMP1);
1315 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1316 M_INTMOVE(s1, REG_ITMP1);
1317 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1318 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1319 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1320 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1321 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1322 store_reg_to_var_int(iptr->dst, d);
1325 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1326 /* val.i = constant */
1328 var_to_reg_int(s1, src, REG_ITMP1);
1329 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1330 M_INTMOVE(s1, REG_ITMP1);
1331 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1332 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1333 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1334 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1335 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1336 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1337 store_reg_to_var_int(iptr->dst, d);
1341 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1343 d = reg_of_var(rd, iptr->dst, REG_NULL);
1345 if (src->prev->flags & INMEMORY) {
1346 M_LLD(RAX, REG_SP, src->prev->regoff * 8);
1349 M_INTMOVE(src->prev->regoff, RAX);
1352 if (src->flags & INMEMORY) {
1353 M_LLD(REG_ITMP3, REG_SP, src->regoff * 8);
1356 M_INTMOVE(src->regoff, REG_ITMP3);
1360 /* check as described in jvm spec */
1361 disp = dseg_adds8(cd, 0x8000000000000000LL);
1362 M_CMP_MEMBASE(RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, RAX);
1364 M_CMP_IMM(-1, REG_ITMP3); /* 4 bytes */
1365 M_BEQ(3 + 2 + 3); /* 6 bytes */
1367 M_MOV(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1369 x86_64_idiv_reg(cd, REG_ITMP3);
1371 if (iptr->dst->flags & INMEMORY) {
1372 M_LST(RAX, REG_SP, iptr->dst->regoff * 8);
1373 M_MOV(REG_ITMP2, RDX); /* restore %rdx */
1376 M_INTMOVE(RAX, iptr->dst->regoff);
1378 if (iptr->dst->regoff != RDX) {
1379 M_MOV(REG_ITMP2, RDX); /* restore %rdx */
1384 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1386 d = reg_of_var(rd, iptr->dst, REG_NULL);
1387 if (src->prev->flags & INMEMORY) {
1388 M_LLD(REG_ITMP1, REG_SP, src->prev->regoff * 8);
1391 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1394 if (src->flags & INMEMORY) {
1395 M_LLD(REG_ITMP3, REG_SP, src->regoff * 8);
1398 M_INTMOVE(src->regoff, REG_ITMP3);
1402 M_MOV(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1404 /* check as described in jvm spec */
1405 disp = dseg_adds8(cd, 0x8000000000000000LL);
1406 M_CMP_MEMBASE(RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP1);
1410 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1412 M_XOR(RDX, RDX); /* 3 bytes */
1413 M_CMP_IMM(-1, REG_ITMP3); /* 4 bytes */
1414 M_BEQ(2 + 3); /* 6 bytes */
1417 x86_64_idiv_reg(cd, REG_ITMP3);
1419 if (iptr->dst->flags & INMEMORY) {
1420 M_LST(RDX, REG_SP, iptr->dst->regoff * 8);
1421 M_MOV(REG_ITMP2, RDX); /* restore %rdx */
1424 M_INTMOVE(RDX, iptr->dst->regoff);
1426 if (iptr->dst->regoff != RDX) {
1427 M_MOV(REG_ITMP2, RDX); /* restore %rdx */
1432 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1433 /* val.i = constant */
1435 var_to_reg_int(s1, src, REG_ITMP1);
1436 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1437 M_INTMOVE(s1, REG_ITMP1);
1438 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1439 x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1440 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1441 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1442 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1443 store_reg_to_var_int(iptr->dst, d);
1446 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1447 /* val.l = constant */
1449 var_to_reg_int(s1, src, REG_ITMP1);
1450 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1451 M_INTMOVE(s1, REG_ITMP1);
1452 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1453 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1454 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1455 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1456 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1457 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1458 store_reg_to_var_int(iptr->dst, d);
1461 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1463 d = reg_of_var(rd, iptr->dst, REG_NULL);
1464 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1467 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1468 /* val.i = constant */
1470 d = reg_of_var(rd, iptr->dst, REG_NULL);
1471 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1474 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1476 d = reg_of_var(rd, iptr->dst, REG_NULL);
1477 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1480 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1481 /* val.i = constant */
1483 d = reg_of_var(rd, iptr->dst, REG_NULL);
1484 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1487 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1489 d = reg_of_var(rd, iptr->dst, REG_NULL);
1490 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1493 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1494 /* val.i = constant */
1496 d = reg_of_var(rd, iptr->dst, REG_NULL);
1497 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1500 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1502 d = reg_of_var(rd, iptr->dst, REG_NULL);
1503 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1506 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1507 /* val.i = constant */
1509 d = reg_of_var(rd, iptr->dst, REG_NULL);
1510 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1513 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1515 d = reg_of_var(rd, iptr->dst, REG_NULL);
1516 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1519 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1520 /* val.i = constant */
1522 d = reg_of_var(rd, iptr->dst, REG_NULL);
1523 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1526 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1528 d = reg_of_var(rd, iptr->dst, REG_NULL);
1529 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1532 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1533 /* val.l = constant */
1535 d = reg_of_var(rd, iptr->dst, REG_NULL);
1536 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1539 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1541 d = reg_of_var(rd, iptr->dst, REG_NULL);
1542 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1545 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1546 /* val.i = constant */
1548 d = reg_of_var(rd, iptr->dst, REG_NULL);
1549 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1552 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1554 d = reg_of_var(rd, iptr->dst, REG_NULL);
1555 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1558 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1559 /* val.l = constant */
1561 d = reg_of_var(rd, iptr->dst, REG_NULL);
1562 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1565 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1567 d = reg_of_var(rd, iptr->dst, REG_NULL);
1568 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1571 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1572 /* val.i = constant */
1574 d = reg_of_var(rd, iptr->dst, REG_NULL);
1575 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1578 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1580 d = reg_of_var(rd, iptr->dst, REG_NULL);
1581 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1584 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1585 /* val.l = constant */
1587 d = reg_of_var(rd, iptr->dst, REG_NULL);
1588 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1591 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1593 d = reg_of_var(rd, iptr->dst, REG_NULL);
1594 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1597 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1598 /* val.i = constant */
1600 d = reg_of_var(rd, iptr->dst, REG_NULL);
1601 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1604 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1606 d = reg_of_var(rd, iptr->dst, REG_NULL);
1607 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1610 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1611 /* val.l = constant */
1613 d = reg_of_var(rd, iptr->dst, REG_NULL);
1614 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
1618 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1619 /* op1 = variable, val.i = constant */
1621 /* using inc and dec is definitely faster than add -- tested */
1624 var = &(rd->locals[iptr->op1][TYPE_INT]);
1626 if (var->flags & INMEMORY) {
1627 if (iptr->val.i == 1) {
1628 x86_64_incl_membase(cd, REG_SP, d * 8);
1630 } else if (iptr->val.i == -1) {
1631 x86_64_decl_membase(cd, REG_SP, d * 8);
1634 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1638 if (iptr->val.i == 1) {
1639 x86_64_incl_reg(cd, d);
1641 } else if (iptr->val.i == -1) {
1642 x86_64_decl_reg(cd, d);
1645 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1651 /* floating operations ************************************************/
1653 case ICMD_FNEG: /* ..., value ==> ..., - value */
1655 var_to_reg_flt(s1, src, REG_FTMP1);
1656 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1657 disp = dseg_adds4(cd, 0x80000000);
1659 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + disp, REG_FTMP2);
1660 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1661 store_reg_to_var_flt(iptr->dst, d);
1664 case ICMD_DNEG: /* ..., value ==> ..., - value */
1666 var_to_reg_flt(s1, src, REG_FTMP1);
1667 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1668 disp = dseg_adds8(cd, 0x8000000000000000);
1670 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + disp, REG_FTMP2);
1671 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1672 store_reg_to_var_flt(iptr->dst, d);
1675 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1677 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1678 var_to_reg_flt(s2, src, REG_FTMP2);
1679 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1681 x86_64_addss_reg_reg(cd, s2, d);
1682 } else if (s2 == d) {
1683 x86_64_addss_reg_reg(cd, s1, d);
1686 x86_64_addss_reg_reg(cd, s2, d);
1688 store_reg_to_var_flt(iptr->dst, d);
1691 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1693 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1694 var_to_reg_flt(s2, src, REG_FTMP2);
1695 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1697 x86_64_addsd_reg_reg(cd, s2, d);
1698 } else if (s2 == d) {
1699 x86_64_addsd_reg_reg(cd, s1, d);
1702 x86_64_addsd_reg_reg(cd, s2, d);
1704 store_reg_to_var_flt(iptr->dst, d);
1707 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1709 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1710 var_to_reg_flt(s2, src, REG_FTMP2);
1711 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1713 M_FLTMOVE(s2, REG_FTMP2);
1717 x86_64_subss_reg_reg(cd, s2, d);
1718 store_reg_to_var_flt(iptr->dst, d);
1721 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1723 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1724 var_to_reg_flt(s2, src, REG_FTMP2);
1725 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1727 M_FLTMOVE(s2, REG_FTMP2);
1731 x86_64_subsd_reg_reg(cd, s2, d);
1732 store_reg_to_var_flt(iptr->dst, d);
1735 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1737 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1738 var_to_reg_flt(s2, src, REG_FTMP2);
1739 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1741 x86_64_mulss_reg_reg(cd, s2, d);
1742 } else if (s2 == d) {
1743 x86_64_mulss_reg_reg(cd, s1, d);
1746 x86_64_mulss_reg_reg(cd, s2, d);
1748 store_reg_to_var_flt(iptr->dst, d);
1751 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1753 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1754 var_to_reg_flt(s2, src, REG_FTMP2);
1755 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1757 x86_64_mulsd_reg_reg(cd, s2, d);
1758 } else if (s2 == d) {
1759 x86_64_mulsd_reg_reg(cd, s1, d);
1762 x86_64_mulsd_reg_reg(cd, s2, d);
1764 store_reg_to_var_flt(iptr->dst, d);
1767 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1769 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1770 var_to_reg_flt(s2, src, REG_FTMP2);
1771 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1773 M_FLTMOVE(s2, REG_FTMP2);
1777 x86_64_divss_reg_reg(cd, s2, d);
1778 store_reg_to_var_flt(iptr->dst, d);
1781 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1783 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1784 var_to_reg_flt(s2, src, REG_FTMP2);
1785 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1787 M_FLTMOVE(s2, REG_FTMP2);
1791 x86_64_divsd_reg_reg(cd, s2, d);
1792 store_reg_to_var_flt(iptr->dst, d);
1795 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1797 var_to_reg_int(s1, src, REG_ITMP1);
1798 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1799 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1800 store_reg_to_var_flt(iptr->dst, d);
1803 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1805 var_to_reg_int(s1, src, REG_ITMP1);
1806 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1807 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1808 store_reg_to_var_flt(iptr->dst, d);
1811 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1813 var_to_reg_int(s1, src, REG_ITMP1);
1814 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1815 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1816 store_reg_to_var_flt(iptr->dst, d);
1819 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1821 var_to_reg_int(s1, src, REG_ITMP1);
1822 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1823 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1824 store_reg_to_var_flt(iptr->dst, d);
1827 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1829 var_to_reg_flt(s1, src, REG_FTMP1);
1830 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1831 x86_64_cvttss2si_reg_reg(cd, s1, d);
1832 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1833 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1834 x86_64_jcc(cd, X86_64_CC_NE, a);
1835 M_FLTMOVE(s1, REG_FTMP1);
1836 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1837 x86_64_call_reg(cd, REG_ITMP2);
1838 M_INTMOVE(REG_RESULT, d);
1839 store_reg_to_var_int(iptr->dst, d);
1842 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1844 var_to_reg_flt(s1, src, REG_FTMP1);
1845 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1846 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1847 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1848 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1849 x86_64_jcc(cd, X86_64_CC_NE, a);
1850 M_FLTMOVE(s1, REG_FTMP1);
1851 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1852 x86_64_call_reg(cd, REG_ITMP2);
1853 M_INTMOVE(REG_RESULT, d);
1854 store_reg_to_var_int(iptr->dst, d);
1857 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1859 var_to_reg_flt(s1, src, REG_FTMP1);
1860 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1861 x86_64_cvttss2siq_reg_reg(cd, s1, d);
1862 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1863 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1864 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1865 x86_64_jcc(cd, X86_64_CC_NE, a);
1866 M_FLTMOVE(s1, REG_FTMP1);
1867 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
1868 x86_64_call_reg(cd, REG_ITMP2);
1869 M_INTMOVE(REG_RESULT, d);
1870 store_reg_to_var_int(iptr->dst, d);
1873 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1875 var_to_reg_flt(s1, src, REG_FTMP1);
1876 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1877 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1878 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1879 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1880 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1881 x86_64_jcc(cd, X86_64_CC_NE, a);
1882 M_FLTMOVE(s1, REG_FTMP1);
1883 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
1884 x86_64_call_reg(cd, REG_ITMP2);
1885 M_INTMOVE(REG_RESULT, d);
1886 store_reg_to_var_int(iptr->dst, d);
1889 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1891 var_to_reg_flt(s1, src, REG_FTMP1);
1892 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1893 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1894 store_reg_to_var_flt(iptr->dst, d);
1897 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1899 var_to_reg_flt(s1, src, REG_FTMP1);
1900 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1901 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1902 store_reg_to_var_flt(iptr->dst, d);
1905 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1906 /* == => 0, < => 1, > => -1 */
1908 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1909 var_to_reg_flt(s2, src, REG_FTMP2);
1910 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1911 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1912 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1913 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1914 x86_64_ucomiss_reg_reg(cd, s1, s2);
1915 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1916 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1917 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1918 store_reg_to_var_int(iptr->dst, d);
1921 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1922 /* == => 0, < => 1, > => -1 */
1924 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1925 var_to_reg_flt(s2, src, REG_FTMP2);
1926 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1927 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1928 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1929 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1930 x86_64_ucomiss_reg_reg(cd, s1, s2);
1931 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1932 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1933 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1934 store_reg_to_var_int(iptr->dst, d);
1937 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1938 /* == => 0, < => 1, > => -1 */
1940 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1941 var_to_reg_flt(s2, src, REG_FTMP2);
1942 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1943 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1944 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1945 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1946 x86_64_ucomisd_reg_reg(cd, s1, s2);
1947 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1948 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1949 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1950 store_reg_to_var_int(iptr->dst, d);
1953 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1954 /* == => 0, < => 1, > => -1 */
1956 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1957 var_to_reg_flt(s2, src, REG_FTMP2);
1958 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1959 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1960 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1961 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1962 x86_64_ucomisd_reg_reg(cd, s1, s2);
1963 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1964 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1965 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1966 store_reg_to_var_int(iptr->dst, d);
1970 /* memory operations **************************************************/
1972 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
1974 var_to_reg_int(s1, src, REG_ITMP1);
1975 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1976 gen_nullptr_check(s1);
1977 x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
1978 store_reg_to_var_int(iptr->dst, d);
1981 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
1983 var_to_reg_int(s1, src->prev, REG_ITMP1);
1984 var_to_reg_int(s2, src, REG_ITMP2);
1985 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1986 if (iptr->op1 == 0) {
1987 gen_nullptr_check(s1);
1990 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
1991 store_reg_to_var_int(iptr->dst, d);
1994 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1996 var_to_reg_int(s1, src->prev, REG_ITMP1);
1997 var_to_reg_int(s2, src, REG_ITMP2);
1998 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1999 if (iptr->op1 == 0) {
2000 gen_nullptr_check(s1);
2003 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2004 store_reg_to_var_int(iptr->dst, d);
2007 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2009 var_to_reg_int(s1, src->prev, REG_ITMP1);
2010 var_to_reg_int(s2, src, REG_ITMP2);
2011 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2012 if (iptr->op1 == 0) {
2013 gen_nullptr_check(s1);
2016 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2017 store_reg_to_var_int(iptr->dst, d);
2020 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2022 var_to_reg_int(s1, src->prev, REG_ITMP1);
2023 var_to_reg_int(s2, src, REG_ITMP2);
2024 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2025 if (iptr->op1 == 0) {
2026 gen_nullptr_check(s1);
2029 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2030 store_reg_to_var_flt(iptr->dst, d);
2033 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2035 var_to_reg_int(s1, src->prev, REG_ITMP1);
2036 var_to_reg_int(s2, src, REG_ITMP2);
2037 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2038 if (iptr->op1 == 0) {
2039 gen_nullptr_check(s1);
2042 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2043 store_reg_to_var_flt(iptr->dst, d);
2046 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2048 var_to_reg_int(s1, src->prev, REG_ITMP1);
2049 var_to_reg_int(s2, src, REG_ITMP2);
2050 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2051 if (iptr->op1 == 0) {
2052 gen_nullptr_check(s1);
2055 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2056 store_reg_to_var_int(iptr->dst, d);
2059 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2061 var_to_reg_int(s1, src->prev, REG_ITMP1);
2062 var_to_reg_int(s2, src, REG_ITMP2);
2063 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2064 if (iptr->op1 == 0) {
2065 gen_nullptr_check(s1);
2068 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2069 store_reg_to_var_int(iptr->dst, d);
2072 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2074 var_to_reg_int(s1, src->prev, REG_ITMP1);
2075 var_to_reg_int(s2, src, REG_ITMP2);
2076 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2077 if (iptr->op1 == 0) {
2078 gen_nullptr_check(s1);
2081 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2082 store_reg_to_var_int(iptr->dst, d);
2086 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2088 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2089 var_to_reg_int(s2, src->prev, REG_ITMP2);
2090 if (iptr->op1 == 0) {
2091 gen_nullptr_check(s1);
2094 var_to_reg_int(s3, src, REG_ITMP3);
2095 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2098 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2100 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2101 var_to_reg_int(s2, src->prev, REG_ITMP2);
2102 if (iptr->op1 == 0) {
2103 gen_nullptr_check(s1);
2106 var_to_reg_int(s3, src, REG_ITMP3);
2107 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2110 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2112 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2113 var_to_reg_int(s2, src->prev, REG_ITMP2);
2114 if (iptr->op1 == 0) {
2115 gen_nullptr_check(s1);
2118 var_to_reg_flt(s3, src, REG_FTMP3);
2119 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2122 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2124 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2125 var_to_reg_int(s2, src->prev, REG_ITMP2);
2126 if (iptr->op1 == 0) {
2127 gen_nullptr_check(s1);
2130 var_to_reg_flt(s3, src, REG_FTMP3);
2131 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2134 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2136 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2137 var_to_reg_int(s2, src->prev, REG_ITMP2);
2138 if (iptr->op1 == 0) {
2139 gen_nullptr_check(s1);
2142 var_to_reg_int(s3, src, REG_ITMP3);
2143 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2146 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2148 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2149 var_to_reg_int(s2, src->prev, REG_ITMP2);
2150 if (iptr->op1 == 0) {
2151 gen_nullptr_check(s1);
2154 var_to_reg_int(s3, src, REG_ITMP3);
2155 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2158 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2160 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2161 var_to_reg_int(s2, src->prev, REG_ITMP2);
2162 if (iptr->op1 == 0) {
2163 gen_nullptr_check(s1);
2166 var_to_reg_int(s3, src, REG_ITMP3);
2167 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2170 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2172 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2173 var_to_reg_int(s2, src->prev, REG_ITMP2);
2174 if (iptr->op1 == 0) {
2175 gen_nullptr_check(s1);
/* Tail of ICMD_AASTORE (case label is above this excerpt): before a
   reference is stored into an object array, BUILTIN_canstore(array,
   value) is called to perform the runtime array-store type check.
   NOTE(review): the call/test instructions between these lines are
   elided from this listing. */
2178 var_to_reg_int(s3, src, REG_ITMP3);
2180 M_MOV(s1, rd->argintregs[0]);
2181 M_MOV(s3, rd->argintregs[1]);
2182 M_MOV_IMM((ptrint) BUILTIN_canstore, REG_ITMP1);
/* record this pc so an ArrayStoreException stub can be emitted for it */
2186 codegen_addxstorerefs(cd, cd->mcodeptr);
/* reload arrayref (s1), index (s2) and value (s3) — the builtin call
   may have clobbered the temporaries — then do the 8-byte store:
   data[s2] with scale 3 (== *8) */
2188 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2189 var_to_reg_int(s2, src->prev, REG_ITMP2);
2190 var_to_reg_int(s3, src, REG_ITMP3);
2191 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
/* store int constant iptr->val.i into an int array; iptr->op1 == 0
   means the verifier could not prove the array non-null, so emit an
   explicit null-pointer check */
2195 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2197 var_to_reg_int(s1, src->prev, REG_ITMP1);
2198 var_to_reg_int(s2, src, REG_ITMP2);
2199 if (iptr->op1 == 0) {
2200 gen_nullptr_check(s1);
/* scale 2 (== *4) for 32-bit int elements */
2203 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2206 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2208 var_to_reg_int(s1, src->prev, REG_ITMP1);
2209 var_to_reg_int(s2, src, REG_ITMP2);
2210 if (iptr->op1 == 0) {
2211 gen_nullptr_check(s1);
/* x86_64 only has a sign-extended imm32 form of the 64-bit immediate
   store; for constants that don't fit, split into low/high 32-bit
   stores (the else branch — its label is elided here) */
2215 if (IS_IMM32(iptr->val.l)) {
2216 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2219 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2220 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
/* only the null reference can be stored as a constant into an object
   array — no canstore check is needed for null */
2224 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2226 var_to_reg_int(s1, src->prev, REG_ITMP1);
2227 var_to_reg_int(s2, src, REG_ITMP2);
2228 if (iptr->op1 == 0) {
2229 gen_nullptr_check(s1);
2232 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
/* byte array store: scale 0 (== *1) */
2235 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2237 var_to_reg_int(s1, src->prev, REG_ITMP1);
2238 var_to_reg_int(s2, src, REG_ITMP2);
2239 if (iptr->op1 == 0) {
2240 gen_nullptr_check(s1);
2243 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
/* char array store: 16-bit store, scale 1 (== *2) */
2246 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2248 var_to_reg_int(s1, src->prev, REG_ITMP1);
2249 var_to_reg_int(s2, src, REG_ITMP2);
2250 if (iptr->op1 == 0) {
2251 gen_nullptr_check(s1);
2254 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
/* short array store: same 16-bit form as char */
2257 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2259 var_to_reg_int(s1, src->prev, REG_ITMP1);
2260 var_to_reg_int(s2, src, REG_ITMP2);
2261 if (iptr->op1 == 0) {
2262 gen_nullptr_check(s1);
2265 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
/* Static field access.  The field's address is placed in the data
   segment (dseg) and loaded RIP-relative at runtime; for unresolved
   fields a patcher fills in the dseg slot, for resolved-but-
   uninitialized classes a clinit patcher runs <clinit> first. */
2269 case ICMD_GETSTATIC: /* ... ==> ..., value */
2270 /* op1 = type, val.a = field address */
/* unresolved: reserve a NULL dseg slot and register a patcher that
   will store the real field address into it */
2273 disp = dseg_addaddress(cd, NULL);
2275 codegen_addpatchref(cd, cd->mcodeptr,
2276 PATCHER_get_putstatic,
2277 (unresolved_field *) iptr->target, disp);
/* padding so the disassembler output lines up with the patch site */
2279 if (opt_showdisassemble) {
2280 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2284 fieldinfo *fi = iptr->val.a;
2286 disp = dseg_addaddress(cd, &(fi->value));
/* class not initialized yet: patcher triggers static initializer */
2288 if (!fi->class->initialized) {
2289 codegen_addpatchref(cd, cd->mcodeptr,
2290 PATCHER_clinit, fi->class, 0);
2292 if (opt_showdisassemble) {
2293 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2298 /* This approach is much faster than moving the field address */
2299 /* inline into a register. */
/* RIP-relative load of the field address from the dseg slot; the +7
   is presumably the encoded length of this mov so the displacement is
   relative to the *next* instruction — TODO confirm */
2300 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP2);
/* dispatch on the field type (TYPE_* case labels elided in listing) */
2301 switch (iptr->op1) {
2303 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2304 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2305 store_reg_to_var_int(iptr->dst, d);
2309 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2310 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2311 store_reg_to_var_int(iptr->dst, d);
2314 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2315 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2316 store_reg_to_var_flt(iptr->dst, d);
2319 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2320 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2321 store_reg_to_var_flt(iptr->dst, d);
/* PUTSTATIC mirrors GETSTATIC but stores the source operand through
   the field address loaded from the dseg */
2326 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2327 /* op1 = type, val.a = field address */
2330 disp = dseg_addaddress(cd, NULL);
2332 codegen_addpatchref(cd, cd->mcodeptr,
2333 PATCHER_get_putstatic,
2334 (unresolved_field *) iptr->target, disp);
2336 if (opt_showdisassemble) {
2337 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2341 fieldinfo *fi = iptr->val.a;
2343 disp = dseg_addaddress(cd, &(fi->value));
2345 if (!fi->class->initialized) {
2346 codegen_addpatchref(cd, cd->mcodeptr,
2347 PATCHER_clinit, fi->class, 0);
2349 if (opt_showdisassemble) {
2350 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2355 /* This approach is much faster than moving the field address */
2356 /* inline into a register. */
2357 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP2);
2358 switch (iptr->op1) {
2360 var_to_reg_int(s2, src, REG_ITMP1);
2361 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2365 var_to_reg_int(s2, src, REG_ITMP1);
2366 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2369 var_to_reg_flt(s2, src, REG_FTMP1);
2370 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2373 var_to_reg_flt(s2, src, REG_FTMP1);
2374 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
/* PUTSTATICCONST: the constant lives in iptr->val, while the field
   description is carried by the following (NOP) instruction iptr[1] */
2379 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2380 /* val = value (in current instruction) */
2381 /* op1 = type, val.a = field address (in */
2382 /* following NOP) */
2384 if (!iptr[1].val.a) {
2385 disp = dseg_addaddress(cd, NULL);
2387 codegen_addpatchref(cd, cd->mcodeptr,
2388 PATCHER_get_putstatic,
2389 (unresolved_field *) iptr[1].target, disp);
2391 if (opt_showdisassemble) {
2392 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2396 fieldinfo *fi = iptr[1].val.a;
2398 disp = dseg_addaddress(cd, &(fi->value));
2400 if (!fi->class->initialized) {
2401 codegen_addpatchref(cd, cd->mcodeptr,
2402 PATCHER_clinit, fi->class, 0);
2404 if (opt_showdisassemble) {
2405 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2410 /* This approach is much faster than moving the field address */
2411 /* inline into a register. */
2412 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP1);
2413 switch (iptr->op1) {
2416 x86_64_movl_imm_membase(cd, iptr->val.i, REG_ITMP1, 0);
/* long constant: single imm32 store when it fits, else two 32-bit
   halves (same pattern as LASTORECONST above) */
2421 if (IS_IMM32(iptr->val.l)) {
2422 x86_64_mov_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2424 x86_64_movl_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2425 x86_64_movl_imm_membase(cd, iptr->val.l >> 32, REG_ITMP1, 4);
/* Instance field access: base register + field offset.  For
   unresolved fields a 32-bit displacement form (membase32) is always
   emitted so the patcher can later rewrite the offset in place. */
2431 case ICMD_GETFIELD: /* ... ==> ..., value */
2432 /* op1 = type, val.i = field offset */
2434 var_to_reg_int(s1, src, REG_ITMP1);
2435 gen_nullptr_check(s1);
/* unresolved field: offset 0 is a placeholder patched at runtime */
2438 codegen_addpatchref(cd, cd->mcodeptr,
2439 PATCHER_get_putfield,
2440 (unresolved_field *) iptr->target, 0);
2442 if (opt_showdisassemble) {
2443 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2449 a = ((fieldinfo *) (iptr->val.a))->offset;
/* type dispatch (TYPE_* labels elided in this listing) */
2452 switch (iptr->op1) {
2454 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2455 x86_64_movl_membase32_reg(cd, s1, a, d);
2456 store_reg_to_var_int(iptr->dst, d);
2460 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2461 x86_64_mov_membase32_reg(cd, s1, a, d);
2462 store_reg_to_var_int(iptr->dst, d);
2465 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2466 x86_64_movss_membase32_reg(cd, s1, a, d);
2467 store_reg_to_var_flt(iptr->dst, d);
2470 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2471 x86_64_movsd_membase32_reg(cd, s1, a, d);
2472 store_reg_to_var_flt(iptr->dst, d);
2477 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2478 /* op1 = type, val.i = field offset */
2480 var_to_reg_int(s1, src->prev, REG_ITMP1);
2481 gen_nullptr_check(s1);
/* fetch the value into an int or float register depending on type */
2482 if (IS_INT_LNG_TYPE(iptr->op1)) {
2483 var_to_reg_int(s2, src, REG_ITMP2);
2485 var_to_reg_flt(s2, src, REG_FTMP2);
2489 codegen_addpatchref(cd, cd->mcodeptr,
2490 PATCHER_get_putfield,
2491 (unresolved_field *) iptr->target, 0);
2493 if (opt_showdisassemble) {
2494 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2500 a = ((fieldinfo *) (iptr->val.a))->offset;
2503 switch (iptr->op1) {
2505 x86_64_movl_reg_membase32(cd, s2, s1, a);
2509 x86_64_mov_reg_membase32(cd, s2, s1, a);
2512 x86_64_movss_reg_membase32(cd, s2, s1, a);
2515 x86_64_movsd_reg_membase32(cd, s2, s1, a);
/* PUTFIELDCONST: constant in iptr->val, field info in following NOP */
2520 case ICMD_PUTFIELDCONST: /* ..., objectref, value ==> ... */
2521 /* val = value (in current instruction) */
2522 /* op1 = type, val.a = field address (in */
2523 /* following NOP) */
2525 var_to_reg_int(s1, src, REG_ITMP1);
2526 gen_nullptr_check(s1);
2528 if (!iptr[1].val.a) {
2529 codegen_addpatchref(cd, cd->mcodeptr,
2530 PATCHER_putfieldconst,
2531 (unresolved_field *) iptr[1].target, 0);
2533 if (opt_showdisassemble) {
2534 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2540 a = ((fieldinfo *) (iptr[1].val.a))->offset;
2543 switch (iptr->op1) {
2546 x86_64_movl_imm_membase32(cd, iptr->val.i, s1, a);
2551 /* We can only optimize the move, if the class is resolved. */
2552 /* Otherwise we don't know what to patch. */
/* resolved + imm32-representable long: one 8-byte store; otherwise
   two 4-byte halves that the patcher knows how to rewrite */
2553 if (iptr[1].val.a && IS_IMM32(iptr->val.l)) {
2554 x86_64_mov_imm_membase32(cd, iptr->val.l, s1, a);
2556 x86_64_movl_imm_membase32(cd, iptr->val.l, s1, a);
2557 x86_64_movl_imm_membase32(cd, iptr->val.l >> 32, s1, a + 4);
2564 /* branch operations **************************************************/
/* ATHROW: move the exception object into the dedicated exception
   pointer register, capture the throwing pc via call/pop, then jump
   to the assembler exception handler */
2566 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2568 var_to_reg_int(s1, src, REG_ITMP1);
2569 M_INTMOVE(s1, REG_ITMP1_XPTR);
/* unresolved exception class: patcher resolves it before the throw */
2572 codegen_addpatchref(cd, cd->mcodeptr,
2573 PATCHER_athrow_areturn,
2574 (unresolved_class *) iptr->val.a, 0);
2576 if (opt_showdisassemble) {
2577 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* call with 0 displacement pushes the return address == this pc,
   which is then popped into the exception-pc register */
2581 M_CALL_IMM(0); /* passing exception pc */
2582 M_POP(REG_ITMP2_XPC);
2584 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
/* unconditional branches: jump emitted elsewhere (elided); the
   forward reference is recorded for later resolution */
2588 case ICMD_GOTO: /* ... ==> ... */
2589 /* op1 = target JavaVM pc */
2592 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2595 case ICMD_JSR: /* ... ==> ... */
2596 /* op1 = target JavaVM pc */
2599 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
/* RET: load the return address from the local variable slot */
2602 case ICMD_RET: /* ... ==> ... */
2603 /* op1 = local variable */
2605 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2606 var_to_reg_int(s1, var, REG_ITMP1);
/* null tests: cmp against 0 in memory, or test reg,reg when the
   operand is already in a register (shorter encoding) */
2610 case ICMD_IFNULL: /* ..., value ==> ... */
2611 /* op1 = target JavaVM pc */
2613 if (src->flags & INMEMORY) {
2614 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2617 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
/* jcc with 0 displacement; the real offset is patched via the
   recorded basic-block reference */
2619 x86_64_jcc(cd, X86_64_CC_E, 0);
2620 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2623 case ICMD_IFNONNULL: /* ..., value ==> ... */
2624 /* op1 = target JavaVM pc */
2626 if (src->flags & INMEMORY) {
2627 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2630 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2632 x86_64_jcc(cd, X86_64_CC_NE, 0);
2633 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
/* int compare-against-constant branches delegate to a shared helper;
   only the condition code differs per opcode */
2636 case ICMD_IFEQ: /* ..., value ==> ... */
2637 /* op1 = target JavaVM pc, val.i = constant */
2639 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2642 case ICMD_IFLT: /* ..., value ==> ... */
2643 /* op1 = target JavaVM pc, val.i = constant */
2645 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2648 case ICMD_IFLE: /* ..., value ==> ... */
2649 /* op1 = target JavaVM pc, val.i = constant */
2651 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2654 case ICMD_IFNE: /* ..., value ==> ... */
2655 /* op1 = target JavaVM pc, val.i = constant */
2657 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2660 case ICMD_IFGT: /* ..., value ==> ... */
2661 /* op1 = target JavaVM pc, val.i = constant */
2663 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2666 case ICMD_IFGE: /* ..., value ==> ... */
2667 /* op1 = target JavaVM pc, val.i = constant */
2669 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
/* same pattern for 64-bit long comparisons */
2672 case ICMD_IF_LEQ: /* ..., value ==> ... */
2673 /* op1 = target JavaVM pc, val.l = constant */
2675 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2678 case ICMD_IF_LLT: /* ..., value ==> ... */
2679 /* op1 = target JavaVM pc, val.l = constant */
2681 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2684 case ICMD_IF_LLE: /* ..., value ==> ... */
2685 /* op1 = target JavaVM pc, val.l = constant */
2687 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2690 case ICMD_IF_LNE: /* ..., value ==> ... */
2691 /* op1 = target JavaVM pc, val.l = constant */
2693 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2696 case ICMD_IF_LGT: /* ..., value ==> ... */
2697 /* op1 = target JavaVM pc, val.l = constant */
2699 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2702 case ICMD_IF_LGE: /* ..., value ==> ... */
2703 /* op1 = target JavaVM pc, val.l = constant */
2705 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
/* two-operand compare-and-branch; ACMP (reference) shares the
   64-bit comparison helper with LCMP */
2708 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2709 /* op1 = target JavaVM pc */
2711 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2714 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2715 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2717 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2720 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2721 /* op1 = target JavaVM pc */
2723 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2726 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2727 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2729 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2732 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2733 /* op1 = target JavaVM pc */
2735 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2738 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2739 /* op1 = target JavaVM pc */
2741 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2744 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2745 /* op1 = target JavaVM pc */
2747 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2750 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2751 /* op1 = target JavaVM pc */
2753 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2756 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2757 /* op1 = target JavaVM pc */
2759 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2762 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2763 /* op1 = target JavaVM pc */
2765 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2768 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2769 /* op1 = target JavaVM pc */
2771 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2774 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2775 /* op1 = target JavaVM pc */
2777 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2780 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
/* Branchless select: load the ELSE constant (or keep the current
   value), load the THEN constant into a temp, then use CMOVcc to
   pick it when the condition on the tested value holds.  The
   optimizer pairs each IFxx_ICONST with an optional following
   ELSE_ICONST instruction. */
2782 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2785 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2786 /* val.i = constant */
2788 var_to_reg_int(s1, src, REG_ITMP1);
2789 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2790 if (iptr[1].opc == ICMD_ELSE_ICONST) {
/* keep tested value out of d before it is overwritten (the guard
   around this move is elided in this listing) */
2792 M_INTMOVE(s1, REG_ITMP1);
2795 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2797 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2798 x86_64_testl_reg_reg(cd, s1, s1);
2799 x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2800 store_reg_to_var_int(iptr->dst, d);
2803 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2804 /* val.i = constant */
2806 var_to_reg_int(s1, src, REG_ITMP1);
2807 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2808 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2810 M_INTMOVE(s1, REG_ITMP1);
2813 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2815 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2816 x86_64_testl_reg_reg(cd, s1, s1);
2817 x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2818 store_reg_to_var_int(iptr->dst, d);
2821 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2822 /* val.i = constant */
2824 var_to_reg_int(s1, src, REG_ITMP1);
2825 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2826 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2828 M_INTMOVE(s1, REG_ITMP1);
2831 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2833 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2834 x86_64_testl_reg_reg(cd, s1, s1);
2835 x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2836 store_reg_to_var_int(iptr->dst, d);
2839 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2840 /* val.i = constant */
2842 var_to_reg_int(s1, src, REG_ITMP1);
2843 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2844 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2846 M_INTMOVE(s1, REG_ITMP1);
2849 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2851 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2852 x86_64_testl_reg_reg(cd, s1, s1);
2853 x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2854 store_reg_to_var_int(iptr->dst, d);
2857 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2858 /* val.i = constant */
2860 var_to_reg_int(s1, src, REG_ITMP1);
2861 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2862 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2864 M_INTMOVE(s1, REG_ITMP1);
2867 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2869 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2870 x86_64_testl_reg_reg(cd, s1, s1);
2871 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2872 store_reg_to_var_int(iptr->dst, d);
2875 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2876 /* val.i = constant */
2878 var_to_reg_int(s1, src, REG_ITMP1);
2879 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2880 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2882 M_INTMOVE(s1, REG_ITMP1);
2885 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2887 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2888 x86_64_testl_reg_reg(cd, s1, s1);
2889 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2890 store_reg_to_var_int(iptr->dst, d);
/* Method returns: move the return value into the ABI result register
   and fall through to the shared epilogue at nowperformreturn. */
2894 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2897 var_to_reg_int(s1, src, REG_RESULT);
2898 M_INTMOVE(s1, REG_RESULT);
2899 goto nowperformreturn;
/* ARETURN additionally may need the return type resolved (patcher) */
2901 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2903 var_to_reg_int(s1, src, REG_RESULT);
2904 M_INTMOVE(s1, REG_RESULT);
2907 codegen_addpatchref(cd, cd->mcodeptr,
2908 PATCHER_athrow_areturn,
2909 (unresolved_class *) iptr->val.a, 0);
2911 if (opt_showdisassemble) {
2912 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2915 goto nowperformreturn;
2917 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2920 var_to_reg_flt(s1, src, REG_FRESULT);
2921 M_FLTMOVE(s1, REG_FRESULT);
2922 goto nowperformreturn;
2924 case ICMD_RETURN: /* ... ==> ... */
/* shared epilogue (nowperformreturn label elided in this listing):
   p counts the remaining stack frame size in 8-byte slots */
2930 p = parentargs_base;
2932 /* call trace function */
/* -verbose:calls exit trace: spill both result registers around the
   builtin_displaymethodstop call, then restore them */
2934 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2936 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2937 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2939 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
2940 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
2941 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2942 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2944 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
2945 x86_64_call_reg(cd, REG_ITMP1);
2947 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2948 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2950 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
/* synchronized method: release the monitor saved at memuse*8, saving
   the return value across the builtin_monitorexit call */
2953 #if defined(USE_THREADS)
2954 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2955 M_ALD(rd->argintregs[0], REG_SP, rd->memuse * 8);
2957 /* we need to save the proper return value */
2958 switch (iptr->opc) {
2962 M_LST(REG_RESULT, REG_SP, rd->memuse * 8);
2966 M_DST(REG_FRESULT, REG_SP, rd->memuse * 8);
2970 M_MOV_IMM((ptrint) builtin_monitorexit, REG_ITMP1);
2973 /* and now restore the proper return value */
2974 switch (iptr->opc) {
2978 M_LLD(REG_RESULT, REG_SP, rd->memuse * 8);
2982 M_DLD(REG_FRESULT, REG_SP, rd->memuse * 8);
2988 /* restore saved registers */
2990 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2991 p--; M_LLD(rd->savintregs[i], REG_SP, p * 8);
2993 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2994 p--; M_DLD(rd->savfltregs[i], REG_SP, p * 8);
2997 /* deallocate stack */
2999 if (parentargs_base)
3000 M_AADD_IMM(parentargs_base * 8, REG_SP);
/* TABLESWITCH: range-check (index - low) unsigned against the table
   size, branch to default on overflow, otherwise jump indirect
   through a jump table built in the data segment */
3007 case ICMD_TABLESWITCH: /* ..., index ==> ... */
3012 tptr = (void **) iptr->target;
3014 s4ptr = iptr->val.a;
3015 l = s4ptr[1]; /* low */
3016 i = s4ptr[2]; /* high */
3018 var_to_reg_int(s1, src, REG_ITMP1);
3019 M_INTMOVE(s1, REG_ITMP1);
/* normalize index to 0..(high-low); the i = i - l + 1 adjustment is
   elided from this listing */
3021 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
/* unsigned compare + JA catches both below-low and above-high */
3026 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
3027 x86_64_jcc(cd, X86_64_CC_A, 0);
3029 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
3030 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3032 /* build jump table top down and use address of lowest entry */
3034 /* s4ptr += 3 + i; */
/* loop over targets (loop header elided) adding each to the dseg */
3038 dseg_addtarget(cd, (basicblock *) tptr[0]);
3042 /* length of dataseg after last dseg_addtarget is used by load */
/* mov imm 0 is a placeholder; dseg_adddata records this site so the
   data-segment base address can be patched in */
3044 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
3045 dseg_adddata(cd, cd->mcodeptr);
3046 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
3047 x86_64_jmp_reg(cd, REG_ITMP1);
/* LOOKUPSWITCH: linear sequence of compare-and-branch pairs, one per
   (match, target) entry, then an unconditional jump to default */
3052 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
3054 s4 i, l, val, *s4ptr;
3057 tptr = (void **) iptr->target;
3059 s4ptr = iptr->val.a;
3060 l = s4ptr[0]; /* default */
3061 i = s4ptr[1]; /* count */
3063 MCODECHECK(8 + ((7 + 6) * i) + 5);
3064 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
/* per-entry compare (loop header and val/tptr advance elided) */
3070 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
3071 x86_64_jcc(cd, X86_64_CC_E, 0);
3072 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3075 x86_64_jmp_imm(cd, 0);
3077 tptr = (void **) iptr->target;
3078 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
/* Method invocation: copy arguments into ABI registers / stack slots
   per the parsed method descriptor, then dispatch per opcode:
   builtin (direct call), static/special (stub or patched address),
   virtual (vftbl table lookup), interface (two-level lookup). */
3083 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
3084 /* op1 = arg count val.a = builtintable entry */
3090 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3091 /* op1 = arg count, val.a = method pointer */
3093 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3094 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
3095 case ICMD_INVOKEINTERFACE:
/* unresolved target: descriptor comes from the method reference;
   resolved (else branch elided): from the methodinfo itself */
3100 unresolved_method *um = iptr->target;
3101 md = um->methodref->parseddesc.md;
3103 md = lm->parseddesc;
3107 s3 = md->paramcount;
3109 MCODECHECK((20 * s3) + 128);
3111 /* copy arguments to registers or stack location */
3113 for (s3 = s3 - 1; s3 >= 0; s3--, src = src->prev) {
/* ARGVAR operands are already in their final location */
3114 if (src->varkind == ARGVAR)
3116 if (IS_INT_LNG_TYPE(src->type)) {
3117 if (!md->params[s3].inmemory) {
3118 s1 = rd->argintregs[md->params[s3].regoff];
3119 var_to_reg_int(d, src, s1);
3122 var_to_reg_int(d, src, REG_ITMP1);
3123 M_LST(d, REG_SP, md->params[s3].regoff * 8);
3127 if (!md->params[s3].inmemory) {
3128 s1 = rd->argfltregs[md->params[s3].regoff];
3129 var_to_reg_flt(d, src, s1);
3132 var_to_reg_flt(d, src, REG_FTMP1);
3133 M_DST(d, REG_SP, md->params[s3].regoff * 8);
3138 switch (iptr->opc) {
/* ICMD_BUILTIN (case label elided): call the builtin function ptr */
3140 a = (ptrint) bte->fp;
3141 d = md->returntype.type;
3143 M_MOV_IMM(a, REG_ITMP1);
3146 /* if op1 == true, we need to check for an exception */
3148 if (iptr->op1 == true) {
3151 codegen_addxexceptionrefs(cd, cd->mcodeptr);
/* INVOKESPECIAL: explicit null check on the receiver via TEST +
   conditional jump to the NullPointerException stub */
3155 case ICMD_INVOKESPECIAL:
3156 M_TEST(rd->argintregs[0]);
3158 codegen_addxnullrefs(cd, cd->mcodeptr);
3160 /* first argument contains pointer */
3161 /* gen_nullptr_check(rd->argintregs[0]); */
3163 /* access memory for hardware nullptr */
3164 /* x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); */
3168 case ICMD_INVOKESTATIC:
/* unresolved: patcher writes the real method address over the
   immediate; resolved: call the method's stub routine */
3170 unresolved_method *um = iptr->target;
3172 codegen_addpatchref(cd, cd->mcodeptr,
3173 PATCHER_invokestatic_special, um, 0);
3175 if (opt_showdisassemble) {
3176 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3180 d = um->methodref->parseddesc.md->returntype.type;
3183 a = (ptrint) lm->stubroutine;
3184 d = lm->parseddesc->returntype.type;
3187 M_MOV_IMM(a, REG_ITMP2);
/* INVOKEVIRTUAL: load receiver's vftbl, then the method pointer at
   table[vftblindex] (32-bit displacement form so it is patchable) */
3191 case ICMD_INVOKEVIRTUAL:
3192 gen_nullptr_check(rd->argintregs[0]);
3195 unresolved_method *um = iptr->target;
3197 codegen_addpatchref(cd, cd->mcodeptr,
3198 PATCHER_invokevirtual, um, 0);
3200 if (opt_showdisassemble) {
3201 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3205 d = um->methodref->parseddesc.md->returntype.type;
3208 s1 = OFFSET(vftbl_t, table[0]) +
3209 sizeof(methodptr) * lm->vftblindex;
3210 d = lm->parseddesc->returntype.type;
3213 x86_64_mov_membase_reg(cd, rd->argintregs[0],
3214 OFFSET(java_objectheader, vftbl),
3216 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP1);
/* INVOKEINTERFACE: vftbl -> interfacetable[-class->index] -> method;
   note the interface table grows at negative offsets */
3220 case ICMD_INVOKEINTERFACE:
3221 gen_nullptr_check(rd->argintregs[0]);
3224 unresolved_method *um = iptr->target;
3226 codegen_addpatchref(cd, cd->mcodeptr,
3227 PATCHER_invokeinterface, um, 0);
3229 if (opt_showdisassemble) {
3230 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3235 d = um->methodref->parseddesc.md->returntype.type;
3238 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3239 sizeof(methodptr) * lm->class->index;
3241 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3243 d = lm->parseddesc->returntype.type;
3246 M_ALD(REG_ITMP2, rd->argintregs[0],
3247 OFFSET(java_objectheader, vftbl));
3248 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP2);
3249 x86_64_mov_membase32_reg(cd, REG_ITMP2, s2, REG_ITMP1);
3254 /* d contains return type */
/* move the ABI result register into the destination variable */
3256 if (d != TYPE_VOID) {
3257 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3258 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3259 M_INTMOVE(REG_RESULT, s1);
3260 store_reg_to_var_int(iptr->dst, s1);
3262 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3263 M_FLTMOVE(REG_FRESULT, s1);
3264 store_reg_to_var_flt(iptr->dst, s1);
/* CHECKCAST: three code shapes — interface check (walk the receiver's
   interface table), class check (baseval/diffval subtype range test),
   and array check (runtime builtin call).  When the superclass is
   unresolved, both interface and class sequences are emitted and a
   runtime flag test (patched in) selects which one runs; the byte
   sizes s2/s3 computed below are the jump distances over the unused
   sequence. */
3270 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3272 /* op1: 0 == array, 1 == class */
3273 /* val.a: (classinfo *) superclass */
3275 /* superclass is an interface:
3277 * OK if ((sub == NULL) ||
3278 * (sub->vftbl->interfacetablelength > super->index) &&
3279 * (sub->vftbl->interfacetable[-super->index] != NULL));
3281 * superclass is a class:
3283 * OK if ((sub == NULL) || (0
3284 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3285 * super->vftbl->diffval));
3288 if (iptr->op1 == 1) {
3289 /* object type cast-check */
3292 vftbl_t *supervftbl;
3295 super = (classinfo *) iptr->val.a;
3302 superindex = super->index;
3303 supervftbl = super->vftbl;
/* baseval/diffval may be renumbered concurrently; restart the
   critical section bookkeeping for this code region */
3306 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3307 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3309 var_to_reg_int(s1, src, REG_ITMP1);
3311 /* calculate interface checkcast code size */
/* s2 = encoded byte length of the interface-check sequence below;
   must match the emitted instructions exactly or the skip jumps
   land mid-instruction */
3313 s2 = 3; /* mov_membase_reg */
3314 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3316 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub imm32 */ +
3317 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3318 3 /* test */ + 6 /* jcc */;
3321 s2 += (opt_showdisassemble ? 5 : 0);
3323 /* calculate class checkcast code size */
3325 s3 = 3; /* mov_membase_reg */
3326 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3327 s3 += 10 /* mov_imm_reg */ + 3 + 4 /* movl_membase32_reg */;
3330 if (s1 != REG_ITMP1) {
3331 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3332 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3333 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3334 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3340 s3 += 3 + 4 /* movl_membase32_reg */ + 3 /* sub */ +
3341 10 /* mov_imm_reg */ + 3 /* movl_membase_reg */;
3342 CALCOFFSETBYTES(s3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3345 s3 += 3 /* cmp */ + 6 /* jcc */;
3348 s3 += (opt_showdisassemble ? 5 : 0);
3350 /* if class is not resolved, check which code to call */
/* null always passes the cast: skip over the flag test and both
   check sequences */
3354 M_BEQ(6 + (opt_showdisassemble ? 5 : 0) + 7 + 6 + s2 + 5 + s3);
3356 codegen_addpatchref(cd, cd->mcodeptr,
3357 PATCHER_checkcast_instanceof_flags,
3358 (constant_classref *) iptr->target, 0);
3360 if (opt_showdisassemble) {
3361 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* the 0 immediate is patched with super->flags at resolve time */
3364 M_IMOV_IMM(0, REG_ITMP2); /* super->flags */
3365 M_IAND_IMM(ACC_INTERFACE, REG_ITMP2);
3369 /* interface checkcast code */
3371 if (!super || (super->flags & ACC_INTERFACE)) {
3377 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3380 codegen_addpatchref(cd, cd->mcodeptr,
3381 PATCHER_checkcast_instanceof_interface,
3382 (constant_classref *) iptr->target, 0);
3384 if (opt_showdisassemble) {
3385 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* fail if interfacetablelength <= superindex ... */
3389 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3390 OFFSET(vftbl_t, interfacetablelength),
3392 /* XXX TWISTI: should this be int arithmetic? */
3393 M_LSUB_IMM32(superindex, REG_ITMP3);
3396 codegen_addxcastrefs(cd, cd->mcodeptr);
/* ... or if the interface table entry is NULL */
3397 x86_64_mov_membase32_reg(cd, REG_ITMP2,
3398 OFFSET(vftbl_t, interfacetable[0]) -
3399 superindex * sizeof(methodptr*),
3403 codegen_addxcastrefs(cd, cd->mcodeptr);
3409 /* class checkcast code */
3411 if (!super || !(super->flags & ACC_INTERFACE)) {
3417 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3420 codegen_addpatchref(cd, cd->mcodeptr,
3421 PATCHER_checkcast_class,
3422 (constant_classref *) iptr->target,
3425 if (opt_showdisassemble) {
3426 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3430 M_MOV_IMM((ptrint) supervftbl, REG_ITMP3);
/* baseval/diffval reads must be atomic w.r.t. renumbering */
3431 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3432 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3434 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3435 OFFSET(vftbl_t, baseval),
3437 /* if (s1 != REG_ITMP1) { */
3438 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3439 /* OFFSET(vftbl_t, baseval), */
3441 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3442 /* OFFSET(vftbl_t, diffval), */
3444 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3445 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3447 /* x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP1, REG_ITMP2); */
/* cast OK iff (unsigned)(sub.baseval - super.baseval) <= diffval */
3450 x86_64_movl_membase32_reg(cd, REG_ITMP3,
3451 OFFSET(vftbl_t, baseval),
3453 M_LSUB(REG_ITMP3, REG_ITMP2);
3454 M_MOV_IMM((ptrint) supervftbl, REG_ITMP3);
3455 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3457 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3458 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3460 M_CMP(REG_ITMP3, REG_ITMP2);
3461 M_BA(0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3462 codegen_addxcastrefs(cd, cd->mcodeptr);
3464 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3467 /* array type cast-check */
/* array variant: call BUILTIN_arraycheckcast(ref, arraydescriptor)
   and branch to the ClassCastException stub on a false result */
3469 var_to_reg_int(s1, src, REG_ITMP1);
3470 M_INTMOVE(s1, rd->argintregs[0]);
3472 if (iptr->val.a == NULL) {
3473 codegen_addpatchref(cd, cd->mcodeptr,
3474 PATCHER_builtin_arraycheckcast,
3475 (constant_classref *) iptr->target, 0);
3477 if (opt_showdisassemble) {
3478 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3484 a = (ptrint) BUILTIN_arraycheckcast;
3487 M_MOV_IMM((ptrint) iptr->val.a, rd->argintregs[1]);
3488 M_MOV_IMM((ptrint) a, REG_ITMP1);
3492 codegen_addxcastrefs(cd, cd->mcodeptr);
/* on success the original reference is the result of the cast */
3494 var_to_reg_int(s1, src, REG_ITMP1);
3495 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
3498 store_reg_to_var_int(iptr->dst, d);
/* INSTANCEOF: same structure as CHECKCAST but produces a 0/1 result
   in d (via SETcc) instead of throwing; null yields 0 (d is XORed to
   zero up front and the null path jumps over the checks). */
3501 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3503 /* op1: 0 == array, 1 == class */
3504 /* val.a: (classinfo *) superclass */
3506 /* superclass is an interface:
3508 * return (sub != NULL) &&
3509 * (sub->vftbl->interfacetablelength > super->index) &&
3510 * (sub->vftbl->interfacetable[-super->index] != NULL);
3512 * superclass is a class:
3514 * return ((sub != NULL) && (0
3515 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3516 * super->vftbl->diffvall));
3521 vftbl_t *supervftbl;
3524 super = (classinfo *) iptr->val.a;
3531 superindex = super->index;
3532 supervftbl = super->vftbl;
3535 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3536 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3539 var_to_reg_int(s1, src, REG_ITMP1);
3540 d = reg_of_var(rd, iptr->dst, REG_ITMP2);
/* if d aliases s1, keep the tested value in REG_ITMP1 (guard around
   this move is elided in this listing) */
3542 M_INTMOVE(s1, REG_ITMP1);
3546 /* calculate interface instanceof code size */
/* s2/s3 are the encoded byte lengths of the two sequences below —
   used as relative-jump distances; must track the emitters exactly */
3548 s2 = 3; /* mov_membase_reg */
3549 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3550 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub_imm32 */ +
3551 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3552 3 /* test */ + 4 /* setcc */;
3555 s2 += (opt_showdisassemble ? 5 : 0);
3557 /* calculate class instanceof code size */
3559 s3 = 3; /* mov_membase_reg */
3560 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3561 s3 += 10; /* mov_imm_reg */
3562 s3 += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3563 CALCOFFSETBYTES(s3, REG_ITMP1, OFFSET(vftbl_t, baseval));
3564 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3565 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, baseval));
3566 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3567 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3568 s3 += 3 /* sub */ + 3 /* xor */ + 3 /* cmp */ + 4 /* setcc */;
3571 s3 += (opt_showdisassemble ? 5 : 0);
/* result defaults to false */
3573 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3575 /* if class is not resolved, check which code to call */
3578 x86_64_test_reg_reg(cd, s1, s1);
3579 x86_64_jcc(cd, X86_64_CC_Z, (6 + (opt_showdisassemble ? 5 : 0) +
3580 7 + 6 + s2 + 5 + s3));
3582 codegen_addpatchref(cd, cd->mcodeptr,
3583 PATCHER_checkcast_instanceof_flags,
3584 (constant_classref *) iptr->target, 0);
3586 if (opt_showdisassemble) {
3587 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* immediate 0 is patched with super->flags; the AND selects the
   interface or class sequence at runtime */
3590 x86_64_movl_imm_reg(cd, 0, REG_ITMP3); /* super->flags */
3591 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP3);
3592 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3595 /* interface instanceof code */
3597 if (!super || (super->flags & ACC_INTERFACE)) {
/* null receiver: result stays 0, skip the whole sequence */
3599 x86_64_test_reg_reg(cd, s1, s1);
3600 x86_64_jcc(cd, X86_64_CC_Z, s2);
3603 x86_64_mov_membase_reg(cd, s1,
3604 OFFSET(java_objectheader, vftbl),
3607 codegen_addpatchref(cd, cd->mcodeptr,
3608 PATCHER_checkcast_instanceof_interface,
3609 (constant_classref *) iptr->target, 0);
3611 if (opt_showdisassemble) {
3612 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3616 x86_64_movl_membase32_reg(cd, REG_ITMP1,
3617 OFFSET(vftbl_t, interfacetablelength),
3619 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3620 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
/* a = bytes to skip to leave the result 0 when the index is out of
   the interface table's range */
3622 a = 3 + 4 /* mov_membase32_reg */ + 3 /* test */ + 4 /* setcc */;
3624 x86_64_jcc(cd, X86_64_CC_LE, a);
3625 x86_64_mov_membase32_reg(cd, REG_ITMP1,
3626 OFFSET(vftbl_t, interfacetable[0]) -
3627 superindex * sizeof(methodptr*),
/* result = (interfacetable entry != NULL) */
3629 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3630 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
/* in the unresolved case, skip the class sequence that follows */
3633 x86_64_jmp_imm(cd, s3);
3636 /* class instanceof code */
3638 if (!super || !(super->flags & ACC_INTERFACE)) {
3640 x86_64_test_reg_reg(cd, s1, s1);
3641 x86_64_jcc(cd, X86_64_CC_E, s3);
3644 x86_64_mov_membase_reg(cd, s1,
3645 OFFSET(java_objectheader, vftbl),
3649 codegen_addpatchref(cd, cd->mcodeptr,
3650 PATCHER_instanceof_class,
3651 (constant_classref *) iptr->target, 0);
3653 if (opt_showdisassemble) {
3654 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3658 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP2);
3659 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3660 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3662 x86_64_movl_membase_reg(cd, REG_ITMP1,
3663 OFFSET(vftbl_t, baseval),
3665 x86_64_movl_membase_reg(cd, REG_ITMP2,
3666 OFFSET(vftbl_t, diffval),
3668 x86_64_movl_membase_reg(cd, REG_ITMP2,
3669 OFFSET(vftbl_t, baseval),
3671 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3672 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
/* result = (unsigned)(sub.baseval - super.baseval) <= diffval */
3674 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
3675 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d); /* may be REG_ITMP2 */
3676 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP1);
3677 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3679 store_reg_to_var_int(iptr->dst, d);
		case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
		                      /* op1 = dimension, val.a = array descriptor */

			/* check for negative sizes and copy sizes to stack if necessary */

			MCODECHECK((10 * 4 * iptr->op1) + 5 + 10 * 8);

			/* walk the op1 dimension-count operands from top of stack down,
			   spilling each into the outgoing stack slot s1 * 8 */
			for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
				/* copy SAVEDVAR sizes to stack */

				if (src->varkind != ARGVAR) {
					var_to_reg_int(s2, src, REG_ITMP1);
					M_LST(s2, REG_SP, s1 * 8);

			/* is a patcher function set? */

				codegen_addpatchref(cd, cd->mcodeptr,
									(functionptr) (ptrint) iptr->target,
				if (opt_showdisassemble) {
					M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;

			a = (ptrint) iptr->val.a;

			/* set up the BUILTIN_multianewarray call arguments */

			/* a0 = dimension count */

			M_MOV_IMM(iptr->op1, rd->argintregs[0]);

			/* a1 = arrayvftbl */

			M_MOV_IMM((ptrint) iptr->val.a, rd->argintregs[1]);

			/* a2 = pointer to dimensions = stack pointer */

			M_MOV(REG_SP, rd->argintregs[2]);

			M_MOV_IMM((ptrint) BUILTIN_multianewarray, REG_ITMP1);

			/* check for exception before result assignment */

			codegen_addxexceptionrefs(cd, cd->mcodeptr);

			s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
			M_INTMOVE(REG_RESULT, s1);
			store_reg_to_var_int(iptr->dst, s1);

			/* unknown opcode: abort compilation with an InternalError */
			throw_cacao_exception_exit(string_java_lang_InternalError,
									   "Unknown ICMD %d", iptr->opc);
		} /* for instruction */

		/* copy values to interface registers */

		src = bptr->outstack;
		len = bptr->outdepth;
			if ((src->varkind != STACKVAR)) {
				if (IS_FLT_DBL_TYPE(s2)) {
					var_to_reg_flt(s1, src, REG_FTMP1);
					/* interface slot in a register or spilled to memory? */
					if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
						M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
						x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
					var_to_reg_int(s1, src, REG_ITMP1);
					if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
						M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
						x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);

		/* At the end of a basic block we may have to append some nops,
		   because the patcher stub calling code might be longer than the
		   actual instruction. So codepatching does not change the
		   following block unintentionally. */

		if (cd->mcodeptr < cd->lastmcodeptr) {
			while (cd->mcodeptr < cd->lastmcodeptr) {
	} /* if (bptr -> flags >= BBREACHED) */
	} /* for basic block */
	codegen_createlinenumbertable(cd);

	/* Exception stub generation.  Each of the six loops below walks one
	   list of recorded branch refs; the branch in the main code is
	   resolved to jump to a stub that loads the faulting PC into
	   REG_ITMP2_XPC and then falls into a body (shared per exception
	   kind via xcodeptr) which builds the stacktrace_inline_* call and
	   finally dispatches to asm_handle_exception. */

	/* generate ArithmeticException stubs */

	for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
		gen_resolvebranch(cd->mcodebase + bref->branchpos,
						  cd->mcodeptr - cd->mcodebase);

		/* imm 0 is a placeholder patched via dseg_adddata; XPC then gets
		   the branch position added (the two sizes must stay in sync
		   with the `- 6' fixup below) */
		M_MOV_IMM(0, REG_ITMP2_XPC);                        /* 10 bytes */
		dseg_adddata(cd, cd->mcodeptr);
		M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC);    /* 7 bytes */

		/* reuse the shared stub body if one was already emitted */
		if (xcodeptr != NULL) {
			M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
			xcodeptr = cd->mcodeptr;

			/* a0 = data segment pointer (pc-relative) */
			x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
			M_MOV(REG_SP, rd->argintregs[1]);
			M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
			M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);

			/* save XPC across the helper call */
			M_ASUB_IMM(2 * 8, REG_SP);
			M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);

			M_MOV_IMM((ptrint) stacktrace_inline_arithmeticexception,
			M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
			M_AADD_IMM(2 * 8, REG_SP);

			M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);

	/* generate ArrayIndexOutOfBoundsException stubs */

	for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
		gen_resolvebranch(cd->mcodebase + bref->branchpos,
						  cd->mcodeptr - cd->mcodebase);

		/* move index register into REG_ITMP1 */

		M_MOV(bref->reg, REG_ITMP1);                        /* 3 bytes  */

		M_MOV_IMM(0, REG_ITMP2_XPC);                        /* 10 bytes */
		dseg_adddata(cd, cd->mcodeptr);
		M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC);    /* 7 bytes */

		if (xcodeptr != NULL) {
			M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
			xcodeptr = cd->mcodeptr;

			x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
			M_MOV(REG_SP, rd->argintregs[1]);
			M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
			M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
			/* a4 = offending array index (saved above) */
			M_MOV(REG_ITMP1, rd->argintregs[4]);

			M_ASUB_IMM(2 * 8, REG_SP);
			M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);

			M_MOV_IMM((ptrint) stacktrace_inline_arrayindexoutofboundsexception,
			M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
			M_AADD_IMM(2 * 8, REG_SP);

			M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);

	/* generate ArrayStoreException stubs */

	for (bref = cd->xstorerefs; bref != NULL; bref = bref->next) {
		gen_resolvebranch(cd->mcodebase + bref->branchpos,
						  cd->mcodeptr - cd->mcodebase);

		M_MOV_IMM(0, REG_ITMP2_XPC);                        /* 10 bytes */
		dseg_adddata(cd, cd->mcodeptr);
		M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC);    /* 7 bytes */

		if (xcodeptr != NULL) {
			M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
			xcodeptr = cd->mcodeptr;

			x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
			M_MOV(REG_SP, rd->argintregs[1]);
			M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
			M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);

			M_ASUB_IMM(2 * 8, REG_SP);
			M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);

			M_MOV_IMM((ptrint) stacktrace_inline_arraystoreexception,
			M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
			M_AADD_IMM(2 * 8, REG_SP);

			M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);

	/* generate ClassCastException stubs */

	for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
		gen_resolvebranch(cd->mcodebase + bref->branchpos,
						  cd->mcodeptr - cd->mcodebase);

		M_MOV_IMM(0, REG_ITMP2_XPC);                        /* 10 bytes */
		dseg_adddata(cd, cd->mcodeptr);
		M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC);    /* 7 bytes */

		if (xcodeptr != NULL) {
			M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
			xcodeptr = cd->mcodeptr;

			x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
			M_MOV(REG_SP, rd->argintregs[1]);
			M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
			M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);

			M_ASUB_IMM(2 * 8, REG_SP);
			M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);

			M_MOV_IMM((ptrint) stacktrace_inline_classcastexception, REG_ITMP3);
			M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
			M_AADD_IMM(2 * 8, REG_SP);

			M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);

	/* generate NullpointerException stubs */

	for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
		gen_resolvebranch(cd->mcodebase + bref->branchpos,
						  cd->mcodeptr - cd->mcodebase);

		M_MOV_IMM(0, REG_ITMP2_XPC);                        /* 10 bytes */
		dseg_adddata(cd, cd->mcodeptr);
		M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC);    /* 7 bytes */

		if (xcodeptr != NULL) {
			M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
			xcodeptr = cd->mcodeptr;

			x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
			M_MOV(REG_SP, rd->argintregs[1]);
			M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
			M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);

			M_ASUB_IMM(2 * 8, REG_SP);
			M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);

			M_MOV_IMM((ptrint) stacktrace_inline_nullpointerexception,
			M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
			M_AADD_IMM(2 * 8, REG_SP);

			M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);

	/* generate exception check stubs */

	for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
		gen_resolvebranch(cd->mcodebase + bref->branchpos,
						  cd->mcodeptr - cd->mcodebase);

		M_MOV_IMM(0, REG_ITMP2_XPC);                        /* 10 bytes */
		dseg_adddata(cd, cd->mcodeptr);
		M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC);    /* 7 bytes */

		if (xcodeptr != NULL) {
			M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
			xcodeptr = cd->mcodeptr;

			x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
			M_MOV(REG_SP, rd->argintregs[1]);
			M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
			M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);

			M_ASUB_IMM(2 * 8, REG_SP);
			M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);

			M_MOV_IMM((ptrint) stacktrace_inline_fillInStackTrace, REG_ITMP3);
			M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
			M_AADD_IMM(2 * 8, REG_SP);

			M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
	/* generate code patching stub call code */

	/* temporary codegendata used only to emit the back-patched `call'
	   at the recorded position inside the already-generated code */
	tmpcd = DNEW(codegendata);

	for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
		/* check size of code segment */

		/* Get machine code which is patched back in later. A             */
		/* `call rel32' is 5 bytes long (but read 8 bytes).               */

		xcodeptr = cd->mcodebase + pref->branchpos;
		mcode = *((ptrint *) xcodeptr);

		/* patch in `call rel32' to call the following code               */

		tmpcd->mcodeptr = xcodeptr;     /* set dummy mcode pointer        */
		x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));

		/* move pointer to java_objectheader onto stack */

#if defined(USE_THREADS) && defined(NATIVE_THREADS)
		/* create a virtual java_objectheader */

		(void) dseg_addaddress(cd, get_dummyLR());          /* monitorPtr */
		a = dseg_addaddress(cd, NULL);                      /* vftbl      */

		/* pc-relative address of the virtual object header in the dseg */
		x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP3);

		/* move machine code bytes and classinfo pointer into registers */

		M_MOV_IMM((ptrint) mcode, REG_ITMP3);
		M_MOV_IMM((ptrint) pref->ref, REG_ITMP3);
		M_MOV_IMM((ptrint) pref->disp, REG_ITMP3);
		M_MOV_IMM((ptrint) pref->patcher, REG_ITMP3);
		M_MOV_IMM((ptrint) asm_wrapper_patcher, REG_ITMP3);

	/* finalize: flush the generated code and data segment */
	codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
4114 /* createcompilerstub **********************************************************
4116 Creates a stub routine which calls the compiler.
4118 *******************************************************************************/
4120 #define COMPILERSTUB_SIZE 23
functionptr createcompilerstub(methodinfo *m)
	u1     *s;                          /* memory to hold the stub            */

	s = CNEW(u1, COMPILERSTUB_SIZE);

	/* mark start of dump memory area */

	dumpsize = dump_size();

	cd = DNEW(codegendata);

	/* code for the stub: load the methodinfo pointer into REG_ITMP1 and
	   tail-jump to the JIT compiler entry via REG_ITMP3; the compiled
	   code later replaces this stub as the method's entry point */

	x86_64_mov_imm_reg(cd, (ptrint) m, REG_ITMP1); /* pass method to compiler */
	x86_64_mov_imm_reg(cd, (ptrint) asm_call_jit_compiler, REG_ITMP3);
	x86_64_jmp_reg(cd, REG_ITMP3);

#if defined(STATISTICS)
		count_cstub_len += COMPILERSTUB_SIZE;

	/* release dump area */

	dump_release(dumpsize);

	return (functionptr) (ptrint) s;
4156 /* createnativestub ************************************************************
4158 Creates a stub routine which calls a native method.
4160 *******************************************************************************/
functionptr createnativestub(functionptr f, methodinfo *m, codegendata *cd,
							 registerdata *rd, methoddesc *nmd)
	s4         stackframesize;          /* size of stackframe if needed       */
	s4         i, j;                    /* count variables                    */

	/* initialize variables */

	/* static natives get an extra class argument besides the JNIEnv */
	nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;

	/* calculate stack frame size */

		sizeof(stackframeinfo) / SIZEOF_VOID_P +
		sizeof(localref_table) / SIZEOF_VOID_P +
		INT_ARG_CNT + FLT_ARG_CNT + 1 +         /* + 1 for function address  */

	if (!(stackframesize & 0x1))                /* keep stack 16-byte aligned */

	/* create method header */

	(void) dseg_addaddress(cd, m);                          /* MethodPointer  */
	(void) dseg_adds4(cd, stackframesize * 8);              /* FrameSize      */
	(void) dseg_adds4(cd, 0);                               /* IsSync         */
	(void) dseg_adds4(cd, 0);                               /* IsLeaf         */
	(void) dseg_adds4(cd, 0);                               /* IntSave        */
	(void) dseg_adds4(cd, 0);                               /* FltSave        */
	(void) dseg_addlinenumbertablesize(cd);
	(void) dseg_adds4(cd, 0);                               /* ExTableSize    */

	/* initialize mcode variables */

	cd->mcodeptr = (u1 *) cd->mcodebase;
	cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);

	/* generate stub code */

	M_ASUB_IMM(stackframesize * 8, REG_SP);

	/* save integer and float argument registers (for the call trace) */

	for (i = 0, j = 0; i < md->paramcount && i < INT_ARG_CNT; i++)
		if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
			M_LST(rd->argintregs[j++], REG_SP, (1 + i) * 8);

	for (i = 0, j = 0; i < md->paramcount && i < FLT_ARG_CNT; i++)
		if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
			M_DST(rd->argfltregs[j++], REG_SP, (1 + INT_ARG_CNT + i) * 8);

	/* show integer hex code for float arguments */

	for (i = 0, j = 0; i < md->paramcount && i < INT_ARG_CNT; i++) {
		/* if the paramtype is a float, we have to right shift all        */
		/* following integer registers                                    */

		if (IS_FLT_DBL_TYPE(md->paramtypes[i].type)) {
			for (s1 = INT_ARG_CNT - 2; s1 >= i; s1--)
				M_MOV(rd->argintregs[s1], rd->argintregs[s1 + 1]);

			/* copy the float bit pattern into the freed integer register */
			x86_64_movd_freg_reg(cd, rd->argfltregs[j], rd->argintregs[i]);

	x86_64_mov_imm_reg(cd, (ptrint) m, REG_ITMP1);
	x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, 0 * 8);
	x86_64_mov_imm_reg(cd, (ptrint) builtin_trace_args, REG_ITMP1);
	x86_64_call_reg(cd, REG_ITMP1);

	/* restore integer and float argument registers */

	for (i = 0, j = 0; i < md->paramcount && i < INT_ARG_CNT; i++)
		if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
			M_LLD(rd->argintregs[j++], REG_SP, (1 + i) * 8);

	for (i = 0, j = 0; i < md->paramcount && i < FLT_ARG_CNT; i++)
		if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
			M_DLD(rd->argfltregs[j++], REG_SP, (1 + INT_ARG_CNT + i) * 8);

	/* get function address (this must happen before the stackframeinfo) */

#if !defined(ENABLE_STATICVM)
		/* native not yet resolved: patcher fills in the address later */
		codegen_addpatchref(cd, cd->mcodeptr, PATCHER_resolve_native, m, 0);

		if (opt_showdisassemble) {
			M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;

	M_MOV_IMM((ptrint) f, REG_ITMP3);

	/* save integer and float argument registers */

	for (i = 0, j = 0; i < md->paramcount && i < INT_ARG_CNT; i++)
		if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
			M_LST(rd->argintregs[j++], REG_SP, i * 8);

	for (i = 0, j = 0; i < md->paramcount && i < FLT_ARG_CNT; i++)
		if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
			M_DST(rd->argfltregs[j++], REG_SP, (INT_ARG_CNT + i) * 8);

	/* preserve the (possibly patched) native function address too */
	M_AST(REG_ITMP3, REG_SP, (INT_ARG_CNT + FLT_ARG_CNT) * 8);

	/* create dynamic stack info */

	M_ALEA(REG_SP, stackframesize * 8, rd->argintregs[0]);
	x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[1]);
	M_ALEA(REG_SP, stackframesize * 8 + SIZEOF_VOID_P, rd->argintregs[2]);
	M_ALD(rd->argintregs[3], REG_SP, stackframesize * 8);
	M_MOV_IMM((ptrint) codegen_start_native_call, REG_ITMP1);

		x86_64_mov_imm_reg(cd, (ptrint) nativeinvokation, REG_ITMP1);
		x86_64_call_reg(cd, REG_ITMP1);

	/* restore integer and float argument registers */

	for (i = 0, j = 0; i < md->paramcount && i < INT_ARG_CNT; i++)
		if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
			M_LLD(rd->argintregs[j++], REG_SP, i * 8);

	for (i = 0, j = 0; i < md->paramcount && i < FLT_ARG_CNT; i++)
		if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
			M_DLD(rd->argfltregs[j++], REG_SP, (INT_ARG_CNT + i) * 8);

	M_ALD(REG_ITMP3, REG_SP, (INT_ARG_CNT + FLT_ARG_CNT) * 8);

	/* copy or spill arguments to new locations; walk backwards so the
	   shift by `nativeparams' (env/class prepended) does not clobber
	   still-unread argument slots */

	for (i = md->paramcount - 1, j = i + nativeparams; i >= 0; i--, j--) {
		t = md->paramtypes[i].type;

		if (IS_INT_LNG_TYPE(t)) {
			if (!md->params[i].inmemory) {
				s1 = rd->argintregs[md->params[i].regoff];

				if (!nmd->params[j].inmemory) {
					s2 = rd->argintregs[nmd->params[j].regoff];
					s2 = nmd->params[j].regoff;
					M_LST(s1, REG_SP, s2 * 8);
				s1 = md->params[i].regoff + stackframesize + 1;   /* + 1 (RA) */
				s2 = nmd->params[j].regoff;
				M_LLD(REG_ITMP1, REG_SP, s1 * 8);
				M_LST(REG_ITMP1, REG_SP, s2 * 8);

			/* We only copy spilled float arguments, as the float argument    */
			/* registers keep unchanged.                                      */

			if (md->params[i].inmemory) {
				s1 = md->params[i].regoff + stackframesize + 1;   /* + 1 (RA) */
				s2 = nmd->params[j].regoff;
				M_DLD(REG_FTMP1, REG_SP, s1 * 8);
				M_DST(REG_FTMP1, REG_SP, s2 * 8);

	/* put class into second argument register */

	if (m->flags & ACC_STATIC)
		M_MOV_IMM((ptrint) m->class, rd->argintregs[1]);

	/* put env into first argument register */

	M_MOV_IMM((ptrint) &env, rd->argintregs[0]);

	/* do the native function call */

	/* save return value */

	if (IS_INT_LNG_TYPE(md->returntype.type))
		M_LST(REG_RESULT, REG_SP, 0 * 8);
		M_DST(REG_FRESULT, REG_SP, 0 * 8);

	/* remove native stackframe info */

	M_ALEA(REG_SP, stackframesize * 8, rd->argintregs[0]);
	M_MOV_IMM((ptrint) codegen_finish_native_call, REG_ITMP1);

	/* generate call trace */

		/* just restore the value we need, don't care about the other */

		if (IS_INT_LNG_TYPE(md->returntype.type))
			M_LLD(REG_RESULT, REG_SP, 0 * 8);
			M_DLD(REG_FRESULT, REG_SP, 0 * 8);

		M_MOV_IMM((ptrint) m, rd->argintregs[0]);
		M_MOV(REG_RESULT, rd->argintregs[1]);
		M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
		M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);

		M_MOV_IMM((ptrint) builtin_displaymethodstop, REG_ITMP1);

	/* check for exception */

#if defined(USE_THREADS) && defined(NATIVE_THREADS)
	M_MOV_IMM((ptrint) builtin_get_exceptionptrptr, REG_ITMP3);
	M_MOV_IMM((ptrint) &_no_threads_exceptionptr, REG_RESULT);
	M_ALD(REG_ITMP3, REG_RESULT, 0);

	/* restore return value */

	if (IS_INT_LNG_TYPE(md->returntype.type))
		M_LLD(REG_RESULT, REG_SP, 0 * 8);
		M_DLD(REG_FRESULT, REG_SP, 0 * 8);

	/* test for exception */

	/* remove stackframe */

	M_AADD_IMM(stackframesize * 8, REG_SP);

	/* handle exception */

#if defined(USE_THREADS) && defined(NATIVE_THREADS)
	M_LST(REG_ITMP3, REG_SP, 0 * 8);
	M_MOV_IMM((ptrint) builtin_get_exceptionptrptr, REG_ITMP3);
	/* clear the thread's exception pointer slot */
	x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
	M_LLD(REG_ITMP1_XPTR, REG_SP, 0 * 8);
	M_MOV(REG_ITMP3, REG_ITMP1_XPTR);
	M_MOV_IMM((ptrint) &_no_threads_exceptionptr, REG_ITMP3);
	M_XOR(REG_ITMP2, REG_ITMP2);
	M_AST(REG_ITMP2, REG_ITMP3, 0);             /* clear exception pointer    */

	/* remove stackframe */

	M_AADD_IMM(stackframesize * 8, REG_SP);

	M_LLD(REG_ITMP2_XPC, REG_SP, 0 * 8);    /* get return address from stack  */
	/* rewind XPC into the faulting call site; 3 is the size assumed for
	   the indirect `callq *reg' that invoked the native */
	M_ASUB_IMM(3, REG_ITMP2_XPC);                                    /* callq */

	M_MOV_IMM((ptrint) asm_handle_nat_exception, REG_ITMP3);

	/* process patcher calls **************************************************/

#if defined(USE_THREADS) && defined(NATIVE_THREADS)
		tmpcd = DNEW(codegendata);

		for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
			/* Get machine code which is patched back in later. A             */
			/* `call rel32' is 5 bytes long (but read 8 bytes).               */

			xcodeptr = cd->mcodebase + pref->branchpos;
			mcode = *((ptrint *) xcodeptr);

			/* patch in `call rel32' to call the following code               */

			tmpcd->mcodeptr = xcodeptr;     /* set dummy mcode pointer        */
			x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));

			/* move pointer to java_objectheader onto stack */

#if defined(USE_THREADS) && defined(NATIVE_THREADS)
			/* create a virtual java_objectheader */

			(void) dseg_addaddress(cd, get_dummyLR());      /* monitorPtr     */
			disp = dseg_addaddress(cd, NULL);               /* vftbl          */

			x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP3);

			/* move machine code bytes and classinfo pointer into registers */

			M_MOV_IMM((ptrint) mcode, REG_ITMP3);
			M_MOV_IMM((ptrint) pref->ref, REG_ITMP3);
			M_MOV_IMM((ptrint) pref->disp, REG_ITMP3);
			M_MOV_IMM((ptrint) pref->patcher, REG_ITMP3);
			M_MOV_IMM((ptrint) asm_wrapper_patcher, REG_ITMP3);

	codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));

	return m->entrypoint;
4510 * These are local overrides for various environment variables in Emacs.
4511 * Please do not remove this and leave it at the end of the file, where
4512 * Emacs will automagically detect them.
4513 * ---------------------------------------------------------------------
4516 * indent-tabs-mode: t