1 /* src/vm/jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996-2005 R. Grafl, A. Krall, C. Kruegel, C. Oates,
4 R. Obermaisser, M. Platter, M. Probst, S. Ring, E. Steiner,
5 C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich, J. Wenninger,
6 Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
25 Contact: cacao@complang.tuwien.ac.at
27 Authors: Andreas Krall
30 Changes: Christian Ullrich
32 $Id: codegen.c 3851 2005-12-03 12:33:19Z twisti $
46 #include "vm/jit/x86_64/arch.h"
47 #include "vm/jit/x86_64/codegen.h"
48 #include "vm/jit/x86_64/emitfuncs.h"
50 #include "cacao/cacao.h"
51 #include "native/native.h"
52 #include "vm/global.h"
53 #include "vm/builtin.h"
54 #include "vm/loader.h"
55 #include "vm/statistics.h"
56 #include "vm/stringlocal.h"
57 #include "vm/jit/asmpart.h"
58 #include "vm/jit/codegen.inc"
59 #include "vm/jit/jit.h"
62 # include "vm/jit/lsra.inc"
65 #include "vm/jit/methodheader.h"
66 #include "vm/jit/parse.h"
67 #include "vm/jit/patcher.h"
68 #include "vm/jit/reg.h"
69 #include "vm/jit/reg.inc"
72 /* codegen *********************************************************************
74 Generates machine code.
76 *******************************************************************************/
78 bool codegen(methodinfo *m, codegendata *cd, registerdata *rd)
80 s4 len, s1, s2, s3, d, disp;
89 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
90 builtintable_entry *bte;
93 /* prevent compiler warnings */
105 /* space to save used callee saved registers */
107 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
108 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
110 parentargs_base = rd->memuse + savedregs_num;
112 #if defined(USE_THREADS)
113 /* space to save argument of monitor_enter */
115 if (checksync && (m->flags & ACC_SYNCHRONIZED))
119 /* Keep stack of non-leaf functions 16-byte aligned for calls into native */
120 /* code e.g. libc or jni (alignment problems with movaps). */
122 if (!m->isleafmethod || runverbose)
123 parentargs_base |= 0x1;
125 /* create method header */
127 (void) dseg_addaddress(cd, m); /* MethodPointer */
128 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
130 #if defined(USE_THREADS)
131 /* IsSync contains the offset relative to the stack pointer for the
132 argument of monitor_exit used in the exception handler. Since the
133 offset could be zero and give a wrong meaning of the flag it is
137 if (checksync && (m->flags & ACC_SYNCHRONIZED))
138 (void) dseg_adds4(cd, (rd->memuse + 1) * 8); /* IsSync */
141 (void) dseg_adds4(cd, 0); /* IsSync */
143 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
144 (void) dseg_adds4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
145 (void) dseg_adds4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
147 (void) dseg_addlinenumbertablesize(cd);
149 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
151 /* create exception table */
153 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
154 dseg_addtarget(cd, ex->start);
155 dseg_addtarget(cd, ex->end);
156 dseg_addtarget(cd, ex->handler);
157 (void) dseg_addaddress(cd, ex->catchtype.cls);
160 /* initialize mcode variables */
162 cd->mcodeptr = (u1 *) cd->mcodebase;
163 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
166 /* initialize the last patcher pointer */
168 cd->lastmcodeptr = cd->mcodeptr;
170 /* create stack frame (if necessary) */
173 M_ASUB_IMM(parentargs_base * 8, REG_SP);
175 /* save used callee saved registers */
178 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
179 p--; M_LST(rd->savintregs[i], REG_SP, p * 8);
181 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
182 p--; M_DST(rd->savfltregs[i], REG_SP, p * 8);
185 /* take arguments out of register or stack frame */
189 for (p = 0, l = 0; p < md->paramcount; p++) {
190 t = md->paramtypes[p].type;
191 var = &(rd->locals[l][t]);
193 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
197 s1 = md->params[p].regoff;
198 if (IS_INT_LNG_TYPE(t)) { /* integer args */
199 s2 = rd->argintregs[s1];
200 if (!md->params[p].inmemory) { /* register arguments */
201 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
202 M_INTMOVE(s2, var->regoff);
204 } else { /* reg arg -> spilled */
205 M_LST(s2, REG_SP, var->regoff * 8);
208 } else { /* stack arguments */
209 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
210 /* + 8 for return address */
211 M_LLD(var->regoff, REG_SP, (parentargs_base + s1) * 8 + 8);
213 } else { /* stack arg -> spilled */
214 var->regoff = parentargs_base + s1 + 1;
218 } else { /* floating args */
219 if (!md->params[p].inmemory) { /* register arguments */
220 s2 = rd->argfltregs[s1];
221 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
222 M_FLTMOVE(s2, var->regoff);
224 } else { /* reg arg -> spilled */
225 M_DST(s2, REG_SP, var->regoff * 8);
228 } else { /* stack arguments */
229 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
230 M_DLD(var->regoff, REG_SP, (parentargs_base + s1) * 8 + 8);
233 var->regoff = parentargs_base + s1 + 1;
239 /* save monitorenter argument */
241 #if defined(USE_THREADS)
242 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
243 /* stack offset for monitor argument */
248 M_LSUB_IMM((INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_SP);
250 for (p = 0; p < INT_ARG_CNT; p++)
251 M_LST(rd->argintregs[p], REG_SP, p * 8);
253 for (p = 0; p < FLT_ARG_CNT; p++)
254 M_DST(rd->argfltregs[p], REG_SP, (INT_ARG_CNT + p) * 8);
256 s1 += INT_ARG_CNT + FLT_ARG_CNT;
259 /* decide which monitor enter function to call */
261 if (m->flags & ACC_STATIC) {
262 x86_64_mov_imm_reg(cd, (ptrint) m->class, REG_ITMP1);
263 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, s1 * 8);
264 M_INTMOVE(REG_ITMP1, rd->argintregs[0]);
265 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_staticmonitorenter, REG_ITMP1);
266 x86_64_call_reg(cd, REG_ITMP1);
269 x86_64_test_reg_reg(cd, rd->argintregs[0], rd->argintregs[0]);
270 x86_64_jcc(cd, X86_64_CC_Z, 0);
271 codegen_addxnullrefs(cd, cd->mcodeptr);
272 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, s1 * 8);
273 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_monitorenter, REG_ITMP1);
274 x86_64_call_reg(cd, REG_ITMP1);
278 for (p = 0; p < INT_ARG_CNT; p++)
279 M_LLD(rd->argintregs[p], REG_SP, p * 8);
281 for (p = 0; p < FLT_ARG_CNT; p++)
282 M_DLD(rd->argfltregs[p], REG_SP, (INT_ARG_CNT + p) * 8);
284 M_LADD_IMM((INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_SP);
289 /* Copy argument registers to stack and call trace function with pointer */
290 /* to arguments on stack. */
292 if (runverbose || opt_stat) {
293 M_LSUB_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT + 1 + 1) * 8, REG_SP);
295 /* save integer argument registers */
297 for (p = 0; p < INT_ARG_CNT; p++)
298 M_LST(rd->argintregs[p], REG_SP, (1 + p) * 8);
300 /* save float argument registers */
302 for (p = 0; p < FLT_ARG_CNT; p++)
303 M_DST(rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
305 /* save temporary registers for leaf methods */
307 if (m->isleafmethod) {
308 for (p = 0; p < INT_TMP_CNT; p++)
309 M_LST(rd->tmpintregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
311 for (p = 0; p < FLT_TMP_CNT; p++)
312 M_DST(rd->tmpfltregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
316 /* show integer hex code for float arguments */
318 for (p = 0, l = 0; p < md->paramcount && p < INT_ARG_CNT; p++) {
319 /* if the paramtype is a float, we have to right shift all */
320 /* following integer registers */
322 if (IS_FLT_DBL_TYPE(md->paramtypes[p].type)) {
323 for (s1 = INT_ARG_CNT - 2; s1 >= p; s1--) {
324 M_MOV(rd->argintregs[s1], rd->argintregs[s1 + 1]);
327 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
332 x86_64_mov_imm_reg(cd, (ptrint) m, REG_ITMP2);
333 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
334 x86_64_mov_imm_reg(cd, (ptrint) builtin_trace_args, REG_ITMP1);
335 x86_64_call_reg(cd, REG_ITMP1);
338 x86_64_mov_imm_reg(cd, (ptrint) compiledinvokation, REG_ITMP1);
339 x86_64_call_reg(cd, REG_ITMP1);
342 /* restore integer argument registers */
344 for (p = 0; p < INT_ARG_CNT; p++)
345 M_LLD(rd->argintregs[p], REG_SP, (1 + p) * 8);
347 /* restore float argument registers */
349 for (p = 0; p < FLT_ARG_CNT; p++)
350 M_DLD(rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
352 /* restore temporary registers for leaf methods */
354 if (m->isleafmethod) {
355 for (p = 0; p < INT_TMP_CNT; p++)
356 M_LLD(rd->tmpintregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
358 for (p = 0; p < FLT_TMP_CNT; p++)
359 M_DLD(rd->tmpfltregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
362 M_LADD_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT + 1 + 1) * 8, REG_SP);
367 /* end of header generation */
369 /* walk through all basic blocks */
370 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
372 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
374 if (bptr->flags >= BBREACHED) {
376 /* branch resolving */
379 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
380 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
385 /* copy interface registers to their destination */
393 while (src != NULL) {
395 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
396 if (bptr->type == BBTYPE_SBR) {
397 /* d = reg_of_var(rd, src, REG_ITMP1); */
398 if (!(src->flags & INMEMORY))
402 x86_64_pop_reg(cd, d);
403 store_reg_to_var_int(src, d);
405 } else if (bptr->type == BBTYPE_EXH) {
406 /* d = reg_of_var(rd, src, REG_ITMP1); */
407 if (!(src->flags & INMEMORY))
411 M_INTMOVE(REG_ITMP1, d);
412 store_reg_to_var_int(src, d);
421 while (src != NULL) {
423 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
424 if (bptr->type == BBTYPE_SBR) {
425 d = reg_of_var(rd, src, REG_ITMP1);
426 x86_64_pop_reg(cd, d);
427 store_reg_to_var_int(src, d);
429 } else if (bptr->type == BBTYPE_EXH) {
430 d = reg_of_var(rd, src, REG_ITMP1);
431 M_INTMOVE(REG_ITMP1, d);
432 store_reg_to_var_int(src, d);
436 d = reg_of_var(rd, src, REG_ITMP1);
437 if ((src->varkind != STACKVAR)) {
439 if (IS_FLT_DBL_TYPE(s2)) {
440 s1 = rd->interfaces[len][s2].regoff;
441 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
445 x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
447 store_reg_to_var_flt(src, d);
450 s1 = rd->interfaces[len][s2].regoff;
451 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
455 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
457 store_reg_to_var_int(src, d);
466 /* walk through all instructions */
472 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
473 if (iptr->line != currentline) {
474 dseg_addlinenumber(cd, iptr->line, cd->mcodeptr);
475 currentline = iptr->line;
478 MCODECHECK(1024); /* 1KB should be enough */
481 case ICMD_INLINE_START: /* internal ICMDs */
482 case ICMD_INLINE_END:
485 case ICMD_NOP: /* ... ==> ... */
488 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
489 if (src->flags & INMEMORY) {
490 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
493 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
495 x86_64_jcc(cd, X86_64_CC_Z, 0);
496 codegen_addxnullrefs(cd, cd->mcodeptr);
499 /* constant operations ************************************************/
501 case ICMD_ICONST: /* ... ==> ..., constant */
502 /* op1 = 0, val.i = constant */
504 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
505 if (iptr->val.i == 0) {
508 M_IMOV_IMM(iptr->val.i, d);
510 store_reg_to_var_int(iptr->dst, d);
513 case ICMD_LCONST: /* ... ==> ..., constant */
514 /* op1 = 0, val.l = constant */
516 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
517 if (iptr->val.l == 0) {
520 M_MOV_IMM(iptr->val.l, d);
522 store_reg_to_var_int(iptr->dst, d);
525 case ICMD_FCONST: /* ... ==> ..., constant */
526 /* op1 = 0, val.f = constant */
528 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
529 disp = dseg_addfloat(cd, iptr->val.f);
530 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + disp, d);
531 store_reg_to_var_flt(iptr->dst, d);
534 case ICMD_DCONST: /* ... ==> ..., constant */
535 /* op1 = 0, val.d = constant */
537 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
538 disp = dseg_adddouble(cd, iptr->val.d);
539 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + disp, d);
540 store_reg_to_var_flt(iptr->dst, d);
543 case ICMD_ACONST: /* ... ==> ..., constant */
544 /* op1 = 0, val.a = constant */
546 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
548 if ((iptr->target != NULL) && (iptr->val.a == NULL)) {
549 codegen_addpatchref(cd, cd->mcodeptr,
551 (unresolved_class *) iptr->target, 0);
553 if (opt_showdisassemble) {
554 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
557 M_MOV_IMM((ptrint) iptr->val.a, d);
560 if (iptr->val.a == 0) {
563 M_MOV_IMM((ptrint) iptr->val.a, d);
566 store_reg_to_var_int(iptr->dst, d);
570 /* load/store operations **********************************************/
572 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
573 /* op1 = local variable */
575 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
576 if ((iptr->dst->varkind == LOCALVAR) &&
577 (iptr->dst->varnum == iptr->op1)) {
580 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
581 if (var->flags & INMEMORY) {
582 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
583 store_reg_to_var_int(iptr->dst, d);
586 if (iptr->dst->flags & INMEMORY) {
587 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
590 M_INTMOVE(var->regoff, d);
595 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
596 case ICMD_ALOAD: /* op1 = local variable */
598 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
599 if ((iptr->dst->varkind == LOCALVAR) &&
600 (iptr->dst->varnum == iptr->op1)) {
603 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
604 if (var->flags & INMEMORY) {
605 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
606 store_reg_to_var_int(iptr->dst, d);
609 if (iptr->dst->flags & INMEMORY) {
610 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
613 M_INTMOVE(var->regoff, d);
618 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
619 case ICMD_DLOAD: /* op1 = local variable */
621 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
622 if ((iptr->dst->varkind == LOCALVAR) &&
623 (iptr->dst->varnum == iptr->op1)) {
626 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
627 if (var->flags & INMEMORY) {
628 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
629 store_reg_to_var_flt(iptr->dst, d);
632 if (iptr->dst->flags & INMEMORY) {
633 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
636 M_FLTMOVE(var->regoff, d);
641 case ICMD_ISTORE: /* ..., value ==> ... */
642 case ICMD_LSTORE: /* op1 = local variable */
645 if ((src->varkind == LOCALVAR) &&
646 (src->varnum == iptr->op1)) {
649 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
650 if (var->flags & INMEMORY) {
651 var_to_reg_int(s1, src, REG_ITMP1);
652 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
655 var_to_reg_int(s1, src, var->regoff);
656 M_INTMOVE(s1, var->regoff);
660 case ICMD_FSTORE: /* ..., value ==> ... */
661 case ICMD_DSTORE: /* op1 = local variable */
663 if ((src->varkind == LOCALVAR) &&
664 (src->varnum == iptr->op1)) {
667 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
668 if (var->flags & INMEMORY) {
669 var_to_reg_flt(s1, src, REG_FTMP1);
670 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
673 var_to_reg_flt(s1, src, var->regoff);
674 M_FLTMOVE(s1, var->regoff);
679 /* pop/dup/swap operations ********************************************/
681 /* attention: double and longs are only one entry in CACAO ICMDs */
683 case ICMD_POP: /* ..., value ==> ... */
684 case ICMD_POP2: /* ..., value, value ==> ... */
687 case ICMD_DUP: /* ..., a ==> ..., a, a */
688 M_COPY(src, iptr->dst);
691 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
693 M_COPY(src, iptr->dst);
694 M_COPY(src->prev, iptr->dst->prev);
695 M_COPY(iptr->dst, iptr->dst->prev->prev);
698 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
700 M_COPY(src, iptr->dst);
701 M_COPY(src->prev, iptr->dst->prev);
702 M_COPY(src->prev->prev, iptr->dst->prev->prev);
703 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
706 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
708 M_COPY(src, iptr->dst);
709 M_COPY(src->prev, iptr->dst->prev);
712 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
714 M_COPY(src, iptr->dst);
715 M_COPY(src->prev, iptr->dst->prev);
716 M_COPY(src->prev->prev, iptr->dst->prev->prev);
717 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
718 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
721 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
723 M_COPY(src, iptr->dst);
724 M_COPY(src->prev, iptr->dst->prev);
725 M_COPY(src->prev->prev, iptr->dst->prev->prev);
726 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
727 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
728 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
731 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
733 M_COPY(src, iptr->dst->prev);
734 M_COPY(src->prev, iptr->dst);
738 /* integer operations *************************************************/
740 case ICMD_INEG: /* ..., value ==> ..., - value */
742 d = reg_of_var(rd, iptr->dst, REG_NULL);
743 if (iptr->dst->flags & INMEMORY) {
744 if (src->flags & INMEMORY) {
745 if (src->regoff == iptr->dst->regoff) {
746 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
749 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
750 x86_64_negl_reg(cd, REG_ITMP1);
751 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
755 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
756 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
760 if (src->flags & INMEMORY) {
761 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
762 x86_64_negl_reg(cd, d);
765 M_INTMOVE(src->regoff, iptr->dst->regoff);
766 x86_64_negl_reg(cd, iptr->dst->regoff);
771 case ICMD_LNEG: /* ..., value ==> ..., - value */
773 d = reg_of_var(rd, iptr->dst, REG_NULL);
774 if (iptr->dst->flags & INMEMORY) {
775 if (src->flags & INMEMORY) {
776 if (src->regoff == iptr->dst->regoff) {
777 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
780 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
781 x86_64_neg_reg(cd, REG_ITMP1);
782 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
786 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
787 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
791 if (src->flags & INMEMORY) {
792 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
793 x86_64_neg_reg(cd, iptr->dst->regoff);
796 M_INTMOVE(src->regoff, iptr->dst->regoff);
797 x86_64_neg_reg(cd, iptr->dst->regoff);
802 case ICMD_I2L: /* ..., value ==> ..., value */
804 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
805 if (src->flags & INMEMORY) {
806 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
809 x86_64_movslq_reg_reg(cd, src->regoff, d);
811 store_reg_to_var_int(iptr->dst, d);
814 case ICMD_L2I: /* ..., value ==> ..., value */
816 var_to_reg_int(s1, src, REG_ITMP1);
817 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
819 store_reg_to_var_int(iptr->dst, d);
822 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
824 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
825 if (src->flags & INMEMORY) {
826 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
829 x86_64_movsbq_reg_reg(cd, src->regoff, d);
831 store_reg_to_var_int(iptr->dst, d);
834 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
836 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
837 if (src->flags & INMEMORY) {
838 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
841 x86_64_movzwq_reg_reg(cd, src->regoff, d);
843 store_reg_to_var_int(iptr->dst, d);
846 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
848 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
849 if (src->flags & INMEMORY) {
850 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
853 x86_64_movswq_reg_reg(cd, src->regoff, d);
855 store_reg_to_var_int(iptr->dst, d);
859 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
861 d = reg_of_var(rd, iptr->dst, REG_NULL);
862 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
865 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
866 /* val.i = constant */
868 d = reg_of_var(rd, iptr->dst, REG_NULL);
869 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
872 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
874 d = reg_of_var(rd, iptr->dst, REG_NULL);
875 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
878 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
879 /* val.l = constant */
881 d = reg_of_var(rd, iptr->dst, REG_NULL);
882 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
885 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
887 d = reg_of_var(rd, iptr->dst, REG_NULL);
888 if (iptr->dst->flags & INMEMORY) {
889 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
890 if (src->prev->regoff == iptr->dst->regoff) {
891 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
892 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
895 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
896 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
897 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
900 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
901 M_INTMOVE(src->prev->regoff, REG_ITMP1);
902 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
903 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
905 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
906 if (src->prev->regoff == iptr->dst->regoff) {
907 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
910 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
911 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
912 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
916 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
917 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
921 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
922 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
923 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
925 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
926 M_INTMOVE(src->prev->regoff, d);
927 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
929 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
930 /* workaround for reg alloc */
931 if (src->regoff == iptr->dst->regoff) {
932 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
933 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
934 M_INTMOVE(REG_ITMP1, d);
937 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
938 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
942 /* workaround for reg alloc */
943 if (src->regoff == iptr->dst->regoff) {
944 M_INTMOVE(src->prev->regoff, REG_ITMP1);
945 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
946 M_INTMOVE(REG_ITMP1, d);
949 M_INTMOVE(src->prev->regoff, d);
950 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
956 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
957 /* val.i = constant */
959 d = reg_of_var(rd, iptr->dst, REG_NULL);
960 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
963 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
965 d = reg_of_var(rd, iptr->dst, REG_NULL);
966 if (iptr->dst->flags & INMEMORY) {
967 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
968 if (src->prev->regoff == iptr->dst->regoff) {
969 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
970 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
973 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
974 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
975 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
978 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
979 M_INTMOVE(src->prev->regoff, REG_ITMP1);
980 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
981 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
983 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
984 if (src->prev->regoff == iptr->dst->regoff) {
985 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
988 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
989 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
990 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
994 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
995 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
999 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1000 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1001 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1003 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1004 M_INTMOVE(src->prev->regoff, d);
1005 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1007 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1008 /* workaround for reg alloc */
1009 if (src->regoff == iptr->dst->regoff) {
1010 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1011 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1012 M_INTMOVE(REG_ITMP1, d);
1015 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1016 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1020 /* workaround for reg alloc */
1021 if (src->regoff == iptr->dst->regoff) {
1022 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1023 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1024 M_INTMOVE(REG_ITMP1, d);
1027 M_INTMOVE(src->prev->regoff, d);
1028 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1034 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1035 /* val.l = constant */
1037 d = reg_of_var(rd, iptr->dst, REG_NULL);
1038 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1041 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1043 d = reg_of_var(rd, iptr->dst, REG_NULL);
1044 if (iptr->dst->flags & INMEMORY) {
1045 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1046 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1047 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1048 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1050 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1051 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1052 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1053 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1055 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1056 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1057 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1058 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1061 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1062 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1063 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1067 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1068 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1069 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1071 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1072 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1073 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1075 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1076 M_INTMOVE(src->regoff, iptr->dst->regoff);
1077 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1080 if (src->regoff == iptr->dst->regoff) {
1081 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1084 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1085 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1091 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1092 /* val.i = constant */
1094 d = reg_of_var(rd, iptr->dst, REG_NULL);
1095 if (iptr->dst->flags & INMEMORY) {
1096 if (src->flags & INMEMORY) {
1097 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1098 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1101 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1102 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1106 if (src->flags & INMEMORY) {
1107 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1110 if (iptr->val.i == 2) {
1111 M_INTMOVE(src->regoff, iptr->dst->regoff);
1112 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1115 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
1121 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1123 d = reg_of_var(rd, iptr->dst, REG_NULL);
1124 if (iptr->dst->flags & INMEMORY) {
1125 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1126 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1127 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1128 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1130 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1131 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1132 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1133 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1135 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1136 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1137 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1138 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1141 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1142 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1143 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1147 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1148 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1149 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1151 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1152 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1153 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1155 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1156 M_INTMOVE(src->regoff, iptr->dst->regoff);
1157 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1160 if (src->regoff == iptr->dst->regoff) {
1161 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1164 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1165 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
1171 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1172 /* val.l = constant */
1174 d = reg_of_var(rd, iptr->dst, REG_NULL);
1175 if (iptr->dst->flags & INMEMORY) {
1176 if (src->flags & INMEMORY) {
1177 if (IS_IMM32(iptr->val.l)) {
1178 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1181 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1182 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1184 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1187 if (IS_IMM32(iptr->val.l)) {
1188 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1191 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1192 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1194 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1198 if (src->flags & INMEMORY) {
1199 if (IS_IMM32(iptr->val.l)) {
1200 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1203 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1204 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1208 /* should match in many cases */
1209 if (iptr->val.l == 2) {
1210 M_INTMOVE(src->regoff, iptr->dst->regoff);
1211 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1214 if (IS_IMM32(iptr->val.l)) {
1215 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1218 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1219 M_INTMOVE(src->regoff, iptr->dst->regoff);
1220 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
1227 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1229 d = reg_of_var(rd, iptr->dst, REG_NULL);
1230 if (src->prev->flags & INMEMORY) {
1231 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1234 M_INTMOVE(src->prev->regoff, RAX);
1237 if (src->flags & INMEMORY) {
1238 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1241 M_INTMOVE(src->regoff, REG_ITMP3);
1245 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1246 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1247 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1248 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1250 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1252 x86_64_idivl_reg(cd, REG_ITMP3);
1254 if (iptr->dst->flags & INMEMORY) {
1255 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1256 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1259 M_INTMOVE(RAX, iptr->dst->regoff);
1261 if (iptr->dst->regoff != RDX) {
1262 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1267 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1268 d = reg_of_var(rd, iptr->dst, REG_NULL);
1269 if (src->prev->flags & INMEMORY) {
1270 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1273 M_INTMOVE(src->prev->regoff, RAX);
1276 if (src->flags & INMEMORY) {
1277 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1280 M_INTMOVE(src->regoff, REG_ITMP3);
1284 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1286 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1287 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1290 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1291 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1292 x86_64_jcc(cd, X86_64_CC_E, 1 + 3); /* 6 bytes */
1295 x86_64_idivl_reg(cd, REG_ITMP3);
1297 if (iptr->dst->flags & INMEMORY) {
1298 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1299 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1302 M_INTMOVE(RDX, iptr->dst->regoff);
1304 if (iptr->dst->regoff != RDX) {
1305 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1310 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1311 /* val.i = constant */
1313 var_to_reg_int(s1, src, REG_ITMP1);
1314 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1315 M_INTMOVE(s1, REG_ITMP1);
1316 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1317 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1318 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1319 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1320 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1321 store_reg_to_var_int(iptr->dst, d);
1324 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1325 /* val.i = constant */
1327 var_to_reg_int(s1, src, REG_ITMP1);
1328 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1329 M_INTMOVE(s1, REG_ITMP1);
1330 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1331 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1332 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1333 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1334 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1335 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1336 store_reg_to_var_int(iptr->dst, d);
1340 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1342 d = reg_of_var(rd, iptr->dst, REG_NULL);
1344 if (src->prev->flags & INMEMORY) {
1345 M_LLD(RAX, REG_SP, src->prev->regoff * 8);
1348 M_INTMOVE(src->prev->regoff, RAX);
1351 if (src->flags & INMEMORY) {
1352 M_LLD(REG_ITMP3, REG_SP, src->regoff * 8);
1355 M_INTMOVE(src->regoff, REG_ITMP3);
1359 /* check as described in jvm spec */
1360 disp = dseg_adds8(cd, 0x8000000000000000LL);
1361 M_CMP_MEMBASE(RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, RAX);
1363 M_CMP_IMM(-1, REG_ITMP3); /* 4 bytes */
1364 M_BEQ(3 + 2 + 3); /* 6 bytes */
1366 M_MOV(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1368 x86_64_idiv_reg(cd, REG_ITMP3);
1370 if (iptr->dst->flags & INMEMORY) {
1371 M_LST(RAX, REG_SP, iptr->dst->regoff * 8);
1372 M_MOV(REG_ITMP2, RDX); /* restore %rdx */
1375 M_INTMOVE(RAX, iptr->dst->regoff);
1377 if (iptr->dst->regoff != RDX) {
1378 M_MOV(REG_ITMP2, RDX); /* restore %rdx */
1383 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1385 d = reg_of_var(rd, iptr->dst, REG_NULL);
1386 if (src->prev->flags & INMEMORY) {
1387 M_LLD(REG_ITMP1, REG_SP, src->prev->regoff * 8);
1390 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1393 if (src->flags & INMEMORY) {
1394 M_LLD(REG_ITMP3, REG_SP, src->regoff * 8);
1397 M_INTMOVE(src->regoff, REG_ITMP3);
1401 M_MOV(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1403 /* check as described in jvm spec */
1404 disp = dseg_adds8(cd, 0x8000000000000000LL);
1405 M_CMP_MEMBASE(RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP1);
1409 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1411 M_XOR(RDX, RDX); /* 3 bytes */
1412 M_CMP_IMM(-1, REG_ITMP3); /* 4 bytes */
1413 M_BEQ(2 + 3); /* 6 bytes */
1416 x86_64_idiv_reg(cd, REG_ITMP3);
1418 if (iptr->dst->flags & INMEMORY) {
1419 M_LST(RDX, REG_SP, iptr->dst->regoff * 8);
1420 M_MOV(REG_ITMP2, RDX); /* restore %rdx */
1423 M_INTMOVE(RDX, iptr->dst->regoff);
1425 if (iptr->dst->regoff != RDX) {
1426 M_MOV(REG_ITMP2, RDX); /* restore %rdx */
1431 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1432 /* val.i = constant */
1434 var_to_reg_int(s1, src, REG_ITMP1);
1435 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1436 M_INTMOVE(s1, REG_ITMP1);
1437 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1438 x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1439 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1440 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1441 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1442 store_reg_to_var_int(iptr->dst, d);
1445 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1446 /* val.l = constant */
1448 var_to_reg_int(s1, src, REG_ITMP1);
1449 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1450 M_INTMOVE(s1, REG_ITMP1);
1451 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1452 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1453 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1454 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1455 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1456 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1457 store_reg_to_var_int(iptr->dst, d);
1460 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1462 d = reg_of_var(rd, iptr->dst, REG_NULL);
1463 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1466 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1467 /* val.i = constant */
1469 d = reg_of_var(rd, iptr->dst, REG_NULL);
1470 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1473 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1475 d = reg_of_var(rd, iptr->dst, REG_NULL);
1476 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1479 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1480 /* val.i = constant */
1482 d = reg_of_var(rd, iptr->dst, REG_NULL);
1483 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1486 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1488 d = reg_of_var(rd, iptr->dst, REG_NULL);
1489 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1492 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1493 /* val.i = constant */
1495 d = reg_of_var(rd, iptr->dst, REG_NULL);
1496 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1499 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1501 d = reg_of_var(rd, iptr->dst, REG_NULL);
1502 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1505 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1506 /* val.i = constant */
1508 d = reg_of_var(rd, iptr->dst, REG_NULL);
1509 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1512 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1514 d = reg_of_var(rd, iptr->dst, REG_NULL);
1515 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1518 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1519 /* val.i = constant */
1521 d = reg_of_var(rd, iptr->dst, REG_NULL);
1522 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1525 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1527 d = reg_of_var(rd, iptr->dst, REG_NULL);
1528 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1531 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1532 /* val.l = constant */
1534 d = reg_of_var(rd, iptr->dst, REG_NULL);
1535 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1538 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1540 d = reg_of_var(rd, iptr->dst, REG_NULL);
1541 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1544 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1545 /* val.i = constant */
1547 d = reg_of_var(rd, iptr->dst, REG_NULL);
1548 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1551 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1553 d = reg_of_var(rd, iptr->dst, REG_NULL);
1554 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1557 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1558 /* val.l = constant */
1560 d = reg_of_var(rd, iptr->dst, REG_NULL);
1561 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1564 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1566 d = reg_of_var(rd, iptr->dst, REG_NULL);
1567 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1570 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1571 /* val.i = constant */
1573 d = reg_of_var(rd, iptr->dst, REG_NULL);
1574 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1577 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1579 d = reg_of_var(rd, iptr->dst, REG_NULL);
1580 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1583 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1584 /* val.l = constant */
1586 d = reg_of_var(rd, iptr->dst, REG_NULL);
1587 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1590 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1592 d = reg_of_var(rd, iptr->dst, REG_NULL);
1593 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1596 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1597 /* val.i = constant */
1599 d = reg_of_var(rd, iptr->dst, REG_NULL);
1600 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1603 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1605 d = reg_of_var(rd, iptr->dst, REG_NULL);
1606 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1609 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1610 /* val.l = constant */
1612 d = reg_of_var(rd, iptr->dst, REG_NULL);
1613 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
1617 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1618 /* op1 = variable, val.i = constant */
1620 /* using inc and dec is definitely faster than add -- tested */
1623 var = &(rd->locals[iptr->op1][TYPE_INT]);
1625 if (var->flags & INMEMORY) {
1626 if (iptr->val.i == 1) {
1627 x86_64_incl_membase(cd, REG_SP, d * 8);
1629 } else if (iptr->val.i == -1) {
1630 x86_64_decl_membase(cd, REG_SP, d * 8);
1633 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1637 if (iptr->val.i == 1) {
1638 x86_64_incl_reg(cd, d);
1640 } else if (iptr->val.i == -1) {
1641 x86_64_decl_reg(cd, d);
1644 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1650 /* floating operations ************************************************/
1652 case ICMD_FNEG: /* ..., value ==> ..., - value */
1654 var_to_reg_flt(s1, src, REG_FTMP1);
1655 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1656 disp = dseg_adds4(cd, 0x80000000);
1658 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + disp, REG_FTMP2);
1659 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1660 store_reg_to_var_flt(iptr->dst, d);
1663 case ICMD_DNEG: /* ..., value ==> ..., - value */
1665 var_to_reg_flt(s1, src, REG_FTMP1);
1666 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1667 disp = dseg_adds8(cd, 0x8000000000000000);
1669 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + disp, REG_FTMP2);
1670 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1671 store_reg_to_var_flt(iptr->dst, d);
1674 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1676 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1677 var_to_reg_flt(s2, src, REG_FTMP2);
1678 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1680 x86_64_addss_reg_reg(cd, s2, d);
1681 } else if (s2 == d) {
1682 x86_64_addss_reg_reg(cd, s1, d);
1685 x86_64_addss_reg_reg(cd, s2, d);
1687 store_reg_to_var_flt(iptr->dst, d);
1690 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1692 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1693 var_to_reg_flt(s2, src, REG_FTMP2);
1694 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1696 x86_64_addsd_reg_reg(cd, s2, d);
1697 } else if (s2 == d) {
1698 x86_64_addsd_reg_reg(cd, s1, d);
1701 x86_64_addsd_reg_reg(cd, s2, d);
1703 store_reg_to_var_flt(iptr->dst, d);
1706 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1708 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1709 var_to_reg_flt(s2, src, REG_FTMP2);
1710 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1712 M_FLTMOVE(s2, REG_FTMP2);
1716 x86_64_subss_reg_reg(cd, s2, d);
1717 store_reg_to_var_flt(iptr->dst, d);
1720 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1722 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1723 var_to_reg_flt(s2, src, REG_FTMP2);
1724 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1726 M_FLTMOVE(s2, REG_FTMP2);
1730 x86_64_subsd_reg_reg(cd, s2, d);
1731 store_reg_to_var_flt(iptr->dst, d);
1734 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1736 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1737 var_to_reg_flt(s2, src, REG_FTMP2);
1738 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1740 x86_64_mulss_reg_reg(cd, s2, d);
1741 } else if (s2 == d) {
1742 x86_64_mulss_reg_reg(cd, s1, d);
1745 x86_64_mulss_reg_reg(cd, s2, d);
1747 store_reg_to_var_flt(iptr->dst, d);
1750 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1752 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1753 var_to_reg_flt(s2, src, REG_FTMP2);
1754 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1756 x86_64_mulsd_reg_reg(cd, s2, d);
1757 } else if (s2 == d) {
1758 x86_64_mulsd_reg_reg(cd, s1, d);
1761 x86_64_mulsd_reg_reg(cd, s2, d);
1763 store_reg_to_var_flt(iptr->dst, d);
1766 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1768 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1769 var_to_reg_flt(s2, src, REG_FTMP2);
1770 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1772 M_FLTMOVE(s2, REG_FTMP2);
1776 x86_64_divss_reg_reg(cd, s2, d);
1777 store_reg_to_var_flt(iptr->dst, d);
1780 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1782 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1783 var_to_reg_flt(s2, src, REG_FTMP2);
1784 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1786 M_FLTMOVE(s2, REG_FTMP2);
1790 x86_64_divsd_reg_reg(cd, s2, d);
1791 store_reg_to_var_flt(iptr->dst, d);
1794 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1796 var_to_reg_int(s1, src, REG_ITMP1);
1797 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1798 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1799 store_reg_to_var_flt(iptr->dst, d);
1802 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1804 var_to_reg_int(s1, src, REG_ITMP1);
1805 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1806 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1807 store_reg_to_var_flt(iptr->dst, d);
1810 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1812 var_to_reg_int(s1, src, REG_ITMP1);
1813 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1814 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1815 store_reg_to_var_flt(iptr->dst, d);
1818 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1820 var_to_reg_int(s1, src, REG_ITMP1);
1821 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1822 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1823 store_reg_to_var_flt(iptr->dst, d);
1826 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1828 var_to_reg_flt(s1, src, REG_FTMP1);
1829 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1830 x86_64_cvttss2si_reg_reg(cd, s1, d);
1831 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1832 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1833 x86_64_jcc(cd, X86_64_CC_NE, a);
1834 M_FLTMOVE(s1, REG_FTMP1);
1835 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1836 x86_64_call_reg(cd, REG_ITMP2);
1837 M_INTMOVE(REG_RESULT, d);
1838 store_reg_to_var_int(iptr->dst, d);
1841 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1843 var_to_reg_flt(s1, src, REG_FTMP1);
1844 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1845 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1846 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1847 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1848 x86_64_jcc(cd, X86_64_CC_NE, a);
1849 M_FLTMOVE(s1, REG_FTMP1);
1850 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1851 x86_64_call_reg(cd, REG_ITMP2);
1852 M_INTMOVE(REG_RESULT, d);
1853 store_reg_to_var_int(iptr->dst, d);
1856 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1858 var_to_reg_flt(s1, src, REG_FTMP1);
1859 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1860 x86_64_cvttss2siq_reg_reg(cd, s1, d);
1861 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1862 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1863 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1864 x86_64_jcc(cd, X86_64_CC_NE, a);
1865 M_FLTMOVE(s1, REG_FTMP1);
1866 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
1867 x86_64_call_reg(cd, REG_ITMP2);
1868 M_INTMOVE(REG_RESULT, d);
1869 store_reg_to_var_int(iptr->dst, d);
1872 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1874 var_to_reg_flt(s1, src, REG_FTMP1);
1875 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1876 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1877 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1878 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1879 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1880 x86_64_jcc(cd, X86_64_CC_NE, a);
1881 M_FLTMOVE(s1, REG_FTMP1);
1882 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
1883 x86_64_call_reg(cd, REG_ITMP2);
1884 M_INTMOVE(REG_RESULT, d);
1885 store_reg_to_var_int(iptr->dst, d);
1888 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1890 var_to_reg_flt(s1, src, REG_FTMP1);
1891 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1892 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1893 store_reg_to_var_flt(iptr->dst, d);
1896 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1898 var_to_reg_flt(s1, src, REG_FTMP1);
1899 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1900 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1901 store_reg_to_var_flt(iptr->dst, d);
1904 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1905 /* == => 0, < => 1, > => -1 */
1907 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1908 var_to_reg_flt(s2, src, REG_FTMP2);
1909 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1910 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1911 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1912 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1913 x86_64_ucomiss_reg_reg(cd, s1, s2);
1914 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1915 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1916 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1917 store_reg_to_var_int(iptr->dst, d);
1920 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1921 /* == => 0, < => 1, > => -1 */
1923 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1924 var_to_reg_flt(s2, src, REG_FTMP2);
1925 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1926 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1927 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1928 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1929 x86_64_ucomiss_reg_reg(cd, s1, s2);
1930 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1931 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1932 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1933 store_reg_to_var_int(iptr->dst, d);
1936 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1937 /* == => 0, < => 1, > => -1 */
1939 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1940 var_to_reg_flt(s2, src, REG_FTMP2);
1941 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1942 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1943 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1944 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1945 x86_64_ucomisd_reg_reg(cd, s1, s2);
1946 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1947 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1948 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1949 store_reg_to_var_int(iptr->dst, d);
1952 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1953 /* == => 0, < => 1, > => -1 */
1955 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1956 var_to_reg_flt(s2, src, REG_FTMP2);
1957 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1958 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1959 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1960 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1961 x86_64_ucomisd_reg_reg(cd, s1, s2);
1962 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1963 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1964 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1965 store_reg_to_var_int(iptr->dst, d);
1969 /* memory operations **************************************************/
1971 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
1973 var_to_reg_int(s1, src, REG_ITMP1);
1974 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1975 gen_nullptr_check(s1);
1976 x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
1977 store_reg_to_var_int(iptr->dst, d);
1980 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
1982 var_to_reg_int(s1, src->prev, REG_ITMP1);
1983 var_to_reg_int(s2, src, REG_ITMP2);
1984 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1985 if (iptr->op1 == 0) {
1986 gen_nullptr_check(s1);
1989 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
1990 store_reg_to_var_int(iptr->dst, d);
1993 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1995 var_to_reg_int(s1, src->prev, REG_ITMP1);
1996 var_to_reg_int(s2, src, REG_ITMP2);
1997 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1998 if (iptr->op1 == 0) {
1999 gen_nullptr_check(s1);
2002 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2003 store_reg_to_var_int(iptr->dst, d);
2006 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2008 var_to_reg_int(s1, src->prev, REG_ITMP1);
2009 var_to_reg_int(s2, src, REG_ITMP2);
2010 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2011 if (iptr->op1 == 0) {
2012 gen_nullptr_check(s1);
2015 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2016 store_reg_to_var_int(iptr->dst, d);
2019 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2021 var_to_reg_int(s1, src->prev, REG_ITMP1);
2022 var_to_reg_int(s2, src, REG_ITMP2);
2023 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2024 if (iptr->op1 == 0) {
2025 gen_nullptr_check(s1);
2028 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2029 store_reg_to_var_flt(iptr->dst, d);
2032 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2034 var_to_reg_int(s1, src->prev, REG_ITMP1);
2035 var_to_reg_int(s2, src, REG_ITMP2);
2036 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2037 if (iptr->op1 == 0) {
2038 gen_nullptr_check(s1);
2041 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2042 store_reg_to_var_flt(iptr->dst, d);
2045 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2047 var_to_reg_int(s1, src->prev, REG_ITMP1);
2048 var_to_reg_int(s2, src, REG_ITMP2);
2049 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2050 if (iptr->op1 == 0) {
2051 gen_nullptr_check(s1);
2054 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2055 store_reg_to_var_int(iptr->dst, d);
2058 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2060 var_to_reg_int(s1, src->prev, REG_ITMP1);
2061 var_to_reg_int(s2, src, REG_ITMP2);
2062 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2063 if (iptr->op1 == 0) {
2064 gen_nullptr_check(s1);
2067 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2068 store_reg_to_var_int(iptr->dst, d);
2071 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2073 var_to_reg_int(s1, src->prev, REG_ITMP1);
2074 var_to_reg_int(s2, src, REG_ITMP2);
2075 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2076 if (iptr->op1 == 0) {
2077 gen_nullptr_check(s1);
2080 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2081 store_reg_to_var_int(iptr->dst, d);
2085 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2087 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2088 var_to_reg_int(s2, src->prev, REG_ITMP2);
2089 if (iptr->op1 == 0) {
2090 gen_nullptr_check(s1);
2093 var_to_reg_int(s3, src, REG_ITMP3);
2094 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2097 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2099 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2100 var_to_reg_int(s2, src->prev, REG_ITMP2);
2101 if (iptr->op1 == 0) {
2102 gen_nullptr_check(s1);
2105 var_to_reg_int(s3, src, REG_ITMP3);
2106 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2109 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2111 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2112 var_to_reg_int(s2, src->prev, REG_ITMP2);
2113 if (iptr->op1 == 0) {
2114 gen_nullptr_check(s1);
2117 var_to_reg_flt(s3, src, REG_FTMP3);
2118 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2121 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2123 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2124 var_to_reg_int(s2, src->prev, REG_ITMP2);
2125 if (iptr->op1 == 0) {
2126 gen_nullptr_check(s1);
2129 var_to_reg_flt(s3, src, REG_FTMP3);
2130 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2133 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2135 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2136 var_to_reg_int(s2, src->prev, REG_ITMP2);
2137 if (iptr->op1 == 0) {
2138 gen_nullptr_check(s1);
2141 var_to_reg_int(s3, src, REG_ITMP3);
2142 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2145 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2147 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2148 var_to_reg_int(s2, src->prev, REG_ITMP2);
2149 if (iptr->op1 == 0) {
2150 gen_nullptr_check(s1);
2153 var_to_reg_int(s3, src, REG_ITMP3);
2154 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2157 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2159 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2160 var_to_reg_int(s2, src->prev, REG_ITMP2);
2161 if (iptr->op1 == 0) {
2162 gen_nullptr_check(s1);
2165 var_to_reg_int(s3, src, REG_ITMP3);
2166 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2169 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2171 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2172 var_to_reg_int(s2, src->prev, REG_ITMP2);
2173 if (iptr->op1 == 0) {
2174 gen_nullptr_check(s1);
2177 var_to_reg_int(s3, src, REG_ITMP3);
2179 M_MOV(s1, rd->argintregs[0]);
2180 M_MOV(s3, rd->argintregs[1]);
2181 M_MOV_IMM((ptrint) BUILTIN_canstore, REG_ITMP1);
2185 codegen_addxstorerefs(cd, cd->mcodeptr);
2187 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2188 var_to_reg_int(s2, src->prev, REG_ITMP2);
2189 var_to_reg_int(s3, src, REG_ITMP3);
2190 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
/* Constant array stores: arrayref and index on the stack, the value is an */
/* immediate in the instruction. Each case loads arrayref (s1) and index   */
/* (s2), emits a null check when op1 == 0 (the matching bound-check lines  */
/* and closing braces are elided in this listing), then stores the         */
/* immediate with the element width and scale of the array type.           */
2194 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2196 var_to_reg_int(s1, src->prev, REG_ITMP1);
2197 var_to_reg_int(s2, src, REG_ITMP2);
2198 if (iptr->op1 == 0) {
2199 gen_nullptr_check(s1);
/* 4-byte int element, scale 2 (index * 4) */
2202 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2205 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2207 var_to_reg_int(s1, src->prev, REG_ITMP1);
2208 var_to_reg_int(s2, src, REG_ITMP2);
2209 if (iptr->op1 == 0) {
2210 gen_nullptr_check(s1);
/* 8-byte long element, scale 3. A 64-bit mov immediate only takes a   */
/* sign-extended 32-bit value, so a wide constant is stored as two     */
/* 32-bit halves (low word at offset 0, high word at offset +4).       */
2214 if (IS_IMM32(iptr->val.l)) {
2215 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2218 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2219 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2223 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2225 var_to_reg_int(s1, src->prev, REG_ITMP1);
2226 var_to_reg_int(s2, src, REG_ITMP2);
2227 if (iptr->op1 == 0) {
2228 gen_nullptr_check(s1);
/* only the null reference can be stored without a canstore check */
2231 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2234 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2236 var_to_reg_int(s1, src->prev, REG_ITMP1);
2237 var_to_reg_int(s2, src, REG_ITMP2);
2238 if (iptr->op1 == 0) {
2239 gen_nullptr_check(s1);
/* 1-byte element, scale 0 */
2242 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2245 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2247 var_to_reg_int(s1, src->prev, REG_ITMP1);
2248 var_to_reg_int(s2, src, REG_ITMP2);
2249 if (iptr->op1 == 0) {
2250 gen_nullptr_check(s1);
/* 2-byte char element, scale 1 */
2253 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2256 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2258 var_to_reg_int(s1, src->prev, REG_ITMP1);
2259 var_to_reg_int(s2, src, REG_ITMP2);
2260 if (iptr->op1 == 0) {
2261 gen_nullptr_check(s1);
/* 2-byte short element, scale 1 */
2264 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
/* Static field access. Common pattern for all three cases:                */
/*  - unresolved field (the guarding condition line is elided here): put a */
/*    NULL slot into the data segment and register a get/putstatic patcher */
/*    that fills it in at first execution;                                 */
/*  - resolved field: store &fi->value into the data segment and, if the   */
/*    class is not yet initialized, register a clinit patcher.             */
/* The field address is then loaded RIP-relative from the data segment;    */
/* the displacement subtracts (mcodeptr + 7 - mcodebase) because the       */
/* 7-byte mov instruction's end is the reference point for RIP addressing. */
2268 case ICMD_GETSTATIC: /* ... ==> ..., value */
2269 /* op1 = type, val.a = field address */
2272 disp = dseg_addaddress(cd, NULL);
2274 codegen_addpatchref(cd, cd->mcodeptr,
2275 PATCHER_get_putstatic,
2276 (unresolved_field *) iptr->target, disp);
/* leave room so the disassembler output stays aligned with patched code */
2278 if (opt_showdisassemble) {
2279 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2283 fieldinfo *fi = iptr->val.a;
2285 disp = dseg_addaddress(cd, &(fi->value));
2287 if (!(fi->class->state & CLASS_INITIALIZED)) {
2288 codegen_addpatchref(cd, cd->mcodeptr,
2289 PATCHER_clinit, fi->class, 0);
2291 if (opt_showdisassemble) {
2292 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2297 /* This approach is much faster than moving the field address */
2298 /* inline into a register. */
2299 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP2);
/* dispatch on the field's type (case labels elided in this listing) */
2300 switch (iptr->op1) {
2302 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2303 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2304 store_reg_to_var_int(iptr->dst, d);
2308 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2309 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2310 store_reg_to_var_int(iptr->dst, d);
2313 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2314 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2315 store_reg_to_var_flt(iptr->dst, d);
2318 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2319 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2320 store_reg_to_var_flt(iptr->dst, d);
/* PUTSTATIC: same address-resolution scheme, then store the stack value */
2325 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2326 /* op1 = type, val.a = field address */
2329 disp = dseg_addaddress(cd, NULL);
2331 codegen_addpatchref(cd, cd->mcodeptr,
2332 PATCHER_get_putstatic,
2333 (unresolved_field *) iptr->target, disp);
2335 if (opt_showdisassemble) {
2336 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2340 fieldinfo *fi = iptr->val.a;
2342 disp = dseg_addaddress(cd, &(fi->value));
2344 if (!(fi->class->state & CLASS_INITIALIZED)) {
2345 codegen_addpatchref(cd, cd->mcodeptr,
2346 PATCHER_clinit, fi->class, 0);
2348 if (opt_showdisassemble) {
2349 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2354 /* This approach is much faster than moving the field address */
2355 /* inline into a register. */
2356 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP2);
2357 switch (iptr->op1) {
2359 var_to_reg_int(s2, src, REG_ITMP1);
2360 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2364 var_to_reg_int(s2, src, REG_ITMP1);
2365 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2368 var_to_reg_flt(s2, src, REG_FTMP1);
2369 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2372 var_to_reg_flt(s2, src, REG_FTMP1);
2373 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
/* PUTSTATICCONST: value is an immediate; field info lives in the */
/* following NOP instruction (iptr[1])                            */
2378 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2379 /* val = value (in current instruction) */
2380 /* op1 = type, val.a = field address (in */
2381 /* following NOP) */
2383 if (!iptr[1].val.a) {
2384 disp = dseg_addaddress(cd, NULL);
2386 codegen_addpatchref(cd, cd->mcodeptr,
2387 PATCHER_get_putstatic,
2388 (unresolved_field *) iptr[1].target, disp);
2390 if (opt_showdisassemble) {
2391 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2395 fieldinfo *fi = iptr[1].val.a;
2397 disp = dseg_addaddress(cd, &(fi->value));
2399 if (!(fi->class->state & CLASS_INITIALIZED)) {
2400 codegen_addpatchref(cd, cd->mcodeptr,
2401 PATCHER_clinit, fi->class, 0);
2403 if (opt_showdisassemble) {
2404 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2409 /* This approach is much faster than moving the field address */
2410 /* inline into a register. */
2411 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP1);
2412 switch (iptr->op1) {
2415 x86_64_movl_imm_membase(cd, iptr->val.i, REG_ITMP1, 0);
/* long constant: single 64-bit store if it fits in a signed imm32, */
/* otherwise two 32-bit halves                                      */
2420 if (IS_IMM32(iptr->val.l)) {
2421 x86_64_mov_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2423 x86_64_movl_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2424 x86_64_movl_imm_membase(cd, iptr->val.l >> 32, REG_ITMP1, 4);
/* Instance field access. For unresolved fields a get/putfield patcher is  */
/* registered and the 32-bit displacement forms (membase32) are used so    */
/* the patcher can later write the real offset into the instruction.       */
2430 case ICMD_GETFIELD: /* ... ==> ..., value */
2431 /* op1 = type, val.i = field offset */
2433 var_to_reg_int(s1, src, REG_ITMP1);
2434 gen_nullptr_check(s1);
2437 codegen_addpatchref(cd, cd->mcodeptr,
2438 PATCHER_get_putfield,
2439 (unresolved_field *) iptr->target, 0);
2441 if (opt_showdisassemble) {
2442 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* resolved: use the real field offset directly */
2448 a = ((fieldinfo *) (iptr->val.a))->offset;
2451 switch (iptr->op1) {
2453 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2454 x86_64_movl_membase32_reg(cd, s1, a, d);
2455 store_reg_to_var_int(iptr->dst, d);
2459 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2460 x86_64_mov_membase32_reg(cd, s1, a, d);
2461 store_reg_to_var_int(iptr->dst, d);
2464 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2465 x86_64_movss_membase32_reg(cd, s1, a, d);
2466 store_reg_to_var_flt(iptr->dst, d);
2469 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2470 x86_64_movsd_membase32_reg(cd, s1, a, d);
2471 store_reg_to_var_flt(iptr->dst, d);
2476 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2477 /* op1 = type, val.i = field offset */
2479 var_to_reg_int(s1, src->prev, REG_ITMP1);
2480 gen_nullptr_check(s1);
/* fetch the value into an int or float register depending on type */
2481 if (IS_INT_LNG_TYPE(iptr->op1)) {
2482 var_to_reg_int(s2, src, REG_ITMP2);
2484 var_to_reg_flt(s2, src, REG_FTMP2);
2488 codegen_addpatchref(cd, cd->mcodeptr,
2489 PATCHER_get_putfield,
2490 (unresolved_field *) iptr->target, 0);
2492 if (opt_showdisassemble) {
2493 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2499 a = ((fieldinfo *) (iptr->val.a))->offset;
2502 switch (iptr->op1) {
2504 x86_64_movl_reg_membase32(cd, s2, s1, a);
2508 x86_64_mov_reg_membase32(cd, s2, s1, a);
2511 x86_64_movss_reg_membase32(cd, s2, s1, a);
2514 x86_64_movsd_reg_membase32(cd, s2, s1, a);
/* PUTFIELDCONST: immediate value; field info is in the following NOP */
2519 case ICMD_PUTFIELDCONST: /* ..., objectref, value ==> ... */
2520 /* val = value (in current instruction) */
2521 /* op1 = type, val.a = field address (in */
2522 /* following NOP) */
2524 var_to_reg_int(s1, src, REG_ITMP1);
2525 gen_nullptr_check(s1);
2527 if (!iptr[1].val.a) {
2528 codegen_addpatchref(cd, cd->mcodeptr,
2529 PATCHER_putfieldconst,
2530 (unresolved_field *) iptr[1].target, 0);
2532 if (opt_showdisassemble) {
2533 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2539 a = ((fieldinfo *) (iptr[1].val.a))->offset;
2542 switch (iptr->op1) {
2545 x86_64_movl_imm_membase32(cd, iptr->val.i, s1, a);
2550 /* We can only optimize the move, if the class is resolved. */
2551 /* Otherwise we don't know what to patch. */
2552 if (iptr[1].val.a && IS_IMM32(iptr->val.l)) {
2553 x86_64_mov_imm_membase32(cd, iptr->val.l, s1, a);
2555 x86_64_movl_imm_membase32(cd, iptr->val.l, s1, a);
2556 x86_64_movl_imm_membase32(cd, iptr->val.l >> 32, s1, a + 4);
2563 /* branch operations **************************************************/
/* ATHROW: move the exception object into the dedicated exception-pointer */
/* register, capture the throwing pc via call/pop, and jump to the        */
/* assembler exception handler.                                           */
2565 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2567 var_to_reg_int(s1, src, REG_ITMP1);
2568 M_INTMOVE(s1, REG_ITMP1_XPTR);
/* with the verifier, an unresolved exception class needs a patcher */
2570 #ifdef ENABLE_VERIFIER
2572 codegen_addpatchref(cd, cd->mcodeptr,
2573 PATCHER_athrow_areturn,
2574 (unresolved_class *) iptr->val.a, 0);
2576 if (opt_showdisassemble) {
2577 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2580 #endif /* ENABLE_VERIFIER */
/* call with 0 displacement pushes the return address = this pc */
2582 M_CALL_IMM(0); /* passing exception pc */
2583 M_POP(REG_ITMP2_XPC);
2585 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
/* unconditional jumps: the target displacement is fixed up later via the */
/* reference recorded here (jump instruction itself elided in listing)    */
2589 case ICMD_GOTO: /* ... ==> ... */
2590 /* op1 = target JavaVM pc */
2593 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2596 case ICMD_JSR: /* ... ==> ... */
2597 /* op1 = target JavaVM pc */
2600 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2603 case ICMD_RET: /* ... ==> ... */
2604 /* op1 = local variable */
2606 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2607 var_to_reg_int(s1, var, REG_ITMP1);
/* null tests: compare memory slot against 0, or TEST a register, then */
/* emit a conditional jump whose target is patched via addreference    */
2611 case ICMD_IFNULL: /* ..., value ==> ... */
2612 /* op1 = target JavaVM pc */
2614 if (src->flags & INMEMORY) {
2615 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2618 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2620 x86_64_jcc(cd, X86_64_CC_E, 0);
2621 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2624 case ICMD_IFNONNULL: /* ..., value ==> ... */
2625 /* op1 = target JavaVM pc */
2627 if (src->flags & INMEMORY) {
2628 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2631 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2633 x86_64_jcc(cd, X86_64_CC_NE, 0);
2634 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
/* Conditional branches against a constant (IFxx/IF_Lxx) or between two   */
/* stack values (IF_ICMPxx/IF_LCMPxx). All emission is delegated to the   */
/* x86_64_emit_* helpers in emitfuncs.c, parameterized only by the        */
/* condition code; signed condition codes (L/LE/G/GE) match Java's        */
/* signed comparison semantics.                                           */
2637 case ICMD_IFEQ: /* ..., value ==> ... */
2638 /* op1 = target JavaVM pc, val.i = constant */
2640 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2643 case ICMD_IFLT: /* ..., value ==> ... */
2644 /* op1 = target JavaVM pc, val.i = constant */
2646 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2649 case ICMD_IFLE: /* ..., value ==> ... */
2650 /* op1 = target JavaVM pc, val.i = constant */
2652 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2655 case ICMD_IFNE: /* ..., value ==> ... */
2656 /* op1 = target JavaVM pc, val.i = constant */
2658 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2661 case ICMD_IFGT: /* ..., value ==> ... */
2662 /* op1 = target JavaVM pc, val.i = constant */
2664 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2667 case ICMD_IFGE: /* ..., value ==> ... */
2668 /* op1 = target JavaVM pc, val.i = constant */
2670 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
2673 case ICMD_IF_LEQ: /* ..., value ==> ... */
2674 /* op1 = target JavaVM pc, val.l = constant */
2676 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2679 case ICMD_IF_LLT: /* ..., value ==> ... */
2680 /* op1 = target JavaVM pc, val.l = constant */
2682 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2685 case ICMD_IF_LLE: /* ..., value ==> ... */
2686 /* op1 = target JavaVM pc, val.l = constant */
2688 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2691 case ICMD_IF_LNE: /* ..., value ==> ... */
2692 /* op1 = target JavaVM pc, val.l = constant */
2694 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2697 case ICMD_IF_LGT: /* ..., value ==> ... */
2698 /* op1 = target JavaVM pc, val.l = constant */
2700 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2703 case ICMD_IF_LGE: /* ..., value ==> ... */
2704 /* op1 = target JavaVM pc, val.l = constant */
2706 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
2709 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2710 /* op1 = target JavaVM pc */
2712 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
/* references share the 64-bit compare path with longs */
2715 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2716 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2718 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2721 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2722 /* op1 = target JavaVM pc */
2724 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2727 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2728 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2730 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2733 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2734 /* op1 = target JavaVM pc */
2736 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2739 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2740 /* op1 = target JavaVM pc */
2742 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2745 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2746 /* op1 = target JavaVM pc */
2748 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2751 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2752 /* op1 = target JavaVM pc */
2754 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2757 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2758 /* op1 = target JavaVM pc */
2760 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2763 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2764 /* op1 = target JavaVM pc */
2766 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2769 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2770 /* op1 = target JavaVM pc */
2772 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2775 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2776 /* op1 = target JavaVM pc */
2778 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2781 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
2783 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
/* Branch-free select: when a following ELSE_ICONST exists, load the ELSE */
/* constant into the destination, load the THEN constant into REG_ITMP2,  */
/* TEST the value and conditionally move REG_ITMP2 over the destination.  */
/* (The else-branches of the inner if are elided in this listing.)        */
2786 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2787 /* val.i = constant */
2789 var_to_reg_int(s1, src, REG_ITMP1);
2790 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2791 if (iptr[1].opc == ICMD_ELSE_ICONST) {
/* keep the tested value out of the destination register */
2793 M_INTMOVE(s1, REG_ITMP1);
2796 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2798 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2799 x86_64_testl_reg_reg(cd, s1, s1);
2800 x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2801 store_reg_to_var_int(iptr->dst, d);
2804 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2805 /* val.i = constant */
2807 var_to_reg_int(s1, src, REG_ITMP1);
2808 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2809 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2811 M_INTMOVE(s1, REG_ITMP1);
2814 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2816 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2817 x86_64_testl_reg_reg(cd, s1, s1);
2818 x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2819 store_reg_to_var_int(iptr->dst, d);
2822 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2823 /* val.i = constant */
2825 var_to_reg_int(s1, src, REG_ITMP1);
2826 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2827 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2829 M_INTMOVE(s1, REG_ITMP1);
2832 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2834 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2835 x86_64_testl_reg_reg(cd, s1, s1);
2836 x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2837 store_reg_to_var_int(iptr->dst, d);
2840 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2841 /* val.i = constant */
2843 var_to_reg_int(s1, src, REG_ITMP1);
2844 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2845 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2847 M_INTMOVE(s1, REG_ITMP1);
2850 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2852 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2853 x86_64_testl_reg_reg(cd, s1, s1);
2854 x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2855 store_reg_to_var_int(iptr->dst, d);
2858 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2859 /* val.i = constant */
2861 var_to_reg_int(s1, src, REG_ITMP1);
2862 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2863 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2865 M_INTMOVE(s1, REG_ITMP1);
2868 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2870 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2871 x86_64_testl_reg_reg(cd, s1, s1);
2872 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2873 store_reg_to_var_int(iptr->dst, d);
2876 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2877 /* val.i = constant */
2879 var_to_reg_int(s1, src, REG_ITMP1);
2880 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2881 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2883 M_INTMOVE(s1, REG_ITMP1);
2886 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2888 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2889 x86_64_testl_reg_reg(cd, s1, s1);
2890 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2891 store_reg_to_var_int(iptr->dst, d);
/* Method returns: move the return value into the ABI result register and */
/* fall through to the shared epilogue (the nowperformreturn label itself  */
/* is elided in this listing).                                             */
2895 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2898 var_to_reg_int(s1, src, REG_RESULT)
2899 M_INTMOVE(s1, REG_RESULT);
2900 goto nowperformreturn;
2902 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2904 var_to_reg_int(s1, src, REG_RESULT);
2905 M_INTMOVE(s1, REG_RESULT);
/* verifier: unresolved return-type class needs a patcher */
2907 #ifdef ENABLE_VERIFIER
2909 codegen_addpatchref(cd, cd->mcodeptr,
2910 PATCHER_athrow_areturn,
2911 (unresolved_class *) iptr->val.a, 0);
2913 if (opt_showdisassemble) {
2914 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2917 #endif /* ENABLE_VERIFIER */
2918 goto nowperformreturn;
2920 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2923 var_to_reg_flt(s1, src, REG_FRESULT);
2924 M_FLTMOVE(s1, REG_FRESULT);
2925 goto nowperformreturn;
2927 case ICMD_RETURN: /* ... ==> ... */
2933 p = parentargs_base;
2935 /* call trace function */
/* save both result registers around the builtin_displaymethodstop call */
2937 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2939 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2940 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2942 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
2943 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
/* FRESULT is passed twice — presumably as float and double argument of */
/* the trace function; confirm against builtin_displaymethodstop        */
2944 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2945 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2947 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
2948 x86_64_call_reg(cd, REG_ITMP1);
2950 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2951 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2953 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
/* synchronized method: call builtin_monitorexit on the saved object, */
/* preserving the return value in the monitor stack slot              */
2956 #if defined(USE_THREADS)
2957 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2958 M_ALD(rd->argintregs[0], REG_SP, rd->memuse * 8);
2960 /* we need to save the proper return value */
2961 switch (iptr->opc) {
2965 M_LST(REG_RESULT, REG_SP, rd->memuse * 8);
2969 M_DST(REG_FRESULT, REG_SP, rd->memuse * 8);
2973 M_MOV_IMM((ptrint) builtin_monitorexit, REG_ITMP1);
2976 /* and now restore the proper return value */
2977 switch (iptr->opc) {
2981 M_LLD(REG_RESULT, REG_SP, rd->memuse * 8);
2985 M_DLD(REG_FRESULT, REG_SP, rd->memuse * 8);
2991 /* restore saved registers */
2993 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2994 p--; M_LLD(rd->savintregs[i], REG_SP, p * 8);
2996 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2997 p--; M_DLD(rd->savfltregs[i], REG_SP, p * 8);
3000 /* deallocate stack */
3002 if (parentargs_base)
3003 M_AADD_IMM(parentargs_base * 8, REG_SP);
/* TABLESWITCH: subtract the low bound, unsigned-compare against the     */
/* range (CC_A catches both below-low and above-high after the subtract) */
/* and jump through a jump table built in the data segment.              */
3010 case ICMD_TABLESWITCH: /* ..., index ==> ... */
3015 tptr = (void **) iptr->target;
3017 s4ptr = iptr->val.a;
3018 l = s4ptr[1]; /* low */
3019 i = s4ptr[2]; /* high */
3021 var_to_reg_int(s1, src, REG_ITMP1);
3022 M_INTMOVE(s1, REG_ITMP1);
3024 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
3029 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
3030 x86_64_jcc(cd, X86_64_CC_A, 0);
3032 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
3033 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3035 /* build jump table top down and use address of lowest entry */
3037 /* s4ptr += 3 + i; */
3041 dseg_addtarget(cd, (basicblock *) tptr[0]);
3045 /* length of dataseg after last dseg_addtarget is used by load */
/* the mov imm 0 is patched by dseg_adddata with the dataseg address */
3047 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
3048 dseg_adddata(cd, cd->mcodeptr);
3049 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
3050 x86_64_jmp_reg(cd, REG_ITMP1);
/* LOOKUPSWITCH: linear sequence of compare-and-branch pairs, then an */
/* unconditional jump to the default target                           */
3055 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
3057 s4 i, l, val, *s4ptr;
3060 tptr = (void **) iptr->target;
3062 s4ptr = iptr->val.a;
3063 l = s4ptr[0]; /* default */
3064 i = s4ptr[1]; /* count */
3066 MCODECHECK(8 + ((7 + 6) * i) + 5);
3067 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
3073 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
3074 x86_64_jcc(cd, X86_64_CC_E, 0);
3075 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3078 x86_64_jmp_imm(cd, 0);
3080 tptr = (void **) iptr->target;
3081 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
/* Method invocation: all invoke opcodes share argument marshalling, then */
/* differ only in how the target address ends up in a register. Many      */
/* connective lines (break/else/call emission) are elided in this listing.*/
3086 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
3087 /* op1 = arg count val.a = builtintable entry */
3093 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3094 /* op1 = arg count, val.a = method pointer */
3096 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3097 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
3098 case ICMD_INVOKEINTERFACE:
/* unresolved call: take the descriptor from the unresolved_method */
3103 unresolved_method *um = iptr->target;
3104 md = um->methodref->parseddesc.md;
3106 md = lm->parseddesc;
3110 s3 = md->paramcount;
3112 MCODECHECK((20 * s3) + 128);
3114 /* copy arguments to registers or stack location */
3116 for (s3 = s3 - 1; s3 >= 0; s3--, src = src->prev) {
/* ARGVAR operands already live in their argument location */
3117 if (src->varkind == ARGVAR)
3119 if (IS_INT_LNG_TYPE(src->type)) {
3120 if (!md->params[s3].inmemory) {
3121 s1 = rd->argintregs[md->params[s3].regoff];
3122 var_to_reg_int(d, src, s1);
3125 var_to_reg_int(d, src, REG_ITMP1);
3126 M_LST(d, REG_SP, md->params[s3].regoff * 8);
3130 if (!md->params[s3].inmemory) {
3131 s1 = rd->argfltregs[md->params[s3].regoff];
3132 var_to_reg_flt(d, src, s1);
3135 var_to_reg_flt(d, src, REG_FTMP1);
3136 M_DST(d, REG_SP, md->params[s3].regoff * 8);
/* compute the call target per opcode; d receives the return type */
3141 switch (iptr->opc) {
3143 a = (ptrint) bte->fp;
3144 d = md->returntype.type;
3146 M_MOV_IMM(a, REG_ITMP1);
3149 /* if op1 == true, we need to check for an exception */
3151 if (iptr->op1 == true) {
3154 codegen_addxexceptionrefs(cd, cd->mcodeptr);
/* INVOKESPECIAL: explicit null test on the receiver */
3158 case ICMD_INVOKESPECIAL:
3159 M_TEST(rd->argintregs[0]);
3161 codegen_addxnullrefs(cd, cd->mcodeptr);
3163 /* first argument contains pointer */
3164 /* gen_nullptr_check(rd->argintregs[0]); */
3166 /* access memory for hardware nullptr */
3167 /* x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); */
/* INVOKESTATIC/SPECIAL: direct call through the method's stubroutine; */
/* unresolved targets go through a patcher                              */
3171 case ICMD_INVOKESTATIC:
3173 unresolved_method *um = iptr->target;
3175 codegen_addpatchref(cd, cd->mcodeptr,
3176 PATCHER_invokestatic_special, um, 0);
3178 if (opt_showdisassemble) {
3179 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3183 d = um->methodref->parseddesc.md->returntype.type;
3186 a = (ptrint) lm->stubroutine;
3187 d = lm->parseddesc->returntype.type;
3190 M_MOV_IMM(a, REG_ITMP2);
/* INVOKEVIRTUAL: load vftbl from the receiver and the method entry */
/* from the virtual function table slot                              */
3194 case ICMD_INVOKEVIRTUAL:
3195 gen_nullptr_check(rd->argintregs[0]);
3198 unresolved_method *um = iptr->target;
3200 codegen_addpatchref(cd, cd->mcodeptr,
3201 PATCHER_invokevirtual, um, 0);
3203 if (opt_showdisassemble) {
3204 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3208 d = um->methodref->parseddesc.md->returntype.type;
3211 s1 = OFFSET(vftbl_t, table[0]) +
3212 sizeof(methodptr) * lm->vftblindex;
3213 d = lm->parseddesc->returntype.type;
3216 x86_64_mov_membase_reg(cd, rd->argintregs[0],
3217 OFFSET(java_objectheader, vftbl),
3219 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP1);
/* INVOKEINTERFACE: two indirections — vftbl, then the per-interface */
/* table (negative offset from vftbl), then the method slot          */
3223 case ICMD_INVOKEINTERFACE:
3224 gen_nullptr_check(rd->argintregs[0]);
3227 unresolved_method *um = iptr->target;
3229 codegen_addpatchref(cd, cd->mcodeptr,
3230 PATCHER_invokeinterface, um, 0);
3232 if (opt_showdisassemble) {
3233 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3238 d = um->methodref->parseddesc.md->returntype.type;
3241 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3242 sizeof(methodptr) * lm->class->index;
3244 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3246 d = lm->parseddesc->returntype.type;
3249 M_ALD(REG_ITMP2, rd->argintregs[0],
3250 OFFSET(java_objectheader, vftbl));
3251 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP2);
3252 x86_64_mov_membase32_reg(cd, REG_ITMP2, s2, REG_ITMP1);
/* store the call result into the destination stack slot */
3257 /* d contains return type */
3259 if (d != TYPE_VOID) {
3260 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3261 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3262 M_INTMOVE(REG_RESULT, s1);
3263 store_reg_to_var_int(iptr->dst, s1);
3265 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3266 M_FLTMOVE(REG_FRESULT, s1);
3267 store_reg_to_var_flt(iptr->dst, s1);
/* CHECKCAST: for classes (op1 == 1) the interface-check and class-check   */
/* sizes (s2, s3) are computed up front so that, when the superclass is    */
/* unresolved, a runtime dispatch on ACC_INTERFACE can branch over the     */
/* unneeded variant with fixed displacements. Array casts call the         */
/* arraycheckcast builtin instead.                                         */
3273 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3275 /* op1: 0 == array, 1 == class */
3276 /* val.a: (classinfo *) superclass */
3278 /* superclass is an interface:
3280 * OK if ((sub == NULL) ||
3281 * (sub->vftbl->interfacetablelength > super->index) &&
3282 * (sub->vftbl->interfacetable[-super->index] != NULL));
3284 * superclass is a class:
3286 * OK if ((sub == NULL) || (0
3287 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3288 * super->vftbl->diffval));
3291 if (iptr->op1 == 1) {
3292 /* object type cast-check */
3295 vftbl_t *supervftbl;
3298 super = (classinfo *) iptr->val.a;
3305 superindex = super->index;
3306 supervftbl = super->vftbl;
/* baseval/diffval may be rewritten by the GC/class loader; restart the */
/* thread-critical region around their use                              */
3309 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3310 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3312 var_to_reg_int(s1, src, REG_ITMP1);
3314 /* calculate interface checkcast code size */
3316 s2 = 3; /* mov_membase_reg */
3317 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3319 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub imm32 */ +
3320 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3321 3 /* test */ + 6 /* jcc */;
3324 s2 += (opt_showdisassemble ? 5 : 0);
3326 /* calculate class checkcast code size */
3328 s3 = 3; /* mov_membase_reg */
3329 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3330 s3 += 10 /* mov_imm_reg */ + 3 + 4 /* movl_membase32_reg */;
3333 if (s1 != REG_ITMP1) {
3334 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3335 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3336 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3337 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3343 s3 += 3 + 4 /* movl_membase32_reg */ + 3 /* sub */ +
3344 10 /* mov_imm_reg */ + 3 /* movl_membase_reg */;
3345 CALCOFFSETBYTES(s3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3348 s3 += 3 /* cmp */ + 6 /* jcc */;
3351 s3 += (opt_showdisassemble ? 5 : 0);
3353 /* if class is not resolved, check which code to call */
/* null objectref passes the cast: skip over flag-load + both variants */
3357 M_BEQ(6 + (opt_showdisassemble ? 5 : 0) + 7 + 6 + s2 + 5 + s3);
3359 codegen_addpatchref(cd, cd->mcodeptr,
3360 PATCHER_checkcast_instanceof_flags,
3361 (constant_classref *) iptr->target, 0);
3363 if (opt_showdisassemble) {
3364 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* the 0 immediate is patched with super->flags at resolution time */
3367 M_IMOV_IMM(0, REG_ITMP2); /* super->flags */
3368 M_IAND_IMM(ACC_INTERFACE, REG_ITMP2);
3372 /* interface checkcast code */
3374 if (!super || (super->flags & ACC_INTERFACE)) {
3380 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3383 codegen_addpatchref(cd, cd->mcodeptr,
3384 PATCHER_checkcast_instanceof_interface,
3385 (constant_classref *) iptr->target, 0);
3387 if (opt_showdisassemble) {
3388 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3392 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3393 OFFSET(vftbl_t, interfacetablelength),
3395 /* XXX TWISTI: should this be int arithmetic? */
3396 M_LSUB_IMM32(superindex, REG_ITMP3);
/* interfacetablelength <= superindex, or a NULL table entry, raises */
/* ClassCastException via the stub registered here                   */
3399 codegen_addxcastrefs(cd, cd->mcodeptr);
3400 x86_64_mov_membase32_reg(cd, REG_ITMP2,
3401 OFFSET(vftbl_t, interfacetable[0]) -
3402 superindex * sizeof(methodptr*),
3406 codegen_addxcastrefs(cd, cd->mcodeptr);
3412 /* class checkcast code */
3414 if (!super || !(super->flags & ACC_INTERFACE)) {
3420 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3423 codegen_addpatchref(cd, cd->mcodeptr,
3424 PATCHER_checkcast_class,
3425 (constant_classref *) iptr->target,
3428 if (opt_showdisassemble) {
3429 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3433 M_MOV_IMM((ptrint) supervftbl, REG_ITMP3);
3434 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3435 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3437 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3438 OFFSET(vftbl_t, baseval),
3440 /* if (s1 != REG_ITMP1) { */
3441 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3442 /* OFFSET(vftbl_t, baseval), */
3444 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3445 /* OFFSET(vftbl_t, diffval), */
3447 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3448 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3450 /* x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP1, REG_ITMP2); */
/* subtype test: 0 <= (sub.baseval - super.baseval) <= super.diffval, */
/* done with one unsigned compare after the subtraction               */
3453 x86_64_movl_membase32_reg(cd, REG_ITMP3,
3454 OFFSET(vftbl_t, baseval),
3456 M_LSUB(REG_ITMP3, REG_ITMP2);
3457 M_MOV_IMM((ptrint) supervftbl, REG_ITMP3);
3458 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3460 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3461 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3463 M_CMP(REG_ITMP3, REG_ITMP2);
3464 M_BA(0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3465 codegen_addxcastrefs(cd, cd->mcodeptr);
3467 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3470 /* array type cast-check */
3472 var_to_reg_int(s1, src, REG_ITMP1);
3473 M_INTMOVE(s1, rd->argintregs[0]);
3475 if (iptr->val.a == NULL) {
3476 codegen_addpatchref(cd, cd->mcodeptr,
3477 PATCHER_builtin_arraycheckcast,
3478 (constant_classref *) iptr->target, 0);
3480 if (opt_showdisassemble) {
3481 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3485 M_MOV_IMM((ptrint) iptr->val.a, rd->argintregs[1]);
3486 M_MOV_IMM((ptrint) BUILTIN_arraycheckcast, REG_ITMP1);
3490 codegen_addxcastrefs(cd, cd->mcodeptr);
3492 var_to_reg_int(s1, src, REG_ITMP1);
3493 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
3496 store_reg_to_var_int(iptr->dst, d);
/* INSTANCEOF: same structure as CHECKCAST but produces a 0/1 result in d  */
/* via SETcc instead of throwing. Code sizes s2/s3 are precomputed so the  */
/* runtime ACC_INTERFACE dispatch can skip the unused variant.             */
3499 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3501 /* op1: 0 == array, 1 == class */
3502 /* val.a: (classinfo *) superclass */
3504 /* superclass is an interface:
3506 * return (sub != NULL) &&
3507 * (sub->vftbl->interfacetablelength > super->index) &&
3508 * (sub->vftbl->interfacetable[-super->index] != NULL);
3510 * superclass is a class:
3512 * return ((sub != NULL) && (0
3513 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3514 * super->vftbl->diffvall));
3519 vftbl_t *supervftbl;
3522 super = (classinfo *) iptr->val.a;
3529 superindex = super->index;
3530 supervftbl = super->vftbl;
3533 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3534 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3537 var_to_reg_int(s1, src, REG_ITMP1);
3538 d = reg_of_var(rd, iptr->dst, REG_ITMP2);
/* keep the tested value clear of the destination register */
3540 M_INTMOVE(s1, REG_ITMP1);
3544 /* calculate interface instanceof code size */
3546 s2 = 3; /* mov_membase_reg */
3547 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3548 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub_imm32 */ +
3549 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3550 3 /* test */ + 4 /* setcc */;
3553 s2 += (opt_showdisassemble ? 5 : 0);
3555 /* calculate class instanceof code size */
3557 s3 = 3; /* mov_membase_reg */
3558 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3559 s3 += 10; /* mov_imm_reg */
3560 s3 += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3561 CALCOFFSETBYTES(s3, REG_ITMP1, OFFSET(vftbl_t, baseval));
3562 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3563 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, baseval));
3564 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3565 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3566 s3 += 3 /* sub */ + 3 /* xor */ + 3 /* cmp */ + 4 /* setcc */;
3569 s3 += (opt_showdisassemble ? 5 : 0);
/* default result is false */
3571 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3573 /* if class is not resolved, check which code to call */
3576 x86_64_test_reg_reg(cd, s1, s1);
3577 x86_64_jcc(cd, X86_64_CC_Z, (6 + (opt_showdisassemble ? 5 : 0) +
3578 7 + 6 + s2 + 5 + s3));
3580 codegen_addpatchref(cd, cd->mcodeptr,
3581 PATCHER_checkcast_instanceof_flags,
3582 (constant_classref *) iptr->target, 0);
3584 if (opt_showdisassemble) {
3585 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* the 0 immediate is patched with super->flags at resolution time */
3588 x86_64_movl_imm_reg(cd, 0, REG_ITMP3); /* super->flags */
3589 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP3);
3590 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3593 /* interface instanceof code */
3595 if (!super || (super->flags & ACC_INTERFACE)) {
3597 x86_64_test_reg_reg(cd, s1, s1);
3598 x86_64_jcc(cd, X86_64_CC_Z, s2);
3601 x86_64_mov_membase_reg(cd, s1,
3602 OFFSET(java_objectheader, vftbl),
3605 codegen_addpatchref(cd, cd->mcodeptr,
3606 PATCHER_checkcast_instanceof_interface,
3607 (constant_classref *) iptr->target, 0);
3609 if (opt_showdisassemble) {
3610 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3614 x86_64_movl_membase32_reg(cd, REG_ITMP1,
3615 OFFSET(vftbl_t, interfacetablelength),
3617 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3618 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
/* skip the table load + setcc when the index is out of range (d stays 0) */
3620 a = 3 + 4 /* mov_membase32_reg */ + 3 /* test */ + 4 /* setcc */;
3622 x86_64_jcc(cd, X86_64_CC_LE, a);
3623 x86_64_mov_membase32_reg(cd, REG_ITMP1,
3624 OFFSET(vftbl_t, interfacetable[0]) -
3625 superindex * sizeof(methodptr*),
3627 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3628 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3631 x86_64_jmp_imm(cd, s3);
3634 /* class instanceof code */
3636 if (!super || !(super->flags & ACC_INTERFACE)) {
3638 x86_64_test_reg_reg(cd, s1, s1);
3639 x86_64_jcc(cd, X86_64_CC_E, s3);
3642 x86_64_mov_membase_reg(cd, s1,
3643 OFFSET(java_objectheader, vftbl),
3647 codegen_addpatchref(cd, cd->mcodeptr,
3648 PATCHER_instanceof_class,
3649 (constant_classref *) iptr->target, 0);
3651 if (opt_showdisassemble) {
3652 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3656 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP2);
3657 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3658 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3660 x86_64_movl_membase_reg(cd, REG_ITMP1,
3661 OFFSET(vftbl_t, baseval),
3663 x86_64_movl_membase_reg(cd, REG_ITMP2,
3664 OFFSET(vftbl_t, diffval),
3666 x86_64_movl_membase_reg(cd, REG_ITMP2,
3667 OFFSET(vftbl_t, baseval),
3669 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3670 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
/* unsigned (sub.baseval - super.baseval) <= diffval  =>  result 1 */
3672 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
3673 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d); /* may be REG_ITMP2 */
3674 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP1);
3675 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3677 store_reg_to_var_int(iptr->dst, d);
3681 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3682 /* op1 = dimension, val.a = array descriptor */
3684 /* check for negative sizes and copy sizes to stack if necessary */
3686 MCODECHECK((10 * 4 * iptr->op1) + 5 + 10 * 8);
3688 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3689 /* copy SAVEDVAR sizes to stack */
3691 if (src->varkind != ARGVAR) {
3692 var_to_reg_int(s2, src, REG_ITMP1);
3693 M_LST(s2, REG_SP, s1 * 8);
3697 /* is a patcher function set? */
3700 codegen_addpatchref(cd, cd->mcodeptr,
3701 (functionptr) (ptrint) iptr->target,
3704 if (opt_showdisassemble) {
3705 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3711 a = (ptrint) iptr->val.a;
3714 /* a0 = dimension count */
3716 M_MOV_IMM(iptr->op1, rd->argintregs[0]);
3718 /* a1 = arrayvftbl */
3720 M_MOV_IMM((ptrint) iptr->val.a, rd->argintregs[1]);
3722 /* a2 = pointer to dimensions = stack pointer */
3724 M_MOV(REG_SP, rd->argintregs[2]);
3726 M_MOV_IMM((ptrint) BUILTIN_multianewarray, REG_ITMP1);
3729 /* check for exception before result assignment */
3733 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3735 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3736 M_INTMOVE(REG_RESULT, s1);
3737 store_reg_to_var_int(iptr->dst, s1);
3741 *exceptionptr = new_internalerror("Unknown ICMD %d", iptr->opc);
3745 } /* for instruction */
3747 /* copy values to interface registers */
3749 src = bptr->outstack;
3750 len = bptr->outdepth;
3757 if ((src->varkind != STACKVAR)) {
3759 if (IS_FLT_DBL_TYPE(s2)) {
3760 var_to_reg_flt(s1, src, REG_FTMP1);
3761 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3762 M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3765 x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3769 var_to_reg_int(s1, src, REG_ITMP1);
3770 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3771 M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3774 x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3781 /* At the end of a basic block we may have to append some nops,
3782 because the patcher stub calling code might be longer than the
3783 actual instruction. So codepatching does not change the
3784 following block unintentionally. */
3786 if (cd->mcodeptr < cd->lastmcodeptr) {
3787 while (cd->mcodeptr < cd->lastmcodeptr) {
3792 } /* if (bptr -> flags >= BBREACHED) */
3793 } /* for basic block */
3795 codegen_createlinenumbertable(cd);
3802 /* generate ArithmeticException stubs */
3806 for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
3807 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3809 cd->mcodeptr - cd->mcodebase);
3813 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
3814 dseg_adddata(cd, cd->mcodeptr);
3815 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
3817 if (xcodeptr != NULL) {
3818 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
3821 xcodeptr = cd->mcodeptr;
3823 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3824 M_MOV(REG_SP, rd->argintregs[1]);
3825 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3826 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
3828 M_ASUB_IMM(2 * 8, REG_SP);
3829 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
3831 M_MOV_IMM((ptrint) stacktrace_inline_arithmeticexception,
3835 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
3836 M_AADD_IMM(2 * 8, REG_SP);
3838 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
3843 /* generate ArrayIndexOutOfBoundsException stubs */
3847 for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3848 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3850 cd->mcodeptr - cd->mcodebase);
3854 /* move index register into REG_ITMP1 */
3856 M_MOV(bref->reg, REG_ITMP1); /* 3 bytes */
3858 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
3859 dseg_adddata(cd, cd->mcodeptr);
3860 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
3862 if (xcodeptr != NULL) {
3863 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
3866 xcodeptr = cd->mcodeptr;
3868 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3869 M_MOV(REG_SP, rd->argintregs[1]);
3870 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3871 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
3872 M_MOV(REG_ITMP1, rd->argintregs[4]);
3874 M_ASUB_IMM(2 * 8, REG_SP);
3875 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
3877 M_MOV_IMM((ptrint) stacktrace_inline_arrayindexoutofboundsexception,
3881 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
3882 M_AADD_IMM(2 * 8, REG_SP);
3884 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
3889 /* generate ArrayStoreException stubs */
3893 for (bref = cd->xstorerefs; bref != NULL; bref = bref->next) {
3894 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3896 cd->mcodeptr - cd->mcodebase);
3900 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
3901 dseg_adddata(cd, cd->mcodeptr);
3902 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
3904 if (xcodeptr != NULL) {
3905 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
3908 xcodeptr = cd->mcodeptr;
3910 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3911 M_MOV(REG_SP, rd->argintregs[1]);
3912 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3913 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
3915 M_ASUB_IMM(2 * 8, REG_SP);
3916 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
3918 M_MOV_IMM((ptrint) stacktrace_inline_arraystoreexception,
3922 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
3923 M_AADD_IMM(2 * 8, REG_SP);
3925 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
3930 /* generate ClassCastException stubs */
3934 for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3935 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3937 cd->mcodeptr - cd->mcodebase);
3941 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
3942 dseg_adddata(cd, cd->mcodeptr);
3943 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
3945 if (xcodeptr != NULL) {
3946 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
3949 xcodeptr = cd->mcodeptr;
3951 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3952 M_MOV(REG_SP, rd->argintregs[1]);
3953 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3954 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
3956 M_ASUB_IMM(2 * 8, REG_SP);
3957 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
3959 M_MOV_IMM((ptrint) stacktrace_inline_classcastexception, REG_ITMP3);
3962 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
3963 M_AADD_IMM(2 * 8, REG_SP);
3965 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
3970 /* generate NullpointerException stubs */
3974 for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
3975 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3977 cd->mcodeptr - cd->mcodebase);
3981 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
3982 dseg_adddata(cd, cd->mcodeptr);
3983 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
3985 if (xcodeptr != NULL) {
3986 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
3989 xcodeptr = cd->mcodeptr;
3991 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3992 M_MOV(REG_SP, rd->argintregs[1]);
3993 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3994 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
3996 M_ASUB_IMM(2 * 8, REG_SP);
3997 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
3999 M_MOV_IMM((ptrint) stacktrace_inline_nullpointerexception,
4003 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
4004 M_AADD_IMM(2 * 8, REG_SP);
4006 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
4011 /* generate exception check stubs */
4015 for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
4016 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4018 cd->mcodeptr - cd->mcodebase);
4022 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
4023 dseg_adddata(cd, cd->mcodeptr);
4024 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
4026 if (xcodeptr != NULL) {
4027 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
4030 xcodeptr = cd->mcodeptr;
4032 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
4033 M_MOV(REG_SP, rd->argintregs[1]);
4034 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
4035 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
4037 M_ASUB_IMM(2 * 8, REG_SP);
4038 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
4040 M_MOV_IMM((ptrint) stacktrace_inline_fillInStackTrace, REG_ITMP3);
4043 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
4044 M_AADD_IMM(2 * 8, REG_SP);
4046 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
4051 /* generate code patching stub call code */
4058 tmpcd = DNEW(codegendata);
4060 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
4061 /* check size of code segment */
4065 /* Get machine code which is patched back in later. A */
4066 /* `call rel32' is 5 bytes long (but read 8 bytes). */
4068 xcodeptr = cd->mcodebase + pref->branchpos;
4069 mcode = *((ptrint *) xcodeptr);
4071 /* patch in `call rel32' to call the following code */
4073 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4074 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4076 /* move pointer to java_objectheader onto stack */
4078 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4079 /* create a virtual java_objectheader */
4081 (void) dseg_addaddress(cd, get_dummyLR()); /* monitorPtr */
4082 a = dseg_addaddress(cd, NULL); /* vftbl */
4084 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP3);
4090 /* move machine code bytes and classinfo pointer into registers */
4092 M_MOV_IMM((ptrint) mcode, REG_ITMP3);
4094 M_MOV_IMM((ptrint) pref->ref, REG_ITMP3);
4096 M_MOV_IMM((ptrint) pref->disp, REG_ITMP3);
4099 M_MOV_IMM((ptrint) pref->patcher, REG_ITMP3);
4102 M_MOV_IMM((ptrint) asm_wrapper_patcher, REG_ITMP3);
4108 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
4110 /* everything's ok */
4116 /* createcompilerstub **********************************************************
4118 Creates a stub routine which calls the compiler.
4120 *******************************************************************************/
4122 #define COMPILERSTUB_SIZE 23
4124 u1 *createcompilerstub(methodinfo *m)
/* Allocates a small trampoline (COMPILERSTUB_SIZE bytes) that loads the
   methodinfo pointer into REG_ITMP1 and transfers control to
   asm_call_jit_compiler, which JIT-compiles m on first invocation.
   A temporary codegendata on the dump area is used while emitting. */
4126 u1 *s; /* memory to hold the stub */
4130 s = CNEW(u1, COMPILERSTUB_SIZE);
4132 /* mark start of dump memory area */
4134 dumpsize = dump_size();
/* temporary codegendata, only alive while the stub is emitted; it is
   reclaimed by dump_release() below */
4136 cd = DNEW(codegendata);
4139 /* code for the stub */
4141 M_MOV_IMM((ptrint) m, REG_ITMP1); /* pass method to compiler */
/* load the compiler entry; NOTE(review): the jump through REG_ITMP3 is
   not visible here — confirm it follows this instruction */
4142 M_MOV_IMM((ptrint) asm_call_jit_compiler, REG_ITMP3);
4145 #if defined(STATISTICS)
4147 count_cstub_len += COMPILERSTUB_SIZE;
4150 /* release dump area */
4152 dump_release(dumpsize);
4158 /* createnativestub ************************************************************
4160 Creates a stub routine which calls a native method.
4162 *******************************************************************************/
4164 u1 *createnativestub(functionptr f, methodinfo *m, codegendata *cd,
4165 registerdata *rd, methoddesc *nmd)
/* Builds a stub bridging the JIT calling convention to the native (JNI)
   convention for method m: saves the Java argument registers, creates a
   stackframeinfo, reshuffles arguments into the positions described by
   nmd, calls the native function f, and checks for a pending exception
   before returning.  Patcher call stubs are appended at the end. */
4168 s4 stackframesize; /* size of stackframe if needed */
4170 s4 i, j; /* count variables */
4174 /* initialize variables */
/* static natives receive two hidden leading arguments (JNIEnv* and the
   class), non-static ones only JNIEnv* — see the ACC_STATIC handling
   further below */
4177 nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;
4180 /* calculate stack frame size */
4183 sizeof(stackframeinfo) / SIZEOF_VOID_P +
4184 sizeof(localref_table) / SIZEOF_VOID_P +
4185 INT_ARG_CNT + FLT_ARG_CNT + 1 + /* + 1 for function address */
/* an odd slot count plus the pushed return address keeps RSP 16-byte
   aligned at the native call site — TODO confirm the (not shown)
   increment under this test */
4188 if (!(stackframesize & 0x1)) /* keep stack 16-byte aligned */
4192 /* create method header */
4194 (void) dseg_addaddress(cd, m); /* MethodPointer */
4195 (void) dseg_adds4(cd, stackframesize * 8); /* FrameSize */
4196 (void) dseg_adds4(cd, 0); /* IsSync */
4197 (void) dseg_adds4(cd, 0); /* IsLeaf */
4198 (void) dseg_adds4(cd, 0); /* IntSave */
4199 (void) dseg_adds4(cd, 0); /* FltSave */
4200 (void) dseg_addlinenumbertablesize(cd);
4201 (void) dseg_adds4(cd, 0); /* ExTableSize */
4204 /* initialize mcode variables */
4206 cd->mcodeptr = (u1 *) cd->mcodebase;
4207 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
4210 /* generate stub code */
4212 M_ASUB_IMM(stackframesize * 8, REG_SP);
4215 /* save integer and float argument registers */
/* slot 0 is reserved (methodinfo is stored there for the trace call
   below), hence the (1 + i) offsets */
4217 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++)
4218 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4219 M_LST(rd->argintregs[j++], REG_SP, (1 + i) * 8);
4221 for (i = 0, j = 0; i < md->paramcount && j < FLT_ARG_CNT; i++)
4222 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4223 M_DST(rd->argfltregs[j++], REG_SP, (1 + INT_ARG_CNT + i) * 8);
4225 /* show integer hex code for float arguments */
4227 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++) {
4228 /* if the paramtype is a float, we have to right shift all
4229 following integer registers */
4231 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type)) {
4232 for (s1 = INT_ARG_CNT - 2; s1 >= i; s1--)
4233 M_MOV(rd->argintregs[s1], rd->argintregs[s1 + 1]);
/* move the float's raw bits into the integer register so the trace
   routine can print them */
4235 x86_64_movd_freg_reg(cd, rd->argfltregs[j], rd->argintregs[i]);
/* pass the methodinfo and call builtin_trace_args for call tracing */
4240 M_MOV_IMM((ptrint) m, REG_ITMP1);
4241 M_AST(REG_ITMP1, REG_SP, 0 * 8);
4242 M_MOV_IMM((ptrint) builtin_trace_args, REG_ITMP1);
4245 /* restore integer and float argument registers */
4247 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++)
4248 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4249 M_LLD(rd->argintregs[j++], REG_SP, (1 + i) * 8);
4251 for (i = 0, j = 0; i < md->paramcount && j < FLT_ARG_CNT; i++)
4252 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4253 M_DLD(rd->argfltregs[j++], REG_SP, (1 + INT_ARG_CNT + i) * 8);
4257 /* get function address (this must happen before the stackframeinfo) */
/* without a static VM the native address may still be unresolved: emit
   a patcher reference so PATCHER_resolve_native fills it in lazily */
4259 #if !defined(ENABLE_STATICVM)
4261 codegen_addpatchref(cd, cd->mcodeptr, PATCHER_resolve_native, m, 0);
4263 if (opt_showdisassemble) {
4264 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
4269 M_MOV_IMM((ptrint) f, REG_ITMP3);
4272 /* save integer and float argument registers */
4274 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++)
4275 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4276 M_LST(rd->argintregs[j++], REG_SP, i * 8);
4278 for (i = 0, j = 0; i < md->paramcount && j < FLT_ARG_CNT; i++)
4279 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4280 M_DST(rd->argfltregs[j++], REG_SP, (INT_ARG_CNT + i) * 8);
/* preserve the resolved function address across the helper call */
4282 M_AST(REG_ITMP3, REG_SP, (INT_ARG_CNT + FLT_ARG_CNT) * 8);
4284 /* create dynamic stack info */
/* arguments: frame top, current code position (RIP-relative lea), and
   two frame-derived pointers — exact a2/a3 semantics are defined by
   codegen_start_native_call; verify against its declaration */
4286 M_ALEA(REG_SP, stackframesize * 8, rd->argintregs[0]);
4287 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[1]);
4288 M_ALEA(REG_SP, stackframesize * 8 + SIZEOF_VOID_P, rd->argintregs[2]);
4289 M_ALD(rd->argintregs[3], REG_SP, stackframesize * 8);
4290 M_MOV_IMM((ptrint) codegen_start_native_call, REG_ITMP1);
4294 x86_64_mov_imm_reg(cd, (ptrint) nativeinvokation, REG_ITMP1);
4295 x86_64_call_reg(cd, REG_ITMP1);
4298 /* restore integer and float argument registers */
4300 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++)
4301 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4302 M_LLD(rd->argintregs[j++], REG_SP, i * 8);
4304 for (i = 0, j = 0; i < md->paramcount && j < FLT_ARG_CNT; i++)
4305 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4306 M_DLD(rd->argfltregs[j++], REG_SP, (INT_ARG_CNT + i) * 8);
4308 M_ALD(REG_ITMP3, REG_SP, (INT_ARG_CNT + FLT_ARG_CNT) * 8);
4311 /* copy or spill arguments to new locations */
/* walk the Java arguments last-to-first; each moves to native position
   j = i + nativeparams, shifted past the hidden env/class arguments */
4313 for (i = md->paramcount - 1, j = i + nativeparams; i >= 0; i--, j--) {
4314 t = md->paramtypes[i].type;
4316 if (IS_INT_LNG_TYPE(t)) {
4317 if (!md->params[i].inmemory) {
4318 s1 = rd->argintregs[md->params[i].regoff];
4320 if (!nmd->params[j].inmemory) {
4321 s2 = rd->argintregs[nmd->params[j].regoff];
4325 s2 = nmd->params[j].regoff;
4326 M_LST(s1, REG_SP, s2 * 8);
4330 s1 = md->params[i].regoff + stackframesize + 1; /* + 1 (RA) */
4331 s2 = nmd->params[j].regoff;
4332 M_LLD(REG_ITMP1, REG_SP, s1 * 8);
4333 M_LST(REG_ITMP1, REG_SP, s2 * 8);
4337 /* We only copy spilled float arguments, as the float argument */
4338 /* registers keep unchanged. */
4340 if (md->params[i].inmemory) {
4341 s1 = md->params[i].regoff + stackframesize + 1; /* + 1 (RA) */
4342 s2 = nmd->params[j].regoff;
4343 M_DLD(REG_FTMP1, REG_SP, s1 * 8);
4344 M_DST(REG_FTMP1, REG_SP, s2 * 8);
4349 /* put class into second argument register */
/* hidden jclass argument for static natives (JNI convention) */
4351 if (m->flags & ACC_STATIC)
4352 M_MOV_IMM((ptrint) m->class, rd->argintregs[1]);
4354 /* put env into first argument register */
4356 M_MOV_IMM((ptrint) &env, rd->argintregs[0]);
4358 /* do the native function call */
4362 /* save return value */
/* result is spilled to slot 0 so the cleanup helper calls below cannot
   clobber it */
4364 if (md->returntype.type != TYPE_VOID) {
4365 if (IS_INT_LNG_TYPE(md->returntype.type))
4366 M_LST(REG_RESULT, REG_SP, 0 * 8);
4368 M_DST(REG_FRESULT, REG_SP, 0 * 8);
4371 /* remove native stackframe info */
4373 M_ALEA(REG_SP, stackframesize * 8, rd->argintregs[0]);
4374 M_MOV_IMM((ptrint) codegen_finish_native_call, REG_ITMP1);
4377 /* generate call trace */
4380 /* just restore the value we need, don't care about the other */
4382 if (md->returntype.type != TYPE_VOID) {
4383 if (IS_INT_LNG_TYPE(md->returntype.type))
4384 M_LLD(REG_RESULT, REG_SP, 0 * 8);
4386 M_DLD(REG_FRESULT, REG_SP, 0 * 8);
/* builtin_displaymethodstop(m, int-result, flt-result, flt-result) */
4389 M_MOV_IMM((ptrint) m, rd->argintregs[0]);
4390 M_MOV(REG_RESULT, rd->argintregs[1]);
4391 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
4392 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
4394 M_MOV_IMM((ptrint) builtin_displaymethodstop, REG_ITMP1);
4398 /* check for exception */
/* load the exception-pointer cell: via builtin_get_exceptionptrptr()
   with native threads, or the global _no_threads_exceptionptr without */
4400 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4401 M_MOV_IMM((ptrint) builtin_get_exceptionptrptr, REG_ITMP3);
4404 M_MOV_IMM((ptrint) &_no_threads_exceptionptr, REG_RESULT);
4406 M_ALD(REG_ITMP2, REG_RESULT, 0);
4408 /* restore return value */
4410 if (md->returntype.type != TYPE_VOID) {
4411 if (IS_INT_LNG_TYPE(md->returntype.type))
4412 M_LLD(REG_RESULT, REG_SP, 0 * 8);
4414 M_DLD(REG_FRESULT, REG_SP, 0 * 8);
4417 /* test for exception */
4422 /* remove stackframe */
4424 M_AADD_IMM(stackframesize * 8, REG_SP);
4428 /* handle exception */
/* fetch the pending exception into REG_ITMP1_XPTR and clear the cell */
4430 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4431 M_LST(REG_ITMP2, REG_SP, 0 * 8);
4432 M_MOV_IMM((ptrint) builtin_get_exceptionptrptr, REG_ITMP3);
4434 M_AST_IMM32(0, REG_RESULT, 0); /* clear exception pointer */
4435 M_LLD(REG_ITMP1_XPTR, REG_SP, 0 * 8);
4437 M_MOV(REG_ITMP3, REG_ITMP1_XPTR);
4438 M_MOV_IMM((ptrint) &_no_threads_exceptionptr, REG_ITMP3);
4439 M_AST_IMM32(0, REG_ITMP3, 0); /* clear exception pointer */
4442 /* remove stackframe */
4444 M_AADD_IMM(stackframesize * 8, REG_SP);
4446 M_LLD(REG_ITMP2_XPC, REG_SP, 0 * 8); /* get return address from stack */
/* rewind XPC into the faulting call instruction — NOTE(review): the
   3-byte offset matches a `callq *reg` encoding; confirm */
4447 M_ASUB_IMM(3, REG_ITMP2_XPC); /* callq */
4449 M_MOV_IMM((ptrint) asm_handle_nat_exception, REG_ITMP3);
4453 /* process patcher calls **************************************************/
4460 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* dummy codegendata: used only so x86_64_call_imm can emit the
   back-patched `call rel32' directly over the stub call site */
4464 tmpcd = DNEW(codegendata);
4466 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
4467 /* Get machine code which is patched back in later. A */
4468 /* `call rel32' is 5 bytes long (but read 8 bytes). */
4470 xcodeptr = cd->mcodebase + pref->branchpos;
4471 mcode = *((ptrint *) xcodeptr);
4473 /* patch in `call rel32' to call the following code */
4475 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4476 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4478 /* move pointer to java_objectheader onto stack */
4480 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4481 /* create a virtual java_objectheader */
4483 (void) dseg_addaddress(cd, get_dummyLR()); /* monitorPtr */
4484 disp = dseg_addaddress(cd, NULL); /* vftbl */
4486 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP3);
4492 /* move machine code bytes and classinfo pointer into registers */
/* saved original bytes, reference, displacement and patcher function
   are handed to asm_wrapper_patcher via REG_ITMP3 (pushed between the
   moves; pushes not visible here) */
4494 M_MOV_IMM((ptrint) mcode, REG_ITMP3);
4496 M_MOV_IMM((ptrint) pref->ref, REG_ITMP3);
4498 M_MOV_IMM((ptrint) pref->disp, REG_ITMP3);
4501 M_MOV_IMM((ptrint) pref->patcher, REG_ITMP3);
4504 M_MOV_IMM((ptrint) asm_wrapper_patcher, REG_ITMP3);
/* finalize: flush code, resolve data segment, set m->entrypoint */
4509 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
4511 return m->entrypoint;
4516 * These are local overrides for various environment variables in Emacs.
4517 * Please do not remove this and leave it at the end of the file, where
4518 * Emacs will automagically detect them.
4519 * ---------------------------------------------------------------------
4522 * indent-tabs-mode: t