1 /* src/vm/jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996-2005 R. Grafl, A. Krall, C. Kruegel, C. Oates,
4 R. Obermaisser, M. Platter, M. Probst, S. Ring, E. Steiner,
5 C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich, J. Wenninger,
6 Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
25 Contact: cacao@complang.tuwien.ac.at
27 Authors: Andreas Krall
30 Changes: Christian Ullrich
32 $Id: codegen.c 4173 2006-01-12 22:38:37Z twisti $
46 #include "vm/jit/x86_64/arch.h"
47 #include "vm/jit/x86_64/codegen.h"
48 #include "vm/jit/x86_64/emitfuncs.h"
50 #include "cacao/cacao.h"
51 #include "native/native.h"
52 #include "vm/builtin.h"
53 #include "vm/exceptions.h"
54 #include "vm/global.h"
55 #include "vm/loader.h"
56 #include "vm/options.h"
57 #include "vm/statistics.h"
58 #include "vm/stringlocal.h"
59 #include "vm/jit/asmpart.h"
60 #include "vm/jit/codegen-common.h"
61 #include "vm/jit/dseg.h"
62 #include "vm/jit/jit.h"
63 #include "vm/jit/methodheader.h"
64 #include "vm/jit/parse.h"
65 #include "vm/jit/patcher.h"
66 #include "vm/jit/reg.h"
68 #if defined(ENABLE_LSRA)
69 # include "vm/jit/allocator/lsra.h"
75 /* codegen *********************************************************************
77 Generates machine code.
79 *******************************************************************************/
81 bool codegen(methodinfo *m, codegendata *cd, registerdata *rd)
83 s4 len, s1, s2, s3, d, disp;
92 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
93 builtintable_entry *bte;
96 /* prevent compiler warnings */
108 /* space to save used callee saved registers */
110 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
111 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
113 parentargs_base = rd->memuse + savedregs_num;
115 #if defined(USE_THREADS)
116 /* space to save argument of monitor_enter */
118 if (checksync && (m->flags & ACC_SYNCHRONIZED))
122 /* Keep stack of non-leaf functions 16-byte aligned for calls into native */
123 /* code e.g. libc or jni (alignment problems with movaps). */
125 if (!m->isleafmethod || runverbose)
126 parentargs_base |= 0x1;
128 /* create method header */
130 (void) dseg_addaddress(cd, m); /* MethodPointer */
131 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
133 #if defined(USE_THREADS)
134 /* IsSync contains the offset relative to the stack pointer for the
135 argument of monitor_exit used in the exception handler. Since the
136 offset could be zero and give a wrong meaning of the flag it is
140 if (checksync && (m->flags & ACC_SYNCHRONIZED))
141 (void) dseg_adds4(cd, (rd->memuse + 1) * 8); /* IsSync */
144 (void) dseg_adds4(cd, 0); /* IsSync */
146 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
147 (void) dseg_adds4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
148 (void) dseg_adds4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
150 (void) dseg_addlinenumbertablesize(cd);
152 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
154 /* create exception table */
156 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
157 dseg_addtarget(cd, ex->start);
158 dseg_addtarget(cd, ex->end);
159 dseg_addtarget(cd, ex->handler);
160 (void) dseg_addaddress(cd, ex->catchtype.cls);
163 /* initialize mcode variables */
165 cd->mcodeptr = (u1 *) cd->mcodebase;
166 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
169 /* initialize the last patcher pointer */
171 cd->lastmcodeptr = cd->mcodeptr;
173 /* create stack frame (if necessary) */
176 M_ASUB_IMM(parentargs_base * 8, REG_SP);
178 /* save used callee saved registers */
181 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
182 p--; M_LST(rd->savintregs[i], REG_SP, p * 8);
184 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
185 p--; M_DST(rd->savfltregs[i], REG_SP, p * 8);
188 /* take arguments out of register or stack frame */
192 for (p = 0, l = 0; p < md->paramcount; p++) {
193 t = md->paramtypes[p].type;
194 var = &(rd->locals[l][t]);
196 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
200 s1 = md->params[p].regoff;
201 if (IS_INT_LNG_TYPE(t)) { /* integer args */
202 s2 = rd->argintregs[s1];
203 if (!md->params[p].inmemory) { /* register arguments */
204 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
205 M_INTMOVE(s2, var->regoff);
207 } else { /* reg arg -> spilled */
208 M_LST(s2, REG_SP, var->regoff * 8);
211 } else { /* stack arguments */
212 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
213 /* + 8 for return address */
214 M_LLD(var->regoff, REG_SP, (parentargs_base + s1) * 8 + 8);
216 } else { /* stack arg -> spilled */
217 var->regoff = parentargs_base + s1 + 1;
221 } else { /* floating args */
222 if (!md->params[p].inmemory) { /* register arguments */
223 s2 = rd->argfltregs[s1];
224 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
225 M_FLTMOVE(s2, var->regoff);
227 } else { /* reg arg -> spilled */
228 M_DST(s2, REG_SP, var->regoff * 8);
231 } else { /* stack arguments */
232 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
233 M_DLD(var->regoff, REG_SP, (parentargs_base + s1) * 8 + 8);
236 var->regoff = parentargs_base + s1 + 1;
242 /* save monitorenter argument */
244 #if defined(USE_THREADS)
245 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
246 /* stack offset for monitor argument */
251 M_LSUB_IMM((INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_SP);
253 for (p = 0; p < INT_ARG_CNT; p++)
254 M_LST(rd->argintregs[p], REG_SP, p * 8);
256 for (p = 0; p < FLT_ARG_CNT; p++)
257 M_DST(rd->argfltregs[p], REG_SP, (INT_ARG_CNT + p) * 8);
259 s1 += INT_ARG_CNT + FLT_ARG_CNT;
262 /* decide which monitor enter function to call */
264 if (m->flags & ACC_STATIC) {
265 x86_64_mov_imm_reg(cd, (ptrint) m->class, REG_ITMP1);
266 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, s1 * 8);
267 M_INTMOVE(REG_ITMP1, rd->argintregs[0]);
268 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_staticmonitorenter, REG_ITMP1);
269 x86_64_call_reg(cd, REG_ITMP1);
272 x86_64_test_reg_reg(cd, rd->argintregs[0], rd->argintregs[0]);
273 x86_64_jcc(cd, X86_64_CC_Z, 0);
274 codegen_addxnullrefs(cd, cd->mcodeptr);
275 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, s1 * 8);
276 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_monitorenter, REG_ITMP1);
277 x86_64_call_reg(cd, REG_ITMP1);
281 for (p = 0; p < INT_ARG_CNT; p++)
282 M_LLD(rd->argintregs[p], REG_SP, p * 8);
284 for (p = 0; p < FLT_ARG_CNT; p++)
285 M_DLD(rd->argfltregs[p], REG_SP, (INT_ARG_CNT + p) * 8);
287 M_LADD_IMM((INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_SP);
292 /* Copy argument registers to stack and call trace function with pointer */
293 /* to arguments on stack. */
295 if (runverbose || opt_stat) {
296 M_LSUB_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT + 1 + 1) * 8, REG_SP);
298 /* save integer argument registers */
300 for (p = 0; p < INT_ARG_CNT; p++)
301 M_LST(rd->argintregs[p], REG_SP, (1 + p) * 8);
303 /* save float argument registers */
305 for (p = 0; p < FLT_ARG_CNT; p++)
306 M_DST(rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
308 /* save temporary registers for leaf methods */
310 if (m->isleafmethod) {
311 for (p = 0; p < INT_TMP_CNT; p++)
312 M_LST(rd->tmpintregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
314 for (p = 0; p < FLT_TMP_CNT; p++)
315 M_DST(rd->tmpfltregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
319 /* show integer hex code for float arguments */
321 for (p = 0, l = 0; p < md->paramcount && p < INT_ARG_CNT; p++) {
322 /* if the paramtype is a float, we have to right shift all */
323 /* following integer registers */
325 if (IS_FLT_DBL_TYPE(md->paramtypes[p].type)) {
326 for (s1 = INT_ARG_CNT - 2; s1 >= p; s1--) {
327 M_MOV(rd->argintregs[s1], rd->argintregs[s1 + 1]);
330 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
335 x86_64_mov_imm_reg(cd, (ptrint) m, REG_ITMP2);
336 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
337 x86_64_mov_imm_reg(cd, (ptrint) builtin_trace_args, REG_ITMP1);
338 x86_64_call_reg(cd, REG_ITMP1);
341 #if defined(ENABLE_STATISTICS)
342 M_MOV_IMM((ptrint) compiledinvokation, REG_ITMP1);
346 /* restore integer argument registers */
348 for (p = 0; p < INT_ARG_CNT; p++)
349 M_LLD(rd->argintregs[p], REG_SP, (1 + p) * 8);
351 /* restore float argument registers */
353 for (p = 0; p < FLT_ARG_CNT; p++)
354 M_DLD(rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
356 /* restore temporary registers for leaf methods */
358 if (m->isleafmethod) {
359 for (p = 0; p < INT_TMP_CNT; p++)
360 M_LLD(rd->tmpintregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
362 for (p = 0; p < FLT_TMP_CNT; p++)
363 M_DLD(rd->tmpfltregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
366 M_LADD_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT + 1 + 1) * 8, REG_SP);
371 /* end of header generation */
373 /* walk through all basic blocks */
374 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
376 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
378 if (bptr->flags >= BBREACHED) {
380 /* branch resolving */
383 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
384 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
389 /* copy interface registers to their destination */
395 #if defined(ENABLE_LSRA)
397 while (src != NULL) {
399 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
400 if (bptr->type == BBTYPE_SBR) {
401 /* d = reg_of_var(rd, src, REG_ITMP1); */
402 if (!(src->flags & INMEMORY))
406 x86_64_pop_reg(cd, d);
407 store_reg_to_var_int(src, d);
409 } else if (bptr->type == BBTYPE_EXH) {
410 /* d = reg_of_var(rd, src, REG_ITMP1); */
411 if (!(src->flags & INMEMORY))
415 M_INTMOVE(REG_ITMP1, d);
416 store_reg_to_var_int(src, d);
425 while (src != NULL) {
427 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
428 if (bptr->type == BBTYPE_SBR) {
429 d = reg_of_var(rd, src, REG_ITMP1);
430 x86_64_pop_reg(cd, d);
431 store_reg_to_var_int(src, d);
433 } else if (bptr->type == BBTYPE_EXH) {
434 d = reg_of_var(rd, src, REG_ITMP1);
435 M_INTMOVE(REG_ITMP1, d);
436 store_reg_to_var_int(src, d);
440 d = reg_of_var(rd, src, REG_ITMP1);
441 if ((src->varkind != STACKVAR)) {
443 if (IS_FLT_DBL_TYPE(s2)) {
444 s1 = rd->interfaces[len][s2].regoff;
445 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
449 x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
451 store_reg_to_var_flt(src, d);
454 s1 = rd->interfaces[len][s2].regoff;
455 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
459 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
461 store_reg_to_var_int(src, d);
467 #if defined(ENABLE_LSRA)
470 /* walk through all instructions */
476 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
477 if (iptr->line != currentline) {
478 dseg_addlinenumber(cd, iptr->line, cd->mcodeptr);
479 currentline = iptr->line;
482 MCODECHECK(1024); /* 1KB should be enough */
485 case ICMD_INLINE_START: /* internal ICMDs */
486 case ICMD_INLINE_END:
489 case ICMD_NOP: /* ... ==> ... */
492 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
493 if (src->flags & INMEMORY) {
494 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
497 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
499 x86_64_jcc(cd, X86_64_CC_Z, 0);
500 codegen_addxnullrefs(cd, cd->mcodeptr);
503 /* constant operations ************************************************/
505 case ICMD_ICONST: /* ... ==> ..., constant */
506 /* op1 = 0, val.i = constant */
508 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
509 if (iptr->val.i == 0) {
512 M_IMOV_IMM(iptr->val.i, d);
514 store_reg_to_var_int(iptr->dst, d);
517 case ICMD_LCONST: /* ... ==> ..., constant */
518 /* op1 = 0, val.l = constant */
520 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
521 if (iptr->val.l == 0) {
524 M_MOV_IMM(iptr->val.l, d);
526 store_reg_to_var_int(iptr->dst, d);
529 case ICMD_FCONST: /* ... ==> ..., constant */
530 /* op1 = 0, val.f = constant */
532 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
533 disp = dseg_addfloat(cd, iptr->val.f);
534 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + disp, d);
535 store_reg_to_var_flt(iptr->dst, d);
538 case ICMD_DCONST: /* ... ==> ..., constant */
539 /* op1 = 0, val.d = constant */
541 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
542 disp = dseg_adddouble(cd, iptr->val.d);
543 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + disp, d);
544 store_reg_to_var_flt(iptr->dst, d);
547 case ICMD_ACONST: /* ... ==> ..., constant */
548 /* op1 = 0, val.a = constant */
550 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
552 if ((iptr->target != NULL) && (iptr->val.a == NULL)) {
553 codegen_addpatchref(cd, cd->mcodeptr,
555 (unresolved_class *) iptr->target, 0);
557 if (opt_showdisassemble) {
558 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
561 M_MOV_IMM((ptrint) iptr->val.a, d);
564 if (iptr->val.a == 0) {
567 M_MOV_IMM((ptrint) iptr->val.a, d);
570 store_reg_to_var_int(iptr->dst, d);
574 /* load/store operations **********************************************/
576 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
577 /* op1 = local variable */
579 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
580 if ((iptr->dst->varkind == LOCALVAR) &&
581 (iptr->dst->varnum == iptr->op1)) {
584 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
585 if (var->flags & INMEMORY) {
586 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
587 store_reg_to_var_int(iptr->dst, d);
590 if (iptr->dst->flags & INMEMORY) {
591 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
594 M_INTMOVE(var->regoff, d);
599 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
600 case ICMD_ALOAD: /* op1 = local variable */
602 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
603 if ((iptr->dst->varkind == LOCALVAR) &&
604 (iptr->dst->varnum == iptr->op1)) {
607 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
608 if (var->flags & INMEMORY) {
609 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
610 store_reg_to_var_int(iptr->dst, d);
613 if (iptr->dst->flags & INMEMORY) {
614 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
617 M_INTMOVE(var->regoff, d);
622 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
623 case ICMD_DLOAD: /* op1 = local variable */
625 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
626 if ((iptr->dst->varkind == LOCALVAR) &&
627 (iptr->dst->varnum == iptr->op1)) {
630 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
631 if (var->flags & INMEMORY) {
632 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
633 store_reg_to_var_flt(iptr->dst, d);
636 if (iptr->dst->flags & INMEMORY) {
637 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
640 M_FLTMOVE(var->regoff, d);
645 case ICMD_ISTORE: /* ..., value ==> ... */
646 case ICMD_LSTORE: /* op1 = local variable */
649 if ((src->varkind == LOCALVAR) &&
650 (src->varnum == iptr->op1)) {
653 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
654 if (var->flags & INMEMORY) {
655 var_to_reg_int(s1, src, REG_ITMP1);
656 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
659 var_to_reg_int(s1, src, var->regoff);
660 M_INTMOVE(s1, var->regoff);
664 case ICMD_FSTORE: /* ..., value ==> ... */
665 case ICMD_DSTORE: /* op1 = local variable */
667 if ((src->varkind == LOCALVAR) &&
668 (src->varnum == iptr->op1)) {
671 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
672 if (var->flags & INMEMORY) {
673 var_to_reg_flt(s1, src, REG_FTMP1);
674 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
677 var_to_reg_flt(s1, src, var->regoff);
678 M_FLTMOVE(s1, var->regoff);
683 /* pop/dup/swap operations ********************************************/
685 /* attention: double and longs are only one entry in CACAO ICMDs */
687 case ICMD_POP: /* ..., value ==> ... */
688 case ICMD_POP2: /* ..., value, value ==> ... */
691 case ICMD_DUP: /* ..., a ==> ..., a, a */
692 M_COPY(src, iptr->dst);
695 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
697 M_COPY(src, iptr->dst);
698 M_COPY(src->prev, iptr->dst->prev);
699 M_COPY(iptr->dst, iptr->dst->prev->prev);
702 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
704 M_COPY(src, iptr->dst);
705 M_COPY(src->prev, iptr->dst->prev);
706 M_COPY(src->prev->prev, iptr->dst->prev->prev);
707 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
710 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
712 M_COPY(src, iptr->dst);
713 M_COPY(src->prev, iptr->dst->prev);
716 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
718 M_COPY(src, iptr->dst);
719 M_COPY(src->prev, iptr->dst->prev);
720 M_COPY(src->prev->prev, iptr->dst->prev->prev);
721 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
722 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
725 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
727 M_COPY(src, iptr->dst);
728 M_COPY(src->prev, iptr->dst->prev);
729 M_COPY(src->prev->prev, iptr->dst->prev->prev);
730 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
731 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
732 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
735 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
737 M_COPY(src, iptr->dst->prev);
738 M_COPY(src->prev, iptr->dst);
742 /* integer operations *************************************************/
744 case ICMD_INEG: /* ..., value ==> ..., - value */
746 d = reg_of_var(rd, iptr->dst, REG_NULL);
747 if (iptr->dst->flags & INMEMORY) {
748 if (src->flags & INMEMORY) {
749 if (src->regoff == iptr->dst->regoff) {
750 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
753 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
754 x86_64_negl_reg(cd, REG_ITMP1);
755 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
759 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
760 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
764 if (src->flags & INMEMORY) {
765 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
766 x86_64_negl_reg(cd, d);
769 M_INTMOVE(src->regoff, iptr->dst->regoff);
770 x86_64_negl_reg(cd, iptr->dst->regoff);
775 case ICMD_LNEG: /* ..., value ==> ..., - value */
777 d = reg_of_var(rd, iptr->dst, REG_NULL);
778 if (iptr->dst->flags & INMEMORY) {
779 if (src->flags & INMEMORY) {
780 if (src->regoff == iptr->dst->regoff) {
781 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
784 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
785 x86_64_neg_reg(cd, REG_ITMP1);
786 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
790 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
791 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
795 if (src->flags & INMEMORY) {
796 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
797 x86_64_neg_reg(cd, iptr->dst->regoff);
800 M_INTMOVE(src->regoff, iptr->dst->regoff);
801 x86_64_neg_reg(cd, iptr->dst->regoff);
806 case ICMD_I2L: /* ..., value ==> ..., value */
808 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
809 if (src->flags & INMEMORY) {
810 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
813 x86_64_movslq_reg_reg(cd, src->regoff, d);
815 store_reg_to_var_int(iptr->dst, d);
818 case ICMD_L2I: /* ..., value ==> ..., value */
820 var_to_reg_int(s1, src, REG_ITMP1);
821 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
823 store_reg_to_var_int(iptr->dst, d);
826 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
828 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
829 if (src->flags & INMEMORY) {
830 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
833 x86_64_movsbq_reg_reg(cd, src->regoff, d);
835 store_reg_to_var_int(iptr->dst, d);
838 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
840 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
841 if (src->flags & INMEMORY) {
842 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
845 x86_64_movzwq_reg_reg(cd, src->regoff, d);
847 store_reg_to_var_int(iptr->dst, d);
850 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
852 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
853 if (src->flags & INMEMORY) {
854 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
857 x86_64_movswq_reg_reg(cd, src->regoff, d);
859 store_reg_to_var_int(iptr->dst, d);
863 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
865 d = reg_of_var(rd, iptr->dst, REG_NULL);
866 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
869 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
870 /* val.i = constant */
872 d = reg_of_var(rd, iptr->dst, REG_NULL);
873 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
876 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
878 d = reg_of_var(rd, iptr->dst, REG_NULL);
879 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
882 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
883 /* val.l = constant */
885 d = reg_of_var(rd, iptr->dst, REG_NULL);
886 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
889 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
891 d = reg_of_var(rd, iptr->dst, REG_NULL);
892 if (iptr->dst->flags & INMEMORY) {
893 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
894 if (src->prev->regoff == iptr->dst->regoff) {
895 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
896 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
899 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
900 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
901 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
904 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
905 M_INTMOVE(src->prev->regoff, REG_ITMP1);
906 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
907 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
909 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
910 if (src->prev->regoff == iptr->dst->regoff) {
911 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
914 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
915 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
916 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
920 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
921 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
925 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
926 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
927 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
929 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
930 M_INTMOVE(src->prev->regoff, d);
931 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
933 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
934 /* workaround for reg alloc */
935 if (src->regoff == iptr->dst->regoff) {
936 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
937 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
938 M_INTMOVE(REG_ITMP1, d);
941 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
942 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
946 /* workaround for reg alloc */
947 if (src->regoff == iptr->dst->regoff) {
948 M_INTMOVE(src->prev->regoff, REG_ITMP1);
949 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
950 M_INTMOVE(REG_ITMP1, d);
953 M_INTMOVE(src->prev->regoff, d);
954 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
960 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
961 /* val.i = constant */
963 d = reg_of_var(rd, iptr->dst, REG_NULL);
964 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
967 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
969 d = reg_of_var(rd, iptr->dst, REG_NULL);
970 if (iptr->dst->flags & INMEMORY) {
971 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
972 if (src->prev->regoff == iptr->dst->regoff) {
973 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
974 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
977 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
978 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
979 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
982 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
983 M_INTMOVE(src->prev->regoff, REG_ITMP1);
984 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
985 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
987 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
988 if (src->prev->regoff == iptr->dst->regoff) {
989 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
992 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
993 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
994 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
998 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
999 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1003 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1004 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1005 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1007 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1008 M_INTMOVE(src->prev->regoff, d);
1009 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1011 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1012 /* workaround for reg alloc */
1013 if (src->regoff == iptr->dst->regoff) {
1014 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1015 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1016 M_INTMOVE(REG_ITMP1, d);
1019 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1020 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1024 /* workaround for reg alloc */
1025 if (src->regoff == iptr->dst->regoff) {
1026 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1027 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1028 M_INTMOVE(REG_ITMP1, d);
1031 M_INTMOVE(src->prev->regoff, d);
1032 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1038 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1039 /* val.l = constant */
1041 d = reg_of_var(rd, iptr->dst, REG_NULL);
1042 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1045 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1047 d = reg_of_var(rd, iptr->dst, REG_NULL);
1048 if (iptr->dst->flags & INMEMORY) {
1049 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1050 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1051 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1052 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1054 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1055 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1056 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1057 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1059 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1060 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1061 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1062 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1065 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1066 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1067 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1071 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1072 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1073 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1075 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1076 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1077 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1079 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1080 M_INTMOVE(src->regoff, iptr->dst->regoff);
1081 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1084 if (src->regoff == iptr->dst->regoff) {
1085 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1088 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1089 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1095 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1096 /* val.i = constant */
1098 d = reg_of_var(rd, iptr->dst, REG_NULL);
1099 if (iptr->dst->flags & INMEMORY) {
1100 if (src->flags & INMEMORY) {
1101 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1102 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1105 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1106 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1110 if (src->flags & INMEMORY) {
1111 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1114 if (iptr->val.i == 2) {
1115 M_INTMOVE(src->regoff, iptr->dst->regoff);
1116 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1119 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
1125 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1127 d = reg_of_var(rd, iptr->dst, REG_NULL);
1128 if (iptr->dst->flags & INMEMORY) {
1129 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1130 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1131 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1132 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1134 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1135 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1136 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1137 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1139 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1140 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1141 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1142 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1145 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1146 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1147 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1151 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1152 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1153 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1155 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1156 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1157 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1159 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1160 M_INTMOVE(src->regoff, iptr->dst->regoff);
1161 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1164 if (src->regoff == iptr->dst->regoff) {
1165 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1168 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1169 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
1175 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1176 /* val.l = constant */
1178 d = reg_of_var(rd, iptr->dst, REG_NULL);
1179 if (iptr->dst->flags & INMEMORY) {
1180 if (src->flags & INMEMORY) {
1181 if (IS_IMM32(iptr->val.l)) {
1182 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1185 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1186 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1188 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1191 if (IS_IMM32(iptr->val.l)) {
1192 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1195 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1196 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1198 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1202 if (src->flags & INMEMORY) {
1203 if (IS_IMM32(iptr->val.l)) {
1204 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1207 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1208 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1212 /* should match in many cases */
1213 if (iptr->val.l == 2) {
1214 M_INTMOVE(src->regoff, iptr->dst->regoff);
1215 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1218 if (IS_IMM32(iptr->val.l)) {
1219 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1222 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1223 M_INTMOVE(src->regoff, iptr->dst->regoff);
1224 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
1231 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1233 d = reg_of_var(rd, iptr->dst, REG_NULL);
1234 if (src->prev->flags & INMEMORY) {
1235 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1238 M_INTMOVE(src->prev->regoff, RAX);
1241 if (src->flags & INMEMORY) {
1242 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1245 M_INTMOVE(src->regoff, REG_ITMP3);
1249 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1250 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1251 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1252 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1254 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1256 x86_64_idivl_reg(cd, REG_ITMP3);
1258 if (iptr->dst->flags & INMEMORY) {
1259 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1260 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1263 M_INTMOVE(RAX, iptr->dst->regoff);
1265 if (iptr->dst->regoff != RDX) {
1266 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1271 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1272 d = reg_of_var(rd, iptr->dst, REG_NULL);
1273 if (src->prev->flags & INMEMORY) {
1274 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1277 M_INTMOVE(src->prev->regoff, RAX);
1280 if (src->flags & INMEMORY) {
1281 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1284 M_INTMOVE(src->regoff, REG_ITMP3);
1288 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1290 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1291 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1294 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1295 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1296 x86_64_jcc(cd, X86_64_CC_E, 1 + 3); /* 6 bytes */
1299 x86_64_idivl_reg(cd, REG_ITMP3);
1301 if (iptr->dst->flags & INMEMORY) {
1302 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1303 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1306 M_INTMOVE(RDX, iptr->dst->regoff);
1308 if (iptr->dst->regoff != RDX) {
1309 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1314 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1315 /* val.i = constant */
1317 var_to_reg_int(s1, src, REG_ITMP1);
1318 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1319 M_INTMOVE(s1, REG_ITMP1);
1320 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1321 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1322 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1323 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1324 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1325 store_reg_to_var_int(iptr->dst, d);
1328 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1329 /* val.i = constant */
1331 var_to_reg_int(s1, src, REG_ITMP1);
1332 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1333 M_INTMOVE(s1, REG_ITMP1);
1334 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1335 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1336 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1337 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1338 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1339 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1340 store_reg_to_var_int(iptr->dst, d);
1344 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1346 d = reg_of_var(rd, iptr->dst, REG_NULL);
1348 if (src->prev->flags & INMEMORY) {
1349 M_LLD(RAX, REG_SP, src->prev->regoff * 8);
1352 M_INTMOVE(src->prev->regoff, RAX);
1355 if (src->flags & INMEMORY) {
1356 M_LLD(REG_ITMP3, REG_SP, src->regoff * 8);
1359 M_INTMOVE(src->regoff, REG_ITMP3);
1363 /* check as described in jvm spec */
1364 disp = dseg_adds8(cd, 0x8000000000000000LL);
1365 M_CMP_MEMBASE(RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, RAX);
1367 M_CMP_IMM(-1, REG_ITMP3); /* 4 bytes */
1368 M_BEQ(3 + 2 + 3); /* 6 bytes */
1370 M_MOV(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1372 x86_64_idiv_reg(cd, REG_ITMP3);
1374 if (iptr->dst->flags & INMEMORY) {
1375 M_LST(RAX, REG_SP, iptr->dst->regoff * 8);
1376 M_MOV(REG_ITMP2, RDX); /* restore %rdx */
1379 M_INTMOVE(RAX, iptr->dst->regoff);
1381 if (iptr->dst->regoff != RDX) {
1382 M_MOV(REG_ITMP2, RDX); /* restore %rdx */
1387 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1389 d = reg_of_var(rd, iptr->dst, REG_NULL);
1390 if (src->prev->flags & INMEMORY) {
1391 M_LLD(REG_ITMP1, REG_SP, src->prev->regoff * 8);
1394 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1397 if (src->flags & INMEMORY) {
1398 M_LLD(REG_ITMP3, REG_SP, src->regoff * 8);
1401 M_INTMOVE(src->regoff, REG_ITMP3);
1405 M_MOV(RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1407 /* check as described in jvm spec */
1408 disp = dseg_adds8(cd, 0x8000000000000000LL);
1409 M_CMP_MEMBASE(RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP1);
1413 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1415 M_XOR(RDX, RDX); /* 3 bytes */
1416 M_CMP_IMM(-1, REG_ITMP3); /* 4 bytes */
1417 M_BEQ(2 + 3); /* 6 bytes */
1420 x86_64_idiv_reg(cd, REG_ITMP3);
1422 if (iptr->dst->flags & INMEMORY) {
1423 M_LST(RDX, REG_SP, iptr->dst->regoff * 8);
1424 M_MOV(REG_ITMP2, RDX); /* restore %rdx */
1427 M_INTMOVE(RDX, iptr->dst->regoff);
1429 if (iptr->dst->regoff != RDX) {
1430 M_MOV(REG_ITMP2, RDX); /* restore %rdx */
1435 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1436 /* val.i = constant */
1438 var_to_reg_int(s1, src, REG_ITMP1);
1439 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1440 M_INTMOVE(s1, REG_ITMP1);
1441 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1442 x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1443 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1444 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1445 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1446 store_reg_to_var_int(iptr->dst, d);
1449 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1450 /* val.l = constant */
1452 var_to_reg_int(s1, src, REG_ITMP1);
1453 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1454 M_INTMOVE(s1, REG_ITMP1);
1455 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1456 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1457 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1458 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1459 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1460 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1461 store_reg_to_var_int(iptr->dst, d);
1464 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1466 d = reg_of_var(rd, iptr->dst, REG_NULL);
1467 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1470 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1471 /* val.i = constant */
1473 d = reg_of_var(rd, iptr->dst, REG_NULL);
1474 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1477 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1479 d = reg_of_var(rd, iptr->dst, REG_NULL);
1480 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1483 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1484 /* val.i = constant */
1486 d = reg_of_var(rd, iptr->dst, REG_NULL);
1487 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1490 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1492 d = reg_of_var(rd, iptr->dst, REG_NULL);
1493 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1496 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1497 /* val.i = constant */
1499 d = reg_of_var(rd, iptr->dst, REG_NULL);
1500 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1503 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1505 d = reg_of_var(rd, iptr->dst, REG_NULL);
1506 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1509 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1510 /* val.i = constant */
1512 d = reg_of_var(rd, iptr->dst, REG_NULL);
1513 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1516 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1518 d = reg_of_var(rd, iptr->dst, REG_NULL);
1519 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1522 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1523 /* val.i = constant */
1525 d = reg_of_var(rd, iptr->dst, REG_NULL);
1526 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1529 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1531 d = reg_of_var(rd, iptr->dst, REG_NULL);
1532 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1535 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1536 /* val.l = constant */
1538 d = reg_of_var(rd, iptr->dst, REG_NULL);
1539 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1542 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1544 d = reg_of_var(rd, iptr->dst, REG_NULL);
1545 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1548 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1549 /* val.i = constant */
1551 d = reg_of_var(rd, iptr->dst, REG_NULL);
1552 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1555 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1557 d = reg_of_var(rd, iptr->dst, REG_NULL);
1558 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1561 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1562 /* val.l = constant */
1564 d = reg_of_var(rd, iptr->dst, REG_NULL);
1565 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1568 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1570 d = reg_of_var(rd, iptr->dst, REG_NULL);
1571 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1574 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1575 /* val.i = constant */
1577 d = reg_of_var(rd, iptr->dst, REG_NULL);
1578 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1581 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1583 d = reg_of_var(rd, iptr->dst, REG_NULL);
1584 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1587 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1588 /* val.l = constant */
1590 d = reg_of_var(rd, iptr->dst, REG_NULL);
1591 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1594 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1596 d = reg_of_var(rd, iptr->dst, REG_NULL);
1597 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1600 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1601 /* val.i = constant */
1603 d = reg_of_var(rd, iptr->dst, REG_NULL);
1604 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1607 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1609 d = reg_of_var(rd, iptr->dst, REG_NULL);
1610 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1613 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1614 /* val.l = constant */
1616 d = reg_of_var(rd, iptr->dst, REG_NULL);
1617 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
1621 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1622 /* op1 = variable, val.i = constant */
1624 /* using inc and dec is definitely faster than add -- tested */
1627 var = &(rd->locals[iptr->op1][TYPE_INT]);
1629 if (var->flags & INMEMORY) {
1630 if (iptr->val.i == 1) {
1631 x86_64_incl_membase(cd, REG_SP, d * 8);
1633 } else if (iptr->val.i == -1) {
1634 x86_64_decl_membase(cd, REG_SP, d * 8);
1637 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1641 if (iptr->val.i == 1) {
1642 x86_64_incl_reg(cd, d);
1644 } else if (iptr->val.i == -1) {
1645 x86_64_decl_reg(cd, d);
1648 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1654 /* floating operations ************************************************/
1656 case ICMD_FNEG: /* ..., value ==> ..., - value */
1658 var_to_reg_flt(s1, src, REG_FTMP1);
1659 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1660 disp = dseg_adds4(cd, 0x80000000);
1662 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + disp, REG_FTMP2);
1663 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1664 store_reg_to_var_flt(iptr->dst, d);
1667 case ICMD_DNEG: /* ..., value ==> ..., - value */
1669 var_to_reg_flt(s1, src, REG_FTMP1);
1670 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1671 disp = dseg_adds8(cd, 0x8000000000000000);
1673 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + disp, REG_FTMP2);
1674 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1675 store_reg_to_var_flt(iptr->dst, d);
1678 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1680 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1681 var_to_reg_flt(s2, src, REG_FTMP2);
1682 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1684 x86_64_addss_reg_reg(cd, s2, d);
1685 } else if (s2 == d) {
1686 x86_64_addss_reg_reg(cd, s1, d);
1689 x86_64_addss_reg_reg(cd, s2, d);
1691 store_reg_to_var_flt(iptr->dst, d);
1694 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1696 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1697 var_to_reg_flt(s2, src, REG_FTMP2);
1698 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1700 x86_64_addsd_reg_reg(cd, s2, d);
1701 } else if (s2 == d) {
1702 x86_64_addsd_reg_reg(cd, s1, d);
1705 x86_64_addsd_reg_reg(cd, s2, d);
1707 store_reg_to_var_flt(iptr->dst, d);
1710 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1712 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1713 var_to_reg_flt(s2, src, REG_FTMP2);
1714 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1716 M_FLTMOVE(s2, REG_FTMP2);
1720 x86_64_subss_reg_reg(cd, s2, d);
1721 store_reg_to_var_flt(iptr->dst, d);
1724 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1726 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1727 var_to_reg_flt(s2, src, REG_FTMP2);
1728 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1730 M_FLTMOVE(s2, REG_FTMP2);
1734 x86_64_subsd_reg_reg(cd, s2, d);
1735 store_reg_to_var_flt(iptr->dst, d);
1738 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1740 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1741 var_to_reg_flt(s2, src, REG_FTMP2);
1742 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1744 x86_64_mulss_reg_reg(cd, s2, d);
1745 } else if (s2 == d) {
1746 x86_64_mulss_reg_reg(cd, s1, d);
1749 x86_64_mulss_reg_reg(cd, s2, d);
1751 store_reg_to_var_flt(iptr->dst, d);
1754 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1756 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1757 var_to_reg_flt(s2, src, REG_FTMP2);
1758 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1760 x86_64_mulsd_reg_reg(cd, s2, d);
1761 } else if (s2 == d) {
1762 x86_64_mulsd_reg_reg(cd, s1, d);
1765 x86_64_mulsd_reg_reg(cd, s2, d);
1767 store_reg_to_var_flt(iptr->dst, d);
1770 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1772 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1773 var_to_reg_flt(s2, src, REG_FTMP2);
1774 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1776 M_FLTMOVE(s2, REG_FTMP2);
1780 x86_64_divss_reg_reg(cd, s2, d);
1781 store_reg_to_var_flt(iptr->dst, d);
1784 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1786 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1787 var_to_reg_flt(s2, src, REG_FTMP2);
1788 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1790 M_FLTMOVE(s2, REG_FTMP2);
1794 x86_64_divsd_reg_reg(cd, s2, d);
1795 store_reg_to_var_flt(iptr->dst, d);
1798 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1800 var_to_reg_int(s1, src, REG_ITMP1);
1801 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1802 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1803 store_reg_to_var_flt(iptr->dst, d);
1806 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1808 var_to_reg_int(s1, src, REG_ITMP1);
1809 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1810 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1811 store_reg_to_var_flt(iptr->dst, d);
1814 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1816 var_to_reg_int(s1, src, REG_ITMP1);
1817 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1818 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1819 store_reg_to_var_flt(iptr->dst, d);
1822 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1824 var_to_reg_int(s1, src, REG_ITMP1);
1825 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1826 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1827 store_reg_to_var_flt(iptr->dst, d);
1830 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1832 var_to_reg_flt(s1, src, REG_FTMP1);
1833 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1834 x86_64_cvttss2si_reg_reg(cd, s1, d);
1835 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1836 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1837 x86_64_jcc(cd, X86_64_CC_NE, a);
1838 M_FLTMOVE(s1, REG_FTMP1);
1839 x86_64_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP2);
1840 x86_64_call_reg(cd, REG_ITMP2);
1841 M_INTMOVE(REG_RESULT, d);
1842 store_reg_to_var_int(iptr->dst, d);
1845 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1847 var_to_reg_flt(s1, src, REG_FTMP1);
1848 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1849 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1850 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1851 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1852 x86_64_jcc(cd, X86_64_CC_NE, a);
1853 M_FLTMOVE(s1, REG_FTMP1);
1854 x86_64_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP2);
1855 x86_64_call_reg(cd, REG_ITMP2);
1856 M_INTMOVE(REG_RESULT, d);
1857 store_reg_to_var_int(iptr->dst, d);
1860 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1862 var_to_reg_flt(s1, src, REG_FTMP1);
1863 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1864 x86_64_cvttss2siq_reg_reg(cd, s1, d);
1865 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1866 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1867 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1868 x86_64_jcc(cd, X86_64_CC_NE, a);
1869 M_FLTMOVE(s1, REG_FTMP1);
1870 x86_64_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP2);
1871 x86_64_call_reg(cd, REG_ITMP2);
1872 M_INTMOVE(REG_RESULT, d);
1873 store_reg_to_var_int(iptr->dst, d);
1876 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1878 var_to_reg_flt(s1, src, REG_FTMP1);
1879 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1880 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1881 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1882 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1883 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1884 x86_64_jcc(cd, X86_64_CC_NE, a);
1885 M_FLTMOVE(s1, REG_FTMP1);
1886 x86_64_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP2);
1887 x86_64_call_reg(cd, REG_ITMP2);
1888 M_INTMOVE(REG_RESULT, d);
1889 store_reg_to_var_int(iptr->dst, d);
1892 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1894 var_to_reg_flt(s1, src, REG_FTMP1);
1895 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1896 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1897 store_reg_to_var_flt(iptr->dst, d);
1900 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1902 var_to_reg_flt(s1, src, REG_FTMP1);
1903 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1904 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1905 store_reg_to_var_flt(iptr->dst, d);
1908 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1909 /* == => 0, < => 1, > => -1 */
1911 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1912 var_to_reg_flt(s2, src, REG_FTMP2);
1913 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1914 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1915 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1916 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1917 x86_64_ucomiss_reg_reg(cd, s1, s2);
1918 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1919 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1920 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1921 store_reg_to_var_int(iptr->dst, d);
1924 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1925 /* == => 0, < => 1, > => -1 */
1927 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1928 var_to_reg_flt(s2, src, REG_FTMP2);
1929 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1930 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1931 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1932 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1933 x86_64_ucomiss_reg_reg(cd, s1, s2);
1934 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1935 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1936 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1937 store_reg_to_var_int(iptr->dst, d);
1940 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1941 /* == => 0, < => 1, > => -1 */
1943 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1944 var_to_reg_flt(s2, src, REG_FTMP2);
1945 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1946 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1947 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1948 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1949 x86_64_ucomisd_reg_reg(cd, s1, s2);
1950 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1951 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1952 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1953 store_reg_to_var_int(iptr->dst, d);
1956 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1957 /* == => 0, < => 1, > => -1 */
1959 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1960 var_to_reg_flt(s2, src, REG_FTMP2);
1961 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1962 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1963 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1964 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1965 x86_64_ucomisd_reg_reg(cd, s1, s2);
1966 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1967 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1968 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1969 store_reg_to_var_int(iptr->dst, d);
1973 /* memory operations **************************************************/
1975 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
1977 var_to_reg_int(s1, src, REG_ITMP1);
1978 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1979 gen_nullptr_check(s1);
1980 x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
1981 store_reg_to_var_int(iptr->dst, d);
1984 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
1986 var_to_reg_int(s1, src->prev, REG_ITMP1);
1987 var_to_reg_int(s2, src, REG_ITMP2);
1988 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1989 if (iptr->op1 == 0) {
1990 gen_nullptr_check(s1);
1993 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
1994 store_reg_to_var_int(iptr->dst, d);
1997 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1999 var_to_reg_int(s1, src->prev, REG_ITMP1);
2000 var_to_reg_int(s2, src, REG_ITMP2);
2001 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2002 if (iptr->op1 == 0) {
2003 gen_nullptr_check(s1);
2006 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2007 store_reg_to_var_int(iptr->dst, d);
2010 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2012 var_to_reg_int(s1, src->prev, REG_ITMP1);
2013 var_to_reg_int(s2, src, REG_ITMP2);
2014 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2015 if (iptr->op1 == 0) {
2016 gen_nullptr_check(s1);
2019 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2020 store_reg_to_var_int(iptr->dst, d);
2023 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2025 var_to_reg_int(s1, src->prev, REG_ITMP1);
2026 var_to_reg_int(s2, src, REG_ITMP2);
2027 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2028 if (iptr->op1 == 0) {
2029 gen_nullptr_check(s1);
2032 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2033 store_reg_to_var_flt(iptr->dst, d);
2036 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2038 var_to_reg_int(s1, src->prev, REG_ITMP1);
2039 var_to_reg_int(s2, src, REG_ITMP2);
2040 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2041 if (iptr->op1 == 0) {
2042 gen_nullptr_check(s1);
2045 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2046 store_reg_to_var_flt(iptr->dst, d);
2049 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2051 var_to_reg_int(s1, src->prev, REG_ITMP1);
2052 var_to_reg_int(s2, src, REG_ITMP2);
2053 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2054 if (iptr->op1 == 0) {
2055 gen_nullptr_check(s1);
2058 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2059 store_reg_to_var_int(iptr->dst, d);
2062 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2064 var_to_reg_int(s1, src->prev, REG_ITMP1);
2065 var_to_reg_int(s2, src, REG_ITMP2);
2066 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2067 if (iptr->op1 == 0) {
2068 gen_nullptr_check(s1);
2071 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2072 store_reg_to_var_int(iptr->dst, d);
2075 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2077 var_to_reg_int(s1, src->prev, REG_ITMP1);
2078 var_to_reg_int(s2, src, REG_ITMP2);
2079 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2080 if (iptr->op1 == 0) {
2081 gen_nullptr_check(s1);
2084 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2085 store_reg_to_var_int(iptr->dst, d);
2089 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2091 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2092 var_to_reg_int(s2, src->prev, REG_ITMP2);
2093 if (iptr->op1 == 0) {
2094 gen_nullptr_check(s1);
2097 var_to_reg_int(s3, src, REG_ITMP3);
2098 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2101 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2103 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2104 var_to_reg_int(s2, src->prev, REG_ITMP2);
2105 if (iptr->op1 == 0) {
2106 gen_nullptr_check(s1);
2109 var_to_reg_int(s3, src, REG_ITMP3);
2110 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2113 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2115 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2116 var_to_reg_int(s2, src->prev, REG_ITMP2);
2117 if (iptr->op1 == 0) {
2118 gen_nullptr_check(s1);
2121 var_to_reg_flt(s3, src, REG_FTMP3);
2122 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2125 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2127 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2128 var_to_reg_int(s2, src->prev, REG_ITMP2);
2129 if (iptr->op1 == 0) {
2130 gen_nullptr_check(s1);
2133 var_to_reg_flt(s3, src, REG_FTMP3);
2134 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2137 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2139 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2140 var_to_reg_int(s2, src->prev, REG_ITMP2);
2141 if (iptr->op1 == 0) {
2142 gen_nullptr_check(s1);
2145 var_to_reg_int(s3, src, REG_ITMP3);
2146 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2149 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2151 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2152 var_to_reg_int(s2, src->prev, REG_ITMP2);
2153 if (iptr->op1 == 0) {
2154 gen_nullptr_check(s1);
2157 var_to_reg_int(s3, src, REG_ITMP3);
2158 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2161 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2163 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2164 var_to_reg_int(s2, src->prev, REG_ITMP2);
2165 if (iptr->op1 == 0) {
2166 gen_nullptr_check(s1);
2169 var_to_reg_int(s3, src, REG_ITMP3);
2170 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2173 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2175 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2176 var_to_reg_int(s2, src->prev, REG_ITMP2);
2177 if (iptr->op1 == 0) {
2178 gen_nullptr_check(s1);
2181 var_to_reg_int(s3, src, REG_ITMP3);
2183 M_MOV(s1, rd->argintregs[0]);
2184 M_MOV(s3, rd->argintregs[1]);
2185 M_MOV_IMM((ptrint) BUILTIN_canstore, REG_ITMP1);
2189 codegen_addxstorerefs(cd, cd->mcodeptr);
2191 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2192 var_to_reg_int(s2, src->prev, REG_ITMP2);
2193 var_to_reg_int(s3, src, REG_ITMP3);
2194 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2198 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2200 var_to_reg_int(s1, src->prev, REG_ITMP1);
2201 var_to_reg_int(s2, src, REG_ITMP2);
2202 if (iptr->op1 == 0) {
2203 gen_nullptr_check(s1);
2206 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2209 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2211 var_to_reg_int(s1, src->prev, REG_ITMP1);
2212 var_to_reg_int(s2, src, REG_ITMP2);
2213 if (iptr->op1 == 0) {
2214 gen_nullptr_check(s1);
2217 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2220 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2222 var_to_reg_int(s1, src->prev, REG_ITMP1);
2223 var_to_reg_int(s2, src, REG_ITMP2);
2224 if (iptr->op1 == 0) {
2225 gen_nullptr_check(s1);
2228 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2231 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2233 var_to_reg_int(s1, src->prev, REG_ITMP1);
2234 var_to_reg_int(s2, src, REG_ITMP2);
2235 if (iptr->op1 == 0) {
2236 gen_nullptr_check(s1);
2239 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2242 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2244 var_to_reg_int(s1, src->prev, REG_ITMP1);
2245 var_to_reg_int(s2, src, REG_ITMP2);
2246 if (iptr->op1 == 0) {
2247 gen_nullptr_check(s1);
2251 if (IS_IMM32(iptr->val.l)) {
2252 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2255 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2256 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2260 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2262 var_to_reg_int(s1, src->prev, REG_ITMP1);
2263 var_to_reg_int(s2, src, REG_ITMP2);
2264 if (iptr->op1 == 0) {
2265 gen_nullptr_check(s1);
2268 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2272 case ICMD_GETSTATIC: /* ... ==> ..., value */
2273 /* op1 = type, val.a = field address */
2276 disp = dseg_addaddress(cd, NULL);
2278 codegen_addpatchref(cd, cd->mcodeptr,
2279 PATCHER_get_putstatic,
2280 (unresolved_field *) iptr->target, disp);
2282 if (opt_showdisassemble) {
2283 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2287 fieldinfo *fi = iptr->val.a;
2289 disp = dseg_addaddress(cd, &(fi->value));
2291 if (!(fi->class->state & CLASS_INITIALIZED)) {
2292 codegen_addpatchref(cd, cd->mcodeptr,
2293 PATCHER_clinit, fi->class, 0);
2295 if (opt_showdisassemble) {
2296 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2301 /* This approach is much faster than moving the field address */
2302 /* inline into a register. */
2303 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP2);
2304 switch (iptr->op1) {
2306 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2307 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2308 store_reg_to_var_int(iptr->dst, d);
2312 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2313 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2314 store_reg_to_var_int(iptr->dst, d);
2317 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2318 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2319 store_reg_to_var_flt(iptr->dst, d);
2322 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2323 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2324 store_reg_to_var_flt(iptr->dst, d);
2329 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2330 /* op1 = type, val.a = field address */
2333 disp = dseg_addaddress(cd, NULL);
2335 codegen_addpatchref(cd, cd->mcodeptr,
2336 PATCHER_get_putstatic,
2337 (unresolved_field *) iptr->target, disp);
2339 if (opt_showdisassemble) {
2340 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2344 fieldinfo *fi = iptr->val.a;
2346 disp = dseg_addaddress(cd, &(fi->value));
2348 if (!(fi->class->state & CLASS_INITIALIZED)) {
2349 codegen_addpatchref(cd, cd->mcodeptr,
2350 PATCHER_clinit, fi->class, 0);
2352 if (opt_showdisassemble) {
2353 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2358 /* This approach is much faster than moving the field address */
2359 /* inline into a register. */
2360 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP2);
2361 switch (iptr->op1) {
2363 var_to_reg_int(s2, src, REG_ITMP1);
2364 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2368 var_to_reg_int(s2, src, REG_ITMP1);
2369 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2372 var_to_reg_flt(s2, src, REG_FTMP1);
2373 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2376 var_to_reg_flt(s2, src, REG_FTMP1);
2377 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
2382 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2383 /* val = value (in current instruction) */
2384 /* op1 = type, val.a = field address (in */
2385 /* following NOP) */
2387 if (!iptr[1].val.a) {
2388 disp = dseg_addaddress(cd, NULL);
2390 codegen_addpatchref(cd, cd->mcodeptr,
2391 PATCHER_get_putstatic,
2392 (unresolved_field *) iptr[1].target, disp);
2394 if (opt_showdisassemble) {
2395 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2399 fieldinfo *fi = iptr[1].val.a;
2401 disp = dseg_addaddress(cd, &(fi->value));
2403 if (!(fi->class->state & CLASS_INITIALIZED)) {
2404 codegen_addpatchref(cd, cd->mcodeptr,
2405 PATCHER_clinit, fi->class, 0);
2407 if (opt_showdisassemble) {
2408 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2413 /* This approach is much faster than moving the field address */
2414 /* inline into a register. */
2415 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP1);
2416 switch (iptr->op1) {
2419 x86_64_movl_imm_membase(cd, iptr->val.i, REG_ITMP1, 0);
2424 if (IS_IMM32(iptr->val.l)) {
2425 x86_64_mov_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2427 x86_64_movl_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2428 x86_64_movl_imm_membase(cd, iptr->val.l >> 32, REG_ITMP1, 4);
2434 case ICMD_GETFIELD: /* ... ==> ..., value */
2435 /* op1 = type, val.i = field offset */
2437 var_to_reg_int(s1, src, REG_ITMP1);
2438 gen_nullptr_check(s1);
2441 codegen_addpatchref(cd, cd->mcodeptr,
2442 PATCHER_get_putfield,
2443 (unresolved_field *) iptr->target, 0);
2445 if (opt_showdisassemble) {
2446 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2452 a = ((fieldinfo *) (iptr->val.a))->offset;
2455 switch (iptr->op1) {
2457 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2458 x86_64_movl_membase32_reg(cd, s1, a, d);
2459 store_reg_to_var_int(iptr->dst, d);
2463 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2464 x86_64_mov_membase32_reg(cd, s1, a, d);
2465 store_reg_to_var_int(iptr->dst, d);
2468 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2469 x86_64_movss_membase32_reg(cd, s1, a, d);
2470 store_reg_to_var_flt(iptr->dst, d);
2473 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2474 x86_64_movsd_membase32_reg(cd, s1, a, d);
2475 store_reg_to_var_flt(iptr->dst, d);
2480 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2481 /* op1 = type, val.i = field offset */
2483 var_to_reg_int(s1, src->prev, REG_ITMP1);
2484 gen_nullptr_check(s1);
2485 if (IS_INT_LNG_TYPE(iptr->op1)) {
2486 var_to_reg_int(s2, src, REG_ITMP2);
2488 var_to_reg_flt(s2, src, REG_FTMP2);
2492 codegen_addpatchref(cd, cd->mcodeptr,
2493 PATCHER_get_putfield,
2494 (unresolved_field *) iptr->target, 0);
2496 if (opt_showdisassemble) {
2497 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2503 a = ((fieldinfo *) (iptr->val.a))->offset;
2506 switch (iptr->op1) {
2508 x86_64_movl_reg_membase32(cd, s2, s1, a);
2512 x86_64_mov_reg_membase32(cd, s2, s1, a);
2515 x86_64_movss_reg_membase32(cd, s2, s1, a);
2518 x86_64_movsd_reg_membase32(cd, s2, s1, a);
2523 case ICMD_PUTFIELDCONST: /* ..., objectref, value ==> ... */
2524 /* val = value (in current instruction) */
2525 /* op1 = type, val.a = field address (in */
2526 /* following NOP) */
2528 var_to_reg_int(s1, src, REG_ITMP1);
2529 gen_nullptr_check(s1);
2531 if (!iptr[1].val.a) {
2532 codegen_addpatchref(cd, cd->mcodeptr,
2533 PATCHER_putfieldconst,
2534 (unresolved_field *) iptr[1].target, 0);
2536 if (opt_showdisassemble) {
2537 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2543 a = ((fieldinfo *) (iptr[1].val.a))->offset;
2546 switch (iptr->op1) {
2549 x86_64_movl_imm_membase32(cd, iptr->val.i, s1, a);
2554 /* We can only optimize the move, if the class is resolved. */
2555 /* Otherwise we don't know what to patch. */
2556 if (iptr[1].val.a && IS_IMM32(iptr->val.l)) {
2557 x86_64_mov_imm_membase32(cd, iptr->val.l, s1, a);
2559 x86_64_movl_imm_membase32(cd, iptr->val.l, s1, a);
2560 x86_64_movl_imm_membase32(cd, iptr->val.l >> 32, s1, a + 4);
2567 /* branch operations **************************************************/
2569 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2571 var_to_reg_int(s1, src, REG_ITMP1);
2572 M_INTMOVE(s1, REG_ITMP1_XPTR);
2574 #ifdef ENABLE_VERIFIER
2576 codegen_addpatchref(cd, cd->mcodeptr,
2577 PATCHER_athrow_areturn,
2578 (unresolved_class *) iptr->val.a, 0);
2580 if (opt_showdisassemble) {
2581 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2584 #endif /* ENABLE_VERIFIER */
2586 M_CALL_IMM(0); /* passing exception pc */
2587 M_POP(REG_ITMP2_XPC);
2589 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
2593 case ICMD_GOTO: /* ... ==> ... */
2594 /* op1 = target JavaVM pc */
2597 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2600 case ICMD_JSR: /* ... ==> ... */
2601 /* op1 = target JavaVM pc */
2604 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2607 case ICMD_RET: /* ... ==> ... */
2608 /* op1 = local variable */
2610 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2611 var_to_reg_int(s1, var, REG_ITMP1);
2615 case ICMD_IFNULL: /* ..., value ==> ... */
2616 /* op1 = target JavaVM pc */
2618 if (src->flags & INMEMORY) {
2619 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2622 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2624 x86_64_jcc(cd, X86_64_CC_E, 0);
2625 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2628 case ICMD_IFNONNULL: /* ..., value ==> ... */
2629 /* op1 = target JavaVM pc */
2631 if (src->flags & INMEMORY) {
2632 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2635 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2637 x86_64_jcc(cd, X86_64_CC_NE, 0);
2638 codegen_addreference(cd, (basicblock *) iptr->target, cd->mcodeptr);
2641 case ICMD_IFEQ: /* ..., value ==> ... */
2642 /* op1 = target JavaVM pc, val.i = constant */
2644 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2647 case ICMD_IFLT: /* ..., value ==> ... */
2648 /* op1 = target JavaVM pc, val.i = constant */
2650 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2653 case ICMD_IFLE: /* ..., value ==> ... */
2654 /* op1 = target JavaVM pc, val.i = constant */
2656 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2659 case ICMD_IFNE: /* ..., value ==> ... */
2660 /* op1 = target JavaVM pc, val.i = constant */
2662 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2665 case ICMD_IFGT: /* ..., value ==> ... */
2666 /* op1 = target JavaVM pc, val.i = constant */
2668 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2671 case ICMD_IFGE: /* ..., value ==> ... */
2672 /* op1 = target JavaVM pc, val.i = constant */
2674 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
2677 case ICMD_IF_LEQ: /* ..., value ==> ... */
2678 /* op1 = target JavaVM pc, val.l = constant */
2680 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2683 case ICMD_IF_LLT: /* ..., value ==> ... */
2684 /* op1 = target JavaVM pc, val.l = constant */
2686 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2689 case ICMD_IF_LLE: /* ..., value ==> ... */
2690 /* op1 = target JavaVM pc, val.l = constant */
2692 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2695 case ICMD_IF_LNE: /* ..., value ==> ... */
2696 /* op1 = target JavaVM pc, val.l = constant */
2698 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2701 case ICMD_IF_LGT: /* ..., value ==> ... */
2702 /* op1 = target JavaVM pc, val.l = constant */
2704 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2707 case ICMD_IF_LGE: /* ..., value ==> ... */
2708 /* op1 = target JavaVM pc, val.l = constant */
2710 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
2713 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2714 /* op1 = target JavaVM pc */
2716 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2719 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2720 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2722 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2725 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2726 /* op1 = target JavaVM pc */
2728 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2731 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2732 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2734 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2737 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2738 /* op1 = target JavaVM pc */
2740 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2743 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2744 /* op1 = target JavaVM pc */
2746 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2749 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2750 /* op1 = target JavaVM pc */
2752 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2755 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2756 /* op1 = target JavaVM pc */
2758 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2761 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2762 /* op1 = target JavaVM pc */
2764 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2767 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2768 /* op1 = target JavaVM pc */
2770 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2773 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2774 /* op1 = target JavaVM pc */
2776 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2779 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2780 /* op1 = target JavaVM pc */
2782 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2785 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
2787 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2790 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2791 /* val.i = constant */
2793 var_to_reg_int(s1, src, REG_ITMP1);
2794 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2795 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2797 M_INTMOVE(s1, REG_ITMP1);
2800 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2802 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2803 x86_64_testl_reg_reg(cd, s1, s1);
2804 x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2805 store_reg_to_var_int(iptr->dst, d);
2808 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2809 /* val.i = constant */
2811 var_to_reg_int(s1, src, REG_ITMP1);
2812 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2813 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2815 M_INTMOVE(s1, REG_ITMP1);
2818 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2820 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2821 x86_64_testl_reg_reg(cd, s1, s1);
2822 x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2823 store_reg_to_var_int(iptr->dst, d);
2826 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2827 /* val.i = constant */
2829 var_to_reg_int(s1, src, REG_ITMP1);
2830 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2831 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2833 M_INTMOVE(s1, REG_ITMP1);
2836 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2838 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2839 x86_64_testl_reg_reg(cd, s1, s1);
2840 x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2841 store_reg_to_var_int(iptr->dst, d);
2844 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2845 /* val.i = constant */
2847 var_to_reg_int(s1, src, REG_ITMP1);
2848 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2849 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2851 M_INTMOVE(s1, REG_ITMP1);
2854 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2856 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2857 x86_64_testl_reg_reg(cd, s1, s1);
2858 x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2859 store_reg_to_var_int(iptr->dst, d);
2862 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2863 /* val.i = constant */
2865 var_to_reg_int(s1, src, REG_ITMP1);
2866 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2867 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2869 M_INTMOVE(s1, REG_ITMP1);
2872 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2874 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2875 x86_64_testl_reg_reg(cd, s1, s1);
2876 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2877 store_reg_to_var_int(iptr->dst, d);
2880 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2881 /* val.i = constant */
2883 var_to_reg_int(s1, src, REG_ITMP1);
2884 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2885 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2887 M_INTMOVE(s1, REG_ITMP1);
2890 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2892 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2893 x86_64_testl_reg_reg(cd, s1, s1);
2894 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2895 store_reg_to_var_int(iptr->dst, d);
2899 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2902 var_to_reg_int(s1, src, REG_RESULT);
2903 M_INTMOVE(s1, REG_RESULT);
2904 goto nowperformreturn;
2906 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2908 var_to_reg_int(s1, src, REG_RESULT);
2909 M_INTMOVE(s1, REG_RESULT);
2911 #ifdef ENABLE_VERIFIER
2913 codegen_addpatchref(cd, cd->mcodeptr,
2914 PATCHER_athrow_areturn,
2915 (unresolved_class *) iptr->val.a, 0);
2917 if (opt_showdisassemble) {
2918 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2921 #endif /* ENABLE_VERIFIER */
2922 goto nowperformreturn;
2924 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2927 var_to_reg_flt(s1, src, REG_FRESULT);
2928 M_FLTMOVE(s1, REG_FRESULT);
2929 goto nowperformreturn;
2931 case ICMD_RETURN: /* ... ==> ... */
2937 p = parentargs_base;
2939 /* call trace function */
2941 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2943 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2944 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2946 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
2947 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
2948 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2949 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2951 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
2952 x86_64_call_reg(cd, REG_ITMP1);
2954 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2955 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2957 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2960 #if defined(USE_THREADS)
2961 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2962 M_ALD(rd->argintregs[0], REG_SP, rd->memuse * 8);
2964 /* we need to save the proper return value */
2965 switch (iptr->opc) {
2969 M_LST(REG_RESULT, REG_SP, rd->memuse * 8);
2973 M_DST(REG_FRESULT, REG_SP, rd->memuse * 8);
2977 M_MOV_IMM((ptrint) builtin_monitorexit, REG_ITMP1);
2980 /* and now restore the proper return value */
2981 switch (iptr->opc) {
2985 M_LLD(REG_RESULT, REG_SP, rd->memuse * 8);
2989 M_DLD(REG_FRESULT, REG_SP, rd->memuse * 8);
2995 /* restore saved registers */
2997 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2998 p--; M_LLD(rd->savintregs[i], REG_SP, p * 8);
3000 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
3001 p--; M_DLD(rd->savfltregs[i], REG_SP, p * 8);
3004 /* deallocate stack */
3006 if (parentargs_base)
3007 M_AADD_IMM(parentargs_base * 8, REG_SP);
3014 case ICMD_TABLESWITCH: /* ..., index ==> ... */
3019 tptr = (void **) iptr->target;
3021 s4ptr = iptr->val.a;
3022 l = s4ptr[1]; /* low */
3023 i = s4ptr[2]; /* high */
3025 var_to_reg_int(s1, src, REG_ITMP1);
3026 M_INTMOVE(s1, REG_ITMP1);
3028 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
3033 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
3034 x86_64_jcc(cd, X86_64_CC_A, 0);
3036 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
3037 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3039 /* build jump table top down and use address of lowest entry */
3041 /* s4ptr += 3 + i; */
3045 dseg_addtarget(cd, (basicblock *) tptr[0]);
3049 /* length of dataseg after last dseg_addtarget is used by load */
3051 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
3052 dseg_adddata(cd, cd->mcodeptr);
3053 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
3054 x86_64_jmp_reg(cd, REG_ITMP1);
3059 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
3061 s4 i, l, val, *s4ptr;
3064 tptr = (void **) iptr->target;
3066 s4ptr = iptr->val.a;
3067 l = s4ptr[0]; /* default */
3068 i = s4ptr[1]; /* count */
3070 MCODECHECK(8 + ((7 + 6) * i) + 5);
3071 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
3077 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
3078 x86_64_jcc(cd, X86_64_CC_E, 0);
3079 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3082 x86_64_jmp_imm(cd, 0);
3084 tptr = (void **) iptr->target;
3085 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3090 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
3091 /* op1 = arg count val.a = builtintable entry */
3097 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3098 /* op1 = arg count, val.a = method pointer */
3100 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3101 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
3102 case ICMD_INVOKEINTERFACE:
3107 unresolved_method *um = iptr->target;
3108 md = um->methodref->parseddesc.md;
3110 md = lm->parseddesc;
3114 s3 = md->paramcount;
3116 MCODECHECK((20 * s3) + 128);
3118 /* copy arguments to registers or stack location */
3120 for (s3 = s3 - 1; s3 >= 0; s3--, src = src->prev) {
3121 if (src->varkind == ARGVAR)
3123 if (IS_INT_LNG_TYPE(src->type)) {
3124 if (!md->params[s3].inmemory) {
3125 s1 = rd->argintregs[md->params[s3].regoff];
3126 var_to_reg_int(d, src, s1);
3129 var_to_reg_int(d, src, REG_ITMP1);
3130 M_LST(d, REG_SP, md->params[s3].regoff * 8);
3134 if (!md->params[s3].inmemory) {
3135 s1 = rd->argfltregs[md->params[s3].regoff];
3136 var_to_reg_flt(d, src, s1);
3139 var_to_reg_flt(d, src, REG_FTMP1);
3140 M_DST(d, REG_SP, md->params[s3].regoff * 8);
3145 switch (iptr->opc) {
3147 a = (ptrint) bte->fp;
3148 d = md->returntype.type;
3150 M_MOV_IMM(a, REG_ITMP1);
3153 /* if op1 == true, we need to check for an exception */
3155 if (iptr->op1 == true) {
3158 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3162 case ICMD_INVOKESPECIAL:
3163 M_TEST(rd->argintregs[0]);
3165 codegen_addxnullrefs(cd, cd->mcodeptr);
3167 /* first argument contains pointer */
3168 /* gen_nullptr_check(rd->argintregs[0]); */
3170 /* access memory for hardware nullptr */
3171 /* x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); */
3175 case ICMD_INVOKESTATIC:
3177 unresolved_method *um = iptr->target;
3179 codegen_addpatchref(cd, cd->mcodeptr,
3180 PATCHER_invokestatic_special, um, 0);
3182 if (opt_showdisassemble) {
3183 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3187 d = um->methodref->parseddesc.md->returntype.type;
3190 a = (ptrint) lm->stubroutine;
3191 d = lm->parseddesc->returntype.type;
3194 M_MOV_IMM(a, REG_ITMP2);
3198 case ICMD_INVOKEVIRTUAL:
3199 gen_nullptr_check(rd->argintregs[0]);
3202 unresolved_method *um = iptr->target;
3204 codegen_addpatchref(cd, cd->mcodeptr,
3205 PATCHER_invokevirtual, um, 0);
3207 if (opt_showdisassemble) {
3208 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3212 d = um->methodref->parseddesc.md->returntype.type;
3215 s1 = OFFSET(vftbl_t, table[0]) +
3216 sizeof(methodptr) * lm->vftblindex;
3217 d = lm->parseddesc->returntype.type;
3220 x86_64_mov_membase_reg(cd, rd->argintregs[0],
3221 OFFSET(java_objectheader, vftbl),
3223 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP1);
3227 case ICMD_INVOKEINTERFACE:
3228 gen_nullptr_check(rd->argintregs[0]);
3231 unresolved_method *um = iptr->target;
3233 codegen_addpatchref(cd, cd->mcodeptr,
3234 PATCHER_invokeinterface, um, 0);
3236 if (opt_showdisassemble) {
3237 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3242 d = um->methodref->parseddesc.md->returntype.type;
3245 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3246 sizeof(methodptr) * lm->class->index;
3248 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3250 d = lm->parseddesc->returntype.type;
3253 M_ALD(REG_ITMP2, rd->argintregs[0],
3254 OFFSET(java_objectheader, vftbl));
3255 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP2);
3256 x86_64_mov_membase32_reg(cd, REG_ITMP2, s2, REG_ITMP1);
3261 /* d contains return type */
3263 if (d != TYPE_VOID) {
3264 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3265 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3266 M_INTMOVE(REG_RESULT, s1);
3267 store_reg_to_var_int(iptr->dst, s1);
3269 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3270 M_FLTMOVE(REG_FRESULT, s1);
3271 store_reg_to_var_flt(iptr->dst, s1);
3277 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3279 /* op1: 0 == array, 1 == class */
3280 /* val.a: (classinfo *) superclass */
3282 /* superclass is an interface:
3284 * OK if ((sub == NULL) ||
3285 * (sub->vftbl->interfacetablelength > super->index) &&
3286 * (sub->vftbl->interfacetable[-super->index] != NULL));
3288 * superclass is a class:
3290 * OK if ((sub == NULL) || (0
3291 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3292 * super->vftbl->diffval));
3295 if (iptr->op1 == 1) {
3296 /* object type cast-check */
3299 vftbl_t *supervftbl;
3302 super = (classinfo *) iptr->val.a;
3309 superindex = super->index;
3310 supervftbl = super->vftbl;
3313 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3314 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3316 var_to_reg_int(s1, src, REG_ITMP1);
3318 /* calculate interface checkcast code size */
3320 s2 = 3; /* mov_membase_reg */
3321 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3323 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub imm32 */ +
3324 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3325 3 /* test */ + 6 /* jcc */;
3328 s2 += (opt_showdisassemble ? 5 : 0);
3330 /* calculate class checkcast code size */
3332 s3 = 3; /* mov_membase_reg */
3333 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3334 s3 += 10 /* mov_imm_reg */ + 3 + 4 /* movl_membase32_reg */;
3337 if (s1 != REG_ITMP1) {
3338 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3339 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3340 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3341 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3347 s3 += 3 + 4 /* movl_membase32_reg */ + 3 /* sub */ +
3348 10 /* mov_imm_reg */ + 3 /* movl_membase_reg */;
3349 CALCOFFSETBYTES(s3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3352 s3 += 3 /* cmp */ + 6 /* jcc */;
3355 s3 += (opt_showdisassemble ? 5 : 0);
3357 /* if class is not resolved, check which code to call */
3361 M_BEQ(6 + (opt_showdisassemble ? 5 : 0) + 7 + 6 + s2 + 5 + s3);
3363 codegen_addpatchref(cd, cd->mcodeptr,
3364 PATCHER_checkcast_instanceof_flags,
3365 (constant_classref *) iptr->target, 0);
3367 if (opt_showdisassemble) {
3368 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3371 M_IMOV_IMM(0, REG_ITMP2); /* super->flags */
3372 M_IAND_IMM(ACC_INTERFACE, REG_ITMP2);
3376 /* interface checkcast code */
3378 if (!super || (super->flags & ACC_INTERFACE)) {
3384 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3387 codegen_addpatchref(cd, cd->mcodeptr,
3388 PATCHER_checkcast_instanceof_interface,
3389 (constant_classref *) iptr->target, 0);
3391 if (opt_showdisassemble) {
3392 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3396 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3397 OFFSET(vftbl_t, interfacetablelength),
3399 /* XXX TWISTI: should this be int arithmetic? */
3400 M_LSUB_IMM32(superindex, REG_ITMP3);
3403 codegen_addxcastrefs(cd, cd->mcodeptr);
3404 x86_64_mov_membase32_reg(cd, REG_ITMP2,
3405 OFFSET(vftbl_t, interfacetable[0]) -
3406 superindex * sizeof(methodptr*),
3410 codegen_addxcastrefs(cd, cd->mcodeptr);
3416 /* class checkcast code */
3418 if (!super || !(super->flags & ACC_INTERFACE)) {
3424 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3427 codegen_addpatchref(cd, cd->mcodeptr,
3428 PATCHER_checkcast_class,
3429 (constant_classref *) iptr->target,
3432 if (opt_showdisassemble) {
3433 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3437 M_MOV_IMM((ptrint) supervftbl, REG_ITMP3);
3438 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3439 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3441 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3442 OFFSET(vftbl_t, baseval),
3444 /* if (s1 != REG_ITMP1) { */
3445 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3446 /* OFFSET(vftbl_t, baseval), */
3448 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3449 /* OFFSET(vftbl_t, diffval), */
3451 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3452 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3454 /* x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP1, REG_ITMP2); */
3457 x86_64_movl_membase32_reg(cd, REG_ITMP3,
3458 OFFSET(vftbl_t, baseval),
3460 M_LSUB(REG_ITMP3, REG_ITMP2);
3461 M_MOV_IMM((ptrint) supervftbl, REG_ITMP3);
3462 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3464 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3465 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3467 M_CMP(REG_ITMP3, REG_ITMP2);
3468 M_BA(0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3469 codegen_addxcastrefs(cd, cd->mcodeptr);
3471 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3474 /* array type cast-check */
3476 var_to_reg_int(s1, src, REG_ITMP1);
3477 M_INTMOVE(s1, rd->argintregs[0]);
3479 if (iptr->val.a == NULL) {
3480 codegen_addpatchref(cd, cd->mcodeptr,
3481 PATCHER_builtin_arraycheckcast,
3482 (constant_classref *) iptr->target, 0);
3484 if (opt_showdisassemble) {
3485 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3489 M_MOV_IMM((ptrint) iptr->val.a, rd->argintregs[1]);
3490 M_MOV_IMM((ptrint) BUILTIN_arraycheckcast, REG_ITMP1);
3494 codegen_addxcastrefs(cd, cd->mcodeptr);
3496 var_to_reg_int(s1, src, REG_ITMP1);
3497 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
3500 store_reg_to_var_int(iptr->dst, d);
3503 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3505 /* op1: 0 == array, 1 == class */
3506 /* val.a: (classinfo *) superclass */
3508 /* superclass is an interface:
3510 * return (sub != NULL) &&
3511 * (sub->vftbl->interfacetablelength > super->index) &&
3512 * (sub->vftbl->interfacetable[-super->index] != NULL);
3514 * superclass is a class:
3516 * return ((sub != NULL) && (0
3517 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3518 * super->vftbl->diffval));
3523 vftbl_t *supervftbl;
3526 super = (classinfo *) iptr->val.a;
3533 superindex = super->index;
3534 supervftbl = super->vftbl;
3537 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3538 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3541 var_to_reg_int(s1, src, REG_ITMP1);
3542 d = reg_of_var(rd, iptr->dst, REG_ITMP2);
3544 M_INTMOVE(s1, REG_ITMP1);
3548 /* calculate interface instanceof code size */
3550 s2 = 3; /* mov_membase_reg */
3551 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3552 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub_imm32 */ +
3553 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3554 3 /* test */ + 4 /* setcc */;
3557 s2 += (opt_showdisassemble ? 5 : 0);
3559 /* calculate class instanceof code size */
3561 s3 = 3; /* mov_membase_reg */
3562 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
/* NOTE(review): this listing is elided -- the leading numbers are the
   original file's line numbers and gaps mark dropped lines.  Comments
   below annotate only the visible code; do not assume adjacency of
   consecutive lines. */

/* Tail of the ICMD_INSTANCEOF generator.  s3 accumulates the byte size
   of the class-instanceof code path so earlier forward jumps can skip
   it; each increment mirrors the encoded length of one instruction
   emitted further down. */
3563 s3 += 10; /* mov_imm_reg */
3564 s3 += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3565 CALCOFFSETBYTES(s3, REG_ITMP1, OFFSET(vftbl_t, baseval));
3566 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3567 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, baseval));
3568 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3569 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3570 s3 += 3 /* sub */ + 3 /* xor */ + 3 /* cmp */ + 4 /* setcc */;
/* account for the 5 nop bytes emitted after patcher refs when
   disassembly output is enabled */
3573 s3 += (opt_showdisassemble ? 5 : 0);
/* preset the result register to 0 (false) */
3575 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3577 /* if class is not resolved, check which code to call */
3580 x86_64_test_reg_reg(cd, s1, s1);
/* null objectref: skip both the interface and the class test code
   (the immediate is the combined byte length of everything skipped) */
3581 x86_64_jcc(cd, X86_64_CC_Z, (6 + (opt_showdisassemble ? 5 : 0) +
3582 7 + 6 + s2 + 5 + s3));
/* superclass unresolved at compile time: a patcher fills in the real
   flags word later; 0 below is only a placeholder immediate */
3584 codegen_addpatchref(cd, cd->mcodeptr,
3585 PATCHER_checkcast_instanceof_flags,
3586 (constant_classref *) iptr->target, 0);
3588 if (opt_showdisassemble) {
3589 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* dispatch at runtime: interface path if ACC_INTERFACE is set,
   otherwise fall through past the interface code (s2 + 5 bytes) */
3592 x86_64_movl_imm_reg(cd, 0, REG_ITMP3); /* super->flags */
3593 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP3);
3594 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3597 /* interface instanceof code */
3599 if (!super || (super->flags & ACC_INTERFACE)) {
3601 x86_64_test_reg_reg(cd, s1, s1);
3602 x86_64_jcc(cd, X86_64_CC_Z, s2);
/* load the object's vftbl pointer */
3605 x86_64_mov_membase_reg(cd, s1,
3606 OFFSET(java_objectheader, vftbl),
/* unresolved superinterface: patcher rewrites the index below */
3609 codegen_addpatchref(cd, cd->mcodeptr,
3610 PATCHER_checkcast_instanceof_interface,
3611 (constant_classref *) iptr->target, 0);
3613 if (opt_showdisassemble) {
3614 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* check interfacetablelength > superindex, then test the table slot */
3618 x86_64_movl_membase32_reg(cd, REG_ITMP1,
3619 OFFSET(vftbl_t, interfacetablelength),
3621 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3622 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
/* a = byte length of the slot-load/test/setcc sequence skipped on LE */
3624 a = 3 + 4 /* mov_membase32_reg */ + 3 /* test */ + 4 /* setcc */;
3626 x86_64_jcc(cd, X86_64_CC_LE, a);
/* interfacetable grows downwards, hence the negative slot offset */
3627 x86_64_mov_membase32_reg(cd, REG_ITMP1,
3628 OFFSET(vftbl_t, interfacetable[0]) -
3629 superindex * sizeof(methodptr*),
3631 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3632 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
/* skip the class-instanceof code which follows */
3635 x86_64_jmp_imm(cd, s3);
3638 /* class instanceof code */
3640 if (!super || !(super->flags & ACC_INTERFACE)) {
3642 x86_64_test_reg_reg(cd, s1, s1);
3643 x86_64_jcc(cd, X86_64_CC_E, s3);
3646 x86_64_mov_membase_reg(cd, s1,
3647 OFFSET(java_objectheader, vftbl),
3651 codegen_addpatchref(cd, cd->mcodeptr,
3652 PATCHER_instanceof_class,
3653 (constant_classref *) iptr->target, 0);
3655 if (opt_showdisassemble) {
3656 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3660 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP2);
/* baseval/diffval subtype test: reads must be atomic w.r.t. concurrent
   vftbl updates, hence the threadcrit start/stop bracket */
3661 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3662 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3664 x86_64_movl_membase_reg(cd, REG_ITMP1,
3665 OFFSET(vftbl_t, baseval),
3667 x86_64_movl_membase_reg(cd, REG_ITMP2,
3668 OFFSET(vftbl_t, diffval),
3670 x86_64_movl_membase_reg(cd, REG_ITMP2,
3671 OFFSET(vftbl_t, baseval),
3673 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3674 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
/* unsigned compare: (obj.base - super.base) <= super.diff => instance */
3676 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
3677 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d); /* may be REG_ITMP2 */
3678 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP1);
3679 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3681 store_reg_to_var_int(iptr->dst, d);
/* NOTE(review): elided listing fragment -- leading numbers are the
   original file's line numbers; gaps mark dropped lines (closing
   braces, break statements). */
3685 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3686 /* op1 = dimension, val.a = class */
3688 /* check for negative sizes and copy sizes to stack if necessary */
3690 MCODECHECK((10 * 4 * iptr->op1) + 5 + 10 * 8);
/* walk the stack operands (one per dimension), deepest first */
3692 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3693 /* copy SAVEDVAR sizes to stack */
3695 if (src->varkind != ARGVAR) {
3696 var_to_reg_int(s2, src, REG_ITMP1);
3697 M_LST(s2, REG_SP, s1 * 8);
3701 /* is a patcher function set? */
/* val.a == NULL means the array class is still unresolved; emit a
   patcher reference so the vftbl immediate is filled in later */
3703 if (iptr->val.a == NULL) {
3704 codegen_addpatchref(cd, cd->mcodeptr,
3705 PATCHER_builtin_multianewarray,
3706 (constant_classref *) iptr->target, 0);
3708 if (opt_showdisassemble) {
3709 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3715 a = (ptrint) iptr->val.a;
/* set up the three builtin arguments per the calling convention */
3718 /* a0 = dimension count */
3720 M_MOV_IMM(iptr->op1, rd->argintregs[0]);
3722 /* a1 = arrayvftbl */
3724 M_MOV_IMM((ptrint) iptr->val.a, rd->argintregs[1]);
3726 /* a2 = pointer to dimensions = stack pointer */
3728 M_MOV(REG_SP, rd->argintregs[2]);
3730 M_MOV_IMM((ptrint) BUILTIN_multianewarray, REG_ITMP1);
3733 /* check for exception before result assignment */
3737 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3739 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3740 M_INTMOVE(REG_RESULT, s1);
3741 store_reg_to_var_int(iptr->dst, s1);
/* default case of the opcode switch: unknown ICMD is an internal error */
3745 *exceptionptr = new_internalerror("Unknown ICMD %d", iptr->opc);
3749 } /* for instruction */
/* NOTE(review): elided listing fragment -- several enclosing loop/if
   lines are missing from view (e.g. the loop over outstack entries). */
3751 /* copy values to interface registers */
3753 src = bptr->outstack;
3754 len = bptr->outdepth;
3756 #if defined(ENABLE_LSRA)
/* move each non-STACKVAR outstack value into its interface register,
   or spill it to the interface's stack slot when INMEMORY */
3761 if ((src->varkind != STACKVAR)) {
3763 if (IS_FLT_DBL_TYPE(s2)) {
3764 var_to_reg_flt(s1, src, REG_FTMP1);
3765 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3766 M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3769 x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3773 var_to_reg_int(s1, src, REG_ITMP1);
3774 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3775 M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3778 x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3785 /* At the end of a basic block we may have to append some nops,
3786 because the patcher stub calling code might be longer than the
3787 actual instruction. So codepatching does not change the
3788 following block unintentionally. */
3790 if (cd->mcodeptr < cd->lastmcodeptr) {
3791 while (cd->mcodeptr < cd->lastmcodeptr) {
3796 } /* if (bptr -> flags >= BBREACHED) */
3797 } /* for basic block */
/* NOTE(review): elided listing fragment.  The six stub-generation loops
   below all follow one template: resolve the pending branch to point
   here, materialize the faulting PC in REG_ITMP2_XPC, share a single
   stub body per exception type via xcodeptr, build the stacktrace via
   a helper call, then tail-jump to asm_handle_exception.  Missing
   lines (closing braces, M_CALL/M_JMP of REG_ITMP3, #else arms) were
   dropped by the extraction. */
3799 dseg_createlinenumbertable(cd);
3806 /* generate ArithmeticException stubs */
3810 for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
3811 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3813 cd->mcodeptr - cd->mcodebase);
/* load the data-segment base (patched by dseg_adddata), then add the
   branch offset to recover the faulting instruction's address */
3817 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
3818 dseg_adddata(cd, cd->mcodeptr);
3819 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
/* xcodeptr caches the first stub body; later refs just jump to it */
3821 if (xcodeptr != NULL) {
3822 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
3825 xcodeptr = cd->mcodeptr;
/* a0 = PV (method data pointer via RIP-relative lea), a1 = SP,
   a2 = RA from the caller frame, a3 = XPC */
3827 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3828 M_MOV(REG_SP, rd->argintregs[1]);
3829 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3830 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
/* preserve XPC across the helper call (2 slots keep SP 16-aligned) */
3832 M_ASUB_IMM(2 * 8, REG_SP);
3833 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
3835 M_MOV_IMM((ptrint) stacktrace_inline_arithmeticexception,
3839 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
3840 M_AADD_IMM(2 * 8, REG_SP);
3842 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
3847 /* generate ArrayIndexOutOfBoundsException stubs */
3851 for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3852 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3854 cd->mcodeptr - cd->mcodebase);
3858 /* move index register into REG_ITMP1 */
3860 M_MOV(bref->reg, REG_ITMP1); /* 3 bytes */
3862 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
3863 dseg_adddata(cd, cd->mcodeptr);
3864 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
3866 if (xcodeptr != NULL) {
3867 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
3870 xcodeptr = cd->mcodeptr;
3872 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3873 M_MOV(REG_SP, rd->argintregs[1]);
3874 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3875 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
/* a4 = the out-of-range index, saved above */
3876 M_MOV(REG_ITMP1, rd->argintregs[4]);
3878 M_ASUB_IMM(2 * 8, REG_SP);
3879 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
3881 M_MOV_IMM((ptrint) stacktrace_inline_arrayindexoutofboundsexception,
3885 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
3886 M_AADD_IMM(2 * 8, REG_SP);
3888 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
3893 /* generate ArrayStoreException stubs */
3897 for (bref = cd->xstorerefs; bref != NULL; bref = bref->next) {
3898 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3900 cd->mcodeptr - cd->mcodebase);
3904 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
3905 dseg_adddata(cd, cd->mcodeptr);
3906 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
3908 if (xcodeptr != NULL) {
3909 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
3912 xcodeptr = cd->mcodeptr;
3914 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3915 M_MOV(REG_SP, rd->argintregs[1]);
3916 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3917 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
3919 M_ASUB_IMM(2 * 8, REG_SP);
3920 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
3922 M_MOV_IMM((ptrint) stacktrace_inline_arraystoreexception,
3926 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
3927 M_AADD_IMM(2 * 8, REG_SP);
3929 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
3934 /* generate ClassCastException stubs */
3938 for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3939 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3941 cd->mcodeptr - cd->mcodebase);
3945 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
3946 dseg_adddata(cd, cd->mcodeptr);
3947 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
3949 if (xcodeptr != NULL) {
3950 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
3953 xcodeptr = cd->mcodeptr;
3955 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3956 M_MOV(REG_SP, rd->argintregs[1]);
3957 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3958 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
3960 M_ASUB_IMM(2 * 8, REG_SP);
3961 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
3963 M_MOV_IMM((ptrint) stacktrace_inline_classcastexception, REG_ITMP3);
3966 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
3967 M_AADD_IMM(2 * 8, REG_SP);
3969 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
3974 /* generate NullpointerException stubs */
3978 for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
3979 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3981 cd->mcodeptr - cd->mcodebase);
3985 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
3986 dseg_adddata(cd, cd->mcodeptr);
3987 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
3989 if (xcodeptr != NULL) {
3990 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
3993 xcodeptr = cd->mcodeptr;
3995 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
3996 M_MOV(REG_SP, rd->argintregs[1]);
3997 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
3998 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
4000 M_ASUB_IMM(2 * 8, REG_SP);
4001 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
4003 M_MOV_IMM((ptrint) stacktrace_inline_nullpointerexception,
4007 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
4008 M_AADD_IMM(2 * 8, REG_SP);
4010 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
4015 /* generate exception check stubs */
4019 for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
4020 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4022 cd->mcodeptr - cd->mcodebase);
4026 M_MOV_IMM(0, REG_ITMP2_XPC); /* 10 bytes */
4027 dseg_adddata(cd, cd->mcodeptr);
4028 M_AADD_IMM32(bref->branchpos - 6, REG_ITMP2_XPC); /* 7 bytes */
4030 if (xcodeptr != NULL) {
4031 M_JMP_IMM(xcodeptr - cd->mcodeptr - 5);
4034 xcodeptr = cd->mcodeptr;
4036 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[0]);
4037 M_MOV(REG_SP, rd->argintregs[1]);
4038 M_ALD(rd->argintregs[2], REG_SP, parentargs_base * 8);
4039 M_MOV(REG_ITMP2_XPC, rd->argintregs[3]);
4041 M_ASUB_IMM(2 * 8, REG_SP);
4042 M_AST(REG_ITMP2_XPC, REG_SP, 0 * 8);
/* this stub fills in the stack trace of an exception already raised
   by a builtin, rather than constructing a new exception */
4044 M_MOV_IMM((ptrint) stacktrace_inline_fillInStackTrace, REG_ITMP3);
4047 M_ALD(REG_ITMP2_XPC, REG_SP, 0 * 8);
4048 M_AADD_IMM(2 * 8, REG_SP);
4050 M_MOV_IMM((ptrint) asm_handle_exception, REG_ITMP3);
/* NOTE(review): elided listing fragment.  For each recorded patcher
   reference we overwrite the original code site with a `call rel32'
   into a per-reference stub that pushes the saved machine code, the
   patcher's argument and the patcher function, then enters
   asm_wrapper_patcher.  The M_PUSH/M_JMP lines between the
   M_MOV_IMMs appear to have been dropped by the extraction. */
4055 /* generate code patching stub call code */
/* tmpcd lets us emit the call at xcodeptr without disturbing cd */
4062 tmpcd = DNEW(codegendata);
4064 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
4065 /* check size of code segment */
4069 /* Get machine code which is patched back in later. A */
4070 /* `call rel32' is 5 bytes long (but read 8 bytes). */
4072 xcodeptr = cd->mcodebase + pref->branchpos;
4073 mcode = *((ptrint *) xcodeptr);
4075 /* patch in `call rel32' to call the following code */
4077 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4078 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4080 /* move pointer to java_objectheader onto stack */
4082 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4083 /* create a virtual java_objectheader */
/* the patcher locks on this fake object header in the data segment */
4085 (void) dseg_addaddress(cd, get_dummyLR()); /* monitorPtr */
4086 a = dseg_addaddress(cd, NULL); /* vftbl */
4088 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP3);
4094 /* move machine code bytes and classinfo pointer into registers */
4096 M_MOV_IMM((ptrint) mcode, REG_ITMP3);
4098 M_MOV_IMM((ptrint) pref->ref, REG_ITMP3);
4100 M_MOV_IMM((ptrint) pref->disp, REG_ITMP3);
4103 M_MOV_IMM((ptrint) pref->patcher, REG_ITMP3);
4106 M_MOV_IMM((ptrint) asm_wrapper_patcher, REG_ITMP3);
/* hand the finished code over: copies mcode, installs entrypoint */
4112 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
4114 /* everything's ok */
4120 /* createcompilerstub **********************************************************
4122 Creates a stub routine which calls the compiler.
4124 *******************************************************************************/
/* Fixed byte size of the compiler stub: mov imm64,%reg (10) +
   mov imm64,%reg (10) + jmp *%reg (3) = 23 bytes. */
4126 #define COMPILERSTUB_SIZE 23
/* createcompilerstub: allocate a tiny trampoline that loads the
   methodinfo into REG_ITMP1 and jumps to asm_call_jit_compiler, which
   compiles the method on first invocation and backpatches callers.
   NOTE(review): the listing is elided here -- the opening brace,
   the codegendata/dumpsize declarations, the trailing M_JMP and the
   `return s;` are not visible. */
4128 u1 *createcompilerstub(methodinfo *m)
4130 u1 *s; /* memory to hold the stub */
4134 s = CNEW(u1, COMPILERSTUB_SIZE);
4136 /* mark start of dump memory area */
4138 dumpsize = dump_size();
/* cd is dump-allocated and released below; only `s` survives */
4140 cd = DNEW(codegendata);
4143 /* code for the stub */
4145 M_MOV_IMM((ptrint) m, REG_ITMP1); /* pass method to compiler */
4146 M_MOV_IMM((ptrint) asm_call_jit_compiler, REG_ITMP3);
4149 #if defined(ENABLE_STATISTICS)
4151 count_cstub_len += COMPILERSTUB_SIZE;
4154 /* release dump area */
4156 dump_release(dumpsize);
4162 /* createnativestub ************************************************************
4164 Creates a stub routine which calls a native method.
4166 *******************************************************************************/
/* createnativestub: build the JNI transition stub for native method m.
   The stub saves Java arguments, resolves the native function (via a
   patcher if not ENABLE_STATICVM), sets up a stackframeinfo, reshuffles
   the arguments to the native (JNI) convention -- env, optional class,
   then the Java arguments -- calls the native code, tears down the
   frame and propagates any pending exception.
   NOTE(review): this listing is elided -- leading numbers are original
   file line numbers; closing braces, M_CALL sites, #else/#endif arms
   and several declarations are not visible.  Comments annotate only
   what is shown. */
4168 u1 *createnativestub(functionptr f, methodinfo *m, codegendata *cd,
4169 registerdata *rd, methoddesc *nmd)
4172 s4 stackframesize; /* size of stackframe if needed */
4174 s4 i, j; /* count variables */
4178 /* initialize variables */
/* JNI prepends env, plus the class argument for static methods */
4181 nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;
4184 /* calculate stack frame size */
4187 sizeof(stackframeinfo) / SIZEOF_VOID_P +
4188 sizeof(localref_table) / SIZEOF_VOID_P +
4189 INT_ARG_CNT + FLT_ARG_CNT + 1 + /* + 1 for function address */
/* an odd slot count plus the return address keeps RSP 16-aligned at
   the call, as the x86-64 ABI requires */
4192 if (!(stackframesize & 0x1)) /* keep stack 16-byte aligned */
4196 /* create method header */
4198 (void) dseg_addaddress(cd, m); /* MethodPointer */
4199 (void) dseg_adds4(cd, stackframesize * 8); /* FrameSize */
4200 (void) dseg_adds4(cd, 0); /* IsSync */
4201 (void) dseg_adds4(cd, 0); /* IsLeaf */
4202 (void) dseg_adds4(cd, 0); /* IntSave */
4203 (void) dseg_adds4(cd, 0); /* FltSave */
4204 (void) dseg_addlinenumbertablesize(cd);
4205 (void) dseg_adds4(cd, 0); /* ExTableSize */
4208 /* initialize mcode variables */
4210 cd->mcodeptr = (u1 *) cd->mcodebase;
4211 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
4214 /* generate stub code */
4216 M_ASUB_IMM(stackframesize * 8, REG_SP);
/* --- call-trace prologue (presumably under a runverbose guard that
   is not visible here): save arg registers, call builtin_trace_args,
   restore them --- */
4219 /* save integer and float argument registers */
4221 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++)
4222 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4223 M_LST(rd->argintregs[j++], REG_SP, (1 + i) * 8);
4225 for (i = 0, j = 0; i < md->paramcount && j < FLT_ARG_CNT; i++)
4226 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4227 M_DST(rd->argfltregs[j++], REG_SP, (1 + INT_ARG_CNT + i) * 8);
4229 /* show integer hex code for float arguments */
4231 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++) {
4232 /* if the paramtype is a float, we have to right shift all
4233 following integer registers */
4235 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type)) {
4236 for (s1 = INT_ARG_CNT - 2; s1 >= i; s1--)
4237 M_MOV(rd->argintregs[s1], rd->argintregs[s1 + 1]);
/* raw bit pattern of the float arg, so the tracer can print it */
4239 x86_64_movd_freg_reg(cd, rd->argfltregs[j], rd->argintregs[i]);
4244 M_MOV_IMM((ptrint) m, REG_ITMP1);
4245 M_AST(REG_ITMP1, REG_SP, 0 * 8);
4246 M_MOV_IMM((ptrint) builtin_trace_args, REG_ITMP1);
4249 /* restore integer and float argument registers */
4251 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++)
4252 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4253 M_LLD(rd->argintregs[j++], REG_SP, (1 + i) * 8);
4255 for (i = 0, j = 0; i < md->paramcount && j < FLT_ARG_CNT; i++)
4256 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4257 M_DLD(rd->argfltregs[j++], REG_SP, (1 + INT_ARG_CNT + i) * 8);
4261 /* get function address (this must happen before the stackframeinfo) */
/* without ENABLE_STATICVM the native address is unknown until runtime:
   a patcher resolves it and rewrites the mov immediate below */
4263 #if !defined(ENABLE_STATICVM)
4265 codegen_addpatchref(cd, cd->mcodeptr, PATCHER_resolve_native, m, 0);
4267 if (opt_showdisassemble) {
4268 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
4273 M_MOV_IMM((ptrint) f, REG_ITMP3);
4276 /* save integer and float argument registers */
4278 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++)
4279 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4280 M_LST(rd->argintregs[j++], REG_SP, i * 8);
4282 for (i = 0, j = 0; i < md->paramcount && j < FLT_ARG_CNT; i++)
4283 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4284 M_DST(rd->argfltregs[j++], REG_SP, (INT_ARG_CNT + i) * 8);
/* the resolved native address is preserved in a stack slot across the
   codegen_start_native_call helper */
4286 M_AST(REG_ITMP3, REG_SP, (INT_ARG_CNT + FLT_ARG_CNT) * 8);
4288 /* create dynamic stack info */
4290 M_ALEA(REG_SP, stackframesize * 8, rd->argintregs[0]);
4291 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase), rd->argintregs[1]);
4292 M_ALEA(REG_SP, stackframesize * 8 + SIZEOF_VOID_P, rd->argintregs[2]);
4293 M_ALD(rd->argintregs[3], REG_SP, stackframesize * 8);
4294 M_MOV_IMM((ptrint) codegen_start_native_call, REG_ITMP1);
4297 #if defined(ENABLE_STATISTICS)
4299 M_MOV_IMM((ptrint) nativeinvokation, REG_ITMP1);
4304 /* restore integer and float argument registers */
4306 for (i = 0, j = 0; i < md->paramcount && j < INT_ARG_CNT; i++)
4307 if (IS_INT_LNG_TYPE(md->paramtypes[i].type))
4308 M_LLD(rd->argintregs[j++], REG_SP, i * 8);
4310 for (i = 0, j = 0; i < md->paramcount && j < FLT_ARG_CNT; i++)
4311 if (IS_FLT_DBL_TYPE(md->paramtypes[i].type))
4312 M_DLD(rd->argfltregs[j++], REG_SP, (INT_ARG_CNT + i) * 8);
4314 M_ALD(REG_ITMP3, REG_SP, (INT_ARG_CNT + FLT_ARG_CNT) * 8);
4317 /* copy or spill arguments to new locations */
/* walk backwards so register args shifted by env/class don't clobber
   each other; j indexes the native descriptor nmd */
4319 for (i = md->paramcount - 1, j = i + nativeparams; i >= 0; i--, j--) {
4320 t = md->paramtypes[i].type;
4322 if (IS_INT_LNG_TYPE(t)) {
4323 if (!md->params[i].inmemory) {
4324 s1 = rd->argintregs[md->params[i].regoff];
4326 if (!nmd->params[j].inmemory) {
4327 s2 = rd->argintregs[nmd->params[j].regoff];
4331 s2 = nmd->params[j].regoff;
4332 M_LST(s1, REG_SP, s2 * 8);
/* caller's stack arg: load from the old frame (+1 skips the RA) */
4336 s1 = md->params[i].regoff + stackframesize + 1; /* + 1 (RA) */
4337 s2 = nmd->params[j].regoff;
4338 M_LLD(REG_ITMP1, REG_SP, s1 * 8);
4339 M_LST(REG_ITMP1, REG_SP, s2 * 8);
4343 /* We only copy spilled float arguments, as the float argument */
4344 /* registers keep unchanged. */
4346 if (md->params[i].inmemory) {
4347 s1 = md->params[i].regoff + stackframesize + 1; /* + 1 (RA) */
4348 s2 = nmd->params[j].regoff;
4349 M_DLD(REG_FTMP1, REG_SP, s1 * 8);
4350 M_DST(REG_FTMP1, REG_SP, s2 * 8);
4355 /* put class into second argument register */
/* JNI: static methods receive the class object instead of `this` */
4357 if (m->flags & ACC_STATIC)
4358 M_MOV_IMM((ptrint) m->class, rd->argintregs[1]);
4360 /* put env into first argument register */
4362 M_MOV_IMM((ptrint) &env, rd->argintregs[0]);
4364 /* do the native function call */
4368 /* save return value */
4370 if (md->returntype.type != TYPE_VOID) {
4371 if (IS_INT_LNG_TYPE(md->returntype.type))
4372 M_LST(REG_RESULT, REG_SP, 0 * 8);
4374 M_DST(REG_FRESULT, REG_SP, 0 * 8);
4377 /* remove native stackframe info */
4379 M_ALEA(REG_SP, stackframesize * 8, rd->argintregs[0]);
4380 M_MOV_IMM((ptrint) codegen_finish_native_call, REG_ITMP1);
4383 /* generate call trace */
4386 /* just restore the value we need, don't care about the other */
4388 if (md->returntype.type != TYPE_VOID) {
4389 if (IS_INT_LNG_TYPE(md->returntype.type))
4390 M_LLD(REG_RESULT, REG_SP, 0 * 8);
4392 M_DLD(REG_FRESULT, REG_SP, 0 * 8);
4395 M_MOV_IMM((ptrint) m, rd->argintregs[0]);
4396 M_MOV(REG_RESULT, rd->argintregs[1]);
4397 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
4398 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
4400 M_MOV_IMM((ptrint) builtin_displaymethodstop, REG_ITMP1);
4404 /* check for exception */
/* threaded build fetches the per-thread exception-pointer slot via a
   helper; otherwise a global is read directly */
4406 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4407 M_MOV_IMM((ptrint) builtin_get_exceptionptrptr, REG_ITMP3);
4410 M_MOV_IMM((ptrint) &_no_threads_exceptionptr, REG_RESULT);
4412 M_ALD(REG_ITMP2, REG_RESULT, 0);
4414 /* restore return value */
4416 if (md->returntype.type != TYPE_VOID) {
4417 if (IS_INT_LNG_TYPE(md->returntype.type))
4418 M_LLD(REG_RESULT, REG_SP, 0 * 8);
4420 M_DLD(REG_FRESULT, REG_SP, 0 * 8);
4423 /* test for exception */
4428 /* remove stackframe */
4430 M_AADD_IMM(stackframesize * 8, REG_SP);
4434 /* handle exception */
4436 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4437 M_LST(REG_ITMP2, REG_SP, 0 * 8);
4438 M_MOV_IMM((ptrint) builtin_get_exceptionptrptr, REG_ITMP3);
4440 M_AST_IMM32(0, REG_RESULT, 0); /* clear exception pointer */
4441 M_LLD(REG_ITMP1_XPTR, REG_SP, 0 * 8);
4443 M_MOV(REG_ITMP3, REG_ITMP1_XPTR);
4444 M_MOV_IMM((ptrint) &_no_threads_exceptionptr, REG_ITMP3);
4445 M_AST_IMM32(0, REG_ITMP3, 0); /* clear exception pointer */
4448 /* remove stackframe */
4450 M_AADD_IMM(stackframesize * 8, REG_SP);
/* XPC = return address minus the 3-byte `call *%reg` that invoked the
   native, so the handler sees the faulting call site */
4452 M_LLD(REG_ITMP2_XPC, REG_SP, 0 * 8); /* get return address from stack */
4453 M_ASUB_IMM(3, REG_ITMP2_XPC); /* callq */
4455 M_MOV_IMM((ptrint) asm_handle_nat_exception, REG_ITMP3);
4459 /* process patcher calls **************************************************/
/* same patcher-stub template as in codegen() above */
4466 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4470 tmpcd = DNEW(codegendata);
4472 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
4473 /* Get machine code which is patched back in later. A */
4474 /* `call rel32' is 5 bytes long (but read 8 bytes). */
4476 xcodeptr = cd->mcodebase + pref->branchpos;
4477 mcode = *((ptrint *) xcodeptr);
4479 /* patch in `call rel32' to call the following code */
4481 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4482 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4484 /* move pointer to java_objectheader onto stack */
4486 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4487 /* create a virtual java_objectheader */
4489 (void) dseg_addaddress(cd, get_dummyLR()); /* monitorPtr */
4490 disp = dseg_addaddress(cd, NULL); /* vftbl */
4492 x86_64_lea_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + disp, REG_ITMP3);
4498 /* move machine code bytes and classinfo pointer into registers */
4500 M_MOV_IMM((ptrint) mcode, REG_ITMP3);
4502 M_MOV_IMM((ptrint) pref->ref, REG_ITMP3);
4504 M_MOV_IMM((ptrint) pref->disp, REG_ITMP3);
4507 M_MOV_IMM((ptrint) pref->patcher, REG_ITMP3);
4510 M_MOV_IMM((ptrint) asm_wrapper_patcher, REG_ITMP3);
4515 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
4517 return m->entrypoint;
4522 * These are local overrides for various environment variables in Emacs.
4523 * Please do not remove this and leave it at the end of the file, where
4524 * Emacs will automagically detect them.
4525 * ---------------------------------------------------------------------
4528 * indent-tabs-mode: t