1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
36 #include "vm/jit/i386/md-abi.h"
38 #include "vm/jit/i386/codegen.h"
39 #include "vm/jit/i386/emit.h"
41 #include "mm/memory.h"
42 #include "native/jni.h"
43 #include "native/localref.h"
44 #include "native/native.h"
46 #include "threads/lock-common.h"
48 #include "vm/builtin.h"
49 #include "vm/exceptions.h"
50 #include "vm/global.h"
51 #include "vm/stringlocal.h"
54 #include "vm/jit/abi.h"
55 #include "vm/jit/asmpart.h"
56 #include "vm/jit/codegen-common.h"
57 #include "vm/jit/dseg.h"
58 #include "vm/jit/emit-common.h"
59 #include "vm/jit/jit.h"
60 #include "vm/jit/parse.h"
61 #include "vm/jit/patcher.h"
62 #include "vm/jit/reg.h"
63 #include "vm/jit/replace.h"
64 #include "vm/jit/stacktrace.h"
66 #if defined(ENABLE_SSA)
67 # include "vm/jit/optimizing/lsra.h"
68 # include "vm/jit/optimizing/ssa.h"
69 #elif defined(ENABLE_LSRA)
70 # include "vm/jit/allocator/lsra.h"
73 #include "vmcore/loader.h"
74 #include "vmcore/options.h"
75 #include "vmcore/utf8.h"
78 /* codegen_emit ****************************************************************
80 Generates machine code.
82 *******************************************************************************/
84 bool codegen_emit(jitdata *jd)
90 s4 len, s1, s2, s3, d, disp;
96 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
97 builtintable_entry *bte;
100 unresolved_field *uf;
103 #if defined(ENABLE_SSA)
105 bool last_cmd_was_goto;
107 last_cmd_was_goto = false;
111 /* get required compiler data */
118 /* prevent compiler warnings */
129 s4 savedregs_num = 0;
132 /* space to save used callee saved registers */
134 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
135 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
137 cd->stackframesize = rd->memuse + savedregs_num;
140 #if defined(ENABLE_THREADS)
141 /* space to save argument of monitor_enter */
143 if (checksync && (m->flags & ACC_SYNCHRONIZED))
144 cd->stackframesize++;
147 /* create method header */
149 /* Keep stack of non-leaf functions 16-byte aligned. */
151 if (!jd->isleafmethod) {
152 ALIGN_ODD(cd->stackframesize); /* XXX this is wrong, +4 is missing */
155 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
156 (void) dseg_add_unique_s4(cd, cd->stackframesize * 8); /* FrameSize */
158 #if defined(ENABLE_THREADS)
159 /* IsSync contains the offset relative to the stack pointer for the
160 argument of monitor_exit used in the exception handler. Since the
161 offset could be zero and give a wrong meaning of the flag it is
165 if (checksync && (m->flags & ACC_SYNCHRONIZED))
166 (void) dseg_add_unique_s4(cd, (rd->memuse + 1) * 8); /* IsSync */
169 (void) dseg_add_unique_s4(cd, 0); /* IsSync */
171 (void) dseg_add_unique_s4(cd, jd->isleafmethod); /* IsLeaf */
172 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
173 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
175 /* Add a reference for the length of the line number table. The size
176    is not known yet, since the information is gathered during code
177    generation to avoid one additional pass over all instructions.
178    During code optimization the positions may have shifted relative
179    to the line number information taken from the class file. */
180 (void) dseg_addlinenumbertablesize(cd);
182 (void) dseg_add_unique_s4(cd, jd->exceptiontablelength); /* ExTableSize */
184 /* create exception table */
186 for (ex = jd->exceptiontable; ex != NULL; ex = ex->down) {
187 dseg_add_target(cd, ex->start);
188 dseg_add_target(cd, ex->end);
189 dseg_add_target(cd, ex->handler);
190 (void) dseg_add_unique_address(cd, ex->catchtype.any);
193 #if defined(ENABLE_PROFILING)
194 /* generate method profiling code */
196 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
197 /* count frequency */
199 M_MOV_IMM(code, REG_ITMP3);
200 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
204 /* create stack frame (if necessary) */
206 if (cd->stackframesize)
207 M_ASUB_IMM(cd->stackframesize * 8, REG_SP);
209 /* save return address and used callee saved registers */
211 p = cd->stackframesize;
212 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
213 p--; M_AST(rd->savintregs[i], REG_SP, p * 8);
215 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
216 p--; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 8);
219 /* take arguments out of register or stack frame */
224 for (p = 0, l = 0; p < md->paramcount; p++) {
225 t = md->paramtypes[p].type;
227 varindex = jd->local_map[l * 5 + t];
228 #if defined(ENABLE_SSA)
230 if (varindex != UNUSED)
231 varindex = ls->var_0[varindex];
232 if ((varindex != UNUSED) && (ls->lifetime[varindex].type == UNUSED))
237 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
240 if (varindex == UNUSED)
244 s1 = md->params[p].regoff;
247 if (IS_INT_LNG_TYPE(t)) { /* integer args */
248 if (!md->params[p].inmemory) { /* register arguments */
249 log_text("integer register argument");
251 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
252 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
254 else { /* reg arg -> spilled */
255 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
259 if (!(var->flags & INMEMORY)) {
260 M_ILD(d, REG_SP, cd->stackframesize * 8 + 4 + s1);
263 if (!IS_2_WORD_TYPE(t)) {
264 #if defined(ENABLE_SSA)
265 /* no copy avoiding by now possible with SSA */
267 emit_mov_membase_reg( /* + 4 for return address */
268 cd, REG_SP, cd->stackframesize * 8 + s1 + 4,
270 emit_mov_reg_membase(
271 cd, REG_ITMP1, REG_SP, var->vv.regoff);
274 #endif /*defined(ENABLE_SSA)*/
275 /* reuse stackslot */
276 var->vv.regoff = cd->stackframesize * 8 + 4 + s1;
280 #if defined(ENABLE_SSA)
281 /* no copy avoiding by now possible with SSA */
283 emit_mov_membase_reg( /* + 4 for return address */
284 cd, REG_SP, cd->stackframesize * 8 + s1 + 4,
286 emit_mov_reg_membase(
287 cd, REG_ITMP1, REG_SP, var->vv.regoff);
288 emit_mov_membase_reg( /* + 4 for return address */
289 cd, REG_SP, cd->stackframesize * 8 + s1 + 4 + 4,
291 emit_mov_reg_membase(
292 cd, REG_ITMP1, REG_SP, var->vv.regoff + 4);
295 #endif /*defined(ENABLE_SSA)*/
296 /* reuse stackslot */
297 var->vv.regoff = cd->stackframesize * 8 + 4 + s1;
302 else { /* floating args */
303 if (!md->params[p].inmemory) { /* register arguments */
304 log_text("There are no float argument registers!");
306 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
307 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
308 } else { /* reg arg -> spilled */
309 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 8 */
313 else { /* stack arguments */
314 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
317 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
319 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
324 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
326 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
329 } else { /* stack-arg -> spilled */
330 #if defined(ENABLE_SSA)
331 /* no copy avoiding by now possible with SSA */
333 emit_mov_membase_reg(
334 cd, REG_SP, cd->stackframesize * 8 + s1 + 4, REG_ITMP1);
335 emit_mov_reg_membase(
336 cd, REG_ITMP1, REG_SP, var->vv.regoff);
339 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
340 emit_fstps_membase(cd, REG_SP, var->vv.regoff);
344 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
345 emit_fstpl_membase(cd, REG_SP, var->vv.regoff);
349 #endif /*defined(ENABLE_SSA)*/
350 /* reuse stackslot */
351 var->vv.regoff = cd->stackframesize * 8 + 4 + s1;
357 /* call monitorenter function */
359 #if defined(ENABLE_THREADS)
360 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
363 if (m->flags & ACC_STATIC) {
364 M_MOV_IMM(&m->class->object.header, REG_ITMP1);
367 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 8 + 4);
370 M_ALD_MEM(REG_ITMP1, EXCEPTION_HARDWARE_NULLPOINTER);
373 M_AST(REG_ITMP1, REG_SP, s1 * 8);
374 M_AST(REG_ITMP1, REG_SP, 0 * 4);
375 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
381 emit_verbosecall_enter(jd);
386 #if defined(ENABLE_SSA)
387 /* with SSA the Header is Basic Block 0 - insert phi Moves if necessary */
389 codegen_emit_phi_moves(jd, ls->basicblocks[0]);
392 /* end of header generation */
394 /* create replacement points */
396 REPLACEMENT_POINTS_INIT(cd, jd);
398 /* walk through all basic blocks */
400 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
402 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
404 if (bptr->flags >= BBREACHED) {
405 /* branch resolving */
407 codegen_resolve_branchrefs(cd, bptr);
409 /* handle replacement points */
411 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
413 #if defined(ENABLE_REPLACEMENT)
414 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
415 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
417 disp = (s4) &(m->hitcountdown);
418 M_ISUB_IMM_MEMABS(1, disp);
424 /* copy interface registers to their destination */
429 #if defined(ENABLE_PROFILING)
430 /* generate basic block profiling code */
432 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
433 /* count frequency */
435 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
436 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
440 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
441 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
444 # if defined(ENABLE_SSA)
446 last_cmd_was_goto = false;
450 var = VAR(bptr->invars[len]);
451 if (bptr->type != BBTYPE_STD) {
452 if (!IS_2_WORD_TYPE(var->type)) {
453 if (bptr->type == BBTYPE_EXH) {
454 d = codegen_reg_of_var(0, var, REG_ITMP1);
455 M_INTMOVE(REG_ITMP1, d);
456 emit_store(jd, NULL, var, d);
460 log_text("copy interface registers(EXH, SBR): longs \
461 have to be in memory (begin 1)");
469 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
473 var = VAR(bptr->invars[len]);
474 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
475 if (!IS_2_WORD_TYPE(var->type)) {
476 if (bptr->type == BBTYPE_EXH) {
477 d = codegen_reg_of_var(0, var, REG_ITMP1);
478 M_INTMOVE(REG_ITMP1, d);
479 emit_store(jd, NULL, var, d);
483 log_text("copy interface registers: longs have to be in \
490 assert((var->flags & INOUT));
495 /* walk through all instructions */
500 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
501 if (iptr->line != currentline) {
502 dseg_addlinenumber(cd, iptr->line);
503 currentline = iptr->line;
506 MCODECHECK(1024); /* 1kB should be enough */
509 case ICMD_NOP: /* ... ==> ... */
510 case ICMD_POP: /* ..., value ==> ... */
511 case ICMD_POP2: /* ..., value, value ==> ... */
514 case ICMD_INLINE_START:
516 REPLACEMENT_POINT_INLINE_START(cd, iptr);
519 case ICMD_INLINE_BODY:
521 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
522 dseg_addlinenumber_inline_start(cd, iptr);
523 dseg_addlinenumber(cd, iptr->line);
526 case ICMD_INLINE_END:
528 dseg_addlinenumber_inline_end(cd, iptr);
529 dseg_addlinenumber(cd, iptr->line);
532 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
534 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
535 emit_nullpointer_check(cd, iptr, s1);
538 /* constant operations ************************************************/
540 case ICMD_ICONST: /* ... ==> ..., constant */
542 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
543 ICONST(d, iptr->sx.val.i);
544 emit_store_dst(jd, iptr, d);
547 case ICMD_LCONST: /* ... ==> ..., constant */
549 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
550 LCONST(d, iptr->sx.val.l);
551 emit_store_dst(jd, iptr, d);
554 case ICMD_FCONST: /* ... ==> ..., constant */
556 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
557 if (iptr->sx.val.f == 0.0) {
561 if (iptr->sx.val.i == 0x80000000) {
565 } else if (iptr->sx.val.f == 1.0) {
568 } else if (iptr->sx.val.f == 2.0) {
574 disp = dseg_add_float(cd, iptr->sx.val.f);
575 emit_mov_imm_reg(cd, 0, REG_ITMP1);
577 emit_flds_membase(cd, REG_ITMP1, disp);
579 emit_store_dst(jd, iptr, d);
582 case ICMD_DCONST: /* ... ==> ..., constant */
584 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
585 if (iptr->sx.val.d == 0.0) {
589 if (iptr->sx.val.l == 0x8000000000000000LL) {
593 } else if (iptr->sx.val.d == 1.0) {
596 } else if (iptr->sx.val.d == 2.0) {
602 disp = dseg_add_double(cd, iptr->sx.val.d);
603 emit_mov_imm_reg(cd, 0, REG_ITMP1);
605 emit_fldl_membase(cd, REG_ITMP1, disp);
607 emit_store_dst(jd, iptr, d);
610 case ICMD_ACONST: /* ... ==> ..., constant */
612 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
614 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
615 codegen_addpatchref(cd, PATCHER_aconst,
616 iptr->sx.val.c.ref, 0);
621 if (iptr->sx.val.anyptr == NULL)
624 M_MOV_IMM(iptr->sx.val.anyptr, d);
626 emit_store_dst(jd, iptr, d);
630 /* load/store/copy/move operations ************************************/
648 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
653 /* integer operations *************************************************/
655 case ICMD_INEG: /* ..., value ==> ..., - value */
657 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
658 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
661 emit_store_dst(jd, iptr, d);
664 case ICMD_LNEG: /* ..., value ==> ..., - value */
666 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
667 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
669 M_NEG(GET_LOW_REG(d));
670 M_IADDC_IMM(0, GET_HIGH_REG(d));
671 M_NEG(GET_HIGH_REG(d));
672 emit_store_dst(jd, iptr, d);
675 case ICMD_I2L: /* ..., value ==> ..., value */
677 s1 = emit_load_s1(jd, iptr, EAX);
678 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
681 M_LNGMOVE(EAX_EDX_PACKED, d);
682 emit_store_dst(jd, iptr, d);
685 case ICMD_L2I: /* ..., value ==> ..., value */
687 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
688 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
690 emit_store_dst(jd, iptr, d);
693 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
695 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
696 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
700 emit_store_dst(jd, iptr, d);
703 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
705 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
706 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
708 emit_store_dst(jd, iptr, d);
711 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
713 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
714 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
716 emit_store_dst(jd, iptr, d);
720 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
722 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
723 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
724 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
731 emit_store_dst(jd, iptr, d);
735 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
736 /* sx.val.i = constant */
738 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
739 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
741 /* `inc reg' is slower on p4's (regarding to ia32
742 optimization reference manual and benchmarks) and as
746 M_IADD_IMM(iptr->sx.val.i, d);
747 emit_store_dst(jd, iptr, d);
750 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
752 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
753 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
754 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
755 M_INTMOVE(s1, GET_LOW_REG(d));
756 M_IADD(s2, GET_LOW_REG(d));
757 /* don't use REG_ITMP1 */
758 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
759 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
760 M_INTMOVE(s1, GET_HIGH_REG(d));
761 M_IADDC(s2, GET_HIGH_REG(d));
762 emit_store_dst(jd, iptr, d);
765 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
766 /* sx.val.l = constant */
768 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
769 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
771 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
772 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
773 emit_store_dst(jd, iptr, d);
776 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
778 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
779 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
780 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
782 M_INTMOVE(s1, REG_ITMP1);
783 M_ISUB(s2, REG_ITMP1);
784 M_INTMOVE(REG_ITMP1, d);
790 emit_store_dst(jd, iptr, d);
793 case ICMD_ISUBCONST: /* ..., value ==> ..., value - constant */
794 /* sx.val.i = constant */
796 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
797 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
799 M_ISUB_IMM(iptr->sx.val.i, d);
800 emit_store_dst(jd, iptr, d);
803 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
805 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
806 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
807 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
808 if (s2 == GET_LOW_REG(d)) {
809 M_INTMOVE(s1, REG_ITMP1);
810 M_ISUB(s2, REG_ITMP1);
811 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
814 M_INTMOVE(s1, GET_LOW_REG(d));
815 M_ISUB(s2, GET_LOW_REG(d));
817 /* don't use REG_ITMP1 */
818 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
819 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
820 if (s2 == GET_HIGH_REG(d)) {
821 M_INTMOVE(s1, REG_ITMP2);
822 M_ISUBB(s2, REG_ITMP2);
823 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
826 M_INTMOVE(s1, GET_HIGH_REG(d));
827 M_ISUBB(s2, GET_HIGH_REG(d));
829 emit_store_dst(jd, iptr, d);
832 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
833 /* sx.val.l = constant */
835 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
836 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
838 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
839 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
840 emit_store_dst(jd, iptr, d);
843 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
845 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
846 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
847 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
854 emit_store_dst(jd, iptr, d);
857 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
858 /* sx.val.i = constant */
860 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
861 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
862 M_IMUL_IMM(s1, iptr->sx.val.i, d);
863 emit_store_dst(jd, iptr, d);
866 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
868 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
869 s2 = emit_load_s2_low(jd, iptr, EDX);
870 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
872 M_INTMOVE(s1, REG_ITMP2);
873 M_IMUL(s2, REG_ITMP2);
875 s1 = emit_load_s1_low(jd, iptr, EAX);
876 s2 = emit_load_s2_high(jd, iptr, EDX);
879 M_IADD(EDX, REG_ITMP2);
881 s1 = emit_load_s1_low(jd, iptr, EAX);
882 s2 = emit_load_s2_low(jd, iptr, EDX);
885 M_INTMOVE(EAX, GET_LOW_REG(d));
886 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
888 emit_store_dst(jd, iptr, d);
891 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
892 /* sx.val.l = constant */
894 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
895 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
896 ICONST(EAX, iptr->sx.val.l);
898 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
899 M_IADD(REG_ITMP2, EDX);
900 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
901 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
902 M_IADD(REG_ITMP2, EDX);
903 M_LNGMOVE(EAX_EDX_PACKED, d);
904 emit_store_dst(jd, iptr, d);
907 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
909 s1 = emit_load_s1(jd, iptr, EAX);
910 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
911 d = codegen_reg_of_dst(jd, iptr, EAX);
912 emit_arithmetic_check(cd, iptr, s2);
914 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
916 /* check as described in jvm spec */
918 M_CMP_IMM(0x80000000, EAX);
925 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
926 emit_store_dst(jd, iptr, d);
929 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
931 s1 = emit_load_s1(jd, iptr, EAX);
932 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
933 d = codegen_reg_of_dst(jd, iptr, EDX);
934 emit_arithmetic_check(cd, iptr, s2);
936 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
938 /* check as described in jvm spec */
940 M_CMP_IMM(0x80000000, EAX);
948 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
949 emit_store_dst(jd, iptr, d);
952 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
953 /* sx.val.i = constant */
955 /* TODO: optimize for `/ 2' */
956 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
957 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
961 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
962 M_SRA_IMM(iptr->sx.val.i, d);
963 emit_store_dst(jd, iptr, d);
966 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
967 /* sx.val.i = constant */
969 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
970 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
972 M_MOV(s1, REG_ITMP1);
976 M_AND_IMM(iptr->sx.val.i, d);
978 M_BGE(2 + 2 + 6 + 2);
979 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
981 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
983 emit_store_dst(jd, iptr, d);
986 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
987 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
989 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
990 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
992 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
993 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
994 /* XXX could be optimized */
995 emit_arithmetic_check(cd, iptr, REG_ITMP3);
997 bte = iptr->sx.s23.s3.bte;
1000 M_LST(s2, REG_SP, 2 * 4);
1002 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1003 M_LST(s1, REG_SP, 0 * 4);
1005 M_MOV_IMM(bte->fp, REG_ITMP3);
1007 emit_store_dst(jd, iptr, d);
1010 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1011 /* sx.val.i = constant */
1013 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1014 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1016 M_TEST(GET_HIGH_REG(d));
1018 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1019 M_IADDC_IMM(0, GET_HIGH_REG(d));
1020 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1021 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1022 emit_store_dst(jd, iptr, d);
1026 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1027 /* sx.val.l = constant */
1029 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1030 if (iptr->dst.var->flags & INMEMORY) {
1031 if (iptr->s1.var->flags & INMEMORY) {
1032 /* Alpha algorithm */
1034 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8);
1036 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1042 /* TODO: hmm, don't know if this is always correct */
1044 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1046 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1052 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1053 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1055 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1056 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1057 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1058 emit_jcc(cd, CC_GE, disp);
1060 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1061 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1063 emit_neg_reg(cd, REG_ITMP1);
1064 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1065 emit_neg_reg(cd, REG_ITMP2);
1067 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1068 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1070 emit_neg_reg(cd, REG_ITMP1);
1071 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1072 emit_neg_reg(cd, REG_ITMP2);
1074 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 8);
1075 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 8 + 4);
1079 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1080 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1082 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1083 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1084 M_TEST(GET_LOW_REG(s1));
1090 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1092 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1093 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1094 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1095 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1098 emit_store_dst(jd, iptr, d);
1101 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1102 /* sx.val.i = constant */
1104 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1105 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1107 M_SLL_IMM(iptr->sx.val.i, d);
1108 emit_store_dst(jd, iptr, d);
1111 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1113 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1114 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1115 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1116 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1119 emit_store_dst(jd, iptr, d);
1122 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1123 /* sx.val.i = constant */
1125 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1126 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1128 M_SRA_IMM(iptr->sx.val.i, d);
1129 emit_store_dst(jd, iptr, d);
1132 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1134 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1135 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1136 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1137 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1140 emit_store_dst(jd, iptr, d);
1143 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1144 /* sx.val.i = constant */
1146 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1147 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1149 M_SRL_IMM(iptr->sx.val.i, d);
1150 emit_store_dst(jd, iptr, d);
1153 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1155 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1156 s2 = emit_load_s2(jd, iptr, ECX);
1157 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1160 M_TEST_IMM(32, ECX);
1162 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1163 M_CLR(GET_LOW_REG(d));
1164 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1165 M_SLL(GET_LOW_REG(d));
1166 emit_store_dst(jd, iptr, d);
1169 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1170 /* sx.val.i = constant */
1172 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1173 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1175 if (iptr->sx.val.i & 0x20) {
1176 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1177 M_CLR(GET_LOW_REG(d));
1178 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1182 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1184 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1186 emit_store_dst(jd, iptr, d);
1189 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1191 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1192 s2 = emit_load_s2(jd, iptr, ECX);
1193 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1196 M_TEST_IMM(32, ECX);
1198 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1199 M_SRA_IMM(31, GET_HIGH_REG(d));
1200 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1201 M_SRA(GET_HIGH_REG(d));
1202 emit_store_dst(jd, iptr, d);
1205 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1206 /* sx.val.i = constant */
1208 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1209 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1211 if (iptr->sx.val.i & 0x20) {
1212 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1213 M_SRA_IMM(31, GET_HIGH_REG(d));
1214 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1218 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1220 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1222 emit_store_dst(jd, iptr, d);
1225 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1227 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1228 s2 = emit_load_s2(jd, iptr, ECX);
1229 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1232 M_TEST_IMM(32, ECX);
1234 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1235 M_CLR(GET_HIGH_REG(d));
1236 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1237 M_SRL(GET_HIGH_REG(d));
1238 emit_store_dst(jd, iptr, d);
1241 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1242 /* sx.val.i = constant */
1244 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1245 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1247 if (iptr->sx.val.i & 0x20) {
1248 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1249 M_CLR(GET_HIGH_REG(d));
1250 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1254 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1256 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1258 emit_store_dst(jd, iptr, d);
1261 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1263 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1264 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1265 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1272 emit_store_dst(jd, iptr, d);
1275 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1276 /* sx.val.i = constant */
1278 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1279 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1281 M_AND_IMM(iptr->sx.val.i, d);
1282 emit_store_dst(jd, iptr, d);
1285 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1287 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1288 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1289 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1290 if (s2 == GET_LOW_REG(d))
1291 M_AND(s1, GET_LOW_REG(d));
1293 M_INTMOVE(s1, GET_LOW_REG(d));
1294 M_AND(s2, GET_LOW_REG(d));
1296 /* REG_ITMP1 probably contains low 32-bit of destination */
1297 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1298 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1299 if (s2 == GET_HIGH_REG(d))
1300 M_AND(s1, GET_HIGH_REG(d));
1302 M_INTMOVE(s1, GET_HIGH_REG(d));
1303 M_AND(s2, GET_HIGH_REG(d));
1305 emit_store_dst(jd, iptr, d);
1308 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1309 /* sx.val.l = constant */
1311 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1312 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1314 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1315 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1316 emit_store_dst(jd, iptr, d);
1319 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1321 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1322 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1323 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1330 emit_store_dst(jd, iptr, d);
1333 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1334 /* sx.val.i = constant */
1336 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1337 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1339 M_OR_IMM(iptr->sx.val.i, d);
1340 emit_store_dst(jd, iptr, d);
1343 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1345 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1346 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1347 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1348 if (s2 == GET_LOW_REG(d))
1349 M_OR(s1, GET_LOW_REG(d));
1351 M_INTMOVE(s1, GET_LOW_REG(d));
1352 M_OR(s2, GET_LOW_REG(d));
1354 /* REG_ITMP1 probably contains low 32-bit of destination */
1355 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1356 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1357 if (s2 == GET_HIGH_REG(d))
1358 M_OR(s1, GET_HIGH_REG(d));
1360 M_INTMOVE(s1, GET_HIGH_REG(d));
1361 M_OR(s2, GET_HIGH_REG(d));
1363 emit_store_dst(jd, iptr, d);
1366 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1367 /* sx.val.l = constant */
1369 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1370 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1372 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1373 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1374 emit_store_dst(jd, iptr, d);
1377 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1379 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1380 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1381 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1388 emit_store_dst(jd, iptr, d);
1391 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1392 /* sx.val.i = constant */
1394 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1395 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1397 M_XOR_IMM(iptr->sx.val.i, d);
1398 emit_store_dst(jd, iptr, d);
1401 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1403 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1404 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1405 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1406 if (s2 == GET_LOW_REG(d))
1407 M_XOR(s1, GET_LOW_REG(d));
1409 M_INTMOVE(s1, GET_LOW_REG(d));
1410 M_XOR(s2, GET_LOW_REG(d));
1412 /* REG_ITMP1 probably contains low 32-bit of destination */
1413 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1414 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1415 if (s2 == GET_HIGH_REG(d))
1416 M_XOR(s1, GET_HIGH_REG(d));
1418 M_INTMOVE(s1, GET_HIGH_REG(d));
1419 M_XOR(s2, GET_HIGH_REG(d));
1421 emit_store_dst(jd, iptr, d);
1424 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1425 /* sx.val.l = constant */
1427 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1428 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1430 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1431 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1432 emit_store_dst(jd, iptr, d);
1436 /* floating operations ************************************************/
1438 case ICMD_FNEG: /* ..., value ==> ..., - value */
1440 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1441 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1443 emit_store_dst(jd, iptr, d);
1446 case ICMD_DNEG: /* ..., value ==> ..., - value */
1448 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1449 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1451 emit_store_dst(jd, iptr, d);
1454 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1456 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1457 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1458 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1460 emit_store_dst(jd, iptr, d);
1463 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1465 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1466 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1467 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1469 emit_store_dst(jd, iptr, d);
1472 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1474 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1475 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1476 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1478 emit_store_dst(jd, iptr, d);
1481 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1483 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1484 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1485 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1487 emit_store_dst(jd, iptr, d);
1490 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1492 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1493 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1494 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1496 emit_store_dst(jd, iptr, d);
1499 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1501 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1502 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1503 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1505 emit_store_dst(jd, iptr, d);
1508 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1510 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1511 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1512 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1514 emit_store_dst(jd, iptr, d);
1517 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1519 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1520 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1521 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1523 emit_store_dst(jd, iptr, d);
1526 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1528 /* exchanged to skip fxch */
1529 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1530 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1531 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1532 /* emit_fxch(cd); */
1537 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1538 emit_store_dst(jd, iptr, d);
1539 emit_ffree_reg(cd, 0);
1543 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1545 /* exchanged to skip fxch */
1546 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1547 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1548 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1549 /* emit_fxch(cd); */
1554 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1555 emit_store_dst(jd, iptr, d);
1556 emit_ffree_reg(cd, 0);
1560 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1561 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1563 var = VAROP(iptr->s1);
1564 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1566 if (var->flags & INMEMORY) {
1567 emit_fildl_membase(cd, REG_SP, var->vv.regoff);
1569 /* XXX not thread safe! */
1570 disp = dseg_add_unique_s4(cd, 0);
1571 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1573 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1574 emit_fildl_membase(cd, REG_ITMP1, disp);
1577 emit_store_dst(jd, iptr, d);
1580 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1581 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1583 var = VAROP(iptr->s1);
1584 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1585 if (var->flags & INMEMORY) {
1586 emit_fildll_membase(cd, REG_SP, var->vv.regoff);
1589 log_text("L2F: longs have to be in memory");
1592 emit_store_dst(jd, iptr, d);
1595 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1597 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1598 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1600 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1603 /* Round to zero, 53-bit mode, exception masked */
1604 disp = dseg_add_s4(cd, 0x0e7f);
1605 emit_fldcw_membase(cd, REG_ITMP1, disp);
1607 var = VAROP(iptr->dst);
1608 var1 = VAROP(iptr->s1);
1610 if (var->flags & INMEMORY) {
1611 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1613 /* Round to nearest, 53-bit mode, exceptions masked */
1614 disp = dseg_add_s4(cd, 0x027f);
1615 emit_fldcw_membase(cd, REG_ITMP1, disp);
1617 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1618 REG_SP, var->vv.regoff);
1621 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1623 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1626 /* XXX not thread safe! */
1627 disp = dseg_add_unique_s4(cd, 0);
1628 emit_fistpl_membase(cd, REG_ITMP1, disp);
1629 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1631 /* Round to nearest, 53-bit mode, exceptions masked */
1632 disp = dseg_add_s4(cd, 0x027f);
1633 emit_fldcw_membase(cd, REG_ITMP1, disp);
1635 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1638 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1639 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1642 emit_jcc(cd, CC_NE, disp);
1644 /* XXX: change this when we use registers */
1645 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1646 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1647 emit_call_reg(cd, REG_ITMP1);
1649 if (var->flags & INMEMORY) {
1650 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1653 M_INTMOVE(REG_RESULT, var->vv.regoff);
1657 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1659 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1660 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1662 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1665 /* Round to zero, 53-bit mode, exception masked */
1666 disp = dseg_add_s4(cd, 0x0e7f);
1667 emit_fldcw_membase(cd, REG_ITMP1, disp);
1669 var = VAROP(iptr->dst);
1670 var1 = VAROP(iptr->s1);
1672 if (var->flags & INMEMORY) {
1673 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1675 /* Round to nearest, 53-bit mode, exceptions masked */
1676 disp = dseg_add_s4(cd, 0x027f);
1677 emit_fldcw_membase(cd, REG_ITMP1, disp);
1679 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1680 REG_SP, var->vv.regoff);
1683 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1685 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1688 /* XXX not thread safe! */
1689 disp = dseg_add_unique_s4(cd, 0);
1690 emit_fistpl_membase(cd, REG_ITMP1, disp);
1691 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1693 /* Round to nearest, 53-bit mode, exceptions masked */
1694 disp = dseg_add_s4(cd, 0x027f);
1695 emit_fldcw_membase(cd, REG_ITMP1, disp);
1697 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1700 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1701 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1704 emit_jcc(cd, CC_NE, disp);
1706 /* XXX: change this when we use registers */
1707 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1708 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1709 emit_call_reg(cd, REG_ITMP1);
1711 if (var->flags & INMEMORY) {
1712 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1714 M_INTMOVE(REG_RESULT, var->vv.regoff);
1718 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1720 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1721 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1723 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1726 /* Round to zero, 53-bit mode, exception masked */
1727 disp = dseg_add_s4(cd, 0x0e7f);
1728 emit_fldcw_membase(cd, REG_ITMP1, disp);
1730 var = VAROP(iptr->dst);
1731 var1 = VAROP(iptr->s1);
1733 if (var->flags & INMEMORY) {
1734 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1736 /* Round to nearest, 53-bit mode, exceptions masked */
1737 disp = dseg_add_s4(cd, 0x027f);
1738 emit_fldcw_membase(cd, REG_ITMP1, disp);
1740 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1741 REG_SP, var->vv.regoff + 4);
1744 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1746 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1749 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1751 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1753 emit_jcc(cd, CC_NE, disp);
1755 emit_alu_imm_membase(cd, ALU_CMP, 0,
1756 REG_SP, var->vv.regoff);
1759 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1761 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1763 emit_jcc(cd, CC_NE, disp);
1765 /* XXX: change this when we use registers */
1766 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1767 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1768 emit_call_reg(cd, REG_ITMP1);
1769 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1770 emit_mov_reg_membase(cd, REG_RESULT2,
1771 REG_SP, var->vv.regoff + 4);
1774 log_text("F2L: longs have to be in memory");
1779 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1781 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1782 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1784 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1787 /* Round to zero, 53-bit mode, exception masked */
1788 disp = dseg_add_s4(cd, 0x0e7f);
1789 emit_fldcw_membase(cd, REG_ITMP1, disp);
1791 var = VAROP(iptr->dst);
1792 var1 = VAROP(iptr->s1);
1794 if (var->flags & INMEMORY) {
1795 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1797 /* Round to nearest, 53-bit mode, exceptions masked */
1798 disp = dseg_add_s4(cd, 0x027f);
1799 emit_fldcw_membase(cd, REG_ITMP1, disp);
1801 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1802 REG_SP, var->vv.regoff + 4);
1805 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1807 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1810 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1812 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1814 emit_jcc(cd, CC_NE, disp);
1816 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff);
1819 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1821 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1823 emit_jcc(cd, CC_NE, disp);
1825 /* XXX: change this when we use registers */
1826 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1827 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1828 emit_call_reg(cd, REG_ITMP1);
1829 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1830 emit_mov_reg_membase(cd, REG_RESULT2,
1831 REG_SP, var->vv.regoff + 4);
1834 log_text("D2L: longs have to be in memory");
1839 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1841 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1842 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1844 emit_store_dst(jd, iptr, d);
1847 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1849 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1850 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1852 emit_store_dst(jd, iptr, d);
1855 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1858 /* exchanged to skip fxch */
1859 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1860 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1861 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1862 /* emit_fxch(cd); */
1865 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1866 emit_jcc(cd, CC_E, 6);
1867 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1869 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1870 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1871 emit_jcc(cd, CC_B, 3 + 5);
1872 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1873 emit_jmp_imm(cd, 3);
1874 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1875 emit_store_dst(jd, iptr, d);
1878 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1881 /* exchanged to skip fxch */
1882 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1883 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1884 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1885 /* emit_fxch(cd); */
1888 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1889 emit_jcc(cd, CC_E, 3);
1890 emit_movb_imm_reg(cd, 1, REG_AH);
1892 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1893 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1894 emit_jcc(cd, CC_B, 3 + 5);
1895 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1896 emit_jmp_imm(cd, 3);
1897 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1898 emit_store_dst(jd, iptr, d);
1902 /* memory operations **************************************************/
1904 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1906 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1907 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1908 /* implicit null-pointer check */
1909 M_ILD(d, s1, OFFSET(java_array_t, size));
1910 emit_store_dst(jd, iptr, d);
1913 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1915 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1916 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1917 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1918 /* implicit null-pointer check */
1919 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1920 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray_t, data[0]),
1922 emit_store_dst(jd, iptr, d);
1925 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1927 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1928 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1929 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1930 /* implicit null-pointer check */
1931 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1932 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray_t, data[0]),
1934 emit_store_dst(jd, iptr, d);
1937 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1939 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1940 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1941 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1942 /* implicit null-pointer check */
1943 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1944 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray_t, data[0]),
1946 emit_store_dst(jd, iptr, d);
1949 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1951 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1952 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1953 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1954 /* implicit null-pointer check */
1955 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1956 emit_mov_memindex_reg(cd, OFFSET(java_intarray_t, data[0]),
1958 emit_store_dst(jd, iptr, d);
1961 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1963 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1964 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1965 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1966 /* implicit null-pointer check */
1967 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1969 var = VAROP(iptr->dst);
1971 assert(var->flags & INMEMORY);
1972 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]),
1973 s1, s2, 3, REG_ITMP3);
1974 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff);
1975 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]) + 4,
1976 s1, s2, 3, REG_ITMP3);
1977 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff + 4);
1980 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1982 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1983 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1984 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1985 /* implicit null-pointer check */
1986 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1987 emit_flds_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2, 2);
1988 emit_store_dst(jd, iptr, d);
1991 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
1993 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1994 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1995 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1996 /* implicit null-pointer check */
1997 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1998 emit_fldl_memindex(cd, OFFSET(java_doublearray_t, data[0]), s1, s2,3);
1999 emit_store_dst(jd, iptr, d);
2002 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2004 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2005 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2006 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2007 /* implicit null-pointer check */
2008 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2009 emit_mov_memindex_reg(cd, OFFSET(java_objectarray_t, data[0]),
2011 emit_store_dst(jd, iptr, d);
2015 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2017 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2018 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2019 /* implicit null-pointer check */
2020 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2021 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2023 /* because EBP, ESI, EDI have no xH and xL nibbles */
2024 M_INTMOVE(s3, REG_ITMP3);
2027 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray_t, data[0]),
2031 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2033 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2034 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2035 /* implicit null-pointer check */
2036 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2037 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2038 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray_t, data[0]),
2042 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2044 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2045 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2046 /* implicit null-pointer check */
2047 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2048 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2049 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray_t, data[0]),
2053 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2055 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2056 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2057 /* implicit null-pointer check */
2058 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2059 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2060 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray_t, data[0]),
2064 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2066 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2067 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2068 /* implicit null-pointer check */
2069 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2071 var = VAROP(iptr->sx.s23.s3);
2073 assert(var->flags & INMEMORY);
2074 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff, REG_ITMP3);
2075 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray_t, data[0])
2077 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff + 4, REG_ITMP3);
2078 emit_mov_reg_memindex(cd, REG_ITMP3,
2079 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2082 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2084 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2085 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2086 /* implicit null-pointer check */
2087 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2088 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2089 emit_fstps_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2,2);
2092 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2094 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2095 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2096 /* implicit null-pointer check */
2097 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2098 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2099 emit_fstpl_memindex(cd, OFFSET(java_doublearray_t, data[0]),
2103 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2105 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2106 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2107 /* implicit null-pointer check */
2108 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2109 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2111 M_AST(s1, REG_SP, 0 * 4);
2112 M_AST(s3, REG_SP, 1 * 4);
2113 M_MOV_IMM(BUILTIN_FAST_canstore, REG_ITMP1);
2115 emit_arraystore_check(cd, iptr);
2117 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2118 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2119 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2120 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray_t, data[0]),
2124 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2126 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2127 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2128 /* implicit null-pointer check */
2129 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2130 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2131 OFFSET(java_bytearray_t, data[0]), s1, s2, 0);
2134 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2136 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2137 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2138 /* implicit null-pointer check */
2139 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2140 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2141 OFFSET(java_chararray_t, data[0]), s1, s2, 1);
2144 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2146 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2147 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2148 /* implicit null-pointer check */
2149 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2150 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2151 OFFSET(java_shortarray_t, data[0]), s1, s2, 1);
2154 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2156 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2157 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2158 /* implicit null-pointer check */
2159 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2160 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2161 OFFSET(java_intarray_t, data[0]), s1, s2, 2);
2164 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2166 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2167 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2168 /* implicit null-pointer check */
2169 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2170 emit_mov_imm_memindex(cd,
2171 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2172 OFFSET(java_longarray_t, data[0]), s1, s2, 3);
2173 emit_mov_imm_memindex(cd,
2174 ((s4)iptr->sx.s23.s3.constval) >> 31,
2175 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2178 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2180 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2181 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2182 /* implicit null-pointer check */
2183 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2184 emit_mov_imm_memindex(cd, 0,
2185 OFFSET(java_objectarray_t, data[0]), s1, s2, 2);
2189 case ICMD_GETSTATIC: /* ... ==> ..., value */
2191 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2192 uf = iptr->sx.s23.s3.uf;
2193 fieldtype = uf->fieldref->parseddesc.fd->type;
2196 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2200 fi = iptr->sx.s23.s3.fmiref->p.field;
2201 fieldtype = fi->type;
2202 disp = (intptr_t) fi->value;
2204 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2205 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2208 M_MOV_IMM(disp, REG_ITMP1);
2209 switch (fieldtype) {
2212 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2213 M_ILD(d, REG_ITMP1, 0);
2216 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2217 M_LLD(d, REG_ITMP1, 0);
2220 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2221 M_FLD(d, REG_ITMP1, 0);
2224 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2225 M_DLD(d, REG_ITMP1, 0);
2228 emit_store_dst(jd, iptr, d);
2231 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2233 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2234 uf = iptr->sx.s23.s3.uf;
2235 fieldtype = uf->fieldref->parseddesc.fd->type;
2238 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2241 fi = iptr->sx.s23.s3.fmiref->p.field;
2242 fieldtype = fi->type;
2243 disp = (intptr_t) fi->value;
2245 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2246 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2249 M_MOV_IMM(disp, REG_ITMP1);
2250 switch (fieldtype) {
2253 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2254 M_IST(s1, REG_ITMP1, 0);
2257 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2258 M_LST(s1, REG_ITMP1, 0);
2261 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2262 emit_fstps_membase(cd, REG_ITMP1, 0);
2265 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2266 emit_fstpl_membase(cd, REG_ITMP1, 0);
2271 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2272 /* val = value (in current instruction) */
2273 /* following NOP) */
2275 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2276 uf = iptr->sx.s23.s3.uf;
2277 fieldtype = uf->fieldref->parseddesc.fd->type;
2280 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2283 fi = iptr->sx.s23.s3.fmiref->p.field;
2284 fieldtype = fi->type;
2285 disp = (intptr_t) fi->value;
2287 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2288 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2291 M_MOV_IMM(disp, REG_ITMP1);
2292 switch (fieldtype) {
2295 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
2298 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2299 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
2306 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2308 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2309 emit_nullpointer_check(cd, iptr, s1);
2311 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2312 uf = iptr->sx.s23.s3.uf;
2313 fieldtype = uf->fieldref->parseddesc.fd->type;
2316 codegen_addpatchref(cd, PATCHER_getfield,
2317 iptr->sx.s23.s3.uf, 0);
2320 fi = iptr->sx.s23.s3.fmiref->p.field;
2321 fieldtype = fi->type;
2325 switch (fieldtype) {
2328 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2329 M_ILD32(d, s1, disp);
2332 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2333 M_LLD32(d, s1, disp);
2336 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2337 M_FLD32(d, s1, disp);
2340 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2341 M_DLD32(d, s1, disp);
2344 emit_store_dst(jd, iptr, d);
2347 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2349 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2350 emit_nullpointer_check(cd, iptr, s1);
2352 /* must be done here because of code patching */
2354 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2355 uf = iptr->sx.s23.s3.uf;
2356 fieldtype = uf->fieldref->parseddesc.fd->type;
2359 fi = iptr->sx.s23.s3.fmiref->p.field;
2360 fieldtype = fi->type;
2363 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2364 if (IS_2_WORD_TYPE(fieldtype))
2365 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2367 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2370 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2372 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2374 uf = iptr->sx.s23.s3.uf;
2377 codegen_addpatchref(cd, PATCHER_putfield, uf, 0);
2381 fi = iptr->sx.s23.s3.fmiref->p.field;
2385 switch (fieldtype) {
2388 M_IST32(s2, s1, disp);
2391 M_LST32(s2, s1, disp);
2394 emit_fstps_membase32(cd, s1, disp);
2397 emit_fstpl_membase32(cd, s1, disp);
2402 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2403 /* val = value (in current instruction) */
2404 /* following NOP) */
2406 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2407 emit_nullpointer_check(cd, iptr, s1);
2409 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2410 uf = iptr->sx.s23.s3.uf;
2411 fieldtype = uf->fieldref->parseddesc.fd->type;
2414 codegen_addpatchref(cd, PATCHER_putfieldconst,
2418 fi = iptr->sx.s23.s3.fmiref->p.field;
2419 fieldtype = fi->type;
2423 switch (fieldtype) {
2426 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2429 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2430 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2438 /* branch operations **************************************************/
2440 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2442 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2443 M_INTMOVE(s1, REG_ITMP1_XPTR);
2445 #ifdef ENABLE_VERIFIER
2446 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2447 codegen_addpatchref(cd, PATCHER_athrow_areturn,
2448 iptr->sx.s23.s2.uc, 0);
2450 #endif /* ENABLE_VERIFIER */
2452 M_CALL_IMM(0); /* passing exception pc */
2453 M_POP(REG_ITMP2_XPC);
2455 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2459 case ICMD_GOTO: /* ... ==> ... */
2460 case ICMD_RET: /* ... ==> ... */
2462 #if defined(ENABLE_SSA)
2464 last_cmd_was_goto = true;
2466 /* In case of a Goto phimoves have to be inserted before the */
2469 codegen_emit_phi_moves(jd, bptr);
2472 emit_br(cd, iptr->dst.block);
2476 case ICMD_JSR: /* ... ==> ... */
2478 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
2482 case ICMD_IFNULL: /* ..., value ==> ... */
2483 case ICMD_IFNONNULL:
2485 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2487 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
2490 case ICMD_IFEQ: /* ..., value ==> ... */
2497 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2498 M_CMP_IMM(iptr->sx.val.i, s1);
2499 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
2502 case ICMD_IF_LEQ: /* ..., value ==> ... */
2504 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2505 if (iptr->sx.val.l == 0) {
2506 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2507 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2510 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2511 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2512 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2513 M_OR(REG_ITMP2, REG_ITMP1);
2515 emit_beq(cd, iptr->dst.block);
2518 case ICMD_IF_LLT: /* ..., value ==> ... */
2520 if (iptr->sx.val.l == 0) {
2521 /* If high 32-bit are less than zero, then the 64-bits
2523 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2525 emit_blt(cd, iptr->dst.block);
2528 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2529 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2530 emit_blt(cd, iptr->dst.block);
2532 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2533 emit_bult(cd, iptr->dst.block);
2537 case ICMD_IF_LLE: /* ..., value ==> ... */
2539 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2540 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2541 emit_blt(cd, iptr->dst.block);
2543 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2544 emit_bule(cd, iptr->dst.block);
2547 case ICMD_IF_LNE: /* ..., value ==> ... */
2549 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2550 if (iptr->sx.val.l == 0) {
2551 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2552 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2555 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2556 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2557 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2558 M_OR(REG_ITMP2, REG_ITMP1);
2560 emit_bne(cd, iptr->dst.block);
2563 case ICMD_IF_LGT: /* ..., value ==> ... */
2565 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2566 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2567 emit_bgt(cd, iptr->dst.block);
2569 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2570 emit_bugt(cd, iptr->dst.block);
2573 case ICMD_IF_LGE: /* ..., value ==> ... */
2575 if (iptr->sx.val.l == 0) {
2576 /* If high 32-bit are greater equal zero, then the
2578 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2580 emit_bge(cd, iptr->dst.block);
2583 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2584 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2585 emit_bgt(cd, iptr->dst.block);
2587 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2588 emit_buge(cd, iptr->dst.block);
2592 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2593 case ICMD_IF_ICMPNE:
2594 case ICMD_IF_ICMPLT:
2595 case ICMD_IF_ICMPGT:
2596 case ICMD_IF_ICMPGE:
2597 case ICMD_IF_ICMPLE:
2599 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2600 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2602 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);
2605 case ICMD_IF_ACMPEQ: /* ..., value, value ==> ... */
2606 case ICMD_IF_ACMPNE:
2608 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2609 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2611 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);
2614 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2616 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2617 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2618 M_INTMOVE(s1, REG_ITMP1);
2619 M_XOR(s2, REG_ITMP1);
2620 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2621 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2622 M_INTMOVE(s1, REG_ITMP2);
2623 M_XOR(s2, REG_ITMP2);
2624 M_OR(REG_ITMP1, REG_ITMP2);
2625 emit_beq(cd, iptr->dst.block);
2628 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2630 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2631 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2632 M_INTMOVE(s1, REG_ITMP1);
2633 M_XOR(s2, REG_ITMP1);
2634 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2635 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2636 M_INTMOVE(s1, REG_ITMP2);
2637 M_XOR(s2, REG_ITMP2);
2638 M_OR(REG_ITMP1, REG_ITMP2);
2639 emit_bne(cd, iptr->dst.block);
2642 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2644 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2645 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2647 emit_blt(cd, iptr->dst.block);
2648 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2649 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2652 emit_bult(cd, iptr->dst.block);
2655 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2657 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2658 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2660 emit_bgt(cd, iptr->dst.block);
2661 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2662 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2665 emit_bugt(cd, iptr->dst.block);
2668 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2670 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2671 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2673 emit_blt(cd, iptr->dst.block);
2674 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2675 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2678 emit_bule(cd, iptr->dst.block);
2681 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2683 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2684 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2686 emit_bgt(cd, iptr->dst.block);
2687 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2688 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2691 emit_buge(cd, iptr->dst.block);
2695 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2697 REPLACEMENT_POINT_RETURN(cd, iptr);
2698 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2699 M_INTMOVE(s1, REG_RESULT);
2700 goto nowperformreturn;
2702 case ICMD_LRETURN: /* ..., retvalue ==> ... */
2704 REPLACEMENT_POINT_RETURN(cd, iptr);
2705 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2706 M_LNGMOVE(s1, REG_RESULT_PACKED);
2707 goto nowperformreturn;
2709 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2711 REPLACEMENT_POINT_RETURN(cd, iptr);
2712 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2713 M_INTMOVE(s1, REG_RESULT);
2715 #ifdef ENABLE_VERIFIER
2716 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2717 codegen_addpatchref(cd, PATCHER_athrow_areturn,
2718 iptr->sx.s23.s2.uc, 0);
2720 #endif /* ENABLE_VERIFIER */
2721 goto nowperformreturn;
2723 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2726 REPLACEMENT_POINT_RETURN(cd, iptr);
2727 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2728 goto nowperformreturn;
2730 case ICMD_RETURN: /* ... ==> ... */
2732 REPLACEMENT_POINT_RETURN(cd, iptr);
2738 p = cd->stackframesize;
2740 #if !defined(NDEBUG)
2741 emit_verbosecall_exit(jd);
2744 #if defined(ENABLE_THREADS)
2745 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2746 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 8);
2748 /* we need to save the proper return value */
2749 switch (iptr->opc) {
2752 M_IST(REG_RESULT, REG_SP, rd->memuse * 8);
2756 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2760 emit_fstps_membase(cd, REG_SP, rd->memuse * 8);
2764 emit_fstpl_membase(cd, REG_SP, rd->memuse * 8);
2768 M_AST(REG_ITMP2, REG_SP, 0);
2769 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2772 /* and now restore the proper return value */
2773 switch (iptr->opc) {
2776 M_ILD(REG_RESULT, REG_SP, rd->memuse * 8);
2780 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2784 emit_flds_membase(cd, REG_SP, rd->memuse * 8);
2788 emit_fldl_membase(cd, REG_SP, rd->memuse * 8);
2794 /* restore saved registers */
2796 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2797 p--; M_ALD(rd->savintregs[i], REG_SP, p * 8);
2800 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2802 emit_fldl_membase(cd, REG_SP, p * 8);
2803 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2805 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2808 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2812 /* deallocate stack */
2814 if (cd->stackframesize)
2815 M_AADD_IMM(cd->stackframesize * 8, REG_SP);
2822 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2825 branch_target_t *table;
2827 table = iptr->dst.table;
2829 l = iptr->sx.s23.s2.tablelow;
2830 i = iptr->sx.s23.s3.tablehigh;
2832 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2833 M_INTMOVE(s1, REG_ITMP1);
2836 M_ISUB_IMM(l, REG_ITMP1);
2842 M_CMP_IMM(i - 1, REG_ITMP1);
2843 emit_bugt(cd, table[0].block);
2845 /* build jump table top down and use address of lowest entry */
2850 dseg_add_target(cd, table->block);
2854 /* length of dataseg after last dseg_addtarget is used
2857 M_MOV_IMM(0, REG_ITMP2);
2859 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
2865 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2868 lookup_target_t *lookup;
2870 lookup = iptr->dst.lookup;
2872 i = iptr->sx.s23.s2.lookupcount;
2874 MCODECHECK((i<<2)+8);
2875 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2878 M_CMP_IMM(lookup->value, s1);
2879 emit_beq(cd, lookup->target.block);
2883 emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
2888 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
2890 REPLACEMENT_POINT_FORGC_BUILTIN(cd, iptr);
2892 bte = iptr->sx.s23.s3.bte;
2896 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2898 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2899 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
2900 case ICMD_INVOKEINTERFACE:
2902 REPLACEMENT_POINT_INVOKE(cd, iptr);
2904 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2905 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
2909 lm = iptr->sx.s23.s3.fmiref->p.method;
2910 md = lm->parseddesc;
2914 s3 = md->paramcount;
2916 MCODECHECK((s3 << 1) + 64);
2918 /* copy arguments to registers or stack location */
2920 for (s3 = s3 - 1; s3 >= 0; s3--) {
2921 var = VAR(iptr->sx.s23.s2.args[s3]);
2923 /* Already Preallocated (ARGVAR) ? */
2924 if (var->flags & PREALLOC)
2926 if (IS_INT_LNG_TYPE(var->type)) {
2927 if (!md->params[s3].inmemory) {
2928 log_text("No integer argument registers available!");
2932 if (IS_2_WORD_TYPE(var->type)) {
2933 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
2934 M_LST(d, REG_SP, md->params[s3].regoff);
2936 d = emit_load(jd, iptr, var, REG_ITMP1);
2937 M_IST(d, REG_SP, md->params[s3].regoff);
2942 if (!md->params[s3].inmemory) {
2943 s1 = md->params[s3].regoff;
2944 d = emit_load(jd, iptr, var, s1);
2948 d = emit_load(jd, iptr, var, REG_FTMP1);
2949 if (IS_2_WORD_TYPE(var->type))
2950 M_DST(d, REG_SP, md->params[s3].regoff);
2952 M_FST(d, REG_SP, md->params[s3].regoff);
2957 switch (iptr->opc) {
2959 d = md->returntype.type;
2961 if (bte->stub == NULL) {
2962 M_MOV_IMM(bte->fp, REG_ITMP1);
2964 M_MOV_IMM(bte->stub, REG_ITMP1);
2968 emit_exception_check(cd, iptr);
2971 case ICMD_INVOKESPECIAL:
2972 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
2973 emit_nullpointer_check(cd, iptr, REG_ITMP1);
2976 case ICMD_INVOKESTATIC:
2978 unresolved_method *um = iptr->sx.s23.s3.um;
2980 codegen_addpatchref(cd, PATCHER_invokestatic_special,
2984 d = md->returntype.type;
2987 disp = (ptrint) lm->stubroutine;
2988 d = lm->parseddesc->returntype.type;
2991 M_MOV_IMM(disp, REG_ITMP2);
2995 case ICMD_INVOKEVIRTUAL:
2996 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
2997 emit_nullpointer_check(cd, iptr, s1);
3000 unresolved_method *um = iptr->sx.s23.s3.um;
3002 codegen_addpatchref(cd, PATCHER_invokevirtual, um, 0);
3005 d = md->returntype.type;
3008 s1 = OFFSET(vftbl_t, table[0]) +
3009 sizeof(methodptr) * lm->vftblindex;
3010 d = md->returntype.type;
3013 M_ALD(REG_METHODPTR, REG_ITMP1,
3014 OFFSET(java_object_t, vftbl));
3015 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3019 case ICMD_INVOKEINTERFACE:
3020 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3021 emit_nullpointer_check(cd, iptr, s1);
3024 unresolved_method *um = iptr->sx.s23.s3.um;
3026 codegen_addpatchref(cd, PATCHER_invokeinterface, um, 0);
3030 d = md->returntype.type;
3033 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3034 sizeof(methodptr) * lm->class->index;
3036 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3038 d = md->returntype.type;
3041 M_ALD(REG_METHODPTR, REG_ITMP1,
3042 OFFSET(java_object_t, vftbl));
3043 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3044 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3049 /* store size of call code in replacement point */
3051 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3052 REPLACEMENT_POINT_FORGC_BUILTIN_RETURN(cd, iptr);
3054 /* d contains return type */
3056 if (d != TYPE_VOID) {
3057 #if defined(ENABLE_SSA)
3058 if ((ls == NULL) /* || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) */ ||
3059 (ls->lifetime[iptr->dst.varindex].type != UNUSED))
3060 /* a "living" stackslot */
3063 if (IS_INT_LNG_TYPE(d)) {
3064 if (IS_2_WORD_TYPE(d)) {
3065 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3066 M_LNGMOVE(REG_RESULT_PACKED, s1);
3069 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3070 M_INTMOVE(REG_RESULT, s1);
3074 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3076 emit_store_dst(jd, iptr, s1);
3082 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3084 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3085 /* object type cast-check */
3088 vftbl_t *supervftbl;
3091 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3097 super = iptr->sx.s23.s3.c.cls;
3098 superindex = super->index;
3099 supervftbl = super->vftbl;
3102 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3103 CODEGEN_CRITICAL_SECTION_NEW;
3105 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3107 /* if class is not resolved, check which code to call */
3109 if (super == NULL) {
3111 emit_label_beq(cd, BRANCH_LABEL_1);
3113 codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
3114 iptr->sx.s23.s3.c.ref, 0);
3116 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3117 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3118 emit_label_beq(cd, BRANCH_LABEL_2);
3121 /* interface checkcast code */
3123 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3124 if (super != NULL) {
3126 emit_label_beq(cd, BRANCH_LABEL_3);
3129 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3131 if (super == NULL) {
3132 codegen_addpatchref(cd, PATCHER_checkcast_interface,
3133 iptr->sx.s23.s3.c.ref,
3138 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3139 M_ISUB_IMM32(superindex, REG_ITMP3);
3140 /* XXX do we need this one? */
3142 emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);
3144 M_ALD32(REG_ITMP3, REG_ITMP2,
3145 OFFSET(vftbl_t, interfacetable[0]) -
3146 superindex * sizeof(methodptr*));
3148 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);
3151 emit_label_br(cd, BRANCH_LABEL_4);
3153 emit_label(cd, BRANCH_LABEL_3);
3156 /* class checkcast code */
3158 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3159 if (super == NULL) {
3160 emit_label(cd, BRANCH_LABEL_2);
3164 emit_label_beq(cd, BRANCH_LABEL_5);
3167 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3169 if (super == NULL) {
3170 codegen_addpatchref(cd, PATCHER_checkcast_class,
3171 iptr->sx.s23.s3.c.ref,
3175 M_MOV_IMM(supervftbl, REG_ITMP3);
3177 CODEGEN_CRITICAL_SECTION_START;
3179 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3181 /* if (s1 != REG_ITMP1) { */
3182 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3183 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3184 /* #if defined(ENABLE_THREADS) */
3185 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3187 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
3190 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3191 M_ISUB(REG_ITMP3, REG_ITMP2);
3192 M_MOV_IMM(supervftbl, REG_ITMP3);
3193 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3195 CODEGEN_CRITICAL_SECTION_END;
3199 M_CMP(REG_ITMP3, REG_ITMP2);
3200 emit_classcast_check(cd, iptr, BRANCH_ULE, REG_ITMP3, s1);
3203 emit_label(cd, BRANCH_LABEL_5);
3206 if (super == NULL) {
3207 emit_label(cd, BRANCH_LABEL_1);
3208 emit_label(cd, BRANCH_LABEL_4);
3211 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3214 /* array type cast-check */
3216 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3217 M_AST(s1, REG_SP, 0 * 4);
3219 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3220 codegen_addpatchref(cd, PATCHER_builtin_arraycheckcast,
3221 iptr->sx.s23.s3.c.ref, 0);
3224 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3225 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3228 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3230 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);
3232 d = codegen_reg_of_dst(jd, iptr, s1);
3236 emit_store_dst(jd, iptr, d);
3239 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3243 vftbl_t *supervftbl;
3246 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3252 super = iptr->sx.s23.s3.c.cls;
3253 superindex = super->index;
3254 supervftbl = super->vftbl;
3257 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3258 CODEGEN_CRITICAL_SECTION_NEW;
3260 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3261 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
3264 M_INTMOVE(s1, REG_ITMP1);
3270 /* if class is not resolved, check which code to call */
3272 if (super == NULL) {
3274 emit_label_beq(cd, BRANCH_LABEL_1);
3276 codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
3277 iptr->sx.s23.s3.c.ref, 0);
3279 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3280 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3281 emit_label_beq(cd, BRANCH_LABEL_2);
3284 /* interface instanceof code */
3286 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3287 if (super != NULL) {
3289 emit_label_beq(cd, BRANCH_LABEL_3);
3292 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3294 if (super == NULL) {
3295 codegen_addpatchref(cd, PATCHER_instanceof_interface,
3296 iptr->sx.s23.s3.c.ref, 0);
3300 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3301 M_ISUB_IMM32(superindex, REG_ITMP3);
3304 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3305 6 /* jcc */ + 5 /* mov_imm_reg */);
3308 M_ALD32(REG_ITMP1, REG_ITMP1,
3309 OFFSET(vftbl_t, interfacetable[0]) -
3310 superindex * sizeof(methodptr*));
3312 /* emit_setcc_reg(cd, CC_A, d); */
3313 /* emit_jcc(cd, CC_BE, 5); */
3318 emit_label_br(cd, BRANCH_LABEL_4);
3320 emit_label(cd, BRANCH_LABEL_3);
3323 /* class instanceof code */
3325 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3326 if (super == NULL) {
3327 emit_label(cd, BRANCH_LABEL_2);
3331 emit_label_beq(cd, BRANCH_LABEL_5);
3334 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3336 if (super == NULL) {
3337 codegen_addpatchref(cd, PATCHER_instanceof_class,
3338 iptr->sx.s23.s3.c.ref, 0);
3341 M_MOV_IMM(supervftbl, REG_ITMP2);
3343 CODEGEN_CRITICAL_SECTION_START;
3345 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3346 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3347 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3349 CODEGEN_CRITICAL_SECTION_END;
3351 M_ISUB(REG_ITMP2, REG_ITMP1);
3352 M_CLR(d); /* may be REG_ITMP2 */
3353 M_CMP(REG_ITMP3, REG_ITMP1);
3358 emit_label(cd, BRANCH_LABEL_5);
3361 if (super == NULL) {
3362 emit_label(cd, BRANCH_LABEL_1);
3363 emit_label(cd, BRANCH_LABEL_4);
3366 emit_store_dst(jd, iptr, d);
3370 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3372 /* check for negative sizes and copy sizes to stack if necessary */
3374 MCODECHECK((iptr->s1.argcount << 1) + 64);
3376 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3377 /* copy SAVEDVAR sizes to stack */
3378 var = VAR(iptr->sx.s23.s2.args[s1]);
3380 /* Already Preallocated? */
3381 if (!(var->flags & PREALLOC)) {
3382 if (var->flags & INMEMORY) {
3383 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff);
3384 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3387 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3391 /* is a patcher function set? */
3393 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3394 codegen_addpatchref(cd, PATCHER_builtin_multianewarray,
3395 iptr->sx.s23.s3.c.ref, 0);
3401 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3403 /* a0 = dimension count */
3405 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3407 /* a1 = arraydescriptor */
3409 M_IST_IMM(disp, REG_SP, 1 * 4);
3411 /* a2 = pointer to dimensions = stack pointer */
3413 M_MOV(REG_SP, REG_ITMP1);
3414 M_AADD_IMM(3 * 4, REG_ITMP1);
3415 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3417 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3420 /* check for exception before result assignment */
3422 emit_exception_check(cd, iptr);
3424 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3425 M_INTMOVE(REG_RESULT, s1);
3426 emit_store_dst(jd, iptr, s1);
3430 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3435 } /* for instruction */
3439 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3442 #if defined(ENABLE_SSA)
3445 /* by edge splitting, in Blocks with phi moves there can only */
3446 /* be a goto as last command, no other Jump/Branch Command */
3448 if (!last_cmd_was_goto)
3449 codegen_emit_phi_moves(jd, bptr);
3454 /* At the end of a basic block we may have to append some nops,
3455 because the patcher stub calling code might be longer than the
3456 actual instruction. So codepatching does not change the
3457 following block unintentionally. */
3459 if (cd->mcodeptr < cd->lastmcodeptr) {
3460 while (cd->mcodeptr < cd->lastmcodeptr) {
3465 } /* if (bptr -> flags >= BBREACHED) */
3466 } /* for basic block */
3468 dseg_createlinenumbertable(cd);
3470 /* generate stubs */
3472 emit_patcher_stubs(jd);
3474 /* everything's ok */
3479 /* codegen_emit_stub_compiler **************************************************
3481 Emit a stub routine which calls the compiler.
3483 *******************************************************************************/
3485 void codegen_emit_stub_compiler(jitdata *jd)
/* Emits the tiny trampoline executed on the first call of a not yet
   compiled method: it loads two immediates and then transfers control
   to the assembler part which invokes the JIT compiler.
   NOTE(review): 'm' is presumably the methodinfo taken from jd in the
   elided declarations above — confirm; the final jump through REG_ITMP3
   is also elided in this excerpt. */
3490 	/* get required compiler data */
3495 	/* code for the stub */
/* asm_call_jit_compiler expects the method pointer in REG_ITMP1 —
   TODO(review): confirm against asmpart.h */
3497 	M_MOV_IMM(m, REG_ITMP1);
3498 	M_MOV_IMM(asm_call_jit_compiler, REG_ITMP3);
3503 /* codegen_emit_stub_builtin ***************************************************
3505 Creates a stub routine which calls a builtin function.
3507 *******************************************************************************/
3509 void codegen_emit_stub_builtin(jitdata *jd, builtintable_entry *bte)
/* Emits a stub that calls the builtin function bte->fp with a proper
   native stack frame:
     1. allocate the frame and create a stackframeinfo via
        codegen_stub_builtin_enter,
     2. copy the incoming stack arguments into the new frame,
     3. call the builtin,
     4. save the return value, tear the stackframeinfo down via
        codegen_stub_builtin_exit, restore the return value,
     5. deallocate the frame.
   NOTE(review): cd->stackframesize is counted in 4-byte slots here (the
   FrameSize dseg entry and all SP adjustments multiply by 4) — unlike
   codegen_emit_stub_native below, which uses 8-byte slots.  Several
   closing braces and the final return are elided in this excerpt. */
3518 	/* get required compiler data */
3523 	/* set some variables */
3527 	/* calculate stack frame size */
3529 	cd->stackframesize =
3530 		sizeof(stackframeinfo) / SIZEOF_VOID_P +
3531 		4; /* 4 arguments or return value */
3533 	cd->stackframesize |= 0x3; /* keep stack 16-byte aligned */
3535 	/* create method header */
3537 	(void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3538 	(void) dseg_add_unique_s4(cd, cd->stackframesize * 4); /* FrameSize */
3539 	(void) dseg_add_unique_s4(cd, 0); /* IsSync */
3540 	(void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3541 	(void) dseg_add_unique_s4(cd, 0); /* IntSave */
3542 	(void) dseg_add_unique_s4(cd, 0); /* FltSave */
3543 	(void) dseg_addlinenumbertablesize(cd);
3544 	(void) dseg_add_unique_s4(cd, 0); /* ExTableSize */
3546 	/* generate stub code */
3548 	M_ASUB_IMM(cd->stackframesize * 4, REG_SP);
3550 #if defined(ENABLE_GC_CACAO)
3551 	/* Save callee saved integer registers in stackframeinfo (GC may
3552 	   need to recover them during a collection). */
3554 	disp = cd->stackframesize * 4 - sizeof(stackframeinfo) +
3555 		OFFSET(stackframeinfo, intregs);
3557 	for (i = 0; i < INT_SAV_CNT; i++)
3558 		M_AST(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3561 	/* create dynamic stack info */
/* argument 0: stack pointer value *before* this frame was pushed */
3563 	M_MOV(REG_SP, REG_ITMP1);
3564 	M_AADD_IMM(cd->stackframesize * 4, REG_ITMP1);
3565 	M_AST(REG_ITMP1, REG_SP, 0 * 4);
3567 	M_IST_IMM(0, REG_SP, 1 * 4);
/* argument 2: address of the caller's first stack argument */
3570 	M_MOV(REG_SP, REG_ITMP2);
3571 	M_AADD_IMM(cd->stackframesize * 4 + SIZEOF_VOID_P, REG_ITMP2);
3572 	M_AST(REG_ITMP2, REG_SP, 2 * 4);
/* argument 3: word just above our frame — presumably the return
   address pushed by the caller's call instruction; TODO confirm */
3574 	M_ALD(REG_ITMP3, REG_SP, cd->stackframesize * 4);
3575 	M_AST(REG_ITMP3, REG_SP, 3 * 4);
3577 	M_MOV_IMM(codegen_stub_builtin_enter, REG_ITMP1);
3580 	/* copy arguments into new stackframe */
/* On i386 every Java argument lives on the stack; a register-allocated
   argument here would be a register-allocator bug. */
3582 	for (i = 0; i < md->paramcount; i++) {
3583 		if (!md->params[i].inmemory) {
3584 			log_text("No integer argument registers available!");
3587 		} else {        /* float/double in memory can be copied like int/longs */
/* +4 skips the return address between the caller's arguments and our frame */
3588 			s1 = md->params[i].regoff + cd->stackframesize * 4 + 4;
3589 			s2 = md->params[i].regoff;
3591 			M_ILD(REG_ITMP1, REG_SP, s1);
3592 			M_IST(REG_ITMP1, REG_SP, s2);
/* two-word types (long/double) need the second 32-bit half as well */
3593 			if (IS_2_WORD_TYPE(md->paramtypes[i].type)) {
3594 				M_ILD(REG_ITMP1, REG_SP, s1 + 4);
3595 				M_IST(REG_ITMP1, REG_SP, s2 + 4);
3601 	/* call the builtin function */
3603 	M_MOV_IMM(bte->fp, REG_ITMP3);
3606 	/* save return value */
/* The value is parked in the frame because the following call to
   codegen_stub_builtin_exit would clobber the result registers / st(0). */
3608 	if (md->returntype.type != TYPE_VOID) {
3609 		if (IS_INT_LNG_TYPE(md->returntype.type)) {
3610 			if (IS_2_WORD_TYPE(md->returntype.type))
3611 				M_IST(REG_RESULT2, REG_SP, 2 * 4);
3612 			M_IST(REG_RESULT, REG_SP, 1 * 4);
/* FP results come back in x87 st(0): fstp stores and pops the register */
3615 			if (IS_2_WORD_TYPE(md->returntype.type))
3616 				emit_fstl_membase(cd, REG_SP, 1 * 4);
3618 				emit_fsts_membase(cd, REG_SP, 1 * 4);
3622 	/* remove native stackframe info */
3624 	M_MOV(REG_SP, REG_ITMP1);
3625 	M_AADD_IMM(cd->stackframesize * 4, REG_ITMP1);
3626 	M_AST(REG_ITMP1, REG_SP, 0 * 4);
3628 	M_MOV_IMM(codegen_stub_builtin_exit, REG_ITMP1);
3631 	/* restore return value */
3633 	if (md->returntype.type != TYPE_VOID) {
3634 		if (IS_INT_LNG_TYPE(md->returntype.type)) {
3635 			if (IS_2_WORD_TYPE(md->returntype.type))
3636 				M_ILD(REG_RESULT2, REG_SP, 2 * 4);
3637 			M_ILD(REG_RESULT, REG_SP, 1 * 4);
/* reload the FP result back onto the x87 stack for the caller */
3640 			if (IS_2_WORD_TYPE(md->returntype.type))
3641 				emit_fldl_membase(cd, REG_SP, 1 * 4);
3643 				emit_flds_membase(cd, REG_SP, 1 * 4);
3647 #if defined(ENABLE_GC_CACAO)
3648 	/* Restore callee saved integer registers from stackframeinfo (GC
3649 	   might have modified them during a collection). */
3651 	disp = cd->stackframesize * 4 - sizeof(stackframeinfo) +
3652 		OFFSET(stackframeinfo, intregs);
3654 	for (i = 0; i < INT_SAV_CNT; i++)
3655 		M_ALD(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3658 	/* remove stackframe */
3660 	M_AADD_IMM(cd->stackframesize * 4, REG_SP);
3665 /* codegen_emit_stub_native ****************************************************
3667 Emits a stub routine which calls a native method.
3669 *******************************************************************************/
3671 void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f)
/* Emits the stub that bridges from JIT-compiled code into the native
   (JNI) function f:
     - allocate a frame big enough for a stackframeinfo, a localref
       table, the function pointer slot and the call arguments,
     - resolve/patch the native function address,
     - free the whole x87 register stack (native code expects it empty),
     - call codegen_start_native_call, copy the Java arguments into the
       JNI argument area (prepending env and, for static methods, the
       class), call the native function,
     - save the result, call codegen_finish_native_call, restore the
       result, and branch to asm_handle_nat_exception if an exception
       is pending.
   nmd describes the native (JNI) signature, i.e. md shifted by the
   implicit env/class parameters.
   NOTE(review): cd->stackframesize is counted in 8-byte slots here
   (FrameSize and SP adjustments multiply by 8).  Closing braces,
   switch case labels and the final return are elided in this excerpt. */
3678 	s4           i, j;                 /* count variables                    */
3683 	/* get required compiler data */
3689 	/* set some variables */
/* static natives receive (env, class, ...), instance natives (env, this, ...) */
3692 	nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;
3694 	/* calculate stackframe size */
3696 	cd->stackframesize =
3697 		sizeof(stackframeinfo) / SIZEOF_VOID_P +
3698 		sizeof(localref_table) / SIZEOF_VOID_P +
3699 		1 +                             /* function pointer                   */
3700 		4 +                             /* 4 arguments (start_native_call)    */
3703 	/* keep stack 16-byte aligned */
3705 	ALIGN_ODD(cd->stackframesize);      /* XXX this is wrong, +4 is missing */
3707 	/* create method header */
3709 	(void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3710 	(void) dseg_add_unique_s4(cd, cd->stackframesize * 8); /* FrameSize */
3711 	(void) dseg_add_unique_s4(cd, 0); /* IsSync */
3712 	(void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3713 	(void) dseg_add_unique_s4(cd, 0); /* IntSave */
3714 	(void) dseg_add_unique_s4(cd, 0); /* FltSave */
3715 	(void) dseg_addlinenumbertablesize(cd);
3716 	(void) dseg_add_unique_s4(cd, 0); /* ExTableSize */
3718 #if defined(ENABLE_PROFILING)
3719 	/* generate native method profiling code */
3721 	if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
3722 		/* count frequency */
3724 		M_MOV_IMM(code, REG_ITMP1);
3725 		M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
3729 	/* calculate stackframe size for native function */
3731 	M_ASUB_IMM(cd->stackframesize * 8, REG_SP);
3733 #if !defined(NDEBUG)
3734 	emit_verbosecall_enter(jd);
3737 	/* get function address (this must happen before the stackframeinfo) */
3739 #if !defined(WITH_STATIC_CLASSPATH)
/* with dynamic classpath the address is filled in lazily by the patcher */
3741 		codegen_addpatchref(cd, PATCHER_resolve_native, m, 0);
/* stash the (possibly still unresolved) function pointer in its frame slot */
3744 	M_AST_IMM((ptrint) f, REG_SP, 4 * 4);
3746 	/* Mark the whole fpu stack as free for native functions (only for saved */
3747 	/* register count == 0). */
3749 	emit_ffree_reg(cd, 0);
3750 	emit_ffree_reg(cd, 1);
3751 	emit_ffree_reg(cd, 2);
3752 	emit_ffree_reg(cd, 3);
3753 	emit_ffree_reg(cd, 4);
3754 	emit_ffree_reg(cd, 5);
3755 	emit_ffree_reg(cd, 6);
3756 	emit_ffree_reg(cd, 7);
3758 #if defined(ENABLE_GC_CACAO)
3759 	/* remember callee saved int registers in stackframeinfo (GC may need to  */
3760 	/* recover them during a collection).                                     */
3762 	disp = cd->stackframesize * 8 - sizeof(stackframeinfo) +
3763 		OFFSET(stackframeinfo, intregs);
3765 	for (i = 0; i < INT_SAV_CNT; i++)
3766 		M_AST(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3769 	/* prepare data structures for native function call */
3771 	M_MOV(REG_SP, REG_ITMP1);
3772 	M_AST(REG_ITMP1, REG_SP, 0 * 4);
3773 	M_IST_IMM(0, REG_SP, 1 * 4);
3776 	M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
3779 	/* remember class argument */
/* NOTE(review): codegen_start_native_call apparently returns the class
   in REG_RESULT for static methods — confirm against codegen-common.c */
3781 	if (m->flags & ACC_STATIC)
3782 		M_MOV(REG_RESULT, REG_ITMP2);
/* reload the resolved native function pointer saved above */
3784 	M_ALD(REG_ITMP3, REG_SP, 4 * 4);
3786 	/* copy arguments into new stackframe */
/* walk backwards; j indexes the native descriptor, offset by env/class */
3788 	for (i = md->paramcount - 1, j = i + nativeparams; i >= 0; i--, j--) {
3789 		t = md->paramtypes[i].type;
3791 		if (!md->params[i].inmemory) {
3792 			/* no integer argument registers */
3795 			/* float/double in memory can be copied like int/longs */
/* +4 skips the return address between caller arguments and our frame */
3797 			s1 = md->params[i].regoff + cd->stackframesize * 8 + 4;
3798 			s2 = nmd->params[j].regoff;
3800 			M_ILD(REG_ITMP1, REG_SP, s1);
3801 			M_IST(REG_ITMP1, REG_SP, s2);
/* two-word types (long/double) need the second 32-bit half as well */
3802 			if (IS_2_WORD_TYPE(t)) {
3803 				M_ILD(REG_ITMP1, REG_SP, s1 + 4);
3804 				M_IST(REG_ITMP1, REG_SP, s2 + 4);
3809 	/* if function is static, put class into second argument */
3811 	if (m->flags & ACC_STATIC)
3812 		M_AST(REG_ITMP2, REG_SP, 1 * 4);
3814 	/* put env into first argument */
3816 	M_AST_IMM(_Jv_env, REG_SP, 0 * 4);
3818 	/* call the native function */
3822 	/* save return value */
/* parked in the frame: the finish_native_call below clobbers the
   result registers and the x87 stack */
3824 	switch (md->returntype.type) {
3827 		M_IST(REG_RESULT, REG_SP, 1 * 8);
3830 		M_LST(REG_RESULT_PACKED, REG_SP, 1 * 8);
/* FP results live in x87 st(0); fstp stores and pops */
3833 		emit_fsts_membase(cd, REG_SP, 1 * 8);
3836 		emit_fstl_membase(cd, REG_SP, 1 * 8);
3842 #if !defined(NDEBUG)
3843 	emit_verbosecall_exit(jd);
3846 	/* remove native stackframe info */
3848 	M_MOV(REG_SP, REG_ITMP1);
3849 	M_AADD_IMM(cd->stackframesize * 8, REG_ITMP1);
3851 	M_AST(REG_ITMP1, REG_SP, 0 * 4);
3852 	M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
/* keep the pending-exception pointer out of REG_RESULT2's alias */
3854 	M_MOV(REG_RESULT, REG_ITMP2);       /* REG_ITMP3 == REG_RESULT2        */
3856 	/* restore return value */
3858 	switch (md->returntype.type) {
3861 		M_ILD(REG_RESULT, REG_SP, 1 * 8);
3864 		M_LLD(REG_RESULT_PACKED, REG_SP, 1 * 8);
/* reload the FP result back onto the x87 stack for the caller */
3867 		emit_flds_membase(cd, REG_SP, 1 * 8);
3870 		emit_fldl_membase(cd, REG_SP, 1 * 8);
3876 #if defined(ENABLE_GC_CACAO)
3877 	/* restore callee saved int registers from stackframeinfo (GC might have  */
3878 	/* modified them during a collection).                                    */
3880 	disp = cd->stackframesize * 8 - sizeof(stackframeinfo) +
3881 		OFFSET(stackframeinfo, intregs);
3883 	for (i = 0; i < INT_SAV_CNT; i++)
3884 		M_ALD(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3887 	M_AADD_IMM(cd->stackframesize * 8, REG_SP);
3889 	/* check for exception */
3896 	/* handle exception */
3898 	M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
/* XPC = return address of this stub (now at the top of the stack) */
3899 	M_ALD(REG_ITMP2_XPC, REG_SP, 0);
/* back XPC up into the call site — presumably 2 bytes is the size of
   the indirect call instruction used; TODO confirm against emit.c */
3900 	M_ASUB_IMM(2, REG_ITMP2_XPC);
3902 	M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
3905 	/* generate patcher stubs */
3907 	emit_patcher_stubs(jd);
3912 * These are local overrides for various environment variables in Emacs.
3913 * Please do not remove this and leave it at the end of the file, where
3914 * Emacs will automagically detect them.
3915 * ---------------------------------------------------------------------
3918 * indent-tabs-mode: t
3922 * vim:noexpandtab:sw=4:ts=4: