1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007, 2008
4 CACAOVM - Verein zur Foerderung der freien virtuellen Maschine CACAO
6 This file is part of CACAO.
8 This program is free software; you can redistribute it and/or
9 modify it under the terms of the GNU General Public License as
10 published by the Free Software Foundation; either version 2, or (at
11 your option) any later version.
13 This program is distributed in the hope that it will be useful, but
14 WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with this program; if not, write to the Free Software
20 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
34 #include "vm/jit/i386/md-abi.h"
36 #include "vm/jit/i386/codegen.h"
37 #include "vm/jit/i386/emit.h"
39 #include "mm/memory.h"
41 #include "native/localref.h"
42 #include "native/native.h"
44 #include "threads/lock-common.h"
46 #include "vm/jit/builtin.hpp"
47 #include "vm/exceptions.hpp"
48 #include "vm/global.h"
49 #include "vm/loader.hpp"
50 #include "vm/options.h"
51 #include "vm/primitive.hpp"
55 #include "vm/jit/abi.h"
56 #include "vm/jit/asmpart.h"
57 #include "vm/jit/codegen-common.h"
58 #include "vm/jit/dseg.h"
59 #include "vm/jit/emit-common.h"
60 #include "vm/jit/jit.hpp"
61 #include "vm/jit/linenumbertable.h"
62 #include "vm/jit/parse.h"
63 #include "vm/jit/patcher-common.h"
64 #include "vm/jit/reg.h"
65 #include "vm/jit/replace.hpp"
66 #include "vm/jit/stacktrace.hpp"
67 #include "vm/jit/trap.h"
69 #if defined(ENABLE_SSA)
70 # include "vm/jit/optimizing/lsra.h"
71 # include "vm/jit/optimizing/ssa.h"
72 #elif defined(ENABLE_LSRA)
73 # include "vm/jit/allocator/lsra.h"
77 /* codegen_emit ****************************************************************
79 Generates machine code.
81 *******************************************************************************/
83 bool codegen_emit(jitdata *jd)
89 s4 len, s1, s2, s3, d, disp;
90 int align_off; /* offset for alignment compensation */
95 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
96 builtintable_entry *bte;
102 #if defined(ENABLE_SSA)
104 bool last_cmd_was_goto;
106 last_cmd_was_goto = false;
110 /* get required compiler data */
117 /* prevent compiler warnings */
128 s4 savedregs_num = 0;
131 /* space to save used callee saved registers */
133 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
134 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
136 cd->stackframesize = rd->memuse + savedregs_num;
139 #if defined(ENABLE_THREADS)
140 /* space to save argument of monitor_enter */
142 if (checksync && code_is_synchronized(code))
143 cd->stackframesize++;
146 /* create method header */
148 /* Keep stack of non-leaf functions 16-byte aligned. */
150 if (!code_is_leafmethod(code)) {
151 ALIGN_ODD(cd->stackframesize);
154 align_off = cd->stackframesize ? 4 : 0;
156 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
157 (void) dseg_add_unique_s4(
158 cd, cd->stackframesize * 8 + align_off); /* FrameSize */
160 code->synchronizedoffset = rd->memuse * 8;
162 /* REMOVEME: We still need it for exception handling in assembler. */
164 if (code_is_leafmethod(code))
165 (void) dseg_add_unique_s4(cd, 1); /* IsLeaf */
167 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
169 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
170 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
172 #if defined(ENABLE_PROFILING)
173 /* generate method profiling code */
175 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
176 /* count frequency */
178 M_MOV_IMM(code, REG_ITMP3);
179 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
183 /* create stack frame (if necessary) */
185 if (cd->stackframesize)
187 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
189 /* save return address and used callee saved registers */
191 p = cd->stackframesize;
192 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
193 p--; M_AST(rd->savintregs[i], REG_SP, p * 8);
195 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
196 p--; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 8);
199 /* take arguments out of register or stack frame */
204 for (p = 0, l = 0; p < md->paramcount; p++) {
205 t = md->paramtypes[p].type;
207 varindex = jd->local_map[l * 5 + t];
208 #if defined(ENABLE_SSA)
210 if (varindex != UNUSED)
211 varindex = ls->var_0[varindex];
212 if ((varindex != UNUSED) && (ls->lifetime[varindex].type == UNUSED))
217 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
220 if (varindex == UNUSED)
224 s1 = md->params[p].regoff;
227 if (IS_INT_LNG_TYPE(t)) { /* integer args */
228 if (!md->params[p].inmemory) { /* register arguments */
229 log_text("integer register argument");
231 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
232 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
234 else { /* reg arg -> spilled */
235 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
239 if (!(var->flags & INMEMORY)) {
241 cd->stackframesize * 8 + 4 + align_off + s1);
244 if (!IS_2_WORD_TYPE(t)) {
245 #if defined(ENABLE_SSA)
246 /* no copy avoiding by now possible with SSA */
248 emit_mov_membase_reg( /* + 4 for return address */
250 cd->stackframesize * 8 + s1 + 4 + align_off,
252 emit_mov_reg_membase(
253 cd, REG_ITMP1, REG_SP, var->vv.regoff);
256 #endif /*defined(ENABLE_SSA)*/
257 /* reuse stackslot */
258 var->vv.regoff = cd->stackframesize * 8 + 4 +
263 #if defined(ENABLE_SSA)
264 /* no copy avoiding by now possible with SSA */
266 emit_mov_membase_reg( /* + 4 for return address */
268 cd->stackframesize * 8 + s1 + 4 + align_off,
270 emit_mov_reg_membase(
271 cd, REG_ITMP1, REG_SP, var->vv.regoff);
272 emit_mov_membase_reg( /* + 4 for return address */
274 cd->stackframesize * 8 + s1 + 4 + 4 + align_off,
276 emit_mov_reg_membase(
277 cd, REG_ITMP1, REG_SP, var->vv.regoff + 4);
280 #endif /*defined(ENABLE_SSA)*/
281 /* reuse stackslot */
282 var->vv.regoff = cd->stackframesize * 8 + 8 + s1;
287 else { /* floating args */
288 if (!md->params[p].inmemory) { /* register arguments */
289 log_text("There are no float argument registers!");
291 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
292 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
293 } else { /* reg arg -> spilled */
294 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 8 */
298 else { /* stack arguments */
299 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
303 cd->stackframesize * 8 + s1 + 4 + align_off);
305 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
311 cd->stackframesize * 8 + s1 + 4 + align_off);
313 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
316 } else { /* stack-arg -> spilled */
317 #if defined(ENABLE_SSA)
318 /* no copy avoiding by now possible with SSA */
320 emit_mov_membase_reg(
322 cd->stackframesize * 8 + s1 + 4 + align_off,
324 emit_mov_reg_membase(
325 cd, REG_ITMP1, REG_SP, var->vv.regoff);
329 cd->stackframesize * 8 + s1 + 4 + align_off);
330 emit_fstps_membase(cd, REG_SP, var->vv.regoff);
335 cd->stackframesize * 8 + s1 + 4 + align_off);
336 emit_fstpl_membase(cd, REG_SP, var->vv.regoff);
340 #endif /*defined(ENABLE_SSA)*/
341 /* reuse stackslot */
342 var->vv.regoff = cd->stackframesize * 8 + 4 +
349 /* call monitorenter function */
351 #if defined(ENABLE_THREADS)
352 if (checksync && code_is_synchronized(code)) {
355 if (m->flags & ACC_STATIC) {
356 M_MOV_IMM(&m->clazz->object.header, REG_ITMP1);
359 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 8 + 4 + align_off);
362 M_ALD_MEM(REG_ITMP1, TRAP_NullPointerException);
365 M_AST(REG_ITMP1, REG_SP, s1 * 8);
366 M_AST(REG_ITMP1, REG_SP, 0 * 4);
367 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
373 emit_verbosecall_enter(jd);
378 #if defined(ENABLE_SSA)
379 /* with SSA the Header is Basic Block 0 - insert phi Moves if necessary */
381 codegen_emit_phi_moves(jd, ls->basicblocks[0]);
384 /* end of header generation */
386 /* create replacement points */
388 REPLACEMENT_POINTS_INIT(cd, jd);
390 /* walk through all basic blocks */
392 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
394 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
396 if (bptr->flags >= BBREACHED) {
397 /* branch resolving */
399 codegen_resolve_branchrefs(cd, bptr);
401 /* handle replacement points */
403 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
405 #if defined(ENABLE_REPLACEMENT)
406 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
407 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
409 emit_trap_countdown(cd, &(m->hitcountdown));
414 /* copy interface registers to their destination */
419 #if defined(ENABLE_PROFILING)
420 /* generate basic block profiling code */
422 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
423 /* count frequency */
425 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
426 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
430 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
431 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
434 # if defined(ENABLE_SSA)
436 last_cmd_was_goto = false;
440 var = VAR(bptr->invars[len]);
441 if (bptr->type != BBTYPE_STD) {
442 if (!IS_2_WORD_TYPE(var->type)) {
443 #if !defined(ENABLE_SSA)
444 if (bptr->type == BBTYPE_EXH) {
445 d = codegen_reg_of_var(0, var, REG_ITMP1);
446 M_INTMOVE(REG_ITMP1, d);
447 emit_store(jd, NULL, var, d);
452 log_text("copy interface registers(EXH, SBR): longs \
453 have to be in memory (begin 1)");
461 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
465 var = VAR(bptr->invars[len]);
466 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
467 if (!IS_2_WORD_TYPE(var->type)) {
468 if (bptr->type == BBTYPE_EXH) {
469 d = codegen_reg_of_var(0, var, REG_ITMP1);
470 M_INTMOVE(REG_ITMP1, d);
471 emit_store(jd, NULL, var, d);
475 log_text("copy interface registers: longs have to be in \
482 assert((var->flags & INOUT));
487 /* walk through all instructions */
492 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
493 if (iptr->line != currentline) {
494 linenumbertable_list_entry_add(cd, iptr->line);
495 currentline = iptr->line;
498 MCODECHECK(1024); /* 1kB should be enough */
501 case ICMD_NOP: /* ... ==> ... */
502 case ICMD_POP: /* ..., value ==> ... */
503 case ICMD_POP2: /* ..., value, value ==> ... */
506 case ICMD_INLINE_START:
508 REPLACEMENT_POINT_INLINE_START(cd, iptr);
511 case ICMD_INLINE_BODY:
513 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
514 linenumbertable_list_entry_add_inline_start(cd, iptr);
515 linenumbertable_list_entry_add(cd, iptr->line);
518 case ICMD_INLINE_END:
520 linenumbertable_list_entry_add_inline_end(cd, iptr);
521 linenumbertable_list_entry_add(cd, iptr->line);
524 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
526 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
527 emit_nullpointer_check(cd, iptr, s1);
530 /* constant operations ************************************************/
532 case ICMD_ICONST: /* ... ==> ..., constant */
534 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
535 ICONST(d, iptr->sx.val.i);
536 emit_store_dst(jd, iptr, d);
539 case ICMD_LCONST: /* ... ==> ..., constant */
541 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
542 LCONST(d, iptr->sx.val.l);
543 emit_store_dst(jd, iptr, d);
546 case ICMD_FCONST: /* ... ==> ..., constant */
548 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
549 if (iptr->sx.val.f == 0.0) {
553 if (iptr->sx.val.i == 0x80000000) {
557 } else if (iptr->sx.val.f == 1.0) {
560 } else if (iptr->sx.val.f == 2.0) {
566 disp = dseg_add_float(cd, iptr->sx.val.f);
567 emit_mov_imm_reg(cd, 0, REG_ITMP1);
569 emit_flds_membase(cd, REG_ITMP1, disp);
571 emit_store_dst(jd, iptr, d);
574 case ICMD_DCONST: /* ... ==> ..., constant */
576 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
577 if (iptr->sx.val.d == 0.0) {
581 if (iptr->sx.val.l == 0x8000000000000000LL) {
585 } else if (iptr->sx.val.d == 1.0) {
588 } else if (iptr->sx.val.d == 2.0) {
594 disp = dseg_add_double(cd, iptr->sx.val.d);
595 emit_mov_imm_reg(cd, 0, REG_ITMP1);
597 emit_fldl_membase(cd, REG_ITMP1, disp);
599 emit_store_dst(jd, iptr, d);
602 case ICMD_ACONST: /* ... ==> ..., constant */
604 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
606 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
607 patcher_add_patch_ref(jd, PATCHER_aconst,
608 iptr->sx.val.c.ref, 0);
613 if (iptr->sx.val.anyptr == NULL)
616 M_MOV_IMM(iptr->sx.val.anyptr, d);
618 emit_store_dst(jd, iptr, d);
622 /* load/store/copy/move operations ************************************/
640 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
645 /* integer operations *************************************************/
647 case ICMD_INEG: /* ..., value ==> ..., - value */
649 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
650 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
653 emit_store_dst(jd, iptr, d);
656 case ICMD_LNEG: /* ..., value ==> ..., - value */
658 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
659 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
661 M_NEG(GET_LOW_REG(d));
662 M_IADDC_IMM(0, GET_HIGH_REG(d));
663 M_NEG(GET_HIGH_REG(d));
664 emit_store_dst(jd, iptr, d);
667 case ICMD_I2L: /* ..., value ==> ..., value */
669 s1 = emit_load_s1(jd, iptr, EAX);
670 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
673 M_LNGMOVE(EAX_EDX_PACKED, d);
674 emit_store_dst(jd, iptr, d);
677 case ICMD_L2I: /* ..., value ==> ..., value */
679 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
680 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
682 emit_store_dst(jd, iptr, d);
685 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
687 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
688 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
692 emit_store_dst(jd, iptr, d);
695 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
697 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
698 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
700 emit_store_dst(jd, iptr, d);
703 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
705 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
706 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
708 emit_store_dst(jd, iptr, d);
712 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
714 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
715 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
716 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
723 emit_store_dst(jd, iptr, d);
727 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
728 /* sx.val.i = constant */
730 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
731 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
733 /* `inc reg' is slower on p4's (regarding to ia32
734 optimization reference manual and benchmarks) and as
738 M_IADD_IMM(iptr->sx.val.i, d);
739 emit_store_dst(jd, iptr, d);
742 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
744 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
745 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
746 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
747 M_INTMOVE(s1, GET_LOW_REG(d));
748 M_IADD(s2, GET_LOW_REG(d));
749 /* don't use REG_ITMP1 */
750 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
751 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
752 M_INTMOVE(s1, GET_HIGH_REG(d));
753 M_IADDC(s2, GET_HIGH_REG(d));
754 emit_store_dst(jd, iptr, d);
757 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
758 /* sx.val.l = constant */
760 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
761 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
763 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
764 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
765 emit_store_dst(jd, iptr, d);
768 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
770 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
771 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
772 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
774 M_INTMOVE(s1, REG_ITMP1);
775 M_ISUB(s2, REG_ITMP1);
776 M_INTMOVE(REG_ITMP1, d);
782 emit_store_dst(jd, iptr, d);
785 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
786 /* sx.val.i = constant */
788 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
789 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
791 M_ISUB_IMM(iptr->sx.val.i, d);
792 emit_store_dst(jd, iptr, d);
795 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
797 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
798 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
799 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
800 if (s2 == GET_LOW_REG(d)) {
801 M_INTMOVE(s1, REG_ITMP1);
802 M_ISUB(s2, REG_ITMP1);
803 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
806 M_INTMOVE(s1, GET_LOW_REG(d));
807 M_ISUB(s2, GET_LOW_REG(d));
809 /* don't use REG_ITMP1 */
810 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
811 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
812 if (s2 == GET_HIGH_REG(d)) {
813 M_INTMOVE(s1, REG_ITMP2);
814 M_ISUBB(s2, REG_ITMP2);
815 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
818 M_INTMOVE(s1, GET_HIGH_REG(d));
819 M_ISUBB(s2, GET_HIGH_REG(d));
821 emit_store_dst(jd, iptr, d);
824 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
825 /* sx.val.l = constant */
827 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
828 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
830 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
831 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
832 emit_store_dst(jd, iptr, d);
835 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
837 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
838 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
839 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
846 emit_store_dst(jd, iptr, d);
849 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
850 /* sx.val.i = constant */
852 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
853 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
854 M_IMUL_IMM(s1, iptr->sx.val.i, d);
855 emit_store_dst(jd, iptr, d);
858 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
860 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
861 s2 = emit_load_s2_low(jd, iptr, EDX);
862 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
864 M_INTMOVE(s1, REG_ITMP2);
865 M_IMUL(s2, REG_ITMP2);
867 s1 = emit_load_s1_low(jd, iptr, EAX);
868 s2 = emit_load_s2_high(jd, iptr, EDX);
871 M_IADD(EDX, REG_ITMP2);
873 s1 = emit_load_s1_low(jd, iptr, EAX);
874 s2 = emit_load_s2_low(jd, iptr, EDX);
877 M_INTMOVE(EAX, GET_LOW_REG(d));
878 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
880 emit_store_dst(jd, iptr, d);
883 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
884 /* sx.val.l = constant */
886 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
887 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
888 ICONST(EAX, iptr->sx.val.l);
890 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
891 M_IADD(REG_ITMP2, EDX);
892 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
893 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
894 M_IADD(REG_ITMP2, EDX);
895 M_LNGMOVE(EAX_EDX_PACKED, d);
896 emit_store_dst(jd, iptr, d);
899 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
901 s1 = emit_load_s1(jd, iptr, EAX);
902 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
903 d = codegen_reg_of_dst(jd, iptr, EAX);
904 emit_arithmetic_check(cd, iptr, s2);
906 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
908 /* check as described in jvm spec */
910 M_CMP_IMM(0x80000000, EAX);
917 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
918 emit_store_dst(jd, iptr, d);
921 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
923 s1 = emit_load_s1(jd, iptr, EAX);
924 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
925 d = codegen_reg_of_dst(jd, iptr, EDX);
926 emit_arithmetic_check(cd, iptr, s2);
928 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
930 /* check as described in jvm spec */
932 M_CMP_IMM(0x80000000, EAX);
940 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
941 emit_store_dst(jd, iptr, d);
944 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
945 /* sx.val.i = constant */
947 /* TODO: optimize for `/ 2' */
948 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
949 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
953 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
954 M_SRA_IMM(iptr->sx.val.i, d);
955 emit_store_dst(jd, iptr, d);
958 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
959 /* sx.val.i = constant */
961 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
962 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
964 M_MOV(s1, REG_ITMP1);
968 M_AND_IMM(iptr->sx.val.i, d);
970 M_BGE(2 + 2 + 6 + 2);
971 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
973 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
975 emit_store_dst(jd, iptr, d);
978 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
979 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
981 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
982 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
984 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
985 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
986 /* XXX could be optimized */
987 emit_arithmetic_check(cd, iptr, REG_ITMP3);
989 bte = iptr->sx.s23.s3.bte;
992 M_LST(s2, REG_SP, 2 * 4);
994 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
995 M_LST(s1, REG_SP, 0 * 4);
997 M_MOV_IMM(bte->fp, REG_ITMP3);
999 emit_store_dst(jd, iptr, d);
1002 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1003 /* sx.val.i = constant */
1005 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1006 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1008 M_TEST(GET_HIGH_REG(d));
1010 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1011 M_IADDC_IMM(0, GET_HIGH_REG(d));
1012 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1013 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1014 emit_store_dst(jd, iptr, d);
1018 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1019 /* sx.val.l = constant */
1021 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1022 if (iptr->dst.var->flags & INMEMORY) {
1023 if (iptr->s1.var->flags & INMEMORY) {
1024 /* Alpha algorithm */
1026 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8);
1028 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1034 /* TODO: hmm, don't know if this is always correct */
1036 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1038 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1044 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1045 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1047 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1048 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1049 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1050 emit_jcc(cd, CC_GE, disp);
1052 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1053 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1055 emit_neg_reg(cd, REG_ITMP1);
1056 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1057 emit_neg_reg(cd, REG_ITMP2);
1059 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1060 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1062 emit_neg_reg(cd, REG_ITMP1);
1063 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1064 emit_neg_reg(cd, REG_ITMP2);
1066 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 8);
1067 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 8 + 4);
1071 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1072 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1074 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1075 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1076 M_TEST(GET_LOW_REG(s1));
1082 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1084 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1085 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1086 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1087 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1090 emit_store_dst(jd, iptr, d);
1093 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1094 /* sx.val.i = constant */
1096 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1097 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1099 M_SLL_IMM(iptr->sx.val.i, d);
1100 emit_store_dst(jd, iptr, d);
1103 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1105 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1106 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1107 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1108 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1111 emit_store_dst(jd, iptr, d);
1114 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1115 /* sx.val.i = constant */
1117 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1118 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1120 M_SRA_IMM(iptr->sx.val.i, d);
1121 emit_store_dst(jd, iptr, d);
1124 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1126 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1127 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1128 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1129 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1132 emit_store_dst(jd, iptr, d);
1135 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1136 /* sx.val.i = constant */
1138 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1139 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1141 M_SRL_IMM(iptr->sx.val.i, d);
1142 emit_store_dst(jd, iptr, d);
1145 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1147 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1148 s2 = emit_load_s2(jd, iptr, ECX);
1149 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1152 M_TEST_IMM(32, ECX);
1154 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1155 M_CLR(GET_LOW_REG(d));
1156 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1157 M_SLL(GET_LOW_REG(d));
1158 emit_store_dst(jd, iptr, d);
1161 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1162 /* sx.val.i = constant */
1164 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1165 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1167 if (iptr->sx.val.i & 0x20) {
1168 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1169 M_CLR(GET_LOW_REG(d));
1170 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1174 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1176 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1178 emit_store_dst(jd, iptr, d);
1181 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1183 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1184 s2 = emit_load_s2(jd, iptr, ECX);
1185 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1188 M_TEST_IMM(32, ECX);
1190 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1191 M_SRA_IMM(31, GET_HIGH_REG(d));
1192 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1193 M_SRA(GET_HIGH_REG(d));
1194 emit_store_dst(jd, iptr, d);
1197 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1198 /* sx.val.i = constant */
1200 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1201 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1203 if (iptr->sx.val.i & 0x20) {
1204 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1205 M_SRA_IMM(31, GET_HIGH_REG(d));
1206 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1210 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1212 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1214 emit_store_dst(jd, iptr, d);
1217 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1219 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1220 s2 = emit_load_s2(jd, iptr, ECX);
1221 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1224 M_TEST_IMM(32, ECX);
1226 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1227 M_CLR(GET_HIGH_REG(d));
1228 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1229 M_SRL(GET_HIGH_REG(d));
1230 emit_store_dst(jd, iptr, d);
1233 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1234 /* sx.val.l = constant */
1236 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1237 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1239 if (iptr->sx.val.i & 0x20) {
1240 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1241 M_CLR(GET_HIGH_REG(d));
1242 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1246 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1248 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1250 emit_store_dst(jd, iptr, d);
1253 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1255 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1256 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1257 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1264 emit_store_dst(jd, iptr, d);
1267 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1268 /* sx.val.i = constant */
1270 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1271 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1273 M_AND_IMM(iptr->sx.val.i, d);
1274 emit_store_dst(jd, iptr, d);
1277 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1279 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1280 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1281 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1282 if (s2 == GET_LOW_REG(d))
1283 M_AND(s1, GET_LOW_REG(d));
1285 M_INTMOVE(s1, GET_LOW_REG(d));
1286 M_AND(s2, GET_LOW_REG(d));
1288 /* REG_ITMP1 probably contains low 32-bit of destination */
1289 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1290 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1291 if (s2 == GET_HIGH_REG(d))
1292 M_AND(s1, GET_HIGH_REG(d));
1294 M_INTMOVE(s1, GET_HIGH_REG(d));
1295 M_AND(s2, GET_HIGH_REG(d));
1297 emit_store_dst(jd, iptr, d);
1300 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1301 /* sx.val.l = constant */
1303 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1304 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1306 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1307 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1308 emit_store_dst(jd, iptr, d);
1311 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1313 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1314 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1315 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1322 emit_store_dst(jd, iptr, d);
1325 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1326 /* sx.val.i = constant */
1328 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1329 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1331 M_OR_IMM(iptr->sx.val.i, d);
1332 emit_store_dst(jd, iptr, d);
1335 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1337 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1338 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1339 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1340 if (s2 == GET_LOW_REG(d))
1341 M_OR(s1, GET_LOW_REG(d));
1343 M_INTMOVE(s1, GET_LOW_REG(d));
1344 M_OR(s2, GET_LOW_REG(d));
1346 /* REG_ITMP1 probably contains low 32-bit of destination */
1347 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1348 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1349 if (s2 == GET_HIGH_REG(d))
1350 M_OR(s1, GET_HIGH_REG(d));
1352 M_INTMOVE(s1, GET_HIGH_REG(d));
1353 M_OR(s2, GET_HIGH_REG(d));
1355 emit_store_dst(jd, iptr, d);
1358 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1359 /* sx.val.l = constant */
1361 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1362 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1364 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1365 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1366 emit_store_dst(jd, iptr, d);
1369 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1371 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1372 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1373 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1380 emit_store_dst(jd, iptr, d);
1383 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1384 /* sx.val.i = constant */
1386 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1387 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1389 M_XOR_IMM(iptr->sx.val.i, d);
1390 emit_store_dst(jd, iptr, d);
1393 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1395 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1396 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1397 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1398 if (s2 == GET_LOW_REG(d))
1399 M_XOR(s1, GET_LOW_REG(d));
1401 M_INTMOVE(s1, GET_LOW_REG(d));
1402 M_XOR(s2, GET_LOW_REG(d));
1404 /* REG_ITMP1 probably contains low 32-bit of destination */
1405 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1406 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1407 if (s2 == GET_HIGH_REG(d))
1408 M_XOR(s1, GET_HIGH_REG(d));
1410 M_INTMOVE(s1, GET_HIGH_REG(d));
1411 M_XOR(s2, GET_HIGH_REG(d));
1413 emit_store_dst(jd, iptr, d);
1416 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1417 /* sx.val.l = constant */
1419 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1420 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1422 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1423 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1424 emit_store_dst(jd, iptr, d);
1428 /* floating operations ************************************************/
1430 case ICMD_FNEG: /* ..., value ==> ..., - value */
1432 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1433 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1435 emit_store_dst(jd, iptr, d);
1438 case ICMD_DNEG: /* ..., value ==> ..., - value */
1440 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1441 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1443 emit_store_dst(jd, iptr, d);
1446 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1448 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1449 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1450 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1452 emit_store_dst(jd, iptr, d);
1455 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1457 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1458 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1459 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1461 emit_store_dst(jd, iptr, d);
1464 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1466 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1467 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1468 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1470 emit_store_dst(jd, iptr, d);
1473 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1475 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1476 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1477 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1479 emit_store_dst(jd, iptr, d);
1482 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1484 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1485 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1486 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1488 emit_store_dst(jd, iptr, d);
1491 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1493 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1494 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1495 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1497 emit_store_dst(jd, iptr, d);
1500 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1502 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1503 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1504 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1506 emit_store_dst(jd, iptr, d);
1509 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1511 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1512 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1513 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1515 emit_store_dst(jd, iptr, d);
1518 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1520 /* exchanged to skip fxch */
1521 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1522 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1523 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1524 /* emit_fxch(cd); */
1529 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1530 emit_store_dst(jd, iptr, d);
1531 emit_ffree_reg(cd, 0);
1535 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1537 /* exchanged to skip fxch */
1538 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1539 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1540 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1541 /* emit_fxch(cd); */
1546 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1547 emit_store_dst(jd, iptr, d);
1548 emit_ffree_reg(cd, 0);
1552 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1553 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1555 var = VAROP(iptr->s1);
1556 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1558 if (var->flags & INMEMORY) {
1559 emit_fildl_membase(cd, REG_SP, var->vv.regoff);
1561 /* XXX not thread safe! */
1562 disp = dseg_add_unique_s4(cd, 0);
1563 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1565 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1566 emit_fildl_membase(cd, REG_ITMP1, disp);
1569 emit_store_dst(jd, iptr, d);
1572 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1573 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1575 var = VAROP(iptr->s1);
1576 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1577 if (var->flags & INMEMORY) {
1578 emit_fildll_membase(cd, REG_SP, var->vv.regoff);
1581 log_text("L2F: longs have to be in memory");
1584 emit_store_dst(jd, iptr, d);
1587 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1589 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1590 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1592 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1595 /* Round to zero, 53-bit mode, exception masked */
1596 disp = dseg_add_s4(cd, 0x0e7f);
1597 emit_fldcw_membase(cd, REG_ITMP1, disp);
1599 var = VAROP(iptr->dst);
1600 var1 = VAROP(iptr->s1);
1602 if (var->flags & INMEMORY) {
1603 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1605 /* Round to nearest, 53-bit mode, exceptions masked */
1606 disp = dseg_add_s4(cd, 0x027f);
1607 emit_fldcw_membase(cd, REG_ITMP1, disp);
1609 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1610 REG_SP, var->vv.regoff);
1613 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1615 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1618 /* XXX not thread safe! */
1619 disp = dseg_add_unique_s4(cd, 0);
1620 emit_fistpl_membase(cd, REG_ITMP1, disp);
1621 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1623 /* Round to nearest, 53-bit mode, exceptions masked */
1624 disp = dseg_add_s4(cd, 0x027f);
1625 emit_fldcw_membase(cd, REG_ITMP1, disp);
1627 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1630 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1631 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1634 emit_jcc(cd, CC_NE, disp);
1636 /* XXX: change this when we use registers */
1637 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1638 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1639 emit_call_reg(cd, REG_ITMP1);
1641 if (var->flags & INMEMORY) {
1642 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1645 M_INTMOVE(REG_RESULT, var->vv.regoff);
1649 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1651 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1652 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1654 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1657 /* Round to zero, 53-bit mode, exception masked */
1658 disp = dseg_add_s4(cd, 0x0e7f);
1659 emit_fldcw_membase(cd, REG_ITMP1, disp);
1661 var = VAROP(iptr->dst);
1662 var1 = VAROP(iptr->s1);
1664 if (var->flags & INMEMORY) {
1665 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1667 /* Round to nearest, 53-bit mode, exceptions masked */
1668 disp = dseg_add_s4(cd, 0x027f);
1669 emit_fldcw_membase(cd, REG_ITMP1, disp);
1671 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1672 REG_SP, var->vv.regoff);
1675 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1677 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1680 /* XXX not thread safe! */
1681 disp = dseg_add_unique_s4(cd, 0);
1682 emit_fistpl_membase(cd, REG_ITMP1, disp);
1683 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1685 /* Round to nearest, 53-bit mode, exceptions masked */
1686 disp = dseg_add_s4(cd, 0x027f);
1687 emit_fldcw_membase(cd, REG_ITMP1, disp);
1689 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1692 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1693 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1696 emit_jcc(cd, CC_NE, disp);
1698 /* XXX: change this when we use registers */
1699 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1700 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1701 emit_call_reg(cd, REG_ITMP1);
1703 if (var->flags & INMEMORY) {
1704 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1706 M_INTMOVE(REG_RESULT, var->vv.regoff);
1710 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1712 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1713 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1715 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1718 /* Round to zero, 53-bit mode, exception masked */
1719 disp = dseg_add_s4(cd, 0x0e7f);
1720 emit_fldcw_membase(cd, REG_ITMP1, disp);
1722 var = VAROP(iptr->dst);
1723 var1 = VAROP(iptr->s1);
1725 if (var->flags & INMEMORY) {
1726 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1728 /* Round to nearest, 53-bit mode, exceptions masked */
1729 disp = dseg_add_s4(cd, 0x027f);
1730 emit_fldcw_membase(cd, REG_ITMP1, disp);
1732 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1733 REG_SP, var->vv.regoff + 4);
1736 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1738 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1741 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1743 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1745 emit_jcc(cd, CC_NE, disp);
1747 emit_alu_imm_membase(cd, ALU_CMP, 0,
1748 REG_SP, var->vv.regoff);
1751 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1753 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1755 emit_jcc(cd, CC_NE, disp);
1757 /* XXX: change this when we use registers */
1758 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1759 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1760 emit_call_reg(cd, REG_ITMP1);
1761 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1762 emit_mov_reg_membase(cd, REG_RESULT2,
1763 REG_SP, var->vv.regoff + 4);
1766 log_text("F2L: longs have to be in memory");
1771 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1773 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1774 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1776 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1779 /* Round to zero, 53-bit mode, exception masked */
1780 disp = dseg_add_s4(cd, 0x0e7f);
1781 emit_fldcw_membase(cd, REG_ITMP1, disp);
1783 var = VAROP(iptr->dst);
1784 var1 = VAROP(iptr->s1);
1786 if (var->flags & INMEMORY) {
1787 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1789 /* Round to nearest, 53-bit mode, exceptions masked */
1790 disp = dseg_add_s4(cd, 0x027f);
1791 emit_fldcw_membase(cd, REG_ITMP1, disp);
1793 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1794 REG_SP, var->vv.regoff + 4);
1797 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1799 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1802 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1804 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1806 emit_jcc(cd, CC_NE, disp);
1808 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff);
1811 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1813 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1815 emit_jcc(cd, CC_NE, disp);
1817 /* XXX: change this when we use registers */
1818 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1819 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1820 emit_call_reg(cd, REG_ITMP1);
1821 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1822 emit_mov_reg_membase(cd, REG_RESULT2,
1823 REG_SP, var->vv.regoff + 4);
1826 log_text("D2L: longs have to be in memory");
1831 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1833 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1834 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1836 emit_store_dst(jd, iptr, d);
1839 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1841 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1842 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1844 emit_store_dst(jd, iptr, d);
1847 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1850 /* exchanged to skip fxch */
1851 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1852 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1853 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1854 /* emit_fxch(cd); */
1857 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1858 emit_jcc(cd, CC_E, 6);
1859 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1861 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1862 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1863 emit_jcc(cd, CC_B, 3 + 5);
1864 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1865 emit_jmp_imm(cd, 3);
1866 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1867 emit_store_dst(jd, iptr, d);
1870 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1873 /* exchanged to skip fxch */
1874 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1875 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1876 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1877 /* emit_fxch(cd); */
1880 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1881 emit_jcc(cd, CC_E, 3);
1882 emit_movb_imm_reg(cd, 1, REG_AH);
1884 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1885 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1886 emit_jcc(cd, CC_B, 3 + 5);
1887 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1888 emit_jmp_imm(cd, 3);
1889 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1890 emit_store_dst(jd, iptr, d);
1894 /* memory operations **************************************************/
1896 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1898 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1899 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1900 /* implicit null-pointer check */
1901 M_ILD(d, s1, OFFSET(java_array_t, size));
1902 emit_store_dst(jd, iptr, d);
1905 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1907 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1908 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1909 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1910 /* implicit null-pointer check */
1911 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1912 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray_t, data[0]),
1914 emit_store_dst(jd, iptr, d);
1917 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1919 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1920 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1921 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1922 /* implicit null-pointer check */
1923 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1924 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray_t, data[0]),
1926 emit_store_dst(jd, iptr, d);
1929 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1931 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1932 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1933 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1934 /* implicit null-pointer check */
1935 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1936 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray_t, data[0]),
1938 emit_store_dst(jd, iptr, d);
1941 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1943 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1944 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1945 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1946 /* implicit null-pointer check */
1947 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1948 emit_mov_memindex_reg(cd, OFFSET(java_intarray_t, data[0]),
1950 emit_store_dst(jd, iptr, d);
1953 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1955 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1956 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1957 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1958 /* implicit null-pointer check */
1959 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1961 var = VAROP(iptr->dst);
1963 assert(var->flags & INMEMORY);
1964 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]),
1965 s1, s2, 3, REG_ITMP3);
1966 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff);
1967 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]) + 4,
1968 s1, s2, 3, REG_ITMP3);
1969 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff + 4);
1972 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1974 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1975 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1976 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1977 /* implicit null-pointer check */
1978 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1979 emit_flds_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2, 2);
1980 emit_store_dst(jd, iptr, d);
1983 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
1985 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1986 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1987 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1988 /* implicit null-pointer check */
1989 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1990 emit_fldl_memindex(cd, OFFSET(java_doublearray_t, data[0]), s1, s2,3);
1991 emit_store_dst(jd, iptr, d);
1994 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
1996 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1997 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1998 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1999 /* implicit null-pointer check */
2000 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2001 emit_mov_memindex_reg(cd, OFFSET(java_objectarray_t, data[0]),
2003 emit_store_dst(jd, iptr, d);
2007 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2009 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2010 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2011 /* implicit null-pointer check */
2012 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2013 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2015 /* because EBP, ESI, EDI have no xH and xL nibbles */
2016 M_INTMOVE(s3, REG_ITMP3);
2019 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray_t, data[0]),
2023 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2025 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2026 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2027 /* implicit null-pointer check */
2028 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2029 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2030 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray_t, data[0]),
2034 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2036 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2037 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2038 /* implicit null-pointer check */
2039 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2040 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2041 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray_t, data[0]),
2045 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2047 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2048 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2049 /* implicit null-pointer check */
2050 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2051 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2052 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray_t, data[0]),
2056 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2058 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2059 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2060 /* implicit null-pointer check */
2061 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2063 var = VAROP(iptr->sx.s23.s3);
2065 assert(var->flags & INMEMORY);
2066 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff, REG_ITMP3);
2067 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray_t, data[0])
2069 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff + 4, REG_ITMP3);
2070 emit_mov_reg_memindex(cd, REG_ITMP3,
2071 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2074 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2076 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2077 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2078 /* implicit null-pointer check */
2079 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2080 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2081 emit_fstps_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2,2);
2084 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2086 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2087 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2088 /* implicit null-pointer check */
2089 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2090 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2091 emit_fstpl_memindex(cd, OFFSET(java_doublearray_t, data[0]),
2095 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2097 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2098 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2099 /* implicit null-pointer check */
2100 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2101 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2103 M_AST(s1, REG_SP, 0 * 4);
2104 M_AST(s3, REG_SP, 1 * 4);
2105 M_MOV_IMM(BUILTIN_FAST_canstore, REG_ITMP1);
2107 emit_arraystore_check(cd, iptr);
2109 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2110 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2111 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2112 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray_t, data[0]),
2116 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2118 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2119 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2120 /* implicit null-pointer check */
2121 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2122 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2123 OFFSET(java_bytearray_t, data[0]), s1, s2, 0);
2126 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2128 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2129 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2130 /* implicit null-pointer check */
2131 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2132 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2133 OFFSET(java_chararray_t, data[0]), s1, s2, 1);
2136 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2138 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2139 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2140 /* implicit null-pointer check */
2141 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2142 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2143 OFFSET(java_shortarray_t, data[0]), s1, s2, 1);
2146 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2148 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2149 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2150 /* implicit null-pointer check */
2151 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2152 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2153 OFFSET(java_intarray_t, data[0]), s1, s2, 2);
2156 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2158 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2159 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2160 /* implicit null-pointer check */
2161 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2162 emit_mov_imm_memindex(cd,
2163 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2164 OFFSET(java_longarray_t, data[0]), s1, s2, 3);
2165 emit_mov_imm_memindex(cd,
2166 ((s4)iptr->sx.s23.s3.constval) >> 31,
2167 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2170 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2172 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2173 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2174 /* implicit null-pointer check */
2175 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2176 emit_mov_imm_memindex(cd, 0,
2177 OFFSET(java_objectarray_t, data[0]), s1, s2, 2);
2181 case ICMD_GETSTATIC: /* ... ==> ..., value */
2183 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2184 uf = iptr->sx.s23.s3.uf;
2185 fieldtype = uf->fieldref->parseddesc.fd->type;
2188 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2192 fi = iptr->sx.s23.s3.fmiref->p.field;
2193 fieldtype = fi->type;
2194 disp = (intptr_t) fi->value;
2196 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2197 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2200 M_MOV_IMM(disp, REG_ITMP1);
2201 switch (fieldtype) {
2204 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2205 M_ILD(d, REG_ITMP1, 0);
2208 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2209 M_LLD(d, REG_ITMP1, 0);
2212 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2213 M_FLD(d, REG_ITMP1, 0);
2216 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2217 M_DLD(d, REG_ITMP1, 0);
2220 emit_store_dst(jd, iptr, d);
2223 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2225 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2226 uf = iptr->sx.s23.s3.uf;
2227 fieldtype = uf->fieldref->parseddesc.fd->type;
2230 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2233 fi = iptr->sx.s23.s3.fmiref->p.field;
2234 fieldtype = fi->type;
2235 disp = (intptr_t) fi->value;
2237 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2238 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2241 M_MOV_IMM(disp, REG_ITMP1);
2242 switch (fieldtype) {
2245 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2246 M_IST(s1, REG_ITMP1, 0);
2249 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2250 M_LST(s1, REG_ITMP1, 0);
2253 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2254 emit_fstps_membase(cd, REG_ITMP1, 0);
2257 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2258 emit_fstpl_membase(cd, REG_ITMP1, 0);
2263 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2264 /* val = value (in current instruction) */
2265 /* following NOP) */
2267 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2268 uf = iptr->sx.s23.s3.uf;
2269 fieldtype = uf->fieldref->parseddesc.fd->type;
2272 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2275 fi = iptr->sx.s23.s3.fmiref->p.field;
2276 fieldtype = fi->type;
2277 disp = (intptr_t) fi->value;
2279 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2280 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2283 M_MOV_IMM(disp, REG_ITMP1);
2284 switch (fieldtype) {
2287 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
2290 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2291 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
2298 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2300 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2301 emit_nullpointer_check(cd, iptr, s1);
2303 #if defined(ENABLE_ESCAPE_CHECK)
2304 /*emit_escape_check(cd, s1);*/
2307 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2308 uf = iptr->sx.s23.s3.uf;
2309 fieldtype = uf->fieldref->parseddesc.fd->type;
2312 patcher_add_patch_ref(jd, PATCHER_getfield,
2313 iptr->sx.s23.s3.uf, 0);
2316 fi = iptr->sx.s23.s3.fmiref->p.field;
2317 fieldtype = fi->type;
2321 switch (fieldtype) {
2324 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2325 M_ILD32(d, s1, disp);
2328 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2329 M_LLD32(d, s1, disp);
2332 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2333 M_FLD32(d, s1, disp);
2336 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2337 M_DLD32(d, s1, disp);
2340 emit_store_dst(jd, iptr, d);
2343 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2345 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2346 emit_nullpointer_check(cd, iptr, s1);
2348 /* must be done here because of code patching */
2350 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2351 uf = iptr->sx.s23.s3.uf;
2352 fieldtype = uf->fieldref->parseddesc.fd->type;
2355 fi = iptr->sx.s23.s3.fmiref->p.field;
2356 fieldtype = fi->type;
2359 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2360 if (IS_2_WORD_TYPE(fieldtype))
2361 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2363 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2366 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2368 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2370 uf = iptr->sx.s23.s3.uf;
2373 patcher_add_patch_ref(jd, PATCHER_putfield, uf, 0);
2377 fi = iptr->sx.s23.s3.fmiref->p.field;
2381 switch (fieldtype) {
2384 M_IST32(s2, s1, disp);
2387 M_LST32(s2, s1, disp);
2390 emit_fstps_membase32(cd, s1, disp);
2393 emit_fstpl_membase32(cd, s1, disp);
2398 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2399 /* val = value (in current instruction) */
2400 /* following NOP) */
2402 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2403 emit_nullpointer_check(cd, iptr, s1);
2405 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2406 uf = iptr->sx.s23.s3.uf;
2407 fieldtype = uf->fieldref->parseddesc.fd->type;
2410 patcher_add_patch_ref(jd, PATCHER_putfieldconst,
2414 fi = iptr->sx.s23.s3.fmiref->p.field;
2415 fieldtype = fi->type;
2419 switch (fieldtype) {
2422 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2425 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2426 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2434 /* branch operations **************************************************/
2436 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2438 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2439 M_INTMOVE(s1, REG_ITMP1_XPTR);
2441 #ifdef ENABLE_VERIFIER
2442 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2443 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2444 iptr->sx.s23.s2.uc, 0);
2446 #endif /* ENABLE_VERIFIER */
2448 M_CALL_IMM(0); /* passing exception pc */
2449 M_POP(REG_ITMP2_XPC);
2451 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2455 case ICMD_GOTO: /* ... ==> ... */
2456 case ICMD_RET: /* ... ==> ... */
2458 #if defined(ENABLE_SSA)
2460 last_cmd_was_goto = true;
2462 /* In case of a Goto phimoves have to be inserted before the */
2465 codegen_emit_phi_moves(jd, bptr);
2468 emit_br(cd, iptr->dst.block);
2472 case ICMD_JSR: /* ... ==> ... */
2474 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
2478 case ICMD_IFNULL: /* ..., value ==> ... */
2479 case ICMD_IFNONNULL:
2481 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2483 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
2486 case ICMD_IFEQ: /* ..., value ==> ... */
2493 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2494 M_CMP_IMM(iptr->sx.val.i, s1);
2495 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
2498 case ICMD_IF_LEQ: /* ..., value ==> ... */
2500 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2501 if (iptr->sx.val.l == 0) {
2502 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2503 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2506 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2507 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2508 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2509 M_OR(REG_ITMP2, REG_ITMP1);
2511 emit_beq(cd, iptr->dst.block);
2514 case ICMD_IF_LLT: /* ..., value ==> ... */
2516 if (iptr->sx.val.l == 0) {
2517 /* If high 32-bit are less than zero, then the 64-bits
2519 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2521 emit_blt(cd, iptr->dst.block);
2524 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2525 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2526 emit_blt(cd, iptr->dst.block);
2528 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2529 emit_bult(cd, iptr->dst.block);
2533 case ICMD_IF_LLE: /* ..., value ==> ... */
2535 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2536 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2537 emit_blt(cd, iptr->dst.block);
2539 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2540 emit_bule(cd, iptr->dst.block);
2543 case ICMD_IF_LNE: /* ..., value ==> ... */
2545 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2546 if (iptr->sx.val.l == 0) {
2547 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2548 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2551 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2552 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2553 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2554 M_OR(REG_ITMP2, REG_ITMP1);
2556 emit_bne(cd, iptr->dst.block);
2559 case ICMD_IF_LGT: /* ..., value ==> ... */
2561 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2562 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2563 emit_bgt(cd, iptr->dst.block);
2565 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2566 emit_bugt(cd, iptr->dst.block);
2569 case ICMD_IF_LGE: /* ..., value ==> ... */
2571 if (iptr->sx.val.l == 0) {
2572 /* If high 32-bit are greater equal zero, then the
2574 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2576 emit_bge(cd, iptr->dst.block);
2579 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2580 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2581 emit_bgt(cd, iptr->dst.block);
2583 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2584 emit_buge(cd, iptr->dst.block);
2588 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2589 case ICMD_IF_ICMPNE:
2590 case ICMD_IF_ICMPLT:
2591 case ICMD_IF_ICMPGT:
2592 case ICMD_IF_ICMPGE:
2593 case ICMD_IF_ICMPLE:
2595 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2596 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2598 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);
2601 case ICMD_IF_ACMPEQ: /* ..., value, value ==> ... */
2602 case ICMD_IF_ACMPNE:
2604 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2605 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2607 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);
2610 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2612 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2613 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2614 M_INTMOVE(s1, REG_ITMP1);
2615 M_XOR(s2, REG_ITMP1);
2616 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2617 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2618 M_INTMOVE(s1, REG_ITMP2);
2619 M_XOR(s2, REG_ITMP2);
2620 M_OR(REG_ITMP1, REG_ITMP2);
2621 emit_beq(cd, iptr->dst.block);
2624 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2626 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2627 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2628 M_INTMOVE(s1, REG_ITMP1);
2629 M_XOR(s2, REG_ITMP1);
2630 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2631 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2632 M_INTMOVE(s1, REG_ITMP2);
2633 M_XOR(s2, REG_ITMP2);
2634 M_OR(REG_ITMP1, REG_ITMP2);
2635 emit_bne(cd, iptr->dst.block);
2638 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2640 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2641 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2643 emit_blt(cd, iptr->dst.block);
2644 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2645 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2648 emit_bult(cd, iptr->dst.block);
2651 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2653 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2654 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2656 emit_bgt(cd, iptr->dst.block);
2657 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2658 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2661 emit_bugt(cd, iptr->dst.block);
2664 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2666 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2667 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2669 emit_blt(cd, iptr->dst.block);
2670 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2671 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2674 emit_bule(cd, iptr->dst.block);
2677 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2679 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2680 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2682 emit_bgt(cd, iptr->dst.block);
2683 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2684 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2687 emit_buge(cd, iptr->dst.block);
2691 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2693 REPLACEMENT_POINT_RETURN(cd, iptr);
2694 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2695 M_INTMOVE(s1, REG_RESULT);
2696 goto nowperformreturn;
2698 case ICMD_LRETURN: /* ..., retvalue ==> ... */
2700 REPLACEMENT_POINT_RETURN(cd, iptr);
2701 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2702 M_LNGMOVE(s1, REG_RESULT_PACKED);
2703 goto nowperformreturn;
2705 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2707 REPLACEMENT_POINT_RETURN(cd, iptr);
2708 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2709 M_INTMOVE(s1, REG_RESULT);
2711 #ifdef ENABLE_VERIFIER
2712 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2713 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2714 iptr->sx.s23.s2.uc, 0);
2716 #endif /* ENABLE_VERIFIER */
2717 goto nowperformreturn;
2719 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2722 REPLACEMENT_POINT_RETURN(cd, iptr);
2723 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2724 goto nowperformreturn;
2726 case ICMD_RETURN: /* ... ==> ... */
2728 REPLACEMENT_POINT_RETURN(cd, iptr);
2734 p = cd->stackframesize;
2736 #if !defined(NDEBUG)
2737 emit_verbosecall_exit(jd);
2740 #if defined(ENABLE_THREADS)
2741 if (checksync && code_is_synchronized(code)) {
2742 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 8);
2744 /* we need to save the proper return value */
2745 switch (iptr->opc) {
2748 M_IST(REG_RESULT, REG_SP, rd->memuse * 8);
2752 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2756 emit_fstps_membase(cd, REG_SP, rd->memuse * 8);
2760 emit_fstpl_membase(cd, REG_SP, rd->memuse * 8);
2764 M_AST(REG_ITMP2, REG_SP, 0);
2765 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2768 /* and now restore the proper return value */
2769 switch (iptr->opc) {
2772 M_ILD(REG_RESULT, REG_SP, rd->memuse * 8);
2776 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2780 emit_flds_membase(cd, REG_SP, rd->memuse * 8);
2784 emit_fldl_membase(cd, REG_SP, rd->memuse * 8);
2790 /* restore saved registers */
2792 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2793 p--; M_ALD(rd->savintregs[i], REG_SP, p * 8);
2796 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2798 emit_fldl_membase(cd, REG_SP, p * 8);
2799 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2801 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2804 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2808 /* deallocate stack */
2810 if (cd->stackframesize)
2811 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
2818 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2821 branch_target_t *table;
2823 table = iptr->dst.table;
2825 l = iptr->sx.s23.s2.tablelow;
2826 i = iptr->sx.s23.s3.tablehigh;
2828 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2829 M_INTMOVE(s1, REG_ITMP1);
2832 M_ISUB_IMM(l, REG_ITMP1);
2838 M_CMP_IMM(i - 1, REG_ITMP1);
2839 emit_bugt(cd, table[0].block);
2841 /* build jump table top down and use address of lowest entry */
2846 dseg_add_target(cd, table->block);
2850 /* length of dataseg after last dseg_addtarget is used
2853 M_MOV_IMM(0, REG_ITMP2);
2855 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
2861 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2864 lookup_target_t *lookup;
2866 lookup = iptr->dst.lookup;
2868 i = iptr->sx.s23.s2.lookupcount;
2870 MCODECHECK((i<<2)+8);
2871 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2874 M_CMP_IMM(lookup->value, s1);
2875 emit_beq(cd, lookup->target.block);
2879 emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
2884 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
2886 REPLACEMENT_POINT_FORGC_BUILTIN(cd, iptr);
2888 bte = iptr->sx.s23.s3.bte;
2891 #if defined(ENABLE_ESCAPE_REASON)
2892 if (bte->fp == BUILTIN_escape_reason_new) {
2893 void set_escape_reasons(void *);
2894 M_ASUB_IMM(8, REG_SP);
2895 M_MOV_IMM(iptr->escape_reasons, REG_ITMP1);
2896 M_AST(EDX, REG_SP, 4);
2897 M_AST(REG_ITMP1, REG_SP, 0);
2898 M_MOV_IMM(set_escape_reasons, REG_ITMP1);
2900 M_ALD(EDX, REG_SP, 4);
2901 M_AADD_IMM(8, REG_SP);
2907 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2909 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2910 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
2911 case ICMD_INVOKEINTERFACE:
2913 REPLACEMENT_POINT_INVOKE(cd, iptr);
2915 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2916 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
2920 lm = iptr->sx.s23.s3.fmiref->p.method;
2921 md = lm->parseddesc;
2925 s3 = md->paramcount;
2927 MCODECHECK((s3 << 1) + 64);
2929 /* copy arguments to registers or stack location */
2931 for (s3 = s3 - 1; s3 >= 0; s3--) {
2932 var = VAR(iptr->sx.s23.s2.args[s3]);
2934 /* Already Preallocated (ARGVAR) ? */
2935 if (var->flags & PREALLOC)
2937 if (IS_INT_LNG_TYPE(var->type)) {
2938 if (!md->params[s3].inmemory) {
2939 log_text("No integer argument registers available!");
2943 if (IS_2_WORD_TYPE(var->type)) {
2944 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
2945 M_LST(d, REG_SP, md->params[s3].regoff);
2947 d = emit_load(jd, iptr, var, REG_ITMP1);
2948 M_IST(d, REG_SP, md->params[s3].regoff);
2953 if (!md->params[s3].inmemory) {
2954 s1 = md->params[s3].regoff;
2955 d = emit_load(jd, iptr, var, s1);
2959 d = emit_load(jd, iptr, var, REG_FTMP1);
2960 if (IS_2_WORD_TYPE(var->type))
2961 M_DST(d, REG_SP, md->params[s3].regoff);
2963 M_FST(d, REG_SP, md->params[s3].regoff);
2968 switch (iptr->opc) {
2970 d = md->returntype.type;
2972 if (bte->stub == NULL) {
2973 M_MOV_IMM(bte->fp, REG_ITMP1);
2976 M_MOV_IMM(bte->stub, REG_ITMP1);
2980 #if defined(ENABLE_ESCAPE_CHECK)
2981 if (bte->opcode == ICMD_NEW || bte->opcode == ICMD_NEWARRAY) {
2982 /*emit_escape_annotate_object(cd, m);*/
2987 case ICMD_INVOKESPECIAL:
2988 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
2989 emit_nullpointer_check(cd, iptr, REG_ITMP1);
2992 case ICMD_INVOKESTATIC:
2994 unresolved_method *um = iptr->sx.s23.s3.um;
2996 patcher_add_patch_ref(jd, PATCHER_invokestatic_special,
3000 d = md->returntype.type;
3003 disp = (ptrint) lm->stubroutine;
3004 d = lm->parseddesc->returntype.type;
3007 M_MOV_IMM(disp, REG_ITMP2);
3011 case ICMD_INVOKEVIRTUAL:
3012 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3013 emit_nullpointer_check(cd, iptr, s1);
3016 unresolved_method *um = iptr->sx.s23.s3.um;
3018 patcher_add_patch_ref(jd, PATCHER_invokevirtual, um, 0);
3021 d = md->returntype.type;
3024 s1 = OFFSET(vftbl_t, table[0]) +
3025 sizeof(methodptr) * lm->vftblindex;
3026 d = md->returntype.type;
3029 M_ALD(REG_METHODPTR, REG_ITMP1,
3030 OFFSET(java_object_t, vftbl));
3031 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3035 case ICMD_INVOKEINTERFACE:
3036 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3037 emit_nullpointer_check(cd, iptr, s1);
3040 unresolved_method *um = iptr->sx.s23.s3.um;
3042 patcher_add_patch_ref(jd, PATCHER_invokeinterface, um, 0);
3046 d = md->returntype.type;
3049 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3050 sizeof(methodptr) * lm->clazz->index;
3052 s2 = sizeof(methodptr) * (lm - lm->clazz->methods);
3054 d = md->returntype.type;
3057 M_ALD(REG_METHODPTR, REG_ITMP1,
3058 OFFSET(java_object_t, vftbl));
3059 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3060 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3065 /* store size of call code in replacement point */
3067 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3068 REPLACEMENT_POINT_FORGC_BUILTIN_RETURN(cd, iptr);
3070 /* d contains return type */
3072 if (d != TYPE_VOID) {
3073 #if defined(ENABLE_SSA)
3074 if ((ls == NULL) /* || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) */ ||
3075 (ls->lifetime[iptr->dst.varindex].type != UNUSED))
3076 /* a "living" stackslot */
3079 if (IS_INT_LNG_TYPE(d)) {
3080 if (IS_2_WORD_TYPE(d)) {
3081 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3082 M_LNGMOVE(REG_RESULT_PACKED, s1);
3085 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3086 M_INTMOVE(REG_RESULT, s1);
3090 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3092 emit_store_dst(jd, iptr, s1);
3098 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3100 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3101 /* object type cast-check */
3104 vftbl_t *supervftbl;
3107 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3113 super = iptr->sx.s23.s3.c.cls;
3114 superindex = super->index;
3115 supervftbl = super->vftbl;
3118 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3120 /* if class is not resolved, check which code to call */
3122 if (super == NULL) {
3124 emit_label_beq(cd, BRANCH_LABEL_1);
3126 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3127 iptr->sx.s23.s3.c.ref, 0);
3129 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3130 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3131 emit_label_beq(cd, BRANCH_LABEL_2);
3134 /* interface checkcast code */
3136 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3137 if (super != NULL) {
3139 emit_label_beq(cd, BRANCH_LABEL_3);
3142 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3144 if (super == NULL) {
3145 patcher_add_patch_ref(jd, PATCHER_checkcast_interface,
3146 iptr->sx.s23.s3.c.ref,
3151 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3152 M_ISUB_IMM32(superindex, REG_ITMP3);
3153 /* XXX do we need this one? */
3155 emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);
3157 M_ALD32(REG_ITMP3, REG_ITMP2,
3158 OFFSET(vftbl_t, interfacetable[0]) -
3159 superindex * sizeof(methodptr*));
3161 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);
3164 emit_label_br(cd, BRANCH_LABEL_4);
3166 emit_label(cd, BRANCH_LABEL_3);
3169 /* class checkcast code */
3171 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3172 if (super == NULL) {
3173 emit_label(cd, BRANCH_LABEL_2);
3177 emit_label_beq(cd, BRANCH_LABEL_5);
3180 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3182 if (super == NULL) {
3183 patcher_add_patch_ref(jd, PATCHER_checkcast_class,
3184 iptr->sx.s23.s3.c.ref,
3188 M_MOV_IMM(supervftbl, REG_ITMP3);
3190 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3192 /* if (s1 != REG_ITMP1) { */
3193 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3194 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3195 /* #if defined(ENABLE_THREADS) */
3196 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3198 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
3201 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3202 M_ISUB(REG_ITMP3, REG_ITMP2);
3203 M_MOV_IMM(supervftbl, REG_ITMP3);
3204 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3208 M_CMP(REG_ITMP3, REG_ITMP2);
3209 emit_classcast_check(cd, iptr, BRANCH_ULE, REG_ITMP3, s1);
3212 emit_label(cd, BRANCH_LABEL_5);
3215 if (super == NULL) {
3216 emit_label(cd, BRANCH_LABEL_1);
3217 emit_label(cd, BRANCH_LABEL_4);
3220 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3223 /* array type cast-check */
3225 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3226 M_AST(s1, REG_SP, 0 * 4);
3228 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3229 patcher_add_patch_ref(jd, PATCHER_builtin_arraycheckcast,
3230 iptr->sx.s23.s3.c.ref, 0);
3233 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3234 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3237 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3239 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);
3241 d = codegen_reg_of_dst(jd, iptr, s1);
3245 emit_store_dst(jd, iptr, d);
3248 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3252 vftbl_t *supervftbl;
3255 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3261 super = iptr->sx.s23.s3.c.cls;
3262 superindex = super->index;
3263 supervftbl = super->vftbl;
3266 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3267 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
3270 M_INTMOVE(s1, REG_ITMP1);
3276 /* if class is not resolved, check which code to call */
3278 if (super == NULL) {
3280 emit_label_beq(cd, BRANCH_LABEL_1);
3282 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3283 iptr->sx.s23.s3.c.ref, 0);
3285 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3286 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3287 emit_label_beq(cd, BRANCH_LABEL_2);
3290 /* interface instanceof code */
3292 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3293 if (super != NULL) {
3295 emit_label_beq(cd, BRANCH_LABEL_3);
3298 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3300 if (super == NULL) {
3301 patcher_add_patch_ref(jd, PATCHER_instanceof_interface,
3302 iptr->sx.s23.s3.c.ref, 0);
3306 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3307 M_ISUB_IMM32(superindex, REG_ITMP3);
3310 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3311 6 /* jcc */ + 5 /* mov_imm_reg */);
3314 M_ALD32(REG_ITMP1, REG_ITMP1,
3315 OFFSET(vftbl_t, interfacetable[0]) -
3316 superindex * sizeof(methodptr*));
3318 /* emit_setcc_reg(cd, CC_A, d); */
3319 /* emit_jcc(cd, CC_BE, 5); */
3324 emit_label_br(cd, BRANCH_LABEL_4);
3326 emit_label(cd, BRANCH_LABEL_3);
3329 /* class instanceof code */
3331 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3332 if (super == NULL) {
3333 emit_label(cd, BRANCH_LABEL_2);
3337 emit_label_beq(cd, BRANCH_LABEL_5);
3340 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3342 if (super == NULL) {
3343 patcher_add_patch_ref(jd, PATCHER_instanceof_class,
3344 iptr->sx.s23.s3.c.ref, 0);
3347 M_MOV_IMM(supervftbl, REG_ITMP2);
3349 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3350 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3351 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3353 M_ISUB(REG_ITMP2, REG_ITMP1);
3354 M_CLR(d); /* may be REG_ITMP2 */
3355 M_CMP(REG_ITMP3, REG_ITMP1);
3360 emit_label(cd, BRANCH_LABEL_5);
3363 if (super == NULL) {
3364 emit_label(cd, BRANCH_LABEL_1);
3365 emit_label(cd, BRANCH_LABEL_4);
3368 emit_store_dst(jd, iptr, d);
3372 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3374 /* check for negative sizes and copy sizes to stack if necessary */
3376 MCODECHECK((iptr->s1.argcount << 1) + 64);
3378 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3379 /* copy SAVEDVAR sizes to stack */
3380 var = VAR(iptr->sx.s23.s2.args[s1]);
3382 /* Already Preallocated? */
3383 if (!(var->flags & PREALLOC)) {
3384 if (var->flags & INMEMORY) {
3385 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff);
3386 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3389 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3393 /* is a patcher function set? */
3395 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3396 patcher_add_patch_ref(jd, PATCHER_builtin_multianewarray,
3397 iptr->sx.s23.s3.c.ref, 0);
3403 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3405 /* a0 = dimension count */
3407 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3409 /* a1 = arraydescriptor */
3411 M_IST_IMM(disp, REG_SP, 1 * 4);
3413 /* a2 = pointer to dimensions = stack pointer */
3415 M_MOV(REG_SP, REG_ITMP1);
3416 M_AADD_IMM(3 * 4, REG_ITMP1);
3417 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3419 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3422 /* check for exception before result assignment */
3424 emit_exception_check(cd, iptr);
3426 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3427 M_INTMOVE(REG_RESULT, s1);
3428 emit_store_dst(jd, iptr, s1);
3431 #if defined(ENABLE_SSA)
3432 case ICMD_GETEXCEPTION:
3433 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
3434 M_INTMOVE(REG_ITMP1, d);
3435 emit_store_dst(jd, iptr, d);
3439 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3444 } /* for instruction */
3448 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3451 #if defined(ENABLE_SSA)
3454 /* by edge splitting, in Blocks with phi moves there can only */
3455 /* be a goto as last command, no other Jump/Branch Command */
3457 if (!last_cmd_was_goto)
3458 codegen_emit_phi_moves(jd, bptr);
3463 /* At the end of a basic block we may have to append some nops,
3464 because the patcher stub calling code might be longer than the
3465 actual instruction. So codepatching does not change the
3466 following block unintentionally. */
3468 if (cd->mcodeptr < cd->lastmcodeptr) {
3469 while (cd->mcodeptr < cd->lastmcodeptr) {
3474 } /* if (bptr -> flags >= BBREACHED) */
3475 } /* for basic block */
3477 /* generate stubs */
3479 emit_patcher_traps(jd);
3481 /* everything's ok */
3487 /* codegen_emit_stub_native ****************************************************
3489 Emits a stub routine which calls a native method.
3491 *******************************************************************************/
3493 void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f, int skipparams)
/* Emits an i386 stub routine that bridges from JIT-managed code into the
   native function f: it builds a stack frame containing a stackframeinfo_t
   and a localref_table, registers the frame (codegen_start_native_call),
   copies the Java-side arguments into the native locations described by nmd,
   calls f, tears the frame down again (codegen_finish_native_call) while
   preserving the return value, and forwards a pending exception to
   asm_handle_nat_exception.
   NOTE(review): the surrounding text is elided in this view -- the opening
   brace, several local declarations (cd, code, m, md, s1, s2, disp, ...)
   and a number of case labels / closing braces are not visible below. */
3499 int i, j; /* count variables */
3503 /* get required compiler data */
3509 /* set some variables */
3513 /* calculate stackframe size */
/* Frame size is counted in stack slots: room for the stackframeinfo, the
   localref table, and 4 outgoing argument words, plus (elided here)
   presumably the native argument area -- TODO confirm against full source. */
3515 cd->stackframesize =
3516 sizeof(stackframeinfo_t) / SIZEOF_VOID_P +
3517 sizeof(localref_table) / SIZEOF_VOID_P +
3518 4 + /* 4 arguments (start_native_call) */
3521 /* keep stack 16-byte aligned */
/* ALIGN_ODD: an odd slot count plus the 4-byte return address keeps
   ESP 16-byte aligned at the call (frame bytes = size * 8 + 4 below). */
3523 ALIGN_ODD(cd->stackframesize);
3525 /* create method header */
/* The method header is laid down in the data segment directly before the
   code; unwinding and stacktrace code reads these fields. */
3527 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3528 (void) dseg_add_unique_s4(cd, cd->stackframesize * 8 + 4); /* FrameSize */
3529 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3530 (void) dseg_add_unique_s4(cd, 0); /* IntSave */
3531 (void) dseg_add_unique_s4(cd, 0); /* FltSave */
3533 #if defined(ENABLE_PROFILING)
3534 /* generate native method profiling code */
3536 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
3537 /* count frequency */
/* Increment codeinfo.frequency in memory on every stub entry. */
3539 M_MOV_IMM(code, REG_ITMP1);
3540 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
3544 /* calculate stackframe size for native function */
3546 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
3548 /* Mark the whole fpu stack as free for native functions (only for saved */
3549 /* register count == 0). */
/* ffree st(0)..st(7): the stub saves no float registers, so the native
   code may use the entire x87 register stack. */
3551 emit_ffree_reg(cd, 0);
3552 emit_ffree_reg(cd, 1);
3553 emit_ffree_reg(cd, 2);
3554 emit_ffree_reg(cd, 3);
3555 emit_ffree_reg(cd, 4);
3556 emit_ffree_reg(cd, 5);
3557 emit_ffree_reg(cd, 6);
3558 emit_ffree_reg(cd, 7);
3560 #if defined(ENABLE_GC_CACAO)
3561 /* remember callee saved int registers in stackframeinfo (GC may need to */
3562 /* recover them during a collection). */
3564 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3565 OFFSET(stackframeinfo_t, intregs);
3567 for (i = 0; i < INT_SAV_CNT; i++)
3568 M_AST(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3571 /* prepare data structures for native function call */
/* Call codegen_start_native_call(sp, pv): arg0 = current SP, arg1 set to 0
   here (elided line presumably patches in the PV -- TODO confirm). */
3573 M_MOV(REG_SP, REG_ITMP1);
3574 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3575 M_IST_IMM(0, REG_SP, 1 * 4);
3578 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
3581 /* remember class argument */
/* NOTE(review): REG_RESULT here presumably carries the class pointer
   returned by codegen_start_native_call for static methods -- verify. */
3583 if (m->flags & ACC_STATIC)
3584 M_MOV(REG_RESULT, REG_ITMP3);
3586 /* Copy or spill arguments to new locations. */
/* i walks the Java-side params, j the native-side params; skipparams
   accounts for the extra JNIEnv/class slots prepended for JNI methods. */
3588 for (i = md->paramcount - 1, j = i + skipparams; i >= 0; i--, j--) {
3589 if (!md->params[i].inmemory)
/* s1 = caller-frame offset of the incoming argument (above our new frame
   and the return address); s2 = outgoing offset in the native frame. */
3592 s1 = md->params[i].regoff + cd->stackframesize * 8 + 8;
3593 s2 = nmd->params[j].regoff;
3595 /* float/double in memory can be copied like int/longs */
3597 switch (md->paramtypes[i].type) {
3601 M_ILD(REG_ITMP1, REG_SP, s1);
3602 M_IST(REG_ITMP1, REG_SP, s2);
3606 M_LLD(REG_ITMP12_PACKED, REG_SP, s1);
3607 M_LST(REG_ITMP12_PACKED, REG_SP, s2);
3612 /* Handle native Java methods. */
3614 if (m->flags & ACC_NATIVE) {
3615 /* if function is static, put class into second argument */
3617 if (m->flags & ACC_STATIC)
3618 M_AST(REG_ITMP3, REG_SP, 1 * 4);
3620 /* put env into first argument */
/* JNI convention: first C argument of every native method is JNIEnv*. */
3622 M_AST_IMM(VM_get_jnienv(), REG_SP, 0 * 4);
3625 /* Call the native function. */
/* The function address is loaded indirectly from the data segment so the
   dseg slot can be patched (e.g. for lazily resolved natives). */
3627 disp = dseg_add_functionptr(cd, f);
3628 emit_mov_imm_reg(cd, 0, REG_ITMP3);
3630 M_ALD(REG_ITMP1, REG_ITMP3, disp);
3633 /* save return value */
/* Sub-int primitive returns are normalized to a full 32-bit value first
   (zero-extend boolean/char, sign-extend byte/short), then the result is
   parked in the frame across the finish_native_call invocation. */
3635 switch (md->returntype.type) {
3638 switch (md->returntype.primitivetype) {
3639 case PRIMITIVETYPE_BOOLEAN:
3640 M_BZEXT(REG_RESULT, REG_RESULT);
3642 case PRIMITIVETYPE_BYTE:
3643 M_BSEXT(REG_RESULT, REG_RESULT);
3645 case PRIMITIVETYPE_CHAR:
3646 M_CZEXT(REG_RESULT, REG_RESULT);
3648 case PRIMITIVETYPE_SHORT:
3649 M_SSEXT(REG_RESULT, REG_RESULT);
3652 M_IST(REG_RESULT, REG_SP, 1 * 8);
3655 M_LST(REG_RESULT_PACKED, REG_SP, 1 * 8);
3658 emit_fsts_membase(cd, REG_SP, 1 * 8);
3661 emit_fstl_membase(cd, REG_SP, 1 * 8);
3667 /* remove native stackframe info */
/* codegen_finish_native_call(sp, pv): deregisters the stackframeinfo and
   returns a pending exception pointer (saved into REG_ITMP2 below). */
3669 M_MOV(REG_SP, REG_ITMP1);
3670 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3671 M_IST_IMM(0, REG_SP, 1 * 4);
3674 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
3676 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
3678 /* restore return value */
3680 switch (md->returntype.type) {
3683 M_ILD(REG_RESULT, REG_SP, 1 * 8);
3686 M_LLD(REG_RESULT_PACKED, REG_SP, 1 * 8);
3689 emit_flds_membase(cd, REG_SP, 1 * 8);
3692 emit_fldl_membase(cd, REG_SP, 1 * 8);
3698 #if defined(ENABLE_GC_CACAO)
3699 /* restore callee saved int registers from stackframeinfo (GC might have */
3700 /* modified them during a collection). */
3702 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3703 OFFSET(stackframeinfo_t, intregs);
3705 for (i = 0; i < INT_SAV_CNT; i++)
3706 M_ALD(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
/* Deallocate the stub's frame (slots * 8 plus the alignment word). */
3709 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
3711 /* check for exception */
3718 /* handle exception */
/* Exception path: move the exception object into the exception-pointer
   register, load the return address as the faulting PC and back it up by 2
   (NOTE(review): presumably to point inside the call instruction for the
   stacktrace walker -- confirm against asm_handle_nat_exception). */
3720 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
3721 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
3722 M_ASUB_IMM(2, REG_ITMP2_XPC);
3724 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
3730 * These are local overrides for various environment variables in Emacs.
3731 * Please do not remove this and leave it at the end of the file, where
3732 * Emacs will automagically detect them.
3733 * ---------------------------------------------------------------------
3736 * indent-tabs-mode: t
3740 * vim:noexpandtab:sw=4:ts=4: