1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007, 2008
4 CACAOVM - Verein zur Foerderung der freien virtuellen Maschine CACAO
6 This file is part of CACAO.
8 This program is free software; you can redistribute it and/or
9 modify it under the terms of the GNU General Public License as
10 published by the Free Software Foundation; either version 2, or (at
11 your option) any later version.
13 This program is distributed in the hope that it will be useful, but
14 WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with this program; if not, write to the Free Software
20 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
34 #include "vm/jit/i386/md-abi.h"
36 #include "vm/jit/i386/codegen.h"
37 #include "vm/jit/i386/emit.h"
39 #include "mm/memory.h"
40 #include "native/jni.h"
41 #include "native/localref.h"
42 #include "native/native.h"
44 #include "threads/lock-common.h"
46 #include "vm/builtin.h"
47 #include "vm/exceptions.h"
48 #include "vm/global.h"
49 #include "vm/stringlocal.h"
52 #include "vm/jit/abi.h"
53 #include "vm/jit/asmpart.h"
54 #include "vm/jit/codegen-common.h"
55 #include "vm/jit/dseg.h"
56 #include "vm/jit/emit-common.h"
57 #include "vm/jit/jit.h"
58 #include "vm/jit/linenumbertable.h"
59 #include "vm/jit/parse.h"
60 #include "vm/jit/patcher-common.h"
61 #include "vm/jit/reg.h"
62 #include "vm/jit/replace.h"
63 #include "vm/jit/stacktrace.h"
65 #if defined(ENABLE_SSA)
66 # include "vm/jit/optimizing/lsra.h"
67 # include "vm/jit/optimizing/ssa.h"
68 #elif defined(ENABLE_LSRA)
69 # include "vm/jit/allocator/lsra.h"
72 #include "vmcore/loader.h"
73 #include "vmcore/options.h"
74 #include "vmcore/utf8.h"
77 /* codegen_emit ****************************************************************
79 Generates machine code.
81 *******************************************************************************/
83 bool codegen_emit(jitdata *jd)
89 s4 len, s1, s2, s3, d, disp;
90 int align_off; /* offset for alignment compensation */
95 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
96 builtintable_entry *bte;
102 #if defined(ENABLE_SSA)
104 bool last_cmd_was_goto;
106 last_cmd_was_goto = false;
110 /* get required compiler data */
117 /* prevent compiler warnings */
128 s4 savedregs_num = 0;
131 /* space to save used callee saved registers */
133 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
134 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
136 cd->stackframesize = rd->memuse + savedregs_num;
139 #if defined(ENABLE_THREADS)
140 /* space to save argument of monitor_enter */
142 if (checksync && code_is_synchronized(code))
143 cd->stackframesize++;
146 /* create method header */
148 /* Keep stack of non-leaf functions 16-byte aligned. */
150 if (!code_is_leafmethod(code)) {
151 ALIGN_ODD(cd->stackframesize);
154 align_off = cd->stackframesize ? 4 : 0;
156 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
157 (void) dseg_add_unique_s4(
158 cd, cd->stackframesize * 8 + align_off); /* FrameSize */
160 code->synchronizedoffset = rd->memuse * 8;
162 /* REMOVEME: We still need it for exception handling in assembler. */
164 if (code_is_leafmethod(code))
165 (void) dseg_add_unique_s4(cd, 1); /* IsLeaf */
167 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
169 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
170 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
172 #if defined(ENABLE_PROFILING)
173 /* generate method profiling code */
175 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
176 /* count frequency */
178 M_MOV_IMM(code, REG_ITMP3);
179 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
183 /* create stack frame (if necessary) */
185 if (cd->stackframesize)
187 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
189 /* save return address and used callee saved registers */
191 p = cd->stackframesize;
192 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
193 p--; M_AST(rd->savintregs[i], REG_SP, p * 8);
195 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
196 p--; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 8);
199 /* take arguments out of register or stack frame */
204 for (p = 0, l = 0; p < md->paramcount; p++) {
205 t = md->paramtypes[p].type;
207 varindex = jd->local_map[l * 5 + t];
208 #if defined(ENABLE_SSA)
210 if (varindex != UNUSED)
211 varindex = ls->var_0[varindex];
212 if ((varindex != UNUSED) && (ls->lifetime[varindex].type == UNUSED))
217 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
220 if (varindex == UNUSED)
224 s1 = md->params[p].regoff;
227 if (IS_INT_LNG_TYPE(t)) { /* integer args */
228 if (!md->params[p].inmemory) { /* register arguments */
229 log_text("integer register argument");
231 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
232 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
234 else { /* reg arg -> spilled */
235 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
239 if (!(var->flags & INMEMORY)) {
241 cd->stackframesize * 8 + 4 + align_off + s1);
244 if (!IS_2_WORD_TYPE(t)) {
245 #if defined(ENABLE_SSA)
246 /* no copy avoiding by now possible with SSA */
248 emit_mov_membase_reg( /* + 4 for return address */
250 cd->stackframesize * 8 + s1 + 4 + align_off,
252 emit_mov_reg_membase(
253 cd, REG_ITMP1, REG_SP, var->vv.regoff);
256 #endif /*defined(ENABLE_SSA)*/
257 /* reuse stackslot */
258 var->vv.regoff = cd->stackframesize * 8 + 4 +
263 #if defined(ENABLE_SSA)
264 /* no copy avoiding by now possible with SSA */
266 emit_mov_membase_reg( /* + 4 for return address */
268 cd->stackframesize * 8 + s1 + 4 + align_off,
270 emit_mov_reg_membase(
271 cd, REG_ITMP1, REG_SP, var->vv.regoff);
272 emit_mov_membase_reg( /* + 4 for return address */
274 cd->stackframesize * 8 + s1 + 4 + 4 + align_off,
276 emit_mov_reg_membase(
277 cd, REG_ITMP1, REG_SP, var->vv.regoff + 4);
280 #endif /*defined(ENABLE_SSA)*/
281 /* reuse stackslot */
282 var->vv.regoff = cd->stackframesize * 8 + 8 + s1;
287 else { /* floating args */
288 if (!md->params[p].inmemory) { /* register arguments */
289 log_text("There are no float argument registers!");
291 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
292 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
293 } else { /* reg arg -> spilled */
294 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 8 */
298 else { /* stack arguments */
299 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
303 cd->stackframesize * 8 + s1 + 4 + align_off);
305 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
311 cd->stackframesize * 8 + s1 + 4 + align_off);
313 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
316 } else { /* stack-arg -> spilled */
317 #if defined(ENABLE_SSA)
318 /* no copy avoiding by now possible with SSA */
320 emit_mov_membase_reg(
322 cd->stackframesize * 8 + s1 + 4 + align_off,
324 emit_mov_reg_membase(
325 cd, REG_ITMP1, REG_SP, var->vv.regoff);
329 cd->stackframesize * 8 + s1 + 4 + align_off);
330 emit_fstps_membase(cd, REG_SP, var->vv.regoff);
335 cd->stackframesize * 8 + s1 + 4 + align_off);
336 emit_fstpl_membase(cd, REG_SP, var->vv.regoff);
340 #endif /*defined(ENABLE_SSA)*/
341 /* reuse stackslot */
342 var->vv.regoff = cd->stackframesize * 8 + 4 +
349 /* call monitorenter function */
351 #if defined(ENABLE_THREADS)
352 if (checksync && code_is_synchronized(code)) {
355 if (m->flags & ACC_STATIC) {
356 M_MOV_IMM(&m->clazz->object.header, REG_ITMP1);
359 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 8 + 4 + align_off);
362 M_ALD_MEM(REG_ITMP1, EXCEPTION_HARDWARE_NULLPOINTER);
365 M_AST(REG_ITMP1, REG_SP, s1 * 8);
366 M_AST(REG_ITMP1, REG_SP, 0 * 4);
367 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
373 emit_verbosecall_enter(jd);
378 #if defined(ENABLE_SSA)
379 /* with SSA the Header is Basic Block 0 - insert phi Moves if necessary */
381 codegen_emit_phi_moves(jd, ls->basicblocks[0]);
384 /* end of header generation */
386 /* create replacement points */
388 REPLACEMENT_POINTS_INIT(cd, jd);
390 /* walk through all basic blocks */
392 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
394 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
396 if (bptr->flags >= BBREACHED) {
397 /* branch resolving */
399 codegen_resolve_branchrefs(cd, bptr);
401 /* handle replacement points */
403 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
405 #if defined(ENABLE_REPLACEMENT)
406 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
407 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
409 disp = (s4) &(m->hitcountdown);
410 M_ISUB_IMM_MEMABS(1, disp);
416 /* copy interface registers to their destination */
421 #if defined(ENABLE_PROFILING)
422 /* generate basic block profiling code */
424 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
425 /* count frequency */
427 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
428 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
432 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
433 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
436 # if defined(ENABLE_SSA)
438 last_cmd_was_goto = false;
442 var = VAR(bptr->invars[len]);
443 if (bptr->type != BBTYPE_STD) {
444 if (!IS_2_WORD_TYPE(var->type)) {
445 if (bptr->type == BBTYPE_EXH) {
446 d = codegen_reg_of_var(0, var, REG_ITMP1);
447 M_INTMOVE(REG_ITMP1, d);
448 emit_store(jd, NULL, var, d);
452 log_text("copy interface registers(EXH, SBR): longs \
453 have to be in memory (begin 1)");
461 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
465 var = VAR(bptr->invars[len]);
466 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
467 if (!IS_2_WORD_TYPE(var->type)) {
468 if (bptr->type == BBTYPE_EXH) {
469 d = codegen_reg_of_var(0, var, REG_ITMP1);
470 M_INTMOVE(REG_ITMP1, d);
471 emit_store(jd, NULL, var, d);
475 log_text("copy interface registers: longs have to be in \
482 assert((var->flags & INOUT));
487 /* walk through all instructions */
492 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
493 if (iptr->line != currentline) {
494 linenumbertable_list_entry_add(cd, iptr->line);
495 currentline = iptr->line;
498 MCODECHECK(1024); /* 1kB should be enough */
501 case ICMD_NOP: /* ... ==> ... */
502 case ICMD_POP: /* ..., value ==> ... */
503 case ICMD_POP2: /* ..., value, value ==> ... */
506 case ICMD_INLINE_START:
508 REPLACEMENT_POINT_INLINE_START(cd, iptr);
511 case ICMD_INLINE_BODY:
513 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
514 linenumbertable_list_entry_add_inline_start(cd, iptr);
515 linenumbertable_list_entry_add(cd, iptr->line);
518 case ICMD_INLINE_END:
520 linenumbertable_list_entry_add_inline_end(cd, iptr);
521 linenumbertable_list_entry_add(cd, iptr->line);
524 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
526 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
527 emit_nullpointer_check(cd, iptr, s1);
530 /* constant operations ************************************************/
532 case ICMD_ICONST: /* ... ==> ..., constant */
534 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
535 ICONST(d, iptr->sx.val.i);
536 emit_store_dst(jd, iptr, d);
539 case ICMD_LCONST: /* ... ==> ..., constant */
541 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
542 LCONST(d, iptr->sx.val.l);
543 emit_store_dst(jd, iptr, d);
546 case ICMD_FCONST: /* ... ==> ..., constant */
548 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
549 if (iptr->sx.val.f == 0.0) {
553 if (iptr->sx.val.i == 0x80000000) {
557 } else if (iptr->sx.val.f == 1.0) {
560 } else if (iptr->sx.val.f == 2.0) {
566 disp = dseg_add_float(cd, iptr->sx.val.f);
567 emit_mov_imm_reg(cd, 0, REG_ITMP1);
569 emit_flds_membase(cd, REG_ITMP1, disp);
571 emit_store_dst(jd, iptr, d);
574 case ICMD_DCONST: /* ... ==> ..., constant */
576 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
577 if (iptr->sx.val.d == 0.0) {
581 if (iptr->sx.val.l == 0x8000000000000000LL) {
585 } else if (iptr->sx.val.d == 1.0) {
588 } else if (iptr->sx.val.d == 2.0) {
594 disp = dseg_add_double(cd, iptr->sx.val.d);
595 emit_mov_imm_reg(cd, 0, REG_ITMP1);
597 emit_fldl_membase(cd, REG_ITMP1, disp);
599 emit_store_dst(jd, iptr, d);
602 case ICMD_ACONST: /* ... ==> ..., constant */
604 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
606 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
607 patcher_add_patch_ref(jd, PATCHER_aconst,
608 iptr->sx.val.c.ref, 0);
613 if (iptr->sx.val.anyptr == NULL)
616 M_MOV_IMM(iptr->sx.val.anyptr, d);
618 emit_store_dst(jd, iptr, d);
622 /* load/store/copy/move operations ************************************/
640 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
645 /* integer operations *************************************************/
647 case ICMD_INEG: /* ..., value ==> ..., - value */
649 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
650 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
653 emit_store_dst(jd, iptr, d);
656 case ICMD_LNEG: /* ..., value ==> ..., - value */
658 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
659 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
661 M_NEG(GET_LOW_REG(d));
662 M_IADDC_IMM(0, GET_HIGH_REG(d));
663 M_NEG(GET_HIGH_REG(d));
664 emit_store_dst(jd, iptr, d);
667 case ICMD_I2L: /* ..., value ==> ..., value */
669 s1 = emit_load_s1(jd, iptr, EAX);
670 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
673 M_LNGMOVE(EAX_EDX_PACKED, d);
674 emit_store_dst(jd, iptr, d);
677 case ICMD_L2I: /* ..., value ==> ..., value */
679 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
680 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
682 emit_store_dst(jd, iptr, d);
685 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
687 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
688 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
692 emit_store_dst(jd, iptr, d);
695 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
697 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
698 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
700 emit_store_dst(jd, iptr, d);
703 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
705 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
706 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
708 emit_store_dst(jd, iptr, d);
712 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
714 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
715 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
716 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
723 emit_store_dst(jd, iptr, d);
727 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
728 /* sx.val.i = constant */
730 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
731 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
733 /* `inc reg' is slower on p4's (regarding to ia32
734 optimization reference manual and benchmarks) and as
738 M_IADD_IMM(iptr->sx.val.i, d);
739 emit_store_dst(jd, iptr, d);
742 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
744 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
745 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
746 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
747 M_INTMOVE(s1, GET_LOW_REG(d));
748 M_IADD(s2, GET_LOW_REG(d));
749 /* don't use REG_ITMP1 */
750 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
751 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
752 M_INTMOVE(s1, GET_HIGH_REG(d));
753 M_IADDC(s2, GET_HIGH_REG(d));
754 emit_store_dst(jd, iptr, d);
757 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
758 /* sx.val.l = constant */
760 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
761 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
763 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
764 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
765 emit_store_dst(jd, iptr, d);
768 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
770 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
771 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
772 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
774 M_INTMOVE(s1, REG_ITMP1);
775 M_ISUB(s2, REG_ITMP1);
776 M_INTMOVE(REG_ITMP1, d);
782 emit_store_dst(jd, iptr, d);
785 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
786 /* sx.val.i = constant */
788 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
789 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
791 M_ISUB_IMM(iptr->sx.val.i, d);
792 emit_store_dst(jd, iptr, d);
795 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
797 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
798 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
799 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
800 if (s2 == GET_LOW_REG(d)) {
801 M_INTMOVE(s1, REG_ITMP1);
802 M_ISUB(s2, REG_ITMP1);
803 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
806 M_INTMOVE(s1, GET_LOW_REG(d));
807 M_ISUB(s2, GET_LOW_REG(d));
809 /* don't use REG_ITMP1 */
810 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
811 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
812 if (s2 == GET_HIGH_REG(d)) {
813 M_INTMOVE(s1, REG_ITMP2);
814 M_ISUBB(s2, REG_ITMP2);
815 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
818 M_INTMOVE(s1, GET_HIGH_REG(d));
819 M_ISUBB(s2, GET_HIGH_REG(d));
821 emit_store_dst(jd, iptr, d);
824 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
825 /* sx.val.l = constant */
827 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
828 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
830 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
831 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
832 emit_store_dst(jd, iptr, d);
835 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
837 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
838 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
839 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
846 emit_store_dst(jd, iptr, d);
849 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
850 /* sx.val.i = constant */
852 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
853 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
854 M_IMUL_IMM(s1, iptr->sx.val.i, d);
855 emit_store_dst(jd, iptr, d);
858 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
860 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
861 s2 = emit_load_s2_low(jd, iptr, EDX);
862 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
864 M_INTMOVE(s1, REG_ITMP2);
865 M_IMUL(s2, REG_ITMP2);
867 s1 = emit_load_s1_low(jd, iptr, EAX);
868 s2 = emit_load_s2_high(jd, iptr, EDX);
871 M_IADD(EDX, REG_ITMP2);
873 s1 = emit_load_s1_low(jd, iptr, EAX);
874 s2 = emit_load_s2_low(jd, iptr, EDX);
877 M_INTMOVE(EAX, GET_LOW_REG(d));
878 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
880 emit_store_dst(jd, iptr, d);
883 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
884 /* sx.val.l = constant */
886 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
887 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
888 ICONST(EAX, iptr->sx.val.l);
890 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
891 M_IADD(REG_ITMP2, EDX);
892 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
893 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
894 M_IADD(REG_ITMP2, EDX);
895 M_LNGMOVE(EAX_EDX_PACKED, d);
896 emit_store_dst(jd, iptr, d);
899 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
901 s1 = emit_load_s1(jd, iptr, EAX);
902 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
903 d = codegen_reg_of_dst(jd, iptr, EAX);
904 emit_arithmetic_check(cd, iptr, s2);
906 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
908 /* check as described in jvm spec */
910 M_CMP_IMM(0x80000000, EAX);
917 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
918 emit_store_dst(jd, iptr, d);
921 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
923 s1 = emit_load_s1(jd, iptr, EAX);
924 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
925 d = codegen_reg_of_dst(jd, iptr, EDX);
926 emit_arithmetic_check(cd, iptr, s2);
928 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
930 /* check as described in jvm spec */
932 M_CMP_IMM(0x80000000, EAX);
940 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
941 emit_store_dst(jd, iptr, d);
944 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
945 /* sx.val.i = constant */
947 /* TODO: optimize for `/ 2' */
948 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
949 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
953 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
954 M_SRA_IMM(iptr->sx.val.i, d);
955 emit_store_dst(jd, iptr, d);
958 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
959 /* sx.val.i = constant */
961 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
962 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
964 M_MOV(s1, REG_ITMP1);
968 M_AND_IMM(iptr->sx.val.i, d);
970 M_BGE(2 + 2 + 6 + 2);
971 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
973 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
975 emit_store_dst(jd, iptr, d);
978 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
979 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
981 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
982 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
984 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
985 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
986 /* XXX could be optimized */
987 emit_arithmetic_check(cd, iptr, REG_ITMP3);
989 bte = iptr->sx.s23.s3.bte;
992 M_LST(s2, REG_SP, 2 * 4);
994 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
995 M_LST(s1, REG_SP, 0 * 4);
997 M_MOV_IMM(bte->fp, REG_ITMP3);
999 emit_store_dst(jd, iptr, d);
1002 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1003 /* sx.val.i = constant */
1005 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1006 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1008 M_TEST(GET_HIGH_REG(d));
1010 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1011 M_IADDC_IMM(0, GET_HIGH_REG(d));
1012 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1013 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1014 emit_store_dst(jd, iptr, d);
1018 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1019 /* sx.val.l = constant */
1021 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1022 if (iptr->dst.var->flags & INMEMORY) {
1023 if (iptr->s1.var->flags & INMEMORY) {
1024 /* Alpha algorithm */
1026 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8);
1028 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1034 /* TODO: hmm, don't know if this is always correct */
1036 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1038 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1044 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1045 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1047 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1048 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1049 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1050 emit_jcc(cd, CC_GE, disp);
1052 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1053 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1055 emit_neg_reg(cd, REG_ITMP1);
1056 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1057 emit_neg_reg(cd, REG_ITMP2);
1059 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1060 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1062 emit_neg_reg(cd, REG_ITMP1);
1063 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1064 emit_neg_reg(cd, REG_ITMP2);
1066 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 8);
1067 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 8 + 4);
1071 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1072 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1074 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1075 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1076 M_TEST(GET_LOW_REG(s1));
1082 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1084 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1085 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1086 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1087 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1090 emit_store_dst(jd, iptr, d);
1093 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1094 /* sx.val.i = constant */
1096 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1097 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1099 M_SLL_IMM(iptr->sx.val.i, d);
1100 emit_store_dst(jd, iptr, d);
1103 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1105 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1106 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1107 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1108 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1111 emit_store_dst(jd, iptr, d);
1114 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1115 /* sx.val.i = constant */
1117 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1118 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1120 M_SRA_IMM(iptr->sx.val.i, d);
1121 emit_store_dst(jd, iptr, d);
1124 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1126 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1127 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1128 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1129 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1132 emit_store_dst(jd, iptr, d);
1135 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1136 /* sx.val.i = constant */
1138 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1139 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1141 M_SRL_IMM(iptr->sx.val.i, d);
1142 emit_store_dst(jd, iptr, d);
1145 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1147 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1148 s2 = emit_load_s2(jd, iptr, ECX);
1149 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1152 M_TEST_IMM(32, ECX);
1154 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1155 M_CLR(GET_LOW_REG(d));
1156 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1157 M_SLL(GET_LOW_REG(d));
1158 emit_store_dst(jd, iptr, d);
1161 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1162 /* sx.val.i = constant */
1164 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1165 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1167 if (iptr->sx.val.i & 0x20) {
1168 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1169 M_CLR(GET_LOW_REG(d));
1170 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1174 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1176 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1178 emit_store_dst(jd, iptr, d);
1181 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1183 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1184 s2 = emit_load_s2(jd, iptr, ECX);
1185 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1188 M_TEST_IMM(32, ECX);
1190 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1191 M_SRA_IMM(31, GET_HIGH_REG(d));
1192 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1193 M_SRA(GET_HIGH_REG(d));
1194 emit_store_dst(jd, iptr, d);
1197 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1198 /* sx.val.i = constant */
1200 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1201 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1203 if (iptr->sx.val.i & 0x20) {
1204 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1205 M_SRA_IMM(31, GET_HIGH_REG(d));
1206 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1210 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1212 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1214 emit_store_dst(jd, iptr, d);
1217 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1219 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1220 s2 = emit_load_s2(jd, iptr, ECX);
1221 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1224 M_TEST_IMM(32, ECX);
1226 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1227 M_CLR(GET_HIGH_REG(d));
1228 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1229 M_SRL(GET_HIGH_REG(d));
1230 emit_store_dst(jd, iptr, d);
1233 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1234 /* sx.val.l = constant */
1236 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1237 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1239 if (iptr->sx.val.i & 0x20) {
1240 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1241 M_CLR(GET_HIGH_REG(d));
1242 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1246 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1248 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1250 emit_store_dst(jd, iptr, d);
1253 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1255 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1256 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1257 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1264 emit_store_dst(jd, iptr, d);
1267 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1268 /* sx.val.i = constant */
1270 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1271 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1273 M_AND_IMM(iptr->sx.val.i, d);
1274 emit_store_dst(jd, iptr, d);
1277 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1279 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1280 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1281 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1282 if (s2 == GET_LOW_REG(d))
1283 M_AND(s1, GET_LOW_REG(d));
1285 M_INTMOVE(s1, GET_LOW_REG(d));
1286 M_AND(s2, GET_LOW_REG(d));
1288 /* REG_ITMP1 probably contains low 32-bit of destination */
1289 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1290 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1291 if (s2 == GET_HIGH_REG(d))
1292 M_AND(s1, GET_HIGH_REG(d));
1294 M_INTMOVE(s1, GET_HIGH_REG(d));
1295 M_AND(s2, GET_HIGH_REG(d));
1297 emit_store_dst(jd, iptr, d);
1300 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1301 /* sx.val.l = constant */
1303 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1304 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1306 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1307 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1308 emit_store_dst(jd, iptr, d);
1311 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1313 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1314 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1315 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1322 emit_store_dst(jd, iptr, d);
1325 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1326 /* sx.val.i = constant */
1328 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1329 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1331 M_OR_IMM(iptr->sx.val.i, d);
1332 emit_store_dst(jd, iptr, d);
1335 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1337 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1338 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1339 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1340 if (s2 == GET_LOW_REG(d))
1341 M_OR(s1, GET_LOW_REG(d));
1343 M_INTMOVE(s1, GET_LOW_REG(d));
1344 M_OR(s2, GET_LOW_REG(d));
1346 /* REG_ITMP1 probably contains low 32-bit of destination */
1347 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1348 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1349 if (s2 == GET_HIGH_REG(d))
1350 M_OR(s1, GET_HIGH_REG(d));
1352 M_INTMOVE(s1, GET_HIGH_REG(d));
1353 M_OR(s2, GET_HIGH_REG(d));
1355 emit_store_dst(jd, iptr, d);
1358 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1359 /* sx.val.l = constant */
1361 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1362 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1364 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1365 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1366 emit_store_dst(jd, iptr, d);
/* Bitwise XOR: same half-by-half structure as the OR family above;
   the actual M_XOR emit for the two-register int case (orig. lines
   1374-1379) is elided from this listing. */
1369 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1371 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1372 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1373 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1380 emit_store_dst(jd, iptr, d);
1383 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1384 /* sx.val.i = constant */
1386 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1387 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1389 M_XOR_IMM(iptr->sx.val.i, d);
1390 emit_store_dst(jd, iptr, d);
/* 64-bit XOR in two 32-bit halves; aliasing check as in ICMD_LOR. */
1393 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1395 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1396 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1397 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1398 if (s2 == GET_LOW_REG(d))
1399 M_XOR(s1, GET_LOW_REG(d));
1401 M_INTMOVE(s1, GET_LOW_REG(d));
1402 M_XOR(s2, GET_LOW_REG(d));
1404 /* REG_ITMP1 probably contains low 32-bit of destination */
1405 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1406 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1407 if (s2 == GET_HIGH_REG(d))
1408 M_XOR(s1, GET_HIGH_REG(d));
1410 M_INTMOVE(s1, GET_HIGH_REG(d));
1411 M_XOR(s2, GET_HIGH_REG(d));
1413 emit_store_dst(jd, iptr, d);
1416 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1417 /* sx.val.l = constant */
1419 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1420 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1422 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1423 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1424 emit_store_dst(jd, iptr, d);
1428 /* floating operations ************************************************/
/* x87-based float/double arithmetic.  Operands live on the FPU stack;
   the actual fchs/fadd/fsub/fmul/fdiv emits between load and store are
   elided from this listing. */
1430 case ICMD_FNEG: /* ..., value ==> ..., - value */
1432 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1433 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1435 emit_store_dst(jd, iptr, d);
1438 case ICMD_DNEG: /* ..., value ==> ..., - value */
1440 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1441 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1443 emit_store_dst(jd, iptr, d);
1446 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1448 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1449 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1450 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1452 emit_store_dst(jd, iptr, d);
1455 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1457 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1458 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1459 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1461 emit_store_dst(jd, iptr, d);
1464 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1466 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1467 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1468 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1470 emit_store_dst(jd, iptr, d);
1473 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1475 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1476 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1477 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1479 emit_store_dst(jd, iptr, d);
1482 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1484 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1485 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1486 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1488 emit_store_dst(jd, iptr, d);
1491 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1493 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1494 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1495 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1497 emit_store_dst(jd, iptr, d);
1500 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1502 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1503 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1504 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1506 emit_store_dst(jd, iptr, d);
1509 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1511 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1512 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1513 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1515 emit_store_dst(jd, iptr, d);
/* FREM/DREM: loads are deliberately done s2-before-s1 so the fprem loop
   operands land on the FPU stack in the right order without an fxch.
   The backward jcc on the parity flag re-runs fprem until reduction is
   complete (the fprem/fnstsw emits are elided from this listing); the
   hard-coded byte count is the size of that loop body. */
1518 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1520 /* exchanged to skip fxch */
1521 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1522 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1523 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1524 /* emit_fxch(cd); */
1529 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1530 emit_store_dst(jd, iptr, d);
1531 emit_ffree_reg(cd, 0);
1535 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1537 /* exchanged to skip fxch */
1538 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1539 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1540 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1541 /* emit_fxch(cd); */
1546 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1547 emit_store_dst(jd, iptr, d);
1548 emit_ffree_reg(cd, 0);
/* int/long -> float/double: x87 FILD loads only from memory, so a
   register-resident int is first spilled to a data-segment slot. */
1552 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1553 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1555 var = VAROP(iptr->s1);
1556 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1558 if (var->flags & INMEMORY) {
1559 emit_fildl_membase(cd, REG_SP, var->vv.regoff);
/* Register case: spill via a unique dseg slot.  The slot is shared
   mutable state, hence the warning below. */
1561 /* XXX not thread safe! */
1562 disp = dseg_add_unique_s4(cd, 0);
1563 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1565 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1566 emit_fildl_membase(cd, REG_ITMP1, disp);
1569 emit_store_dst(jd, iptr, d);
/* Longs are always allocated in memory on i386, so only the INMEMORY
   path is implemented; the other branch logs an error. */
1572 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1573 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1575 var = VAROP(iptr->s1);
1576 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1577 if (var->flags & INMEMORY) {
1578 emit_fildll_membase(cd, REG_SP, var->vv.regoff);
1581 log_text("L2F: longs have to be in memory");
1584 emit_store_dst(jd, iptr, d);
/* float/double -> int.  Strategy: switch the x87 control word to
   round-toward-zero (0x0e7f), FISTP the value, restore round-to-nearest
   (0x027f), then compare the result against 0x80000000 -- FISTP's
   "integer indefinite" marker for NaN/overflow.  On that marker, fall
   through to the asm_builtin_f2i/d2i slow path which implements the
   JVM-mandated saturation semantics.  The CALCOFFSETBYTES/disp
   arithmetic computes the byte length of the skipped slow-path code
   for the forward CC_NE jump. */
1587 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1589 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1590 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1592 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1595 /* Round to zero, 53-bit mode, exception masked */
1596 disp = dseg_add_s4(cd, 0x0e7f);
1597 emit_fldcw_membase(cd, REG_ITMP1, disp);
1599 var = VAROP(iptr->dst);
1600 var1 = VAROP(iptr->s1);
1602 if (var->flags & INMEMORY) {
1603 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1605 /* Round to nearest, 53-bit mode, exceptions masked */
1606 disp = dseg_add_s4(cd, 0x027f);
1607 emit_fldcw_membase(cd, REG_ITMP1, disp);
/* 0x80000000 flags NaN or out-of-range; take the builtin slow path. */
1609 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1610 REG_SP, var->vv.regoff);
1613 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1615 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1618 /* XXX not thread safe! */
1619 disp = dseg_add_unique_s4(cd, 0);
1620 emit_fistpl_membase(cd, REG_ITMP1, disp);
1621 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1623 /* Round to nearest, 53-bit mode, exceptions masked */
1624 disp = dseg_add_s4(cd, 0x027f);
1625 emit_fldcw_membase(cd, REG_ITMP1, disp);
1627 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1630 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1631 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1634 emit_jcc(cd, CC_NE, disp);
1636 /* XXX: change this when we use registers */
1637 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1638 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1639 emit_call_reg(cd, REG_ITMP1);
1641 if (var->flags & INMEMORY) {
1642 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1645 M_INTMOVE(REG_RESULT, var->vv.regoff);
/* D2I: identical to F2I except for the 64-bit load (fldl) and the
   asm_builtin_d2i slow path. */
1649 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1651 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1652 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1654 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1657 /* Round to zero, 53-bit mode, exception masked */
1658 disp = dseg_add_s4(cd, 0x0e7f);
1659 emit_fldcw_membase(cd, REG_ITMP1, disp);
1661 var = VAROP(iptr->dst);
1662 var1 = VAROP(iptr->s1);
1664 if (var->flags & INMEMORY) {
1665 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1667 /* Round to nearest, 53-bit mode, exceptions masked */
1668 disp = dseg_add_s4(cd, 0x027f);
1669 emit_fldcw_membase(cd, REG_ITMP1, disp);
1671 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1672 REG_SP, var->vv.regoff);
1675 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1677 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1680 /* XXX not thread safe! */
1681 disp = dseg_add_unique_s4(cd, 0);
1682 emit_fistpl_membase(cd, REG_ITMP1, disp);
1683 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1685 /* Round to nearest, 53-bit mode, exceptions masked */
1686 disp = dseg_add_s4(cd, 0x027f);
1687 emit_fldcw_membase(cd, REG_ITMP1, disp);
1689 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1692 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1693 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1696 emit_jcc(cd, CC_NE, disp);
1698 /* XXX: change this when we use registers */
1699 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1700 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1701 emit_call_reg(cd, REG_ITMP1);
1703 if (var->flags & INMEMORY) {
1704 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1706 M_INTMOVE(REG_RESULT, var->vv.regoff);
/* float/double -> long.  Same control-word dance as F2I/D2I but with
   fistpll (64-bit store); the 64-bit "integer indefinite" marker is
   high word 0x80000000 with low word 0, hence the two-stage compare
   (high word first, then low word) before taking the
   asm_builtin_f2l/d2l slow path.  Longs must be INMEMORY on i386. */
1710 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1712 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1713 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1715 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1718 /* Round to zero, 53-bit mode, exception masked */
1719 disp = dseg_add_s4(cd, 0x0e7f);
1720 emit_fldcw_membase(cd, REG_ITMP1, disp);
1722 var = VAROP(iptr->dst);
1723 var1 = VAROP(iptr->s1);
1725 if (var->flags & INMEMORY) {
1726 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1728 /* Round to nearest, 53-bit mode, exceptions masked */
1729 disp = dseg_add_s4(cd, 0x027f);
1730 emit_fldcw_membase(cd, REG_ITMP1, disp);
1732 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1733 REG_SP, var->vv.regoff + 4);
1736 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1738 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1741 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1743 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1745 emit_jcc(cd, CC_NE, disp);
/* High word matched the marker; also require low word == 0 before
   calling the slow path. */
1747 emit_alu_imm_membase(cd, ALU_CMP, 0,
1748 REG_SP, var->vv.regoff);
1751 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1753 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1755 emit_jcc(cd, CC_NE, disp);
1757 /* XXX: change this when we use registers */
1758 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1759 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1760 emit_call_reg(cd, REG_ITMP1);
1761 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1762 emit_mov_reg_membase(cd, REG_RESULT2,
1763 REG_SP, var->vv.regoff + 4);
1766 log_text("F2L: longs have to be in memory");
1771 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1773 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1774 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1776 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1779 /* Round to zero, 53-bit mode, exception masked */
1780 disp = dseg_add_s4(cd, 0x0e7f);
1781 emit_fldcw_membase(cd, REG_ITMP1, disp);
1783 var = VAROP(iptr->dst);
1784 var1 = VAROP(iptr->s1);
1786 if (var->flags & INMEMORY) {
1787 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1789 /* Round to nearest, 53-bit mode, exceptions masked */
1790 disp = dseg_add_s4(cd, 0x027f);
1791 emit_fldcw_membase(cd, REG_ITMP1, disp);
1793 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1794 REG_SP, var->vv.regoff + 4);
1797 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1799 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1802 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1804 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1806 emit_jcc(cd, CC_NE, disp);
1808 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff);
1811 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1813 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1815 emit_jcc(cd, CC_NE, disp);
1817 /* XXX: change this when we use registers */
1818 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1819 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1820 emit_call_reg(cd, REG_ITMP1);
1821 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1822 emit_mov_reg_membase(cd, REG_RESULT2,
1823 REG_SP, var->vv.regoff + 4);
1826 log_text("D2L: longs have to be in memory");
/* F2D/D2F are no-ops on the x87 stack (values are held in extended
   precision); load/store with the proper width does the conversion. */
1831 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1833 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1834 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1836 emit_store_dst(jd, iptr, d);
1839 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1841 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1842 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1844 emit_store_dst(jd, iptr, d);
/* FCMPL/DCMPL: compare via fucompp/fnstsw (elided from this listing),
   then decode the x87 status flags in AX into -1/0/+1.  Bit 0x400 (C2)
   set means the operands were unordered (NaN); FCMPL treats that as
   "greater than" by skipping the AND that would normalize the flags.
   The small jcc/jmp byte offsets thread through the three-way result
   sequence below. */
1847 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1850 /* exchanged to skip fxch */
1851 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1852 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1853 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1854 /* emit_fxch(cd); */
1857 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1858 emit_jcc(cd, CC_E, 6);
1859 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1861 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1862 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1863 emit_jcc(cd, CC_B, 3 + 5);
1864 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1865 emit_jmp_imm(cd, 3);
1866 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1867 emit_store_dst(jd, iptr, d);
/* FCMPG/DCMPG: same decode, but unordered is forced to "less than" by
   setting AH so the subsequent flag test reads as below. */
1870 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1873 /* exchanged to skip fxch */
1874 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1875 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1876 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1877 /* emit_fxch(cd); */
1880 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1881 emit_jcc(cd, CC_E, 3);
1882 emit_movb_imm_reg(cd, 1, REG_AH);
1884 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1885 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1886 emit_jcc(cd, CC_B, 3 + 5);
1887 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1888 emit_jmp_imm(cd, 3);
1889 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1890 emit_store_dst(jd, iptr, d);
1894 /* memory operations **************************************************/
/* Array loads.  Each case bounds-checks (which also serves as the
   implicit null check, since the length load faults on NULL) and then
   uses a scaled memindex addressing mode; the scale exponent (0..3)
   matches the element size.  Longs are split into two 32-bit loads. */
1896 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1898 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1899 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1900 /* implicit null-pointer check */
1901 M_ILD(d, s1, OFFSET(java_array_t, size));
1902 emit_store_dst(jd, iptr, d);
1905 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1907 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1908 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1909 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1910 /* implicit null-pointer check */
1911 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1912 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray_t, data[0]),
1914 emit_store_dst(jd, iptr, d);
1917 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1919 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1920 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1921 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1922 /* implicit null-pointer check */
1923 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
/* char is unsigned 16-bit: zero-extending load. */
1924 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray_t, data[0]),
1926 emit_store_dst(jd, iptr, d);
1929 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1931 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1932 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1933 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1934 /* implicit null-pointer check */
1935 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
/* short is signed 16-bit: sign-extending load. */
1936 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray_t, data[0]),
1938 emit_store_dst(jd, iptr, d);
1941 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1943 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1944 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1945 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1946 /* implicit null-pointer check */
1947 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1948 emit_mov_memindex_reg(cd, OFFSET(java_intarray_t, data[0]),
1950 emit_store_dst(jd, iptr, d);
1953 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1955 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1956 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1957 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1958 /* implicit null-pointer check */
1959 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1961 var = VAROP(iptr->dst);
/* 64-bit result goes to its stack slot in two 32-bit halves. */
1963 assert(var->flags & INMEMORY);
1964 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]),
1965 s1, s2, 3, REG_ITMP3);
1966 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff);
1967 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]) + 4,
1968 s1, s2, 3, REG_ITMP3);
1969 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff + 4);
1972 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1974 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1975 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1976 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1977 /* implicit null-pointer check */
1978 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1979 emit_flds_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2, 2);
1980 emit_store_dst(jd, iptr, d);
1983 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
1985 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1986 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1987 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1988 /* implicit null-pointer check */
1989 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1990 emit_fldl_memindex(cd, OFFSET(java_doublearray_t, data[0]), s1, s2,3);
1991 emit_store_dst(jd, iptr, d);
1994 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
1996 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1997 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1998 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1999 /* implicit null-pointer check */
2000 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2001 emit_mov_memindex_reg(cd, OFFSET(java_objectarray_t, data[0]),
2003 emit_store_dst(jd, iptr, d);
/* Array stores: bounds/null check, load the value, store with a scaled
   memindex mode.  AASTORE additionally runs the runtime assignability
   check (BUILTIN_FAST_canstore) before the write. */
2007 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2009 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2010 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2011 /* implicit null-pointer check */
2012 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2013 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
/* Byte store needs an 8-bit-addressable register (EAX..EDX). */
2015 /* because EBP, ESI, EDI have no xH and xL nibbles */
2016 M_INTMOVE(s3, REG_ITMP3);
2019 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray_t, data[0]),
2023 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2025 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2026 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2027 /* implicit null-pointer check */
2028 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2029 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2030 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray_t, data[0]),
2034 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2036 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2037 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2038 /* implicit null-pointer check */
2039 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2040 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2041 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray_t, data[0]),
2045 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2047 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2048 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2049 /* implicit null-pointer check */
2050 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2051 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2052 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray_t, data[0]),
2056 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2058 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2059 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2060 /* implicit null-pointer check */
2061 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
/* 64-bit value is copied from its stack slot in two halves. */
2063 var = VAROP(iptr->sx.s23.s3);
2065 assert(var->flags & INMEMORY);
2066 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff, REG_ITMP3);
2067 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray_t, data[0])
2069 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff + 4, REG_ITMP3);
2070 emit_mov_reg_memindex(cd, REG_ITMP3,
2071 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2074 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2076 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2077 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2078 /* implicit null-pointer check */
2079 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2080 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2081 emit_fstps_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2,2);
2084 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2086 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2087 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2088 /* implicit null-pointer check */
2089 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2090 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2091 emit_fstpl_memindex(cd, OFFSET(java_doublearray_t, data[0]),
2095 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2097 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2098 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2099 /* implicit null-pointer check */
2100 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2101 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
/* Call canstore(array, value); the call clobbers registers, so the
   three operands are reloaded afterwards before the actual store. */
2103 M_AST(s1, REG_SP, 0 * 4);
2104 M_AST(s3, REG_SP, 1 * 4);
2105 M_MOV_IMM(BUILTIN_FAST_canstore, REG_ITMP1);
2107 emit_arraystore_check(cd, iptr);
2109 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2110 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2111 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2112 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray_t, data[0]),
/* Constant array stores: the value is an immediate carried in the
   instruction (iptr->sx.s23.s3.constval), so no value register load. */
2116 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2118 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2119 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2120 /* implicit null-pointer check */
2121 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2122 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2123 OFFSET(java_bytearray_t, data[0]), s1, s2, 0);
2126 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2128 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2129 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2130 /* implicit null-pointer check */
2131 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2132 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2133 OFFSET(java_chararray_t, data[0]), s1, s2, 1);
2136 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2138 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2139 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2140 /* implicit null-pointer check */
2141 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2142 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2143 OFFSET(java_shortarray_t, data[0]), s1, s2, 1);
2146 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2148 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2149 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2150 /* implicit null-pointer check */
2151 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2152 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2153 OFFSET(java_intarray_t, data[0]), s1, s2, 2);
2156 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2158 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2159 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2160 /* implicit null-pointer check */
2161 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2162 emit_mov_imm_memindex(cd,
2163 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2164 OFFSET(java_longarray_t, data[0]), s1, s2, 3);
/* NOTE(review): '>> 31' sign-extends the LOW word rather than taking
   bits 32..63 of constval; this only yields the correct high word if
   the optimizer emits LASTORECONST solely for sign-extended 32-bit
   constants -- TODO confirm against the stack optimizer. */
2165 emit_mov_imm_memindex(cd,
2166 ((s4)iptr->sx.s23.s3.constval) >> 31,
2167 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
/* AASTORECONST stores only the null reference (immediate 0), so no
   assignability check is needed. */
2170 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2172 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2173 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2174 /* implicit null-pointer check */
2175 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2176 emit_mov_imm_memindex(cd, 0,
2177 OFFSET(java_objectarray_t, data[0]), s1, s2, 2);
/* Static field access.  Unresolved fields register a patcher that
   resolves field address/type at first execution; resolved fields of
   not-yet-initialized classes register an initialization patcher.
   The field address is materialized as an immediate in REG_ITMP1
   (patched in place later if needed), then loaded/stored by type. */
2181 case ICMD_GETSTATIC: /* ... ==> ..., value */
2183 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2184 uf = iptr->sx.s23.s3.uf;
2185 fieldtype = uf->fieldref->parseddesc.fd->type;
2188 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2192 fi = iptr->sx.s23.s3.fmiref->p.field;
2193 fieldtype = fi->type;
2194 disp = (intptr_t) fi->value;
2196 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2197 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->class, 0);
2200 M_MOV_IMM(disp, REG_ITMP1);
2201 switch (fieldtype) {
2204 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2205 M_ILD(d, REG_ITMP1, 0);
2208 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2209 M_LLD(d, REG_ITMP1, 0);
2212 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2213 M_FLD(d, REG_ITMP1, 0);
2216 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2217 M_DLD(d, REG_ITMP1, 0);
2220 emit_store_dst(jd, iptr, d);
2223 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2225 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2226 uf = iptr->sx.s23.s3.uf;
2227 fieldtype = uf->fieldref->parseddesc.fd->type;
2230 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2233 fi = iptr->sx.s23.s3.fmiref->p.field;
2234 fieldtype = fi->type;
2235 disp = (intptr_t) fi->value;
2237 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2238 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->class, 0);
2241 M_MOV_IMM(disp, REG_ITMP1);
2242 switch (fieldtype) {
2245 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2246 M_IST(s1, REG_ITMP1, 0);
2249 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2250 M_LST(s1, REG_ITMP1, 0);
2253 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2254 emit_fstps_membase(cd, REG_ITMP1, 0);
2257 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2258 emit_fstpl_membase(cd, REG_ITMP1, 0);
/* PUTSTATICCONST writes an immediate; 64-bit constants are written as
   two 32-bit immediates.  NOTE(review): the high word uses
   '((s4)constval) >> 31' (sign-extension of the low word), same caveat
   as LASTORECONST above -- TODO confirm. */
2263 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2264 /* val = value (in current instruction) */
2265 /* following NOP) */
2267 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2268 uf = iptr->sx.s23.s3.uf;
2269 fieldtype = uf->fieldref->parseddesc.fd->type;
2272 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2275 fi = iptr->sx.s23.s3.fmiref->p.field;
2276 fieldtype = fi->type;
2277 disp = (intptr_t) fi->value;
2279 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2280 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->class, 0);
2283 M_MOV_IMM(disp, REG_ITMP1);
2284 switch (fieldtype) {
2287 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
2290 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2291 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
/* Instance field access.  The object reference is null-checked first;
   unresolved fields use the 32-bit displacement forms (M_ILD32 etc.)
   so the patcher can rewrite the offset in place.  For PUTFIELD the
   value load happens before the patcher registration because the
   patch site must be the store instruction itself. */
2298 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2300 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2301 emit_nullpointer_check(cd, iptr, s1);
2303 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2304 uf = iptr->sx.s23.s3.uf;
2305 fieldtype = uf->fieldref->parseddesc.fd->type;
2308 patcher_add_patch_ref(jd, PATCHER_getfield,
2309 iptr->sx.s23.s3.uf, 0);
2312 fi = iptr->sx.s23.s3.fmiref->p.field;
2313 fieldtype = fi->type;
2317 switch (fieldtype) {
2320 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2321 M_ILD32(d, s1, disp);
2324 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2325 M_LLD32(d, s1, disp);
2328 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2329 M_FLD32(d, s1, disp);
2332 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2333 M_DLD32(d, s1, disp);
2336 emit_store_dst(jd, iptr, d);
2339 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2341 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2342 emit_nullpointer_check(cd, iptr, s1);
2344 /* must be done here because of code patching */
2346 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2347 uf = iptr->sx.s23.s3.uf;
2348 fieldtype = uf->fieldref->parseddesc.fd->type;
2351 fi = iptr->sx.s23.s3.fmiref->p.field;
2352 fieldtype = fi->type;
2355 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2356 if (IS_2_WORD_TYPE(fieldtype))
2357 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2359 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2362 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2364 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2366 uf = iptr->sx.s23.s3.uf;
2369 patcher_add_patch_ref(jd, PATCHER_putfield, uf, 0);
2373 fi = iptr->sx.s23.s3.fmiref->p.field;
2377 switch (fieldtype) {
2380 M_IST32(s2, s1, disp);
2383 M_LST32(s2, s1, disp);
2386 emit_fstps_membase32(cd, s1, disp);
2389 emit_fstpl_membase32(cd, s1, disp);
/* PUTFIELDCONST: immediate store; 64-bit constants in two halves.
   NOTE(review): same '>> 31' low-word sign-extension caveat as the
   other *CONST cases -- TODO confirm. */
2394 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2395 /* val = value (in current instruction) */
2396 /* following NOP) */
2398 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2399 emit_nullpointer_check(cd, iptr, s1);
2401 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2402 uf = iptr->sx.s23.s3.uf;
2403 fieldtype = uf->fieldref->parseddesc.fd->type;
2406 patcher_add_patch_ref(jd, PATCHER_putfieldconst,
2410 fi = iptr->sx.s23.s3.fmiref->p.field;
2411 fieldtype = fi->type;
2415 switch (fieldtype) {
2418 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2421 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2422 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2430 /* branch operations **************************************************/
/* ATHROW: move exception to the XPTR register, capture the throwing pc
   via call/pop, and jump to the common exception handler stub. */
2432 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2434 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2435 M_INTMOVE(s1, REG_ITMP1_XPTR);
2437 #ifdef ENABLE_VERIFIER
2438 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2439 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2440 iptr->sx.s23.s2.uc, 0);
2442 #endif /* ENABLE_VERIFIER */
/* call imm 0 pushes the next pc; pop it to get the exception pc. */
2444 M_CALL_IMM(0); /* passing exception pc */
2445 M_POP(REG_ITMP2_XPC)
2447 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2451 case ICMD_GOTO: /* ... ==> ... */
2452 case ICMD_RET: /* ... ==> ... */
2454 #if defined(ENABLE_SSA)
2456 last_cmd_was_goto = true;
2458 /* In case of a Goto phimoves have to be inserted before the */
2461 codegen_emit_phi_moves(jd, bptr);
2464 emit_br(cd, iptr->dst.block);
2468 case ICMD_JSR: /* ... ==> ... */
2470 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
/* Conditional branches against constants.  Single-word compares map
   the ICMD opcode offset directly onto the branch condition.  64-bit
   compares against a long constant are decomposed: equality via
   XOR-both-halves-and-OR, ordering via a signed compare of the high
   word followed by an unsigned compare of the low word. */
2474 case ICMD_IFNULL: /* ..., value ==> ... */
2475 case ICMD_IFNONNULL:
2477 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2479 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
2482 case ICMD_IFEQ: /* ..., value ==> ... */
2489 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2490 M_CMP_IMM(iptr->sx.val.i, s1);
2491 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
2494 case ICMD_IF_LEQ: /* ..., value ==> ... */
2496 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
/* long == 0: OR of the two halves is zero iff the value is zero. */
2497 if (iptr->sx.val.l == 0) {
2498 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2499 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2502 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2503 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2504 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2505 M_OR(REG_ITMP2, REG_ITMP1);
2507 emit_beq(cd, iptr->dst.block);
2510 case ICMD_IF_LLT: /* ..., value ==> ... */
2512 if (iptr->sx.val.l == 0) {
2513 /* If high 32-bit are less than zero, then the 64-bits
2515 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2517 emit_blt(cd, iptr->dst.block);
2520 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2521 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2522 emit_blt(cd, iptr->dst.block);
2524 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2525 emit_bult(cd, iptr->dst.block);
2529 case ICMD_IF_LLE: /* ..., value ==> ... */
2531 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2532 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2533 emit_blt(cd, iptr->dst.block);
2535 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2536 emit_bule(cd, iptr->dst.block);
2539 case ICMD_IF_LNE: /* ..., value ==> ... */
2541 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2542 if (iptr->sx.val.l == 0) {
2543 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2544 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2547 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2548 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2549 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2550 M_OR(REG_ITMP2, REG_ITMP1);
2552 emit_bne(cd, iptr->dst.block);
2555 case ICMD_IF_LGT: /* ..., value ==> ... */
2557 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2558 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2559 emit_bgt(cd, iptr->dst.block);
2561 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2562 emit_bugt(cd, iptr->dst.block);
2565 case ICMD_IF_LGE: /* ..., value ==> ... */
2567 if (iptr->sx.val.l == 0) {
2568 /* If high 32-bit are greater equal zero, then the
2570 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2572 emit_bge(cd, iptr->dst.block);
2575 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2576 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2577 emit_bgt(cd, iptr->dst.block);
2579 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2580 emit_buge(cd, iptr->dst.block);
/* Two-operand compare-and-branch.  int/ref compares map the opcode
   offset onto the condition code; long compares branch on the signed
   high-word relation first, then on the unsigned low-word relation
   (the actual M_CMP emits between the loads are elided here).  The
   final case (IF_LCMPGE) is truncated at the edge of this listing. */
2584 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2585 case ICMD_IF_ICMPNE:
2586 case ICMD_IF_ICMPLT:
2587 case ICMD_IF_ICMPGT:
2588 case ICMD_IF_ICMPGE:
2589 case ICMD_IF_ICMPLE:
2591 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2592 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2594 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);
2597 case ICMD_IF_ACMPEQ: /* ..., value, value ==> ... */
2598 case ICMD_IF_ACMPNE:
2600 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2601 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2603 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);
/* long equality: XOR halves pairwise, OR the results; zero iff equal. */
2606 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2608 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2609 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2610 M_INTMOVE(s1, REG_ITMP1);
2611 M_XOR(s2, REG_ITMP1);
2612 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2613 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2614 M_INTMOVE(s1, REG_ITMP2);
2615 M_XOR(s2, REG_ITMP2);
2616 M_OR(REG_ITMP1, REG_ITMP2);
2617 emit_beq(cd, iptr->dst.block);
2620 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2622 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2623 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2624 M_INTMOVE(s1, REG_ITMP1);
2625 M_XOR(s2, REG_ITMP1);
2626 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2627 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2628 M_INTMOVE(s1, REG_ITMP2);
2629 M_XOR(s2, REG_ITMP2);
2630 M_OR(REG_ITMP1, REG_ITMP2);
2631 emit_bne(cd, iptr->dst.block);
2634 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2636 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2637 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2639 emit_blt(cd, iptr->dst.block);
2640 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2641 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2644 emit_bult(cd, iptr->dst.block);
2647 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2649 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2650 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2652 emit_bgt(cd, iptr->dst.block);
2653 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2654 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2657 emit_bugt(cd, iptr->dst.block);
2660 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2662 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2663 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2665 emit_blt(cd, iptr->dst.block);
2666 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2667 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2670 emit_bule(cd, iptr->dst.block);
2673 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2675 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2676 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2678 emit_bgt(cd, iptr->dst.block);
2679 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2680 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2683 emit_buge(cd, iptr->dst.block);
/* *RETURN ICMDs: move the return value into the ABI result register(s),
   then fall into the shared 'nowperformreturn' epilogue below.
   NOTE(review): elided extract -- embedded numbers are original file line
   numbers; breaks/braces/case labels between visible lines are missing. */
2687 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2689 REPLACEMENT_POINT_RETURN(cd, iptr);
2690 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2691 M_INTMOVE(s1, REG_RESULT);
2692 goto nowperformreturn;
2694 case ICMD_LRETURN: /* ..., retvalue ==> ... */
2696 REPLACEMENT_POINT_RETURN(cd, iptr);
2697 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2698 M_LNGMOVE(s1, REG_RESULT_PACKED);
2699 goto nowperformreturn;
2701 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2703 REPLACEMENT_POINT_RETURN(cd, iptr);
2704 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2705 M_INTMOVE(s1, REG_RESULT);
2707 #ifdef ENABLE_VERIFIER
/* Unresolved return type: register a patch so the class is resolved
   (and the return-type check performed) before this code runs. */
2708 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2709 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2710 iptr->sx.s23.s2.uc, 0);
2712 #endif /* ENABLE_VERIFIER */
2713 goto nowperformreturn;
2715 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2718 REPLACEMENT_POINT_RETURN(cd, iptr);
2719 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2720 goto nowperformreturn;
2722 case ICMD_RETURN: /* ... ==> ... */
2724 REPLACEMENT_POINT_RETURN(cd, iptr);
/* Shared method epilogue ('nowperformreturn' label elided from view):
   p counts down through the saved-register area of the frame. */
2730 p = cd->stackframesize;
2732 #if !defined(NDEBUG)
2733 emit_verbosecall_exit(jd);
2736 #if defined(ENABLE_THREADS)
/* Synchronized method: call LOCK_monitor_exit on the stored monitor
   object, preserving the return value across the call in the memuse
   slot of the frame. */
2737 if (checksync && code_is_synchronized(code)) {
2738 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 8);
2740 /* we need to save the proper return value */
2741 switch (iptr->opc) {
2744 M_IST(REG_RESULT, REG_SP, rd->memuse * 8);
2748 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2752 emit_fstps_membase(cd, REG_SP, rd->memuse * 8);
2756 emit_fstpl_membase(cd, REG_SP, rd->memuse * 8);
2760 M_AST(REG_ITMP2, REG_SP, 0);
2761 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2764 /* and now restore the proper return value */
2765 switch (iptr->opc) {
2768 M_ILD(REG_RESULT, REG_SP, rd->memuse * 8);
2772 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2776 emit_flds_membase(cd, REG_SP, rd->memuse * 8);
2780 emit_fldl_membase(cd, REG_SP, rd->memuse * 8);
2786 /* restore saved registers */
2788 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2789 p--; M_ALD(rd->savintregs[i], REG_SP, p * 8);
/* Callee-saved FPU registers: reload through the x87 stack; the exact
   fstp sequencing depends on whether a float/double result is on st(0)
   (historic alternatives kept as commented-out code below). */
2792 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2794 emit_fldl_membase(cd, REG_SP, p * 8);
2795 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2797 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2800 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2804 /* deallocate stack */
/* Frame is stackframesize*8 bytes plus 4 for the alignment/return slot
   (matches the FrameSize entry written into the method header). */
2806 if (cd->stackframesize)
2807 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
/* Switch ICMDs.  NOTE(review): elided extract -- embedded numbers are
   original file line numbers; some lines are missing from this view. */
2814 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2817 branch_target_t *table;
2819 table = iptr->dst.table;
2821 l = iptr->sx.s23.s2.tablelow;
2822 i = iptr->sx.s23.s3.tablehigh;
2824 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2825 M_INTMOVE(s1, REG_ITMP1);
/* Normalize index to 0..(high-low); a single UNSIGNED compare against
   i-1 then catches both below-range and above-range in one branch to
   the default target (table[0]). */
2828 M_ISUB_IMM(l, REG_ITMP1);
2834 M_CMP_IMM(i - 1, REG_ITMP1);
2835 emit_bugt(cd, table[0].block);
2837 /* build jump table top down and use address of lowest entry */
2842 dseg_add_target(cd, table->block);
2846 /* length of dataseg after last dseg_addtarget is used
/* Indirect jump through the data segment: scale factor 2 (i.e. *4)
   indexes the 4-byte table entries relative to -dseglen. */
2849 M_MOV_IMM(0, REG_ITMP2);
2851 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
2857 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2860 lookup_target_t *lookup;
2862 lookup = iptr->dst.lookup;
2864 i = iptr->sx.s23.s2.lookupcount;
2866 MCODECHECK((i<<2)+8);
2867 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
/* Linear compare-and-branch chain over the lookup pairs (loop header
   elided from view), then an unconditional jump to the default. */
2870 M_CMP_IMM(lookup->value, s1);
2871 emit_beq(cd, lookup->target.block);
2875 emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
/* Call ICMDs: builtins and the four JVM invoke flavors.  All i386
   arguments are passed on the stack.  NOTE(review): elided extract --
   embedded numbers are original file line numbers; breaks, braces and
   some case labels between visible lines are missing. */
2880 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
2882 REPLACEMENT_POINT_FORGC_BUILTIN(cd, iptr);
2884 bte = iptr->sx.s23.s3.bte;
2888 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2890 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2891 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
2892 case ICMD_INVOKEINTERFACE:
2894 REPLACEMENT_POINT_INVOKE(cd, iptr);
/* Unresolved call sites only have the parsed descriptor; resolved ones
   have the target methodinfo (lm) as well. */
2896 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2897 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
2901 lm = iptr->sx.s23.s3.fmiref->p.method;
2902 md = lm->parseddesc;
2906 s3 = md->paramcount;
2908 MCODECHECK((s3 << 1) + 64);
2910 /* copy arguments to registers or stack location */
2912 for (s3 = s3 - 1; s3 >= 0; s3--) {
2913 var = VAR(iptr->sx.s23.s2.args[s3]);
2915 /* Already Preallocated (ARGVAR) ? */
2916 if (var->flags & PREALLOC)
2918 if (IS_INT_LNG_TYPE(var->type)) {
/* On i386 there are no integer argument registers, so a
   register-allocated int/long parameter is an allocator error. */
2919 if (!md->params[s3].inmemory) {
2920 log_text("No integer argument registers available!");
2924 if (IS_2_WORD_TYPE(var->type)) {
2925 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
2926 M_LST(d, REG_SP, md->params[s3].regoff);
2928 d = emit_load(jd, iptr, var, REG_ITMP1);
2929 M_IST(d, REG_SP, md->params[s3].regoff);
/* Float/double arguments: either already in their assigned register or
   spilled to the outgoing stack slot via REG_FTMP1. */
2934 if (!md->params[s3].inmemory) {
2935 s1 = md->params[s3].regoff;
2936 d = emit_load(jd, iptr, var, s1);
2940 d = emit_load(jd, iptr, var, REG_FTMP1);
2941 if (IS_2_WORD_TYPE(var->type))
2942 M_DST(d, REG_SP, md->params[s3].regoff);
2944 M_FST(d, REG_SP, md->params[s3].regoff);
/* Per-opcode target computation; each branch leaves the callee address
   in a temp register and the return type in d. */
2949 switch (iptr->opc) {
2951 d = md->returntype.type;
/* Builtins with a stub go through the stub, otherwise call fp direct. */
2953 if (bte->stub == NULL) {
2954 M_MOV_IMM(bte->fp, REG_ITMP1);
2957 M_MOV_IMM(bte->stub, REG_ITMP1);
2962 case ICMD_INVOKESPECIAL:
/* Load 'this' (first stack argument) for the implicit null check. */
2963 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
2964 emit_nullpointer_check(cd, iptr, REG_ITMP1);
2967 case ICMD_INVOKESTATIC:
2969 unresolved_method *um = iptr->sx.s23.s3.um;
2971 patcher_add_patch_ref(jd, PATCHER_invokestatic_special,
2975 d = md->returntype.type;
2978 disp = (ptrint) lm->stubroutine;
2979 d = lm->parseddesc->returntype.type;
2982 M_MOV_IMM(disp, REG_ITMP2);
2986 case ICMD_INVOKEVIRTUAL:
2987 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
2988 emit_nullpointer_check(cd, iptr, s1);
2991 unresolved_method *um = iptr->sx.s23.s3.um;
2993 patcher_add_patch_ref(jd, PATCHER_invokevirtual, um, 0);
2996 d = md->returntype.type;
/* Virtual dispatch: method pointer at vftbl->table[vftblindex]. */
2999 s1 = OFFSET(vftbl_t, table[0]) +
3000 sizeof(methodptr) * lm->vftblindex;
3001 d = md->returntype.type;
3004 M_ALD(REG_METHODPTR, REG_ITMP1,
3005 OFFSET(java_object_t, vftbl));
3006 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3010 case ICMD_INVOKEINTERFACE:
3011 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3012 emit_nullpointer_check(cd, iptr, s1);
3015 unresolved_method *um = iptr->sx.s23.s3.um;
3017 patcher_add_patch_ref(jd, PATCHER_invokeinterface, um, 0);
3021 d = md->returntype.type;
/* Interface dispatch: negative offset into the per-class interface
   table, then index by the method's position within the interface. */
3024 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3025 sizeof(methodptr) * lm->clazz->index;
3027 s2 = sizeof(methodptr) * (lm - lm->clazz->methods);
3029 d = md->returntype.type;
3032 M_ALD(REG_METHODPTR, REG_ITMP1,
3033 OFFSET(java_object_t, vftbl));
3034 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3035 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3040 /* store size of call code in replacement point */
3042 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3043 REPLACEMENT_POINT_FORGC_BUILTIN_RETURN(cd, iptr);
3045 /* d contains return type */
3047 if (d != TYPE_VOID) {
3048 #if defined(ENABLE_SSA)
3049 if ((ls == NULL) /* || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) */ ||
3050 (ls->lifetime[iptr->dst.varindex].type != UNUSED))
3051 /* a "living" stackslot */
/* Move the ABI result register(s) into the destination variable. */
3054 if (IS_INT_LNG_TYPE(d)) {
3055 if (IS_2_WORD_TYPE(d)) {
3056 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3057 M_LNGMOVE(REG_RESULT_PACKED, s1);
3060 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3061 M_INTMOVE(REG_RESULT, s1);
3065 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3067 emit_store_dst(jd, iptr, s1);
/* ICMD_CHECKCAST: throw ClassCastException unless the object is an
   instance of the target type.  Three paths: interface check (via the
   interface table), class check (via the baseval/diffval subtype test),
   and array check (via BUILTIN_arraycheckcast).  When the target class
   is unresolved (super == NULL), code for both non-array paths is
   emitted with patch refs and runtime flag tests select between them
   using the BRANCH_LABEL_1..5 markers.
   NOTE(review): elided extract -- embedded numbers are original file
   line numbers; some lines are missing from this view. */
3073 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3075 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3076 /* object type cast-check */
3079 vftbl_t *supervftbl;
3082 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3088 super = iptr->sx.s23.s3.c.cls;
3089 superindex = super->index;
3090 supervftbl = super->vftbl;
/* The class-hierarchy subtype test reads baseval/diffval, which the GC
   or loader may update; guard it with a critical section. */
3093 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3094 CODEGEN_CRITICAL_SECTION_NEW;
3096 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3098 /* if class is not resolved, check which code to call */
3100 if (super == NULL) {
3102 emit_label_beq(cd, BRANCH_LABEL_1);
3104 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3105 iptr->sx.s23.s3.c.ref, 0);
/* The 0 immediate is patched to super->flags once resolved. */
3107 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3108 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3109 emit_label_beq(cd, BRANCH_LABEL_2);
3112 /* interface checkcast code */
3114 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3115 if (super != NULL) {
3117 emit_label_beq(cd, BRANCH_LABEL_3);
3120 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3122 if (super == NULL) {
3123 patcher_add_patch_ref(jd, PATCHER_checkcast_interface,
3124 iptr->sx.s23.s3.c.ref,
/* Check superindex < interfacetablelength, then that the table entry
   is non-NULL; either failure throws ClassCastException. */
3129 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3130 M_ISUB_IMM32(superindex, REG_ITMP3);
3131 /* XXX do we need this one? */
3133 emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);
3135 M_ALD32(REG_ITMP3, REG_ITMP2,
3136 OFFSET(vftbl_t, interfacetable[0]) -
3137 superindex * sizeof(methodptr*));
3139 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);
3142 emit_label_br(cd, BRANCH_LABEL_4);
3144 emit_label(cd, BRANCH_LABEL_3);
3147 /* class checkcast code */
3149 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3150 if (super == NULL) {
3151 emit_label(cd, BRANCH_LABEL_2);
3155 emit_label_beq(cd, BRANCH_LABEL_5);
3158 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3160 if (super == NULL) {
3161 patcher_add_patch_ref(jd, PATCHER_checkcast_class,
3162 iptr->sx.s23.s3.c.ref,
3166 M_MOV_IMM(supervftbl, REG_ITMP3);
3168 CODEGEN_CRITICAL_SECTION_START;
/* Subtype test: (obj.baseval - super.baseval) must be unsigned <=
   super.diffval. */
3170 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3172 /* if (s1 != REG_ITMP1) { */
3173 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3174 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3175 /* #if defined(ENABLE_THREADS) */
3176 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3178 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
3181 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3182 M_ISUB(REG_ITMP3, REG_ITMP2);
3183 M_MOV_IMM(supervftbl, REG_ITMP3);
3184 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3186 CODEGEN_CRITICAL_SECTION_END;
3190 M_CMP(REG_ITMP3, REG_ITMP2);
3191 emit_classcast_check(cd, iptr, BRANCH_ULE, REG_ITMP3, s1);
3194 emit_label(cd, BRANCH_LABEL_5);
3197 if (super == NULL) {
3198 emit_label(cd, BRANCH_LABEL_1);
3199 emit_label(cd, BRANCH_LABEL_4);
3202 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3205 /* array type cast-check */
/* Array casts go through the BUILTIN_arraycheckcast helper; a zero
   result means the cast fails. */
3207 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3208 M_AST(s1, REG_SP, 0 * 4);
3210 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3211 patcher_add_patch_ref(jd, PATCHER_builtin_arraycheckcast,
3212 iptr->sx.s23.s3.c.ref, 0);
3215 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3216 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3219 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3221 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);
3223 d = codegen_reg_of_dst(jd, iptr, s1);
3227 emit_store_dst(jd, iptr, d);
/* ICMD_INSTANCEOF: compute 0/1 into the destination register.  Same
   structure as CHECKCAST above (interface path via interface table,
   class path via baseval/diffval, unresolved case emits both with
   BRANCH_LABEL_1..5 selection), but produces a boolean instead of
   throwing.  NOTE(review): elided extract -- embedded numbers are
   original file line numbers; some lines are missing from this view. */
3230 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3234 vftbl_t *supervftbl;
3237 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3243 super = iptr->sx.s23.s3.c.cls;
3244 superindex = super->index;
3245 supervftbl = super->vftbl;
3248 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3249 CODEGEN_CRITICAL_SECTION_NEW;
3251 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3252 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
/* Keep the object in REG_ITMP1 since d may alias the source. */
3255 M_INTMOVE(s1, REG_ITMP1);
3261 /* if class is not resolved, check which code to call */
3263 if (super == NULL) {
3265 emit_label_beq(cd, BRANCH_LABEL_1);
3267 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3268 iptr->sx.s23.s3.c.ref, 0);
/* The 0 immediate is patched to super->flags once resolved. */
3270 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3271 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3272 emit_label_beq(cd, BRANCH_LABEL_2);
3275 /* interface instanceof code */
3277 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3278 if (super != NULL) {
3280 emit_label_beq(cd, BRANCH_LABEL_3);
3283 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3285 if (super == NULL) {
3286 patcher_add_patch_ref(jd, PATCHER_instanceof_interface,
3287 iptr->sx.s23.s3.c.ref, 0);
/* Range-check superindex against interfacetablelength, then test the
   interface-table entry for non-NULL. */
3291 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3292 M_ISUB_IMM32(superindex, REG_ITMP3);
/* disp = byte length of the skipped code when the range check fails
   (hand-counted i386 instruction sizes). */
3295 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3296 6 /* jcc */ + 5 /* mov_imm_reg */);
3299 M_ALD32(REG_ITMP1, REG_ITMP1,
3300 OFFSET(vftbl_t, interfacetable[0]) -
3301 superindex * sizeof(methodptr*));
3303 /* emit_setcc_reg(cd, CC_A, d); */
3304 /* emit_jcc(cd, CC_BE, 5); */
3309 emit_label_br(cd, BRANCH_LABEL_4);
3311 emit_label(cd, BRANCH_LABEL_3);
3314 /* class instanceof code */
3316 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3317 if (super == NULL) {
3318 emit_label(cd, BRANCH_LABEL_2);
3322 emit_label_beq(cd, BRANCH_LABEL_5);
3325 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3327 if (super == NULL) {
3328 patcher_add_patch_ref(jd, PATCHER_instanceof_class,
3329 iptr->sx.s23.s3.c.ref, 0);
3332 M_MOV_IMM(supervftbl, REG_ITMP2);
3334 CODEGEN_CRITICAL_SECTION_START;
/* Subtype test: (obj.baseval - super.baseval) unsigned <= diffval. */
3336 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3337 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3338 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3340 CODEGEN_CRITICAL_SECTION_END;
3342 M_ISUB(REG_ITMP2, REG_ITMP1);
3343 M_CLR(d); /* may be REG_ITMP2 */
3344 M_CMP(REG_ITMP3, REG_ITMP1);
3349 emit_label(cd, BRANCH_LABEL_5);
3352 if (super == NULL) {
3353 emit_label(cd, BRANCH_LABEL_1);
3354 emit_label(cd, BRANCH_LABEL_4);
3357 emit_store_dst(jd, iptr, d);
/* ICMD_MULTIANEWARRAY: spill all dimension sizes to the stack and call
   BUILTIN_multianewarray(argcount, classdesc, dims-pointer).
   NOTE(review): elided extract -- embedded numbers are original file
   line numbers; some lines are missing from this view. */
3361 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3363 /* check for negative sizes and copy sizes to stack if necessary */
3365 MCODECHECK((iptr->s1.argcount << 1) + 64);
3367 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3368 /* copy SAVEDVAR sizes to stack */
3369 var = VAR(iptr->sx.s23.s2.args[s1]);
3371 /* Already Preallocated? */
3372 if (!(var->flags & PREALLOC)) {
/* Dimension array starts at slot 3 -- slots 0..2 hold the three
   builtin arguments set up below. */
3373 if (var->flags & INMEMORY) {
3374 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff);
3375 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3378 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3382 /* is a patcher function set? */
3384 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3385 patcher_add_patch_ref(jd, PATCHER_builtin_multianewarray,
3386 iptr->sx.s23.s3.c.ref, 0);
3392 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3394 /* a0 = dimension count */
3396 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3398 /* a1 = arraydescriptor */
3400 M_IST_IMM(disp, REG_SP, 1 * 4);
3402 /* a2 = pointer to dimensions = stack pointer */
3404 M_MOV(REG_SP, REG_ITMP1);
3405 M_AADD_IMM(3 * 4, REG_ITMP1);
3406 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3408 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3411 /* check for exception before result assignment */
3413 emit_exception_check(cd, iptr);
3415 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3416 M_INTMOVE(REG_RESULT, s1);
3417 emit_store_dst(jd, iptr, s1);
/* default: unknown/unhandled ICMD -- internal error. */
3421 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3426 } /* for instruction */
/* End of the per-basic-block loop: optional SSA phi moves, nop padding
   for patchable sites, then patcher trap generation for the method.
   NOTE(review): elided extract -- embedded numbers are original file
   line numbers; some lines are missing from this view. */
3430 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3433 #if defined(ENABLE_SSA)
3436 /* by edge splitting, in Blocks with phi moves there can only */
3437 /* be a goto as last command, no other Jump/Branch Command */
3439 if (!last_cmd_was_goto)
3440 codegen_emit_phi_moves(jd, bptr);
3445 /* At the end of a basic block we may have to append some nops,
3446 because the patcher stub calling code might be longer than the
3447 actual instruction. So codepatching does not change the
3448 following block unintentionally. */
3450 if (cd->mcodeptr < cd->lastmcodeptr) {
3451 while (cd->mcodeptr < cd->lastmcodeptr) {
3456 } /* if (bptr -> flags >= BBREACHED) */
3457 } /* for basic block */
3459 /* generate stubs */
3461 emit_patcher_traps(jd);
3463 /* everything's ok */
3469 /* codegen_emit_stub_native ****************************************************
3471 Emits a stub routine which calls a native method.
3473 *******************************************************************************/
/* codegen_emit_stub_native: emit a stub that wraps a native method --
   it builds a native stack frame, registers a stackframeinfo, shuffles
   Java arguments into the native (JNI) layout, calls the native
   function f, tears the frame down, and forwards any pending exception.
   nmd  = descriptor of the native function's parameter layout
   f    = native function to call (NULL for unresolved; patched later)
   skipparams = number of extra leading native params (env/class slots)
   NOTE(review): elided extract -- embedded numbers are original file
   line numbers; declarations, braces and case labels between visible
   lines are missing from this view. */
3475 void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f, int skipparams)
3481 int i, j; /* count variables */
3485 /* get required compiler data */
3491 /* set some variables */
3495 /* calculate stackframe size */
/* Frame (in 8-byte units): stackframeinfo + localref_table + 4 argument
   slots for codegen_start_native_call + (elided) native argument area. */
3497 cd->stackframesize =
3498 sizeof(stackframeinfo_t) / SIZEOF_VOID_P +
3499 sizeof(localref_table) / SIZEOF_VOID_P +
3500 4 + /* 4 arguments (start_native_call) */
3503 /* keep stack 16-byte aligned */
3505 ALIGN_ODD(cd->stackframesize);
3507 /* create method header */
3509 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3510 (void) dseg_add_unique_s4(cd, cd->stackframesize * 8 + 4); /* FrameSize */
3511 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3512 (void) dseg_add_unique_s4(cd, 0); /* IntSave */
3513 (void) dseg_add_unique_s4(cd, 0); /* FltSave */
3515 #if defined(ENABLE_PROFILING)
3516 /* generate native method profiling code */
3518 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
3519 /* count frequency */
3521 M_MOV_IMM(code, REG_ITMP1);
3522 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
3526 /* calculate stackframe size for native function */
3528 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
3530 /* Mark the whole fpu stack as free for native functions (only for saved */
3531 /* register count == 0). */
3533 emit_ffree_reg(cd, 0);
3534 emit_ffree_reg(cd, 1);
3535 emit_ffree_reg(cd, 2);
3536 emit_ffree_reg(cd, 3);
3537 emit_ffree_reg(cd, 4);
3538 emit_ffree_reg(cd, 5);
3539 emit_ffree_reg(cd, 6);
3540 emit_ffree_reg(cd, 7);
3542 #if defined(ENABLE_GC_CACAO)
3543 /* remember callee saved int registers in stackframeinfo (GC may need to */
3544 /* recover them during a collection). */
3546 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3547 OFFSET(stackframeinfo_t, intregs);
3549 for (i = 0; i < INT_SAV_CNT; i++)
3550 M_AST(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3553 /* prepare data structures for native function call */
/* Pass the current SP (frame base) and call codegen_start_native_call
   to publish the stackframeinfo. */
3555 M_MOV(REG_SP, REG_ITMP1);
3556 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3557 M_IST_IMM(0, REG_SP, 1 * 4);
3560 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
3563 /* remember class argument */
/* codegen_start_native_call returns the class (for static methods) in
   REG_RESULT; park it in REG_ITMP3 until the JNI args are built. */
3565 if (m->flags & ACC_STATIC)
3566 M_MOV(REG_RESULT, REG_ITMP3);
3568 /* Copy or spill arguments to new locations. */
/* j indexes the native descriptor, offset by skipparams to leave room
   for the env (and class) slots inserted at the front. */
3570 for (i = md->paramcount - 1, j = i + skipparams; i >= 0; i--, j--) {
3571 if (!md->params[i].inmemory)
/* +8: skip the stub's own frame plus return address + saved slot to
   reach the caller's argument area.  TODO confirm exact layout --
   intermediate lines are elided here. */
3574 s1 = md->params[i].regoff + cd->stackframesize * 8 + 8;
3575 s2 = nmd->params[j].regoff;
3577 /* float/double in memory can be copied like int/longs */
3579 switch (md->paramtypes[i].type) {
3583 M_ILD(REG_ITMP1, REG_SP, s1);
3584 M_IST(REG_ITMP1, REG_SP, s2);
3588 M_LLD(REG_ITMP12_PACKED, REG_SP, s1);
3589 M_LST(REG_ITMP12_PACKED, REG_SP, s2);
3594 /* Handle native Java methods. */
3596 if (m->flags & ACC_NATIVE) {
3597 /* if function is static, put class into second argument */
3599 if (m->flags & ACC_STATIC)
3600 M_AST(REG_ITMP3, REG_SP, 1 * 4);
3602 /* put env into first argument */
3604 M_AST_IMM(_Jv_env, REG_SP, 0 * 4);
3607 /* Call the native function. */
/* The function address is loaded from a data-segment slot so an
   unresolved native (f == NULL) can be patched in later. */
3609 disp = dseg_add_functionptr(cd, f);
3610 emit_mov_imm_reg(cd, 0, REG_ITMP3);
3612 M_ALD(REG_ITMP1, REG_ITMP3, disp);
3615 /* save return value */
/* Preserve the native result across codegen_finish_native_call. */
3617 switch (md->returntype.type) {
3620 M_IST(REG_RESULT, REG_SP, 1 * 8);
3623 M_LST(REG_RESULT_PACKED, REG_SP, 1 * 8);
3626 emit_fsts_membase(cd, REG_SP, 1 * 8);
3629 emit_fstl_membase(cd, REG_SP, 1 * 8);
3635 /* remove native stackframe info */
3637 M_MOV(REG_SP, REG_ITMP1);
3638 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3639 M_IST_IMM(0, REG_SP, 1 * 4);
3642 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
/* finish_native_call returns any pending exception in REG_RESULT;
   keep it in REG_ITMP2 for the check below. */
3644 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
3646 /* restore return value */
3648 switch (md->returntype.type) {
3651 M_ILD(REG_RESULT, REG_SP, 1 * 8);
3654 M_LLD(REG_RESULT_PACKED, REG_SP, 1 * 8);
3657 emit_flds_membase(cd, REG_SP, 1 * 8);
3660 emit_fldl_membase(cd, REG_SP, 1 * 8);
3666 #if defined(ENABLE_GC_CACAO)
3667 /* restore callee saved int registers from stackframeinfo (GC might have */
3668 /* modified them during a collection). */
3670 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3671 OFFSET(stackframeinfo_t, intregs);
3673 for (i = 0; i < INT_SAV_CNT; i++)
3674 M_ALD(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3677 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
3679 /* check for exception */
3686 /* handle exception */
/* XPC = return address - 2 so the faulting PC points into the call
   instruction; then jump to the native-exception handler. */
3688 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
3689 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
3690 M_ASUB_IMM(2, REG_ITMP2_XPC);
3692 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
3698 * These are local overrides for various environment variables in Emacs.
3699 * Please do not remove this and leave it at the end of the file, where
3700 * Emacs will automagically detect them.
3701 * ---------------------------------------------------------------------
3704 * indent-tabs-mode: t
3708 * vim:noexpandtab:sw=4:ts=4: