1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007, 2008, 2009
4 CACAOVM - Verein zur Foerderung der freien virtuellen Maschine CACAO
6 This file is part of CACAO.
8 This program is free software; you can redistribute it and/or
9 modify it under the terms of the GNU General Public License as
10 published by the Free Software Foundation; either version 2, or (at
11 your option) any later version.
13 This program is distributed in the hope that it will be useful, but
14 WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with this program; if not, write to the Free Software
20 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
35 #include "vm/jit/i386/md-abi.h"
37 #include "vm/jit/i386/codegen.h"
38 #include "vm/jit/i386/emit.h"
40 #include "mm/memory.hpp"
42 #include "native/localref.hpp"
43 #include "native/native.hpp"
45 #include "threads/lock.hpp"
47 #include "vm/jit/builtin.hpp"
48 #include "vm/exceptions.hpp"
49 #include "vm/global.h"
50 #include "vm/loader.hpp"
51 #include "vm/options.h"
52 #include "vm/primitive.hpp"
56 #include "vm/jit/abi.h"
57 #include "vm/jit/asmpart.h"
58 #include "vm/jit/codegen-common.hpp"
59 #include "vm/jit/dseg.h"
60 #include "vm/jit/emit-common.hpp"
61 #include "vm/jit/jit.hpp"
62 #include "vm/jit/linenumbertable.hpp"
63 #include "vm/jit/parse.hpp"
64 #include "vm/jit/patcher-common.hpp"
65 #include "vm/jit/reg.h"
66 #include "vm/jit/replace.hpp"
67 #include "vm/jit/stacktrace.hpp"
68 #include "vm/jit/trap.hpp"
70 #if defined(ENABLE_SSA)
71 # include "vm/jit/optimizing/lsra.h"
72 # include "vm/jit/optimizing/ssa.h"
73 #elif defined(ENABLE_LSRA)
74 # include "vm/jit/allocator/lsra.h"
78 /* codegen_emit ****************************************************************
80 Generates machine code.
82 *******************************************************************************/
84 bool codegen_emit(jitdata *jd)
90 s4 len, s1, s2, s3, d, disp;
91 int align_off; /* offset for alignment compensation */
96 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
97 builtintable_entry *bte;
100 unresolved_field *uf;
103 #if defined(ENABLE_SSA)
105 bool last_cmd_was_goto;
107 last_cmd_was_goto = false;
111 /* get required compiler data */
118 /* prevent compiler warnings */
129 s4 savedregs_num = 0;
132 /* space to save used callee saved registers */
134 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
135 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
137 cd->stackframesize = rd->memuse + savedregs_num;
140 #if defined(ENABLE_THREADS)
141 /* space to save argument of monitor_enter */
143 if (checksync && code_is_synchronized(code))
144 cd->stackframesize++;
147 /* create method header */
149 /* Keep stack of non-leaf functions 16-byte aligned. */
151 if (!code_is_leafmethod(code)) {
152 ALIGN_ODD(cd->stackframesize);
155 align_off = cd->stackframesize ? 4 : 0;
157 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
158 (void) dseg_add_unique_s4(
159 cd, cd->stackframesize * 8 + align_off); /* FrameSize */
161 code->synchronizedoffset = rd->memuse * 8;
163 /* REMOVEME: We still need it for exception handling in assembler. */
165 if (code_is_leafmethod(code))
166 (void) dseg_add_unique_s4(cd, 1); /* IsLeaf */
168 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
170 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
171 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
173 #if defined(ENABLE_PROFILING)
174 /* generate method profiling code */
176 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
177 /* count frequency */
179 M_MOV_IMM(code, REG_ITMP3);
180 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
184 /* create stack frame (if necessary) */
186 if (cd->stackframesize)
188 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
190 /* save return address and used callee saved registers */
192 p = cd->stackframesize;
193 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
194 p--; M_AST(rd->savintregs[i], REG_SP, p * 8);
196 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
197 p--; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 8);
200 /* take arguments out of register or stack frame */
205 for (p = 0, l = 0; p < md->paramcount; p++) {
206 t = md->paramtypes[p].type;
208 varindex = jd->local_map[l * 5 + t];
209 #if defined(ENABLE_SSA)
211 if (varindex != UNUSED)
212 varindex = ls->var_0[varindex];
213 if ((varindex != UNUSED) && (ls->lifetime[varindex].type == UNUSED))
218 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
221 if (varindex == UNUSED)
225 s1 = md->params[p].regoff;
228 if (IS_INT_LNG_TYPE(t)) { /* integer args */
229 if (!md->params[p].inmemory) { /* register arguments */
230 log_text("integer register argument");
232 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
233 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
235 else { /* reg arg -> spilled */
236 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
240 if (!(var->flags & INMEMORY)) {
242 cd->stackframesize * 8 + 4 + align_off + s1);
245 if (!IS_2_WORD_TYPE(t)) {
246 #if defined(ENABLE_SSA)
247 /* no copy avoiding by now possible with SSA */
249 emit_mov_membase_reg( /* + 4 for return address */
251 cd->stackframesize * 8 + s1 + 4 + align_off,
253 emit_mov_reg_membase(
254 cd, REG_ITMP1, REG_SP, var->vv.regoff);
257 #endif /*defined(ENABLE_SSA)*/
258 /* reuse stackslot */
259 var->vv.regoff = cd->stackframesize * 8 + 4 +
264 #if defined(ENABLE_SSA)
265 /* no copy avoiding by now possible with SSA */
267 emit_mov_membase_reg( /* + 4 for return address */
269 cd->stackframesize * 8 + s1 + 4 + align_off,
271 emit_mov_reg_membase(
272 cd, REG_ITMP1, REG_SP, var->vv.regoff);
273 emit_mov_membase_reg( /* + 4 for return address */
275 cd->stackframesize * 8 + s1 + 4 + 4 + align_off,
277 emit_mov_reg_membase(
278 cd, REG_ITMP1, REG_SP, var->vv.regoff + 4);
281 #endif /*defined(ENABLE_SSA)*/
282 /* reuse stackslot */
283 var->vv.regoff = cd->stackframesize * 8 + 8 + s1;
288 else { /* floating args */
289 if (!md->params[p].inmemory) { /* register arguments */
290 log_text("There are no float argument registers!");
292 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
293 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
294 } else { /* reg arg -> spilled */
295 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 8 */
299 else { /* stack arguments */
300 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
304 cd->stackframesize * 8 + s1 + 4 + align_off);
306 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
312 cd->stackframesize * 8 + s1 + 4 + align_off);
314 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
317 } else { /* stack-arg -> spilled */
318 #if defined(ENABLE_SSA)
319 /* no copy avoiding by now possible with SSA */
321 emit_mov_membase_reg(
323 cd->stackframesize * 8 + s1 + 4 + align_off,
325 emit_mov_reg_membase(
326 cd, REG_ITMP1, REG_SP, var->vv.regoff);
330 cd->stackframesize * 8 + s1 + 4 + align_off);
331 emit_fstps_membase(cd, REG_SP, var->vv.regoff);
336 cd->stackframesize * 8 + s1 + 4 + align_off);
337 emit_fstpl_membase(cd, REG_SP, var->vv.regoff);
341 #endif /*defined(ENABLE_SSA)*/
342 /* reuse stackslot */
343 var->vv.regoff = cd->stackframesize * 8 + 4 +
350 /* call monitorenter function */
352 #if defined(ENABLE_THREADS)
353 if (checksync && code_is_synchronized(code)) {
356 if (m->flags & ACC_STATIC) {
357 M_MOV_IMM(&m->clazz->object.header, REG_ITMP1);
360 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 8 + 4 + align_off);
363 M_ALD_MEM(REG_ITMP1, TRAP_NullPointerException);
366 M_AST(REG_ITMP1, REG_SP, s1 * 8);
367 M_AST(REG_ITMP1, REG_SP, 0 * 4);
368 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
374 emit_verbosecall_enter(jd);
379 #if defined(ENABLE_SSA)
380 /* with SSA the Header is Basic Block 0 - insert phi Moves if necessary */
382 codegen_emit_phi_moves(jd, ls->basicblocks[0]);
385 /* end of header generation */
387 /* create replacement points */
389 REPLACEMENT_POINTS_INIT(cd, jd);
391 /* walk through all basic blocks */
393 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
395 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
397 if (bptr->flags >= BBREACHED) {
398 /* branch resolving */
400 codegen_resolve_branchrefs(cd, bptr);
402 /* handle replacement points */
404 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
406 #if defined(ENABLE_REPLACEMENT)
407 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
408 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
410 emit_trap_countdown(cd, &(m->hitcountdown));
415 /* copy interface registers to their destination */
420 #if defined(ENABLE_PROFILING)
421 /* generate basic block profiling code */
423 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
424 /* count frequency */
426 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
427 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
431 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
432 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
435 # if defined(ENABLE_SSA)
437 last_cmd_was_goto = false;
441 var = VAR(bptr->invars[len]);
442 if (bptr->type != BBTYPE_STD) {
443 if (!IS_2_WORD_TYPE(var->type)) {
444 #if !defined(ENABLE_SSA)
445 if (bptr->type == BBTYPE_EXH) {
446 d = codegen_reg_of_var(0, var, REG_ITMP1);
447 M_INTMOVE(REG_ITMP1, d);
448 emit_store(jd, NULL, var, d);
453 log_text("copy interface registers(EXH, SBR): longs \
454 have to be in memory (begin 1)");
462 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
466 var = VAR(bptr->invars[len]);
467 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
468 if (!IS_2_WORD_TYPE(var->type)) {
469 if (bptr->type == BBTYPE_EXH) {
470 d = codegen_reg_of_var(0, var, REG_ITMP1);
471 M_INTMOVE(REG_ITMP1, d);
472 emit_store(jd, NULL, var, d);
476 log_text("copy interface registers: longs have to be in \
483 assert((var->flags & INOUT));
488 /* walk through all instructions */
493 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
494 if (iptr->line != currentline) {
495 linenumbertable_list_entry_add(cd, iptr->line);
496 currentline = iptr->line;
499 MCODECHECK(1024); /* 1kB should be enough */
502 case ICMD_NOP: /* ... ==> ... */
503 case ICMD_POP: /* ..., value ==> ... */
504 case ICMD_POP2: /* ..., value, value ==> ... */
507 case ICMD_INLINE_START:
509 REPLACEMENT_POINT_INLINE_START(cd, iptr);
512 case ICMD_INLINE_BODY:
514 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
515 linenumbertable_list_entry_add_inline_start(cd, iptr);
516 linenumbertable_list_entry_add(cd, iptr->line);
519 case ICMD_INLINE_END:
521 linenumbertable_list_entry_add_inline_end(cd, iptr);
522 linenumbertable_list_entry_add(cd, iptr->line);
525 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
527 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
528 emit_nullpointer_check(cd, iptr, s1);
531 /* constant operations ************************************************/
533 case ICMD_ICONST: /* ... ==> ..., constant */
535 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
536 ICONST(d, iptr->sx.val.i);
537 emit_store_dst(jd, iptr, d);
540 case ICMD_LCONST: /* ... ==> ..., constant */
542 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
543 LCONST(d, iptr->sx.val.l);
544 emit_store_dst(jd, iptr, d);
547 case ICMD_FCONST: /* ... ==> ..., constant */
549 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
550 if (iptr->sx.val.f == 0.0) {
554 if (iptr->sx.val.i == 0x80000000) {
558 } else if (iptr->sx.val.f == 1.0) {
561 } else if (iptr->sx.val.f == 2.0) {
567 disp = dseg_add_float(cd, iptr->sx.val.f);
568 emit_mov_imm_reg(cd, 0, REG_ITMP1);
570 emit_flds_membase(cd, REG_ITMP1, disp);
572 emit_store_dst(jd, iptr, d);
575 case ICMD_DCONST: /* ... ==> ..., constant */
577 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
578 if (iptr->sx.val.d == 0.0) {
582 if (iptr->sx.val.l == 0x8000000000000000LL) {
586 } else if (iptr->sx.val.d == 1.0) {
589 } else if (iptr->sx.val.d == 2.0) {
595 disp = dseg_add_double(cd, iptr->sx.val.d);
596 emit_mov_imm_reg(cd, 0, REG_ITMP1);
598 emit_fldl_membase(cd, REG_ITMP1, disp);
600 emit_store_dst(jd, iptr, d);
603 case ICMD_ACONST: /* ... ==> ..., constant */
605 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
607 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
608 patcher_add_patch_ref(jd, PATCHER_aconst,
609 iptr->sx.val.c.ref, 0);
614 if (iptr->sx.val.anyptr == NULL)
617 M_MOV_IMM(iptr->sx.val.anyptr, d);
619 emit_store_dst(jd, iptr, d);
623 /* load/store/copy/move operations ************************************/
641 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
646 /* integer operations *************************************************/
648 case ICMD_INEG: /* ..., value ==> ..., - value */
650 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
651 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
654 emit_store_dst(jd, iptr, d);
657 case ICMD_LNEG: /* ..., value ==> ..., - value */
659 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
660 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
662 M_NEG(GET_LOW_REG(d));
663 M_IADDC_IMM(0, GET_HIGH_REG(d));
664 M_NEG(GET_HIGH_REG(d));
665 emit_store_dst(jd, iptr, d);
668 case ICMD_I2L: /* ..., value ==> ..., value */
670 s1 = emit_load_s1(jd, iptr, EAX);
671 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
674 M_LNGMOVE(EAX_EDX_PACKED, d);
675 emit_store_dst(jd, iptr, d);
678 case ICMD_L2I: /* ..., value ==> ..., value */
680 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
681 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
683 emit_store_dst(jd, iptr, d);
686 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
688 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
689 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
693 emit_store_dst(jd, iptr, d);
696 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
698 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
699 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
701 emit_store_dst(jd, iptr, d);
704 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
706 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
707 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
709 emit_store_dst(jd, iptr, d);
713 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
715 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
716 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
717 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
724 emit_store_dst(jd, iptr, d);
728 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
729 /* sx.val.i = constant */
731 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
732 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
734 /* `inc reg' is slower on p4's (regarding to ia32
735 optimization reference manual and benchmarks) and as
739 M_IADD_IMM(iptr->sx.val.i, d);
740 emit_store_dst(jd, iptr, d);
743 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
745 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
746 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
747 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
748 M_INTMOVE(s1, GET_LOW_REG(d));
749 M_IADD(s2, GET_LOW_REG(d));
750 /* don't use REG_ITMP1 */
751 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
752 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
753 M_INTMOVE(s1, GET_HIGH_REG(d));
754 M_IADDC(s2, GET_HIGH_REG(d));
755 emit_store_dst(jd, iptr, d);
758 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
759 /* sx.val.l = constant */
761 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
762 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
764 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
765 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
766 emit_store_dst(jd, iptr, d);
769 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
771 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
772 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
773 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
775 M_INTMOVE(s1, REG_ITMP1);
776 M_ISUB(s2, REG_ITMP1);
777 M_INTMOVE(REG_ITMP1, d);
783 emit_store_dst(jd, iptr, d);
786 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
787 /* sx.val.i = constant */
789 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
790 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
792 M_ISUB_IMM(iptr->sx.val.i, d);
793 emit_store_dst(jd, iptr, d);
796 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
798 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
799 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
800 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
801 if (s2 == GET_LOW_REG(d)) {
802 M_INTMOVE(s1, REG_ITMP1);
803 M_ISUB(s2, REG_ITMP1);
804 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
807 M_INTMOVE(s1, GET_LOW_REG(d));
808 M_ISUB(s2, GET_LOW_REG(d));
810 /* don't use REG_ITMP1 */
811 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
812 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
813 if (s2 == GET_HIGH_REG(d)) {
814 M_INTMOVE(s1, REG_ITMP2);
815 M_ISUBB(s2, REG_ITMP2);
816 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
819 M_INTMOVE(s1, GET_HIGH_REG(d));
820 M_ISUBB(s2, GET_HIGH_REG(d));
822 emit_store_dst(jd, iptr, d);
825 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
826 /* sx.val.l = constant */
828 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
829 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
831 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
832 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
833 emit_store_dst(jd, iptr, d);
836 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
838 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
839 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
840 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
847 emit_store_dst(jd, iptr, d);
850 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
851 /* sx.val.i = constant */
853 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
854 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
855 M_IMUL_IMM(s1, iptr->sx.val.i, d);
856 emit_store_dst(jd, iptr, d);
859 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
861 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
862 s2 = emit_load_s2_low(jd, iptr, EDX);
863 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
865 M_INTMOVE(s1, REG_ITMP2);
866 M_IMUL(s2, REG_ITMP2);
868 s1 = emit_load_s1_low(jd, iptr, EAX);
869 s2 = emit_load_s2_high(jd, iptr, EDX);
872 M_IADD(EDX, REG_ITMP2);
874 s1 = emit_load_s1_low(jd, iptr, EAX);
875 s2 = emit_load_s2_low(jd, iptr, EDX);
878 M_INTMOVE(EAX, GET_LOW_REG(d));
879 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
881 emit_store_dst(jd, iptr, d);
884 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
885 /* sx.val.l = constant */
887 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
888 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
889 ICONST(EAX, iptr->sx.val.l);
891 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
892 M_IADD(REG_ITMP2, EDX);
893 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
894 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
895 M_IADD(REG_ITMP2, EDX);
896 M_LNGMOVE(EAX_EDX_PACKED, d);
897 emit_store_dst(jd, iptr, d);
900 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
902 s1 = emit_load_s1(jd, iptr, EAX);
903 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
904 d = codegen_reg_of_dst(jd, iptr, EAX);
905 emit_arithmetic_check(cd, iptr, s2);
907 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
909 /* check as described in jvm spec */
911 M_CMP_IMM(0x80000000, EAX);
918 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
919 emit_store_dst(jd, iptr, d);
922 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
924 s1 = emit_load_s1(jd, iptr, EAX);
925 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
926 d = codegen_reg_of_dst(jd, iptr, EDX);
927 emit_arithmetic_check(cd, iptr, s2);
929 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
931 /* check as described in jvm spec */
933 M_CMP_IMM(0x80000000, EAX);
941 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
942 emit_store_dst(jd, iptr, d);
945 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
946 /* sx.val.i = constant */
948 /* TODO: optimize for `/ 2' */
949 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
950 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
954 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
955 M_SRA_IMM(iptr->sx.val.i, d);
956 emit_store_dst(jd, iptr, d);
959 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
960 /* sx.val.i = constant */
962 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
963 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
965 M_MOV(s1, REG_ITMP1);
969 M_AND_IMM(iptr->sx.val.i, d);
971 M_BGE(2 + 2 + 6 + 2);
972 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
974 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
976 emit_store_dst(jd, iptr, d);
979 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
980 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
982 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
983 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
985 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
986 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
987 /* XXX could be optimized */
988 emit_arithmetic_check(cd, iptr, REG_ITMP3);
990 bte = iptr->sx.s23.s3.bte;
993 M_LST(s2, REG_SP, 2 * 4);
995 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
996 M_LST(s1, REG_SP, 0 * 4);
998 M_MOV_IMM(bte->fp, REG_ITMP3);
1000 emit_store_dst(jd, iptr, d);
1003 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1004 /* sx.val.i = constant */
1006 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1007 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1009 M_TEST(GET_HIGH_REG(d));
1011 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1012 M_IADDC_IMM(0, GET_HIGH_REG(d));
1013 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1014 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1015 emit_store_dst(jd, iptr, d);
1019 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1020 /* sx.val.l = constant */
1022 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1023 if (iptr->dst.var->flags & INMEMORY) {
1024 if (iptr->s1.var->flags & INMEMORY) {
1025 /* Alpha algorithm */
1027 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8);
1029 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1035 /* TODO: hmm, don't know if this is always correct */
1037 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1039 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1045 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1046 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1048 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1049 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1050 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1051 emit_jcc(cd, CC_GE, disp);
1053 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1054 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1056 emit_neg_reg(cd, REG_ITMP1);
1057 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1058 emit_neg_reg(cd, REG_ITMP2);
1060 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1061 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1063 emit_neg_reg(cd, REG_ITMP1);
1064 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1065 emit_neg_reg(cd, REG_ITMP2);
1067 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 8);
1068 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 8 + 4);
1072 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1073 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1075 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1076 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1077 M_TEST(GET_LOW_REG(s1));
1083 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1085 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1086 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1087 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1088 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1091 emit_store_dst(jd, iptr, d);
1094 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1095 /* sx.val.i = constant */
1097 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1098 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1100 M_SLL_IMM(iptr->sx.val.i, d);
1101 emit_store_dst(jd, iptr, d);
1104 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1106 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1107 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1108 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1109 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1112 emit_store_dst(jd, iptr, d);
1115 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1116 /* sx.val.i = constant */
1118 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1119 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1121 M_SRA_IMM(iptr->sx.val.i, d);
1122 emit_store_dst(jd, iptr, d);
1125 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1127 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1128 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1129 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1130 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1133 emit_store_dst(jd, iptr, d);
1136 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1137 /* sx.val.i = constant */
1139 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1140 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1142 M_SRL_IMM(iptr->sx.val.i, d);
1143 emit_store_dst(jd, iptr, d);
1146 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1148 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1149 s2 = emit_load_s2(jd, iptr, ECX);
1150 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1153 M_TEST_IMM(32, ECX);
1155 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1156 M_CLR(GET_LOW_REG(d));
1157 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1158 M_SLL(GET_LOW_REG(d));
1159 emit_store_dst(jd, iptr, d);
1162 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1163 /* sx.val.i = constant */
1165 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1166 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1168 if (iptr->sx.val.i & 0x20) {
1169 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1170 M_CLR(GET_LOW_REG(d));
1171 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1175 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1177 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1179 emit_store_dst(jd, iptr, d);
1182 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1184 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1185 s2 = emit_load_s2(jd, iptr, ECX);
1186 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1189 M_TEST_IMM(32, ECX);
1191 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1192 M_SRA_IMM(31, GET_HIGH_REG(d));
1193 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1194 M_SRA(GET_HIGH_REG(d));
1195 emit_store_dst(jd, iptr, d);
1198 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1199 /* sx.val.i = constant */
1201 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1202 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1204 if (iptr->sx.val.i & 0x20) {
1205 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1206 M_SRA_IMM(31, GET_HIGH_REG(d));
1207 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1211 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1213 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1215 emit_store_dst(jd, iptr, d);
1218 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1220 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1221 s2 = emit_load_s2(jd, iptr, ECX);
1222 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1225 M_TEST_IMM(32, ECX);
1227 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1228 M_CLR(GET_HIGH_REG(d));
1229 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1230 M_SRL(GET_HIGH_REG(d));
1231 emit_store_dst(jd, iptr, d);
1234 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1235 /* sx.val.l = constant */
1237 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1238 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1240 if (iptr->sx.val.i & 0x20) {
1241 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1242 M_CLR(GET_HIGH_REG(d));
1243 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1247 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1249 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1251 emit_store_dst(jd, iptr, d);
1254 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1256 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1257 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1258 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1265 emit_store_dst(jd, iptr, d);
1268 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1269 /* sx.val.i = constant */
1271 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1272 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1274 M_AND_IMM(iptr->sx.val.i, d);
1275 emit_store_dst(jd, iptr, d);
1278 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1280 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1281 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1282 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1283 if (s2 == GET_LOW_REG(d))
1284 M_AND(s1, GET_LOW_REG(d));
1286 M_INTMOVE(s1, GET_LOW_REG(d));
1287 M_AND(s2, GET_LOW_REG(d));
1289 /* REG_ITMP1 probably contains low 32-bit of destination */
1290 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1291 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1292 if (s2 == GET_HIGH_REG(d))
1293 M_AND(s1, GET_HIGH_REG(d));
1295 M_INTMOVE(s1, GET_HIGH_REG(d));
1296 M_AND(s2, GET_HIGH_REG(d));
1298 emit_store_dst(jd, iptr, d);
1301 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1302 /* sx.val.l = constant */
1304 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1305 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1307 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1308 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1309 emit_store_dst(jd, iptr, d);
1312 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1314 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1315 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
/* Integer bitwise OR/XOR cases. 64-bit (L*) values live as packed
   low/high 32-bit register pairs; each half is combined separately. */
1316 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1323 emit_store_dst(jd, iptr, d);
1326 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1327 /* sx.val.i = constant */
1329 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1330 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1332 M_OR_IMM(iptr->sx.val.i, d);
1333 emit_store_dst(jd, iptr, d);
1336 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
/* Low 32-bit halves first. */
1338 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1339 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1340 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
/* If s2 already sits in the destination's low register, OR s1 into it
   directly; otherwise move s1 there first, then OR s2. */
1341 if (s2 == GET_LOW_REG(d))
1342 M_OR(s1, GET_LOW_REG(d));
1344 M_INTMOVE(s1, GET_LOW_REG(d));
1345 M_OR(s2, GET_LOW_REG(d));
1347 /* REG_ITMP1 probably contains low 32-bit of destination */
1348 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1349 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1350 if (s2 == GET_HIGH_REG(d))
1351 M_OR(s1, GET_HIGH_REG(d));
1353 M_INTMOVE(s1, GET_HIGH_REG(d));
1354 M_OR(s2, GET_HIGH_REG(d));
1356 emit_store_dst(jd, iptr, d);
1359 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1360 /* sx.val.l = constant */
1362 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1363 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
/* OR each 32-bit half of the immediate into the matching half of d. */
1365 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1366 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1367 emit_store_dst(jd, iptr, d);
1370 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1372 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1373 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1374 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1381 emit_store_dst(jd, iptr, d);
1384 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1385 /* sx.val.i = constant */
1387 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1388 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1390 M_XOR_IMM(iptr->sx.val.i, d);
1391 emit_store_dst(jd, iptr, d);
1394 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
/* Same register-coalescing pattern as ICMD_LOR, with XOR. */
1396 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1397 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1398 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1399 if (s2 == GET_LOW_REG(d))
1400 M_XOR(s1, GET_LOW_REG(d));
1402 M_INTMOVE(s1, GET_LOW_REG(d));
1403 M_XOR(s2, GET_LOW_REG(d));
1405 /* REG_ITMP1 probably contains low 32-bit of destination */
1406 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1407 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1408 if (s2 == GET_HIGH_REG(d))
1409 M_XOR(s1, GET_HIGH_REG(d));
1411 M_INTMOVE(s1, GET_HIGH_REG(d));
1412 M_XOR(s2, GET_HIGH_REG(d));
1414 emit_store_dst(jd, iptr, d);
1417 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1418 /* sx.val.l = constant */
1420 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1421 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1423 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1424 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1425 emit_store_dst(jd, iptr, d);
1429 /* floating operations ************************************************/
/* x87-based float/double arithmetic: operands are loaded onto the FPU
   stack (REG_FTMP*), the arithmetic op works on the stack top(s), and
   the result is written back through emit_store_dst. */
1431 case ICMD_FNEG: /* ..., value ==> ..., - value */
1433 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1434 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1436 emit_store_dst(jd, iptr, d);
1439 case ICMD_DNEG: /* ..., value ==> ..., - value */
1441 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1442 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1444 emit_store_dst(jd, iptr, d);
1447 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1449 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1450 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1451 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1453 emit_store_dst(jd, iptr, d);
1456 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1458 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1459 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1460 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1462 emit_store_dst(jd, iptr, d);
1465 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1467 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1468 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1469 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1471 emit_store_dst(jd, iptr, d);
1474 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1476 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1477 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1478 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1480 emit_store_dst(jd, iptr, d);
1483 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1485 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1486 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1487 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1489 emit_store_dst(jd, iptr, d);
1492 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1494 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1495 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1496 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1498 emit_store_dst(jd, iptr, d);
1501 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1503 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1504 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1505 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1507 emit_store_dst(jd, iptr, d);
1510 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1512 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1513 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1514 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1516 emit_store_dst(jd, iptr, d);
1519 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
/* Remainder loop: s2 is loaded before s1 so the divisor sits below
   the dividend on the x87 stack, avoiding an fxch. */
1521 /* exchanged to skip fxch */
1522 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1523 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1524 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1525 /* emit_fxch(cd); */
/* Backward branch while the reduction is incomplete; the displacement
   is a hand-counted byte length of the loop body. NOTE(review): must
   be re-counted if the emitted sequence ever changes. */
1530 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1531 emit_store_dst(jd, iptr, d);
/* Release the leftover x87 stack slot after the remainder loop. */
1532 emit_ffree_reg(cd, 0);
1536 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
/* Identical scheme to ICMD_FREM, for doubles. */
1538 /* exchanged to skip fxch */
1539 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1540 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1541 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1542 /* emit_fxch(cd); */
1547 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1548 emit_store_dst(jd, iptr, d);
1549 emit_ffree_reg(cd, 0);
/* int/long -> float/double conversions via x87 fild(l/ll). */
1553 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1554 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1556 var = VAROP(iptr->s1);
1557 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
/* fild needs a memory operand: load directly from the stack slot if
   the source is spilled; otherwise spill the register source into a
   data-segment scratch word first. */
1559 if (var->flags & INMEMORY) {
1560 emit_fildl_membase(cd, REG_SP, var->vv.regoff);
1562 /* XXX not thread safe! */
1563 disp = dseg_add_unique_s4(cd, 0);
1564 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1566 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1567 emit_fildl_membase(cd, REG_ITMP1, disp);
1570 emit_store_dst(jd, iptr, d);
1573 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1574 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1576 var = VAROP(iptr->s1);
1577 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1578 if (var->flags & INMEMORY) {
/* 64-bit integer load onto the x87 stack. */
1579 emit_fildll_membase(cd, REG_SP, var->vv.regoff);
/* A register-resident long would be a register-allocator bug here. */
1582 log_text("L2F: longs have to be in memory");
1585 emit_store_dst(jd, iptr, d);
/* float/double -> int. The x87 control word is temporarily switched
   to round-to-zero (truncation), the value stored via fistpl, and the
   control word restored. A stored value of 0x80000000 (x87 "integer
   indefinite", produced on overflow/NaN) triggers a slow-path call to
   asm_builtin_f2i/asm_builtin_d2i — presumably to produce the
   Java-defined saturation/NaN results; confirm in asmpart.
   NOTE(review): the 0 immediate loaded into REG_ITMP1 before
   emit_fldcw_membase appears to be relocated to the data-segment base
   later — confirm against the dseg machinery. */
1588 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1590 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1591 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1593 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1596 /* Round to zero, 53-bit mode, exception masked */
1597 disp = dseg_add_s4(cd, 0x0e7f);
1598 emit_fldcw_membase(cd, REG_ITMP1, disp);
1600 var = VAROP(iptr->dst);
1601 var1 = VAROP(iptr->s1);
1603 if (var->flags & INMEMORY) {
1604 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1606 /* Round to nearest, 53-bit mode, exceptions masked */
1607 disp = dseg_add_s4(cd, 0x027f);
1608 emit_fldcw_membase(cd, REG_ITMP1, disp);
/* 0x80000000 marks possible overflow or NaN. */
1610 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1611 REG_SP, var->vv.regoff);
/* disp accumulates the byte size of the slow-path sequence to skip. */
1614 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1616 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1619 /* XXX not thread safe! */
1620 disp = dseg_add_unique_s4(cd, 0);
1621 emit_fistpl_membase(cd, REG_ITMP1, disp);
1622 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1624 /* Round to nearest, 53-bit mode, exceptions masked */
1625 disp = dseg_add_s4(cd, 0x027f);
1626 emit_fldcw_membase(cd, REG_ITMP1, disp);
1628 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1631 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1632 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
/* Skip the builtin call when the fast-path result is valid. */
1635 emit_jcc(cd, CC_NE, disp);
1637 /* XXX: change this when we use registers */
/* Slow path: reload the operand and call the assembler helper. */
1638 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1639 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1640 emit_call_reg(cd, REG_ITMP1);
1642 if (var->flags & INMEMORY) {
1643 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1646 M_INTMOVE(REG_RESULT, var->vv.regoff);
1650 case ICMD_D2I: /* ..., value ==> ..., (int) value */
/* Same scheme as ICMD_F2I, with a 64-bit operand load (fldl) and
   asm_builtin_d2i on the slow path. */
1652 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1653 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1655 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1658 /* Round to zero, 53-bit mode, exception masked */
1659 disp = dseg_add_s4(cd, 0x0e7f);
1660 emit_fldcw_membase(cd, REG_ITMP1, disp);
1662 var = VAROP(iptr->dst);
1663 var1 = VAROP(iptr->s1);
1665 if (var->flags & INMEMORY) {
1666 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1668 /* Round to nearest, 53-bit mode, exceptions masked */
1669 disp = dseg_add_s4(cd, 0x027f);
1670 emit_fldcw_membase(cd, REG_ITMP1, disp);
1672 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1673 REG_SP, var->vv.regoff);
1676 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1678 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1681 /* XXX not thread safe! */
1682 disp = dseg_add_unique_s4(cd, 0);
1683 emit_fistpl_membase(cd, REG_ITMP1, disp);
1684 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1686 /* Round to nearest, 53-bit mode, exceptions masked */
1687 disp = dseg_add_s4(cd, 0x027f);
1688 emit_fldcw_membase(cd, REG_ITMP1, disp);
1690 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1693 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1694 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1697 emit_jcc(cd, CC_NE, disp);
1699 /* XXX: change this when we use registers */
1700 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1701 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1702 emit_call_reg(cd, REG_ITMP1);
1704 if (var->flags & INMEMORY) {
1705 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1707 M_INTMOVE(REG_RESULT, var->vv.regoff);
/* float/double -> long. Truncate via fistpll (64-bit store); the x87
   "integer indefinite" pattern — high word 0x80000000 AND low word 0 —
   signals overflow/NaN, in which case asm_builtin_f2l/d2l is called
   and its two-register result is spilled back. Longs must be
   memory-resident on i386. */
1711 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1713 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1714 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1716 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1719 /* Round to zero, 53-bit mode, exception masked */
1720 disp = dseg_add_s4(cd, 0x0e7f);
1721 emit_fldcw_membase(cd, REG_ITMP1, disp);
1723 var = VAROP(iptr->dst);
1724 var1 = VAROP(iptr->s1);
1726 if (var->flags & INMEMORY) {
1727 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1729 /* Round to nearest, 53-bit mode, exceptions masked */
1730 disp = dseg_add_s4(cd, 0x027f);
1731 emit_fldcw_membase(cd, REG_ITMP1, disp);
/* First test: high word against 0x80000000. */
1733 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1734 REG_SP, var->vv.regoff + 4);
/* disp accumulates the byte size of the slow path to skip. */
1737 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1739 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1742 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1744 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1746 emit_jcc(cd, CC_NE, disp);
/* Second test: low word against 0. */
1748 emit_alu_imm_membase(cd, ALU_CMP, 0,
1749 REG_SP, var->vv.regoff);
1752 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1754 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1756 emit_jcc(cd, CC_NE, disp);
1758 /* XXX: change this when we use registers */
/* Slow path: reload operand, call helper, spill 64-bit result. */
1759 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1760 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1761 emit_call_reg(cd, REG_ITMP1);
1762 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1763 emit_mov_reg_membase(cd, REG_RESULT2,
1764 REG_SP, var->vv.regoff + 4);
1767 log_text("F2L: longs have to be in memory");
1772 case ICMD_D2L: /* ..., value ==> ..., (long) value */
/* Identical to ICMD_F2L, with a 64-bit operand load (fldl) and
   asm_builtin_d2l. */
1774 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1775 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1777 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1780 /* Round to zero, 53-bit mode, exception masked */
1781 disp = dseg_add_s4(cd, 0x0e7f);
1782 emit_fldcw_membase(cd, REG_ITMP1, disp);
1784 var = VAROP(iptr->dst);
1785 var1 = VAROP(iptr->s1);
1787 if (var->flags & INMEMORY) {
1788 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1790 /* Round to nearest, 53-bit mode, exceptions masked */
1791 disp = dseg_add_s4(cd, 0x027f);
1792 emit_fldcw_membase(cd, REG_ITMP1, disp);
1794 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1795 REG_SP, var->vv.regoff + 4);
1798 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1800 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1803 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1805 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1807 emit_jcc(cd, CC_NE, disp);
1809 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff);
1812 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1814 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1816 emit_jcc(cd, CC_NE, disp);
1818 /* XXX: change this when we use registers */
1819 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1820 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1821 emit_call_reg(cd, REG_ITMP1);
1822 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1823 emit_mov_reg_membase(cd, REG_RESULT2,
1824 REG_SP, var->vv.regoff + 4);
1827 log_text("D2L: longs have to be in memory");
1832 case ICMD_F2D: /* ..., value ==> ..., (double) value */
/* On the x87 both widths use the same 80-bit stack format, so a load
   followed by a store in the destination width performs the cast. */
1834 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1835 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1837 emit_store_dst(jd, iptr, d);
1840 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1842 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1843 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1845 emit_store_dst(jd, iptr, d);
1848 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
/* Three-way compare producing -1/0/+1 in d. Operands are loaded in
   swapped order to avoid an fxch (which inverts the compare sense);
   all jump displacements below are hand-counted instruction bytes. */
1851 /* exchanged to skip fxch */
1852 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1853 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1854 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1855 /* emit_fxch(cd); */
/* Presumably the FPU status word was stored to AX just above (elided
   here); 0x400 is the C2 "unordered" bit. */
1858 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1859 emit_jcc(cd, CC_E, 6);
1860 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
/* d := 0, then adjust to -1 or +1 based on the compare flags. */
1862 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1863 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1864 emit_jcc(cd, CC_B, 3 + 5);
1865 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1866 emit_jmp_imm(cd, 3);
1867 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1868 emit_store_dst(jd, iptr, d);
1871 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
/* As FCMPL, but the unordered case is steered the other way by
   forcing AH to 1 before the flag-driven adjustment. */
1874 /* exchanged to skip fxch */
1875 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1876 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1877 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1878 /* emit_fxch(cd); */
1881 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1882 emit_jcc(cd, CC_E, 3);
1883 emit_movb_imm_reg(cd, 1, REG_AH);
1885 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1886 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1887 emit_jcc(cd, CC_B, 3 + 5);
1888 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1889 emit_jmp_imm(cd, 3);
1890 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1891 emit_store_dst(jd, iptr, d);
1895 /* memory operations **************************************************/
/* Array loads: s1 = arrayref, s2 = index. The bounds check (which
   also serves as the implicit null-pointer check) precedes a scaled
   indexed access at OFFSET(java_*array_t, data[0]). */
1897 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1899 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1900 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1901 /* implicit null-pointer check */
1902 M_ILD(d, s1, OFFSET(java_array_t, size));
1903 emit_store_dst(jd, iptr, d);
1906 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1908 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1909 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1910 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1911 /* implicit null-pointer check */
1912 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
/* Sign-extending byte load. */
1913 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray_t, data[0]),
1915 emit_store_dst(jd, iptr, d);
1918 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1920 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1921 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1922 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1923 /* implicit null-pointer check */
1924 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
/* Zero-extending 16-bit load (char is unsigned). */
1925 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray_t, data[0]),
1927 emit_store_dst(jd, iptr, d);
1930 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1932 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1933 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1934 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1935 /* implicit null-pointer check */
1936 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
/* Sign-extending 16-bit load (short is signed). */
1937 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray_t, data[0]),
1939 emit_store_dst(jd, iptr, d);
1942 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1944 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1945 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1946 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1947 /* implicit null-pointer check */
1948 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1949 emit_mov_memindex_reg(cd, OFFSET(java_intarray_t, data[0]),
1951 emit_store_dst(jd, iptr, d);
1954 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
/* 64-bit load: both halves go through REG_ITMP3 into the (always
   memory-resident) destination slot. */
1956 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1957 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1958 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1959 /* implicit null-pointer check */
1960 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1962 var = VAROP(iptr->dst);
1964 assert(var->flags & INMEMORY);
1965 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]),
1966 s1, s2, 3, REG_ITMP3);
1967 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff);
1968 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]) + 4,
1969 s1, s2, 3, REG_ITMP3);
1970 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff + 4);
1973 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1975 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1976 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1977 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1978 /* implicit null-pointer check */
1979 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1980 emit_flds_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2, 2);
1981 emit_store_dst(jd, iptr, d);
1984 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
1986 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1987 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1988 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1989 /* implicit null-pointer check */
1990 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1991 emit_fldl_memindex(cd, OFFSET(java_doublearray_t, data[0]), s1, s2,3);
1992 emit_store_dst(jd, iptr, d);
1995 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
1997 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1998 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1999 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2000 /* implicit null-pointer check */
2001 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2002 emit_mov_memindex_reg(cd, OFFSET(java_objectarray_t, data[0]),
2004 emit_store_dst(jd, iptr, d);
/* Array stores: s1 = arrayref, s2 = index, s3 = value (or an
   instruction-embedded constant in the *STORECONST variants). */
2008 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2010 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2011 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2012 /* implicit null-pointer check */
2013 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2014 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2016 /* because EBP, ESI, EDI have no xH and xL nibbles */
2017 M_INTMOVE(s3, REG_ITMP3);
2020 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray_t, data[0]),
2024 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2026 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2027 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2028 /* implicit null-pointer check */
2029 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2030 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2031 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray_t, data[0]),
2035 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2037 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2038 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2039 /* implicit null-pointer check */
2040 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2041 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2042 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray_t, data[0]),
2046 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2048 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2049 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2050 /* implicit null-pointer check */
2051 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2052 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2053 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray_t, data[0]),
2057 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
/* 64-bit store: the value is always stack-resident; copy both
   halves through REG_ITMP3. */
2059 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2060 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2061 /* implicit null-pointer check */
2062 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2064 var = VAROP(iptr->sx.s23.s3);
2066 assert(var->flags & INMEMORY);
2067 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff, REG_ITMP3);
2068 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray_t, data[0])
2070 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff + 4, REG_ITMP3);
2071 emit_mov_reg_memindex(cd, REG_ITMP3,
2072 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2075 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2077 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2078 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2079 /* implicit null-pointer check */
2080 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2081 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2082 emit_fstps_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2,2);
2085 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2087 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2088 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2089 /* implicit null-pointer check */
2090 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2091 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2092 emit_fstpl_memindex(cd, OFFSET(java_doublearray_t, data[0]),
2096 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
/* Reference store needs a runtime assignability check: arrayref and
   value are passed on the stack to BUILTIN_FAST_canstore, then the
   operands are reloaded (the call may clobber registers) and the
   actual store is emitted. */
2098 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2099 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2100 /* implicit null-pointer check */
2101 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2102 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2104 M_AST(s1, REG_SP, 0 * 4);
2105 M_AST(s3, REG_SP, 1 * 4);
2106 M_MOV_IMM(BUILTIN_FAST_canstore, REG_ITMP1);
2108 emit_arraystore_check(cd, iptr);
2110 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2111 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2112 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2113 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray_t, data[0]),
2117 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
/* Store-constant variants: the value is embedded in the instruction. */
2119 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2120 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2121 /* implicit null-pointer check */
2122 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2123 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2124 OFFSET(java_bytearray_t, data[0]), s1, s2, 0);
2127 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2129 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2130 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2131 /* implicit null-pointer check */
2132 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2133 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2134 OFFSET(java_chararray_t, data[0]), s1, s2, 1);
2137 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2139 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2140 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2141 /* implicit null-pointer check */
2142 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2143 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2144 OFFSET(java_shortarray_t, data[0]), s1, s2, 1);
2147 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2149 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2150 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2151 /* implicit null-pointer check */
2152 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2153 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2154 OFFSET(java_intarray_t, data[0]), s1, s2, 2);
2157 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
/* NOTE(review): the high word uses ((s4)c) >> 31, which yields 0 or
   -1 (sign replication), not the constant's true high word — this is
   only correct if the long constant is guaranteed to fit in 32 bits
   here; confirm against the optimizer's constval representation. */
2159 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2160 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2161 /* implicit null-pointer check */
2162 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2163 emit_mov_imm_memindex(cd,
2164 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2165 OFFSET(java_longarray_t, data[0]), s1, s2, 3);
2166 emit_mov_imm_memindex(cd,
2167 ((s4)iptr->sx.s23.s3.constval) >> 31,
2168 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2171 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
/* Stores immediate 0 — presumably only the null reference constant
   reaches this opcode. */
2173 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2174 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2175 /* implicit null-pointer check */
2176 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2177 emit_mov_imm_memindex(cd, 0,
2178 OFFSET(java_objectarray_t, data[0]), s1, s2, 2);
/* Static field access. Unresolved fields register a
   PATCHER_get_putstatic stub; resolved fields of not-yet-initialized
   classes register a class-initializer patcher. The field's absolute
   address ends up in REG_ITMP1 via M_MOV_IMM2 (the patchable form). */
2182 case ICMD_GETSTATIC: /* ... ==> ..., value */
2184 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2185 uf = iptr->sx.s23.s3.uf;
2186 fieldtype = uf->fieldref->parseddesc.fd->type;
2189 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2193 fi = iptr->sx.s23.s3.fmiref->p.field;
2194 fieldtype = fi->type;
2195 disp = (intptr_t) fi->value;
2197 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2198 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2201 M_MOV_IMM2(disp, REG_ITMP1);
/* Dispatch on the field's type to pick load width and register set. */
2202 switch (fieldtype) {
2205 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2206 M_ILD(d, REG_ITMP1, 0);
2209 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2210 M_LLD(d, REG_ITMP1, 0);
2213 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2214 M_FLD(d, REG_ITMP1, 0);
2217 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2218 M_DLD(d, REG_ITMP1, 0);
2221 emit_store_dst(jd, iptr, d);
2224 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2226 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2227 uf = iptr->sx.s23.s3.uf;
2228 fieldtype = uf->fieldref->parseddesc.fd->type;
2231 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2234 fi = iptr->sx.s23.s3.fmiref->p.field;
2235 fieldtype = fi->type;
2236 disp = (intptr_t) fi->value;
2238 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2239 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2242 M_MOV_IMM2(disp, REG_ITMP1);
2243 switch (fieldtype) {
2246 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2247 M_IST(s1, REG_ITMP1, 0);
2250 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2251 M_LST(s1, REG_ITMP1, 0);
2254 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2255 emit_fstps_membase(cd, REG_ITMP1, 0);
2258 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2259 emit_fstpl_membase(cd, REG_ITMP1, 0);
2264 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2265 /* val = value (in current instruction) */
2266 /* following NOP) */
2268 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2269 uf = iptr->sx.s23.s3.uf;
2270 fieldtype = uf->fieldref->parseddesc.fd->type;
2273 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2276 fi = iptr->sx.s23.s3.fmiref->p.field;
2277 fieldtype = fi->type;
2278 disp = (intptr_t) fi->value;
2280 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2281 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2284 M_MOV_IMM2(disp, REG_ITMP1);
2285 switch (fieldtype) {
2288 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
/* NOTE(review): high word is ((s4)c) >> 31, i.e. 0/-1 sign fill —
   assumes the long constant fits in 32 bits; confirm. */
2291 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2292 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
/* Instance field access: s1 = objectref (explicitly null-checked).
   The 32-bit-displacement load/store forms (M_ILD32, M_IST32, ...)
   keep the patch site a fixed size for unresolved fields. */
2299 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2301 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2302 emit_nullpointer_check(cd, iptr, s1);
2304 #if defined(ENABLE_ESCAPE_CHECK)
2305 /*emit_escape_check(cd, s1);*/
2308 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2309 uf = iptr->sx.s23.s3.uf;
2310 fieldtype = uf->fieldref->parseddesc.fd->type;
2313 patcher_add_patch_ref(jd, PATCHER_getfield,
2314 iptr->sx.s23.s3.uf, 0);
2317 fi = iptr->sx.s23.s3.fmiref->p.field;
2318 fieldtype = fi->type;
2322 switch (fieldtype) {
2325 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2326 M_ILD32(d, s1, disp);
2329 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2330 M_LLD32(d, s1, disp);
2333 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2334 M_FLD32(d, s1, disp);
2337 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2338 M_DLD32(d, s1, disp);
2341 emit_store_dst(jd, iptr, d);
2344 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2346 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2347 emit_nullpointer_check(cd, iptr, s1);
2349 /* must be done here because of code patching */
2351 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2352 uf = iptr->sx.s23.s3.uf;
2353 fieldtype = uf->fieldref->parseddesc.fd->type;
2356 fi = iptr->sx.s23.s3.fmiref->p.field;
2357 fieldtype = fi->type;
/* The value is loaded before the patcher ref is added (see comment
   at the top of this case) so the patch covers only the store. */
2360 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2361 if (IS_2_WORD_TYPE(fieldtype))
2362 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2364 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2367 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2369 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2371 uf = iptr->sx.s23.s3.uf;
2374 patcher_add_patch_ref(jd, PATCHER_putfield, uf, 0);
2378 fi = iptr->sx.s23.s3.fmiref->p.field;
2382 switch (fieldtype) {
2385 M_IST32(s2, s1, disp);
2388 M_LST32(s2, s1, disp);
2391 emit_fstps_membase32(cd, s1, disp);
2394 emit_fstpl_membase32(cd, s1, disp);
2399 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2400 /* val = value (in current instruction) */
2401 /* following NOP) */
2403 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2404 emit_nullpointer_check(cd, iptr, s1);
2406 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2407 uf = iptr->sx.s23.s3.uf;
2408 fieldtype = uf->fieldref->parseddesc.fd->type;
2411 patcher_add_patch_ref(jd, PATCHER_putfieldconst,
2415 fi = iptr->sx.s23.s3.fmiref->p.field;
2416 fieldtype = fi->type;
2420 switch (fieldtype) {
2423 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
/* NOTE(review): high word is ((s4)c) >> 31 (0/-1 sign fill) —
   assumes the long constant fits in 32 bits; confirm. */
2426 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2427 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2435 /* branch operations **************************************************/
2437 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2439 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2440 M_INTMOVE(s1, REG_ITMP1_XPTR);
2442 #ifdef ENABLE_VERIFIER
2443 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2444 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2445 iptr->sx.s23.s2.uc, 0);
2447 #endif /* ENABLE_VERIFIER */
2449 M_CALL_IMM(0); /* passing exception pc */
2450 M_POP(REG_ITMP2_XPC);
2452 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2456 case ICMD_GOTO: /* ... ==> ... */
2457 case ICMD_RET: /* ... ==> ... */
2459 #if defined(ENABLE_SSA)
2461 last_cmd_was_goto = true;
2463 /* In case of a Goto phimoves have to be inserted before the */
2466 codegen_emit_phi_moves(jd, bptr);
2469 emit_br(cd, iptr->dst.block);
2473 case ICMD_JSR: /* ... ==> ... */
2475 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
2479 case ICMD_IFNULL: /* ..., value ==> ... */
2480 case ICMD_IFNONNULL:
2482 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2484 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
2487 case ICMD_IFEQ: /* ..., value ==> ... */
2494 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2495 M_CMP_IMM(iptr->sx.val.i, s1);
2496 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
2499 case ICMD_IF_LEQ: /* ..., value ==> ... */
2501 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2502 if (iptr->sx.val.l == 0) {
2503 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2504 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2507 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2508 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2509 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2510 M_OR(REG_ITMP2, REG_ITMP1);
2512 emit_beq(cd, iptr->dst.block);
2515 case ICMD_IF_LLT: /* ..., value ==> ... */
2517 if (iptr->sx.val.l == 0) {
2518 /* If high 32-bit are less than zero, then the 64-bits
2520 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2522 emit_blt(cd, iptr->dst.block);
2525 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2526 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2527 emit_blt(cd, iptr->dst.block);
2529 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2530 emit_bult(cd, iptr->dst.block);
2534 case ICMD_IF_LLE: /* ..., value ==> ... */
2536 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2537 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2538 emit_blt(cd, iptr->dst.block);
2540 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2541 emit_bule(cd, iptr->dst.block);
2544 case ICMD_IF_LNE: /* ..., value ==> ... */
2546 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2547 if (iptr->sx.val.l == 0) {
2548 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2549 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2552 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2553 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2554 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2555 M_OR(REG_ITMP2, REG_ITMP1);
2557 emit_bne(cd, iptr->dst.block);
2560 case ICMD_IF_LGT: /* ..., value ==> ... */
2562 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2563 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2564 emit_bgt(cd, iptr->dst.block);
2566 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2567 emit_bugt(cd, iptr->dst.block);
2570 case ICMD_IF_LGE: /* ..., value ==> ... */
2572 if (iptr->sx.val.l == 0) {
2573 /* If high 32-bit are greater equal zero, then the
2575 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2577 emit_bge(cd, iptr->dst.block);
2580 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2581 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2582 emit_bgt(cd, iptr->dst.block);
2584 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2585 emit_buge(cd, iptr->dst.block);
/* 32-bit integer compare-and-branch.  All six relations share one
   code path: the branch condition is derived from the opcode's
   offset from ICMD_IF_ICMPEQ, so the ICMD ordering must match the
   branch-condition ordering expected by emit_bcc(). */
2589 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2590 case ICMD_IF_ICMPNE:
2591 case ICMD_IF_ICMPLT:
2592 case ICMD_IF_ICMPGT:
2593 case ICMD_IF_ICMPGE:
2594 case ICMD_IF_ICMPLE:
2596 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2597 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2599 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);

/* Reference (address) compare-and-branch; same opcode-offset trick
   relative to ICMD_IF_ACMPEQ (only EQ/NE exist for references). */
2602 case ICMD_IF_ACMPEQ: /* ..., value, value ==> ... */
2603 case ICMD_IF_ACMPNE:
2605 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2606 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2608 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);
/* 64-bit equality branch between two long operands: XOR the low
   halves and the high halves separately, OR the two XOR results —
   the OR is zero iff both halves were equal. */
2611 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2613 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2614 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2615 M_INTMOVE(s1, REG_ITMP1);
2616 M_XOR(s2, REG_ITMP1);
2617 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2618 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2619 M_INTMOVE(s1, REG_ITMP2);
2620 M_XOR(s2, REG_ITMP2);
2621 M_OR(REG_ITMP1, REG_ITMP2);
2622 emit_beq(cd, iptr->dst.block);

/* 64-bit inequality branch: identical XOR/OR sequence, but branch
   when the combined result is non-zero. */
2625 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2627 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2628 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2629 M_INTMOVE(s1, REG_ITMP1);
2630 M_XOR(s2, REG_ITMP1);
2631 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2632 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2633 M_INTMOVE(s1, REG_ITMP2);
2634 M_XOR(s2, REG_ITMP2);
2635 M_OR(REG_ITMP1, REG_ITMP2);
2636 emit_bne(cd, iptr->dst.block);
/* Ordered 64-bit compare-and-branch between two longs.  Pattern
   for all four relations: signed compare of the high words decides
   '<' / '>' immediately; if the high words are equal, an UNSIGNED
   compare of the low words decides the rest. */
2639 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2641 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2642 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2644 emit_blt(cd, iptr->dst.block);
2645 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2646 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2649 emit_bult(cd, iptr->dst.block);

2652 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2654 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2655 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2657 emit_bgt(cd, iptr->dst.block);
2658 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2659 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2662 emit_bugt(cd, iptr->dst.block);

/* '<=' : branch on strict '<' of the high word, fall through to
   unsigned '<=' of the low word when the high words tie. */
2665 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2667 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2668 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2670 emit_blt(cd, iptr->dst.block);
2671 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2672 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2675 emit_bule(cd, iptr->dst.block);

/* '>=' : branch on strict '>' of the high word, fall through to
   unsigned '>=' of the low word. */
2678 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2680 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2681 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2683 emit_bgt(cd, iptr->dst.block);
2684 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2685 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2688 emit_buge(cd, iptr->dst.block);
/* Return opcodes: move the return value into the ABI result
   register(s), then jump to the shared method epilogue
   (nowperformreturn).  Each case first records a replacement point
   so on-stack replacement can interrupt at the return. */
2692 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2694 REPLACEMENT_POINT_RETURN(cd, iptr);
2695 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2696 M_INTMOVE(s1, REG_RESULT);
2697 goto nowperformreturn;

/* Long return: value travels in the EDX:EAX register pair
   (REG_RESULT_PACKED). */
2699 case ICMD_LRETURN: /* ..., retvalue ==> ... */
2701 REPLACEMENT_POINT_RETURN(cd, iptr);
2702 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2703 M_LNGMOVE(s1, REG_RESULT_PACKED);
2704 goto nowperformreturn;

/* Reference return: like IRETURN, but with the verifier enabled an
   unresolved return-type class still needs a resolution patch. */
2706 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2708 REPLACEMENT_POINT_RETURN(cd, iptr);
2709 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2710 M_INTMOVE(s1, REG_RESULT);
2712 #ifdef ENABLE_VERIFIER
2713 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2714 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2715 iptr->sx.s23.s2.uc, 0);
2717 #endif /* ENABLE_VERIFIER */
2718 goto nowperformreturn;

/* Float/double return: result is left on the x87 FPU stack
   (REG_FRESULT); no integer move is needed. */
2720 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2723 REPLACEMENT_POINT_RETURN(cd, iptr);
2724 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2725 goto nowperformreturn;

/* Void return falls straight into the epilogue. */
2727 case ICMD_RETURN: /* ... ==> ... */
2729 REPLACEMENT_POINT_RETURN(cd, iptr);
/* Shared method epilogue (target of the nowperformreturn gotos):
   optional verbose-call trace, monitor exit for synchronized
   methods, callee-saved register restore, stack deallocation. */
2735 p = cd->stackframesize;
2737 #if !defined(NDEBUG)
2738 emit_verbosecall_exit(jd);
2741 #if defined(ENABLE_THREADS)
2742 if (checksync && code_is_synchronized(code)) {
/* reload the object/class that was locked in the prologue; its
   stack slot is at rd->memuse * 8 */
2743 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 8);
2745 /* we need to save the proper return value */
/* the monitor-exit call clobbers the result registers, so spill
   the return value into the (now free) lock slot first */
2746 switch (iptr->opc) {
2749 M_IST(REG_RESULT, REG_SP, rd->memuse * 8);
2753 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2757 emit_fstps_membase(cd, REG_SP, rd->memuse * 8);
2761 emit_fstpl_membase(cd, REG_SP, rd->memuse * 8);
/* call LOCK_monitor_exit(object) */
2765 M_AST(REG_ITMP2, REG_SP, 0);
2766 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2769 /* and now restore the proper return value */
2770 switch (iptr->opc) {
2773 M_ILD(REG_RESULT, REG_SP, rd->memuse * 8);
2777 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2781 emit_flds_membase(cd, REG_SP, rd->memuse * 8);
2785 emit_fldl_membase(cd, REG_SP, rd->memuse * 8);
2791 /* restore saved registers */
/* p counts down from stackframesize; saved registers live at the
   top of the frame, 8 bytes per slot */
2793 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2794 p--; M_ALD(rd->savintregs[i], REG_SP, p * 8);
2797 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2799 emit_fldl_membase(cd, REG_SP, p * 8);
/* when an FP value is being returned it occupies ST(0), so the
   restored saved register must go one x87 slot deeper */
2800 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2802 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2805 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2809 /* deallocate stack */
/* frame size is stackframesize*8 plus 4 bytes of alignment padding */
2811 if (cd->stackframesize)
2812 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
/* TABLESWITCH: normalize the index to [0, high-low], use one
   UNSIGNED compare to range-check both ends at once (a negative
   index wraps to a huge unsigned value), then dispatch through a
   jump table built in the data segment. */
2819 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2822 branch_target_t *table;
2824 table = iptr->dst.table;
2826 l = iptr->sx.s23.s2.tablelow;
2827 i = iptr->sx.s23.s3.tablehigh;
2829 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2830 M_INTMOVE(s1, REG_ITMP1);
/* bias the index so the table starts at 0 */
2833 M_ISUB_IMM(l, REG_ITMP1);
/* out-of-range (unsigned above i-1) goes to the default target,
   which is table[0] */
2839 M_CMP_IMM(i - 1, REG_ITMP1);
2840 emit_bugt(cd, table[0].block);
2842 /* build jump table top down and use address of lowest entry */
2847 dseg_add_target(cd, table->block);
2851 /* length of dataseg after last dseg_addtarget is used
/* the 0 immediate is the dseg base address, patched at link time */
2854 M_MOV_IMM(0, REG_ITMP2);
/* indexed load: scale 2 (= *4) into the 4-byte target table */
2856 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);

/* LOOKUPSWITCH: linear sequence of compare-immediate / branch-equal
   pairs, one per entry, followed by a jump to the default target. */
2862 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2865 lookup_target_t *lookup;
2867 lookup = iptr->dst.lookup;
2869 i = iptr->sx.s23.s2.lookupcount;
2871 MCODECHECK((i<<2)+8);
2872 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2875 M_CMP_IMM(lookup->value, s1);
2876 emit_beq(cd, lookup->target.block);
2880 emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
/* BUILTIN and the four INVOKE opcodes share the argument-marshalling
   code below: every argument is copied to its stack location per
   the method descriptor.  On i386 there are no integer argument
   registers, so int/long args must always be in memory. */
2885 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
2887 REPLACEMENT_POINT_FORGC_BUILTIN(cd, iptr);
2889 bte = iptr->sx.s23.s3.bte;
2892 #if defined(ENABLE_ESCAPE_REASON)
/* escape-analysis instrumentation: report the recorded escape
   reasons for this allocation site before the builtin runs */
2893 if (bte->fp == BUILTIN_escape_reason_new) {
2894 void set_escape_reasons(void *);
2895 M_ASUB_IMM(8, REG_SP);
2896 M_MOV_IMM(iptr->escape_reasons, REG_ITMP1);
2897 M_AST(EDX, REG_SP, 4);
2898 M_AST(REG_ITMP1, REG_SP, 0);
2899 M_MOV_IMM(set_escape_reasons, REG_ITMP1);
2901 M_ALD(EDX, REG_SP, 4);
2902 M_AADD_IMM(8, REG_SP);

2908 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2910 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2911 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
2912 case ICMD_INVOKEINTERFACE:
2914 REPLACEMENT_POINT_INVOKE(cd, iptr);
/* unresolved call sites only have the parsed descriptor; resolved
   ones have the target methodinfo */
2916 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2917 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
2921 lm = iptr->sx.s23.s3.fmiref->p.method;
2922 md = lm->parseddesc;
2926 s3 = md->paramcount;
2928 MCODECHECK((s3 << 1) + 64);
2930 /* copy arguments to registers or stack location */
2932 for (s3 = s3 - 1; s3 >= 0; s3--) {
2933 var = VAR(iptr->sx.s23.s2.args[s3]);
2935 /* Already Preallocated (ARGVAR) ? */
2936 if (var->flags & PREALLOC)
2938 if (IS_INT_LNG_TYPE(var->type)) {
/* i386 passes all int/long arguments on the stack; a register
   assignment here would be a register-allocator bug */
2939 if (!md->params[s3].inmemory) {
2940 log_text("No integer argument registers available!");
2944 if (IS_2_WORD_TYPE(var->type)) {
2945 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
2946 M_LST(d, REG_SP, md->params[s3].regoff);
2948 d = emit_load(jd, iptr, var, REG_ITMP1);
2949 M_IST(d, REG_SP, md->params[s3].regoff);
/* float/double argument: load to its assigned register, or spill
   to the descriptor's stack slot */
2954 if (!md->params[s3].inmemory) {
2955 s1 = md->params[s3].regoff;
2956 d = emit_load(jd, iptr, var, s1);
2960 d = emit_load(jd, iptr, var, REG_FTMP1);
2961 if (IS_2_WORD_TYPE(var->type))
2962 M_DST(d, REG_SP, md->params[s3].regoff);
2964 M_FST(d, REG_SP, md->params[s3].regoff);
/* Per-opcode call dispatch: load the callee address (direct for
   BUILTIN/STATIC/SPECIAL, through the vftbl for VIRTUAL, through
   the interface table for INTERFACE), call it, then move the result
   from the ABI result register(s) to the destination variable. */
2969 switch (iptr->opc) {
2971 d = md->returntype.type;
/* builtins without a stub are called directly by function pointer */
2973 if (bte->stub == NULL) {
2974 M_MOV_IMM(bte->fp, REG_ITMP1);
2977 M_MOV_IMM(bte->stub, REG_ITMP1);
2981 #if defined(ENABLE_ESCAPE_CHECK)
2982 if (bte->opcode == ICMD_NEW || bte->opcode == ICMD_NEWARRAY) {
2983 /*emit_escape_annotate_object(cd, m);*/
/* INVOKESPECIAL must raise NPE eagerly: load the receiver from its
   first argument slot and test it */
2988 case ICMD_INVOKESPECIAL:
2989 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
2990 emit_nullpointer_check(cd, iptr, REG_ITMP1);
2993 case ICMD_INVOKESTATIC:
/* unresolved target: emit a patcher ref; the immediate below is
   filled in with the stub address once resolved */
2995 unresolved_method *um = iptr->sx.s23.s3.um;
2997 patcher_add_patch_ref(jd, PATCHER_invokestatic_special,
3001 d = md->returntype.type;
3004 disp = (ptrint) lm->stubroutine;
3005 d = lm->parseddesc->returntype.type;
3008 M_MOV_IMM2(disp, REG_ITMP2);
3012 case ICMD_INVOKEVIRTUAL:
3013 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
/* NOTE(review): the receiver was just loaded into REG_ITMP1, but s1
   (stale from the argument loop) is passed to the null check —
   verify the intended register */
3014 emit_nullpointer_check(cd, iptr, s1);
3017 unresolved_method *um = iptr->sx.s23.s3.um;
3019 patcher_add_patch_ref(jd, PATCHER_invokevirtual, um, 0);
3022 d = md->returntype.type;
/* resolved: method pointer lives at vftbl->table[vftblindex] */
3025 s1 = OFFSET(vftbl_t, table[0]) +
3026 sizeof(methodptr) * lm->vftblindex;
3027 d = md->returntype.type;
3030 M_ALD(REG_METHODPTR, REG_ITMP1,
3031 OFFSET(java_object_t, vftbl));
3032 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3036 case ICMD_INVOKEINTERFACE:
3037 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
/* NOTE(review): same stale-s1 concern as INVOKEVIRTUAL above */
3038 emit_nullpointer_check(cd, iptr, s1);
3041 unresolved_method *um = iptr->sx.s23.s3.um;
3043 patcher_add_patch_ref(jd, PATCHER_invokeinterface, um, 0);
3047 d = md->returntype.type;
/* resolved: two loads — interface table entry (negative offset from
   the vftbl), then the method slot within that interface */
3050 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3051 sizeof(methodptr) * lm->clazz->index;
3053 s2 = sizeof(methodptr) * (lm - lm->clazz->methods);
3055 d = md->returntype.type;
3058 M_ALD(REG_METHODPTR, REG_ITMP1,
3059 OFFSET(java_object_t, vftbl));
3060 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3061 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3066 /* store size of call code in replacement point */
3068 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3069 REPLACEMENT_POINT_FORGC_BUILTIN_RETURN(cd, iptr);
3071 /* d contains return type */
3073 if (d != TYPE_VOID) {
3074 #if defined(ENABLE_SSA)
/* under SSA, skip the result move for dead stack slots */
3075 if ((ls == NULL) /* || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) */ ||
3076 (ls->lifetime[iptr->dst.varindex].type != UNUSED))
3077 /* a "living" stackslot */
3080 if (IS_INT_LNG_TYPE(d)) {
3081 if (IS_2_WORD_TYPE(d)) {
3082 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3083 M_LNGMOVE(REG_RESULT_PACKED, s1);
3086 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3087 M_INTMOVE(REG_RESULT, s1);
/* float/double results stay on the FPU stack (REG_NULL) */
3091 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3093 emit_store_dst(jd, iptr, s1);
/* CHECKCAST: verify the object is an instance of the target class,
   throwing ClassCastException otherwise.  Three shapes of code:
   (a) unresolved class — patched flag test chooses interface vs.
       class path at runtime (BRANCH_LABEL_1/2);
   (b) interface cast — index into the vftbl's interface table;
   (c) class cast — fast subtype test via subtype_display, with a
       slow path walking subtype_overflow.
   Array casts are delegated to BUILTIN_arraycheckcast. */
3099 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3101 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3102 /* object type cast-check */
3105 vftbl_t *supervftbl;
3108 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3114 super = iptr->sx.s23.s3.c.cls;
3115 superindex = super->index;
3116 supervftbl = super->vftbl;
3119 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3121 /* if class is not resolved, check which code to call */
3123 if (super == NULL) {
/* null references always pass a checkcast */
3125 emit_label_beq(cd, BRANCH_LABEL_1);
3127 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3128 iptr->sx.s23.s3.c.ref, 0);
/* the 0 immediate is patched with super->flags once resolved */
3130 M_MOV_IMM2(0, REG_ITMP2); /* super->flags */
3131 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3132 emit_label_beq(cd, BRANCH_LABEL_2);
3135 /* interface checkcast code */
3137 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3138 if (super != NULL) {
3140 emit_label_beq(cd, BRANCH_LABEL_3);
3143 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3145 if (super == NULL) {
3146 patcher_add_patch_ref(jd, PATCHER_checkcast_interface,
3147 iptr->sx.s23.s3.c.ref,
/* interface table must be long enough to contain superindex ... */
3152 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3153 M_ISUB_IMM32(superindex, REG_ITMP3);
3154 /* XXX do we need this one? */
3156 emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);
/* ... and the entry itself must be non-NULL */
3158 M_ALD32(REG_ITMP3, REG_ITMP2,
3159 OFFSET(vftbl_t, interfacetable[0]) -
3160 superindex * sizeof(methodptr*));
3162 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);
3165 emit_label_br(cd, BRANCH_LABEL_4);
3167 emit_label(cd, BRANCH_LABEL_3);
3170 /* class checkcast code */
3172 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3173 if (super == NULL) {
3174 emit_label(cd, BRANCH_LABEL_2);
3178 emit_label_beq(cd, BRANCH_LABEL_5);
3181 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3182 if (super == NULL) {
3183 patcher_add_patch_ref(jd, PATCHER_checkcast_class,
3184 iptr->sx.s23.s3.c.ref,
3187 M_MOV_IMM2(supervftbl, REG_ITMP3);
/* deep hierarchies (or unknown depth when unresolved) need the
   full subtype test; shallow ones use one display-slot compare */
3189 if (super == NULL || super->vftbl->subtype_depth >= DISPLAY_SIZE) {
3190 M_ILD(REG_ITMP1, REG_ITMP3, OFFSET(vftbl_t, subtype_offset));
3191 M_CMP_MEMINDEX(REG_ITMP2, 0, REG_ITMP1, 0, REG_ITMP3);
3192 emit_label_beq(cd, BRANCH_LABEL_6); /* good */
3194 if (super == NULL) {
3195 M_ICMP_IMM(OFFSET(vftbl_t, subtype_display[DISPLAY_SIZE]), REG_ITMP1);
3196 emit_label_bne(cd, BRANCH_LABEL_10); /* throw */
/* slow path: compare depths, then search subtype_overflow */
3199 M_ILD(REG_ITMP1, REG_ITMP3, OFFSET(vftbl_t, subtype_depth));
3200 M_CMP_MEMBASE(REG_ITMP2, OFFSET(vftbl_t, subtype_depth), REG_ITMP1);
3201 emit_label_bgt(cd, BRANCH_LABEL_9); /* throw */
3203 M_ALD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, subtype_overflow));
3204 M_CMP_MEMINDEX(REG_ITMP2, -4*DISPLAY_SIZE, REG_ITMP1, 2, REG_ITMP3);
3205 emit_label_beq(cd, BRANCH_LABEL_7); /* good */
3207 emit_label(cd, BRANCH_LABEL_9);
3209 emit_label(cd, BRANCH_LABEL_10);
3211 /* reload s1, might have been destroyed */
3212 emit_load_s1(jd, iptr, REG_ITMP1);
/* deliberate trapping load: raises the ClassCastException trap */
3213 M_ALD_MEM(s1, TRAP_ClassCastException);
3215 emit_label(cd, BRANCH_LABEL_7);
3216 emit_label(cd, BRANCH_LABEL_6);
3217 /* reload s1, might have been destroyed */
3218 emit_load_s1(jd, iptr, REG_ITMP1);
/* fast path (resolved, shallow): single display-slot comparison */
3221 M_CMP_MEMBASE(REG_ITMP2, super->vftbl->subtype_offset, REG_ITMP3);
3223 emit_classcast_check(cd, iptr, BRANCH_NE, REG_ITMP3, s1);
3227 emit_label(cd, BRANCH_LABEL_5);
3230 if (super == NULL) {
3231 emit_label(cd, BRANCH_LABEL_1);
3232 emit_label(cd, BRANCH_LABEL_4);
3235 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3238 /* array type cast-check */
/* array casts: call BUILTIN_arraycheckcast(obj, cls); NULL result
   means the cast failed */
3240 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3241 M_AST(s1, REG_SP, 0 * 4);
3243 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3244 patcher_add_patch_ref(jd, PATCHER_builtin_arraycheckcast,
3245 iptr->sx.s23.s3.c.ref, 0);
3248 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3249 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3252 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3254 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);
3256 d = codegen_reg_of_dst(jd, iptr, s1);
3260 emit_store_dst(jd, iptr, d);
/* INSTANCEOF: like CHECKCAST but produces a 0/1 int result instead
   of throwing.  Shares the same three-way structure: runtime flag
   dispatch for unresolved classes, interface-table probe for
   interfaces, subtype_display/overflow test for classes.  A null
   reference yields 0. */
3263 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3267 vftbl_t *supervftbl;
3270 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3276 super = iptr->sx.s23.s3.c.cls;
3277 superindex = super->index;
3278 supervftbl = super->vftbl;
3281 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3282 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
3285 M_INTMOVE(s1, REG_ITMP1);
3291 /* if class is not resolved, check which code to call */
3293 if (super == NULL) {
/* null reference: result register stays 0, skip everything */
3295 emit_label_beq(cd, BRANCH_LABEL_1);
3297 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3298 iptr->sx.s23.s3.c.ref, 0);
/* the 0 immediate is patched with super->flags once resolved */
3300 M_MOV_IMM2(0, REG_ITMP3); /* super->flags */
3301 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3302 emit_label_beq(cd, BRANCH_LABEL_2);
3305 /* interface instanceof code */
3307 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3308 if (super != NULL) {
3310 emit_label_beq(cd, BRANCH_LABEL_3);
3313 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3315 if (super == NULL) {
3316 patcher_add_patch_ref(jd, PATCHER_instanceof_interface,
3317 iptr->sx.s23.s3.c.ref, 0);
/* bounds-check the interface table, then load the entry */
3321 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3322 M_ISUB_IMM32(superindex, REG_ITMP3);
/* hand-computed byte length of the skip-ahead instruction sequence */
3325 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3326 6 /* jcc */ + 5 /* mov_imm_reg */);
3329 M_ALD32(REG_ITMP1, REG_ITMP1,
3330 OFFSET(vftbl_t, interfacetable[0]) -
3331 superindex * sizeof(methodptr*));
3333 /* emit_setcc_reg(cd, CC_A, d); */
3334 /* emit_jcc(cd, CC_BE, 5); */
3339 emit_label_br(cd, BRANCH_LABEL_4);
3341 emit_label(cd, BRANCH_LABEL_3);
3344 /* class instanceof code */
3346 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3347 if (super == NULL) {
3348 emit_label(cd, BRANCH_LABEL_2);
3352 emit_label_beq(cd, BRANCH_LABEL_5);
3355 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3356 if (super == NULL) {
3357 patcher_add_patch_ref(jd, PATCHER_instanceof_class,
3358 iptr->sx.s23.s3.c.ref, 0);
3360 M_MOV_IMM2(supervftbl, REG_ITMP3);
/* deep/unknown hierarchy: full subtype test producing 0/1 in d */
3362 if (super == NULL || super->vftbl->subtype_depth >= DISPLAY_SIZE) {
3363 M_ILD(REG_ITMP1, REG_ITMP3, OFFSET(vftbl_t, subtype_offset));
3364 M_CMP_MEMINDEX(REG_ITMP2, 0, REG_ITMP1, 0, REG_ITMP3);
3365 emit_label_bne(cd, BRANCH_LABEL_8); /* jump over INC/SETE */
/* when d aliases REG_ITMP2 the true-result must be materialized
   differently to avoid clobbering the vftbl pointer */
3366 if (d == REG_ITMP2) {
3371 emit_label_br(cd, BRANCH_LABEL_6); /* true */
3372 emit_label(cd, BRANCH_LABEL_8);
3374 if (super == NULL) {
3375 M_ICMP_IMM(OFFSET(vftbl_t, subtype_display[DISPLAY_SIZE]), REG_ITMP1);
3376 emit_label_bne(cd, BRANCH_LABEL_10); /* false */
/* slow path: depth compare, then subtype_overflow search */
3379 M_ILD(REG_ITMP1, REG_ITMP3, OFFSET(vftbl_t, subtype_depth));
3380 M_CMP_MEMBASE(REG_ITMP2, OFFSET(vftbl_t, subtype_depth), REG_ITMP1);
3381 emit_label_bgt(cd, BRANCH_LABEL_9); /* false */
3383 M_ALD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, subtype_overflow));
3384 M_CMP_MEMINDEX(REG_ITMP2, -4*DISPLAY_SIZE, REG_ITMP1, 2, REG_ITMP3);
/* materialize the boolean result from the compare outcome */
3387 M_BSEXT(REG_ITMP1, d);
3391 if (d == REG_ITMP2) {
3394 emit_label_br(cd, BRANCH_LABEL_7); /* jump over M_CLR */
3398 emit_label(cd, BRANCH_LABEL_9);
3400 emit_label(cd, BRANCH_LABEL_10);
3401 if (d == REG_ITMP2) {
3404 emit_label(cd, BRANCH_LABEL_7);
3406 emit_label(cd, BRANCH_LABEL_6);
/* fast path (resolved, shallow): one display-slot comparison */
3409 M_CMP_MEMBASE(REG_ITMP2, super->vftbl->subtype_offset, REG_ITMP3);
3413 M_BSEXT(REG_ITMP1, d);
3423 emit_label(cd, BRANCH_LABEL_5);
3426 if (super == NULL) {
3427 emit_label(cd, BRANCH_LABEL_1);
3428 emit_label(cd, BRANCH_LABEL_4);
3431 emit_store_dst(jd, iptr, d);
/* MULTIANEWARRAY: copy all dimension sizes into a contiguous stack
   area, then call BUILTIN_multianewarray(argcount, class, dims).
   The dimension block starts at slot 3 so slots 0..2 hold the three
   builtin arguments. */
3435 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3437 /* check for negative sizes and copy sizes to stack if necessary */
3439 MCODECHECK((iptr->s1.argcount << 1) + 64);
3441 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3442 /* copy SAVEDVAR sizes to stack */
3443 var = VAR(iptr->sx.s23.s2.args[s1]);
3445 /* Already Preallocated? */
3446 if (!(var->flags & PREALLOC)) {
3447 if (var->flags & INMEMORY) {
3448 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff);
3449 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3452 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3456 /* is a patcher function set? */
3458 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3459 patcher_add_patch_ref(jd, PATCHER_builtin_multianewarray,
3460 iptr->sx.s23.s3.c.ref, 0);
3466 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3468 /* a0 = dimension count */
3470 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3472 /* a1 = arraydescriptor */
3474 M_IST_IMM(disp, REG_SP, 1 * 4);
3476 /* a2 = pointer to dimensions = stack pointer */
3478 M_MOV(REG_SP, REG_ITMP1);
3479 M_AADD_IMM(3 * 4, REG_ITMP1);
3480 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3482 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3485 /* check for exception before result assignment */
3487 emit_exception_check(cd, iptr);
3489 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3490 M_INTMOVE(REG_RESULT, s1);
3491 emit_store_dst(jd, iptr, s1);
/* GETEXCEPTION (SSA only): the pending exception object is already
   in REG_ITMP1; just route it to the destination variable. */
3494 #if defined(ENABLE_SSA)
3495 case ICMD_GETEXCEPTION:
3496 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
3497 M_INTMOVE(REG_ITMP1, d);
3498 emit_store_dst(jd, iptr, d);
/* any ICMD not handled above is a code-generator bug */
3502 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3507 } /* for instruction */
3511 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3514 #if defined(ENABLE_SSA)
3517 /* by edge splitting, in Blocks with phi moves there can only */
3518 /* be a goto as last command, no other Jump/Branch Command */
3520 if (!last_cmd_was_goto)
3521 codegen_emit_phi_moves(jd, bptr);
3526 /* At the end of a basic block we may have to append some nops,
3527 because the patcher stub calling code might be longer than the
3528 actual instruction. So codepatching does not change the
3529 following block unintentionally. */
3531 if (cd->mcodeptr < cd->lastmcodeptr) {
3532 while (cd->mcodeptr < cd->lastmcodeptr) {
3537 } /* if (bptr -> flags >= BBREACHED) */
3538 } /* for basic block */
3540 /* generate stubs */
3542 emit_patcher_traps(jd);
3544 /* everything's ok */
3550 /* codegen_emit_stub_native ****************************************************
3552 Emits a stub routine which calls a native method.
3554 *******************************************************************************/
/* Emit a stub that bridges from JIT-compiled code to a native (JNI
   or internal) function 'f':
     - builds and registers a stackframeinfo/localref table,
     - marshals Java arguments into the native calling convention
       ('skipparams' extra leading native args, e.g. JNIEnv/class),
     - calls the native function, converts/saves its return value,
     - tears the native frame down and forwards any pending
       exception to asm_handle_nat_exception. */
3556 void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f, int skipparams)
3562 int i, j; /* count variables */
3566 /* get required compiler data */
3572 /* set some variables */
3576 /* calculate stackframe size */
/* frame = stackframeinfo + localref table + 4 outgoing argument
   slots + (native argument area, elided here), in void*-sized units */
3578 cd->stackframesize =
3579 sizeof(stackframeinfo_t) / SIZEOF_VOID_P +
3580 sizeof(localref_table) / SIZEOF_VOID_P +
3581 4 + /* 4 arguments (start_native_call) */
3584 /* keep stack 16-byte aligned */
3586 ALIGN_ODD(cd->stackframesize);
3588 /* create method header */
3590 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3591 (void) dseg_add_unique_s4(cd, cd->stackframesize * 8 + 4); /* FrameSize */
3592 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3593 (void) dseg_add_unique_s4(cd, 0); /* IntSave */
3594 (void) dseg_add_unique_s4(cd, 0); /* FltSave */
3596 #if defined(ENABLE_PROFILING)
3597 /* generate native method profiling code */
3599 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
3600 /* count frequency */
3602 M_MOV_IMM(code, REG_ITMP1);
3603 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
3607 /* calculate stackframe size for native function */
3609 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
3611 /* Mark the whole fpu stack as free for native functions (only for saved */
3612 /* register count == 0). */
3614 emit_ffree_reg(cd, 0);
3615 emit_ffree_reg(cd, 1);
3616 emit_ffree_reg(cd, 2);
3617 emit_ffree_reg(cd, 3);
3618 emit_ffree_reg(cd, 4);
3619 emit_ffree_reg(cd, 5);
3620 emit_ffree_reg(cd, 6);
3621 emit_ffree_reg(cd, 7);
3623 #if defined(ENABLE_GC_CACAO)
3624 /* remember callee saved int registers in stackframeinfo (GC may need to */
3625 /* recover them during a collection). */
3627 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3628 OFFSET(stackframeinfo_t, intregs);
3630 for (i = 0; i < INT_SAV_CNT; i++)
3631 M_AST(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3634 /* prepare data structures for native function call */
/* codegen_start_native_call(sp, 0 /* presumably pv — elided */) */
3636 M_MOV(REG_SP, REG_ITMP1);
3637 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3638 M_IST_IMM(0, REG_SP, 1 * 4);
3641 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
3644 /* remember class argument */
/* NOTE(review): for static methods, start_native_call apparently
   returns the class in REG_RESULT; it is parked in REG_ITMP3 until
   it becomes the second native argument below — verify */
3646 if (m->flags & ACC_STATIC)
3647 M_MOV(REG_RESULT, REG_ITMP3);
3649 /* Copy or spill arguments to new locations. */
/* i walks the Java descriptor, j the native descriptor which is
   offset by 'skipparams' (env and, for static, class) */
3651 for (i = md->paramcount - 1, j = i + skipparams; i >= 0; i--, j--) {
3652 if (!md->params[i].inmemory)
/* source slot lives in the caller's frame: above this stub's frame
   (stackframesize*8) plus return address and padding (+8) */
3655 s1 = md->params[i].regoff + cd->stackframesize * 8 + 8;
3656 s2 = nmd->params[j].regoff;
3658 /* float/double in memory can be copied like int/longs */
3660 switch (md->paramtypes[i].type) {
3664 M_ILD(REG_ITMP1, REG_SP, s1);
3665 M_IST(REG_ITMP1, REG_SP, s2);
3669 M_LLD(REG_ITMP12_PACKED, REG_SP, s1);
3670 M_LST(REG_ITMP12_PACKED, REG_SP, s2);
3675 /* Handle native Java methods. */
3677 if (m->flags & ACC_NATIVE) {
3678 /* if function is static, put class into second argument */
3680 if (m->flags & ACC_STATIC)
3681 M_AST(REG_ITMP3, REG_SP, 1 * 4);
3683 /* put env into first argument */
3685 M_AST_IMM(VM_get_jnienv(), REG_SP, 0 * 4);
3688 /* Call the native function. */
/* the address is loaded indirectly through the data segment so the
   resolver/patcher can fill it in later */
3690 disp = dseg_add_functionptr(cd, f);
3691 emit_mov_imm_reg(cd, 0, REG_ITMP3);
3693 M_ALD(REG_ITMP1, REG_ITMP3, disp);
3696 /* save return value */
/* widen/narrow sub-int results per JNI semantics, then spill the
   result so codegen_finish_native_call cannot clobber it */
3698 switch (md->returntype.type) {
3701 switch (md->returntype.primitivetype) {
3702 case PRIMITIVETYPE_BOOLEAN:
3703 M_BZEXT(REG_RESULT, REG_RESULT);
3705 case PRIMITIVETYPE_BYTE:
3706 M_BSEXT(REG_RESULT, REG_RESULT);
3708 case PRIMITIVETYPE_CHAR:
3709 M_CZEXT(REG_RESULT, REG_RESULT);
3711 case PRIMITIVETYPE_SHORT:
3712 M_SSEXT(REG_RESULT, REG_RESULT);
3715 M_IST(REG_RESULT, REG_SP, 1 * 8);
3718 M_LST(REG_RESULT_PACKED, REG_SP, 1 * 8);
3721 emit_fsts_membase(cd, REG_SP, 1 * 8);
3724 emit_fstl_membase(cd, REG_SP, 1 * 8);
3730 /* remove native stackframe info */
3732 M_MOV(REG_SP, REG_ITMP1);
3733 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3734 M_IST_IMM(0, REG_SP, 1 * 4);
3737 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
/* finish_native_call returns the pending exception (if any); keep
   it in REG_ITMP2 across the result restore */
3739 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
3741 /* restore return value */
3743 switch (md->returntype.type) {
3746 M_ILD(REG_RESULT, REG_SP, 1 * 8);
3749 M_LLD(REG_RESULT_PACKED, REG_SP, 1 * 8);
3752 emit_flds_membase(cd, REG_SP, 1 * 8);
3755 emit_fldl_membase(cd, REG_SP, 1 * 8);
3761 #if defined(ENABLE_GC_CACAO)
3762 /* restore callee saved int registers from stackframeinfo (GC might have */
3763 /* modified them during a collection). */
3765 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3766 OFFSET(stackframeinfo_t, intregs);
3768 for (i = 0; i < INT_SAV_CNT; i++)
3769 M_ALD(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3772 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
3774 /* check for exception */
3781 /* handle exception */
/* exception path: put the exception object in the XPTR register,
   compute the faulting PC (return address minus call length 2) and
   jump to the assembler exception handler */
3783 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
3784 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
3785 M_ASUB_IMM(2, REG_ITMP2_XPC);
3787 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
3793 * These are local overrides for various environment variables in Emacs.
3794 * Please do not remove this and leave it at the end of the file, where
3795 * Emacs will automagically detect them.
3796 * ---------------------------------------------------------------------
3799 * indent-tabs-mode: t
3803 * vim:noexpandtab:sw=4:ts=4: