1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007, 2008
4 CACAOVM - Verein zur Foerderung der freien virtuellen Maschine CACAO
6 This file is part of CACAO.
8 This program is free software; you can redistribute it and/or
9 modify it under the terms of the GNU General Public License as
10 published by the Free Software Foundation; either version 2, or (at
11 your option) any later version.
13 This program is distributed in the hope that it will be useful, but
14 WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with this program; if not, write to the Free Software
20 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
34 #include "vm/jit/i386/md-abi.h"
36 #include "vm/jit/i386/codegen.h"
37 #include "vm/jit/i386/emit.h"
39 #include "mm/memory.h"
40 #include "native/jni.h"
41 #include "native/localref.h"
42 #include "native/native.h"
44 #include "threads/lock-common.h"
46 #include "vm/builtin.h"
47 #include "vm/exceptions.h"
48 #include "vm/global.h"
49 #include "vm/stringlocal.h"
52 #include "vm/jit/abi.h"
53 #include "vm/jit/asmpart.h"
54 #include "vm/jit/codegen-common.h"
55 #include "vm/jit/dseg.h"
56 #include "vm/jit/emit-common.h"
57 #include "vm/jit/jit.h"
58 #include "vm/jit/linenumbertable.h"
59 #include "vm/jit/parse.h"
60 #include "vm/jit/patcher-common.h"
61 #include "vm/jit/reg.h"
62 #include "vm/jit/replace.h"
63 #include "vm/jit/stacktrace.h"
64 #include "vm/jit/trap.h"
66 #if defined(ENABLE_SSA)
67 # include "vm/jit/optimizing/lsra.h"
68 # include "vm/jit/optimizing/ssa.h"
69 #elif defined(ENABLE_LSRA)
70 # include "vm/jit/allocator/lsra.h"
73 #include "vmcore/loader.h"
74 #include "vmcore/options.h"
75 #include "vmcore/utf8.h"
78 /* codegen_emit ****************************************************************
80 Generates machine code.
82 *******************************************************************************/
84 bool codegen_emit(jitdata *jd)
90 s4 len, s1, s2, s3, d, disp;
91 int align_off; /* offset for alignment compensation */
96 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
97 builtintable_entry *bte;
100 unresolved_field *uf;
103 #if defined(ENABLE_SSA)
105 bool last_cmd_was_goto;
107 last_cmd_was_goto = false;
111 /* get required compiler data */
118 /* prevent compiler warnings */
129 s4 savedregs_num = 0;
132 /* space to save used callee saved registers */
134 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
135 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
137 cd->stackframesize = rd->memuse + savedregs_num;
140 #if defined(ENABLE_THREADS)
141 /* space to save argument of monitor_enter */
143 if (checksync && code_is_synchronized(code))
144 cd->stackframesize++;
147 /* create method header */
149 /* Keep stack of non-leaf functions 16-byte aligned. */
151 if (!code_is_leafmethod(code)) {
152 ALIGN_ODD(cd->stackframesize);
155 align_off = cd->stackframesize ? 4 : 0;
157 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
158 (void) dseg_add_unique_s4(
159 cd, cd->stackframesize * 8 + align_off); /* FrameSize */
161 code->synchronizedoffset = rd->memuse * 8;
163 /* REMOVEME: We still need it for exception handling in assembler. */
165 if (code_is_leafmethod(code))
166 (void) dseg_add_unique_s4(cd, 1); /* IsLeaf */
168 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
170 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
171 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
173 #if defined(ENABLE_PROFILING)
174 /* generate method profiling code */
176 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
177 /* count frequency */
179 M_MOV_IMM(code, REG_ITMP3);
180 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
184 /* create stack frame (if necessary) */
186 if (cd->stackframesize)
188 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
190 /* save return address and used callee saved registers */
192 p = cd->stackframesize;
193 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
194 p--; M_AST(rd->savintregs[i], REG_SP, p * 8);
196 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
197 p--; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 8);
200 /* take arguments out of register or stack frame */
205 for (p = 0, l = 0; p < md->paramcount; p++) {
206 t = md->paramtypes[p].type;
208 varindex = jd->local_map[l * 5 + t];
209 #if defined(ENABLE_SSA)
211 if (varindex != UNUSED)
212 varindex = ls->var_0[varindex];
213 if ((varindex != UNUSED) && (ls->lifetime[varindex].type == UNUSED))
218 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
221 if (varindex == UNUSED)
225 s1 = md->params[p].regoff;
228 if (IS_INT_LNG_TYPE(t)) { /* integer args */
229 if (!md->params[p].inmemory) { /* register arguments */
230 log_text("integer register argument");
232 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
233 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
235 else { /* reg arg -> spilled */
236 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
240 if (!(var->flags & INMEMORY)) {
242 cd->stackframesize * 8 + 4 + align_off + s1);
245 if (!IS_2_WORD_TYPE(t)) {
246 #if defined(ENABLE_SSA)
247 /* no copy avoiding by now possible with SSA */
249 emit_mov_membase_reg( /* + 4 for return address */
251 cd->stackframesize * 8 + s1 + 4 + align_off,
253 emit_mov_reg_membase(
254 cd, REG_ITMP1, REG_SP, var->vv.regoff);
257 #endif /*defined(ENABLE_SSA)*/
258 /* reuse stackslot */
259 var->vv.regoff = cd->stackframesize * 8 + 4 +
264 #if defined(ENABLE_SSA)
265 /* no copy avoiding by now possible with SSA */
267 emit_mov_membase_reg( /* + 4 for return address */
269 cd->stackframesize * 8 + s1 + 4 + align_off,
271 emit_mov_reg_membase(
272 cd, REG_ITMP1, REG_SP, var->vv.regoff);
273 emit_mov_membase_reg( /* + 4 for return address */
275 cd->stackframesize * 8 + s1 + 4 + 4 + align_off,
277 emit_mov_reg_membase(
278 cd, REG_ITMP1, REG_SP, var->vv.regoff + 4);
281 #endif /*defined(ENABLE_SSA)*/
282 /* reuse stackslot */
283 var->vv.regoff = cd->stackframesize * 8 + 8 + s1;
288 else { /* floating args */
289 if (!md->params[p].inmemory) { /* register arguments */
290 log_text("There are no float argument registers!");
292 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
293 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
294 } else { /* reg arg -> spilled */
295 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 8 */
299 else { /* stack arguments */
300 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
304 cd->stackframesize * 8 + s1 + 4 + align_off);
306 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
312 cd->stackframesize * 8 + s1 + 4 + align_off);
314 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
317 } else { /* stack-arg -> spilled */
318 #if defined(ENABLE_SSA)
319 /* no copy avoiding by now possible with SSA */
321 emit_mov_membase_reg(
323 cd->stackframesize * 8 + s1 + 4 + align_off,
325 emit_mov_reg_membase(
326 cd, REG_ITMP1, REG_SP, var->vv.regoff);
330 cd->stackframesize * 8 + s1 + 4 + align_off);
331 emit_fstps_membase(cd, REG_SP, var->vv.regoff);
336 cd->stackframesize * 8 + s1 + 4 + align_off);
337 emit_fstpl_membase(cd, REG_SP, var->vv.regoff);
341 #endif /*defined(ENABLE_SSA)*/
342 /* reuse stackslot */
343 var->vv.regoff = cd->stackframesize * 8 + 4 +
350 /* call monitorenter function */
352 #if defined(ENABLE_THREADS)
353 if (checksync && code_is_synchronized(code)) {
356 if (m->flags & ACC_STATIC) {
357 M_MOV_IMM(&m->clazz->object.header, REG_ITMP1);
360 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 8 + 4 + align_off);
363 M_ALD_MEM(REG_ITMP1, TRAP_NullPointerException);
366 M_AST(REG_ITMP1, REG_SP, s1 * 8);
367 M_AST(REG_ITMP1, REG_SP, 0 * 4);
368 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
374 emit_verbosecall_enter(jd);
379 #if defined(ENABLE_SSA)
380 /* with SSA the Header is Basic Block 0 - insert phi Moves if necessary */
382 codegen_emit_phi_moves(jd, ls->basicblocks[0]);
385 /* end of header generation */
387 /* create replacement points */
389 REPLACEMENT_POINTS_INIT(cd, jd);
391 /* walk through all basic blocks */
393 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
395 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
397 if (bptr->flags >= BBREACHED) {
398 /* branch resolving */
400 codegen_resolve_branchrefs(cd, bptr);
402 /* handle replacement points */
404 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
406 #if defined(ENABLE_REPLACEMENT)
407 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
408 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
410 disp = (s4) &(m->hitcountdown);
411 M_ISUB_IMM_MEMABS(1, disp);
417 /* copy interface registers to their destination */
422 #if defined(ENABLE_PROFILING)
423 /* generate basic block profiling code */
425 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
426 /* count frequency */
428 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
429 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
433 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
434 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
437 # if defined(ENABLE_SSA)
439 last_cmd_was_goto = false;
443 var = VAR(bptr->invars[len]);
444 if (bptr->type != BBTYPE_STD) {
445 if (!IS_2_WORD_TYPE(var->type)) {
446 #if !defined(ENABLE_SSA)
447 if (bptr->type == BBTYPE_EXH) {
448 d = codegen_reg_of_var(0, var, REG_ITMP1);
449 M_INTMOVE(REG_ITMP1, d);
450 emit_store(jd, NULL, var, d);
455 log_text("copy interface registers(EXH, SBR): longs \
456 have to be in memory (begin 1)");
464 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
468 var = VAR(bptr->invars[len]);
469 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
470 if (!IS_2_WORD_TYPE(var->type)) {
471 if (bptr->type == BBTYPE_EXH) {
472 d = codegen_reg_of_var(0, var, REG_ITMP1);
473 M_INTMOVE(REG_ITMP1, d);
474 emit_store(jd, NULL, var, d);
478 log_text("copy interface registers: longs have to be in \
485 assert((var->flags & INOUT));
490 /* walk through all instructions */
495 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
496 if (iptr->line != currentline) {
497 linenumbertable_list_entry_add(cd, iptr->line);
498 currentline = iptr->line;
501 MCODECHECK(1024); /* 1kB should be enough */
504 case ICMD_NOP: /* ... ==> ... */
505 case ICMD_POP: /* ..., value ==> ... */
506 case ICMD_POP2: /* ..., value, value ==> ... */
509 case ICMD_INLINE_START:
511 REPLACEMENT_POINT_INLINE_START(cd, iptr);
514 case ICMD_INLINE_BODY:
516 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
517 linenumbertable_list_entry_add_inline_start(cd, iptr);
518 linenumbertable_list_entry_add(cd, iptr->line);
521 case ICMD_INLINE_END:
523 linenumbertable_list_entry_add_inline_end(cd, iptr);
524 linenumbertable_list_entry_add(cd, iptr->line);
527 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
529 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
530 emit_nullpointer_check(cd, iptr, s1);
533 /* constant operations ************************************************/
535 case ICMD_ICONST: /* ... ==> ..., constant */
537 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
538 ICONST(d, iptr->sx.val.i);
539 emit_store_dst(jd, iptr, d);
542 case ICMD_LCONST: /* ... ==> ..., constant */
544 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
545 LCONST(d, iptr->sx.val.l);
546 emit_store_dst(jd, iptr, d);
549 case ICMD_FCONST: /* ... ==> ..., constant */
551 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
552 if (iptr->sx.val.f == 0.0) {
556 if (iptr->sx.val.i == 0x80000000) {
560 } else if (iptr->sx.val.f == 1.0) {
563 } else if (iptr->sx.val.f == 2.0) {
569 disp = dseg_add_float(cd, iptr->sx.val.f);
570 emit_mov_imm_reg(cd, 0, REG_ITMP1);
572 emit_flds_membase(cd, REG_ITMP1, disp);
574 emit_store_dst(jd, iptr, d);
577 case ICMD_DCONST: /* ... ==> ..., constant */
579 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
580 if (iptr->sx.val.d == 0.0) {
584 if (iptr->sx.val.l == 0x8000000000000000LL) {
588 } else if (iptr->sx.val.d == 1.0) {
591 } else if (iptr->sx.val.d == 2.0) {
597 disp = dseg_add_double(cd, iptr->sx.val.d);
598 emit_mov_imm_reg(cd, 0, REG_ITMP1);
600 emit_fldl_membase(cd, REG_ITMP1, disp);
602 emit_store_dst(jd, iptr, d);
605 case ICMD_ACONST: /* ... ==> ..., constant */
607 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
609 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
610 patcher_add_patch_ref(jd, PATCHER_aconst,
611 iptr->sx.val.c.ref, 0);
616 if (iptr->sx.val.anyptr == NULL)
619 M_MOV_IMM(iptr->sx.val.anyptr, d);
621 emit_store_dst(jd, iptr, d);
625 /* load/store/copy/move operations ************************************/
643 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
648 /* integer operations *************************************************/
650 case ICMD_INEG: /* ..., value ==> ..., - value */
652 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
653 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
656 emit_store_dst(jd, iptr, d);
659 case ICMD_LNEG: /* ..., value ==> ..., - value */
661 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
662 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
664 M_NEG(GET_LOW_REG(d));
665 M_IADDC_IMM(0, GET_HIGH_REG(d));
666 M_NEG(GET_HIGH_REG(d));
667 emit_store_dst(jd, iptr, d);
670 case ICMD_I2L: /* ..., value ==> ..., value */
672 s1 = emit_load_s1(jd, iptr, EAX);
673 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
676 M_LNGMOVE(EAX_EDX_PACKED, d);
677 emit_store_dst(jd, iptr, d);
680 case ICMD_L2I: /* ..., value ==> ..., value */
682 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
683 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
685 emit_store_dst(jd, iptr, d);
688 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
690 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
691 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
695 emit_store_dst(jd, iptr, d);
698 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
700 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
701 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
703 emit_store_dst(jd, iptr, d);
706 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
708 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
709 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
711 emit_store_dst(jd, iptr, d);
715 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
717 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
718 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
719 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
726 emit_store_dst(jd, iptr, d);
730 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
731 /* sx.val.i = constant */
733 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
734 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
736 /* `inc reg' is slower on p4's (regarding to ia32
737 optimization reference manual and benchmarks) and as
741 M_IADD_IMM(iptr->sx.val.i, d);
742 emit_store_dst(jd, iptr, d);
745 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
747 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
748 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
749 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
750 M_INTMOVE(s1, GET_LOW_REG(d));
751 M_IADD(s2, GET_LOW_REG(d));
752 /* don't use REG_ITMP1 */
753 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
754 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
755 M_INTMOVE(s1, GET_HIGH_REG(d));
756 M_IADDC(s2, GET_HIGH_REG(d));
757 emit_store_dst(jd, iptr, d);
760 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
761 /* sx.val.l = constant */
763 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
764 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
766 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
767 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
768 emit_store_dst(jd, iptr, d);
771 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
773 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
774 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
775 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
777 M_INTMOVE(s1, REG_ITMP1);
778 M_ISUB(s2, REG_ITMP1);
779 M_INTMOVE(REG_ITMP1, d);
785 emit_store_dst(jd, iptr, d);
788 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
789 /* sx.val.i = constant */
791 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
792 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
794 M_ISUB_IMM(iptr->sx.val.i, d);
795 emit_store_dst(jd, iptr, d);
798 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
800 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
801 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
802 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
803 if (s2 == GET_LOW_REG(d)) {
804 M_INTMOVE(s1, REG_ITMP1);
805 M_ISUB(s2, REG_ITMP1);
806 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
809 M_INTMOVE(s1, GET_LOW_REG(d));
810 M_ISUB(s2, GET_LOW_REG(d));
812 /* don't use REG_ITMP1 */
813 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
814 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
815 if (s2 == GET_HIGH_REG(d)) {
816 M_INTMOVE(s1, REG_ITMP2);
817 M_ISUBB(s2, REG_ITMP2);
818 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
821 M_INTMOVE(s1, GET_HIGH_REG(d));
822 M_ISUBB(s2, GET_HIGH_REG(d));
824 emit_store_dst(jd, iptr, d);
827 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
828 /* sx.val.l = constant */
830 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
831 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
833 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
834 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
835 emit_store_dst(jd, iptr, d);
838 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
840 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
841 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
842 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
849 emit_store_dst(jd, iptr, d);
852 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
853 /* sx.val.i = constant */
855 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
856 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
857 M_IMUL_IMM(s1, iptr->sx.val.i, d);
858 emit_store_dst(jd, iptr, d);
861 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
863 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
864 s2 = emit_load_s2_low(jd, iptr, EDX);
865 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
867 M_INTMOVE(s1, REG_ITMP2);
868 M_IMUL(s2, REG_ITMP2);
870 s1 = emit_load_s1_low(jd, iptr, EAX);
871 s2 = emit_load_s2_high(jd, iptr, EDX);
874 M_IADD(EDX, REG_ITMP2);
876 s1 = emit_load_s1_low(jd, iptr, EAX);
877 s2 = emit_load_s2_low(jd, iptr, EDX);
880 M_INTMOVE(EAX, GET_LOW_REG(d));
881 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
883 emit_store_dst(jd, iptr, d);
886 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
887 /* sx.val.l = constant */
889 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
890 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
891 ICONST(EAX, iptr->sx.val.l);
893 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
894 M_IADD(REG_ITMP2, EDX);
895 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
896 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
897 M_IADD(REG_ITMP2, EDX);
898 M_LNGMOVE(EAX_EDX_PACKED, d);
899 emit_store_dst(jd, iptr, d);
902 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
904 s1 = emit_load_s1(jd, iptr, EAX);
905 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
906 d = codegen_reg_of_dst(jd, iptr, EAX);
907 emit_arithmetic_check(cd, iptr, s2);
909 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
911 /* check as described in jvm spec */
913 M_CMP_IMM(0x80000000, EAX);
920 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
921 emit_store_dst(jd, iptr, d);
924 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
926 s1 = emit_load_s1(jd, iptr, EAX);
927 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
928 d = codegen_reg_of_dst(jd, iptr, EDX);
929 emit_arithmetic_check(cd, iptr, s2);
931 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
933 /* check as described in jvm spec */
935 M_CMP_IMM(0x80000000, EAX);
943 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
944 emit_store_dst(jd, iptr, d);
947 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
948 /* sx.val.i = constant */
950 /* TODO: optimize for `/ 2' */
951 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
952 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
956 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
957 M_SRA_IMM(iptr->sx.val.i, d);
958 emit_store_dst(jd, iptr, d);
961 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
962 /* sx.val.i = constant */
964 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
965 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
967 M_MOV(s1, REG_ITMP1);
971 M_AND_IMM(iptr->sx.val.i, d);
973 M_BGE(2 + 2 + 6 + 2);
974 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
976 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
978 emit_store_dst(jd, iptr, d);
981 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
982 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
984 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
985 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
987 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
988 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
989 /* XXX could be optimized */
990 emit_arithmetic_check(cd, iptr, REG_ITMP3);
992 bte = iptr->sx.s23.s3.bte;
995 M_LST(s2, REG_SP, 2 * 4);
997 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
998 M_LST(s1, REG_SP, 0 * 4);
1000 M_MOV_IMM(bte->fp, REG_ITMP3);
1002 emit_store_dst(jd, iptr, d);
1005 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1006 /* sx.val.i = constant */
1008 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1009 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1011 M_TEST(GET_HIGH_REG(d));
1013 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1014 M_IADDC_IMM(0, GET_HIGH_REG(d));
1015 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1016 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1017 emit_store_dst(jd, iptr, d);
1021 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1022 /* sx.val.l = constant */
1024 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1025 if (iptr->dst.var->flags & INMEMORY) {
1026 if (iptr->s1.var->flags & INMEMORY) {
1027 /* Alpha algorithm */
1029 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8);
1031 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1037 /* TODO: hmm, don't know if this is always correct */
1039 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1041 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1047 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1048 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1050 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1051 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1052 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1053 emit_jcc(cd, CC_GE, disp);
1055 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1056 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1058 emit_neg_reg(cd, REG_ITMP1);
1059 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1060 emit_neg_reg(cd, REG_ITMP2);
1062 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1063 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1065 emit_neg_reg(cd, REG_ITMP1);
1066 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1067 emit_neg_reg(cd, REG_ITMP2);
1069 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 8);
1070 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 8 + 4);
1074 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1075 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1077 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1078 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1079 M_TEST(GET_LOW_REG(s1));
1085 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1087 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1088 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1089 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1090 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1093 emit_store_dst(jd, iptr, d);
1096 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1097 /* sx.val.i = constant */
1099 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1100 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1102 M_SLL_IMM(iptr->sx.val.i, d);
1103 emit_store_dst(jd, iptr, d);
1106 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1108 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1109 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1110 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1111 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1114 emit_store_dst(jd, iptr, d);
1117 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1118 /* sx.val.i = constant */
1120 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1121 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1123 M_SRA_IMM(iptr->sx.val.i, d);
1124 emit_store_dst(jd, iptr, d);
1127 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1129 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1130 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1131 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1132 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1135 emit_store_dst(jd, iptr, d);
1138 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1139 /* sx.val.i = constant */
1141 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1142 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1144 M_SRL_IMM(iptr->sx.val.i, d);
1145 emit_store_dst(jd, iptr, d);
1148 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1150 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1151 s2 = emit_load_s2(jd, iptr, ECX);
1152 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1155 M_TEST_IMM(32, ECX);
1157 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1158 M_CLR(GET_LOW_REG(d));
1159 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1160 M_SLL(GET_LOW_REG(d));
1161 emit_store_dst(jd, iptr, d);
1164 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1165 /* sx.val.i = constant */
1167 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1168 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1170 if (iptr->sx.val.i & 0x20) {
1171 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1172 M_CLR(GET_LOW_REG(d));
1173 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1177 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1179 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1181 emit_store_dst(jd, iptr, d);
1184 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1186 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1187 s2 = emit_load_s2(jd, iptr, ECX);
1188 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1191 M_TEST_IMM(32, ECX);
1193 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1194 M_SRA_IMM(31, GET_HIGH_REG(d));
1195 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1196 M_SRA(GET_HIGH_REG(d));
1197 emit_store_dst(jd, iptr, d);
1200 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1201 /* sx.val.i = constant */
1203 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1204 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1206 if (iptr->sx.val.i & 0x20) {
1207 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1208 M_SRA_IMM(31, GET_HIGH_REG(d));
1209 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1213 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1215 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1217 emit_store_dst(jd, iptr, d);
1220 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1222 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1223 s2 = emit_load_s2(jd, iptr, ECX);
1224 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1227 M_TEST_IMM(32, ECX);
1229 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1230 M_CLR(GET_HIGH_REG(d));
1231 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1232 M_SRL(GET_HIGH_REG(d));
1233 emit_store_dst(jd, iptr, d);
1236 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1237 /* sx.val.l = constant */
1239 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1240 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1242 if (iptr->sx.val.i & 0x20) {
1243 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1244 M_CLR(GET_HIGH_REG(d));
1245 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1249 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1251 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1253 emit_store_dst(jd, iptr, d);
1256 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1258 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1259 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1260 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1267 emit_store_dst(jd, iptr, d);
1270 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1271 /* sx.val.i = constant */
1273 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1274 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1276 M_AND_IMM(iptr->sx.val.i, d);
1277 emit_store_dst(jd, iptr, d);
1280 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1282 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1283 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1284 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1285 if (s2 == GET_LOW_REG(d))
1286 M_AND(s1, GET_LOW_REG(d));
1288 M_INTMOVE(s1, GET_LOW_REG(d));
1289 M_AND(s2, GET_LOW_REG(d));
1291 /* REG_ITMP1 probably contains low 32-bit of destination */
1292 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1293 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1294 if (s2 == GET_HIGH_REG(d))
1295 M_AND(s1, GET_HIGH_REG(d));
1297 M_INTMOVE(s1, GET_HIGH_REG(d));
1298 M_AND(s2, GET_HIGH_REG(d));
1300 emit_store_dst(jd, iptr, d);
1303 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1304 /* sx.val.l = constant */
1306 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1307 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1309 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1310 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1311 emit_store_dst(jd, iptr, d);
1314 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1316 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1317 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1318 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1325 emit_store_dst(jd, iptr, d);
1328 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1329 /* sx.val.i = constant */
1331 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1332 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1334 M_OR_IMM(iptr->sx.val.i, d);
1335 emit_store_dst(jd, iptr, d);
1338 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1340 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1341 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1342 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1343 if (s2 == GET_LOW_REG(d))
1344 M_OR(s1, GET_LOW_REG(d));
1346 M_INTMOVE(s1, GET_LOW_REG(d));
1347 M_OR(s2, GET_LOW_REG(d));
1349 /* REG_ITMP1 probably contains low 32-bit of destination */
1350 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1351 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1352 if (s2 == GET_HIGH_REG(d))
1353 M_OR(s1, GET_HIGH_REG(d));
1355 M_INTMOVE(s1, GET_HIGH_REG(d));
1356 M_OR(s2, GET_HIGH_REG(d));
1358 emit_store_dst(jd, iptr, d);
1361 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1362 /* sx.val.l = constant */
1364 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1365 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1367 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1368 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1369 emit_store_dst(jd, iptr, d);
1372 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1374 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1375 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1376 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1383 emit_store_dst(jd, iptr, d);
1386 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1387 /* sx.val.i = constant */
1389 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1390 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1392 M_XOR_IMM(iptr->sx.val.i, d);
1393 emit_store_dst(jd, iptr, d);
1396 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1398 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1399 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1400 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1401 if (s2 == GET_LOW_REG(d))
1402 M_XOR(s1, GET_LOW_REG(d));
1404 M_INTMOVE(s1, GET_LOW_REG(d));
1405 M_XOR(s2, GET_LOW_REG(d));
1407 /* REG_ITMP1 probably contains low 32-bit of destination */
1408 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1409 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1410 if (s2 == GET_HIGH_REG(d))
1411 M_XOR(s1, GET_HIGH_REG(d));
1413 M_INTMOVE(s1, GET_HIGH_REG(d));
1414 M_XOR(s2, GET_HIGH_REG(d));
1416 emit_store_dst(jd, iptr, d);
1419 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1420 /* sx.val.l = constant */
1422 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1423 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1425 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1426 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1427 emit_store_dst(jd, iptr, d);
1431 /* floating operations ************************************************/
1433 case ICMD_FNEG: /* ..., value ==> ..., - value */
1435 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1436 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1438 emit_store_dst(jd, iptr, d);
1441 case ICMD_DNEG: /* ..., value ==> ..., - value */
1443 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1444 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1446 emit_store_dst(jd, iptr, d);
1449 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1451 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1452 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1453 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1455 emit_store_dst(jd, iptr, d);
1458 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1460 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1461 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1462 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1464 emit_store_dst(jd, iptr, d);
1467 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1469 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1470 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1471 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1473 emit_store_dst(jd, iptr, d);
1476 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1478 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1479 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1480 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1482 emit_store_dst(jd, iptr, d);
1485 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1487 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1488 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1489 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1491 emit_store_dst(jd, iptr, d);
1494 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1496 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1497 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1498 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1500 emit_store_dst(jd, iptr, d);
1503 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1505 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1506 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1507 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1509 emit_store_dst(jd, iptr, d);
1512 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1514 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1515 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1516 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1518 emit_store_dst(jd, iptr, d);
1521 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1523 /* exchanged to skip fxch */
1524 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1525 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1526 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1527 /* emit_fxch(cd); */
1532 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1533 emit_store_dst(jd, iptr, d);
1534 emit_ffree_reg(cd, 0);
1538 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1540 /* exchanged to skip fxch */
1541 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1542 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1543 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1544 /* emit_fxch(cd); */
1549 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1550 emit_store_dst(jd, iptr, d);
1551 emit_ffree_reg(cd, 0);
1555 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1556 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1558 var = VAROP(iptr->s1);
1559 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1561 if (var->flags & INMEMORY) {
1562 emit_fildl_membase(cd, REG_SP, var->vv.regoff);
1564 /* XXX not thread safe! */
1565 disp = dseg_add_unique_s4(cd, 0);
1566 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1568 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1569 emit_fildl_membase(cd, REG_ITMP1, disp);
1572 emit_store_dst(jd, iptr, d);
1575 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1576 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1578 var = VAROP(iptr->s1);
1579 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1580 if (var->flags & INMEMORY) {
1581 emit_fildll_membase(cd, REG_SP, var->vv.regoff);
1584 log_text("L2F: longs have to be in memory");
1587 emit_store_dst(jd, iptr, d);
1590 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1592 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1593 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1595 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1598 /* Round to zero, 53-bit mode, exception masked */
1599 disp = dseg_add_s4(cd, 0x0e7f);
1600 emit_fldcw_membase(cd, REG_ITMP1, disp);
1602 var = VAROP(iptr->dst);
1603 var1 = VAROP(iptr->s1);
1605 if (var->flags & INMEMORY) {
1606 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1608 /* Round to nearest, 53-bit mode, exceptions masked */
1609 disp = dseg_add_s4(cd, 0x027f);
1610 emit_fldcw_membase(cd, REG_ITMP1, disp);
1612 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1613 REG_SP, var->vv.regoff);
1616 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1618 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1621 /* XXX not thread safe! */
1622 disp = dseg_add_unique_s4(cd, 0);
1623 emit_fistpl_membase(cd, REG_ITMP1, disp);
1624 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1626 /* Round to nearest, 53-bit mode, exceptions masked */
1627 disp = dseg_add_s4(cd, 0x027f);
1628 emit_fldcw_membase(cd, REG_ITMP1, disp);
1630 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1633 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1634 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1637 emit_jcc(cd, CC_NE, disp);
1639 /* XXX: change this when we use registers */
1640 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1641 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1642 emit_call_reg(cd, REG_ITMP1);
1644 if (var->flags & INMEMORY) {
1645 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1648 M_INTMOVE(REG_RESULT, var->vv.regoff);
1652 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1654 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1655 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1657 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1660 /* Round to zero, 53-bit mode, exception masked */
1661 disp = dseg_add_s4(cd, 0x0e7f);
1662 emit_fldcw_membase(cd, REG_ITMP1, disp);
1664 var = VAROP(iptr->dst);
1665 var1 = VAROP(iptr->s1);
1667 if (var->flags & INMEMORY) {
1668 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1670 /* Round to nearest, 53-bit mode, exceptions masked */
1671 disp = dseg_add_s4(cd, 0x027f);
1672 emit_fldcw_membase(cd, REG_ITMP1, disp);
1674 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1675 REG_SP, var->vv.regoff);
1678 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1680 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1683 /* XXX not thread safe! */
1684 disp = dseg_add_unique_s4(cd, 0);
1685 emit_fistpl_membase(cd, REG_ITMP1, disp);
1686 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1688 /* Round to nearest, 53-bit mode, exceptions masked */
1689 disp = dseg_add_s4(cd, 0x027f);
1690 emit_fldcw_membase(cd, REG_ITMP1, disp);
1692 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1695 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1696 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1699 emit_jcc(cd, CC_NE, disp);
1701 /* XXX: change this when we use registers */
1702 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1703 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1704 emit_call_reg(cd, REG_ITMP1);
1706 if (var->flags & INMEMORY) {
1707 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1709 M_INTMOVE(REG_RESULT, var->vv.regoff);
1713 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1715 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1716 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1718 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1721 /* Round to zero, 53-bit mode, exception masked */
1722 disp = dseg_add_s4(cd, 0x0e7f);
1723 emit_fldcw_membase(cd, REG_ITMP1, disp);
1725 var = VAROP(iptr->dst);
1726 var1 = VAROP(iptr->s1);
1728 if (var->flags & INMEMORY) {
1729 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1731 /* Round to nearest, 53-bit mode, exceptions masked */
1732 disp = dseg_add_s4(cd, 0x027f);
1733 emit_fldcw_membase(cd, REG_ITMP1, disp);
1735 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1736 REG_SP, var->vv.regoff + 4);
1739 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1741 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1744 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1746 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1748 emit_jcc(cd, CC_NE, disp);
1750 emit_alu_imm_membase(cd, ALU_CMP, 0,
1751 REG_SP, var->vv.regoff);
1754 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1756 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1758 emit_jcc(cd, CC_NE, disp);
1760 /* XXX: change this when we use registers */
1761 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1762 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1763 emit_call_reg(cd, REG_ITMP1);
1764 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1765 emit_mov_reg_membase(cd, REG_RESULT2,
1766 REG_SP, var->vv.regoff + 4);
1769 log_text("F2L: longs have to be in memory");
1774 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1776 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1777 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1779 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1782 /* Round to zero, 53-bit mode, exception masked */
1783 disp = dseg_add_s4(cd, 0x0e7f);
1784 emit_fldcw_membase(cd, REG_ITMP1, disp);
1786 var = VAROP(iptr->dst);
1787 var1 = VAROP(iptr->s1);
1789 if (var->flags & INMEMORY) {
1790 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1792 /* Round to nearest, 53-bit mode, exceptions masked */
1793 disp = dseg_add_s4(cd, 0x027f);
1794 emit_fldcw_membase(cd, REG_ITMP1, disp);
1796 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1797 REG_SP, var->vv.regoff + 4);
1800 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1802 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1805 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1807 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1809 emit_jcc(cd, CC_NE, disp);
1811 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff);
1814 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1816 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1818 emit_jcc(cd, CC_NE, disp);
1820 /* XXX: change this when we use registers */
1821 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1822 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1823 emit_call_reg(cd, REG_ITMP1);
1824 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1825 emit_mov_reg_membase(cd, REG_RESULT2,
1826 REG_SP, var->vv.regoff + 4);
1829 log_text("D2L: longs have to be in memory");
1834 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1836 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1837 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1839 emit_store_dst(jd, iptr, d);
1842 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1844 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1845 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1847 emit_store_dst(jd, iptr, d);
1850 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1853 /* exchanged to skip fxch */
1854 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1855 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1856 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1857 /* emit_fxch(cd); */
1860 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1861 emit_jcc(cd, CC_E, 6);
1862 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1864 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1865 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1866 emit_jcc(cd, CC_B, 3 + 5);
1867 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1868 emit_jmp_imm(cd, 3);
1869 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1870 emit_store_dst(jd, iptr, d);
1873 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1876 /* exchanged to skip fxch */
1877 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1878 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1879 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1880 /* emit_fxch(cd); */
1883 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1884 emit_jcc(cd, CC_E, 3);
1885 emit_movb_imm_reg(cd, 1, REG_AH);
1887 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1888 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1889 emit_jcc(cd, CC_B, 3 + 5);
1890 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1891 emit_jmp_imm(cd, 3);
1892 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1893 emit_store_dst(jd, iptr, d);
1897 /* memory operations **************************************************/
1899 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1901 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1902 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1903 /* implicit null-pointer check */
1904 M_ILD(d, s1, OFFSET(java_array_t, size));
1905 emit_store_dst(jd, iptr, d);
1908 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1910 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1911 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1912 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1913 /* implicit null-pointer check */
1914 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1915 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray_t, data[0]),
1917 emit_store_dst(jd, iptr, d);
1920 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1922 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1923 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1924 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1925 /* implicit null-pointer check */
1926 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1927 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray_t, data[0]),
1929 emit_store_dst(jd, iptr, d);
1932 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1934 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1935 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1936 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1937 /* implicit null-pointer check */
1938 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1939 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray_t, data[0]),
1941 emit_store_dst(jd, iptr, d);
1944 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1946 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1947 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1948 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1949 /* implicit null-pointer check */
1950 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1951 emit_mov_memindex_reg(cd, OFFSET(java_intarray_t, data[0]),
1953 emit_store_dst(jd, iptr, d);
1956 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1958 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1959 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1960 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1961 /* implicit null-pointer check */
1962 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1964 var = VAROP(iptr->dst);
1966 assert(var->flags & INMEMORY);
1967 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]),
1968 s1, s2, 3, REG_ITMP3);
1969 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff);
1970 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]) + 4,
1971 s1, s2, 3, REG_ITMP3);
1972 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff + 4);
1975 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1977 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1978 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1979 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1980 /* implicit null-pointer check */
1981 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1982 emit_flds_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2, 2);
1983 emit_store_dst(jd, iptr, d);
1986 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
1988 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1989 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1990 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1991 /* implicit null-pointer check */
1992 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1993 emit_fldl_memindex(cd, OFFSET(java_doublearray_t, data[0]), s1, s2,3);
1994 emit_store_dst(jd, iptr, d);
1997 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
1999 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2000 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2001 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2002 /* implicit null-pointer check */
2003 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2004 emit_mov_memindex_reg(cd, OFFSET(java_objectarray_t, data[0]),
2006 emit_store_dst(jd, iptr, d);
2010 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2012 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2013 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2014 /* implicit null-pointer check */
2015 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2016 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2018 /* because EBP, ESI, EDI have no xH and xL nibbles */
2019 M_INTMOVE(s3, REG_ITMP3);
2022 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray_t, data[0]),
2026 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2028 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2029 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2030 /* implicit null-pointer check */
2031 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2032 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2033 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray_t, data[0]),
2037 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2039 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2040 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2041 /* implicit null-pointer check */
2042 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2043 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2044 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray_t, data[0]),
2048 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2050 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2051 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2052 /* implicit null-pointer check */
2053 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2054 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2055 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray_t, data[0]),
2059 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2061 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2062 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2063 /* implicit null-pointer check */
2064 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2066 var = VAROP(iptr->sx.s23.s3);
2068 assert(var->flags & INMEMORY);
2069 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff, REG_ITMP3);
2070 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray_t, data[0])
2072 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff + 4, REG_ITMP3);
2073 emit_mov_reg_memindex(cd, REG_ITMP3,
2074 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2077 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2079 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2080 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2081 /* implicit null-pointer check */
2082 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2083 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2084 emit_fstps_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2,2);
2087 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2089 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2090 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2091 /* implicit null-pointer check */
2092 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2093 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2094 emit_fstpl_memindex(cd, OFFSET(java_doublearray_t, data[0]),
2098 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2100 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2101 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2102 /* implicit null-pointer check */
2103 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2104 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2106 M_AST(s1, REG_SP, 0 * 4);
2107 M_AST(s3, REG_SP, 1 * 4);
2108 M_MOV_IMM(BUILTIN_FAST_canstore, REG_ITMP1);
2110 emit_arraystore_check(cd, iptr);
2112 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2113 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2114 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2115 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray_t, data[0]),
2119 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2121 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2122 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2123 /* implicit null-pointer check */
2124 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2125 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2126 OFFSET(java_bytearray_t, data[0]), s1, s2, 0);
2129 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2131 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2132 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2133 /* implicit null-pointer check */
2134 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2135 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2136 OFFSET(java_chararray_t, data[0]), s1, s2, 1);
2139 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2141 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2142 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2143 /* implicit null-pointer check */
2144 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2145 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2146 OFFSET(java_shortarray_t, data[0]), s1, s2, 1);
2149 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2151 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2152 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2153 /* implicit null-pointer check */
2154 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2155 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2156 OFFSET(java_intarray_t, data[0]), s1, s2, 2);
2159 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2161 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2162 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2163 /* implicit null-pointer check */
2164 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2165 emit_mov_imm_memindex(cd,
2166 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2167 OFFSET(java_longarray_t, data[0]), s1, s2, 3);
2168 emit_mov_imm_memindex(cd,
2169 ((s4)iptr->sx.s23.s3.constval) >> 31,
2170 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2173 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2175 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2176 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2177 /* implicit null-pointer check */
2178 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2179 emit_mov_imm_memindex(cd, 0,
2180 OFFSET(java_objectarray_t, data[0]), s1, s2, 2);
2184 case ICMD_GETSTATIC: /* ... ==> ..., value */
2186 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2187 uf = iptr->sx.s23.s3.uf;
2188 fieldtype = uf->fieldref->parseddesc.fd->type;
2191 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2195 fi = iptr->sx.s23.s3.fmiref->p.field;
2196 fieldtype = fi->type;
2197 disp = (intptr_t) fi->value;
2199 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2200 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2203 M_MOV_IMM(disp, REG_ITMP1);
2204 switch (fieldtype) {
2207 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2208 M_ILD(d, REG_ITMP1, 0);
2211 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2212 M_LLD(d, REG_ITMP1, 0);
2215 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2216 M_FLD(d, REG_ITMP1, 0);
2219 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2220 M_DLD(d, REG_ITMP1, 0);
2223 emit_store_dst(jd, iptr, d);
2226 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2228 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2229 uf = iptr->sx.s23.s3.uf;
2230 fieldtype = uf->fieldref->parseddesc.fd->type;
2233 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2236 fi = iptr->sx.s23.s3.fmiref->p.field;
2237 fieldtype = fi->type;
2238 disp = (intptr_t) fi->value;
2240 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2241 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2244 M_MOV_IMM(disp, REG_ITMP1);
2245 switch (fieldtype) {
2248 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2249 M_IST(s1, REG_ITMP1, 0);
2252 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2253 M_LST(s1, REG_ITMP1, 0);
2256 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2257 emit_fstps_membase(cd, REG_ITMP1, 0);
2260 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2261 emit_fstpl_membase(cd, REG_ITMP1, 0);
2266 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2267 /* val = value (in current instruction) */
2268 /* following NOP) */
2270 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2271 uf = iptr->sx.s23.s3.uf;
2272 fieldtype = uf->fieldref->parseddesc.fd->type;
2275 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2278 fi = iptr->sx.s23.s3.fmiref->p.field;
2279 fieldtype = fi->type;
2280 disp = (intptr_t) fi->value;
2282 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2283 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2286 M_MOV_IMM(disp, REG_ITMP1);
2287 switch (fieldtype) {
2290 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
2293 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2294 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
2301 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2303 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2304 emit_nullpointer_check(cd, iptr, s1);
2306 #if defined(ENABLE_ESCAPE_CHECK)
2307 /*emit_escape_check(cd, s1);*/
2310 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2311 uf = iptr->sx.s23.s3.uf;
2312 fieldtype = uf->fieldref->parseddesc.fd->type;
2315 patcher_add_patch_ref(jd, PATCHER_getfield,
2316 iptr->sx.s23.s3.uf, 0);
2319 fi = iptr->sx.s23.s3.fmiref->p.field;
2320 fieldtype = fi->type;
2324 switch (fieldtype) {
2327 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2328 M_ILD32(d, s1, disp);
2331 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2332 M_LLD32(d, s1, disp);
2335 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2336 M_FLD32(d, s1, disp);
2339 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2340 M_DLD32(d, s1, disp);
2343 emit_store_dst(jd, iptr, d);
2346 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2348 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2349 emit_nullpointer_check(cd, iptr, s1);
2351 /* must be done here because of code patching */
2353 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2354 uf = iptr->sx.s23.s3.uf;
2355 fieldtype = uf->fieldref->parseddesc.fd->type;
2358 fi = iptr->sx.s23.s3.fmiref->p.field;
2359 fieldtype = fi->type;
2362 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2363 if (IS_2_WORD_TYPE(fieldtype))
2364 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2366 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2369 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2371 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2373 uf = iptr->sx.s23.s3.uf;
2376 patcher_add_patch_ref(jd, PATCHER_putfield, uf, 0);
2380 fi = iptr->sx.s23.s3.fmiref->p.field;
2384 switch (fieldtype) {
2387 M_IST32(s2, s1, disp);
2390 M_LST32(s2, s1, disp);
2393 emit_fstps_membase32(cd, s1, disp);
2396 emit_fstpl_membase32(cd, s1, disp);
2401 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2402 /* val = value (in current instruction) */
2403 /* following NOP) */
2405 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2406 emit_nullpointer_check(cd, iptr, s1);
2408 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2409 uf = iptr->sx.s23.s3.uf;
2410 fieldtype = uf->fieldref->parseddesc.fd->type;
2413 patcher_add_patch_ref(jd, PATCHER_putfieldconst,
2417 fi = iptr->sx.s23.s3.fmiref->p.field;
2418 fieldtype = fi->type;
2422 switch (fieldtype) {
2425 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2428 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2429 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2437 /* branch operations **************************************************/
2439 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2441 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2442 M_INTMOVE(s1, REG_ITMP1_XPTR);
2444 #ifdef ENABLE_VERIFIER
2445 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2446 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2447 iptr->sx.s23.s2.uc, 0);
2449 #endif /* ENABLE_VERIFIER */
2451 M_CALL_IMM(0); /* passing exception pc */
2452 M_POP(REG_ITMP2_XPC);
2454 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2458 case ICMD_GOTO: /* ... ==> ... */
2459 case ICMD_RET: /* ... ==> ... */
2461 #if defined(ENABLE_SSA)
2463 last_cmd_was_goto = true;
2465 /* In case of a Goto phimoves have to be inserted before the */
2468 codegen_emit_phi_moves(jd, bptr);
2471 emit_br(cd, iptr->dst.block);
2475 case ICMD_JSR: /* ... ==> ... */
2477 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
2481 case ICMD_IFNULL: /* ..., value ==> ... */
2482 case ICMD_IFNONNULL:
2484 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2486 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
2489 case ICMD_IFEQ: /* ..., value ==> ... */
2496 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2497 M_CMP_IMM(iptr->sx.val.i, s1);
2498 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
2501 case ICMD_IF_LEQ: /* ..., value ==> ... */
2503 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2504 if (iptr->sx.val.l == 0) {
2505 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2506 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2509 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2510 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2511 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2512 M_OR(REG_ITMP2, REG_ITMP1);
2514 emit_beq(cd, iptr->dst.block);
2517 case ICMD_IF_LLT: /* ..., value ==> ... */
2519 if (iptr->sx.val.l == 0) {
2520 /* If high 32-bit are less than zero, then the 64-bits
2522 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2524 emit_blt(cd, iptr->dst.block);
2527 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2528 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2529 emit_blt(cd, iptr->dst.block);
2531 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2532 emit_bult(cd, iptr->dst.block);
2536 case ICMD_IF_LLE: /* ..., value ==> ... */
2538 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2539 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2540 emit_blt(cd, iptr->dst.block);
2542 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2543 emit_bule(cd, iptr->dst.block);
2546 case ICMD_IF_LNE: /* ..., value ==> ... */
2548 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2549 if (iptr->sx.val.l == 0) {
2550 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2551 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2554 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2555 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2556 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2557 M_OR(REG_ITMP2, REG_ITMP1);
2559 emit_bne(cd, iptr->dst.block);
2562 case ICMD_IF_LGT: /* ..., value ==> ... */
2564 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2565 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2566 emit_bgt(cd, iptr->dst.block);
2568 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2569 emit_bugt(cd, iptr->dst.block);
2572 case ICMD_IF_LGE: /* ..., value ==> ... */
2574 if (iptr->sx.val.l == 0) {
2575 /* If high 32-bit are greater equal zero, then the
2577 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2579 emit_bge(cd, iptr->dst.block);
2582 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2583 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2584 emit_bgt(cd, iptr->dst.block);
2586 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2587 emit_buge(cd, iptr->dst.block);
2591 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2592 case ICMD_IF_ICMPNE:
2593 case ICMD_IF_ICMPLT:
2594 case ICMD_IF_ICMPGT:
2595 case ICMD_IF_ICMPGE:
2596 case ICMD_IF_ICMPLE:
2598 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2599 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2601 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);
2604 case ICMD_IF_ACMPEQ: /* ..., value, value ==> ... */
2605 case ICMD_IF_ACMPNE:
2607 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2608 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2610 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);
2613 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2615 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2616 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2617 M_INTMOVE(s1, REG_ITMP1);
2618 M_XOR(s2, REG_ITMP1);
2619 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2620 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2621 M_INTMOVE(s1, REG_ITMP2);
2622 M_XOR(s2, REG_ITMP2);
2623 M_OR(REG_ITMP1, REG_ITMP2);
2624 emit_beq(cd, iptr->dst.block);
2627 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2629 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2630 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2631 M_INTMOVE(s1, REG_ITMP1);
2632 M_XOR(s2, REG_ITMP1);
2633 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2634 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2635 M_INTMOVE(s1, REG_ITMP2);
2636 M_XOR(s2, REG_ITMP2);
2637 M_OR(REG_ITMP1, REG_ITMP2);
2638 emit_bne(cd, iptr->dst.block);
2641 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2643 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2644 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2646 emit_blt(cd, iptr->dst.block);
2647 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2648 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2651 emit_bult(cd, iptr->dst.block);
2654 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2656 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2657 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2659 emit_bgt(cd, iptr->dst.block);
2660 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2661 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2664 emit_bugt(cd, iptr->dst.block);
2667 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2669 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2670 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2672 emit_blt(cd, iptr->dst.block);
2673 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2674 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2677 emit_bule(cd, iptr->dst.block);
2680 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2682 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2683 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2685 emit_bgt(cd, iptr->dst.block);
2686 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2687 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2690 emit_buge(cd, iptr->dst.block);
2694 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2696 REPLACEMENT_POINT_RETURN(cd, iptr);
2697 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2698 M_INTMOVE(s1, REG_RESULT);
2699 goto nowperformreturn;
2701 case ICMD_LRETURN: /* ..., retvalue ==> ... */
2703 REPLACEMENT_POINT_RETURN(cd, iptr);
2704 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2705 M_LNGMOVE(s1, REG_RESULT_PACKED);
2706 goto nowperformreturn;
2708 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2710 REPLACEMENT_POINT_RETURN(cd, iptr);
2711 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2712 M_INTMOVE(s1, REG_RESULT);
2714 #ifdef ENABLE_VERIFIER
2715 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2716 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2717 iptr->sx.s23.s2.uc, 0);
2719 #endif /* ENABLE_VERIFIER */
2720 goto nowperformreturn;
2722 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2725 REPLACEMENT_POINT_RETURN(cd, iptr);
2726 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2727 goto nowperformreturn;
2729 case ICMD_RETURN: /* ... ==> ... */
2731 REPLACEMENT_POINT_RETURN(cd, iptr);
2737 p = cd->stackframesize;
2739 #if !defined(NDEBUG)
2740 emit_verbosecall_exit(jd);
2743 #if defined(ENABLE_THREADS)
2744 if (checksync && code_is_synchronized(code)) {
2745 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 8);
2747 /* we need to save the proper return value */
2748 switch (iptr->opc) {
2751 M_IST(REG_RESULT, REG_SP, rd->memuse * 8);
2755 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2759 emit_fstps_membase(cd, REG_SP, rd->memuse * 8);
2763 emit_fstpl_membase(cd, REG_SP, rd->memuse * 8);
2767 M_AST(REG_ITMP2, REG_SP, 0);
2768 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2771 /* and now restore the proper return value */
2772 switch (iptr->opc) {
2775 M_ILD(REG_RESULT, REG_SP, rd->memuse * 8);
2779 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2783 emit_flds_membase(cd, REG_SP, rd->memuse * 8);
2787 emit_fldl_membase(cd, REG_SP, rd->memuse * 8);
2793 /* restore saved registers */
2795 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2796 p--; M_ALD(rd->savintregs[i], REG_SP, p * 8);
2799 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2801 emit_fldl_membase(cd, REG_SP, p * 8);
2802 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2804 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2807 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2811 /* deallocate stack */
2813 if (cd->stackframesize)
2814 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
2821 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2824 branch_target_t *table;
2826 table = iptr->dst.table;
2828 l = iptr->sx.s23.s2.tablelow;
2829 i = iptr->sx.s23.s3.tablehigh;
2831 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2832 M_INTMOVE(s1, REG_ITMP1);
2835 M_ISUB_IMM(l, REG_ITMP1);
2841 M_CMP_IMM(i - 1, REG_ITMP1);
2842 emit_bugt(cd, table[0].block);
2844 /* build jump table top down and use address of lowest entry */
2849 dseg_add_target(cd, table->block);
2853 /* length of dataseg after last dseg_addtarget is used
2856 M_MOV_IMM(0, REG_ITMP2);
2858 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
2864 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2867 lookup_target_t *lookup;
2869 lookup = iptr->dst.lookup;
2871 i = iptr->sx.s23.s2.lookupcount;
2873 MCODECHECK((i<<2)+8);
2874 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2877 M_CMP_IMM(lookup->value, s1);
2878 emit_beq(cd, lookup->target.block);
2882 emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
2887 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
2889 REPLACEMENT_POINT_FORGC_BUILTIN(cd, iptr);
2891 bte = iptr->sx.s23.s3.bte;
2895 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2897 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2898 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
2899 case ICMD_INVOKEINTERFACE:
2901 REPLACEMENT_POINT_INVOKE(cd, iptr);
2903 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2904 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
2908 lm = iptr->sx.s23.s3.fmiref->p.method;
2909 md = lm->parseddesc;
2913 s3 = md->paramcount;
2915 MCODECHECK((s3 << 1) + 64);
2917 /* copy arguments to registers or stack location */
2919 for (s3 = s3 - 1; s3 >= 0; s3--) {
2920 var = VAR(iptr->sx.s23.s2.args[s3]);
2922 /* Already Preallocated (ARGVAR) ? */
2923 if (var->flags & PREALLOC)
2925 if (IS_INT_LNG_TYPE(var->type)) {
2926 if (!md->params[s3].inmemory) {
2927 log_text("No integer argument registers available!");
2931 if (IS_2_WORD_TYPE(var->type)) {
2932 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
2933 M_LST(d, REG_SP, md->params[s3].regoff);
2935 d = emit_load(jd, iptr, var, REG_ITMP1);
2936 M_IST(d, REG_SP, md->params[s3].regoff);
2941 if (!md->params[s3].inmemory) {
2942 s1 = md->params[s3].regoff;
2943 d = emit_load(jd, iptr, var, s1);
2947 d = emit_load(jd, iptr, var, REG_FTMP1);
2948 if (IS_2_WORD_TYPE(var->type))
2949 M_DST(d, REG_SP, md->params[s3].regoff);
2951 M_FST(d, REG_SP, md->params[s3].regoff);
2956 switch (iptr->opc) {
2958 d = md->returntype.type;
2960 if (bte->stub == NULL) {
2961 M_MOV_IMM(bte->fp, REG_ITMP1);
2964 M_MOV_IMM(bte->stub, REG_ITMP1);
2968 #if defined(ENABLE_ESCAPE_CHECK)
2969 if (bte->opcode == ICMD_NEW || bte->opcode == ICMD_NEWARRAY) {
2970 /*emit_escape_annotate_object(cd, m);*/
2975 case ICMD_INVOKESPECIAL:
2976 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
2977 emit_nullpointer_check(cd, iptr, REG_ITMP1);
2980 case ICMD_INVOKESTATIC:
2982 unresolved_method *um = iptr->sx.s23.s3.um;
2984 patcher_add_patch_ref(jd, PATCHER_invokestatic_special,
2988 d = md->returntype.type;
2991 disp = (ptrint) lm->stubroutine;
2992 d = lm->parseddesc->returntype.type;
2995 M_MOV_IMM(disp, REG_ITMP2);
2999 case ICMD_INVOKEVIRTUAL:
3000 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3001 emit_nullpointer_check(cd, iptr, s1);
3004 unresolved_method *um = iptr->sx.s23.s3.um;
3006 patcher_add_patch_ref(jd, PATCHER_invokevirtual, um, 0);
3009 d = md->returntype.type;
3012 s1 = OFFSET(vftbl_t, table[0]) +
3013 sizeof(methodptr) * lm->vftblindex;
3014 d = md->returntype.type;
3017 M_ALD(REG_METHODPTR, REG_ITMP1,
3018 OFFSET(java_object_t, vftbl));
3019 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3023 case ICMD_INVOKEINTERFACE:
3024 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3025 emit_nullpointer_check(cd, iptr, s1);
3028 unresolved_method *um = iptr->sx.s23.s3.um;
3030 patcher_add_patch_ref(jd, PATCHER_invokeinterface, um, 0);
3034 d = md->returntype.type;
3037 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3038 sizeof(methodptr) * lm->clazz->index;
3040 s2 = sizeof(methodptr) * (lm - lm->clazz->methods);
3042 d = md->returntype.type;
3045 M_ALD(REG_METHODPTR, REG_ITMP1,
3046 OFFSET(java_object_t, vftbl));
3047 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3048 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3053 /* store size of call code in replacement point */
3055 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3056 REPLACEMENT_POINT_FORGC_BUILTIN_RETURN(cd, iptr);
3058 /* d contains return type */
3060 if (d != TYPE_VOID) {
3061 #if defined(ENABLE_SSA)
3062 if ((ls == NULL) /* || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) */ ||
3063 (ls->lifetime[iptr->dst.varindex].type != UNUSED))
3064 /* a "living" stackslot */
3067 if (IS_INT_LNG_TYPE(d)) {
3068 if (IS_2_WORD_TYPE(d)) {
3069 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3070 M_LNGMOVE(REG_RESULT_PACKED, s1);
3073 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3074 M_INTMOVE(REG_RESULT, s1);
3078 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3080 emit_store_dst(jd, iptr, s1);
3086 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3088 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3089 /* object type cast-check */
3092 vftbl_t *supervftbl;
3095 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3101 super = iptr->sx.s23.s3.c.cls;
3102 superindex = super->index;
3103 supervftbl = super->vftbl;
3106 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3107 CODEGEN_CRITICAL_SECTION_NEW;
3109 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3111 /* if class is not resolved, check which code to call */
3113 if (super == NULL) {
3115 emit_label_beq(cd, BRANCH_LABEL_1);
3117 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3118 iptr->sx.s23.s3.c.ref, 0);
3120 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3121 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3122 emit_label_beq(cd, BRANCH_LABEL_2);
3125 /* interface checkcast code */
3127 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3128 if (super != NULL) {
3130 emit_label_beq(cd, BRANCH_LABEL_3);
3133 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3135 if (super == NULL) {
3136 patcher_add_patch_ref(jd, PATCHER_checkcast_interface,
3137 iptr->sx.s23.s3.c.ref,
3142 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3143 M_ISUB_IMM32(superindex, REG_ITMP3);
3144 /* XXX do we need this one? */
3146 emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);
3148 M_ALD32(REG_ITMP3, REG_ITMP2,
3149 OFFSET(vftbl_t, interfacetable[0]) -
3150 superindex * sizeof(methodptr*));
3152 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);
3155 emit_label_br(cd, BRANCH_LABEL_4);
3157 emit_label(cd, BRANCH_LABEL_3);
3160 /* class checkcast code */
3162 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3163 if (super == NULL) {
3164 emit_label(cd, BRANCH_LABEL_2);
3168 emit_label_beq(cd, BRANCH_LABEL_5);
3171 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3173 if (super == NULL) {
3174 patcher_add_patch_ref(jd, PATCHER_checkcast_class,
3175 iptr->sx.s23.s3.c.ref,
3179 M_MOV_IMM(supervftbl, REG_ITMP3);
3181 CODEGEN_CRITICAL_SECTION_START;
3183 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3185 /* if (s1 != REG_ITMP1) { */
3186 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3187 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3188 /* #if defined(ENABLE_THREADS) */
3189 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3191 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
3194 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3195 M_ISUB(REG_ITMP3, REG_ITMP2);
3196 M_MOV_IMM(supervftbl, REG_ITMP3);
3197 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3199 CODEGEN_CRITICAL_SECTION_END;
3203 M_CMP(REG_ITMP3, REG_ITMP2);
3204 emit_classcast_check(cd, iptr, BRANCH_ULE, REG_ITMP3, s1);
3207 emit_label(cd, BRANCH_LABEL_5);
3210 if (super == NULL) {
3211 emit_label(cd, BRANCH_LABEL_1);
3212 emit_label(cd, BRANCH_LABEL_4);
3215 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3218 /* array type cast-check */
3220 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3221 M_AST(s1, REG_SP, 0 * 4);
3223 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3224 patcher_add_patch_ref(jd, PATCHER_builtin_arraycheckcast,
3225 iptr->sx.s23.s3.c.ref, 0);
3228 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3229 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3232 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3234 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);
3236 d = codegen_reg_of_dst(jd, iptr, s1);
3240 emit_store_dst(jd, iptr, d);
3243 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3247 vftbl_t *supervftbl;
3250 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3256 super = iptr->sx.s23.s3.c.cls;
3257 superindex = super->index;
3258 supervftbl = super->vftbl;
3261 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3262 CODEGEN_CRITICAL_SECTION_NEW;
3264 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3265 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
3268 M_INTMOVE(s1, REG_ITMP1);
3274 /* if class is not resolved, check which code to call */
3276 if (super == NULL) {
3278 emit_label_beq(cd, BRANCH_LABEL_1);
3280 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3281 iptr->sx.s23.s3.c.ref, 0);
3283 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3284 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3285 emit_label_beq(cd, BRANCH_LABEL_2);
3288 /* interface instanceof code */
3290 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3291 if (super != NULL) {
3293 emit_label_beq(cd, BRANCH_LABEL_3);
3296 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3298 if (super == NULL) {
3299 patcher_add_patch_ref(jd, PATCHER_instanceof_interface,
3300 iptr->sx.s23.s3.c.ref, 0);
3304 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3305 M_ISUB_IMM32(superindex, REG_ITMP3);
3308 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3309 6 /* jcc */ + 5 /* mov_imm_reg */);
3312 M_ALD32(REG_ITMP1, REG_ITMP1,
3313 OFFSET(vftbl_t, interfacetable[0]) -
3314 superindex * sizeof(methodptr*));
3316 /* emit_setcc_reg(cd, CC_A, d); */
3317 /* emit_jcc(cd, CC_BE, 5); */
3322 emit_label_br(cd, BRANCH_LABEL_4);
3324 emit_label(cd, BRANCH_LABEL_3);
3327 /* class instanceof code */
3329 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3330 if (super == NULL) {
3331 emit_label(cd, BRANCH_LABEL_2);
3335 emit_label_beq(cd, BRANCH_LABEL_5);
3338 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3340 if (super == NULL) {
3341 patcher_add_patch_ref(jd, PATCHER_instanceof_class,
3342 iptr->sx.s23.s3.c.ref, 0);
3345 M_MOV_IMM(supervftbl, REG_ITMP2);
3347 CODEGEN_CRITICAL_SECTION_START;
3349 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3350 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3351 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3353 CODEGEN_CRITICAL_SECTION_END;
3355 M_ISUB(REG_ITMP2, REG_ITMP1);
3356 M_CLR(d); /* may be REG_ITMP2 */
3357 M_CMP(REG_ITMP3, REG_ITMP1);
3362 emit_label(cd, BRANCH_LABEL_5);
3365 if (super == NULL) {
3366 emit_label(cd, BRANCH_LABEL_1);
3367 emit_label(cd, BRANCH_LABEL_4);
3370 emit_store_dst(jd, iptr, d);
3374 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3376 /* check for negative sizes and copy sizes to stack if necessary */
3378 MCODECHECK((iptr->s1.argcount << 1) + 64);
3380 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3381 /* copy SAVEDVAR sizes to stack */
3382 var = VAR(iptr->sx.s23.s2.args[s1]);
3384 /* Already Preallocated? */
3385 if (!(var->flags & PREALLOC)) {
3386 if (var->flags & INMEMORY) {
3387 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff);
3388 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3391 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3395 /* is a patcher function set? */
3397 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3398 patcher_add_patch_ref(jd, PATCHER_builtin_multianewarray,
3399 iptr->sx.s23.s3.c.ref, 0);
3405 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3407 /* a0 = dimension count */
3409 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3411 /* a1 = arraydescriptor */
3413 M_IST_IMM(disp, REG_SP, 1 * 4);
3415 /* a2 = pointer to dimensions = stack pointer */
3417 M_MOV(REG_SP, REG_ITMP1);
3418 M_AADD_IMM(3 * 4, REG_ITMP1);
3419 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3421 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3424 /* check for exception before result assignment */
3426 emit_exception_check(cd, iptr);
3428 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3429 M_INTMOVE(REG_RESULT, s1);
3430 emit_store_dst(jd, iptr, s1);
3433 #if defined(ENABLE_SSA)
3434 case ICMD_GETEXCEPTION:
3435 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
3436 M_INTMOVE(REG_ITMP1, d);
3437 emit_store_dst(jd, iptr, d);
3441 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3446 } /* for instruction */
3450 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3453 #if defined(ENABLE_SSA)
3456 /* by edge splitting, in Blocks with phi moves there can only */
3457 /* be a goto as last command, no other Jump/Branch Command */
3459 if (!last_cmd_was_goto)
3460 codegen_emit_phi_moves(jd, bptr);
3465 /* At the end of a basic block we may have to append some nops,
3466 because the patcher stub calling code might be longer than the
3467 actual instruction. So codepatching does not change the
3468 following block unintentionally. */
3470 if (cd->mcodeptr < cd->lastmcodeptr) {
3471 while (cd->mcodeptr < cd->lastmcodeptr) {
3476 } /* if (bptr -> flags >= BBREACHED) */
3477 } /* for basic block */
3479 /* generate stubs */
3481 emit_patcher_traps(jd);
3483 /* everything's ok */
3489 /* codegen_emit_stub_native ****************************************************
3491 Emits a stub routine which calls a native method.
3493 *******************************************************************************/
3495 void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f, int skipparams)
/* Emits the i386 stub that bridges JIT-compiled code to the native
   function f: allocates a stackframe, registers a stackframeinfo for
   the stack walker, copies the Java arguments into the native (cdecl)
   stack layout described by nmd, calls f, preserves the return value
   across the native-call teardown, and finally dispatches to the
   exception handler if the native code left a pending exception.
   skipparams is the number of extra leading slots (JNIEnv / class) in
   the native descriptor relative to the Java descriptor.
   NOTE(review): this listing is elided -- several original lines
   (local declarations, M_CALL/M_JMP instructions, switch case labels,
   closing braces) are not shown; the comments below describe only the
   visible lines. */
3501 int i, j; /* count variables */
3505 /* get required compiler data */
3511 /* set some variables */
3515 /* calculate stackframe size */
/* Frame slots: stackframeinfo + localref_table + 4 outgoing argument
   words for codegen_start_native_call, plus (elided) space for the
   native argument area and the saved return value.
   NOTE(review): word counts via /SIZEOF_VOID_P are later scaled by 8
   when computing byte offsets -- inherited CACAO slot convention,
   verify against md-abi.h. */
3517 cd->stackframesize =
3518 sizeof(stackframeinfo_t) / SIZEOF_VOID_P +
3519 sizeof(localref_table) / SIZEOF_VOID_P +
3520 4 + /* 4 arguments (start_native_call) */
3523 /* keep stack 16-byte aligned */
/* Making the slot count odd keeps ESP 16-byte aligned once the frame
   is allocated: 8 * odd + 4 (frame) + 4 (pushed return address) is a
   multiple of 16. */
3525 ALIGN_ODD(cd->stackframesize);
3527 /* create method header */
3529 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3530 (void) dseg_add_unique_s4(cd, cd->stackframesize * 8 + 4); /* FrameSize */
3531 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3532 (void) dseg_add_unique_s4(cd, 0); /* IntSave */
3533 (void) dseg_add_unique_s4(cd, 0); /* FltSave */
3535 #if defined(ENABLE_PROFILING)
3536 /* generate native method profiling code */
3538 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
3539 /* count frequency */
3541 M_MOV_IMM(code, REG_ITMP1);
3542 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
3546 /* calculate stackframe size for native function */
3548 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
3550 /* Mark the whole fpu stack as free for native functions (only for saved */
3551 /* register count == 0). */
/* FltSave is 0 (see method header above), so no x87 registers hold
   live values here; freeing st(0)..st(7) gives the native code a
   clean FPU stack. */
3553 emit_ffree_reg(cd, 0);
3554 emit_ffree_reg(cd, 1);
3555 emit_ffree_reg(cd, 2);
3556 emit_ffree_reg(cd, 3);
3557 emit_ffree_reg(cd, 4);
3558 emit_ffree_reg(cd, 5);
3559 emit_ffree_reg(cd, 6);
3560 emit_ffree_reg(cd, 7);
3562 #if defined(ENABLE_GC_CACAO)
3563 /* remember callee saved int registers in stackframeinfo (GC may need to */
3564 /* recover them during a collection). */
3566 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3567 OFFSET(stackframeinfo_t, intregs);
3569 for (i = 0; i < INT_SAV_CNT; i++)
3570 M_AST(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3573 /* prepare data structures for native function call */
/* arg0 = current SP (base used to locate the stackframeinfo),
   arg1 = 0; the actual M_CALL to codegen_start_native_call is elided
   from this listing. */
3575 M_MOV(REG_SP, REG_ITMP1);
3576 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3577 M_IST_IMM(0, REG_SP, 1 * 4);
3580 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
3583 /* remember class argument */
/* NOTE(review): for static methods REG_RESULT presumably holds the
   class object returned by codegen_start_native_call; it is parked in
   ITMP3 and stored as the second native argument below -- confirm
   against codegen_start_native_call. */
3585 if (m->flags & ACC_STATIC)
3586 M_MOV(REG_RESULT, REG_ITMP3);
3588 /* Copy or spill arguments to new locations. */
/* j = i + skipparams maps Java parameter i onto its slot in the
   native descriptor, past the env/class slots. Only inmemory (stack)
   parameters are copied; s1 is the caller-frame offset (+8 skips the
   return address plus one word -- elided context, verify). */
3590 for (i = md->paramcount - 1, j = i + skipparams; i >= 0; i--, j--) {
3591 if (!md->params[i].inmemory)
3594 s1 = md->params[i].regoff + cd->stackframesize * 8 + 8;
3595 s2 = nmd->params[j].regoff;
3597 /* float/double in memory can be copied like int/longs */
3599 switch (md->paramtypes[i].type) {
3603 M_ILD(REG_ITMP1, REG_SP, s1);
3604 M_IST(REG_ITMP1, REG_SP, s2);
3608 M_LLD(REG_ITMP12_PACKED, REG_SP, s1);
3609 M_LST(REG_ITMP12_PACKED, REG_SP, s2);
3614 /* Handle native Java methods. */
3616 if (m->flags & ACC_NATIVE) {
3617 /* if function is static, put class into second argument */
3619 if (m->flags & ACC_STATIC)
3620 M_AST(REG_ITMP3, REG_SP, 1 * 4);
3622 /* put env into first argument */
3624 M_AST_IMM(_Jv_env, REG_SP, 0 * 4);
3627 /* Call the native function. */
/* Load f's address indirectly from the data segment so it can be
   patched; NOTE(review): the 0 immediate is presumably relocated to
   the data-segment base (pv) by elided code -- confirm. */
3629 disp = dseg_add_functionptr(cd, f);
3630 emit_mov_imm_reg(cd, 0, REG_ITMP3);
3632 M_ALD(REG_ITMP1, REG_ITMP3, disp);
3635 /* save return value */
/* Park the result in frame slot 1*8 so it survives the call to
   codegen_finish_native_call below (case labels elided). */
3637 switch (md->returntype.type) {
3640 M_IST(REG_RESULT, REG_SP, 1 * 8);
3643 M_LST(REG_RESULT_PACKED, REG_SP, 1 * 8);
3646 emit_fsts_membase(cd, REG_SP, 1 * 8);
3649 emit_fstl_membase(cd, REG_SP, 1 * 8);
3655 /* remove native stackframe info */
3657 M_MOV(REG_SP, REG_ITMP1);
3658 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3659 M_IST_IMM(0, REG_SP, 1 * 4);
3662 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
/* The pending-exception pointer is kept in ITMP2: it is tested after
   frame teardown and moved to REG_ITMP1_XPTR in the handler below. */
3664 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
3666 /* restore return value */
3668 switch (md->returntype.type) {
3671 M_ILD(REG_RESULT, REG_SP, 1 * 8);
3674 M_LLD(REG_RESULT_PACKED, REG_SP, 1 * 8);
3677 emit_flds_membase(cd, REG_SP, 1 * 8);
3680 emit_fldl_membase(cd, REG_SP, 1 * 8);
3686 #if defined(ENABLE_GC_CACAO)
3687 /* restore callee saved int registers from stackframeinfo (GC might have */
3688 /* modified them during a collection). */
3690 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3691 OFFSET(stackframeinfo_t, intregs);
3693 for (i = 0; i < INT_SAV_CNT; i++)
3694 M_ALD(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
/* deallocate the stub's stackframe (mirrors the M_ASUB_IMM above) */
3697 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
3699 /* check for exception */
3706 /* handle exception */
/* XPC = return address on the stack; back it up by 2 so it points
   into the calling instruction -- NOTE(review): the constant 2
   presumably matches the size assumptions of the exception-handling
   asm, confirm against asm_handle_nat_exception. The final M_JMP via
   REG_ITMP3 is elided from this listing. */
3708 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
3709 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
3710 M_ASUB_IMM(2, REG_ITMP2_XPC);
3712 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
3718 * These are local overrides for various environment variables in Emacs.
3719 * Please do not remove this and leave it at the end of the file, where
3720 * Emacs will automagically detect them.
3721 * ---------------------------------------------------------------------
3724 * indent-tabs-mode: t
3728 * vim:noexpandtab:sw=4:ts=4: