1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007, 2008
4 CACAOVM - Verein zur Foerderung der freien virtuellen Maschine CACAO
6 This file is part of CACAO.
8 This program is free software; you can redistribute it and/or
9 modify it under the terms of the GNU General Public License as
10 published by the Free Software Foundation; either version 2, or (at
11 your option) any later version.
13 This program is distributed in the hope that it will be useful, but
14 WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with this program; if not, write to the Free Software
20 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
34 #include "vm/jit/i386/md-abi.h"
36 #include "vm/jit/i386/codegen.h"
37 #include "vm/jit/i386/emit.h"
39 #include "mm/memory.h"
40 #include "native/jni.h"
41 #include "native/localref.h"
42 #include "native/native.h"
44 #include "threads/lock-common.h"
46 #include "vm/builtin.h"
47 #include "vm/exceptions.h"
48 #include "vm/global.h"
49 #include "vm/stringlocal.h"
52 #include "vm/jit/abi.h"
53 #include "vm/jit/asmpart.h"
54 #include "vm/jit/codegen-common.h"
55 #include "vm/jit/dseg.h"
56 #include "vm/jit/emit-common.h"
57 #include "vm/jit/jit.h"
58 #include "vm/jit/linenumbertable.h"
59 #include "vm/jit/parse.h"
60 #include "vm/jit/patcher-common.h"
61 #include "vm/jit/reg.h"
62 #include "vm/jit/replace.h"
63 #include "vm/jit/stacktrace.h"
64 #include "vm/jit/trap.h"
66 #if defined(ENABLE_SSA)
67 # include "vm/jit/optimizing/lsra.h"
68 # include "vm/jit/optimizing/ssa.h"
69 #elif defined(ENABLE_LSRA)
70 # include "vm/jit/allocator/lsra.h"
73 #include "vmcore/loader.h"
74 #include "vmcore/options.h"
75 #include "vmcore/utf8.h"
78 /* codegen_emit ****************************************************************
80 Generates machine code.
82 *******************************************************************************/
84 bool codegen_emit(jitdata *jd)
90 s4 len, s1, s2, s3, d, disp;
91 int align_off; /* offset for alignment compensation */
96 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
97 builtintable_entry *bte;
100 unresolved_field *uf;
103 #if defined(ENABLE_SSA)
105 bool last_cmd_was_goto;
107 last_cmd_was_goto = false;
111 /* get required compiler data */
118 /* prevent compiler warnings */
129 s4 savedregs_num = 0;
132 /* space to save used callee saved registers */
134 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
135 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
137 cd->stackframesize = rd->memuse + savedregs_num;
140 #if defined(ENABLE_THREADS)
141 /* space to save argument of monitor_enter */
143 if (checksync && code_is_synchronized(code))
144 cd->stackframesize++;
147 /* create method header */
149 /* Keep stack of non-leaf functions 16-byte aligned. */
151 if (!code_is_leafmethod(code)) {
152 ALIGN_ODD(cd->stackframesize);
155 align_off = cd->stackframesize ? 4 : 0;
157 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
158 (void) dseg_add_unique_s4(
159 cd, cd->stackframesize * 8 + align_off); /* FrameSize */
161 code->synchronizedoffset = rd->memuse * 8;
163 /* REMOVEME: We still need it for exception handling in assembler. */
165 if (code_is_leafmethod(code))
166 (void) dseg_add_unique_s4(cd, 1); /* IsLeaf */
168 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
170 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
171 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
173 #if defined(ENABLE_PROFILING)
174 /* generate method profiling code */
176 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
177 /* count frequency */
179 M_MOV_IMM(code, REG_ITMP3);
180 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
184 /* create stack frame (if necessary) */
186 if (cd->stackframesize)
188 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
190 /* save return address and used callee saved registers */
192 p = cd->stackframesize;
193 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
194 p--; M_AST(rd->savintregs[i], REG_SP, p * 8);
196 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
197 p--; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 8);
200 /* take arguments out of register or stack frame */
205 for (p = 0, l = 0; p < md->paramcount; p++) {
206 t = md->paramtypes[p].type;
208 varindex = jd->local_map[l * 5 + t];
209 #if defined(ENABLE_SSA)
211 if (varindex != UNUSED)
212 varindex = ls->var_0[varindex];
213 if ((varindex != UNUSED) && (ls->lifetime[varindex].type == UNUSED))
218 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
221 if (varindex == UNUSED)
225 s1 = md->params[p].regoff;
228 if (IS_INT_LNG_TYPE(t)) { /* integer args */
229 if (!md->params[p].inmemory) { /* register arguments */
230 log_text("integer register argument");
232 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
233 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
235 else { /* reg arg -> spilled */
236 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
240 if (!(var->flags & INMEMORY)) {
242 cd->stackframesize * 8 + 4 + align_off + s1);
245 if (!IS_2_WORD_TYPE(t)) {
246 #if defined(ENABLE_SSA)
247 /* no copy avoiding by now possible with SSA */
249 emit_mov_membase_reg( /* + 4 for return address */
251 cd->stackframesize * 8 + s1 + 4 + align_off,
253 emit_mov_reg_membase(
254 cd, REG_ITMP1, REG_SP, var->vv.regoff);
257 #endif /*defined(ENABLE_SSA)*/
258 /* reuse stackslot */
259 var->vv.regoff = cd->stackframesize * 8 + 4 +
264 #if defined(ENABLE_SSA)
265 /* no copy avoiding by now possible with SSA */
267 emit_mov_membase_reg( /* + 4 for return address */
269 cd->stackframesize * 8 + s1 + 4 + align_off,
271 emit_mov_reg_membase(
272 cd, REG_ITMP1, REG_SP, var->vv.regoff);
273 emit_mov_membase_reg( /* + 4 for return address */
275 cd->stackframesize * 8 + s1 + 4 + 4 + align_off,
277 emit_mov_reg_membase(
278 cd, REG_ITMP1, REG_SP, var->vv.regoff + 4);
281 #endif /*defined(ENABLE_SSA)*/
282 /* reuse stackslot */
283 var->vv.regoff = cd->stackframesize * 8 + 8 + s1;
288 else { /* floating args */
289 if (!md->params[p].inmemory) { /* register arguments */
290 log_text("There are no float argument registers!");
292 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
293 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
294 } else { /* reg arg -> spilled */
295 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 8 */
299 else { /* stack arguments */
300 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
304 cd->stackframesize * 8 + s1 + 4 + align_off);
306 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
312 cd->stackframesize * 8 + s1 + 4 + align_off);
314 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
317 } else { /* stack-arg -> spilled */
318 #if defined(ENABLE_SSA)
319 /* no copy avoiding by now possible with SSA */
321 emit_mov_membase_reg(
323 cd->stackframesize * 8 + s1 + 4 + align_off,
325 emit_mov_reg_membase(
326 cd, REG_ITMP1, REG_SP, var->vv.regoff);
330 cd->stackframesize * 8 + s1 + 4 + align_off);
331 emit_fstps_membase(cd, REG_SP, var->vv.regoff);
336 cd->stackframesize * 8 + s1 + 4 + align_off);
337 emit_fstpl_membase(cd, REG_SP, var->vv.regoff);
341 #endif /*defined(ENABLE_SSA)*/
342 /* reuse stackslot */
343 var->vv.regoff = cd->stackframesize * 8 + 4 +
350 /* call monitorenter function */
352 #if defined(ENABLE_THREADS)
353 if (checksync && code_is_synchronized(code)) {
356 if (m->flags & ACC_STATIC) {
357 M_MOV_IMM(&m->clazz->object.header, REG_ITMP1);
360 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 8 + 4 + align_off);
363 M_ALD_MEM(REG_ITMP1, TRAP_NullPointerException);
366 M_AST(REG_ITMP1, REG_SP, s1 * 8);
367 M_AST(REG_ITMP1, REG_SP, 0 * 4);
368 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
374 emit_verbosecall_enter(jd);
379 #if defined(ENABLE_SSA)
380 /* with SSA the Header is Basic Block 0 - insert phi Moves if necessary */
382 codegen_emit_phi_moves(jd, ls->basicblocks[0]);
385 /* end of header generation */
387 /* create replacement points */
389 REPLACEMENT_POINTS_INIT(cd, jd);
391 /* walk through all basic blocks */
393 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
395 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
397 if (bptr->flags >= BBREACHED) {
398 /* branch resolving */
400 codegen_resolve_branchrefs(cd, bptr);
402 /* handle replacement points */
404 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
406 #if defined(ENABLE_REPLACEMENT)
407 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
408 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
410 disp = (s4) &(m->hitcountdown);
411 M_ISUB_IMM_MEMABS(1, disp);
417 /* copy interface registers to their destination */
422 #if defined(ENABLE_PROFILING)
423 /* generate basic block profiling code */
425 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
426 /* count frequency */
428 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
429 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
433 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
434 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
437 # if defined(ENABLE_SSA)
439 last_cmd_was_goto = false;
443 var = VAR(bptr->invars[len]);
444 if (bptr->type != BBTYPE_STD) {
445 if (!IS_2_WORD_TYPE(var->type)) {
446 #if !defined(ENABLE_SSA)
447 if (bptr->type == BBTYPE_EXH) {
448 d = codegen_reg_of_var(0, var, REG_ITMP1);
449 M_INTMOVE(REG_ITMP1, d);
450 emit_store(jd, NULL, var, d);
455 log_text("copy interface registers(EXH, SBR): longs \
456 have to be in memory (begin 1)");
464 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
468 var = VAR(bptr->invars[len]);
469 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
470 if (!IS_2_WORD_TYPE(var->type)) {
471 if (bptr->type == BBTYPE_EXH) {
472 d = codegen_reg_of_var(0, var, REG_ITMP1);
473 M_INTMOVE(REG_ITMP1, d);
474 emit_store(jd, NULL, var, d);
478 log_text("copy interface registers: longs have to be in \
485 assert((var->flags & INOUT));
490 /* walk through all instructions */
495 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
496 if (iptr->line != currentline) {
497 linenumbertable_list_entry_add(cd, iptr->line);
498 currentline = iptr->line;
501 MCODECHECK(1024); /* 1kB should be enough */
504 case ICMD_NOP: /* ... ==> ... */
505 case ICMD_POP: /* ..., value ==> ... */
506 case ICMD_POP2: /* ..., value, value ==> ... */
509 case ICMD_INLINE_START:
511 REPLACEMENT_POINT_INLINE_START(cd, iptr);
514 case ICMD_INLINE_BODY:
516 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
517 linenumbertable_list_entry_add_inline_start(cd, iptr);
518 linenumbertable_list_entry_add(cd, iptr->line);
521 case ICMD_INLINE_END:
523 linenumbertable_list_entry_add_inline_end(cd, iptr);
524 linenumbertable_list_entry_add(cd, iptr->line);
527 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
529 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
530 emit_nullpointer_check(cd, iptr, s1);
533 /* constant operations ************************************************/
535 case ICMD_ICONST: /* ... ==> ..., constant */
537 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
538 ICONST(d, iptr->sx.val.i);
539 emit_store_dst(jd, iptr, d);
542 case ICMD_LCONST: /* ... ==> ..., constant */
544 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
545 LCONST(d, iptr->sx.val.l);
546 emit_store_dst(jd, iptr, d);
549 case ICMD_FCONST: /* ... ==> ..., constant */
551 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
552 if (iptr->sx.val.f == 0.0) {
556 if (iptr->sx.val.i == 0x80000000) {
560 } else if (iptr->sx.val.f == 1.0) {
563 } else if (iptr->sx.val.f == 2.0) {
569 disp = dseg_add_float(cd, iptr->sx.val.f);
570 emit_mov_imm_reg(cd, 0, REG_ITMP1);
572 emit_flds_membase(cd, REG_ITMP1, disp);
574 emit_store_dst(jd, iptr, d);
577 case ICMD_DCONST: /* ... ==> ..., constant */
579 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
580 if (iptr->sx.val.d == 0.0) {
584 if (iptr->sx.val.l == 0x8000000000000000LL) {
588 } else if (iptr->sx.val.d == 1.0) {
591 } else if (iptr->sx.val.d == 2.0) {
597 disp = dseg_add_double(cd, iptr->sx.val.d);
598 emit_mov_imm_reg(cd, 0, REG_ITMP1);
600 emit_fldl_membase(cd, REG_ITMP1, disp);
602 emit_store_dst(jd, iptr, d);
605 case ICMD_ACONST: /* ... ==> ..., constant */
607 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
609 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
610 patcher_add_patch_ref(jd, PATCHER_aconst,
611 iptr->sx.val.c.ref, 0);
616 if (iptr->sx.val.anyptr == NULL)
619 M_MOV_IMM(iptr->sx.val.anyptr, d);
621 emit_store_dst(jd, iptr, d);
625 /* load/store/copy/move operations ************************************/
643 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
648 /* integer operations *************************************************/
650 case ICMD_INEG: /* ..., value ==> ..., - value */
652 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
653 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
656 emit_store_dst(jd, iptr, d);
659 case ICMD_LNEG: /* ..., value ==> ..., - value */
661 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
662 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
664 M_NEG(GET_LOW_REG(d));
665 M_IADDC_IMM(0, GET_HIGH_REG(d));
666 M_NEG(GET_HIGH_REG(d));
667 emit_store_dst(jd, iptr, d);
670 case ICMD_I2L: /* ..., value ==> ..., value */
672 s1 = emit_load_s1(jd, iptr, EAX);
673 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
676 M_LNGMOVE(EAX_EDX_PACKED, d);
677 emit_store_dst(jd, iptr, d);
680 case ICMD_L2I: /* ..., value ==> ..., value */
682 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
683 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
685 emit_store_dst(jd, iptr, d);
688 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
690 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
691 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
695 emit_store_dst(jd, iptr, d);
698 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
700 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
701 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
703 emit_store_dst(jd, iptr, d);
706 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
708 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
709 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
711 emit_store_dst(jd, iptr, d);
715 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
717 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
718 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
719 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
726 emit_store_dst(jd, iptr, d);
730 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
731 /* sx.val.i = constant */
733 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
734 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
736 /* `inc reg' is slower on p4's (according to the ia32
737 optimization reference manual and benchmarks) and as
741 M_IADD_IMM(iptr->sx.val.i, d);
742 emit_store_dst(jd, iptr, d);
745 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
747 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
748 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
749 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
750 M_INTMOVE(s1, GET_LOW_REG(d));
751 M_IADD(s2, GET_LOW_REG(d));
752 /* don't use REG_ITMP1 */
753 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
754 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
755 M_INTMOVE(s1, GET_HIGH_REG(d));
756 M_IADDC(s2, GET_HIGH_REG(d));
757 emit_store_dst(jd, iptr, d);
760 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
761 /* sx.val.l = constant */
763 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
764 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
766 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
767 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
768 emit_store_dst(jd, iptr, d);
771 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
773 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
774 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
775 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
777 M_INTMOVE(s1, REG_ITMP1);
778 M_ISUB(s2, REG_ITMP1);
779 M_INTMOVE(REG_ITMP1, d);
785 emit_store_dst(jd, iptr, d);
788 case ICMD_ISUBCONST: /* ..., value ==> ..., value - constant */
789 /* sx.val.i = constant */
791 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
792 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
794 M_ISUB_IMM(iptr->sx.val.i, d);
795 emit_store_dst(jd, iptr, d);
798 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
800 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
801 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
802 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
803 if (s2 == GET_LOW_REG(d)) {
804 M_INTMOVE(s1, REG_ITMP1);
805 M_ISUB(s2, REG_ITMP1);
806 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
809 M_INTMOVE(s1, GET_LOW_REG(d));
810 M_ISUB(s2, GET_LOW_REG(d));
812 /* don't use REG_ITMP1 */
813 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
814 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
815 if (s2 == GET_HIGH_REG(d)) {
816 M_INTMOVE(s1, REG_ITMP2);
817 M_ISUBB(s2, REG_ITMP2);
818 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
821 M_INTMOVE(s1, GET_HIGH_REG(d));
822 M_ISUBB(s2, GET_HIGH_REG(d));
824 emit_store_dst(jd, iptr, d);
827 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
828 /* sx.val.l = constant */
830 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
831 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
833 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
834 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
835 emit_store_dst(jd, iptr, d);
838 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
840 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
841 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
842 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
849 emit_store_dst(jd, iptr, d);
852 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
853 /* sx.val.i = constant */
855 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
856 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
857 M_IMUL_IMM(s1, iptr->sx.val.i, d);
858 emit_store_dst(jd, iptr, d);
861 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
863 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
864 s2 = emit_load_s2_low(jd, iptr, EDX);
865 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
867 M_INTMOVE(s1, REG_ITMP2);
868 M_IMUL(s2, REG_ITMP2);
870 s1 = emit_load_s1_low(jd, iptr, EAX);
871 s2 = emit_load_s2_high(jd, iptr, EDX);
874 M_IADD(EDX, REG_ITMP2);
876 s1 = emit_load_s1_low(jd, iptr, EAX);
877 s2 = emit_load_s2_low(jd, iptr, EDX);
880 M_INTMOVE(EAX, GET_LOW_REG(d));
881 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
883 emit_store_dst(jd, iptr, d);
886 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
887 /* sx.val.l = constant */
889 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
890 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
891 ICONST(EAX, iptr->sx.val.l);
893 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
894 M_IADD(REG_ITMP2, EDX);
895 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
896 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
897 M_IADD(REG_ITMP2, EDX);
898 M_LNGMOVE(EAX_EDX_PACKED, d);
899 emit_store_dst(jd, iptr, d);
902 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
904 s1 = emit_load_s1(jd, iptr, EAX);
905 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
906 d = codegen_reg_of_dst(jd, iptr, EAX);
907 emit_arithmetic_check(cd, iptr, s2);
909 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
911 /* check as described in jvm spec */
913 M_CMP_IMM(0x80000000, EAX);
920 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
921 emit_store_dst(jd, iptr, d);
924 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
926 s1 = emit_load_s1(jd, iptr, EAX);
927 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
928 d = codegen_reg_of_dst(jd, iptr, EDX);
929 emit_arithmetic_check(cd, iptr, s2);
931 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
933 /* check as described in jvm spec */
935 M_CMP_IMM(0x80000000, EAX);
943 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
944 emit_store_dst(jd, iptr, d);
947 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
948 /* sx.val.i = constant */
950 /* TODO: optimize for `/ 2' */
951 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
952 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
956 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
957 M_SRA_IMM(iptr->sx.val.i, d);
958 emit_store_dst(jd, iptr, d);
961 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
962 /* sx.val.i = constant */
964 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
965 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
967 M_MOV(s1, REG_ITMP1);
971 M_AND_IMM(iptr->sx.val.i, d);
973 M_BGE(2 + 2 + 6 + 2);
974 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
976 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
978 emit_store_dst(jd, iptr, d);
981 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
982 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
984 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
985 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
987 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
988 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
989 /* XXX could be optimized */
990 emit_arithmetic_check(cd, iptr, REG_ITMP3);
992 bte = iptr->sx.s23.s3.bte;
995 M_LST(s2, REG_SP, 2 * 4);
997 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
998 M_LST(s1, REG_SP, 0 * 4);
1000 M_MOV_IMM(bte->fp, REG_ITMP3);
1002 emit_store_dst(jd, iptr, d);
1005 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1006 /* sx.val.i = constant */
1008 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1009 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1011 M_TEST(GET_HIGH_REG(d));
1013 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1014 M_IADDC_IMM(0, GET_HIGH_REG(d));
1015 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1016 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1017 emit_store_dst(jd, iptr, d);
1021 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1022 /* sx.val.l = constant */
1024 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1025 if (iptr->dst.var->flags & INMEMORY) {
1026 if (iptr->s1.var->flags & INMEMORY) {
1027 /* Alpha algorithm */
1029 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8);
1031 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1037 /* TODO: hmm, don't know if this is always correct */
1039 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1041 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1047 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1048 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1050 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1051 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1052 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1053 emit_jcc(cd, CC_GE, disp);
1055 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1056 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1058 emit_neg_reg(cd, REG_ITMP1);
1059 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1060 emit_neg_reg(cd, REG_ITMP2);
1062 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1063 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1065 emit_neg_reg(cd, REG_ITMP1);
1066 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1067 emit_neg_reg(cd, REG_ITMP2);
1069 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 8);
1070 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 8 + 4);
1074 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1075 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1077 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1078 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1079 M_TEST(GET_LOW_REG(s1));
1085 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1087 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1088 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1089 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1090 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1093 emit_store_dst(jd, iptr, d);
1096 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1097 /* sx.val.i = constant */
1099 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1100 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1102 M_SLL_IMM(iptr->sx.val.i, d);
1103 emit_store_dst(jd, iptr, d);
1106 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1108 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1109 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1110 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1111 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1114 emit_store_dst(jd, iptr, d);
1117 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1118 /* sx.val.i = constant */
1120 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1121 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1123 M_SRA_IMM(iptr->sx.val.i, d);
1124 emit_store_dst(jd, iptr, d);
1127 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1129 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1130 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1131 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1132 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1135 emit_store_dst(jd, iptr, d);
1138 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1139 /* sx.val.i = constant */
1141 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1142 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1144 M_SRL_IMM(iptr->sx.val.i, d);
1145 emit_store_dst(jd, iptr, d);
1148 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1150 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1151 s2 = emit_load_s2(jd, iptr, ECX);
1152 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1155 M_TEST_IMM(32, ECX);
1157 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1158 M_CLR(GET_LOW_REG(d));
1159 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1160 M_SLL(GET_LOW_REG(d));
1161 emit_store_dst(jd, iptr, d);
1164 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1165 /* sx.val.i = constant */
1167 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1168 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1170 if (iptr->sx.val.i & 0x20) {
1171 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1172 M_CLR(GET_LOW_REG(d));
1173 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1177 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1179 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1181 emit_store_dst(jd, iptr, d);
1184 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1186 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1187 s2 = emit_load_s2(jd, iptr, ECX);
1188 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1191 M_TEST_IMM(32, ECX);
1193 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1194 M_SRA_IMM(31, GET_HIGH_REG(d));
1195 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1196 M_SRA(GET_HIGH_REG(d));
1197 emit_store_dst(jd, iptr, d);
1200 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1201 /* sx.val.i = constant */
1203 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1204 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1206 if (iptr->sx.val.i & 0x20) {
1207 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1208 M_SRA_IMM(31, GET_HIGH_REG(d));
1209 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1213 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1215 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1217 emit_store_dst(jd, iptr, d);
1220 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1222 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1223 s2 = emit_load_s2(jd, iptr, ECX);
1224 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1227 M_TEST_IMM(32, ECX);
1229 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1230 M_CLR(GET_HIGH_REG(d));
1231 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1232 M_SRL(GET_HIGH_REG(d));
1233 emit_store_dst(jd, iptr, d);
1236 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1237 /* sx.val.l = constant */
1239 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1240 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1242 if (iptr->sx.val.i & 0x20) {
1243 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1244 M_CLR(GET_HIGH_REG(d));
1245 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1249 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1251 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1253 emit_store_dst(jd, iptr, d);
1256 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1258 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1259 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1260 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1267 emit_store_dst(jd, iptr, d);
1270 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1271 /* sx.val.i = constant */
1273 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1274 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1276 M_AND_IMM(iptr->sx.val.i, d);
1277 emit_store_dst(jd, iptr, d);
1280 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1282 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1283 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1284 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1285 if (s2 == GET_LOW_REG(d))
1286 M_AND(s1, GET_LOW_REG(d));
1288 M_INTMOVE(s1, GET_LOW_REG(d));
1289 M_AND(s2, GET_LOW_REG(d));
1291 /* REG_ITMP1 probably contains low 32-bit of destination */
1292 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1293 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1294 if (s2 == GET_HIGH_REG(d))
1295 M_AND(s1, GET_HIGH_REG(d));
1297 M_INTMOVE(s1, GET_HIGH_REG(d));
1298 M_AND(s2, GET_HIGH_REG(d));
1300 emit_store_dst(jd, iptr, d);
1303 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1304 /* sx.val.l = constant */
1306 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1307 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1309 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1310 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1311 emit_store_dst(jd, iptr, d);
1314 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1316 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1317 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1318 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1325 emit_store_dst(jd, iptr, d);
1328 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1329 /* sx.val.i = constant */
1331 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1332 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1334 M_OR_IMM(iptr->sx.val.i, d);
1335 emit_store_dst(jd, iptr, d);
1338 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1340 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1341 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1342 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1343 if (s2 == GET_LOW_REG(d))
1344 M_OR(s1, GET_LOW_REG(d));
1346 M_INTMOVE(s1, GET_LOW_REG(d));
1347 M_OR(s2, GET_LOW_REG(d));
1349 /* REG_ITMP1 probably contains low 32-bit of destination */
1350 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1351 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1352 if (s2 == GET_HIGH_REG(d))
1353 M_OR(s1, GET_HIGH_REG(d));
1355 M_INTMOVE(s1, GET_HIGH_REG(d));
1356 M_OR(s2, GET_HIGH_REG(d));
1358 emit_store_dst(jd, iptr, d);
1361 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1362 /* sx.val.l = constant */
1364 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1365 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1367 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1368 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1369 emit_store_dst(jd, iptr, d);
1372 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1374 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1375 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1376 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1383 emit_store_dst(jd, iptr, d);
1386 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1387 /* sx.val.i = constant */
1389 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1390 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1392 M_XOR_IMM(iptr->sx.val.i, d);
1393 emit_store_dst(jd, iptr, d);
1396 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1398 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1399 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1400 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1401 if (s2 == GET_LOW_REG(d))
1402 M_XOR(s1, GET_LOW_REG(d));
1404 M_INTMOVE(s1, GET_LOW_REG(d));
1405 M_XOR(s2, GET_LOW_REG(d));
1407 /* REG_ITMP1 probably contains low 32-bit of destination */
1408 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1409 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1410 if (s2 == GET_HIGH_REG(d))
1411 M_XOR(s1, GET_HIGH_REG(d));
1413 M_INTMOVE(s1, GET_HIGH_REG(d));
1414 M_XOR(s2, GET_HIGH_REG(d));
1416 emit_store_dst(jd, iptr, d);
1419 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1420 /* sx.val.l = constant */
1422 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1423 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1425 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1426 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1427 emit_store_dst(jd, iptr, d);
1431 /* floating operations ************************************************/
1433 case ICMD_FNEG: /* ..., value ==> ..., - value */
1435 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1436 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1438 emit_store_dst(jd, iptr, d);
1441 case ICMD_DNEG: /* ..., value ==> ..., - value */
1443 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1444 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1446 emit_store_dst(jd, iptr, d);
1449 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1451 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1452 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1453 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1455 emit_store_dst(jd, iptr, d);
1458 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1460 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1461 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1462 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1464 emit_store_dst(jd, iptr, d);
1467 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1469 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1470 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1471 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1473 emit_store_dst(jd, iptr, d);
1476 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1478 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1479 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1480 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1482 emit_store_dst(jd, iptr, d);
1485 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1487 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1488 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1489 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1491 emit_store_dst(jd, iptr, d);
1494 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1496 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1497 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1498 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1500 emit_store_dst(jd, iptr, d);
1503 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1505 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1506 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1507 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1509 emit_store_dst(jd, iptr, d);
1512 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1514 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1515 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1516 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1518 emit_store_dst(jd, iptr, d);
1521 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1523 /* exchanged to skip fxch */
1524 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1525 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1526 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1527 /* emit_fxch(cd); */
1532 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1533 emit_store_dst(jd, iptr, d);
1534 emit_ffree_reg(cd, 0);
1538 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1540 /* exchanged to skip fxch */
1541 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1542 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1543 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1544 /* emit_fxch(cd); */
1549 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1550 emit_store_dst(jd, iptr, d);
1551 emit_ffree_reg(cd, 0);
1555 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1556 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1558 var = VAROP(iptr->s1);
1559 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1561 if (var->flags & INMEMORY) {
1562 emit_fildl_membase(cd, REG_SP, var->vv.regoff);
1564 /* XXX not thread safe! */
1565 disp = dseg_add_unique_s4(cd, 0);
1566 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1568 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1569 emit_fildl_membase(cd, REG_ITMP1, disp);
1572 emit_store_dst(jd, iptr, d);
1575 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1576 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1578 var = VAROP(iptr->s1);
1579 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1580 if (var->flags & INMEMORY) {
1581 emit_fildll_membase(cd, REG_SP, var->vv.regoff);
1584 log_text("L2F: longs have to be in memory");
1587 emit_store_dst(jd, iptr, d);
1590 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1592 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1593 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1595 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1598 /* Round to zero, 53-bit mode, exception masked */
1599 disp = dseg_add_s4(cd, 0x0e7f);
1600 emit_fldcw_membase(cd, REG_ITMP1, disp);
1602 var = VAROP(iptr->dst);
1603 var1 = VAROP(iptr->s1);
1605 if (var->flags & INMEMORY) {
1606 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1608 /* Round to nearest, 53-bit mode, exceptions masked */
1609 disp = dseg_add_s4(cd, 0x027f);
1610 emit_fldcw_membase(cd, REG_ITMP1, disp);
1612 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1613 REG_SP, var->vv.regoff);
1616 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1618 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1621 /* XXX not thread safe! */
1622 disp = dseg_add_unique_s4(cd, 0);
1623 emit_fistpl_membase(cd, REG_ITMP1, disp);
1624 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1626 /* Round to nearest, 53-bit mode, exceptions masked */
1627 disp = dseg_add_s4(cd, 0x027f);
1628 emit_fldcw_membase(cd, REG_ITMP1, disp);
1630 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1633 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1634 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1637 emit_jcc(cd, CC_NE, disp);
1639 /* XXX: change this when we use registers */
1640 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1641 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1642 emit_call_reg(cd, REG_ITMP1);
1644 if (var->flags & INMEMORY) {
1645 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1648 M_INTMOVE(REG_RESULT, var->vv.regoff);
1652 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1654 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1655 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1657 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1660 /* Round to zero, 53-bit mode, exception masked */
1661 disp = dseg_add_s4(cd, 0x0e7f);
1662 emit_fldcw_membase(cd, REG_ITMP1, disp);
1664 var = VAROP(iptr->dst);
1665 var1 = VAROP(iptr->s1);
1667 if (var->flags & INMEMORY) {
1668 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1670 /* Round to nearest, 53-bit mode, exceptions masked */
1671 disp = dseg_add_s4(cd, 0x027f);
1672 emit_fldcw_membase(cd, REG_ITMP1, disp);
1674 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1675 REG_SP, var->vv.regoff);
1678 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1680 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1683 /* XXX not thread safe! */
1684 disp = dseg_add_unique_s4(cd, 0);
1685 emit_fistpl_membase(cd, REG_ITMP1, disp);
1686 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1688 /* Round to nearest, 53-bit mode, exceptions masked */
1689 disp = dseg_add_s4(cd, 0x027f);
1690 emit_fldcw_membase(cd, REG_ITMP1, disp);
1692 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1695 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1696 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1699 emit_jcc(cd, CC_NE, disp);
1701 /* XXX: change this when we use registers */
1702 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1703 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1704 emit_call_reg(cd, REG_ITMP1);
1706 if (var->flags & INMEMORY) {
1707 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1709 M_INTMOVE(REG_RESULT, var->vv.regoff);
1713 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1715 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1716 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1718 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1721 /* Round to zero, 53-bit mode, exception masked */
1722 disp = dseg_add_s4(cd, 0x0e7f);
1723 emit_fldcw_membase(cd, REG_ITMP1, disp);
1725 var = VAROP(iptr->dst);
1726 var1 = VAROP(iptr->s1);
1728 if (var->flags & INMEMORY) {
1729 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1731 /* Round to nearest, 53-bit mode, exceptions masked */
1732 disp = dseg_add_s4(cd, 0x027f);
1733 emit_fldcw_membase(cd, REG_ITMP1, disp);
1735 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1736 REG_SP, var->vv.regoff + 4);
1739 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1741 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1744 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1746 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1748 emit_jcc(cd, CC_NE, disp);
1750 emit_alu_imm_membase(cd, ALU_CMP, 0,
1751 REG_SP, var->vv.regoff);
1754 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1756 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1758 emit_jcc(cd, CC_NE, disp);
1760 /* XXX: change this when we use registers */
1761 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1762 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1763 emit_call_reg(cd, REG_ITMP1);
1764 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1765 emit_mov_reg_membase(cd, REG_RESULT2,
1766 REG_SP, var->vv.regoff + 4);
1769 log_text("F2L: longs have to be in memory");
1774 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1776 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1777 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1779 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1782 /* Round to zero, 53-bit mode, exception masked */
1783 disp = dseg_add_s4(cd, 0x0e7f);
1784 emit_fldcw_membase(cd, REG_ITMP1, disp);
1786 var = VAROP(iptr->dst);
1787 var1 = VAROP(iptr->s1);
1789 if (var->flags & INMEMORY) {
1790 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1792 /* Round to nearest, 53-bit mode, exceptions masked */
1793 disp = dseg_add_s4(cd, 0x027f);
1794 emit_fldcw_membase(cd, REG_ITMP1, disp);
1796 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1797 REG_SP, var->vv.regoff + 4);
1800 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1802 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1805 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1807 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1809 emit_jcc(cd, CC_NE, disp);
1811 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff);
1814 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1816 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1818 emit_jcc(cd, CC_NE, disp);
1820 /* XXX: change this when we use registers */
1821 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1822 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1823 emit_call_reg(cd, REG_ITMP1);
1824 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1825 emit_mov_reg_membase(cd, REG_RESULT2,
1826 REG_SP, var->vv.regoff + 4);
1829 log_text("D2L: longs have to be in memory");
1834 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1836 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1837 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1839 emit_store_dst(jd, iptr, d);
1842 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1844 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1845 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1847 emit_store_dst(jd, iptr, d);
1850 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1853 /* exchanged to skip fxch */
1854 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1855 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1856 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1857 /* emit_fxch(cd); */
1860 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1861 emit_jcc(cd, CC_E, 6);
1862 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1864 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1865 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1866 emit_jcc(cd, CC_B, 3 + 5);
1867 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1868 emit_jmp_imm(cd, 3);
1869 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1870 emit_store_dst(jd, iptr, d);
1873 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1876 /* exchanged to skip fxch */
1877 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1878 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1879 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1880 /* emit_fxch(cd); */
1883 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1884 emit_jcc(cd, CC_E, 3);
1885 emit_movb_imm_reg(cd, 1, REG_AH);
1887 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1888 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1889 emit_jcc(cd, CC_B, 3 + 5);
1890 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1891 emit_jmp_imm(cd, 3);
1892 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1893 emit_store_dst(jd, iptr, d);
1897 /* memory operations **************************************************/
1899 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1901 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1902 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1903 /* implicit null-pointer check */
1904 M_ILD(d, s1, OFFSET(java_array_t, size));
1905 emit_store_dst(jd, iptr, d);
1908 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1910 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1911 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1912 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1913 /* implicit null-pointer check */
1914 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1915 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray_t, data[0]),
1917 emit_store_dst(jd, iptr, d);
1920 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1922 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1923 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1924 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1925 /* implicit null-pointer check */
1926 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1927 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray_t, data[0]),
1929 emit_store_dst(jd, iptr, d);
1932 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1934 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1935 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1936 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1937 /* implicit null-pointer check */
1938 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1939 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray_t, data[0]),
1941 emit_store_dst(jd, iptr, d);
1944 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1946 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1947 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1948 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1949 /* implicit null-pointer check */
1950 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1951 emit_mov_memindex_reg(cd, OFFSET(java_intarray_t, data[0]),
1953 emit_store_dst(jd, iptr, d);
1956 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1958 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1959 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1960 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1961 /* implicit null-pointer check */
1962 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1964 var = VAROP(iptr->dst);
1966 assert(var->flags & INMEMORY);
1967 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]),
1968 s1, s2, 3, REG_ITMP3);
1969 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff);
1970 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]) + 4,
1971 s1, s2, 3, REG_ITMP3);
1972 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff + 4);
1975 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1977 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1978 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1979 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1980 /* implicit null-pointer check */
1981 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1982 emit_flds_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2, 2);
1983 emit_store_dst(jd, iptr, d);
1986 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
1988 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1989 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1990 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1991 /* implicit null-pointer check */
1992 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1993 emit_fldl_memindex(cd, OFFSET(java_doublearray_t, data[0]), s1, s2,3);
1994 emit_store_dst(jd, iptr, d);
1997 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
1999 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2000 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2001 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2002 /* implicit null-pointer check */
2003 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2004 emit_mov_memindex_reg(cd, OFFSET(java_objectarray_t, data[0]),
2006 emit_store_dst(jd, iptr, d);
2010 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2012 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2013 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2014 /* implicit null-pointer check */
2015 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2016 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2018 /* because EBP, ESI, EDI have no xH and xL nibbles */
2019 M_INTMOVE(s3, REG_ITMP3);
2022 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray_t, data[0]),
2026 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2028 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2029 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2030 /* implicit null-pointer check */
2031 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2032 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2033 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray_t, data[0]),
2037 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2039 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2040 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2041 /* implicit null-pointer check */
2042 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2043 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2044 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray_t, data[0]),
2048 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2050 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2051 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2052 /* implicit null-pointer check */
2053 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2054 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2055 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray_t, data[0]),
2059 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2061 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2062 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2063 /* implicit null-pointer check */
2064 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2066 var = VAROP(iptr->sx.s23.s3);
2068 assert(var->flags & INMEMORY);
2069 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff, REG_ITMP3);
2070 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray_t, data[0])
2072 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff + 4, REG_ITMP3);
2073 emit_mov_reg_memindex(cd, REG_ITMP3,
2074 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2077 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2079 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2080 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2081 /* implicit null-pointer check */
2082 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2083 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2084 emit_fstps_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2,2);
2087 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2089 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2090 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2091 /* implicit null-pointer check */
2092 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2093 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2094 emit_fstpl_memindex(cd, OFFSET(java_doublearray_t, data[0]),
2098 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2100 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2101 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2102 /* implicit null-pointer check */
2103 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2104 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2106 M_AST(s1, REG_SP, 0 * 4);
2107 M_AST(s3, REG_SP, 1 * 4);
2108 M_MOV_IMM(BUILTIN_FAST_canstore, REG_ITMP1);
2110 emit_arraystore_check(cd, iptr);
2112 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2113 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2114 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2115 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray_t, data[0]),
2119 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2121 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2122 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2123 /* implicit null-pointer check */
2124 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2125 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2126 OFFSET(java_bytearray_t, data[0]), s1, s2, 0);
2129 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2131 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2132 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2133 /* implicit null-pointer check */
2134 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2135 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2136 OFFSET(java_chararray_t, data[0]), s1, s2, 1);
2139 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2141 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2142 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2143 /* implicit null-pointer check */
2144 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2145 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2146 OFFSET(java_shortarray_t, data[0]), s1, s2, 1);
2149 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2151 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2152 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2153 /* implicit null-pointer check */
2154 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2155 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2156 OFFSET(java_intarray_t, data[0]), s1, s2, 2);
2159 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2161 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2162 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2163 /* implicit null-pointer check */
2164 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2165 emit_mov_imm_memindex(cd,
2166 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2167 OFFSET(java_longarray_t, data[0]), s1, s2, 3);
2168 emit_mov_imm_memindex(cd,
2169 ((s4)iptr->sx.s23.s3.constval) >> 31,
2170 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2173 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2175 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2176 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2177 /* implicit null-pointer check */
2178 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2179 emit_mov_imm_memindex(cd, 0,
2180 OFFSET(java_objectarray_t, data[0]), s1, s2, 2);
2184 case ICMD_GETSTATIC: /* ... ==> ..., value */
2186 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2187 uf = iptr->sx.s23.s3.uf;
2188 fieldtype = uf->fieldref->parseddesc.fd->type;
2191 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2195 fi = iptr->sx.s23.s3.fmiref->p.field;
2196 fieldtype = fi->type;
2197 disp = (intptr_t) fi->value;
2199 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2200 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2203 M_MOV_IMM(disp, REG_ITMP1);
2204 switch (fieldtype) {
2207 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2208 M_ILD(d, REG_ITMP1, 0);
2211 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2212 M_LLD(d, REG_ITMP1, 0);
2215 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2216 M_FLD(d, REG_ITMP1, 0);
2219 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2220 M_DLD(d, REG_ITMP1, 0);
2223 emit_store_dst(jd, iptr, d);
2226 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2228 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2229 uf = iptr->sx.s23.s3.uf;
2230 fieldtype = uf->fieldref->parseddesc.fd->type;
2233 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2236 fi = iptr->sx.s23.s3.fmiref->p.field;
2237 fieldtype = fi->type;
2238 disp = (intptr_t) fi->value;
2240 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2241 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2244 M_MOV_IMM(disp, REG_ITMP1);
2245 switch (fieldtype) {
2248 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2249 M_IST(s1, REG_ITMP1, 0);
2252 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2253 M_LST(s1, REG_ITMP1, 0);
2256 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2257 emit_fstps_membase(cd, REG_ITMP1, 0);
2260 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2261 emit_fstpl_membase(cd, REG_ITMP1, 0);
2266 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2267 /* val = value (in current instruction) */
2268 /* following NOP) */
2270 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2271 uf = iptr->sx.s23.s3.uf;
2272 fieldtype = uf->fieldref->parseddesc.fd->type;
2275 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2278 fi = iptr->sx.s23.s3.fmiref->p.field;
2279 fieldtype = fi->type;
2280 disp = (intptr_t) fi->value;
2282 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2283 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2286 M_MOV_IMM(disp, REG_ITMP1);
2287 switch (fieldtype) {
2290 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
2293 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2294 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
2301 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2303 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2304 emit_nullpointer_check(cd, iptr, s1);
2306 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2307 uf = iptr->sx.s23.s3.uf;
2308 fieldtype = uf->fieldref->parseddesc.fd->type;
2311 patcher_add_patch_ref(jd, PATCHER_getfield,
2312 iptr->sx.s23.s3.uf, 0);
2315 fi = iptr->sx.s23.s3.fmiref->p.field;
2316 fieldtype = fi->type;
2320 switch (fieldtype) {
2323 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2324 M_ILD32(d, s1, disp);
2327 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2328 M_LLD32(d, s1, disp);
2331 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2332 M_FLD32(d, s1, disp);
2335 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2336 M_DLD32(d, s1, disp);
2339 emit_store_dst(jd, iptr, d);
2342 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2344 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2345 emit_nullpointer_check(cd, iptr, s1);
2347 /* must be done here because of code patching */
2349 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2350 uf = iptr->sx.s23.s3.uf;
2351 fieldtype = uf->fieldref->parseddesc.fd->type;
2354 fi = iptr->sx.s23.s3.fmiref->p.field;
2355 fieldtype = fi->type;
2358 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2359 if (IS_2_WORD_TYPE(fieldtype))
2360 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2362 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2365 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2367 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2369 uf = iptr->sx.s23.s3.uf;
2372 patcher_add_patch_ref(jd, PATCHER_putfield, uf, 0);
2376 fi = iptr->sx.s23.s3.fmiref->p.field;
2380 switch (fieldtype) {
2383 M_IST32(s2, s1, disp);
2386 M_LST32(s2, s1, disp);
2389 emit_fstps_membase32(cd, s1, disp);
2392 emit_fstpl_membase32(cd, s1, disp);
2397 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2398 /* val = value (in current instruction) */
2399 /* following NOP) */
2401 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2402 emit_nullpointer_check(cd, iptr, s1);
2404 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2405 uf = iptr->sx.s23.s3.uf;
2406 fieldtype = uf->fieldref->parseddesc.fd->type;
2409 patcher_add_patch_ref(jd, PATCHER_putfieldconst,
2413 fi = iptr->sx.s23.s3.fmiref->p.field;
2414 fieldtype = fi->type;
2418 switch (fieldtype) {
2421 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2424 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2425 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2433 /* branch operations **************************************************/
2435 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2437 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2438 M_INTMOVE(s1, REG_ITMP1_XPTR);
2440 #ifdef ENABLE_VERIFIER
2441 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2442 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2443 iptr->sx.s23.s2.uc, 0);
2445 #endif /* ENABLE_VERIFIER */
2447 M_CALL_IMM(0); /* passing exception pc */
2448 M_POP(REG_ITMP2_XPC);
2450 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2454 case ICMD_GOTO: /* ... ==> ... */
2455 case ICMD_RET: /* ... ==> ... */
2457 #if defined(ENABLE_SSA)
2459 last_cmd_was_goto = true;
2461 /* In case of a Goto phimoves have to be inserted before the */
2464 codegen_emit_phi_moves(jd, bptr);
2467 emit_br(cd, iptr->dst.block);
2471 case ICMD_JSR: /* ... ==> ... */
2473 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
2477 case ICMD_IFNULL: /* ..., value ==> ... */
2478 case ICMD_IFNONNULL:
2480 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2482 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
2485 case ICMD_IFEQ: /* ..., value ==> ... */
2492 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2493 M_CMP_IMM(iptr->sx.val.i, s1);
2494 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
2497 case ICMD_IF_LEQ: /* ..., value ==> ... */
2499 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2500 if (iptr->sx.val.l == 0) {
2501 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2502 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2505 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2506 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2507 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2508 M_OR(REG_ITMP2, REG_ITMP1);
2510 emit_beq(cd, iptr->dst.block);
2513 case ICMD_IF_LLT: /* ..., value ==> ... */
2515 if (iptr->sx.val.l == 0) {
2516 /* If high 32-bit are less than zero, then the 64-bits
2518 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2520 emit_blt(cd, iptr->dst.block);
2523 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2524 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2525 emit_blt(cd, iptr->dst.block);
2527 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2528 emit_bult(cd, iptr->dst.block);
2532 case ICMD_IF_LLE: /* ..., value ==> ... */
2534 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2535 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2536 emit_blt(cd, iptr->dst.block);
2538 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2539 emit_bule(cd, iptr->dst.block);
2542 case ICMD_IF_LNE: /* ..., value ==> ... */
2544 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2545 if (iptr->sx.val.l == 0) {
2546 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2547 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2550 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2551 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2552 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2553 M_OR(REG_ITMP2, REG_ITMP1);
2555 emit_bne(cd, iptr->dst.block);
2558 case ICMD_IF_LGT: /* ..., value ==> ... */
2560 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2561 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2562 emit_bgt(cd, iptr->dst.block);
2564 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2565 emit_bugt(cd, iptr->dst.block);
2568 case ICMD_IF_LGE: /* ..., value ==> ... */
2570 if (iptr->sx.val.l == 0) {
2571 /* If high 32-bit are greater equal zero, then the
2573 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2575 emit_bge(cd, iptr->dst.block);
2578 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2579 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2580 emit_bgt(cd, iptr->dst.block);
2582 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2583 emit_buge(cd, iptr->dst.block);
2587 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2588 case ICMD_IF_ICMPNE:
2589 case ICMD_IF_ICMPLT:
2590 case ICMD_IF_ICMPGT:
2591 case ICMD_IF_ICMPGE:
2592 case ICMD_IF_ICMPLE:
2594 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2595 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2597 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);
2600 case ICMD_IF_ACMPEQ: /* ..., value, value ==> ... */
2601 case ICMD_IF_ACMPNE:
2603 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2604 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2606 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);
2609 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2611 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2612 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2613 M_INTMOVE(s1, REG_ITMP1);
2614 M_XOR(s2, REG_ITMP1);
2615 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2616 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2617 M_INTMOVE(s1, REG_ITMP2);
2618 M_XOR(s2, REG_ITMP2);
2619 M_OR(REG_ITMP1, REG_ITMP2);
2620 emit_beq(cd, iptr->dst.block);
2623 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2625 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2626 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2627 M_INTMOVE(s1, REG_ITMP1);
2628 M_XOR(s2, REG_ITMP1);
2629 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2630 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2631 M_INTMOVE(s1, REG_ITMP2);
2632 M_XOR(s2, REG_ITMP2);
2633 M_OR(REG_ITMP1, REG_ITMP2);
2634 emit_bne(cd, iptr->dst.block);
2637 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2639 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2640 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2642 emit_blt(cd, iptr->dst.block);
2643 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2644 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2647 emit_bult(cd, iptr->dst.block);
2650 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2652 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2653 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2655 emit_bgt(cd, iptr->dst.block);
2656 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2657 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2660 emit_bugt(cd, iptr->dst.block);
2663 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2665 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2666 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2668 emit_blt(cd, iptr->dst.block);
2669 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2670 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2673 emit_bule(cd, iptr->dst.block);
2676 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2678 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2679 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2681 emit_bgt(cd, iptr->dst.block);
2682 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2683 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2686 emit_buge(cd, iptr->dst.block);
2690 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2692 REPLACEMENT_POINT_RETURN(cd, iptr);
2693 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2694 M_INTMOVE(s1, REG_RESULT);
2695 goto nowperformreturn;
2697 case ICMD_LRETURN: /* ..., retvalue ==> ... */
2699 REPLACEMENT_POINT_RETURN(cd, iptr);
2700 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2701 M_LNGMOVE(s1, REG_RESULT_PACKED);
2702 goto nowperformreturn;
2704 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2706 REPLACEMENT_POINT_RETURN(cd, iptr);
2707 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2708 M_INTMOVE(s1, REG_RESULT);
2710 #ifdef ENABLE_VERIFIER
2711 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2712 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2713 iptr->sx.s23.s2.uc, 0);
2715 #endif /* ENABLE_VERIFIER */
2716 goto nowperformreturn;
2718 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2721 REPLACEMENT_POINT_RETURN(cd, iptr);
2722 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2723 goto nowperformreturn;
2725 case ICMD_RETURN: /* ... ==> ... */
2727 REPLACEMENT_POINT_RETURN(cd, iptr);
2733 p = cd->stackframesize;
2735 #if !defined(NDEBUG)
2736 emit_verbosecall_exit(jd);
2739 #if defined(ENABLE_THREADS)
2740 if (checksync && code_is_synchronized(code)) {
2741 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 8);
2743 /* we need to save the proper return value */
2744 switch (iptr->opc) {
2747 M_IST(REG_RESULT, REG_SP, rd->memuse * 8);
2751 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2755 emit_fstps_membase(cd, REG_SP, rd->memuse * 8);
2759 emit_fstpl_membase(cd, REG_SP, rd->memuse * 8);
2763 M_AST(REG_ITMP2, REG_SP, 0);
2764 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2767 /* and now restore the proper return value */
2768 switch (iptr->opc) {
2771 M_ILD(REG_RESULT, REG_SP, rd->memuse * 8);
2775 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2779 emit_flds_membase(cd, REG_SP, rd->memuse * 8);
2783 emit_fldl_membase(cd, REG_SP, rd->memuse * 8);
2789 /* restore saved registers */
2791 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2792 p--; M_ALD(rd->savintregs[i], REG_SP, p * 8);
2795 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2797 emit_fldl_membase(cd, REG_SP, p * 8);
2798 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2800 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2803 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2807 /* deallocate stack */
2809 if (cd->stackframesize)
2810 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
2817 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2820 branch_target_t *table;
2822 table = iptr->dst.table;
2824 l = iptr->sx.s23.s2.tablelow;
2825 i = iptr->sx.s23.s3.tablehigh;
2827 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2828 M_INTMOVE(s1, REG_ITMP1);
2831 M_ISUB_IMM(l, REG_ITMP1);
2837 M_CMP_IMM(i - 1, REG_ITMP1);
2838 emit_bugt(cd, table[0].block);
2840 /* build jump table top down and use address of lowest entry */
2845 dseg_add_target(cd, table->block);
2849 /* length of dataseg after last dseg_addtarget is used
2852 M_MOV_IMM(0, REG_ITMP2);
2854 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
2860 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2863 lookup_target_t *lookup;
2865 lookup = iptr->dst.lookup;
2867 i = iptr->sx.s23.s2.lookupcount;
2869 MCODECHECK((i<<2)+8);
2870 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2873 M_CMP_IMM(lookup->value, s1);
2874 emit_beq(cd, lookup->target.block);
2878 emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
2883 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
2885 REPLACEMENT_POINT_FORGC_BUILTIN(cd, iptr);
2887 bte = iptr->sx.s23.s3.bte;
2891 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2893 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2894 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
2895 case ICMD_INVOKEINTERFACE:
2897 REPLACEMENT_POINT_INVOKE(cd, iptr);
2899 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2900 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
2904 lm = iptr->sx.s23.s3.fmiref->p.method;
2905 md = lm->parseddesc;
2909 s3 = md->paramcount;
2911 MCODECHECK((s3 << 1) + 64);
2913 /* copy arguments to registers or stack location */
2915 for (s3 = s3 - 1; s3 >= 0; s3--) {
2916 var = VAR(iptr->sx.s23.s2.args[s3]);
2918 /* Already Preallocated (ARGVAR) ? */
2919 if (var->flags & PREALLOC)
2921 if (IS_INT_LNG_TYPE(var->type)) {
2922 if (!md->params[s3].inmemory) {
2923 log_text("No integer argument registers available!");
2927 if (IS_2_WORD_TYPE(var->type)) {
2928 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
2929 M_LST(d, REG_SP, md->params[s3].regoff);
2931 d = emit_load(jd, iptr, var, REG_ITMP1);
2932 M_IST(d, REG_SP, md->params[s3].regoff);
2937 if (!md->params[s3].inmemory) {
2938 s1 = md->params[s3].regoff;
2939 d = emit_load(jd, iptr, var, s1);
2943 d = emit_load(jd, iptr, var, REG_FTMP1);
2944 if (IS_2_WORD_TYPE(var->type))
2945 M_DST(d, REG_SP, md->params[s3].regoff);
2947 M_FST(d, REG_SP, md->params[s3].regoff);
2952 switch (iptr->opc) {
2954 d = md->returntype.type;
2956 if (bte->stub == NULL) {
2957 M_MOV_IMM(bte->fp, REG_ITMP1);
2960 M_MOV_IMM(bte->stub, REG_ITMP1);
2965 case ICMD_INVOKESPECIAL:
2966 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
2967 emit_nullpointer_check(cd, iptr, REG_ITMP1);
2970 case ICMD_INVOKESTATIC:
2972 unresolved_method *um = iptr->sx.s23.s3.um;
2974 patcher_add_patch_ref(jd, PATCHER_invokestatic_special,
2978 d = md->returntype.type;
2981 disp = (ptrint) lm->stubroutine;
2982 d = lm->parseddesc->returntype.type;
2985 M_MOV_IMM(disp, REG_ITMP2);
2989 case ICMD_INVOKEVIRTUAL:
2990 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
2991 emit_nullpointer_check(cd, iptr, s1);
2994 unresolved_method *um = iptr->sx.s23.s3.um;
2996 patcher_add_patch_ref(jd, PATCHER_invokevirtual, um, 0);
2999 d = md->returntype.type;
3002 s1 = OFFSET(vftbl_t, table[0]) +
3003 sizeof(methodptr) * lm->vftblindex;
3004 d = md->returntype.type;
3007 M_ALD(REG_METHODPTR, REG_ITMP1,
3008 OFFSET(java_object_t, vftbl));
3009 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3013 case ICMD_INVOKEINTERFACE:
3014 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3015 emit_nullpointer_check(cd, iptr, s1);
3018 unresolved_method *um = iptr->sx.s23.s3.um;
3020 patcher_add_patch_ref(jd, PATCHER_invokeinterface, um, 0);
3024 d = md->returntype.type;
3027 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3028 sizeof(methodptr) * lm->clazz->index;
3030 s2 = sizeof(methodptr) * (lm - lm->clazz->methods);
3032 d = md->returntype.type;
3035 M_ALD(REG_METHODPTR, REG_ITMP1,
3036 OFFSET(java_object_t, vftbl));
3037 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3038 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3043 /* store size of call code in replacement point */
3045 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3046 REPLACEMENT_POINT_FORGC_BUILTIN_RETURN(cd, iptr);
3048 /* d contains return type */
3050 if (d != TYPE_VOID) {
3051 #if defined(ENABLE_SSA)
3052 if ((ls == NULL) /* || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) */ ||
3053 (ls->lifetime[iptr->dst.varindex].type != UNUSED))
3054 /* a "living" stackslot */
3057 if (IS_INT_LNG_TYPE(d)) {
3058 if (IS_2_WORD_TYPE(d)) {
3059 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3060 M_LNGMOVE(REG_RESULT_PACKED, s1);
3063 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3064 M_INTMOVE(REG_RESULT, s1);
3068 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3070 emit_store_dst(jd, iptr, s1);
3076 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3078 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3079 /* object type cast-check */
3082 vftbl_t *supervftbl;
3085 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3091 super = iptr->sx.s23.s3.c.cls;
3092 superindex = super->index;
3093 supervftbl = super->vftbl;
3096 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3097 CODEGEN_CRITICAL_SECTION_NEW;
3099 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3101 /* if class is not resolved, check which code to call */
3103 if (super == NULL) {
3105 emit_label_beq(cd, BRANCH_LABEL_1);
3107 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3108 iptr->sx.s23.s3.c.ref, 0);
3110 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3111 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3112 emit_label_beq(cd, BRANCH_LABEL_2);
3115 /* interface checkcast code */
3117 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3118 if (super != NULL) {
3120 emit_label_beq(cd, BRANCH_LABEL_3);
3123 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3125 if (super == NULL) {
3126 patcher_add_patch_ref(jd, PATCHER_checkcast_interface,
3127 iptr->sx.s23.s3.c.ref,
3132 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3133 M_ISUB_IMM32(superindex, REG_ITMP3);
3134 /* XXX do we need this one? */
3136 emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);
3138 M_ALD32(REG_ITMP3, REG_ITMP2,
3139 OFFSET(vftbl_t, interfacetable[0]) -
3140 superindex * sizeof(methodptr*));
3142 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);
3145 emit_label_br(cd, BRANCH_LABEL_4);
3147 emit_label(cd, BRANCH_LABEL_3);
3150 /* class checkcast code */
3152 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3153 if (super == NULL) {
3154 emit_label(cd, BRANCH_LABEL_2);
3158 emit_label_beq(cd, BRANCH_LABEL_5);
3161 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3163 if (super == NULL) {
3164 patcher_add_patch_ref(jd, PATCHER_checkcast_class,
3165 iptr->sx.s23.s3.c.ref,
3169 M_MOV_IMM(supervftbl, REG_ITMP3);
3171 CODEGEN_CRITICAL_SECTION_START;
3173 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3175 /* if (s1 != REG_ITMP1) { */
3176 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3177 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3178 /* #if defined(ENABLE_THREADS) */
3179 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3181 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
3184 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3185 M_ISUB(REG_ITMP3, REG_ITMP2);
3186 M_MOV_IMM(supervftbl, REG_ITMP3);
3187 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3189 CODEGEN_CRITICAL_SECTION_END;
3193 M_CMP(REG_ITMP3, REG_ITMP2);
3194 emit_classcast_check(cd, iptr, BRANCH_ULE, REG_ITMP3, s1);
3197 emit_label(cd, BRANCH_LABEL_5);
3200 if (super == NULL) {
3201 emit_label(cd, BRANCH_LABEL_1);
3202 emit_label(cd, BRANCH_LABEL_4);
3205 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3208 /* array type cast-check */
3210 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3211 M_AST(s1, REG_SP, 0 * 4);
3213 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3214 patcher_add_patch_ref(jd, PATCHER_builtin_arraycheckcast,
3215 iptr->sx.s23.s3.c.ref, 0);
3218 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3219 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3222 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3224 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);
3226 d = codegen_reg_of_dst(jd, iptr, s1);
3230 emit_store_dst(jd, iptr, d);
3233 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3237 vftbl_t *supervftbl;
3240 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3246 super = iptr->sx.s23.s3.c.cls;
3247 superindex = super->index;
3248 supervftbl = super->vftbl;
3251 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3252 CODEGEN_CRITICAL_SECTION_NEW;
3254 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3255 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
3258 M_INTMOVE(s1, REG_ITMP1);
3264 /* if class is not resolved, check which code to call */
3266 if (super == NULL) {
3268 emit_label_beq(cd, BRANCH_LABEL_1);
3270 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3271 iptr->sx.s23.s3.c.ref, 0);
3273 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3274 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3275 emit_label_beq(cd, BRANCH_LABEL_2);
3278 /* interface instanceof code */
3280 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3281 if (super != NULL) {
3283 emit_label_beq(cd, BRANCH_LABEL_3);
3286 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3288 if (super == NULL) {
3289 patcher_add_patch_ref(jd, PATCHER_instanceof_interface,
3290 iptr->sx.s23.s3.c.ref, 0);
3294 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3295 M_ISUB_IMM32(superindex, REG_ITMP3);
3298 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3299 6 /* jcc */ + 5 /* mov_imm_reg */);
3302 M_ALD32(REG_ITMP1, REG_ITMP1,
3303 OFFSET(vftbl_t, interfacetable[0]) -
3304 superindex * sizeof(methodptr*));
3306 /* emit_setcc_reg(cd, CC_A, d); */
3307 /* emit_jcc(cd, CC_BE, 5); */
3312 emit_label_br(cd, BRANCH_LABEL_4);
3314 emit_label(cd, BRANCH_LABEL_3);
3317 /* class instanceof code */
3319 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3320 if (super == NULL) {
3321 emit_label(cd, BRANCH_LABEL_2);
3325 emit_label_beq(cd, BRANCH_LABEL_5);
3328 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3330 if (super == NULL) {
3331 patcher_add_patch_ref(jd, PATCHER_instanceof_class,
3332 iptr->sx.s23.s3.c.ref, 0);
3335 M_MOV_IMM(supervftbl, REG_ITMP2);
3337 CODEGEN_CRITICAL_SECTION_START;
3339 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3340 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3341 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3343 CODEGEN_CRITICAL_SECTION_END;
3345 M_ISUB(REG_ITMP2, REG_ITMP1);
3346 M_CLR(d); /* may be REG_ITMP2 */
3347 M_CMP(REG_ITMP3, REG_ITMP1);
3352 emit_label(cd, BRANCH_LABEL_5);
3355 if (super == NULL) {
3356 emit_label(cd, BRANCH_LABEL_1);
3357 emit_label(cd, BRANCH_LABEL_4);
3360 emit_store_dst(jd, iptr, d);
3364 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3366 /* check for negative sizes and copy sizes to stack if necessary */
3368 MCODECHECK((iptr->s1.argcount << 1) + 64);
3370 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3371 /* copy SAVEDVAR sizes to stack */
3372 var = VAR(iptr->sx.s23.s2.args[s1]);
3374 /* Already Preallocated? */
3375 if (!(var->flags & PREALLOC)) {
3376 if (var->flags & INMEMORY) {
3377 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff);
3378 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3381 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3385 /* is a patcher function set? */
3387 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3388 patcher_add_patch_ref(jd, PATCHER_builtin_multianewarray,
3389 iptr->sx.s23.s3.c.ref, 0);
3395 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3397 /* a0 = dimension count */
3399 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3401 /* a1 = arraydescriptor */
3403 M_IST_IMM(disp, REG_SP, 1 * 4);
3405 /* a2 = pointer to dimensions = stack pointer */
3407 M_MOV(REG_SP, REG_ITMP1);
3408 M_AADD_IMM(3 * 4, REG_ITMP1);
3409 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3411 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3414 /* check for exception before result assignment */
3416 emit_exception_check(cd, iptr);
3418 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3419 M_INTMOVE(REG_RESULT, s1);
3420 emit_store_dst(jd, iptr, s1);
3423 #if defined(ENABLE_SSA)
3424 case ICMD_GETEXCEPTION:
3425 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
3426 M_INTMOVE(REG_ITMP1, d);
3427 emit_store_dst(jd, iptr, d);
3431 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3436 } /* for instruction */
3440 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3443 #if defined(ENABLE_SSA)
3446 /* by edge splitting, in Blocks with phi moves there can only */
3447 /* be a goto as last command, no other Jump/Branch Command */
3449 if (!last_cmd_was_goto)
3450 codegen_emit_phi_moves(jd, bptr);
3455 /* At the end of a basic block we may have to append some nops,
3456 because the patcher stub calling code might be longer than the
3457 actual instruction. So codepatching does not change the
3458 following block unintentionally. */
3460 if (cd->mcodeptr < cd->lastmcodeptr) {
3461 while (cd->mcodeptr < cd->lastmcodeptr) {
3466 } /* if (bptr -> flags >= BBREACHED) */
3467 } /* for basic block */
3469 /* generate stubs */
3471 emit_patcher_traps(jd);
3473 /* everything's ok */
3479 /* codegen_emit_stub_native ****************************************************

3481 Emits a stub routine which calls a native method.

   Stub structure (as laid out below): build the stub stackframe,
   register a stackframeinfo via codegen_start_native_call, copy the
   Java-side arguments into the native calling convention's stack
   slots, for ACC_NATIVE methods prepend the JNI env (and, for static
   methods, the class) arguments, call the native function through a
   data-segment slot, save the return value across
   codegen_finish_native_call, and finally forward a pending exception
   to asm_handle_nat_exception.

   IN:
       jd ........... jitdata of the method the stub is generated for
       nmd .......... method descriptor of the native function; its
                      parameter offsets are indexed by j = i + skipparams
                      in the copy loop below
       f ............ the native function to call; it is placed in the
                      data segment (dseg_add_functionptr) and loaded
                      indirectly rather than called with an immediate
       skipparams ... number of hidden leading parameters present in
                      nmd but not in the Java-side descriptor
                      (presumably the JNI env/class slots -- see the
                      ACC_NATIVE block below; TODO confirm at callers)

3483 *******************************************************************************/
3485 void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f, int skipparams)
3491 int i, j; /* count variables */
3495 /* get required compiler data */
3501 /* set some variables */
3505 /* calculate stackframe size */

   /* Frame size in 8-byte slots: room for the stackframeinfo, the
      localref table, and 4 outgoing argument words used by the
      codegen_start_native_call sequence below. */

3507 cd->stackframesize =
3508 sizeof(stackframeinfo_t) / SIZEOF_VOID_P +
3509 sizeof(localref_table) / SIZEOF_VOID_P +
3510 4 + /* 4 arguments (start_native_call) */
3513 /* keep stack 16-byte aligned */

   /* Forced odd so that the actual frame (stackframesize * 8 + 4,
      see M_ASUB_IMM below) preserves 16-byte stack alignment. */

3515 ALIGN_ODD(cd->stackframesize);
3517 /* create method header */
3519 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3520 (void) dseg_add_unique_s4(cd, cd->stackframesize * 8 + 4); /* FrameSize */
3521 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3522 (void) dseg_add_unique_s4(cd, 0); /* IntSave */
3523 (void) dseg_add_unique_s4(cd, 0); /* FltSave */
3525 #if defined(ENABLE_PROFILING)
3526 /* generate native method profiling code */
3528 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
3529 /* count frequency */

   /* Increment code->frequency in place: codeinfo pointer in ITMP1,
      then an add-immediate on the frequency field. */

3531 M_MOV_IMM(code, REG_ITMP1);
3532 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
3536 /* calculate stackframe size for native function */
3538 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
3540 /* Mark the whole fpu stack as free for native functions (only for saved */
3541 /* register count == 0). */
3543 emit_ffree_reg(cd, 0);
3544 emit_ffree_reg(cd, 1);
3545 emit_ffree_reg(cd, 2);
3546 emit_ffree_reg(cd, 3);
3547 emit_ffree_reg(cd, 4);
3548 emit_ffree_reg(cd, 5);
3549 emit_ffree_reg(cd, 6);
3550 emit_ffree_reg(cd, 7);
3552 #if defined(ENABLE_GC_CACAO)
3553 /* remember callee saved int registers in stackframeinfo (GC may need to */
3554 /* recover them during a collection). */

   /* disp = SP-relative offset of the intregs[] array inside the
      stackframeinfo_t, which sits at the top of the stub frame. */

3556 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3557 OFFSET(stackframeinfo_t, intregs);
3559 for (i = 0; i < INT_SAV_CNT; i++)
3560 M_AST(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3563 /* prepare data structures for native function call */

   /* Outgoing args for codegen_start_native_call: arg0 = current SP
      (the stub's data area), arg1 = 0.  NOTE(review): exact argument
      semantics defined by codegen_start_native_call in
      codegen-common.c -- confirm there. */

3565 M_MOV(REG_SP, REG_ITMP1);
3566 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3567 M_IST_IMM(0, REG_SP, 1 * 4);
3570 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
3573 /* remember class argument */

   /* For static methods the class pointer arrives in REG_RESULT
      (presumably returned by codegen_start_native_call); park it in
      ITMP3 until the JNI argument slots are filled below. */

3575 if (m->flags & ACC_STATIC)
3576 M_MOV(REG_RESULT, REG_ITMP3);
3578 /* Copy or spill arguments to new locations. */

   /* i indexes the Java-side descriptor md, j the native-side
      descriptor nmd (shifted by skipparams to leave room for the
      hidden env/class parameters). */

3580 for (i = md->paramcount - 1, j = i + skipparams; i >= 0; i--, j--) {
3581 if (!md->params[i].inmemory)

   /* s1: the argument's offset in the caller's frame rebased to the
      new SP -- stub frame (stackframesize * 8 + 4) plus the 4-byte
      return address.  s2: its slot in the native frame. */

3584 s1 = md->params[i].regoff + cd->stackframesize * 8 + 8;
3585 s2 = nmd->params[j].regoff;
3587 /* float/double in memory can be copied like int/longs */
3589 switch (md->paramtypes[i].type) {
3593 M_ILD(REG_ITMP1, REG_SP, s1);
3594 M_IST(REG_ITMP1, REG_SP, s2);
3598 M_LLD(REG_ITMP12_PACKED, REG_SP, s1);
3599 M_LST(REG_ITMP12_PACKED, REG_SP, s2);
3604 /* Handle native Java methods. */
3606 if (m->flags & ACC_NATIVE) {
3607 /* if function is static, put class into second argument */
3609 if (m->flags & ACC_STATIC)
3610 M_AST(REG_ITMP3, REG_SP, 1 * 4);
3612 /* put env into first argument */
3614 M_AST_IMM(_Jv_env, REG_SP, 0 * 4);
3617 /* Call the native function. */

   /* Indirect call: f lives in the data segment at displacement
      disp.  The 0 immediate moved into ITMP3 is presumably
      relocated/patched to the dseg base address -- TODO confirm
      against emit_mov_imm_reg's relocation handling. */

3619 disp = dseg_add_functionptr(cd, f);
3620 emit_mov_imm_reg(cd, 0, REG_ITMP3);
3622 M_ALD(REG_ITMP1, REG_ITMP3, disp);
3625 /* save return value */

   /* Park the native return value in the stub frame (slot SP + 8)
      while codegen_finish_native_call runs; float/double results sit
      on the x87 stack and are stored with fst(p)s/fst(p)l. */

3627 switch (md->returntype.type) {
3630 M_IST(REG_RESULT, REG_SP, 1 * 8);
3633 M_LST(REG_RESULT_PACKED, REG_SP, 1 * 8);
3636 emit_fsts_membase(cd, REG_SP, 1 * 8);
3639 emit_fstl_membase(cd, REG_SP, 1 * 8);
3645 /* remove native stackframe info */
3647 M_MOV(REG_SP, REG_ITMP1);
3648 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3649 M_IST_IMM(0, REG_SP, 1 * 4);
3652 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);

   /* codegen_finish_native_call's return value (moved to ITMP2 here)
      becomes the exception pointer tested/forwarded below -- NULL
      presumably means no pending exception; TODO confirm. */

3654 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
3656 /* restore return value */
3658 switch (md->returntype.type) {
3661 M_ILD(REG_RESULT, REG_SP, 1 * 8);
3664 M_LLD(REG_RESULT_PACKED, REG_SP, 1 * 8);
3667 emit_flds_membase(cd, REG_SP, 1 * 8);
3670 emit_fldl_membase(cd, REG_SP, 1 * 8);
3676 #if defined(ENABLE_GC_CACAO)
3677 /* restore callee saved int registers from stackframeinfo (GC might have */
3678 /* modified them during a collection). */
3680 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3681 OFFSET(stackframeinfo_t, intregs);
3683 for (i = 0; i < INT_SAV_CNT; i++)
3684 M_ALD(abi_registers_integer_saved[i], REG_SP, disp + i * 4);

   /* deallocate the stub frame (mirror of the M_ASUB_IMM above) */

3687 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
3689 /* check for exception */
3696 /* handle exception */

   /* Exception path: ITMP2 (exception object) -> XPTR; the return
      address (now at SP + 0 after frame teardown) -> XPC, backed up
      by 2 bytes so it points into the calling instruction --
      NOTE(review): confirm 2 matches the call encoding used here. */

3698 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
3699 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
3700 M_ASUB_IMM(2, REG_ITMP2_XPC);
3702 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
3708 * These are local overrides for various environment variables in Emacs.
3709 * Please do not remove this and leave it at the end of the file, where
3710 * Emacs will automagically detect them.
3711 * ---------------------------------------------------------------------
3714 * indent-tabs-mode: t
3718 * vim:noexpandtab:sw=4:ts=4: