1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007, 2008
4 CACAOVM - Verein zur Foerderung der freien virtuellen Maschine CACAO
6 This file is part of CACAO.
8 This program is free software; you can redistribute it and/or
9 modify it under the terms of the GNU General Public License as
10 published by the Free Software Foundation; either version 2, or (at
11 your option) any later version.
13 This program is distributed in the hope that it will be useful, but
14 WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with this program; if not, write to the Free Software
20 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
34 #include "vm/jit/i386/md-abi.h"
36 #include "vm/jit/i386/codegen.h"
37 #include "vm/jit/i386/emit.h"
39 #include "mm/memory.h"
40 #include "native/jni.h"
41 #include "native/localref.h"
42 #include "native/native.h"
44 #include "threads/lock-common.h"
46 #include "vm/builtin.h"
47 #include "vm/exceptions.h"
48 #include "vm/global.h"
49 #include "vm/primitive.h"
50 #include "vm/stringlocal.h"
53 #include "vm/jit/abi.h"
54 #include "vm/jit/asmpart.h"
55 #include "vm/jit/codegen-common.h"
56 #include "vm/jit/dseg.h"
57 #include "vm/jit/emit-common.h"
58 #include "vm/jit/jit.h"
59 #include "vm/jit/linenumbertable.h"
60 #include "vm/jit/parse.h"
61 #include "vm/jit/patcher-common.h"
62 #include "vm/jit/reg.h"
63 #include "vm/jit/replace.h"
64 #include "vm/jit/stacktrace.h"
65 #include "vm/jit/trap.h"
67 #if defined(ENABLE_SSA)
68 # include "vm/jit/optimizing/lsra.h"
69 # include "vm/jit/optimizing/ssa.h"
70 #elif defined(ENABLE_LSRA)
71 # include "vm/jit/allocator/lsra.h"
74 #include "vmcore/loader.h"
75 #include "vmcore/options.h"
76 #include "vmcore/utf8.h"
79 /* codegen_emit ****************************************************************
81 Generates machine code.
83 *******************************************************************************/
85 bool codegen_emit(jitdata *jd)
91 s4 len, s1, s2, s3, d, disp;
92 int align_off; /* offset for alignment compensation */
97 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
98 builtintable_entry *bte;
101 unresolved_field *uf;
104 #if defined(ENABLE_SSA)
106 bool last_cmd_was_goto;
108 last_cmd_was_goto = false;
112 /* get required compiler data */
119 /* prevent compiler warnings */
130 s4 savedregs_num = 0;
133 /* space to save used callee saved registers */
135 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
136 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
138 cd->stackframesize = rd->memuse + savedregs_num;
141 #if defined(ENABLE_THREADS)
142 /* space to save argument of monitor_enter */
144 if (checksync && code_is_synchronized(code))
145 cd->stackframesize++;
148 /* create method header */
150 /* Keep stack of non-leaf functions 16-byte aligned. */
152 if (!code_is_leafmethod(code)) {
153 ALIGN_ODD(cd->stackframesize);
156 align_off = cd->stackframesize ? 4 : 0;
158 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
159 (void) dseg_add_unique_s4(
160 cd, cd->stackframesize * 8 + align_off); /* FrameSize */
162 code->synchronizedoffset = rd->memuse * 8;
164 /* REMOVEME: We still need it for exception handling in assembler. */
166 if (code_is_leafmethod(code))
167 (void) dseg_add_unique_s4(cd, 1); /* IsLeaf */
169 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
171 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
172 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
174 #if defined(ENABLE_PROFILING)
175 /* generate method profiling code */
177 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
178 /* count frequency */
180 M_MOV_IMM(code, REG_ITMP3);
181 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
185 /* create stack frame (if necessary) */
187 if (cd->stackframesize)
189 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
191 /* save return address and used callee saved registers */
193 p = cd->stackframesize;
194 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
195 p--; M_AST(rd->savintregs[i], REG_SP, p * 8);
197 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
198 p--; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 8);
201 /* take arguments out of register or stack frame */
206 for (p = 0, l = 0; p < md->paramcount; p++) {
207 t = md->paramtypes[p].type;
209 varindex = jd->local_map[l * 5 + t];
210 #if defined(ENABLE_SSA)
212 if (varindex != UNUSED)
213 varindex = ls->var_0[varindex];
214 if ((varindex != UNUSED) && (ls->lifetime[varindex].type == UNUSED))
219 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
222 if (varindex == UNUSED)
226 s1 = md->params[p].regoff;
229 if (IS_INT_LNG_TYPE(t)) { /* integer args */
230 if (!md->params[p].inmemory) { /* register arguments */
231 log_text("integer register argument");
233 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
234 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
236 else { /* reg arg -> spilled */
237 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
241 if (!(var->flags & INMEMORY)) {
243 cd->stackframesize * 8 + 4 + align_off + s1);
246 if (!IS_2_WORD_TYPE(t)) {
247 #if defined(ENABLE_SSA)
248 /* no copy avoiding by now possible with SSA */
250 emit_mov_membase_reg( /* + 4 for return address */
252 cd->stackframesize * 8 + s1 + 4 + align_off,
254 emit_mov_reg_membase(
255 cd, REG_ITMP1, REG_SP, var->vv.regoff);
258 #endif /*defined(ENABLE_SSA)*/
259 /* reuse stackslot */
260 var->vv.regoff = cd->stackframesize * 8 + 4 +
265 #if defined(ENABLE_SSA)
266 /* no copy avoiding by now possible with SSA */
268 emit_mov_membase_reg( /* + 4 for return address */
270 cd->stackframesize * 8 + s1 + 4 + align_off,
272 emit_mov_reg_membase(
273 cd, REG_ITMP1, REG_SP, var->vv.regoff);
274 emit_mov_membase_reg( /* + 4 for return address */
276 cd->stackframesize * 8 + s1 + 4 + 4 + align_off,
278 emit_mov_reg_membase(
279 cd, REG_ITMP1, REG_SP, var->vv.regoff + 4);
282 #endif /*defined(ENABLE_SSA)*/
283 /* reuse stackslot */
284 var->vv.regoff = cd->stackframesize * 8 + 8 + s1;
289 else { /* floating args */
290 if (!md->params[p].inmemory) { /* register arguments */
291 log_text("There are no float argument registers!");
293 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
294 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
295 } else { /* reg arg -> spilled */
296 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 8 */
300 else { /* stack arguments */
301 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
305 cd->stackframesize * 8 + s1 + 4 + align_off);
307 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
313 cd->stackframesize * 8 + s1 + 4 + align_off);
315 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
318 } else { /* stack-arg -> spilled */
319 #if defined(ENABLE_SSA)
320 /* no copy avoiding by now possible with SSA */
322 emit_mov_membase_reg(
324 cd->stackframesize * 8 + s1 + 4 + align_off,
326 emit_mov_reg_membase(
327 cd, REG_ITMP1, REG_SP, var->vv.regoff);
331 cd->stackframesize * 8 + s1 + 4 + align_off);
332 emit_fstps_membase(cd, REG_SP, var->vv.regoff);
337 cd->stackframesize * 8 + s1 + 4 + align_off);
338 emit_fstpl_membase(cd, REG_SP, var->vv.regoff);
342 #endif /*defined(ENABLE_SSA)*/
343 /* reuse stackslot */
344 var->vv.regoff = cd->stackframesize * 8 + 4 +
351 /* call monitorenter function */
353 #if defined(ENABLE_THREADS)
354 if (checksync && code_is_synchronized(code)) {
357 if (m->flags & ACC_STATIC) {
358 M_MOV_IMM(&m->clazz->object.header, REG_ITMP1);
361 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 8 + 4 + align_off);
364 M_ALD_MEM(REG_ITMP1, TRAP_NullPointerException);
367 M_AST(REG_ITMP1, REG_SP, s1 * 8);
368 M_AST(REG_ITMP1, REG_SP, 0 * 4);
369 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
375 emit_verbosecall_enter(jd);
380 #if defined(ENABLE_SSA)
381 /* with SSA the Header is Basic Block 0 - insert phi Moves if necessary */
383 codegen_emit_phi_moves(jd, ls->basicblocks[0]);
386 /* end of header generation */
388 /* create replacement points */
390 REPLACEMENT_POINTS_INIT(cd, jd);
392 /* walk through all basic blocks */
394 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
396 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
398 if (bptr->flags >= BBREACHED) {
399 /* branch resolving */
401 codegen_resolve_branchrefs(cd, bptr);
403 /* handle replacement points */
405 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
407 #if defined(ENABLE_REPLACEMENT)
408 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
409 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
411 disp = (s4) &(m->hitcountdown);
412 M_ISUB_IMM_MEMABS(1, disp);
418 /* copy interface registers to their destination */
423 #if defined(ENABLE_PROFILING)
424 /* generate basic block profiling code */
426 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
427 /* count frequency */
429 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
430 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
434 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
435 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
438 # if defined(ENABLE_SSA)
440 last_cmd_was_goto = false;
444 var = VAR(bptr->invars[len]);
445 if (bptr->type != BBTYPE_STD) {
446 if (!IS_2_WORD_TYPE(var->type)) {
447 #if !defined(ENABLE_SSA)
448 if (bptr->type == BBTYPE_EXH) {
449 d = codegen_reg_of_var(0, var, REG_ITMP1);
450 M_INTMOVE(REG_ITMP1, d);
451 emit_store(jd, NULL, var, d);
456 log_text("copy interface registers(EXH, SBR): longs \
457 have to be in memory (begin 1)");
465 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
469 var = VAR(bptr->invars[len]);
470 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
471 if (!IS_2_WORD_TYPE(var->type)) {
472 if (bptr->type == BBTYPE_EXH) {
473 d = codegen_reg_of_var(0, var, REG_ITMP1);
474 M_INTMOVE(REG_ITMP1, d);
475 emit_store(jd, NULL, var, d);
479 log_text("copy interface registers: longs have to be in \
486 assert((var->flags & INOUT));
491 /* walk through all instructions */
496 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
497 if (iptr->line != currentline) {
498 linenumbertable_list_entry_add(cd, iptr->line);
499 currentline = iptr->line;
502 MCODECHECK(1024); /* 1kB should be enough */
505 case ICMD_NOP: /* ... ==> ... */
506 case ICMD_POP: /* ..., value ==> ... */
507 case ICMD_POP2: /* ..., value, value ==> ... */
510 case ICMD_INLINE_START:
512 REPLACEMENT_POINT_INLINE_START(cd, iptr);
515 case ICMD_INLINE_BODY:
517 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
518 linenumbertable_list_entry_add_inline_start(cd, iptr);
519 linenumbertable_list_entry_add(cd, iptr->line);
522 case ICMD_INLINE_END:
524 linenumbertable_list_entry_add_inline_end(cd, iptr);
525 linenumbertable_list_entry_add(cd, iptr->line);
528 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
530 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
531 emit_nullpointer_check(cd, iptr, s1);
534 /* constant operations ************************************************/
536 case ICMD_ICONST: /* ... ==> ..., constant */
538 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
539 ICONST(d, iptr->sx.val.i);
540 emit_store_dst(jd, iptr, d);
543 case ICMD_LCONST: /* ... ==> ..., constant */
545 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
546 LCONST(d, iptr->sx.val.l);
547 emit_store_dst(jd, iptr, d);
550 case ICMD_FCONST: /* ... ==> ..., constant */
552 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
553 if (iptr->sx.val.f == 0.0) {
557 if (iptr->sx.val.i == 0x80000000) {
561 } else if (iptr->sx.val.f == 1.0) {
564 } else if (iptr->sx.val.f == 2.0) {
570 disp = dseg_add_float(cd, iptr->sx.val.f);
571 emit_mov_imm_reg(cd, 0, REG_ITMP1);
573 emit_flds_membase(cd, REG_ITMP1, disp);
575 emit_store_dst(jd, iptr, d);
578 case ICMD_DCONST: /* ... ==> ..., constant */
580 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
581 if (iptr->sx.val.d == 0.0) {
585 if (iptr->sx.val.l == 0x8000000000000000LL) {
589 } else if (iptr->sx.val.d == 1.0) {
592 } else if (iptr->sx.val.d == 2.0) {
598 disp = dseg_add_double(cd, iptr->sx.val.d);
599 emit_mov_imm_reg(cd, 0, REG_ITMP1);
601 emit_fldl_membase(cd, REG_ITMP1, disp);
603 emit_store_dst(jd, iptr, d);
606 case ICMD_ACONST: /* ... ==> ..., constant */
608 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
610 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
611 patcher_add_patch_ref(jd, PATCHER_aconst,
612 iptr->sx.val.c.ref, 0);
617 if (iptr->sx.val.anyptr == NULL)
620 M_MOV_IMM(iptr->sx.val.anyptr, d);
622 emit_store_dst(jd, iptr, d);
626 /* load/store/copy/move operations ************************************/
644 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
649 /* integer operations *************************************************/
651 case ICMD_INEG: /* ..., value ==> ..., - value */
653 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
654 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
657 emit_store_dst(jd, iptr, d);
660 case ICMD_LNEG: /* ..., value ==> ..., - value */
662 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
663 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
665 M_NEG(GET_LOW_REG(d));
666 M_IADDC_IMM(0, GET_HIGH_REG(d));
667 M_NEG(GET_HIGH_REG(d));
668 emit_store_dst(jd, iptr, d);
671 case ICMD_I2L: /* ..., value ==> ..., value */
673 s1 = emit_load_s1(jd, iptr, EAX);
674 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
677 M_LNGMOVE(EAX_EDX_PACKED, d);
678 emit_store_dst(jd, iptr, d);
681 case ICMD_L2I: /* ..., value ==> ..., value */
683 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
684 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
686 emit_store_dst(jd, iptr, d);
689 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
691 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
692 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
696 emit_store_dst(jd, iptr, d);
699 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
701 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
702 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
704 emit_store_dst(jd, iptr, d);
707 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
709 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
710 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
712 emit_store_dst(jd, iptr, d);
716 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
718 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
719 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
720 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
727 emit_store_dst(jd, iptr, d);
731 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
732 /* sx.val.i = constant */
734 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
735 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
737 /* `inc reg' is slower on p4's (regarding to ia32
738 optimization reference manual and benchmarks) and as
742 M_IADD_IMM(iptr->sx.val.i, d);
743 emit_store_dst(jd, iptr, d);
746 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
748 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
749 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
750 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
751 M_INTMOVE(s1, GET_LOW_REG(d));
752 M_IADD(s2, GET_LOW_REG(d));
753 /* don't use REG_ITMP1 */
754 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
755 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
756 M_INTMOVE(s1, GET_HIGH_REG(d));
757 M_IADDC(s2, GET_HIGH_REG(d));
758 emit_store_dst(jd, iptr, d);
761 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
762 /* sx.val.l = constant */
764 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
765 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
767 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
768 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
769 emit_store_dst(jd, iptr, d);
772 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
774 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
775 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
776 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
778 M_INTMOVE(s1, REG_ITMP1);
779 M_ISUB(s2, REG_ITMP1);
780 M_INTMOVE(REG_ITMP1, d);
786 emit_store_dst(jd, iptr, d);
789 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
790 /* sx.val.i = constant */
792 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
793 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
795 M_ISUB_IMM(iptr->sx.val.i, d);
796 emit_store_dst(jd, iptr, d);
799 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
801 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
802 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
803 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
804 if (s2 == GET_LOW_REG(d)) {
805 M_INTMOVE(s1, REG_ITMP1);
806 M_ISUB(s2, REG_ITMP1);
807 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
810 M_INTMOVE(s1, GET_LOW_REG(d));
811 M_ISUB(s2, GET_LOW_REG(d));
813 /* don't use REG_ITMP1 */
814 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
815 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
816 if (s2 == GET_HIGH_REG(d)) {
817 M_INTMOVE(s1, REG_ITMP2);
818 M_ISUBB(s2, REG_ITMP2);
819 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
822 M_INTMOVE(s1, GET_HIGH_REG(d));
823 M_ISUBB(s2, GET_HIGH_REG(d));
825 emit_store_dst(jd, iptr, d);
828 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
829 /* sx.val.l = constant */
831 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
832 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
834 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
835 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
836 emit_store_dst(jd, iptr, d);
839 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
841 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
842 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
843 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
850 emit_store_dst(jd, iptr, d);
853 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
854 /* sx.val.i = constant */
856 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
857 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
858 M_IMUL_IMM(s1, iptr->sx.val.i, d);
859 emit_store_dst(jd, iptr, d);
862 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
864 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
865 s2 = emit_load_s2_low(jd, iptr, EDX);
866 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
868 M_INTMOVE(s1, REG_ITMP2);
869 M_IMUL(s2, REG_ITMP2);
871 s1 = emit_load_s1_low(jd, iptr, EAX);
872 s2 = emit_load_s2_high(jd, iptr, EDX);
875 M_IADD(EDX, REG_ITMP2);
877 s1 = emit_load_s1_low(jd, iptr, EAX);
878 s2 = emit_load_s2_low(jd, iptr, EDX);
881 M_INTMOVE(EAX, GET_LOW_REG(d));
882 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
884 emit_store_dst(jd, iptr, d);
887 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
888 /* sx.val.l = constant */
890 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
891 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
892 ICONST(EAX, iptr->sx.val.l);
894 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
895 M_IADD(REG_ITMP2, EDX);
896 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
897 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
898 M_IADD(REG_ITMP2, EDX);
899 M_LNGMOVE(EAX_EDX_PACKED, d);
900 emit_store_dst(jd, iptr, d);
903 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
905 s1 = emit_load_s1(jd, iptr, EAX);
906 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
907 d = codegen_reg_of_dst(jd, iptr, EAX);
908 emit_arithmetic_check(cd, iptr, s2);
910 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
912 /* check as described in jvm spec */
914 M_CMP_IMM(0x80000000, EAX);
921 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
922 emit_store_dst(jd, iptr, d);
925 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
927 s1 = emit_load_s1(jd, iptr, EAX);
928 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
929 d = codegen_reg_of_dst(jd, iptr, EDX);
930 emit_arithmetic_check(cd, iptr, s2);
932 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
934 /* check as described in jvm spec */
936 M_CMP_IMM(0x80000000, EAX);
944 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
945 emit_store_dst(jd, iptr, d);
948 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
949 /* sx.val.i = constant */
951 /* TODO: optimize for `/ 2' */
952 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
953 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
957 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
958 M_SRA_IMM(iptr->sx.val.i, d);
959 emit_store_dst(jd, iptr, d);
962 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
963 /* sx.val.i = constant */
965 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
966 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
968 M_MOV(s1, REG_ITMP1);
972 M_AND_IMM(iptr->sx.val.i, d);
974 M_BGE(2 + 2 + 6 + 2);
975 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
977 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
979 emit_store_dst(jd, iptr, d);
982 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
983 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
985 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
986 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
988 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
989 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
990 /* XXX could be optimized */
991 emit_arithmetic_check(cd, iptr, REG_ITMP3);
993 bte = iptr->sx.s23.s3.bte;
996 M_LST(s2, REG_SP, 2 * 4);
998 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
999 M_LST(s1, REG_SP, 0 * 4);
1001 M_MOV_IMM(bte->fp, REG_ITMP3);
1003 emit_store_dst(jd, iptr, d);
1006 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1007 /* sx.val.i = constant */
1009 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1010 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1012 M_TEST(GET_HIGH_REG(d));
1014 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1015 M_IADDC_IMM(0, GET_HIGH_REG(d));
1016 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1017 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1018 emit_store_dst(jd, iptr, d);
1022 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1023 /* sx.val.l = constant */
1025 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1026 if (iptr->dst.var->flags & INMEMORY) {
1027 if (iptr->s1.var->flags & INMEMORY) {
1028 /* Alpha algorithm */
1030 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8);
1032 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1038 /* TODO: hmm, don't know if this is always correct */
1040 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1042 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1048 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1049 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1051 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1052 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1053 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1054 emit_jcc(cd, CC_GE, disp);
1056 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1057 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1059 emit_neg_reg(cd, REG_ITMP1);
1060 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1061 emit_neg_reg(cd, REG_ITMP2);
1063 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1064 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1066 emit_neg_reg(cd, REG_ITMP1);
1067 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1068 emit_neg_reg(cd, REG_ITMP2);
1070 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 8);
1071 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 8 + 4);
1075 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1076 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1078 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1079 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1080 M_TEST(GET_LOW_REG(s1));
1086 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1088 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1089 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1090 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1091 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1094 emit_store_dst(jd, iptr, d);
1097 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1098 /* sx.val.i = constant */
1100 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1101 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1103 M_SLL_IMM(iptr->sx.val.i, d);
1104 emit_store_dst(jd, iptr, d);
1107 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1109 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1110 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1111 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1112 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1115 emit_store_dst(jd, iptr, d);
1118 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1119 /* sx.val.i = constant */
1121 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1122 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1124 M_SRA_IMM(iptr->sx.val.i, d);
1125 emit_store_dst(jd, iptr, d);
1128 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1130 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1131 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1132 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1133 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1136 emit_store_dst(jd, iptr, d);
1139 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1140 /* sx.val.i = constant */
1142 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1143 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1145 M_SRL_IMM(iptr->sx.val.i, d);
1146 emit_store_dst(jd, iptr, d);
1149 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1151 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1152 s2 = emit_load_s2(jd, iptr, ECX);
1153 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1156 M_TEST_IMM(32, ECX);
1158 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1159 M_CLR(GET_LOW_REG(d));
1160 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1161 M_SLL(GET_LOW_REG(d));
1162 emit_store_dst(jd, iptr, d);
1165 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1166 /* sx.val.i = constant */
1168 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1169 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1171 if (iptr->sx.val.i & 0x20) {
1172 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1173 M_CLR(GET_LOW_REG(d));
1174 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1178 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1180 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1182 emit_store_dst(jd, iptr, d);
1185 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1187 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1188 s2 = emit_load_s2(jd, iptr, ECX);
1189 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1192 M_TEST_IMM(32, ECX);
1194 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1195 M_SRA_IMM(31, GET_HIGH_REG(d));
1196 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1197 M_SRA(GET_HIGH_REG(d));
1198 emit_store_dst(jd, iptr, d);
1201 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1202 /* sx.val.i = constant */
1204 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1205 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1207 if (iptr->sx.val.i & 0x20) {
1208 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1209 M_SRA_IMM(31, GET_HIGH_REG(d));
1210 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1214 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1216 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1218 emit_store_dst(jd, iptr, d);
1221 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1223 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1224 s2 = emit_load_s2(jd, iptr, ECX);
1225 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1228 M_TEST_IMM(32, ECX);
1230 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1231 M_CLR(GET_HIGH_REG(d));
1232 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1233 M_SRL(GET_HIGH_REG(d));
1234 emit_store_dst(jd, iptr, d);
1237 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1238 /* sx.val.l = constant */
1240 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1241 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1243 if (iptr->sx.val.i & 0x20) {
1244 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1245 M_CLR(GET_HIGH_REG(d));
1246 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1250 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1252 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1254 emit_store_dst(jd, iptr, d);
1257 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1259 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1260 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1261 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1268 emit_store_dst(jd, iptr, d);
1271 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1272 /* sx.val.i = constant */
1274 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1275 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1277 M_AND_IMM(iptr->sx.val.i, d);
1278 emit_store_dst(jd, iptr, d);
1281 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1283 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1284 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1285 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1286 if (s2 == GET_LOW_REG(d))
1287 M_AND(s1, GET_LOW_REG(d));
1289 M_INTMOVE(s1, GET_LOW_REG(d));
1290 M_AND(s2, GET_LOW_REG(d));
1292 /* REG_ITMP1 probably contains low 32-bit of destination */
1293 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1294 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1295 if (s2 == GET_HIGH_REG(d))
1296 M_AND(s1, GET_HIGH_REG(d));
1298 M_INTMOVE(s1, GET_HIGH_REG(d));
1299 M_AND(s2, GET_HIGH_REG(d));
1301 emit_store_dst(jd, iptr, d);
1304 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1305 /* sx.val.l = constant */
1307 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1308 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1310 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1311 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1312 emit_store_dst(jd, iptr, d);
1315 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1317 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1318 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1319 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1326 emit_store_dst(jd, iptr, d);
1329 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1330 /* sx.val.i = constant */
1332 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1333 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1335 M_OR_IMM(iptr->sx.val.i, d);
1336 emit_store_dst(jd, iptr, d);
1339 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1341 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1342 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1343 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1344 if (s2 == GET_LOW_REG(d))
1345 M_OR(s1, GET_LOW_REG(d));
1347 M_INTMOVE(s1, GET_LOW_REG(d));
1348 M_OR(s2, GET_LOW_REG(d));
1350 /* REG_ITMP1 probably contains low 32-bit of destination */
1351 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1352 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1353 if (s2 == GET_HIGH_REG(d))
1354 M_OR(s1, GET_HIGH_REG(d));
1356 M_INTMOVE(s1, GET_HIGH_REG(d));
1357 M_OR(s2, GET_HIGH_REG(d));
1359 emit_store_dst(jd, iptr, d);
1362 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1363 /* sx.val.l = constant */
1365 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1366 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1368 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1369 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1370 emit_store_dst(jd, iptr, d);
1373 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1375 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1376 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1377 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1384 emit_store_dst(jd, iptr, d);
1387 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1388 /* sx.val.i = constant */
1390 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1391 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1393 M_XOR_IMM(iptr->sx.val.i, d);
1394 emit_store_dst(jd, iptr, d);
1397 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1399 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1400 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1401 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1402 if (s2 == GET_LOW_REG(d))
1403 M_XOR(s1, GET_LOW_REG(d));
1405 M_INTMOVE(s1, GET_LOW_REG(d));
1406 M_XOR(s2, GET_LOW_REG(d));
1408 /* REG_ITMP1 probably contains low 32-bit of destination */
1409 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1410 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1411 if (s2 == GET_HIGH_REG(d))
1412 M_XOR(s1, GET_HIGH_REG(d));
1414 M_INTMOVE(s1, GET_HIGH_REG(d));
1415 M_XOR(s2, GET_HIGH_REG(d));
1417 emit_store_dst(jd, iptr, d);
1420 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1421 /* sx.val.l = constant */
1423 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1424 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1426 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1427 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1428 emit_store_dst(jd, iptr, d);
1432 /* floating operations ************************************************/
1434 case ICMD_FNEG: /* ..., value ==> ..., - value */
1436 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1437 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1439 emit_store_dst(jd, iptr, d);
1442 case ICMD_DNEG: /* ..., value ==> ..., - value */
1444 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1445 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1447 emit_store_dst(jd, iptr, d);
1450 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1452 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1453 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1454 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1456 emit_store_dst(jd, iptr, d);
1459 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1461 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1462 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1463 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1465 emit_store_dst(jd, iptr, d);
1468 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1470 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1471 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1472 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1474 emit_store_dst(jd, iptr, d);
1477 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1479 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1480 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1481 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1483 emit_store_dst(jd, iptr, d);
1486 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1488 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1489 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1490 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1492 emit_store_dst(jd, iptr, d);
1495 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1497 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1498 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1499 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1501 emit_store_dst(jd, iptr, d);
1504 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1506 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1507 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1508 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1510 emit_store_dst(jd, iptr, d);
1513 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1515 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1516 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1517 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1519 emit_store_dst(jd, iptr, d);
1522 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1524 /* exchanged to skip fxch */
1525 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1526 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1527 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1528 /* emit_fxch(cd); */
1533 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1534 emit_store_dst(jd, iptr, d);
1535 emit_ffree_reg(cd, 0);
1539 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1541 /* exchanged to skip fxch */
1542 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1543 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1544 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1545 /* emit_fxch(cd); */
1550 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1551 emit_store_dst(jd, iptr, d);
1552 emit_ffree_reg(cd, 0);
1556 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1557 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1559 var = VAROP(iptr->s1);
1560 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1562 if (var->flags & INMEMORY) {
1563 emit_fildl_membase(cd, REG_SP, var->vv.regoff);
1565 /* XXX not thread safe! */
1566 disp = dseg_add_unique_s4(cd, 0);
1567 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1569 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1570 emit_fildl_membase(cd, REG_ITMP1, disp);
1573 emit_store_dst(jd, iptr, d);
1576 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1577 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1579 var = VAROP(iptr->s1);
1580 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1581 if (var->flags & INMEMORY) {
1582 emit_fildll_membase(cd, REG_SP, var->vv.regoff);
1585 log_text("L2F: longs have to be in memory");
1588 emit_store_dst(jd, iptr, d);
1591 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1593 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1594 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1596 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1599 /* Round to zero, 53-bit mode, exception masked */
1600 disp = dseg_add_s4(cd, 0x0e7f);
1601 emit_fldcw_membase(cd, REG_ITMP1, disp);
1603 var = VAROP(iptr->dst);
1604 var1 = VAROP(iptr->s1);
1606 if (var->flags & INMEMORY) {
1607 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1609 /* Round to nearest, 53-bit mode, exceptions masked */
1610 disp = dseg_add_s4(cd, 0x027f);
1611 emit_fldcw_membase(cd, REG_ITMP1, disp);
1613 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1614 REG_SP, var->vv.regoff);
1617 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1619 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1622 /* XXX not thread safe! */
1623 disp = dseg_add_unique_s4(cd, 0);
1624 emit_fistpl_membase(cd, REG_ITMP1, disp);
1625 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1627 /* Round to nearest, 53-bit mode, exceptions masked */
1628 disp = dseg_add_s4(cd, 0x027f);
1629 emit_fldcw_membase(cd, REG_ITMP1, disp);
1631 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1634 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1635 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1638 emit_jcc(cd, CC_NE, disp);
1640 /* XXX: change this when we use registers */
1641 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1642 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1643 emit_call_reg(cd, REG_ITMP1);
1645 if (var->flags & INMEMORY) {
1646 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1649 M_INTMOVE(REG_RESULT, var->vv.regoff);
1653 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1655 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1656 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1658 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1661 /* Round to zero, 53-bit mode, exception masked */
1662 disp = dseg_add_s4(cd, 0x0e7f);
1663 emit_fldcw_membase(cd, REG_ITMP1, disp);
1665 var = VAROP(iptr->dst);
1666 var1 = VAROP(iptr->s1);
1668 if (var->flags & INMEMORY) {
1669 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1671 /* Round to nearest, 53-bit mode, exceptions masked */
1672 disp = dseg_add_s4(cd, 0x027f);
1673 emit_fldcw_membase(cd, REG_ITMP1, disp);
1675 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1676 REG_SP, var->vv.regoff);
1679 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1681 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1684 /* XXX not thread safe! */
1685 disp = dseg_add_unique_s4(cd, 0);
1686 emit_fistpl_membase(cd, REG_ITMP1, disp);
1687 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1689 /* Round to nearest, 53-bit mode, exceptions masked */
1690 disp = dseg_add_s4(cd, 0x027f);
1691 emit_fldcw_membase(cd, REG_ITMP1, disp);
1693 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1696 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1697 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1700 emit_jcc(cd, CC_NE, disp);
1702 /* XXX: change this when we use registers */
1703 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1704 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1705 emit_call_reg(cd, REG_ITMP1);
1707 if (var->flags & INMEMORY) {
1708 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1710 M_INTMOVE(REG_RESULT, var->vv.regoff);
1714 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1716 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1717 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1719 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1722 /* Round to zero, 53-bit mode, exception masked */
1723 disp = dseg_add_s4(cd, 0x0e7f);
1724 emit_fldcw_membase(cd, REG_ITMP1, disp);
1726 var = VAROP(iptr->dst);
1727 var1 = VAROP(iptr->s1);
1729 if (var->flags & INMEMORY) {
1730 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1732 /* Round to nearest, 53-bit mode, exceptions masked */
1733 disp = dseg_add_s4(cd, 0x027f);
1734 emit_fldcw_membase(cd, REG_ITMP1, disp);
1736 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1737 REG_SP, var->vv.regoff + 4);
1740 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1742 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1745 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1747 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1749 emit_jcc(cd, CC_NE, disp);
1751 emit_alu_imm_membase(cd, ALU_CMP, 0,
1752 REG_SP, var->vv.regoff);
1755 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1757 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1759 emit_jcc(cd, CC_NE, disp);
1761 /* XXX: change this when we use registers */
1762 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1763 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1764 emit_call_reg(cd, REG_ITMP1);
1765 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1766 emit_mov_reg_membase(cd, REG_RESULT2,
1767 REG_SP, var->vv.regoff + 4);
1770 log_text("F2L: longs have to be in memory");
1775 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1777 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1778 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1780 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1783 /* Round to zero, 53-bit mode, exception masked */
1784 disp = dseg_add_s4(cd, 0x0e7f);
1785 emit_fldcw_membase(cd, REG_ITMP1, disp);
1787 var = VAROP(iptr->dst);
1788 var1 = VAROP(iptr->s1);
1790 if (var->flags & INMEMORY) {
1791 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1793 /* Round to nearest, 53-bit mode, exceptions masked */
1794 disp = dseg_add_s4(cd, 0x027f);
1795 emit_fldcw_membase(cd, REG_ITMP1, disp);
1797 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1798 REG_SP, var->vv.regoff + 4);
1801 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1803 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1806 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1808 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1810 emit_jcc(cd, CC_NE, disp);
1812 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff);
1815 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1817 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1819 emit_jcc(cd, CC_NE, disp);
1821 /* XXX: change this when we use registers */
1822 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1823 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1824 emit_call_reg(cd, REG_ITMP1);
1825 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1826 emit_mov_reg_membase(cd, REG_RESULT2,
1827 REG_SP, var->vv.regoff + 4);
1830 log_text("D2L: longs have to be in memory");
1835 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1837 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1838 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1840 emit_store_dst(jd, iptr, d);
1843 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1845 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1846 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1848 emit_store_dst(jd, iptr, d);
1851 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1854 /* exchanged to skip fxch */
1855 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1856 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1857 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1858 /* emit_fxch(cd); */
1861 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1862 emit_jcc(cd, CC_E, 6);
1863 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1865 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1866 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1867 emit_jcc(cd, CC_B, 3 + 5);
1868 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1869 emit_jmp_imm(cd, 3);
1870 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1871 emit_store_dst(jd, iptr, d);
1874 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1877 /* exchanged to skip fxch */
1878 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1879 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1880 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1881 /* emit_fxch(cd); */
1884 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1885 emit_jcc(cd, CC_E, 3);
1886 emit_movb_imm_reg(cd, 1, REG_AH);
1888 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1889 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1890 emit_jcc(cd, CC_B, 3 + 5);
1891 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1892 emit_jmp_imm(cd, 3);
1893 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1894 emit_store_dst(jd, iptr, d);
1898 /* memory operations **************************************************/
1900 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1902 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1903 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1904 /* implicit null-pointer check */
1905 M_ILD(d, s1, OFFSET(java_array_t, size));
1906 emit_store_dst(jd, iptr, d);
1909 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1911 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1912 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1913 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1914 /* implicit null-pointer check */
1915 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1916 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray_t, data[0]),
1918 emit_store_dst(jd, iptr, d);
1921 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1923 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1924 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1925 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1926 /* implicit null-pointer check */
1927 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1928 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray_t, data[0]),
1930 emit_store_dst(jd, iptr, d);
1933 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1935 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1936 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1937 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1938 /* implicit null-pointer check */
1939 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1940 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray_t, data[0]),
1942 emit_store_dst(jd, iptr, d);
1945 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1947 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1948 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1949 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1950 /* implicit null-pointer check */
1951 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1952 emit_mov_memindex_reg(cd, OFFSET(java_intarray_t, data[0]),
1954 emit_store_dst(jd, iptr, d);
1957 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1959 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1960 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1961 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1962 /* implicit null-pointer check */
1963 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1965 var = VAROP(iptr->dst);
1967 assert(var->flags & INMEMORY);
1968 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]),
1969 s1, s2, 3, REG_ITMP3);
1970 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff);
1971 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]) + 4,
1972 s1, s2, 3, REG_ITMP3);
1973 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff + 4);
1976 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1978 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1979 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1980 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1981 /* implicit null-pointer check */
1982 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1983 emit_flds_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2, 2);
1984 emit_store_dst(jd, iptr, d);
1987 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
1989 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1990 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1991 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1992 /* implicit null-pointer check */
1993 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1994 emit_fldl_memindex(cd, OFFSET(java_doublearray_t, data[0]), s1, s2,3);
1995 emit_store_dst(jd, iptr, d);
1998 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2000 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2001 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2002 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2003 /* implicit null-pointer check */
2004 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2005 emit_mov_memindex_reg(cd, OFFSET(java_objectarray_t, data[0]),
2007 emit_store_dst(jd, iptr, d);
2011 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2013 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2014 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2015 /* implicit null-pointer check */
2016 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2017 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2019 /* because EBP, ESI, EDI have no xH and xL nibbles */
2020 M_INTMOVE(s3, REG_ITMP3);
2023 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray_t, data[0]),
2027 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2029 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2030 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2031 /* implicit null-pointer check */
2032 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2033 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2034 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray_t, data[0]),
2038 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2040 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2041 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2042 /* implicit null-pointer check */
2043 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2044 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2045 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray_t, data[0]),
2049 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2051 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2052 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2053 /* implicit null-pointer check */
2054 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2055 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2056 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray_t, data[0]),
2060 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2062 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2063 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2064 /* implicit null-pointer check */
2065 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2067 var = VAROP(iptr->sx.s23.s3);
2069 assert(var->flags & INMEMORY);
2070 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff, REG_ITMP3);
2071 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray_t, data[0])
2073 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff + 4, REG_ITMP3);
2074 emit_mov_reg_memindex(cd, REG_ITMP3,
2075 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2078 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2080 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2081 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2082 /* implicit null-pointer check */
2083 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2084 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2085 emit_fstps_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2,2);
2088 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2090 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2091 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2092 /* implicit null-pointer check */
2093 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2094 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2095 emit_fstpl_memindex(cd, OFFSET(java_doublearray_t, data[0]),
2099 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2101 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2102 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2103 /* implicit null-pointer check */
2104 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2105 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2107 M_AST(s1, REG_SP, 0 * 4);
2108 M_AST(s3, REG_SP, 1 * 4);
2109 M_MOV_IMM(BUILTIN_FAST_canstore, REG_ITMP1);
2111 emit_arraystore_check(cd, iptr);
2113 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2114 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2115 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2116 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray_t, data[0]),
2120 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2122 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2123 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2124 /* implicit null-pointer check */
2125 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2126 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2127 OFFSET(java_bytearray_t, data[0]), s1, s2, 0);
2130 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2132 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2133 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2134 /* implicit null-pointer check */
2135 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2136 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2137 OFFSET(java_chararray_t, data[0]), s1, s2, 1);
2140 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2142 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2143 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2144 /* implicit null-pointer check */
2145 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2146 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2147 OFFSET(java_shortarray_t, data[0]), s1, s2, 1);
2150 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2152 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2153 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2154 /* implicit null-pointer check */
2155 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2156 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2157 OFFSET(java_intarray_t, data[0]), s1, s2, 2);
2160 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2162 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2163 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2164 /* implicit null-pointer check */
2165 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2166 emit_mov_imm_memindex(cd,
2167 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2168 OFFSET(java_longarray_t, data[0]), s1, s2, 3);
2169 emit_mov_imm_memindex(cd,
2170 ((s4)iptr->sx.s23.s3.constval) >> 31,
2171 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2174 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2176 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2177 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2178 /* implicit null-pointer check */
2179 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2180 emit_mov_imm_memindex(cd, 0,
2181 OFFSET(java_objectarray_t, data[0]), s1, s2, 2);
2185 case ICMD_GETSTATIC: /* ... ==> ..., value */
2187 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2188 uf = iptr->sx.s23.s3.uf;
2189 fieldtype = uf->fieldref->parseddesc.fd->type;
2192 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2196 fi = iptr->sx.s23.s3.fmiref->p.field;
2197 fieldtype = fi->type;
2198 disp = (intptr_t) fi->value;
2200 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2201 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2204 M_MOV_IMM(disp, REG_ITMP1);
2205 switch (fieldtype) {
2208 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2209 M_ILD(d, REG_ITMP1, 0);
2212 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2213 M_LLD(d, REG_ITMP1, 0);
2216 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2217 M_FLD(d, REG_ITMP1, 0);
2220 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2221 M_DLD(d, REG_ITMP1, 0);
2224 emit_store_dst(jd, iptr, d);
2227 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2229 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2230 uf = iptr->sx.s23.s3.uf;
2231 fieldtype = uf->fieldref->parseddesc.fd->type;
2234 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2237 fi = iptr->sx.s23.s3.fmiref->p.field;
2238 fieldtype = fi->type;
2239 disp = (intptr_t) fi->value;
2241 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2242 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2245 M_MOV_IMM(disp, REG_ITMP1);
2246 switch (fieldtype) {
2249 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2250 M_IST(s1, REG_ITMP1, 0);
2253 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2254 M_LST(s1, REG_ITMP1, 0);
2257 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2258 emit_fstps_membase(cd, REG_ITMP1, 0);
2261 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2262 emit_fstpl_membase(cd, REG_ITMP1, 0);
2267 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2268 /* val = value (in current instruction) */
2269 /* following NOP) */
2271 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2272 uf = iptr->sx.s23.s3.uf;
2273 fieldtype = uf->fieldref->parseddesc.fd->type;
2276 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2279 fi = iptr->sx.s23.s3.fmiref->p.field;
2280 fieldtype = fi->type;
2281 disp = (intptr_t) fi->value;
2283 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2284 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2287 M_MOV_IMM(disp, REG_ITMP1);
2288 switch (fieldtype) {
2291 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
2294 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2295 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
2302 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2304 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2305 emit_nullpointer_check(cd, iptr, s1);
2307 #if defined(ENABLE_ESCAPE_CHECK)
2308 /*emit_escape_check(cd, s1);*/
2311 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2312 uf = iptr->sx.s23.s3.uf;
2313 fieldtype = uf->fieldref->parseddesc.fd->type;
2316 patcher_add_patch_ref(jd, PATCHER_getfield,
2317 iptr->sx.s23.s3.uf, 0);
2320 fi = iptr->sx.s23.s3.fmiref->p.field;
2321 fieldtype = fi->type;
2325 switch (fieldtype) {
2328 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2329 M_ILD32(d, s1, disp);
2332 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2333 M_LLD32(d, s1, disp);
2336 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2337 M_FLD32(d, s1, disp);
2340 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2341 M_DLD32(d, s1, disp);
2344 emit_store_dst(jd, iptr, d);
2347 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2349 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2350 emit_nullpointer_check(cd, iptr, s1);
2352 /* must be done here because of code patching */
2354 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2355 uf = iptr->sx.s23.s3.uf;
2356 fieldtype = uf->fieldref->parseddesc.fd->type;
2359 fi = iptr->sx.s23.s3.fmiref->p.field;
2360 fieldtype = fi->type;
2363 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2364 if (IS_2_WORD_TYPE(fieldtype))
2365 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2367 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2370 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2372 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2374 uf = iptr->sx.s23.s3.uf;
2377 patcher_add_patch_ref(jd, PATCHER_putfield, uf, 0);
2381 fi = iptr->sx.s23.s3.fmiref->p.field;
2385 switch (fieldtype) {
2388 M_IST32(s2, s1, disp);
2391 M_LST32(s2, s1, disp);
2394 emit_fstps_membase32(cd, s1, disp);
2397 emit_fstpl_membase32(cd, s1, disp);
2402 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2403 /* val = value (in current instruction) */
2404 /* following NOP) */
2406 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2407 emit_nullpointer_check(cd, iptr, s1);
2409 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2410 uf = iptr->sx.s23.s3.uf;
2411 fieldtype = uf->fieldref->parseddesc.fd->type;
2414 patcher_add_patch_ref(jd, PATCHER_putfieldconst,
2418 fi = iptr->sx.s23.s3.fmiref->p.field;
2419 fieldtype = fi->type;
2423 switch (fieldtype) {
2426 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2429 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2430 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2438 /* branch operations **************************************************/
2440 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2442 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2443 M_INTMOVE(s1, REG_ITMP1_XPTR);
2445 #ifdef ENABLE_VERIFIER
2446 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2447 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2448 iptr->sx.s23.s2.uc, 0);
2450 #endif /* ENABLE_VERIFIER */
2452 M_CALL_IMM(0); /* passing exception pc */
2453 M_POP(REG_ITMP2_XPC);
2455 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2459 case ICMD_GOTO: /* ... ==> ... */
2460 case ICMD_RET: /* ... ==> ... */
2462 #if defined(ENABLE_SSA)
2464 last_cmd_was_goto = true;
2466 /* In case of a Goto phimoves have to be inserted before the */
2469 codegen_emit_phi_moves(jd, bptr);
2472 emit_br(cd, iptr->dst.block);
2476 case ICMD_JSR: /* ... ==> ... */
2478 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
2482 case ICMD_IFNULL: /* ..., value ==> ... */
2483 case ICMD_IFNONNULL:
2485 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2487 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
2490 case ICMD_IFEQ: /* ..., value ==> ... */
2497 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2498 M_CMP_IMM(iptr->sx.val.i, s1);
2499 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
2502 case ICMD_IF_LEQ: /* ..., value ==> ... */
2504 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2505 if (iptr->sx.val.l == 0) {
2506 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2507 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2510 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2511 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2512 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2513 M_OR(REG_ITMP2, REG_ITMP1);
2515 emit_beq(cd, iptr->dst.block);
2518 case ICMD_IF_LLT: /* ..., value ==> ... */
2520 if (iptr->sx.val.l == 0) {
2521 /* If high 32-bit are less than zero, then the 64-bits
2523 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2525 emit_blt(cd, iptr->dst.block);
2528 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2529 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2530 emit_blt(cd, iptr->dst.block);
2532 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2533 emit_bult(cd, iptr->dst.block);
2537 case ICMD_IF_LLE: /* ..., value ==> ... */
2539 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2540 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2541 emit_blt(cd, iptr->dst.block);
2543 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2544 emit_bule(cd, iptr->dst.block);
2547 case ICMD_IF_LNE: /* ..., value ==> ... */
2549 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2550 if (iptr->sx.val.l == 0) {
2551 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2552 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2555 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2556 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2557 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2558 M_OR(REG_ITMP2, REG_ITMP1);
2560 emit_bne(cd, iptr->dst.block);
2563 case ICMD_IF_LGT: /* ..., value ==> ... */
2565 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2566 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2567 emit_bgt(cd, iptr->dst.block);
2569 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2570 emit_bugt(cd, iptr->dst.block);
2573 case ICMD_IF_LGE: /* ..., value ==> ... */
2575 if (iptr->sx.val.l == 0) {
2576 /* If high 32-bit are greater equal zero, then the
2578 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2580 emit_bge(cd, iptr->dst.block);
2583 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2584 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2585 emit_bgt(cd, iptr->dst.block);
2587 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2588 emit_buge(cd, iptr->dst.block);
/* 32-bit integer and reference compare-and-branch: all six integer
   conditions share one handler; the condition code is derived from
   the opcode's offset from ICMD_IF_ICMPEQ, so this relies on the
   ICMD enumeration order matching the branch-condition order. */
2592 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2593 case ICMD_IF_ICMPNE:
2594 case ICMD_IF_ICMPLT:
2595 case ICMD_IF_ICMPGT:
2596 case ICMD_IF_ICMPGE:
2597 case ICMD_IF_ICMPLE:
2599 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2600 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2602 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);
/* reference compares: only EQ/NE exist for ACMP */
2605 case ICMD_IF_ACMPEQ: /* ..., value, value ==> ... */
2606 case ICMD_IF_ACMPNE:
2608 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2609 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2611 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);
2614 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
/* 64-bit equality: XOR the low words and the high words
   separately, then OR the two results -- the result is zero iff
   all 64 bits are equal */
2616 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2617 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2618 M_INTMOVE(s1, REG_ITMP1);
2619 M_XOR(s2, REG_ITMP1);
2620 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2621 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2622 M_INTMOVE(s1, REG_ITMP2);
2623 M_XOR(s2, REG_ITMP2);
2624 M_OR(REG_ITMP1, REG_ITMP2);
2625 emit_beq(cd, iptr->dst.block);
2628 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
/* same XOR/OR trick as LCMPEQ, branching on a non-zero result */
2630 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2631 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2632 M_INTMOVE(s1, REG_ITMP1);
2633 M_XOR(s2, REG_ITMP1);
2634 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2635 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2636 M_INTMOVE(s1, REG_ITMP2);
2637 M_XOR(s2, REG_ITMP2);
2638 M_OR(REG_ITMP1, REG_ITMP2);
2639 emit_bne(cd, iptr->dst.block);
2642 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
/* ordered 64-bit compare: signed compare of the high words first,
   then an unsigned compare of the low words */
2644 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2645 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2647 emit_blt(cd, iptr->dst.block);
2648 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2649 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2652 emit_bult(cd, iptr->dst.block);
2655 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2657 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2658 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2660 emit_bgt(cd, iptr->dst.block);
2661 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2662 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2665 emit_bugt(cd, iptr->dst.block);
2668 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2670 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2671 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2673 emit_blt(cd, iptr->dst.block);
2674 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2675 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2678 emit_bule(cd, iptr->dst.block);
2681 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2683 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2684 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2686 emit_bgt(cd, iptr->dst.block);
2687 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2688 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2691 emit_buge(cd, iptr->dst.block);
2695 case ICMD_IRETURN: /* ..., retvalue ==> ... */
/* All *RETURN cases move the value into the ABI result
   register(s) and then share the common epilogue below
   (nowperformreturn). */
2697 REPLACEMENT_POINT_RETURN(cd, iptr);
2698 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2699 M_INTMOVE(s1, REG_RESULT);
2700 goto nowperformreturn;
2702 case ICMD_LRETURN: /* ..., retvalue ==> ... */
2704 REPLACEMENT_POINT_RETURN(cd, iptr);
2705 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2706 M_LNGMOVE(s1, REG_RESULT_PACKED);
2707 goto nowperformreturn;
2709 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2711 REPLACEMENT_POINT_RETURN(cd, iptr);
2712 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2713 M_INTMOVE(s1, REG_RESULT);
2715 #ifdef ENABLE_VERIFIER
/* with the verifier, an unresolved return-type class must be
   resolved (via patcher) before the reference is returned */
2716 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2717 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2718 iptr->sx.s23.s2.uc, 0);
2720 #endif /* ENABLE_VERIFIER */
2721 goto nowperformreturn;
2723 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2726 REPLACEMENT_POINT_RETURN(cd, iptr);
2727 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2728 goto nowperformreturn;
2730 case ICMD_RETURN: /* ... ==> ... */
2732 REPLACEMENT_POINT_RETURN(cd, iptr);
/* ---- common method epilogue ---- */
2738 p = cd->stackframesize;
2740 #if !defined(NDEBUG)
2741 emit_verbosecall_exit(jd);
2744 #if defined(ENABLE_THREADS)
/* synchronized methods: the return value lives in the result
   registers, so it is spilled to the monitor slot
   (rd->memuse * 8) around the LOCK_monitor_exit call and
   reloaded afterwards */
2745 if (checksync && code_is_synchronized(code)) {
2746 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 8);
2748 /* we need to save the proper return value */
2749 switch (iptr->opc) {
2752 M_IST(REG_RESULT, REG_SP, rd->memuse * 8);
2756 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2760 emit_fstps_membase(cd, REG_SP, rd->memuse * 8);
2764 emit_fstpl_membase(cd, REG_SP, rd->memuse * 8);
2768 M_AST(REG_ITMP2, REG_SP, 0);
2769 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2772 /* and now restore the proper return value */
2773 switch (iptr->opc) {
2776 M_ILD(REG_RESULT, REG_SP, rd->memuse * 8);
2780 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2784 emit_flds_membase(cd, REG_SP, rd->memuse * 8);
2788 emit_fldl_membase(cd, REG_SP, rd->memuse * 8);
2794 /* restore saved registers */
2796 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2797 p--; M_ALD(rd->savintregs[i], REG_SP, p * 8);
2800 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2802 emit_fldl_membase(cd, REG_SP, p * 8);
/* NOTE(review): for FRETURN/DRETURN the float result already
   occupies a slot on the x87 stack, so restoring saved float
   registers apparently needs the extra +1 st-offset hinted at by
   the commented emit_fstp_reg calls -- confirm against the
   elided lines */
2803 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2805 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2808 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2812 /* deallocate stack */
2814 if (cd->stackframesize)
2815 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
2822 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2825 branch_target_t *table;
2827 table = iptr->dst.table;
2829 l = iptr->sx.s23.s2.tablelow;
2830 i = iptr->sx.s23.s3.tablehigh;
2832 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2833 M_INTMOVE(s1, REG_ITMP1);
/* bias the index by the low bound so the table is 0-based */
2836 M_ISUB_IMM(l, REG_ITMP1);
/* a single unsigned compare implements the full range check:
   indices below the low bound wrap around and also compare
   greater, falling through to the default target (table[0]) */
2842 M_CMP_IMM(i - 1, REG_ITMP1);
2843 emit_bugt(cd, table[0].block);
2845 /* build jump table top down and use address of lowest entry */
2850 dseg_add_target(cd, table->block);
2854 /* length of dataseg after last dseg_addtarget is used
/* indirect jump: table entry loaded dseg-relative, scaled by 4
   (index shift 2); the 0 immediate is the patched dseg base */
2857 M_MOV_IMM(0, REG_ITMP2);
2859 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
2865 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2868 lookup_target_t *lookup;
2870 lookup = iptr->dst.lookup;
2872 i = iptr->sx.s23.s2.lookupcount;
2874 MCODECHECK((i<<2)+8);
2875 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
/* linear chain of compare-and-branch pairs, one per entry,
   followed by an unconditional branch to the default target */
2878 M_CMP_IMM(lookup->value, s1);
2879 emit_beq(cd, lookup->target.block);
2883 emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
2888 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
2890 REPLACEMENT_POINT_FORGC_BUILTIN(cd, iptr);
2892 bte = iptr->sx.s23.s3.bte;
2896 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2898 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2899 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
2900 case ICMD_INVOKEINTERFACE:
2902 REPLACEMENT_POINT_INVOKE(cd, iptr);
/* pick the method descriptor: unresolved targets only provide
   the parsed descriptor of the reference, resolved ones the
   methodinfo itself */
2904 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2905 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
2909 lm = iptr->sx.s23.s3.fmiref->p.method;
2910 md = lm->parseddesc;
2914 s3 = md->paramcount;
2916 MCODECHECK((s3 << 1) + 64);
2918 /* copy arguments to registers or stack location */
2920 for (s3 = s3 - 1; s3 >= 0; s3--) {
2921 var = VAR(iptr->sx.s23.s2.args[s3]);
2923 /* Already Preallocated (ARGVAR) ? */
2924 if (var->flags & PREALLOC)
2926 if (IS_INT_LNG_TYPE(var->type)) {
/* on this i386 ABI integer arguments are always passed in
   memory; a register-assigned int argument is an internal error */
2927 if (!md->params[s3].inmemory) {
2928 log_text("No integer argument registers available!");
2932 if (IS_2_WORD_TYPE(var->type)) {
2933 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
2934 M_LST(d, REG_SP, md->params[s3].regoff);
2936 d = emit_load(jd, iptr, var, REG_ITMP1);
2937 M_IST(d, REG_SP, md->params[s3].regoff);
2942 if (!md->params[s3].inmemory) {
2943 s1 = md->params[s3].regoff;
2944 d = emit_load(jd, iptr, var, s1);
2948 d = emit_load(jd, iptr, var, REG_FTMP1);
2949 if (IS_2_WORD_TYPE(var->type))
2950 M_DST(d, REG_SP, md->params[s3].regoff);
2952 M_FST(d, REG_SP, md->params[s3].regoff);
/* emit the actual call, depending on the invoke flavor */
2957 switch (iptr->opc) {
2959 d = md->returntype.type;
/* BUILTIN: call the builtin directly, or through its compiler
   stub when one exists */
2961 if (bte->stub == NULL) {
2962 M_MOV_IMM(bte->fp, REG_ITMP1);
2965 M_MOV_IMM(bte->stub, REG_ITMP1);
2969 #if defined(ENABLE_ESCAPE_CHECK)
2970 if (bte->opcode == ICMD_NEW || bte->opcode == ICMD_NEWARRAY) {
2971 /*emit_escape_annotate_object(cd, m);*/
2976 case ICMD_INVOKESPECIAL:
/* explicit null check on the receiver (first stack argument) */
2977 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
2978 emit_nullpointer_check(cd, iptr, REG_ITMP1);
2981 case ICMD_INVOKESTATIC:
2983 unresolved_method *um = iptr->sx.s23.s3.um;
2985 patcher_add_patch_ref(jd, PATCHER_invokestatic_special,
2989 d = md->returntype.type;
2992 disp = (ptrint) lm->stubroutine;
2993 d = lm->parseddesc->returntype.type;
2996 M_MOV_IMM(disp, REG_ITMP2);
3000 case ICMD_INVOKEVIRTUAL:
/* NOTE(review): the receiver is loaded into REG_ITMP1 here, but
   the null check is passed s1 (leftover from the argument loop)
   -- confirm whether REG_ITMP1 was intended, as in the
   INVOKESPECIAL case above */
3001 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3002 emit_nullpointer_check(cd, iptr, s1);
3005 unresolved_method *um = iptr->sx.s23.s3.um;
3007 patcher_add_patch_ref(jd, PATCHER_invokevirtual, um, 0);
3010 d = md->returntype.type;
/* virtual dispatch: method pointer at vftbl->table[vftblindex] */
3013 s1 = OFFSET(vftbl_t, table[0]) +
3014 sizeof(methodptr) * lm->vftblindex;
3015 d = md->returntype.type;
3018 M_ALD(REG_METHODPTR, REG_ITMP1,
3019 OFFSET(java_object_t, vftbl));
3020 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3024 case ICMD_INVOKEINTERFACE:
3025 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3026 emit_nullpointer_check(cd, iptr, s1);
3029 unresolved_method *um = iptr->sx.s23.s3.um;
3031 patcher_add_patch_ref(jd, PATCHER_invokeinterface, um, 0);
3035 d = md->returntype.type;
/* interface dispatch: indirect through the per-class
   interfacetable (indexed by negative class index), then through
   the per-interface method table */
3038 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3039 sizeof(methodptr) * lm->clazz->index;
3041 s2 = sizeof(methodptr) * (lm - lm->clazz->methods);
3043 d = md->returntype.type;
3046 M_ALD(REG_METHODPTR, REG_ITMP1,
3047 OFFSET(java_object_t, vftbl));
3048 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3049 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3054 /* store size of call code in replacement point */
3056 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3057 REPLACEMENT_POINT_FORGC_BUILTIN_RETURN(cd, iptr);
3059 /* d contains return type */
3061 if (d != TYPE_VOID) {
3062 #if defined(ENABLE_SSA)
3063 if ((ls == NULL) /* || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) */ ||
3064 (ls->lifetime[iptr->dst.varindex].type != UNUSED))
3065 /* a "living" stackslot */
/* move the ABI result into the destination variable */
3068 if (IS_INT_LNG_TYPE(d)) {
3069 if (IS_2_WORD_TYPE(d)) {
3070 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3071 M_LNGMOVE(REG_RESULT_PACKED, s1);
3074 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3075 M_INTMOVE(REG_RESULT, s1);
3079 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3081 emit_store_dst(jd, iptr, s1);
3087 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3089 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3090 /* object type cast-check */
3093 vftbl_t *supervftbl;
3096 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
/* resolved: take index and vftbl straight from the classinfo */
3102 super = iptr->sx.s23.s3.c.cls;
3103 superindex = super->index;
3104 supervftbl = super->vftbl;
3107 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3109 /* if class is not resolved, check which code to call */
3111 if (super == NULL) {
3113 emit_label_beq(cd, BRANCH_LABEL_1);
/* the 0 immediate is patched at runtime with super->flags; the
   ACC_INTERFACE test then selects between the interface and the
   class check paths below */
3115 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3116 iptr->sx.s23.s3.c.ref, 0);
3118 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3119 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3120 emit_label_beq(cd, BRANCH_LABEL_2);
3123 /* interface checkcast code */
3125 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3126 if (super != NULL) {
3128 emit_label_beq(cd, BRANCH_LABEL_3);
3131 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3133 if (super == NULL) {
3134 patcher_add_patch_ref(jd, PATCHER_checkcast_interface,
3135 iptr->sx.s23.s3.c.ref,
/* check interfacetablelength > superindex, then that the
   interfacetable entry for the interface is non-NULL */
3140 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3141 M_ISUB_IMM32(superindex, REG_ITMP3);
3142 /* XXX do we need this one? */
3144 emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);
3146 M_ALD32(REG_ITMP3, REG_ITMP2,
3147 OFFSET(vftbl_t, interfacetable[0]) -
3148 superindex * sizeof(methodptr*));
3150 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);
3153 emit_label_br(cd, BRANCH_LABEL_4);
3155 emit_label(cd, BRANCH_LABEL_3);
3158 /* class checkcast code */
3160 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3161 if (super == NULL) {
3162 emit_label(cd, BRANCH_LABEL_2);
3166 emit_label_beq(cd, BRANCH_LABEL_5);
3169 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3171 if (super == NULL) {
3172 patcher_add_patch_ref(jd, PATCHER_checkcast_class,
3173 iptr->sx.s23.s3.c.ref,
3177 M_MOV_IMM(supervftbl, REG_ITMP3);
3179 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3181 /* if (s1 != REG_ITMP1) { */
3182 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3183 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3184 /* #if defined(ENABLE_THREADS) */
3185 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3187 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
/* subtype test via the vftbl baseval/diffval relative-numbering
   scheme: (obj->baseval - super->baseval) is range-checked
   against super->diffval */
3190 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3191 M_ISUB(REG_ITMP3, REG_ITMP2);
3192 M_MOV_IMM(supervftbl, REG_ITMP3);
3193 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3197 M_CMP(REG_ITMP3, REG_ITMP2);
3198 emit_classcast_check(cd, iptr, BRANCH_ULE, REG_ITMP3, s1);
3201 emit_label(cd, BRANCH_LABEL_5);
3204 if (super == NULL) {
3205 emit_label(cd, BRANCH_LABEL_1);
3206 emit_label(cd, BRANCH_LABEL_4);
3209 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3212 /* array type cast-check */
/* arrays are checked by calling BUILTIN_arraycheckcast(obj, class) */
3214 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3215 M_AST(s1, REG_SP, 0 * 4);
3217 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3218 patcher_add_patch_ref(jd, PATCHER_builtin_arraycheckcast,
3219 iptr->sx.s23.s3.c.ref, 0);
3222 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3223 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3226 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3228 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);
3230 d = codegen_reg_of_dst(jd, iptr, s1);
3234 emit_store_dst(jd, iptr, d);
3237 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3241 vftbl_t *supervftbl;
3244 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3250 super = iptr->sx.s23.s3.c.cls;
3251 superindex = super->index;
3252 supervftbl = super->vftbl;
3255 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3256 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
3259 M_INTMOVE(s1, REG_ITMP1);
3265 /* if class is not resolved, check which code to call */
3267 if (super == NULL) {
3269 emit_label_beq(cd, BRANCH_LABEL_1);
/* the 0 immediate is patched with super->flags at runtime; the
   ACC_INTERFACE test selects interface vs class code below */
3271 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3272 iptr->sx.s23.s3.c.ref, 0);
3274 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3275 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3276 emit_label_beq(cd, BRANCH_LABEL_2);
3279 /* interface instanceof code */
3281 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3282 if (super != NULL) {
3284 emit_label_beq(cd, BRANCH_LABEL_3);
3287 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3289 if (super == NULL) {
3290 patcher_add_patch_ref(jd, PATCHER_instanceof_interface,
3291 iptr->sx.s23.s3.c.ref, 0);
3295 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3296 M_ISUB_IMM32(superindex, REG_ITMP3);
/* NOTE(review): disp hand-counts the byte length of the
   instruction sequence skipped when the index is out of range --
   must stay in sync with the emitted instructions below */
3299 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3300 6 /* jcc */ + 5 /* mov_imm_reg */);
3303 M_ALD32(REG_ITMP1, REG_ITMP1,
3304 OFFSET(vftbl_t, interfacetable[0]) -
3305 superindex * sizeof(methodptr*));
3307 /* emit_setcc_reg(cd, CC_A, d); */
3308 /* emit_jcc(cd, CC_BE, 5); */
3313 emit_label_br(cd, BRANCH_LABEL_4);
3315 emit_label(cd, BRANCH_LABEL_3);
3318 /* class instanceof code */
3320 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3321 if (super == NULL) {
3322 emit_label(cd, BRANCH_LABEL_2);
3326 emit_label_beq(cd, BRANCH_LABEL_5);
3329 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3331 if (super == NULL) {
3332 patcher_add_patch_ref(jd, PATCHER_instanceof_class,
3333 iptr->sx.s23.s3.c.ref, 0);
3336 M_MOV_IMM(supervftbl, REG_ITMP2);
3338 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3339 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3340 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
/* same baseval/diffval relative-numbering subtype test as in
   CHECKCAST, but producing a boolean in d instead of trapping */
3342 M_ISUB(REG_ITMP2, REG_ITMP1);
3343 M_CLR(d); /* may be REG_ITMP2 */
3344 M_CMP(REG_ITMP3, REG_ITMP1);
3349 emit_label(cd, BRANCH_LABEL_5);
3352 if (super == NULL) {
3353 emit_label(cd, BRANCH_LABEL_1);
3354 emit_label(cd, BRANCH_LABEL_4);
3357 emit_store_dst(jd, iptr, d);
3361 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3363 /* check for negative sizes and copy sizes to stack if necessary */
3365 MCODECHECK((iptr->s1.argcount << 1) + 64);
/* dimension sizes are stored starting at slot (s1 + 3) * 4,
   i.e. right after the three builtin-call argument slots */
3367 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3368 /* copy SAVEDVAR sizes to stack */
3369 var = VAR(iptr->sx.s23.s2.args[s1]);
3371 /* Already Preallocated? */
3372 if (!(var->flags & PREALLOC)) {
3373 if (var->flags & INMEMORY) {
3374 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff);
3375 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3378 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3382 /* is a patcher function set? */
3384 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3385 patcher_add_patch_ref(jd, PATCHER_builtin_multianewarray,
3386 iptr->sx.s23.s3.c.ref, 0);
3392 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3394 /* a0 = dimension count */
3396 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3398 /* a1 = arraydescriptor */
3400 M_IST_IMM(disp, REG_SP, 1 * 4);
3402 /* a2 = pointer to dimensions = stack pointer */
3404 M_MOV(REG_SP, REG_ITMP1);
3405 M_AADD_IMM(3 * 4, REG_ITMP1);
3406 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3408 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3411 /* check for exception before result assignment */
3413 emit_exception_check(cd, iptr);
3415 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3416 M_INTMOVE(REG_RESULT, s1);
3417 emit_store_dst(jd, iptr, s1);
3420 #if defined(ENABLE_SSA)
/* SSA only: materialize the current exception object (passed in
   REG_ITMP1) into the destination variable */
3421 case ICMD_GETEXCEPTION:
3422 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
3423 M_INTMOVE(REG_ITMP1, d);
3424 emit_store_dst(jd, iptr, d);
/* default case: an opcode this backend cannot handle aborts
   compilation with an InternalError */
3428 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3433 } /* for instruction */
3437 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3440 #if defined(ENABLE_SSA)
3443 /* by edge splitting, in Blocks with phi moves there can only */
3444 /* be a goto as last command, no other Jump/Branch Command */
3446 if (!last_cmd_was_goto)
3447 codegen_emit_phi_moves(jd, bptr);
3452 /* At the end of a basic block we may have to append some nops,
3453 because the patcher stub calling code might be longer than the
3454 actual instruction. So codepatching does not change the
3455 following block unintentionally. */
3457 if (cd->mcodeptr < cd->lastmcodeptr) {
3458 while (cd->mcodeptr < cd->lastmcodeptr) {
3463 } /* if (bptr -> flags >= BBREACHED) */
3464 } /* for basic block */
3466 /* generate stubs */
3468 emit_patcher_traps(jd);
3470 /* everything's ok */
3476 /* codegen_emit_stub_native ****************************************************
3478 Emits a stub routine which calls a native method.
3480 *******************************************************************************/
3482 void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f, int skipparams)
3488 int i, j; /* count variables */
3492 /* get required compiler data */
3498 /* set some variables */
3502 /* calculate stackframe size */
/* frame holds the stackframeinfo, the local reference table and
   four argument slots for codegen_start_native_call, plus
   (in elided terms) space for native arguments and the saved
   return value */
3504 cd->stackframesize =
3505 sizeof(stackframeinfo_t) / SIZEOF_VOID_P +
3506 sizeof(localref_table) / SIZEOF_VOID_P +
3507 4 + /* 4 arguments (start_native_call) */
3510 /* keep stack 16-byte aligned */
3512 ALIGN_ODD(cd->stackframesize);
3514 /* create method header */
3516 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3517 (void) dseg_add_unique_s4(cd, cd->stackframesize * 8 + 4); /* FrameSize */
3518 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3519 (void) dseg_add_unique_s4(cd, 0); /* IntSave */
3520 (void) dseg_add_unique_s4(cd, 0); /* FltSave */
3522 #if defined(ENABLE_PROFILING)
3523 /* generate native method profiling code */
3525 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
3526 /* count frequency */
3528 M_MOV_IMM(code, REG_ITMP1);
3529 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
3533 /* calculate stackframe size for native function */
3535 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
3537 /* Mark the whole fpu stack as free for native functions (only for saved */
3538 /* register count == 0). */
3540 emit_ffree_reg(cd, 0);
3541 emit_ffree_reg(cd, 1);
3542 emit_ffree_reg(cd, 2);
3543 emit_ffree_reg(cd, 3);
3544 emit_ffree_reg(cd, 4);
3545 emit_ffree_reg(cd, 5);
3546 emit_ffree_reg(cd, 6);
3547 emit_ffree_reg(cd, 7);
3549 #if defined(ENABLE_GC_CACAO)
3550 /* remember callee saved int registers in stackframeinfo (GC may need to */
3551 /* recover them during a collection). */
3553 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3554 OFFSET(stackframeinfo_t, intregs);
3556 for (i = 0; i < INT_SAV_CNT; i++)
3557 M_AST(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3560 /* prepare data structures for native function call */
/* pass the current stack pointer (and a 0 second argument) to
   codegen_start_native_call */
3562 M_MOV(REG_SP, REG_ITMP1);
3563 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3564 M_IST_IMM(0, REG_SP, 1 * 4);
3567 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
3570 /* remember class argument */
/* for static methods the call above returns the class in
   REG_RESULT; stash it in REG_ITMP3 until the JNI args are built */
3572 if (m->flags & ACC_STATIC)
3573 M_MOV(REG_RESULT, REG_ITMP3);
3575 /* Copy or spill arguments to new locations. */
/* j is offset by skipparams to leave room for the JNIEnv (and,
   for static methods, the class) argument slots in nmd */
3577 for (i = md->paramcount - 1, j = i + skipparams; i >= 0; i--, j--) {
3578 if (!md->params[i].inmemory)
/* source offset: caller frame = this frame + return address */
3581 s1 = md->params[i].regoff + cd->stackframesize * 8 + 8;
3582 s2 = nmd->params[j].regoff;
3584 /* float/double in memory can be copied like int/longs */
3586 switch (md->paramtypes[i].type) {
3590 M_ILD(REG_ITMP1, REG_SP, s1);
3591 M_IST(REG_ITMP1, REG_SP, s2);
3595 M_LLD(REG_ITMP12_PACKED, REG_SP, s1);
3596 M_LST(REG_ITMP12_PACKED, REG_SP, s2);
3601 /* Handle native Java methods. */
3603 if (m->flags & ACC_NATIVE) {
3604 /* if function is static, put class into second argument */
3606 if (m->flags & ACC_STATIC)
3607 M_AST(REG_ITMP3, REG_SP, 1 * 4);
3609 /* put env into first argument */
3611 M_AST_IMM(_Jv_env, REG_SP, 0 * 4);
3614 /* Call the native function. */
3616 disp = dseg_add_functionptr(cd, f);
/* NOTE(review): the 0 immediate is presumably patched to the
   data-segment base, so the function pointer is loaded
   dseg-relative at offset disp -- confirm against the elided
   patching code */
3617 emit_mov_imm_reg(cd, 0, REG_ITMP3);
3619 M_ALD(REG_ITMP1, REG_ITMP3, disp);
3622 /* save return value */
3624 switch (md->returntype.type) {
/* sub-int return types: normalize to 32 bit (zero-extend for
   boolean/char, sign-extend for byte/short) before saving */
3627 switch (md->returntype.decltype) {
3628 case PRIMITIVETYPE_BOOLEAN:
3629 M_BZEXT(REG_RESULT, REG_RESULT);
3631 case PRIMITIVETYPE_BYTE:
3632 M_BSEXT(REG_RESULT, REG_RESULT);
3634 case PRIMITIVETYPE_CHAR:
3635 M_CZEXT(REG_RESULT, REG_RESULT);
3637 case PRIMITIVETYPE_SHORT:
3638 M_SSEXT(REG_RESULT, REG_RESULT);
3641 M_IST(REG_RESULT, REG_SP, 1 * 8);
3644 M_LST(REG_RESULT_PACKED, REG_SP, 1 * 8);
3647 emit_fsts_membase(cd, REG_SP, 1 * 8);
3650 emit_fstl_membase(cd, REG_SP, 1 * 8);
3656 /* remove native stackframe info */
3658 M_MOV(REG_SP, REG_ITMP1);
3659 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3660 M_IST_IMM(0, REG_SP, 1 * 4);
3663 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
/* codegen_finish_native_call returns any pending exception */
3665 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
3667 /* restore return value */
3669 switch (md->returntype.type) {
3672 M_ILD(REG_RESULT, REG_SP, 1 * 8);
3675 M_LLD(REG_RESULT_PACKED, REG_SP, 1 * 8);
3678 emit_flds_membase(cd, REG_SP, 1 * 8);
3681 emit_fldl_membase(cd, REG_SP, 1 * 8);
3687 #if defined(ENABLE_GC_CACAO)
3688 /* restore callee saved int registers from stackframeinfo (GC might have */
3689 /* modified them during a collection). */
3691 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3692 OFFSET(stackframeinfo_t, intregs);
3694 for (i = 0; i < INT_SAV_CNT; i++)
3695 M_ALD(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3698 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
3700 /* check for exception */
3707 /* handle exception */
/* pass the exception object in REG_ITMP1_XPTR and derive the
   faulting pc from the return address on the stack (adjusted by
   2 bytes) before jumping to the asm exception handler */
3709 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
3710 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
3711 M_ASUB_IMM(2, REG_ITMP2_XPC);
3713 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
3719 * These are local overrides for various environment variables in Emacs.
3720 * Please do not remove this and leave it at the end of the file, where
3721 * Emacs will automagically detect them.
3722 * ---------------------------------------------------------------------
3725 * indent-tabs-mode: t
3729 * vim:noexpandtab:sw=4:ts=4: