1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007, 2008
4 CACAOVM - Verein zur Foerderung der freien virtuellen Maschine CACAO
6 This file is part of CACAO.
8 This program is free software; you can redistribute it and/or
9 modify it under the terms of the GNU General Public License as
10 published by the Free Software Foundation; either version 2, or (at
11 your option) any later version.
13 This program is distributed in the hope that it will be useful, but
14 WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with this program; if not, write to the Free Software
20 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
34 #include "vm/jit/i386/md-abi.h"
36 #include "vm/jit/i386/codegen.h"
37 #include "vm/jit/i386/emit.h"
39 #include "mm/memory.h"
40 #include "native/jni.h"
41 #include "native/localref.h"
42 #include "native/native.h"
44 #include "threads/lock-common.h"
46 #include "vm/builtin.h"
47 #include "vm/exceptions.h"
48 #include "vm/global.h"
49 #include "vm/primitive.hpp"
50 #include "vm/stringlocal.h"
53 #include "vm/jit/abi.h"
54 #include "vm/jit/asmpart.h"
55 #include "vm/jit/codegen-common.h"
56 #include "vm/jit/dseg.h"
57 #include "vm/jit/emit-common.h"
58 #include "vm/jit/jit.h"
59 #include "vm/jit/linenumbertable.h"
60 #include "vm/jit/parse.h"
61 #include "vm/jit/patcher-common.h"
62 #include "vm/jit/reg.h"
63 #include "vm/jit/replace.h"
64 #include "vm/jit/stacktrace.h"
65 #include "vm/jit/trap.h"
67 #if defined(ENABLE_SSA)
68 # include "vm/jit/optimizing/lsra.h"
69 # include "vm/jit/optimizing/ssa.h"
70 #elif defined(ENABLE_LSRA)
71 # include "vm/jit/allocator/lsra.h"
74 #include "vmcore/loader.h"
75 #include "vmcore/options.h"
76 #include "vmcore/utf8.h"
79 /* codegen_emit ****************************************************************
81 Generates machine code.
83 *******************************************************************************/
85 bool codegen_emit(jitdata *jd)
91 s4 len, s1, s2, s3, d, disp;
92 int align_off; /* offset for alignment compensation */
97 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
98 builtintable_entry *bte;
101 unresolved_field *uf;
104 #if defined(ENABLE_SSA)
106 bool last_cmd_was_goto;
108 last_cmd_was_goto = false;
112 /* get required compiler data */
119 /* prevent compiler warnings */
130 s4 savedregs_num = 0;
133 /* space to save used callee saved registers */
135 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
136 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
138 cd->stackframesize = rd->memuse + savedregs_num;
141 #if defined(ENABLE_THREADS)
142 /* space to save argument of monitor_enter */
144 if (checksync && code_is_synchronized(code))
145 cd->stackframesize++;
148 /* create method header */
150 /* Keep stack of non-leaf functions 16-byte aligned. */
152 if (!code_is_leafmethod(code)) {
153 ALIGN_ODD(cd->stackframesize);
156 align_off = cd->stackframesize ? 4 : 0;
158 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
159 (void) dseg_add_unique_s4(
160 cd, cd->stackframesize * 8 + align_off); /* FrameSize */
162 code->synchronizedoffset = rd->memuse * 8;
164 /* REMOVEME: We still need it for exception handling in assembler. */
166 if (code_is_leafmethod(code))
167 (void) dseg_add_unique_s4(cd, 1); /* IsLeaf */
169 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
171 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
172 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
174 #if defined(ENABLE_PROFILING)
175 /* generate method profiling code */
177 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
178 /* count frequency */
180 M_MOV_IMM(code, REG_ITMP3);
181 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
185 /* create stack frame (if necessary) */
187 if (cd->stackframesize)
189 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
191 /* save return address and used callee saved registers */
193 p = cd->stackframesize;
194 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
195 p--; M_AST(rd->savintregs[i], REG_SP, p * 8);
197 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
198 p--; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 8);
201 /* take arguments out of register or stack frame */
206 for (p = 0, l = 0; p < md->paramcount; p++) {
207 t = md->paramtypes[p].type;
209 varindex = jd->local_map[l * 5 + t];
210 #if defined(ENABLE_SSA)
212 if (varindex != UNUSED)
213 varindex = ls->var_0[varindex];
214 if ((varindex != UNUSED) && (ls->lifetime[varindex].type == UNUSED))
219 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
222 if (varindex == UNUSED)
226 s1 = md->params[p].regoff;
229 if (IS_INT_LNG_TYPE(t)) { /* integer args */
230 if (!md->params[p].inmemory) { /* register arguments */
231 log_text("integer register argument");
233 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
234 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
236 else { /* reg arg -> spilled */
237 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
241 if (!(var->flags & INMEMORY)) {
243 cd->stackframesize * 8 + 4 + align_off + s1);
246 if (!IS_2_WORD_TYPE(t)) {
247 #if defined(ENABLE_SSA)
248 /* no copy avoiding by now possible with SSA */
250 emit_mov_membase_reg( /* + 4 for return address */
252 cd->stackframesize * 8 + s1 + 4 + align_off,
254 emit_mov_reg_membase(
255 cd, REG_ITMP1, REG_SP, var->vv.regoff);
258 #endif /*defined(ENABLE_SSA)*/
259 /* reuse stackslot */
260 var->vv.regoff = cd->stackframesize * 8 + 4 +
265 #if defined(ENABLE_SSA)
266 /* no copy avoiding by now possible with SSA */
268 emit_mov_membase_reg( /* + 4 for return address */
270 cd->stackframesize * 8 + s1 + 4 + align_off,
272 emit_mov_reg_membase(
273 cd, REG_ITMP1, REG_SP, var->vv.regoff);
274 emit_mov_membase_reg( /* + 4 for return address */
276 cd->stackframesize * 8 + s1 + 4 + 4 + align_off,
278 emit_mov_reg_membase(
279 cd, REG_ITMP1, REG_SP, var->vv.regoff + 4);
282 #endif /*defined(ENABLE_SSA)*/
283 /* reuse stackslot */
284 var->vv.regoff = cd->stackframesize * 8 + 8 + s1;
289 else { /* floating args */
290 if (!md->params[p].inmemory) { /* register arguments */
291 log_text("There are no float argument registers!");
293 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
294 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
295 } else { /* reg arg -> spilled */
296 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 8 */
300 else { /* stack arguments */
301 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
305 cd->stackframesize * 8 + s1 + 4 + align_off);
307 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
313 cd->stackframesize * 8 + s1 + 4 + align_off);
315 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
318 } else { /* stack-arg -> spilled */
319 #if defined(ENABLE_SSA)
320 /* no copy avoiding by now possible with SSA */
322 emit_mov_membase_reg(
324 cd->stackframesize * 8 + s1 + 4 + align_off,
326 emit_mov_reg_membase(
327 cd, REG_ITMP1, REG_SP, var->vv.regoff);
331 cd->stackframesize * 8 + s1 + 4 + align_off);
332 emit_fstps_membase(cd, REG_SP, var->vv.regoff);
337 cd->stackframesize * 8 + s1 + 4 + align_off);
338 emit_fstpl_membase(cd, REG_SP, var->vv.regoff);
342 #endif /*defined(ENABLE_SSA)*/
343 /* reuse stackslot */
344 var->vv.regoff = cd->stackframesize * 8 + 4 +
351 /* call monitorenter function */
353 #if defined(ENABLE_THREADS)
354 if (checksync && code_is_synchronized(code)) {
357 if (m->flags & ACC_STATIC) {
358 M_MOV_IMM(&m->clazz->object.header, REG_ITMP1);
361 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 8 + 4 + align_off);
364 M_ALD_MEM(REG_ITMP1, TRAP_NullPointerException);
367 M_AST(REG_ITMP1, REG_SP, s1 * 8);
368 M_AST(REG_ITMP1, REG_SP, 0 * 4);
369 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
375 emit_verbosecall_enter(jd);
380 #if defined(ENABLE_SSA)
381 /* with SSA the Header is Basic Block 0 - insert phi Moves if necessary */
383 codegen_emit_phi_moves(jd, ls->basicblocks[0]);
386 /* end of header generation */
388 /* create replacement points */
390 REPLACEMENT_POINTS_INIT(cd, jd);
392 /* walk through all basic blocks */
394 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
396 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
398 if (bptr->flags >= BBREACHED) {
399 /* branch resolving */
401 codegen_resolve_branchrefs(cd, bptr);
403 /* handle replacement points */
405 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
407 #if defined(ENABLE_REPLACEMENT)
408 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
409 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
411 emit_trap_countdown(cd, &(m->hitcountdown));
416 /* copy interface registers to their destination */
421 #if defined(ENABLE_PROFILING)
422 /* generate basic block profiling code */
424 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
425 /* count frequency */
427 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
428 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
432 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
433 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
436 # if defined(ENABLE_SSA)
438 last_cmd_was_goto = false;
442 var = VAR(bptr->invars[len]);
443 if (bptr->type != BBTYPE_STD) {
444 if (!IS_2_WORD_TYPE(var->type)) {
445 #if !defined(ENABLE_SSA)
446 if (bptr->type == BBTYPE_EXH) {
447 d = codegen_reg_of_var(0, var, REG_ITMP1);
448 M_INTMOVE(REG_ITMP1, d);
449 emit_store(jd, NULL, var, d);
454 log_text("copy interface registers(EXH, SBR): longs \
455 have to be in memory (begin 1)");
463 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
467 var = VAR(bptr->invars[len]);
468 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
469 if (!IS_2_WORD_TYPE(var->type)) {
470 if (bptr->type == BBTYPE_EXH) {
471 d = codegen_reg_of_var(0, var, REG_ITMP1);
472 M_INTMOVE(REG_ITMP1, d);
473 emit_store(jd, NULL, var, d);
477 log_text("copy interface registers: longs have to be in \
484 assert((var->flags & INOUT));
489 /* walk through all instructions */
494 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
495 if (iptr->line != currentline) {
496 linenumbertable_list_entry_add(cd, iptr->line);
497 currentline = iptr->line;
500 MCODECHECK(1024); /* 1kB should be enough */
503 case ICMD_NOP: /* ... ==> ... */
504 case ICMD_POP: /* ..., value ==> ... */
505 case ICMD_POP2: /* ..., value, value ==> ... */
508 case ICMD_INLINE_START:
510 REPLACEMENT_POINT_INLINE_START(cd, iptr);
513 case ICMD_INLINE_BODY:
515 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
516 linenumbertable_list_entry_add_inline_start(cd, iptr);
517 linenumbertable_list_entry_add(cd, iptr->line);
520 case ICMD_INLINE_END:
522 linenumbertable_list_entry_add_inline_end(cd, iptr);
523 linenumbertable_list_entry_add(cd, iptr->line);
526 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
528 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
529 emit_nullpointer_check(cd, iptr, s1);
532 /* constant operations ************************************************/
534 case ICMD_ICONST: /* ... ==> ..., constant */
536 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
537 ICONST(d, iptr->sx.val.i);
538 emit_store_dst(jd, iptr, d);
541 case ICMD_LCONST: /* ... ==> ..., constant */
543 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
544 LCONST(d, iptr->sx.val.l);
545 emit_store_dst(jd, iptr, d);
548 case ICMD_FCONST: /* ... ==> ..., constant */
550 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
551 if (iptr->sx.val.f == 0.0) {
555 if (iptr->sx.val.i == 0x80000000) {
559 } else if (iptr->sx.val.f == 1.0) {
562 } else if (iptr->sx.val.f == 2.0) {
568 disp = dseg_add_float(cd, iptr->sx.val.f);
569 emit_mov_imm_reg(cd, 0, REG_ITMP1);
571 emit_flds_membase(cd, REG_ITMP1, disp);
573 emit_store_dst(jd, iptr, d);
576 case ICMD_DCONST: /* ... ==> ..., constant */
578 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
579 if (iptr->sx.val.d == 0.0) {
583 if (iptr->sx.val.l == 0x8000000000000000LL) {
587 } else if (iptr->sx.val.d == 1.0) {
590 } else if (iptr->sx.val.d == 2.0) {
596 disp = dseg_add_double(cd, iptr->sx.val.d);
597 emit_mov_imm_reg(cd, 0, REG_ITMP1);
599 emit_fldl_membase(cd, REG_ITMP1, disp);
601 emit_store_dst(jd, iptr, d);
604 case ICMD_ACONST: /* ... ==> ..., constant */
606 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
608 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
609 patcher_add_patch_ref(jd, PATCHER_aconst,
610 iptr->sx.val.c.ref, 0);
615 if (iptr->sx.val.anyptr == NULL)
618 M_MOV_IMM(iptr->sx.val.anyptr, d);
620 emit_store_dst(jd, iptr, d);
624 /* load/store/copy/move operations ************************************/
642 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
647 /* integer operations *************************************************/
649 case ICMD_INEG: /* ..., value ==> ..., - value */
651 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
652 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
655 emit_store_dst(jd, iptr, d);
658 case ICMD_LNEG: /* ..., value ==> ..., - value */
660 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
661 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
663 M_NEG(GET_LOW_REG(d));
664 M_IADDC_IMM(0, GET_HIGH_REG(d));
665 M_NEG(GET_HIGH_REG(d));
666 emit_store_dst(jd, iptr, d);
669 case ICMD_I2L: /* ..., value ==> ..., value */
671 s1 = emit_load_s1(jd, iptr, EAX);
672 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
675 M_LNGMOVE(EAX_EDX_PACKED, d);
676 emit_store_dst(jd, iptr, d);
679 case ICMD_L2I: /* ..., value ==> ..., value */
681 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
682 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
684 emit_store_dst(jd, iptr, d);
687 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
689 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
690 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
694 emit_store_dst(jd, iptr, d);
697 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
699 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
700 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
702 emit_store_dst(jd, iptr, d);
705 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
707 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
708 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
710 emit_store_dst(jd, iptr, d);
714 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
716 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
717 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
718 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
725 emit_store_dst(jd, iptr, d);
729 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
730 /* sx.val.i = constant */
732 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
733 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
735 /* `inc reg' is slower on p4's (regarding to ia32
736 optimization reference manual and benchmarks) and as
740 M_IADD_IMM(iptr->sx.val.i, d);
741 emit_store_dst(jd, iptr, d);
744 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
746 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
747 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
748 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
749 M_INTMOVE(s1, GET_LOW_REG(d));
750 M_IADD(s2, GET_LOW_REG(d));
751 /* don't use REG_ITMP1 */
752 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
753 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
754 M_INTMOVE(s1, GET_HIGH_REG(d));
755 M_IADDC(s2, GET_HIGH_REG(d));
756 emit_store_dst(jd, iptr, d);
759 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
760 /* sx.val.l = constant */
762 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
763 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
765 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
766 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
767 emit_store_dst(jd, iptr, d);
770 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
772 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
773 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
774 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
776 M_INTMOVE(s1, REG_ITMP1);
777 M_ISUB(s2, REG_ITMP1);
778 M_INTMOVE(REG_ITMP1, d);
784 emit_store_dst(jd, iptr, d);
787 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
788 /* sx.val.i = constant */
790 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
791 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
793 M_ISUB_IMM(iptr->sx.val.i, d);
794 emit_store_dst(jd, iptr, d);
797 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
799 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
800 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
801 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
802 if (s2 == GET_LOW_REG(d)) {
803 M_INTMOVE(s1, REG_ITMP1);
804 M_ISUB(s2, REG_ITMP1);
805 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
808 M_INTMOVE(s1, GET_LOW_REG(d));
809 M_ISUB(s2, GET_LOW_REG(d));
811 /* don't use REG_ITMP1 */
812 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
813 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
814 if (s2 == GET_HIGH_REG(d)) {
815 M_INTMOVE(s1, REG_ITMP2);
816 M_ISUBB(s2, REG_ITMP2);
817 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
820 M_INTMOVE(s1, GET_HIGH_REG(d));
821 M_ISUBB(s2, GET_HIGH_REG(d));
823 emit_store_dst(jd, iptr, d);
826 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
827 /* sx.val.l = constant */
829 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
830 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
832 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
833 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
834 emit_store_dst(jd, iptr, d);
837 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
839 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
840 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
841 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
848 emit_store_dst(jd, iptr, d);
851 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
852 /* sx.val.i = constant */
854 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
855 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
856 M_IMUL_IMM(s1, iptr->sx.val.i, d);
857 emit_store_dst(jd, iptr, d);
860 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
862 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
863 s2 = emit_load_s2_low(jd, iptr, EDX);
864 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
866 M_INTMOVE(s1, REG_ITMP2);
867 M_IMUL(s2, REG_ITMP2);
869 s1 = emit_load_s1_low(jd, iptr, EAX);
870 s2 = emit_load_s2_high(jd, iptr, EDX);
873 M_IADD(EDX, REG_ITMP2);
875 s1 = emit_load_s1_low(jd, iptr, EAX);
876 s2 = emit_load_s2_low(jd, iptr, EDX);
879 M_INTMOVE(EAX, GET_LOW_REG(d));
880 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
882 emit_store_dst(jd, iptr, d);
885 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
886 /* sx.val.l = constant */
888 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
889 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
890 ICONST(EAX, iptr->sx.val.l);
892 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
893 M_IADD(REG_ITMP2, EDX);
894 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
895 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
896 M_IADD(REG_ITMP2, EDX);
897 M_LNGMOVE(EAX_EDX_PACKED, d);
898 emit_store_dst(jd, iptr, d);
901 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
903 s1 = emit_load_s1(jd, iptr, EAX);
904 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
905 d = codegen_reg_of_dst(jd, iptr, EAX);
906 emit_arithmetic_check(cd, iptr, s2);
908 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
910 /* check as described in jvm spec */
912 M_CMP_IMM(0x80000000, EAX);
919 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
920 emit_store_dst(jd, iptr, d);
923 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
925 s1 = emit_load_s1(jd, iptr, EAX);
926 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
927 d = codegen_reg_of_dst(jd, iptr, EDX);
928 emit_arithmetic_check(cd, iptr, s2);
930 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
932 /* check as described in jvm spec */
934 M_CMP_IMM(0x80000000, EAX);
942 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
943 emit_store_dst(jd, iptr, d);
946 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
947 /* sx.val.i = constant */
949 /* TODO: optimize for `/ 2' */
950 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
951 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
955 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
956 M_SRA_IMM(iptr->sx.val.i, d);
957 emit_store_dst(jd, iptr, d);
960 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
961 /* sx.val.i = constant */
963 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
964 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
966 M_MOV(s1, REG_ITMP1);
970 M_AND_IMM(iptr->sx.val.i, d);
972 M_BGE(2 + 2 + 6 + 2);
973 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
975 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
977 emit_store_dst(jd, iptr, d);
980 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
981 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
983 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
984 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
986 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
987 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
988 /* XXX could be optimized */
989 emit_arithmetic_check(cd, iptr, REG_ITMP3);
991 bte = iptr->sx.s23.s3.bte;
994 M_LST(s2, REG_SP, 2 * 4);
996 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
997 M_LST(s1, REG_SP, 0 * 4);
999 M_MOV_IMM(bte->fp, REG_ITMP3);
1001 emit_store_dst(jd, iptr, d);
1004 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1005 /* sx.val.i = constant */
1007 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1008 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1010 M_TEST(GET_HIGH_REG(d));
1012 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1013 M_IADDC_IMM(0, GET_HIGH_REG(d));
1014 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1015 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1016 emit_store_dst(jd, iptr, d);
1020 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1021 /* sx.val.l = constant */
1023 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1024 if (iptr->dst.var->flags & INMEMORY) {
1025 if (iptr->s1.var->flags & INMEMORY) {
1026 /* Alpha algorithm */
1028 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8);
1030 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1036 /* TODO: hmm, don't know if this is always correct */
1038 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1040 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1046 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1047 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1049 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1050 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1051 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1052 emit_jcc(cd, CC_GE, disp);
1054 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1055 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1057 emit_neg_reg(cd, REG_ITMP1);
1058 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1059 emit_neg_reg(cd, REG_ITMP2);
1061 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1062 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1064 emit_neg_reg(cd, REG_ITMP1);
1065 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1066 emit_neg_reg(cd, REG_ITMP2);
1068 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 8);
1069 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 8 + 4);
1073 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1074 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1076 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1077 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1078 M_TEST(GET_LOW_REG(s1));
1084 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1086 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1087 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1088 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1089 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1092 emit_store_dst(jd, iptr, d);
1095 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1096 /* sx.val.i = constant */
1098 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1099 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1101 M_SLL_IMM(iptr->sx.val.i, d);
1102 emit_store_dst(jd, iptr, d);
1105 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1107 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1108 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1109 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1110 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1113 emit_store_dst(jd, iptr, d);
1116 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1117 /* sx.val.i = constant */
1119 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1120 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1122 M_SRA_IMM(iptr->sx.val.i, d);
1123 emit_store_dst(jd, iptr, d);
1126 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1128 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1129 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1130 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1131 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1134 emit_store_dst(jd, iptr, d);
1137 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1138 /* sx.val.i = constant */
1140 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1141 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1143 M_SRL_IMM(iptr->sx.val.i, d);
1144 emit_store_dst(jd, iptr, d);
1147 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1149 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1150 s2 = emit_load_s2(jd, iptr, ECX);
1151 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1154 M_TEST_IMM(32, ECX);
1156 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1157 M_CLR(GET_LOW_REG(d));
1158 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1159 M_SLL(GET_LOW_REG(d));
1160 emit_store_dst(jd, iptr, d);
1163 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1164 /* sx.val.i = constant */
1166 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1167 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1169 if (iptr->sx.val.i & 0x20) {
1170 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1171 M_CLR(GET_LOW_REG(d));
1172 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1176 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1178 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1180 emit_store_dst(jd, iptr, d);
1183 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1185 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1186 s2 = emit_load_s2(jd, iptr, ECX);
1187 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1190 M_TEST_IMM(32, ECX);
1192 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1193 M_SRA_IMM(31, GET_HIGH_REG(d));
1194 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1195 M_SRA(GET_HIGH_REG(d));
1196 emit_store_dst(jd, iptr, d);
1199 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1200 /* sx.val.i = constant */
1202 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1203 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1205 if (iptr->sx.val.i & 0x20) {
1206 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1207 M_SRA_IMM(31, GET_HIGH_REG(d));
1208 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1212 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1214 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1216 emit_store_dst(jd, iptr, d);
1219 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1221 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1222 s2 = emit_load_s2(jd, iptr, ECX);
1223 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1226 M_TEST_IMM(32, ECX);
1228 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1229 M_CLR(GET_HIGH_REG(d));
1230 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1231 M_SRL(GET_HIGH_REG(d));
1232 emit_store_dst(jd, iptr, d);
1235 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1236 /* sx.val.l = constant */
1238 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1239 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1241 if (iptr->sx.val.i & 0x20) {
1242 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1243 M_CLR(GET_HIGH_REG(d));
1244 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1248 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1250 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1252 emit_store_dst(jd, iptr, d);
1255 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1257 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1258 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1259 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1266 emit_store_dst(jd, iptr, d);
1269 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1270 /* sx.val.i = constant */
1272 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1273 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1275 M_AND_IMM(iptr->sx.val.i, d);
1276 emit_store_dst(jd, iptr, d);
1279 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1281 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1282 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1283 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1284 if (s2 == GET_LOW_REG(d))
1285 M_AND(s1, GET_LOW_REG(d));
1287 M_INTMOVE(s1, GET_LOW_REG(d));
1288 M_AND(s2, GET_LOW_REG(d));
1290 /* REG_ITMP1 probably contains low 32-bit of destination */
1291 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1292 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1293 if (s2 == GET_HIGH_REG(d))
1294 M_AND(s1, GET_HIGH_REG(d));
1296 M_INTMOVE(s1, GET_HIGH_REG(d));
1297 M_AND(s2, GET_HIGH_REG(d));
1299 emit_store_dst(jd, iptr, d);
1302 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1303 /* sx.val.l = constant */
1305 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1306 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1308 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1309 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1310 emit_store_dst(jd, iptr, d);
1313 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1315 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1316 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1317 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1324 emit_store_dst(jd, iptr, d);
1327 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1328 /* sx.val.i = constant */
1330 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1331 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1333 M_OR_IMM(iptr->sx.val.i, d);
1334 emit_store_dst(jd, iptr, d);
1337 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1339 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1340 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1341 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1342 if (s2 == GET_LOW_REG(d))
1343 M_OR(s1, GET_LOW_REG(d));
1345 M_INTMOVE(s1, GET_LOW_REG(d));
1346 M_OR(s2, GET_LOW_REG(d));
1348 /* REG_ITMP1 probably contains low 32-bit of destination */
1349 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1350 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1351 if (s2 == GET_HIGH_REG(d))
1352 M_OR(s1, GET_HIGH_REG(d));
1354 M_INTMOVE(s1, GET_HIGH_REG(d));
1355 M_OR(s2, GET_HIGH_REG(d));
1357 emit_store_dst(jd, iptr, d);
1360 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1361 /* sx.val.l = constant */
1363 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1364 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1366 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1367 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1368 emit_store_dst(jd, iptr, d);
1371 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1373 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1374 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1375 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1382 emit_store_dst(jd, iptr, d);
1385 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1386 /* sx.val.i = constant */
1388 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1389 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1391 M_XOR_IMM(iptr->sx.val.i, d);
1392 emit_store_dst(jd, iptr, d);
1395 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1397 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1398 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1399 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1400 if (s2 == GET_LOW_REG(d))
1401 M_XOR(s1, GET_LOW_REG(d));
1403 M_INTMOVE(s1, GET_LOW_REG(d));
1404 M_XOR(s2, GET_LOW_REG(d));
1406 /* REG_ITMP1 probably contains low 32-bit of destination */
1407 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1408 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1409 if (s2 == GET_HIGH_REG(d))
1410 M_XOR(s1, GET_HIGH_REG(d));
1412 M_INTMOVE(s1, GET_HIGH_REG(d));
1413 M_XOR(s2, GET_HIGH_REG(d));
1415 emit_store_dst(jd, iptr, d);
1418 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1419 /* sx.val.l = constant */
1421 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1422 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1424 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1425 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1426 emit_store_dst(jd, iptr, d);
1430 /* floating operations ************************************************/
1432 case ICMD_FNEG: /* ..., value ==> ..., - value */
1434 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1435 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1437 emit_store_dst(jd, iptr, d);
1440 case ICMD_DNEG: /* ..., value ==> ..., - value */
1442 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1443 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1445 emit_store_dst(jd, iptr, d);
1448 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1450 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1451 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1452 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1454 emit_store_dst(jd, iptr, d);
1457 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1459 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1460 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1461 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1463 emit_store_dst(jd, iptr, d);
1466 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1468 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1469 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1470 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1472 emit_store_dst(jd, iptr, d);
1475 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1477 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1478 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1479 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1481 emit_store_dst(jd, iptr, d);
1484 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1486 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1487 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1488 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1490 emit_store_dst(jd, iptr, d);
1493 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1495 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1496 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1497 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1499 emit_store_dst(jd, iptr, d);
1502 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1504 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1505 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1506 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1508 emit_store_dst(jd, iptr, d);
1511 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1513 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1514 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1515 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1517 emit_store_dst(jd, iptr, d);
1520 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1522 /* exchanged to skip fxch */
1523 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1524 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1525 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1526 /* emit_fxch(cd); */
1531 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1532 emit_store_dst(jd, iptr, d);
1533 emit_ffree_reg(cd, 0);
1537 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1539 /* exchanged to skip fxch */
1540 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1541 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1542 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1543 /* emit_fxch(cd); */
1548 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1549 emit_store_dst(jd, iptr, d);
1550 emit_ffree_reg(cd, 0);
1554 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1555 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1557 var = VAROP(iptr->s1);
1558 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1560 if (var->flags & INMEMORY) {
1561 emit_fildl_membase(cd, REG_SP, var->vv.regoff);
1563 /* XXX not thread safe! */
1564 disp = dseg_add_unique_s4(cd, 0);
1565 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1567 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1568 emit_fildl_membase(cd, REG_ITMP1, disp);
1571 emit_store_dst(jd, iptr, d);
1574 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1575 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1577 var = VAROP(iptr->s1);
1578 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1579 if (var->flags & INMEMORY) {
1580 emit_fildll_membase(cd, REG_SP, var->vv.regoff);
1583 log_text("L2F: longs have to be in memory");
1586 emit_store_dst(jd, iptr, d);
1589 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1591 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1592 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1594 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1597 /* Round to zero, 53-bit mode, exception masked */
1598 disp = dseg_add_s4(cd, 0x0e7f);
1599 emit_fldcw_membase(cd, REG_ITMP1, disp);
1601 var = VAROP(iptr->dst);
1602 var1 = VAROP(iptr->s1);
1604 if (var->flags & INMEMORY) {
1605 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1607 /* Round to nearest, 53-bit mode, exceptions masked */
1608 disp = dseg_add_s4(cd, 0x027f);
1609 emit_fldcw_membase(cd, REG_ITMP1, disp);
1611 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1612 REG_SP, var->vv.regoff);
1615 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1617 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1620 /* XXX not thread safe! */
1621 disp = dseg_add_unique_s4(cd, 0);
1622 emit_fistpl_membase(cd, REG_ITMP1, disp);
1623 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1625 /* Round to nearest, 53-bit mode, exceptions masked */
1626 disp = dseg_add_s4(cd, 0x027f);
1627 emit_fldcw_membase(cd, REG_ITMP1, disp);
1629 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1632 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1633 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1636 emit_jcc(cd, CC_NE, disp);
1638 /* XXX: change this when we use registers */
1639 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1640 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1641 emit_call_reg(cd, REG_ITMP1);
1643 if (var->flags & INMEMORY) {
1644 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1647 M_INTMOVE(REG_RESULT, var->vv.regoff);
1651 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1653 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1654 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1656 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1659 /* Round to zero, 53-bit mode, exception masked */
1660 disp = dseg_add_s4(cd, 0x0e7f);
1661 emit_fldcw_membase(cd, REG_ITMP1, disp);
1663 var = VAROP(iptr->dst);
1664 var1 = VAROP(iptr->s1);
1666 if (var->flags & INMEMORY) {
1667 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1669 /* Round to nearest, 53-bit mode, exceptions masked */
1670 disp = dseg_add_s4(cd, 0x027f);
1671 emit_fldcw_membase(cd, REG_ITMP1, disp);
1673 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1674 REG_SP, var->vv.regoff);
1677 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1679 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1682 /* XXX not thread safe! */
1683 disp = dseg_add_unique_s4(cd, 0);
1684 emit_fistpl_membase(cd, REG_ITMP1, disp);
1685 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1687 /* Round to nearest, 53-bit mode, exceptions masked */
1688 disp = dseg_add_s4(cd, 0x027f);
1689 emit_fldcw_membase(cd, REG_ITMP1, disp);
1691 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1694 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1695 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1698 emit_jcc(cd, CC_NE, disp);
1700 /* XXX: change this when we use registers */
1701 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1702 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1703 emit_call_reg(cd, REG_ITMP1);
1705 if (var->flags & INMEMORY) {
1706 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1708 M_INTMOVE(REG_RESULT, var->vv.regoff);
1712 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1714 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1715 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1717 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1720 /* Round to zero, 53-bit mode, exception masked */
1721 disp = dseg_add_s4(cd, 0x0e7f);
1722 emit_fldcw_membase(cd, REG_ITMP1, disp);
1724 var = VAROP(iptr->dst);
1725 var1 = VAROP(iptr->s1);
1727 if (var->flags & INMEMORY) {
1728 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1730 /* Round to nearest, 53-bit mode, exceptions masked */
1731 disp = dseg_add_s4(cd, 0x027f);
1732 emit_fldcw_membase(cd, REG_ITMP1, disp);
1734 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1735 REG_SP, var->vv.regoff + 4);
1738 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1740 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1743 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1745 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1747 emit_jcc(cd, CC_NE, disp);
1749 emit_alu_imm_membase(cd, ALU_CMP, 0,
1750 REG_SP, var->vv.regoff);
1753 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1755 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1757 emit_jcc(cd, CC_NE, disp);
1759 /* XXX: change this when we use registers */
1760 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1761 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1762 emit_call_reg(cd, REG_ITMP1);
1763 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1764 emit_mov_reg_membase(cd, REG_RESULT2,
1765 REG_SP, var->vv.regoff + 4);
1768 log_text("F2L: longs have to be in memory");
1773 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1775 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1776 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1778 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1781 /* Round to zero, 53-bit mode, exception masked */
1782 disp = dseg_add_s4(cd, 0x0e7f);
1783 emit_fldcw_membase(cd, REG_ITMP1, disp);
1785 var = VAROP(iptr->dst);
1786 var1 = VAROP(iptr->s1);
1788 if (var->flags & INMEMORY) {
1789 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1791 /* Round to nearest, 53-bit mode, exceptions masked */
1792 disp = dseg_add_s4(cd, 0x027f);
1793 emit_fldcw_membase(cd, REG_ITMP1, disp);
1795 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1796 REG_SP, var->vv.regoff + 4);
1799 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1801 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1804 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1806 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1808 emit_jcc(cd, CC_NE, disp);
1810 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff);
1813 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1815 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1817 emit_jcc(cd, CC_NE, disp);
1819 /* XXX: change this when we use registers */
1820 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1821 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1822 emit_call_reg(cd, REG_ITMP1);
1823 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1824 emit_mov_reg_membase(cd, REG_RESULT2,
1825 REG_SP, var->vv.regoff + 4);
1828 log_text("D2L: longs have to be in memory");
1833 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1835 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1836 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1838 emit_store_dst(jd, iptr, d);
1841 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1843 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1844 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1846 emit_store_dst(jd, iptr, d);
1849 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1852 /* exchanged to skip fxch */
1853 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1854 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1855 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1856 /* emit_fxch(cd); */
1859 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1860 emit_jcc(cd, CC_E, 6);
1861 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1863 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1864 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1865 emit_jcc(cd, CC_B, 3 + 5);
1866 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1867 emit_jmp_imm(cd, 3);
1868 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1869 emit_store_dst(jd, iptr, d);
1872 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1875 /* exchanged to skip fxch */
1876 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1877 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1878 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1879 /* emit_fxch(cd); */
1882 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1883 emit_jcc(cd, CC_E, 3);
1884 emit_movb_imm_reg(cd, 1, REG_AH);
1886 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1887 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1888 emit_jcc(cd, CC_B, 3 + 5);
1889 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1890 emit_jmp_imm(cd, 3);
1891 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1892 emit_store_dst(jd, iptr, d);
1896 /* memory operations **************************************************/
1898 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1900 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1901 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1902 /* implicit null-pointer check */
1903 M_ILD(d, s1, OFFSET(java_array_t, size));
1904 emit_store_dst(jd, iptr, d);
1907 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1909 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1910 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1911 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1912 /* implicit null-pointer check */
1913 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1914 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray_t, data[0]),
1916 emit_store_dst(jd, iptr, d);
1919 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1921 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1922 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1923 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1924 /* implicit null-pointer check */
1925 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1926 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray_t, data[0]),
1928 emit_store_dst(jd, iptr, d);
1931 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1933 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1934 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1935 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1936 /* implicit null-pointer check */
1937 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1938 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray_t, data[0]),
1940 emit_store_dst(jd, iptr, d);
1943 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1945 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1946 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1947 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1948 /* implicit null-pointer check */
1949 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1950 emit_mov_memindex_reg(cd, OFFSET(java_intarray_t, data[0]),
1952 emit_store_dst(jd, iptr, d);
1955 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1957 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1958 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1959 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1960 /* implicit null-pointer check */
1961 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1963 var = VAROP(iptr->dst);
1965 assert(var->flags & INMEMORY);
1966 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]),
1967 s1, s2, 3, REG_ITMP3);
1968 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff);
1969 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]) + 4,
1970 s1, s2, 3, REG_ITMP3);
1971 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff + 4);
1974 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1976 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1977 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1978 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1979 /* implicit null-pointer check */
1980 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1981 emit_flds_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2, 2);
1982 emit_store_dst(jd, iptr, d);
1985 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
1987 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1988 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1989 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1990 /* implicit null-pointer check */
1991 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1992 emit_fldl_memindex(cd, OFFSET(java_doublearray_t, data[0]), s1, s2,3);
1993 emit_store_dst(jd, iptr, d);
1996 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
1998 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1999 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2000 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2001 /* implicit null-pointer check */
2002 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2003 emit_mov_memindex_reg(cd, OFFSET(java_objectarray_t, data[0]),
2005 emit_store_dst(jd, iptr, d);
2009 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2011 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2012 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2013 /* implicit null-pointer check */
2014 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2015 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2017 /* because EBP, ESI, EDI have no xH and xL nibbles */
2018 M_INTMOVE(s3, REG_ITMP3);
2021 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray_t, data[0]),
2025 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2027 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2028 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2029 /* implicit null-pointer check */
2030 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2031 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2032 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray_t, data[0]),
2036 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2038 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2039 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2040 /* implicit null-pointer check */
2041 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2042 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2043 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray_t, data[0]),
2047 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2049 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2050 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2051 /* implicit null-pointer check */
2052 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2053 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2054 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray_t, data[0]),
2058 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2060 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2061 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2062 /* implicit null-pointer check */
2063 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2065 var = VAROP(iptr->sx.s23.s3);
2067 assert(var->flags & INMEMORY);
2068 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff, REG_ITMP3);
2069 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray_t, data[0])
2071 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff + 4, REG_ITMP3);
2072 emit_mov_reg_memindex(cd, REG_ITMP3,
2073 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2076 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2078 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2079 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2080 /* implicit null-pointer check */
2081 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2082 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2083 emit_fstps_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2,2);
2086 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2088 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2089 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2090 /* implicit null-pointer check */
2091 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2092 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2093 emit_fstpl_memindex(cd, OFFSET(java_doublearray_t, data[0]),
2097 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2099 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2100 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2101 /* implicit null-pointer check */
2102 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2103 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2105 M_AST(s1, REG_SP, 0 * 4);
2106 M_AST(s3, REG_SP, 1 * 4);
2107 M_MOV_IMM(BUILTIN_FAST_canstore, REG_ITMP1);
2109 emit_arraystore_check(cd, iptr);
2111 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2112 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2113 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2114 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray_t, data[0]),
2118 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2120 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2121 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2122 /* implicit null-pointer check */
2123 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2124 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2125 OFFSET(java_bytearray_t, data[0]), s1, s2, 0);
2128 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2130 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2131 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2132 /* implicit null-pointer check */
2133 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2134 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2135 OFFSET(java_chararray_t, data[0]), s1, s2, 1);
2138 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2140 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2141 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2142 /* implicit null-pointer check */
2143 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2144 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2145 OFFSET(java_shortarray_t, data[0]), s1, s2, 1);
2148 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2150 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2151 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2152 /* implicit null-pointer check */
2153 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2154 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2155 OFFSET(java_intarray_t, data[0]), s1, s2, 2);
2158 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2160 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2161 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2162 /* implicit null-pointer check */
2163 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2164 emit_mov_imm_memindex(cd,
2165 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2166 OFFSET(java_longarray_t, data[0]), s1, s2, 3);
2167 emit_mov_imm_memindex(cd,
2168 ((s4)iptr->sx.s23.s3.constval) >> 31,
2169 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2172 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2174 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2175 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2176 /* implicit null-pointer check */
2177 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2178 emit_mov_imm_memindex(cd, 0,
2179 OFFSET(java_objectarray_t, data[0]), s1, s2, 2);
2183 case ICMD_GETSTATIC: /* ... ==> ..., value */
2185 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2186 uf = iptr->sx.s23.s3.uf;
2187 fieldtype = uf->fieldref->parseddesc.fd->type;
2190 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2194 fi = iptr->sx.s23.s3.fmiref->p.field;
2195 fieldtype = fi->type;
2196 disp = (intptr_t) fi->value;
2198 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2199 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2202 M_MOV_IMM(disp, REG_ITMP1);
2203 switch (fieldtype) {
2206 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2207 M_ILD(d, REG_ITMP1, 0);
2210 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2211 M_LLD(d, REG_ITMP1, 0);
2214 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2215 M_FLD(d, REG_ITMP1, 0);
2218 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2219 M_DLD(d, REG_ITMP1, 0);
2222 emit_store_dst(jd, iptr, d);
2225 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2227 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2228 uf = iptr->sx.s23.s3.uf;
2229 fieldtype = uf->fieldref->parseddesc.fd->type;
2232 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2235 fi = iptr->sx.s23.s3.fmiref->p.field;
2236 fieldtype = fi->type;
2237 disp = (intptr_t) fi->value;
2239 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2240 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2243 M_MOV_IMM(disp, REG_ITMP1);
2244 switch (fieldtype) {
2247 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2248 M_IST(s1, REG_ITMP1, 0);
2251 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2252 M_LST(s1, REG_ITMP1, 0);
2255 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2256 emit_fstps_membase(cd, REG_ITMP1, 0);
2259 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2260 emit_fstpl_membase(cd, REG_ITMP1, 0);
2265 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2266 /* val = value (in current instruction) */
2267 /* following NOP) */
2269 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2270 uf = iptr->sx.s23.s3.uf;
2271 fieldtype = uf->fieldref->parseddesc.fd->type;
2274 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2277 fi = iptr->sx.s23.s3.fmiref->p.field;
2278 fieldtype = fi->type;
2279 disp = (intptr_t) fi->value;
2281 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2282 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2285 M_MOV_IMM(disp, REG_ITMP1);
2286 switch (fieldtype) {
2289 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
2292 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2293 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
2300 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2302 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2303 emit_nullpointer_check(cd, iptr, s1);
2305 #if defined(ENABLE_ESCAPE_CHECK)
2306 /*emit_escape_check(cd, s1);*/
2309 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2310 uf = iptr->sx.s23.s3.uf;
2311 fieldtype = uf->fieldref->parseddesc.fd->type;
2314 patcher_add_patch_ref(jd, PATCHER_getfield,
2315 iptr->sx.s23.s3.uf, 0);
2318 fi = iptr->sx.s23.s3.fmiref->p.field;
2319 fieldtype = fi->type;
2323 switch (fieldtype) {
2326 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2327 M_ILD32(d, s1, disp);
2330 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2331 M_LLD32(d, s1, disp);
2334 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2335 M_FLD32(d, s1, disp);
2338 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2339 M_DLD32(d, s1, disp);
2342 emit_store_dst(jd, iptr, d);
2345 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2347 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2348 emit_nullpointer_check(cd, iptr, s1);
2350 /* must be done here because of code patching */
2352 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2353 uf = iptr->sx.s23.s3.uf;
2354 fieldtype = uf->fieldref->parseddesc.fd->type;
2357 fi = iptr->sx.s23.s3.fmiref->p.field;
2358 fieldtype = fi->type;
2361 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2362 if (IS_2_WORD_TYPE(fieldtype))
2363 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2365 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2368 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2370 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2372 uf = iptr->sx.s23.s3.uf;
2375 patcher_add_patch_ref(jd, PATCHER_putfield, uf, 0);
2379 fi = iptr->sx.s23.s3.fmiref->p.field;
2383 switch (fieldtype) {
2386 M_IST32(s2, s1, disp);
2389 M_LST32(s2, s1, disp);
2392 emit_fstps_membase32(cd, s1, disp);
2395 emit_fstpl_membase32(cd, s1, disp);
2400 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2401 /* val = value (in current instruction) */
2402 /* following NOP) */
2404 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2405 emit_nullpointer_check(cd, iptr, s1);
2407 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2408 uf = iptr->sx.s23.s3.uf;
2409 fieldtype = uf->fieldref->parseddesc.fd->type;
2412 patcher_add_patch_ref(jd, PATCHER_putfieldconst,
2416 fi = iptr->sx.s23.s3.fmiref->p.field;
2417 fieldtype = fi->type;
2421 switch (fieldtype) {
2424 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2427 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2428 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2436 /* branch operations **************************************************/
2438 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2440 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2441 M_INTMOVE(s1, REG_ITMP1_XPTR);
2443 #ifdef ENABLE_VERIFIER
2444 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2445 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2446 iptr->sx.s23.s2.uc, 0);
2448 #endif /* ENABLE_VERIFIER */
2450 M_CALL_IMM(0); /* passing exception pc */
2451 M_POP(REG_ITMP2_XPC);
2453 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2457 case ICMD_GOTO: /* ... ==> ... */
2458 case ICMD_RET: /* ... ==> ... */
2460 #if defined(ENABLE_SSA)
2462 last_cmd_was_goto = true;
2464 /* In case of a Goto phimoves have to be inserted before the */
2467 codegen_emit_phi_moves(jd, bptr);
2470 emit_br(cd, iptr->dst.block);
2474 case ICMD_JSR: /* ... ==> ... */
2476 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
2480 case ICMD_IFNULL: /* ..., value ==> ... */
2481 case ICMD_IFNONNULL:
2483 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2485 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
2488 case ICMD_IFEQ: /* ..., value ==> ... */
2495 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2496 M_CMP_IMM(iptr->sx.val.i, s1);
2497 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
2500 case ICMD_IF_LEQ: /* ..., value ==> ... */
2502 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2503 if (iptr->sx.val.l == 0) {
2504 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2505 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2508 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2509 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2510 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2511 M_OR(REG_ITMP2, REG_ITMP1);
2513 emit_beq(cd, iptr->dst.block);
2516 case ICMD_IF_LLT: /* ..., value ==> ... */
2518 if (iptr->sx.val.l == 0) {
2519 /* If high 32-bit are less than zero, then the 64-bits
2521 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2523 emit_blt(cd, iptr->dst.block);
2526 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2527 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2528 emit_blt(cd, iptr->dst.block);
2530 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2531 emit_bult(cd, iptr->dst.block);
2535 case ICMD_IF_LLE: /* ..., value ==> ... */
2537 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2538 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2539 emit_blt(cd, iptr->dst.block);
2541 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2542 emit_bule(cd, iptr->dst.block);
2545 case ICMD_IF_LNE: /* ..., value ==> ... */
2547 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2548 if (iptr->sx.val.l == 0) {
2549 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2550 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2553 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2554 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2555 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2556 M_OR(REG_ITMP2, REG_ITMP1);
2558 emit_bne(cd, iptr->dst.block);
2561 case ICMD_IF_LGT: /* ..., value ==> ... */
2563 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2564 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2565 emit_bgt(cd, iptr->dst.block);
2567 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2568 emit_bugt(cd, iptr->dst.block);
2571 case ICMD_IF_LGE: /* ..., value ==> ... */
2573 if (iptr->sx.val.l == 0) {
2574 /* If high 32-bit are greater equal zero, then the
2576 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2578 emit_bge(cd, iptr->dst.block);
2581 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2582 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2583 emit_bgt(cd, iptr->dst.block);
2585 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2586 emit_buge(cd, iptr->dst.block);
2590 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2591 case ICMD_IF_ICMPNE:
2592 case ICMD_IF_ICMPLT:
2593 case ICMD_IF_ICMPGT:
2594 case ICMD_IF_ICMPGE:
2595 case ICMD_IF_ICMPLE:
2597 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2598 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2600 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);
2603 case ICMD_IF_ACMPEQ: /* ..., value, value ==> ... */
2604 case ICMD_IF_ACMPNE:
2606 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2607 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2609 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);
2612 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2614 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2615 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2616 M_INTMOVE(s1, REG_ITMP1);
2617 M_XOR(s2, REG_ITMP1);
2618 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2619 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2620 M_INTMOVE(s1, REG_ITMP2);
2621 M_XOR(s2, REG_ITMP2);
2622 M_OR(REG_ITMP1, REG_ITMP2);
2623 emit_beq(cd, iptr->dst.block);
2626 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2628 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2629 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2630 M_INTMOVE(s1, REG_ITMP1);
2631 M_XOR(s2, REG_ITMP1);
2632 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2633 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2634 M_INTMOVE(s1, REG_ITMP2);
2635 M_XOR(s2, REG_ITMP2);
2636 M_OR(REG_ITMP1, REG_ITMP2);
2637 emit_bne(cd, iptr->dst.block);
2640 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2642 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2643 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2645 emit_blt(cd, iptr->dst.block);
2646 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2647 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2650 emit_bult(cd, iptr->dst.block);
2653 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2655 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2656 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2658 emit_bgt(cd, iptr->dst.block);
2659 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2660 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2663 emit_bugt(cd, iptr->dst.block);
2666 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2668 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2669 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2671 emit_blt(cd, iptr->dst.block);
2672 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2673 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2676 emit_bule(cd, iptr->dst.block);
2679 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2681 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2682 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2684 emit_bgt(cd, iptr->dst.block);
2685 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2686 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2689 emit_buge(cd, iptr->dst.block);
2693 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2695 REPLACEMENT_POINT_RETURN(cd, iptr);
2696 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2697 M_INTMOVE(s1, REG_RESULT);
2698 goto nowperformreturn;
2700 case ICMD_LRETURN: /* ..., retvalue ==> ... */
2702 REPLACEMENT_POINT_RETURN(cd, iptr);
2703 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2704 M_LNGMOVE(s1, REG_RESULT_PACKED);
2705 goto nowperformreturn;
2707 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2709 REPLACEMENT_POINT_RETURN(cd, iptr);
2710 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2711 M_INTMOVE(s1, REG_RESULT);
2713 #ifdef ENABLE_VERIFIER
2714 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2715 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2716 iptr->sx.s23.s2.uc, 0);
2718 #endif /* ENABLE_VERIFIER */
2719 goto nowperformreturn;
2721 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2724 REPLACEMENT_POINT_RETURN(cd, iptr);
2725 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2726 goto nowperformreturn;
2728 case ICMD_RETURN: /* ... ==> ... */
2730 REPLACEMENT_POINT_RETURN(cd, iptr);
2736 p = cd->stackframesize;
2738 #if !defined(NDEBUG)
2739 emit_verbosecall_exit(jd);
2742 #if defined(ENABLE_THREADS)
2743 if (checksync && code_is_synchronized(code)) {
2744 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 8);
2746 /* we need to save the proper return value */
2747 switch (iptr->opc) {
2750 M_IST(REG_RESULT, REG_SP, rd->memuse * 8);
2754 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2758 emit_fstps_membase(cd, REG_SP, rd->memuse * 8);
2762 emit_fstpl_membase(cd, REG_SP, rd->memuse * 8);
2766 M_AST(REG_ITMP2, REG_SP, 0);
2767 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2770 /* and now restore the proper return value */
2771 switch (iptr->opc) {
2774 M_ILD(REG_RESULT, REG_SP, rd->memuse * 8);
2778 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2782 emit_flds_membase(cd, REG_SP, rd->memuse * 8);
2786 emit_fldl_membase(cd, REG_SP, rd->memuse * 8);
2792 /* restore saved registers */
2794 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2795 p--; M_ALD(rd->savintregs[i], REG_SP, p * 8);
2798 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2800 emit_fldl_membase(cd, REG_SP, p * 8);
2801 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2803 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2806 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2810 /* deallocate stack */
2812 if (cd->stackframesize)
2813 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
2820 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2823 branch_target_t *table;
2825 table = iptr->dst.table;
2827 l = iptr->sx.s23.s2.tablelow;
2828 i = iptr->sx.s23.s3.tablehigh;
2830 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2831 M_INTMOVE(s1, REG_ITMP1);
2834 M_ISUB_IMM(l, REG_ITMP1);
2840 M_CMP_IMM(i - 1, REG_ITMP1);
2841 emit_bugt(cd, table[0].block);
2843 /* build jump table top down and use address of lowest entry */
2848 dseg_add_target(cd, table->block);
2852 /* length of dataseg after last dseg_addtarget is used
2855 M_MOV_IMM(0, REG_ITMP2);
2857 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
2863 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2866 lookup_target_t *lookup;
2868 lookup = iptr->dst.lookup;
2870 i = iptr->sx.s23.s2.lookupcount;
2872 MCODECHECK((i<<2)+8);
2873 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2876 M_CMP_IMM(lookup->value, s1);
2877 emit_beq(cd, lookup->target.block);
2881 emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
2886 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
2888 REPLACEMENT_POINT_FORGC_BUILTIN(cd, iptr);
2890 bte = iptr->sx.s23.s3.bte;
2893 #if defined(ENABLE_ESCAPE_REASON)
2894 if (bte->fp == BUILTIN_escape_reason_new) {
2895 void set_escape_reasons(void *);
2896 M_ASUB_IMM(8, REG_SP);
2897 M_MOV_IMM(iptr->escape_reasons, REG_ITMP1);
2898 M_AST(EDX, REG_SP, 4);
2899 M_AST(REG_ITMP1, REG_SP, 0);
2900 M_MOV_IMM(set_escape_reasons, REG_ITMP1);
2902 M_ALD(EDX, REG_SP, 4);
2903 M_AADD_IMM(8, REG_SP);
2909 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2911 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2912 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
2913 case ICMD_INVOKEINTERFACE:
2915 REPLACEMENT_POINT_INVOKE(cd, iptr);
2917 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2918 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
2922 lm = iptr->sx.s23.s3.fmiref->p.method;
2923 md = lm->parseddesc;
2927 s3 = md->paramcount;
2929 MCODECHECK((s3 << 1) + 64);
2931 /* copy arguments to registers or stack location */
2933 for (s3 = s3 - 1; s3 >= 0; s3--) {
2934 var = VAR(iptr->sx.s23.s2.args[s3]);
2936 /* Already Preallocated (ARGVAR) ? */
2937 if (var->flags & PREALLOC)
2939 if (IS_INT_LNG_TYPE(var->type)) {
2940 if (!md->params[s3].inmemory) {
2941 log_text("No integer argument registers available!");
2945 if (IS_2_WORD_TYPE(var->type)) {
2946 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
2947 M_LST(d, REG_SP, md->params[s3].regoff);
2949 d = emit_load(jd, iptr, var, REG_ITMP1);
2950 M_IST(d, REG_SP, md->params[s3].regoff);
2955 if (!md->params[s3].inmemory) {
2956 s1 = md->params[s3].regoff;
2957 d = emit_load(jd, iptr, var, s1);
2961 d = emit_load(jd, iptr, var, REG_FTMP1);
2962 if (IS_2_WORD_TYPE(var->type))
2963 M_DST(d, REG_SP, md->params[s3].regoff);
2965 M_FST(d, REG_SP, md->params[s3].regoff);
2970 switch (iptr->opc) {
2972 d = md->returntype.type;
2974 if (bte->stub == NULL) {
2975 M_MOV_IMM(bte->fp, REG_ITMP1);
2978 M_MOV_IMM(bte->stub, REG_ITMP1);
2982 #if defined(ENABLE_ESCAPE_CHECK)
2983 if (bte->opcode == ICMD_NEW || bte->opcode == ICMD_NEWARRAY) {
2984 /*emit_escape_annotate_object(cd, m);*/
2989 case ICMD_INVOKESPECIAL:
2990 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
2991 emit_nullpointer_check(cd, iptr, REG_ITMP1);
2994 case ICMD_INVOKESTATIC:
2996 unresolved_method *um = iptr->sx.s23.s3.um;
2998 patcher_add_patch_ref(jd, PATCHER_invokestatic_special,
3002 d = md->returntype.type;
3005 disp = (ptrint) lm->stubroutine;
3006 d = lm->parseddesc->returntype.type;
3009 M_MOV_IMM(disp, REG_ITMP2);
3013 case ICMD_INVOKEVIRTUAL:
3014 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3015 emit_nullpointer_check(cd, iptr, s1);
3018 unresolved_method *um = iptr->sx.s23.s3.um;
3020 patcher_add_patch_ref(jd, PATCHER_invokevirtual, um, 0);
3023 d = md->returntype.type;
3026 s1 = OFFSET(vftbl_t, table[0]) +
3027 sizeof(methodptr) * lm->vftblindex;
3028 d = md->returntype.type;
3031 M_ALD(REG_METHODPTR, REG_ITMP1,
3032 OFFSET(java_object_t, vftbl));
3033 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3037 case ICMD_INVOKEINTERFACE:
3038 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3039 emit_nullpointer_check(cd, iptr, s1);
3042 unresolved_method *um = iptr->sx.s23.s3.um;
3044 patcher_add_patch_ref(jd, PATCHER_invokeinterface, um, 0);
3048 d = md->returntype.type;
3051 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3052 sizeof(methodptr) * lm->clazz->index;
3054 s2 = sizeof(methodptr) * (lm - lm->clazz->methods);
3056 d = md->returntype.type;
3059 M_ALD(REG_METHODPTR, REG_ITMP1,
3060 OFFSET(java_object_t, vftbl));
3061 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3062 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3067 /* store size of call code in replacement point */
3069 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3070 REPLACEMENT_POINT_FORGC_BUILTIN_RETURN(cd, iptr);
3072 /* d contains return type */
3074 if (d != TYPE_VOID) {
3075 #if defined(ENABLE_SSA)
3076 if ((ls == NULL) /* || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) */ ||
3077 (ls->lifetime[iptr->dst.varindex].type != UNUSED))
3078 /* a "living" stackslot */
3081 if (IS_INT_LNG_TYPE(d)) {
3082 if (IS_2_WORD_TYPE(d)) {
3083 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3084 M_LNGMOVE(REG_RESULT_PACKED, s1);
3087 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3088 M_INTMOVE(REG_RESULT, s1);
3092 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3094 emit_store_dst(jd, iptr, s1);
3100 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3102 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3103 /* object type cast-check */
3106 vftbl_t *supervftbl;
3109 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3115 super = iptr->sx.s23.s3.c.cls;
3116 superindex = super->index;
3117 supervftbl = super->vftbl;
3120 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3122 /* if class is not resolved, check which code to call */
3124 if (super == NULL) {
3126 emit_label_beq(cd, BRANCH_LABEL_1);
3128 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3129 iptr->sx.s23.s3.c.ref, 0);
3131 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3132 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3133 emit_label_beq(cd, BRANCH_LABEL_2);
3136 /* interface checkcast code */
3138 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3139 if (super != NULL) {
3141 emit_label_beq(cd, BRANCH_LABEL_3);
3144 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3146 if (super == NULL) {
3147 patcher_add_patch_ref(jd, PATCHER_checkcast_interface,
3148 iptr->sx.s23.s3.c.ref,
3153 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3154 M_ISUB_IMM32(superindex, REG_ITMP3);
3155 /* XXX do we need this one? */
3157 emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);
3159 M_ALD32(REG_ITMP3, REG_ITMP2,
3160 OFFSET(vftbl_t, interfacetable[0]) -
3161 superindex * sizeof(methodptr*));
3163 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);
3166 emit_label_br(cd, BRANCH_LABEL_4);
3168 emit_label(cd, BRANCH_LABEL_3);
3171 /* class checkcast code */
3173 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3174 if (super == NULL) {
3175 emit_label(cd, BRANCH_LABEL_2);
3179 emit_label_beq(cd, BRANCH_LABEL_5);
3182 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3184 if (super == NULL) {
3185 patcher_add_patch_ref(jd, PATCHER_checkcast_class,
3186 iptr->sx.s23.s3.c.ref,
3190 M_MOV_IMM(supervftbl, REG_ITMP3);
3192 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3194 /* if (s1 != REG_ITMP1) { */
3195 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3196 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3197 /* #if defined(ENABLE_THREADS) */
3198 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3200 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
3203 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3204 M_ISUB(REG_ITMP3, REG_ITMP2);
3205 M_MOV_IMM(supervftbl, REG_ITMP3);
3206 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3210 M_CMP(REG_ITMP3, REG_ITMP2);
3211 emit_classcast_check(cd, iptr, BRANCH_ULE, REG_ITMP3, s1);
3214 emit_label(cd, BRANCH_LABEL_5);
3217 if (super == NULL) {
3218 emit_label(cd, BRANCH_LABEL_1);
3219 emit_label(cd, BRANCH_LABEL_4);
3222 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3225 /* array type cast-check */
3227 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3228 M_AST(s1, REG_SP, 0 * 4);
3230 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3231 patcher_add_patch_ref(jd, PATCHER_builtin_arraycheckcast,
3232 iptr->sx.s23.s3.c.ref, 0);
3235 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3236 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3239 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3241 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);
3243 d = codegen_reg_of_dst(jd, iptr, s1);
3247 emit_store_dst(jd, iptr, d);
3250 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3254 vftbl_t *supervftbl;
3257 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3263 super = iptr->sx.s23.s3.c.cls;
3264 superindex = super->index;
3265 supervftbl = super->vftbl;
3268 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3269 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
3272 M_INTMOVE(s1, REG_ITMP1);
3278 /* if class is not resolved, check which code to call */
3280 if (super == NULL) {
3282 emit_label_beq(cd, BRANCH_LABEL_1);
3284 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3285 iptr->sx.s23.s3.c.ref, 0);
3287 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3288 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3289 emit_label_beq(cd, BRANCH_LABEL_2);
3292 /* interface instanceof code */
3294 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3295 if (super != NULL) {
3297 emit_label_beq(cd, BRANCH_LABEL_3);
3300 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3302 if (super == NULL) {
3303 patcher_add_patch_ref(jd, PATCHER_instanceof_interface,
3304 iptr->sx.s23.s3.c.ref, 0);
3308 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3309 M_ISUB_IMM32(superindex, REG_ITMP3);
3312 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3313 6 /* jcc */ + 5 /* mov_imm_reg */);
3316 M_ALD32(REG_ITMP1, REG_ITMP1,
3317 OFFSET(vftbl_t, interfacetable[0]) -
3318 superindex * sizeof(methodptr*));
3320 /* emit_setcc_reg(cd, CC_A, d); */
3321 /* emit_jcc(cd, CC_BE, 5); */
3326 emit_label_br(cd, BRANCH_LABEL_4);
3328 emit_label(cd, BRANCH_LABEL_3);
3331 /* class instanceof code */
3333 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3334 if (super == NULL) {
3335 emit_label(cd, BRANCH_LABEL_2);
3339 emit_label_beq(cd, BRANCH_LABEL_5);
3342 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3344 if (super == NULL) {
3345 patcher_add_patch_ref(jd, PATCHER_instanceof_class,
3346 iptr->sx.s23.s3.c.ref, 0);
3349 M_MOV_IMM(supervftbl, REG_ITMP2);
3351 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3352 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3353 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3355 M_ISUB(REG_ITMP2, REG_ITMP1);
3356 M_CLR(d); /* may be REG_ITMP2 */
3357 M_CMP(REG_ITMP3, REG_ITMP1);
3362 emit_label(cd, BRANCH_LABEL_5);
3365 if (super == NULL) {
3366 emit_label(cd, BRANCH_LABEL_1);
3367 emit_label(cd, BRANCH_LABEL_4);
3370 emit_store_dst(jd, iptr, d);
3374 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3376 /* check for negative sizes and copy sizes to stack if necessary */
3378 MCODECHECK((iptr->s1.argcount << 1) + 64);
3380 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3381 /* copy SAVEDVAR sizes to stack */
3382 var = VAR(iptr->sx.s23.s2.args[s1]);
3384 /* Already Preallocated? */
3385 if (!(var->flags & PREALLOC)) {
3386 if (var->flags & INMEMORY) {
3387 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff);
3388 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3391 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3395 /* is a patcher function set? */
3397 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3398 patcher_add_patch_ref(jd, PATCHER_builtin_multianewarray,
3399 iptr->sx.s23.s3.c.ref, 0);
3405 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3407 /* a0 = dimension count */
3409 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3411 /* a1 = arraydescriptor */
3413 M_IST_IMM(disp, REG_SP, 1 * 4);
3415 /* a2 = pointer to dimensions = stack pointer */
3417 M_MOV(REG_SP, REG_ITMP1);
3418 M_AADD_IMM(3 * 4, REG_ITMP1);
3419 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3421 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3424 /* check for exception before result assignment */
3426 emit_exception_check(cd, iptr);
3428 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3429 M_INTMOVE(REG_RESULT, s1);
3430 emit_store_dst(jd, iptr, s1);
3433 #if defined(ENABLE_SSA)
3434 case ICMD_GETEXCEPTION:
3435 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
3436 M_INTMOVE(REG_ITMP1, d);
3437 emit_store_dst(jd, iptr, d);
3441 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3446 } /* for instruction */
3450 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3453 #if defined(ENABLE_SSA)
3456 /* by edge splitting, in Blocks with phi moves there can only */
3457 /* be a goto as last command, no other Jump/Branch Command */
3459 if (!last_cmd_was_goto)
3460 codegen_emit_phi_moves(jd, bptr);
3465 /* At the end of a basic block we may have to append some nops,
3466 because the patcher stub calling code might be longer than the
3467 actual instruction. So codepatching does not change the
3468 following block unintentionally. */
3470 if (cd->mcodeptr < cd->lastmcodeptr) {
3471 while (cd->mcodeptr < cd->lastmcodeptr) {
3476 } /* if (bptr -> flags >= BBREACHED) */
3477 } /* for basic block */
3479 /* generate stubs */
3481 emit_patcher_traps(jd);
3483 /* everything's ok */
3489 /* codegen_emit_stub_native ****************************************************
3491 Emits a stub routine which calls a native method.
3493 *******************************************************************************/
/*
 * codegen_emit_stub_native: emit a machine-code stub that calls a native
 * method on behalf of the JIT (i386 port).
 *
 * NOTE(review): this listing is a lossy excerpt -- each line carries its
 * original source line number and many intervening lines (braces, `break`
 * statements, case labels, the actual M_CALL emissions, etc.) have been
 * elided. Comments below describe only what the visible lines establish;
 * anything inferred is marked as an assumption to confirm.
 *
 * Parameters (from the visible signature):
 *   jd         - JIT data of the method being compiled
 *   nmd        - descriptor of the native function's parameter layout
 *                (nmd->params[j].regoff used as copy destination below)
 *   f          - the native function; its address is placed in the data
 *                segment via dseg_add_functionptr
 *   skipparams - offset between Java and native parameter indices
 *                (j = i + skipparams in the copy loop); presumably the
 *                number of implicit leading native args (env/class) --
 *                TODO confirm against callers
 */
3495 void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f, int skipparams)
3501 int i, j; /* count variables */
3505 /* get required compiler data */
3511 /* set some variables */
3515 /* calculate stackframe size */
/* Frame slots (units of SIZEOF_VOID_P): in-stack stackframeinfo_t,
   a localref_table, 4 outgoing argument words for the
   codegen_start/finish_native_call helpers; further terms elided. */
3517 cd->stackframesize =
3518 sizeof(stackframeinfo_t) / SIZEOF_VOID_P +
3519 sizeof(localref_table) / SIZEOF_VOID_P +
3520 4 + /* 4 arguments (start_native_call) */
3523 /* keep stack 16-byte aligned */
/* NOTE(review): an odd slot count makes stackframesize*8 + 4 (the frame
   size used below) plus the caller-pushed 4-byte return address a
   multiple of 16 -- confirm this is the intended alignment scheme. */
3525 ALIGN_ODD(cd->stackframesize);
3527 /* create method header */
/* Method header in the data segment: codeinfo pointer, frame size in
   bytes, and zeroed IsLeaf/IntSave/FltSave fields (no callee-saved
   registers are preserved by this stub's header). */
3529 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3530 (void) dseg_add_unique_s4(cd, cd->stackframesize * 8 + 4); /* FrameSize */
3531 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3532 (void) dseg_add_unique_s4(cd, 0); /* IntSave */
3533 (void) dseg_add_unique_s4(cd, 0); /* FltSave */
3535 #if defined(ENABLE_PROFILING)
3536 /* generate native method profiling code */
3538 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
3539 /* count frequency */
/* Increments codeinfo.frequency in memory each time the stub runs. */
3541 M_MOV_IMM(code, REG_ITMP1);
3542 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
3546 /* calculate stackframe size for native function */
/* Allocate the frame computed above (bytes = slots*8 + 4). */
3548 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
3550 /* Mark the whole fpu stack as free for native functions (only for saved */
3551 /* register count == 0). */
3553 emit_ffree_reg(cd, 0);
3554 emit_ffree_reg(cd, 1);
3555 emit_ffree_reg(cd, 2);
3556 emit_ffree_reg(cd, 3);
3557 emit_ffree_reg(cd, 4);
3558 emit_ffree_reg(cd, 5);
3559 emit_ffree_reg(cd, 6);
3560 emit_ffree_reg(cd, 7);
3562 #if defined(ENABLE_GC_CACAO)
3563 /* remember callee saved int registers in stackframeinfo (GC may need to */
3564 /* recover them during a collection). */
/* stackframeinfo_t lives at the top of this frame; store the saved
   integer registers into its intregs[] array (4 bytes each on i386). */
3566 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3567 OFFSET(stackframeinfo_t, intregs);
3569 for (i = 0; i < INT_SAV_CNT; i++)
3570 M_AST(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3573 /* prepare data structures for native function call */
/* Pass the current stack pointer (arg 0) and a 0 (arg 1) to
   codegen_start_native_call; the call instruction itself is elided. */
3575 M_MOV(REG_SP, REG_ITMP1);
3576 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3577 M_IST_IMM(0, REG_SP, 1 * 4);
3580 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
3583 /* remember class argument */
/* NOTE(review): presumably codegen_start_native_call returns the class
   object in REG_RESULT for static methods; stashed in REG_ITMP3 until
   it is stored as the native 'jclass' argument below -- confirm. */
3585 if (m->flags & ACC_STATIC)
3586 M_MOV(REG_RESULT, REG_ITMP3);
3588 /* Copy or spill arguments to new locations. */
/* Walk Java params backwards; j indexes the corresponding native param
   (shifted by skipparams). Register-allocated params are skipped here
   (the branch body at the elided line is not visible). */
3590 for (i = md->paramcount - 1, j = i + skipparams; i >= 0; i--, j--) {
3591 if (!md->params[i].inmemory)
/* s1 = source offset in the CALLER's frame (past this stub's frame and
   the 8 bytes of return address + elided extra word -- TODO confirm);
   s2 = destination offset in this frame's outgoing argument area. */
3594 s1 = md->params[i].regoff + cd->stackframesize * 8 + 8;
3595 s2 = nmd->params[j].regoff;
3597 /* float/double in memory can be copied like int/longs */
3599 switch (md->paramtypes[i].type) {
/* 32-bit params: single-word copy through REG_ITMP1. */
3603 M_ILD(REG_ITMP1, REG_SP, s1);
3604 M_IST(REG_ITMP1, REG_SP, s2);
/* 64-bit params: two-word copy through the ITMP1/ITMP2 register pair. */
3608 M_LLD(REG_ITMP12_PACKED, REG_SP, s1);
3609 M_LST(REG_ITMP12_PACKED, REG_SP, s2);
3614 /* Handle native Java methods. */
/* For JNI natives, prepend the implicit arguments. */
3616 if (m->flags & ACC_NATIVE) {
3617 /* if function is static, put class into second argument */
3619 if (m->flags & ACC_STATIC)
3620 M_AST(REG_ITMP3, REG_SP, 1 * 4);
3622 /* put env into first argument */
3624 M_AST_IMM(_Jv_env, REG_SP, 0 * 4);
3627 /* Call the native function. */
/* The function address is fetched indirectly from the data segment.
   NOTE(review): the immediate 0 loaded into REG_ITMP3 looks like a
   placeholder for the dseg base, patched or resolved elsewhere --
   confirm; the actual call instruction is elided from this listing. */
3629 disp = dseg_add_functionptr(cd, f);
3630 emit_mov_imm_reg(cd, 0, REG_ITMP3);
3632 M_ALD(REG_ITMP1, REG_ITMP3, disp);
3635 /* save return value */
/* Park the native return value in this frame (offset 1*8) so it
   survives the codegen_finish_native_call helper invocation below. */
3637 switch (md->returntype.type) {
/* Sub-int return types must be normalized to a full 32-bit int:
   zero-extend boolean/char, sign-extend byte/short (JNI widening). */
3640 switch (md->returntype.decltype) {
3641 case PRIMITIVETYPE_BOOLEAN:
3642 M_BZEXT(REG_RESULT, REG_RESULT);
3644 case PRIMITIVETYPE_BYTE:
3645 M_BSEXT(REG_RESULT, REG_RESULT);
3647 case PRIMITIVETYPE_CHAR:
3648 M_CZEXT(REG_RESULT, REG_RESULT);
3650 case PRIMITIVETYPE_SHORT:
3651 M_SSEXT(REG_RESULT, REG_RESULT);
3654 M_IST(REG_RESULT, REG_SP, 1 * 8);
3657 M_LST(REG_RESULT_PACKED, REG_SP, 1 * 8);
/* Float/double results come back on the x87 stack; spill to memory. */
3660 emit_fsts_membase(cd, REG_SP, 1 * 8);
3663 emit_fstl_membase(cd, REG_SP, 1 * 8);
3669 /* remove native stackframe info */
/* Same sp/0 argument setup as for codegen_start_native_call above;
   the call to codegen_finish_native_call is elided. */
3671 M_MOV(REG_SP, REG_ITMP1);
3672 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3673 M_IST_IMM(0, REG_SP, 1 * 4);
3676 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
/* NOTE(review): presumably REG_RESULT here holds a pending exception
   pointer returned by finish_native_call; it is moved to ITMP2 before
   the parked method return value is restored -- confirm. */
3678 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
3680 /* restore return value */
3682 switch (md->returntype.type) {
3685 M_ILD(REG_RESULT, REG_SP, 1 * 8);
3688 M_LLD(REG_RESULT_PACKED, REG_SP, 1 * 8);
3691 emit_flds_membase(cd, REG_SP, 1 * 8);
3694 emit_fldl_membase(cd, REG_SP, 1 * 8);
3700 #if defined(ENABLE_GC_CACAO)
3701 /* restore callee saved int registers from stackframeinfo (GC might have */
3702 /* modified them during a collection). */
3704 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3705 OFFSET(stackframeinfo_t, intregs);
3707 for (i = 0; i < INT_SAV_CNT; i++)
3708 M_ALD(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
/* Deallocate the stub frame (matches the M_ASUB_IMM in the prologue). */
3711 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
3713 /* check for exception */
3720 /* handle exception */
/* Exception path: exception object -> XPTR register; faulting PC is
   taken from the return address on the stack, minus 2.
   NOTE(review): the -2 adjustment presumably points XPC back inside
   the call site (size-related fudge) -- confirm against the i386
   exception handler's expectations. Control transfer to
   asm_handle_nat_exception (jump/call) is elided from this listing. */
3722 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
3723 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
3724 M_ASUB_IMM(2, REG_ITMP2_XPC);
3726 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
3732 * These are local overrides for various environment variables in Emacs.
3733 * Please do not remove this and leave it at the end of the file, where
3734 * Emacs will automagically detect them.
3735 * ---------------------------------------------------------------------
3738 * indent-tabs-mode: t
3742 * vim:noexpandtab:sw=4:ts=4: