1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007, 2008
4 CACAOVM - Verein zur Foerderung der freien virtuellen Maschine CACAO
6 This file is part of CACAO.
8 This program is free software; you can redistribute it and/or
9 modify it under the terms of the GNU General Public License as
10 published by the Free Software Foundation; either version 2, or (at
11 your option) any later version.
13 This program is distributed in the hope that it will be useful, but
14 WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with this program; if not, write to the Free Software
20 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
34 #include "vm/jit/i386/md-abi.h"
36 #include "vm/jit/i386/codegen.h"
37 #include "vm/jit/i386/emit.h"
39 #include "mm/memory.h"
40 #include "native/jni.h"
41 #include "native/localref.h"
42 #include "native/native.h"
44 #include "threads/lock-common.h"
46 #include "vm/builtin.h"
47 #include "vm/exceptions.hpp"
48 #include "vm/global.h"
49 #include "vm/loader.h"
50 #include "vm/options.h"
51 #include "vm/primitive.hpp"
55 #include "vm/jit/abi.h"
56 #include "vm/jit/asmpart.h"
57 #include "vm/jit/codegen-common.h"
58 #include "vm/jit/dseg.h"
59 #include "vm/jit/emit-common.h"
60 #include "vm/jit/jit.h"
61 #include "vm/jit/jitcache.h"
62 #include "vm/jit/linenumbertable.h"
63 #include "vm/jit/parse.h"
64 #include "vm/jit/patcher-common.h"
65 #include "vm/jit/reg.h"
66 #include "vm/jit/replace.h"
67 #include "vm/jit/stacktrace.hpp"
68 #include "vm/jit/trap.h"
70 #if defined(ENABLE_SSA)
71 # include "vm/jit/optimizing/lsra.h"
72 # include "vm/jit/optimizing/ssa.h"
73 #elif defined(ENABLE_LSRA)
74 # include "vm/jit/allocator/lsra.h"
78 /* codegen_emit ****************************************************************
80 Generates machine code.
82 *******************************************************************************/
84 bool codegen_emit(jitdata *jd)
90 s4 len, s1, s2, s3, d, disp;
91 int align_off; /* offset for alignment compensation */
96 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
97 builtintable_entry *bte;
100 unresolved_field *uf;
103 #if defined(ENABLE_SSA)
105 bool last_cmd_was_goto;
107 last_cmd_was_goto = false;
111 /* get required compiler data */
118 /* prevent compiler warnings */
129 s4 savedregs_num = 0;
132 /* space to save used callee saved registers */
134 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
135 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
137 cd->stackframesize = rd->memuse + savedregs_num;
139 #if defined(ENABLE_THREADS)
140 /* space to save argument of monitor_enter */
142 if (checksync && code_is_synchronized(code))
143 cd->stackframesize++;
146 /* create method header */
148 /* Keep stack of non-leaf functions 16-byte aligned. */
150 if (!code_is_leafmethod(code)) {
151 ALIGN_ODD(cd->stackframesize);
154 align_off = cd->stackframesize ? 4 : 0;
156 #if defined(ENABLE_JITCACHE)
157 disp = dseg_add_unique_address(cd, code); /* CodeinfoPointer */
158 jitcache_add_cached_ref(code, CRT_CODEINFO, 0, disp);
160 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
162 (void) dseg_add_unique_s4(
163 cd, cd->stackframesize * 8 + align_off); /* FrameSize */
165 code->synchronizedoffset = rd->memuse * 8;
167 /* REMOVEME: We still need it for exception handling in assembler. */
169 if (code_is_leafmethod(code))
170 (void) dseg_add_unique_s4(cd, 1); /* IsLeaf */
172 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
174 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
175 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
177 #if defined(ENABLE_PROFILING)
178 /* generate method profiling code */
180 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
181 /* count frequency */
183 M_MOV_IMM(code, REG_ITMP3);
184 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CODEINFO, NULL);
186 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
190 /* create stack frame (if necessary) */
192 if (cd->stackframesize)
194 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
196 /* save return address and used callee saved registers */
198 p = cd->stackframesize;
199 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
200 p--; M_AST(rd->savintregs[i], REG_SP, p * 8);
202 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
203 p--; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 8);
206 /* take arguments out of register or stack frame */
211 for (p = 0, l = 0; p < md->paramcount; p++) {
212 t = md->paramtypes[p].type;
214 varindex = jd->local_map[l * 5 + t];
215 #if defined(ENABLE_SSA)
217 if (varindex != UNUSED)
218 varindex = ls->var_0[varindex];
219 if ((varindex != UNUSED) && (ls->lifetime[varindex].type == UNUSED))
224 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
227 if (varindex == UNUSED)
231 s1 = md->params[p].regoff;
234 if (IS_INT_LNG_TYPE(t)) { /* integer args */
235 if (!md->params[p].inmemory) { /* register arguments */
236 log_text("integer register argument");
238 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
239 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
241 else { /* reg arg -> spilled */
242 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
246 if (!(var->flags & INMEMORY)) {
248 cd->stackframesize * 8 + 4 + align_off + s1);
251 if (!IS_2_WORD_TYPE(t)) {
252 #if defined(ENABLE_SSA)
253 /* no copy avoiding by now possible with SSA */
255 emit_mov_membase_reg( /* + 4 for return address */
257 cd->stackframesize * 8 + s1 + 4 + align_off,
259 emit_mov_reg_membase(
260 cd, REG_ITMP1, REG_SP, var->vv.regoff);
263 #endif /*defined(ENABLE_SSA)*/
264 /* reuse stackslot */
265 var->vv.regoff = cd->stackframesize * 8 + 4 +
270 #if defined(ENABLE_SSA)
271 /* no copy avoiding by now possible with SSA */
273 emit_mov_membase_reg( /* + 4 for return address */
275 cd->stackframesize * 8 + s1 + 4 + align_off,
277 emit_mov_reg_membase(
278 cd, REG_ITMP1, REG_SP, var->vv.regoff);
279 emit_mov_membase_reg( /* + 4 for return address */
281 cd->stackframesize * 8 + s1 + 4 + 4 + align_off,
283 emit_mov_reg_membase(
284 cd, REG_ITMP1, REG_SP, var->vv.regoff + 4);
287 #endif /*defined(ENABLE_SSA)*/
288 /* reuse stackslot */
289 var->vv.regoff = cd->stackframesize * 8 + 8 + s1;
294 else { /* floating args */
295 if (!md->params[p].inmemory) { /* register arguments */
296 log_text("There are no float argument registers!");
298 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
299 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
300 } else { /* reg arg -> spilled */
301 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 8 */
305 else { /* stack arguments */
306 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
310 cd->stackframesize * 8 + s1 + 4 + align_off);
312 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
318 cd->stackframesize * 8 + s1 + 4 + align_off);
320 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
323 } else { /* stack-arg -> spilled */
324 #if defined(ENABLE_SSA)
325 /* no copy avoiding by now possible with SSA */
327 emit_mov_membase_reg(
329 cd->stackframesize * 8 + s1 + 4 + align_off,
331 emit_mov_reg_membase(
332 cd, REG_ITMP1, REG_SP, var->vv.regoff);
336 cd->stackframesize * 8 + s1 + 4 + align_off);
337 emit_fstps_membase(cd, REG_SP, var->vv.regoff);
342 cd->stackframesize * 8 + s1 + 4 + align_off);
343 emit_fstpl_membase(cd, REG_SP, var->vv.regoff);
347 #endif /*defined(ENABLE_SSA)*/
348 /* reuse stackslot */
349 var->vv.regoff = cd->stackframesize * 8 + 4 +
356 /* call monitorenter function */
358 #if defined(ENABLE_THREADS)
359 if (checksync && code_is_synchronized(code)) {
362 if (m->flags & ACC_STATIC) {
363 M_MOV_IMM(&m->clazz->object.header, REG_ITMP1);
364 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_OBJECT_HEADER, m->clazz);
367 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 8 + 4 + align_off);
370 M_ALD_MEM(REG_ITMP1, TRAP_NullPointerException);
373 M_AST(REG_ITMP1, REG_SP, s1 * 8);
374 M_AST(REG_ITMP1, REG_SP, 0 * 4);
375 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
376 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP,
377 builtintable_get_internal(LOCK_monitor_enter));
383 emit_verbosecall_enter(jd);
388 #if defined(ENABLE_SSA)
389 /* with SSA the Header is Basic Block 0 - insert phi Moves if necessary */
391 codegen_emit_phi_moves(jd, ls->basicblocks[0]);
394 /* end of header generation */
396 /* create replacement points */
398 REPLACEMENT_POINTS_INIT(cd, jd);
400 /* walk through all basic blocks */
402 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
404 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
406 if (bptr->flags >= BBREACHED) {
407 /* branch resolving */
409 codegen_resolve_branchrefs(cd, bptr);
411 /* handle replacement points */
413 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
415 #if defined(ENABLE_REPLACEMENT)
416 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
417 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
419 emit_trap_countdown(cd, &(m->hitcountdown));
424 /* copy interface registers to their destination */
429 #if defined(ENABLE_PROFILING)
430 /* generate basic block profiling code */
432 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
433 /* count frequency */
435 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
436 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
440 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
441 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
444 # if defined(ENABLE_SSA)
446 last_cmd_was_goto = false;
450 var = VAR(bptr->invars[len]);
451 if (bptr->type != BBTYPE_STD) {
452 if (!IS_2_WORD_TYPE(var->type)) {
453 #if !defined(ENABLE_SSA)
454 if (bptr->type == BBTYPE_EXH) {
455 d = codegen_reg_of_var(0, var, REG_ITMP1);
456 M_INTMOVE(REG_ITMP1, d);
457 emit_store(jd, NULL, var, d);
462 log_text("copy interface registers(EXH, SBR): longs \
463 have to be in memory (begin 1)");
471 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
475 var = VAR(bptr->invars[len]);
476 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
477 if (!IS_2_WORD_TYPE(var->type)) {
478 if (bptr->type == BBTYPE_EXH) {
479 d = codegen_reg_of_var(0, var, REG_ITMP1);
480 M_INTMOVE(REG_ITMP1, d);
481 emit_store(jd, NULL, var, d);
485 log_text("copy interface registers: longs have to be in \
492 assert((var->flags & INOUT));
497 /* walk through all instructions */
502 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
503 if (iptr->line != currentline) {
504 linenumbertable_list_entry_add(cd, iptr->line);
505 currentline = iptr->line;
508 MCODECHECK(1024); /* 1kB should be enough */
511 case ICMD_NOP: /* ... ==> ... */
512 case ICMD_POP: /* ..., value ==> ... */
513 case ICMD_POP2: /* ..., value, value ==> ... */
516 case ICMD_INLINE_START:
518 REPLACEMENT_POINT_INLINE_START(cd, iptr);
521 case ICMD_INLINE_BODY:
523 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
524 linenumbertable_list_entry_add_inline_start(cd, iptr);
525 linenumbertable_list_entry_add(cd, iptr->line);
528 case ICMD_INLINE_END:
530 linenumbertable_list_entry_add_inline_end(cd, iptr);
531 linenumbertable_list_entry_add(cd, iptr->line);
534 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
536 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
537 emit_nullpointer_check(cd, iptr, s1);
540 /* constant operations ************************************************/
542 case ICMD_ICONST: /* ... ==> ..., constant */
544 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
545 ICONST(d, iptr->sx.val.i);
546 emit_store_dst(jd, iptr, d);
549 case ICMD_LCONST: /* ... ==> ..., constant */
551 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
552 LCONST(d, iptr->sx.val.l);
553 emit_store_dst(jd, iptr, d);
556 case ICMD_FCONST: /* ... ==> ..., constant */
558 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
559 if (iptr->sx.val.f == 0.0) {
563 if (iptr->sx.val.i == 0x80000000) {
567 } else if (iptr->sx.val.f == 1.0) {
570 } else if (iptr->sx.val.f == 2.0) {
576 disp = dseg_add_float(cd, iptr->sx.val.f);
577 emit_mov_imm_reg(cd, 0, REG_ITMP1);
579 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_ENTRYPOINT, NULL);
580 emit_flds_membase(cd, REG_ITMP1, disp);
582 emit_store_dst(jd, iptr, d);
585 case ICMD_DCONST: /* ... ==> ..., constant */
587 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
588 if (iptr->sx.val.d == 0.0) {
592 if (iptr->sx.val.l == 0x8000000000000000LL) {
596 } else if (iptr->sx.val.d == 1.0) {
599 } else if (iptr->sx.val.d == 2.0) {
605 disp = dseg_add_double(cd, iptr->sx.val.d);
606 emit_mov_imm_reg(cd, 0, REG_ITMP1);
608 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_ENTRYPOINT, NULL);
609 emit_fldl_membase(cd, REG_ITMP1, disp);
611 emit_store_dst(jd, iptr, d);
614 case ICMD_ACONST: /* ... ==> ..., constant */
616 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
618 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
619 patcher_add_patch_ref(jd, PATCHER_aconst,
620 iptr->sx.val.c.ref, 0);
625 if (iptr->sx.val.anyptr == NULL)
629 M_MOV_IMM(iptr->sx.val.anyptr, d);
630 JITCACHE_ADD_CACHED_REF_JD(
632 (iptr->flags.bits & INS_FLAG_CLASS) ? CRT_CLASSINFO
634 (iptr->flags.bits & INS_FLAG_CLASS) ? iptr->sx.val.c.cls
635 : iptr->sx.val.stringconst);
638 emit_store_dst(jd, iptr, d);
642 /* load/store/copy/move operations ************************************/
660 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
665 /* integer operations *************************************************/
667 case ICMD_INEG: /* ..., value ==> ..., - value */
669 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
670 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
673 emit_store_dst(jd, iptr, d);
676 case ICMD_LNEG: /* ..., value ==> ..., - value */
678 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
679 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
681 M_NEG(GET_LOW_REG(d));
682 M_IADDC_IMM(0, GET_HIGH_REG(d));
683 M_NEG(GET_HIGH_REG(d));
684 emit_store_dst(jd, iptr, d);
687 case ICMD_I2L: /* ..., value ==> ..., value */
689 s1 = emit_load_s1(jd, iptr, EAX);
690 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
693 M_LNGMOVE(EAX_EDX_PACKED, d);
694 emit_store_dst(jd, iptr, d);
697 case ICMD_L2I: /* ..., value ==> ..., value */
699 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
700 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
702 emit_store_dst(jd, iptr, d);
705 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
707 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
708 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
712 emit_store_dst(jd, iptr, d);
715 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
717 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
718 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
720 emit_store_dst(jd, iptr, d);
723 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
725 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
726 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
728 emit_store_dst(jd, iptr, d);
732 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
734 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
735 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
736 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
743 emit_store_dst(jd, iptr, d);
747 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
748 /* sx.val.i = constant */
750 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
751 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
753 /* `inc reg' is slower on p4's (regarding to ia32
754 optimization reference manual and benchmarks) and as
758 M_IADD_IMM(iptr->sx.val.i, d);
759 emit_store_dst(jd, iptr, d);
762 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
764 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
765 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
766 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
767 M_INTMOVE(s1, GET_LOW_REG(d));
768 M_IADD(s2, GET_LOW_REG(d));
769 /* don't use REG_ITMP1 */
770 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
771 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
772 M_INTMOVE(s1, GET_HIGH_REG(d));
773 M_IADDC(s2, GET_HIGH_REG(d));
774 emit_store_dst(jd, iptr, d);
777 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
778 /* sx.val.l = constant */
780 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
781 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
783 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
784 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
785 emit_store_dst(jd, iptr, d);
788 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
790 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
791 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
792 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
794 M_INTMOVE(s1, REG_ITMP1);
795 M_ISUB(s2, REG_ITMP1);
796 M_INTMOVE(REG_ITMP1, d);
802 emit_store_dst(jd, iptr, d);
805 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
806 /* sx.val.i = constant */
808 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
809 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
811 M_ISUB_IMM(iptr->sx.val.i, d);
812 emit_store_dst(jd, iptr, d);
815 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
817 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
818 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
819 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
820 if (s2 == GET_LOW_REG(d)) {
821 M_INTMOVE(s1, REG_ITMP1);
822 M_ISUB(s2, REG_ITMP1);
823 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
826 M_INTMOVE(s1, GET_LOW_REG(d));
827 M_ISUB(s2, GET_LOW_REG(d));
829 /* don't use REG_ITMP1 */
830 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
831 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
832 if (s2 == GET_HIGH_REG(d)) {
833 M_INTMOVE(s1, REG_ITMP2);
834 M_ISUBB(s2, REG_ITMP2);
835 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
838 M_INTMOVE(s1, GET_HIGH_REG(d));
839 M_ISUBB(s2, GET_HIGH_REG(d));
841 emit_store_dst(jd, iptr, d);
844 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
845 /* sx.val.l = constant */
847 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
848 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
850 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
851 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
852 emit_store_dst(jd, iptr, d);
855 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
857 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
858 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
859 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
866 emit_store_dst(jd, iptr, d);
869 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
870 /* sx.val.i = constant */
872 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
873 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
874 M_IMUL_IMM(s1, iptr->sx.val.i, d);
875 emit_store_dst(jd, iptr, d);
878 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
880 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
881 s2 = emit_load_s2_low(jd, iptr, EDX);
882 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
884 M_INTMOVE(s1, REG_ITMP2);
885 M_IMUL(s2, REG_ITMP2);
887 s1 = emit_load_s1_low(jd, iptr, EAX);
888 s2 = emit_load_s2_high(jd, iptr, EDX);
891 M_IADD(EDX, REG_ITMP2);
893 s1 = emit_load_s1_low(jd, iptr, EAX);
894 s2 = emit_load_s2_low(jd, iptr, EDX);
897 M_INTMOVE(EAX, GET_LOW_REG(d));
898 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
900 emit_store_dst(jd, iptr, d);
903 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
904 /* sx.val.l = constant */
906 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
907 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
908 ICONST(EAX, iptr->sx.val.l);
910 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
911 M_IADD(REG_ITMP2, EDX);
912 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
913 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
914 M_IADD(REG_ITMP2, EDX);
915 M_LNGMOVE(EAX_EDX_PACKED, d);
916 emit_store_dst(jd, iptr, d);
919 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
921 s1 = emit_load_s1(jd, iptr, EAX);
922 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
923 d = codegen_reg_of_dst(jd, iptr, EAX);
924 emit_arithmetic_check(cd, iptr, s2);
926 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
928 /* check as described in jvm spec */
930 M_CMP_IMM(0x80000000, EAX);
937 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
938 emit_store_dst(jd, iptr, d);
941 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
943 s1 = emit_load_s1(jd, iptr, EAX);
944 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
945 d = codegen_reg_of_dst(jd, iptr, EDX);
946 emit_arithmetic_check(cd, iptr, s2);
948 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
950 /* check as described in jvm spec */
952 M_CMP_IMM(0x80000000, EAX);
960 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
961 emit_store_dst(jd, iptr, d);
964 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
965 /* sx.val.i = constant */
967 /* TODO: optimize for `/ 2' */
968 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
969 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
973 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
974 M_SRA_IMM(iptr->sx.val.i, d);
975 emit_store_dst(jd, iptr, d);
978 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
979 /* sx.val.i = constant */
981 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
982 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
984 M_MOV(s1, REG_ITMP1);
988 M_AND_IMM(iptr->sx.val.i, d);
990 M_BGE(2 + 2 + 6 + 2);
991 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
993 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
995 emit_store_dst(jd, iptr, d);
998 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
999 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1001 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
1002 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1004 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
1005 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
1006 /* XXX could be optimized */
1007 emit_arithmetic_check(cd, iptr, REG_ITMP3);
1009 bte = iptr->sx.s23.s3.bte;
1012 M_LST(s2, REG_SP, 2 * 4);
1014 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1015 M_LST(s1, REG_SP, 0 * 4);
1017 M_MOV_IMM(bte->fp, REG_ITMP3);
1018 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP, bte);
1020 emit_store_dst(jd, iptr, d);
1023 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1024 /* sx.val.i = constant */
1026 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1027 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1029 M_TEST(GET_HIGH_REG(d));
1031 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1032 M_IADDC_IMM(0, GET_HIGH_REG(d));
1033 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1034 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1035 emit_store_dst(jd, iptr, d);
1039 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1040 /* sx.val.l = constant */
1042 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1043 if (iptr->dst.var->flags & INMEMORY) {
1044 if (iptr->s1.var->flags & INMEMORY) {
1045 /* Alpha algorithm */
1047 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8);
1049 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1055 /* TODO: hmm, don't know if this is always correct */
1057 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1059 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1065 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1066 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1068 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1069 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1070 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1071 emit_jcc(cd, CC_GE, disp);
1073 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1074 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1076 emit_neg_reg(cd, REG_ITMP1);
1077 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1078 emit_neg_reg(cd, REG_ITMP2);
1080 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1081 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1083 emit_neg_reg(cd, REG_ITMP1);
1084 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1085 emit_neg_reg(cd, REG_ITMP2);
1087 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 8);
1088 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 8 + 4);
1092 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1093 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1095 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1096 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1097 M_TEST(GET_LOW_REG(s1));
1103 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1105 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1106 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1107 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1108 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1111 emit_store_dst(jd, iptr, d);
1114 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1115 /* sx.val.i = constant */
1117 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1118 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1120 M_SLL_IMM(iptr->sx.val.i, d);
1121 emit_store_dst(jd, iptr, d);
1124 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1126 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1127 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1128 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1129 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1132 emit_store_dst(jd, iptr, d);
1135 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1136 /* sx.val.i = constant */
1138 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1139 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1141 M_SRA_IMM(iptr->sx.val.i, d);
1142 emit_store_dst(jd, iptr, d);
1145 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1147 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1148 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1149 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1150 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1153 emit_store_dst(jd, iptr, d);
1156 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1157 /* sx.val.i = constant */
1159 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1160 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1162 M_SRL_IMM(iptr->sx.val.i, d);
1163 emit_store_dst(jd, iptr, d);
1166 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1168 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1169 s2 = emit_load_s2(jd, iptr, ECX);
1170 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1173 M_TEST_IMM(32, ECX);
1175 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1176 M_CLR(GET_LOW_REG(d));
1177 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1178 M_SLL(GET_LOW_REG(d));
1179 emit_store_dst(jd, iptr, d);
1182 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1183 /* sx.val.i = constant */
1185 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1186 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1188 if (iptr->sx.val.i & 0x20) {
1189 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1190 M_CLR(GET_LOW_REG(d));
1191 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1195 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1197 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1199 emit_store_dst(jd, iptr, d);
1202 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1204 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1205 s2 = emit_load_s2(jd, iptr, ECX);
1206 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1209 M_TEST_IMM(32, ECX);
1211 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1212 M_SRA_IMM(31, GET_HIGH_REG(d));
1213 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1214 M_SRA(GET_HIGH_REG(d));
1215 emit_store_dst(jd, iptr, d);
1218 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1219 /* sx.val.i = constant */
1221 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1222 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1224 if (iptr->sx.val.i & 0x20) {
1225 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1226 M_SRA_IMM(31, GET_HIGH_REG(d));
1227 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1231 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1233 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1235 emit_store_dst(jd, iptr, d);
1238 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1240 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1241 s2 = emit_load_s2(jd, iptr, ECX);
1242 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1245 M_TEST_IMM(32, ECX);
1247 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1248 M_CLR(GET_HIGH_REG(d));
1249 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1250 M_SRL(GET_HIGH_REG(d));
1251 emit_store_dst(jd, iptr, d);
1254 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1255 /* sx.val.l = constant */
1257 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1258 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1260 if (iptr->sx.val.i & 0x20) {
1261 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1262 M_CLR(GET_HIGH_REG(d));
1263 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1267 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1269 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1271 emit_store_dst(jd, iptr, d);
1274 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1276 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1277 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1278 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1285 emit_store_dst(jd, iptr, d);
1288 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1289 /* sx.val.i = constant */
1291 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1292 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1294 M_AND_IMM(iptr->sx.val.i, d);
1295 emit_store_dst(jd, iptr, d);
1298 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1300 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1301 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1302 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1303 if (s2 == GET_LOW_REG(d))
1304 M_AND(s1, GET_LOW_REG(d));
1306 M_INTMOVE(s1, GET_LOW_REG(d));
1307 M_AND(s2, GET_LOW_REG(d));
1309 /* REG_ITMP1 probably contains low 32-bit of destination */
1310 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1311 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1312 if (s2 == GET_HIGH_REG(d))
1313 M_AND(s1, GET_HIGH_REG(d));
1315 M_INTMOVE(s1, GET_HIGH_REG(d));
1316 M_AND(s2, GET_HIGH_REG(d));
1318 emit_store_dst(jd, iptr, d);
1321 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1322 /* sx.val.l = constant */
1324 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1325 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1327 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1328 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1329 emit_store_dst(jd, iptr, d);
/* Bitwise OR family: 32-bit register/register, 32-bit immediate,
   64-bit register-pair, and 64-bit immediate variants.
   NOTE(review): some lines are elided in this chunk (embedded numbering
   gaps); the visible code is annotation-only territory. */
1332 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1334 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1335 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1336 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1343 emit_store_dst(jd, iptr, d);
1346 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1347 /* sx.val.i = constant */
1349 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1350 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1352 M_OR_IMM(iptr->sx.val.i, d);
1353 emit_store_dst(jd, iptr, d);
/* 64-bit OR: handle the low halves first, then the high halves, each with
   the same "already in destination?" shortcut as the AND case above. */
1356 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1358 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1359 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1360 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1361 if (s2 == GET_LOW_REG(d))
1362 M_OR(s1, GET_LOW_REG(d));
1364 M_INTMOVE(s1, GET_LOW_REG(d));
1365 M_OR(s2, GET_LOW_REG(d));
1367 /* REG_ITMP1 probably contains low 32-bit of destination */
1368 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1369 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1370 if (s2 == GET_HIGH_REG(d))
1371 M_OR(s1, GET_HIGH_REG(d));
1373 M_INTMOVE(s1, GET_HIGH_REG(d));
1374 M_OR(s2, GET_HIGH_REG(d));
1376 emit_store_dst(jd, iptr, d);
/* 64-bit OR with immediate: two 32-bit immediate ORs, one per half. */
1379 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1380 /* sx.val.l = constant */
1382 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1383 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1385 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1386 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1387 emit_store_dst(jd, iptr, d);
/* Bitwise XOR family — structurally parallel to the OR cases above:
   32-bit reg/reg, 32-bit immediate, 64-bit pair, 64-bit immediate. */
1390 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1392 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1393 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1394 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1401 emit_store_dst(jd, iptr, d);
1404 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1405 /* sx.val.i = constant */
1407 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1408 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1410 M_XOR_IMM(iptr->sx.val.i, d);
1411 emit_store_dst(jd, iptr, d);
/* 64-bit XOR: low halves first, then high halves. */
1414 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1416 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1417 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1418 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1419 if (s2 == GET_LOW_REG(d))
1420 M_XOR(s1, GET_LOW_REG(d));
1422 M_INTMOVE(s1, GET_LOW_REG(d));
1423 M_XOR(s2, GET_LOW_REG(d));
1425 /* REG_ITMP1 probably contains low 32-bit of destination */
1426 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1427 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1428 if (s2 == GET_HIGH_REG(d))
1429 M_XOR(s1, GET_HIGH_REG(d));
1431 M_INTMOVE(s1, GET_HIGH_REG(d));
1432 M_XOR(s2, GET_HIGH_REG(d));
1434 emit_store_dst(jd, iptr, d);
1437 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1438 /* sx.val.l = constant */
1440 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1441 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1443 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1444 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1445 emit_store_dst(jd, iptr, d);
1449 /* floating operations ************************************************/
/* These cases target the x87 FPU stack model (REG_FTMP* are FPU stack
   slots). NOTE(review): the actual arithmetic emit calls (fadd/fsub/...)
   appear to be elided from this chunk — only the load/store scaffolding
   is visible. */
1451 case ICMD_FNEG: /* ..., value ==> ..., - value */
1453 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1454 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1456 emit_store_dst(jd, iptr, d);
1459 case ICMD_DNEG: /* ..., value ==> ..., - value */
1461 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1462 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1464 emit_store_dst(jd, iptr, d);
1467 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1469 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1470 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1471 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1473 emit_store_dst(jd, iptr, d);
1476 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1478 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1479 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1480 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1482 emit_store_dst(jd, iptr, d);
1485 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1487 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1488 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1489 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1491 emit_store_dst(jd, iptr, d);
1494 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1496 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1497 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1498 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1500 emit_store_dst(jd, iptr, d);
1503 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1505 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1506 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1507 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1509 emit_store_dst(jd, iptr, d);
1512 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1514 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1515 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1516 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1518 emit_store_dst(jd, iptr, d);
1521 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1523 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1524 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1525 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1527 emit_store_dst(jd, iptr, d);
1530 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1532 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1533 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1534 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1536 emit_store_dst(jd, iptr, d);
/* FREM/DREM: x87 FPREM must be repeated until the C2 status flag clears
   (partial remainder); the backward CC_P jump re-runs the sequence, and
   the byte count in the displacement is the size of that loop body. */
1539 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1541 /* exchanged to skip fxch */
1542 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1543 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1544 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1545 /* emit_fxch(cd); */
1550 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1551 emit_store_dst(jd, iptr, d);
/* Pop the leftover divisor from the FPU stack. */
1552 emit_ffree_reg(cd, 0);
1556 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1558 /* exchanged to skip fxch */
1559 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1560 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1561 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1562 /* emit_fxch(cd); */
1567 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1568 emit_store_dst(jd, iptr, d);
1569 emit_ffree_reg(cd, 0);
/* int/long -> float/double: x87 FILD can only load from memory, so a
   register-resident int must first be spilled to a data-segment slot. */
1573 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1574 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1576 var = VAROP(iptr->s1);
1577 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1579 if (var->flags & INMEMORY) {
/* Source already on the stack frame: load it directly. */
1580 emit_fildl_membase(cd, REG_SP, var->vv.regoff);
1582 /* XXX not thread safe! */
/* Scratch slot in the data segment is shared — concurrent compilation
   of the same code could race on it (hence the XXX above). */
1583 disp = dseg_add_unique_s4(cd, 0);
1584 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1586 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_ENTRYPOINT, NULL);
1587 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1588 emit_fildl_membase(cd, REG_ITMP1, disp);
1591 emit_store_dst(jd, iptr, d);
1594 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1595 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1597 var = VAROP(iptr->s1);
1598 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1599 if (var->flags & INMEMORY) {
/* 64-bit FILD from the stack slot. */
1600 emit_fildll_membase(cd, REG_SP, var->vv.regoff);
/* Longs are always allocated to memory on i386; a register-resident
   long here indicates a register-allocator bug. */
1603 log_text("L2F: longs have to be in memory");
1606 emit_store_dst(jd, iptr, d);
/* float/double -> int: FISTP with the control word temporarily set to
   round-toward-zero (Java semantics), then restored to round-to-nearest.
   A result of 0x80000000 may mean overflow/NaN, so that value triggers a
   slow-path call into asm_builtin_f2i/d2i which implements the exact
   Java saturation rules. */
1609 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1611 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1612 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1614 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1616 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_ENTRYPOINT, NULL);
1618 /* Round to zero, 53-bit mode, exception masked */
1619 emit_fldcw_membase(cd, REG_ITMP1, disp);
1622 var = VAROP(iptr->dst);
1623 var1 = VAROP(iptr->s1);
1625 if (var->flags & INMEMORY) {
1626 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1628 /* Round to nearest, 53-bit mode, exceptions masked */
1629 disp = dseg_add_s4(cd, 0x027f);
1630 emit_fldcw_membase(cd, REG_ITMP1, disp);
/* 0x80000000 is the x87 "integer indefinite" written on overflow/NaN;
   compare and branch to the builtin call below when we see it. */
1632 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1633 REG_SP, var->vv.regoff);
/* disp accumulates the byte length of the code to skip over; the
   CALCOFFSETBYTES terms account for variable-length membase encodings. */
1636 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1638 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1641 /* XXX not thread safe! */
1642 disp = dseg_add_unique_s4(cd, 0);
1643 emit_fistpl_membase(cd, REG_ITMP1, disp);
1644 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1646 /* Round to nearest, 53-bit mode, exceptions masked */
1647 disp = dseg_add_s4(cd, 0x027f);
1648 emit_fldcw_membase(cd, REG_ITMP1, disp);
1650 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1653 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
/* 5 = mov imm32, 2 = call reg; +2 more if the result must be moved. */
1654 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1657 emit_jcc(cd, CC_NE, disp);
1659 /* XXX: change this when we use registers */
/* Slow path: reload the float operand and call the builtin. */
1660 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1661 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1662 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP,
1663 builtintable_get_internal(BUILTIN_f2i));
1664 emit_call_reg(cd, REG_ITMP1);
1666 if (var->flags & INMEMORY) {
1667 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1670 M_INTMOVE(REG_RESULT, var->vv.regoff);
/* D2I: identical scheme with double-width loads (fldl) and
   asm_builtin_d2i as the slow path. */
1674 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1676 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1677 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1679 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1681 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_ENTRYPOINT, NULL);
1683 /* Round to zero, 53-bit mode, exception masked */
1684 disp = dseg_add_s4(cd, 0x0e7f);
1685 emit_fldcw_membase(cd, REG_ITMP1, disp);
1687 var = VAROP(iptr->dst);
1688 var1 = VAROP(iptr->s1);
1690 if (var->flags & INMEMORY) {
1691 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1693 /* Round to nearest, 53-bit mode, exceptions masked */
1694 disp = dseg_add_s4(cd, 0x027f);
1695 emit_fldcw_membase(cd, REG_ITMP1, disp);
1697 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1698 REG_SP, var->vv.regoff);
1701 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1703 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1706 /* XXX not thread safe! */
1707 disp = dseg_add_unique_s4(cd, 0);
1708 emit_fistpl_membase(cd, REG_ITMP1, disp);
1709 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1711 /* Round to nearest, 53-bit mode, exceptions masked */
1712 disp = dseg_add_s4(cd, 0x027f);
1713 emit_fldcw_membase(cd, REG_ITMP1, disp);
1715 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1718 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1719 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1722 emit_jcc(cd, CC_NE, disp);
1724 /* XXX: change this when we use registers */
1725 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1726 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1727 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP,
1728 builtintable_get_internal(BUILTIN_d2i));
1729 emit_call_reg(cd, REG_ITMP1);
1731 if (var->flags & INMEMORY) {
1732 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1734 M_INTMOVE(REG_RESULT, var->vv.regoff);
/* float/double -> long: FISTP (64-bit) under round-toward-zero. Overflow
   and NaN produce 0x8000000000000000, so high word == 0x80000000 AND low
   word == 0 routes to the asm_builtin_f2l/d2l slow path for exact Java
   saturation semantics. Longs are always in memory on i386. */
1738 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1740 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1741 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1743 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1745 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_ENTRYPOINT, NULL);
1747 /* Round to zero, 53-bit mode, exception masked */
1748 disp = dseg_add_s4(cd, 0x0e7f);
1749 emit_fldcw_membase(cd, REG_ITMP1, disp);
1751 var = VAROP(iptr->dst);
1752 var1 = VAROP(iptr->s1);
1754 if (var->flags & INMEMORY) {
1755 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1757 /* Round to nearest, 53-bit mode, exceptions masked */
1758 disp = dseg_add_s4(cd, 0x027f);
1759 emit_fldcw_membase(cd, REG_ITMP1, disp);
/* First check: high 32 bits against the indefinite pattern. */
1761 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1762 REG_SP, var->vv.regoff + 4);
1765 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1767 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1770 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1772 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1774 emit_jcc(cd, CC_NE, disp);
/* Second check: low 32 bits must also be zero for the slow path. */
1776 emit_alu_imm_membase(cd, ALU_CMP, 0,
1777 REG_SP, var->vv.regoff);
1780 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1782 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1784 emit_jcc(cd, CC_NE, disp);
1786 /* XXX: change this when we use registers */
1787 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1788 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1789 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP,
1790 builtintable_get_internal(BUILTIN_f2l));
1791 emit_call_reg(cd, REG_ITMP1);
/* Store the 64-bit builtin result (EAX:EDX) back to the stack slot. */
1792 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1793 emit_mov_reg_membase(cd, REG_RESULT2,
1794 REG_SP, var->vv.regoff + 4);
1797 log_text("F2L: longs have to be in memory");
/* D2L: same scheme with double-width load (fldl) and asm_builtin_d2l. */
1802 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1804 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1805 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1807 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1809 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_ENTRYPOINT, NULL);
1811 /* Round to zero, 53-bit mode, exception masked */
1812 disp = dseg_add_s4(cd, 0x0e7f);
1813 emit_fldcw_membase(cd, REG_ITMP1, disp);
1815 var = VAROP(iptr->dst);
1816 var1 = VAROP(iptr->s1);
1818 if (var->flags & INMEMORY) {
1819 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1821 /* Round to nearest, 53-bit mode, exceptions masked */
1822 disp = dseg_add_s4(cd, 0x027f);
1823 emit_fldcw_membase(cd, REG_ITMP1, disp);
1825 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1826 REG_SP, var->vv.regoff + 4);
1829 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1831 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1834 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1836 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1838 emit_jcc(cd, CC_NE, disp);
1840 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff);
1843 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1845 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1847 emit_jcc(cd, CC_NE, disp);
1849 /* XXX: change this when we use registers */
1850 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1851 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1852 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP,
1853 builtintable_get_internal(BUILTIN_d2l));
1854 emit_call_reg(cd, REG_ITMP1);
1855 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1856 emit_mov_reg_membase(cd, REG_RESULT2,
1857 REG_SP, var->vv.regoff + 4);
1860 log_text("D2L: longs have to be in memory");
/* Width conversions (no-ops on the x87 stack beyond load/store) and the
   float compare opcodes. */
1865 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1867 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1868 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1870 emit_store_dst(jd, iptr, d);
1873 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1875 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1876 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1878 emit_store_dst(jd, iptr, d);
/* FCMPL/FCMPG: compare via x87 status word in AX. Bit 0x400 (C2) flags
   an unordered result (NaN); FCMPL maps unordered to +1-side handling,
   FCMPG to the -1 side, per JVM spec. The jcc displacements are
   hand-counted byte lengths of the emitted instruction sequence. */
1881 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1884 /* exchanged to skip fxch */
1885 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1886 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1887 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1888 /* emit_fxch(cd); */
1891 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1892 emit_jcc(cd, CC_E, 6);
1893 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1895 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1896 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1897 emit_jcc(cd, CC_B, 3 + 5);
1898 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1899 emit_jmp_imm(cd, 3);
1900 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1901 emit_store_dst(jd, iptr, d);
1904 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1907 /* exchanged to skip fxch */
1908 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1909 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1910 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1911 /* emit_fxch(cd); */
1914 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1915 emit_jcc(cd, CC_E, 3);
/* Force the "below" outcome by faking the status flags in AH. */
1916 emit_movb_imm_reg(cd, 1, REG_AH);
1918 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1919 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1920 emit_jcc(cd, CC_B, 3 + 5);
1921 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1922 emit_jmp_imm(cd, 3);
1923 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1924 emit_store_dst(jd, iptr, d);
1928 /* memory operations **************************************************/
/* Array accesses: the load of the length field (or any data access)
   doubles as the implicit null-pointer check — a null arrayref faults
   and is turned into NullPointerException by the trap handler. Bounds
   are checked explicitly via emit_arrayindexoutofbounds_check. The last
   memindex argument is the element-size shift (0=1B, 1=2B, 2=4B, 3=8B). */
1930 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1932 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1933 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1934 /* implicit null-pointer check */
1935 M_ILD(d, s1, OFFSET(java_array_t, size));
1936 emit_store_dst(jd, iptr, d);
1939 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1941 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1942 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1943 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1944 /* implicit null-pointer check */
1945 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
/* Byte load is sign-extended (movsbl) per Java byte semantics. */
1946 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray_t, data[0]),
1948 emit_store_dst(jd, iptr, d);
1951 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1953 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1954 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1955 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1956 /* implicit null-pointer check */
1957 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
/* char is unsigned 16-bit: zero-extend (movzwl). */
1958 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray_t, data[0]),
1960 emit_store_dst(jd, iptr, d);
1963 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1965 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1966 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1967 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1968 /* implicit null-pointer check */
1969 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
/* short is signed 16-bit: sign-extend (movswl). */
1970 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray_t, data[0]),
1972 emit_store_dst(jd, iptr, d);
1975 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1977 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1978 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1979 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1980 /* implicit null-pointer check */
1981 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1982 emit_mov_memindex_reg(cd, OFFSET(java_intarray_t, data[0]),
1984 emit_store_dst(jd, iptr, d);
/* LALOAD: 64-bit element loaded as two 32-bit moves through REG_ITMP3
   into the (always in-memory) destination slot. */
1987 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1989 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1990 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1991 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1992 /* implicit null-pointer check */
1993 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1995 var = VAROP(iptr->dst);
1997 assert(var->flags & INMEMORY);
1998 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]),
1999 s1, s2, 3, REG_ITMP3);
2000 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff);
2001 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]) + 4,
2002 s1, s2, 3, REG_ITMP3);
2003 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff + 4);
2006 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2008 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2009 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2010 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2011 /* implicit null-pointer check */
2012 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2013 emit_flds_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2, 2);
2014 emit_store_dst(jd, iptr, d);
2017 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2019 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2020 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2021 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
2022 /* implicit null-pointer check */
2023 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2024 emit_fldl_memindex(cd, OFFSET(java_doublearray_t, data[0]), s1, s2,3);
2025 emit_store_dst(jd, iptr, d);
2028 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2030 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2031 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2032 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2033 /* implicit null-pointer check */
2034 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2035 emit_mov_memindex_reg(cd, OFFSET(java_objectarray_t, data[0]),
2037 emit_store_dst(jd, iptr, d);
/* Array stores. Same implicit null check + explicit bounds check scheme
   as the loads. AASTORE additionally runs the runtime covariance check
   (BUILTIN_FAST_canstore) before writing the reference. */
2041 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2043 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2044 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2045 /* implicit null-pointer check */
2046 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2047 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2049 /* because EBP, ESI, EDI have no xH and xL nibbles */
/* Byte stores need an 8-bit-addressable register (EAX-EDX), so the
   value is forced into REG_ITMP3 first. */
2050 M_INTMOVE(s3, REG_ITMP3);
2053 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray_t, data[0]),
2057 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2059 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2060 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2061 /* implicit null-pointer check */
2062 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2063 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2064 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray_t, data[0]),
2068 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2070 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2071 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2072 /* implicit null-pointer check */
2073 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2074 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2075 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray_t, data[0]),
2079 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2081 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2082 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2083 /* implicit null-pointer check */
2084 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2085 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2086 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray_t, data[0]),
/* LASTORE: 64-bit value copied from its (always in-memory) stack slot
   to the array element as two 32-bit moves via REG_ITMP3. */
2090 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2092 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2093 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2094 /* implicit null-pointer check */
2095 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2097 var = VAROP(iptr->sx.s23.s3);
2099 assert(var->flags & INMEMORY);
2100 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff, REG_ITMP3);
2101 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray_t, data[0])
2103 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff + 4, REG_ITMP3);
2104 emit_mov_reg_memindex(cd, REG_ITMP3,
2105 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2108 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2110 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2111 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2112 /* implicit null-pointer check */
2113 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2114 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2115 emit_fstps_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2,2);
2118 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2120 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2121 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2122 /* implicit null-pointer check */
2123 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2124 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2125 emit_fstpl_memindex(cd, OFFSET(java_doublearray_t, data[0]),
/* AASTORE: call BUILTIN_FAST_canstore(array, value); on failure
   emit_arraystore_check raises ArrayStoreException. The call clobbers
   registers, so s1-s3 are reloaded afterwards before the actual store. */
2129 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2131 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2132 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2133 /* implicit null-pointer check */
2134 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2135 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2137 M_AST(s1, REG_SP, 0 * 4);
2138 M_AST(s3, REG_SP, 1 * 4);
2139 M_MOV_IMM(BUILTIN_FAST_canstore, REG_ITMP1);
2140 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP,
2141 builtintable_get_internal(BUILTIN_FAST_canstore));
2143 emit_arraystore_check(cd, iptr);
2145 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2146 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2147 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2148 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray_t, data[0]),
/* Constant array stores: write the immediate straight to memory, no
   value register needed. AASTORECONST only ever stores null (constant 0),
   so no canstore check is required. */
2152 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2154 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2155 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2156 /* implicit null-pointer check */
2157 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2158 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2159 OFFSET(java_bytearray_t, data[0]), s1, s2, 0);
2162 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2164 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2165 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2166 /* implicit null-pointer check */
2167 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2168 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2169 OFFSET(java_chararray_t, data[0]), s1, s2, 1);
2172 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2174 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2175 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2176 /* implicit null-pointer check */
2177 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2178 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2179 OFFSET(java_shortarray_t, data[0]), s1, s2, 1);
2182 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2184 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2185 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2186 /* implicit null-pointer check */
2187 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2188 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2189 OFFSET(java_intarray_t, data[0]), s1, s2, 2);
/* 64-bit constant store: low word, then sign-extension word.
   NOTE(review): the high word uses `>> 31` rather than `>> 32` — this
   also appears in PUTSTATICCONST/PUTFIELDCONST below, so it seems to be
   a deliberate project-wide idiom; verify against upstream CACAO before
   changing. */
2192 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2194 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2195 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2196 /* implicit null-pointer check */
2197 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2198 emit_mov_imm_memindex(cd,
2199 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2200 OFFSET(java_longarray_t, data[0]), s1, s2, 3);
2201 emit_mov_imm_memindex(cd,
2202 ((s4)iptr->sx.s23.s3.constval) >> 31,
2203 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2206 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2208 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2209 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2210 /* implicit null-pointer check */
2211 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2212 emit_mov_imm_memindex(cd, 0,
2213 OFFSET(java_objectarray_t, data[0]), s1, s2, 2);
/* Static field access. Unresolved fields get a patcher reference so the
   field address is filled in lazily; resolved-but-uninitialized classes
   get a class-initialization patcher. The field address is materialized
   as an immediate in REG_ITMP1 and accessed with offset 0. */
2217 case ICMD_GETSTATIC: /* ... ==> ..., value */
2219 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2220 uf = iptr->sx.s23.s3.uf;
2221 fieldtype = uf->fieldref->parseddesc.fd->type;
2224 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2228 fi = iptr->sx.s23.s3.fmiref->p.field;
2229 fieldtype = fi->type;
2231 disp = (intptr_t) fi->value;
2233 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2234 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2237 M_MOV_IMM(disp, REG_ITMP1);
2238 JITCACHE_ADD_CACHED_REF_JD_COND(jd, CRT_FIELDINFO_VALUE, fi, disp);
/* Dispatch on field width/kind for the actual load. */
2239 switch (fieldtype) {
2242 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2243 M_ILD(d, REG_ITMP1, 0);
2246 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2247 M_LLD(d, REG_ITMP1, 0);
2250 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2251 M_FLD(d, REG_ITMP1, 0);
2254 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2255 M_DLD(d, REG_ITMP1, 0);
2258 emit_store_dst(jd, iptr, d);
/* PUTSTATIC: mirror image of GETSTATIC — same resolution/patcher
   handling, then a typed store into the field address. */
2261 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2263 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2264 uf = iptr->sx.s23.s3.uf;
2265 fieldtype = uf->fieldref->parseddesc.fd->type;
2268 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2271 fi = iptr->sx.s23.s3.fmiref->p.field;
2272 fieldtype = fi->type;
2274 disp = (intptr_t) fi->value;
2276 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2277 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2279 M_MOV_IMM(disp, REG_ITMP1);
2280 JITCACHE_ADD_CACHED_REF_JD_COND(jd, CRT_FIELDINFO_VALUE, fi, disp);
2281 switch (fieldtype) {
2284 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2285 M_IST(s1, REG_ITMP1, 0);
2288 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2289 M_LST(s1, REG_ITMP1, 0);
2292 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2293 emit_fstps_membase(cd, REG_ITMP1, 0);
2296 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2297 emit_fstpl_membase(cd, REG_ITMP1, 0);
/* PUTSTATICCONST: store an immediate constant; a long is two 32-bit
   immediate stores (low word at +0, high word at +4). */
2302 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2303 /* val = value (in current instruction) */
2304 /* following NOP) */
2306 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2307 uf = iptr->sx.s23.s3.uf;
2308 fieldtype = uf->fieldref->parseddesc.fd->type;
2311 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2314 fi = iptr->sx.s23.s3.fmiref->p.field;
2315 fieldtype = fi->type;
2317 disp = (intptr_t) fi->value;
2319 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2320 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2323 M_MOV_IMM(disp, REG_ITMP1);
2324 JITCACHE_ADD_CACHED_REF_JD_COND(jd, CRT_FIELDINFO_VALUE, fi, disp);
2325 switch (fieldtype) {
2328 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
/* NOTE(review): `>> 31` for the high word — same idiom as
   LASTORECONST; confirm against upstream before changing. */
2331 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2332 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
/* Instance field access. An explicit null check precedes the access;
   unresolved fields use 32-bit displacement forms (M_ILD32 etc.) so the
   patcher can later overwrite the offset in place. */
2339 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2341 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2342 emit_nullpointer_check(cd, iptr, s1);
2344 #if defined(ENABLE_ESCAPE_CHECK)
2345 /*emit_escape_check(cd, s1);*/
2348 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2349 uf = iptr->sx.s23.s3.uf;
2350 fieldtype = uf->fieldref->parseddesc.fd->type;
2353 patcher_add_patch_ref(jd, PATCHER_getfield,
2354 iptr->sx.s23.s3.uf, 0);
2357 fi = iptr->sx.s23.s3.fmiref->p.field;
2358 fieldtype = fi->type;
2364 switch (fieldtype) {
2367 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2368 M_ILD32(d, s1, disp);
2371 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2372 M_LLD32(d, s1, disp);
2375 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2376 M_FLD32(d, s1, disp);
2379 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2380 M_DLD32(d, s1, disp);
2383 emit_store_dst(jd, iptr, d);
/* PUTFIELD: the value must be loaded before the patcher reference is
   registered, because the patch position must point at the store
   instruction itself ("must be done here because of code patching"). */
2386 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2388 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2389 emit_nullpointer_check(cd, iptr, s1);
2391 /* must be done here because of code patching */
2393 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2394 uf = iptr->sx.s23.s3.uf;
2395 fieldtype = uf->fieldref->parseddesc.fd->type;
2398 fi = iptr->sx.s23.s3.fmiref->p.field;
2399 fieldtype = fi->type;
2402 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2403 if (IS_2_WORD_TYPE(fieldtype))
2404 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2406 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2409 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2411 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2413 uf = iptr->sx.s23.s3.uf;
2416 patcher_add_patch_ref(jd, PATCHER_putfield, uf, 0);
2420 fi = iptr->sx.s23.s3.fmiref->p.field;
2424 switch (fieldtype) {
2427 M_IST32(s2, s1, disp);
2430 M_LST32(s2, s1, disp);
2433 emit_fstps_membase32(cd, s1, disp);
2436 emit_fstpl_membase32(cd, s1, disp);
/* PUTFIELDCONST: immediate store variant; a long is written as two
   32-bit immediate stores (NOTE(review): `>> 31` idiom again — confirm
   against upstream before changing). */
2441 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2442 /* val = value (in current instruction) */
2443 /* following NOP) */
2445 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2446 emit_nullpointer_check(cd, iptr, s1);
2448 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2449 uf = iptr->sx.s23.s3.uf;
2450 fieldtype = uf->fieldref->parseddesc.fd->type;
2453 patcher_add_patch_ref(jd, PATCHER_putfieldconst,
2457 fi = iptr->sx.s23.s3.fmiref->p.field;
2458 fieldtype = fi->type;
2462 switch (fieldtype) {
2465 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2468 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2469 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2477 /* branch operations **************************************************/
/* ATHROW: move the exception object into the dedicated exception-pointer
   register, capture the current pc via call/pop, and jump to the shared
   asm exception handler. */
2479 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2481 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2482 M_INTMOVE(s1, REG_ITMP1_XPTR);
2484 #ifdef ENABLE_VERIFIER
2485 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2486 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2487 iptr->sx.s23.s2.uc, 0);
2489 #endif /* ENABLE_VERIFIER */
/* call imm 0 pushes the return address; pop it to get the xpc. */
2491 M_CALL_IMM(0); /* passing exception pc */
2492 M_POP(REG_ITMP2_XPC);
2494 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2495 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_ASM_HANDLE_EXCEPTION, 1);
2499 case ICMD_GOTO: /* ... ==> ... */
2500 case ICMD_RET: /* ... ==> ... */
2502 #if defined(ENABLE_SSA)
2504 last_cmd_was_goto = true;
2506 /* In case of a Goto phimoves have to be inserted before the */
2509 codegen_emit_phi_moves(jd, bptr);
2512 emit_br(cd, iptr->dst.block);
2516 case ICMD_JSR: /* ... ==> ... */
2518 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
/* Null-comparison branches: the condition code is derived from the
   opcode offset relative to ICMD_IFNULL. */
2522 case ICMD_IFNULL: /* ..., value ==> ... */
2523 case ICMD_IFNONNULL:
2525 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2527 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
2530 case ICMD_IFEQ: /* ..., value ==> ... */
2537 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2538 M_CMP_IMM(iptr->sx.val.i, s1);
2539 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
/* 64-bit compare-against-constant branches. Equality tests OR the
   (constant-XORed) halves together; ordered tests compare the high word
   signed first, then the low word unsigned. */
2542 case ICMD_IF_LEQ: /* ..., value ==> ... */
2544 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2545 if (iptr->sx.val.l == 0) {
2546 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2547 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2550 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2551 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2552 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2553 M_OR(REG_ITMP2, REG_ITMP1);
2555 emit_beq(cd, iptr->dst.block);
2558 case ICMD_IF_LLT: /* ..., value ==> ... */
2560 if (iptr->sx.val.l == 0) {
2561 /* If high 32-bit are less than zero, then the 64-bits
2563 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2565 emit_blt(cd, iptr->dst.block);
2568 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2569 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2570 emit_blt(cd, iptr->dst.block);
2572 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2573 emit_bult(cd, iptr->dst.block);
2577 case ICMD_IF_LLE: /* ..., value ==> ... */
2579 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2580 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2581 emit_blt(cd, iptr->dst.block);
2583 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2584 emit_bule(cd, iptr->dst.block);
2587 case ICMD_IF_LNE: /* ..., value ==> ... */
2589 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2590 if (iptr->sx.val.l == 0) {
2591 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2592 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2595 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2596 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2597 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2598 M_OR(REG_ITMP2, REG_ITMP1);
2600 emit_bne(cd, iptr->dst.block);
2603 case ICMD_IF_LGT: /* ..., value ==> ... */
2605 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2606 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2607 emit_bgt(cd, iptr->dst.block);
2609 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2610 emit_bugt(cd, iptr->dst.block);
2613 case ICMD_IF_LGE: /* ..., value ==> ... */
2615 if (iptr->sx.val.l == 0) {
2616 /* If high 32-bit are greater equal zero, then the
2618 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2620 emit_bge(cd, iptr->dst.block);
2623 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2624 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2625 emit_bgt(cd, iptr->dst.block);
2627 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2628 emit_buge(cd, iptr->dst.block);
/* 32-bit and reference compare-and-branch: condition code derived from
   the opcode offset, same trick as IFNULL above. */
2632 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2633 case ICMD_IF_ICMPNE:
2634 case ICMD_IF_ICMPLT:
2635 case ICMD_IF_ICMPGT:
2636 case ICMD_IF_ICMPGE:
2637 case ICMD_IF_ICMPLE:
2639 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2640 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2642 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);
2645 case ICMD_IF_ACMPEQ: /* ..., value, value ==> ... */
2646 case ICMD_IF_ACMPNE:
2648 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2649 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2651 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);
2654 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2656 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2657 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2658 M_INTMOVE(s1, REG_ITMP1);
2659 M_XOR(s2, REG_ITMP1);
2660 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2661 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2662 M_INTMOVE(s1, REG_ITMP2);
2663 M_XOR(s2, REG_ITMP2);
2664 M_OR(REG_ITMP1, REG_ITMP2);
2665 emit_beq(cd, iptr->dst.block);
2668 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2670 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2671 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2672 M_INTMOVE(s1, REG_ITMP1);
2673 M_XOR(s2, REG_ITMP1);
2674 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2675 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2676 M_INTMOVE(s1, REG_ITMP2);
2677 M_XOR(s2, REG_ITMP2);
2678 M_OR(REG_ITMP1, REG_ITMP2);
2679 emit_bne(cd, iptr->dst.block);
2682 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2684 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2685 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2687 emit_blt(cd, iptr->dst.block);
2688 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2689 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2692 emit_bult(cd, iptr->dst.block);
2695 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2697 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2698 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2700 emit_bgt(cd, iptr->dst.block);
2701 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2702 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2705 emit_bugt(cd, iptr->dst.block);
2708 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2710 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2711 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2713 emit_blt(cd, iptr->dst.block);
2714 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2715 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2718 emit_bule(cd, iptr->dst.block);
2721 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2723 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2724 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2726 emit_bgt(cd, iptr->dst.block);
2727 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2728 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2731 emit_buge(cd, iptr->dst.block);
2735 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2737 REPLACEMENT_POINT_RETURN(cd, iptr);
2738 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2739 M_INTMOVE(s1, REG_RESULT);
2740 goto nowperformreturn;
2742 case ICMD_LRETURN: /* ..., retvalue ==> ... */
2744 REPLACEMENT_POINT_RETURN(cd, iptr);
2745 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2746 M_LNGMOVE(s1, REG_RESULT_PACKED);
2747 goto nowperformreturn;
2749 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2751 REPLACEMENT_POINT_RETURN(cd, iptr);
2752 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2753 M_INTMOVE(s1, REG_RESULT);
2755 #ifdef ENABLE_VERIFIER
2756 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2757 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2758 iptr->sx.s23.s2.uc, 0);
2760 #endif /* ENABLE_VERIFIER */
2761 goto nowperformreturn;
2763 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2766 REPLACEMENT_POINT_RETURN(cd, iptr);
2767 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2768 goto nowperformreturn;
2770 case ICMD_RETURN: /* ... ==> ... */
2772 REPLACEMENT_POINT_RETURN(cd, iptr);
2778 p = cd->stackframesize;
2780 #if !defined(NDEBUG)
2781 emit_verbosecall_exit(jd);
2784 #if defined(ENABLE_THREADS)
2785 if (checksync && code_is_synchronized(code)) {
2786 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 8);
2788 /* we need to save the proper return value */
2789 switch (iptr->opc) {
2792 M_IST(REG_RESULT, REG_SP, rd->memuse * 8);
2796 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2800 emit_fstps_membase(cd, REG_SP, rd->memuse * 8);
2804 emit_fstpl_membase(cd, REG_SP, rd->memuse * 8);
2808 M_AST(REG_ITMP2, REG_SP, 0);
2809 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2810 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP,
2811 builtintable_get_internal(LOCK_monitor_exit));
2814 /* and now restore the proper return value */
2815 switch (iptr->opc) {
2818 M_ILD(REG_RESULT, REG_SP, rd->memuse * 8);
2822 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2826 emit_flds_membase(cd, REG_SP, rd->memuse * 8);
2830 emit_fldl_membase(cd, REG_SP, rd->memuse * 8);
2836 /* restore saved registers */
2838 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2839 p--; M_ALD(rd->savintregs[i], REG_SP, p * 8);
2842 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2844 emit_fldl_membase(cd, REG_SP, p * 8);
2845 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2847 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2850 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2854 /* deallocate stack */
2856 if (cd->stackframesize)
2857 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
2864 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2867 branch_target_t *table;
2869 table = iptr->dst.table;
2871 l = iptr->sx.s23.s2.tablelow;
2872 i = iptr->sx.s23.s3.tablehigh;
2874 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2875 M_INTMOVE(s1, REG_ITMP1);
2878 M_ISUB_IMM(l, REG_ITMP1);
2884 M_CMP_IMM(i - 1, REG_ITMP1);
2885 emit_bugt(cd, table[0].block);
2887 /* build jump table top down and use address of lowest entry */
2892 dseg_add_target(cd, table->block);
2896 /* length of dataseg after last dseg_addtarget is used
2899 M_MOV_IMM(0, REG_ITMP2);
2901 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_ENTRYPOINT, NULL);
2902 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
2908 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2911 lookup_target_t *lookup;
2913 lookup = iptr->dst.lookup;
2915 i = iptr->sx.s23.s2.lookupcount;
2917 MCODECHECK((i<<2)+8);
2918 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2921 M_CMP_IMM(lookup->value, s1);
2922 emit_beq(cd, lookup->target.block);
2926 emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
2931 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
2933 REPLACEMENT_POINT_FORGC_BUILTIN(cd, iptr);
2935 bte = iptr->sx.s23.s3.bte;
2938 #if defined(ENABLE_ESCAPE_REASON)
2939 if (bte->fp == BUILTIN_escape_reason_new) {
2940 void set_escape_reasons(void *);
2941 M_ASUB_IMM(8, REG_SP);
2942 M_MOV_IMM(iptr->escape_reasons, REG_ITMP1);
2943 M_AST(EDX, REG_SP, 4);
2944 M_AST(REG_ITMP1, REG_SP, 0);
2945 M_MOV_IMM(set_escape_reasons, REG_ITMP1);
2947 M_ALD(EDX, REG_SP, 4);
2948 M_AADD_IMM(8, REG_SP);
2954 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2956 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2957 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
2958 case ICMD_INVOKEINTERFACE:
2960 REPLACEMENT_POINT_INVOKE(cd, iptr);
2962 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2963 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
2967 lm = iptr->sx.s23.s3.fmiref->p.method;
2968 md = lm->parseddesc;
2972 s3 = md->paramcount;
2974 MCODECHECK((s3 << 1) + 64);
2976 /* copy arguments to registers or stack location */
2978 for (s3 = s3 - 1; s3 >= 0; s3--) {
2979 var = VAR(iptr->sx.s23.s2.args[s3]);
2981 /* Already Preallocated (ARGVAR) ? */
2982 if (var->flags & PREALLOC)
2984 if (IS_INT_LNG_TYPE(var->type)) {
2985 if (!md->params[s3].inmemory) {
2986 log_text("No integer argument registers available!");
2990 if (IS_2_WORD_TYPE(var->type)) {
2991 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
2992 M_LST(d, REG_SP, md->params[s3].regoff);
2994 d = emit_load(jd, iptr, var, REG_ITMP1);
2995 M_IST(d, REG_SP, md->params[s3].regoff);
3000 if (!md->params[s3].inmemory) {
3001 s1 = md->params[s3].regoff;
3002 d = emit_load(jd, iptr, var, s1);
3006 d = emit_load(jd, iptr, var, REG_FTMP1);
3007 if (IS_2_WORD_TYPE(var->type))
3008 M_DST(d, REG_SP, md->params[s3].regoff);
3010 M_FST(d, REG_SP, md->params[s3].regoff);
3015 switch (iptr->opc) {
3017 d = md->returntype.type;
3019 if (bte->stub == NULL) {
3020 M_MOV_IMM(bte->fp, REG_ITMP1);
3023 M_MOV_IMM(bte->stub, REG_ITMP1);
3025 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN, bte);
3028 #if defined(ENABLE_ESCAPE_CHECK)
3029 if (bte->opcode == ICMD_NEW || bte->opcode == ICMD_NEWARRAY) {
3030 /*emit_escape_annotate_object(cd, m);*/
3035 case ICMD_INVOKESPECIAL:
3036 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3037 emit_nullpointer_check(cd, iptr, REG_ITMP1);
3040 case ICMD_INVOKESTATIC:
3042 unresolved_method *um = iptr->sx.s23.s3.um;
3044 patcher_add_patch_ref(jd, PATCHER_invokestatic_special,
3048 d = md->returntype.type;
3051 disp = (ptrint) lm->stubroutine;
3053 d = lm->parseddesc->returntype.type;
3055 M_MOV_IMM(disp, REG_ITMP2);
3056 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_METHODINFO_STUBROUTINE, lm);
3060 case ICMD_INVOKEVIRTUAL:
3061 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3062 emit_nullpointer_check(cd, iptr, s1);
3065 unresolved_method *um = iptr->sx.s23.s3.um;
3067 patcher_add_patch_ref(jd, PATCHER_invokevirtual, um, 0);
3070 d = md->returntype.type;
3073 s1 = OFFSET(vftbl_t, table[0]) +
3074 sizeof(methodptr) * lm->vftblindex;
3076 d = md->returntype.type;
3079 M_ALD(REG_METHODPTR, REG_ITMP1,
3080 OFFSET(java_object_t, vftbl));
3081 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3082 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_METHODINFO_TABLE, lm);
3086 case ICMD_INVOKEINTERFACE:
3087 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3088 emit_nullpointer_check(cd, iptr, s1);
3091 unresolved_method *um = iptr->sx.s23.s3.um;
3093 patcher_add_patch_ref(jd, PATCHER_invokeinterface, um, 0);
3097 d = md->returntype.type;
3100 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3101 sizeof(methodptr) * lm->clazz->index;
3103 s2 = sizeof(methodptr) * (lm - lm->clazz->methods);
3105 d = md->returntype.type;
3108 M_ALD(REG_METHODPTR, REG_ITMP1,
3109 OFFSET(java_object_t, vftbl));
3110 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3111 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_METHODINFO_INTERFACETABLE, lm);
3112 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3113 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_METHODINFO_METHODOFFSET, lm);
3118 /* store size of call code in replacement point */
3120 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3121 REPLACEMENT_POINT_FORGC_BUILTIN_RETURN(cd, iptr);
3123 /* d contains return type */
3125 if (d != TYPE_VOID) {
3126 #if defined(ENABLE_SSA)
3127 if ((ls == NULL) /* || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) */ ||
3128 (ls->lifetime[iptr->dst.varindex].type != UNUSED))
3129 /* a "living" stackslot */
3132 if (IS_INT_LNG_TYPE(d)) {
3133 if (IS_2_WORD_TYPE(d)) {
3134 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3135 M_LNGMOVE(REG_RESULT_PACKED, s1);
3138 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3139 M_INTMOVE(REG_RESULT, s1);
3143 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3145 emit_store_dst(jd, iptr, s1);
3151 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3153 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3154 /* object type cast-check */
3157 vftbl_t *supervftbl;
3160 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3166 super = iptr->sx.s23.s3.c.cls;
3167 superindex = super->index;
3168 supervftbl = super->vftbl;
3170 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3172 /* if class is not resolved, check which code to call */
3173 if (super == NULL) {
3175 emit_label_beq(cd, BRANCH_LABEL_1);
3177 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3178 iptr->sx.s23.s3.c.ref, 0);
3180 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3181 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3182 emit_label_beq(cd, BRANCH_LABEL_2);
3185 /* interface checkcast code */
3187 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3188 if (super != NULL) {
3190 emit_label_beq(cd, BRANCH_LABEL_3);
3193 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3195 if (super == NULL) {
3196 patcher_add_patch_ref(jd, PATCHER_checkcast_interface,
3197 iptr->sx.s23.s3.c.ref,
3202 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3203 M_ISUB_IMM32(superindex, REG_ITMP3);
3204 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CLASSINFO_INDEX, super);
3205 /* XXX do we need this one? */
3207 emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);
3209 M_ALD32(REG_ITMP3, REG_ITMP2,
3210 OFFSET(vftbl_t, interfacetable[0]) -
3211 superindex * sizeof(methodptr*));
3212 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CLASSINFO_INTERFACETABLE, super);
3214 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);
3217 emit_label_br(cd, BRANCH_LABEL_4);
3219 emit_label(cd, BRANCH_LABEL_3);
3222 /* class checkcast code */
3224 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3225 if (super == NULL) {
3226 emit_label(cd, BRANCH_LABEL_2);
3230 emit_label_beq(cd, BRANCH_LABEL_5);
3233 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3235 if (super == NULL) {
3236 patcher_add_patch_ref(jd, PATCHER_checkcast_class,
3237 iptr->sx.s23.s3.c.ref,
3241 M_MOV_IMM(supervftbl, REG_ITMP3);
3242 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CLASSINFO_VFTBL, super);
3244 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3246 /* if (s1 != REG_ITMP1) { */
3247 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3248 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3249 /* #if defined(ENABLE_THREADS) */
3250 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3252 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
3255 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3256 M_ISUB(REG_ITMP3, REG_ITMP2);
3257 M_MOV_IMM(supervftbl, REG_ITMP3);
3258 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CLASSINFO_VFTBL, super);
3259 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3263 M_CMP(REG_ITMP3, REG_ITMP2);
3264 emit_classcast_check(cd, iptr, BRANCH_ULE, REG_ITMP3, s1);
3267 emit_label(cd, BRANCH_LABEL_5);
3270 if (super == NULL) {
3271 emit_label(cd, BRANCH_LABEL_1);
3272 emit_label(cd, BRANCH_LABEL_4);
3275 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3278 /* array type cast-check */
3280 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3281 M_AST(s1, REG_SP, 0 * 4);
3283 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3284 patcher_add_patch_ref(jd, PATCHER_builtin_arraycheckcast,
3285 iptr->sx.s23.s3.c.ref, 0);
3289 disp = iptr->sx.s23.s3.c.cls;
3292 M_AST_IMM(disp, REG_SP, 1 * 4);
3293 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CLASSINFO, disp);
3295 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3296 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP,
3297 builtintable_get_internal(BUILTIN_arraycheckcast));
3300 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3302 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);
3304 d = codegen_reg_of_dst(jd, iptr, s1);
3308 emit_store_dst(jd, iptr, d);
3311 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3315 vftbl_t *supervftbl;
3318 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3324 super = iptr->sx.s23.s3.c.cls;
3325 superindex = super->index;
3326 supervftbl = super->vftbl;
3329 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3330 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
3333 M_INTMOVE(s1, REG_ITMP1);
3339 /* if class is not resolved, check which code to call */
3341 if (super == NULL) {
3343 emit_label_beq(cd, BRANCH_LABEL_1);
3345 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3346 iptr->sx.s23.s3.c.ref, 0);
3348 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3349 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3350 emit_label_beq(cd, BRANCH_LABEL_2);
3353 /* interface instanceof code */
3355 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3356 if (super != NULL) {
3358 emit_label_beq(cd, BRANCH_LABEL_3);
3362 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3364 if (super == NULL) {
3365 patcher_add_patch_ref(jd, PATCHER_instanceof_interface,
3366 iptr->sx.s23.s3.c.ref, 0);
3370 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3371 M_ISUB_IMM32(superindex, REG_ITMP3);
3372 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CLASSINFO_INDEX, super);
3376 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3377 6 /* jcc */ + 5 /* mov_imm_reg */);
3380 M_ALD32(REG_ITMP1, REG_ITMP1,
3381 OFFSET(vftbl_t, interfacetable[0]) -
3382 superindex * sizeof(methodptr*));
3383 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CLASSINFO_INTERFACETABLE, super);
3385 /* emit_setcc_reg(cd, CC_A, d); */
3386 /* emit_jcc(cd, CC_BE, 5); */
3391 emit_label_br(cd, BRANCH_LABEL_4);
3393 emit_label(cd, BRANCH_LABEL_3);
3396 /* class instanceof code */
3398 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3399 if (super == NULL) {
3400 emit_label(cd, BRANCH_LABEL_2);
3404 emit_label_beq(cd, BRANCH_LABEL_5);
3407 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3409 if (super == NULL) {
3410 patcher_add_patch_ref(jd, PATCHER_instanceof_class,
3411 iptr->sx.s23.s3.c.ref, 0);
3414 M_MOV_IMM(supervftbl, REG_ITMP2);
3415 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CLASSINFO_VFTBL, super);
3416 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3417 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3418 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3420 M_ISUB(REG_ITMP2, REG_ITMP1);
3421 M_CLR(d); /* may be REG_ITMP2 */
3422 M_CMP(REG_ITMP3, REG_ITMP1);
3427 emit_label(cd, BRANCH_LABEL_5);
3430 if (super == NULL) {
3431 emit_label(cd, BRANCH_LABEL_1);
3432 emit_label(cd, BRANCH_LABEL_4);
3435 emit_store_dst(jd, iptr, d);
3439 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3441 /* check for negative sizes and copy sizes to stack if necessary */
3443 MCODECHECK((iptr->s1.argcount << 1) + 64);
3445 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3446 /* copy SAVEDVAR sizes to stack */
3447 var = VAR(iptr->sx.s23.s2.args[s1]);
3449 /* Already Preallocated? */
3450 if (!(var->flags & PREALLOC)) {
3451 if (var->flags & INMEMORY) {
3452 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff);
3453 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3456 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3460 /* is a patcher function set? */
3462 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3463 patcher_add_patch_ref(jd, PATCHER_builtin_multianewarray,
3464 iptr->sx.s23.s3.c.ref, 0);
3470 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3472 /* a0 = dimension count */
3474 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3476 /* a1 = arraydescriptor */
3478 M_IST_IMM(disp, REG_SP, 1 * 4);
3479 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CLASSINFO, disp);
3481 /* a2 = pointer to dimensions = stack pointer */
3483 M_MOV(REG_SP, REG_ITMP1);
3484 M_AADD_IMM(3 * 4, REG_ITMP1);
3485 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3487 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3488 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP,
3489 builtintable_get_internal(BUILTIN_multianewarray));
3492 /* check for exception before result assignment */
3494 emit_exception_check(cd, iptr);
3496 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3497 M_INTMOVE(REG_RESULT, s1);
3498 emit_store_dst(jd, iptr, s1);
3501 #if defined(ENABLE_SSA)
3502 case ICMD_GETEXCEPTION:
3503 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
3504 M_INTMOVE(REG_ITMP1, d);
3505 emit_store_dst(jd, iptr, d);
3509 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3514 } /* for instruction */
3518 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3521 #if defined(ENABLE_SSA)
3524 /* by edge splitting, in Blocks with phi moves there can only */
3525 /* be a goto as last command, no other Jump/Branch Command */
3527 if (!last_cmd_was_goto)
3528 codegen_emit_phi_moves(jd, bptr);
3533 /* At the end of a basic block we may have to append some nops,
3534 because the patcher stub calling code might be longer than the
3535 actual instruction. So codepatching does not change the
3536 following block unintentionally. */
3538 if (cd->mcodeptr < cd->lastmcodeptr) {
3539 while (cd->mcodeptr < cd->lastmcodeptr) {
3544 } /* if (bptr -> flags >= BBREACHED) */
3545 } /* for basic block */
3547 /* generate stubs */
3549 emit_patcher_traps(jd);
3551 /* everything's ok */
3557 /* codegen_emit_stub_native ****************************************************
3559 Emits a stub routine which calls a native method.
3561 *******************************************************************************/
3563 void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f, int skipparams)
3569 int i, j; /* count variables */
3573 /* get required compiler data */
3579 /* set some variables */
3583 /* calculate stackframe size */
3585 cd->stackframesize =
3586 sizeof(stackframeinfo_t) / SIZEOF_VOID_P +
3587 sizeof(localref_table) / SIZEOF_VOID_P +
3588 4 + /* 4 arguments (start_native_call) */
3591 /* keep stack 16-byte aligned */
3593 ALIGN_ODD(cd->stackframesize);
3595 /* create method header */
3597 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3598 (void) dseg_add_unique_s4(cd, cd->stackframesize * 8 + 4); /* FrameSize */
3599 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3600 (void) dseg_add_unique_s4(cd, 0); /* IntSave */
3601 (void) dseg_add_unique_s4(cd, 0); /* FltSave */
3603 #if defined(ENABLE_PROFILING)
3604 /* generate native method profiling code */
3606 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
3607 /* count frequency */
3609 M_MOV_IMM(code, REG_ITMP1);
3610 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
3611 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CODEINFO, 0);
3615 /* calculate stackframe size for native function */
3617 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
3619 /* Mark the whole fpu stack as free for native functions (only for saved */
3620 /* register count == 0). */
3622 emit_ffree_reg(cd, 0);
3623 emit_ffree_reg(cd, 1);
3624 emit_ffree_reg(cd, 2);
3625 emit_ffree_reg(cd, 3);
3626 emit_ffree_reg(cd, 4);
3627 emit_ffree_reg(cd, 5);
3628 emit_ffree_reg(cd, 6);
3629 emit_ffree_reg(cd, 7);
3631 #if defined(ENABLE_GC_CACAO)
3632 /* remember callee saved int registers in stackframeinfo (GC may need to */
3633 /* recover them during a collection). */
3635 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3636 OFFSET(stackframeinfo_t, intregs);
3638 for (i = 0; i < INT_SAV_CNT; i++)
3639 M_AST(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3642 /* prepare data structures for native function call */
3644 M_MOV(REG_SP, REG_ITMP1);
3645 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3646 M_IST_IMM(0, REG_SP, 1 * 4);
3649 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
3652 /* remember class argument */
3654 if (m->flags & ACC_STATIC)
3655 M_MOV(REG_RESULT, REG_ITMP3);
3657 /* Copy or spill arguments to new locations. */
3659 for (i = md->paramcount - 1, j = i + skipparams; i >= 0; i--, j--) {
3660 if (!md->params[i].inmemory)
3663 s1 = md->params[i].regoff + cd->stackframesize * 8 + 8;
3664 s2 = nmd->params[j].regoff;
3666 /* float/double in memory can be copied like int/longs */
3668 switch (md->paramtypes[i].type) {
3672 M_ILD(REG_ITMP1, REG_SP, s1);
3673 M_IST(REG_ITMP1, REG_SP, s2);
3677 M_LLD(REG_ITMP12_PACKED, REG_SP, s1);
3678 M_LST(REG_ITMP12_PACKED, REG_SP, s2);
3683 /* Handle native Java methods. */
3685 if (m->flags & ACC_NATIVE) {
3686 /* if function is static, put class into second argument */
3688 if (m->flags & ACC_STATIC)
3689 M_AST(REG_ITMP3, REG_SP, 1 * 4);
3691 /* put env into first argument */
3693 M_AST_IMM(VM_get_jnienv(), REG_SP, 0 * 4);
3696 /* Call the native function. */
3698 disp = dseg_add_functionptr(cd, f);
3699 emit_mov_imm_reg(cd, 0, REG_ITMP3);
3701 M_ALD(REG_ITMP1, REG_ITMP3, disp);
3704 /* save return value */
3706 switch (md->returntype.type) {
3709 switch (md->returntype.primitivetype) {
3710 case PRIMITIVETYPE_BOOLEAN:
3711 M_BZEXT(REG_RESULT, REG_RESULT);
3713 case PRIMITIVETYPE_BYTE:
3714 M_BSEXT(REG_RESULT, REG_RESULT);
3716 case PRIMITIVETYPE_CHAR:
3717 M_CZEXT(REG_RESULT, REG_RESULT);
3719 case PRIMITIVETYPE_SHORT:
3720 M_SSEXT(REG_RESULT, REG_RESULT);
3723 M_IST(REG_RESULT, REG_SP, 1 * 8);
3726 M_LST(REG_RESULT_PACKED, REG_SP, 1 * 8);
3729 emit_fsts_membase(cd, REG_SP, 1 * 8);
3732 emit_fstl_membase(cd, REG_SP, 1 * 8);
3738 /* remove native stackframe info */
3740 M_MOV(REG_SP, REG_ITMP1);
3741 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3742 M_IST_IMM(0, REG_SP, 1 * 4);
3745 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
3747 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
3749 /* restore return value */
3751 switch (md->returntype.type) {
3754 M_ILD(REG_RESULT, REG_SP, 1 * 8);
3757 M_LLD(REG_RESULT_PACKED, REG_SP, 1 * 8);
3760 emit_flds_membase(cd, REG_SP, 1 * 8);
3763 emit_fldl_membase(cd, REG_SP, 1 * 8);
3769 #if defined(ENABLE_GC_CACAO)
3770 /* restore callee saved int registers from stackframeinfo (GC might have */
3771 /* modified them during a collection). */
3773 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3774 OFFSET(stackframeinfo_t, intregs);
3776 for (i = 0; i < INT_SAV_CNT; i++)
3777 M_ALD(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3780 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
3782 /* check for exception */
3789 /* handle exception */
3791 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
3792 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
3793 M_ASUB_IMM(2, REG_ITMP2_XPC);
3795 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
3801 * These are local overrides for various environment variables in Emacs.
3802 * Please do not remove this and leave it at the end of the file, where
3803 * Emacs will automagically detect them.
3804 * ---------------------------------------------------------------------
3807 * indent-tabs-mode: t
3811 * vim:noexpandtab:sw=4:ts=4: