1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007, 2008
4 CACAOVM - Verein zur Foerderung der freien virtuellen Maschine CACAO
6 This file is part of CACAO.
8 This program is free software; you can redistribute it and/or
9 modify it under the terms of the GNU General Public License as
10 published by the Free Software Foundation; either version 2, or (at
11 your option) any later version.
13 This program is distributed in the hope that it will be useful, but
14 WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with this program; if not, write to the Free Software
20 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
34 #include "vm/jit/i386/md-abi.h"
36 #include "vm/jit/i386/codegen.h"
37 #include "vm/jit/i386/emit.h"
39 #include "mm/memory.h"
40 #include "native/jni.h"
41 #include "native/localref.h"
42 #include "native/native.h"
44 #include "threads/lock-common.h"
46 #include "vm/builtin.h"
47 #include "vm/exceptions.h"
48 #include "vm/global.h"
49 #include "vm/primitive.h"
50 #include "vm/stringlocal.h"
53 #include "vm/jit/abi.h"
54 #include "vm/jit/asmpart.h"
55 #include "vm/jit/codegen-common.h"
56 #include "vm/jit/dseg.h"
57 #include "vm/jit/emit-common.h"
58 #include "vm/jit/jit.h"
59 #include "vm/jit/linenumbertable.h"
60 #include "vm/jit/parse.h"
61 #include "vm/jit/patcher-common.h"
62 #include "vm/jit/reg.h"
63 #include "vm/jit/replace.h"
64 #include "vm/jit/stacktrace.h"
66 #include "vm/jit/jitcache.h"
67 #include "vm/jit/trap.h"
69 #if defined(ENABLE_SSA)
70 # include "vm/jit/optimizing/lsra.h"
71 # include "vm/jit/optimizing/ssa.h"
72 #elif defined(ENABLE_LSRA)
73 # include "vm/jit/allocator/lsra.h"
76 #include "vmcore/loader.h"
77 #include "vmcore/options.h"
78 #include "vmcore/utf8.h"
81 /* codegen_emit ****************************************************************
83 Generates machine code.
85 *******************************************************************************/
87 bool codegen_emit(jitdata *jd)
93 s4 len, s1, s2, s3, d, disp;
94 int align_off; /* offset for alignment compensation */
99 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
100 builtintable_entry *bte;
103 unresolved_field *uf;
106 #if defined(ENABLE_SSA)
108 bool last_cmd_was_goto;
110 last_cmd_was_goto = false;
114 /* get required compiler data */
121 /* prevent compiler warnings */
132 s4 savedregs_num = 0;
135 /* space to save used callee saved registers */
137 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
138 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
140 cd->stackframesize = rd->memuse + savedregs_num;
142 #if defined(ENABLE_THREADS)
143 /* space to save argument of monitor_enter */
145 if (checksync && code_is_synchronized(code))
146 cd->stackframesize++;
149 /* create method header */
151 /* Keep stack of non-leaf functions 16-byte aligned. */
153 if (!code_is_leafmethod(code)) {
154 ALIGN_ODD(cd->stackframesize);
157 align_off = cd->stackframesize ? 4 : 0;
159 #if defined(ENABLE_JITCACHE)
160 disp = dseg_add_unique_address(cd, code); /* CodeinfoPointer */
161 jitcache_add_cached_ref(code, CRT_CODEINFO, 0, disp);
163 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
165 (void) dseg_add_unique_s4(
166 cd, cd->stackframesize * 8 + align_off); /* FrameSize */
168 code->synchronizedoffset = rd->memuse * 8;
170 /* REMOVEME: We still need it for exception handling in assembler. */
172 if (code_is_leafmethod(code))
173 (void) dseg_add_unique_s4(cd, 1); /* IsLeaf */
175 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
177 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
178 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
180 #if defined(ENABLE_PROFILING)
181 /* generate method profiling code */
183 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
184 /* count frequency */
186 M_MOV_IMM(code, REG_ITMP3);
187 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CODEINFO, NULL);
189 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
193 /* create stack frame (if necessary) */
195 if (cd->stackframesize)
197 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
199 /* save return address and used callee saved registers */
201 p = cd->stackframesize;
202 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
203 p--; M_AST(rd->savintregs[i], REG_SP, p * 8);
205 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
206 p--; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 8);
209 /* take arguments out of register or stack frame */
214 for (p = 0, l = 0; p < md->paramcount; p++) {
215 t = md->paramtypes[p].type;
217 varindex = jd->local_map[l * 5 + t];
218 #if defined(ENABLE_SSA)
220 if (varindex != UNUSED)
221 varindex = ls->var_0[varindex];
222 if ((varindex != UNUSED) && (ls->lifetime[varindex].type == UNUSED))
227 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
230 if (varindex == UNUSED)
234 s1 = md->params[p].regoff;
237 if (IS_INT_LNG_TYPE(t)) { /* integer args */
238 if (!md->params[p].inmemory) { /* register arguments */
239 log_text("integer register argument");
241 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
242 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
244 else { /* reg arg -> spilled */
245 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
249 if (!(var->flags & INMEMORY)) {
251 cd->stackframesize * 8 + 4 + align_off + s1);
254 if (!IS_2_WORD_TYPE(t)) {
255 #if defined(ENABLE_SSA)
256 /* no copy avoiding by now possible with SSA */
258 emit_mov_membase_reg( /* + 4 for return address */
260 cd->stackframesize * 8 + s1 + 4 + align_off,
262 emit_mov_reg_membase(
263 cd, REG_ITMP1, REG_SP, var->vv.regoff);
266 #endif /*defined(ENABLE_SSA)*/
267 /* reuse stackslot */
268 var->vv.regoff = cd->stackframesize * 8 + 4 +
273 #if defined(ENABLE_SSA)
274 /* no copy avoiding by now possible with SSA */
276 emit_mov_membase_reg( /* + 4 for return address */
278 cd->stackframesize * 8 + s1 + 4 + align_off,
280 emit_mov_reg_membase(
281 cd, REG_ITMP1, REG_SP, var->vv.regoff);
282 emit_mov_membase_reg( /* + 4 for return address */
284 cd->stackframesize * 8 + s1 + 4 + 4 + align_off,
286 emit_mov_reg_membase(
287 cd, REG_ITMP1, REG_SP, var->vv.regoff + 4);
290 #endif /*defined(ENABLE_SSA)*/
291 /* reuse stackslot */
292 var->vv.regoff = cd->stackframesize * 8 + 8 + s1;
297 else { /* floating args */
298 if (!md->params[p].inmemory) { /* register arguments */
299 log_text("There are no float argument registers!");
301 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
302 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
303 } else { /* reg arg -> spilled */
304 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 8 */
308 else { /* stack arguments */
309 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
313 cd->stackframesize * 8 + s1 + 4 + align_off);
315 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
321 cd->stackframesize * 8 + s1 + 4 + align_off);
323 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
326 } else { /* stack-arg -> spilled */
327 #if defined(ENABLE_SSA)
328 /* no copy avoiding by now possible with SSA */
330 emit_mov_membase_reg(
332 cd->stackframesize * 8 + s1 + 4 + align_off,
334 emit_mov_reg_membase(
335 cd, REG_ITMP1, REG_SP, var->vv.regoff);
339 cd->stackframesize * 8 + s1 + 4 + align_off);
340 emit_fstps_membase(cd, REG_SP, var->vv.regoff);
345 cd->stackframesize * 8 + s1 + 4 + align_off);
346 emit_fstpl_membase(cd, REG_SP, var->vv.regoff);
350 #endif /*defined(ENABLE_SSA)*/
351 /* reuse stackslot */
352 var->vv.regoff = cd->stackframesize * 8 + 4 +
359 /* call monitorenter function */
361 #if defined(ENABLE_THREADS)
362 if (checksync && code_is_synchronized(code)) {
365 if (m->flags & ACC_STATIC) {
366 M_MOV_IMM(&m->clazz->object.header, REG_ITMP1);
367 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_OBJECT_HEADER, m->clazz);
370 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 8 + 4 + align_off);
373 M_ALD_MEM(REG_ITMP1, TRAP_NullPointerException);
376 M_AST(REG_ITMP1, REG_SP, s1 * 8);
377 M_AST(REG_ITMP1, REG_SP, 0 * 4);
378 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
379 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP,
380 builtintable_get_internal(LOCK_monitor_enter));
386 emit_verbosecall_enter(jd);
391 #if defined(ENABLE_SSA)
392 /* with SSA the Header is Basic Block 0 - insert phi Moves if necessary */
394 codegen_emit_phi_moves(jd, ls->basicblocks[0]);
397 /* end of header generation */
399 /* create replacement points */
401 REPLACEMENT_POINTS_INIT(cd, jd);
403 /* walk through all basic blocks */
405 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
407 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
409 if (bptr->flags >= BBREACHED) {
410 /* branch resolving */
412 codegen_resolve_branchrefs(cd, bptr);
414 /* handle replacement points */
416 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
418 #if defined(ENABLE_REPLACEMENT)
419 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
420 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
422 disp = (s4) &(m->hitcountdown);
423 M_ISUB_IMM_MEMABS(1, disp);
429 /* copy interface registers to their destination */
434 #if defined(ENABLE_PROFILING)
435 /* generate basic block profiling code */
437 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
438 /* count frequency */
440 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
441 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
445 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
446 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
449 # if defined(ENABLE_SSA)
451 last_cmd_was_goto = false;
455 var = VAR(bptr->invars[len]);
456 if (bptr->type != BBTYPE_STD) {
457 if (!IS_2_WORD_TYPE(var->type)) {
458 #if !defined(ENABLE_SSA)
459 if (bptr->type == BBTYPE_EXH) {
460 d = codegen_reg_of_var(0, var, REG_ITMP1);
461 M_INTMOVE(REG_ITMP1, d);
462 emit_store(jd, NULL, var, d);
467 log_text("copy interface registers(EXH, SBR): longs \
468 have to be in memory (begin 1)");
476 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
480 var = VAR(bptr->invars[len]);
481 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
482 if (!IS_2_WORD_TYPE(var->type)) {
483 if (bptr->type == BBTYPE_EXH) {
484 d = codegen_reg_of_var(0, var, REG_ITMP1);
485 M_INTMOVE(REG_ITMP1, d);
486 emit_store(jd, NULL, var, d);
490 log_text("copy interface registers: longs have to be in \
497 assert((var->flags & INOUT));
502 /* walk through all instructions */
507 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
508 if (iptr->line != currentline) {
509 linenumbertable_list_entry_add(cd, iptr->line);
510 currentline = iptr->line;
513 MCODECHECK(1024); /* 1kB should be enough */
516 case ICMD_NOP: /* ... ==> ... */
517 case ICMD_POP: /* ..., value ==> ... */
518 case ICMD_POP2: /* ..., value, value ==> ... */
521 case ICMD_INLINE_START:
523 REPLACEMENT_POINT_INLINE_START(cd, iptr);
526 case ICMD_INLINE_BODY:
528 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
529 linenumbertable_list_entry_add_inline_start(cd, iptr);
530 linenumbertable_list_entry_add(cd, iptr->line);
533 case ICMD_INLINE_END:
535 linenumbertable_list_entry_add_inline_end(cd, iptr);
536 linenumbertable_list_entry_add(cd, iptr->line);
539 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
541 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
542 emit_nullpointer_check(cd, iptr, s1);
545 /* constant operations ************************************************/
547 case ICMD_ICONST: /* ... ==> ..., constant */
549 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
550 ICONST(d, iptr->sx.val.i);
551 emit_store_dst(jd, iptr, d);
554 case ICMD_LCONST: /* ... ==> ..., constant */
556 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
557 LCONST(d, iptr->sx.val.l);
558 emit_store_dst(jd, iptr, d);
561 case ICMD_FCONST: /* ... ==> ..., constant */
563 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
564 if (iptr->sx.val.f == 0.0) {
568 if (iptr->sx.val.i == 0x80000000) {
572 } else if (iptr->sx.val.f == 1.0) {
575 } else if (iptr->sx.val.f == 2.0) {
581 disp = dseg_add_float(cd, iptr->sx.val.f);
582 emit_mov_imm_reg(cd, 0, REG_ITMP1);
584 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_ENTRYPOINT, NULL);
585 emit_flds_membase(cd, REG_ITMP1, disp);
587 emit_store_dst(jd, iptr, d);
590 case ICMD_DCONST: /* ... ==> ..., constant */
592 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
593 if (iptr->sx.val.d == 0.0) {
597 if (iptr->sx.val.l == 0x8000000000000000LL) {
601 } else if (iptr->sx.val.d == 1.0) {
604 } else if (iptr->sx.val.d == 2.0) {
610 disp = dseg_add_double(cd, iptr->sx.val.d);
611 emit_mov_imm_reg(cd, 0, REG_ITMP1);
613 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_ENTRYPOINT, NULL);
614 emit_fldl_membase(cd, REG_ITMP1, disp);
616 emit_store_dst(jd, iptr, d);
619 case ICMD_ACONST: /* ... ==> ..., constant */
621 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
623 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
624 patcher_add_patch_ref(jd, PATCHER_aconst,
625 iptr->sx.val.c.ref, 0);
630 if (iptr->sx.val.anyptr == NULL)
634 M_MOV_IMM(iptr->sx.val.anyptr, d);
635 JITCACHE_ADD_CACHED_REF_JD(
637 (iptr->flags.bits & INS_FLAG_CLASS) ? CRT_CLASSINFO
639 (iptr->flags.bits & INS_FLAG_CLASS) ? iptr->sx.val.c.cls
640 : iptr->sx.val.stringconst);
643 emit_store_dst(jd, iptr, d);
647 /* load/store/copy/move operations ************************************/
665 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
670 /* integer operations *************************************************/
672 case ICMD_INEG: /* ..., value ==> ..., - value */
674 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
675 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
678 emit_store_dst(jd, iptr, d);
681 case ICMD_LNEG: /* ..., value ==> ..., - value */
683 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
684 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
686 M_NEG(GET_LOW_REG(d));
687 M_IADDC_IMM(0, GET_HIGH_REG(d));
688 M_NEG(GET_HIGH_REG(d));
689 emit_store_dst(jd, iptr, d);
692 case ICMD_I2L: /* ..., value ==> ..., value */
694 s1 = emit_load_s1(jd, iptr, EAX);
695 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
698 M_LNGMOVE(EAX_EDX_PACKED, d);
699 emit_store_dst(jd, iptr, d);
702 case ICMD_L2I: /* ..., value ==> ..., value */
704 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
705 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
707 emit_store_dst(jd, iptr, d);
710 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
712 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
713 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
717 emit_store_dst(jd, iptr, d);
720 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
722 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
723 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
725 emit_store_dst(jd, iptr, d);
728 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
730 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
731 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
733 emit_store_dst(jd, iptr, d);
737 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
739 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
740 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
741 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
748 emit_store_dst(jd, iptr, d);
752 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
753 /* sx.val.i = constant */
755 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
756 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
758 /* `inc reg' is slower on p4's (regarding to ia32
759 optimization reference manual and benchmarks) and as
763 M_IADD_IMM(iptr->sx.val.i, d);
764 emit_store_dst(jd, iptr, d);
767 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
769 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
770 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
771 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
772 M_INTMOVE(s1, GET_LOW_REG(d));
773 M_IADD(s2, GET_LOW_REG(d));
774 /* don't use REG_ITMP1 */
775 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
776 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
777 M_INTMOVE(s1, GET_HIGH_REG(d));
778 M_IADDC(s2, GET_HIGH_REG(d));
779 emit_store_dst(jd, iptr, d);
782 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
783 /* sx.val.l = constant */
785 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
786 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
788 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
789 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
790 emit_store_dst(jd, iptr, d);
793 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
795 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
796 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
797 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
799 M_INTMOVE(s1, REG_ITMP1);
800 M_ISUB(s2, REG_ITMP1);
801 M_INTMOVE(REG_ITMP1, d);
807 emit_store_dst(jd, iptr, d);
810 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
811 /* sx.val.i = constant */
813 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
814 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
816 M_ISUB_IMM(iptr->sx.val.i, d);
817 emit_store_dst(jd, iptr, d);
820 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
822 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
823 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
824 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
825 if (s2 == GET_LOW_REG(d)) {
826 M_INTMOVE(s1, REG_ITMP1);
827 M_ISUB(s2, REG_ITMP1);
828 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
831 M_INTMOVE(s1, GET_LOW_REG(d));
832 M_ISUB(s2, GET_LOW_REG(d));
834 /* don't use REG_ITMP1 */
835 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
836 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
837 if (s2 == GET_HIGH_REG(d)) {
838 M_INTMOVE(s1, REG_ITMP2);
839 M_ISUBB(s2, REG_ITMP2);
840 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
843 M_INTMOVE(s1, GET_HIGH_REG(d));
844 M_ISUBB(s2, GET_HIGH_REG(d));
846 emit_store_dst(jd, iptr, d);
849 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
850 /* sx.val.l = constant */
852 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
853 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
855 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
856 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
857 emit_store_dst(jd, iptr, d);
860 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
862 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
863 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
864 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
871 emit_store_dst(jd, iptr, d);
874 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
875 /* sx.val.i = constant */
877 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
878 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
879 M_IMUL_IMM(s1, iptr->sx.val.i, d);
880 emit_store_dst(jd, iptr, d);
883 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
885 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
886 s2 = emit_load_s2_low(jd, iptr, EDX);
887 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
889 M_INTMOVE(s1, REG_ITMP2);
890 M_IMUL(s2, REG_ITMP2);
892 s1 = emit_load_s1_low(jd, iptr, EAX);
893 s2 = emit_load_s2_high(jd, iptr, EDX);
896 M_IADD(EDX, REG_ITMP2);
898 s1 = emit_load_s1_low(jd, iptr, EAX);
899 s2 = emit_load_s2_low(jd, iptr, EDX);
902 M_INTMOVE(EAX, GET_LOW_REG(d));
903 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
905 emit_store_dst(jd, iptr, d);
908 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
909 /* sx.val.l = constant */
911 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
912 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
913 ICONST(EAX, iptr->sx.val.l);
915 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
916 M_IADD(REG_ITMP2, EDX);
917 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
918 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
919 M_IADD(REG_ITMP2, EDX);
920 M_LNGMOVE(EAX_EDX_PACKED, d);
921 emit_store_dst(jd, iptr, d);
924 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
926 s1 = emit_load_s1(jd, iptr, EAX);
927 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
928 d = codegen_reg_of_dst(jd, iptr, EAX);
929 emit_arithmetic_check(cd, iptr, s2);
931 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
933 /* check as described in jvm spec */
935 M_CMP_IMM(0x80000000, EAX);
942 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
943 emit_store_dst(jd, iptr, d);
946 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
948 s1 = emit_load_s1(jd, iptr, EAX);
949 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
950 d = codegen_reg_of_dst(jd, iptr, EDX);
951 emit_arithmetic_check(cd, iptr, s2);
953 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
955 /* check as described in jvm spec */
957 M_CMP_IMM(0x80000000, EAX);
965 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
966 emit_store_dst(jd, iptr, d);
969 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
970 /* sx.val.i = constant */
972 /* TODO: optimize for `/ 2' */
973 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
974 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
978 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
979 M_SRA_IMM(iptr->sx.val.i, d);
980 emit_store_dst(jd, iptr, d);
983 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
984 /* sx.val.i = constant */
986 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
987 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
989 M_MOV(s1, REG_ITMP1);
993 M_AND_IMM(iptr->sx.val.i, d);
995 M_BGE(2 + 2 + 6 + 2);
996 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
998 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
1000 emit_store_dst(jd, iptr, d);
1003 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1004 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1006 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
1007 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1009 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
1010 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
1011 /* XXX could be optimized */
1012 emit_arithmetic_check(cd, iptr, REG_ITMP3);
1014 bte = iptr->sx.s23.s3.bte;
1017 M_LST(s2, REG_SP, 2 * 4);
1019 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1020 M_LST(s1, REG_SP, 0 * 4);
1022 M_MOV_IMM(bte->fp, REG_ITMP3);
1023 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP, bte);
1025 emit_store_dst(jd, iptr, d);
1028 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1029 /* sx.val.i = constant */
1031 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1032 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1034 M_TEST(GET_HIGH_REG(d));
1036 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1037 M_IADDC_IMM(0, GET_HIGH_REG(d));
1038 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1039 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1040 emit_store_dst(jd, iptr, d);
1044 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1045 /* sx.val.l = constant */
1047 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1048 if (iptr->dst.var->flags & INMEMORY) {
1049 if (iptr->s1.var->flags & INMEMORY) {
1050 /* Alpha algorithm */
1052 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8);
1054 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1060 /* TODO: hmm, don't know if this is always correct */
1062 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1064 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1070 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1071 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1073 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1074 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1075 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1076 emit_jcc(cd, CC_GE, disp);
1078 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1079 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1081 emit_neg_reg(cd, REG_ITMP1);
1082 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1083 emit_neg_reg(cd, REG_ITMP2);
1085 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1086 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1088 emit_neg_reg(cd, REG_ITMP1);
1089 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1090 emit_neg_reg(cd, REG_ITMP2);
1092 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 8);
1093 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 8 + 4);
1097 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1098 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1100 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1101 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1102 M_TEST(GET_LOW_REG(s1));
1108 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1110 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1111 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1112 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1113 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1116 emit_store_dst(jd, iptr, d);
1119 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1120 /* sx.val.i = constant */
1122 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1123 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1125 M_SLL_IMM(iptr->sx.val.i, d);
1126 emit_store_dst(jd, iptr, d);
1129 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1131 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1132 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1133 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1134 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1137 emit_store_dst(jd, iptr, d);
1140 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1141 /* sx.val.i = constant */
1143 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1144 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1146 M_SRA_IMM(iptr->sx.val.i, d);
1147 emit_store_dst(jd, iptr, d);
1150 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1152 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1153 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1154 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1155 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1158 emit_store_dst(jd, iptr, d);
1161 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1162 /* sx.val.i = constant */
1164 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1165 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1167 M_SRL_IMM(iptr->sx.val.i, d);
1168 emit_store_dst(jd, iptr, d);
1171 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1173 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1174 s2 = emit_load_s2(jd, iptr, ECX);
1175 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1178 M_TEST_IMM(32, ECX);
1180 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1181 M_CLR(GET_LOW_REG(d));
1182 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1183 M_SLL(GET_LOW_REG(d));
1184 emit_store_dst(jd, iptr, d);
1187 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1188 /* sx.val.i = constant */
1190 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1191 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1193 if (iptr->sx.val.i & 0x20) {
1194 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1195 M_CLR(GET_LOW_REG(d));
1196 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1200 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1202 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1204 emit_store_dst(jd, iptr, d);
1207 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1209 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1210 s2 = emit_load_s2(jd, iptr, ECX);
1211 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1214 M_TEST_IMM(32, ECX);
1216 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1217 M_SRA_IMM(31, GET_HIGH_REG(d));
1218 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1219 M_SRA(GET_HIGH_REG(d));
1220 emit_store_dst(jd, iptr, d);
1223 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1224 /* sx.val.i = constant */
1226 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1227 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1229 if (iptr->sx.val.i & 0x20) {
1230 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1231 M_SRA_IMM(31, GET_HIGH_REG(d));
1232 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1236 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1238 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1240 emit_store_dst(jd, iptr, d);
1243 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1245 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1246 s2 = emit_load_s2(jd, iptr, ECX);
1247 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1250 M_TEST_IMM(32, ECX);
1252 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1253 M_CLR(GET_HIGH_REG(d));
1254 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1255 M_SRL(GET_HIGH_REG(d));
1256 emit_store_dst(jd, iptr, d);
1259 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1260 /* sx.val.l = constant */
1262 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1263 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1265 if (iptr->sx.val.i & 0x20) {
1266 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1267 M_CLR(GET_HIGH_REG(d));
1268 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1272 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1274 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1276 emit_store_dst(jd, iptr, d);
1279 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1281 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1282 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1283 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1290 emit_store_dst(jd, iptr, d);
1293 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1294 /* sx.val.i = constant */
1296 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1297 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1299 M_AND_IMM(iptr->sx.val.i, d);
1300 emit_store_dst(jd, iptr, d);
1303 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1305 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1306 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1307 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1308 if (s2 == GET_LOW_REG(d))
1309 M_AND(s1, GET_LOW_REG(d));
1311 M_INTMOVE(s1, GET_LOW_REG(d));
1312 M_AND(s2, GET_LOW_REG(d));
1314 /* REG_ITMP1 probably contains low 32-bit of destination */
1315 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1316 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1317 if (s2 == GET_HIGH_REG(d))
1318 M_AND(s1, GET_HIGH_REG(d));
1320 M_INTMOVE(s1, GET_HIGH_REG(d));
1321 M_AND(s2, GET_HIGH_REG(d));
1323 emit_store_dst(jd, iptr, d);
1326 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1327 /* sx.val.l = constant */
1329 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1330 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1332 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1333 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1334 emit_store_dst(jd, iptr, d);
1337 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1339 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1340 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1341 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1348 emit_store_dst(jd, iptr, d);
1351 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1352 /* sx.val.i = constant */
1354 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1355 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1357 M_OR_IMM(iptr->sx.val.i, d);
1358 emit_store_dst(jd, iptr, d);
1361 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1363 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1364 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1365 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1366 if (s2 == GET_LOW_REG(d))
1367 M_OR(s1, GET_LOW_REG(d));
1369 M_INTMOVE(s1, GET_LOW_REG(d));
1370 M_OR(s2, GET_LOW_REG(d));
1372 /* REG_ITMP1 probably contains low 32-bit of destination */
1373 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1374 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1375 if (s2 == GET_HIGH_REG(d))
1376 M_OR(s1, GET_HIGH_REG(d));
1378 M_INTMOVE(s1, GET_HIGH_REG(d));
1379 M_OR(s2, GET_HIGH_REG(d));
1381 emit_store_dst(jd, iptr, d);
1384 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1385 /* sx.val.l = constant */
1387 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1388 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1390 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1391 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1392 emit_store_dst(jd, iptr, d);
1395 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1397 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1398 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1399 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1406 emit_store_dst(jd, iptr, d);
1409 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1410 /* sx.val.i = constant */
1412 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1413 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1415 M_XOR_IMM(iptr->sx.val.i, d);
1416 emit_store_dst(jd, iptr, d);
1419 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1421 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1422 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1423 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1424 if (s2 == GET_LOW_REG(d))
1425 M_XOR(s1, GET_LOW_REG(d));
1427 M_INTMOVE(s1, GET_LOW_REG(d));
1428 M_XOR(s2, GET_LOW_REG(d));
1430 /* REG_ITMP1 probably contains low 32-bit of destination */
1431 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1432 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1433 if (s2 == GET_HIGH_REG(d))
1434 M_XOR(s1, GET_HIGH_REG(d));
1436 M_INTMOVE(s1, GET_HIGH_REG(d));
1437 M_XOR(s2, GET_HIGH_REG(d));
1439 emit_store_dst(jd, iptr, d);
1442 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1443 /* sx.val.l = constant */
1445 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1446 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1448 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1449 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1450 emit_store_dst(jd, iptr, d);
1454 /* floating operations ************************************************/
1456 case ICMD_FNEG: /* ..., value ==> ..., - value */
1458 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1459 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1461 emit_store_dst(jd, iptr, d);
1464 case ICMD_DNEG: /* ..., value ==> ..., - value */
1466 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1467 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1469 emit_store_dst(jd, iptr, d);
1472 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1474 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1475 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1476 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1478 emit_store_dst(jd, iptr, d);
1481 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1483 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1484 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1485 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1487 emit_store_dst(jd, iptr, d);
1490 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1492 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1493 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1494 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1496 emit_store_dst(jd, iptr, d);
1499 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1501 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1502 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1503 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1505 emit_store_dst(jd, iptr, d);
1508 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1510 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1511 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1512 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1514 emit_store_dst(jd, iptr, d);
1517 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1519 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1520 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1521 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1523 emit_store_dst(jd, iptr, d);
1526 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1528 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1529 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1530 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1532 emit_store_dst(jd, iptr, d);
1535 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1537 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1538 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1539 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1541 emit_store_dst(jd, iptr, d);
1544 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1546 /* exchanged to skip fxch */
1547 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1548 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1549 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1550 /* emit_fxch(cd); */
1555 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1556 emit_store_dst(jd, iptr, d);
1557 emit_ffree_reg(cd, 0);
1561 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1563 /* exchanged to skip fxch */
1564 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1565 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1566 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1567 /* emit_fxch(cd); */
1572 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1573 emit_store_dst(jd, iptr, d);
1574 emit_ffree_reg(cd, 0);
1578 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1579 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1581 var = VAROP(iptr->s1);
1582 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1584 if (var->flags & INMEMORY) {
1585 emit_fildl_membase(cd, REG_SP, var->vv.regoff);
1587 /* XXX not thread safe! */
1588 disp = dseg_add_unique_s4(cd, 0);
1589 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1591 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_ENTRYPOINT, NULL);
1592 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1593 emit_fildl_membase(cd, REG_ITMP1, disp);
1596 emit_store_dst(jd, iptr, d);
1599 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1600 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1602 var = VAROP(iptr->s1);
1603 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1604 if (var->flags & INMEMORY) {
1605 emit_fildll_membase(cd, REG_SP, var->vv.regoff);
1608 log_text("L2F: longs have to be in memory");
1611 emit_store_dst(jd, iptr, d);
1614 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1616 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1617 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1619 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1621 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_ENTRYPOINT, NULL);
1623 /* Round to zero, 53-bit mode, exception masked */
1624 disp = dseg_add_s4(cd, 0x0e7f);
1625 emit_fldcw_membase(cd, REG_ITMP1, disp);
1627 var = VAROP(iptr->dst);
1628 var1 = VAROP(iptr->s1);
1630 if (var->flags & INMEMORY) {
1631 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1633 /* Round to nearest, 53-bit mode, exceptions masked */
1634 disp = dseg_add_s4(cd, 0x027f);
1635 emit_fldcw_membase(cd, REG_ITMP1, disp);
1637 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1638 REG_SP, var->vv.regoff);
1641 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1643 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1646 /* XXX not thread safe! */
1647 disp = dseg_add_unique_s4(cd, 0);
1648 emit_fistpl_membase(cd, REG_ITMP1, disp);
1649 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1651 /* Round to nearest, 53-bit mode, exceptions masked */
1652 disp = dseg_add_s4(cd, 0x027f);
1653 emit_fldcw_membase(cd, REG_ITMP1, disp);
1655 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1658 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1659 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1662 emit_jcc(cd, CC_NE, disp);
1664 /* XXX: change this when we use registers */
1665 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1666 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1667 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP,
1668 builtintable_get_internal(BUILTIN_f2i));
1669 emit_call_reg(cd, REG_ITMP1);
1671 if (var->flags & INMEMORY) {
1672 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1675 M_INTMOVE(REG_RESULT, var->vv.regoff);
1679 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1681 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1682 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1684 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1686 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_ENTRYPOINT, NULL);
1688 /* Round to zero, 53-bit mode, exception masked */
1689 disp = dseg_add_s4(cd, 0x0e7f);
1690 emit_fldcw_membase(cd, REG_ITMP1, disp);
1692 var = VAROP(iptr->dst);
1693 var1 = VAROP(iptr->s1);
1695 if (var->flags & INMEMORY) {
1696 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1698 /* Round to nearest, 53-bit mode, exceptions masked */
1699 disp = dseg_add_s4(cd, 0x027f);
1700 emit_fldcw_membase(cd, REG_ITMP1, disp);
1702 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1703 REG_SP, var->vv.regoff);
1706 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1708 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1711 /* XXX not thread safe! */
1712 disp = dseg_add_unique_s4(cd, 0);
1713 emit_fistpl_membase(cd, REG_ITMP1, disp);
1714 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1716 /* Round to nearest, 53-bit mode, exceptions masked */
1717 disp = dseg_add_s4(cd, 0x027f);
1718 emit_fldcw_membase(cd, REG_ITMP1, disp);
1720 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1723 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1724 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1727 emit_jcc(cd, CC_NE, disp);
1729 /* XXX: change this when we use registers */
1730 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1731 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1732 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP,
1733 builtintable_get_internal(BUILTIN_d2i));
1734 emit_call_reg(cd, REG_ITMP1);
1736 if (var->flags & INMEMORY) {
1737 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1739 M_INTMOVE(REG_RESULT, var->vv.regoff);
1743 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1745 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1746 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1748 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1750 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_ENTRYPOINT, NULL);
1752 /* Round to zero, 53-bit mode, exception masked */
1753 disp = dseg_add_s4(cd, 0x0e7f);
1754 emit_fldcw_membase(cd, REG_ITMP1, disp);
1756 var = VAROP(iptr->dst);
1757 var1 = VAROP(iptr->s1);
1759 if (var->flags & INMEMORY) {
1760 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1762 /* Round to nearest, 53-bit mode, exceptions masked */
1763 disp = dseg_add_s4(cd, 0x027f);
1764 emit_fldcw_membase(cd, REG_ITMP1, disp);
1766 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1767 REG_SP, var->vv.regoff + 4);
1770 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1772 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1775 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1777 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1779 emit_jcc(cd, CC_NE, disp);
1781 emit_alu_imm_membase(cd, ALU_CMP, 0,
1782 REG_SP, var->vv.regoff);
1785 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1787 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1789 emit_jcc(cd, CC_NE, disp);
1791 /* XXX: change this when we use registers */
1792 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1793 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1794 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP,
1795 builtintable_get_internal(BUILTIN_f2l));
1796 emit_call_reg(cd, REG_ITMP1);
1797 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1798 emit_mov_reg_membase(cd, REG_RESULT2,
1799 REG_SP, var->vv.regoff + 4);
1802 log_text("F2L: longs have to be in memory");
1807 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1809 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1810 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1812 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1814 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_ENTRYPOINT, NULL);
1816 /* Round to zero, 53-bit mode, exception masked */
1817 disp = dseg_add_s4(cd, 0x0e7f);
1818 emit_fldcw_membase(cd, REG_ITMP1, disp);
1820 var = VAROP(iptr->dst);
1821 var1 = VAROP(iptr->s1);
1823 if (var->flags & INMEMORY) {
1824 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1826 /* Round to nearest, 53-bit mode, exceptions masked */
1827 disp = dseg_add_s4(cd, 0x027f);
1828 emit_fldcw_membase(cd, REG_ITMP1, disp);
1830 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1831 REG_SP, var->vv.regoff + 4);
1834 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1836 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1839 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1841 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1843 emit_jcc(cd, CC_NE, disp);
1845 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff);
1848 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1850 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1852 emit_jcc(cd, CC_NE, disp);
1854 /* XXX: change this when we use registers */
1855 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1856 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1857 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP,
1858 builtintable_get_internal(BUILTIN_d2l));
1859 emit_call_reg(cd, REG_ITMP1);
1860 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1861 emit_mov_reg_membase(cd, REG_RESULT2,
1862 REG_SP, var->vv.regoff + 4);
1865 log_text("D2L: longs have to be in memory");
1870 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1872 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1873 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1875 emit_store_dst(jd, iptr, d);
1878 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1880 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1881 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1883 emit_store_dst(jd, iptr, d);
1886 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1889 /* exchanged to skip fxch */
1890 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1891 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1892 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1893 /* emit_fxch(cd); */
1896 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1897 emit_jcc(cd, CC_E, 6);
1898 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1900 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1901 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1902 emit_jcc(cd, CC_B, 3 + 5);
1903 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1904 emit_jmp_imm(cd, 3);
1905 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1906 emit_store_dst(jd, iptr, d);
1909 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1912 /* exchanged to skip fxch */
1913 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1914 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1915 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1916 /* emit_fxch(cd); */
1919 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1920 emit_jcc(cd, CC_E, 3);
1921 emit_movb_imm_reg(cd, 1, REG_AH);
1923 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1924 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1925 emit_jcc(cd, CC_B, 3 + 5);
1926 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1927 emit_jmp_imm(cd, 3);
1928 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1929 emit_store_dst(jd, iptr, d);
1933 /* memory operations **************************************************/
1935 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1937 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1938 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1939 /* implicit null-pointer check */
1940 M_ILD(d, s1, OFFSET(java_array_t, size));
1941 emit_store_dst(jd, iptr, d);
1944 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1946 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1947 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1948 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1949 /* implicit null-pointer check */
1950 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1951 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray_t, data[0]),
1953 emit_store_dst(jd, iptr, d);
1956 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1958 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1959 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1960 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1961 /* implicit null-pointer check */
1962 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1963 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray_t, data[0]),
1965 emit_store_dst(jd, iptr, d);
1968 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1970 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1971 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1972 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1973 /* implicit null-pointer check */
1974 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1975 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray_t, data[0]),
1977 emit_store_dst(jd, iptr, d);
1980 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1982 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1983 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1984 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1985 /* implicit null-pointer check */
1986 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1987 emit_mov_memindex_reg(cd, OFFSET(java_intarray_t, data[0]),
1989 emit_store_dst(jd, iptr, d);
1992 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1994 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1995 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1996 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1997 /* implicit null-pointer check */
1998 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2000 var = VAROP(iptr->dst);
2002 assert(var->flags & INMEMORY);
2003 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]),
2004 s1, s2, 3, REG_ITMP3);
2005 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff);
2006 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]) + 4,
2007 s1, s2, 3, REG_ITMP3);
2008 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff + 4);
2011 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2013 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2014 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2015 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2016 /* implicit null-pointer check */
2017 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2018 emit_flds_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2, 2);
2019 emit_store_dst(jd, iptr, d);
2022 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2024 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2025 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2026 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
2027 /* implicit null-pointer check */
2028 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2029 emit_fldl_memindex(cd, OFFSET(java_doublearray_t, data[0]), s1, s2,3);
2030 emit_store_dst(jd, iptr, d);
2033 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2035 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2036 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2037 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2038 /* implicit null-pointer check */
2039 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2040 emit_mov_memindex_reg(cd, OFFSET(java_objectarray_t, data[0]),
2042 emit_store_dst(jd, iptr, d);
2046 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2048 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2049 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2050 /* implicit null-pointer check */
2051 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2052 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2054 /* because EBP, ESI, EDI have no xH and xL nibbles */
2055 M_INTMOVE(s3, REG_ITMP3);
2058 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray_t, data[0]),
2062 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2064 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2065 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2066 /* implicit null-pointer check */
2067 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2068 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2069 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray_t, data[0]),
2073 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2075 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2076 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2077 /* implicit null-pointer check */
2078 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2079 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2080 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray_t, data[0]),
2084 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2086 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2087 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2088 /* implicit null-pointer check */
2089 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2090 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2091 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray_t, data[0]),
2095 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2097 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2098 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2099 /* implicit null-pointer check */
2100 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2102 var = VAROP(iptr->sx.s23.s3);
2104 assert(var->flags & INMEMORY);
2105 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff, REG_ITMP3);
2106 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray_t, data[0])
2108 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff + 4, REG_ITMP3);
2109 emit_mov_reg_memindex(cd, REG_ITMP3,
2110 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2113 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2115 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2116 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2117 /* implicit null-pointer check */
2118 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2119 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2120 emit_fstps_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2,2);
2123 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2125 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2126 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2127 /* implicit null-pointer check */
2128 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2129 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2130 emit_fstpl_memindex(cd, OFFSET(java_doublearray_t, data[0]),
2134 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2136 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2137 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2138 /* implicit null-pointer check */
2139 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2140 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2142 M_AST(s1, REG_SP, 0 * 4);
2143 M_AST(s3, REG_SP, 1 * 4);
2144 M_MOV_IMM(BUILTIN_FAST_canstore, REG_ITMP1);
2145 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP,
2146 builtintable_get_internal(BUILTIN_FAST_canstore));
2148 emit_arraystore_check(cd, iptr);
2150 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2151 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2152 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2153 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray_t, data[0]),
2157 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2159 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2160 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2161 /* implicit null-pointer check */
2162 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2163 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2164 OFFSET(java_bytearray_t, data[0]), s1, s2, 0);
2167 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2169 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2170 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2171 /* implicit null-pointer check */
2172 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2173 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2174 OFFSET(java_chararray_t, data[0]), s1, s2, 1);
2177 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2179 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2180 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2181 /* implicit null-pointer check */
2182 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2183 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2184 OFFSET(java_shortarray_t, data[0]), s1, s2, 1);
2187 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2189 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2190 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2191 /* implicit null-pointer check */
2192 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2193 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2194 OFFSET(java_intarray_t, data[0]), s1, s2, 2);
2197 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2199 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2200 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2201 /* implicit null-pointer check */
2202 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2203 emit_mov_imm_memindex(cd,
2204 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2205 OFFSET(java_longarray_t, data[0]), s1, s2, 3);
2206 emit_mov_imm_memindex(cd,
2207 ((s4)iptr->sx.s23.s3.constval) >> 31,
2208 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2211 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2213 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2214 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2215 /* implicit null-pointer check */
2216 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2217 emit_mov_imm_memindex(cd, 0,
2218 OFFSET(java_objectarray_t, data[0]), s1, s2, 2);
2222 case ICMD_GETSTATIC: /* ... ==> ..., value */
2224 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2225 uf = iptr->sx.s23.s3.uf;
2226 fieldtype = uf->fieldref->parseddesc.fd->type;
2229 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2233 fi = iptr->sx.s23.s3.fmiref->p.field;
2234 fieldtype = fi->type;
2236 disp = (intptr_t) fi->value;
2238 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2239 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2242 M_MOV_IMM(disp, REG_ITMP1);
2243 JITCACHE_ADD_CACHED_REF_JD_COND(jd, CRT_FIELDINFO_VALUE, fi, disp);
2244 switch (fieldtype) {
2247 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2248 M_ILD(d, REG_ITMP1, 0);
2251 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2252 M_LLD(d, REG_ITMP1, 0);
2255 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2256 M_FLD(d, REG_ITMP1, 0);
2259 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2260 M_DLD(d, REG_ITMP1, 0);
2263 emit_store_dst(jd, iptr, d);
2266 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2268 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2269 uf = iptr->sx.s23.s3.uf;
2270 fieldtype = uf->fieldref->parseddesc.fd->type;
2273 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2276 fi = iptr->sx.s23.s3.fmiref->p.field;
2277 fieldtype = fi->type;
2279 disp = (intptr_t) fi->value;
2281 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2282 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2284 M_MOV_IMM(disp, REG_ITMP1);
2285 JITCACHE_ADD_CACHED_REF_JD_COND(jd, CRT_FIELDINFO_VALUE, fi, disp);
2286 switch (fieldtype) {
2289 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2290 M_IST(s1, REG_ITMP1, 0);
2293 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2294 M_LST(s1, REG_ITMP1, 0);
2297 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2298 emit_fstps_membase(cd, REG_ITMP1, 0);
2301 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2302 emit_fstpl_membase(cd, REG_ITMP1, 0);
2307 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2308 /* val = value (in current instruction) */
2309 /* following NOP) */
2311 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2312 uf = iptr->sx.s23.s3.uf;
2313 fieldtype = uf->fieldref->parseddesc.fd->type;
2316 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2319 fi = iptr->sx.s23.s3.fmiref->p.field;
2320 fieldtype = fi->type;
2322 disp = (intptr_t) fi->value;
2324 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->clazz))
2325 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->clazz, 0);
2328 M_MOV_IMM(disp, REG_ITMP1);
2329 JITCACHE_ADD_CACHED_REF_JD_COND(jd, CRT_FIELDINFO_VALUE, fi, disp);
2330 switch (fieldtype) {
2333 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
2336 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2337 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
2344 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2346 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2347 emit_nullpointer_check(cd, iptr, s1);
2349 #if defined(ENABLE_ESCAPE_CHECK)
2350 /*emit_escape_check(cd, s1);*/
2353 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2354 uf = iptr->sx.s23.s3.uf;
2355 fieldtype = uf->fieldref->parseddesc.fd->type;
2358 patcher_add_patch_ref(jd, PATCHER_getfield,
2359 iptr->sx.s23.s3.uf, 0);
2362 fi = iptr->sx.s23.s3.fmiref->p.field;
2363 fieldtype = fi->type;
2369 switch (fieldtype) {
2372 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2373 M_ILD32(d, s1, disp);
2376 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2377 M_LLD32(d, s1, disp);
2380 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2381 M_FLD32(d, s1, disp);
2384 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2385 M_DLD32(d, s1, disp);
2388 emit_store_dst(jd, iptr, d);
2391 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2393 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2394 emit_nullpointer_check(cd, iptr, s1);
2396 /* must be done here because of code patching */
2398 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2399 uf = iptr->sx.s23.s3.uf;
2400 fieldtype = uf->fieldref->parseddesc.fd->type;
2403 fi = iptr->sx.s23.s3.fmiref->p.field;
2404 fieldtype = fi->type;
2407 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2408 if (IS_2_WORD_TYPE(fieldtype))
2409 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2411 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2414 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2416 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2418 uf = iptr->sx.s23.s3.uf;
2421 patcher_add_patch_ref(jd, PATCHER_putfield, uf, 0);
2425 fi = iptr->sx.s23.s3.fmiref->p.field;
2429 switch (fieldtype) {
2432 M_IST32(s2, s1, disp);
2435 M_LST32(s2, s1, disp);
2438 emit_fstps_membase32(cd, s1, disp);
2441 emit_fstpl_membase32(cd, s1, disp);
2446 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2447 /* val = value (in current instruction) */
2448 /* following NOP) */
2450 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2451 emit_nullpointer_check(cd, iptr, s1);
2453 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2454 uf = iptr->sx.s23.s3.uf;
2455 fieldtype = uf->fieldref->parseddesc.fd->type;
2458 patcher_add_patch_ref(jd, PATCHER_putfieldconst,
2462 fi = iptr->sx.s23.s3.fmiref->p.field;
2463 fieldtype = fi->type;
2467 switch (fieldtype) {
2470 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2473 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2474 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2482 /* branch operations **************************************************/
2484 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2486 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
/* NOTE(review): interior of codegen_emit() — the ICMD dispatch switch that
   translates intermediate instructions into i386 machine code.  This listing
   is ELIDED: the left-hand numbers are original file line numbers and they
   skip lines, so braces, break statements and some code between visible
   lines are not shown here.  Do not treat gaps as missing code in the real
   file. */
/* --- tail of ICMD_ATHROW: exception pointer goes to REG_ITMP1_XPTR; an
   unresolved exception class gets a patcher ref under ENABLE_VERIFIER;
   CALL(0)/POP materializes the throwing pc into REG_ITMP2_XPC before
   jumping to asm_handle_exception. --- */
2487 M_INTMOVE(s1, REG_ITMP1_XPTR);
2489 #ifdef ENABLE_VERIFIER
2490 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2491 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2492 iptr->sx.s23.s2.uc, 0);
2494 #endif /* ENABLE_VERIFIER */
2496 M_CALL_IMM(0); /* passing exception pc */
2497 M_POP(REG_ITMP2_XPC);
2499 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2500 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_ASM_HANDLE_EXCEPTION, 1);
/* --- unconditional control transfer.  Under SSA, phi moves must be
   emitted before the branch (see codegen_emit_phi_moves call). --- */
2504 case ICMD_GOTO: /* ... ==> ... */
2505 case ICMD_RET: /* ... ==> ... */
2507 #if defined(ENABLE_SSA)
2509 last_cmd_was_goto = true;
2511 /* In case of a Goto phimoves have to be inserted before the */
2514 codegen_emit_phi_moves(jd, bptr);
2517 emit_br(cd, iptr->dst.block);
2521 case ICMD_JSR: /* ... ==> ... */
2523 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
/* --- conditional branches on a single int/reference value.  The branch
   condition is derived arithmetically from the opcode offset
   (iptr->opc - ICMD_IFNULL / - ICMD_IFEQ), which presumably relies on the
   ICMD enum ordering matching the condition-code ordering — TODO confirm
   against the ICMD definitions. --- */
2527 case ICMD_IFNULL: /* ..., value ==> ... */
2528 case ICMD_IFNONNULL:
2530 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2532 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
2535 case ICMD_IFEQ: /* ..., value ==> ... */
2542 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2543 M_CMP_IMM(iptr->sx.val.i, s1);
2544 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
/* --- 64-bit compares against an immediate on 32-bit x86: equality tests
   XOR both halves and OR the results (zero iff equal); ordered tests
   compare the high word with a SIGNED branch and the low word with an
   UNSIGNED branch.  Comparing against 0 uses shortcuts (OR of halves, or
   the sign of the high word alone). --- */
2547 case ICMD_IF_LEQ: /* ..., value ==> ... */
2549 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2550 if (iptr->sx.val.l == 0) {
2551 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2552 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2555 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2556 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2557 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2558 M_OR(REG_ITMP2, REG_ITMP1);
2560 emit_beq(cd, iptr->dst.block);
2563 case ICMD_IF_LLT: /* ..., value ==> ... */
2565 if (iptr->sx.val.l == 0) {
2566 /* If high 32-bit are less than zero, then the 64-bits
2568 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2570 emit_blt(cd, iptr->dst.block);
2573 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2574 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2575 emit_blt(cd, iptr->dst.block);
2577 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2578 emit_bult(cd, iptr->dst.block);
2582 case ICMD_IF_LLE: /* ..., value ==> ... */
2584 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2585 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2586 emit_blt(cd, iptr->dst.block);
2588 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2589 emit_bule(cd, iptr->dst.block);
2592 case ICMD_IF_LNE: /* ..., value ==> ... */
2594 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2595 if (iptr->sx.val.l == 0) {
2596 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2597 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2600 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2601 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2602 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2603 M_OR(REG_ITMP2, REG_ITMP1);
2605 emit_bne(cd, iptr->dst.block);
2608 case ICMD_IF_LGT: /* ..., value ==> ... */
2610 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2611 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2612 emit_bgt(cd, iptr->dst.block);
2614 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2615 emit_bugt(cd, iptr->dst.block);
2618 case ICMD_IF_LGE: /* ..., value ==> ... */
2620 if (iptr->sx.val.l == 0) {
2621 /* If high 32-bit are greater equal zero, then the
2623 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2625 emit_bge(cd, iptr->dst.block);
2628 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2629 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2630 emit_bgt(cd, iptr->dst.block);
2632 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2633 emit_buge(cd, iptr->dst.block);
/* --- two-operand int/reference compares; condition code again derived
   from the opcode offset. --- */
2637 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2638 case ICMD_IF_ICMPNE:
2639 case ICMD_IF_ICMPLT:
2640 case ICMD_IF_ICMPGT:
2641 case ICMD_IF_ICMPGE:
2642 case ICMD_IF_ICMPLE:
2644 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2645 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2647 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);
2650 case ICMD_IF_ACMPEQ: /* ..., value, value ==> ... */
2651 case ICMD_IF_ACMPNE:
2653 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2654 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2656 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);
/* --- two-operand 64-bit compares: EQ/NE via XOR-halves-and-OR; ordered
   variants branch on the high words (signed) first, then on the low words
   (unsigned). --- */
2659 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2661 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2662 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2663 M_INTMOVE(s1, REG_ITMP1);
2664 M_XOR(s2, REG_ITMP1);
2665 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2666 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2667 M_INTMOVE(s1, REG_ITMP2);
2668 M_XOR(s2, REG_ITMP2);
2669 M_OR(REG_ITMP1, REG_ITMP2);
2670 emit_beq(cd, iptr->dst.block);
2673 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2675 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2676 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2677 M_INTMOVE(s1, REG_ITMP1);
2678 M_XOR(s2, REG_ITMP1);
2679 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2680 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2681 M_INTMOVE(s1, REG_ITMP2);
2682 M_XOR(s2, REG_ITMP2);
2683 M_OR(REG_ITMP1, REG_ITMP2);
2684 emit_bne(cd, iptr->dst.block);
2687 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2689 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2690 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2692 emit_blt(cd, iptr->dst.block);
2693 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2694 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2697 emit_bult(cd, iptr->dst.block);
2700 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2702 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2703 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2705 emit_bgt(cd, iptr->dst.block);
2706 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2707 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2710 emit_bugt(cd, iptr->dst.block);
2713 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2715 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2716 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2718 emit_blt(cd, iptr->dst.block);
2719 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2720 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2723 emit_bule(cd, iptr->dst.block);
2726 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2728 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2729 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2731 emit_bgt(cd, iptr->dst.block);
2732 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2733 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2736 emit_buge(cd, iptr->dst.block);
/* --- *RETURN opcodes: move the value into the ABI result register(s),
   then fall into the shared epilogue at nowperformreturn.  ARETURN adds a
   verifier patch ref when the declared return class is unresolved. --- */
2740 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2742 REPLACEMENT_POINT_RETURN(cd, iptr);
2743 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2744 M_INTMOVE(s1, REG_RESULT);
2745 goto nowperformreturn;
2747 case ICMD_LRETURN: /* ..., retvalue ==> ... */
2749 REPLACEMENT_POINT_RETURN(cd, iptr);
2750 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2751 M_LNGMOVE(s1, REG_RESULT_PACKED);
2752 goto nowperformreturn;
2754 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2756 REPLACEMENT_POINT_RETURN(cd, iptr);
2757 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2758 M_INTMOVE(s1, REG_RESULT);
2760 #ifdef ENABLE_VERIFIER
2761 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2762 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2763 iptr->sx.s23.s2.uc, 0);
2765 #endif /* ENABLE_VERIFIER */
2766 goto nowperformreturn;
2768 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2771 REPLACEMENT_POINT_RETURN(cd, iptr);
2772 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2773 goto nowperformreturn;
2775 case ICMD_RETURN: /* ... ==> ... */
2777 REPLACEMENT_POINT_RETURN(cd, iptr);
/* --- shared method epilogue (nowperformreturn target, label itself is
   elided from this view): optional verbose-call trace, monitor exit for
   synchronized methods (return value spilled around the
   LOCK_monitor_exit call and reloaded afterwards), restore of callee-
   saved registers, stack deallocation. --- */
2783 p = cd->stackframesize;
2785 #if !defined(NDEBUG)
2786 emit_verbosecall_exit(jd);
2789 #if defined(ENABLE_THREADS)
2790 if (checksync && code_is_synchronized(code)) {
2791 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 8);
2793 /* we need to save the proper return value */
2794 switch (iptr->opc) {
2797 M_IST(REG_RESULT, REG_SP, rd->memuse * 8);
2801 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2805 emit_fstps_membase(cd, REG_SP, rd->memuse * 8);
2809 emit_fstpl_membase(cd, REG_SP, rd->memuse * 8);
2813 M_AST(REG_ITMP2, REG_SP, 0);
2814 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2815 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP,
2816 builtintable_get_internal(LOCK_monitor_exit));
2819 /* and now restore the proper return value */
2820 switch (iptr->opc) {
2823 M_ILD(REG_RESULT, REG_SP, rd->memuse * 8);
2827 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2831 emit_flds_membase(cd, REG_SP, rd->memuse * 8);
2835 emit_fldl_membase(cd, REG_SP, rd->memuse * 8);
2841 /* restore saved registers */
2843 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2844 p--; M_ALD(rd->savintregs[i], REG_SP, p * 8);
2847 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2849 emit_fldl_membase(cd, REG_SP, p * 8);
2850 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2852 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2855 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2859 /* deallocate stack */
2861 if (cd->stackframesize)
2862 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
/* --- TABLESWITCH: range-check the (index - low) value with one unsigned
   compare, build a jump table in the data segment, and do an indirect
   indexed jump through it. --- */
2869 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2872 branch_target_t *table;
2874 table = iptr->dst.table;
2876 l = iptr->sx.s23.s2.tablelow;
2877 i = iptr->sx.s23.s3.tablehigh;
2879 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2880 M_INTMOVE(s1, REG_ITMP1);
2883 M_ISUB_IMM(l, REG_ITMP1);
2889 M_CMP_IMM(i - 1, REG_ITMP1);
2890 emit_bugt(cd, table[0].block);
2892 /* build jump table top down and use address of lowest entry */
2897 dseg_add_target(cd, table->block);
2901 /* length of dataseg after last dseg_addtarget is used
2904 M_MOV_IMM(0, REG_ITMP2);
2906 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_ENTRYPOINT, NULL);
2907 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
/* --- LOOKUPSWITCH: linear sequence of compare-and-branch pairs, then an
   unconditional branch to the default target. --- */
2913 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2916 lookup_target_t *lookup;
2918 lookup = iptr->dst.lookup;
2920 i = iptr->sx.s23.s2.lookupcount;
2922 MCODECHECK((i<<2)+8);
2923 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2926 M_CMP_IMM(lookup->value, s1);
2927 emit_beq(cd, lookup->target.block);
2931 emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
/* --- BUILTIN and INVOKE*: copy arguments to their ABI locations (all on
   the stack for the i386 calling convention visible here), then select
   the call target per opcode: builtin stub/function pointer, static/
   special via stubroutine, virtual via vftbl table, interface via
   interfacetable.  Unresolved targets get patcher refs. --- */
2936 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
2938 REPLACEMENT_POINT_FORGC_BUILTIN(cd, iptr);
2940 bte = iptr->sx.s23.s3.bte;
2944 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2946 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2947 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
2948 case ICMD_INVOKEINTERFACE:
2950 REPLACEMENT_POINT_INVOKE(cd, iptr);
2952 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2953 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
2957 lm = iptr->sx.s23.s3.fmiref->p.method;
2958 md = lm->parseddesc;
2962 s3 = md->paramcount;
2964 MCODECHECK((s3 << 1) + 64);
2966 /* copy arguments to registers or stack location */
2968 for (s3 = s3 - 1; s3 >= 0; s3--) {
2969 var = VAR(iptr->sx.s23.s2.args[s3]);
2971 /* Already Preallocated (ARGVAR) ? */
2972 if (var->flags & PREALLOC)
2974 if (IS_INT_LNG_TYPE(var->type)) {
2975 if (!md->params[s3].inmemory) {
2976 log_text("No integer argument registers available!");
2980 if (IS_2_WORD_TYPE(var->type)) {
2981 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
2982 M_LST(d, REG_SP, md->params[s3].regoff);
2984 d = emit_load(jd, iptr, var, REG_ITMP1);
2985 M_IST(d, REG_SP, md->params[s3].regoff);
2990 if (!md->params[s3].inmemory) {
2991 s1 = md->params[s3].regoff;
2992 d = emit_load(jd, iptr, var, s1);
2996 d = emit_load(jd, iptr, var, REG_FTMP1);
2997 if (IS_2_WORD_TYPE(var->type))
2998 M_DST(d, REG_SP, md->params[s3].regoff);
3000 M_FST(d, REG_SP, md->params[s3].regoff);
3005 switch (iptr->opc) {
3007 d = md->returntype.type;
3009 if (bte->stub == NULL) {
3010 M_MOV_IMM(bte->fp, REG_ITMP1);
3013 M_MOV_IMM(bte->stub, REG_ITMP1);
3015 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN, bte);
3018 #if defined(ENABLE_ESCAPE_CHECK)
3019 if (bte->opcode == ICMD_NEW || bte->opcode == ICMD_NEWARRAY) {
3020 /*emit_escape_annotate_object(cd, m);*/
3025 case ICMD_INVOKESPECIAL:
3026 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3027 emit_nullpointer_check(cd, iptr, REG_ITMP1);
3030 case ICMD_INVOKESTATIC:
3032 unresolved_method *um = iptr->sx.s23.s3.um;
3034 patcher_add_patch_ref(jd, PATCHER_invokestatic_special,
3038 d = md->returntype.type;
3041 disp = (ptrint) lm->stubroutine;
3043 d = lm->parseddesc->returntype.type;
3045 M_MOV_IMM(disp, REG_ITMP2);
3046 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_METHODINFO_STUBROUTINE, lm);
3050 case ICMD_INVOKEVIRTUAL:
3051 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3052 emit_nullpointer_check(cd, iptr, s1);
3055 unresolved_method *um = iptr->sx.s23.s3.um;
3057 patcher_add_patch_ref(jd, PATCHER_invokevirtual, um, 0);
3060 d = md->returntype.type;
3063 s1 = OFFSET(vftbl_t, table[0]) +
3064 sizeof(methodptr) * lm->vftblindex;
3066 d = md->returntype.type;
3069 M_ALD(REG_METHODPTR, REG_ITMP1,
3070 OFFSET(java_object_t, vftbl));
3071 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3072 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_METHODINFO_TABLE, lm);
3076 case ICMD_INVOKEINTERFACE:
3077 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3078 emit_nullpointer_check(cd, iptr, s1);
3081 unresolved_method *um = iptr->sx.s23.s3.um;
3083 patcher_add_patch_ref(jd, PATCHER_invokeinterface, um, 0);
3087 d = md->returntype.type;
3090 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3091 sizeof(methodptr) * lm->clazz->index;
3093 s2 = sizeof(methodptr) * (lm - lm->clazz->methods);
3095 d = md->returntype.type;
3098 M_ALD(REG_METHODPTR, REG_ITMP1,
3099 OFFSET(java_object_t, vftbl));
3100 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3101 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_METHODINFO_INTERFACETABLE, lm);
3102 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3103 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_METHODINFO_METHODOFFSET, lm);
/* --- common invoke tail: record replacement points, then move the call
   result (if any) from REG_RESULT(_PACKED)/FPU into the destination. --- */
3108 /* store size of call code in replacement point */
3110 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3111 REPLACEMENT_POINT_FORGC_BUILTIN_RETURN(cd, iptr);
3113 /* d contains return type */
3115 if (d != TYPE_VOID) {
3116 #if defined(ENABLE_SSA)
3117 if ((ls == NULL) /* || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) */ ||
3118 (ls->lifetime[iptr->dst.varindex].type != UNUSED))
3119 /* a "living" stackslot */
3122 if (IS_INT_LNG_TYPE(d)) {
3123 if (IS_2_WORD_TYPE(d)) {
3124 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3125 M_LNGMOVE(REG_RESULT_PACKED, s1);
3128 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3129 M_INTMOVE(REG_RESULT, s1);
3133 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3135 emit_store_dst(jd, iptr, s1);
/* --- CHECKCAST: three code shapes selected at compile time — interface
   check via interfacetable, class check via baseval/diffval range test
   (inside a critical section), or the arraycheckcast builtin for array
   types.  super == NULL means the class is unresolved and both paths are
   emitted behind runtime flag tests plus patcher refs. --- */
3141 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3143 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3144 /* object type cast-check */
3147 vftbl_t *supervftbl;
3150 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3156 super = iptr->sx.s23.s3.c.cls;
3157 superindex = super->index;
3158 supervftbl = super->vftbl;
3161 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3162 CODEGEN_CRITICAL_SECTION_NEW;
3164 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3166 /* if class is not resolved, check which code to call */
3167 if (super == NULL) {
3169 emit_label_beq(cd, BRANCH_LABEL_1);
3171 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3172 iptr->sx.s23.s3.c.ref, 0);
3174 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3175 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3176 emit_label_beq(cd, BRANCH_LABEL_2);
3179 /* interface checkcast code */
3181 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3182 if (super != NULL) {
3184 emit_label_beq(cd, BRANCH_LABEL_3);
3187 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3189 if (super == NULL) {
3190 patcher_add_patch_ref(jd, PATCHER_checkcast_interface,
3191 iptr->sx.s23.s3.c.ref,
3196 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3197 M_ISUB_IMM32(superindex, REG_ITMP3);
3198 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CLASSINFO_INDEX, super);
3199 /* XXX do we need this one? */
3201 emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);
3203 M_ALD32(REG_ITMP3, REG_ITMP2,
3204 OFFSET(vftbl_t, interfacetable[0]) -
3205 superindex * sizeof(methodptr*));
3206 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CLASSINFO_INTERFACETABLE, super);
3208 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);
3211 emit_label_br(cd, BRANCH_LABEL_4);
3213 emit_label(cd, BRANCH_LABEL_3);
3216 /* class checkcast code */
3218 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3219 if (super == NULL) {
3220 emit_label(cd, BRANCH_LABEL_2);
3224 emit_label_beq(cd, BRANCH_LABEL_5);
3227 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3229 if (super == NULL) {
3230 patcher_add_patch_ref(jd, PATCHER_checkcast_class,
3231 iptr->sx.s23.s3.c.ref,
3235 M_MOV_IMM(supervftbl, REG_ITMP3);
3236 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CLASSINFO_VFTBL, super);
3238 CODEGEN_CRITICAL_SECTION_START;
3240 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3242 /* if (s1 != REG_ITMP1) { */
3243 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3244 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3245 /* #if defined(ENABLE_THREADS) */
3246 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3248 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
3251 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3252 M_ISUB(REG_ITMP3, REG_ITMP2);
3253 M_MOV_IMM(supervftbl, REG_ITMP3);
3254 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CLASSINFO_VFTBL, super);
3255 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3257 CODEGEN_CRITICAL_SECTION_END;
3261 M_CMP(REG_ITMP3, REG_ITMP2);
3262 emit_classcast_check(cd, iptr, BRANCH_ULE, REG_ITMP3, s1);
3265 emit_label(cd, BRANCH_LABEL_5);
3268 if (super == NULL) {
3269 emit_label(cd, BRANCH_LABEL_1);
3270 emit_label(cd, BRANCH_LABEL_4);
3273 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3276 /* array type cast-check */
3278 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3279 M_AST(s1, REG_SP, 0 * 4);
3281 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3282 patcher_add_patch_ref(jd, PATCHER_builtin_arraycheckcast,
3283 iptr->sx.s23.s3.c.ref, 0);
3287 disp = iptr->sx.s23.s3.c.cls;
3290 M_AST_IMM(disp, REG_SP, 1 * 4);
3291 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CLASSINFO, disp);
3293 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3294 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP,
3295 builtintable_get_internal(BUILTIN_arraycheckcast));
3298 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3300 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);
3302 d = codegen_reg_of_dst(jd, iptr, s1);
3306 emit_store_dst(jd, iptr, d);
/* --- INSTANCEOF: same interface/class split as CHECKCAST, but produces a
   0/1 int result in d instead of throwing. --- */
3309 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3313 vftbl_t *supervftbl;
3316 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3322 super = iptr->sx.s23.s3.c.cls;
3323 superindex = super->index;
3324 supervftbl = super->vftbl;
3327 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3328 CODEGEN_CRITICAL_SECTION_NEW;
3330 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3331 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
3334 M_INTMOVE(s1, REG_ITMP1);
3340 /* if class is not resolved, check which code to call */
3342 if (super == NULL) {
3344 emit_label_beq(cd, BRANCH_LABEL_1);
3346 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3347 iptr->sx.s23.s3.c.ref, 0);
3349 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3350 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3351 emit_label_beq(cd, BRANCH_LABEL_2);
3354 /* interface instanceof code */
3356 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3357 if (super != NULL) {
3359 emit_label_beq(cd, BRANCH_LABEL_3);
3363 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3365 if (super == NULL) {
3366 patcher_add_patch_ref(jd, PATCHER_instanceof_interface,
3367 iptr->sx.s23.s3.c.ref, 0);
3371 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3372 M_ISUB_IMM32(superindex, REG_ITMP3);
3373 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CLASSINFO_INDEX, super);
3377 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3378 6 /* jcc */ + 5 /* mov_imm_reg */);
3381 M_ALD32(REG_ITMP1, REG_ITMP1,
3382 OFFSET(vftbl_t, interfacetable[0]) -
3383 superindex * sizeof(methodptr*));
3384 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CLASSINFO_INTERFACETABLE, super);
3386 /* emit_setcc_reg(cd, CC_A, d); */
3387 /* emit_jcc(cd, CC_BE, 5); */
3392 emit_label_br(cd, BRANCH_LABEL_4);
3394 emit_label(cd, BRANCH_LABEL_3);
3397 /* class instanceof code */
3399 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3400 if (super == NULL) {
3401 emit_label(cd, BRANCH_LABEL_2);
3405 emit_label_beq(cd, BRANCH_LABEL_5);
3408 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3410 if (super == NULL) {
3411 patcher_add_patch_ref(jd, PATCHER_instanceof_class,
3412 iptr->sx.s23.s3.c.ref, 0);
3415 M_MOV_IMM(supervftbl, REG_ITMP2);
3416 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CLASSINFO_VFTBL, super);
3417 CODEGEN_CRITICAL_SECTION_START;
3419 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3420 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3421 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3423 CODEGEN_CRITICAL_SECTION_END;
3425 M_ISUB(REG_ITMP2, REG_ITMP1);
3426 M_CLR(d); /* may be REG_ITMP2 */
3427 M_CMP(REG_ITMP3, REG_ITMP1);
3432 emit_label(cd, BRANCH_LABEL_5);
3435 if (super == NULL) {
3436 emit_label(cd, BRANCH_LABEL_1);
3437 emit_label(cd, BRANCH_LABEL_4);
3440 emit_store_dst(jd, iptr, d);
/* --- MULTIANEWARRAY: spill all dimension sizes to the stack, then call
   BUILTIN_multianewarray(argcount, classinfo, dims-pointer); result (or
   exception) is checked before storing the array reference. --- */
3444 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3446 /* check for negative sizes and copy sizes to stack if necessary */
3448 MCODECHECK((iptr->s1.argcount << 1) + 64);
3450 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3451 /* copy SAVEDVAR sizes to stack */
3452 var = VAR(iptr->sx.s23.s2.args[s1]);
3454 /* Already Preallocated? */
3455 if (!(var->flags & PREALLOC)) {
3456 if (var->flags & INMEMORY) {
3457 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff);
3458 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3461 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3465 /* is a patcher function set? */
3467 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3468 patcher_add_patch_ref(jd, PATCHER_builtin_multianewarray,
3469 iptr->sx.s23.s3.c.ref, 0);
3475 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3477 /* a0 = dimension count */
3479 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3481 /* a1 = arraydescriptor */
3483 M_IST_IMM(disp, REG_SP, 1 * 4);
3484 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CLASSINFO, disp);
3486 /* a2 = pointer to dimensions = stack pointer */
3488 M_MOV(REG_SP, REG_ITMP1);
3489 M_AADD_IMM(3 * 4, REG_ITMP1);
3490 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3492 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3493 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_BUILTIN_FP,
3494 builtintable_get_internal(BUILTIN_multianewarray));
3497 /* check for exception before result assignment */
3499 emit_exception_check(cd, iptr);
3501 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3502 M_INTMOVE(REG_RESULT, s1);
3503 emit_store_dst(jd, iptr, s1);
3506 #if defined(ENABLE_SSA)
3507 case ICMD_GETEXCEPTION:
3508 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
3509 M_INTMOVE(REG_ITMP1, d);
3510 emit_store_dst(jd, iptr, d);
/* default case: unknown opcode is a fatal internal error. */
3514 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3519 } /* for instruction */
/* --- per-basic-block epilogue: pending SSA phi moves (unless the block
   already ended in a goto, which emitted them itself), then nop padding
   so patcher stubs cannot overwrite the following block. --- */
3523 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3526 #if defined(ENABLE_SSA)
3529 /* by edge splitting, in Blocks with phi moves there can only */
3530 /* be a goto as last command, no other Jump/Branch Command */
3532 if (!last_cmd_was_goto)
3533 codegen_emit_phi_moves(jd, bptr);
3538 /* At the end of a basic block we may have to append some nops,
3539 because the patcher stub calling code might be longer than the
3540 actual instruction. So codepatching does not change the
3541 following block unintentionally. */
3543 if (cd->mcodeptr < cd->lastmcodeptr) {
3544 while (cd->mcodeptr < cd->lastmcodeptr) {
3549 } /* if (bptr -> flags >= BBREACHED) */
3550 } /* for basic block */
3552 /* generate stubs */
3554 emit_patcher_traps(jd);
3556 /* everything's ok */
3562 /* codegen_emit_stub_native ****************************************************
3564 Emits a stub routine which calls a native method.
3566 *******************************************************************************/
/* codegen_emit_stub_native: emit a stub that bridges from JIT-compiled
   code into a native (JNI or internal) function `f`.  It builds a stack
   frame holding a stackframeinfo_t, a localref_table and four outgoing
   argument slots; copies/spills Java arguments into the native
   descriptor's (`nmd`) locations; for ACC_NATIVE methods prepends the
   JNIEnv (and the class, for static methods); calls the function; then
   tears the frame down via codegen_finish_native_call and forwards any
   pending exception to asm_handle_nat_exception.
   NOTE(review): this listing is ELIDED — left-hand numbers are original
   file line numbers and skip lines; the function's closing brace and some
   statements (breaks, braces, some declarations) are outside this view. */
3568 void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f, int skipparams)
3574 int i, j; /* count variables */
3578 /* get required compiler data */
3584 /* set some variables */
3588 /* calculate stackframe size */
/* Frame = stackframeinfo + localref_table + 4 argument slots (units of
   SIZEOF_VOID_P); ALIGN_ODD keeps the stack 16-byte aligned (see the
   comment below and the "* 8 + 4" byte size used throughout). */
3590 cd->stackframesize =
3591 sizeof(stackframeinfo_t) / SIZEOF_VOID_P +
3592 sizeof(localref_table) / SIZEOF_VOID_P +
3593 4 + /* 4 arguments (start_native_call) */
3596 /* keep stack 16-byte aligned */
3598 ALIGN_ODD(cd->stackframesize);
3600 /* create method header */
3602 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3603 (void) dseg_add_unique_s4(cd, cd->stackframesize * 8 + 4); /* FrameSize */
3604 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3605 (void) dseg_add_unique_s4(cd, 0); /* IntSave */
3606 (void) dseg_add_unique_s4(cd, 0); /* FltSave */
3608 #if defined(ENABLE_PROFILING)
3609 /* generate native method profiling code */
3611 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
3612 /* count frequency */
3614 M_MOV_IMM(code, REG_ITMP1);
3615 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
3616 JITCACHE_ADD_CACHED_REF_JD(jd, CRT_CODEINFO, 0);
3620 /* calculate stackframe size for native function */
3622 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
3624 /* Mark the whole fpu stack as free for native functions (only for saved */
3625 /* register count == 0). */
3627 emit_ffree_reg(cd, 0);
3628 emit_ffree_reg(cd, 1);
3629 emit_ffree_reg(cd, 2);
3630 emit_ffree_reg(cd, 3);
3631 emit_ffree_reg(cd, 4);
3632 emit_ffree_reg(cd, 5);
3633 emit_ffree_reg(cd, 6);
3634 emit_ffree_reg(cd, 7);
3636 #if defined(ENABLE_GC_CACAO)
3637 /* remember callee saved int registers in stackframeinfo (GC may need to */
3638 /* recover them during a collection). */
3640 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3641 OFFSET(stackframeinfo_t, intregs);
3643 for (i = 0; i < INT_SAV_CNT; i++)
3644 M_AST(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3647 /* prepare data structures for native function call */
/* Arguments for codegen_start_native_call: current SP and a 0 slot. */
3649 M_MOV(REG_SP, REG_ITMP1);
3650 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3651 M_IST_IMM(0, REG_SP, 1 * 4);
3654 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
3657 /* remember class argument */
/* presumably codegen_start_native_call returns the class object for
   static methods in REG_RESULT — TODO confirm in codegen-common.c. */
3659 if (m->flags & ACC_STATIC)
3660 M_MOV(REG_RESULT, REG_ITMP3);
3662 /* Copy or spill arguments to new locations. */
/* j is offset by skipparams so Java parameter i lands in native
   parameter slot i + skipparams (room for env/class prepended below). */
3664 for (i = md->paramcount - 1, j = i + skipparams; i >= 0; i--, j--) {
3665 if (!md->params[i].inmemory)
3668 s1 = md->params[i].regoff + cd->stackframesize * 8 + 8;
3669 s2 = nmd->params[j].regoff;
3671 /* float/double in memory can be copied like int/longs */
3673 switch (md->paramtypes[i].type) {
3677 M_ILD(REG_ITMP1, REG_SP, s1);
3678 M_IST(REG_ITMP1, REG_SP, s2);
3682 M_LLD(REG_ITMP12_PACKED, REG_SP, s1);
3683 M_LST(REG_ITMP12_PACKED, REG_SP, s2);
3688 /* Handle native Java methods. */
3690 if (m->flags & ACC_NATIVE) {
3691 /* if function is static, put class into second argument */
3693 if (m->flags & ACC_STATIC)
3694 M_AST(REG_ITMP3, REG_SP, 1 * 4);
3696 /* put env into first argument */
3698 M_AST_IMM(_Jv_env, REG_SP, 0 * 4);
3701 /* Call the native function. */
/* Target address is loaded indirectly from the data segment so it can
   be relocated/patched (dseg_add_functionptr + REG_ITMP3-relative load). */
3703 disp = dseg_add_functionptr(cd, f);
3704 emit_mov_imm_reg(cd, 0, REG_ITMP3);
3706 M_ALD(REG_ITMP1, REG_ITMP3, disp);
3709 /* save return value */
/* Sub-int results are widened per JNI semantics (bool/char zero-extend,
   byte/short sign-extend) before spilling across the finish call. */
3711 switch (md->returntype.type) {
3714 switch (md->returntype.decltype) {
3715 case PRIMITIVETYPE_BOOLEAN:
3716 M_BZEXT(REG_RESULT, REG_RESULT);
3718 case PRIMITIVETYPE_BYTE:
3719 M_BSEXT(REG_RESULT, REG_RESULT);
3721 case PRIMITIVETYPE_CHAR:
3722 M_CZEXT(REG_RESULT, REG_RESULT);
3724 case PRIMITIVETYPE_SHORT:
3725 M_SSEXT(REG_RESULT, REG_RESULT);
3728 M_IST(REG_RESULT, REG_SP, 1 * 8);
3731 M_LST(REG_RESULT_PACKED, REG_SP, 1 * 8);
3734 emit_fsts_membase(cd, REG_SP, 1 * 8);
3737 emit_fstl_membase(cd, REG_SP, 1 * 8);
3743 /* remove native stackframe info */
3745 M_MOV(REG_SP, REG_ITMP1);
3746 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3747 M_IST_IMM(0, REG_SP, 1 * 4);
3750 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
/* codegen_finish_native_call's result (pending exception, presumably —
   see the exception check below) is kept in REG_ITMP2 while the real
   return value is reloaded from the spill slots. */
3752 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
3754 /* restore return value */
3756 switch (md->returntype.type) {
3759 M_ILD(REG_RESULT, REG_SP, 1 * 8);
3762 M_LLD(REG_RESULT_PACKED, REG_SP, 1 * 8);
3765 emit_flds_membase(cd, REG_SP, 1 * 8);
3768 emit_fldl_membase(cd, REG_SP, 1 * 8);
3774 #if defined(ENABLE_GC_CACAO)
3775 /* restore callee saved int registers from stackframeinfo (GC might have */
3776 /* modified them during a collection). */
3778 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3779 OFFSET(stackframeinfo_t, intregs);
3781 for (i = 0; i < INT_SAV_CNT; i++)
3782 M_ALD(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3785 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
3787 /* check for exception */
3794 /* handle exception */
/* Exception path: exception object to REG_ITMP1_XPTR, faulting pc from
   the return address on the stack minus 2 (points back into the call),
   then jump to the native-exception handler. */
3796 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
3797 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
3798 M_ASUB_IMM(2, REG_ITMP2_XPC);
3800 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
3806 * These are local overrides for various environment variables in Emacs.
3807 * Please do not remove this and leave it at the end of the file, where
3808 * Emacs will automagically detect them.
3809 * ---------------------------------------------------------------------
3812 * indent-tabs-mode: t
3816 * vim:noexpandtab:sw=4:ts=4: