1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
25 $Id: codegen.c 7865 2007-05-03 21:29:40Z twisti $
37 #include "vm/jit/i386/md-abi.h"
39 #include "vm/jit/i386/codegen.h"
40 #include "vm/jit/i386/emit.h"
42 #include "mm/memory.h"
43 #include "native/jni.h"
44 #include "native/native.h"
46 #include "threads/lock-common.h"
48 #include "vm/builtin.h"
49 #include "vm/exceptions.h"
50 #include "vm/global.h"
51 #include "vm/stringlocal.h"
54 #include "vm/jit/asmpart.h"
55 #include "vm/jit/codegen-common.h"
56 #include "vm/jit/dseg.h"
57 #include "vm/jit/emit-common.h"
58 #include "vm/jit/jit.h"
59 #include "vm/jit/parse.h"
60 #include "vm/jit/patcher.h"
61 #include "vm/jit/reg.h"
62 #include "vm/jit/replace.h"
63 #include "vm/jit/stacktrace.h"
65 #if defined(ENABLE_SSA)
66 # include "vm/jit/optimizing/lsra.h"
67 # include "vm/jit/optimizing/ssa.h"
68 #elif defined(ENABLE_LSRA)
69 # include "vm/jit/allocator/lsra.h"
72 #include "vmcore/loader.h"
73 #include "vmcore/options.h"
74 #include "vmcore/utf8.h"
77 /* codegen_emit ****************************************************************
79 Generates machine code.
81 *******************************************************************************/
83 #if defined(ENABLE_SSA)
84 void cg_move(codegendata *cd, s4 type, s4 src_regoff, s4 src_flags,
85 s4 dst_regoff, s4 dst_flags);
86 void codegen_insert_phi_moves(jitdata *jd, basicblock *bptr);
89 bool codegen_emit(jitdata *jd)
95 s4 len, s1, s2, s3, d, disp;
101 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
102 builtintable_entry *bte;
105 unresolved_field *uf;
108 #if defined(ENABLE_SSA)
110 bool last_cmd_was_goto;
112 last_cmd_was_goto = false;
116 /* get required compiler data */
123 /* prevent compiler warnings */
134 s4 savedregs_num = 0;
137 /* space to save used callee saved registers */
139 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
141 /* float registers are saved on two 4-byte stack slots */
142 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse) * 2;
144 cd->stackframesize = rd->memuse + savedregs_num;
147 #if defined(ENABLE_THREADS)
148 /* space to save argument of monitor_enter */
150 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
151 /* reserve 2 slots for long/double return values for monitorexit */
153 if (IS_2_WORD_TYPE(m->parseddesc->returntype.type))
154 cd->stackframesize += 2;
156 cd->stackframesize++;
160 /* create method header */
162 /* Keep stack of non-leaf functions 16-byte aligned. */
164 if (!jd->isleafmethod)
165 cd->stackframesize |= 0x3;
167 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
168 (void) dseg_add_unique_s4(cd, cd->stackframesize * 4); /* FrameSize */
170 #if defined(ENABLE_THREADS)
171 /* IsSync contains the offset relative to the stack pointer for the
172 argument of monitor_exit used in the exception handler. Since the
173 offset could be zero and give a wrong meaning of the flag it is
177 if (checksync && (m->flags & ACC_SYNCHRONIZED))
178 (void) dseg_add_unique_s4(cd, (rd->memuse + 1) * 4); /* IsSync */
181 (void) dseg_add_unique_s4(cd, 0); /* IsSync */
183 (void) dseg_add_unique_s4(cd, jd->isleafmethod); /* IsLeaf */
184 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
185 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
187 /* Add a reference for the length of the line number table. We don't
188 know its size yet, since we gather the information during code
189 generation, to save one additional iteration over the whole
190 instruction list. During code optimization the positions may have changed
191 from the information obtained from the class file. */
192 (void) dseg_addlinenumbertablesize(cd);
194 (void) dseg_add_unique_s4(cd, jd->exceptiontablelength); /* ExTableSize */
196 /* create exception table */
198 for (ex = jd->exceptiontable; ex != NULL; ex = ex->down) {
199 dseg_add_target(cd, ex->start);
200 dseg_add_target(cd, ex->end);
201 dseg_add_target(cd, ex->handler);
202 (void) dseg_add_unique_address(cd, ex->catchtype.any);
205 #if defined(ENABLE_PROFILING)
206 /* generate method profiling code */
208 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
209 /* count frequency */
211 M_MOV_IMM(code, REG_ITMP3);
212 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
216 /* create stack frame (if necessary) */
218 if (cd->stackframesize)
219 M_ASUB_IMM(cd->stackframesize * 4, REG_SP);
221 /* save return address and used callee saved registers */
223 p = cd->stackframesize;
224 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
225 p--; M_AST(rd->savintregs[i], REG_SP, p * 4);
227 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
228 p-=2; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 4);
231 /* take arguments out of register or stack frame */
236 for (p = 0, l = 0; p < md->paramcount; p++) {
237 t = md->paramtypes[p].type;
239 #if defined(ENABLE_SSA)
244 varindex = jd->local_map[l * 5 + t];
246 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
249 if (varindex == UNUSED)
254 s1 = md->params[p].regoff;
256 if (IS_INT_LNG_TYPE(t)) { /* integer args */
257 if (!md->params[p].inmemory) { /* register arguments */
258 log_text("integer register argument");
260 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
261 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
263 else { /* reg arg -> spilled */
264 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
267 else { /* stack arguments */
268 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
269 emit_mov_membase_reg( /* + 4 for return address */
270 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4, var->vv.regoff);
271 /* + 4 for return address */
273 else { /* stack arg -> spilled */
274 if (!IS_2_WORD_TYPE(t)) {
275 #if defined(ENABLE_SSA)
276 /* no copy avoiding by now possible with SSA */
278 emit_mov_membase_reg( /* + 4 for return address */
279 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4,
281 emit_mov_reg_membase(
282 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
285 #endif /*defined(ENABLE_SSA)*/
286 /* reuse stack slot and avoid copying */
287 var->vv.regoff = cd->stackframesize + s1 + 1;
291 #if defined(ENABLE_SSA)
292 /* no copy avoiding by now possible with SSA */
294 emit_mov_membase_reg( /* + 4 for return address */
295 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4,
297 emit_mov_reg_membase(
298 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
299 emit_mov_membase_reg( /* + 4 for return address */
300 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4 + 4,
302 emit_mov_reg_membase(
303 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4 + 4);
306 #endif /*defined(ENABLE_SSA)*/
307 /* reuse stack slot and avoid copying */
308 var->vv.regoff = cd->stackframesize + s1 + 1;
313 else { /* floating args */
314 if (!md->params[p].inmemory) { /* register arguments */
315 log_text("There are no float argument registers!");
317 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
318 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
319 } else { /* reg arg -> spilled */
320 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 4 */
324 else { /* stack arguments */
325 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
328 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
330 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
335 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
337 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
340 } else { /* stack-arg -> spilled */
341 #if defined(ENABLE_SSA)
342 /* no copy avoiding by now possible with SSA */
344 emit_mov_membase_reg(
345 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4, REG_ITMP1);
346 emit_mov_reg_membase(
347 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
350 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
351 emit_fstps_membase(cd, REG_SP, var->vv.regoff * 4);
355 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
356 emit_fstpl_membase(cd, REG_SP, var->vv.regoff * 4);
360 #endif /*defined(ENABLE_SSA)*/
361 /* reuse stack slot and avoid copying */
362 var->vv.regoff = cd->stackframesize + s1 + 1;
368 /* call monitorenter function */
370 #if defined(ENABLE_THREADS)
371 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
374 if (m->flags & ACC_STATIC) {
375 M_MOV_IMM(&m->class->object.header, REG_ITMP1);
378 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 4 + 4);
381 M_ALD_MEM(REG_ITMP1, EXCEPTION_HARDWARE_NULLPOINTER);
384 M_AST(REG_ITMP1, REG_SP, s1 * 4);
385 M_AST(REG_ITMP1, REG_SP, 0 * 4);
386 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
392 emit_verbosecall_enter(jd);
397 #if defined(ENABLE_SSA)
398 /* with SSA Header is Basic Block 0 - insert phi Moves if necessary */
400 codegen_insert_phi_moves(jd, ls->basicblocks[0]);
403 /* end of header generation */
405 /* create replacement points */
407 REPLACEMENT_POINTS_INIT(cd, jd);
409 /* walk through all basic blocks */
411 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
413 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
415 if (bptr->flags >= BBREACHED) {
416 /* branch resolving */
418 codegen_resolve_branchrefs(cd, bptr);
420 /* handle replacement points */
422 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
424 #if defined(ENABLE_REPLACEMENT)
425 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
426 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
428 disp = (s4) &(m->hitcountdown);
429 M_ISUB_IMM_MEMABS(1, disp);
435 /* copy interface registers to their destination */
440 #if defined(ENABLE_PROFILING)
441 /* generate basic block profiling code */
443 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
444 /* count frequency */
446 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
447 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
451 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
452 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
455 # if defined(ENABLE_SSA)
457 last_cmd_was_goto = false;
461 var = VAR(bptr->invars[len]);
462 if (bptr->type != BBTYPE_STD) {
463 if (!IS_2_WORD_TYPE(var->type)) {
464 if (bptr->type == BBTYPE_EXH) {
465 d = codegen_reg_of_var(0, var, REG_ITMP1);
466 M_INTMOVE(REG_ITMP1, d);
467 emit_store(jd, NULL, var, d);
471 log_text("copy interface registers(EXH, SBR): longs \
472 have to be in memory (begin 1)");
480 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
484 var = VAR(bptr->invars[len]);
485 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
486 if (!IS_2_WORD_TYPE(var->type)) {
487 if (bptr->type == BBTYPE_EXH) {
488 d = codegen_reg_of_var(0, var, REG_ITMP1);
489 M_INTMOVE(REG_ITMP1, d);
490 emit_store(jd, NULL, var, d);
494 log_text("copy interface registers: longs have to be in \
501 assert((var->flags & INOUT));
506 /* walk through all instructions */
511 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
512 if (iptr->line != currentline) {
513 dseg_addlinenumber(cd, iptr->line);
514 currentline = iptr->line;
517 MCODECHECK(1024); /* 1kB should be enough */
520 case ICMD_NOP: /* ... ==> ... */
521 case ICMD_POP: /* ..., value ==> ... */
522 case ICMD_POP2: /* ..., value, value ==> ... */
525 case ICMD_INLINE_START:
527 REPLACEMENT_POINT_INLINE_START(cd, iptr);
530 case ICMD_INLINE_BODY:
532 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
533 dseg_addlinenumber_inline_start(cd, iptr);
534 dseg_addlinenumber(cd, iptr->line);
537 case ICMD_INLINE_END:
539 dseg_addlinenumber_inline_end(cd, iptr);
540 dseg_addlinenumber(cd, iptr->line);
543 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
545 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
546 emit_nullpointer_check(cd, iptr, s1);
549 /* constant operations ************************************************/
551 case ICMD_ICONST: /* ... ==> ..., constant */
553 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
554 ICONST(d, iptr->sx.val.i);
555 emit_store_dst(jd, iptr, d);
558 case ICMD_LCONST: /* ... ==> ..., constant */
560 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
561 LCONST(d, iptr->sx.val.l);
562 emit_store_dst(jd, iptr, d);
565 case ICMD_FCONST: /* ... ==> ..., constant */
567 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
568 if (iptr->sx.val.f == 0.0) {
572 if (iptr->sx.val.i == 0x80000000) {
576 } else if (iptr->sx.val.f == 1.0) {
579 } else if (iptr->sx.val.f == 2.0) {
585 disp = dseg_add_float(cd, iptr->sx.val.f);
586 emit_mov_imm_reg(cd, 0, REG_ITMP1);
588 emit_flds_membase(cd, REG_ITMP1, disp);
590 emit_store_dst(jd, iptr, d);
593 case ICMD_DCONST: /* ... ==> ..., constant */
595 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
596 if (iptr->sx.val.d == 0.0) {
600 if (iptr->sx.val.l == 0x8000000000000000LL) {
604 } else if (iptr->sx.val.d == 1.0) {
607 } else if (iptr->sx.val.d == 2.0) {
613 disp = dseg_add_double(cd, iptr->sx.val.d);
614 emit_mov_imm_reg(cd, 0, REG_ITMP1);
616 emit_fldl_membase(cd, REG_ITMP1, disp);
618 emit_store_dst(jd, iptr, d);
621 case ICMD_ACONST: /* ... ==> ..., constant */
623 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
625 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
626 codegen_addpatchref(cd, PATCHER_aconst,
627 iptr->sx.val.c.ref, 0);
632 if (iptr->sx.val.anyptr == NULL)
635 M_MOV_IMM(iptr->sx.val.anyptr, d);
637 emit_store_dst(jd, iptr, d);
641 /* load/store/copy/move operations ************************************/
659 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
664 /* integer operations *************************************************/
666 case ICMD_INEG: /* ..., value ==> ..., - value */
668 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
669 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
672 emit_store_dst(jd, iptr, d);
675 case ICMD_LNEG: /* ..., value ==> ..., - value */
677 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
678 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
680 M_NEG(GET_LOW_REG(d));
681 M_IADDC_IMM(0, GET_HIGH_REG(d));
682 M_NEG(GET_HIGH_REG(d));
683 emit_store_dst(jd, iptr, d);
686 case ICMD_I2L: /* ..., value ==> ..., value */
688 s1 = emit_load_s1(jd, iptr, EAX);
689 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
692 M_LNGMOVE(EAX_EDX_PACKED, d);
693 emit_store_dst(jd, iptr, d);
696 case ICMD_L2I: /* ..., value ==> ..., value */
698 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
699 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
701 emit_store_dst(jd, iptr, d);
704 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
706 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
707 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
711 emit_store_dst(jd, iptr, d);
714 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
716 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
717 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
719 emit_store_dst(jd, iptr, d);
722 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
724 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
725 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
727 emit_store_dst(jd, iptr, d);
731 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
733 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
734 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
735 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
742 emit_store_dst(jd, iptr, d);
746 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
747 /* sx.val.i = constant */
749 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
750 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
752 /* `inc reg' is slower on p4's (regarding to ia32
753 optimization reference manual and benchmarks) and as
757 M_IADD_IMM(iptr->sx.val.i, d);
758 emit_store_dst(jd, iptr, d);
761 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
763 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
764 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
765 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
766 M_INTMOVE(s1, GET_LOW_REG(d));
767 M_IADD(s2, GET_LOW_REG(d));
768 /* don't use REG_ITMP1 */
769 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
770 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
771 M_INTMOVE(s1, GET_HIGH_REG(d));
772 M_IADDC(s2, GET_HIGH_REG(d));
773 emit_store_dst(jd, iptr, d);
776 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
777 /* sx.val.l = constant */
779 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
780 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
782 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
783 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
784 emit_store_dst(jd, iptr, d);
787 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
789 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
790 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
791 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
793 M_INTMOVE(s1, REG_ITMP1);
794 M_ISUB(s2, REG_ITMP1);
795 M_INTMOVE(REG_ITMP1, d);
801 emit_store_dst(jd, iptr, d);
804 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
805 /* sx.val.i = constant */
807 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
808 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
810 M_ISUB_IMM(iptr->sx.val.i, d);
811 emit_store_dst(jd, iptr, d);
814 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
816 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
817 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
818 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
819 if (s2 == GET_LOW_REG(d)) {
820 M_INTMOVE(s1, REG_ITMP1);
821 M_ISUB(s2, REG_ITMP1);
822 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
825 M_INTMOVE(s1, GET_LOW_REG(d));
826 M_ISUB(s2, GET_LOW_REG(d));
828 /* don't use REG_ITMP1 */
829 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
830 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
831 if (s2 == GET_HIGH_REG(d)) {
832 M_INTMOVE(s1, REG_ITMP2);
833 M_ISUBB(s2, REG_ITMP2);
834 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
837 M_INTMOVE(s1, GET_HIGH_REG(d));
838 M_ISUBB(s2, GET_HIGH_REG(d));
840 emit_store_dst(jd, iptr, d);
843 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
844 /* sx.val.l = constant */
846 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
847 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
849 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
850 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
851 emit_store_dst(jd, iptr, d);
854 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
856 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
857 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
858 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
865 emit_store_dst(jd, iptr, d);
868 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
869 /* sx.val.i = constant */
871 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
872 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
873 M_IMUL_IMM(s1, iptr->sx.val.i, d);
874 emit_store_dst(jd, iptr, d);
877 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
879 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
880 s2 = emit_load_s2_low(jd, iptr, EDX);
881 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
883 M_INTMOVE(s1, REG_ITMP2);
884 M_IMUL(s2, REG_ITMP2);
886 s1 = emit_load_s1_low(jd, iptr, EAX);
887 s2 = emit_load_s2_high(jd, iptr, EDX);
890 M_IADD(EDX, REG_ITMP2);
892 s1 = emit_load_s1_low(jd, iptr, EAX);
893 s2 = emit_load_s2_low(jd, iptr, EDX);
896 M_INTMOVE(EAX, GET_LOW_REG(d));
897 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
899 emit_store_dst(jd, iptr, d);
902 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
903 /* sx.val.l = constant */
905 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
906 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
907 ICONST(EAX, iptr->sx.val.l);
909 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
910 M_IADD(REG_ITMP2, EDX);
911 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
912 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
913 M_IADD(REG_ITMP2, EDX);
914 M_LNGMOVE(EAX_EDX_PACKED, d);
915 emit_store_dst(jd, iptr, d);
918 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
920 s1 = emit_load_s1(jd, iptr, EAX);
921 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
922 d = codegen_reg_of_dst(jd, iptr, EAX);
923 emit_arithmetic_check(cd, iptr, s2);
925 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
927 /* check as described in jvm spec */
929 M_CMP_IMM(0x80000000, EAX);
936 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
937 emit_store_dst(jd, iptr, d);
940 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
942 s1 = emit_load_s1(jd, iptr, EAX);
943 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
944 d = codegen_reg_of_dst(jd, iptr, EDX);
945 emit_arithmetic_check(cd, iptr, s2);
947 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
949 /* check as described in jvm spec */
951 M_CMP_IMM(0x80000000, EAX);
959 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
960 emit_store_dst(jd, iptr, d);
963 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
964 /* sx.val.i = constant */
966 /* TODO: optimize for `/ 2' */
967 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
968 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
972 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
973 M_SRA_IMM(iptr->sx.val.i, d);
974 emit_store_dst(jd, iptr, d);
977 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
978 /* sx.val.i = constant */
980 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
981 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
983 M_MOV(s1, REG_ITMP1);
987 M_AND_IMM(iptr->sx.val.i, d);
989 M_BGE(2 + 2 + 6 + 2);
990 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
992 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
994 emit_store_dst(jd, iptr, d);
997 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
998 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1000 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
1001 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1003 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
1004 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
1005 /* XXX could be optimized */
1006 emit_arithmetic_check(cd, iptr, REG_ITMP3);
1008 bte = iptr->sx.s23.s3.bte;
1011 M_LST(s2, REG_SP, 2 * 4);
1013 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1014 M_LST(s1, REG_SP, 0 * 4);
1016 M_MOV_IMM(bte->fp, REG_ITMP3);
1018 emit_store_dst(jd, iptr, d);
1021 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1022 /* sx.val.i = constant */
1024 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1025 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1027 M_TEST(GET_HIGH_REG(d));
1029 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1030 M_IADDC_IMM(0, GET_HIGH_REG(d));
1031 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1032 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1033 emit_store_dst(jd, iptr, d);
1037 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1038 /* sx.val.l = constant */
1040 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1041 if (iptr->dst.var->flags & INMEMORY) {
1042 if (iptr->s1.var->flags & INMEMORY) {
1043 /* Alpha algorithm */
1045 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 4);
1047 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 4 + 4);
1053 /* TODO: hmm, don't know if this is always correct */
1055 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1057 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1063 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4, REG_ITMP1);
1064 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4 + 4, REG_ITMP2);
1066 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1067 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1068 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 4 + 4);
1069 emit_jcc(cd, CC_GE, disp);
1071 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4, REG_ITMP1);
1072 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4 + 4, REG_ITMP2);
1074 emit_neg_reg(cd, REG_ITMP1);
1075 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1076 emit_neg_reg(cd, REG_ITMP2);
1078 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1079 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1081 emit_neg_reg(cd, REG_ITMP1);
1082 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1083 emit_neg_reg(cd, REG_ITMP2);
1085 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 4);
1086 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 4 + 4);
1090 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1091 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1093 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1094 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1095 M_TEST(GET_LOW_REG(s1));
1101 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1103 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1104 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1105 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1106 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1109 emit_store_dst(jd, iptr, d);
1112 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1113 /* sx.val.i = constant */
1115 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1116 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1118 M_SLL_IMM(iptr->sx.val.i, d);
1119 emit_store_dst(jd, iptr, d);
1122 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1124 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1125 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1126 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1127 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1130 emit_store_dst(jd, iptr, d);
1133 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1134 /* sx.val.i = constant */
1136 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1137 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1139 M_SRA_IMM(iptr->sx.val.i, d);
1140 emit_store_dst(jd, iptr, d);
1143 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1145 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1146 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1147 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1148 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1151 emit_store_dst(jd, iptr, d);
1154 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1155 /* sx.val.i = constant */
1157 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1158 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1160 M_SRL_IMM(iptr->sx.val.i, d);
1161 emit_store_dst(jd, iptr, d);
1164 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1166 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1167 s2 = emit_load_s2(jd, iptr, ECX);
1168 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1171 M_TEST_IMM(32, ECX);
1173 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1174 M_CLR(GET_LOW_REG(d));
1175 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1176 M_SLL(GET_LOW_REG(d));
1177 emit_store_dst(jd, iptr, d);
1180 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1181 /* sx.val.i = constant */
1183 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1184 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1186 if (iptr->sx.val.i & 0x20) {
1187 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1188 M_CLR(GET_LOW_REG(d));
1189 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1193 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1195 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1197 emit_store_dst(jd, iptr, d);
1200 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1202 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1203 s2 = emit_load_s2(jd, iptr, ECX);
1204 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1207 M_TEST_IMM(32, ECX);
1209 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1210 M_SRA_IMM(31, GET_HIGH_REG(d));
1211 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1212 M_SRA(GET_HIGH_REG(d));
1213 emit_store_dst(jd, iptr, d);
1216 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1217 /* sx.val.i = constant */
1219 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1220 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1222 if (iptr->sx.val.i & 0x20) {
1223 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1224 M_SRA_IMM(31, GET_HIGH_REG(d));
1225 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1229 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1231 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1233 emit_store_dst(jd, iptr, d);
1236 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1238 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1239 s2 = emit_load_s2(jd, iptr, ECX);
1240 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1243 M_TEST_IMM(32, ECX);
1245 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1246 M_CLR(GET_HIGH_REG(d));
1247 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1248 M_SRL(GET_HIGH_REG(d));
1249 emit_store_dst(jd, iptr, d);
1252 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1253 /* sx.val.l = constant */
1255 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1256 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1258 if (iptr->sx.val.i & 0x20) {
1259 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1260 M_CLR(GET_HIGH_REG(d));
1261 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1265 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1267 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1269 emit_store_dst(jd, iptr, d);
1272 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1274 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1275 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1276 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1283 emit_store_dst(jd, iptr, d);
1286 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1287 /* sx.val.i = constant */
1289 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1290 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1292 M_AND_IMM(iptr->sx.val.i, d);
1293 emit_store_dst(jd, iptr, d);
1296 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1298 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1299 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1300 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1301 if (s2 == GET_LOW_REG(d))
1302 M_AND(s1, GET_LOW_REG(d));
1304 M_INTMOVE(s1, GET_LOW_REG(d));
1305 M_AND(s2, GET_LOW_REG(d));
1307 /* REG_ITMP1 probably contains low 32-bit of destination */
1308 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1309 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1310 if (s2 == GET_HIGH_REG(d))
1311 M_AND(s1, GET_HIGH_REG(d));
1313 M_INTMOVE(s1, GET_HIGH_REG(d));
1314 M_AND(s2, GET_HIGH_REG(d));
1316 emit_store_dst(jd, iptr, d);
1319 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1320 /* sx.val.l = constant */
1322 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1323 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1325 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1326 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1327 emit_store_dst(jd, iptr, d);
1330 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1332 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1333 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1334 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1341 emit_store_dst(jd, iptr, d);
1344 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1345 /* sx.val.i = constant */
1347 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1348 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1350 M_OR_IMM(iptr->sx.val.i, d);
1351 emit_store_dst(jd, iptr, d);
1354 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1356 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1357 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1358 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1359 if (s2 == GET_LOW_REG(d))
1360 M_OR(s1, GET_LOW_REG(d));
1362 M_INTMOVE(s1, GET_LOW_REG(d));
1363 M_OR(s2, GET_LOW_REG(d));
1365 /* REG_ITMP1 probably contains low 32-bit of destination */
1366 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1367 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1368 if (s2 == GET_HIGH_REG(d))
1369 M_OR(s1, GET_HIGH_REG(d));
1371 M_INTMOVE(s1, GET_HIGH_REG(d));
1372 M_OR(s2, GET_HIGH_REG(d));
1374 emit_store_dst(jd, iptr, d);
1377 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1378 /* sx.val.l = constant */
1380 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1381 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1383 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1384 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1385 emit_store_dst(jd, iptr, d);
1388 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1390 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1391 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1392 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1399 emit_store_dst(jd, iptr, d);
1402 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1403 /* sx.val.i = constant */
1405 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1406 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1408 M_XOR_IMM(iptr->sx.val.i, d);
1409 emit_store_dst(jd, iptr, d);
1412 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1414 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1415 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1416 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1417 if (s2 == GET_LOW_REG(d))
1418 M_XOR(s1, GET_LOW_REG(d));
1420 M_INTMOVE(s1, GET_LOW_REG(d));
1421 M_XOR(s2, GET_LOW_REG(d));
1423 /* REG_ITMP1 probably contains low 32-bit of destination */
1424 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1425 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1426 if (s2 == GET_HIGH_REG(d))
1427 M_XOR(s1, GET_HIGH_REG(d));
1429 M_INTMOVE(s1, GET_HIGH_REG(d));
1430 M_XOR(s2, GET_HIGH_REG(d));
1432 emit_store_dst(jd, iptr, d);
1435 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1436 /* sx.val.l = constant */
1438 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1439 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1441 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1442 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1443 emit_store_dst(jd, iptr, d);
1447 /* floating operations ************************************************/
1449 case ICMD_FNEG: /* ..., value ==> ..., - value */
1451 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1452 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1454 emit_store_dst(jd, iptr, d);
1457 case ICMD_DNEG: /* ..., value ==> ..., - value */
1459 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1460 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1462 emit_store_dst(jd, iptr, d);
1465 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1467 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1468 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1469 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1471 emit_store_dst(jd, iptr, d);
1474 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1476 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1477 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1478 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1480 emit_store_dst(jd, iptr, d);
1483 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1485 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1486 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1487 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1489 emit_store_dst(jd, iptr, d);
1492 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1494 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1495 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1496 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1498 emit_store_dst(jd, iptr, d);
1501 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1503 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1504 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1505 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1507 emit_store_dst(jd, iptr, d);
1510 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1512 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1513 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1514 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1516 emit_store_dst(jd, iptr, d);
1519 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1521 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1522 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1523 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1525 emit_store_dst(jd, iptr, d);
1528 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1530 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1531 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1532 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1534 emit_store_dst(jd, iptr, d);
1537 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1539 /* exchanged to skip fxch */
1540 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1541 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1542 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1543 /* emit_fxch(cd); */
1548 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1549 emit_store_dst(jd, iptr, d);
1550 emit_ffree_reg(cd, 0);
1554 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1556 /* exchanged to skip fxch */
1557 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1558 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1559 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1560 /* emit_fxch(cd); */
1565 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1566 emit_store_dst(jd, iptr, d);
1567 emit_ffree_reg(cd, 0);
1571 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1572 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1574 var = VAROP(iptr->s1);
1575 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1577 if (var->flags & INMEMORY) {
1578 emit_fildl_membase(cd, REG_SP, var->vv.regoff * 4);
1580 /* XXX not thread safe! */
1581 disp = dseg_add_unique_s4(cd, 0);
1582 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1584 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1585 emit_fildl_membase(cd, REG_ITMP1, disp);
1588 emit_store_dst(jd, iptr, d);
1591 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1592 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1594 var = VAROP(iptr->s1);
1595 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1596 if (var->flags & INMEMORY) {
1597 emit_fildll_membase(cd, REG_SP, var->vv.regoff * 4);
1600 log_text("L2F: longs have to be in memory");
1603 emit_store_dst(jd, iptr, d);
1606 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1608 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1609 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1611 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1614 /* Round to zero, 53-bit mode, exception masked */
1615 disp = dseg_add_s4(cd, 0x0e7f);
1616 emit_fldcw_membase(cd, REG_ITMP1, disp);
1618 var = VAROP(iptr->dst);
1619 var1 = VAROP(iptr->s1);
1621 if (var->flags & INMEMORY) {
1622 emit_fistpl_membase(cd, REG_SP, var->vv.regoff * 4);
1624 /* Round to nearest, 53-bit mode, exceptions masked */
1625 disp = dseg_add_s4(cd, 0x027f);
1626 emit_fldcw_membase(cd, REG_ITMP1, disp);
1628 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1629 REG_SP, var->vv.regoff * 4);
1632 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1634 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1637 /* XXX not thread safe! */
1638 disp = dseg_add_unique_s4(cd, 0);
1639 emit_fistpl_membase(cd, REG_ITMP1, disp);
1640 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1642 /* Round to nearest, 53-bit mode, exceptions masked */
1643 disp = dseg_add_s4(cd, 0x027f);
1644 emit_fldcw_membase(cd, REG_ITMP1, disp);
1646 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1649 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1650 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1653 emit_jcc(cd, CC_NE, disp);
1655 /* XXX: change this when we use registers */
1656 emit_flds_membase(cd, REG_SP, var1->vv.regoff * 4);
1657 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1658 emit_call_reg(cd, REG_ITMP1);
1660 if (var->flags & INMEMORY) {
1661 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1664 M_INTMOVE(REG_RESULT, var->vv.regoff);
1668 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1670 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1671 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1673 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1676 /* Round to zero, 53-bit mode, exception masked */
1677 disp = dseg_add_s4(cd, 0x0e7f);
1678 emit_fldcw_membase(cd, REG_ITMP1, disp);
1680 var = VAROP(iptr->dst);
1681 var1 = VAROP(iptr->s1);
1683 if (var->flags & INMEMORY) {
1684 emit_fistpl_membase(cd, REG_SP, var->vv.regoff * 4);
1686 /* Round to nearest, 53-bit mode, exceptions masked */
1687 disp = dseg_add_s4(cd, 0x027f);
1688 emit_fldcw_membase(cd, REG_ITMP1, disp);
1690 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1691 REG_SP, var->vv.regoff * 4);
1694 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1696 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1699 /* XXX not thread safe! */
1700 disp = dseg_add_unique_s4(cd, 0);
1701 emit_fistpl_membase(cd, REG_ITMP1, disp);
1702 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1704 /* Round to nearest, 53-bit mode, exceptions masked */
1705 disp = dseg_add_s4(cd, 0x027f);
1706 emit_fldcw_membase(cd, REG_ITMP1, disp);
1708 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1711 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1712 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1715 emit_jcc(cd, CC_NE, disp);
1717 /* XXX: change this when we use registers */
1718 emit_fldl_membase(cd, REG_SP, var1->vv.regoff * 4);
1719 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1720 emit_call_reg(cd, REG_ITMP1);
1722 if (var->flags & INMEMORY) {
1723 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1725 M_INTMOVE(REG_RESULT, var->vv.regoff);
1729 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1731 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1732 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1734 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1737 /* Round to zero, 53-bit mode, exception masked */
1738 disp = dseg_add_s4(cd, 0x0e7f);
1739 emit_fldcw_membase(cd, REG_ITMP1, disp);
1741 var = VAROP(iptr->dst);
1742 var1 = VAROP(iptr->s1);
1744 if (var->flags & INMEMORY) {
1745 emit_fistpll_membase(cd, REG_SP, var->vv.regoff * 4);
1747 /* Round to nearest, 53-bit mode, exceptions masked */
1748 disp = dseg_add_s4(cd, 0x027f);
1749 emit_fldcw_membase(cd, REG_ITMP1, disp);
1751 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1752 REG_SP, var->vv.regoff * 4 + 4);
1755 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1757 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1760 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1762 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4 + 4);
1764 emit_jcc(cd, CC_NE, disp);
1766 emit_alu_imm_membase(cd, ALU_CMP, 0,
1767 REG_SP, var->vv.regoff * 4);
1770 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1772 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1774 emit_jcc(cd, CC_NE, disp);
1776 /* XXX: change this when we use registers */
1777 emit_flds_membase(cd, REG_SP, var1->vv.regoff * 4);
1778 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1779 emit_call_reg(cd, REG_ITMP1);
1780 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1781 emit_mov_reg_membase(cd, REG_RESULT2,
1782 REG_SP, var->vv.regoff * 4 + 4);
1785 log_text("F2L: longs have to be in memory");
1790 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1792 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1793 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1795 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1798 /* Round to zero, 53-bit mode, exception masked */
1799 disp = dseg_add_s4(cd, 0x0e7f);
1800 emit_fldcw_membase(cd, REG_ITMP1, disp);
1802 var = VAROP(iptr->dst);
1803 var1 = VAROP(iptr->s1);
1805 if (var->flags & INMEMORY) {
1806 emit_fistpll_membase(cd, REG_SP, var->vv.regoff * 4);
1808 /* Round to nearest, 53-bit mode, exceptions masked */
1809 disp = dseg_add_s4(cd, 0x027f);
1810 emit_fldcw_membase(cd, REG_ITMP1, disp);
1812 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1813 REG_SP, var->vv.regoff * 4 + 4);
1816 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1818 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1821 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1823 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4 + 4);
1825 emit_jcc(cd, CC_NE, disp);
1827 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff * 4);
1830 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1832 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1834 emit_jcc(cd, CC_NE, disp);
1836 /* XXX: change this when we use registers */
1837 emit_fldl_membase(cd, REG_SP, var1->vv.regoff * 4);
1838 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1839 emit_call_reg(cd, REG_ITMP1);
1840 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1841 emit_mov_reg_membase(cd, REG_RESULT2,
1842 REG_SP, var->vv.regoff * 4 + 4);
1845 log_text("D2L: longs have to be in memory");
1850 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1852 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1853 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1855 emit_store_dst(jd, iptr, d);
1858 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1860 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1861 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1863 emit_store_dst(jd, iptr, d);
1866 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1869 /* exchanged to skip fxch */
1870 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1871 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1872 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1873 /* emit_fxch(cd); */
1876 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1877 emit_jcc(cd, CC_E, 6);
1878 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1880 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1881 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1882 emit_jcc(cd, CC_B, 3 + 5);
1883 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1884 emit_jmp_imm(cd, 3);
1885 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1886 emit_store_dst(jd, iptr, d);
1889 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1892 /* exchanged to skip fxch */
1893 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1894 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1895 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1896 /* emit_fxch(cd); */
1899 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1900 emit_jcc(cd, CC_E, 3);
1901 emit_movb_imm_reg(cd, 1, REG_AH);
1903 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1904 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1905 emit_jcc(cd, CC_B, 3 + 5);
1906 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1907 emit_jmp_imm(cd, 3);
1908 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1909 emit_store_dst(jd, iptr, d);
1913 /* memory operations **************************************************/
1915 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1917 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1918 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1919 /* implicit null-pointer check */
1920 M_ILD(d, s1, OFFSET(java_arrayheader, size));
1921 emit_store_dst(jd, iptr, d);
1924 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1926 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1927 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1928 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1929 /* implicit null-pointer check */
1930 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1931 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray, data[0]),
1933 emit_store_dst(jd, iptr, d);
1936 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1938 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1939 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1940 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1941 /* implicit null-pointer check */
1942 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1943 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray, data[0]),
1945 emit_store_dst(jd, iptr, d);
1948 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1950 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1951 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1952 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1953 /* implicit null-pointer check */
1954 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1955 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray, data[0]),
1957 emit_store_dst(jd, iptr, d);
1960 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1962 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1963 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1964 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1965 /* implicit null-pointer check */
1966 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1967 emit_mov_memindex_reg(cd, OFFSET(java_intarray, data[0]),
1969 emit_store_dst(jd, iptr, d);
1972 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1974 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1975 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1976 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1977 /* implicit null-pointer check */
1978 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1980 var = VAROP(iptr->dst);
1982 assert(var->flags & INMEMORY);
1983 emit_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]),
1984 s1, s2, 3, REG_ITMP3);
1985 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff * 4);
1986 emit_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]) + 4,
1987 s1, s2, 3, REG_ITMP3);
1988 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff * 4 + 4);
1991 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1993 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1994 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1995 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1996 /* implicit null-pointer check */
1997 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1998 emit_flds_memindex(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2);
1999 emit_store_dst(jd, iptr, d);
2002 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2004 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2005 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2006 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
2007 /* implicit null-pointer check */
2008 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2009 emit_fldl_memindex(cd, OFFSET(java_doublearray, data[0]), s1, s2,3);
2010 emit_store_dst(jd, iptr, d);
2013 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2015 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2016 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2017 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2018 /* implicit null-pointer check */
2019 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2020 emit_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]),
2022 emit_store_dst(jd, iptr, d);
2026 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2028 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2029 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2030 /* implicit null-pointer check */
2031 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2032 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2034 /* because EBP, ESI, EDI have no xH and xL nibbles */
2035 M_INTMOVE(s3, REG_ITMP3);
2038 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]),
2042 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2044 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2045 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2046 /* implicit null-pointer check */
2047 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2048 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2049 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]),
2053 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2055 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2056 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2057 /* implicit null-pointer check */
2058 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2059 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2060 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]),
2064 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2066 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2067 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2068 /* implicit null-pointer check */
2069 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2070 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2071 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]),
2075 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2077 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2078 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2079 /* implicit null-pointer check */
2080 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2082 var = VAROP(iptr->sx.s23.s3);
2084 assert(var->flags & INMEMORY);
2085 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff * 4, REG_ITMP3);
2086 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray, data[0])
2088 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff * 4 + 4, REG_ITMP3);
2089 emit_mov_reg_memindex(cd, REG_ITMP3,
2090 OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2093 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2095 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2096 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2097 /* implicit null-pointer check */
2098 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2099 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2100 emit_fstps_memindex(cd, OFFSET(java_floatarray, data[0]), s1, s2,2);
2103 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2105 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2106 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2107 /* implicit null-pointer check */
2108 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2109 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2110 emit_fstpl_memindex(cd, OFFSET(java_doublearray, data[0]),
2114 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2116 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2117 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2118 /* implicit null-pointer check */
2119 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2120 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2122 M_AST(s1, REG_SP, 0 * 4);
2123 M_AST(s3, REG_SP, 1 * 4);
2124 M_MOV_IMM(BUILTIN_canstore, REG_ITMP1);
2126 emit_exception_check(cd, iptr);
2128 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2129 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2130 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2131 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]),
2135 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2137 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2138 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2139 /* implicit null-pointer check */
2140 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2141 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2142 OFFSET(java_bytearray, data[0]), s1, s2, 0);
2145 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2147 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2148 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2149 /* implicit null-pointer check */
2150 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2151 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2152 OFFSET(java_chararray, data[0]), s1, s2, 1);
2155 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2157 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2158 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2159 /* implicit null-pointer check */
2160 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2161 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2162 OFFSET(java_shortarray, data[0]), s1, s2, 1);
2165 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2167 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2168 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2169 /* implicit null-pointer check */
2170 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2171 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2172 OFFSET(java_intarray, data[0]), s1, s2, 2);
2175 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2177 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2178 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2179 /* implicit null-pointer check */
2180 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2181 emit_mov_imm_memindex(cd,
2182 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2183 OFFSET(java_longarray, data[0]), s1, s2, 3);
2184 emit_mov_imm_memindex(cd,
2185 ((s4)iptr->sx.s23.s3.constval) >> 31,
2186 OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2189 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2191 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2192 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2193 /* implicit null-pointer check */
2194 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2195 emit_mov_imm_memindex(cd, 0,
2196 OFFSET(java_objectarray, data[0]), s1, s2, 2);
2200 case ICMD_GETSTATIC: /* ... ==> ..., value */
2202 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2203 uf = iptr->sx.s23.s3.uf;
2204 fieldtype = uf->fieldref->parseddesc.fd->type;
2207 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2211 fi = iptr->sx.s23.s3.fmiref->p.field;
2212 fieldtype = fi->type;
2213 disp = (ptrint) &(fi->value);
2215 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2216 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2219 M_MOV_IMM(disp, REG_ITMP1);
2220 switch (fieldtype) {
2223 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2224 M_ILD(d, REG_ITMP1, 0);
2227 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2228 M_LLD(d, REG_ITMP1, 0);
2231 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2232 M_FLD(d, REG_ITMP1, 0);
2235 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2236 M_DLD(d, REG_ITMP1, 0);
2239 emit_store_dst(jd, iptr, d);
2242 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2244 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2245 uf = iptr->sx.s23.s3.uf;
2246 fieldtype = uf->fieldref->parseddesc.fd->type;
2249 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2252 fi = iptr->sx.s23.s3.fmiref->p.field;
2253 fieldtype = fi->type;
2254 disp = (ptrint) &(fi->value);
2256 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2257 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2260 M_MOV_IMM(disp, REG_ITMP1);
2261 switch (fieldtype) {
2264 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2265 M_IST(s1, REG_ITMP1, 0);
2268 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2269 M_LST(s1, REG_ITMP1, 0);
2272 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2273 emit_fstps_membase(cd, REG_ITMP1, 0);
2276 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2277 emit_fstpl_membase(cd, REG_ITMP1, 0);
2282 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2283 /* val = value (in current instruction) */
2284 /* following NOP) */
2286 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2287 uf = iptr->sx.s23.s3.uf;
2288 fieldtype = uf->fieldref->parseddesc.fd->type;
2291 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2294 fi = iptr->sx.s23.s3.fmiref->p.field;
2295 fieldtype = fi->type;
2296 disp = (ptrint) &(fi->value);
2298 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2299 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2302 M_MOV_IMM(disp, REG_ITMP1);
2303 switch (fieldtype) {
2306 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
2309 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2310 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
2317 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2319 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2320 emit_nullpointer_check(cd, iptr, s1);
2322 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2323 unresolved_field *uf = iptr->sx.s23.s3.uf;
2325 fieldtype = uf->fieldref->parseddesc.fd->type;
2327 codegen_addpatchref(cd, PATCHER_getfield,
2328 iptr->sx.s23.s3.uf, 0);
2334 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2336 fieldtype = fi->type;
2340 switch (fieldtype) {
2343 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2344 M_ILD32(d, s1, disp);
2347 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2348 M_LLD32(d, s1, disp);
2351 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2352 M_FLD32(d, s1, disp);
2355 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2356 M_DLD32(d, s1, disp);
2359 emit_store_dst(jd, iptr, d);
2362 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2364 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2365 emit_nullpointer_check(cd, iptr, s1);
2367 /* must be done here because of code patching */
2369 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2370 unresolved_field *uf = iptr->sx.s23.s3.uf;
2372 fieldtype = uf->fieldref->parseddesc.fd->type;
2375 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2377 fieldtype = fi->type;
2380 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2381 if (IS_2_WORD_TYPE(fieldtype))
2382 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2384 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2387 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2389 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2390 unresolved_field *uf = iptr->sx.s23.s3.uf;
2392 codegen_addpatchref(cd, PATCHER_putfield, uf, 0);
2398 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2403 switch (fieldtype) {
2406 M_IST32(s2, s1, disp);
2409 M_LST32(s2, s1, disp);
2412 emit_fstps_membase32(cd, s1, disp);
2415 emit_fstpl_membase32(cd, s1, disp);
2420 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2421 /* val = value (in current instruction) */
2422 /* following NOP) */
2424 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2425 emit_nullpointer_check(cd, iptr, s1);
2427 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2428 unresolved_field *uf = iptr->sx.s23.s3.uf;
2430 fieldtype = uf->fieldref->parseddesc.fd->type;
2432 codegen_addpatchref(cd, PATCHER_putfieldconst,
2440 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2442 fieldtype = fi->type;
2447 switch (fieldtype) {
2450 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2453 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2454 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2462 /* branch operations **************************************************/
2464 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2466 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2467 M_INTMOVE(s1, REG_ITMP1_XPTR);
2469 #ifdef ENABLE_VERIFIER
2470 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2471 codegen_addpatchref(cd, PATCHER_athrow_areturn,
2472 iptr->sx.s23.s2.uc, 0);
2474 #endif /* ENABLE_VERIFIER */
2476 M_CALL_IMM(0); /* passing exception pc */
2477 M_POP(REG_ITMP2_XPC);
2479 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2483 case ICMD_GOTO: /* ... ==> ... */
2484 case ICMD_RET: /* ... ==> ... */
2486 #if defined(ENABLE_SSA)
2488 last_cmd_was_goto = true;
2489 /* In case of a Goto phimoves have to be inserted before the */
2491 codegen_insert_phi_moves(jd, bptr);
2494 emit_br(cd, iptr->dst.block);
2498 case ICMD_JSR: /* ... ==> ... */
2500 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
2504 case ICMD_IFNULL: /* ..., value ==> ... */
2505 case ICMD_IFNONNULL:
2507 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2509 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
2512 case ICMD_IFEQ: /* ..., value ==> ... */
2519 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2520 M_CMP_IMM(iptr->sx.val.i, s1);
2521 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
/* 64-bit compare-with-constant branches.  A long lives in a register
   pair on i386 (REG_ITMP12_PACKED = low/high halves), so each test is
   synthesized from two 32-bit operations. */
2524 case ICMD_IF_LEQ: /* ..., value ==> ... */
2526 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2527 if (iptr->sx.val.l == 0) {
/* Equality with zero: OR the halves together — result is zero
   iff both halves are zero. */
2528 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2529 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
/* General equality: XOR each half with the matching half of the
   constant, OR the two results; zero iff all 64 bits match. */
2532 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2533 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2534 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2535 M_OR(REG_ITMP2, REG_ITMP1);
2537 emit_beq(cd, iptr->dst.block);
2540 case ICMD_IF_LLT: /* ..., value ==> ... */
2542 if (iptr->sx.val.l == 0) {
2543 /* If high 32-bit are less than zero, then the 64-bits
2545 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2547 emit_blt(cd, iptr->dst.block);
/* General case: signed compare on the high words decides unless
   they are equal, in which case the low words are compared
   UNsigned (the low half carries no sign). */
2550 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2551 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2552 emit_blt(cd, iptr->dst.block);
2554 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2555 emit_bult(cd, iptr->dst.block);
/* Same high-signed / low-unsigned scheme for <=. */
2559 case ICMD_IF_LLE: /* ..., value ==> ... */
2561 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2562 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2563 emit_blt(cd, iptr->dst.block);
2565 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2566 emit_bule(cd, iptr->dst.block);
/* Inequality mirrors ICMD_IF_LEQ but branches on non-zero. */
2569 case ICMD_IF_LNE: /* ..., value ==> ... */
2571 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2572 if (iptr->sx.val.l == 0) {
2573 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2574 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2577 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2578 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2579 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2580 M_OR(REG_ITMP2, REG_ITMP1);
2582 emit_bne(cd, iptr->dst.block);
2585 case ICMD_IF_LGT: /* ..., value ==> ... */
2587 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2588 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2589 emit_bgt(cd, iptr->dst.block);
2591 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2592 emit_bugt(cd, iptr->dst.block);
2595 case ICMD_IF_LGE: /* ..., value ==> ... */
2597 if (iptr->sx.val.l == 0) {
2598 /* If high 32-bit are greater equal zero, then the
/* Zero shortcut: the sign bit of the high word alone decides. */
2600 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2602 emit_bge(cd, iptr->dst.block);
2605 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2606 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2607 emit_bgt(cd, iptr->dst.block);
2609 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2610 emit_buge(cd, iptr->dst.block);
/* Two-operand compare branches.  All six int compares share one body;
   emit_bcc derives the condition from the opcode's offset relative to
   ICMD_IF_ICMPEQ. */
2614 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2615 case ICMD_IF_ICMPNE:
2616 case ICMD_IF_ICMPLT:
2617 case ICMD_IF_ICMPGT:
2618 case ICMD_IF_ICMPGE:
2619 case ICMD_IF_ICMPLE:
2621 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2622 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2624 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);
/* Reference compares — same scheme, offset from ICMD_IF_ACMPEQ. */
2627 case ICMD_IF_ACMPEQ: /* ..., value, value ==> ... */
2628 case ICMD_IF_ACMPNE:
2630 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2631 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2633 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);
/* 64-bit equality: XOR the low halves and the high halves, OR the two
   results — zero iff the longs are equal. */
2636 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2638 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2639 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2640 M_INTMOVE(s1, REG_ITMP1);
2641 M_XOR(s2, REG_ITMP1);
2642 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2643 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2644 M_INTMOVE(s1, REG_ITMP2);
2645 M_XOR(s2, REG_ITMP2);
2646 M_OR(REG_ITMP1, REG_ITMP2);
2647 emit_beq(cd, iptr->dst.block);
2650 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2652 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2653 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2654 M_INTMOVE(s1, REG_ITMP1);
2655 M_XOR(s2, REG_ITMP1);
2656 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2657 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2658 M_INTMOVE(s1, REG_ITMP2);
2659 M_XOR(s2, REG_ITMP2);
2660 M_OR(REG_ITMP1, REG_ITMP2);
2661 emit_bne(cd, iptr->dst.block);
/* 64-bit ordered compares: signed compare on the high words first;
   when they tie, the low words are compared unsigned. */
2664 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2666 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2667 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2669 emit_blt(cd, iptr->dst.block);
2670 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2671 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2674 emit_bult(cd, iptr->dst.block);
2677 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2679 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2680 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2682 emit_bgt(cd, iptr->dst.block);
2683 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2684 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2687 emit_bugt(cd, iptr->dst.block);
2690 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2692 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2693 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2695 emit_blt(cd, iptr->dst.block);
2696 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2697 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2700 emit_bule(cd, iptr->dst.block);
2703 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2705 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2706 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2708 emit_bgt(cd, iptr->dst.block);
2709 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2710 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2713 emit_buge(cd, iptr->dst.block);
/* Method returns.  Each typed return moves the value into the ABI
   result register(s) and then falls into the common epilogue at
   nowperformreturn (label lies outside the sampled lines). */
2717 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2719 REPLACEMENT_POINT_RETURN(cd, iptr);
2720 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2721 M_INTMOVE(s1, REG_RESULT);
2722 goto nowperformreturn;
2724 case ICMD_LRETURN: /* ..., retvalue ==> ... */
2726 REPLACEMENT_POINT_RETURN(cd, iptr);
2727 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2728 M_LNGMOVE(s1, REG_RESULT_PACKED);
2729 goto nowperformreturn;
2731 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2733 REPLACEMENT_POINT_RETURN(cd, iptr);
2734 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2735 M_INTMOVE(s1, REG_RESULT);
2737 #ifdef ENABLE_VERIFIER
/* An unresolved return type needs a patcher so the verifier can
   check the areturn value's class once it is loaded. */
2738 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2739 codegen_addpatchref(cd, PATCHER_athrow_areturn,
2740 iptr->sx.s23.s2.uc, 0);
2742 #endif /* ENABLE_VERIFIER */
2743 goto nowperformreturn;
2745 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2748 REPLACEMENT_POINT_RETURN(cd, iptr);
2749 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2750 goto nowperformreturn;
2752 case ICMD_RETURN: /* ... ==> ... */
2754 REPLACEMENT_POINT_RETURN(cd, iptr);
/* Common epilogue: p counts stack slots still to be popped while
   restoring callee-saved registers below. */
2760 p = cd->stackframesize;
2762 #if !defined(NDEBUG)
2763 emit_verbosecall_exit(jd);
2766 #if defined(ENABLE_THREADS)
/* Synchronized method: leave the monitor before returning.  The
   return value must be spilled around the LOCK_monitor_exit call
   because the call clobbers the result registers. */
2767 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2768 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 4);
2770 /* we need to save the proper return value */
2771 switch (iptr->opc) {
2774 M_IST(REG_RESULT, REG_SP, rd->memuse * 4);
2778 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 4);
2782 emit_fstps_membase(cd, REG_SP, rd->memuse * 4);
2786 emit_fstpl_membase(cd, REG_SP, rd->memuse * 4);
2790 M_AST(REG_ITMP2, REG_SP, 0);
2791 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2794 /* and now restore the proper return value */
2795 switch (iptr->opc) {
2798 M_ILD(REG_RESULT, REG_SP, rd->memuse * 4);
2802 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 4);
2806 emit_flds_membase(cd, REG_SP, rd->memuse * 4);
2810 emit_fldl_membase(cd, REG_SP, rd->memuse * 4);
2816 /* restore saved registers */
2818 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2819 p--; M_ALD(rd->savintregs[i], REG_SP, p * 4);
2822 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2824 emit_fldl_membase(cd, REG_SP, p * 4);
/* With a float/double result already on the FPU stack, the saved
   register must be stored one slot deeper (commented variants). */
2825 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2827 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2830 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2834 /* deallocate stack */
2836 if (cd->stackframesize)
2837 M_AADD_IMM(cd->stackframesize * 4, REG_SP);
/* TABLESWITCH: bias the index by the table's low bound, then a single
   UNSIGNED compare-and-branch covers both bounds checks (a negative
   biased index wraps to a huge unsigned value); table[0] is the
   default target.  The jump table itself lives in the data segment. */
2844 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2847 branch_target_t *table;
2849 table = iptr->dst.table;
2851 l = iptr->sx.s23.s2.tablelow;
2852 i = iptr->sx.s23.s3.tablehigh;
2854 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2855 M_INTMOVE(s1, REG_ITMP1);
2857 M_ISUB_IMM(l, REG_ITMP1);
2864 M_CMP_IMM(i - 1, REG_ITMP1);
2865 emit_bugt(cd, table[0].block);
2867 /* build jump table top down and use address of lowest entry */
2872 dseg_add_target(cd, table->block);
2876 /* length of dataseg after last dseg_addtarget is used
/* NOTE(review): the 0 immediate is presumably patched to the dseg
   base address later — confirm against the dseg relocation code. */
2879 M_MOV_IMM(0, REG_ITMP2);
2881 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
/* LOOKUPSWITCH: linear chain of compare/branch pairs, one per entry,
   falling through to an unconditional branch to the default block. */
2887 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2890 lookup_target_t *lookup;
2892 lookup = iptr->dst.lookup;
2894 i = iptr->sx.s23.s2.lookupcount;
2896 MCODECHECK((i<<2)+8);
2897 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2900 M_CMP_IMM(lookup->value, s1);
2901 emit_beq(cd, lookup->target.block);
2905 emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
/* Method invocation.  BUILTIN and the four invoke kinds share the
   argument-copy prologue and the result-move epilogue; the middle
   switch dispatches on the concrete call kind. */
2910 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
2912 bte = iptr->sx.s23.s3.bte;
2916 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2918 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2919 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
2920 case ICMD_INVOKEINTERFACE:
2922 REPLACEMENT_POINT_INVOKE(cd, iptr);
/* Unresolved call sites only have the parsed descriptor of the
   method reference; resolved ones have the methodinfo itself. */
2924 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2925 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
2929 lm = iptr->sx.s23.s3.fmiref->p.method;
2930 md = lm->parseddesc;
2934 s3 = md->paramcount;
2936 MCODECHECK((s3 << 1) + 64);
2938 /* copy arguments to registers or stack location */
/* i386 ABI used here passes everything on the stack: ints/longs
   via ITMP registers, floats/doubles via FTMP1. */
2940 for (s3 = s3 - 1; s3 >= 0; s3--) {
2941 var = VAR(iptr->sx.s23.s2.args[s3]);
2943 /* Already Preallocated (ARGVAR) ? */
2944 if (var->flags & PREALLOC)
2946 if (IS_INT_LNG_TYPE(var->type)) {
2947 if (!md->params[s3].inmemory) {
2948 log_text("No integer argument registers available!");
2952 if (IS_2_WORD_TYPE(var->type)) {
2953 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
2954 M_LST(d, REG_SP, md->params[s3].regoff * 4);
2956 d = emit_load(jd, iptr, var, REG_ITMP1);
2957 M_IST(d, REG_SP, md->params[s3].regoff * 4);
2962 if (!md->params[s3].inmemory) {
2963 s1 = md->params[s3].regoff;
2964 d = emit_load(jd, iptr, var, s1);
2968 d = emit_load(jd, iptr, var, REG_FTMP1);
2969 if (IS_2_WORD_TYPE(var->type))
2970 M_DST(d, REG_SP, md->params[s3].regoff * 4);
2972 M_FST(d, REG_SP, md->params[s3].regoff * 4);
2977 switch (iptr->opc) {
/* BUILTIN: direct call to the builtin's C function pointer. */
2979 disp = (ptrint) bte->fp;
2980 d = md->returntype.type;
2982 M_MOV_IMM(disp, REG_ITMP1);
2985 emit_exception_check(cd, iptr);
2988 case ICMD_INVOKESPECIAL:
/* Explicit null check on the receiver (first stack argument). */
2989 M_ALD(REG_ITMP1, REG_SP, 0 * 4);
2990 emit_nullpointer_check(cd, iptr, REG_ITMP1);
2993 case ICMD_INVOKESTATIC:
2995 unresolved_method *um = iptr->sx.s23.s3.um;
2997 codegen_addpatchref(cd, PATCHER_invokestatic_special,
3001 d = md->returntype.type;
3004 disp = (ptrint) lm->stubroutine;
3005 d = lm->parseddesc->returntype.type;
3008 M_MOV_IMM(disp, REG_ITMP2);
3012 case ICMD_INVOKEVIRTUAL:
3013 M_ALD(REG_ITMP1, REG_SP, 0 * 4);
/* NOTE(review): passes s1, last written during argument copying,
   while INVOKESPECIAL above passes REG_ITMP1 (the freshly loaded
   receiver) — looks inconsistent; confirm intended register. */
3014 emit_nullpointer_check(cd, iptr, s1);
3017 unresolved_method *um = iptr->sx.s23.s3.um;
3019 codegen_addpatchref(cd, PATCHER_invokevirtual, um, 0);
3022 d = md->returntype.type;
/* Resolved virtual call: index into the vftbl method table. */
3025 s1 = OFFSET(vftbl_t, table[0]) +
3026 sizeof(methodptr) * lm->vftblindex;
3027 d = md->returntype.type;
3030 M_ALD(REG_METHODPTR, REG_ITMP1,
3031 OFFSET(java_objectheader, vftbl));
3032 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3036 case ICMD_INVOKEINTERFACE:
3037 M_ALD(REG_ITMP1, REG_SP, 0 * 4);
/* NOTE(review): same s1-vs-REG_ITMP1 question as INVOKEVIRTUAL. */
3038 emit_nullpointer_check(cd, iptr, s1);
3041 unresolved_method *um = iptr->sx.s23.s3.um;
3043 codegen_addpatchref(cd, PATCHER_invokeinterface, um, 0);
3047 d = md->returntype.type;
/* Interface dispatch: negative offset into the interfacetable,
   then index by the method's position within its class. */
3050 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3051 sizeof(methodptr) * lm->class->index;
3053 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3055 d = md->returntype.type;
3058 M_ALD(REG_METHODPTR, REG_ITMP1,
3059 OFFSET(java_objectheader, vftbl));
3060 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3061 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3066 /* store size of call code in replacement point */
3068 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3070 /* d contains return type */
3072 if (d != TYPE_VOID) {
3073 #if defined(ENABLE_SSA)
3074 if ((ls == NULL) || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) ||
3075 (ls->lifetime[-iptr->dst.varindex-1].type != -1))
3076 /* a "living" stackslot */
3079 if (IS_INT_LNG_TYPE(d)) {
3080 if (IS_2_WORD_TYPE(d)) {
3081 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3082 M_LNGMOVE(REG_RESULT_PACKED, s1);
3085 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3086 M_INTMOVE(REG_RESULT, s1);
3090 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3092 emit_store_dst(jd, iptr, s1);
/* CHECKCAST.  Object types take the inline subtype test; array types
   delegate to BUILTIN_arraycheckcast.  For an unresolved superclass
   both the interface and the class path are emitted and a patched
   flags word (ACC_INTERFACE) selects between them at run time via the
   BRANCH_LABEL_* chain. */
3098 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3100 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3101 /* object type cast-check */
3104 vftbl_t *supervftbl;
3107 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3113 super = iptr->sx.s23.s3.c.cls;
3114 superindex = super->index;
3115 supervftbl = super->vftbl;
3118 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3119 CODEGEN_CRITICAL_SECTION_NEW;
3121 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3123 /* if class is not resolved, check which code to call */
3125 if (super == NULL) {
3127 emit_label_beq(cd, BRANCH_LABEL_1);
3129 codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
3130 iptr->sx.s23.s3.c.ref, 0);
/* The 0 immediate is patched with super->flags once resolved. */
3132 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3133 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3134 emit_label_beq(cd, BRANCH_LABEL_2);
3137 /* interface checkcast code */
3139 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3140 if (super != NULL) {
3142 emit_label_beq(cd, BRANCH_LABEL_3);
3145 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3147 if (super == NULL) {
3148 codegen_addpatchref(cd, PATCHER_checkcast_interface,
3149 iptr->sx.s23.s3.c.ref,
/* Bounds check: interfacetablelength must exceed superindex. */
3154 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3155 M_ISUB_IMM32(superindex, REG_ITMP3);
3156 /* XXX do we need this one? */
3158 emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);
/* The interfacetable entry must be non-NULL for the cast to
   succeed. */
3160 M_ALD32(REG_ITMP3, REG_ITMP2,
3161 OFFSET(vftbl_t, interfacetable[0]) -
3162 superindex * sizeof(methodptr*));
3164 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);
3167 emit_label_br(cd, BRANCH_LABEL_4);
3169 emit_label(cd, BRANCH_LABEL_3);
3172 /* class checkcast code */
3174 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3175 if (super == NULL) {
3176 emit_label(cd, BRANCH_LABEL_2);
3180 emit_label_beq(cd, BRANCH_LABEL_5);
3183 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3185 if (super == NULL) {
3186 codegen_addpatchref(cd, PATCHER_checkcast_class,
3187 iptr->sx.s23.s3.c.ref,
3191 M_MOV_IMM(supervftbl, REG_ITMP3);
/* baseval/diffval subtype test; the critical section guards
   against a concurrent vftbl renumbering. */
3193 CODEGEN_CRITICAL_SECTION_START;
3195 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3197 /* if (s1 != REG_ITMP1) { */
3198 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3199 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3200 /* #if defined(ENABLE_THREADS) */
3201 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3203 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
3206 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3207 M_ISUB(REG_ITMP3, REG_ITMP2);
3208 M_MOV_IMM(supervftbl, REG_ITMP3);
3209 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3211 CODEGEN_CRITICAL_SECTION_END;
/* Subtype iff (obj.baseval - super.baseval) <= super.diffval,
   tested with an unsigned compare. */
3215 M_CMP(REG_ITMP3, REG_ITMP2);
3216 emit_classcast_check(cd, iptr, BRANCH_ULE, REG_ITMP3, s1);
3219 emit_label(cd, BRANCH_LABEL_5);
3222 if (super == NULL) {
3223 emit_label(cd, BRANCH_LABEL_1);
3224 emit_label(cd, BRANCH_LABEL_4);
3227 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3230 /* array type cast-check */
3232 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3233 M_AST(s1, REG_SP, 0 * 4);
3235 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3236 codegen_addpatchref(cd, PATCHER_builtin_arraycheckcast,
3237 iptr->sx.s23.s3.c.ref, 0);
3240 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3241 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3244 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
/* Builtin returns 0 on failure: branch-if-equal throws. */
3246 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);
3248 d = codegen_reg_of_dst(jd, iptr, s1);
3252 emit_store_dst(jd, iptr, d);
/* INSTANCEOF.  Same structure as CHECKCAST (interface path vs class
   path, patched flags selection for unresolved classes), but instead
   of throwing it materializes a 0/1 result in d. */
3255 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3259 vftbl_t *supervftbl;
3262 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3268 super = iptr->sx.s23.s3.c.cls;
3269 superindex = super->index;
3270 supervftbl = super->vftbl;
3273 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3274 CODEGEN_CRITICAL_SECTION_NEW;
3276 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3277 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
3280 M_INTMOVE(s1, REG_ITMP1);
3286 /* if class is not resolved, check which code to call */
3288 if (super == NULL) {
3290 emit_label_beq(cd, BRANCH_LABEL_1);
3292 codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
3293 iptr->sx.s23.s3.c.ref, 0);
/* The 0 immediate is patched with super->flags once resolved. */
3295 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3296 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3297 emit_label_beq(cd, BRANCH_LABEL_2);
3300 /* interface instanceof code */
3302 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3303 if (super != NULL) {
3305 emit_label_beq(cd, BRANCH_LABEL_3);
3308 M_ALD(REG_ITMP1, s1, OFFSET(java_objectheader, vftbl));
3310 if (super == NULL) {
3311 codegen_addpatchref(cd, PATCHER_instanceof_interface,
3312 iptr->sx.s23.s3.c.ref, 0);
3316 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3317 M_ISUB_IMM32(superindex, REG_ITMP3);
/* disp is a hand-counted byte length of the skip-ahead code;
   keep it in sync with the emitted instruction sizes below. */
3320 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3321 6 /* jcc */ + 5 /* mov_imm_reg */);
3324 M_ALD32(REG_ITMP1, REG_ITMP1,
3325 OFFSET(vftbl_t, interfacetable[0]) -
3326 superindex * sizeof(methodptr*));
3328 /* emit_setcc_reg(cd, CC_A, d); */
3329 /* emit_jcc(cd, CC_BE, 5); */
3334 emit_label_br(cd, BRANCH_LABEL_4);
3336 emit_label(cd, BRANCH_LABEL_3);
3339 /* class instanceof code */
3341 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3342 if (super == NULL) {
3343 emit_label(cd, BRANCH_LABEL_2);
3347 emit_label_beq(cd, BRANCH_LABEL_5);
3350 M_ALD(REG_ITMP1, s1, OFFSET(java_objectheader, vftbl));
3352 if (super == NULL) {
3353 codegen_addpatchref(cd, PATCHER_instanceof_class,
3354 iptr->sx.s23.s3.c.ref, 0);
3357 M_MOV_IMM(supervftbl, REG_ITMP2);
/* baseval/diffval subtype range test under the critical section
   (guards concurrent vftbl renumbering). */
3359 CODEGEN_CRITICAL_SECTION_START;
3361 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3362 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3363 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3365 CODEGEN_CRITICAL_SECTION_END;
3367 M_ISUB(REG_ITMP2, REG_ITMP1);
3368 M_CLR(d); /* may be REG_ITMP2 */
3369 M_CMP(REG_ITMP3, REG_ITMP1);
3374 emit_label(cd, BRANCH_LABEL_5);
3377 if (super == NULL) {
3378 emit_label(cd, BRANCH_LABEL_1);
3379 emit_label(cd, BRANCH_LABEL_4);
3382 emit_store_dst(jd, iptr, d);
/* MULTIANEWARRAY: the dimension sizes are spilled to the stack and
   BUILTIN_multianewarray is called with (dimcount, classinfo,
   &dimensions). */
3386 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3388 /* check for negative sizes and copy sizes to stack if necessary */
3390 MCODECHECK((iptr->s1.argcount << 1) + 64);
3392 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3393 /* copy SAVEDVAR sizes to stack */
3394 var = VAR(iptr->sx.s23.s2.args[s1]);
3396 /* Already Preallocated? */
3397 if (!(var->flags & PREALLOC)) {
3398 if (var->flags & INMEMORY) {
3399 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff * 4);
3400 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3403 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3407 /* is a patcher function set? */
3409 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3410 codegen_addpatchref(cd, PATCHER_builtin_multianewarray,
3411 iptr->sx.s23.s3.c.ref, 0);
3417 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3419 /* a0 = dimension count */
3421 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3423 /* a1 = arraydescriptor */
3425 M_IST_IMM(disp, REG_SP, 1 * 4);
3427 /* a2 = pointer to dimensions = stack pointer */
3429 M_MOV(REG_SP, REG_ITMP1);
3430 M_AADD_IMM(3 * 4, REG_ITMP1);
3431 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3433 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3436 /* check for exception before result assignment */
3438 emit_exception_check(cd, iptr);
3440 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3441 M_INTMOVE(REG_RESULT, s1);
3442 emit_store_dst(jd, iptr, s1);
/* Unknown opcode: internal error — should be unreachable. */
3446 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3451 } /* for instruction */
3455 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3458 #if defined(ENABLE_SSA)
3460 /* by edge splitting, in Blocks with phi moves there can only */
3461 /* be a goto as last command, no other Jump/Branch Command */
3462 if (!last_cmd_was_goto)
3463 codegen_insert_phi_moves(jd, bptr);
3468 /* At the end of a basic block we may have to append some nops,
3469 because the patcher stub calling code might be longer than the
3470 actual instruction. So codepatching does not change the
3471 following block unintentionally. */
3473 if (cd->mcodeptr < cd->lastmcodeptr) {
3474 while (cd->mcodeptr < cd->lastmcodeptr) {
3479 } /* if (bptr -> flags >= BBREACHED) */
3480 } /* for basic block */
3482 dseg_createlinenumbertable(cd);
3484 /* generate stubs */
3486 emit_patcher_stubs(jd);
3487 REPLACEMENT_EMIT_STUBS(jd);
3489 /* everything's ok */
3494 #if defined(ENABLE_SSA)
3495 void codegen_insert_phi_moves(jitdata *jd, basicblock *bptr) {
/* Emit the register/memory moves required by SSA phi functions at the
   end of basic block bptr.  For each recorded phi move the source and
   target lifetimes are looked up (local var vs stackslot), their
   register offsets and flags extracted, and cg_move() emits the actual
   transfer.  Moves whose lifetimes were already joined are skipped. */
3496 /* look for phi moves */
3497 int t_a,s_a,i, type;
3498 int t_lt, s_lt; /* lifetime indices of phi_moves */
3499 s4 t_regoff, s_regoff, s_flags, t_flags;
3508 /* Moves from phi functions with highest indices have to be */
3509 /* inserted first, since this is the order as is used for */
3510 /* conflict resolution */
3511 for(i = ls->num_phi_moves[bptr->nr] - 1; i >= 0 ; i--) {
/* phi_moves[block][i] = { target lifetime index, source index }. */
3512 t_a = ls->phi_moves[bptr->nr][i][0];
3513 s_a = ls->phi_moves[bptr->nr][i][1];
3514 #if defined(SSA_DEBUG_VERBOSE)
3516 printf("BB %3i Move %3i <- %3i ", bptr->nr, t_a, s_a);
3519 /* local var lifetimes */
3520 t_lt = ls->maxlifetimes + t_a;
3521 type = ls->lifetime[t_lt].type;
3525 type = ls->lifetime[t_lt].local_ss->s->type;
3526 /* stackslot lifetime */
3530 #if defined(SSA_DEBUG_VERBOSE)
3532 printf("...returning - phi lifetimes where joined\n");
3538 /* local var lifetimes */
3539 s_lt = ls->maxlifetimes + s_a;
3540 type = ls->lifetime[s_lt].type;
3544 type = ls->lifetime[s_lt].type;
3545 /* stackslot lifetime */
3549 #if defined(SSA_DEBUG_VERBOSE)
3551 printf("...returning - phi lifetimes where joined\n");
/* Resolve target location: local variable vs stackslot. */
3557 t_flags = VAR(t_a)->flags;
3558 t_regoff = VAR(t_a)->vv.regoff;
3562 t_flags = ls->lifetime[t_lt].local_ss->s->flags;
3563 t_regoff = ls->lifetime[t_lt].local_ss->s->regoff;
3567 /* local var move */
3568 s_flags = VAR(s_a)->flags;
3569 s_regoff = VAR(s_a)->vv.regoff;
3571 /* stackslot lifetime */
3572 s_flags = ls->lifetime[s_lt].local_ss->s->flags;
3573 s_regoff = ls->lifetime[s_lt].local_ss->s->regoff;
3577 #if defined(SSA_DEBUG_VERBOSE)
3579 printf("...returning - phi lifetimes where joined\n");
/* Emit the actual move now that both locations are known. */
3584 cg_move(cd, type, s_regoff, s_flags, t_regoff, t_flags);
3586 #if defined(SSA_DEBUG_VERBOSE)
3587 if (compileverbose) {
3588 if (IS_INMEMORY(t_flags) && IS_INMEMORY(s_flags)) {
3590 printf("M%3i <- M%3i",t_regoff,s_regoff);
3592 else if (IS_INMEMORY(s_flags)) {
3594 printf("R%3i <- M%3i",t_regoff,s_regoff);
3596 else if (IS_INMEMORY(t_flags)) {
3598 printf("M%3i <- R%3i",t_regoff,s_regoff);
3602 printf("R%3i <- R%3i",t_regoff,s_regoff);
3606 #endif /* defined(SSA_DEBUG_VERBOSE) */
/* Emit a type-aware move between two storage locations (stack slot or
   register) for a phi move.  Memory-to-memory transfers go through the
   FPU (floats/doubles) or REG_ITMP1 (ints/longs, word by word); only
   32-bit ints may live in integer registers on i386, so flt/dbl and
   long register moves are reported as errors via log_text. */
3610 void cg_move(codegendata *cd, s4 type, s4 src_regoff, s4 src_flags,
3611 s4 dst_regoff, s4 dst_flags) {
3612 if ((IS_INMEMORY(dst_flags)) && (IS_INMEMORY(src_flags))) {
/* Memory -> memory: skip entirely when source and target slots
   coincide. */
3614 if (dst_regoff != src_regoff) {
3615 if (!IS_2_WORD_TYPE(type)) {
3616 if (IS_FLT_DBL_TYPE(type)) {
3617 emit_flds_membase(cd, REG_SP, src_regoff * 4);
3618 emit_fstps_membase(cd, REG_SP, dst_regoff * 4);
3620 emit_mov_membase_reg(cd, REG_SP, src_regoff * 4,
3622 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, dst_regoff * 4);
3624 } else { /* LONG OR DOUBLE */
3625 if (IS_FLT_DBL_TYPE(type)) {
3626 emit_fldl_membase( cd, REG_SP, src_regoff * 4);
3627 emit_fstpl_membase(cd, REG_SP, dst_regoff * 4);
/* Long: copy low and high 32-bit words separately. */
3629 emit_mov_membase_reg(cd, REG_SP, src_regoff * 4,
3631 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, dst_regoff * 4);
3632 emit_mov_membase_reg(cd, REG_SP, src_regoff * 4 + 4,
3634 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP,
3635 dst_regoff * 4 + 4);
/* At least one side is a register: only plain ints are legal here. */
3640 if (IS_FLT_DBL_TYPE(type)) {
3641 log_text("cg_move: flt/dbl type have to be in memory\n");
3644 if (IS_2_WORD_TYPE(type)) {
3645 log_text("cg_move: longs have to be in memory\n");
3648 if (IS_INMEMORY(src_flags)) {
3650 emit_mov_membase_reg(cd, REG_SP, src_regoff * 4, dst_regoff);
3651 } else if (IS_INMEMORY(dst_flags)) {
3653 emit_mov_reg_membase(cd, src_regoff, REG_SP, dst_regoff * 4);
3656 /* only ints can be in regs on i386 */
3657 M_INTMOVE(src_regoff,dst_regoff);
3661 #endif /* defined(ENABLE_SSA) */
3664 /* codegen_emit_stub_compiler **************************************************
3666 Emit a stub routine which calls the compiler.
3668 *******************************************************************************/
3670 void codegen_emit_stub_compiler(jitdata *jd)
/* Emit the tiny trampoline installed before a method is compiled:
   load the methodinfo into REG_ITMP1 and jump into
   asm_call_jit_compiler, which compiles the method and re-dispatches. */
3675 /* get required compiler data */
3680 /* code for the stub */
3682 M_MOV_IMM(m, REG_ITMP1);
3683 M_MOV_IMM(asm_call_jit_compiler, REG_ITMP3);
3688 /* codegen_emit_stub_native ****************************************************
3690 Emits a stub routine which calls a native method.
3692 *******************************************************************************/
3694 void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f)
/* Emit the wrapper that bridges from JIT-compiled code into a native
   (JNI) method: builds a frame holding a stackframeinfo and a
   localref_table, rewrites the Java arguments into the native calling
   layout (prepending JNIEnv* and, for static methods, the class),
   calls f, preserves the return value across the frame teardown, and
   forwards any pending exception to asm_handle_nat_exception. */
3701 s4 i, j; /* count variables */
3705 /* get required compiler data */
3711 /* set some variables */
/* Natives get 1 extra leading arg (JNIEnv*), statics get 2 (+ class). */
3714 nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;
3716 /* calculate stackframe size */
3718 cd->stackframesize =
3719 sizeof(stackframeinfo) / SIZEOF_VOID_P +
3720 sizeof(localref_table) / SIZEOF_VOID_P +
3721 1 + /* function pointer */
3722 4 + /* 4 arguments (start_native_call) */
3725 /* keep stack 16-byte aligned */
3727 cd->stackframesize |= 0x3;
3729 /* create method header */
3731 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3732 (void) dseg_add_unique_s4(cd, cd->stackframesize * 4); /* FrameSize */
3733 (void) dseg_add_unique_s4(cd, 0); /* IsSync */
3734 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3735 (void) dseg_add_unique_s4(cd, 0); /* IntSave */
3736 (void) dseg_add_unique_s4(cd, 0); /* FltSave */
3737 (void) dseg_addlinenumbertablesize(cd);
3738 (void) dseg_add_unique_s4(cd, 0); /* ExTableSize */
3740 #if defined(ENABLE_PROFILING)
3741 /* generate native method profiling code */
3743 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
3744 /* count frequency */
3746 M_MOV_IMM(code, REG_ITMP1);
3747 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
3751 /* calculate stackframe size for native function */
3753 M_ASUB_IMM(cd->stackframesize * 4, REG_SP);
3755 #if !defined(NDEBUG)
3756 emit_verbosecall_enter(jd);
3759 /* get function address (this must happen before the stackframeinfo) */
3761 #if !defined(WITH_STATIC_CLASSPATH)
/* With dynamic classpath the native address is resolved lazily via a
   patcher; the immediate below is overwritten once resolved. */
3763 codegen_addpatchref(cd, PATCHER_resolve_native, m, 0);
3766 M_AST_IMM((ptrint) f, REG_SP, 4 * 4);
3768 /* Mark the whole fpu stack as free for native functions (only for saved */
3769 /* register count == 0). */
3771 emit_ffree_reg(cd, 0);
3772 emit_ffree_reg(cd, 1);
3773 emit_ffree_reg(cd, 2);
3774 emit_ffree_reg(cd, 3);
3775 emit_ffree_reg(cd, 4);
3776 emit_ffree_reg(cd, 5);
3777 emit_ffree_reg(cd, 6);
3778 emit_ffree_reg(cd, 7);
3780 /* prepare data structures for native function call */
/* Arguments for codegen_start_native_call: frame top, (0), address of
   the first caller argument, and the resolved function pointer. */
3782 M_MOV(REG_SP, REG_ITMP1);
3783 M_AADD_IMM(cd->stackframesize * 4, REG_ITMP1);
3785 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3786 M_IST_IMM(0, REG_SP, 1 * 4);
3789 M_MOV(REG_SP, REG_ITMP2);
3790 M_AADD_IMM(cd->stackframesize * 4 + SIZEOF_VOID_P, REG_ITMP2);
3792 M_AST(REG_ITMP2, REG_SP, 2 * 4);
3793 M_ALD(REG_ITMP3, REG_SP, cd->stackframesize * 4);
3794 M_AST(REG_ITMP3, REG_SP, 3 * 4);
3795 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
/* Reload the native function address saved at slot 4 above. */
3798 M_ALD(REG_ITMP3, REG_SP, 4 * 4);
3800 /* copy arguments into new stackframe */
/* j runs over the native descriptor, offset by the implicit
   JNIEnv*/class parameters (nativeparams). */
3802 for (i = md->paramcount - 1, j = i + nativeparams; i >= 0; i--, j--) {
3803 t = md->paramtypes[i].type;
3805 if (!md->params[i].inmemory) {
3806 /* no integer argument registers */
3807 } else { /* float/double in memory can be copied like int/longs */
3808 s1 = (md->params[i].regoff + cd->stackframesize + 1) * 4;
3809 s2 = nmd->params[j].regoff * 4;
3811 M_ILD(REG_ITMP1, REG_SP, s1);
3812 M_IST(REG_ITMP1, REG_SP, s2);
3813 if (IS_2_WORD_TYPE(t)) {
3814 M_ILD(REG_ITMP1, REG_SP, s1 + 4);
3815 M_IST(REG_ITMP1, REG_SP, s2 + 4);
3820 /* if function is static, put class into second argument */
3822 if (m->flags & ACC_STATIC)
3823 M_AST_IMM(m->class, REG_SP, 1 * 4);
3825 /* put env into first argument */
3827 M_AST_IMM(_Jv_env, REG_SP, 0 * 4);
3829 /* call the native function */
3833 /* save return value */
/* The result is parked in the frame while the native-call teardown
   below clobbers the result registers. */
3835 switch (md->returntype.type) {
3838 M_IST(REG_RESULT, REG_SP, 1 * 4);
3841 M_LST(REG_RESULT_PACKED, REG_SP, 1 * 4);
3844 emit_fsts_membase(cd, REG_SP, 1 * 4);
3847 emit_fstl_membase(cd, REG_SP, 1 * 4);
3853 #if !defined(NDEBUG)
3854 emit_verbosecall_exit(jd);
3857 /* remove native stackframe info */
3859 M_MOV(REG_SP, REG_ITMP1);
3860 M_AADD_IMM(cd->stackframesize * 4, REG_ITMP1);
3862 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3863 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
/* codegen_finish_native_call returns the pending exception (if any)
   in REG_RESULT; stash it before reloading the real return value. */
3865 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
3867 /* restore return value */
3869 switch (md->returntype.type) {
3872 M_ILD(REG_RESULT, REG_SP, 1 * 4);
3875 M_LLD(REG_RESULT_PACKED, REG_SP, 1 * 4);
3878 emit_flds_membase(cd, REG_SP, 1 * 4);
3881 emit_fldl_membase(cd, REG_SP, 1 * 4);
3887 M_AADD_IMM(cd->stackframesize * 4, REG_SP);
3889 /* check for exception */
3896 /* handle exception */
3898 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
3899 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
/* Back up the fault PC so it points into the call instruction —
   presumably 2 bytes is the size relevant here; confirm against
   asm_handle_nat_exception's expectations. */
3900 M_ASUB_IMM(2, REG_ITMP2_XPC);
3902 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
3905 /* generate patcher stubs */
3907 emit_patcher_stubs(jd);
3912 * These are local overrides for various environment variables in Emacs.
3913 * Please do not remove this and leave it at the end of the file, where
3914 * Emacs will automagically detect them.
3915 * ---------------------------------------------------------------------
3918 * indent-tabs-mode: t
3922 * vim:noexpandtab:sw=4:ts=4: