1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
25 $Id: codegen.c 7692 2007-04-12 14:47:24Z twisti $
37 #include "vm/jit/i386/md-abi.h"
39 #include "vm/jit/i386/codegen.h"
40 #include "vm/jit/i386/emit.h"
42 #include "mm/memory.h"
43 #include "native/jni.h"
44 #include "native/native.h"
46 #if defined(ENABLE_THREADS)
47 # include "threads/native/lock.h"
50 #include "vm/builtin.h"
51 #include "vm/exceptions.h"
52 #include "vm/global.h"
53 #include "vm/stringlocal.h"
56 #include "vm/jit/asmpart.h"
57 #include "vm/jit/codegen-common.h"
58 #include "vm/jit/dseg.h"
59 #include "vm/jit/emit-common.h"
60 #include "vm/jit/jit.h"
61 #include "vm/jit/parse.h"
62 #include "vm/jit/patcher.h"
63 #include "vm/jit/reg.h"
64 #include "vm/jit/replace.h"
65 #include "vm/jit/stacktrace.h"
67 #if defined(ENABLE_SSA)
68 # include "vm/jit/optimizing/lsra.h"
69 # include "vm/jit/optimizing/ssa.h"
70 #elif defined(ENABLE_LSRA)
71 # include "vm/jit/allocator/lsra.h"
74 #include "vmcore/loader.h"
75 #include "vmcore/options.h"
76 #include "vmcore/utf8.h"
79 /* codegen_emit ****************************************************************
81 Generates machine code.
83 *******************************************************************************/
85 #if defined(ENABLE_SSA)
86 void cg_move(codegendata *cd, s4 type, s4 src_regoff, s4 src_flags,
87 s4 dst_regoff, s4 dst_flags);
88 void codegen_insert_phi_moves(jitdata *jd, basicblock *bptr);
91 bool codegen_emit(jitdata *jd)
97 s4 len, s1, s2, s3, d, disp;
103 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
104 builtintable_entry *bte;
107 unresolved_field *uf;
110 #if defined(ENABLE_SSA)
112 bool last_cmd_was_goto;
114 last_cmd_was_goto = false;
118 /* get required compiler data */
125 /* prevent compiler warnings */
136 s4 savedregs_num = 0;
139 /* space to save used callee saved registers */
141 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
143 /* float register are saved on 2 4-byte stackslots */
144 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse) * 2;
146 cd->stackframesize = rd->memuse + savedregs_num;
149 #if defined(ENABLE_THREADS)
150 /* space to save argument of monitor_enter */
152 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
153 /* reserve 2 slots for long/double return values for monitorexit */
155 if (IS_2_WORD_TYPE(m->parseddesc->returntype.type))
156 cd->stackframesize += 2;
158 cd->stackframesize++;
162 /* create method header */
164 /* Keep stack of non-leaf functions 16-byte aligned. */
166 if (!jd->isleafmethod)
167 cd->stackframesize |= 0x3;
169 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
170 (void) dseg_add_unique_s4(cd, cd->stackframesize * 4); /* FrameSize */
172 #if defined(ENABLE_THREADS)
173 /* IsSync contains the offset relative to the stack pointer for the
174 argument of monitor_exit used in the exception handler. Since the
175 offset could be zero and give a wrong meaning of the flag it is
179 if (checksync && (m->flags & ACC_SYNCHRONIZED))
180 (void) dseg_add_unique_s4(cd, (rd->memuse + 1) * 4); /* IsSync */
183 (void) dseg_add_unique_s4(cd, 0); /* IsSync */
185 (void) dseg_add_unique_s4(cd, jd->isleafmethod); /* IsLeaf */
186 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
187 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
189 /* Add a reference for the length of the line number table. We don't
190 know the size yet, since we evaluate the information during code
191 generation, to save one additional iteration over all the
192 instructions. During code optimization the position could have changed
193 compared to the information obtained from the class file */
194 (void) dseg_addlinenumbertablesize(cd);
196 (void) dseg_add_unique_s4(cd, jd->exceptiontablelength); /* ExTableSize */
198 /* create exception table */
200 for (ex = jd->exceptiontable; ex != NULL; ex = ex->down) {
201 dseg_add_target(cd, ex->start);
202 dseg_add_target(cd, ex->end);
203 dseg_add_target(cd, ex->handler);
204 (void) dseg_add_unique_address(cd, ex->catchtype.any);
207 #if defined(ENABLE_PROFILING)
208 /* generate method profiling code */
210 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
211 /* count frequency */
213 M_MOV_IMM(code, REG_ITMP3);
214 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
218 /* create stack frame (if necessary) */
220 if (cd->stackframesize)
221 M_ASUB_IMM(cd->stackframesize * 4, REG_SP);
223 /* save return address and used callee saved registers */
225 p = cd->stackframesize;
226 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
227 p--; M_AST(rd->savintregs[i], REG_SP, p * 4);
229 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
230 p-=2; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 4);
233 /* take arguments out of register or stack frame */
238 for (p = 0, l = 0; p < md->paramcount; p++) {
239 t = md->paramtypes[p].type;
241 #if defined(ENABLE_SSA)
246 varindex = jd->local_map[l * 5 + t];
248 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
251 if (varindex == UNUSED)
256 s1 = md->params[p].regoff;
258 if (IS_INT_LNG_TYPE(t)) { /* integer args */
259 if (!md->params[p].inmemory) { /* register arguments */
260 log_text("integer register argument");
262 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
263 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
265 else { /* reg arg -> spilled */
266 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
269 else { /* stack arguments */
270 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
271 emit_mov_membase_reg( /* + 4 for return address */
272 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4, var->vv.regoff);
273 /* + 4 for return address */
275 else { /* stack arg -> spilled */
276 if (!IS_2_WORD_TYPE(t)) {
277 #if defined(ENABLE_SSA)
278 /* no copy avoiding by now possible with SSA */
280 emit_mov_membase_reg( /* + 4 for return address */
281 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4,
283 emit_mov_reg_membase(
284 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
287 #endif /*defined(ENABLE_SSA)*/
288 /* reuse Stackslot and avoid copying */
289 var->vv.regoff = cd->stackframesize + s1 + 1;
293 #if defined(ENABLE_SSA)
294 /* no copy avoiding by now possible with SSA */
296 emit_mov_membase_reg( /* + 4 for return address */
297 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4,
299 emit_mov_reg_membase(
300 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
301 emit_mov_membase_reg( /* + 4 for return address */
302 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4 + 4,
304 emit_mov_reg_membase(
305 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4 + 4);
308 #endif /*defined(ENABLE_SSA)*/
309 /* reuse Stackslot and avoid copying */
310 var->vv.regoff = cd->stackframesize + s1 + 1;
315 else { /* floating args */
316 if (!md->params[p].inmemory) { /* register arguments */
317 log_text("There are no float argument registers!");
319 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
320 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
321 } else { /* reg arg -> spilled */
322 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 4 */
326 else { /* stack arguments */
327 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
330 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
332 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
337 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
339 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
342 } else { /* stack-arg -> spilled */
343 #if defined(ENABLE_SSA)
344 /* no copy avoiding by now possible with SSA */
346 emit_mov_membase_reg(
347 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4, REG_ITMP1);
348 emit_mov_reg_membase(
349 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
352 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
353 emit_fstps_membase(cd, REG_SP, var->vv.regoff * 4);
357 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
358 emit_fstpl_membase(cd, REG_SP, var->vv.regoff * 4);
362 #endif /*defined(ENABLE_SSA)*/
363 /* reuse Stackslot and avoid copying */
364 var->vv.regoff = cd->stackframesize + s1 + 1;
370 /* call monitorenter function */
372 #if defined(ENABLE_THREADS)
373 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
376 if (m->flags & ACC_STATIC) {
377 M_MOV_IMM(&m->class->object.header, REG_ITMP1);
380 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 4 + 4);
383 M_ALD_MEM(REG_ITMP1, EXCEPTION_HARDWARE_NULLPOINTER);
386 M_AST(REG_ITMP1, REG_SP, s1 * 4);
387 M_AST(REG_ITMP1, REG_SP, 0 * 4);
388 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
394 emit_verbosecall_enter(jd);
399 #if defined(ENABLE_SSA)
400 /* with SSA Header is Basic Block 0 - insert phi Moves if necessary */
402 codegen_insert_phi_moves(jd, ls->basicblocks[0]);
405 /* end of header generation */
407 /* create replacement points */
409 REPLACEMENT_POINTS_INIT(cd, jd);
411 /* walk through all basic blocks */
413 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
415 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
417 if (bptr->flags >= BBREACHED) {
418 /* branch resolving */
420 codegen_resolve_branchrefs(cd, bptr);
422 /* handle replacement points */
424 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
426 #if defined(ENABLE_REPLACEMENT)
427 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
428 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
430 disp = (s4) &(m->hitcountdown);
431 M_ISUB_IMM_MEMABS(1, disp);
437 /* copy interface registers to their destination */
442 #if defined(ENABLE_PROFILING)
443 /* generate basic block profiling code */
445 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
446 /* count frequency */
448 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
449 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
453 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
454 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
457 # if defined(ENABLE_SSA)
459 last_cmd_was_goto = false;
463 var = VAR(bptr->invars[len]);
464 if (bptr->type != BBTYPE_STD) {
465 if (!IS_2_WORD_TYPE(var->type)) {
466 if (bptr->type == BBTYPE_EXH) {
467 d = codegen_reg_of_var(0, var, REG_ITMP1);
468 M_INTMOVE(REG_ITMP1, d);
469 emit_store(jd, NULL, var, d);
473 log_text("copy interface registers(EXH, SBR): longs \
474 have to be in memory (begin 1)");
482 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
486 var = VAR(bptr->invars[len]);
487 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
488 if (!IS_2_WORD_TYPE(var->type)) {
489 if (bptr->type == BBTYPE_EXH) {
490 d = codegen_reg_of_var(0, var, REG_ITMP1);
491 M_INTMOVE(REG_ITMP1, d);
492 emit_store(jd, NULL, var, d);
496 log_text("copy interface registers: longs have to be in \
503 assert((var->flags & INOUT));
508 /* walk through all instructions */
513 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
514 if (iptr->line != currentline) {
515 dseg_addlinenumber(cd, iptr->line);
516 currentline = iptr->line;
519 MCODECHECK(1024); /* 1kB should be enough */
522 case ICMD_NOP: /* ... ==> ... */
523 case ICMD_POP: /* ..., value ==> ... */
524 case ICMD_POP2: /* ..., value, value ==> ... */
527 case ICMD_INLINE_START:
529 REPLACEMENT_POINT_INLINE_START(cd, iptr);
532 case ICMD_INLINE_BODY:
534 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
535 dseg_addlinenumber_inline_start(cd, iptr);
536 dseg_addlinenumber(cd, iptr->line);
539 case ICMD_INLINE_END:
541 dseg_addlinenumber_inline_end(cd, iptr);
542 dseg_addlinenumber(cd, iptr->line);
545 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
547 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
548 emit_nullpointer_check(cd, iptr, s1);
551 /* constant operations ************************************************/
553 case ICMD_ICONST: /* ... ==> ..., constant */
555 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
556 ICONST(d, iptr->sx.val.i);
557 emit_store_dst(jd, iptr, d);
560 case ICMD_LCONST: /* ... ==> ..., constant */
562 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
563 LCONST(d, iptr->sx.val.l);
564 emit_store_dst(jd, iptr, d);
567 case ICMD_FCONST: /* ... ==> ..., constant */
569 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
570 if (iptr->sx.val.f == 0.0) {
574 if (iptr->sx.val.i == 0x80000000) {
578 } else if (iptr->sx.val.f == 1.0) {
581 } else if (iptr->sx.val.f == 2.0) {
587 disp = dseg_add_float(cd, iptr->sx.val.f);
588 emit_mov_imm_reg(cd, 0, REG_ITMP1);
590 emit_flds_membase(cd, REG_ITMP1, disp);
592 emit_store_dst(jd, iptr, d);
595 case ICMD_DCONST: /* ... ==> ..., constant */
597 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
598 if (iptr->sx.val.d == 0.0) {
602 if (iptr->sx.val.l == 0x8000000000000000LL) {
606 } else if (iptr->sx.val.d == 1.0) {
609 } else if (iptr->sx.val.d == 2.0) {
615 disp = dseg_add_double(cd, iptr->sx.val.d);
616 emit_mov_imm_reg(cd, 0, REG_ITMP1);
618 emit_fldl_membase(cd, REG_ITMP1, disp);
620 emit_store_dst(jd, iptr, d);
623 case ICMD_ACONST: /* ... ==> ..., constant */
625 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
627 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
628 codegen_addpatchref(cd, PATCHER_aconst,
629 iptr->sx.val.c.ref, 0);
634 if (iptr->sx.val.anyptr == NULL)
637 M_MOV_IMM(iptr->sx.val.anyptr, d);
639 emit_store_dst(jd, iptr, d);
643 /* load/store/copy/move operations ************************************/
657 emit_copy(jd, iptr, VAROP(iptr->s1), VAROP(iptr->dst));
661 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
662 emit_copy(jd, iptr, VAROP(iptr->s1), VAROP(iptr->dst));
666 /* integer operations *************************************************/
668 case ICMD_INEG: /* ..., value ==> ..., - value */
670 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
671 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
674 emit_store_dst(jd, iptr, d);
677 case ICMD_LNEG: /* ..., value ==> ..., - value */
679 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
680 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
682 M_NEG(GET_LOW_REG(d));
683 M_IADDC_IMM(0, GET_HIGH_REG(d));
684 M_NEG(GET_HIGH_REG(d));
685 emit_store_dst(jd, iptr, d);
688 case ICMD_I2L: /* ..., value ==> ..., value */
690 s1 = emit_load_s1(jd, iptr, EAX);
691 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
694 M_LNGMOVE(EAX_EDX_PACKED, d);
695 emit_store_dst(jd, iptr, d);
698 case ICMD_L2I: /* ..., value ==> ..., value */
700 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
701 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
703 emit_store_dst(jd, iptr, d);
706 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
708 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
709 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
713 emit_store_dst(jd, iptr, d);
716 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
718 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
719 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
721 emit_store_dst(jd, iptr, d);
724 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
726 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
727 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
729 emit_store_dst(jd, iptr, d);
733 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
735 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
736 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
737 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
744 emit_store_dst(jd, iptr, d);
748 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
749 /* sx.val.i = constant */
751 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
752 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
754 /* `inc reg' is slower on p4's (regarding to ia32
755 optimization reference manual and benchmarks) and as
759 M_IADD_IMM(iptr->sx.val.i, d);
760 emit_store_dst(jd, iptr, d);
763 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
765 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
766 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
767 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
768 M_INTMOVE(s1, GET_LOW_REG(d));
769 M_IADD(s2, GET_LOW_REG(d));
770 /* don't use REG_ITMP1 */
771 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
772 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
773 M_INTMOVE(s1, GET_HIGH_REG(d));
774 M_IADDC(s2, GET_HIGH_REG(d));
775 emit_store_dst(jd, iptr, d);
778 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
779 /* sx.val.l = constant */
781 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
782 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
784 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
785 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
786 emit_store_dst(jd, iptr, d);
789 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
791 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
792 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
793 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
795 M_INTMOVE(s1, REG_ITMP1);
796 M_ISUB(s2, REG_ITMP1);
797 M_INTMOVE(REG_ITMP1, d);
803 emit_store_dst(jd, iptr, d);
806 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
807 /* sx.val.i = constant */
809 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
810 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
812 M_ISUB_IMM(iptr->sx.val.i, d);
813 emit_store_dst(jd, iptr, d);
816 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
818 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
819 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
820 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
821 if (s2 == GET_LOW_REG(d)) {
822 M_INTMOVE(s1, REG_ITMP1);
823 M_ISUB(s2, REG_ITMP1);
824 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
827 M_INTMOVE(s1, GET_LOW_REG(d));
828 M_ISUB(s2, GET_LOW_REG(d));
830 /* don't use REG_ITMP1 */
831 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
832 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
833 if (s2 == GET_HIGH_REG(d)) {
834 M_INTMOVE(s1, REG_ITMP2);
835 M_ISUBB(s2, REG_ITMP2);
836 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
839 M_INTMOVE(s1, GET_HIGH_REG(d));
840 M_ISUBB(s2, GET_HIGH_REG(d));
842 emit_store_dst(jd, iptr, d);
845 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
846 /* sx.val.l = constant */
848 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
849 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
851 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
852 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
853 emit_store_dst(jd, iptr, d);
856 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
858 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
859 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
860 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
867 emit_store_dst(jd, iptr, d);
870 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
871 /* sx.val.i = constant */
873 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
874 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
875 M_IMUL_IMM(s1, iptr->sx.val.i, d);
876 emit_store_dst(jd, iptr, d);
879 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
881 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
882 s2 = emit_load_s2_low(jd, iptr, EDX);
883 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
885 M_INTMOVE(s1, REG_ITMP2);
886 M_IMUL(s2, REG_ITMP2);
888 s1 = emit_load_s1_low(jd, iptr, EAX);
889 s2 = emit_load_s2_high(jd, iptr, EDX);
892 M_IADD(EDX, REG_ITMP2);
894 s1 = emit_load_s1_low(jd, iptr, EAX);
895 s2 = emit_load_s2_low(jd, iptr, EDX);
898 M_INTMOVE(EAX, GET_LOW_REG(d));
899 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
901 emit_store_dst(jd, iptr, d);
904 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
905 /* sx.val.l = constant */
907 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
908 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
909 ICONST(EAX, iptr->sx.val.l);
911 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
912 M_IADD(REG_ITMP2, EDX);
913 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
914 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
915 M_IADD(REG_ITMP2, EDX);
916 M_LNGMOVE(EAX_EDX_PACKED, d);
917 emit_store_dst(jd, iptr, d);
920 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
922 s1 = emit_load_s1(jd, iptr, EAX);
923 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
924 d = codegen_reg_of_dst(jd, iptr, EAX);
925 emit_arithmetic_check(cd, iptr, s2);
927 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
929 /* check as described in jvm spec */
931 M_CMP_IMM(0x80000000, EAX);
938 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
939 emit_store_dst(jd, iptr, d);
942 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
944 s1 = emit_load_s1(jd, iptr, EAX);
945 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
946 d = codegen_reg_of_dst(jd, iptr, EDX);
947 emit_arithmetic_check(cd, iptr, s2);
949 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
951 /* check as described in jvm spec */
953 M_CMP_IMM(0x80000000, EAX);
961 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
962 emit_store_dst(jd, iptr, d);
965 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
966 /* sx.val.i = constant */
968 /* TODO: optimize for `/ 2' */
969 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
970 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
974 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
975 M_SRA_IMM(iptr->sx.val.i, d);
976 emit_store_dst(jd, iptr, d);
979 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
980 /* sx.val.i = constant */
982 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
983 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
985 M_MOV(s1, REG_ITMP1);
989 M_AND_IMM(iptr->sx.val.i, d);
991 M_BGE(2 + 2 + 6 + 2);
992 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
994 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
996 emit_store_dst(jd, iptr, d);
999 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1000 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1002 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
1003 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1005 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
1006 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
1007 /* XXX could be optimized */
1008 emit_arithmetic_check(cd, iptr, REG_ITMP3);
1010 bte = iptr->sx.s23.s3.bte;
1013 M_LST(s2, REG_SP, 2 * 4);
1015 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1016 M_LST(s1, REG_SP, 0 * 4);
1018 M_MOV_IMM(bte->fp, REG_ITMP3);
1020 emit_store_dst(jd, iptr, d);
1023 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1024 /* sx.val.i = constant */
1026 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1027 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1029 M_TEST(GET_HIGH_REG(d));
1031 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1032 M_IADDC_IMM(0, GET_HIGH_REG(d));
1033 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1034 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1035 emit_store_dst(jd, iptr, d);
1039 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1040 /* sx.val.l = constant */
1042 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1043 if (iptr->dst.var->flags & INMEMORY) {
1044 if (iptr->s1.var->flags & INMEMORY) {
1045 /* Alpha algorithm */
1047 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 4);
1049 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 4 + 4);
1055 /* TODO: hmm, don't know if this is always correct */
1057 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1059 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1065 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4, REG_ITMP1);
1066 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4 + 4, REG_ITMP2);
1068 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1069 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1070 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 4 + 4);
1071 emit_jcc(cd, CC_GE, disp);
1073 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4, REG_ITMP1);
1074 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4 + 4, REG_ITMP2);
1076 emit_neg_reg(cd, REG_ITMP1);
1077 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1078 emit_neg_reg(cd, REG_ITMP2);
1080 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1081 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1083 emit_neg_reg(cd, REG_ITMP1);
1084 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1085 emit_neg_reg(cd, REG_ITMP2);
1087 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 4);
1088 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 4 + 4);
1092 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1093 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1095 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1096 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1097 M_TEST(GET_LOW_REG(s1));
1103 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1105 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1106 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1107 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1108 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1111 emit_store_dst(jd, iptr, d);
1114 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1115 /* sx.val.i = constant */
1117 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1118 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1120 M_SLL_IMM(iptr->sx.val.i, d);
1121 emit_store_dst(jd, iptr, d);
1124 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1126 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1127 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1128 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1129 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1132 emit_store_dst(jd, iptr, d);
1135 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1136 /* sx.val.i = constant */
1138 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1139 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1141 M_SRA_IMM(iptr->sx.val.i, d);
1142 emit_store_dst(jd, iptr, d);
1145 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1147 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1148 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1149 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1150 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1153 emit_store_dst(jd, iptr, d);
1156 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1157 /* sx.val.i = constant */
1159 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1160 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1162 M_SRL_IMM(iptr->sx.val.i, d);
1163 emit_store_dst(jd, iptr, d);
1166 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1168 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1169 s2 = emit_load_s2(jd, iptr, ECX);
1170 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1173 M_TEST_IMM(32, ECX);
1175 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1176 M_CLR(GET_LOW_REG(d));
1177 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1178 M_SLL(GET_LOW_REG(d));
1179 emit_store_dst(jd, iptr, d);
1182 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1183 /* sx.val.i = constant */
1185 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1186 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1188 if (iptr->sx.val.i & 0x20) {
1189 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1190 M_CLR(GET_LOW_REG(d));
1191 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1195 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1197 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1199 emit_store_dst(jd, iptr, d);
1202 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1204 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1205 s2 = emit_load_s2(jd, iptr, ECX);
1206 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1209 M_TEST_IMM(32, ECX);
1211 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1212 M_SRA_IMM(31, GET_HIGH_REG(d));
1213 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1214 M_SRA(GET_HIGH_REG(d));
1215 emit_store_dst(jd, iptr, d);
1218 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1219 /* sx.val.i = constant */
1221 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1222 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1224 if (iptr->sx.val.i & 0x20) {
1225 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1226 M_SRA_IMM(31, GET_HIGH_REG(d));
1227 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1231 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1233 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1235 emit_store_dst(jd, iptr, d);
1238 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1240 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1241 s2 = emit_load_s2(jd, iptr, ECX);
1242 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1245 M_TEST_IMM(32, ECX);
1247 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1248 M_CLR(GET_HIGH_REG(d));
1249 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1250 M_SRL(GET_HIGH_REG(d));
1251 emit_store_dst(jd, iptr, d);
1254 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1255 /* sx.val.l = constant */
1257 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1258 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1260 if (iptr->sx.val.i & 0x20) {
1261 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1262 M_CLR(GET_HIGH_REG(d));
1263 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1267 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1269 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1271 emit_store_dst(jd, iptr, d);
1274 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1276 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1277 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1278 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1285 emit_store_dst(jd, iptr, d);
/* Bitwise int/long operations. Pattern for every arm: load sources with
   emit_load_s*, combine into destination register d, write back with
   emit_store_dst. NOTE(review): this listing is elided -- else-branches,
   braces and break statements fall on omitted lines; comments below only
   state what the visible lines establish. */
1288 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1289 /* sx.val.i = constant */
1291 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1292 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
/* AND the 32-bit immediate into d; the s1->d move presumably sits on an
   elided line -- TODO confirm. */
1294 M_AND_IMM(iptr->sx.val.i, d);
1295 emit_store_dst(jd, iptr, d);
/* 64-bit AND: combine the low 32-bit halves first, then reload and combine
   the high halves. d is a packed register pair (REG_ITMP12_PACKED). */
1298 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1300 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1301 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1302 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
/* If s2 already lives in d's low half, AND s1 straight in; otherwise move
   s1 into d first (the `else {` line is elided here). */
1303 if (s2 == GET_LOW_REG(d))
1304 M_AND(s1, GET_LOW_REG(d));
1306 M_INTMOVE(s1, GET_LOW_REG(d));
1307 M_AND(s2, GET_LOW_REG(d));
1309 /* REG_ITMP1 probably contains low 32-bit of destination */
1310 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1311 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1312 if (s2 == GET_HIGH_REG(d))
1313 M_AND(s1, GET_HIGH_REG(d));
1315 M_INTMOVE(s1, GET_HIGH_REG(d));
1316 M_AND(s2, GET_HIGH_REG(d));
1318 emit_store_dst(jd, iptr, d);
/* 64-bit AND with immediate: the long constant is split into its low and
   high 32-bit words and applied to the packed pair half by half. */
1321 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1322 /* sx.val.l = constant */
1324 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1325 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1327 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1328 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1329 emit_store_dst(jd, iptr, d);
/* 32-bit OR; the actual M_OR emission is on elided lines between the loads
   and the store -- TODO confirm. */
1332 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1334 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1335 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1336 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1343 emit_store_dst(jd, iptr, d);
1346 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1347 /* sx.val.i = constant */
1349 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1350 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1352 M_OR_IMM(iptr->sx.val.i, d);
1353 emit_store_dst(jd, iptr, d);
/* 64-bit OR: same low-then-high structure as ICMD_LAND above. */
1356 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1358 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1359 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1360 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1361 if (s2 == GET_LOW_REG(d))
1362 M_OR(s1, GET_LOW_REG(d));
1364 M_INTMOVE(s1, GET_LOW_REG(d));
1365 M_OR(s2, GET_LOW_REG(d));
1367 /* REG_ITMP1 probably contains low 32-bit of destination */
1368 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1369 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1370 if (s2 == GET_HIGH_REG(d))
1371 M_OR(s1, GET_HIGH_REG(d));
1373 M_INTMOVE(s1, GET_HIGH_REG(d));
1374 M_OR(s2, GET_HIGH_REG(d));
1376 emit_store_dst(jd, iptr, d);
1379 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1380 /* sx.val.l = constant */
1382 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1383 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1385 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1386 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1387 emit_store_dst(jd, iptr, d);
/* 32-bit XOR; the M_XOR emission is on elided lines -- TODO confirm. */
1390 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1392 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1393 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1394 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1401 emit_store_dst(jd, iptr, d);
1404 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1405 /* sx.val.i = constant */
1407 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1408 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1410 M_XOR_IMM(iptr->sx.val.i, d);
1411 emit_store_dst(jd, iptr, d);
/* 64-bit XOR: same low-then-high structure as ICMD_LAND/ICMD_LOR. */
1414 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1416 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1417 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1418 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1419 if (s2 == GET_LOW_REG(d))
1420 M_XOR(s1, GET_LOW_REG(d));
1422 M_INTMOVE(s1, GET_LOW_REG(d));
1423 M_XOR(s2, GET_LOW_REG(d));
1425 /* REG_ITMP1 probably contains low 32-bit of destination */
1426 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1427 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1428 if (s2 == GET_HIGH_REG(d))
1429 M_XOR(s1, GET_HIGH_REG(d));
1431 M_INTMOVE(s1, GET_HIGH_REG(d));
1432 M_XOR(s2, GET_HIGH_REG(d));
1434 emit_store_dst(jd, iptr, d);
1437 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1438 /* sx.val.l = constant */
1440 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1441 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1443 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1444 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1445 emit_store_dst(jd, iptr, d);
1449 /* floating operations ************************************************/
/* x87 FPU arithmetic. Each arm loads operands onto the FPU stack via
   emit_load_s*, and the arithmetic instruction itself (fchs/faddp/fsubp/
   fmulp/fdivp per the opcode names) sits on lines elided from this view --
   TODO confirm against the full file. */
1451 case ICMD_FNEG: /* ..., value ==> ..., - value */
1453 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1454 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1456 emit_store_dst(jd, iptr, d);
1459 case ICMD_DNEG: /* ..., value ==> ..., - value */
1461 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1462 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1464 emit_store_dst(jd, iptr, d);
1467 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1469 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1470 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1471 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1473 emit_store_dst(jd, iptr, d);
1476 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1478 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1479 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1480 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1482 emit_store_dst(jd, iptr, d);
1485 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1487 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1488 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1489 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1491 emit_store_dst(jd, iptr, d);
1494 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1496 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1497 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1498 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1500 emit_store_dst(jd, iptr, d);
1503 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1505 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1506 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1507 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1509 emit_store_dst(jd, iptr, d);
1512 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1514 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1515 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1516 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1518 emit_store_dst(jd, iptr, d);
1521 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1523 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1524 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1525 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1527 emit_store_dst(jd, iptr, d);
1530 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1532 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1533 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1534 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1536 emit_store_dst(jd, iptr, d);
/* Float remainder: operands are loaded in reverse order so no fxch is
   needed. The backward emit_jcc(CC_P, ...) loops while the FPU status
   (copied into flags on an elided line) signals a partial remainder --
   presumably an fprem loop; the -(2+1+2+1+6) byte displacement is tied to
   the exact encodings of the elided instructions. */
1539 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1541 /* exchanged to skip fxch */
1542 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1543 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1544 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1545 /* emit_fxch(cd); */
1550 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1551 emit_store_dst(jd, iptr, d);
/* free the leftover divisor still on the FPU stack */
1552 emit_ffree_reg(cd, 0);
/* Double remainder: identical scheme to ICMD_FREM above. */
1556 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1558 /* exchanged to skip fxch */
1559 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1560 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1561 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1562 /* emit_fxch(cd); */
1567 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1568 emit_store_dst(jd, iptr, d);
1569 emit_ffree_reg(cd, 0);
/* int -> float/double: fild loads a 32-bit integer from memory onto the
   FPU stack. If the source is in a register it is first spilled to a
   data-segment slot so fild can reach it (hence the thread-safety XXX:
   the slot is shared, not per-thread). */
1573 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1574 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1576 var = VAROP(iptr->s1);
1577 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1579 if (var->flags & INMEMORY) {
1580 emit_fildl_membase(cd, REG_SP, var->vv.regoff * 4);
1582 /* XXX not thread safe! */
1583 disp = dseg_add_unique_s4(cd, 0);
/* imm 0 is a placeholder; presumably patched to the dseg base -- TODO
   confirm against the full file. */
1584 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1586 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1587 emit_fildl_membase(cd, REG_ITMP1, disp);
1590 emit_store_dst(jd, iptr, d);
/* long -> float/double: fildll needs the 64-bit source in memory; longs
   that ended up in registers are a hard error here. */
1593 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1594 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1596 var = VAROP(iptr->s1);
1597 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1598 if (var->flags & INMEMORY) {
1599 emit_fildll_membase(cd, REG_SP, var->vv.regoff * 4);
1602 log_text("L2F: longs have to be in memory");
1605 emit_store_dst(jd, iptr, d);
/* float -> int: switch the FPU to round-toward-zero (0x0e7f control word),
   fistp the value, restore round-to-nearest (0x027f), then compare the
   result against 0x80000000 (the x87 "invalid" indefinite). On a match,
   fall through to a call of asm_builtin_f2i which produces the
   Java-mandated saturated result; the hand-computed `disp` byte counts
   skip that slow path. */
1608 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1610 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1611 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1613 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1616 /* Round to zero, 53-bit mode, exception masked */
1617 disp = dseg_add_s4(cd, 0x0e7f);
1618 emit_fldcw_membase(cd, REG_ITMP1, disp);
1620 var = VAROP(iptr->dst);
1621 var1 = VAROP(iptr->s1);
1623 if (var->flags & INMEMORY) {
1624 emit_fistpl_membase(cd, REG_SP, var->vv.regoff * 4);
1626 /* Round to nearest, 53-bit mode, exceptions masked */
1627 disp = dseg_add_s4(cd, 0x027f);
1628 emit_fldcw_membase(cd, REG_ITMP1, disp);
1630 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1631 REG_SP, var->vv.regoff * 4);
/* disp accumulates the byte length of the slow-path code to jump over */
1634 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1636 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1639 /* XXX not thread safe! */
1640 disp = dseg_add_unique_s4(cd, 0);
1641 emit_fistpl_membase(cd, REG_ITMP1, disp);
1642 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1644 /* Round to nearest, 53-bit mode, exceptions masked */
1645 disp = dseg_add_s4(cd, 0x027f);
1646 emit_fldcw_membase(cd, REG_ITMP1, disp);
1648 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1651 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1652 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
/* skip the builtin call when the fistp result was not 0x80000000 */
1655 emit_jcc(cd, CC_NE, disp);
1657 /* XXX: change this when we use registers */
1658 emit_flds_membase(cd, REG_SP, var1->vv.regoff * 4);
1659 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1660 emit_call_reg(cd, REG_ITMP1);
1662 if (var->flags & INMEMORY) {
1663 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1666 M_INTMOVE(REG_RESULT, var->vv.regoff);
/* double -> int: same structure as ICMD_F2I, but loads with fldl and
   calls asm_builtin_d2i on the 0x80000000 sentinel. */
1670 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1672 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1673 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1675 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1678 /* Round to zero, 53-bit mode, exception masked */
1679 disp = dseg_add_s4(cd, 0x0e7f);
1680 emit_fldcw_membase(cd, REG_ITMP1, disp);
1682 var = VAROP(iptr->dst);
1683 var1 = VAROP(iptr->s1);
1685 if (var->flags & INMEMORY) {
1686 emit_fistpl_membase(cd, REG_SP, var->vv.regoff * 4);
1688 /* Round to nearest, 53-bit mode, exceptions masked */
1689 disp = dseg_add_s4(cd, 0x027f);
1690 emit_fldcw_membase(cd, REG_ITMP1, disp);
1692 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1693 REG_SP, var->vv.regoff * 4);
1696 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1698 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1701 /* XXX not thread safe! */
1702 disp = dseg_add_unique_s4(cd, 0);
1703 emit_fistpl_membase(cd, REG_ITMP1, disp);
1704 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1706 /* Round to nearest, 53-bit mode, exceptions masked */
1707 disp = dseg_add_s4(cd, 0x027f);
1708 emit_fldcw_membase(cd, REG_ITMP1, disp);
1710 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1713 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1714 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1717 emit_jcc(cd, CC_NE, disp);
1719 /* XXX: change this when we use registers */
1720 emit_fldl_membase(cd, REG_SP, var1->vv.regoff * 4);
1721 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1722 emit_call_reg(cd, REG_ITMP1);
1724 if (var->flags & INMEMORY) {
1725 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1727 M_INTMOVE(REG_RESULT, var->vv.regoff);
/* float -> long: round-to-zero control word, fistpll into the in-memory
   long, then check for the 64-bit indefinite value (high word 0x80000000,
   low word 0): if matched, call asm_builtin_f2l for the Java-saturated
   result. The two CC_NE jumps skip the builtin call; their displacements
   are byte-counted via CALCOFFSETBYTES over the (partly elided) slow
   path. */
1731 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1733 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1734 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1736 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1739 /* Round to zero, 53-bit mode, exception masked */
1740 disp = dseg_add_s4(cd, 0x0e7f);
1741 emit_fldcw_membase(cd, REG_ITMP1, disp);
1743 var = VAROP(iptr->dst);
1744 var1 = VAROP(iptr->s1);
1746 if (var->flags & INMEMORY) {
1747 emit_fistpll_membase(cd, REG_SP, var->vv.regoff * 4);
1749 /* Round to nearest, 53-bit mode, exceptions masked */
1750 disp = dseg_add_s4(cd, 0x027f);
1751 emit_fldcw_membase(cd, REG_ITMP1, disp);
/* compare the high word of the stored long against 0x80000000 */
1753 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1754 REG_SP, var->vv.regoff * 4 + 4);
1757 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1759 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1762 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1764 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4 + 4);
1766 emit_jcc(cd, CC_NE, disp);
/* high word matched; now check the low word for 0 */
1768 emit_alu_imm_membase(cd, ALU_CMP, 0,
1769 REG_SP, var->vv.regoff * 4);
1772 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1774 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1776 emit_jcc(cd, CC_NE, disp);
1778 /* XXX: change this when we use registers */
1779 emit_flds_membase(cd, REG_SP, var1->vv.regoff * 4);
1780 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1781 emit_call_reg(cd, REG_ITMP1);
/* store the builtin's 64-bit result (EDX:EAX pair) back to the slot */
1782 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1783 emit_mov_reg_membase(cd, REG_RESULT2,
1784 REG_SP, var->vv.regoff * 4 + 4);
1787 log_text("F2L: longs have to be in memory");
/* double -> long: same structure as ICMD_F2L with fldl/asm_builtin_d2l. */
1792 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1794 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1795 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1797 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1800 /* Round to zero, 53-bit mode, exception masked */
1801 disp = dseg_add_s4(cd, 0x0e7f);
1802 emit_fldcw_membase(cd, REG_ITMP1, disp);
1804 var = VAROP(iptr->dst);
1805 var1 = VAROP(iptr->s1);
1807 if (var->flags & INMEMORY) {
1808 emit_fistpll_membase(cd, REG_SP, var->vv.regoff * 4);
1810 /* Round to nearest, 53-bit mode, exceptions masked */
1811 disp = dseg_add_s4(cd, 0x027f);
1812 emit_fldcw_membase(cd, REG_ITMP1, disp);
1814 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1815 REG_SP, var->vv.regoff * 4 + 4);
1818 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1820 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1823 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1825 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4 + 4);
1827 emit_jcc(cd, CC_NE, disp);
1829 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff * 4);
1832 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1834 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1836 emit_jcc(cd, CC_NE, disp);
1838 /* XXX: change this when we use registers */
1839 emit_fldl_membase(cd, REG_SP, var1->vv.regoff * 4);
1840 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1841 emit_call_reg(cd, REG_ITMP1);
1842 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1843 emit_mov_reg_membase(cd, REG_RESULT2,
1844 REG_SP, var->vv.regoff * 4 + 4);
1847 log_text("D2L: longs have to be in memory");
/* float <-> double widening/narrowing: on x87 both live as 80-bit stack
   values, so load + store with the destination precision suffices; the
   conversion instructions (if any) fall on elided lines. */
1852 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1854 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1855 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1857 emit_store_dst(jd, iptr, d);
1860 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1862 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1863 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1865 emit_store_dst(jd, iptr, d);
/* fcmpl: compare and produce -1/0/+1 in d. The FPU status word (moved to
   EAX on an elided line, presumably via fnstsw) is tested for bit 0x400
   (C2, set on unordered); unordered is treated as "greater" so NaN yields
   -1 per JVM fcmpl semantics. The jcc displacements (6, 6+3+5+3, 3+5, 3)
   are byte counts over the following fixed-size instructions. */
1868 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1871 /* exchanged to skip fxch */
1872 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1873 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1874 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1875 /* emit_fxch(cd); */
1878 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1879 emit_jcc(cd, CC_E, 6);
1880 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1882 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1883 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1884 emit_jcc(cd, CC_B, 3 + 5);
1885 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1886 emit_jmp_imm(cd, 3);
1887 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1888 emit_store_dst(jd, iptr, d);
/* fcmpg: as fcmpl but unordered is treated as "less", so NaN yields +1;
   implemented by forcing AH to 1 when C2 is set. */
1891 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1894 /* exchanged to skip fxch */
1895 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1896 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1897 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1898 /* emit_fxch(cd); */
1901 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1902 emit_jcc(cd, CC_E, 3);
1903 emit_movb_imm_reg(cd, 1, REG_AH);
1905 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1906 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1907 emit_jcc(cd, CC_B, 3 + 5);
1908 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1909 emit_jmp_imm(cd, 3);
1910 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1911 emit_store_dst(jd, iptr, d);
1915 /* memory operations **************************************************/
/* Array loads. Common pattern: s1 = arrayref, s2 = index; the bounds
   check (which also serves as the implicit null check via the header
   access) precedes a scaled memindex load. Scale factors: 0 = byte,
   1 = 2-byte, 2 = 4-byte, 3 = 8-byte elements. */
1917 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1919 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1920 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1921 /* implicit null-pointer check */
1922 M_ILD(d, s1, OFFSET(java_arrayheader, size));
1923 emit_store_dst(jd, iptr, d);
/* byte load: movsbl sign-extends the 8-bit element to 32 bits */
1926 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1928 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1929 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1930 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1931 /* implicit null-pointer check */
1932 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1933 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray, data[0]),
1935 emit_store_dst(jd, iptr, d);
/* char load: movzwl zero-extends (Java char is unsigned 16-bit) */
1938 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1940 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1941 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1942 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1943 /* implicit null-pointer check */
1944 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1945 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray, data[0]),
1947 emit_store_dst(jd, iptr, d);
/* short load: movswl sign-extends (Java short is signed 16-bit) */
1950 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1952 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1953 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1954 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1955 /* implicit null-pointer check */
1956 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1957 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray, data[0]),
1959 emit_store_dst(jd, iptr, d);
1962 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1964 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1965 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1966 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1967 /* implicit null-pointer check */
1968 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1969 emit_mov_memindex_reg(cd, OFFSET(java_intarray, data[0]),
1971 emit_store_dst(jd, iptr, d);
/* long load: the 64-bit destination must be a stack slot; the two 32-bit
   halves are moved through REG_ITMP3 one after the other. */
1974 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1976 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1977 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1978 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1979 /* implicit null-pointer check */
1980 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1982 var = VAROP(iptr->dst);
1984 assert(var->flags & INMEMORY);
1985 emit_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]),
1986 s1, s2, 3, REG_ITMP3);
1987 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff * 4);
1988 emit_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]) + 4,
1989 s1, s2, 3, REG_ITMP3);
1990 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff * 4 + 4);
/* float/double loads go through the FPU stack (flds/fldl) */
1993 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1995 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1996 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1997 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1998 /* implicit null-pointer check */
1999 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2000 emit_flds_memindex(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2001 emit_store_dst(jd, iptr, d);
2004 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2006 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2007 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2008 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
2009 /* implicit null-pointer check */
2010 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2011 emit_fldl_memindex(cd, OFFSET(java_doublearray, data[0]), s1, s2,3);
2012 emit_store_dst(jd, iptr, d);
/* reference load: plain 32-bit mov (pointers are 4 bytes on i386) */
2015 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2017 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2018 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2019 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2020 /* implicit null-pointer check */
2021 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2022 emit_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]),
2024 emit_store_dst(jd, iptr, d);
/* Array stores: arrayref/index are bounds-checked before the value is
   loaded, then a scaled memindex store writes the element. */
2028 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2030 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2031 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2032 /* implicit null-pointer check */
2033 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2034 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
/* byte store needs a register with an addressable low byte (xL);
   EBP/ESI/EDI have none, so force the value into REG_ITMP3 */
2036 /* because EBP, ESI, EDI have no xH and xL nibbles */
2037 M_INTMOVE(s3, REG_ITMP3);
2040 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]),
2044 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2046 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2047 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2048 /* implicit null-pointer check */
2049 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2050 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2051 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]),
2055 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2057 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2058 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2059 /* implicit null-pointer check */
2060 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2061 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2062 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]),
2066 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2068 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2069 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2070 /* implicit null-pointer check */
2071 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2072 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2073 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]),
/* long store: 64-bit value must be a stack slot; halves copied via
   REG_ITMP3 one after the other. */
2077 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2079 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2080 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2081 /* implicit null-pointer check */
2082 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2084 var = VAROP(iptr->sx.s23.s3);
2086 assert(var->flags & INMEMORY);
2087 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff * 4, REG_ITMP3);
2088 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray, data[0])
2090 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff * 4 + 4, REG_ITMP3);
2091 emit_mov_reg_memindex(cd, REG_ITMP3,
2092 OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
/* float/double stores pop the value from the FPU stack (fstps/fstpl) */
2095 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2097 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2098 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2099 /* implicit null-pointer check */
2100 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2101 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2102 emit_fstps_memindex(cd, OFFSET(java_floatarray, data[0]), s1, s2,2);
2105 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2107 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2108 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2109 /* implicit null-pointer check */
2110 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2111 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2112 emit_fstpl_memindex(cd, OFFSET(java_doublearray, data[0]),
/* reference store: must first call BUILTIN_canstore(array, value) for the
   covariant array-store check; the builtin call clobbers registers, so
   s1/s2/s3 are reloaded afterwards before the actual store. */
2116 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2118 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2119 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2120 /* implicit null-pointer check */
2121 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2122 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2124 M_AST(s1, REG_SP, 0 * 4);
2125 M_AST(s3, REG_SP, 1 * 4);
2126 M_MOV_IMM(BUILTIN_canstore, REG_ITMP1);
/* throws ArrayStoreException when the builtin rejects the element */
2128 emit_exception_check(cd, iptr);
2130 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2131 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2132 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2133 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]),
/* ...STORECONST variants: store an immediate element, no value load. */
2137 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2139 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2140 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2141 /* implicit null-pointer check */
2142 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2143 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2144 OFFSET(java_bytearray, data[0]), s1, s2, 0);
2147 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2149 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2150 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2151 /* implicit null-pointer check */
2152 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2153 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2154 OFFSET(java_chararray, data[0]), s1, s2, 1);
2157 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2159 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2160 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2161 /* implicit null-pointer check */
2162 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2163 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2164 OFFSET(java_shortarray, data[0]), s1, s2, 1);
2167 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2169 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2170 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2171 /* implicit null-pointer check */
2172 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2173 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2174 OFFSET(java_intarray, data[0]), s1, s2, 2);
/* long const store: low word, then high word. NOTE(review): the high
   word uses `>> 31` rather than `>> 32` -- that only equals the true high
   word when the constant fits in 32 bits (the optimizer presumably
   guarantees this); verify against the ICMD builder. */
2177 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2179 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2180 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2181 /* implicit null-pointer check */
2182 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2183 emit_mov_imm_memindex(cd,
2184 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2185 OFFSET(java_longarray, data[0]), s1, s2, 3);
2186 emit_mov_imm_memindex(cd,
2187 ((s4)iptr->sx.s23.s3.constval) >> 31,
2188 OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
/* reference const store: only null is possible, so no canstore check */
2191 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2193 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2194 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2195 /* implicit null-pointer check */
2196 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2197 emit_mov_imm_memindex(cd, 0,
2198 OFFSET(java_objectarray, data[0]), s1, s2, 2);
/* Static field access: resolve the field's address. Unresolved fields get
   a PATCHER_get_putstatic patch site; resolved fields of classes not yet
   (almost) initialized get a PATCHER_clinit site so <clinit> runs first.
   The address ends up as an immediate in REG_ITMP1 and the access is
   dispatched on the field's type. */
2202 case ICMD_GETSTATIC: /* ... ==> ..., value */
2204 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2205 uf = iptr->sx.s23.s3.uf;
2206 fieldtype = uf->fieldref->parseddesc.fd->type;
2209 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2213 fi = iptr->sx.s23.s3.fmiref->p.field;
2214 fieldtype = fi->type;
2215 disp = (ptrint) &(fi->value);
2217 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2218 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2221 M_MOV_IMM(disp, REG_ITMP1);
/* the TYPE_* case labels fall on elided lines between these arms */
2222 switch (fieldtype) {
2225 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2226 M_ILD(d, REG_ITMP1, 0);
2229 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2230 M_LLD(d, REG_ITMP1, 0);
2233 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2234 M_FLD(d, REG_ITMP1, 0);
2237 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2238 M_DLD(d, REG_ITMP1, 0);
2241 emit_store_dst(jd, iptr, d);
/* putstatic: mirror of getstatic -- loads the value and stores it through
   the field address in REG_ITMP1. */
2244 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2246 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2247 uf = iptr->sx.s23.s3.uf;
2248 fieldtype = uf->fieldref->parseddesc.fd->type;
2251 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2254 fi = iptr->sx.s23.s3.fmiref->p.field;
2255 fieldtype = fi->type;
2256 disp = (ptrint) &(fi->value);
2258 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2259 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2262 M_MOV_IMM(disp, REG_ITMP1);
2263 switch (fieldtype) {
2266 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2267 M_IST(s1, REG_ITMP1, 0);
2270 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2271 M_LST(s1, REG_ITMP1, 0);
2274 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2275 emit_fstps_membase(cd, REG_ITMP1, 0);
2278 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2279 emit_fstpl_membase(cd, REG_ITMP1, 0);
/* putstatic with an immediate value carried in the instruction itself.
   NOTE(review): the long high word uses `>> 31` instead of `>> 32`; as in
   ICMD_LASTORECONST this is only correct for constants that fit 32 bits
   -- verify against the ICMD builder. */
2284 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2285 /* val = value (in current instruction) */
2286 /* following NOP) */
2288 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2289 uf = iptr->sx.s23.s3.uf;
2290 fieldtype = uf->fieldref->parseddesc.fd->type;
2293 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2296 fi = iptr->sx.s23.s3.fmiref->p.field;
2297 fieldtype = fi->type;
2298 disp = (ptrint) &(fi->value);
2300 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2301 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2304 M_MOV_IMM(disp, REG_ITMP1);
2305 switch (fieldtype) {
2308 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
2311 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2312 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
/* Instance field access: null-check the object, then load/store at a
   field offset. Unresolved fields register a patcher and use the 32-bit
   (M_*32) displacement forms so the patcher can rewrite the offset
   in place; `disp` for the resolved case is set on elided lines
   (presumably fi->offset -- TODO confirm). */
2319 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2321 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2322 emit_nullpointer_check(cd, iptr, s1);
2324 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2325 unresolved_field *uf = iptr->sx.s23.s3.uf;
2327 fieldtype = uf->fieldref->parseddesc.fd->type;
2329 codegen_addpatchref(cd, PATCHER_getfield,
2330 iptr->sx.s23.s3.uf, 0);
2336 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2338 fieldtype = fi->type;
2342 switch (fieldtype) {
2345 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2346 M_ILD32(d, s1, disp);
2349 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2350 M_LLD32(d, s1, disp);
2353 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2354 M_FLD32(d, s1, disp);
2357 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2358 M_DLD32(d, s1, disp);
2361 emit_store_dst(jd, iptr, d);
/* putfield: the value must be loaded before the patch ref is added
   (see the "code patching" note) so the patched code sequence starts at
   a known instruction. */
2364 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2366 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2367 emit_nullpointer_check(cd, iptr, s1);
2369 /* must be done here because of code patching */
2371 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2372 unresolved_field *uf = iptr->sx.s23.s3.uf;
2374 fieldtype = uf->fieldref->parseddesc.fd->type;
2377 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2379 fieldtype = fi->type;
2382 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2383 if (IS_2_WORD_TYPE(fieldtype))
2384 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2386 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2389 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2391 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2392 unresolved_field *uf = iptr->sx.s23.s3.uf;
2394 codegen_addpatchref(cd, PATCHER_putfield, uf, 0);
2400 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2405 switch (fieldtype) {
2408 M_IST32(s2, s1, disp);
2411 M_LST32(s2, s1, disp);
2414 emit_fstps_membase32(cd, s1, disp);
2417 emit_fstpl_membase32(cd, s1, disp);
/* putfield with an immediate value from the instruction. NOTE(review):
   same `>> 31` high-word caveat as ICMD_PUTSTATICCONST. */
2422 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2423 /* val = value (in current instruction) */
2424 /* following NOP) */
2426 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2427 emit_nullpointer_check(cd, iptr, s1);
2429 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2430 unresolved_field *uf = iptr->sx.s23.s3.uf;
2432 fieldtype = uf->fieldref->parseddesc.fd->type;
2434 codegen_addpatchref(cd, PATCHER_putfieldconst,
2442 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2444 fieldtype = fi->type;
2449 switch (fieldtype) {
2452 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2455 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2456 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2464 /* branch operations **************************************************/
/* athrow: exception object goes to REG_ITMP1_XPTR; CALL 0 / POP is the
   classic trick to materialize the current PC into REG_ITMP2_XPC before
   jumping to the shared asm exception handler. */
2466 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2468 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2469 M_INTMOVE(s1, REG_ITMP1_XPTR);
2471 #ifdef ENABLE_VERIFIER
2472 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2473 codegen_addpatchref(cd, PATCHER_athrow_areturn,
2474 iptr->sx.s23.s2.uc, 0);
2476 #endif /* ENABLE_VERIFIER */
2478 M_CALL_IMM(0); /* passing exception pc */
2479 M_POP(REG_ITMP2_XPC);
2481 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
/* unconditional branches; under SSA, pending phi moves must be emitted
   before the jump leaves the block. */
2485 case ICMD_GOTO: /* ... ==> ... */
2486 case ICMD_RET: /* ... ==> ... */
2488 #if defined(ENABLE_SSA)
2490 last_cmd_was_goto = true;
2491 /* In case of a Goto phimoves have to be inserted before the */
2493 codegen_insert_phi_moves(jd, bptr);
2496 emit_br(cd, iptr->dst.block);
2500 case ICMD_JSR: /* ... ==> ... */
2502 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
/* null tests: opcode offset from ICMD_IFNULL selects EQ/NE condition */
2506 case ICMD_IFNULL: /* ..., value ==> ... */
2507 case ICMD_IFNONNULL:
2509 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2511 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
/* int-vs-constant tests: IFEQ..IFLE share one arm; the condition code is
   the opcode's offset from ICMD_IFEQ. */
2514 case ICMD_IFEQ: /* ..., value ==> ... */
2521 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2522 M_CMP_IMM(iptr->sx.val.i, s1);
2523 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
/* long == constant: zero test ORs the two halves; general case XORs each
   half with the constant's halves and ORs the results -- zero iff equal. */
2526 case ICMD_IF_LEQ: /* ..., value ==> ... */
2528 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2529 if (iptr->sx.val.l == 0) {
2530 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2531 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2534 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2535 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2536 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2537 M_OR(REG_ITMP2, REG_ITMP1);
2539 emit_beq(cd, iptr->dst.block);
/* long < constant: signed compare on the high words decides unless they
   are equal, in which case an unsigned compare on the low words decides. */
2542 case ICMD_IF_LLT: /* ..., value ==> ... */
2544 if (iptr->sx.val.l == 0) {
2545 /* If high 32-bit are less than zero, then the 64-bits
2547 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2549 emit_blt(cd, iptr->dst.block);
2552 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2553 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2554 emit_blt(cd, iptr->dst.block);
2556 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2557 emit_bult(cd, iptr->dst.block);
2561 case ICMD_IF_LLE: /* ..., value ==> ... */
2563 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2564 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2565 emit_blt(cd, iptr->dst.block);
2567 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2568 emit_bule(cd, iptr->dst.block);
2571 case ICMD_IF_LNE: /* ..., value ==> ... */
2573 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2574 if (iptr->sx.val.l == 0) {
2575 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2576 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2579 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2580 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2581 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2582 M_OR(REG_ITMP2, REG_ITMP1);
2584 emit_bne(cd, iptr->dst.block);
2587 case ICMD_IF_LGT: /* ..., value ==> ... */
2589 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2590 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2591 emit_bgt(cd, iptr->dst.block);
2593 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2594 emit_bugt(cd, iptr->dst.block);
/* long >= 0 only needs the sign of the high word */
2597 case ICMD_IF_LGE: /* ..., value ==> ... */
2599 if (iptr->sx.val.l == 0) {
2600 /* If high 32-bit are greater equal zero, then the
2602 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2604 emit_bge(cd, iptr->dst.block);
2607 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2608 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2609 emit_bgt(cd, iptr->dst.block);
2611 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2612 emit_buge(cd, iptr->dst.block);
/* int/reference compare-and-branch: the compare instruction itself is on
   an elided line; condition comes from the opcode offset. */
2616 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2617 case ICMD_IF_ICMPNE:
2618 case ICMD_IF_ICMPLT:
2619 case ICMD_IF_ICMPGT:
2620 case ICMD_IF_ICMPGE:
2621 case ICMD_IF_ICMPLE:
2623 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2624 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2626 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);
2629 case ICMD_IF_ACMPEQ: /* ..., value, value ==> ... */
2630 case ICMD_IF_ACMPNE:
2632 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2633 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2635 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);
2638 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2640 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2641 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2642 M_INTMOVE(s1, REG_ITMP1);
2643 M_XOR(s2, REG_ITMP1);
2644 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2645 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2646 M_INTMOVE(s1, REG_ITMP2);
2647 M_XOR(s2, REG_ITMP2);
2648 M_OR(REG_ITMP1, REG_ITMP2);
2649 emit_beq(cd, iptr->dst.block);
2652 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2654 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2655 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2656 M_INTMOVE(s1, REG_ITMP1);
2657 M_XOR(s2, REG_ITMP1);
2658 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2659 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2660 M_INTMOVE(s1, REG_ITMP2);
2661 M_XOR(s2, REG_ITMP2);
2662 M_OR(REG_ITMP1, REG_ITMP2);
2663 emit_bne(cd, iptr->dst.block);
2666 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2668 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2669 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2671 emit_blt(cd, iptr->dst.block);
2672 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2673 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2676 emit_bult(cd, iptr->dst.block);
2679 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2681 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2682 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2684 emit_bgt(cd, iptr->dst.block);
2685 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2686 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2689 emit_bugt(cd, iptr->dst.block);
2692 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2694 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2695 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2697 emit_blt(cd, iptr->dst.block);
2698 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2699 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2702 emit_bule(cd, iptr->dst.block);
2705 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2707 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2708 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2710 emit_bgt(cd, iptr->dst.block);
2711 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2712 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2715 emit_buge(cd, iptr->dst.block);
2719 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2721 REPLACEMENT_POINT_RETURN(cd, iptr);
2722 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2723 M_INTMOVE(s1, REG_RESULT);
2724 goto nowperformreturn;
2726 case ICMD_LRETURN: /* ..., retvalue ==> ... */
2728 REPLACEMENT_POINT_RETURN(cd, iptr);
2729 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2730 M_LNGMOVE(s1, REG_RESULT_PACKED);
2731 goto nowperformreturn;
2733 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2735 REPLACEMENT_POINT_RETURN(cd, iptr);
2736 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2737 M_INTMOVE(s1, REG_RESULT);
2739 #ifdef ENABLE_VERIFIER
2740 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2741 codegen_addpatchref(cd, PATCHER_athrow_areturn,
2742 iptr->sx.s23.s2.uc, 0);
2744 #endif /* ENABLE_VERIFIER */
2745 goto nowperformreturn;
2747 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2750 REPLACEMENT_POINT_RETURN(cd, iptr);
2751 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2752 goto nowperformreturn;
2754 case ICMD_RETURN: /* ... ==> ... */
2756 REPLACEMENT_POINT_RETURN(cd, iptr);
2762 p = cd->stackframesize;
2764 #if !defined(NDEBUG)
2765 emit_verbosecall_exit(jd);
2768 #if defined(ENABLE_THREADS)
2769 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2770 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 4);
2772 /* we need to save the proper return value */
2773 switch (iptr->opc) {
2776 M_IST(REG_RESULT, REG_SP, rd->memuse * 4);
2780 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 4);
2784 emit_fstps_membase(cd, REG_SP, rd->memuse * 4);
2788 emit_fstpl_membase(cd, REG_SP, rd->memuse * 4);
2792 M_AST(REG_ITMP2, REG_SP, 0);
2793 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2796 /* and now restore the proper return value */
2797 switch (iptr->opc) {
2800 M_ILD(REG_RESULT, REG_SP, rd->memuse * 4);
2804 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 4);
2808 emit_flds_membase(cd, REG_SP, rd->memuse * 4);
2812 emit_fldl_membase(cd, REG_SP, rd->memuse * 4);
2818 /* restore saved registers */
2820 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2821 p--; M_ALD(rd->savintregs[i], REG_SP, p * 4);
2824 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2826 emit_fldl_membase(cd, REG_SP, p * 4);
2827 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2829 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2832 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2836 /* deallocate stack */
2838 if (cd->stackframesize)
2839 M_AADD_IMM(cd->stackframesize * 4, REG_SP);
/* TABLESWITCH: normalize the index by subtracting the low bound, do a
   single unsigned range check against (high - low), then jump indirectly
   through a jump table built top-down in the data segment.  table[0] holds
   the default target. */
2846 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2849 branch_target_t *table;
2851 table = iptr->dst.table;
2853 l = iptr->sx.s23.s2.tablelow;
2854 i = iptr->sx.s23.s3.tablehigh;
2856 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2857 M_INTMOVE(s1, REG_ITMP1);
2860 M_ISUB_IMM(l, REG_ITMP1);
/* Unsigned compare folds both bounds checks into one: out-of-range
   (including negative after the subtract) goes to the default target. */
2866 M_CMP_IMM(i - 1, REG_ITMP1);
2867 emit_bugt(cd, table[0].block);
2869 /* build jump table top down and use address of lowest entry */
2874 dseg_add_target(cd, table->block);
2878 /* length of dataseg after last dseg_addtarget is used
/* REG_ITMP2 is patched to the data segment base (immediate 0 placeholder);
   scaled index (shift 2 = *4) selects the table entry. */
2881 M_MOV_IMM(0, REG_ITMP2);
2883 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);

/* LOOKUPSWITCH: linear sequence of compare-and-branch pairs, one per
   (value, target) entry, falling through to the default target. */
2889 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2892 lookup_target_t *lookup;
2894 lookup = iptr->dst.lookup;
2896 i = iptr->sx.s23.s2.lookupcount;
2898 MCODECHECK((i<<2)+8);
2899 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2902 M_CMP_IMM(lookup->value, s1);
2903 emit_beq(cd, lookup->target.block);
2907 emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
/* Method invocation ICMDs.  Arguments are copied onto the i386 stack
   (everything is passed in memory on this ABI — see the log_text below),
   then the callee address is materialized per invocation kind.  NOTE:
   interior lines are elided in this view. */
2912 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
2914 bte = iptr->sx.s23.s3.bte;

2918 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2920 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2921 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
2922 case ICMD_INVOKEINTERFACE:
2924 REPLACEMENT_POINT_INVOKE(cd, iptr);
/* Unresolved call sites only have the parsed descriptor of the reference;
   resolved ones use the methodinfo's descriptor. */
2926 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2927 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
2931 lm = iptr->sx.s23.s3.fmiref->p.method;
2932 md = lm->parseddesc;
2936 s3 = md->paramcount;
2938 MCODECHECK((s3 << 1) + 64);
2940 /* copy arguments to registers or stack location */
2942 for (s3 = s3 - 1; s3 >= 0; s3--) {
2943 var = VAR(iptr->sx.s23.s2.args[s3]);
2945 /* Already Preallocated (ARGVAR) ? */
2946 if (var->flags & PREALLOC)
2948 if (IS_INT_LNG_TYPE(var->type)) {
/* On i386 there are no integer argument registers — an in-register int
   argument would be a register-allocator bug. */
2949 if (!md->params[s3].inmemory) {
2950 log_text("No integer argument registers available!");
2954 if (IS_2_WORD_TYPE(var->type)) {
2955 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
2956 M_LST(d, REG_SP, md->params[s3].regoff * 4);
2958 d = emit_load(jd, iptr, var, REG_ITMP1);
2959 M_IST(d, REG_SP, md->params[s3].regoff * 4);
2964 if (!md->params[s3].inmemory) {
2965 s1 = rd->argfltregs[md->params[s3].regoff];
2966 d = emit_load(jd, iptr, var, s1);
2970 d = emit_load(jd, iptr, var, REG_FTMP1);
2971 if (IS_2_WORD_TYPE(var->type))
2972 M_DST(d, REG_SP, md->params[s3].regoff * 4);
2974 M_FST(d, REG_SP, md->params[s3].regoff * 4);

/* Per-kind callee address computation; d receives the return type for the
   common result-move code after the call. */
2979 switch (iptr->opc) {
/* BUILTIN: direct call to the builtin's function pointer, followed by an
   exception check. */
2981 disp = (ptrint) bte->fp;
2982 d = md->returntype.type;
2984 M_MOV_IMM(disp, REG_ITMP1);
2987 emit_exception_check(cd, iptr);

/* INVOKESPECIAL: explicit null check on the receiver (first stack arg). */
2990 case ICMD_INVOKESPECIAL:
2991 M_ALD(REG_ITMP1, REG_SP, 0 * 4);
2992 emit_nullpointer_check(cd, iptr, REG_ITMP1);

/* INVOKESTATIC (fallthrough from INVOKESPECIAL per CACAO convention):
   unresolved targets get a patcher; resolved ones call the stubroutine. */
2995 case ICMD_INVOKESTATIC:
2997 unresolved_method *um = iptr->sx.s23.s3.um;
2999 codegen_addpatchref(cd, PATCHER_invokestatic_special,
3003 d = md->returntype.type;
3006 disp = (ptrint) lm->stubroutine;
3007 d = lm->parseddesc->returntype.type;
3010 M_MOV_IMM(disp, REG_ITMP2);

/* INVOKEVIRTUAL: load vftbl from the receiver, then the method pointer at
   the vftbl-index offset. */
3014 case ICMD_INVOKEVIRTUAL:
3015 M_ALD(REG_ITMP1, REG_SP, 0 * 4);
3016 emit_nullpointer_check(cd, iptr, s1);
3019 unresolved_method *um = iptr->sx.s23.s3.um;
3021 codegen_addpatchref(cd, PATCHER_invokevirtual, um, 0);
3024 d = md->returntype.type;
3027 s1 = OFFSET(vftbl_t, table[0]) +
3028 sizeof(methodptr) * lm->vftblindex;
3029 d = md->returntype.type;
3032 M_ALD(REG_METHODPTR, REG_ITMP1,
3033 OFFSET(java_objectheader, vftbl));
3034 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);

/* INVOKEINTERFACE: double indirection — interfacetable entry (negative
   offset by class index), then the method slot within that table. */
3038 case ICMD_INVOKEINTERFACE:
3039 M_ALD(REG_ITMP1, REG_SP, 0 * 4);
3040 emit_nullpointer_check(cd, iptr, s1);
3043 unresolved_method *um = iptr->sx.s23.s3.um;
3045 codegen_addpatchref(cd, PATCHER_invokeinterface, um, 0);
3049 d = md->returntype.type;
3052 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3053 sizeof(methodptr) * lm->class->index;
3055 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3057 d = md->returntype.type;
3060 M_ALD(REG_METHODPTR, REG_ITMP1,
3061 OFFSET(java_objectheader, vftbl));
3062 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3063 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);

3068 /* store size of call code in replacement point */
3070 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3072 /* d contains return type */
/* Move the ABI result register(s) into the destination variable; with SSA,
   only for live destinations. */
3074 if (d != TYPE_VOID) {
3075 #if defined(ENABLE_SSA)
3076 if ((ls == NULL) || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) ||
3077 (ls->lifetime[-iptr->dst.varindex-1].type != -1))
3078 /* a "living" stackslot */
3081 if (IS_INT_LNG_TYPE(d)) {
3082 if (IS_2_WORD_TYPE(d)) {
3083 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3084 M_LNGMOVE(REG_RESULT_PACKED, s1);
3087 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3088 M_INTMOVE(REG_RESULT, s1);
3092 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3094 emit_store_dst(jd, iptr, s1);
/* CHECKCAST: three code shapes — interface check (interfacetable lookup),
   class check (baseval/diffval subtype range test), and array check (call
   to BUILTIN_arraycheckcast).  Unresolved classes emit both interface and
   class variants, selected at patch time via the ACC_INTERFACE flag test. */
3100 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3101 /* val.a: (classinfo*) superclass */
3103 /* superclass is an interface:
3105 * OK if ((sub == NULL) ||
3106 * (sub->vftbl->interfacetablelength > super->index) &&
3107 * (sub->vftbl->interfacetable[-super->index] != NULL));
3109 * superclass is a class:
3111 * OK if ((sub == NULL) || (0
3112 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3113 * super->vftbl->diffval));
3116 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3117 /* object type cast-check */
3120 vftbl_t *supervftbl;
3123 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3129 super = iptr->sx.s23.s3.c.cls;
3130 superindex = super->index;
3131 supervftbl = super->vftbl;
/* baseval/diffval may be rewritten concurrently by the loader; the
   threadcrit* calls bracket the region where they must be consistent. */
3134 #if defined(ENABLE_THREADS)
3135 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3137 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3139 /* if class is not resolved, check which code to call */
3141 if (super == NULL) {
3143 emit_label_beq(cd, BRANCH_LABEL_1);
3145 codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
3146 iptr->sx.s23.s3.c.ref, 0);
/* Immediate 0 is a placeholder patched to super->flags. */
3148 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3149 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3150 emit_label_beq(cd, BRANCH_LABEL_2);
3153 /* interface checkcast code */
3155 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3156 if (super != NULL) {
3158 emit_label_beq(cd, BRANCH_LABEL_3);
3161 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3163 if (super == NULL) {
3164 codegen_addpatchref(cd, PATCHER_checkcast_interface,
3165 iptr->sx.s23.s3.c.ref,
3170 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3171 M_ISUB_IMM32(superindex, REG_ITMP3);
3172 /* XXX do we need this one? */
3174 emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);
3176 M_ALD32(REG_ITMP3, REG_ITMP2,
3177 OFFSET(vftbl_t, interfacetable[0]) -
3178 superindex * sizeof(methodptr*));
3180 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);
3183 emit_label_br(cd, BRANCH_LABEL_4);
3185 emit_label(cd, BRANCH_LABEL_3);
3188 /* class checkcast code */
3190 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3191 if (super == NULL) {
3192 emit_label(cd, BRANCH_LABEL_2);
3196 emit_label_beq(cd, BRANCH_LABEL_5);
3199 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3201 if (super == NULL) {
3202 codegen_addpatchref(cd, PATCHER_checkcast_class,
3203 iptr->sx.s23.s3.c.ref,
3207 M_MOV_IMM(supervftbl, REG_ITMP3);
3208 #if defined(ENABLE_THREADS)
3209 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3211 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3213 /* if (s1 != REG_ITMP1) { */
3214 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3215 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3216 /* #if defined(ENABLE_THREADS) */
3217 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3219 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
/* Subtype test: (sub.baseval - super.baseval) must be unsigned <=
   super.diffval; BRANCH_ULE failure triggers the ClassCastException. */
3222 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3223 M_ISUB(REG_ITMP3, REG_ITMP2);
3224 M_MOV_IMM(supervftbl, REG_ITMP3);
3225 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3226 #if defined(ENABLE_THREADS)
3227 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3231 M_CMP(REG_ITMP3, REG_ITMP2);
3232 emit_classcast_check(cd, iptr, BRANCH_ULE, REG_ITMP3, s1);
3235 emit_label(cd, BRANCH_LABEL_5);
3238 if (super == NULL) {
3239 emit_label(cd, BRANCH_LABEL_1);
3240 emit_label(cd, BRANCH_LABEL_4);
3243 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3246 /* array type cast-check */
/* Array variant: call BUILTIN_arraycheckcast(objectref, class) and throw
   if it returns 0. */
3248 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3249 M_AST(s1, REG_SP, 0 * 4);
3251 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3252 codegen_addpatchref(cd, PATCHER_builtin_arraycheckcast,
3253 iptr->sx.s23.s3.c.ref, 0);
3256 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3257 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3260 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3262 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);
3264 d = codegen_reg_of_dst(jd, iptr, s1);
3268 emit_store_dst(jd, iptr, d);
/* INSTANCEOF: same two-shape structure as CHECKCAST (interface table probe
   vs. baseval/diffval range test) but produces a 0/1 int result in d
   instead of throwing. */
3271 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3272 /* val.a: (classinfo*) superclass */
3274 /* superclass is an interface:
3276 * return (sub != NULL) &&
3277 * (sub->vftbl->interfacetablelength > super->index) &&
3278 * (sub->vftbl->interfacetable[-super->index] != NULL);
3280 * superclass is a class:
3282 * return ((sub != NULL) && (0
3283 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3284 * super->vftbl->diffvall));
3289 vftbl_t *supervftbl;
3292 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3298 super = iptr->sx.s23.s3.c.cls;
3299 superindex = super->index;
3300 supervftbl = super->vftbl;
3303 #if defined(ENABLE_THREADS)
3304 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3307 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3308 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
3311 M_INTMOVE(s1, REG_ITMP1);
3317 /* if class is not resolved, check which code to call */
3319 if (super == NULL) {
3321 emit_label_beq(cd, BRANCH_LABEL_1);
3323 codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
3324 iptr->sx.s23.s3.c.ref, 0);
/* Immediate 0 is a placeholder patched to super->flags. */
3326 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3327 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3328 emit_label_beq(cd, BRANCH_LABEL_2);
3331 /* interface instanceof code */
3333 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3334 if (super != NULL) {
3336 emit_label_beq(cd, BRANCH_LABEL_3);
3339 M_ALD(REG_ITMP1, s1, OFFSET(java_objectheader, vftbl));
3341 if (super == NULL) {
3342 codegen_addpatchref(cd, PATCHER_instanceof_interface,
3343 iptr->sx.s23.s3.c.ref, 0);
3347 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3348 M_ISUB_IMM32(superindex, REG_ITMP3);
/* disp is a hand-computed byte length of the skipped instruction sequence
   (sizes per instruction in the comment-style breakdown below). */
3351 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3352 6 /* jcc */ + 5 /* mov_imm_reg */);
3355 M_ALD32(REG_ITMP1, REG_ITMP1,
3356 OFFSET(vftbl_t, interfacetable[0]) -
3357 superindex * sizeof(methodptr*));
3359 /* emit_setcc_reg(cd, CC_A, d); */
3360 /* emit_jcc(cd, CC_BE, 5); */
3365 emit_label_br(cd, BRANCH_LABEL_4);
3367 emit_label(cd, BRANCH_LABEL_3);
3370 /* class instanceof code */
3372 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3373 if (super == NULL) {
3374 emit_label(cd, BRANCH_LABEL_2);
3378 emit_label_beq(cd, BRANCH_LABEL_5);
3381 M_ALD(REG_ITMP1, s1, OFFSET(java_objectheader, vftbl));
3383 if (super == NULL) {
3384 codegen_addpatchref(cd, PATCHER_instanceof_class,
3385 iptr->sx.s23.s3.c.ref, 0);
3388 M_MOV_IMM(supervftbl, REG_ITMP2);
/* Load baseval/diffval under the thread-critical bracket, then compute
   (sub.baseval - super.baseval) and compare against diffval; d was cleared
   first because it may alias REG_ITMP2. */
3389 #if defined(ENABLE_THREADS)
3390 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3392 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3393 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3394 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3395 #if defined(ENABLE_THREADS)
3396 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3398 M_ISUB(REG_ITMP2, REG_ITMP1);
3399 M_CLR(d); /* may be REG_ITMP2 */
3400 M_CMP(REG_ITMP3, REG_ITMP1);
3405 emit_label(cd, BRANCH_LABEL_5);
3408 if (super == NULL) {
3409 emit_label(cd, BRANCH_LABEL_1);
3410 emit_label(cd, BRANCH_LABEL_4);
3413 emit_store_dst(jd, iptr, d);
/* MULTIANEWARRAY: copy the dimension sizes onto the stack (args 3..n),
   then call BUILTIN_multianewarray(argcount, classdescriptor, dims_ptr). */
3417 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3419 /* check for negative sizes and copy sizes to stack if necessary */
3421 MCODECHECK((iptr->s1.argcount << 1) + 64);
3423 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3424 /* copy SAVEDVAR sizes to stack */
3425 var = VAR(iptr->sx.s23.s2.args[s1]);
3427 /* Already Preallocated? */
3428 if (!(var->flags & PREALLOC)) {
3429 if (var->flags & INMEMORY) {
3430 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff * 4);
3431 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3434 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3438 /* is a patcher function set? */
3440 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3441 codegen_addpatchref(cd, PATCHER_builtin_multianewarray,
3442 iptr->sx.s23.s3.c.ref, 0);
3448 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3450 /* a0 = dimension count */
3452 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3454 /* a1 = arraydescriptor */
3456 M_IST_IMM(disp, REG_SP, 1 * 4);
3458 /* a2 = pointer to dimensions = stack pointer */
/* The dimension block starts 3 words above SP (after the 3 builtin args). */
3460 M_MOV(REG_SP, REG_ITMP1);
3461 M_AADD_IMM(3 * 4, REG_ITMP1);
3462 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3464 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3467 /* check for exception before result assignment */
3469 emit_exception_check(cd, iptr);
3471 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3472 M_INTMOVE(REG_RESULT, s1);
3473 emit_store_dst(jd, iptr, s1);

/* default: unknown opcode is an internal error. */
3477 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3482 } /* for instruction */
3486 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3489 #if defined(ENABLE_SSA)
3491 /* by edge splitting, in Blocks with phi moves there can only */
3492 /* be a goto as last command, no other Jump/Branch Command */
3493 if (!last_cmd_was_goto)
3494 codegen_insert_phi_moves(jd, bptr);
3499 /* At the end of a basic block we may have to append some nops,
3500 because the patcher stub calling code might be longer than the
3501 actual instruction. So codepatching does not change the
3502 following block unintentionally. */
3504 if (cd->mcodeptr < cd->lastmcodeptr) {
3505 while (cd->mcodeptr < cd->lastmcodeptr) {
3510 } /* if (bptr -> flags >= BBREACHED) */
3511 } /* for basic block */
/* Method-level finalization: line number table, patcher stubs, and
   replacement stubs. */
3513 dseg_createlinenumbertable(cd);
3515 /* generate stubs */
3517 emit_patcher_stubs(jd);
3518 REPLACEMENT_EMIT_STUBS(jd);
3520 /* everything's ok */
3525 #if defined(ENABLE_SSA)
/* codegen_insert_phi_moves *****************************************************

   Emit the register/memory moves required by SSA phi functions at the end
   of basic block bptr.  For each recorded phi move (target index t_a,
   source index s_a) the type, flags and register/stack offset of both
   operands are looked up (local-variable lifetimes vs. stackslot
   lifetimes) and the actual move is delegated to cg_move().

   NOTE(review): interior lines (ls/cd setup, the branch structure around
   the lifetime lookups, and early-return paths for joined lifetimes) are
   elided in this extract; comments describe only the visible lines.

*******************************************************************************/
3526 void codegen_insert_phi_moves(jitdata *jd, basicblock *bptr) {
3527 /* look for phi moves */
3528 int t_a,s_a,i, type;
3529 int t_lt, s_lt; /* lifetime indices of phi_moves */
3530 s4 t_regoff, s_regoff, s_flags, t_flags;
/* Iterate highest phi index first — required by the conflict-resolution
   ordering, as the comment below states. */
3539 /* Moves from phi functions with highest indices have to be */
3540 /* inserted first, since this is the order as is used for */
3541 /* conflict resolution */
3542 for(i = ls->num_phi_moves[bptr->nr] - 1; i >= 0 ; i--) {
3543 t_a = ls->phi_moves[bptr->nr][i][0];
3544 s_a = ls->phi_moves[bptr->nr][i][1];
3545 #if defined(SSA_DEBUG_VERBOSE)
3547 printf("BB %3i Move %3i <- %3i ", bptr->nr, t_a, s_a);
/* Target operand: lifetime index and type, from either the local-variable
   or the stackslot lifetime table. */
3550 /* local var lifetimes */
3551 t_lt = ls->maxlifetimes + t_a;
3552 type = ls->lifetime[t_lt].type;
3556 type = ls->lifetime[t_lt].local_ss->s->type;
3557 /* stackslot lifetime */
3561 #if defined(SSA_DEBUG_VERBOSE)
3563 printf("...returning - phi lifetimes where joined\n");
/* Source operand: same lookup scheme. */
3569 /* local var lifetimes */
3570 s_lt = ls->maxlifetimes + s_a;
3571 type = ls->lifetime[s_lt].type;
3575 type = ls->lifetime[s_lt].type;
3576 /* stackslot lifetime */
3580 #if defined(SSA_DEBUG_VERBOSE)
3582 printf("...returning - phi lifetimes where joined\n");
/* Resolve flags (in-memory vs. register) and reg/stack offsets for both
   operands. */
3588 t_flags = VAR(t_a)->flags;
3589 t_regoff = VAR(t_a)->vv.regoff;
3593 t_flags = ls->lifetime[t_lt].local_ss->s->flags;
3594 t_regoff = ls->lifetime[t_lt].local_ss->s->regoff;
3598 /* local var move */
3599 s_flags = VAR(s_a)->flags;
3600 s_regoff = VAR(s_a)->vv.regoff;
3602 /* stackslot lifetime */
3603 s_flags = ls->lifetime[s_lt].local_ss->s->flags;
3604 s_regoff = ls->lifetime[s_lt].local_ss->s->regoff;
3608 #if defined(SSA_DEBUG_VERBOSE)
3610 printf("...returning - phi lifetimes where joined\n");
/* Emit the move itself. */
3615 cg_move(cd, type, s_regoff, s_flags, t_regoff, t_flags);
/* Debug trace of the move kind: M = memory slot, R = register. */
3617 #if defined(SSA_DEBUG_VERBOSE)
3618 if (compileverbose) {
3619 if (IS_INMEMORY(t_flags) && IS_INMEMORY(s_flags)) {
3621 printf("M%3i <- M%3i",t_regoff,s_regoff);
3623 else if (IS_INMEMORY(s_flags)) {
3625 printf("R%3i <- M%3i",t_regoff,s_regoff);
3627 else if (IS_INMEMORY(t_flags)) {
3629 printf("M%3i <- R%3i",t_regoff,s_regoff);
3633 printf("R%3i <- R%3i",t_regoff,s_regoff);
3637 #endif /* defined(SSA_DEBUG_VERBOSE) */
/* cg_move **********************************************************************

   Emit a single data move for a phi resolution (SSA): copies a value of
   the given type from (src_regoff, src_flags) to (dst_regoff, dst_flags).
   Offsets are in 4-byte stack words when the operand is IN MEMORY, or a
   register number otherwise.  Memory-to-memory moves go through the x87
   stack (flt/dbl) or REG_ITMP1 (int/long, one word at a time); on i386
   only 32-bit integers can live in registers, so flt/dbl or long operands
   outside memory are reported via log_text. */
3641 void cg_move(codegendata *cd, s4 type, s4 src_regoff, s4 src_flags,
3642 s4 dst_regoff, s4 dst_flags) {
3643 if ((IS_INMEMORY(dst_flags)) && (IS_INMEMORY(src_flags))) {
/* memory -> memory: nothing to do when source and target slot coincide. */
3645 if (dst_regoff != src_regoff) {
3646 if (!IS_2_WORD_TYPE(type)) {
3647 if (IS_FLT_DBL_TYPE(type)) {
/* single-precision float via x87 load/store-pop */
3648 emit_flds_membase(cd, REG_SP, src_regoff * 4);
3649 emit_fstps_membase(cd, REG_SP, dst_regoff * 4);
/* 32-bit int via the scratch register */
3651 emit_mov_membase_reg(cd, REG_SP, src_regoff * 4,
3653 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, dst_regoff * 4);
3655 } else { /* LONG OR DOUBLE */
3656 if (IS_FLT_DBL_TYPE(type)) {
3657 emit_fldl_membase( cd, REG_SP, src_regoff * 4);
3658 emit_fstpl_membase(cd, REG_SP, dst_regoff * 4);
/* 64-bit long: copy low word, then high word (offset + 4) */
3660 emit_mov_membase_reg(cd, REG_SP, src_regoff * 4,
3662 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, dst_regoff * 4);
3663 emit_mov_membase_reg(cd, REG_SP, src_regoff * 4 + 4,
3665 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP,
3666 dst_regoff * 4 + 4);
/* At least one side is a register: only 32-bit ints are legal here. */
3671 if (IS_FLT_DBL_TYPE(type)) {
3672 log_text("cg_move: flt/dbl type have to be in memory\n");
3675 if (IS_2_WORD_TYPE(type)) {
3676 log_text("cg_move: longs have to be in memory\n");
3679 if (IS_INMEMORY(src_flags)) {
3681 emit_mov_membase_reg(cd, REG_SP, src_regoff * 4, dst_regoff);
3682 } else if (IS_INMEMORY(dst_flags)) {
3684 emit_mov_reg_membase(cd, src_regoff, REG_SP, dst_regoff * 4);
3687 /* only ints can be in regs on i386 */
3688 M_INTMOVE(src_regoff,dst_regoff);
3692 #endif /* defined(ENABLE_SSA) */
3695 /* codegen_emit_stub_compiler **************************************************
3697 Emit a stub routine which calls the compiler.
3699 *******************************************************************************/
/* Loads the methodinfo pointer into REG_ITMP1 and the address of
   asm_call_jit_compiler into REG_ITMP3.  (The jump through REG_ITMP3 is in
   lines elided from this extract.) */
3701 void codegen_emit_stub_compiler(jitdata *jd)
3706 /* get required compiler data */
3711 /* code for the stub */
3713 M_MOV_IMM(m, REG_ITMP1);
3714 M_MOV_IMM(asm_call_jit_compiler, REG_ITMP3);
3719 /* codegen_emit_stub_native ****************************************************
3721 Emits a stub routine which calls a native method.
3723 *******************************************************************************/
/* NOTE(review): interior lines (variable declarations, the actual call
   instructions, exception-pointer load) are elided in this extract. */
3725 void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f)
3733 s4 i, j; /* count variables */
3737 /* get required compiler data */
/* JNI needs the env pointer and, for static methods, the class as extra
   leading arguments. */
3744 /* set some variables */
3747 nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;
3749 /* calculate stackframe size */
3751 cd->stackframesize =
3752 sizeof(stackframeinfo) / SIZEOF_VOID_P +
3753 sizeof(localref_table) / SIZEOF_VOID_P +
3754 1 + /* function pointer */
3755 4 * 4 + /* 4 arguments (start_native_call) */
/* Rounding the word count up to (4k+3) words keeps SP 16-byte aligned
   after the frame is pushed. */
3758 /* keep stack 16-byte aligned */
3760 cd->stackframesize |= 0x3;
3762 /* create method header */
3764 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3765 (void) dseg_add_unique_s4(cd, cd->stackframesize * 4); /* FrameSize */
3766 (void) dseg_add_unique_s4(cd, 0); /* IsSync */
3767 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3768 (void) dseg_add_unique_s4(cd, 0); /* IntSave */
3769 (void) dseg_add_unique_s4(cd, 0); /* FltSave */
3770 (void) dseg_addlinenumbertablesize(cd);
3771 (void) dseg_add_unique_s4(cd, 0); /* ExTableSize */
3773 #if defined(ENABLE_PROFILING)
3774 /* generate native method profiling code */
3776 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
3777 /* count frequency */
3779 M_MOV_IMM(code, REG_ITMP1);
3780 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
3784 /* calculate stackframe size for native function */
3786 M_ASUB_IMM(cd->stackframesize * 4, REG_SP);
3788 #if !defined(NDEBUG)
3789 emit_verbosecall_enter(jd);
3792 /* get function address (this must happen before the stackframeinfo) */
/* With dynamic classpath the native address is resolved lazily via a
   patcher; the (possibly patched) address is stored at SP + 4*4. */
3794 #if !defined(WITH_STATIC_CLASSPATH)
3796 codegen_addpatchref(cd, PATCHER_resolve_native, m, 0);
3799 M_AST_IMM((ptrint) f, REG_SP, 4 * 4);
3801 /* Mark the whole fpu stack as free for native functions (only for saved */
3802 /* register count == 0). */
3804 emit_ffree_reg(cd, 0);
3805 emit_ffree_reg(cd, 1);
3806 emit_ffree_reg(cd, 2);
3807 emit_ffree_reg(cd, 3);
3808 emit_ffree_reg(cd, 4);
3809 emit_ffree_reg(cd, 5);
3810 emit_ffree_reg(cd, 6);
3811 emit_ffree_reg(cd, 7);
3813 /* prepare data structures for native function call */
/* Arguments for codegen_start_native_call: frame top (ITMP1), 0, address
   of the return address slot (ITMP2), and the native function address. */
3815 M_MOV(REG_SP, REG_ITMP1);
3816 M_AADD_IMM(cd->stackframesize * 4, REG_ITMP1);
3818 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3819 M_IST_IMM(0, REG_SP, 1 * 4);
3822 M_MOV(REG_SP, REG_ITMP2);
3823 M_AADD_IMM(cd->stackframesize * 4 + SIZEOF_VOID_P, REG_ITMP2);
3825 M_AST(REG_ITMP2, REG_SP, 2 * 4);
3826 M_ALD(REG_ITMP3, REG_SP, cd->stackframesize * 4);
3827 M_AST(REG_ITMP3, REG_SP, 3 * 4);
3828 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
3831 M_ALD(REG_ITMP3, REG_SP, 4 * 4);
3833 /* copy arguments into new stackframe */
/* j indexes the native descriptor, shifted by nativeparams to leave room
   for env (and class for static methods). */
3835 for (i = md->paramcount - 1, j = i + nativeparams; i >= 0; i--, j--) {
3836 t = md->paramtypes[i].type;
3838 if (!md->params[i].inmemory) {
3839 /* no integer argument registers */
3840 } else { /* float/double in memory can be copied like int/longs */
3841 s1 = (md->params[i].regoff + cd->stackframesize + 1) * 4;
3842 s2 = nmd->params[j].regoff * 4;
3844 M_ILD(REG_ITMP1, REG_SP, s1);
3845 M_IST(REG_ITMP1, REG_SP, s2);
3846 if (IS_2_WORD_TYPE(t)) {
3847 M_ILD(REG_ITMP1, REG_SP, s1 + 4);
3848 M_IST(REG_ITMP1, REG_SP, s2 + 4);
3853 /* if function is static, put class into second argument */
3855 if (m->flags & ACC_STATIC)
3856 M_AST_IMM(m->class, REG_SP, 1 * 4);
3858 /* put env into first argument */
3860 M_AST_IMM(_Jv_env, REG_SP, 0 * 4);
3862 /* call the native function */
3866 /* save return value */
/* Int/long results come back in REG_RESULT(2); float results on the x87
   stack.  They are spilled across the codegen_finish_native_call call. */
3868 if (md->returntype.type != TYPE_VOID) {
3869 if (IS_INT_LNG_TYPE(md->returntype.type)) {
3870 if (IS_2_WORD_TYPE(md->returntype.type))
3871 M_IST(REG_RESULT2, REG_SP, 2 * 4);
3872 M_IST(REG_RESULT, REG_SP, 1 * 4);
3875 if (IS_2_WORD_TYPE(md->returntype.type))
3876 emit_fstl_membase(cd, REG_SP, 1 * 4);
3878 emit_fsts_membase(cd, REG_SP, 1 * 4);
3882 #if !defined(NDEBUG)
3883 emit_verbosecall_exit(jd);
3886 /* remove native stackframe info */
3888 M_MOV(REG_SP, REG_ITMP1);
3889 M_AADD_IMM(cd->stackframesize * 4, REG_ITMP1);
3891 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3892 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
3894 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
3896 /* restore return value */
3898 if (md->returntype.type != TYPE_VOID) {
3899 if (IS_INT_LNG_TYPE(md->returntype.type)) {
3900 if (IS_2_WORD_TYPE(md->returntype.type))
3901 M_ILD(REG_RESULT2, REG_SP, 2 * 4);
3902 M_ILD(REG_RESULT, REG_SP, 1 * 4);
3905 if (IS_2_WORD_TYPE(md->returntype.type))
3906 emit_fldl_membase(cd, REG_SP, 1 * 4);
3908 emit_flds_membase(cd, REG_SP, 1 * 4);
3912 M_AADD_IMM(cd->stackframesize * 4, REG_SP);
3914 /* check for exception */
3921 /* handle exception */
/* Forward a pending exception: exception object in ITMP1_XPTR, faulting
   pc (return address - 2) in ITMP2_XPC, then jump to the asm handler. */
3923 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
3924 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
3925 M_ASUB_IMM(2, REG_ITMP2_XPC);
3927 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
3930 /* generate patcher stubs */
3932 emit_patcher_stubs(jd);
3937 * These are local overrides for various environment variables in Emacs.
3938 * Please do not remove this and leave it at the end of the file, where
3939 * Emacs will automagically detect them.
3940 * ---------------------------------------------------------------------
3943 * indent-tabs-mode: t
3947 * vim:noexpandtab:sw=4:ts=4: