1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
25 $Id: codegen.c 7448 2007-03-04 14:46:21Z edwin $
37 #include "vm/jit/i386/md-abi.h"
39 #include "vm/jit/i386/codegen.h"
40 #include "vm/jit/i386/emit.h"
42 #include "mm/memory.h"
43 #include "native/jni.h"
44 #include "native/native.h"
46 #if defined(ENABLE_THREADS)
47 # include "threads/native/lock.h"
50 #include "vm/builtin.h"
51 #include "vm/exceptions.h"
52 #include "vm/global.h"
53 #include "vm/stringlocal.h"
56 #include "vm/jit/asmpart.h"
57 #include "vm/jit/codegen-common.h"
58 #include "vm/jit/dseg.h"
59 #include "vm/jit/emit-common.h"
60 #include "vm/jit/jit.h"
61 #include "vm/jit/parse.h"
62 #include "vm/jit/patcher.h"
63 #include "vm/jit/reg.h"
64 #include "vm/jit/replace.h"
65 #include "vm/jit/stacktrace.h"
67 #if defined(ENABLE_SSA)
68 # include "vm/jit/optimizing/lsra.h"
69 # include "vm/jit/optimizing/ssa.h"
70 #elif defined(ENABLE_LSRA)
71 # include "vm/jit/allocator/lsra.h"
74 #include "vmcore/loader.h"
75 #include "vmcore/options.h"
76 #include "vmcore/utf8.h"
79 /* codegen *********************************************************************
81 Generates machine code.
83 *******************************************************************************/
85 #if defined(ENABLE_SSA)
86 void cg_move(codegendata *cd, s4 type, s4 src_regoff, s4 src_flags,
87 s4 dst_regoff, s4 dst_flags);
88 void codegen_insert_phi_moves(jitdata *jd, basicblock *bptr);
91 bool codegen(jitdata *jd)
97 s4 len, s1, s2, s3, d, disp;
103 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
104 builtintable_entry *bte;
108 #if defined(ENABLE_SSA)
110 bool last_cmd_was_goto;
112 last_cmd_was_goto = false;
116 /* get required compiler data */
123 /* prevent compiler warnings */
133 s4 savedregs_num = 0;
136 /* space to save used callee saved registers */
138 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
140 /* float register are saved on 2 4-byte stackslots */
141 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse) * 2;
143 cd->stackframesize = rd->memuse + savedregs_num;
146 #if defined(ENABLE_THREADS)
147 /* space to save argument of monitor_enter */
149 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
150 /* reserve 2 slots for long/double return values for monitorexit */
152 if (IS_2_WORD_TYPE(m->parseddesc->returntype.type))
153 cd->stackframesize += 2;
155 cd->stackframesize++;
159 /* create method header */
161 /* Keep stack of non-leaf functions 16-byte aligned. */
163 if (!jd->isleafmethod)
164 cd->stackframesize |= 0x3;
166 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
167 (void) dseg_add_unique_s4(cd, cd->stackframesize * 4); /* FrameSize */
169 #if defined(ENABLE_THREADS)
170 /* IsSync contains the offset relative to the stack pointer for the
171 argument of monitor_exit used in the exception handler. Since the
172 offset could be zero and give a wrong meaning of the flag it is
176 if (checksync && (m->flags & ACC_SYNCHRONIZED))
177 (void) dseg_add_unique_s4(cd, (rd->memuse + 1) * 4); /* IsSync */
180 (void) dseg_add_unique_s4(cd, 0); /* IsSync */
182 (void) dseg_add_unique_s4(cd, jd->isleafmethod); /* IsLeaf */
183 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
184 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
186 /* adds a reference for the length of the line number counter. We don't
187 know the size yet, since we evaluate the information during code
188 generation, to save one additional iteration over the whole
189 instructions. During code optimization the position could have changed
190 to the information gotten from the class file */
191 (void) dseg_addlinenumbertablesize(cd);
193 (void) dseg_add_unique_s4(cd, jd->exceptiontablelength); /* ExTableSize */
195 /* create exception table */
197 for (ex = jd->exceptiontable; ex != NULL; ex = ex->down) {
198 dseg_add_target(cd, ex->start);
199 dseg_add_target(cd, ex->end);
200 dseg_add_target(cd, ex->handler);
201 (void) dseg_add_unique_address(cd, ex->catchtype.any);
204 #if defined(ENABLE_PROFILING)
205 /* generate method profiling code */
207 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
208 /* count frequency */
210 M_MOV_IMM(code, REG_ITMP3);
211 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
215 /* create stack frame (if necessary) */
217 if (cd->stackframesize)
218 M_ASUB_IMM(cd->stackframesize * 4, REG_SP);
220 /* save return address and used callee saved registers */
222 p = cd->stackframesize;
223 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
224 p--; M_AST(rd->savintregs[i], REG_SP, p * 4);
226 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
227 p-=2; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 4);
230 /* take arguments out of register or stack frame */
235 for (p = 0, l = 0; p < md->paramcount; p++) {
236 t = md->paramtypes[p].type;
238 #if defined(ENABLE_SSA)
243 varindex = jd->local_map[l * 5 + t];
245 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
248 if (varindex == UNUSED)
253 s1 = md->params[p].regoff;
255 if (IS_INT_LNG_TYPE(t)) { /* integer args */
256 if (!md->params[p].inmemory) { /* register arguments */
257 log_text("integer register argument");
259 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
260 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
262 else { /* reg arg -> spilled */
263 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
266 else { /* stack arguments */
267 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
268 emit_mov_membase_reg( /* + 4 for return address */
269 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4, var->vv.regoff);
270 /* + 4 for return address */
272 else { /* stack arg -> spilled */
273 if (!IS_2_WORD_TYPE(t)) {
274 #if defined(ENABLE_SSA)
275 /* no copy avoiding by now possible with SSA */
277 emit_mov_membase_reg( /* + 4 for return address */
278 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4,
280 emit_mov_reg_membase(
281 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
284 #endif /*defined(ENABLE_SSA)*/
285 /* reuse Stackslot and avoid copying */
286 var->vv.regoff = cd->stackframesize + s1 + 1;
290 #if defined(ENABLE_SSA)
291 /* no copy avoiding by now possible with SSA */
293 emit_mov_membase_reg( /* + 4 for return address */
294 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4,
296 emit_mov_reg_membase(
297 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
298 emit_mov_membase_reg( /* + 4 for return address */
299 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4 + 4,
301 emit_mov_reg_membase(
302 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4 + 4);
305 #endif /*defined(ENABLE_SSA)*/
306 /* reuse Stackslot and avoid copying */
307 var->vv.regoff = cd->stackframesize + s1 + 1;
312 else { /* floating args */
313 if (!md->params[p].inmemory) { /* register arguments */
314 log_text("There are no float argument registers!");
316 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
317 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
318 } else { /* reg arg -> spilled */
319 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 4 */
323 else { /* stack arguments */
324 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
327 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
329 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
334 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
336 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
339 } else { /* stack-arg -> spilled */
340 #if defined(ENABLE_SSA)
341 /* no copy avoiding by now possible with SSA */
343 emit_mov_membase_reg(
344 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4, REG_ITMP1);
345 emit_mov_reg_membase(
346 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
349 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
350 emit_fstps_membase(cd, REG_SP, var->vv.regoff * 4);
354 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
355 emit_fstpl_membase(cd, REG_SP, var->vv.regoff * 4);
359 #endif /*defined(ENABLE_SSA)*/
360 /* reuse Stackslot and avoid copying */
361 var->vv.regoff = cd->stackframesize + s1 + 1;
367 /* call monitorenter function */
369 #if defined(ENABLE_THREADS)
370 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
373 if (m->flags & ACC_STATIC) {
374 M_MOV_IMM(&m->class->object.header, REG_ITMP1);
377 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 4 + 4);
380 codegen_add_nullpointerexception_ref(cd);
383 M_AST(REG_ITMP1, REG_SP, s1 * 4);
384 M_AST(REG_ITMP1, REG_SP, 0 * 4);
385 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
391 emit_verbosecall_enter(jd);
396 #if defined(ENABLE_SSA)
397 /* with SSA Header is Basic Block 0 - insert phi Moves if necessary */
399 codegen_insert_phi_moves(jd, ls->basicblocks[0]);
402 /* end of header generation */
404 /* create replacement points */
406 REPLACEMENT_POINTS_INIT(cd, jd);
408 /* walk through all basic blocks */
410 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
412 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
414 if (bptr->flags >= BBREACHED) {
415 /* branch resolving */
417 codegen_resolve_branchrefs(cd, bptr);
419 /* handle replacement points */
421 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
423 #if defined(ENABLE_REPLACEMENT)
424 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
425 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
427 disp = (s4) &(m->hitcountdown);
428 M_ISUB_IMM_MEMABS(1, disp);
434 /* copy interface registers to their destination */
439 #if defined(ENABLE_PROFILING)
440 /* generate basic block profiling code */
442 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
443 /* count frequency */
445 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
446 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
450 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
451 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
454 # if defined(ENABLE_SSA)
456 last_cmd_was_goto = false;
460 var = VAR(bptr->invars[len]);
461 if (bptr->type != BBTYPE_STD) {
462 if (!IS_2_WORD_TYPE(var->type)) {
463 if (bptr->type == BBTYPE_EXH) {
464 d = codegen_reg_of_var(0, var, REG_ITMP1);
465 M_INTMOVE(REG_ITMP1, d);
466 emit_store(jd, NULL, var, d);
470 log_text("copy interface registers(EXH, SBR): longs \
471 have to be in memory (begin 1)");
479 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
483 var = VAR(bptr->invars[len]);
484 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
485 if (!IS_2_WORD_TYPE(var->type)) {
486 if (bptr->type == BBTYPE_EXH) {
487 d = codegen_reg_of_var(0, var, REG_ITMP1);
488 M_INTMOVE(REG_ITMP1, d);
489 emit_store(jd, NULL, var, d);
493 log_text("copy interface registers: longs have to be in \
500 assert((var->flags & INOUT));
505 /* walk through all instructions */
510 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
511 if (iptr->line != currentline) {
512 dseg_addlinenumber(cd, iptr->line);
513 currentline = iptr->line;
516 MCODECHECK(1024); /* 1kB should be enough */
519 case ICMD_NOP: /* ... ==> ... */
520 case ICMD_POP: /* ..., value ==> ... */
521 case ICMD_POP2: /* ..., value, value ==> ... */
524 case ICMD_INLINE_START:
526 REPLACEMENT_POINT_INLINE_START(cd, iptr);
529 case ICMD_INLINE_BODY:
531 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
532 dseg_addlinenumber_inline_start(cd, iptr);
533 dseg_addlinenumber(cd, iptr->line);
536 case ICMD_INLINE_END:
538 dseg_addlinenumber_inline_end(cd, iptr);
539 dseg_addlinenumber(cd, iptr->line);
542 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
544 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
545 emit_nullpointer_check(cd, iptr, s1);
548 /* constant operations ************************************************/
550 case ICMD_ICONST: /* ... ==> ..., constant */
552 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
553 ICONST(d, iptr->sx.val.i);
554 emit_store_dst(jd, iptr, d);
557 case ICMD_LCONST: /* ... ==> ..., constant */
559 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
560 LCONST(d, iptr->sx.val.l);
561 emit_store_dst(jd, iptr, d);
564 case ICMD_FCONST: /* ... ==> ..., constant */
566 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
567 if (iptr->sx.val.f == 0.0) {
571 if (iptr->sx.val.i == 0x80000000) {
575 } else if (iptr->sx.val.f == 1.0) {
578 } else if (iptr->sx.val.f == 2.0) {
584 disp = dseg_add_float(cd, iptr->sx.val.f);
585 emit_mov_imm_reg(cd, 0, REG_ITMP1);
587 emit_flds_membase(cd, REG_ITMP1, disp);
589 emit_store_dst(jd, iptr, d);
592 case ICMD_DCONST: /* ... ==> ..., constant */
594 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
595 if (iptr->sx.val.d == 0.0) {
599 if (iptr->sx.val.l == 0x8000000000000000LL) {
603 } else if (iptr->sx.val.d == 1.0) {
606 } else if (iptr->sx.val.d == 2.0) {
612 disp = dseg_add_double(cd, iptr->sx.val.d);
613 emit_mov_imm_reg(cd, 0, REG_ITMP1);
615 emit_fldl_membase(cd, REG_ITMP1, disp);
617 emit_store_dst(jd, iptr, d);
620 case ICMD_ACONST: /* ... ==> ..., constant */
622 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
624 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
625 codegen_addpatchref(cd, PATCHER_aconst,
626 iptr->sx.val.c.ref, 0);
628 if (opt_showdisassemble) {
629 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
635 if (iptr->sx.val.anyptr == NULL)
638 M_MOV_IMM(iptr->sx.val.anyptr, d);
640 emit_store_dst(jd, iptr, d);
644 /* load/store/copy/move operations ************************************/
658 emit_copy(jd, iptr, VAROP(iptr->s1), VAROP(iptr->dst));
662 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
663 emit_copy(jd, iptr, VAROP(iptr->s1), VAROP(iptr->dst));
667 /* integer operations *************************************************/
669 case ICMD_INEG: /* ..., value ==> ..., - value */
671 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
672 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
675 emit_store_dst(jd, iptr, d);
678 case ICMD_LNEG: /* ..., value ==> ..., - value */
680 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
681 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
683 M_NEG(GET_LOW_REG(d));
684 M_IADDC_IMM(0, GET_HIGH_REG(d));
685 M_NEG(GET_HIGH_REG(d));
686 emit_store_dst(jd, iptr, d);
689 case ICMD_I2L: /* ..., value ==> ..., value */
691 s1 = emit_load_s1(jd, iptr, EAX);
692 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
695 M_LNGMOVE(EAX_EDX_PACKED, d);
696 emit_store_dst(jd, iptr, d);
699 case ICMD_L2I: /* ..., value ==> ..., value */
701 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
702 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
704 emit_store_dst(jd, iptr, d);
707 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
709 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
710 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
714 emit_store_dst(jd, iptr, d);
717 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
719 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
720 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
722 emit_store_dst(jd, iptr, d);
725 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
727 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
728 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
730 emit_store_dst(jd, iptr, d);
734 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
736 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
737 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
738 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
745 emit_store_dst(jd, iptr, d);
749 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
750 /* sx.val.i = constant */
752 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
753 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
755 /* `inc reg' is slower on p4's (regarding to ia32
756 optimization reference manual and benchmarks) and as
760 M_IADD_IMM(iptr->sx.val.i, d);
761 emit_store_dst(jd, iptr, d);
764 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
766 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
767 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
768 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
769 M_INTMOVE(s1, GET_LOW_REG(d));
770 M_IADD(s2, GET_LOW_REG(d));
771 /* don't use REG_ITMP1 */
772 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
773 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
774 M_INTMOVE(s1, GET_HIGH_REG(d));
775 M_IADDC(s2, GET_HIGH_REG(d));
776 emit_store_dst(jd, iptr, d);
779 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
780 /* sx.val.l = constant */
782 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
783 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
785 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
786 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
787 emit_store_dst(jd, iptr, d);
790 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
792 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
793 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
794 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
796 M_INTMOVE(s1, REG_ITMP1);
797 M_ISUB(s2, REG_ITMP1);
798 M_INTMOVE(REG_ITMP1, d);
804 emit_store_dst(jd, iptr, d);
807 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
808 /* sx.val.i = constant */
810 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
811 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
813 M_ISUB_IMM(iptr->sx.val.i, d);
814 emit_store_dst(jd, iptr, d);
817 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
819 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
820 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
821 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
822 if (s2 == GET_LOW_REG(d)) {
823 M_INTMOVE(s1, REG_ITMP1);
824 M_ISUB(s2, REG_ITMP1);
825 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
828 M_INTMOVE(s1, GET_LOW_REG(d));
829 M_ISUB(s2, GET_LOW_REG(d));
831 /* don't use REG_ITMP1 */
832 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
833 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
834 if (s2 == GET_HIGH_REG(d)) {
835 M_INTMOVE(s1, REG_ITMP2);
836 M_ISUBB(s2, REG_ITMP2);
837 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
840 M_INTMOVE(s1, GET_HIGH_REG(d));
841 M_ISUBB(s2, GET_HIGH_REG(d));
843 emit_store_dst(jd, iptr, d);
846 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
847 /* sx.val.l = constant */
849 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
850 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
852 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
853 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
854 emit_store_dst(jd, iptr, d);
857 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
859 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
860 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
861 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
868 emit_store_dst(jd, iptr, d);
871 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
872 /* sx.val.i = constant */
874 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
875 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
876 M_IMUL_IMM(s1, iptr->sx.val.i, d);
877 emit_store_dst(jd, iptr, d);
880 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
882 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
883 s2 = emit_load_s2_low(jd, iptr, EDX);
884 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
886 M_INTMOVE(s1, REG_ITMP2);
887 M_IMUL(s2, REG_ITMP2);
889 s1 = emit_load_s1_low(jd, iptr, EAX);
890 s2 = emit_load_s2_high(jd, iptr, EDX);
893 M_IADD(EDX, REG_ITMP2);
895 s1 = emit_load_s1_low(jd, iptr, EAX);
896 s2 = emit_load_s2_low(jd, iptr, EDX);
899 M_INTMOVE(EAX, GET_LOW_REG(d));
900 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
902 emit_store_dst(jd, iptr, d);
905 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
906 /* sx.val.l = constant */
908 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
909 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
910 ICONST(EAX, iptr->sx.val.l);
912 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
913 M_IADD(REG_ITMP2, EDX);
914 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
915 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
916 M_IADD(REG_ITMP2, EDX);
917 M_LNGMOVE(EAX_EDX_PACKED, d);
918 emit_store_dst(jd, iptr, d);
921 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
923 s1 = emit_load_s1(jd, iptr, EAX);
924 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
925 d = codegen_reg_of_dst(jd, iptr, EAX);
926 emit_arithmetic_check(cd, iptr, s2);
928 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
930 /* check as described in jvm spec */
932 M_CMP_IMM(0x80000000, EAX);
939 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
940 emit_store_dst(jd, iptr, d);
943 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
945 s1 = emit_load_s1(jd, iptr, EAX);
946 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
947 d = codegen_reg_of_dst(jd, iptr, EDX);
948 emit_arithmetic_check(cd, iptr, s2);
950 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
952 /* check as described in jvm spec */
954 M_CMP_IMM(0x80000000, EAX);
962 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
963 emit_store_dst(jd, iptr, d);
966 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
967 /* sx.val.i = constant */
969 /* TODO: optimize for `/ 2' */
970 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
971 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
975 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
976 M_SRA_IMM(iptr->sx.val.i, d);
977 emit_store_dst(jd, iptr, d);
980 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
981 /* sx.val.i = constant */
983 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
984 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
986 M_MOV(s1, REG_ITMP1);
990 M_AND_IMM(iptr->sx.val.i, d);
992 M_BGE(2 + 2 + 6 + 2);
993 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
995 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
997 emit_store_dst(jd, iptr, d);
1000 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1001 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1003 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
1004 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1006 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
1007 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
1008 /* XXX could be optimized */
1009 emit_arithmetic_check(cd, iptr, REG_ITMP3);
1011 bte = iptr->sx.s23.s3.bte;
1014 M_LST(s2, REG_SP, 2 * 4);
1016 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1017 M_LST(s1, REG_SP, 0 * 4);
1019 M_MOV_IMM(bte->fp, REG_ITMP3);
1021 emit_store_dst(jd, iptr, d);
1024 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1025 /* sx.val.i = constant */
1027 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1028 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1030 M_TEST(GET_HIGH_REG(d));
1032 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1033 M_IADDC_IMM(0, GET_HIGH_REG(d));
1034 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1035 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1036 emit_store_dst(jd, iptr, d);
1040 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1041 /* sx.val.l = constant */
1043 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1044 if (iptr->dst.var->flags & INMEMORY) {
1045 if (iptr->s1.var->flags & INMEMORY) {
1046 /* Alpha algorithm */
1048 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 4);
1050 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 4 + 4);
1056 /* TODO: hmm, don't know if this is always correct */
1058 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1060 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1066 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4, REG_ITMP1);
1067 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4 + 4, REG_ITMP2);
1069 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1070 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1071 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 4 + 4);
1072 emit_jcc(cd, CC_GE, disp);
1074 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4, REG_ITMP1);
1075 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4 + 4, REG_ITMP2);
1077 emit_neg_reg(cd, REG_ITMP1);
1078 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1079 emit_neg_reg(cd, REG_ITMP2);
1081 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1082 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1084 emit_neg_reg(cd, REG_ITMP1);
1085 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1086 emit_neg_reg(cd, REG_ITMP2);
1088 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 4);
1089 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 4 + 4);
1093 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1094 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1096 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1097 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1098 M_TEST(GET_LOW_REG(s1));
1104 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1106 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1107 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1108 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1109 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1112 emit_store_dst(jd, iptr, d);
1115 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1116 /* sx.val.i = constant */
1118 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1119 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1121 M_SLL_IMM(iptr->sx.val.i, d);
1122 emit_store_dst(jd, iptr, d);
1125 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1127 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1128 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1129 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1130 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1133 emit_store_dst(jd, iptr, d);
1136 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1137 /* sx.val.i = constant */
1139 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1140 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1142 M_SRA_IMM(iptr->sx.val.i, d);
1143 emit_store_dst(jd, iptr, d);
1146 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1148 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1149 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1150 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1151 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1154 emit_store_dst(jd, iptr, d);
1157 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1158 /* sx.val.i = constant */
1160 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1161 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1163 M_SRL_IMM(iptr->sx.val.i, d);
1164 emit_store_dst(jd, iptr, d);
1167 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1169 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1170 s2 = emit_load_s2(jd, iptr, ECX);
1171 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1174 M_TEST_IMM(32, ECX);
1176 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1177 M_CLR(GET_LOW_REG(d));
1178 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1179 M_SLL(GET_LOW_REG(d));
1180 emit_store_dst(jd, iptr, d);
1183 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1184 /* sx.val.i = constant */
1186 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1187 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1189 if (iptr->sx.val.i & 0x20) {
1190 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1191 M_CLR(GET_LOW_REG(d));
1192 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1196 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1198 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1200 emit_store_dst(jd, iptr, d);
1203 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1205 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1206 s2 = emit_load_s2(jd, iptr, ECX);
1207 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1210 M_TEST_IMM(32, ECX);
1212 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1213 M_SRA_IMM(31, GET_HIGH_REG(d));
1214 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1215 M_SRA(GET_HIGH_REG(d));
1216 emit_store_dst(jd, iptr, d);
1219 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1220 /* sx.val.i = constant */
1222 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1223 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1225 if (iptr->sx.val.i & 0x20) {
1226 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1227 M_SRA_IMM(31, GET_HIGH_REG(d));
1228 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1232 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1234 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1236 emit_store_dst(jd, iptr, d);
1239 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1241 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1242 s2 = emit_load_s2(jd, iptr, ECX);
1243 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1246 M_TEST_IMM(32, ECX);
1248 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1249 M_CLR(GET_HIGH_REG(d));
1250 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1251 M_SRL(GET_HIGH_REG(d));
1252 emit_store_dst(jd, iptr, d);
1255 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1256 /* sx.val.l = constant */
1258 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1259 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1261 if (iptr->sx.val.i & 0x20) {
1262 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1263 M_CLR(GET_HIGH_REG(d));
1264 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1268 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1270 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1272 emit_store_dst(jd, iptr, d);
1275 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1277 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1278 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1279 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1286 emit_store_dst(jd, iptr, d);
1289 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1290 /* sx.val.i = constant */
1292 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1293 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1295 M_AND_IMM(iptr->sx.val.i, d);
1296 emit_store_dst(jd, iptr, d);
1299 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1301 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1302 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1303 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1304 if (s2 == GET_LOW_REG(d))
1305 M_AND(s1, GET_LOW_REG(d));
1307 M_INTMOVE(s1, GET_LOW_REG(d));
1308 M_AND(s2, GET_LOW_REG(d));
1310 /* REG_ITMP1 probably contains low 32-bit of destination */
1311 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1312 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1313 if (s2 == GET_HIGH_REG(d))
1314 M_AND(s1, GET_HIGH_REG(d));
1316 M_INTMOVE(s1, GET_HIGH_REG(d));
1317 M_AND(s2, GET_HIGH_REG(d));
1319 emit_store_dst(jd, iptr, d);
1322 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1323 /* sx.val.l = constant */
1325 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1326 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1328 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1329 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1330 emit_store_dst(jd, iptr, d);
/* Bitwise OR cases -- structurally identical to the AND cases above.
   NOTE(review): for ICMD_IOR the actual M_OR/M_INTMOVE emit lines
   (original lines 1338-1343) are elided in this dump. */
1333 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1335 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1336 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1337 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1344 emit_store_dst(jd, iptr, d);

1347 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1348 /* sx.val.i = constant */
1350 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1351 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1353 M_OR_IMM(iptr->sx.val.i, d);
1354 emit_store_dst(jd, iptr, d);

/* 64-bit OR: low and high 32-bit halves ORed independently; operand
   order chosen to avoid clobbering a source that is already in the
   destination half. */
1357 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1359 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1360 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1361 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1362 if (s2 == GET_LOW_REG(d))
1363 M_OR(s1, GET_LOW_REG(d));
1365 M_INTMOVE(s1, GET_LOW_REG(d));
1366 M_OR(s2, GET_LOW_REG(d));
1368 /* REG_ITMP1 probably contains low 32-bit of destination */
1369 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1370 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1371 if (s2 == GET_HIGH_REG(d))
1372 M_OR(s1, GET_HIGH_REG(d));
1374 M_INTMOVE(s1, GET_HIGH_REG(d));
1375 M_OR(s2, GET_HIGH_REG(d));
1377 emit_store_dst(jd, iptr, d);

/* Long OR with 64-bit immediate, split into low/high 32-bit ORs. */
1380 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1381 /* sx.val.l = constant */
1383 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1384 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1386 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1387 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1388 emit_store_dst(jd, iptr, d);
/* Bitwise XOR cases -- same pattern as the AND/OR groups.
   NOTE(review): the M_XOR/M_INTMOVE emit lines of ICMD_IXOR
   (original lines 1396-1401) are elided in this dump. */
1391 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1393 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1394 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1395 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1402 emit_store_dst(jd, iptr, d);

1405 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1406 /* sx.val.i = constant */
1408 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1409 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1411 M_XOR_IMM(iptr->sx.val.i, d);
1412 emit_store_dst(jd, iptr, d);

/* 64-bit XOR: independent low/high 32-bit halves, operand order
   chosen to avoid clobbering a source already in the destination. */
1415 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1417 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1418 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1419 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1420 if (s2 == GET_LOW_REG(d))
1421 M_XOR(s1, GET_LOW_REG(d));
1423 M_INTMOVE(s1, GET_LOW_REG(d));
1424 M_XOR(s2, GET_LOW_REG(d));
1426 /* REG_ITMP1 probably contains low 32-bit of destination */
1427 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1428 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1429 if (s2 == GET_HIGH_REG(d))
1430 M_XOR(s1, GET_HIGH_REG(d));
1432 M_INTMOVE(s1, GET_HIGH_REG(d));
1433 M_XOR(s2, GET_HIGH_REG(d));
1435 emit_store_dst(jd, iptr, d);

/* Long XOR with 64-bit immediate, split into low/high 32-bit XORs. */
1438 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1439 /* sx.val.l = constant */
1441 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1442 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1444 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1445 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1446 emit_store_dst(jd, iptr, d);
1450 /* floating operations ************************************************/

/* Float/double negation on the x87 FPU stack: load the operand,
   negate (the fchs emit is among the lines elided in this dump --
   NOTE(review): confirm against the original), store the result. */
1452 case ICMD_FNEG: /* ..., value ==> ..., - value */
1454 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1455 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1457 emit_store_dst(jd, iptr, d);

1460 case ICMD_DNEG: /* ..., value ==> ..., - value */
1462 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1463 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1465 emit_store_dst(jd, iptr, d);
/* x87 binary float/double arithmetic (add/sub/mul/div).  Each case
   loads both operands onto the FPU stack and stores the destination;
   the actual arithmetic emit (faddp/fsubp/fmulp/fdivp) falls on
   original lines elided from this dump -- NOTE(review). */
1468 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1470 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1471 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1472 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1474 emit_store_dst(jd, iptr, d);

1477 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1479 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1480 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1481 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1483 emit_store_dst(jd, iptr, d);

1486 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1488 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1489 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1490 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1492 emit_store_dst(jd, iptr, d);

1495 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1497 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1498 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1499 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1501 emit_store_dst(jd, iptr, d);

1504 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1506 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1507 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1508 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1510 emit_store_dst(jd, iptr, d);

1513 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1515 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1516 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1517 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1519 emit_store_dst(jd, iptr, d);

1522 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1524 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1525 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1526 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1528 emit_store_dst(jd, iptr, d);

1531 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1533 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1534 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1535 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1537 emit_store_dst(jd, iptr, d);
/* Float/double remainder via the x87 FPREM instruction.  Operands are
   loaded in swapped order (s2 first) so the divisor ends up in ST(1)
   without an extra fxch.  The backward emit_jcc(CC_P, ...) re-executes
   the (elided) fprem/fnstsw/sahf sequence until the FPU signals the
   partial remainder is complete -- NOTE(review): the fprem loop body
   itself is among the lines elided in this dump; the hard-coded
   negative displacement (2+1+2+1+6 bytes) must match those elided
   instructions exactly. */
1540 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1542 /* exchanged to skip fxch */
1543 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1544 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1545 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1546 /* emit_fxch(cd); */
1551 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1552 emit_store_dst(jd, iptr, d);
/* free the leftover divisor still occupying the FPU stack */
1553 emit_ffree_reg(cd, 0);

1557 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1559 /* exchanged to skip fxch */
1560 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1561 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1562 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1563 /* emit_fxch(cd); */
1568 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1569 emit_store_dst(jd, iptr, d);
1570 emit_ffree_reg(cd, 0);
/* int/long -> float/double conversion.  The x87 fild instruction only
   reads from memory, so an in-memory operand is loaded directly from
   the stack slot; a register operand is first spilled to a scratch
   word in the data segment.  NOTE(review): the mov_imm 0 loaded into
   REG_ITMP1 is presumably back-patched with the data-segment base
   (the dseg_adddata call is among the lines elided in this dump). */
1574 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1575 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1577 var = VAROP(iptr->s1);
1578 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1580 if (var->flags & INMEMORY) {
1581 emit_fildl_membase(cd, REG_SP, var->vv.regoff * 4);
/* register operand: spill via a shared dseg scratch slot */
1583 /* XXX not thread safe! */
1584 disp = dseg_add_unique_s4(cd, 0);
1585 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1587 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1588 emit_fildl_membase(cd, REG_ITMP1, disp);
1591 emit_store_dst(jd, iptr, d);

/* long -> float/double: longs are always allocated in memory on i386,
   so only the in-memory path is implemented (fildll = 64-bit load). */
1594 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1595 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1597 var = VAROP(iptr->s1);
1598 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1599 if (var->flags & INMEMORY) {
1600 emit_fildll_membase(cd, REG_SP, var->vv.regoff * 4);
1603 log_text("L2F: longs have to be in memory");
1606 emit_store_dst(jd, iptr, d);
/* float -> int.  Java requires round-toward-zero, so the FPU control
   word is temporarily switched to truncation (0x0e7f), fistpl stores
   the result, then the default control word (0x027f, round-to-nearest)
   is restored.  fistpl produces 0x80000000 for NaN/out-of-range input,
   so that sentinel is compared for; on a match a fall-back call to
   asm_builtin_f2i computes the Java-correct result (NaN -> 0,
   overflow -> INT_MIN/INT_MAX).  The CALCOFFSETBYTES/disp arithmetic
   computes the byte length of the code skipped by the CC_NE jump --
   NOTE(review): several emit lines in that region are elided in this
   dump, so the displacement bookkeeping cannot be fully checked here. */
1609 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1611 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1612 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1614 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1617 /* Round to zero, 53-bit mode, exception masked */
1618 disp = dseg_add_s4(cd, 0x0e7f);
1619 emit_fldcw_membase(cd, REG_ITMP1, disp);
1621 var = VAROP(iptr->dst);
1622 var1 = VAROP(iptr->s1);
1624 if (var->flags & INMEMORY) {
1625 emit_fistpl_membase(cd, REG_SP, var->vv.regoff * 4);
1627 /* Round to nearest, 53-bit mode, exceptions masked */
1628 disp = dseg_add_s4(cd, 0x027f);
1629 emit_fldcw_membase(cd, REG_ITMP1, disp);
/* detect the 0x80000000 "indefinite" sentinel written by fistpl */
1631 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1632 REG_SP, var->vv.regoff * 4);
1635 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1637 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
/* register destination: go through a shared dseg scratch slot */
1640 /* XXX not thread safe! */
1641 disp = dseg_add_unique_s4(cd, 0);
1642 emit_fistpl_membase(cd, REG_ITMP1, disp);
1643 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1645 /* Round to nearest, 53-bit mode, exceptions masked */
1646 disp = dseg_add_s4(cd, 0x027f);
1647 emit_fldcw_membase(cd, REG_ITMP1, disp);
1649 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1652 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1653 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
/* skip the builtin fix-up when the fast path already produced a
   valid (non-sentinel) result */
1656 emit_jcc(cd, CC_NE, disp);
1658 /* XXX: change this when we use registers */
1659 emit_flds_membase(cd, REG_SP, var1->vv.regoff * 4);
1660 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1661 emit_call_reg(cd, REG_ITMP1);
1663 if (var->flags & INMEMORY) {
1664 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1667 M_INTMOVE(REG_RESULT, var->vv.regoff);
/* double -> int.  Identical structure to ICMD_F2I above: switch the
   FPU to round-toward-zero (0x0e7f), fistpl, restore 0x027f, compare
   the result against the 0x80000000 indefinite sentinel and fall back
   to asm_builtin_d2i for NaN/overflow.  NOTE(review): lines are elided
   in this dump (see the jumps in the embedded original numbering). */
1671 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1673 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1674 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1676 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1679 /* Round to zero, 53-bit mode, exception masked */
1680 disp = dseg_add_s4(cd, 0x0e7f);
1681 emit_fldcw_membase(cd, REG_ITMP1, disp);
1683 var = VAROP(iptr->dst);
1684 var1 = VAROP(iptr->s1);
1686 if (var->flags & INMEMORY) {
1687 emit_fistpl_membase(cd, REG_SP, var->vv.regoff * 4);
1689 /* Round to nearest, 53-bit mode, exceptions masked */
1690 disp = dseg_add_s4(cd, 0x027f);
1691 emit_fldcw_membase(cd, REG_ITMP1, disp);
1693 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1694 REG_SP, var->vv.regoff * 4);
1697 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1699 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
/* register destination: spill via shared dseg scratch slot */
1702 /* XXX not thread safe! */
1703 disp = dseg_add_unique_s4(cd, 0);
1704 emit_fistpl_membase(cd, REG_ITMP1, disp);
1705 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1707 /* Round to nearest, 53-bit mode, exceptions masked */
1708 disp = dseg_add_s4(cd, 0x027f);
1709 emit_fldcw_membase(cd, REG_ITMP1, disp);
1711 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1714 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1715 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
/* non-sentinel result: skip the slow builtin fix-up path */
1718 emit_jcc(cd, CC_NE, disp);
1720 /* XXX: change this when we use registers */
1721 emit_fldl_membase(cd, REG_SP, var1->vv.regoff * 4);
1722 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1723 emit_call_reg(cd, REG_ITMP1);
1725 if (var->flags & INMEMORY) {
1726 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1728 M_INTMOVE(REG_RESULT, var->vv.regoff);
/* float -> long (64-bit fistpll).  Only the in-memory destination is
   implemented since longs live in memory on i386.  Truncation mode
   (0x0e7f) is set, fistpll stores 8 bytes, 0x027f is restored, then
   the sentinel 0x80000000:00000000 (high:low) written by fistpll for
   NaN/out-of-range values is detected with two compares; on a match
   asm_builtin_f2l recomputes the Java-correct result into
   REG_RESULT/REG_RESULT2.  NOTE(review): displacement bookkeeping
   spans emit lines elided from this dump. */
1732 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1734 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1735 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1737 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1740 /* Round to zero, 53-bit mode, exception masked */
1741 disp = dseg_add_s4(cd, 0x0e7f);
1742 emit_fldcw_membase(cd, REG_ITMP1, disp);
1744 var = VAROP(iptr->dst);
1745 var1 = VAROP(iptr->s1);
1747 if (var->flags & INMEMORY) {
1748 emit_fistpll_membase(cd, REG_SP, var->vv.regoff * 4);
1750 /* Round to nearest, 53-bit mode, exceptions masked */
1751 disp = dseg_add_s4(cd, 0x027f);
1752 emit_fldcw_membase(cd, REG_ITMP1, disp);
/* check the high word of the stored long for the sentinel */
1754 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1755 REG_SP, var->vv.regoff * 4 + 4);
1758 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1760 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1763 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1765 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4 + 4);
1767 emit_jcc(cd, CC_NE, disp);
/* high word matched: also require the low word to be zero before
   taking the slow path */
1769 emit_alu_imm_membase(cd, ALU_CMP, 0,
1770 REG_SP, var->vv.regoff * 4);
1773 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1775 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1777 emit_jcc(cd, CC_NE, disp);
1779 /* XXX: change this when we use registers */
1780 emit_flds_membase(cd, REG_SP, var1->vv.regoff * 4);
1781 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1782 emit_call_reg(cd, REG_ITMP1);
1783 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1784 emit_mov_reg_membase(cd, REG_RESULT2,
1785 REG_SP, var->vv.regoff * 4 + 4);
1788 log_text("F2L: longs have to be in memory");
/* double -> long.  Same scheme as ICMD_F2L above: truncating fistpll,
   restore control word, detect the 0x80000000:00000000 sentinel and
   fall back to asm_builtin_d2l for NaN/overflow.  NOTE(review): lines
   are elided in this dump (embedded original numbering jumps). */
1793 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1795 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1796 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1798 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1801 /* Round to zero, 53-bit mode, exception masked */
1802 disp = dseg_add_s4(cd, 0x0e7f);
1803 emit_fldcw_membase(cd, REG_ITMP1, disp);
1805 var = VAROP(iptr->dst);
1806 var1 = VAROP(iptr->s1);
1808 if (var->flags & INMEMORY) {
1809 emit_fistpll_membase(cd, REG_SP, var->vv.regoff * 4);
1811 /* Round to nearest, 53-bit mode, exceptions masked */
1812 disp = dseg_add_s4(cd, 0x027f);
1813 emit_fldcw_membase(cd, REG_ITMP1, disp);
/* sentinel check: high word first, then low word must be zero */
1815 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1816 REG_SP, var->vv.regoff * 4 + 4);
1819 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1821 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1824 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1826 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4 + 4);
1828 emit_jcc(cd, CC_NE, disp);
1830 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff * 4);
1833 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1835 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1837 emit_jcc(cd, CC_NE, disp);
1839 /* XXX: change this when we use registers */
1840 emit_fldl_membase(cd, REG_SP, var1->vv.regoff * 4);
1841 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1842 emit_call_reg(cd, REG_ITMP1);
1843 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1844 emit_mov_reg_membase(cd, REG_RESULT2,
1845 REG_SP, var->vv.regoff * 4 + 4);
1848 log_text("D2L: longs have to be in memory");
/* float <-> double widening/narrowing.  On the x87 the value is held
   at extended precision internally; the precision change happens in
   the (elided) store/rounding emit -- NOTE(review): confirm against
   the original file. */
1853 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1855 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1856 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1858 emit_store_dst(jd, iptr, d);

1861 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1863 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1864 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1866 emit_store_dst(jd, iptr, d);
/* Float comparison producing -1/0/+1.  Operands are loaded swapped so
   no fxch is needed; the (elided) fucompp/fnstsw sequence leaves the
   FPU status word in AX.  Bit 0x400 (the x87 C2 flag) signals an
   unordered compare (NaN operand): FCMPL must treat unordered as
   "greater" and FCMPG as "less", per Java fcmpl/fcmpg semantics.
   The literal jcc/jmp displacements (6, 3+5, ...) are byte counts of
   the fixed-size instructions that follow -- NOTE(review): several of
   those emit lines are elided in this dump. */
1869 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1872 /* exchanged to skip fxch */
1873 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1874 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1875 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1876 /* emit_fxch(cd); */
1879 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1880 emit_jcc(cd, CC_E, 6);
1881 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1883 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
/* three-way result: equal -> 0, below -> +1, otherwise -> -1 */
1884 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1885 emit_jcc(cd, CC_B, 3 + 5);
1886 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1887 emit_jmp_imm(cd, 3);
1888 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1889 emit_store_dst(jd, iptr, d);

1892 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1895 /* exchanged to skip fxch */
1896 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1897 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1898 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1899 /* emit_fxch(cd); */
1902 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1903 emit_jcc(cd, CC_E, 3);
/* force the "below" outcome by faking the condition byte in AH */
1904 emit_movb_imm_reg(cd, 1, REG_AH);
1906 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1907 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1908 emit_jcc(cd, CC_B, 3 + 5);
1909 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1910 emit_jmp_imm(cd, 3);
1911 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1912 emit_store_dst(jd, iptr, d);
1916 /* memory operations **************************************************/

/* Array loads.  emit_array_checks() performs the null-pointer and
   array-bounds checks required before any element access; the trailing
   scale argument to the *_memindex emitters is log2 of the element
   size (0=byte, 1=char/short, 2=int/float/ref, 3=long/double).
   NOTE(review): this dump elides interior lines (break statements and
   parts of some emit argument lists). */
1918 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1920 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1921 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1922 emit_nullpointer_check(cd, iptr, s1);
1923 M_ILD(d, s1, OFFSET(java_arrayheader, size));
1924 emit_store_dst(jd, iptr, d);

/* byte load: sign-extended (movsbl) */
1927 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1929 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1930 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1931 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1932 emit_array_checks(cd, iptr, s1, s2);
1933 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray, data[0]),
1935 emit_store_dst(jd, iptr, d);

/* char load: zero-extended (movzwl) */
1938 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1940 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1941 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1942 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1943 emit_array_checks(cd, iptr, s1, s2);
1944 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray, data[0]),
1946 emit_store_dst(jd, iptr, d);

/* short load: sign-extended (movswl) */
1949 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1951 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1952 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1953 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1954 emit_array_checks(cd, iptr, s1, s2);
1955 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray, data[0]),
1957 emit_store_dst(jd, iptr, d);

1960 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1962 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1963 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1964 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1965 emit_array_checks(cd, iptr, s1, s2);
1966 emit_mov_memindex_reg(cd, OFFSET(java_intarray, data[0]),
1968 emit_store_dst(jd, iptr, d);

/* long load: destination must be a stack slot; the two 32-bit halves
   are copied through REG_ITMP3 one after the other */
1971 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1973 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1974 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1975 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1976 emit_array_checks(cd, iptr, s1, s2);
1978 var = VAROP(iptr->dst);
1980 assert(var->flags & INMEMORY);
1981 emit_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]),
1982 s1, s2, 3, REG_ITMP3);
1983 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff * 4);
1984 emit_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]) + 4,
1985 s1, s2, 3, REG_ITMP3);
1986 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff * 4 + 4);

/* float/double loads go through the x87 stack (flds/fldl) */
1989 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1991 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1992 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1993 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1994 emit_array_checks(cd, iptr, s1, s2);
1995 emit_flds_memindex(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2);
1996 emit_store_dst(jd, iptr, d);

1999 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2001 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2002 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2003 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
2004 emit_array_checks(cd, iptr, s1, s2);
2005 emit_fldl_memindex(cd, OFFSET(java_doublearray, data[0]), s1, s2,3);
2006 emit_store_dst(jd, iptr, d);

/* reference load: plain 32-bit move, no read barrier on i386 */
2009 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2011 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2012 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2013 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2014 emit_array_checks(cd, iptr, s1, s2);
2015 emit_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]),
2017 emit_store_dst(jd, iptr, d);
/* Array stores.  Same null/bounds checking and scale conventions as
   the loads above.  NOTE(review): several emit argument lists are cut
   short by elided continuation lines in this dump. */

/* byte store: i386 byte moves need a register with an 8-bit subregister
   (EAX..EDX), so the value is first forced into REG_ITMP3 */
2021 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2023 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2024 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2025 emit_array_checks(cd, iptr, s1, s2);
2026 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2028 /* because EBP, ESI, EDI have no xH and xL nibbles */
2029 M_INTMOVE(s3, REG_ITMP3);
2032 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]),

2036 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2038 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2039 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2040 emit_array_checks(cd, iptr, s1, s2);
2041 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2042 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]),

2046 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2048 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2049 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2050 emit_array_checks(cd, iptr, s1, s2);
2051 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2052 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]),

2056 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2058 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2059 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2060 emit_array_checks(cd, iptr, s1, s2);
2061 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2062 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]),

/* long store: value is read from its stack slot in two 32-bit halves */
2066 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2068 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2069 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2070 emit_array_checks(cd, iptr, s1, s2);
2072 var = VAROP(iptr->sx.s23.s3);
2074 assert(var->flags & INMEMORY);
2075 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff * 4, REG_ITMP3);
2076 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray, data[0])
2078 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff * 4 + 4, REG_ITMP3);
2079 emit_mov_reg_memindex(cd, REG_ITMP3,
2080 OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);

2083 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2085 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2086 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2087 emit_array_checks(cd, iptr, s1, s2);
2088 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2089 emit_fstps_memindex(cd, OFFSET(java_floatarray, data[0]), s1, s2,2);

2092 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2094 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2095 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2096 emit_array_checks(cd, iptr, s1, s2);
2097 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2098 emit_fstpl_memindex(cd, OFFSET(java_doublearray, data[0]),

/* reference store: must run the dynamic type (array-store) check via
   BUILTIN_canstore before writing; sources are reloaded afterwards
   because the builtin call clobbers the temporaries */
2102 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2104 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2105 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2106 emit_array_checks(cd, iptr, s1, s2);
2107 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2109 M_AST(s1, REG_SP, 0 * 4);
2110 M_AST(s3, REG_SP, 1 * 4);
2111 M_MOV_IMM(BUILTIN_canstore, REG_ITMP1);
2115 codegen_add_arraystoreexception_ref(cd);
2117 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2118 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2119 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2120 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]),
/* Array stores of constant values (folded by the optimizer).  Same
   null/bounds checks; the constant is emitted as an immediate. */
2124 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2126 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2127 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2128 emit_array_checks(cd, iptr, s1, s2);
2129 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2130 OFFSET(java_bytearray, data[0]), s1, s2, 0);

2133 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2135 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2136 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2137 emit_array_checks(cd, iptr, s1, s2);
2138 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2139 OFFSET(java_chararray, data[0]), s1, s2, 1);

2142 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2144 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2145 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2146 emit_array_checks(cd, iptr, s1, s2);
2147 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2148 OFFSET(java_shortarray, data[0]), s1, s2, 1);

2151 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2153 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2154 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2155 emit_array_checks(cd, iptr, s1, s2);
2156 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2157 OFFSET(java_intarray, data[0]), s1, s2, 2);

/* long constant store: low word is the low 32 bits; the high word is
   written as the arithmetic sign-extension of the low word
   ((s4)constval >> 31 yields 0 or -1) -- NOTE(review): this only
   reproduces the full 64-bit constant when its high word equals the
   sign-extension of its low word; presumably the optimizer only emits
   LASTORECONST for such constants -- confirm against the IR builder. */
2160 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2162 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2163 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2164 emit_array_checks(cd, iptr, s1, s2);
2165 emit_mov_imm_memindex(cd,
2166 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2167 OFFSET(java_longarray, data[0]), s1, s2, 3);
2168 emit_mov_imm_memindex(cd,
2169 ((s4)iptr->sx.s23.s3.constval) >> 31,
2170 OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);

/* reference constant store: the only reference constant here is null */
2173 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2175 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2176 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2177 emit_array_checks(cd, iptr, s1, s2);
2178 emit_mov_imm_memindex(cd, 0,
2179 OFFSET(java_objectarray, data[0]), s1, s2, 2);
/* Read a static field.  Unresolved fields install a
   PATCHER_get_putstatic patch-point (with NOP padding so the patched
   site stays disassemblable under opt_showdisassemble); resolved
   fields of not-yet-initialized classes install PATCHER_clinit so the
   class is initialized on first execution.  The field address
   (&fi->value) is loaded as an immediate and the value read with a
   type-appropriate load.  NOTE(review): the TYPE_* case labels of the
   inner switch are among the lines elided in this dump. */
2183 case ICMD_GETSTATIC: /* ... ==> ..., value */
2185 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2186 unresolved_field *uf = iptr->sx.s23.s3.uf;
2188 fieldtype = uf->fieldref->parseddesc.fd->type;
2190 codegen_addpatchref(cd, PATCHER_get_putstatic,
2191 iptr->sx.s23.s3.uf, 0);
2193 if (opt_showdisassemble) {
2194 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2201 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2203 fieldtype = fi->type;
2205 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class)) {
2206 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2208 if (opt_showdisassemble) {
2209 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2213 disp = (ptrint) &(fi->value);
2216 M_MOV_IMM(disp, REG_ITMP1);
2217 switch (fieldtype) {
2220 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2221 M_ILD(d, REG_ITMP1, 0);
2224 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2225 M_LLD(d, REG_ITMP1, 0);
2228 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2229 M_FLD(d, REG_ITMP1, 0);
2232 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2233 M_DLD(d, REG_ITMP1, 0);
2236 emit_store_dst(jd, iptr, d);
/* Write a static field.  Mirrors ICMD_GETSTATIC: patcher refs for
   unresolved fields / uninitialized classes, then a type-dispatched
   store through the field's absolute address in REG_ITMP1. */
2239 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2241 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2242 unresolved_field *uf = iptr->sx.s23.s3.uf;
2244 fieldtype = uf->fieldref->parseddesc.fd->type;
2246 codegen_addpatchref(cd, PATCHER_get_putstatic,
2247 iptr->sx.s23.s3.uf, 0);
2249 if (opt_showdisassemble) {
2250 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2257 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2259 fieldtype = fi->type;
2261 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class)) {
2262 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2264 if (opt_showdisassemble) {
2265 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2269 disp = (ptrint) &(fi->value);
2272 M_MOV_IMM(disp, REG_ITMP1);
2273 switch (fieldtype) {
2276 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2277 M_IST(s1, REG_ITMP1, 0);
2280 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2281 M_LST(s1, REG_ITMP1, 0);
2284 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2285 emit_fstps_membase(cd, REG_ITMP1, 0);
2288 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2289 emit_fstpl_membase(cd, REG_ITMP1, 0);
/* Write a constant to a static field (value folded into the
   instruction).  Long constants use the >>31 sign-extension of the
   low word for the high word -- NOTE(review): same restriction as
   ICMD_LASTORECONST, correct only when the constant's high word is
   the sign-extension of its low word; confirm against the IR builder. */
2294 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2295 /* val = value (in current instruction) */
2296 /* following NOP) */
2298 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2299 unresolved_field *uf = iptr->sx.s23.s3.uf;
2301 fieldtype = uf->fieldref->parseddesc.fd->type;
2303 codegen_addpatchref(cd, PATCHER_get_putstatic,
2306 if (opt_showdisassemble) {
2307 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2314 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2316 fieldtype = fi->type;
2318 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class)) {
2319 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2321 if (opt_showdisassemble) {
2322 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2326 disp = (ptrint) &(fi->value);
2329 M_MOV_IMM(disp, REG_ITMP1);
2330 switch (fieldtype) {
2333 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
2336 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2337 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
/* Read an instance field.  After the null check, unresolved fields get
   a PATCHER_getfield patch-point; the 32-bit-displacement load forms
   (M_ILD32 etc.) are used so the patcher can rewrite the offset in
   place.  NOTE(review): the line assigning disp for the resolved case
   (disp = fi->offset in the original) is elided in this dump. */
2344 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2346 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2347 emit_nullpointer_check(cd, iptr, s1);
2349 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2350 unresolved_field *uf = iptr->sx.s23.s3.uf;
2352 fieldtype = uf->fieldref->parseddesc.fd->type;
2354 codegen_addpatchref(cd, PATCHER_getfield,
2355 iptr->sx.s23.s3.uf, 0);
2357 if (opt_showdisassemble) {
2358 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2365 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2367 fieldtype = fi->type;
2371 switch (fieldtype) {
2374 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2375 M_ILD32(d, s1, disp);
2378 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2379 M_LLD32(d, s1, disp);
2382 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2383 M_FLD32(d, s1, disp);
2386 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2387 M_DLD32(d, s1, disp);
2390 emit_store_dst(jd, iptr, d);
/* Write an instance field.  The value is loaded BEFORE the patch-point
   is installed ("must be done here because of code patching") so the
   patcher's NOP window covers only the store itself; 32-bit
   displacement store forms are used so the offset is patchable. */
2393 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2395 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2396 emit_nullpointer_check(cd, iptr, s1);
2398 /* must be done here because of code patching */
2400 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2401 unresolved_field *uf = iptr->sx.s23.s3.uf;
2403 fieldtype = uf->fieldref->parseddesc.fd->type;
2406 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2408 fieldtype = fi->type;
2411 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2412 if (IS_2_WORD_TYPE(fieldtype))
2413 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2415 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2418 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2420 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2421 unresolved_field *uf = iptr->sx.s23.s3.uf;
2423 codegen_addpatchref(cd, PATCHER_putfield, uf, 0);
2425 if (opt_showdisassemble) {
2426 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2433 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2438 switch (fieldtype) {
2441 M_IST32(s2, s1, disp);
2444 M_LST32(s2, s1, disp);
2447 emit_fstps_membase32(cd, s1, disp);
2450 emit_fstpl_membase32(cd, s1, disp);
/* Write a constant to an instance field.  Patchable 32-bit immediate
   stores; long constants again use the >>31 sign-extension of the low
   word for the high word -- NOTE(review): see the LASTORECONST note. */
2455 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2456 /* val = value (in current instruction) */
2457 /* following NOP) */
2459 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2460 emit_nullpointer_check(cd, iptr, s1);
2462 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2463 unresolved_field *uf = iptr->sx.s23.s3.uf;
2465 fieldtype = uf->fieldref->parseddesc.fd->type;
2467 codegen_addpatchref(cd, PATCHER_putfieldconst,
2470 if (opt_showdisassemble) {
2471 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2479 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2481 fieldtype = fi->type;
2486 switch (fieldtype) {
2489 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2492 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2493 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2501 /* branch operations **************************************************/

/* Throw: exception object goes to REG_ITMP1_XPTR; with the verifier an
   unresolved exception class installs PATCHER_athrow_areturn.  The
   M_CALL_IMM(0)/M_POP pair is the classic i386 trick to materialize
   the current PC (the throwing site) into REG_ITMP2_XPC before jumping
   to asm_handle_exception. */
2503 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2505 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2506 M_INTMOVE(s1, REG_ITMP1_XPTR);
2508 #ifdef ENABLE_VERIFIER
2509 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2510 codegen_addpatchref(cd, PATCHER_athrow_areturn,
2511 iptr->sx.s23.s2.uc, 0);
2513 if (opt_showdisassemble) {
2514 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2517 #endif /* ENABLE_VERIFIER */
2519 M_CALL_IMM(0); /* passing exception pc */
2520 M_POP(REG_ITMP2_XPC);
2522 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
/* Unconditional and 32-bit conditional branches.  Each conditional
   case compares and then records a forward-branch reference to the
   target basic block via codegen_addreference(); the actual M_BEQ/
   M_BNE/... jump emits are among the lines elided in this dump --
   NOTE(review). */
2526 case ICMD_GOTO: /* ... ==> ... */
2527 case ICMD_RET: /* ... ==> ... */
2529 #if defined(ENABLE_SSA)
2531 last_cmd_was_goto = true;
2532 /* In case of a Goto phimoves have to be inserted before the */
2534 codegen_insert_phi_moves(jd, bptr);
2538 codegen_addreference(cd, iptr->dst.block);

2542 case ICMD_JSR: /* ... ==> ... */
2545 codegen_addreference(cd, iptr->sx.s23.s3.jsrtarget.block);

2548 case ICMD_IFNULL: /* ..., value ==> ... */
2550 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2553 codegen_addreference(cd, iptr->dst.block);

2556 case ICMD_IFNONNULL: /* ..., value ==> ... */
2558 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2561 codegen_addreference(cd, iptr->dst.block);

/* int compares against the immediate sx.val.i */
2564 case ICMD_IFEQ: /* ..., value ==> ... */
2566 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2567 M_CMP_IMM(iptr->sx.val.i, s1);
2569 codegen_addreference(cd, iptr->dst.block);

2572 case ICMD_IFLT: /* ..., value ==> ... */
2574 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2575 M_CMP_IMM(iptr->sx.val.i, s1);
2577 codegen_addreference(cd, iptr->dst.block);

2580 case ICMD_IFLE: /* ..., value ==> ... */
2582 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2583 M_CMP_IMM(iptr->sx.val.i, s1);
2585 codegen_addreference(cd, iptr->dst.block);

2588 case ICMD_IFNE: /* ..., value ==> ... */
2590 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2591 M_CMP_IMM(iptr->sx.val.i, s1);
2593 codegen_addreference(cd, iptr->dst.block);

2596 case ICMD_IFGT: /* ..., value ==> ... */
2598 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2599 M_CMP_IMM(iptr->sx.val.i, s1);
2601 codegen_addreference(cd, iptr->dst.block);

2604 case ICMD_IFGE: /* ..., value ==> ... */
2606 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2607 M_CMP_IMM(iptr->sx.val.i, s1);
2609 codegen_addreference(cd, iptr->dst.block);
/* 64-bit conditional branches.  Equality tests OR the (possibly
   XOR-masked) halves together so a single zero test decides; ordered
   comparisons test the high words first and only compare the low
   words (unsigned) when the high words are equal.  The comparison
   against zero is special-cased where the high word's sign alone
   decides.  The actual conditional-jump emits are among the elided
   lines -- NOTE(review). */
2612 case ICMD_IF_LEQ: /* ..., value ==> ... */
2614 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2615 if (iptr->sx.val.l == 0) {
2616 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2617 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
/* non-zero constant: XOR both halves with the constant, then OR;
   result is zero iff the value equals the constant */
2620 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2621 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2622 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2623 M_OR(REG_ITMP2, REG_ITMP1);
2626 codegen_addreference(cd, iptr->dst.block);

2629 case ICMD_IF_LLT: /* ..., value ==> ... */
2631 if (iptr->sx.val.l == 0) {
2632 /* If high 32-bit are less than zero, then the 64-bits
2634 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2639 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2640 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2642 codegen_addreference(cd, iptr->dst.block);
2644 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2647 codegen_addreference(cd, iptr->dst.block);

2650 case ICMD_IF_LLE: /* ..., value ==> ... */
2652 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2653 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2655 codegen_addreference(cd, iptr->dst.block);
2657 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2659 codegen_addreference(cd, iptr->dst.block);

2662 case ICMD_IF_LNE: /* ..., value ==> ... */
2664 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2665 if (iptr->sx.val.l == 0) {
2666 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2667 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2670 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2671 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2672 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2673 M_OR(REG_ITMP2, REG_ITMP1);
2676 codegen_addreference(cd, iptr->dst.block);

2679 case ICMD_IF_LGT: /* ..., value ==> ... */
2681 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2682 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2684 codegen_addreference(cd, iptr->dst.block);
2686 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2688 codegen_addreference(cd, iptr->dst.block);

2691 case ICMD_IF_LGE: /* ..., value ==> ... */
2693 if (iptr->sx.val.l == 0) {
2694 /* If high 32-bit are greater equal zero, then the
2696 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2701 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2702 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2704 codegen_addreference(cd, iptr->dst.block);
2706 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2709 codegen_addreference(cd, iptr->dst.block);
2712 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2713 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2715 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2716 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2719 codegen_addreference(cd, iptr->dst.block);
2722 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2724 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2725 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2726 M_INTMOVE(s1, REG_ITMP1);
2727 M_XOR(s2, REG_ITMP1);
2728 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2729 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2730 M_INTMOVE(s1, REG_ITMP2);
2731 M_XOR(s2, REG_ITMP2);
2732 M_OR(REG_ITMP1, REG_ITMP2);
2734 codegen_addreference(cd, iptr->dst.block);
2737 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2738 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2740 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2741 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2744 codegen_addreference(cd, iptr->dst.block);
2747 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2749 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2750 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2751 M_INTMOVE(s1, REG_ITMP1);
2752 M_XOR(s2, REG_ITMP1);
2753 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2754 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2755 M_INTMOVE(s1, REG_ITMP2);
2756 M_XOR(s2, REG_ITMP2);
2757 M_OR(REG_ITMP1, REG_ITMP2);
2759 codegen_addreference(cd, iptr->dst.block);
2762 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2764 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2765 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2768 codegen_addreference(cd, iptr->dst.block);
2771 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2773 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2774 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2777 codegen_addreference(cd, iptr->dst.block);
2778 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2779 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2783 codegen_addreference(cd, iptr->dst.block);
2786 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2788 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2789 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2792 codegen_addreference(cd, iptr->dst.block);
2795 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2797 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2798 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2801 codegen_addreference(cd, iptr->dst.block);
2802 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2803 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2807 codegen_addreference(cd, iptr->dst.block);
2810 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2812 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2813 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2816 codegen_addreference(cd, iptr->dst.block);
2819 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2821 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2822 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2825 codegen_addreference(cd, iptr->dst.block);
2826 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2827 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2831 codegen_addreference(cd, iptr->dst.block);
2834 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2836 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2837 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2840 codegen_addreference(cd, iptr->dst.block);
2843 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2845 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2846 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2849 codegen_addreference(cd, iptr->dst.block);
2850 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2851 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2855 codegen_addreference(cd, iptr->dst.block);
2859 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2861 REPLACEMENT_POINT_RETURN(cd, iptr);
2862 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2863 M_INTMOVE(s1, REG_RESULT);
2864 goto nowperformreturn;
2866 case ICMD_LRETURN: /* ..., retvalue ==> ... */
2868 REPLACEMENT_POINT_RETURN(cd, iptr);
2869 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2870 M_LNGMOVE(s1, REG_RESULT_PACKED);
2871 goto nowperformreturn;
2873 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2875 REPLACEMENT_POINT_RETURN(cd, iptr);
2876 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2877 M_INTMOVE(s1, REG_RESULT);
2879 #ifdef ENABLE_VERIFIER
2880 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2881 codegen_addpatchref(cd, PATCHER_athrow_areturn,
2882 iptr->sx.s23.s2.uc, 0);
2884 if (opt_showdisassemble) {
2885 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2888 #endif /* ENABLE_VERIFIER */
2889 goto nowperformreturn;
2891 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2894 REPLACEMENT_POINT_RETURN(cd, iptr);
2895 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2896 goto nowperformreturn;
2898 case ICMD_RETURN: /* ... ==> ... */
2900 REPLACEMENT_POINT_RETURN(cd, iptr);
2906 p = cd->stackframesize;
2908 #if !defined(NDEBUG)
2909 emit_verbosecall_exit(jd);
2912 #if defined(ENABLE_THREADS)
2913 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2914 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 4);
2916 /* we need to save the proper return value */
2917 switch (iptr->opc) {
2920 M_IST(REG_RESULT, REG_SP, rd->memuse * 4);
2924 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 4);
2928 emit_fstps_membase(cd, REG_SP, rd->memuse * 4);
2932 emit_fstpl_membase(cd, REG_SP, rd->memuse * 4);
2936 M_AST(REG_ITMP2, REG_SP, 0);
2937 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2940 /* and now restore the proper return value */
2941 switch (iptr->opc) {
2944 M_ILD(REG_RESULT, REG_SP, rd->memuse * 4);
2948 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 4);
2952 emit_flds_membase(cd, REG_SP, rd->memuse * 4);
2956 emit_fldl_membase(cd, REG_SP, rd->memuse * 4);
2962 /* restore saved registers */
2964 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2965 p--; M_ALD(rd->savintregs[i], REG_SP, p * 4);
2968 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2970 emit_fldl_membase(cd, REG_SP, p * 4);
2971 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2973 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2976 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2980 /* deallocate stack */
2982 if (cd->stackframesize)
2983 M_AADD_IMM(cd->stackframesize * 4, REG_SP);
2990 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2993 branch_target_t *table;
2995 table = iptr->dst.table;
2997 l = iptr->sx.s23.s2.tablelow;
2998 i = iptr->sx.s23.s3.tablehigh;
3000 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3001 M_INTMOVE(s1, REG_ITMP1);
3004 M_ISUB_IMM(l, REG_ITMP1);
3009 M_CMP_IMM(i - 1, REG_ITMP1);
3012 codegen_addreference(cd, table[0].block); /* default target */
3014 /* build jump table top down and use address of lowest entry */
3019 dseg_add_target(cd, table->block);
3023 /* length of dataseg after last dseg_addtarget is used
3026 M_MOV_IMM(0, REG_ITMP2);
3028 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
3034 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
3037 lookup_target_t *lookup;
3039 lookup = iptr->dst.lookup;
3041 i = iptr->sx.s23.s2.lookupcount;
3043 MCODECHECK((i<<2)+8);
3044 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3047 M_CMP_IMM(lookup->value, s1);
3049 codegen_addreference(cd, lookup->target.block);
3055 codegen_addreference(cd, iptr->sx.s23.s3.lookupdefault.block);
3059 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
3061 bte = iptr->sx.s23.s3.bte;
3065 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3067 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3068 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
3069 case ICMD_INVOKEINTERFACE:
3071 REPLACEMENT_POINT_INVOKE(cd, iptr);
3073 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3074 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
3078 lm = iptr->sx.s23.s3.fmiref->p.method;
3079 md = lm->parseddesc;
3083 s3 = md->paramcount;
3085 MCODECHECK((s3 << 1) + 64);
3087 /* copy arguments to registers or stack location */
3089 for (s3 = s3 - 1; s3 >= 0; s3--) {
3090 var = VAR(iptr->sx.s23.s2.args[s3]);
3092 /* Already Preallocated (ARGVAR) ? */
3093 if (var->flags & PREALLOC)
3095 if (IS_INT_LNG_TYPE(var->type)) {
3096 if (!md->params[s3].inmemory) {
3097 log_text("No integer argument registers available!");
3101 if (IS_2_WORD_TYPE(var->type)) {
3102 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
3103 M_LST(d, REG_SP, md->params[s3].regoff * 4);
3105 d = emit_load(jd, iptr, var, REG_ITMP1);
3106 M_IST(d, REG_SP, md->params[s3].regoff * 4);
3111 if (!md->params[s3].inmemory) {
3112 s1 = rd->argfltregs[md->params[s3].regoff];
3113 d = emit_load(jd, iptr, var, s1);
3117 d = emit_load(jd, iptr, var, REG_FTMP1);
3118 if (IS_2_WORD_TYPE(var->type))
3119 M_DST(d, REG_SP, md->params[s3].regoff * 4);
3121 M_FST(d, REG_SP, md->params[s3].regoff * 4);
3126 switch (iptr->opc) {
3128 disp = (ptrint) bte->fp;
3129 d = md->returntype.type;
3131 M_MOV_IMM(disp, REG_ITMP1);
3135 if (INSTRUCTION_MUST_CHECK(iptr)) {
3138 codegen_add_fillinstacktrace_ref(cd);
3142 case ICMD_INVOKESPECIAL:
3143 M_ALD(REG_ITMP1, REG_SP, 0);
3146 codegen_add_nullpointerexception_ref(cd);
3150 case ICMD_INVOKESTATIC:
3152 unresolved_method *um = iptr->sx.s23.s3.um;
3154 codegen_addpatchref(cd, PATCHER_invokestatic_special,
3157 if (opt_showdisassemble) {
3158 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3162 d = md->returntype.type;
3165 disp = (ptrint) lm->stubroutine;
3166 d = lm->parseddesc->returntype.type;
3169 M_MOV_IMM(disp, REG_ITMP2);
3173 case ICMD_INVOKEVIRTUAL:
3174 M_ALD(REG_ITMP1, REG_SP, 0 * 4);
3175 emit_nullpointer_check(cd, iptr, s1);
3178 unresolved_method *um = iptr->sx.s23.s3.um;
3180 codegen_addpatchref(cd, PATCHER_invokevirtual, um, 0);
3182 if (opt_showdisassemble) {
3183 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3187 d = md->returntype.type;
3190 s1 = OFFSET(vftbl_t, table[0]) +
3191 sizeof(methodptr) * lm->vftblindex;
3192 d = md->returntype.type;
3195 M_ALD(REG_METHODPTR, REG_ITMP1,
3196 OFFSET(java_objectheader, vftbl));
3197 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3201 case ICMD_INVOKEINTERFACE:
3202 M_ALD(REG_ITMP1, REG_SP, 0 * 4);
3203 emit_nullpointer_check(cd, iptr, s1);
3206 unresolved_method *um = iptr->sx.s23.s3.um;
3208 codegen_addpatchref(cd, PATCHER_invokeinterface, um, 0);
3210 if (opt_showdisassemble) {
3211 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3216 d = md->returntype.type;
3219 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3220 sizeof(methodptr) * lm->class->index;
3222 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3224 d = md->returntype.type;
3227 M_ALD(REG_METHODPTR, REG_ITMP1,
3228 OFFSET(java_objectheader, vftbl));
3229 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3230 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3235 /* store size of call code in replacement point */
3237 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3239 /* d contains return type */
3241 if (d != TYPE_VOID) {
3242 #if defined(ENABLE_SSA)
3243 if ((ls == NULL) || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) ||
3244 (ls->lifetime[-iptr->dst.varindex-1].type != -1))
3245 /* a "living" stackslot */
3248 if (IS_INT_LNG_TYPE(d)) {
3249 if (IS_2_WORD_TYPE(d)) {
3250 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3251 M_LNGMOVE(REG_RESULT_PACKED, s1);
3254 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3255 M_INTMOVE(REG_RESULT, s1);
3259 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3261 emit_store_dst(jd, iptr, s1);
3267 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3268 /* val.a: (classinfo*) superclass */
3270 /* superclass is an interface:
3272 * OK if ((sub == NULL) ||
3273 * (sub->vftbl->interfacetablelength > super->index) &&
3274 * (sub->vftbl->interfacetable[-super->index] != NULL));
3276 * superclass is a class:
3278 * OK if ((sub == NULL) || (0
3279 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3280 * super->vftbl->diffval));
3283 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3284 /* object type cast-check */
3287 vftbl_t *supervftbl;
3290 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3296 super = iptr->sx.s23.s3.c.cls;
3297 superindex = super->index;
3298 supervftbl = super->vftbl;
3301 #if defined(ENABLE_THREADS)
3302 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3304 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3306 /* calculate interface checkcast code size */
3308 s2 = 2; /* mov_membase_reg */
3309 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3311 s2 += (2 + 4 /* mov_membase32_reg */ + 2 + 4 /* sub imm32 */ +
3312 2 /* test */ + 6 /* jcc */ + 2 + 4 /* mov_membase32_reg */ +
3313 2 /* test */ + 6 /* jcc */);
3316 s2 += (opt_showdisassemble ? 5 : 0);
3318 /* calculate class checkcast code size */
3320 s3 = 2; /* mov_membase_reg */
3321 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3323 s3 += 5 /* mov_imm_reg */ + 2 + 4 /* mov_membase32_reg */;
3326 if (s1 != REG_ITMP1) {
3328 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3331 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3338 s3 += (2 + 4 /* mov_membase32_reg */ + 2 /* sub */ +
3339 5 /* mov_imm_reg */ + 2 /* mov_membase_reg */);
3340 CALCOFFSETBYTES(s3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3343 s3 += 2 /* cmp */ + 6 /* jcc */;
3346 s3 += (opt_showdisassemble ? 5 : 0);
3348 /* if class is not resolved, check which code to call */
3350 if (super == NULL) {
3352 M_BEQ(5 + (opt_showdisassemble ? 5 : 0) + 6 + 6 + s2 + 5 + s3);
3354 codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
3355 iptr->sx.s23.s3.c.ref, 0);
3357 if (opt_showdisassemble) {
3358 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3361 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3362 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3366 /* interface checkcast code */
3368 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3369 if (super != NULL) {
3374 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3376 if (super == NULL) {
3377 codegen_addpatchref(cd,
3378 PATCHER_checkcast_instanceof_interface,
3379 iptr->sx.s23.s3.c.ref,
3382 if (opt_showdisassemble) {
3383 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3388 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3389 M_ISUB_IMM32(superindex, REG_ITMP3);
3392 codegen_add_classcastexception_ref(cd, s1);
3393 M_ALD32(REG_ITMP3, REG_ITMP2,
3394 OFFSET(vftbl_t, interfacetable[0]) -
3395 superindex * sizeof(methodptr*));
3398 codegen_add_classcastexception_ref(cd, s1);
3404 /* class checkcast code */
3406 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3407 if (super != NULL) {
3412 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3414 if (super == NULL) {
3415 codegen_addpatchref(cd, PATCHER_checkcast_class,
3416 iptr->sx.s23.s3.c.ref,
3419 if (opt_showdisassemble) {
3420 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3424 M_MOV_IMM(supervftbl, REG_ITMP3);
3425 #if defined(ENABLE_THREADS)
3426 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3428 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3430 /* if (s1 != REG_ITMP1) { */
3431 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3432 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3433 /* #if defined(ENABLE_THREADS) */
3434 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3436 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
3439 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3440 M_ISUB(REG_ITMP3, REG_ITMP2);
3441 M_MOV_IMM(supervftbl, REG_ITMP3);
3442 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3443 #if defined(ENABLE_THREADS)
3444 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3448 M_CMP(REG_ITMP3, REG_ITMP2);
3449 M_BA(0); /* (u) REG_ITMP2 > (u) REG_ITMP3 -> jump */
3450 codegen_add_classcastexception_ref(cd, s1);
3453 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3456 /* array type cast-check */
3458 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3459 M_AST(s1, REG_SP, 0 * 4);
3461 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3462 codegen_addpatchref(cd, PATCHER_builtin_arraycheckcast,
3463 iptr->sx.s23.s3.c.ref, 0);
3465 if (opt_showdisassemble) {
3466 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3470 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3471 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3474 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3477 codegen_add_classcastexception_ref(cd, s1);
3479 d = codegen_reg_of_dst(jd, iptr, s1);
3483 emit_store_dst(jd, iptr, d);
3486 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3487 /* val.a: (classinfo*) superclass */
3489 /* superclass is an interface:
3491 * return (sub != NULL) &&
3492 * (sub->vftbl->interfacetablelength > super->index) &&
3493 * (sub->vftbl->interfacetable[-super->index] != NULL);
3495 * superclass is a class:
3497 * return ((sub != NULL) && (0
3498 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3499 * super->vftbl->diffvall));
3504 vftbl_t *supervftbl;
3507 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3513 super = iptr->sx.s23.s3.c.cls;
3514 superindex = super->index;
3515 supervftbl = super->vftbl;
3518 #if defined(ENABLE_THREADS)
3519 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3522 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3523 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
3525 M_INTMOVE(s1, REG_ITMP1);
3529 /* calculate interface instanceof code size */
3531 s2 = 2; /* mov_membase_reg */
3532 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3534 s2 += (2 + 4 /* mov_membase32_reg */ + 2 + 4 /* alu_imm32_reg */ +
3535 2 /* test */ + 6 /* jcc */ + 2 + 4 /* mov_membase32_reg */ +
3536 2 /* test */ + 6 /* jcc */ + 5 /* mov_imm_reg */);
3539 s2 += (opt_showdisassemble ? 5 : 0);
3541 /* calculate class instanceof code size */
3543 s3 = 2; /* mov_membase_reg */
3544 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3545 s3 += 5; /* mov_imm_reg */
3547 CALCOFFSETBYTES(s3, REG_ITMP1, OFFSET(vftbl_t, baseval));
3549 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3551 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, baseval));
3553 s3 += (2 /* alu_reg_reg */ + 2 /* alu_reg_reg */ +
3554 2 /* alu_reg_reg */ + 6 /* jcc */ + 5 /* mov_imm_reg */);
3557 s3 += (opt_showdisassemble ? 5 : 0);
3561 /* if class is not resolved, check which code to call */
3565 M_BEQ(5 + (opt_showdisassemble ? 5 : 0) + 6 + 6 + s2 + 5 + s3);
3567 codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
3568 iptr->sx.s23.s3.c.ref, 0);
3570 if (opt_showdisassemble) {
3571 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3574 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3575 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3579 /* interface instanceof code */
3581 if (!super || (super->flags & ACC_INTERFACE)) {
3587 M_ALD(REG_ITMP1, s1, OFFSET(java_objectheader, vftbl));
3590 codegen_addpatchref(cd,
3591 PATCHER_checkcast_instanceof_interface,
3592 iptr->sx.s23.s3.c.ref, 0);
3594 if (opt_showdisassemble) {
3595 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3600 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3601 M_ISUB_IMM32(superindex, REG_ITMP3);
3604 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3605 6 /* jcc */ + 5 /* mov_imm_reg */);
3608 M_ALD32(REG_ITMP1, REG_ITMP1,
3609 OFFSET(vftbl_t, interfacetable[0]) -
3610 superindex * sizeof(methodptr*));
3612 /* emit_setcc_reg(cd, CC_A, d); */
3613 /* emit_jcc(cd, CC_BE, 5); */
3621 /* class instanceof code */
3623 if (!super || !(super->flags & ACC_INTERFACE)) {
3629 M_ALD(REG_ITMP1, s1, OFFSET(java_objectheader, vftbl));
3632 codegen_addpatchref(cd, PATCHER_instanceof_class,
3633 iptr->sx.s23.s3.c.ref, 0);
3635 if (opt_showdisassemble) {
3636 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3640 M_MOV_IMM(supervftbl, REG_ITMP2);
3641 #if defined(ENABLE_THREADS)
3642 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3644 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3645 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3646 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3647 #if defined(ENABLE_THREADS)
3648 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3650 M_ISUB(REG_ITMP2, REG_ITMP1);
3651 M_CLR(d); /* may be REG_ITMP2 */
3652 M_CMP(REG_ITMP3, REG_ITMP1);
3656 emit_store_dst(jd, iptr, d);
3662 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3664 /* check for negative sizes and copy sizes to stack if necessary */
3666 MCODECHECK((iptr->s1.argcount << 1) + 64);
3668 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3669 /* copy SAVEDVAR sizes to stack */
3670 var = VAR(iptr->sx.s23.s2.args[s1]);
3672 /* Already Preallocated? */
3673 if (!(var->flags & PREALLOC)) {
3674 if (var->flags & INMEMORY) {
3675 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff * 4);
3676 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3679 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3683 /* is a patcher function set? */
3685 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3686 codegen_addpatchref(cd, PATCHER_builtin_multianewarray,
3687 iptr->sx.s23.s3.c.ref, 0);
3689 if (opt_showdisassemble) {
3690 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3697 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3699 /* a0 = dimension count */
3701 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3703 /* a1 = arraydescriptor */
3705 M_IST_IMM(disp, REG_SP, 1 * 4);
3707 /* a2 = pointer to dimensions = stack pointer */
3709 M_MOV(REG_SP, REG_ITMP1);
3710 M_AADD_IMM(3 * 4, REG_ITMP1);
3711 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3713 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3716 /* check for exception before result assignment */
3720 codegen_add_fillinstacktrace_ref(cd);
3722 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3723 M_INTMOVE(REG_RESULT, s1);
3724 emit_store_dst(jd, iptr, s1);
3728 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3733 } /* for instruction */
3737 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3740 #if defined(ENABLE_SSA)
3742 /* by edge splitting, in Blocks with phi moves there can only */
3743 /* be a goto as last command, no other Jump/Branch Command */
3744 if (!last_cmd_was_goto)
3745 codegen_insert_phi_moves(jd, bptr);
3750 /* At the end of a basic block we may have to append some nops,
3751 because the patcher stub calling code might be longer than the
3752 actual instruction. So codepatching does not change the
3753 following block unintentionally. */
3755 if (cd->mcodeptr < cd->lastmcodeptr) {
3756 while (cd->mcodeptr < cd->lastmcodeptr) {
3761 } /* if (bptr -> flags >= BBREACHED) */
3762 } /* for basic block */
3764 dseg_createlinenumbertable(cd);
3767 /* generate exception and patcher stubs */
3769 emit_exception_stubs(jd);
3770 emit_patcher_stubs(jd);
3771 REPLACEMENT_EMIT_STUBS(jd);
3775 /* everything's ok */
3780 #if defined(ENABLE_SSA)
/* codegen_insert_phi_moves ***************************************************

   Emit the machine-code moves required by the phi functions of basic
   block bptr (SSA form only).  Moves from phi functions with the
   highest indices are emitted first, matching the order used by
   conflict resolution.  The actual move is delegated to cg_move().

   NOTE(review): this view of the source is elided; parts of the body
   (setup of ls/cd, some branch structure) are not visible here, so
   the comments below describe only the visible logic.

*******************************************************************************/

3781 void codegen_insert_phi_moves(jitdata *jd, basicblock *bptr) {
3782 /* look for phi moves */
3783 int t_a,s_a,i, type;
3784 int t_lt, s_lt; /* lifetime indices of phi_moves */
3785 s4 t_regoff, s_regoff, s_flags, t_flags;
3794 /* Moves from phi functions with highest indices have to be */
3795 /* inserted first, since this is the order as is used for */
3796 /* conflict resolution */
3797 for(i = ls->num_phi_moves[bptr->nr] - 1; i >= 0 ; i--) {
/* target (t_a) and source (s_a) variable indices of this phi move */
3798 t_a = ls->phi_moves[bptr->nr][i][0];
3799 s_a = ls->phi_moves[bptr->nr][i][1];
3800 #if defined(SSA_DEBUG_VERBOSE)
3802 printf("BB %3i Move %3i <- %3i ", bptr->nr, t_a, s_a);
/* resolve the target lifetime; local-variable lifetimes apparently
   live at offset ls->maxlifetimes in the lifetime array — the
   stackslot branch of this lookup is partly elided here */
3805 /* local var lifetimes */
3806 t_lt = ls->maxlifetimes + t_a;
3807 type = ls->lifetime[t_lt].type;
3811 type = ls->lifetime[t_lt].local_ss->s->type;
3812 /* stackslot lifetime */
3816 #if defined(SSA_DEBUG_VERBOSE)
3818 printf("...returning - phi lifetimes where joined\n");
/* resolve the source lifetime the same way */
3824 /* local var lifetimes */
3825 s_lt = ls->maxlifetimes + s_a;
3826 type = ls->lifetime[s_lt].type;
3830 type = ls->lifetime[s_lt].type;
3831 /* stackslot lifetime */
3835 #if defined(SSA_DEBUG_VERBOSE)
3837 printf("...returning - phi lifetimes where joined\n");
/* fetch location flags (in-memory vs. register) and the
   register/slot offset of the target ... */
3843 t_flags = VAR(t_a)->flags;
3844 t_regoff = VAR(t_a)->vv.regoff;
3848 t_flags = ls->lifetime[t_lt].local_ss->s->flags;
3849 t_regoff = ls->lifetime[t_lt].local_ss->s->regoff;
/* ... and of the source */
3853 /* local var move */
3854 s_flags = VAR(s_a)->flags;
3855 s_regoff = VAR(s_a)->vv.regoff;
3857 /* stackslot lifetime */
3858 s_flags = ls->lifetime[s_lt].local_ss->s->flags;
3859 s_regoff = ls->lifetime[s_lt].local_ss->s->regoff;
3863 #if defined(SSA_DEBUG_VERBOSE)
3865 printf("...returning - phi lifetimes where joined\n");
/* emit the actual move; all memory/register combinations are
   handled inside cg_move() */
3870 cg_move(cd, type, s_regoff, s_flags, t_regoff, t_flags);
3872 #if defined(SSA_DEBUG_VERBOSE)
/* verbose tracing of the emitted move: M = memory slot, R = register */
3873 if (compileverbose) {
3874 if (IS_INMEMORY(t_flags) && IS_INMEMORY(s_flags)) {
3876 printf("M%3i <- M%3i",t_regoff,s_regoff);
3878 else if (IS_INMEMORY(s_flags)) {
3880 printf("R%3i <- M%3i",t_regoff,s_regoff);
3882 else if (IS_INMEMORY(t_flags)) {
3884 printf("M%3i <- R%3i",t_regoff,s_regoff);
3888 printf("R%3i <- R%3i",t_regoff,s_regoff);
3892 #endif /* defined(SSA_DEBUG_VERBOSE) */
/* cg_move ********************************************************************

   Emit machine code that moves a value of the given type from the
   source location (src_regoff/src_flags) to the destination location
   (dst_regoff/dst_flags).  A location is either a register or a
   4-byte stack slot relative to REG_SP (IS_INMEMORY flag; regoff is
   scaled by 4 for memory operands).

   Visible constraints on i386: float/double and 2-word (long) values
   must be in memory; only int-sized values may be held in registers.

   NOTE(review): this view is elided — several closing braces and
   else-branches are not shown.

*******************************************************************************/

3896 void cg_move(codegendata *cd, s4 type, s4 src_regoff, s4 src_flags,
3897 s4 dst_regoff, s4 dst_flags) {
/* memory -> memory: copy through the x87 FPU stack (flt/dbl) or
   through REG_ITMP1 (integer types); skip if src and dst coincide */
3898 if ((IS_INMEMORY(dst_flags)) && (IS_INMEMORY(src_flags))) {
3900 if (dst_regoff != src_regoff) {
3901 if (!IS_2_WORD_TYPE(type)) {
3902 if (IS_FLT_DBL_TYPE(type)) {
/* single-precision float: load/store via the FPU stack */
3903 emit_flds_membase(cd, REG_SP, src_regoff * 4);
3904 emit_fstps_membase(cd, REG_SP, dst_regoff * 4);
3906 emit_mov_membase_reg(cd, REG_SP, src_regoff * 4,
3908 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, dst_regoff * 4);
3910 } else { /* LONG OR DOUBLE */
3911 if (IS_FLT_DBL_TYPE(type)) {
/* double: 64-bit FPU load/store */
3912 emit_fldl_membase( cd, REG_SP, src_regoff * 4);
3913 emit_fstpl_membase(cd, REG_SP, dst_regoff * 4);
/* long: copy low and high 32-bit words separately via REG_ITMP1 */
3915 emit_mov_membase_reg(cd, REG_SP, src_regoff * 4,
3917 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, dst_regoff * 4);
3918 emit_mov_membase_reg(cd, REG_SP, src_regoff * 4 + 4,
3920 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP,
3921 dst_regoff * 4 + 4);
/* at least one side is a register: only legal for int-sized types */
3926 if (IS_FLT_DBL_TYPE(type)) {
3927 log_text("cg_move: flt/dbl type have to be in memory\n");
3930 if (IS_2_WORD_TYPE(type)) {
3931 log_text("cg_move: longs have to be in memory\n");
3934 if (IS_INMEMORY(src_flags)) {
/* memory -> register load */
3936 emit_mov_membase_reg(cd, REG_SP, src_regoff * 4, dst_regoff);
3937 } else if (IS_INMEMORY(dst_flags)) {
/* register -> memory store */
3939 emit_mov_reg_membase(cd, src_regoff, REG_SP, dst_regoff * 4);
3942 /* only ints can be in regs on i386 */
3943 M_INTMOVE(src_regoff,dst_regoff);
3947 #endif /* defined(ENABLE_SSA) */
3949 /* createcompilerstub **********************************************************
3951 Creates a stub routine which calls the compiler.
3953 *******************************************************************************/
/* Layout of a compiler stub: a data area of 3 pointers followed by the
   machine code that enters the JIT compiler.  The expansions are
   parenthesized so the macros stay correct inside larger expressions
   (e.g. a multiplication in an allocation macro): without parentheses
   COMPILERSTUB_SIZE would expand to `3 * SIZEOF_VOID_P + 12` and bind
   wrongly under `*` or unary operators. */
#define COMPILERSTUB_DATASIZE (3 * SIZEOF_VOID_P)
#define COMPILERSTUB_CODESIZE 12

#define COMPILERSTUB_SIZE (COMPILERSTUB_DATASIZE + COMPILERSTUB_CODESIZE)
/* createcompilerstub *********************************************************

   Allocate and fill a compiler stub for method m: a data area of
   COMPILERSTUB_DATASIZE bytes (the asm_call_jit_compiler entry point
   and a faked codeinfo whose "m" slot points back into the data area)
   followed by COMPILERSTUB_CODESIZE bytes of code that loads the
   methodinfo into REG_ITMP1 and the compiler entry into REG_ITMP3.

   NOTE(review): this view is elided; the declaration of d, the
   codegendata setup, the final jump emission and the return statement
   are not visible here.

*******************************************************************************/

3961 u1 *createcompilerstub(methodinfo *m)
3963 u1 *s; /* memory to hold the stub */
3968 s = CNEW(u1, COMPILERSTUB_SIZE);
3970 /* set data pointer and code pointer */
/* skip past the data area; code is emitted after it */
3973 s = s + COMPILERSTUB_DATASIZE;
3975 /* mark start of dump memory area */
3977 dumpsize = dump_size();
/* temporary codegendata allocated from the dump area (released below) */
3979 cd = DNEW(codegendata);
3982 /* The codeinfo pointer is actually a pointer to the
3983 methodinfo. This fakes a codeinfo structure. */
3985 d[0] = (ptrint) asm_call_jit_compiler;
3987 d[2] = (ptrint) &d[1]; /* fake code->m */
3989 /* code for the stub */
3991 M_MOV_IMM(m, REG_ITMP1); /* method info */
3992 M_MOV_IMM(asm_call_jit_compiler, REG_ITMP3);
3995 #if defined(ENABLE_STATISTICS)
/* account the stub size in the code-size statistics */
3997 count_cstub_len += COMPILERSTUB_SIZE;
4000 /* release dump area */
4002 dump_release(dumpsize);
4008 /* createnativestub ************************************************************
4010 Creates a stub routine which calls a native method.
4012 *******************************************************************************/
4014 u1 *createnativestub(functionptr f, jitdata *jd, methoddesc *nmd)
4022 s4 i, j; /* count variables */
4026 /* get required compiler data */
4033 /* set some variables */
4036 nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;
4038 /* calculate stackframe size */
4040 cd->stackframesize =
4041 sizeof(stackframeinfo) / SIZEOF_VOID_P +
4042 sizeof(localref_table) / SIZEOF_VOID_P +
4043 1 + /* function pointer */
4044 4 * 4 + /* 4 arguments (start_native_call) */
4047 /* keep stack 16-byte aligned */
4049 cd->stackframesize |= 0x3;
4051 /* create method header */
4053 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
4054 (void) dseg_add_unique_s4(cd, cd->stackframesize * 4); /* FrameSize */
4055 (void) dseg_add_unique_s4(cd, 0); /* IsSync */
4056 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
4057 (void) dseg_add_unique_s4(cd, 0); /* IntSave */
4058 (void) dseg_add_unique_s4(cd, 0); /* FltSave */
4059 (void) dseg_addlinenumbertablesize(cd);
4060 (void) dseg_add_unique_s4(cd, 0); /* ExTableSize */
4062 #if defined(ENABLE_PROFILING)
4063 /* generate native method profiling code */
4065 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
4066 /* count frequency */
4068 M_MOV_IMM(code, REG_ITMP1);
4069 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
4073 /* calculate stackframe size for native function */
4075 M_ASUB_IMM(cd->stackframesize * 4, REG_SP);
4077 #if !defined(NDEBUG)
4078 emit_verbosecall_enter(jd);
4081 /* get function address (this must happen before the stackframeinfo) */
4083 #if !defined(WITH_STATIC_CLASSPATH)
4085 codegen_addpatchref(cd, PATCHER_resolve_native, m, 0);
4087 if (opt_showdisassemble) {
4088 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
4093 M_AST_IMM((ptrint) f, REG_SP, 4 * 4);
4095 /* Mark the whole fpu stack as free for native functions (only for saved */
4096 /* register count == 0). */
4098 emit_ffree_reg(cd, 0);
4099 emit_ffree_reg(cd, 1);
4100 emit_ffree_reg(cd, 2);
4101 emit_ffree_reg(cd, 3);
4102 emit_ffree_reg(cd, 4);
4103 emit_ffree_reg(cd, 5);
4104 emit_ffree_reg(cd, 6);
4105 emit_ffree_reg(cd, 7);
4107 /* prepare data structures for native function call */
4109 M_MOV(REG_SP, REG_ITMP1);
4110 M_AADD_IMM(cd->stackframesize * 4, REG_ITMP1);
4112 M_AST(REG_ITMP1, REG_SP, 0 * 4);
4113 M_IST_IMM(0, REG_SP, 1 * 4);
4116 M_MOV(REG_SP, REG_ITMP2);
4117 M_AADD_IMM(cd->stackframesize * 4 + SIZEOF_VOID_P, REG_ITMP2);
4119 M_AST(REG_ITMP2, REG_SP, 2 * 4);
4120 M_ALD(REG_ITMP3, REG_SP, cd->stackframesize * 4);
4121 M_AST(REG_ITMP3, REG_SP, 3 * 4);
4122 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
4125 M_ALD(REG_ITMP3, REG_SP, 4 * 4);
4127 /* copy arguments into new stackframe */
4129 for (i = md->paramcount - 1, j = i + nativeparams; i >= 0; i--, j--) {
4130 t = md->paramtypes[i].type;
4132 if (!md->params[i].inmemory) {
4133 /* no integer argument registers */
4134 } else { /* float/double in memory can be copied like int/longs */
4135 s1 = (md->params[i].regoff + cd->stackframesize + 1) * 4;
4136 s2 = nmd->params[j].regoff * 4;
4138 M_ILD(REG_ITMP1, REG_SP, s1);
4139 M_IST(REG_ITMP1, REG_SP, s2);
4140 if (IS_2_WORD_TYPE(t)) {
4141 M_ILD(REG_ITMP1, REG_SP, s1 + 4);
4142 M_IST(REG_ITMP1, REG_SP, s2 + 4);
4147 /* if function is static, put class into second argument */
4149 if (m->flags & ACC_STATIC)
4150 M_AST_IMM(m->class, REG_SP, 1 * 4);
4152 /* put env into first argument */
4154 M_AST_IMM(_Jv_env, REG_SP, 0 * 4);
4156 /* call the native function */
4160 /* save return value */
4162 if (md->returntype.type != TYPE_VOID) {
4163 if (IS_INT_LNG_TYPE(md->returntype.type)) {
4164 if (IS_2_WORD_TYPE(md->returntype.type))
4165 M_IST(REG_RESULT2, REG_SP, 2 * 4);
4166 M_IST(REG_RESULT, REG_SP, 1 * 4);
4169 if (IS_2_WORD_TYPE(md->returntype.type))
4170 emit_fstl_membase(cd, REG_SP, 1 * 4);
4172 emit_fsts_membase(cd, REG_SP, 1 * 4);
4176 #if !defined(NDEBUG)
4177 emit_verbosecall_exit(jd);
4180 /* remove native stackframe info */
4182 M_MOV(REG_SP, REG_ITMP1);
4183 M_AADD_IMM(cd->stackframesize * 4, REG_ITMP1);
4185 M_AST(REG_ITMP1, REG_SP, 0 * 4);
4186 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
4188 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
4190 /* restore return value */
4192 if (md->returntype.type != TYPE_VOID) {
4193 if (IS_INT_LNG_TYPE(md->returntype.type)) {
4194 if (IS_2_WORD_TYPE(md->returntype.type))
4195 M_ILD(REG_RESULT2, REG_SP, 2 * 4);
4196 M_ILD(REG_RESULT, REG_SP, 1 * 4);
4199 if (IS_2_WORD_TYPE(md->returntype.type))
4200 emit_fldl_membase(cd, REG_SP, 1 * 4);
4202 emit_flds_membase(cd, REG_SP, 1 * 4);
4206 M_AADD_IMM(cd->stackframesize * 4, REG_SP);
4208 /* check for exception */
4215 /* handle exception */
4217 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
4218 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
4219 M_ASUB_IMM(2, REG_ITMP2_XPC);
4221 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
4225 /* generate patcher stubs */
4227 emit_patcher_stubs(jd);
4231 return code->entrypoint;
4236 * These are local overrides for various environment variables in Emacs.
4237 * Please do not remove this and leave it at the end of the file, where
4238 * Emacs will automagically detect them.
4239 * ---------------------------------------------------------------------
4242 * indent-tabs-mode: t
4246 * vim:noexpandtab:sw=4:ts=4: