1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
36 #include "vm/jit/i386/md-abi.h"
38 #include "vm/jit/i386/codegen.h"
39 #include "vm/jit/i386/emit.h"
41 #include "mm/memory.h"
42 #include "native/jni.h"
43 #include "native/localref.h"
44 #include "native/native.h"
46 #include "threads/lock-common.h"
48 #include "vm/builtin.h"
49 #include "vm/exceptions.h"
50 #include "vm/global.h"
51 #include "vm/stringlocal.h"
54 #include "vm/jit/abi.h"
55 #include "vm/jit/asmpart.h"
56 #include "vm/jit/codegen-common.h"
57 #include "vm/jit/dseg.h"
58 #include "vm/jit/emit-common.h"
59 #include "vm/jit/jit.h"
60 #include "vm/jit/parse.h"
61 #include "vm/jit/patcher-common.h"
62 #include "vm/jit/reg.h"
63 #include "vm/jit/replace.h"
64 #include "vm/jit/stacktrace.h"
66 #if defined(ENABLE_SSA)
67 # include "vm/jit/optimizing/lsra.h"
68 # include "vm/jit/optimizing/ssa.h"
69 #elif defined(ENABLE_LSRA)
70 # include "vm/jit/allocator/lsra.h"
73 #include "vmcore/loader.h"
74 #include "vmcore/options.h"
75 #include "vmcore/utf8.h"
78 /* codegen_emit ****************************************************************
80 Generates machine code.
82 *******************************************************************************/
84 bool codegen_emit(jitdata *jd)
90 s4 len, s1, s2, s3, d, disp;
96 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
97 builtintable_entry *bte;
100 unresolved_field *uf;
103 #if defined(ENABLE_SSA)
105 bool last_cmd_was_goto;
107 last_cmd_was_goto = false;
111 /* get required compiler data */
118 /* prevent compiler warnings */
129 s4 savedregs_num = 0;
132 /* space to save used callee saved registers */
134 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
135 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
137 cd->stackframesize = rd->memuse + savedregs_num;
140 #if defined(ENABLE_THREADS)
141 /* space to save argument of monitor_enter */
143 if (checksync && code_is_synchronized(code))
144 cd->stackframesize++;
147 /* create method header */
149 /* Keep stack of non-leaf functions 16-byte aligned. */
151 if (!code_is_leafmethod(code)) {
152 ALIGN_ODD(cd->stackframesize); /* XXX this is wrong, +4 is missing */
155 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
156 (void) dseg_add_unique_s4(cd, cd->stackframesize * 8); /* FrameSize */
158 /* IsSync contains the offset relative to the stack pointer for the
159 argument of monitor_exit used in the exception handler. Since the
160 offset could be zero and give a wrong meaning of the flag it is
163 /* XXX Remove this "offset by one". */
165 code->synchronizedoffset = (rd->memuse + 1) * 8;
167 /* REMOVEME dummy IsSync */
168 (void) dseg_add_unique_s4(cd, 0);
170 /* REMOVEME: We still need it for exception handling in assembler. */
172 if (code_is_leafmethod(code))
173 (void) dseg_add_unique_s4(cd, 1); /* IsLeaf */
175 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
177 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
178 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
180 /* adds a reference for the length of the line number counter. We don't
181 know the size yet, since we evaluate the information during code
182 generation, to save one additional iteration over the whole
183 instructions. During code optimization the position could have changed
184 to the information gotten from the class file */
185 (void) dseg_addlinenumbertablesize(cd);
187 (void) dseg_add_unique_s4(cd, jd->exceptiontablelength); /* ExTableSize */
189 /* create exception table */
191 for (ex = jd->exceptiontable; ex != NULL; ex = ex->down) {
192 dseg_add_target(cd, ex->start);
193 dseg_add_target(cd, ex->end);
194 dseg_add_target(cd, ex->handler);
195 (void) dseg_add_unique_address(cd, ex->catchtype.any);
198 #if defined(ENABLE_PROFILING)
199 /* generate method profiling code */
201 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
202 /* count frequency */
204 M_MOV_IMM(code, REG_ITMP3);
205 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
209 /* create stack frame (if necessary) */
211 if (cd->stackframesize)
212 M_ASUB_IMM(cd->stackframesize * 8, REG_SP);
214 /* save return address and used callee saved registers */
216 p = cd->stackframesize;
217 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
218 p--; M_AST(rd->savintregs[i], REG_SP, p * 8);
220 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
221 p--; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 8);
224 /* take arguments out of register or stack frame */
229 for (p = 0, l = 0; p < md->paramcount; p++) {
230 t = md->paramtypes[p].type;
232 varindex = jd->local_map[l * 5 + t];
233 #if defined(ENABLE_SSA)
235 if (varindex != UNUSED)
236 varindex = ls->var_0[varindex];
237 if ((varindex != UNUSED) && (ls->lifetime[varindex].type == UNUSED))
242 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
245 if (varindex == UNUSED)
249 s1 = md->params[p].regoff;
252 if (IS_INT_LNG_TYPE(t)) { /* integer args */
253 if (!md->params[p].inmemory) { /* register arguments */
254 log_text("integer register argument");
256 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
257 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
259 else { /* reg arg -> spilled */
260 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
264 if (!(var->flags & INMEMORY)) {
265 M_ILD(d, REG_SP, cd->stackframesize * 8 + 4 + s1);
268 if (!IS_2_WORD_TYPE(t)) {
269 #if defined(ENABLE_SSA)
270 /* no copy avoiding by now possible with SSA */
272 emit_mov_membase_reg( /* + 4 for return address */
273 cd, REG_SP, cd->stackframesize * 8 + s1 + 4,
275 emit_mov_reg_membase(
276 cd, REG_ITMP1, REG_SP, var->vv.regoff);
279 #endif /*defined(ENABLE_SSA)*/
280 /* reuse stackslot */
281 var->vv.regoff = cd->stackframesize * 8 + 4 + s1;
285 #if defined(ENABLE_SSA)
286 /* no copy avoiding by now possible with SSA */
288 emit_mov_membase_reg( /* + 4 for return address */
289 cd, REG_SP, cd->stackframesize * 8 + s1 + 4,
291 emit_mov_reg_membase(
292 cd, REG_ITMP1, REG_SP, var->vv.regoff);
293 emit_mov_membase_reg( /* + 4 for return address */
294 cd, REG_SP, cd->stackframesize * 8 + s1 + 4 + 4,
296 emit_mov_reg_membase(
297 cd, REG_ITMP1, REG_SP, var->vv.regoff + 4);
300 #endif /*defined(ENABLE_SSA)*/
301 /* reuse stackslot */
302 var->vv.regoff = cd->stackframesize * 8 + 4 + s1;
307 else { /* floating args */
308 if (!md->params[p].inmemory) { /* register arguments */
309 log_text("There are no float argument registers!");
311 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
312 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
313 } else { /* reg arg -> spilled */
314 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 8 */
318 else { /* stack arguments */
319 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
322 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
324 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
329 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
331 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
334 } else { /* stack-arg -> spilled */
335 #if defined(ENABLE_SSA)
336 /* no copy avoiding by now possible with SSA */
338 emit_mov_membase_reg(
339 cd, REG_SP, cd->stackframesize * 8 + s1 + 4, REG_ITMP1);
340 emit_mov_reg_membase(
341 cd, REG_ITMP1, REG_SP, var->vv.regoff);
344 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
345 emit_fstps_membase(cd, REG_SP, var->vv.regoff);
349 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
350 emit_fstpl_membase(cd, REG_SP, var->vv.regoff);
354 #endif /*defined(ENABLE_SSA)*/
355 /* reuse stackslot */
356 var->vv.regoff = cd->stackframesize * 8 + 4 + s1;
362 /* call monitorenter function */
364 #if defined(ENABLE_THREADS)
365 if (checksync && code_is_synchronized(code)) {
368 if (m->flags & ACC_STATIC) {
369 M_MOV_IMM(&m->class->object.header, REG_ITMP1);
372 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 8 + 4);
375 M_ALD_MEM(REG_ITMP1, EXCEPTION_HARDWARE_NULLPOINTER);
378 M_AST(REG_ITMP1, REG_SP, s1 * 8);
379 M_AST(REG_ITMP1, REG_SP, 0 * 4);
380 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
386 emit_verbosecall_enter(jd);
391 #if defined(ENABLE_SSA)
392 /* with SSA the Header is Basic Block 0 - insert phi Moves if necessary */
394 codegen_emit_phi_moves(jd, ls->basicblocks[0]);
397 /* end of header generation */
399 /* create replacement points */
401 REPLACEMENT_POINTS_INIT(cd, jd);
403 /* walk through all basic blocks */
405 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
407 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
409 if (bptr->flags >= BBREACHED) {
410 /* branch resolving */
412 codegen_resolve_branchrefs(cd, bptr);
414 /* handle replacement points */
416 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
418 #if defined(ENABLE_REPLACEMENT)
419 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
420 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
422 disp = (s4) &(m->hitcountdown);
423 M_ISUB_IMM_MEMABS(1, disp);
429 /* copy interface registers to their destination */
434 #if defined(ENABLE_PROFILING)
435 /* generate basic block profiling code */
437 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
438 /* count frequency */
440 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
441 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
445 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
446 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
449 # if defined(ENABLE_SSA)
451 last_cmd_was_goto = false;
455 var = VAR(bptr->invars[len]);
456 if (bptr->type != BBTYPE_STD) {
457 if (!IS_2_WORD_TYPE(var->type)) {
458 if (bptr->type == BBTYPE_EXH) {
459 d = codegen_reg_of_var(0, var, REG_ITMP1);
460 M_INTMOVE(REG_ITMP1, d);
461 emit_store(jd, NULL, var, d);
465 log_text("copy interface registers(EXH, SBR): longs \
466 have to be in memory (begin 1)");
474 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
478 var = VAR(bptr->invars[len]);
479 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
480 if (!IS_2_WORD_TYPE(var->type)) {
481 if (bptr->type == BBTYPE_EXH) {
482 d = codegen_reg_of_var(0, var, REG_ITMP1);
483 M_INTMOVE(REG_ITMP1, d);
484 emit_store(jd, NULL, var, d);
488 log_text("copy interface registers: longs have to be in \
495 assert((var->flags & INOUT));
500 /* walk through all instructions */
505 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
506 if (iptr->line != currentline) {
507 dseg_addlinenumber(cd, iptr->line);
508 currentline = iptr->line;
511 MCODECHECK(1024); /* 1kB should be enough */
514 case ICMD_NOP: /* ... ==> ... */
515 case ICMD_POP: /* ..., value ==> ... */
516 case ICMD_POP2: /* ..., value, value ==> ... */
519 case ICMD_INLINE_START:
521 REPLACEMENT_POINT_INLINE_START(cd, iptr);
524 case ICMD_INLINE_BODY:
526 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
527 dseg_addlinenumber_inline_start(cd, iptr);
528 dseg_addlinenumber(cd, iptr->line);
531 case ICMD_INLINE_END:
533 dseg_addlinenumber_inline_end(cd, iptr);
534 dseg_addlinenumber(cd, iptr->line);
537 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
539 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
540 emit_nullpointer_check(cd, iptr, s1);
543 /* constant operations ************************************************/
545 case ICMD_ICONST: /* ... ==> ..., constant */
547 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
548 ICONST(d, iptr->sx.val.i);
549 emit_store_dst(jd, iptr, d);
552 case ICMD_LCONST: /* ... ==> ..., constant */
554 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
555 LCONST(d, iptr->sx.val.l);
556 emit_store_dst(jd, iptr, d);
559 case ICMD_FCONST: /* ... ==> ..., constant */
561 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
562 if (iptr->sx.val.f == 0.0) {
566 if (iptr->sx.val.i == 0x80000000) {
570 } else if (iptr->sx.val.f == 1.0) {
573 } else if (iptr->sx.val.f == 2.0) {
579 disp = dseg_add_float(cd, iptr->sx.val.f);
580 emit_mov_imm_reg(cd, 0, REG_ITMP1);
582 emit_flds_membase(cd, REG_ITMP1, disp);
584 emit_store_dst(jd, iptr, d);
587 case ICMD_DCONST: /* ... ==> ..., constant */
589 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
590 if (iptr->sx.val.d == 0.0) {
594 if (iptr->sx.val.l == 0x8000000000000000LL) {
598 } else if (iptr->sx.val.d == 1.0) {
601 } else if (iptr->sx.val.d == 2.0) {
607 disp = dseg_add_double(cd, iptr->sx.val.d);
608 emit_mov_imm_reg(cd, 0, REG_ITMP1);
610 emit_fldl_membase(cd, REG_ITMP1, disp);
612 emit_store_dst(jd, iptr, d);
615 case ICMD_ACONST: /* ... ==> ..., constant */
617 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
619 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
620 patcher_add_patch_ref(jd, PATCHER_aconst,
621 iptr->sx.val.c.ref, 0);
626 if (iptr->sx.val.anyptr == NULL)
629 M_MOV_IMM(iptr->sx.val.anyptr, d);
631 emit_store_dst(jd, iptr, d);
635 /* load/store/copy/move operations ************************************/
653 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
658 /* integer operations *************************************************/
660 case ICMD_INEG: /* ..., value ==> ..., - value */
662 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
663 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
666 emit_store_dst(jd, iptr, d);
669 case ICMD_LNEG: /* ..., value ==> ..., - value */
671 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
672 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
674 M_NEG(GET_LOW_REG(d));
675 M_IADDC_IMM(0, GET_HIGH_REG(d));
676 M_NEG(GET_HIGH_REG(d));
677 emit_store_dst(jd, iptr, d);
680 case ICMD_I2L: /* ..., value ==> ..., value */
682 s1 = emit_load_s1(jd, iptr, EAX);
683 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
686 M_LNGMOVE(EAX_EDX_PACKED, d);
687 emit_store_dst(jd, iptr, d);
690 case ICMD_L2I: /* ..., value ==> ..., value */
692 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
693 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
695 emit_store_dst(jd, iptr, d);
698 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
700 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
701 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
705 emit_store_dst(jd, iptr, d);
708 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
710 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
711 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
713 emit_store_dst(jd, iptr, d);
716 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
718 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
719 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
721 emit_store_dst(jd, iptr, d);
725 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
727 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
728 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
729 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
736 emit_store_dst(jd, iptr, d);
740 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
741 /* sx.val.i = constant */
743 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
744 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
746 /* `inc reg' is slower on p4's (regarding to ia32
747 optimization reference manual and benchmarks) and as
751 M_IADD_IMM(iptr->sx.val.i, d);
752 emit_store_dst(jd, iptr, d);
755 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
757 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
758 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
759 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
760 M_INTMOVE(s1, GET_LOW_REG(d));
761 M_IADD(s2, GET_LOW_REG(d));
762 /* don't use REG_ITMP1 */
763 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
764 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
765 M_INTMOVE(s1, GET_HIGH_REG(d));
766 M_IADDC(s2, GET_HIGH_REG(d));
767 emit_store_dst(jd, iptr, d);
770 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
771 /* sx.val.l = constant */
773 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
774 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
776 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
777 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
778 emit_store_dst(jd, iptr, d);
781 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
783 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
784 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
785 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
787 M_INTMOVE(s1, REG_ITMP1);
788 M_ISUB(s2, REG_ITMP1);
789 M_INTMOVE(REG_ITMP1, d);
795 emit_store_dst(jd, iptr, d);
798 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
799 /* sx.val.i = constant */
801 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
802 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
804 M_ISUB_IMM(iptr->sx.val.i, d);
805 emit_store_dst(jd, iptr, d);
808 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
810 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
811 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
812 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
813 if (s2 == GET_LOW_REG(d)) {
814 M_INTMOVE(s1, REG_ITMP1);
815 M_ISUB(s2, REG_ITMP1);
816 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
819 M_INTMOVE(s1, GET_LOW_REG(d));
820 M_ISUB(s2, GET_LOW_REG(d));
822 /* don't use REG_ITMP1 */
823 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
824 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
825 if (s2 == GET_HIGH_REG(d)) {
826 M_INTMOVE(s1, REG_ITMP2);
827 M_ISUBB(s2, REG_ITMP2);
828 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
831 M_INTMOVE(s1, GET_HIGH_REG(d));
832 M_ISUBB(s2, GET_HIGH_REG(d));
834 emit_store_dst(jd, iptr, d);
837 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
838 /* sx.val.l = constant */
840 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
841 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
843 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
844 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
845 emit_store_dst(jd, iptr, d);
848 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
850 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
851 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
852 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
859 emit_store_dst(jd, iptr, d);
862 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
863 /* sx.val.i = constant */
865 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
866 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
867 M_IMUL_IMM(s1, iptr->sx.val.i, d);
868 emit_store_dst(jd, iptr, d);
871 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
873 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
874 s2 = emit_load_s2_low(jd, iptr, EDX);
875 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
877 M_INTMOVE(s1, REG_ITMP2);
878 M_IMUL(s2, REG_ITMP2);
880 s1 = emit_load_s1_low(jd, iptr, EAX);
881 s2 = emit_load_s2_high(jd, iptr, EDX);
884 M_IADD(EDX, REG_ITMP2);
886 s1 = emit_load_s1_low(jd, iptr, EAX);
887 s2 = emit_load_s2_low(jd, iptr, EDX);
890 M_INTMOVE(EAX, GET_LOW_REG(d));
891 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
893 emit_store_dst(jd, iptr, d);
896 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
897 /* sx.val.l = constant */
899 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
900 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
901 ICONST(EAX, iptr->sx.val.l);
903 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
904 M_IADD(REG_ITMP2, EDX);
905 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
906 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
907 M_IADD(REG_ITMP2, EDX);
908 M_LNGMOVE(EAX_EDX_PACKED, d);
909 emit_store_dst(jd, iptr, d);
912 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
914 s1 = emit_load_s1(jd, iptr, EAX);
915 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
916 d = codegen_reg_of_dst(jd, iptr, EAX);
917 emit_arithmetic_check(cd, iptr, s2);
919 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
921 /* check as described in jvm spec */
923 M_CMP_IMM(0x80000000, EAX);
930 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
931 emit_store_dst(jd, iptr, d);
934 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
936 s1 = emit_load_s1(jd, iptr, EAX);
937 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
938 d = codegen_reg_of_dst(jd, iptr, EDX);
939 emit_arithmetic_check(cd, iptr, s2);
941 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
943 /* check as described in jvm spec */
945 M_CMP_IMM(0x80000000, EAX);
953 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
954 emit_store_dst(jd, iptr, d);
957 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
958 /* sx.val.i = constant */
960 /* TODO: optimize for `/ 2' */
961 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
962 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
966 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
967 M_SRA_IMM(iptr->sx.val.i, d);
968 emit_store_dst(jd, iptr, d);
971 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
972 /* sx.val.i = constant */
974 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
975 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
977 M_MOV(s1, REG_ITMP1);
981 M_AND_IMM(iptr->sx.val.i, d);
983 M_BGE(2 + 2 + 6 + 2);
984 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
986 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
988 emit_store_dst(jd, iptr, d);
991 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
992 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
994 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
995 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
997 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
998 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
999 /* XXX could be optimized */
1000 emit_arithmetic_check(cd, iptr, REG_ITMP3);
1002 bte = iptr->sx.s23.s3.bte;
1005 M_LST(s2, REG_SP, 2 * 4);
1007 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1008 M_LST(s1, REG_SP, 0 * 4);
1010 M_MOV_IMM(bte->fp, REG_ITMP3);
1012 emit_store_dst(jd, iptr, d);
1015 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1016 /* sx.val.i = constant */
1018 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1019 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1021 M_TEST(GET_HIGH_REG(d));
1023 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1024 M_IADDC_IMM(0, GET_HIGH_REG(d));
1025 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1026 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1027 emit_store_dst(jd, iptr, d);
1031 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1032 /* sx.val.l = constant */
1034 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1035 if (iptr->dst.var->flags & INMEMORY) {
1036 if (iptr->s1.var->flags & INMEMORY) {
1037 /* Alpha algorithm */
1039 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8);
1041 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1047 /* TODO: hmm, don't know if this is always correct */
1049 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1051 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1057 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1058 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1060 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1061 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1062 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1063 emit_jcc(cd, CC_GE, disp);
1065 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1066 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1068 emit_neg_reg(cd, REG_ITMP1);
1069 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1070 emit_neg_reg(cd, REG_ITMP2);
1072 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1073 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1075 emit_neg_reg(cd, REG_ITMP1);
1076 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1077 emit_neg_reg(cd, REG_ITMP2);
1079 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 8);
1080 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 8 + 4);
1084 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1085 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1087 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1088 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1089 M_TEST(GET_LOW_REG(s1));
1095 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1097 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1098 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1099 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1100 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1103 emit_store_dst(jd, iptr, d);
1106 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1107 /* sx.val.i = constant */
1109 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1110 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1112 M_SLL_IMM(iptr->sx.val.i, d);
1113 emit_store_dst(jd, iptr, d);
1116 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1118 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1119 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1120 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1121 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1124 emit_store_dst(jd, iptr, d);
1127 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1128 /* sx.val.i = constant */
1130 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1131 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1133 M_SRA_IMM(iptr->sx.val.i, d);
1134 emit_store_dst(jd, iptr, d);
1137 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1139 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1140 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1141 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1142 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1145 emit_store_dst(jd, iptr, d);
1148 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1149 /* sx.val.i = constant */
1151 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1152 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1154 M_SRL_IMM(iptr->sx.val.i, d);
1155 emit_store_dst(jd, iptr, d);
1158 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1160 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1161 s2 = emit_load_s2(jd, iptr, ECX);
1162 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1165 M_TEST_IMM(32, ECX);
1167 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1168 M_CLR(GET_LOW_REG(d));
1169 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1170 M_SLL(GET_LOW_REG(d));
1171 emit_store_dst(jd, iptr, d);
1174 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1175 /* sx.val.i = constant */
1177 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1178 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1180 if (iptr->sx.val.i & 0x20) {
1181 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1182 M_CLR(GET_LOW_REG(d));
1183 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1187 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1189 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1191 emit_store_dst(jd, iptr, d);
1194 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1196 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1197 s2 = emit_load_s2(jd, iptr, ECX);
1198 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1201 M_TEST_IMM(32, ECX);
1203 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1204 M_SRA_IMM(31, GET_HIGH_REG(d));
1205 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1206 M_SRA(GET_HIGH_REG(d));
1207 emit_store_dst(jd, iptr, d);
1210 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1211 /* sx.val.i = constant */
1213 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1214 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1216 if (iptr->sx.val.i & 0x20) {
1217 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1218 M_SRA_IMM(31, GET_HIGH_REG(d));
1219 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1223 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1225 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1227 emit_store_dst(jd, iptr, d);
1230 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1232 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1233 s2 = emit_load_s2(jd, iptr, ECX);
1234 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1237 M_TEST_IMM(32, ECX);
1239 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1240 M_CLR(GET_HIGH_REG(d));
1241 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1242 M_SRL(GET_HIGH_REG(d));
1243 emit_store_dst(jd, iptr, d);
1246 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1247 /* sx.val.l = constant */
1249 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1250 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1252 if (iptr->sx.val.i & 0x20) {
1253 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1254 M_CLR(GET_HIGH_REG(d));
1255 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1259 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1261 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1263 emit_store_dst(jd, iptr, d);
1266 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1268 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1269 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1270 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1277 emit_store_dst(jd, iptr, d);
1280 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1281 /* sx.val.i = constant */
1283 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1284 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1286 M_AND_IMM(iptr->sx.val.i, d);
1287 emit_store_dst(jd, iptr, d);
1290 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1292 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1293 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1294 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1295 if (s2 == GET_LOW_REG(d))
1296 M_AND(s1, GET_LOW_REG(d));
1298 M_INTMOVE(s1, GET_LOW_REG(d));
1299 M_AND(s2, GET_LOW_REG(d));
1301 /* REG_ITMP1 probably contains low 32-bit of destination */
1302 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1303 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1304 if (s2 == GET_HIGH_REG(d))
1305 M_AND(s1, GET_HIGH_REG(d));
1307 M_INTMOVE(s1, GET_HIGH_REG(d));
1308 M_AND(s2, GET_HIGH_REG(d));
1310 emit_store_dst(jd, iptr, d);
1313 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1314 /* sx.val.l = constant */
1316 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1317 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1319 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1320 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1321 emit_store_dst(jd, iptr, d);
1324 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1326 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1327 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1328 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1335 emit_store_dst(jd, iptr, d);
1338 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1339 /* sx.val.i = constant */
1341 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1342 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1344 M_OR_IMM(iptr->sx.val.i, d);
1345 emit_store_dst(jd, iptr, d);
1348 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1350 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1351 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1352 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1353 if (s2 == GET_LOW_REG(d))
1354 M_OR(s1, GET_LOW_REG(d));
1356 M_INTMOVE(s1, GET_LOW_REG(d));
1357 M_OR(s2, GET_LOW_REG(d));
1359 /* REG_ITMP1 probably contains low 32-bit of destination */
1360 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1361 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1362 if (s2 == GET_HIGH_REG(d))
1363 M_OR(s1, GET_HIGH_REG(d));
1365 M_INTMOVE(s1, GET_HIGH_REG(d));
1366 M_OR(s2, GET_HIGH_REG(d));
1368 emit_store_dst(jd, iptr, d);
1371 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1372 /* sx.val.l = constant */
1374 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1375 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1377 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1378 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1379 emit_store_dst(jd, iptr, d);
1382 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1384 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1385 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1386 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1393 emit_store_dst(jd, iptr, d);
1396 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1397 /* sx.val.i = constant */
1399 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1400 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1402 M_XOR_IMM(iptr->sx.val.i, d);
1403 emit_store_dst(jd, iptr, d);
1406 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1408 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1409 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1410 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1411 if (s2 == GET_LOW_REG(d))
1412 M_XOR(s1, GET_LOW_REG(d));
1414 M_INTMOVE(s1, GET_LOW_REG(d));
1415 M_XOR(s2, GET_LOW_REG(d));
1417 /* REG_ITMP1 probably contains low 32-bit of destination */
1418 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1419 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1420 if (s2 == GET_HIGH_REG(d))
1421 M_XOR(s1, GET_HIGH_REG(d));
1423 M_INTMOVE(s1, GET_HIGH_REG(d));
1424 M_XOR(s2, GET_HIGH_REG(d));
1426 emit_store_dst(jd, iptr, d);
1429 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1430 /* sx.val.l = constant */
1432 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1433 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1435 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1436 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1437 emit_store_dst(jd, iptr, d);
1441 /* floating operations ************************************************/
1443 case ICMD_FNEG: /* ..., value ==> ..., - value */
1445 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1446 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1448 emit_store_dst(jd, iptr, d);
1451 case ICMD_DNEG: /* ..., value ==> ..., - value */
1453 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1454 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1456 emit_store_dst(jd, iptr, d);
1459 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1461 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1462 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1463 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1465 emit_store_dst(jd, iptr, d);
1468 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1470 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1471 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1472 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1474 emit_store_dst(jd, iptr, d);
1477 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1479 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1480 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1481 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1483 emit_store_dst(jd, iptr, d);
1486 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1488 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1489 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1490 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1492 emit_store_dst(jd, iptr, d);
1495 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1497 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1498 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1499 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1501 emit_store_dst(jd, iptr, d);
1504 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1506 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1507 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1508 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1510 emit_store_dst(jd, iptr, d);
1513 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1515 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1516 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1517 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1519 emit_store_dst(jd, iptr, d);
1522 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1524 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1525 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1526 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1528 emit_store_dst(jd, iptr, d);
1531 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1533 /* exchanged to skip fxch */
1534 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1535 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1536 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1537 /* emit_fxch(cd); */
1542 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1543 emit_store_dst(jd, iptr, d);
1544 emit_ffree_reg(cd, 0);
1548 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1550 /* exchanged to skip fxch */
1551 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1552 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1553 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1554 /* emit_fxch(cd); */
1559 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1560 emit_store_dst(jd, iptr, d);
1561 emit_ffree_reg(cd, 0);
1565 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1566 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1568 var = VAROP(iptr->s1);
1569 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1571 if (var->flags & INMEMORY) {
1572 emit_fildl_membase(cd, REG_SP, var->vv.regoff);
1574 /* XXX not thread safe! */
1575 disp = dseg_add_unique_s4(cd, 0);
1576 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1578 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1579 emit_fildl_membase(cd, REG_ITMP1, disp);
1582 emit_store_dst(jd, iptr, d);
1585 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1586 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1588 var = VAROP(iptr->s1);
1589 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1590 if (var->flags & INMEMORY) {
1591 emit_fildll_membase(cd, REG_SP, var->vv.regoff);
1594 log_text("L2F: longs have to be in memory");
1597 emit_store_dst(jd, iptr, d);
1600 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1602 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1603 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1605 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1608 /* Round to zero, 53-bit mode, exception masked */
1609 disp = dseg_add_s4(cd, 0x0e7f);
1610 emit_fldcw_membase(cd, REG_ITMP1, disp);
1612 var = VAROP(iptr->dst);
1613 var1 = VAROP(iptr->s1);
1615 if (var->flags & INMEMORY) {
1616 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1618 /* Round to nearest, 53-bit mode, exceptions masked */
1619 disp = dseg_add_s4(cd, 0x027f);
1620 emit_fldcw_membase(cd, REG_ITMP1, disp);
1622 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1623 REG_SP, var->vv.regoff);
1626 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1628 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1631 /* XXX not thread safe! */
1632 disp = dseg_add_unique_s4(cd, 0);
1633 emit_fistpl_membase(cd, REG_ITMP1, disp);
1634 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1636 /* Round to nearest, 53-bit mode, exceptions masked */
1637 disp = dseg_add_s4(cd, 0x027f);
1638 emit_fldcw_membase(cd, REG_ITMP1, disp);
1640 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1643 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1644 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1647 emit_jcc(cd, CC_NE, disp);
1649 /* XXX: change this when we use registers */
1650 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1651 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1652 emit_call_reg(cd, REG_ITMP1);
1654 if (var->flags & INMEMORY) {
1655 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1658 M_INTMOVE(REG_RESULT, var->vv.regoff);
1662 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1664 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1665 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1667 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1670 /* Round to zero, 53-bit mode, exception masked */
1671 disp = dseg_add_s4(cd, 0x0e7f);
1672 emit_fldcw_membase(cd, REG_ITMP1, disp);
1674 var = VAROP(iptr->dst);
1675 var1 = VAROP(iptr->s1);
1677 if (var->flags & INMEMORY) {
1678 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1680 /* Round to nearest, 53-bit mode, exceptions masked */
1681 disp = dseg_add_s4(cd, 0x027f);
1682 emit_fldcw_membase(cd, REG_ITMP1, disp);
1684 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1685 REG_SP, var->vv.regoff);
1688 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1690 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1693 /* XXX not thread safe! */
1694 disp = dseg_add_unique_s4(cd, 0);
1695 emit_fistpl_membase(cd, REG_ITMP1, disp);
1696 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1698 /* Round to nearest, 53-bit mode, exceptions masked */
1699 disp = dseg_add_s4(cd, 0x027f);
1700 emit_fldcw_membase(cd, REG_ITMP1, disp);
1702 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1705 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1706 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1709 emit_jcc(cd, CC_NE, disp);
1711 /* XXX: change this when we use registers */
1712 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1713 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1714 emit_call_reg(cd, REG_ITMP1);
1716 if (var->flags & INMEMORY) {
1717 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1719 M_INTMOVE(REG_RESULT, var->vv.regoff);
1723 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1725 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1726 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1728 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1731 /* Round to zero, 53-bit mode, exception masked */
1732 disp = dseg_add_s4(cd, 0x0e7f);
1733 emit_fldcw_membase(cd, REG_ITMP1, disp);
1735 var = VAROP(iptr->dst);
1736 var1 = VAROP(iptr->s1);
1738 if (var->flags & INMEMORY) {
1739 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1741 /* Round to nearest, 53-bit mode, exceptions masked */
1742 disp = dseg_add_s4(cd, 0x027f);
1743 emit_fldcw_membase(cd, REG_ITMP1, disp);
1745 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1746 REG_SP, var->vv.regoff + 4);
1749 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1751 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1754 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1756 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1758 emit_jcc(cd, CC_NE, disp);
1760 emit_alu_imm_membase(cd, ALU_CMP, 0,
1761 REG_SP, var->vv.regoff);
1764 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1766 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1768 emit_jcc(cd, CC_NE, disp);
1770 /* XXX: change this when we use registers */
1771 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1772 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1773 emit_call_reg(cd, REG_ITMP1);
1774 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1775 emit_mov_reg_membase(cd, REG_RESULT2,
1776 REG_SP, var->vv.regoff + 4);
1779 log_text("F2L: longs have to be in memory");
1784 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1786 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1787 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1789 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1792 /* Round to zero, 53-bit mode, exception masked */
1793 disp = dseg_add_s4(cd, 0x0e7f);
1794 emit_fldcw_membase(cd, REG_ITMP1, disp);
1796 var = VAROP(iptr->dst);
1797 var1 = VAROP(iptr->s1);
1799 if (var->flags & INMEMORY) {
1800 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1802 /* Round to nearest, 53-bit mode, exceptions masked */
1803 disp = dseg_add_s4(cd, 0x027f);
1804 emit_fldcw_membase(cd, REG_ITMP1, disp);
1806 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1807 REG_SP, var->vv.regoff + 4);
1810 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1812 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1815 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1817 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1819 emit_jcc(cd, CC_NE, disp);
1821 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff);
1824 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1826 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1828 emit_jcc(cd, CC_NE, disp);
1830 /* XXX: change this when we use registers */
1831 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1832 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1833 emit_call_reg(cd, REG_ITMP1);
1834 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1835 emit_mov_reg_membase(cd, REG_RESULT2,
1836 REG_SP, var->vv.regoff + 4);
1839 log_text("D2L: longs have to be in memory");
1844 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1846 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1847 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1849 emit_store_dst(jd, iptr, d);
1852 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1854 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1855 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1857 emit_store_dst(jd, iptr, d);
1860 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1863 /* exchanged to skip fxch */
1864 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1865 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1866 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1867 /* emit_fxch(cd); */
1870 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1871 emit_jcc(cd, CC_E, 6);
1872 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1874 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1875 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1876 emit_jcc(cd, CC_B, 3 + 5);
1877 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1878 emit_jmp_imm(cd, 3);
1879 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1880 emit_store_dst(jd, iptr, d);
1883 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1886 /* exchanged to skip fxch */
1887 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1888 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1889 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1890 /* emit_fxch(cd); */
1893 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1894 emit_jcc(cd, CC_E, 3);
1895 emit_movb_imm_reg(cd, 1, REG_AH);
1897 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1898 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1899 emit_jcc(cd, CC_B, 3 + 5);
1900 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1901 emit_jmp_imm(cd, 3);
1902 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1903 emit_store_dst(jd, iptr, d);
1907 /* memory operations **************************************************/
1909 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1911 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1912 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1913 /* implicit null-pointer check */
1914 M_ILD(d, s1, OFFSET(java_array_t, size));
1915 emit_store_dst(jd, iptr, d);
1918 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1920 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1921 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1922 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1923 /* implicit null-pointer check */
1924 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1925 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray_t, data[0]),
1927 emit_store_dst(jd, iptr, d);
1930 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1932 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1933 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1934 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1935 /* implicit null-pointer check */
1936 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1937 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray_t, data[0]),
1939 emit_store_dst(jd, iptr, d);
1942 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1944 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1945 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1946 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1947 /* implicit null-pointer check */
1948 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1949 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray_t, data[0]),
1951 emit_store_dst(jd, iptr, d);
1954 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1956 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1957 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1958 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1959 /* implicit null-pointer check */
1960 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1961 emit_mov_memindex_reg(cd, OFFSET(java_intarray_t, data[0]),
1963 emit_store_dst(jd, iptr, d);
1966 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1968 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1969 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1970 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1971 /* implicit null-pointer check */
1972 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1974 var = VAROP(iptr->dst);
1976 assert(var->flags & INMEMORY);
1977 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]),
1978 s1, s2, 3, REG_ITMP3);
1979 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff);
1980 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]) + 4,
1981 s1, s2, 3, REG_ITMP3);
1982 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff + 4);
1985 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1987 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1988 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1989 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1990 /* implicit null-pointer check */
1991 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1992 emit_flds_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2, 2);
1993 emit_store_dst(jd, iptr, d);
1996 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
1998 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1999 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2000 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
2001 /* implicit null-pointer check */
2002 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2003 emit_fldl_memindex(cd, OFFSET(java_doublearray_t, data[0]), s1, s2,3);
2004 emit_store_dst(jd, iptr, d);
2007 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2009 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2010 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2011 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2012 /* implicit null-pointer check */
2013 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2014 emit_mov_memindex_reg(cd, OFFSET(java_objectarray_t, data[0]),
2016 emit_store_dst(jd, iptr, d);
2020 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2022 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2023 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2024 /* implicit null-pointer check */
2025 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2026 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2028 /* because EBP, ESI, EDI have no xH and xL nibbles */
2029 M_INTMOVE(s3, REG_ITMP3);
2032 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray_t, data[0]),
2036 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2038 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2039 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2040 /* implicit null-pointer check */
2041 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2042 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2043 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray_t, data[0]),
2047 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2049 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2050 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2051 /* implicit null-pointer check */
2052 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2053 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2054 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray_t, data[0]),
2058 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2060 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2061 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2062 /* implicit null-pointer check */
2063 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2064 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2065 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray_t, data[0]),
2069 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2071 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2072 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2073 /* implicit null-pointer check */
2074 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2076 var = VAROP(iptr->sx.s23.s3);
2078 assert(var->flags & INMEMORY);
2079 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff, REG_ITMP3);
2080 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray_t, data[0])
2082 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff + 4, REG_ITMP3);
2083 emit_mov_reg_memindex(cd, REG_ITMP3,
2084 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2087 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2089 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2090 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2091 /* implicit null-pointer check */
2092 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2093 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2094 emit_fstps_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2,2);
2097 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2099 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2100 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2101 /* implicit null-pointer check */
2102 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2103 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2104 emit_fstpl_memindex(cd, OFFSET(java_doublearray_t, data[0]),
2108 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2110 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2111 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2112 /* implicit null-pointer check */
2113 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2114 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2116 M_AST(s1, REG_SP, 0 * 4);
2117 M_AST(s3, REG_SP, 1 * 4);
2118 M_MOV_IMM(BUILTIN_FAST_canstore, REG_ITMP1);
2120 emit_arraystore_check(cd, iptr);
2122 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2123 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2124 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2125 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray_t, data[0]),
2129 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2131 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2132 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2133 /* implicit null-pointer check */
2134 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2135 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2136 OFFSET(java_bytearray_t, data[0]), s1, s2, 0);
2139 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2141 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2142 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2143 /* implicit null-pointer check */
2144 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2145 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2146 OFFSET(java_chararray_t, data[0]), s1, s2, 1);
2149 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2151 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2152 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2153 /* implicit null-pointer check */
2154 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2155 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2156 OFFSET(java_shortarray_t, data[0]), s1, s2, 1);
2159 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2161 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2162 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2163 /* implicit null-pointer check */
2164 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2165 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2166 OFFSET(java_intarray_t, data[0]), s1, s2, 2);
2169 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2171 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2172 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2173 /* implicit null-pointer check */
2174 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2175 emit_mov_imm_memindex(cd,
2176 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2177 OFFSET(java_longarray_t, data[0]), s1, s2, 3);
2178 emit_mov_imm_memindex(cd,
2179 ((s4)iptr->sx.s23.s3.constval) >> 31,
2180 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2183 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2185 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2186 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2187 /* implicit null-pointer check */
2188 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2189 emit_mov_imm_memindex(cd, 0,
2190 OFFSET(java_objectarray_t, data[0]), s1, s2, 2);
2194 case ICMD_GETSTATIC: /* ... ==> ..., value */
2196 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2197 uf = iptr->sx.s23.s3.uf;
2198 fieldtype = uf->fieldref->parseddesc.fd->type;
2201 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2205 fi = iptr->sx.s23.s3.fmiref->p.field;
2206 fieldtype = fi->type;
2207 disp = (intptr_t) fi->value;
2209 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2210 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->class, 0);
2213 M_MOV_IMM(disp, REG_ITMP1);
2214 switch (fieldtype) {
2217 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2218 M_ILD(d, REG_ITMP1, 0);
2221 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2222 M_LLD(d, REG_ITMP1, 0);
2225 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2226 M_FLD(d, REG_ITMP1, 0);
2229 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2230 M_DLD(d, REG_ITMP1, 0);
2233 emit_store_dst(jd, iptr, d);
2236 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2238 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2239 uf = iptr->sx.s23.s3.uf;
2240 fieldtype = uf->fieldref->parseddesc.fd->type;
2243 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2246 fi = iptr->sx.s23.s3.fmiref->p.field;
2247 fieldtype = fi->type;
2248 disp = (intptr_t) fi->value;
2250 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2251 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->class, 0);
2254 M_MOV_IMM(disp, REG_ITMP1);
2255 switch (fieldtype) {
2258 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2259 M_IST(s1, REG_ITMP1, 0);
2262 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2263 M_LST(s1, REG_ITMP1, 0);
2266 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2267 emit_fstps_membase(cd, REG_ITMP1, 0);
2270 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2271 emit_fstpl_membase(cd, REG_ITMP1, 0);
2276 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2277 /* val = value (in current instruction) */
2278 /* following NOP) */
2280 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2281 uf = iptr->sx.s23.s3.uf;
2282 fieldtype = uf->fieldref->parseddesc.fd->type;
2285 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2288 fi = iptr->sx.s23.s3.fmiref->p.field;
2289 fieldtype = fi->type;
2290 disp = (intptr_t) fi->value;
2292 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2293 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->class, 0);
2296 M_MOV_IMM(disp, REG_ITMP1);
2297 switch (fieldtype) {
2300 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
2303 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2304 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
2311 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2313 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2314 emit_nullpointer_check(cd, iptr, s1);
2316 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2317 uf = iptr->sx.s23.s3.uf;
2318 fieldtype = uf->fieldref->parseddesc.fd->type;
2321 patcher_add_patch_ref(jd, PATCHER_getfield,
2322 iptr->sx.s23.s3.uf, 0);
2325 fi = iptr->sx.s23.s3.fmiref->p.field;
2326 fieldtype = fi->type;
2330 switch (fieldtype) {
2333 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2334 M_ILD32(d, s1, disp);
2337 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2338 M_LLD32(d, s1, disp);
2341 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2342 M_FLD32(d, s1, disp);
2345 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2346 M_DLD32(d, s1, disp);
2349 emit_store_dst(jd, iptr, d);
2352 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2354 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2355 emit_nullpointer_check(cd, iptr, s1);
2357 /* must be done here because of code patching */
2359 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2360 uf = iptr->sx.s23.s3.uf;
2361 fieldtype = uf->fieldref->parseddesc.fd->type;
2364 fi = iptr->sx.s23.s3.fmiref->p.field;
2365 fieldtype = fi->type;
2368 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2369 if (IS_2_WORD_TYPE(fieldtype))
2370 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2372 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2375 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2377 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2379 uf = iptr->sx.s23.s3.uf;
2382 patcher_add_patch_ref(jd, PATCHER_putfield, uf, 0);
2386 fi = iptr->sx.s23.s3.fmiref->p.field;
2390 switch (fieldtype) {
2393 M_IST32(s2, s1, disp);
2396 M_LST32(s2, s1, disp);
2399 emit_fstps_membase32(cd, s1, disp);
2402 emit_fstpl_membase32(cd, s1, disp);
2407 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2408 /* val = value (in current instruction) */
2409 /* following NOP) */
2411 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2412 emit_nullpointer_check(cd, iptr, s1);
2414 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2415 uf = iptr->sx.s23.s3.uf;
2416 fieldtype = uf->fieldref->parseddesc.fd->type;
2419 patcher_add_patch_ref(jd, PATCHER_putfieldconst,
2423 fi = iptr->sx.s23.s3.fmiref->p.field;
2424 fieldtype = fi->type;
2428 switch (fieldtype) {
2431 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2434 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2435 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2443 /* branch operations **************************************************/
2445 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2447 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2448 M_INTMOVE(s1, REG_ITMP1_XPTR);
2450 #ifdef ENABLE_VERIFIER
2451 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2452 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2453 iptr->sx.s23.s2.uc, 0);
2455 #endif /* ENABLE_VERIFIER */
2457 M_CALL_IMM(0); /* passing exception pc */
2458 M_POP(REG_ITMP2_XPC);
2460 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2464 case ICMD_GOTO: /* ... ==> ... */
2465 case ICMD_RET: /* ... ==> ... */
2467 #if defined(ENABLE_SSA)
2469 last_cmd_was_goto = true;
2471 /* In case of a Goto phimoves have to be inserted before the */
2474 codegen_emit_phi_moves(jd, bptr);
2477 emit_br(cd, iptr->dst.block);
2481 case ICMD_JSR: /* ... ==> ... */
2483 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
2487 case ICMD_IFNULL: /* ..., value ==> ... */
2488 case ICMD_IFNONNULL:
2490 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2492 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
/* Conditional-branch ICMDs (int and long compares) of codegen_emit.
   NOTE(review): this listing is elided -- the leading numbers are the
   original file's line numbers and intermediate lines (breaks, else
   branches, closing braces) are missing; do not compile as-is. */
/* ICMD_IFEQ..IFLE: int compare with immediate; the condition code is
   computed from the opcode's distance to ICMD_IFEQ. */
2495 case ICMD_IFEQ: /* ..., value ==> ... */
2502 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2503 M_CMP_IMM(iptr->sx.val.i, s1);
2504 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
/* long == imm: OR low|high for the zero case, otherwise XOR both
   halves against the constant and OR the results -- zero iff equal. */
2507 case ICMD_IF_LEQ: /* ..., value ==> ... */
2509 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2510 if (iptr->sx.val.l == 0) {
2511 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2512 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2515 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2516 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2517 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2518 M_OR(REG_ITMP2, REG_ITMP1);
2520 emit_beq(cd, iptr->dst.block);
/* long < imm: high words decide with a signed compare; on equality
   the low words are compared unsigned (standard two-word compare). */
2523 case ICMD_IF_LLT: /* ..., value ==> ... */
2525 if (iptr->sx.val.l == 0) {
2526 /* If high 32-bit are less than zero, then the 64-bits
2528 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2530 emit_blt(cd, iptr->dst.block);
2533 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2534 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2535 emit_blt(cd, iptr->dst.block);
2537 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2538 emit_bult(cd, iptr->dst.block);
2542 case ICMD_IF_LLE: /* ..., value ==> ... */
2544 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2545 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2546 emit_blt(cd, iptr->dst.block);
2548 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2549 emit_bule(cd, iptr->dst.block);
/* long != imm: same XOR/OR reduction as IF_LEQ, branch on non-zero. */
2552 case ICMD_IF_LNE: /* ..., value ==> ... */
2554 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2555 if (iptr->sx.val.l == 0) {
2556 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2557 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2560 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2561 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2562 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2563 M_OR(REG_ITMP2, REG_ITMP1);
2565 emit_bne(cd, iptr->dst.block);
2568 case ICMD_IF_LGT: /* ..., value ==> ... */
2570 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2571 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2572 emit_bgt(cd, iptr->dst.block);
2574 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2575 emit_bugt(cd, iptr->dst.block);
/* long >= 0 shortcut: only the sign of the high word matters. */
2578 case ICMD_IF_LGE: /* ..., value ==> ... */
2580 if (iptr->sx.val.l == 0) {
2581 /* If high 32-bit are greater equal zero, then the
2583 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2585 emit_bge(cd, iptr->dst.block);
2588 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2589 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2590 emit_bgt(cd, iptr->dst.block);
2592 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2593 emit_buge(cd, iptr->dst.block);
/* int/ref register-register compares: condition derived from opcode
   offset, same trick as ICMD_IFEQ above. */
2597 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2598 case ICMD_IF_ICMPNE:
2599 case ICMD_IF_ICMPLT:
2600 case ICMD_IF_ICMPGT:
2601 case ICMD_IF_ICMPGE:
2602 case ICMD_IF_ICMPLE:
2604 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2605 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2607 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);
2610 case ICMD_IF_ACMPEQ: /* ..., value, value ==> ... */
2611 case ICMD_IF_ACMPNE:
2613 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2614 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2616 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);
/* long == long: XOR low halves, XOR high halves, OR -- zero iff equal. */
2619 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2621 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2622 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2623 M_INTMOVE(s1, REG_ITMP1);
2624 M_XOR(s2, REG_ITMP1);
2625 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2626 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2627 M_INTMOVE(s1, REG_ITMP2);
2628 M_XOR(s2, REG_ITMP2);
2629 M_OR(REG_ITMP1, REG_ITMP2);
2630 emit_beq(cd, iptr->dst.block);
2633 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2635 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2636 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2637 M_INTMOVE(s1, REG_ITMP1);
2638 M_XOR(s2, REG_ITMP1);
2639 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2640 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2641 M_INTMOVE(s1, REG_ITMP2);
2642 M_XOR(s2, REG_ITMP2);
2643 M_OR(REG_ITMP1, REG_ITMP2);
2644 emit_bne(cd, iptr->dst.block);
/* long relational compares: signed branch on high words, then (on
   the elided equal path) unsigned branch on low words. */
2647 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2649 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2650 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2652 emit_blt(cd, iptr->dst.block);
2653 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2654 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2657 emit_bult(cd, iptr->dst.block);
2660 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2662 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2663 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2665 emit_bgt(cd, iptr->dst.block);
2666 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2667 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2670 emit_bugt(cd, iptr->dst.block);
2673 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2675 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2676 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2678 emit_blt(cd, iptr->dst.block);
2679 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2680 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2683 emit_bule(cd, iptr->dst.block);
2686 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2688 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2689 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2691 emit_bgt(cd, iptr->dst.block);
2692 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2693 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2696 emit_buge(cd, iptr->dst.block);
/* Return ICMDs and the shared method epilogue ("nowperformreturn").
   NOTE(review): elided listing -- the nowperformreturn label itself,
   several switch cases and the final ret are among the missing lines. */
/* Each xRETURN loads the value into the ABI result register(s) and
   jumps to the common epilogue below. */
2700 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2702 REPLACEMENT_POINT_RETURN(cd, iptr);
2703 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2704 M_INTMOVE(s1, REG_RESULT);
2705 goto nowperformreturn;
2707 case ICMD_LRETURN: /* ..., retvalue ==> ... */
2709 REPLACEMENT_POINT_RETURN(cd, iptr);
2710 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2711 M_LNGMOVE(s1, REG_RESULT_PACKED);
2712 goto nowperformreturn;
2714 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2716 REPLACEMENT_POINT_RETURN(cd, iptr);
2717 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2718 M_INTMOVE(s1, REG_RESULT);
/* With the verifier enabled, an unresolved return-type class still
   needs a patcher reference so it gets resolved before use. */
2720 #ifdef ENABLE_VERIFIER
2721 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2722 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2723 iptr->sx.s23.s2.uc, 0);
2725 #endif /* ENABLE_VERIFIER */
2726 goto nowperformreturn;
2728 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2731 REPLACEMENT_POINT_RETURN(cd, iptr);
2732 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2733 goto nowperformreturn;
2735 case ICMD_RETURN: /* ... ==> ... */
2737 REPLACEMENT_POINT_RETURN(cd, iptr);
2743 p = cd->stackframesize;
2745 #if !defined(NDEBUG)
2746 emit_verbosecall_exit(jd);
/* Synchronized methods: save the return value across the
   LOCK_monitor_exit call, then restore it afterwards. */
2749 #if defined(ENABLE_THREADS)
2750 if (checksync && code_is_synchronized(code)) {
2751 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 8);
2753 /* we need to save the proper return value */
2754 switch (iptr->opc) {
2757 M_IST(REG_RESULT, REG_SP, rd->memuse * 8);
2761 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2765 emit_fstps_membase(cd, REG_SP, rd->memuse * 8);
2769 emit_fstpl_membase(cd, REG_SP, rd->memuse * 8);
2773 M_AST(REG_ITMP2, REG_SP, 0);
2774 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2777 /* and now restore the proper return value */
2778 switch (iptr->opc) {
2781 M_ILD(REG_RESULT, REG_SP, rd->memuse * 8);
2785 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2789 emit_flds_membase(cd, REG_SP, rd->memuse * 8);
2793 emit_fldl_membase(cd, REG_SP, rd->memuse * 8);
/* Epilogue proper: restore callee-saved int/float registers and
   deallocate the stack frame (slot size is 8 bytes here). */
2799 /* restore saved registers */
2801 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2802 p--; M_ALD(rd->savintregs[i], REG_SP, p * 8);
2805 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2807 emit_fldl_membase(cd, REG_SP, p * 8);
2808 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2810 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2813 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2817 /* deallocate stack */
2819 if (cd->stackframesize)
2820 M_AADD_IMM(cd->stackframesize * 8, REG_SP);
/* TABLESWITCH / LOOKUPSWITCH ICMDs. NOTE(review): elided listing. */
/* tableswitch: bias the index by the table low bound, do one unsigned
   range check (covers both < low and > high), then an indirect jump
   through a jump table built in the data segment. */
2827 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2830 branch_target_t *table;
2832 table = iptr->dst.table;
2834 l = iptr->sx.s23.s2.tablelow;
2835 i = iptr->sx.s23.s3.tablehigh;
2837 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2838 M_INTMOVE(s1, REG_ITMP1);
2841 M_ISUB_IMM(l, REG_ITMP1);
2847 M_CMP_IMM(i - 1, REG_ITMP1);
/* table[0] is the default target; out-of-range goes there. */
2848 emit_bugt(cd, table[0].block);
2850 /* build jump table top down and use address of lowest entry */
2855 dseg_add_target(cd, table->block);
2859 /* length of dataseg after last dseg_addtarget is used
2862 M_MOV_IMM(0, REG_ITMP2);
/* scale factor 2 => index * 4 bytes into the dseg jump table. */
2864 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
/* lookupswitch: linear sequence of compare-and-branch pairs, then an
   unconditional branch to the default target. */
2870 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2873 lookup_target_t *lookup;
2875 lookup = iptr->dst.lookup;
2877 i = iptr->sx.s23.s2.lookupcount;
2879 MCODECHECK((i<<2)+8);
2880 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2883 M_CMP_IMM(lookup->value, s1);
2884 emit_beq(cd, lookup->target.block);
2888 emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
/* BUILTIN and INVOKE* ICMDs: copy arguments to their locations,
   resolve/patch the callee address, emit the call, store the result.
   NOTE(review): elided listing -- fall-through breaks, call macros and
   several closing braces are among the missing lines. */
2893 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
2895 REPLACEMENT_POINT_FORGC_BUILTIN(cd, iptr);
2897 bte = iptr->sx.s23.s3.bte;
2901 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2903 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2904 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
2905 case ICMD_INVOKEINTERFACE:
2907 REPLACEMENT_POINT_INVOKE(cd, iptr);
/* Unresolved callees only have a method descriptor; resolved ones
   give us the methodinfo (lm) directly. */
2909 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2910 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
2914 lm = iptr->sx.s23.s3.fmiref->p.method;
2915 md = lm->parseddesc;
2919 s3 = md->paramcount;
2921 MCODECHECK((s3 << 1) + 64);
2923 /* copy arguments to registers or stack location */
2925 for (s3 = s3 - 1; s3 >= 0; s3--) {
2926 var = VAR(iptr->sx.s23.s2.args[s3]);
2928 /* Already Preallocated (ARGVAR) ? */
2929 if (var->flags & PREALLOC)
/* On i386 all integer arguments go on the stack; a register
   assignment here would be an allocator bug. */
2931 if (IS_INT_LNG_TYPE(var->type)) {
2932 if (!md->params[s3].inmemory) {
2933 log_text("No integer argument registers available!");
2937 if (IS_2_WORD_TYPE(var->type)) {
2938 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
2939 M_LST(d, REG_SP, md->params[s3].regoff);
2941 d = emit_load(jd, iptr, var, REG_ITMP1);
2942 M_IST(d, REG_SP, md->params[s3].regoff);
/* Float/double arguments: register or stack slot per descriptor. */
2947 if (!md->params[s3].inmemory) {
2948 s1 = md->params[s3].regoff;
2949 d = emit_load(jd, iptr, var, s1);
2953 d = emit_load(jd, iptr, var, REG_FTMP1);
2954 if (IS_2_WORD_TYPE(var->type))
2955 M_DST(d, REG_SP, md->params[s3].regoff);
2957 M_FST(d, REG_SP, md->params[s3].regoff);
/* Per-opcode call-target computation; d receives the return type. */
2962 switch (iptr->opc) {
2964 d = md->returntype.type;
2966 if (bte->stub == NULL) {
2967 M_MOV_IMM(bte->fp, REG_ITMP1);
2970 M_MOV_IMM(bte->stub, REG_ITMP1);
2975 case ICMD_INVOKESPECIAL:
/* receiver is the first stack argument; explicit null check. */
2976 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
2977 emit_nullpointer_check(cd, iptr, REG_ITMP1);
2980 case ICMD_INVOKESTATIC:
2982 unresolved_method *um = iptr->sx.s23.s3.um;
2984 patcher_add_patch_ref(jd, PATCHER_invokestatic_special,
2988 d = md->returntype.type;
2991 disp = (ptrint) lm->stubroutine;
2992 d = lm->parseddesc->returntype.type;
2995 M_MOV_IMM(disp, REG_ITMP2);
2999 case ICMD_INVOKEVIRTUAL:
3000 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3001 emit_nullpointer_check(cd, iptr, s1);
3004 unresolved_method *um = iptr->sx.s23.s3.um;
3006 patcher_add_patch_ref(jd, PATCHER_invokevirtual, um, 0);
3009 d = md->returntype.type;
/* virtual dispatch: method pointer at vftbl + vftblindex. */
3012 s1 = OFFSET(vftbl_t, table[0]) +
3013 sizeof(methodptr) * lm->vftblindex;
3014 d = md->returntype.type;
3017 M_ALD(REG_METHODPTR, REG_ITMP1,
3018 OFFSET(java_object_t, vftbl));
3019 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3023 case ICMD_INVOKEINTERFACE:
3024 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3025 emit_nullpointer_check(cd, iptr, s1);
3028 unresolved_method *um = iptr->sx.s23.s3.um;
3030 patcher_add_patch_ref(jd, PATCHER_invokeinterface, um, 0);
3034 d = md->returntype.type;
/* interface dispatch: two-level lookup -- interfacetable entry
   (negative offset by class index), then the method slot. */
3037 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3038 sizeof(methodptr) * lm->class->index;
3040 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3042 d = md->returntype.type;
3045 M_ALD(REG_METHODPTR, REG_ITMP1,
3046 OFFSET(java_object_t, vftbl));
3047 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3048 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3053 /* store size of call code in replacement point */
3055 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3056 REPLACEMENT_POINT_FORGC_BUILTIN_RETURN(cd, iptr);
3058 /* d contains return type */
3060 if (d != TYPE_VOID) {
3061 #if defined(ENABLE_SSA)
3062 if ((ls == NULL) /* || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) */ ||
3063 (ls->lifetime[iptr->dst.varindex].type != UNUSED))
3064 /* a "living" stackslot */
/* Move the ABI result register(s) into the destination var. */
3067 if (IS_INT_LNG_TYPE(d)) {
3068 if (IS_2_WORD_TYPE(d)) {
3069 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3070 M_LNGMOVE(REG_RESULT_PACKED, s1);
3073 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3074 M_INTMOVE(REG_RESULT, s1);
3078 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3080 emit_store_dst(jd, iptr, s1);
/* ICMD_CHECKCAST: three paths -- interface check, class (subtype)
   check via baseval/diffval range test, and array check via builtin.
   For unresolved classes, runtime patchers fill in flags/vftbl and
   branch labels select the right path. NOTE(review): elided listing. */
3086 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3088 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3089 /* object type cast-check */
3092 vftbl_t *supervftbl;
3095 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3101 super = iptr->sx.s23.s3.c.cls;
3102 superindex = super->index;
3103 supervftbl = super->vftbl;
/* The baseval/diffval subtype test reads mutable vftbl data, hence
   the critical section around it (non-interface path only). */
3106 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3107 CODEGEN_CRITICAL_SECTION_NEW;
3109 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3111 /* if class is not resolved, check which code to call */
3113 if (super == NULL) {
3115 emit_label_beq(cd, BRANCH_LABEL_1);
3117 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3118 iptr->sx.s23.s3.c.ref, 0);
/* the 0 immediate is patched to super->flags at resolve time. */
3120 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3121 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3122 emit_label_beq(cd, BRANCH_LABEL_2);
3125 /* interface checkcast code */
3127 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3128 if (super != NULL) {
3130 emit_label_beq(cd, BRANCH_LABEL_3);
3133 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3135 if (super == NULL) {
3136 patcher_add_patch_ref(jd, PATCHER_checkcast_interface,
3137 iptr->sx.s23.s3.c.ref,
/* index must be within interfacetablelength and the table entry
   must be non-NULL, otherwise a ClassCastException is thrown. */
3142 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3143 M_ISUB_IMM32(superindex, REG_ITMP3);
3144 /* XXX do we need this one? */
3146 emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);
3148 M_ALD32(REG_ITMP3, REG_ITMP2,
3149 OFFSET(vftbl_t, interfacetable[0]) -
3150 superindex * sizeof(methodptr*));
3152 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);
3155 emit_label_br(cd, BRANCH_LABEL_4);
3157 emit_label(cd, BRANCH_LABEL_3);
3160 /* class checkcast code */
3162 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3163 if (super == NULL) {
3164 emit_label(cd, BRANCH_LABEL_2);
3168 emit_label_beq(cd, BRANCH_LABEL_5);
3171 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3173 if (super == NULL) {
3174 patcher_add_patch_ref(jd, PATCHER_checkcast_class,
3175 iptr->sx.s23.s3.c.ref,
3179 M_MOV_IMM(supervftbl, REG_ITMP3);
3181 CODEGEN_CRITICAL_SECTION_START;
/* subtype test: (obj.baseval - super.baseval) <= super.diffval
   (unsigned) iff obj is a subtype of super. */
3183 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3185 /* if (s1 != REG_ITMP1) { */
3186 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3187 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3188 /* #if defined(ENABLE_THREADS) */
3189 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3191 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
3194 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3195 M_ISUB(REG_ITMP3, REG_ITMP2);
3196 M_MOV_IMM(supervftbl, REG_ITMP3);
3197 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3199 CODEGEN_CRITICAL_SECTION_END;
3203 M_CMP(REG_ITMP3, REG_ITMP2);
3204 emit_classcast_check(cd, iptr, BRANCH_ULE, REG_ITMP3, s1);
3207 emit_label(cd, BRANCH_LABEL_5);
3210 if (super == NULL) {
3211 emit_label(cd, BRANCH_LABEL_1);
3212 emit_label(cd, BRANCH_LABEL_4);
3215 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3218 /* array type cast-check */
/* array path: call BUILTIN_arraycheckcast(obj, class) and throw on
   a zero (false) result. */
3220 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3221 M_AST(s1, REG_SP, 0 * 4);
3223 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3224 patcher_add_patch_ref(jd, PATCHER_builtin_arraycheckcast,
3225 iptr->sx.s23.s3.c.ref, 0);
3228 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3229 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3232 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3234 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);
3236 d = codegen_reg_of_dst(jd, iptr, s1);
3240 emit_store_dst(jd, iptr, d);
/* ICMD_INSTANCEOF: like CHECKCAST but produces a 0/1 int result in d
   instead of throwing. NOTE(review): elided listing -- the setcc /
   result-setting instructions are among the missing lines. */
3243 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3247 vftbl_t *supervftbl;
3250 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3256 super = iptr->sx.s23.s3.c.cls;
3257 superindex = super->index;
3258 supervftbl = super->vftbl;
3261 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3262 CODEGEN_CRITICAL_SECTION_NEW;
3264 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3265 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
3268 M_INTMOVE(s1, REG_ITMP1);
3274 /* if class is not resolved, check which code to call */
3276 if (super == NULL) {
3278 emit_label_beq(cd, BRANCH_LABEL_1);
3280 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3281 iptr->sx.s23.s3.c.ref, 0);
/* the 0 immediate is patched to super->flags at resolve time. */
3283 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3284 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3285 emit_label_beq(cd, BRANCH_LABEL_2);
3288 /* interface instanceof code */
3290 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3291 if (super != NULL) {
3293 emit_label_beq(cd, BRANCH_LABEL_3);
3296 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3298 if (super == NULL) {
3299 patcher_add_patch_ref(jd, PATCHER_instanceof_interface,
3300 iptr->sx.s23.s3.c.ref, 0);
3304 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3305 M_ISUB_IMM32(superindex, REG_ITMP3);
/* disp is the byte length of the skip-ahead code sequence below;
   update it if those instructions change. */
3308 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3309 6 /* jcc */ + 5 /* mov_imm_reg */);
3312 M_ALD32(REG_ITMP1, REG_ITMP1,
3313 OFFSET(vftbl_t, interfacetable[0]) -
3314 superindex * sizeof(methodptr*));
3316 /* emit_setcc_reg(cd, CC_A, d); */
3317 /* emit_jcc(cd, CC_BE, 5); */
3322 emit_label_br(cd, BRANCH_LABEL_4);
3324 emit_label(cd, BRANCH_LABEL_3);
3327 /* class instanceof code */
3329 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3330 if (super == NULL) {
3331 emit_label(cd, BRANCH_LABEL_2);
3335 emit_label_beq(cd, BRANCH_LABEL_5);
3338 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3340 if (super == NULL) {
3341 patcher_add_patch_ref(jd, PATCHER_instanceof_class,
3342 iptr->sx.s23.s3.c.ref, 0);
3345 M_MOV_IMM(supervftbl, REG_ITMP2);
3347 CODEGEN_CRITICAL_SECTION_START;
/* subtype test on baseval/diffval, same scheme as CHECKCAST. */
3349 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3350 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3351 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3353 CODEGEN_CRITICAL_SECTION_END;
3355 M_ISUB(REG_ITMP2, REG_ITMP1);
3356 M_CLR(d); /* may be REG_ITMP2 */
3357 M_CMP(REG_ITMP3, REG_ITMP1);
3362 emit_label(cd, BRANCH_LABEL_5);
3365 if (super == NULL) {
3366 emit_label(cd, BRANCH_LABEL_1);
3367 emit_label(cd, BRANCH_LABEL_4);
3370 emit_store_dst(jd, iptr, d);
/* ICMD_MULTIANEWARRAY plus the tail of the codegen_emit instruction
   and basic-block loops. NOTE(review): elided listing -- the loop
   closing braces and the final return are partially missing. */
3374 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3376 /* check for negative sizes and copy sizes to stack if necessary */
3378 MCODECHECK((iptr->s1.argcount << 1) + 64);
3380 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3381 /* copy SAVEDVAR sizes to stack */
3382 var = VAR(iptr->sx.s23.s2.args[s1]);
3384 /* Already Preallocated? */
3385 if (!(var->flags & PREALLOC)) {
3386 if (var->flags & INMEMORY) {
3387 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff);
/* dimension array starts at slot 3 (after the 3 builtin args). */
3388 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3391 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3395 /* is a patcher function set? */
3397 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3398 patcher_add_patch_ref(jd, PATCHER_builtin_multianewarray,
3399 iptr->sx.s23.s3.c.ref, 0);
3405 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3407 /* a0 = dimension count */
3409 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3411 /* a1 = arraydescriptor */
3413 M_IST_IMM(disp, REG_SP, 1 * 4);
3415 /* a2 = pointer to dimensions = stack pointer */
3417 M_MOV(REG_SP, REG_ITMP1);
3418 M_AADD_IMM(3 * 4, REG_ITMP1);
3419 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3421 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3424 /* check for exception before result assignment */
3426 emit_exception_check(cd, iptr);
3428 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3429 M_INTMOVE(REG_RESULT, s1);
3430 emit_store_dst(jd, iptr, s1);
/* default case: unknown opcode is a JIT-internal error. */
3434 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3439 } /* for instruction */
3443 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3446 #if defined(ENABLE_SSA)
3449 /* by edge splitting, in Blocks with phi moves there can only */
3450 /* be a goto as last command, no other Jump/Branch Command */
3452 if (!last_cmd_was_goto)
3453 codegen_emit_phi_moves(jd, bptr);
3458 /* At the end of a basic block we may have to append some nops,
3459 because the patcher stub calling code might be longer than the
3460 actual instruction. So codepatching does not change the
3461 following block unintentionally. */
3463 if (cd->mcodeptr < cd->lastmcodeptr) {
3464 while (cd->mcodeptr < cd->lastmcodeptr) {
3469 } /* if (bptr -> flags >= BBREACHED) */
3470 } /* for basic block */
3472 dseg_createlinenumbertable(cd);
3474 /* generate stubs */
3476 emit_patcher_traps(jd);
3478 /* everything's ok */
3484 /* codegen_emit_stub_native ****************************************************
3486 Emits a stub routine which calls a native method.
3488 *******************************************************************************/
/* codegen_emit_stub_native: emit the wrapper that bridges from JIT'd
   code to a native (JNI) function f -- build frame + stackframeinfo,
   marshal arguments, insert env/class for JNI, call, save/restore the
   return value around codegen_finish_native_call, forward pending
   exceptions to asm_handle_nat_exception.
   NOTE(review): elided listing -- local declarations, call macros and
   the closing brace are among the missing lines. */
3490 void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f, int skipparams)
3496 int i, j; /* count variables */
3499 #if defined(ENABLE_GC_CACAO)
3503 /* get required compiler data */
3509 /* set some variables */
3513 /* calculate stackframe size */
/* frame = stackframeinfo + localref table + 4 arg slots (for the
   codegen_start_native_call helper) + (elided) native arg area. */
3515 cd->stackframesize =
3516 sizeof(stackframeinfo_t) / SIZEOF_VOID_P +
3517 sizeof(localref_table) / SIZEOF_VOID_P +
3518 4 + /* 4 arguments (start_native_call) */
3521 /* keep stack 16-byte aligned */
3523 ALIGN_ODD(cd->stackframesize); /* XXX this is wrong, +4 is missing */
3525 /* create method header */
3527 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3528 (void) dseg_add_unique_s4(cd, cd->stackframesize * 8); /* FrameSize */
3529 (void) dseg_add_unique_s4(cd, 0); /* IsSync */
3530 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3531 (void) dseg_add_unique_s4(cd, 0); /* IntSave */
3532 (void) dseg_add_unique_s4(cd, 0); /* FltSave */
3533 (void) dseg_addlinenumbertablesize(cd);
3534 (void) dseg_add_unique_s4(cd, 0); /* ExTableSize */
3536 #if defined(ENABLE_PROFILING)
3537 /* generate native method profiling code */
3539 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
3540 /* count frequency */
3542 M_MOV_IMM(code, REG_ITMP1);
3543 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
3547 /* calculate stackframe size for native function */
3549 M_ASUB_IMM(cd->stackframesize * 8, REG_SP);
3551 /* get function address (this must happen before the stackframeinfo) */
3553 funcdisp = dseg_add_functionptr(cd, f);
/* (elided condition) patch in the native function if unresolved. */
3556 patcher_add_patch_ref(jd, PATCHER_resolve_native_function, m, funcdisp);
3558 /* Mark the whole fpu stack as free for native functions (only for saved */
3559 /* register count == 0). */
3561 emit_ffree_reg(cd, 0);
3562 emit_ffree_reg(cd, 1);
3563 emit_ffree_reg(cd, 2);
3564 emit_ffree_reg(cd, 3);
3565 emit_ffree_reg(cd, 4);
3566 emit_ffree_reg(cd, 5);
3567 emit_ffree_reg(cd, 6);
3568 emit_ffree_reg(cd, 7);
3570 #if defined(ENABLE_GC_CACAO)
3571 /* remember callee saved int registers in stackframeinfo (GC may need to */
3572 /* recover them during a collection). */
3574 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3575 OFFSET(stackframeinfo_t, intregs);
3577 for (i = 0; i < INT_SAV_CNT; i++)
3578 M_AST(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3581 /* prepare data structures for native function call */
3583 M_MOV(REG_SP, REG_ITMP1);
3584 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3585 M_IST_IMM(0, REG_SP, 1 * 4);
3588 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
3591 /* remember class argument */
/* codegen_start_native_call returned the class in REG_RESULT;
   stash it for the JNI "jclass" argument of static methods. */
3593 if (m->flags & ACC_STATIC)
3594 M_MOV(REG_RESULT, REG_ITMP3);
3596 /* Copy or spill arguments to new locations. */
3598 for (i = md->paramcount - 1, j = i + skipparams; i >= 0; i--, j--) {
3599 if (!md->params[i].inmemory)
/* +4 skips the return address pushed by the call into this stub. */
3602 s1 = md->params[i].regoff + cd->stackframesize * 8 + 4;
3603 s2 = nmd->params[j].regoff;
3605 /* float/double in memory can be copied like int/longs */
3607 switch (md->paramtypes[i].type) {
3611 M_ILD(REG_ITMP1, REG_SP, s1);
3612 M_IST(REG_ITMP1, REG_SP, s2);
3616 M_LLD(REG_ITMP12_PACKED, REG_SP, s1);
3617 M_LST(REG_ITMP12_PACKED, REG_SP, s2);
3622 /* Handle native Java methods. */
3624 if (m->flags & ACC_NATIVE) {
3625 /* if function is static, put class into second argument */
3627 if (m->flags & ACC_STATIC)
3628 M_AST(REG_ITMP3, REG_SP, 1 * 4);
3630 /* put env into first argument */
3632 M_AST_IMM(_Jv_env, REG_SP, 0 * 4);
3635 /* call the native function */
/* the 0 immediate is the dseg base; loaded indirectly so the
   patcher can update the function address via funcdisp. */
3637 emit_mov_imm_reg(cd, 0, REG_ITMP3);
3639 M_ALD(REG_ITMP1, REG_ITMP3, funcdisp);
3642 /* save return value */
3644 switch (md->returntype.type) {
3647 M_IST(REG_RESULT, REG_SP, 1 * 8);
3650 M_LST(REG_RESULT_PACKED, REG_SP, 1 * 8);
3653 emit_fsts_membase(cd, REG_SP, 1 * 8);
3656 emit_fstl_membase(cd, REG_SP, 1 * 8);
3662 /* remove native stackframe info */
3664 M_MOV(REG_SP, REG_ITMP1);
3665 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3666 M_IST_IMM(0, REG_SP, 1 * 4);
3669 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
/* keep the returned exception pointer; ITMP3 would alias RESULT2. */
3671 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
3673 /* restore return value */
3675 switch (md->returntype.type) {
3678 M_ILD(REG_RESULT, REG_SP, 1 * 8);
3681 M_LLD(REG_RESULT_PACKED, REG_SP, 1 * 8);
3684 emit_flds_membase(cd, REG_SP, 1 * 8);
3687 emit_fldl_membase(cd, REG_SP, 1 * 8);
3693 #if defined(ENABLE_GC_CACAO)
3694 /* restore callee saved int registers from stackframeinfo (GC might have */
3695 /* modified them during a collection). */
3697 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3698 OFFSET(stackframeinfo_t, intregs);
3700 for (i = 0; i < INT_SAV_CNT; i++)
3701 M_ALD(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3704 M_AADD_IMM(cd->stackframesize * 8, REG_SP);
3706 /* check for exception */
3713 /* handle exception */
/* XPC = return address - 2 so the unwinder attributes the fault
   to the call site inside the caller. */
3715 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
3716 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
3717 M_ASUB_IMM(2, REG_ITMP2_XPC);
3719 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
3722 /* generate patcher stubs */
3724 emit_patcher_traps(jd);
3729 * These are local overrides for various environment variables in Emacs.
3730 * Please do not remove this and leave it at the end of the file, where
3731 * Emacs will automagically detect them.
3732 * ---------------------------------------------------------------------
3735 * indent-tabs-mode: t
3739 * vim:noexpandtab:sw=4:ts=4: