1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
36 #include "vm/jit/i386/md-abi.h"
38 #include "vm/jit/i386/codegen.h"
39 #include "vm/jit/i386/emit.h"
41 #include "mm/memory.h"
42 #include "native/jni.h"
43 #include "native/localref.h"
44 #include "native/native.h"
46 #include "threads/lock-common.h"
48 #include "vm/builtin.h"
49 #include "vm/exceptions.h"
50 #include "vm/global.h"
51 #include "vm/stringlocal.h"
54 #include "vm/jit/abi.h"
55 #include "vm/jit/asmpart.h"
56 #include "vm/jit/codegen-common.h"
57 #include "vm/jit/dseg.h"
58 #include "vm/jit/emit-common.h"
59 #include "vm/jit/jit.h"
60 #include "vm/jit/parse.h"
61 #include "vm/jit/patcher-common.h"
62 #include "vm/jit/reg.h"
63 #include "vm/jit/replace.h"
64 #include "vm/jit/stacktrace.h"
66 #if defined(ENABLE_SSA)
67 # include "vm/jit/optimizing/lsra.h"
68 # include "vm/jit/optimizing/ssa.h"
69 #elif defined(ENABLE_LSRA)
70 # include "vm/jit/allocator/lsra.h"
73 #include "vmcore/loader.h"
74 #include "vmcore/options.h"
75 #include "vmcore/utf8.h"
78 /* codegen_emit ****************************************************************
80 Generates machine code.
82 *******************************************************************************/
84 bool codegen_emit(jitdata *jd)
90 s4 len, s1, s2, s3, d, disp;
96 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
97 builtintable_entry *bte;
100 unresolved_field *uf;
103 #if defined(ENABLE_SSA)
105 bool last_cmd_was_goto;
107 last_cmd_was_goto = false;
111 /* get required compiler data */
118 /* prevent compiler warnings */
129 s4 savedregs_num = 0;
132 /* space to save used callee saved registers */
134 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
135 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
137 cd->stackframesize = rd->memuse + savedregs_num;
140 #if defined(ENABLE_THREADS)
141 /* space to save argument of monitor_enter */
143 if (checksync && code_is_synchronized(code))
144 cd->stackframesize++;
147 /* create method header */
149 /* Keep stack of non-leaf functions 16-byte aligned. */
151 if (!code_is_leafmethod(code)) {
152 ALIGN_ODD(cd->stackframesize); /* XXX this is wrong, +4 is missing */
155 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
156 (void) dseg_add_unique_s4(cd, cd->stackframesize * 8); /* FrameSize */
158 /* IsSync contains the offset relative to the stack pointer for the
159 argument of monitor_exit used in the exception handler. Since the
160 offset could be zero and give a wrong meaning of the flag it is
163 /* XXX Remove this "offset by one". */
165 code->synchronizedoffset = (rd->memuse + 1) * 8;
167 /* REMOVEME: We still need it for exception handling in assembler. */
169 if (code_is_leafmethod(code))
170 (void) dseg_add_unique_s4(cd, 1); /* IsLeaf */
172 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
174 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
175 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
177 /* Add a placeholder for the size of the line number table. We don't
178 know the size yet, because the information is collected during code
179 generation; this saves one additional pass over all of the
180 instructions. Note that code optimization may have moved positions
181 away from the information obtained from the class file. */
182 (void) dseg_addlinenumbertablesize(cd);
184 (void) dseg_add_unique_s4(cd, jd->exceptiontablelength); /* ExTableSize */
186 /* create exception table */
188 for (ex = jd->exceptiontable; ex != NULL; ex = ex->down) {
189 dseg_add_target(cd, ex->start);
190 dseg_add_target(cd, ex->end);
191 dseg_add_target(cd, ex->handler);
192 (void) dseg_add_unique_address(cd, ex->catchtype.any);
195 #if defined(ENABLE_PROFILING)
196 /* generate method profiling code */
198 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
199 /* count frequency */
201 M_MOV_IMM(code, REG_ITMP3);
202 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
206 /* create stack frame (if necessary) */
208 if (cd->stackframesize)
209 M_ASUB_IMM(cd->stackframesize * 8, REG_SP);
211 /* save return address and used callee saved registers */
213 p = cd->stackframesize;
214 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
215 p--; M_AST(rd->savintregs[i], REG_SP, p * 8);
217 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
218 p--; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 8);
221 /* take arguments out of register or stack frame */
226 for (p = 0, l = 0; p < md->paramcount; p++) {
227 t = md->paramtypes[p].type;
229 varindex = jd->local_map[l * 5 + t];
230 #if defined(ENABLE_SSA)
232 if (varindex != UNUSED)
233 varindex = ls->var_0[varindex];
234 if ((varindex != UNUSED) && (ls->lifetime[varindex].type == UNUSED))
239 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
242 if (varindex == UNUSED)
246 s1 = md->params[p].regoff;
249 if (IS_INT_LNG_TYPE(t)) { /* integer args */
250 if (!md->params[p].inmemory) { /* register arguments */
251 log_text("integer register argument");
253 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
254 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
256 else { /* reg arg -> spilled */
257 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
261 if (!(var->flags & INMEMORY)) {
262 M_ILD(d, REG_SP, cd->stackframesize * 8 + 4 + s1);
265 if (!IS_2_WORD_TYPE(t)) {
266 #if defined(ENABLE_SSA)
267 /* no copy avoiding by now possible with SSA */
269 emit_mov_membase_reg( /* + 4 for return address */
270 cd, REG_SP, cd->stackframesize * 8 + s1 + 4,
272 emit_mov_reg_membase(
273 cd, REG_ITMP1, REG_SP, var->vv.regoff);
276 #endif /*defined(ENABLE_SSA)*/
277 /* reuse stackslot */
278 var->vv.regoff = cd->stackframesize * 8 + 4 + s1;
282 #if defined(ENABLE_SSA)
283 /* no copy avoiding by now possible with SSA */
285 emit_mov_membase_reg( /* + 4 for return address */
286 cd, REG_SP, cd->stackframesize * 8 + s1 + 4,
288 emit_mov_reg_membase(
289 cd, REG_ITMP1, REG_SP, var->vv.regoff);
290 emit_mov_membase_reg( /* + 4 for return address */
291 cd, REG_SP, cd->stackframesize * 8 + s1 + 4 + 4,
293 emit_mov_reg_membase(
294 cd, REG_ITMP1, REG_SP, var->vv.regoff + 4);
297 #endif /*defined(ENABLE_SSA)*/
298 /* reuse stackslot */
299 var->vv.regoff = cd->stackframesize * 8 + 4 + s1;
304 else { /* floating args */
305 if (!md->params[p].inmemory) { /* register arguments */
306 log_text("There are no float argument registers!");
308 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
309 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
310 } else { /* reg arg -> spilled */
311 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 8 */
315 else { /* stack arguments */
316 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
319 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
321 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
326 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
328 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
331 } else { /* stack-arg -> spilled */
332 #if defined(ENABLE_SSA)
333 /* no copy avoiding by now possible with SSA */
335 emit_mov_membase_reg(
336 cd, REG_SP, cd->stackframesize * 8 + s1 + 4, REG_ITMP1);
337 emit_mov_reg_membase(
338 cd, REG_ITMP1, REG_SP, var->vv.regoff);
341 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
342 emit_fstps_membase(cd, REG_SP, var->vv.regoff);
346 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
347 emit_fstpl_membase(cd, REG_SP, var->vv.regoff);
351 #endif /*defined(ENABLE_SSA)*/
352 /* reuse stackslot */
353 var->vv.regoff = cd->stackframesize * 8 + 4 + s1;
359 /* call monitorenter function */
361 #if defined(ENABLE_THREADS)
362 if (checksync && code_is_synchronized(code)) {
365 if (m->flags & ACC_STATIC) {
366 M_MOV_IMM(&m->class->object.header, REG_ITMP1);
369 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 8 + 4);
372 M_ALD_MEM(REG_ITMP1, EXCEPTION_HARDWARE_NULLPOINTER);
375 M_AST(REG_ITMP1, REG_SP, s1 * 8);
376 M_AST(REG_ITMP1, REG_SP, 0 * 4);
377 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
383 emit_verbosecall_enter(jd);
388 #if defined(ENABLE_SSA)
389 /* with SSA the Header is Basic Block 0 - insert phi Moves if necessary */
391 codegen_emit_phi_moves(jd, ls->basicblocks[0]);
394 /* end of header generation */
396 /* create replacement points */
398 REPLACEMENT_POINTS_INIT(cd, jd);
400 /* walk through all basic blocks */
402 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
404 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
406 if (bptr->flags >= BBREACHED) {
407 /* branch resolving */
409 codegen_resolve_branchrefs(cd, bptr);
411 /* handle replacement points */
413 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
415 #if defined(ENABLE_REPLACEMENT)
416 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
417 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
419 disp = (s4) &(m->hitcountdown);
420 M_ISUB_IMM_MEMABS(1, disp);
426 /* copy interface registers to their destination */
431 #if defined(ENABLE_PROFILING)
432 /* generate basic block profiling code */
434 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
435 /* count frequency */
437 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
438 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
442 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
443 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
446 # if defined(ENABLE_SSA)
448 last_cmd_was_goto = false;
452 var = VAR(bptr->invars[len]);
453 if (bptr->type != BBTYPE_STD) {
454 if (!IS_2_WORD_TYPE(var->type)) {
455 if (bptr->type == BBTYPE_EXH) {
456 d = codegen_reg_of_var(0, var, REG_ITMP1);
457 M_INTMOVE(REG_ITMP1, d);
458 emit_store(jd, NULL, var, d);
462 log_text("copy interface registers(EXH, SBR): longs \
463 have to be in memory (begin 1)");
471 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
475 var = VAR(bptr->invars[len]);
476 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
477 if (!IS_2_WORD_TYPE(var->type)) {
478 if (bptr->type == BBTYPE_EXH) {
479 d = codegen_reg_of_var(0, var, REG_ITMP1);
480 M_INTMOVE(REG_ITMP1, d);
481 emit_store(jd, NULL, var, d);
485 log_text("copy interface registers: longs have to be in \
492 assert((var->flags & INOUT));
497 /* walk through all instructions */
502 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
503 if (iptr->line != currentline) {
504 dseg_addlinenumber(cd, iptr->line);
505 currentline = iptr->line;
508 MCODECHECK(1024); /* 1kB should be enough */
511 case ICMD_NOP: /* ... ==> ... */
512 case ICMD_POP: /* ..., value ==> ... */
513 case ICMD_POP2: /* ..., value, value ==> ... */
516 case ICMD_INLINE_START:
518 REPLACEMENT_POINT_INLINE_START(cd, iptr);
521 case ICMD_INLINE_BODY:
523 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
524 dseg_addlinenumber_inline_start(cd, iptr);
525 dseg_addlinenumber(cd, iptr->line);
528 case ICMD_INLINE_END:
530 dseg_addlinenumber_inline_end(cd, iptr);
531 dseg_addlinenumber(cd, iptr->line);
534 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
536 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
537 emit_nullpointer_check(cd, iptr, s1);
540 /* constant operations ************************************************/
542 case ICMD_ICONST: /* ... ==> ..., constant */
544 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
545 ICONST(d, iptr->sx.val.i);
546 emit_store_dst(jd, iptr, d);
549 case ICMD_LCONST: /* ... ==> ..., constant */
551 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
552 LCONST(d, iptr->sx.val.l);
553 emit_store_dst(jd, iptr, d);
556 case ICMD_FCONST: /* ... ==> ..., constant */
558 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
559 if (iptr->sx.val.f == 0.0) {
563 if (iptr->sx.val.i == 0x80000000) {
567 } else if (iptr->sx.val.f == 1.0) {
570 } else if (iptr->sx.val.f == 2.0) {
576 disp = dseg_add_float(cd, iptr->sx.val.f);
577 emit_mov_imm_reg(cd, 0, REG_ITMP1);
579 emit_flds_membase(cd, REG_ITMP1, disp);
581 emit_store_dst(jd, iptr, d);
584 case ICMD_DCONST: /* ... ==> ..., constant */
586 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
587 if (iptr->sx.val.d == 0.0) {
591 if (iptr->sx.val.l == 0x8000000000000000LL) {
595 } else if (iptr->sx.val.d == 1.0) {
598 } else if (iptr->sx.val.d == 2.0) {
604 disp = dseg_add_double(cd, iptr->sx.val.d);
605 emit_mov_imm_reg(cd, 0, REG_ITMP1);
607 emit_fldl_membase(cd, REG_ITMP1, disp);
609 emit_store_dst(jd, iptr, d);
612 case ICMD_ACONST: /* ... ==> ..., constant */
614 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
616 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
617 patcher_add_patch_ref(jd, PATCHER_aconst,
618 iptr->sx.val.c.ref, 0);
623 if (iptr->sx.val.anyptr == NULL)
626 M_MOV_IMM(iptr->sx.val.anyptr, d);
628 emit_store_dst(jd, iptr, d);
632 /* load/store/copy/move operations ************************************/
650 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
655 /* integer operations *************************************************/
657 case ICMD_INEG: /* ..., value ==> ..., - value */
659 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
660 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
663 emit_store_dst(jd, iptr, d);
666 case ICMD_LNEG: /* ..., value ==> ..., - value */
668 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
669 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
671 M_NEG(GET_LOW_REG(d));
672 M_IADDC_IMM(0, GET_HIGH_REG(d));
673 M_NEG(GET_HIGH_REG(d));
674 emit_store_dst(jd, iptr, d);
677 case ICMD_I2L: /* ..., value ==> ..., value */
679 s1 = emit_load_s1(jd, iptr, EAX);
680 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
683 M_LNGMOVE(EAX_EDX_PACKED, d);
684 emit_store_dst(jd, iptr, d);
687 case ICMD_L2I: /* ..., value ==> ..., value */
689 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
690 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
692 emit_store_dst(jd, iptr, d);
695 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
697 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
698 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
702 emit_store_dst(jd, iptr, d);
705 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
707 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
708 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
710 emit_store_dst(jd, iptr, d);
713 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
715 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
716 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
718 emit_store_dst(jd, iptr, d);
722 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
724 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
725 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
726 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
733 emit_store_dst(jd, iptr, d);
737 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
738 /* sx.val.i = constant */
740 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
741 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
743 /* `inc reg' is slower on p4's (regarding to ia32
744 optimization reference manual and benchmarks) and as
748 M_IADD_IMM(iptr->sx.val.i, d);
749 emit_store_dst(jd, iptr, d);
752 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
754 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
755 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
756 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
757 M_INTMOVE(s1, GET_LOW_REG(d));
758 M_IADD(s2, GET_LOW_REG(d));
759 /* don't use REG_ITMP1 */
760 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
761 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
762 M_INTMOVE(s1, GET_HIGH_REG(d));
763 M_IADDC(s2, GET_HIGH_REG(d));
764 emit_store_dst(jd, iptr, d);
767 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
768 /* sx.val.l = constant */
770 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
771 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
773 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
774 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
775 emit_store_dst(jd, iptr, d);
778 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
780 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
781 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
782 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
784 M_INTMOVE(s1, REG_ITMP1);
785 M_ISUB(s2, REG_ITMP1);
786 M_INTMOVE(REG_ITMP1, d);
792 emit_store_dst(jd, iptr, d);
795 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
796 /* sx.val.i = constant */
798 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
799 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
801 M_ISUB_IMM(iptr->sx.val.i, d);
802 emit_store_dst(jd, iptr, d);
805 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
807 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
808 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
809 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
810 if (s2 == GET_LOW_REG(d)) {
811 M_INTMOVE(s1, REG_ITMP1);
812 M_ISUB(s2, REG_ITMP1);
813 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
816 M_INTMOVE(s1, GET_LOW_REG(d));
817 M_ISUB(s2, GET_LOW_REG(d));
819 /* don't use REG_ITMP1 */
820 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
821 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
822 if (s2 == GET_HIGH_REG(d)) {
823 M_INTMOVE(s1, REG_ITMP2);
824 M_ISUBB(s2, REG_ITMP2);
825 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
828 M_INTMOVE(s1, GET_HIGH_REG(d));
829 M_ISUBB(s2, GET_HIGH_REG(d));
831 emit_store_dst(jd, iptr, d);
834 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
835 /* sx.val.l = constant */
837 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
838 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
840 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
841 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
842 emit_store_dst(jd, iptr, d);
845 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
847 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
848 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
849 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
856 emit_store_dst(jd, iptr, d);
859 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
860 /* sx.val.i = constant */
862 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
863 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
864 M_IMUL_IMM(s1, iptr->sx.val.i, d);
865 emit_store_dst(jd, iptr, d);
868 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
870 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
871 s2 = emit_load_s2_low(jd, iptr, EDX);
872 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
874 M_INTMOVE(s1, REG_ITMP2);
875 M_IMUL(s2, REG_ITMP2);
877 s1 = emit_load_s1_low(jd, iptr, EAX);
878 s2 = emit_load_s2_high(jd, iptr, EDX);
881 M_IADD(EDX, REG_ITMP2);
883 s1 = emit_load_s1_low(jd, iptr, EAX);
884 s2 = emit_load_s2_low(jd, iptr, EDX);
887 M_INTMOVE(EAX, GET_LOW_REG(d));
888 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
890 emit_store_dst(jd, iptr, d);
893 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
894 /* sx.val.l = constant */
896 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
897 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
898 ICONST(EAX, iptr->sx.val.l);
900 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
901 M_IADD(REG_ITMP2, EDX);
902 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
903 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
904 M_IADD(REG_ITMP2, EDX);
905 M_LNGMOVE(EAX_EDX_PACKED, d);
906 emit_store_dst(jd, iptr, d);
909 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
911 s1 = emit_load_s1(jd, iptr, EAX);
912 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
913 d = codegen_reg_of_dst(jd, iptr, EAX);
914 emit_arithmetic_check(cd, iptr, s2);
916 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
918 /* check as described in jvm spec */
920 M_CMP_IMM(0x80000000, EAX);
927 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
928 emit_store_dst(jd, iptr, d);
931 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
933 s1 = emit_load_s1(jd, iptr, EAX);
934 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
935 d = codegen_reg_of_dst(jd, iptr, EDX);
936 emit_arithmetic_check(cd, iptr, s2);
938 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
940 /* check as described in jvm spec */
942 M_CMP_IMM(0x80000000, EAX);
950 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
951 emit_store_dst(jd, iptr, d);
954 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
955 /* sx.val.i = constant */
957 /* TODO: optimize for `/ 2' */
958 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
959 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
963 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
964 M_SRA_IMM(iptr->sx.val.i, d);
965 emit_store_dst(jd, iptr, d);
968 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
969 /* sx.val.i = constant */
971 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
972 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
974 M_MOV(s1, REG_ITMP1);
978 M_AND_IMM(iptr->sx.val.i, d);
980 M_BGE(2 + 2 + 6 + 2);
981 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
983 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
985 emit_store_dst(jd, iptr, d);
988 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
989 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
991 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
992 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
994 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
995 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
996 /* XXX could be optimized */
997 emit_arithmetic_check(cd, iptr, REG_ITMP3);
999 bte = iptr->sx.s23.s3.bte;
1002 M_LST(s2, REG_SP, 2 * 4);
1004 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1005 M_LST(s1, REG_SP, 0 * 4);
1007 M_MOV_IMM(bte->fp, REG_ITMP3);
1009 emit_store_dst(jd, iptr, d);
1012 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1013 /* sx.val.i = constant */
1015 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1016 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1018 M_TEST(GET_HIGH_REG(d));
1020 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1021 M_IADDC_IMM(0, GET_HIGH_REG(d));
1022 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1023 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1024 emit_store_dst(jd, iptr, d);
1028 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1029 /* sx.val.l = constant */
1031 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1032 if (iptr->dst.var->flags & INMEMORY) {
1033 if (iptr->s1.var->flags & INMEMORY) {
1034 /* Alpha algorithm */
1036 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8);
1038 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1044 /* TODO: hmm, don't know if this is always correct */
1046 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1048 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1054 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1055 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1057 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1058 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1059 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1060 emit_jcc(cd, CC_GE, disp);
1062 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1063 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1065 emit_neg_reg(cd, REG_ITMP1);
1066 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1067 emit_neg_reg(cd, REG_ITMP2);
1069 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1070 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1072 emit_neg_reg(cd, REG_ITMP1);
1073 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1074 emit_neg_reg(cd, REG_ITMP2);
1076 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 8);
1077 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 8 + 4);
1081 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1082 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1084 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1085 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1086 M_TEST(GET_LOW_REG(s1));
1092 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1094 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1095 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1096 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1097 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1100 emit_store_dst(jd, iptr, d);
1103 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1104 /* sx.val.i = constant */
1106 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1107 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1109 M_SLL_IMM(iptr->sx.val.i, d);
1110 emit_store_dst(jd, iptr, d);
1113 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1115 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1116 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1117 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1118 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1121 emit_store_dst(jd, iptr, d);
1124 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1125 /* sx.val.i = constant */
1127 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1128 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1130 M_SRA_IMM(iptr->sx.val.i, d);
1131 emit_store_dst(jd, iptr, d);
1134 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1136 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1137 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1138 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1139 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1142 emit_store_dst(jd, iptr, d);
1145 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1146 /* sx.val.i = constant */
1148 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1149 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1151 M_SRL_IMM(iptr->sx.val.i, d);
1152 emit_store_dst(jd, iptr, d);
1155 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1157 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1158 s2 = emit_load_s2(jd, iptr, ECX);
1159 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1162 M_TEST_IMM(32, ECX);
1164 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1165 M_CLR(GET_LOW_REG(d));
1166 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1167 M_SLL(GET_LOW_REG(d));
1168 emit_store_dst(jd, iptr, d);
1171 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1172 /* sx.val.i = constant */
1174 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1175 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1177 if (iptr->sx.val.i & 0x20) {
1178 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1179 M_CLR(GET_LOW_REG(d));
1180 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1184 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1186 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1188 emit_store_dst(jd, iptr, d);
1191 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1193 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1194 s2 = emit_load_s2(jd, iptr, ECX);
1195 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1198 M_TEST_IMM(32, ECX);
1200 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1201 M_SRA_IMM(31, GET_HIGH_REG(d));
1202 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1203 M_SRA(GET_HIGH_REG(d));
1204 emit_store_dst(jd, iptr, d);
1207 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1208 /* sx.val.i = constant */
1210 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1211 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1213 if (iptr->sx.val.i & 0x20) {
1214 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1215 M_SRA_IMM(31, GET_HIGH_REG(d));
1216 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1220 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1222 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1224 emit_store_dst(jd, iptr, d);
1227 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1229 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1230 s2 = emit_load_s2(jd, iptr, ECX);
1231 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1234 M_TEST_IMM(32, ECX);
1236 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1237 M_CLR(GET_HIGH_REG(d));
1238 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1239 M_SRL(GET_HIGH_REG(d));
1240 emit_store_dst(jd, iptr, d);
1243 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1244 /* sx.val.l = constant */
1246 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1247 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1249 if (iptr->sx.val.i & 0x20) {
1250 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1251 M_CLR(GET_HIGH_REG(d));
1252 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1256 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1258 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1260 emit_store_dst(jd, iptr, d);
1263 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1265 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1266 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1267 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1274 emit_store_dst(jd, iptr, d);
1277 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1278 /* sx.val.i = constant */
1280 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1281 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1283 M_AND_IMM(iptr->sx.val.i, d);
1284 emit_store_dst(jd, iptr, d);
1287 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1289 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1290 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1291 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1292 if (s2 == GET_LOW_REG(d))
1293 M_AND(s1, GET_LOW_REG(d));
1295 M_INTMOVE(s1, GET_LOW_REG(d));
1296 M_AND(s2, GET_LOW_REG(d));
1298 /* REG_ITMP1 probably contains low 32-bit of destination */
1299 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1300 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1301 if (s2 == GET_HIGH_REG(d))
1302 M_AND(s1, GET_HIGH_REG(d));
1304 M_INTMOVE(s1, GET_HIGH_REG(d));
1305 M_AND(s2, GET_HIGH_REG(d));
1307 emit_store_dst(jd, iptr, d);
/* Bitwise AND/OR/XOR for int and long operands.  On i386 a long lives
   in two 32-bit register halves (GET_LOW_REG/GET_HIGH_REG), so each
   half is combined independently.  In the register-register long forms
   the "s2 == GET_*_REG(d)" test chooses an operand order that avoids
   clobbering a destination half that still holds a source value. */
1310 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1311 /* sx.val.l = constant */
1313 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1314 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
/* ">> 32" extracts the high word of the 64-bit immediate. */
1316 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1317 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1318 emit_store_dst(jd, iptr, d);
1321 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1323 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1324 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1325 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1332 emit_store_dst(jd, iptr, d);
1335 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1336 /* sx.val.i = constant */
1338 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1339 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1341 M_OR_IMM(iptr->sx.val.i, d);
1342 emit_store_dst(jd, iptr, d);
1345 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1347 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1348 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1349 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1350 if (s2 == GET_LOW_REG(d))
1351 M_OR(s1, GET_LOW_REG(d));
1353 M_INTMOVE(s1, GET_LOW_REG(d));
1354 M_OR(s2, GET_LOW_REG(d));
/* High halves go through ITMP2/ITMP3: ITMP1 may already hold the
   computed low half of the destination. */
1356 /* REG_ITMP1 probably contains low 32-bit of destination */
1357 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1358 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1359 if (s2 == GET_HIGH_REG(d))
1360 M_OR(s1, GET_HIGH_REG(d));
1362 M_INTMOVE(s1, GET_HIGH_REG(d));
1363 M_OR(s2, GET_HIGH_REG(d));
1365 emit_store_dst(jd, iptr, d);
1368 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1369 /* sx.val.l = constant */
1371 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1372 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1374 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1375 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1376 emit_store_dst(jd, iptr, d);
1379 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1381 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1382 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1383 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1390 emit_store_dst(jd, iptr, d);
1393 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1394 /* sx.val.i = constant */
1396 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1397 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1399 M_XOR_IMM(iptr->sx.val.i, d);
1400 emit_store_dst(jd, iptr, d);
1403 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1405 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1406 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1407 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1408 if (s2 == GET_LOW_REG(d))
1409 M_XOR(s1, GET_LOW_REG(d));
1411 M_INTMOVE(s1, GET_LOW_REG(d));
1412 M_XOR(s2, GET_LOW_REG(d));
1414 /* REG_ITMP1 probably contains low 32-bit of destination */
1415 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1416 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1417 if (s2 == GET_HIGH_REG(d))
1418 M_XOR(s1, GET_HIGH_REG(d));
1420 M_INTMOVE(s1, GET_HIGH_REG(d));
1421 M_XOR(s2, GET_HIGH_REG(d));
1423 emit_store_dst(jd, iptr, d);
1426 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1427 /* sx.val.l = constant */
1429 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1430 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1432 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1433 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1434 emit_store_dst(jd, iptr, d);
1438 /* floating operations ************************************************/
/* Float/double arithmetic is done on the x87 FPU stack: operands are
   pushed via emit_load_s*, the arithmetic opcode operates on ST(0)/ST(1),
   and emit_store_dst pops/stores the result. */
1440 case ICMD_FNEG: /* ..., value ==> ..., - value */
1442 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1443 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1445 emit_store_dst(jd, iptr, d);
1448 case ICMD_DNEG: /* ..., value ==> ..., - value */
1450 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1451 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1453 emit_store_dst(jd, iptr, d);
1456 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1458 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1459 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1460 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1462 emit_store_dst(jd, iptr, d);
1465 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1467 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1468 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1469 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1471 emit_store_dst(jd, iptr, d);
1474 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1476 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1477 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1478 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1480 emit_store_dst(jd, iptr, d);
1483 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1485 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1486 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1487 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1489 emit_store_dst(jd, iptr, d);
1492 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1494 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1495 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1496 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1498 emit_store_dst(jd, iptr, d);
1501 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1503 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1504 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1505 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1507 emit_store_dst(jd, iptr, d);
1510 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1512 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1513 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1514 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1516 emit_store_dst(jd, iptr, d);
1519 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1521 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1522 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1523 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1525 emit_store_dst(jd, iptr, d);
/* FREM/DREM: x87 fprem computes a *partial* remainder, so the code
   loops back while the FPU signals "incomplete" (C2, which lands in PF
   after the status word is copied into EFLAGS — presumably via an
   fnstsw/sahf pair in the elided emit calls; confirm there).
   NOTE: the backward displacement -(2+1+2+1+6) is the hand-summed byte
   length of the instructions inside the loop; do not reorder or change
   any of them without recounting. */
1528 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1530 /* exchanged to skip fxch */
1531 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1532 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1533 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1534 /* emit_fxch(cd); */
1539 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1540 emit_store_dst(jd, iptr, d);
/* Pop the leftover divisor from the FPU stack. */
1541 emit_ffree_reg(cd, 0);
1545 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1547 /* exchanged to skip fxch */
1548 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1549 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1550 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1551 /* emit_fxch(cd); */
1556 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1557 emit_store_dst(jd, iptr, d);
1558 emit_ffree_reg(cd, 0);
/* int/long -> float/double via x87 fild, which can only read its
   operand from memory.  A register-resident int must therefore be
   spilled first; a unique data-segment word is used as scratch, which
   is shared by all threads executing this code (hence the XXX). */
1562 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1563 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1565 var = VAROP(iptr->s1);
1566 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1568 if (var->flags & INMEMORY) {
1569 emit_fildl_membase(cd, REG_SP, var->vv.regoff);
1571 /* XXX not thread safe! */
1572 disp = dseg_add_unique_s4(cd, 0);
/* The 0 immediate is presumably relocated to the data-segment base
   address later — TODO confirm against the patching code. */
1573 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1575 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1576 emit_fildl_membase(cd, REG_ITMP1, disp);
1579 emit_store_dst(jd, iptr, d);
1582 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1583 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1585 var = VAROP(iptr->s1);
1586 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1587 if (var->flags & INMEMORY) {
1588 emit_fildll_membase(cd, REG_SP, var->vv.regoff);
/* Longs are always spilled on i386; a register-pair long here is a
   register-allocator bug. */
1591 log_text("L2F: longs have to be in memory");
1594 emit_store_dst(jd, iptr, d);
/* float/double -> int.  Java requires round-toward-zero, so the FPU
   control word is temporarily switched to 0x0e7f (RC=truncate, 53-bit
   precision, exceptions masked) for the fistp and then restored to the
   default 0x027f (round-to-nearest).  On NaN or out-of-range input the
   x87 stores the "integer indefinite" 0x80000000; the code compares for
   that value and, if hit, calls asm_builtin_f2i to produce the
   Java-mandated saturated result.  The CC_NE jump skips the builtin
   call in the common case; its displacement is accumulated from exact
   instruction byte lengths (CALCOFFSETBYTES and hand-added constants),
   so the emitted sequence must not be reordered. */
1597 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1599 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1600 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1602 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1605 /* Round to zero, 53-bit mode, exception masked */
1606 disp = dseg_add_s4(cd, 0x0e7f);
1607 emit_fldcw_membase(cd, REG_ITMP1, disp);
1609 var = VAROP(iptr->dst);
1610 var1 = VAROP(iptr->s1);
1612 if (var->flags & INMEMORY) {
1613 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1615 /* Round to nearest, 53-bit mode, exceptions masked */
1616 disp = dseg_add_s4(cd, 0x027f);
1617 emit_fldcw_membase(cd, REG_ITMP1, disp);
/* 0x80000000 == x87 integer-indefinite: possible NaN/overflow. */
1619 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1620 REG_SP, var->vv.regoff);
1623 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1625 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1628 /* XXX not thread safe! */
1629 disp = dseg_add_unique_s4(cd, 0);
1630 emit_fistpl_membase(cd, REG_ITMP1, disp);
1631 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1633 /* Round to nearest, 53-bit mode, exceptions masked */
1634 disp = dseg_add_s4(cd, 0x027f);
1635 emit_fldcw_membase(cd, REG_ITMP1, disp);
1637 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1640 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1641 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1644 emit_jcc(cd, CC_NE, disp);
/* Slow path: reload the original operand and fix up via the builtin. */
1646 /* XXX: change this when we use registers */
1647 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1648 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1649 emit_call_reg(cd, REG_ITMP1);
1651 if (var->flags & INMEMORY) {
1652 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1655 M_INTMOVE(REG_RESULT, var->vv.regoff);
/* D2I mirrors F2I exactly, with double-width load (fldl) and builtin
   asm_builtin_d2i. */
1659 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1661 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1662 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1664 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1667 /* Round to zero, 53-bit mode, exception masked */
1668 disp = dseg_add_s4(cd, 0x0e7f);
1669 emit_fldcw_membase(cd, REG_ITMP1, disp);
1671 var = VAROP(iptr->dst);
1672 var1 = VAROP(iptr->s1);
1674 if (var->flags & INMEMORY) {
1675 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1677 /* Round to nearest, 53-bit mode, exceptions masked */
1678 disp = dseg_add_s4(cd, 0x027f);
1679 emit_fldcw_membase(cd, REG_ITMP1, disp);
1681 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1682 REG_SP, var->vv.regoff);
1685 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1687 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1690 /* XXX not thread safe! */
1691 disp = dseg_add_unique_s4(cd, 0);
1692 emit_fistpl_membase(cd, REG_ITMP1, disp);
1693 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1695 /* Round to nearest, 53-bit mode, exceptions masked */
1696 disp = dseg_add_s4(cd, 0x027f);
1697 emit_fldcw_membase(cd, REG_ITMP1, disp);
1699 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1702 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1703 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1706 emit_jcc(cd, CC_NE, disp);
1708 /* XXX: change this when we use registers */
1709 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1710 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1711 emit_call_reg(cd, REG_ITMP1);
1713 if (var->flags & INMEMORY) {
1714 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1716 M_INTMOVE(REG_RESULT, var->vv.regoff);
/* float/double -> long, same control-word dance as F2I/D2I but with a
   64-bit fistpll.  The 64-bit integer indefinite is
   0x8000000000000000, so the fix-up path is taken only when the high
   word equals 0x80000000 AND the low word equals 0; both compares must
   fail (CC_NE) to skip the asm_builtin_f2l/d2l call.  The 64-bit
   result arrives in the REG_RESULT/REG_RESULT2 pair. */
1720 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1722 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1723 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1725 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1728 /* Round to zero, 53-bit mode, exception masked */
1729 disp = dseg_add_s4(cd, 0x0e7f);
1730 emit_fldcw_membase(cd, REG_ITMP1, disp);
1732 var = VAROP(iptr->dst);
1733 var1 = VAROP(iptr->s1);
1735 if (var->flags & INMEMORY) {
1736 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1738 /* Round to nearest, 53-bit mode, exceptions masked */
1739 disp = dseg_add_s4(cd, 0x027f);
1740 emit_fldcw_membase(cd, REG_ITMP1, disp);
/* "+ 4" addresses the high 32-bit word of the spilled long. */
1742 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1743 REG_SP, var->vv.regoff + 4);
1746 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1748 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1751 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1753 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1755 emit_jcc(cd, CC_NE, disp);
1757 emit_alu_imm_membase(cd, ALU_CMP, 0,
1758 REG_SP, var->vv.regoff);
1761 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1763 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1765 emit_jcc(cd, CC_NE, disp);
1767 /* XXX: change this when we use registers */
1768 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1769 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1770 emit_call_reg(cd, REG_ITMP1);
1771 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1772 emit_mov_reg_membase(cd, REG_RESULT2,
1773 REG_SP, var->vv.regoff + 4);
1776 log_text("F2L: longs have to be in memory");
1781 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1783 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1784 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1786 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1789 /* Round to zero, 53-bit mode, exception masked */
1790 disp = dseg_add_s4(cd, 0x0e7f);
1791 emit_fldcw_membase(cd, REG_ITMP1, disp);
1793 var = VAROP(iptr->dst);
1794 var1 = VAROP(iptr->s1);
1796 if (var->flags & INMEMORY) {
1797 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1799 /* Round to nearest, 53-bit mode, exceptions masked */
1800 disp = dseg_add_s4(cd, 0x027f);
1801 emit_fldcw_membase(cd, REG_ITMP1, disp);
1803 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1804 REG_SP, var->vv.regoff + 4);
1807 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1809 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1812 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1814 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1816 emit_jcc(cd, CC_NE, disp);
1818 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff);
1821 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1823 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1825 emit_jcc(cd, CC_NE, disp);
1827 /* XXX: change this when we use registers */
1828 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1829 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1830 emit_call_reg(cd, REG_ITMP1);
1831 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1832 emit_mov_reg_membase(cd, REG_RESULT2,
1833 REG_SP, var->vv.regoff + 4);
1836 log_text("D2L: longs have to be in memory");
/* F2D/D2F: the x87 holds all values in extended precision, so a
   load followed by a store in the destination width performs the
   conversion. */
1841 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1843 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1844 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1846 emit_store_dst(jd, iptr, d);
1849 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1851 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1852 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1854 emit_store_dst(jd, iptr, d);
/* FCMPL/FCMPG: operands are loaded in swapped order so the compare can
   run without an fxch.  After the FPU status word lands in AX
   (presumably via fnstsw in the elided emit calls — confirm), bit
   0x400 (C2) flags an unordered compare, i.e. a NaN operand; the two
   cases differ only in how they patch the flags so NaN yields the
   result required by the JVM spec (-1 for fcmpl, +1 for fcmpg).
   The final jcc/sub/jmp/add ladder materializes -1/0/+1 into d; all
   displacements are hand-counted instruction byte lengths. */
1857 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1860 /* exchanged to skip fxch */
1861 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1862 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1863 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1864 /* emit_fxch(cd); */
1867 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1868 emit_jcc(cd, CC_E, 6);
1869 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1871 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1872 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1873 emit_jcc(cd, CC_B, 3 + 5);
1874 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1875 emit_jmp_imm(cd, 3);
1876 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1877 emit_store_dst(jd, iptr, d);
1880 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1883 /* exchanged to skip fxch */
1884 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1885 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1886 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1887 /* emit_fxch(cd); */
1890 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1891 emit_jcc(cd, CC_E, 3);
1892 emit_movb_imm_reg(cd, 1, REG_AH);
1894 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1895 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1896 emit_jcc(cd, CC_B, 3 + 5);
1897 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1898 emit_jmp_imm(cd, 3);
1899 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1900 emit_store_dst(jd, iptr, d);
1904 /* memory operations **************************************************/
/* Array element loads.  The null check is implicit: the first access
   through the (possibly null) array reference faults and is handled by
   the signal-based hardware-exception machinery.  The bounds check is
   explicit via emit_arrayindexoutofbounds_check.  The last argument of
   the memindex emitters is the scale shift (log2 of element size). */
1906 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1908 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1909 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1910 /* implicit null-pointer check */
1911 M_ILD(d, s1, OFFSET(java_array_t, size));
1912 emit_store_dst(jd, iptr, d);
1915 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1917 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1918 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1919 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1920 /* implicit null-pointer check */
1921 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
/* movsbl: byte elements are sign-extended to int. */
1922 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray_t, data[0]),
1924 emit_store_dst(jd, iptr, d);
1927 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1929 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1930 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1931 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1932 /* implicit null-pointer check */
1933 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
/* movzwl: char elements are zero-extended. */
1934 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray_t, data[0]),
1936 emit_store_dst(jd, iptr, d);
1939 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1941 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1942 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1943 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1944 /* implicit null-pointer check */
1945 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
/* movswl: short elements are sign-extended. */
1946 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray_t, data[0]),
1948 emit_store_dst(jd, iptr, d);
1951 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1953 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1954 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1955 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1956 /* implicit null-pointer check */
1957 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1958 emit_mov_memindex_reg(cd, OFFSET(java_intarray_t, data[0]),
1960 emit_store_dst(jd, iptr, d);
/* LALOAD: a 64-bit element needs two 32-bit loads; the destination
   long must be a stack slot (asserted below), filled low word first. */
1963 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1965 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1966 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1967 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1968 /* implicit null-pointer check */
1969 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1971 var = VAROP(iptr->dst);
1973 assert(var->flags & INMEMORY);
1974 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]),
1975 s1, s2, 3, REG_ITMP3);
1976 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff);
1977 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]) + 4,
1978 s1, s2, 3, REG_ITMP3);
1979 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff + 4);
1982 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1984 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1985 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1986 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1987 /* implicit null-pointer check */
1988 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1989 emit_flds_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2, 2);
1990 emit_store_dst(jd, iptr, d);
1993 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
1995 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1996 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1997 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1998 /* implicit null-pointer check */
1999 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2000 emit_fldl_memindex(cd, OFFSET(java_doublearray_t, data[0]), s1, s2,3);
2001 emit_store_dst(jd, iptr, d);
2004 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2006 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2007 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2008 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2009 /* implicit null-pointer check */
2010 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2011 emit_mov_memindex_reg(cd, OFFSET(java_objectarray_t, data[0]),
2013 emit_store_dst(jd, iptr, d);
/* Array element stores.  Same implicit-null / explicit-bounds scheme
   as the loads above. */
1089-style note: none.
2017 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2019 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2020 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2021 /* implicit null-pointer check */
2022 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2023 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
/* movb needs a byte-addressable register (AL..BL/AH..BH); copy into
   ITMP3 in case the value sits in EBP/ESI/EDI. */
2025 /* because EBP, ESI, EDI have no xH and xL nibbles */
2026 M_INTMOVE(s3, REG_ITMP3);
2029 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray_t, data[0]),
2033 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2035 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2036 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2037 /* implicit null-pointer check */
2038 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2039 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2040 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray_t, data[0]),
2044 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2046 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2047 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2048 /* implicit null-pointer check */
2049 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2050 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2051 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray_t, data[0]),
2055 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2057 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2058 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2059 /* implicit null-pointer check */
2060 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2061 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2062 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray_t, data[0]),
/* LASTORE: the long value must be a stack slot; stored as two 32-bit
   halves through REG_ITMP3. */
2066 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2068 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2069 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2070 /* implicit null-pointer check */
2071 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2073 var = VAROP(iptr->sx.s23.s3);
2075 assert(var->flags & INMEMORY);
2076 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff, REG_ITMP3);
2077 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray_t, data[0])
2079 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff + 4, REG_ITMP3);
2080 emit_mov_reg_memindex(cd, REG_ITMP3,
2081 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2084 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2086 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2087 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2088 /* implicit null-pointer check */
2089 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2090 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2091 emit_fstps_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2,2);
2094 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2096 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2097 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2098 /* implicit null-pointer check */
2099 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2100 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2101 emit_fstpl_memindex(cd, OFFSET(java_doublearray_t, data[0]),
/* AASTORE: reference stores need a runtime type (covariance) check;
   arrayref and value are passed on the stack to BUILTIN_FAST_canstore,
   and the operands are reloaded afterwards since the call clobbers the
   temporary registers. */
2105 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2107 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2108 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2109 /* implicit null-pointer check */
2110 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2111 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2113 M_AST(s1, REG_SP, 0 * 4);
2114 M_AST(s3, REG_SP, 1 * 4);
2115 M_MOV_IMM(BUILTIN_FAST_canstore, REG_ITMP1);
2117 emit_arraystore_check(cd, iptr);
2119 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2120 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2121 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2122 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray_t, data[0]),
/* Constant array stores: the value is an immediate in the
   instruction, so no value register is needed. */
2126 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2128 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2129 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2130 /* implicit null-pointer check */
2131 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2132 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2133 OFFSET(java_bytearray_t, data[0]), s1, s2, 0);
2136 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2138 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2139 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2140 /* implicit null-pointer check */
2141 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2142 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2143 OFFSET(java_chararray_t, data[0]), s1, s2, 1);
2146 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2148 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2149 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2150 /* implicit null-pointer check */
2151 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2152 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2153 OFFSET(java_shortarray_t, data[0]), s1, s2, 1);
2156 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2158 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2159 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2160 /* implicit null-pointer check */
2161 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2162 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2163 OFFSET(java_intarray_t, data[0]), s1, s2, 2);
/* LASTORECONST: high word is formed as "(s4)constval >> 31", i.e. the
   sign word of the LOW 32 bits — correct only if the 64-bit constant
   fits in a signed 32-bit value.  Presumably the front end only emits
   this variant for such constants — TODO confirm. */
2166 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2168 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2169 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2170 /* implicit null-pointer check */
2171 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2172 emit_mov_imm_memindex(cd,
2173 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2174 OFFSET(java_longarray_t, data[0]), s1, s2, 3);
2175 emit_mov_imm_memindex(cd,
2176 ((s4)iptr->sx.s23.s3.constval) >> 31,
2177 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
/* AASTORECONST stores an immediate 0: only null can be written
   without an arraystore type check. */
2180 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2182 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2183 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2184 /* implicit null-pointer check */
2185 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2186 emit_mov_imm_memindex(cd, 0,
2187 OFFSET(java_objectarray_t, data[0]), s1, s2, 2);
/* Static field access.  For an unresolved field a patcher reference is
   registered and the field address (disp, loaded as an immediate into
   REG_ITMP1) is filled in at patch time; for a resolved field the
   address is fi->value, with a class-initialization patcher added if
   the class is not yet (almost) initialized.  The inner switch on
   fieldtype selects the load/store width and register file. */
2191 case ICMD_GETSTATIC: /* ... ==> ..., value */
2193 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2194 uf = iptr->sx.s23.s3.uf;
2195 fieldtype = uf->fieldref->parseddesc.fd->type;
2198 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2202 fi = iptr->sx.s23.s3.fmiref->p.field;
2203 fieldtype = fi->type;
2204 disp = (intptr_t) fi->value;
2206 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2207 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->class, 0);
2210 M_MOV_IMM(disp, REG_ITMP1);
2211 switch (fieldtype) {
2214 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2215 M_ILD(d, REG_ITMP1, 0);
2218 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2219 M_LLD(d, REG_ITMP1, 0);
2222 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2223 M_FLD(d, REG_ITMP1, 0);
2226 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2227 M_DLD(d, REG_ITMP1, 0);
2230 emit_store_dst(jd, iptr, d);
2233 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2235 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2236 uf = iptr->sx.s23.s3.uf;
2237 fieldtype = uf->fieldref->parseddesc.fd->type;
2240 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2243 fi = iptr->sx.s23.s3.fmiref->p.field;
2244 fieldtype = fi->type;
2245 disp = (intptr_t) fi->value;
2247 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2248 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->class, 0);
2251 M_MOV_IMM(disp, REG_ITMP1);
2252 switch (fieldtype) {
2255 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2256 M_IST(s1, REG_ITMP1, 0);
2259 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2260 M_LST(s1, REG_ITMP1, 0);
2263 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2264 emit_fstps_membase(cd, REG_ITMP1, 0);
2267 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2268 emit_fstpl_membase(cd, REG_ITMP1, 0);
2273 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2274 /* val = value (in current instruction) */
2275 /* following NOP) */
2277 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2278 uf = iptr->sx.s23.s3.uf;
2279 fieldtype = uf->fieldref->parseddesc.fd->type;
2282 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2285 fi = iptr->sx.s23.s3.fmiref->p.field;
2286 fieldtype = fi->type;
2287 disp = (intptr_t) fi->value;
2289 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2290 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->class, 0);
2293 M_MOV_IMM(disp, REG_ITMP1);
2294 switch (fieldtype) {
2297 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
/* Long constant: low word, then sign word of the low 32 bits
   (">> 31") — assumes the constant fits in s4; see LASTORECONST. */
2300 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2301 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
/* Instance field access.  An explicit null check precedes the access.
   The 32-bit-displacement load/store forms (M_ILD32, M_IST32, ...)
   are used so an unresolved field's offset can be patched in later
   without changing the instruction length. */
2308 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2310 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2311 emit_nullpointer_check(cd, iptr, s1);
2313 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2314 uf = iptr->sx.s23.s3.uf;
2315 fieldtype = uf->fieldref->parseddesc.fd->type;
2318 patcher_add_patch_ref(jd, PATCHER_getfield,
2319 iptr->sx.s23.s3.uf, 0);
2322 fi = iptr->sx.s23.s3.fmiref->p.field;
2323 fieldtype = fi->type;
2327 switch (fieldtype) {
2330 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2331 M_ILD32(d, s1, disp);
2334 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2335 M_LLD32(d, s1, disp);
2338 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2339 M_FLD32(d, s1, disp);
2342 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2343 M_DLD32(d, s1, disp);
2346 emit_store_dst(jd, iptr, d);
2349 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2351 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2352 emit_nullpointer_check(cd, iptr, s1);
/* The value is loaded BEFORE the patcher reference is registered so
   the patch position points at the store instruction itself. */
2354 /* must be done here because of code patching */
2356 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2357 uf = iptr->sx.s23.s3.uf;
2358 fieldtype = uf->fieldref->parseddesc.fd->type;
2361 fi = iptr->sx.s23.s3.fmiref->p.field;
2362 fieldtype = fi->type;
2365 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2366 if (IS_2_WORD_TYPE(fieldtype))
2367 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2369 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2372 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2374 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2376 uf = iptr->sx.s23.s3.uf;
2379 patcher_add_patch_ref(jd, PATCHER_putfield, uf, 0);
2383 fi = iptr->sx.s23.s3.fmiref->p.field;
2387 switch (fieldtype) {
2390 M_IST32(s2, s1, disp);
2393 M_LST32(s2, s1, disp);
2396 emit_fstps_membase32(cd, s1, disp);
2399 emit_fstpl_membase32(cd, s1, disp);
2404 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2405 /* val = value (in current instruction) */
2406 /* following NOP) */
2408 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2409 emit_nullpointer_check(cd, iptr, s1);
2411 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2412 uf = iptr->sx.s23.s3.uf;
2413 fieldtype = uf->fieldref->parseddesc.fd->type;
2416 patcher_add_patch_ref(jd, PATCHER_putfieldconst,
2420 fi = iptr->sx.s23.s3.fmiref->p.field;
2421 fieldtype = fi->type;
2425 switch (fieldtype) {
2428 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
/* Long constant split as in PUTSTATICCONST: ">> 31" sign word
   assumes the value fits in s4. */
2431 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2432 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2440 /* branch operations **************************************************/
/* ATHROW: the exception object goes into REG_ITMP1_XPTR.  The
   call-to-next-instruction / pop trick ("M_CALL_IMM(0)" then M_POP)
   materializes the current PC into REG_ITMP2_XPC for the exception
   handler search in asm_handle_exception. */
2442 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2444 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2445 M_INTMOVE(s1, REG_ITMP1_XPTR);
2447 #ifdef ENABLE_VERIFIER
2448 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2449 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2450 iptr->sx.s23.s2.uc, 0);
2452 #endif /* ENABLE_VERIFIER */
2454 M_CALL_IMM(0); /* passing exception pc */
2455 M_POP(REG_ITMP2_XPC);
2457 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2461 case ICMD_GOTO: /* ... ==> ... */
2462 case ICMD_RET: /* ... ==> ... */
2464 #if defined(ENABLE_SSA)
/* Under SSA, pending phi moves must be flushed before the
   unconditional jump leaves the block. */
2466 last_cmd_was_goto = true;
2468 /* In case of a Goto phimoves have to be inserted before the */
2471 codegen_emit_phi_moves(jd, bptr);
2474 emit_br(cd, iptr->dst.block);
2478 case ICMD_JSR: /* ... ==> ... */
2480 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
/* IFNULL/IFNONNULL share one emitter: the opcode delta selects the
   condition code. */
2484 case ICMD_IFNULL: /* ..., value ==> ... */
2485 case ICMD_IFNONNULL:
2487 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2489 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
/* Conditional branches on int and long values.  Long comparisons are
   synthesized from 32-bit operations: zero tests OR the two halves
   together; ordered compares test the high words with a signed branch,
   then the low words with the corresponding UNSIGNED branch (bult,
   bule, bugt, buge), which is the standard two-word comparison
   scheme. */
2492 case ICMD_IFEQ: /* ..., value ==> ... */
2499 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2500 M_CMP_IMM(iptr->sx.val.i, s1);
2501 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
2504 case ICMD_IF_LEQ: /* ..., value ==> ... */
2506 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2507 if (iptr->sx.val.l == 0) {
2508 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2509 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
/* Non-zero constant: XOR both halves with the constant; the OR of
   the results is zero iff the value equals the constant. */
2512 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2513 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2514 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2515 M_OR(REG_ITMP2, REG_ITMP1);
2517 emit_beq(cd, iptr->dst.block);
2520 case ICMD_IF_LLT: /* ..., value ==> ... */
2522 if (iptr->sx.val.l == 0) {
2523 /* If high 32-bit are less than zero, then the 64-bits
2525 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2527 emit_blt(cd, iptr->dst.block);
2530 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2531 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2532 emit_blt(cd, iptr->dst.block);
2534 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2535 emit_bult(cd, iptr->dst.block);
2539 case ICMD_IF_LLE: /* ..., value ==> ... */
2541 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2542 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2543 emit_blt(cd, iptr->dst.block);
2545 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2546 emit_bule(cd, iptr->dst.block);
2549 case ICMD_IF_LNE: /* ..., value ==> ... */
2551 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2552 if (iptr->sx.val.l == 0) {
2553 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2554 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2557 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2558 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2559 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2560 M_OR(REG_ITMP2, REG_ITMP1);
2562 emit_bne(cd, iptr->dst.block);
2565 case ICMD_IF_LGT: /* ..., value ==> ... */
2567 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2568 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2569 emit_bgt(cd, iptr->dst.block);
2571 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2572 emit_bugt(cd, iptr->dst.block);
2575 case ICMD_IF_LGE: /* ..., value ==> ... */
2577 if (iptr->sx.val.l == 0) {
/* Sign test on the high half suffices for >= 0. */
2578 /* If high 32-bit are greater equal zero, then the
2580 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2582 emit_bge(cd, iptr->dst.block);
2585 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2586 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2587 emit_bgt(cd, iptr->dst.block);
2589 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2590 emit_buge(cd, iptr->dst.block);
2594 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2595 case ICMD_IF_ICMPNE:
2596 case ICMD_IF_ICMPLT:
2597 case ICMD_IF_ICMPGT:
2598 case ICMD_IF_ICMPGE:
2599 case ICMD_IF_ICMPLE:
2601 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2602 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2604 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);
2607 case ICMD_IF_ACMPEQ: /* ..., value, value ==> ... */
2608 case ICMD_IF_ACMPNE:
2610 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2611 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2613 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);
2616 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2618 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2619 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2620 M_INTMOVE(s1, REG_ITMP1);
2621 M_XOR(s2, REG_ITMP1);
2622 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2623 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2624 M_INTMOVE(s1, REG_ITMP2);
2625 M_XOR(s2, REG_ITMP2);
2626 M_OR(REG_ITMP1, REG_ITMP2);
2627 emit_beq(cd, iptr->dst.block);
2630 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2632 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2633 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2634 M_INTMOVE(s1, REG_ITMP1);
2635 M_XOR(s2, REG_ITMP1);
2636 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2637 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2638 M_INTMOVE(s1, REG_ITMP2);
2639 M_XOR(s2, REG_ITMP2);
2640 M_OR(REG_ITMP1, REG_ITMP2);
2641 emit_bne(cd, iptr->dst.block);
2644 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2646 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2647 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2649 emit_blt(cd, iptr->dst.block);
2650 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2651 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2654 emit_bult(cd, iptr->dst.block);
2657 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2659 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2660 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2662 emit_bgt(cd, iptr->dst.block);
2663 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2664 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2667 emit_bugt(cd, iptr->dst.block);
2670 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2672 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2673 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2675 emit_blt(cd, iptr->dst.block);
2676 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2677 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2680 emit_bule(cd, iptr->dst.block);
2683 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2685 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2686 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2688 emit_bgt(cd, iptr->dst.block);
2689 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2690 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2693 emit_buge(cd, iptr->dst.block);
2697 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2699 REPLACEMENT_POINT_RETURN(cd, iptr);
2700 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2701 M_INTMOVE(s1, REG_RESULT);
2702 goto nowperformreturn;
2704 case ICMD_LRETURN: /* ..., retvalue ==> ... */
2706 REPLACEMENT_POINT_RETURN(cd, iptr);
2707 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2708 M_LNGMOVE(s1, REG_RESULT_PACKED);
2709 goto nowperformreturn;
2711 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2713 REPLACEMENT_POINT_RETURN(cd, iptr);
2714 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2715 M_INTMOVE(s1, REG_RESULT);
2717 #ifdef ENABLE_VERIFIER
2718 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2719 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2720 iptr->sx.s23.s2.uc, 0);
2722 #endif /* ENABLE_VERIFIER */
2723 goto nowperformreturn;
2725 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2728 REPLACEMENT_POINT_RETURN(cd, iptr);
2729 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2730 goto nowperformreturn;
2732 case ICMD_RETURN: /* ... ==> ... */
2734 REPLACEMENT_POINT_RETURN(cd, iptr);
2740 p = cd->stackframesize;
2742 #if !defined(NDEBUG)
2743 emit_verbosecall_exit(jd);
2746 #if defined(ENABLE_THREADS)
2747 if (checksync && code_is_synchronized(code)) {
2748 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 8);
2750 /* we need to save the proper return value */
2751 switch (iptr->opc) {
2754 M_IST(REG_RESULT, REG_SP, rd->memuse * 8);
2758 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2762 emit_fstps_membase(cd, REG_SP, rd->memuse * 8);
2766 emit_fstpl_membase(cd, REG_SP, rd->memuse * 8);
2770 M_AST(REG_ITMP2, REG_SP, 0);
2771 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2774 /* and now restore the proper return value */
2775 switch (iptr->opc) {
2778 M_ILD(REG_RESULT, REG_SP, rd->memuse * 8);
2782 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2786 emit_flds_membase(cd, REG_SP, rd->memuse * 8);
2790 emit_fldl_membase(cd, REG_SP, rd->memuse * 8);
2796 /* restore saved registers */
2798 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2799 p--; M_ALD(rd->savintregs[i], REG_SP, p * 8);
2802 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2804 emit_fldl_membase(cd, REG_SP, p * 8);
2805 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2807 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2810 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2814 /* deallocate stack */
2816 if (cd->stackframesize)
2817 M_AADD_IMM(cd->stackframesize * 8, REG_SP);
2824 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2827 branch_target_t *table;
2829 table = iptr->dst.table;
2831 l = iptr->sx.s23.s2.tablelow;
2832 i = iptr->sx.s23.s3.tablehigh;
2834 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2835 M_INTMOVE(s1, REG_ITMP1);
2838 M_ISUB_IMM(l, REG_ITMP1);
2844 M_CMP_IMM(i - 1, REG_ITMP1);
2845 emit_bugt(cd, table[0].block);
2847 /* build jump table top down and use address of lowest entry */
2852 dseg_add_target(cd, table->block);
2856 /* length of dataseg after last dseg_addtarget is used
2859 M_MOV_IMM(0, REG_ITMP2);
2861 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
2867 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2870 lookup_target_t *lookup;
2872 lookup = iptr->dst.lookup;
2874 i = iptr->sx.s23.s2.lookupcount;
2876 MCODECHECK((i<<2)+8);
2877 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2880 M_CMP_IMM(lookup->value, s1);
2881 emit_beq(cd, lookup->target.block);
2885 emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
2890 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
2892 REPLACEMENT_POINT_FORGC_BUILTIN(cd, iptr);
2894 bte = iptr->sx.s23.s3.bte;
2898 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2900 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2901 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
2902 case ICMD_INVOKEINTERFACE:
2904 REPLACEMENT_POINT_INVOKE(cd, iptr);
2906 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2907 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
2911 lm = iptr->sx.s23.s3.fmiref->p.method;
2912 md = lm->parseddesc;
2916 s3 = md->paramcount;
2918 MCODECHECK((s3 << 1) + 64);
2920 /* copy arguments to registers or stack location */
2922 for (s3 = s3 - 1; s3 >= 0; s3--) {
2923 var = VAR(iptr->sx.s23.s2.args[s3]);
2925 /* Already Preallocated (ARGVAR) ? */
2926 if (var->flags & PREALLOC)
2928 if (IS_INT_LNG_TYPE(var->type)) {
2929 if (!md->params[s3].inmemory) {
2930 log_text("No integer argument registers available!");
2934 if (IS_2_WORD_TYPE(var->type)) {
2935 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
2936 M_LST(d, REG_SP, md->params[s3].regoff);
2938 d = emit_load(jd, iptr, var, REG_ITMP1);
2939 M_IST(d, REG_SP, md->params[s3].regoff);
2944 if (!md->params[s3].inmemory) {
2945 s1 = md->params[s3].regoff;
2946 d = emit_load(jd, iptr, var, s1);
2950 d = emit_load(jd, iptr, var, REG_FTMP1);
2951 if (IS_2_WORD_TYPE(var->type))
2952 M_DST(d, REG_SP, md->params[s3].regoff);
2954 M_FST(d, REG_SP, md->params[s3].regoff);
2959 switch (iptr->opc) {
2961 d = md->returntype.type;
2963 if (bte->stub == NULL) {
2964 M_MOV_IMM(bte->fp, REG_ITMP1);
2967 M_MOV_IMM(bte->stub, REG_ITMP1);
2972 case ICMD_INVOKESPECIAL:
2973 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
2974 emit_nullpointer_check(cd, iptr, REG_ITMP1);
2977 case ICMD_INVOKESTATIC:
2979 unresolved_method *um = iptr->sx.s23.s3.um;
2981 patcher_add_patch_ref(jd, PATCHER_invokestatic_special,
2985 d = md->returntype.type;
2988 disp = (ptrint) lm->stubroutine;
2989 d = lm->parseddesc->returntype.type;
2992 M_MOV_IMM(disp, REG_ITMP2);
2996 case ICMD_INVOKEVIRTUAL:
2997 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
2998 emit_nullpointer_check(cd, iptr, s1);
3001 unresolved_method *um = iptr->sx.s23.s3.um;
3003 patcher_add_patch_ref(jd, PATCHER_invokevirtual, um, 0);
3006 d = md->returntype.type;
3009 s1 = OFFSET(vftbl_t, table[0]) +
3010 sizeof(methodptr) * lm->vftblindex;
3011 d = md->returntype.type;
3014 M_ALD(REG_METHODPTR, REG_ITMP1,
3015 OFFSET(java_object_t, vftbl));
3016 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3020 case ICMD_INVOKEINTERFACE:
3021 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3022 emit_nullpointer_check(cd, iptr, s1);
3025 unresolved_method *um = iptr->sx.s23.s3.um;
3027 patcher_add_patch_ref(jd, PATCHER_invokeinterface, um, 0);
3031 d = md->returntype.type;
3034 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3035 sizeof(methodptr) * lm->class->index;
3037 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3039 d = md->returntype.type;
3042 M_ALD(REG_METHODPTR, REG_ITMP1,
3043 OFFSET(java_object_t, vftbl));
3044 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3045 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3050 /* store size of call code in replacement point */
3052 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3053 REPLACEMENT_POINT_FORGC_BUILTIN_RETURN(cd, iptr);
3055 /* d contains return type */
3057 if (d != TYPE_VOID) {
3058 #if defined(ENABLE_SSA)
3059 if ((ls == NULL) /* || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) */ ||
3060 (ls->lifetime[iptr->dst.varindex].type != UNUSED))
3061 /* a "living" stackslot */
3064 if (IS_INT_LNG_TYPE(d)) {
3065 if (IS_2_WORD_TYPE(d)) {
3066 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3067 M_LNGMOVE(REG_RESULT_PACKED, s1);
3070 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3071 M_INTMOVE(REG_RESULT, s1);
3075 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3077 emit_store_dst(jd, iptr, s1);
3083 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3085 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3086 /* object type cast-check */
3089 vftbl_t *supervftbl;
3092 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3098 super = iptr->sx.s23.s3.c.cls;
3099 superindex = super->index;
3100 supervftbl = super->vftbl;
3103 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3104 CODEGEN_CRITICAL_SECTION_NEW;
3106 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3108 /* if class is not resolved, check which code to call */
3110 if (super == NULL) {
3112 emit_label_beq(cd, BRANCH_LABEL_1);
3114 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3115 iptr->sx.s23.s3.c.ref, 0);
3117 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3118 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3119 emit_label_beq(cd, BRANCH_LABEL_2);
3122 /* interface checkcast code */
3124 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3125 if (super != NULL) {
3127 emit_label_beq(cd, BRANCH_LABEL_3);
3130 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3132 if (super == NULL) {
3133 patcher_add_patch_ref(jd, PATCHER_checkcast_interface,
3134 iptr->sx.s23.s3.c.ref,
3139 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3140 M_ISUB_IMM32(superindex, REG_ITMP3);
3141 /* XXX do we need this one? */
3143 emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);
3145 M_ALD32(REG_ITMP3, REG_ITMP2,
3146 OFFSET(vftbl_t, interfacetable[0]) -
3147 superindex * sizeof(methodptr*));
3149 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);
3152 emit_label_br(cd, BRANCH_LABEL_4);
3154 emit_label(cd, BRANCH_LABEL_3);
3157 /* class checkcast code */
3159 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3160 if (super == NULL) {
3161 emit_label(cd, BRANCH_LABEL_2);
3165 emit_label_beq(cd, BRANCH_LABEL_5);
3168 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3170 if (super == NULL) {
3171 patcher_add_patch_ref(jd, PATCHER_checkcast_class,
3172 iptr->sx.s23.s3.c.ref,
3176 M_MOV_IMM(supervftbl, REG_ITMP3);
3178 CODEGEN_CRITICAL_SECTION_START;
3180 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3182 /* if (s1 != REG_ITMP1) { */
3183 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3184 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3185 /* #if defined(ENABLE_THREADS) */
3186 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3188 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
3191 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3192 M_ISUB(REG_ITMP3, REG_ITMP2);
3193 M_MOV_IMM(supervftbl, REG_ITMP3);
3194 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3196 CODEGEN_CRITICAL_SECTION_END;
3200 M_CMP(REG_ITMP3, REG_ITMP2);
3201 emit_classcast_check(cd, iptr, BRANCH_ULE, REG_ITMP3, s1);
3204 emit_label(cd, BRANCH_LABEL_5);
3207 if (super == NULL) {
3208 emit_label(cd, BRANCH_LABEL_1);
3209 emit_label(cd, BRANCH_LABEL_4);
3212 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3215 /* array type cast-check */
3217 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3218 M_AST(s1, REG_SP, 0 * 4);
3220 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3221 patcher_add_patch_ref(jd, PATCHER_builtin_arraycheckcast,
3222 iptr->sx.s23.s3.c.ref, 0);
3225 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3226 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3229 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3231 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);
3233 d = codegen_reg_of_dst(jd, iptr, s1);
3237 emit_store_dst(jd, iptr, d);
3240 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3244 vftbl_t *supervftbl;
3247 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3253 super = iptr->sx.s23.s3.c.cls;
3254 superindex = super->index;
3255 supervftbl = super->vftbl;
3258 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3259 CODEGEN_CRITICAL_SECTION_NEW;
3261 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3262 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
3265 M_INTMOVE(s1, REG_ITMP1);
3271 /* if class is not resolved, check which code to call */
3273 if (super == NULL) {
3275 emit_label_beq(cd, BRANCH_LABEL_1);
3277 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3278 iptr->sx.s23.s3.c.ref, 0);
3280 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3281 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3282 emit_label_beq(cd, BRANCH_LABEL_2);
3285 /* interface instanceof code */
3287 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3288 if (super != NULL) {
3290 emit_label_beq(cd, BRANCH_LABEL_3);
3293 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3295 if (super == NULL) {
3296 patcher_add_patch_ref(jd, PATCHER_instanceof_interface,
3297 iptr->sx.s23.s3.c.ref, 0);
3301 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3302 M_ISUB_IMM32(superindex, REG_ITMP3);
3305 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3306 6 /* jcc */ + 5 /* mov_imm_reg */);
3309 M_ALD32(REG_ITMP1, REG_ITMP1,
3310 OFFSET(vftbl_t, interfacetable[0]) -
3311 superindex * sizeof(methodptr*));
3313 /* emit_setcc_reg(cd, CC_A, d); */
3314 /* emit_jcc(cd, CC_BE, 5); */
3319 emit_label_br(cd, BRANCH_LABEL_4);
3321 emit_label(cd, BRANCH_LABEL_3);
3324 /* class instanceof code */
3326 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3327 if (super == NULL) {
3328 emit_label(cd, BRANCH_LABEL_2);
3332 emit_label_beq(cd, BRANCH_LABEL_5);
3335 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3337 if (super == NULL) {
3338 patcher_add_patch_ref(jd, PATCHER_instanceof_class,
3339 iptr->sx.s23.s3.c.ref, 0);
3342 M_MOV_IMM(supervftbl, REG_ITMP2);
3344 CODEGEN_CRITICAL_SECTION_START;
3346 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3347 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3348 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3350 CODEGEN_CRITICAL_SECTION_END;
3352 M_ISUB(REG_ITMP2, REG_ITMP1);
3353 M_CLR(d); /* may be REG_ITMP2 */
3354 M_CMP(REG_ITMP3, REG_ITMP1);
3359 emit_label(cd, BRANCH_LABEL_5);
3362 if (super == NULL) {
3363 emit_label(cd, BRANCH_LABEL_1);
3364 emit_label(cd, BRANCH_LABEL_4);
3367 emit_store_dst(jd, iptr, d);
3371 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3373 /* check for negative sizes and copy sizes to stack if necessary */
3375 MCODECHECK((iptr->s1.argcount << 1) + 64);
3377 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3378 /* copy SAVEDVAR sizes to stack */
3379 var = VAR(iptr->sx.s23.s2.args[s1]);
3381 /* Already Preallocated? */
3382 if (!(var->flags & PREALLOC)) {
3383 if (var->flags & INMEMORY) {
3384 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff);
3385 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3388 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3392 /* is a patcher function set? */
3394 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3395 patcher_add_patch_ref(jd, PATCHER_builtin_multianewarray,
3396 iptr->sx.s23.s3.c.ref, 0);
3402 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3404 /* a0 = dimension count */
3406 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3408 /* a1 = arraydescriptor */
3410 M_IST_IMM(disp, REG_SP, 1 * 4);
3412 /* a2 = pointer to dimensions = stack pointer */
3414 M_MOV(REG_SP, REG_ITMP1);
3415 M_AADD_IMM(3 * 4, REG_ITMP1);
3416 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3418 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3421 /* check for exception before result assignment */
3423 emit_exception_check(cd, iptr);
3425 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3426 M_INTMOVE(REG_RESULT, s1);
3427 emit_store_dst(jd, iptr, s1);
3431 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3436 } /* for instruction */
3440 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3443 #if defined(ENABLE_SSA)
3446 /* by edge splitting, in Blocks with phi moves there can only */
3447 /* be a goto as last command, no other Jump/Branch Command */
3449 if (!last_cmd_was_goto)
3450 codegen_emit_phi_moves(jd, bptr);
3455 /* At the end of a basic block we may have to append some nops,
3456 because the patcher stub calling code might be longer than the
3457 actual instruction. So codepatching does not change the
3458 following block unintentionally. */
3460 if (cd->mcodeptr < cd->lastmcodeptr) {
3461 while (cd->mcodeptr < cd->lastmcodeptr) {
3466 } /* if (bptr -> flags >= BBREACHED) */
3467 } /* for basic block */
3469 dseg_createlinenumbertable(cd);
3471 /* generate stubs */
3473 emit_patcher_traps(jd);
3475 /* everything's ok */
3481 /* codegen_emit_stub_native ****************************************************
3483 Emits a stub routine which calls a native method.
3485 *******************************************************************************/
/* Emit the machine-code stub that bridges from JIT-compiled code into a
   native function: build the native stack frame and stackframeinfo, copy
   the Java arguments into the native calling-convention locations, call
   the target, then tear the frame down and branch to the native-exception
   handler if an exception is pending.

   jd         - JIT data for the method being wrapped (m, cd, code, md
                are derived from it in the elided prologue)
   nmd        - parsed descriptor of the NATIVE callee's signature
   f          - address of the native function; may be unresolved, in
                which case a PATCHER_resolve_native_function ref is added
   skipparams - number of extra leading native parameters (JNIEnv* and,
                for static methods, jclass) prepended before the Java
                arguments when mapping md->params[i] to nmd->params[j] */
3487 void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f, int skipparams)
3493 int i, j; /* count variables */
3496 #if defined(ENABLE_GC_CACAO)
3500 /* get required compiler data */
3506 /* set some variables */
3510 /* calculate stackframe size */
/* Frame (in 4-byte words): stackframeinfo + localref_table + 4 outgoing
   argument slots for codegen_start_native_call, plus elided terms (native
   argument area, return-value spill slots). */
3512 cd->stackframesize =
3513 sizeof(stackframeinfo_t) / SIZEOF_VOID_P +
3514 sizeof(localref_table) / SIZEOF_VOID_P +
3515 4 + /* 4 arguments (start_native_call) */
3518 /* keep stack 16-byte aligned */
3520 ALIGN_ODD(cd->stackframesize); /* XXX this is wrong, +4 is missing */
3522 /* create method header */
/* Same dseg layout as regular methods; sync/leaf/save counts are all 0
   because a native stub neither synchronizes here nor saves callee regs. */
3524 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3525 (void) dseg_add_unique_s4(cd, cd->stackframesize * 8); /* FrameSize */
3526 (void) dseg_add_unique_s4(cd, 0); /* IsSync */
3527 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3528 (void) dseg_add_unique_s4(cd, 0); /* IntSave */
3529 (void) dseg_add_unique_s4(cd, 0); /* FltSave */
3530 (void) dseg_addlinenumbertablesize(cd);
3531 (void) dseg_add_unique_s4(cd, 0); /* ExTableSize */
3533 #if defined(ENABLE_PROFILING)
3534 /* generate native method profiling code */
3536 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
3537 /* count frequency */
3539 M_MOV_IMM(code, REG_ITMP1);
3540 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
3544 /* calculate stackframe size for native function */
3546 M_ASUB_IMM(cd->stackframesize * 8, REG_SP);
3548 /* get function address (this must happen before the stackframeinfo) */
3550 funcdisp = dseg_add_functionptr(cd, f);
3553 patcher_add_patch_ref(jd, PATCHER_resolve_native_function, m, funcdisp);
3555 /* Mark the whole fpu stack as free for native functions (only for saved */
3556 /* register count == 0). */
3558 emit_ffree_reg(cd, 0);
3559 emit_ffree_reg(cd, 1);
3560 emit_ffree_reg(cd, 2);
3561 emit_ffree_reg(cd, 3);
3562 emit_ffree_reg(cd, 4);
3563 emit_ffree_reg(cd, 5);
3564 emit_ffree_reg(cd, 6);
3565 emit_ffree_reg(cd, 7);
3567 #if defined(ENABLE_GC_CACAO)
3568 /* remember callee saved int registers in stackframeinfo (GC may need to */
3569 /* recover them during a collection). */
3571 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3572 OFFSET(stackframeinfo_t, intregs);
3574 for (i = 0; i < INT_SAV_CNT; i++)
3575 M_AST(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3578 /* prepare data structures for native function call */
/* Outgoing args for codegen_start_native_call: slot 0 = current SP,
   slot 1 = 0 (NOTE(review): presumably a null pv/pc placeholder filled
   in by the callee or elided code — confirm against codegen-common). */
3580 M_MOV(REG_SP, REG_ITMP1);
3581 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3582 M_IST_IMM(0, REG_SP, 1 * 4);
3585 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
3588 /* remember class argument */
/* codegen_start_native_call's return value (REG_RESULT) is kept in
   REG_ITMP3; for static methods it is stored below as the second native
   argument (the JNI jclass) — presumably the method's class pointer. */
3590 if (m->flags & ACC_STATIC)
3591 M_MOV(REG_RESULT, REG_ITMP3);
3593 /* Copy or spill arguments to new locations. */
/* Walk Java params backwards; j indexes the matching native param after
   the skipparams (env/class) offset. s1 reads from the CALLER's frame:
   regoff + our whole frame + 4 for the return address. */
3595 for (i = md->paramcount - 1, j = i + skipparams; i >= 0; i--, j--) {
3596 if (!md->params[i].inmemory)
3599 s1 = md->params[i].regoff + cd->stackframesize * 8 + 4;
3600 s2 = nmd->params[j].regoff;
3602 /* float/double in memory can be copied like int/longs */
3604 switch (md->paramtypes[i].type) {
3608 M_ILD(REG_ITMP1, REG_SP, s1);
3609 M_IST(REG_ITMP1, REG_SP, s2);
3613 M_LLD(REG_ITMP12_PACKED, REG_SP, s1);
3614 M_LST(REG_ITMP12_PACKED, REG_SP, s2);
3619 /* Handle native Java methods. */
3621 if (m->flags & ACC_NATIVE) {
3622 /* if function is static, put class into second argument */
3624 if (m->flags & ACC_STATIC)
3625 M_AST(REG_ITMP3, REG_SP, 1 * 4);
3627 /* put env into first argument */
3629 M_AST_IMM(_Jv_env, REG_SP, 0 * 4);
3632 /* call the native function */
/* NOTE(review): the 0 immediate looks like a placeholder that is later
   fixed up (procedure vector / dseg base) so the dseg offset funcdisp
   resolves to the function-pointer slot added above — confirm. */
3634 emit_mov_imm_reg(cd, 0, REG_ITMP3);
3636 M_ALD(REG_ITMP1, REG_ITMP3, funcdisp);
3639 /* save return value */
/* Spill the native return value across the codegen_finish_native_call
   runtime call below; reloaded from the same slot afterwards. */
3641 switch (md->returntype.type) {
3644 M_IST(REG_RESULT, REG_SP, 1 * 8);
3647 M_LST(REG_RESULT_PACKED, REG_SP, 1 * 8);
3650 emit_fsts_membase(cd, REG_SP, 1 * 8);
3653 emit_fstl_membase(cd, REG_SP, 1 * 8);
3659 /* remove native stackframe info */
3661 M_MOV(REG_SP, REG_ITMP1);
3662 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3663 M_IST_IMM(0, REG_SP, 1 * 4);
3666 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
/* Keep codegen_finish_native_call's result (used below as the pending
   exception pointer) out of REG_ITMP3, which aliases REG_RESULT2. */
3668 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
3670 /* restore return value */
3672 switch (md->returntype.type) {
3675 M_ILD(REG_RESULT, REG_SP, 1 * 8);
3678 M_LLD(REG_RESULT_PACKED, REG_SP, 1 * 8);
3681 emit_flds_membase(cd, REG_SP, 1 * 8);
3684 emit_fldl_membase(cd, REG_SP, 1 * 8);
3690 #if defined(ENABLE_GC_CACAO)
3691 /* restore callee saved int registers from stackframeinfo (GC might have */
3692 /* modified them during a collection). */
3694 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3695 OFFSET(stackframeinfo_t, intregs);
3697 for (i = 0; i < INT_SAV_CNT; i++)
3698 M_ALD(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3701 M_AADD_IMM(cd->stackframesize * 8, REG_SP);
3703 /* check for exception */
3710 /* handle exception */
/* Exception path: pass the exception object (from REG_ITMP2) in the
   XPTR register, and compute XPC from the return address on the stack. */
3712 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
3713 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
/* Back XPC up by 2 bytes — presumably the size of the indirect call
   instruction, so XPC points at the call site itself; confirm against
   the i386 emitter. */
3714 M_ASUB_IMM(2, REG_ITMP2_XPC);
3716 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
3719 /* generate patcher stubs */
3721 emit_patcher_traps(jd);
3726 * These are local overrides for various environment variables in Emacs.
3727 * Please do not remove this and leave it at the end of the file, where
3728 * Emacs will automagically detect them.
3729 * ---------------------------------------------------------------------
3732 * indent-tabs-mode: t
3736 * vim:noexpandtab:sw=4:ts=4: