1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
36 #include "vm/jit/i386/md-abi.h"
38 #include "vm/jit/i386/codegen.h"
39 #include "vm/jit/i386/emit.h"
41 #include "mm/memory.h"
42 #include "native/jni.h"
43 #include "native/localref.h"
44 #include "native/native.h"
46 #include "threads/lock-common.h"
48 #include "vm/builtin.h"
49 #include "vm/exceptions.h"
50 #include "vm/global.h"
51 #include "vm/stringlocal.h"
54 #include "vm/jit/asmpart.h"
55 #include "vm/jit/codegen-common.h"
56 #include "vm/jit/dseg.h"
57 #include "vm/jit/emit-common.h"
58 #include "vm/jit/jit.h"
59 #include "vm/jit/parse.h"
60 #include "vm/jit/patcher.h"
61 #include "vm/jit/reg.h"
62 #include "vm/jit/replace.h"
63 #include "vm/jit/stacktrace.h"
65 #if defined(ENABLE_SSA)
66 # include "vm/jit/optimizing/lsra.h"
67 # include "vm/jit/optimizing/ssa.h"
68 #elif defined(ENABLE_LSRA)
69 # include "vm/jit/allocator/lsra.h"
72 #include "vmcore/loader.h"
73 #include "vmcore/options.h"
74 #include "vmcore/utf8.h"
77 /* codegen_emit ****************************************************************
79 Generates machine code.
81 *******************************************************************************/
83 bool codegen_emit(jitdata *jd)
89 s4 len, s1, s2, s3, d, disp;
95 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
96 builtintable_entry *bte;
102 #if defined(ENABLE_SSA)
104 bool last_cmd_was_goto;
106 last_cmd_was_goto = false;
110 /* get required compiler data */
117 /* prevent compiler warnings */
128 s4 savedregs_num = 0;
131 /* space to save used callee saved registers */
133 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
134 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
136 cd->stackframesize = rd->memuse + savedregs_num;
139 #if defined(ENABLE_THREADS)
140 /* space to save argument of monitor_enter */
142 if (checksync && (m->flags & ACC_SYNCHRONIZED))
143 cd->stackframesize++;
146 /* create method header */
148 /* Keep stack of non-leaf functions 16-byte aligned. */
150 if (!jd->isleafmethod) {
151 ALIGN_ODD(cd->stackframesize); /* XXX this is wrong, +4 is missing */
154 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
155 (void) dseg_add_unique_s4(cd, cd->stackframesize * 8); /* FrameSize */
157 #if defined(ENABLE_THREADS)
158 /* IsSync contains the offset relative to the stack pointer for the
159 argument of monitor_exit used in the exception handler. Since the
160 offset could be zero and give a wrong meaning of the flag it is
164 if (checksync && (m->flags & ACC_SYNCHRONIZED))
165 (void) dseg_add_unique_s4(cd, (rd->memuse + 1) * 8); /* IsSync */
168 (void) dseg_add_unique_s4(cd, 0); /* IsSync */
170 (void) dseg_add_unique_s4(cd, jd->isleafmethod); /* IsLeaf */
171 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
172 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
174 /* adds a reference for the length of the line number counter. We don't
175 know the size yet, since we evaluate the information during code
176 generation, to save one additional iteration over the whole
177 instructions. During code optimization the position could have changed
178 to the information gotten from the class file */
179 (void) dseg_addlinenumbertablesize(cd);
181 (void) dseg_add_unique_s4(cd, jd->exceptiontablelength); /* ExTableSize */
183 /* create exception table */
185 for (ex = jd->exceptiontable; ex != NULL; ex = ex->down) {
186 dseg_add_target(cd, ex->start);
187 dseg_add_target(cd, ex->end);
188 dseg_add_target(cd, ex->handler);
189 (void) dseg_add_unique_address(cd, ex->catchtype.any);
192 #if defined(ENABLE_PROFILING)
193 /* generate method profiling code */
195 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
196 /* count frequency */
198 M_MOV_IMM(code, REG_ITMP3);
199 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
203 /* create stack frame (if necessary) */
205 if (cd->stackframesize)
206 M_ASUB_IMM(cd->stackframesize * 8, REG_SP);
208 /* save return address and used callee saved registers */
210 p = cd->stackframesize;
211 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
212 p--; M_AST(rd->savintregs[i], REG_SP, p * 8);
214 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
215 p--; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 8);
218 /* take arguments out of register or stack frame */
223 for (p = 0, l = 0; p < md->paramcount; p++) {
224 t = md->paramtypes[p].type;
226 varindex = jd->local_map[l * 5 + t];
227 #if defined(ENABLE_SSA)
229 if (varindex != UNUSED)
230 varindex = ls->var_0[varindex];
231 if ((varindex != UNUSED) && (ls->lifetime[varindex].type == UNUSED))
236 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
239 if (varindex == UNUSED)
243 s1 = md->params[p].regoff;
246 if (IS_INT_LNG_TYPE(t)) { /* integer args */
247 if (!md->params[p].inmemory) { /* register arguments */
248 log_text("integer register argument");
250 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
251 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
253 else { /* reg arg -> spilled */
254 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
258 if (!(var->flags & INMEMORY)) {
259 M_ILD(d, REG_SP, cd->stackframesize * 8 + 4 + s1);
262 if (!IS_2_WORD_TYPE(t)) {
263 #if defined(ENABLE_SSA)
264 /* no copy avoiding by now possible with SSA */
266 emit_mov_membase_reg( /* + 4 for return address */
267 cd, REG_SP, cd->stackframesize * 8 + s1 + 4,
269 emit_mov_reg_membase(
270 cd, REG_ITMP1, REG_SP, var->vv.regoff);
273 #endif /*defined(ENABLE_SSA)*/
274 /* reuse stackslot */
275 var->vv.regoff = cd->stackframesize * 8 + 4 + s1;
279 #if defined(ENABLE_SSA)
280 /* no copy avoiding by now possible with SSA */
282 emit_mov_membase_reg( /* + 4 for return address */
283 cd, REG_SP, cd->stackframesize * 8 + s1 + 4,
285 emit_mov_reg_membase(
286 cd, REG_ITMP1, REG_SP, var->vv.regoff);
287 emit_mov_membase_reg( /* + 4 for return address */
288 cd, REG_SP, cd->stackframesize * 8 + s1 + 4 + 4,
290 emit_mov_reg_membase(
291 cd, REG_ITMP1, REG_SP, var->vv.regoff + 4);
294 #endif /*defined(ENABLE_SSA)*/
295 /* reuse stackslot */
296 var->vv.regoff = cd->stackframesize * 8 + 4 + s1;
301 else { /* floating args */
302 if (!md->params[p].inmemory) { /* register arguments */
303 log_text("There are no float argument registers!");
305 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
306 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
307 } else { /* reg arg -> spilled */
308 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 8 */
312 else { /* stack arguments */
313 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
316 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
318 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
323 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
325 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
328 } else { /* stack-arg -> spilled */
329 #if defined(ENABLE_SSA)
330 /* no copy avoiding by now possible with SSA */
332 emit_mov_membase_reg(
333 cd, REG_SP, cd->stackframesize * 8 + s1 + 4, REG_ITMP1);
334 emit_mov_reg_membase(
335 cd, REG_ITMP1, REG_SP, var->vv.regoff);
338 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
339 emit_fstps_membase(cd, REG_SP, var->vv.regoff);
343 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
344 emit_fstpl_membase(cd, REG_SP, var->vv.regoff);
348 #endif /*defined(ENABLE_SSA)*/
349 /* reuse stackslot */
350 var->vv.regoff = cd->stackframesize * 8 + 4 + s1;
356 /* call monitorenter function */
358 #if defined(ENABLE_THREADS)
359 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
362 if (m->flags & ACC_STATIC) {
363 M_MOV_IMM(&m->class->object.header, REG_ITMP1);
366 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 8 + 4);
369 M_ALD_MEM(REG_ITMP1, EXCEPTION_HARDWARE_NULLPOINTER);
372 M_AST(REG_ITMP1, REG_SP, s1 * 8);
373 M_AST(REG_ITMP1, REG_SP, 0 * 4);
374 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
380 emit_verbosecall_enter(jd);
385 #if defined(ENABLE_SSA)
386 /* with SSA the Header is Basic Block 0 - insert phi Moves if necessary */
388 codegen_emit_phi_moves(jd, ls->basicblocks[0]);
391 /* end of header generation */
393 /* create replacement points */
395 REPLACEMENT_POINTS_INIT(cd, jd);
397 /* walk through all basic blocks */
399 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
401 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
403 if (bptr->flags >= BBREACHED) {
404 /* branch resolving */
406 codegen_resolve_branchrefs(cd, bptr);
408 /* handle replacement points */
410 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
412 #if defined(ENABLE_REPLACEMENT)
413 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
414 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
416 disp = (s4) &(m->hitcountdown);
417 M_ISUB_IMM_MEMABS(1, disp);
423 /* copy interface registers to their destination */
428 #if defined(ENABLE_PROFILING)
429 /* generate basic block profiling code */
431 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
432 /* count frequency */
434 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
435 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
439 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
440 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
443 # if defined(ENABLE_SSA)
445 last_cmd_was_goto = false;
449 var = VAR(bptr->invars[len]);
450 if (bptr->type != BBTYPE_STD) {
451 if (!IS_2_WORD_TYPE(var->type)) {
452 if (bptr->type == BBTYPE_EXH) {
453 d = codegen_reg_of_var(0, var, REG_ITMP1);
454 M_INTMOVE(REG_ITMP1, d);
455 emit_store(jd, NULL, var, d);
459 log_text("copy interface registers(EXH, SBR): longs \
460 have to be in memory (begin 1)");
468 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
472 var = VAR(bptr->invars[len]);
473 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
474 if (!IS_2_WORD_TYPE(var->type)) {
475 if (bptr->type == BBTYPE_EXH) {
476 d = codegen_reg_of_var(0, var, REG_ITMP1);
477 M_INTMOVE(REG_ITMP1, d);
478 emit_store(jd, NULL, var, d);
482 log_text("copy interface registers: longs have to be in \
489 assert((var->flags & INOUT));
494 /* walk through all instructions */
499 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
500 if (iptr->line != currentline) {
501 dseg_addlinenumber(cd, iptr->line);
502 currentline = iptr->line;
505 MCODECHECK(1024); /* 1kB should be enough */
508 case ICMD_NOP: /* ... ==> ... */
509 case ICMD_POP: /* ..., value ==> ... */
510 case ICMD_POP2: /* ..., value, value ==> ... */
513 case ICMD_INLINE_START:
515 REPLACEMENT_POINT_INLINE_START(cd, iptr);
518 case ICMD_INLINE_BODY:
520 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
521 dseg_addlinenumber_inline_start(cd, iptr);
522 dseg_addlinenumber(cd, iptr->line);
525 case ICMD_INLINE_END:
527 dseg_addlinenumber_inline_end(cd, iptr);
528 dseg_addlinenumber(cd, iptr->line);
531 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
533 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
534 emit_nullpointer_check(cd, iptr, s1);
537 /* constant operations ************************************************/
539 case ICMD_ICONST: /* ... ==> ..., constant */
541 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
542 ICONST(d, iptr->sx.val.i);
543 emit_store_dst(jd, iptr, d);
546 case ICMD_LCONST: /* ... ==> ..., constant */
548 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
549 LCONST(d, iptr->sx.val.l);
550 emit_store_dst(jd, iptr, d);
553 case ICMD_FCONST: /* ... ==> ..., constant */
555 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
556 if (iptr->sx.val.f == 0.0) {
560 if (iptr->sx.val.i == 0x80000000) {
564 } else if (iptr->sx.val.f == 1.0) {
567 } else if (iptr->sx.val.f == 2.0) {
573 disp = dseg_add_float(cd, iptr->sx.val.f);
574 emit_mov_imm_reg(cd, 0, REG_ITMP1);
576 emit_flds_membase(cd, REG_ITMP1, disp);
578 emit_store_dst(jd, iptr, d);
581 case ICMD_DCONST: /* ... ==> ..., constant */
583 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
584 if (iptr->sx.val.d == 0.0) {
588 if (iptr->sx.val.l == 0x8000000000000000LL) {
592 } else if (iptr->sx.val.d == 1.0) {
595 } else if (iptr->sx.val.d == 2.0) {
601 disp = dseg_add_double(cd, iptr->sx.val.d);
602 emit_mov_imm_reg(cd, 0, REG_ITMP1);
604 emit_fldl_membase(cd, REG_ITMP1, disp);
606 emit_store_dst(jd, iptr, d);
609 case ICMD_ACONST: /* ... ==> ..., constant */
611 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
613 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
614 codegen_addpatchref(cd, PATCHER_aconst,
615 iptr->sx.val.c.ref, 0);
620 if (iptr->sx.val.anyptr == NULL)
623 M_MOV_IMM(iptr->sx.val.anyptr, d);
625 emit_store_dst(jd, iptr, d);
629 /* load/store/copy/move operations ************************************/
647 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
652 /* integer operations *************************************************/
654 case ICMD_INEG: /* ..., value ==> ..., - value */
656 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
657 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
660 emit_store_dst(jd, iptr, d);
663 case ICMD_LNEG: /* ..., value ==> ..., - value */
665 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
666 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
668 M_NEG(GET_LOW_REG(d));
669 M_IADDC_IMM(0, GET_HIGH_REG(d));
670 M_NEG(GET_HIGH_REG(d));
671 emit_store_dst(jd, iptr, d);
674 case ICMD_I2L: /* ..., value ==> ..., value */
676 s1 = emit_load_s1(jd, iptr, EAX);
677 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
680 M_LNGMOVE(EAX_EDX_PACKED, d);
681 emit_store_dst(jd, iptr, d);
684 case ICMD_L2I: /* ..., value ==> ..., value */
686 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
687 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
689 emit_store_dst(jd, iptr, d);
692 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
694 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
695 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
699 emit_store_dst(jd, iptr, d);
702 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
704 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
705 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
707 emit_store_dst(jd, iptr, d);
710 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
712 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
713 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
715 emit_store_dst(jd, iptr, d);
719 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
721 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
722 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
723 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
730 emit_store_dst(jd, iptr, d);
734 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
735 /* sx.val.i = constant */
737 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
738 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
740 /* `inc reg' is slower on p4's (regarding to ia32
741 optimization reference manual and benchmarks) and as
745 M_IADD_IMM(iptr->sx.val.i, d);
746 emit_store_dst(jd, iptr, d);
749 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
751 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
752 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
753 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
754 M_INTMOVE(s1, GET_LOW_REG(d));
755 M_IADD(s2, GET_LOW_REG(d));
756 /* don't use REG_ITMP1 */
757 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
758 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
759 M_INTMOVE(s1, GET_HIGH_REG(d));
760 M_IADDC(s2, GET_HIGH_REG(d));
761 emit_store_dst(jd, iptr, d);
764 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
765 /* sx.val.l = constant */
767 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
768 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
770 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
771 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
772 emit_store_dst(jd, iptr, d);
775 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
777 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
778 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
779 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
781 M_INTMOVE(s1, REG_ITMP1);
782 M_ISUB(s2, REG_ITMP1);
783 M_INTMOVE(REG_ITMP1, d);
789 emit_store_dst(jd, iptr, d);
792 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
793 /* sx.val.i = constant */
795 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
796 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
798 M_ISUB_IMM(iptr->sx.val.i, d);
799 emit_store_dst(jd, iptr, d);
802 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
804 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
805 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
806 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
807 if (s2 == GET_LOW_REG(d)) {
808 M_INTMOVE(s1, REG_ITMP1);
809 M_ISUB(s2, REG_ITMP1);
810 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
813 M_INTMOVE(s1, GET_LOW_REG(d));
814 M_ISUB(s2, GET_LOW_REG(d));
816 /* don't use REG_ITMP1 */
817 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
818 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
819 if (s2 == GET_HIGH_REG(d)) {
820 M_INTMOVE(s1, REG_ITMP2);
821 M_ISUBB(s2, REG_ITMP2);
822 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
825 M_INTMOVE(s1, GET_HIGH_REG(d));
826 M_ISUBB(s2, GET_HIGH_REG(d));
828 emit_store_dst(jd, iptr, d);
831 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
832 /* sx.val.l = constant */
834 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
835 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
837 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
838 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
839 emit_store_dst(jd, iptr, d);
842 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
844 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
845 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
846 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
853 emit_store_dst(jd, iptr, d);
856 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
857 /* sx.val.i = constant */
859 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
860 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
861 M_IMUL_IMM(s1, iptr->sx.val.i, d);
862 emit_store_dst(jd, iptr, d);
865 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
867 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
868 s2 = emit_load_s2_low(jd, iptr, EDX);
869 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
871 M_INTMOVE(s1, REG_ITMP2);
872 M_IMUL(s2, REG_ITMP2);
874 s1 = emit_load_s1_low(jd, iptr, EAX);
875 s2 = emit_load_s2_high(jd, iptr, EDX);
878 M_IADD(EDX, REG_ITMP2);
880 s1 = emit_load_s1_low(jd, iptr, EAX);
881 s2 = emit_load_s2_low(jd, iptr, EDX);
884 M_INTMOVE(EAX, GET_LOW_REG(d));
885 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
887 emit_store_dst(jd, iptr, d);
890 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
891 /* sx.val.l = constant */
893 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
894 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
895 ICONST(EAX, iptr->sx.val.l);
897 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
898 M_IADD(REG_ITMP2, EDX);
899 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
900 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
901 M_IADD(REG_ITMP2, EDX);
902 M_LNGMOVE(EAX_EDX_PACKED, d);
903 emit_store_dst(jd, iptr, d);
906 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
908 s1 = emit_load_s1(jd, iptr, EAX);
909 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
910 d = codegen_reg_of_dst(jd, iptr, EAX);
911 emit_arithmetic_check(cd, iptr, s2);
913 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
915 /* check as described in jvm spec */
917 M_CMP_IMM(0x80000000, EAX);
924 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
925 emit_store_dst(jd, iptr, d);
928 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
930 s1 = emit_load_s1(jd, iptr, EAX);
931 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
932 d = codegen_reg_of_dst(jd, iptr, EDX);
933 emit_arithmetic_check(cd, iptr, s2);
935 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
937 /* check as described in jvm spec */
939 M_CMP_IMM(0x80000000, EAX);
947 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
948 emit_store_dst(jd, iptr, d);
951 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
952 /* sx.val.i = constant */
954 /* TODO: optimize for `/ 2' */
955 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
956 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
960 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
961 M_SRA_IMM(iptr->sx.val.i, d);
962 emit_store_dst(jd, iptr, d);
965 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
966 /* sx.val.i = constant */
968 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
969 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
971 M_MOV(s1, REG_ITMP1);
975 M_AND_IMM(iptr->sx.val.i, d);
977 M_BGE(2 + 2 + 6 + 2);
978 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
980 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
982 emit_store_dst(jd, iptr, d);
985 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
986 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
988 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
989 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
991 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
992 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
993 /* XXX could be optimized */
994 emit_arithmetic_check(cd, iptr, REG_ITMP3);
996 bte = iptr->sx.s23.s3.bte;
999 M_LST(s2, REG_SP, 2 * 4);
1001 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1002 M_LST(s1, REG_SP, 0 * 4);
1004 M_MOV_IMM(bte->fp, REG_ITMP3);
1006 emit_store_dst(jd, iptr, d);
1009 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1010 /* sx.val.i = constant */
1012 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1013 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1015 M_TEST(GET_HIGH_REG(d));
1017 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1018 M_IADDC_IMM(0, GET_HIGH_REG(d));
1019 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1020 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1021 emit_store_dst(jd, iptr, d);
1025 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1026 /* sx.val.l = constant */
1028 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1029 if (iptr->dst.var->flags & INMEMORY) {
1030 if (iptr->s1.var->flags & INMEMORY) {
1031 /* Alpha algorithm */
1033 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8);
1035 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1041 /* TODO: hmm, don't know if this is always correct */
1043 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1045 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1051 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1052 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1054 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1055 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1056 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1057 emit_jcc(cd, CC_GE, disp);
1059 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1060 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1062 emit_neg_reg(cd, REG_ITMP1);
1063 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1064 emit_neg_reg(cd, REG_ITMP2);
1066 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1067 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1069 emit_neg_reg(cd, REG_ITMP1);
1070 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1071 emit_neg_reg(cd, REG_ITMP2);
1073 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 8);
1074 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 8 + 4);
1078 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1079 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1081 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1082 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1083 M_TEST(GET_LOW_REG(s1));
1089 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1091 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1092 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1093 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1094 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1097 emit_store_dst(jd, iptr, d);
1100 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1101 /* sx.val.i = constant */
1103 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1104 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1106 M_SLL_IMM(iptr->sx.val.i, d);
1107 emit_store_dst(jd, iptr, d);
1110 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1112 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1113 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1114 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1115 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1118 emit_store_dst(jd, iptr, d);
1121 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1122 /* sx.val.i = constant */
1124 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1125 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1127 M_SRA_IMM(iptr->sx.val.i, d);
1128 emit_store_dst(jd, iptr, d);
1131 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1133 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1134 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1135 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1136 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1139 emit_store_dst(jd, iptr, d);
1142 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1143 /* sx.val.i = constant */
1145 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1146 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1148 M_SRL_IMM(iptr->sx.val.i, d);
1149 emit_store_dst(jd, iptr, d);
1152 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1154 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1155 s2 = emit_load_s2(jd, iptr, ECX);
1156 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1159 M_TEST_IMM(32, ECX);
1161 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1162 M_CLR(GET_LOW_REG(d));
1163 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1164 M_SLL(GET_LOW_REG(d));
1165 emit_store_dst(jd, iptr, d);
1168 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1169 /* sx.val.i = constant */
1171 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1172 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1174 if (iptr->sx.val.i & 0x20) {
1175 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1176 M_CLR(GET_LOW_REG(d));
1177 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1181 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1183 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1185 emit_store_dst(jd, iptr, d);
1188 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1190 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1191 s2 = emit_load_s2(jd, iptr, ECX);
1192 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1195 M_TEST_IMM(32, ECX);
1197 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1198 M_SRA_IMM(31, GET_HIGH_REG(d));
1199 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1200 M_SRA(GET_HIGH_REG(d));
1201 emit_store_dst(jd, iptr, d);
1204 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1205 /* sx.val.i = constant */
1207 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1208 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1210 if (iptr->sx.val.i & 0x20) {
1211 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1212 M_SRA_IMM(31, GET_HIGH_REG(d));
1213 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1217 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1219 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1221 emit_store_dst(jd, iptr, d);
1224 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1226 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1227 s2 = emit_load_s2(jd, iptr, ECX);
1228 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1231 M_TEST_IMM(32, ECX);
1233 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1234 M_CLR(GET_HIGH_REG(d));
1235 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1236 M_SRL(GET_HIGH_REG(d));
1237 emit_store_dst(jd, iptr, d);
1240 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1241 /* sx.val.l = constant */
1243 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1244 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1246 if (iptr->sx.val.i & 0x20) {
1247 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1248 M_CLR(GET_HIGH_REG(d));
1249 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1253 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1255 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1257 emit_store_dst(jd, iptr, d);
1260 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1262 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1263 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1264 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1271 emit_store_dst(jd, iptr, d);
1274 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1275 /* sx.val.i = constant */
1277 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1278 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1280 M_AND_IMM(iptr->sx.val.i, d);
1281 emit_store_dst(jd, iptr, d);
1284 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1286 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1287 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1288 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1289 if (s2 == GET_LOW_REG(d))
1290 M_AND(s1, GET_LOW_REG(d));
1292 M_INTMOVE(s1, GET_LOW_REG(d));
1293 M_AND(s2, GET_LOW_REG(d));
1295 /* REG_ITMP1 probably contains low 32-bit of destination */
1296 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1297 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1298 if (s2 == GET_HIGH_REG(d))
1299 M_AND(s1, GET_HIGH_REG(d));
1301 M_INTMOVE(s1, GET_HIGH_REG(d));
1302 M_AND(s2, GET_HIGH_REG(d));
1304 emit_store_dst(jd, iptr, d);
1307 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1308 /* sx.val.l = constant */
1310 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1311 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1313 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1314 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1315 emit_store_dst(jd, iptr, d);
1318 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1320 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1321 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1322 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1329 emit_store_dst(jd, iptr, d);
1332 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1333 /* sx.val.i = constant */
1335 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1336 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1338 M_OR_IMM(iptr->sx.val.i, d);
1339 emit_store_dst(jd, iptr, d);
/* 64-bit bitwise OR: processed half by half.  If s2 already sits in
   the destination half-register the operands are ORed in place,
   otherwise s1 is moved into the destination first. */
1342 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1344 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1345 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1346 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1347 if (s2 == GET_LOW_REG(d))
1348 M_OR(s1, GET_LOW_REG(d));
1350 M_INTMOVE(s1, GET_LOW_REG(d));
1351 M_OR(s2, GET_LOW_REG(d));
1353 /* REG_ITMP1 probably contains low 32-bit of destination */
1354 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1355 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1356 if (s2 == GET_HIGH_REG(d))
1357 M_OR(s1, GET_HIGH_REG(d));
1359 M_INTMOVE(s1, GET_HIGH_REG(d));
1360 M_OR(s2, GET_HIGH_REG(d));
1362 emit_store_dst(jd, iptr, d);
/* 64-bit OR with immediate: split across the low/high halves. */
1365 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1366 /* sx.val.l = constant */
1368 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1369 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1371 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1372 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1373 emit_store_dst(jd, iptr, d);
/* Bitwise XOR family: 32-bit reg/reg, 32-bit immediate, 64-bit
   reg/reg (half by half, same in-place trick as LOR), and 64-bit
   immediate (split across low/high halves). */
1376 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1378 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1379 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1380 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1387 emit_store_dst(jd, iptr, d);
1390 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1391 /* sx.val.i = constant */
1393 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1394 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1396 M_XOR_IMM(iptr->sx.val.i, d);
1397 emit_store_dst(jd, iptr, d);
1400 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1402 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1403 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1404 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1405 if (s2 == GET_LOW_REG(d))
1406 M_XOR(s1, GET_LOW_REG(d));
1408 M_INTMOVE(s1, GET_LOW_REG(d));
1409 M_XOR(s2, GET_LOW_REG(d));
1411 /* REG_ITMP1 probably contains low 32-bit of destination */
1412 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1413 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1414 if (s2 == GET_HIGH_REG(d))
1415 M_XOR(s1, GET_HIGH_REG(d));
1417 M_INTMOVE(s1, GET_HIGH_REG(d));
1418 M_XOR(s2, GET_HIGH_REG(d));
1420 emit_store_dst(jd, iptr, d);
1423 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1424 /* sx.val.l = constant */
1426 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1427 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1429 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1430 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1431 emit_store_dst(jd, iptr, d);
1435 /* floating operations ************************************************/

/* x87 FPU arithmetic: operands are loaded onto the FPU stack via the
   FTMP pseudo-registers and the result is spilled through
   emit_store_dst().  The actual arithmetic opcode (fchs/faddp/fsubp/
   fmulp/fdivp) is emitted between load and store — NOTE(review):
   those emit calls are on lines not visible in this excerpt. */
1437 case ICMD_FNEG: /* ..., value ==> ..., - value */
1439 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1440 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1442 emit_store_dst(jd, iptr, d);
1445 case ICMD_DNEG: /* ..., value ==> ..., - value */
1447 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1448 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1450 emit_store_dst(jd, iptr, d);
1453 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1455 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1456 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1457 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1459 emit_store_dst(jd, iptr, d);
1462 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1464 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1465 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1466 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1468 emit_store_dst(jd, iptr, d);
1471 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1473 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1474 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1475 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1477 emit_store_dst(jd, iptr, d);
1480 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1482 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1483 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1484 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1486 emit_store_dst(jd, iptr, d);
1489 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1491 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1492 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1493 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1495 emit_store_dst(jd, iptr, d);
1498 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1500 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1501 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1502 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1504 emit_store_dst(jd, iptr, d);
1507 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1509 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1510 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1511 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1513 emit_store_dst(jd, iptr, d);
1516 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1518 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1519 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1520 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1522 emit_store_dst(jd, iptr, d);
/* Floating-point remainder: operands are loaded in reverse order so
   no fxch is needed.  The backward jcc on the C2 (parity) flag loops
   over fprem until it reports a complete partial remainder; the
   negative displacement is the hard-coded byte length of that loop
   body.  The popped FPU register is freed afterwards. */
1527 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1528 /* exchanged to skip fxch */
1529 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1530 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1531 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1532 /* emit_fxch(cd); */
1536 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1537 emit_store_dst(jd, iptr, d);
1538 emit_ffree_reg(cd, 0);
1542 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1544 /* exchanged to skip fxch */
1545 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1546 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1547 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1548 /* emit_fxch(cd); */
1553 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1554 emit_store_dst(jd, iptr, d);
1555 emit_ffree_reg(cd, 0);
/* int -> float/double: fildl converts a 32-bit memory operand, so an
   in-register source must first be spilled to a data-segment slot
   (shared scratch slot — not thread safe, as flagged below).
   long -> float/double uses fildll and requires the long in memory. */
1559 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1560 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1562 var = VAROP(iptr->s1);
1563 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1565 if (var->flags & INMEMORY) {
1566 emit_fildl_membase(cd, REG_SP, var->vv.regoff);
1568 /* XXX not thread safe! */
1569 disp = dseg_add_unique_s4(cd, 0);
1570 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1572 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1573 emit_fildl_membase(cd, REG_ITMP1, disp);
1576 emit_store_dst(jd, iptr, d);
1579 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1580 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1582 var = VAROP(iptr->s1);
1583 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1584 if (var->flags & INMEMORY) {
1585 emit_fildll_membase(cd, REG_SP, var->vv.regoff);
1588 log_text("L2F: longs have to be in memory");
1591 emit_store_dst(jd, iptr, d);
/* float -> int: switch the FPU control word to round-to-zero
   (Java truncation semantics), fistpl the value, then restore
   round-to-nearest.  0x80000000 is fistpl's "indefinite" result for
   NaN/overflow; when it is seen, control falls through to a slow-path
   call of asm_builtin_f2i which produces the Java-mandated result.
   The CALCOFFSETBYTES/disp arithmetic computes the byte length of the
   skipped slow path for the forward CC_NE jump. */
1596 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1597 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1599 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1602 /* Round to zero, 53-bit mode, exception masked */
1603 disp = dseg_add_s4(cd, 0x0e7f);
1604 emit_fldcw_membase(cd, REG_ITMP1, disp);
1606 var = VAROP(iptr->dst);
1607 var1 = VAROP(iptr->s1);
1609 if (var->flags & INMEMORY) {
1610 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1612 /* Round to nearest, 53-bit mode, exceptions masked */
1613 disp = dseg_add_s4(cd, 0x027f);
1614 emit_fldcw_membase(cd, REG_ITMP1, disp);
1616 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1617 REG_SP, var->vv.regoff);
1620 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1622 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1625 /* XXX not thread safe! */
1626 disp = dseg_add_unique_s4(cd, 0);
1627 emit_fistpl_membase(cd, REG_ITMP1, disp);
1628 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1630 /* Round to nearest, 53-bit mode, exceptions masked */
1631 disp = dseg_add_s4(cd, 0x027f);
1632 emit_fldcw_membase(cd, REG_ITMP1, disp);
1634 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1637 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1638 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1641 emit_jcc(cd, CC_NE, disp);
1643 /* XXX: change this when we use registers */
1644 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1645 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1646 emit_call_reg(cd, REG_ITMP1);
1648 if (var->flags & INMEMORY) {
1649 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1652 M_INTMOVE(REG_RESULT, var->vv.regoff);
/* double -> int: identical structure to F2I, but loads a 64-bit
   operand (fldl) for the slow path and calls asm_builtin_d2i. */
1656 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1658 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1659 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1661 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1664 /* Round to zero, 53-bit mode, exception masked */
1665 disp = dseg_add_s4(cd, 0x0e7f);
1666 emit_fldcw_membase(cd, REG_ITMP1, disp);
1668 var = VAROP(iptr->dst);
1669 var1 = VAROP(iptr->s1);
1671 if (var->flags & INMEMORY) {
1672 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1674 /* Round to nearest, 53-bit mode, exceptions masked */
1675 disp = dseg_add_s4(cd, 0x027f);
1676 emit_fldcw_membase(cd, REG_ITMP1, disp);
1678 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1679 REG_SP, var->vv.regoff);
1682 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1684 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1687 /* XXX not thread safe! */
1688 disp = dseg_add_unique_s4(cd, 0);
1689 emit_fistpl_membase(cd, REG_ITMP1, disp);
1690 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1692 /* Round to nearest, 53-bit mode, exceptions masked */
1693 disp = dseg_add_s4(cd, 0x027f);
1694 emit_fldcw_membase(cd, REG_ITMP1, disp);
1696 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1699 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1700 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1703 emit_jcc(cd, CC_NE, disp);
1705 /* XXX: change this when we use registers */
1706 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1707 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1708 emit_call_reg(cd, REG_ITMP1);
1710 if (var->flags & INMEMORY) {
1711 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1713 M_INTMOVE(REG_RESULT, var->vv.regoff);
/* float -> long: truncating fistpll into the destination stack slot.
   The fast path is validated by checking high word != 0x80000000 or
   low word != 0 (the 64-bit indefinite value); otherwise the slow
   path calls asm_builtin_f2l and stores the two result halves.
   Longs must be in memory on i386, hence the log_text else branch. */
1717 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1719 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1720 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1722 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1725 /* Round to zero, 53-bit mode, exception masked */
1726 disp = dseg_add_s4(cd, 0x0e7f);
1727 emit_fldcw_membase(cd, REG_ITMP1, disp);
1729 var = VAROP(iptr->dst);
1730 var1 = VAROP(iptr->s1);
1732 if (var->flags & INMEMORY) {
1733 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1735 /* Round to nearest, 53-bit mode, exceptions masked */
1736 disp = dseg_add_s4(cd, 0x027f);
1737 emit_fldcw_membase(cd, REG_ITMP1, disp);
1739 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1740 REG_SP, var->vv.regoff + 4);
1743 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1745 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1748 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1750 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1752 emit_jcc(cd, CC_NE, disp);
1754 emit_alu_imm_membase(cd, ALU_CMP, 0,
1755 REG_SP, var->vv.regoff);
1758 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1760 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1762 emit_jcc(cd, CC_NE, disp);
1764 /* XXX: change this when we use registers */
1765 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1766 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1767 emit_call_reg(cd, REG_ITMP1);
1768 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1769 emit_mov_reg_membase(cd, REG_RESULT2,
1770 REG_SP, var->vv.regoff + 4);
1773 log_text("F2L: longs have to be in memory");
/* double -> long: identical structure to F2L, with a 64-bit operand
   load (fldl) and the asm_builtin_d2l slow path. */
1778 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1780 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1781 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1783 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1786 /* Round to zero, 53-bit mode, exception masked */
1787 disp = dseg_add_s4(cd, 0x0e7f);
1788 emit_fldcw_membase(cd, REG_ITMP1, disp);
1790 var = VAROP(iptr->dst);
1791 var1 = VAROP(iptr->s1);
1793 if (var->flags & INMEMORY) {
1794 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1796 /* Round to nearest, 53-bit mode, exceptions masked */
1797 disp = dseg_add_s4(cd, 0x027f);
1798 emit_fldcw_membase(cd, REG_ITMP1, disp);
1800 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1801 REG_SP, var->vv.regoff + 4);
1804 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1806 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1809 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1811 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1813 emit_jcc(cd, CC_NE, disp);
1815 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff);
1818 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1820 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1822 emit_jcc(cd, CC_NE, disp);
1824 /* XXX: change this when we use registers */
1825 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1826 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1827 emit_call_reg(cd, REG_ITMP1);
1828 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1829 emit_mov_reg_membase(cd, REG_RESULT2,
1830 REG_SP, var->vv.regoff + 4);
1833 log_text("D2L: longs have to be in memory");
/* F2D/D2F are handled purely by the x87 load/store precision, so only
   a load and a store are needed. */
1838 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1840 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1841 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1843 emit_store_dst(jd, iptr, d);
1846 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1848 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1849 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1851 emit_store_dst(jd, iptr, d);
/* FCMPL: compare via fucompp/fnstsw (elided lines), then decode the
   condition bits out of AH.  Bit 0x400 (C2) flags "unordered"; for
   fcmpl an unordered result must yield -1, here arranged by treating
   it as GT and masking the flags.  The jcc displacements are the
   hard-coded byte lengths of the skipped instruction runs. */
1854 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1857 /* exchanged to skip fxch */
1858 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1859 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1860 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1861 /* emit_fxch(cd); */
1864 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1865 emit_jcc(cd, CC_E, 6);
1866 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1868 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1869 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1870 emit_jcc(cd, CC_B, 3 + 5);
1871 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1872 emit_jmp_imm(cd, 3);
1873 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1874 emit_store_dst(jd, iptr, d);
/* FCMPG: same decoding, but unordered must yield +1 ("treat as LT"
   here because the operands were loaded exchanged), forced by setting
   AH to 1 before the comparison cascade. */
1877 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1880 /* exchanged to skip fxch */
1881 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1882 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1883 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1884 /* emit_fxch(cd); */
1887 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1888 emit_jcc(cd, CC_E, 3);
1889 emit_movb_imm_reg(cd, 1, REG_AH);
1891 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1892 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1893 emit_jcc(cd, CC_B, 3 + 5);
1894 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1895 emit_jmp_imm(cd, 3);
1896 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1897 emit_store_dst(jd, iptr, d);
1901 /* memory operations **************************************************/

/* Array loads: every variant loads arrayref (s1) and index (s2),
   relies on the hardware fault for the null check ("implicit
   null-pointer check") and emits an explicit bounds check.  The
   scale argument (0..3) is log2 of the element size. */
1903 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1905 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1906 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1907 /* implicit null-pointer check */
1908 M_ILD(d, s1, OFFSET(java_array_t, size));
1909 emit_store_dst(jd, iptr, d);
1912 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1914 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1915 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1916 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1917 /* implicit null-pointer check */
1918 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1919 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray_t, data[0]),
1921 emit_store_dst(jd, iptr, d);
1924 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1926 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1927 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1928 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1929 /* implicit null-pointer check */
1930 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1931 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray_t, data[0]),
1933 emit_store_dst(jd, iptr, d);
1936 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1938 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1939 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1940 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1941 /* implicit null-pointer check */
1942 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1943 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray_t, data[0]),
1945 emit_store_dst(jd, iptr, d);
1948 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1950 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1951 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1952 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1953 /* implicit null-pointer check */
1954 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1955 emit_mov_memindex_reg(cd, OFFSET(java_intarray_t, data[0]),
1957 emit_store_dst(jd, iptr, d);
/* LALOAD: the 64-bit element is moved in two 32-bit pieces through
   REG_ITMP3 into the destination's stack slot — longs must be
   INMEMORY on i386 (asserted). */
1960 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1962 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1963 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1964 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1965 /* implicit null-pointer check */
1966 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1968 var = VAROP(iptr->dst);
1970 assert(var->flags & INMEMORY);
1971 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]),
1972 s1, s2, 3, REG_ITMP3);
1973 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff);
1974 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]) + 4,
1975 s1, s2, 3, REG_ITMP3);
1976 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff + 4);
1979 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1981 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1982 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1983 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1984 /* implicit null-pointer check */
1985 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1986 emit_flds_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2, 2);
1987 emit_store_dst(jd, iptr, d);
1990 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
1992 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1993 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1994 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1995 /* implicit null-pointer check */
1996 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1997 emit_fldl_memindex(cd, OFFSET(java_doublearray_t, data[0]), s1, s2,3);
1998 emit_store_dst(jd, iptr, d);
2001 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2003 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2004 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2005 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2006 /* implicit null-pointer check */
2007 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2008 emit_mov_memindex_reg(cd, OFFSET(java_objectarray_t, data[0]),
2010 emit_store_dst(jd, iptr, d);
/* Array stores: null check is implicit (hardware fault), bounds check
   explicit.  BASTORE must route the value through REG_ITMP3 because a
   byte store needs a register with an 8-bit subregister (EBP/ESI/EDI
   have none).  AASTORE first calls BUILTIN_canstore for the covariant
   array store check and raises ArrayStoreException on failure. */
2014 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2016 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2017 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2018 /* implicit null-pointer check */
2019 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2020 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2022 /* because EBP, ESI, EDI have no xH and xL nibbles */
2023 M_INTMOVE(s3, REG_ITMP3);
2026 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray_t, data[0]),
2030 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2032 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2033 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2034 /* implicit null-pointer check */
2035 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2036 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2037 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray_t, data[0]),
2041 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2043 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2044 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2045 /* implicit null-pointer check */
2046 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2047 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2048 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray_t, data[0]),
2052 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2054 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2055 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2056 /* implicit null-pointer check */
2057 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2058 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2059 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray_t, data[0]),
/* LASTORE: copy the 64-bit value from its stack slot in two 32-bit
   moves through REG_ITMP3 (longs are INMEMORY on i386, asserted). */
2063 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2065 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2066 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2067 /* implicit null-pointer check */
2068 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2070 var = VAROP(iptr->sx.s23.s3);
2072 assert(var->flags & INMEMORY);
2073 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff, REG_ITMP3);
2074 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray_t, data[0])
2076 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff + 4, REG_ITMP3);
2077 emit_mov_reg_memindex(cd, REG_ITMP3,
2078 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2081 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2083 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2084 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2085 /* implicit null-pointer check */
2086 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2087 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2088 emit_fstps_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2,2);
2091 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2093 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2094 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2095 /* implicit null-pointer check */
2096 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2097 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2098 emit_fstpl_memindex(cd, OFFSET(java_doublearray_t, data[0]),
2102 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2104 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2105 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2106 /* implicit null-pointer check */
2107 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2108 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2110 M_AST(s1, REG_SP, 0 * 4);
2111 M_AST(s3, REG_SP, 1 * 4);
2112 M_MOV_IMM(BUILTIN_canstore, REG_ITMP1);
2114 emit_exception_check(cd, iptr);
/* operands must be reloaded: the builtin call clobbered them */
2116 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2117 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2118 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2119 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray_t, data[0]),
/* Array store of a constant value: same null/bounds checking, value
   comes directly from the instruction as an immediate. */
2123 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2125 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2126 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2127 /* implicit null-pointer check */
2128 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2129 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2130 OFFSET(java_bytearray_t, data[0]), s1, s2, 0);
2133 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2135 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2136 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2137 /* implicit null-pointer check */
2138 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2139 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2140 OFFSET(java_chararray_t, data[0]), s1, s2, 1);
2143 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2145 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2146 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2147 /* implicit null-pointer check */
2148 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2149 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2150 OFFSET(java_shortarray_t, data[0]), s1, s2, 1);
2153 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2155 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2156 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2157 /* implicit null-pointer check */
2158 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2159 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2160 OFFSET(java_intarray_t, data[0]), s1, s2, 2);
/* LASTORECONST: the 64-bit constant is stored as two 32-bit
   immediates.  NOTE(review): the high word is synthesized with
   ((s4)constval) >> 31, which is only correct when the constant's
   high word equals the sign extension of its low word — confirm the
   optimizer restricts LASTORECONST constants accordingly, otherwise
   this should be constval >> 32. */
2163 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2165 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2166 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2167 /* implicit null-pointer check */
2168 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2169 emit_mov_imm_memindex(cd,
2170 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2171 OFFSET(java_longarray_t, data[0]), s1, s2, 3);
2172 emit_mov_imm_memindex(cd,
2173 ((s4)iptr->sx.s23.s3.constval) >> 31,
2174 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
/* AASTORECONST: only the null reference can be stored as a constant
   (no store check needed for null). */
2177 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2179 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2180 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2181 /* implicit null-pointer check */
2182 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2183 emit_mov_imm_memindex(cd, 0,
2184 OFFSET(java_objectarray_t, data[0]), s1, s2, 2);
/* Static field access: for an unresolved field a patcher stub is
   registered and `disp` (the field's address) is patched in later;
   for a resolved field whose class is not yet initialized a clinit
   patcher is registered instead.  The field address is then loaded
   into REG_ITMP1 and the access dispatches on the field type. */
2188 case ICMD_GETSTATIC: /* ... ==> ..., value */
2190 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2191 uf = iptr->sx.s23.s3.uf;
2192 fieldtype = uf->fieldref->parseddesc.fd->type;
2195 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2199 fi = iptr->sx.s23.s3.fmiref->p.field;
2200 fieldtype = fi->type;
2201 disp = (intptr_t) fi->value;
2203 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2204 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2207 M_MOV_IMM(disp, REG_ITMP1);
2208 switch (fieldtype) {
2211 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2212 M_ILD(d, REG_ITMP1, 0);
2215 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2216 M_LLD(d, REG_ITMP1, 0);
2219 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2220 M_FLD(d, REG_ITMP1, 0);
2223 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2224 M_DLD(d, REG_ITMP1, 0);
2227 emit_store_dst(jd, iptr, d);
/* PUTSTATIC mirrors GETSTATIC with the data flow reversed. */
2230 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2232 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2233 uf = iptr->sx.s23.s3.uf;
2234 fieldtype = uf->fieldref->parseddesc.fd->type;
2237 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2240 fi = iptr->sx.s23.s3.fmiref->p.field;
2241 fieldtype = fi->type;
2242 disp = (intptr_t) fi->value;
2244 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2245 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2248 M_MOV_IMM(disp, REG_ITMP1);
2249 switch (fieldtype) {
2252 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2253 M_IST(s1, REG_ITMP1, 0);
2256 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2257 M_LST(s1, REG_ITMP1, 0);
2260 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2261 emit_fstps_membase(cd, REG_ITMP1, 0);
2264 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2265 emit_fstpl_membase(cd, REG_ITMP1, 0);
/* PUTSTATICCONST: store an immediate; a long constant is written as
   two 32-bit immediates.  NOTE(review): the high word uses
   ((s4)constval) >> 31 — correct only for sign-extended 32-bit
   constants; confirm against the optimizer, else use constval >> 32. */
2270 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2271 /* val = value (in current instruction) */
2272 /* following NOP) */
2274 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2275 uf = iptr->sx.s23.s3.uf;
2276 fieldtype = uf->fieldref->parseddesc.fd->type;
2279 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2282 fi = iptr->sx.s23.s3.fmiref->p.field;
2283 fieldtype = fi->type;
2284 disp = (intptr_t) fi->value;
2286 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2287 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2290 M_MOV_IMM(disp, REG_ITMP1);
2291 switch (fieldtype) {
2294 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
2297 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2298 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
/* Instance field access: explicit null check on the object, then a
   base+displacement access.  For unresolved fields the 32-bit
   displacement form (M_*32 / *_membase32) is used so the patcher can
   rewrite the offset in place. */
2305 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2307 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2308 emit_nullpointer_check(cd, iptr, s1);
2310 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2311 uf = iptr->sx.s23.s3.uf;
2312 fieldtype = uf->fieldref->parseddesc.fd->type;
2315 codegen_addpatchref(cd, PATCHER_getfield,
2316 iptr->sx.s23.s3.uf, 0);
2319 fi = iptr->sx.s23.s3.fmiref->p.field;
2320 fieldtype = fi->type;
2324 switch (fieldtype) {
2327 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2328 M_ILD32(d, s1, disp);
2331 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2332 M_LLD32(d, s1, disp);
2335 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2336 M_FLD32(d, s1, disp);
2339 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2340 M_DLD32(d, s1, disp);
2343 emit_store_dst(jd, iptr, d);
/* PUTFIELD: the value must be loaded before the patch reference is
   added, so the patchable store is the first instruction after the
   patcher position ("must be done here because of code patching"). */
2346 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2348 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2349 emit_nullpointer_check(cd, iptr, s1);
2351 /* must be done here because of code patching */
2353 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2354 uf = iptr->sx.s23.s3.uf;
2355 fieldtype = uf->fieldref->parseddesc.fd->type;
2358 fi = iptr->sx.s23.s3.fmiref->p.field;
2359 fieldtype = fi->type;
2362 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2363 if (IS_2_WORD_TYPE(fieldtype))
2364 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2366 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2369 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2371 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2373 uf = iptr->sx.s23.s3.uf;
2376 codegen_addpatchref(cd, PATCHER_putfield, uf, 0);
2380 fi = iptr->sx.s23.s3.fmiref->p.field;
2384 switch (fieldtype) {
2387 M_IST32(s2, s1, disp);
2390 M_LST32(s2, s1, disp);
2393 emit_fstps_membase32(cd, s1, disp);
2396 emit_fstpl_membase32(cd, s1, disp);
/* PUTFIELDCONST: store an immediate into the field; longs as two
   32-bit immediates.  NOTE(review): same ((s4)constval) >> 31
   high-word concern as PUTSTATICCONST — confirm. */
2401 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2402 /* val = value (in current instruction) */
2403 /* following NOP) */
2405 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2406 emit_nullpointer_check(cd, iptr, s1);
2408 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2409 uf = iptr->sx.s23.s3.uf;
2410 fieldtype = uf->fieldref->parseddesc.fd->type;
2413 codegen_addpatchref(cd, PATCHER_putfieldconst,
2417 fi = iptr->sx.s23.s3.fmiref->p.field;
2418 fieldtype = fi->type;
2422 switch (fieldtype) {
2425 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2428 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2429 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2437 /* branch operations **************************************************/

/* ATHROW: exception object goes to REG_ITMP1_XPTR; a call-to-next
   instruction followed by a pop materializes the exception PC, then
   control transfers to asm_handle_exception.  For unresolved
   exception classes the verifier patcher is registered first. */
2439 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2441 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2442 M_INTMOVE(s1, REG_ITMP1_XPTR);
2444 #ifdef ENABLE_VERIFIER
2445 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2446 codegen_addpatchref(cd, PATCHER_athrow_areturn,
2447 iptr->sx.s23.s2.uc, 0);
2449 #endif /* ENABLE_VERIFIER */
2451 M_CALL_IMM(0); /* passing exception pc */
2452 M_POP(REG_ITMP2_XPC);
2454 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
/* Unconditional control flow; under SSA, pending phi moves must be
   emitted before the branch leaves the block. */
2458 case ICMD_GOTO: /* ... ==> ... */
2459 case ICMD_RET: /* ... ==> ... */
2461 #if defined(ENABLE_SSA)
2463 last_cmd_was_goto = true;
2465 /* In case of a Goto phimoves have to be inserted before the */
2468 codegen_emit_phi_moves(jd, bptr);
2471 emit_br(cd, iptr->dst.block);
2475 case ICMD_JSR: /* ... ==> ... */
2477 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
/* Null-compare branches: opcode offset from ICMD_IFNULL selects the
   condition code for emit_bcc. */
2481 case ICMD_IFNULL: /* ..., value ==> ... */
2482 case ICMD_IFNONNULL:
2484 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2486 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
/* Integer compare-with-constant branches: one cmp, then the opcode
   offset selects the condition. */
2489 case ICMD_IFEQ: /* ..., value ==> ... */
2496 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2497 M_CMP_IMM(iptr->sx.val.i, s1);
2498 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
/* Long equality with constant: XOR both halves with the constant and
   OR them together — zero iff all 64 bits matched.  The common
   compare-with-zero case just ORs the two halves. */
2501 case ICMD_IF_LEQ: /* ..., value ==> ... */
2503 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2504 if (iptr->sx.val.l == 0) {
2505 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2506 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2509 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2510 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2511 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2512 M_OR(REG_ITMP2, REG_ITMP1);
2514 emit_beq(cd, iptr->dst.block);
/* Long ordered compares: signed compare on the high word decides
   unless the high words are equal, in which case an UNSIGNED compare
   on the low word decides (hence emit_bult/bule/bugt/buge). */
2517 case ICMD_IF_LLT: /* ..., value ==> ... */
2519 if (iptr->sx.val.l == 0) {
2520 /* If high 32-bit are less than zero, then the 64-bits
2522 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2524 emit_blt(cd, iptr->dst.block);
2527 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2528 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2529 emit_blt(cd, iptr->dst.block);
2531 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2532 emit_bult(cd, iptr->dst.block);
2536 case ICMD_IF_LLE: /* ..., value ==> ... */
2538 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2539 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2540 emit_blt(cd, iptr->dst.block);
2542 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2543 emit_bule(cd, iptr->dst.block);
2546 case ICMD_IF_LNE: /* ..., value ==> ... */
2548 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2549 if (iptr->sx.val.l == 0) {
2550 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2551 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2554 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2555 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2556 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2557 M_OR(REG_ITMP2, REG_ITMP1);
2559 emit_bne(cd, iptr->dst.block);
2562 case ICMD_IF_LGT: /* ..., value ==> ... */
2564 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2565 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2566 emit_bgt(cd, iptr->dst.block);
2568 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2569 emit_bugt(cd, iptr->dst.block);
2572 case ICMD_IF_LGE: /* ..., value ==> ... */
2574 if (iptr->sx.val.l == 0) {
2575 /* If high 32-bit are greater equal zero, then the
2577 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2579 emit_bge(cd, iptr->dst.block);
2582 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2583 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2584 emit_bgt(cd, iptr->dst.block);
2586 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2587 emit_buge(cd, iptr->dst.block);
		/* --- two-operand compare-and-branch opcodes --------------------
		   NOTE(review): the M_CMP instructions between the operand loads
		   and the conditional branches are elided in this excerpt for
		   the ICMP/ACMP/LCMPLT..LCMPGE arms; visible lines unchanged. */

		case ICMD_IF_ICMPEQ:    /* ..., value, value ==> ... */
		case ICMD_IF_ICMPNE:
		case ICMD_IF_ICMPLT:
		case ICMD_IF_ICMPGT:
		case ICMD_IF_ICMPGE:
		case ICMD_IF_ICMPLE:

			/* int compare: the branch condition code is derived from
			   the opcode's offset relative to ICMD_IF_ICMPEQ */
			s1 = emit_load_s1(jd, iptr, REG_ITMP1);
			s2 = emit_load_s2(jd, iptr, REG_ITMP2);
			emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);

		case ICMD_IF_ACMPEQ:    /* ..., value, value ==> ... */
		case ICMD_IF_ACMPNE:

			/* reference compare: same encoding trick, relative to
			   ICMD_IF_ACMPEQ */
			s1 = emit_load_s1(jd, iptr, REG_ITMP1);
			s2 = emit_load_s2(jd, iptr, REG_ITMP2);
			emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);

		case ICMD_IF_LCMPEQ:    /* ..., value, value ==> ... */

			/* XOR the low words and the high words separately; the OR
			   of both XOR results is zero iff the longs are equal */
			s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
			s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
			M_INTMOVE(s1, REG_ITMP1);
			M_XOR(s2, REG_ITMP1);
			s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
			s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
			M_INTMOVE(s1, REG_ITMP2);
			M_XOR(s2, REG_ITMP2);
			M_OR(REG_ITMP1, REG_ITMP2);
			emit_beq(cd, iptr->dst.block);

		case ICMD_IF_LCMPNE:    /* ..., value, value ==> ... */

			/* same scheme as ICMD_IF_LCMPEQ, branching on inequality */
			s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
			s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
			M_INTMOVE(s1, REG_ITMP1);
			M_XOR(s2, REG_ITMP1);
			s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
			s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
			M_INTMOVE(s1, REG_ITMP2);
			M_XOR(s2, REG_ITMP2);
			M_OR(REG_ITMP1, REG_ITMP2);
			emit_bne(cd, iptr->dst.block);

		case ICMD_IF_LCMPLT:    /* ..., value, value ==> ... */

			/* signed compare on the high words; on equal high words
			   an unsigned compare on the low words */
			s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
			s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
			emit_blt(cd, iptr->dst.block);
			s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
			s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
			emit_bult(cd, iptr->dst.block);

		case ICMD_IF_LCMPGT:    /* ..., value, value ==> ... */

			s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
			s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
			emit_bgt(cd, iptr->dst.block);
			s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
			s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
			emit_bugt(cd, iptr->dst.block);

		case ICMD_IF_LCMPLE:    /* ..., value, value ==> ... */

			s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
			s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
			emit_blt(cd, iptr->dst.block);
			s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
			s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
			emit_bule(cd, iptr->dst.block);

		case ICMD_IF_LCMPGE:    /* ..., value, value ==> ... */

			s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
			s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
			emit_bgt(cd, iptr->dst.block);
			s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
			s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
			emit_buge(cd, iptr->dst.block);
		/* --- method return ---------------------------------------------
		   All *RETURN opcodes move the return value into the ABI result
		   register(s), then jump to the shared epilogue.  NOTE(review):
		   the `nowperformreturn:' label, inner case labels of the
		   save/restore switches, and several brace/#endif lines are
		   elided in this excerpt; visible lines unchanged. */

		case ICMD_IRETURN:      /* ..., retvalue ==> ... */

			REPLACEMENT_POINT_RETURN(cd, iptr);
			s1 = emit_load_s1(jd, iptr, REG_RESULT);
			M_INTMOVE(s1, REG_RESULT);
			goto nowperformreturn;

		case ICMD_LRETURN:      /* ..., retvalue ==> ... */

			REPLACEMENT_POINT_RETURN(cd, iptr);
			s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
			M_LNGMOVE(s1, REG_RESULT_PACKED);
			goto nowperformreturn;

		case ICMD_ARETURN:      /* ..., retvalue ==> ... */

			REPLACEMENT_POINT_RETURN(cd, iptr);
			s1 = emit_load_s1(jd, iptr, REG_RESULT);
			M_INTMOVE(s1, REG_RESULT);

#ifdef ENABLE_VERIFIER
			/* unresolved return class: install a patcher that performs
			   the areturn type check once the class is resolved */
			if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
				codegen_addpatchref(cd, PATCHER_athrow_areturn,
									iptr->sx.s23.s2.uc, 0);
#endif /* ENABLE_VERIFIER */
			goto nowperformreturn;

		case ICMD_FRETURN:      /* ..., retvalue ==> ... */

			REPLACEMENT_POINT_RETURN(cd, iptr);
			s1 = emit_load_s1(jd, iptr, REG_FRESULT);
			goto nowperformreturn;

		case ICMD_RETURN:       /* ...  ==> ... */

			REPLACEMENT_POINT_RETURN(cd, iptr);

			/* p counts down frame slots while popping saved regs */
			p = cd->stackframesize;

#if !defined(NDEBUG)
			emit_verbosecall_exit(jd);

#if defined(ENABLE_THREADS)
			/* synchronized method: release the monitor, preserving the
			   return value around the LOCK_monitor_exit call */
			if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
				M_ALD(REG_ITMP2, REG_SP, rd->memuse * 8);

				/* we need to save the proper return value */
				switch (iptr->opc) {
					M_IST(REG_RESULT, REG_SP, rd->memuse * 8);
					M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
					emit_fstps_membase(cd, REG_SP, rd->memuse * 8);
					emit_fstpl_membase(cd, REG_SP, rd->memuse * 8);

				M_AST(REG_ITMP2, REG_SP, 0);
				M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);

				/* and now restore the proper return value */
				switch (iptr->opc) {
					M_ILD(REG_RESULT, REG_SP, rd->memuse * 8);
					M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
					emit_flds_membase(cd, REG_SP, rd->memuse * 8);
					emit_fldl_membase(cd, REG_SP, rd->memuse * 8);

			/* restore saved registers */

			for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
				p--; M_ALD(rd->savintregs[i], REG_SP, p * 8);

			for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
				emit_fldl_membase(cd, REG_SP, p * 8);
				if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
					/* restore into the saved FPU reg without clobbering
					   the pending float return value on top of the
					   x87 stack (historic variants kept for reference) */
					/* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
					/* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */

			/* deallocate stack */

			if (cd->stackframesize)
				M_AADD_IMM(cd->stackframesize * 8, REG_SP);
		case ICMD_TABLESWITCH:  /* ..., index ==> ... */

			/* dense switch: indexed jump through a table built in the
			   data segment (opening brace and some lines elided here) */
			branch_target_t *table;

			table = iptr->dst.table;

			l = iptr->sx.s23.s2.tablelow;
			i = iptr->sx.s23.s3.tablehigh;

			s1 = emit_load_s1(jd, iptr, REG_ITMP1);
			M_INTMOVE(s1, REG_ITMP1);

			/* normalize the index so the table starts at 0 */
			M_ISUB_IMM(l, REG_ITMP1);

			/* range check: one unsigned compare catches both index <
			   low and index > high; table[0] is the default target.
			   NOTE(review): elided code presumably rescales i to the
			   table size before this compare -- confirm */
			M_CMP_IMM(i - 1, REG_ITMP1);
			emit_bugt(cd, table[0].block);

			/* build jump table top down and use address of lowest entry */
			dseg_add_target(cd, table->block);

			/* length of dataseg after last dseg_addtarget is used
			   as the negative base offset of the jump table */
			M_MOV_IMM(0, REG_ITMP2);
			emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);

		case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */

			/* sparse switch: linear sequence of compare-and-branch
			   pairs (loop header elided in this excerpt) */
			lookup_target_t *lookup;

			lookup = iptr->dst.lookup;

			i = iptr->sx.s23.s2.lookupcount;

			MCODECHECK((i<<2)+8);
			s1 = emit_load_s1(jd, iptr, REG_ITMP1);

			M_CMP_IMM(lookup->value, s1);
			emit_beq(cd, lookup->target.block);

			/* no key matched: jump to the default target */
			emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
		/* --- method invocation -----------------------------------------
		   Shared argument-marshalling code followed by a per-opcode
		   dispatch that loads the callee address.  NOTE(review): many
		   brace/break/#if lines and some statements are elided in this
		   excerpt; visible code lines are kept unchanged. */

		case ICMD_BUILTIN:      /* ..., [arg1, [arg2 ...]] ==> ... */

			bte = iptr->sx.s23.s3.bte;

		case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */

		case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
		case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer    */
		case ICMD_INVOKEINTERFACE:

			REPLACEMENT_POINT_INVOKE(cd, iptr);

			/* choose the method descriptor: from the unresolved method
			   reference while unresolved, else from the methodinfo */
			if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
				md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
				lm = iptr->sx.s23.s3.fmiref->p.method;
				md = lm->parseddesc;

			s3 = md->paramcount;

			MCODECHECK((s3 << 1) + 64);

			/* copy arguments to registers or stack location */

			for (s3 = s3 - 1; s3 >= 0; s3--) {
				var = VAR(iptr->sx.s23.s2.args[s3]);

				/* Already Preallocated (ARGVAR) ? */
				if (var->flags & PREALLOC)

				if (IS_INT_LNG_TYPE(var->type)) {
					if (!md->params[s3].inmemory) {
						/* the i386 ABI passes all int/long arguments
						   on the stack, so this must not happen */
						log_text("No integer argument registers available!");

					if (IS_2_WORD_TYPE(var->type)) {
						d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
						M_LST(d, REG_SP, md->params[s3].regoff);
						d = emit_load(jd, iptr, var, REG_ITMP1);
						M_IST(d, REG_SP, md->params[s3].regoff);

					if (!md->params[s3].inmemory) {
						s1 = md->params[s3].regoff;
						d = emit_load(jd, iptr, var, s1);
						d = emit_load(jd, iptr, var, REG_FTMP1);
						if (IS_2_WORD_TYPE(var->type))
							M_DST(d, REG_SP, md->params[s3].regoff);
							M_FST(d, REG_SP, md->params[s3].regoff);

			/* per-opcode: load the callee address and call it */

			switch (iptr->opc) {
				/* ICMD_BUILTIN: call the builtin's function pointer
				   directly, then check for a pending exception */
				disp = (ptrint) bte->fp;
				d = md->returntype.type;

				M_MOV_IMM(disp, REG_ITMP1);
				emit_exception_check(cd, iptr);

			case ICMD_INVOKESPECIAL:
				/* receiver is the first stack argument */
				M_ALD(REG_ITMP1, REG_SP, 0 * 8);
				emit_nullpointer_check(cd, iptr, REG_ITMP1);

			case ICMD_INVOKESTATIC:
				/* unresolved: leave a patcher ref, address patched in
				   later; resolved: call the method's stub routine */
					unresolved_method *um = iptr->sx.s23.s3.um;

					codegen_addpatchref(cd, PATCHER_invokestatic_special,
					d = md->returntype.type;
					disp = (ptrint) lm->stubroutine;
					d = lm->parseddesc->returntype.type;

				M_MOV_IMM(disp, REG_ITMP2);

			case ICMD_INVOKEVIRTUAL:
				M_ALD(REG_ITMP1, REG_SP, 0 * 8);
				emit_nullpointer_check(cd, iptr, s1);

					unresolved_method *um = iptr->sx.s23.s3.um;

					codegen_addpatchref(cd, PATCHER_invokevirtual, um, 0);
					d = md->returntype.type;
					/* vtable slot offset of the callee */
					s1 = OFFSET(vftbl_t, table[0]) +
						sizeof(methodptr) * lm->vftblindex;
					d = md->returntype.type;

				/* indirect call through the object's vftbl */
				M_ALD(REG_METHODPTR, REG_ITMP1,
					  OFFSET(java_object_t, vftbl));
				M_ALD32(REG_ITMP3, REG_METHODPTR, s1);

			case ICMD_INVOKEINTERFACE:
				M_ALD(REG_ITMP1, REG_SP, 0 * 8);
				emit_nullpointer_check(cd, iptr, s1);

					unresolved_method *um = iptr->sx.s23.s3.um;

					codegen_addpatchref(cd, PATCHER_invokeinterface, um, 0);
					d = md->returntype.type;
					/* interface table grows downwards from the vftbl */
					s1 = OFFSET(vftbl_t, interfacetable[0]) -
						sizeof(methodptr) * lm->class->index;
					s2 = sizeof(methodptr) * (lm - lm->class->methods);
					d = md->returntype.type;

				/* double indirection: vftbl -> interface table ->
				   method pointer */
				M_ALD(REG_METHODPTR, REG_ITMP1,
					  OFFSET(java_object_t, vftbl));
				M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
				M_ALD32(REG_ITMP3, REG_METHODPTR, s2);

			/* store size of call code in replacement point */

			REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);

			/* d contains return type */

			if (d != TYPE_VOID) {
#if defined(ENABLE_SSA)
				if ((ls == NULL) /* || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) */ ||
					(ls->lifetime[iptr->dst.varindex].type != UNUSED))
					/* a "living" stackslot */
				/* move the result out of the ABI result register(s)
				   into the destination chosen by the allocator */
				if (IS_INT_LNG_TYPE(d)) {
					if (IS_2_WORD_TYPE(d)) {
						s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
						M_LNGMOVE(REG_RESULT_PACKED, s1);
						s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
						M_INTMOVE(REG_RESULT, s1);
					s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
				emit_store_dst(jd, iptr, s1);
		case ICMD_CHECKCAST:  /* ..., objectref ==> ..., objectref */

			/* Three variants: interface cast (interface-table lookup),
			   class cast (baseval/diffval subtype range check), and
			   array cast (call BUILTIN_arraycheckcast).  Unresolved
			   classes emit both paths plus a runtime flags test.
			   NOTE(review): many brace/else/#if lines are elided in
			   this excerpt; visible code lines are kept unchanged. */

			if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
				/* object type cast-check */

				vftbl_t *supervftbl;

				if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
					super = iptr->sx.s23.s3.c.cls;
					superindex = super->index;
					supervftbl = super->vftbl;

				if ((super == NULL) || !(super->flags & ACC_INTERFACE))
					CODEGEN_CRITICAL_SECTION_NEW;

				s1 = emit_load_s1(jd, iptr, REG_ITMP1);

				/* if class is not resolved, check which code to call */

				if (super == NULL) {
					emit_label_beq(cd, BRANCH_LABEL_1);

					codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
										iptr->sx.s23.s3.c.ref, 0);

					/* immediate is patched with the real flags later */
					M_MOV_IMM(0, REG_ITMP2);                  /* super->flags */
					M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
					emit_label_beq(cd, BRANCH_LABEL_2);

				/* interface checkcast code */

				if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
					if (super != NULL) {
						emit_label_beq(cd, BRANCH_LABEL_3);

					M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));

					if (super == NULL) {
						codegen_addpatchref(cd, PATCHER_checkcast_interface,
											iptr->sx.s23.s3.c.ref,

					/* interface index must lie within the table */
								  REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
					M_ISUB_IMM32(superindex, REG_ITMP3);
					/* XXX do we need this one? */
					emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);

					/* non-NULL entry means the class implements it */
					M_ALD32(REG_ITMP3, REG_ITMP2,
							OFFSET(vftbl_t, interfacetable[0]) -
							superindex * sizeof(methodptr*));
					emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);

					emit_label_br(cd, BRANCH_LABEL_4);
					emit_label(cd, BRANCH_LABEL_3);

				/* class checkcast code */

				if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
					if (super == NULL) {
						emit_label(cd, BRANCH_LABEL_2);
						emit_label_beq(cd, BRANCH_LABEL_5);

					M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));

					if (super == NULL) {
						codegen_addpatchref(cd, PATCHER_checkcast_class,
											iptr->sx.s23.s3.c.ref,

					M_MOV_IMM(supervftbl, REG_ITMP3);

					/* baseval/diffval reads must be consistent, hence
					   the critical section around them */
					CODEGEN_CRITICAL_SECTION_START;

					M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));

					/* historic alternative kept for reference: */
/* 					if (s1 != REG_ITMP1) { */
/* 						emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
/* 						emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
/* #if defined(ENABLE_THREADS) */
/* 						codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
/* 						emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */

					/* subtype iff (sub.baseval - super.baseval) is
					   within super.diffval (unsigned compare below) */
					M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
					M_ISUB(REG_ITMP3, REG_ITMP2);
					M_MOV_IMM(supervftbl, REG_ITMP3);
					M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));

					CODEGEN_CRITICAL_SECTION_END;

					M_CMP(REG_ITMP3, REG_ITMP2);
					emit_classcast_check(cd, iptr, BRANCH_ULE, REG_ITMP3, s1);

					emit_label(cd, BRANCH_LABEL_5);

				if (super == NULL) {
					emit_label(cd, BRANCH_LABEL_1);
					emit_label(cd, BRANCH_LABEL_4);

				d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);

				/* array type cast-check */

				s1 = emit_load_s1(jd, iptr, REG_ITMP2);
				M_AST(s1, REG_SP, 0 * 4);

				if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
					codegen_addpatchref(cd, PATCHER_builtin_arraycheckcast,
										iptr->sx.s23.s3.c.ref, 0);

				M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
				M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);

				/* builtin returns non-zero on a legal cast */
				s1 = emit_load_s1(jd, iptr, REG_ITMP2);
				emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);

				d = codegen_reg_of_dst(jd, iptr, s1);

			emit_store_dst(jd, iptr, d);
		case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */

			/* Same structure as ICMD_CHECKCAST, but produces a 0/1
			   int result in d instead of throwing on failure.
			   NOTE(review): brace/else/#if lines and some statements
			   are elided in this excerpt; visible lines unchanged. */

			vftbl_t *supervftbl;

			if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
				super = iptr->sx.s23.s3.c.cls;
				superindex = super->index;
				supervftbl = super->vftbl;

			if ((super == NULL) || !(super->flags & ACC_INTERFACE))
				CODEGEN_CRITICAL_SECTION_NEW;

			s1 = emit_load_s1(jd, iptr, REG_ITMP1);
			d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);

			M_INTMOVE(s1, REG_ITMP1);

			/* if class is not resolved, check which code to call */

			if (super == NULL) {
				emit_label_beq(cd, BRANCH_LABEL_1);

				codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
									iptr->sx.s23.s3.c.ref, 0);

				/* immediate is patched with the real flags later */
				M_MOV_IMM(0, REG_ITMP3);                  /* super->flags */
				M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
				emit_label_beq(cd, BRANCH_LABEL_2);

			/* interface instanceof code */

			if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
				if (super != NULL) {
					emit_label_beq(cd, BRANCH_LABEL_3);

				M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));

				if (super == NULL) {
					codegen_addpatchref(cd, PATCHER_instanceof_interface,
										iptr->sx.s23.s3.c.ref, 0);

				/* index must lie within the interface table */
							  REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
				M_ISUB_IMM32(superindex, REG_ITMP3);

				/* byte length of the code to skip when the index is
				   out of range (hand-counted instruction sizes) */
				disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
						6 /* jcc */ + 5 /* mov_imm_reg */);

				/* non-NULL table entry => instance of the interface */
				M_ALD32(REG_ITMP1, REG_ITMP1,
						OFFSET(vftbl_t, interfacetable[0]) -
						superindex * sizeof(methodptr*));

				/* historic alternative kept for reference: */
/* 				emit_setcc_reg(cd, CC_A, d); */
/* 				emit_jcc(cd, CC_BE, 5); */

				emit_label_br(cd, BRANCH_LABEL_4);
				emit_label(cd, BRANCH_LABEL_3);

			/* class instanceof code */

			if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
				if (super == NULL) {
					emit_label(cd, BRANCH_LABEL_2);
					emit_label_beq(cd, BRANCH_LABEL_5);

				M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));

				if (super == NULL) {
					codegen_addpatchref(cd, PATCHER_instanceof_class,
										iptr->sx.s23.s3.c.ref, 0);

				M_MOV_IMM(supervftbl, REG_ITMP2);

				/* baseval/diffval reads must be mutually consistent */
				CODEGEN_CRITICAL_SECTION_START;

				M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
				M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
				M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));

				CODEGEN_CRITICAL_SECTION_END;

				/* subtype iff (sub.baseval - super.baseval) is within
				   super.diffval; d is zeroed first, then set from the
				   unsigned compare (setcc elided in this excerpt) */
				M_ISUB(REG_ITMP2, REG_ITMP1);
				M_CLR(d);                                 /* may be REG_ITMP2 */
				M_CMP(REG_ITMP3, REG_ITMP1);

				emit_label(cd, BRANCH_LABEL_5);

			if (super == NULL) {
				emit_label(cd, BRANCH_LABEL_1);
				emit_label(cd, BRANCH_LABEL_4);

			emit_store_dst(jd, iptr, d);
		case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */

			/* Calls BUILTIN_multianewarray(argcount, arraydescriptor,
			   &dims) with the dimension sizes staged on the stack.
			   NOTE(review): some brace/else lines are elided in this
			   excerpt; visible code lines are kept unchanged. */

			/* check for negative sizes and copy sizes to stack if necessary */

			MCODECHECK((iptr->s1.argcount << 1) + 64);

			for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
				/* copy SAVEDVAR sizes to stack */
				var = VAR(iptr->sx.s23.s2.args[s1]);

				/* Already Preallocated? */
				if (!(var->flags & PREALLOC)) {
					if (var->flags & INMEMORY) {
						M_ILD(REG_ITMP1, REG_SP, var->vv.regoff);
						M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
						M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);

			/* is a patcher function set? */

			if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
				codegen_addpatchref(cd, PATCHER_builtin_multianewarray,
									iptr->sx.s23.s3.c.ref, 0);

				disp = (ptrint) iptr->sx.s23.s3.c.cls;

			/* a0 = dimension count */

			M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);

			/* a1 = arraydescriptor */

			M_IST_IMM(disp, REG_SP, 1 * 4);

			/* a2 = pointer to dimensions = stack pointer */

			M_MOV(REG_SP, REG_ITMP1);
			M_AADD_IMM(3 * 4, REG_ITMP1);
			M_AST(REG_ITMP1, REG_SP, 2 * 4);

			M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);

			/* check for exception before result assignment */

			emit_exception_check(cd, iptr);

			s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
			M_INTMOVE(REG_RESULT, s1);
			emit_store_dst(jd, iptr, s1);

			/* unknown opcode: abort the compilation (the `default:'
			   label itself is elided in this excerpt) */
			exceptions_throw_internalerror("Unknown ICMD %d during code generation",
		} /* for instruction */

		/* end of per-basic-block work: register allocation bookkeeping,
		   SSA phi moves, and patcher-safety padding */

#if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)

#if defined(ENABLE_SSA)
		/* by edge splitting, in Blocks with phi moves there can only  */
		/* be a goto as last command, no other Jump/Branch Command     */

		if (!last_cmd_was_goto)
			codegen_emit_phi_moves(jd, bptr);

		/* At the end of a basic block we may have to append some nops,
		   because the patcher stub calling code might be longer than the
		   actual instruction. So codepatching does not change the
		   following block unintentionally. */

		if (cd->mcodeptr < cd->lastmcodeptr) {
			while (cd->mcodeptr < cd->lastmcodeptr) {

	} /* if (bptr -> flags >= BBREACHED) */
	} /* for basic block */

	/* emit the per-method line number table into the data segment */
	dseg_createlinenumbertable(cd);

	/* generate stubs */

	emit_patcher_stubs(jd);

	/* everything's ok */
/* codegen_emit_stub_compiler **************************************************

   Emit a stub routine which calls the compiler.

   The stub loads the methodinfo pointer into REG_ITMP1 and the address
   of asm_call_jit_compiler into REG_ITMP3.  NOTE(review): variable
   declarations, braces and the trailing jump are elided in this
   excerpt; visible code lines are kept unchanged.

*******************************************************************************/

void codegen_emit_stub_compiler(jitdata *jd)

	/* get required compiler data */

	/* code for the stub */

	M_MOV_IMM(m, REG_ITMP1);            /* method to be compiled      */
	M_MOV_IMM(asm_call_jit_compiler, REG_ITMP3);
/* codegen_emit_stub_native ****************************************************

   Emits a stub routine which calls a native method.

   The stub builds a native stack frame, registers a stackframeinfo,
   marshals the Java arguments into the JNI calling convention
   (prepending JNIEnv and, for static methods, the class), calls the
   native function, unregisters the frame and forwards any pending
   exception.  NOTE(review): many lines (declarations, braces, case
   labels, calls through REG_ITMP1/REG_ITMP3) are elided in this
   excerpt; visible code lines are kept unchanged.

*******************************************************************************/

void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f)
	s4           i, j;                 /* count variables                    */

	/* get required compiler data */

	/* set some variables */

	/* hidden JNI parameters: JNIEnv (+ class for static methods) */
	nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;

	/* calculate stackframe size */

	cd->stackframesize =
		sizeof(stackframeinfo) / SIZEOF_VOID_P +
		sizeof(localref_table) / SIZEOF_VOID_P +
		1 +                             /* function pointer                   */
		4 +                             /* 4 arguments (start_native_call)    */

	/* keep stack 16-byte aligned */

	ALIGN_ODD(cd->stackframesize);      /* XXX this is wrong, +4 is missing */

	/* create method header */

	(void) dseg_add_unique_address(cd, code);              /* CodeinfoPointer */
	(void) dseg_add_unique_s4(cd, cd->stackframesize * 8); /* FrameSize       */
	(void) dseg_add_unique_s4(cd, 0);                      /* IsSync          */
	(void) dseg_add_unique_s4(cd, 0);                      /* IsLeaf          */
	(void) dseg_add_unique_s4(cd, 0);                      /* IntSave         */
	(void) dseg_add_unique_s4(cd, 0);                      /* FltSave         */
	(void) dseg_addlinenumbertablesize(cd);
	(void) dseg_add_unique_s4(cd, 0);                      /* ExTableSize     */

#if defined(ENABLE_PROFILING)
	/* generate native method profiling code */

	if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
		/* count frequency */

		M_MOV_IMM(code, REG_ITMP1);
		M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));

	/* calculate stackframe size for native function */

	M_ASUB_IMM(cd->stackframesize * 8, REG_SP);

#if !defined(NDEBUG)
	emit_verbosecall_enter(jd);

	/* get function address (this must happen before the stackframeinfo) */

#if !defined(WITH_STATIC_CLASSPATH)
		codegen_addpatchref(cd, PATCHER_resolve_native, m, 0);

	/* stash the native function pointer in the frame */
	M_AST_IMM((ptrint) f, REG_SP, 4 * 4);

	/* Mark the whole fpu stack as free for native functions (only for saved  */
	/* register count == 0).                                                  */

	emit_ffree_reg(cd, 0);
	emit_ffree_reg(cd, 1);
	emit_ffree_reg(cd, 2);
	emit_ffree_reg(cd, 3);
	emit_ffree_reg(cd, 4);
	emit_ffree_reg(cd, 5);
	emit_ffree_reg(cd, 6);
	emit_ffree_reg(cd, 7);

	/* prepare data structures for native function call */

	M_MOV(REG_SP, REG_ITMP1);
	M_AADD_IMM(cd->stackframesize * 8, REG_ITMP1);

	M_AST(REG_ITMP1, REG_SP, 0 * 4);
	M_IST_IMM(0, REG_SP, 1 * 4);

	M_MOV(REG_SP, REG_ITMP2);
	M_AADD_IMM(cd->stackframesize * 8 + SIZEOF_VOID_P, REG_ITMP2);

	M_AST(REG_ITMP2, REG_SP, 2 * 4);
	M_ALD(REG_ITMP3, REG_SP, cd->stackframesize * 8);
	M_AST(REG_ITMP3, REG_SP, 3 * 4);
	M_MOV_IMM(codegen_start_native_call, REG_ITMP1);

	/* reload the (possibly patched) native function pointer */
	M_ALD(REG_ITMP3, REG_SP, 4 * 4);

	/* copy arguments into new stackframe */

	for (i = md->paramcount - 1, j = i + nativeparams; i >= 0; i--, j--) {
		t = md->paramtypes[i].type;

		if (!md->params[i].inmemory) {
			/* no integer argument registers */

		/* float/double in memory can be copied like int/longs */

		s1 = md->params[i].regoff + cd->stackframesize * 8 + 4;
		s2 = nmd->params[j].regoff;

		M_ILD(REG_ITMP1, REG_SP, s1);
		M_IST(REG_ITMP1, REG_SP, s2);
		if (IS_2_WORD_TYPE(t)) {
			/* second 32-bit word of a long/double */
			M_ILD(REG_ITMP1, REG_SP, s1 + 4);
			M_IST(REG_ITMP1, REG_SP, s2 + 4);

	/* if function is static, put class into second argument */

	if (m->flags & ACC_STATIC)
		M_AST_IMM(m->class, REG_SP, 1 * 4);

	/* put env into first argument */

	M_AST_IMM(_Jv_env, REG_SP, 0 * 4);

	/* call the native function */

	/* save return value */

	switch (md->returntype.type) {
		M_IST(REG_RESULT, REG_SP, 1 * 8);
		M_LST(REG_RESULT_PACKED, REG_SP, 1 * 8);
		emit_fsts_membase(cd, REG_SP, 1 * 8);
		emit_fstl_membase(cd, REG_SP, 1 * 8);

#if !defined(NDEBUG)
	emit_verbosecall_exit(jd);

	/* remove native stackframe info */

	M_MOV(REG_SP, REG_ITMP1);
	M_AADD_IMM(cd->stackframesize * 8, REG_ITMP1);

	M_AST(REG_ITMP1, REG_SP, 0 * 4);
	M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);

	M_MOV(REG_RESULT, REG_ITMP2);                 /* REG_ITMP3 == REG_RESULT2 */

	/* restore return value */

	switch (md->returntype.type) {
		M_ILD(REG_RESULT, REG_SP, 1 * 8);
		M_LLD(REG_RESULT_PACKED, REG_SP, 1 * 8);
		emit_flds_membase(cd, REG_SP, 1 * 8);
		emit_fldl_membase(cd, REG_SP, 1 * 8);

	M_AADD_IMM(cd->stackframesize * 8, REG_SP);

	/* check for exception */

	/* handle exception */

	M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
	M_ALD(REG_ITMP2_XPC, REG_SP, 0);
	/* rewind XPC so it points into the faulting call -- presumably
	   ra - 2 lands inside the call instruction; confirm against the
	   exception handler's expectations */
	M_ASUB_IMM(2, REG_ITMP2_XPC);

	M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);

	/* generate patcher stubs */

	emit_patcher_stubs(jd);
3723 * These are local overrides for various environment variables in Emacs.
3724 * Please do not remove this and leave it at the end of the file, where
3725 * Emacs will automagically detect them.
3726 * ---------------------------------------------------------------------
3729 * indent-tabs-mode: t
3733 * vim:noexpandtab:sw=4:ts=4: