1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
25 $Id: codegen.c 8039 2007-06-07 13:46:55Z michi $
37 #include "vm/jit/i386/md-abi.h"
39 #include "vm/jit/i386/codegen.h"
40 #include "vm/jit/i386/emit.h"
42 #include "mm/memory.h"
43 #include "native/jni.h"
44 #include "native/native.h"
46 #include "threads/lock-common.h"
48 #include "vm/builtin.h"
49 #include "vm/exceptions.h"
50 #include "vm/global.h"
51 #include "vm/stringlocal.h"
54 #include "vm/jit/abi.h"
55 #include "vm/jit/asmpart.h"
56 #include "vm/jit/codegen-common.h"
57 #include "vm/jit/dseg.h"
58 #include "vm/jit/emit-common.h"
59 #include "vm/jit/jit.h"
60 #include "vm/jit/parse.h"
61 #include "vm/jit/patcher.h"
62 #include "vm/jit/reg.h"
63 #include "vm/jit/replace.h"
64 #include "vm/jit/stacktrace.h"
66 #if defined(ENABLE_SSA)
67 # include "vm/jit/optimizing/lsra.h"
68 # include "vm/jit/optimizing/ssa.h"
69 #elif defined(ENABLE_LSRA)
70 # include "vm/jit/allocator/lsra.h"
73 #include "vmcore/loader.h"
74 #include "vmcore/options.h"
75 #include "vmcore/utf8.h"
78 /* codegen_emit ****************************************************************
80 Generates machine code.
82 *******************************************************************************/
84 bool codegen_emit(jitdata *jd)
90 s4 len, s1, s2, s3, d, disp;
96 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
97 builtintable_entry *bte;
100 unresolved_field *uf;
103 #if defined(ENABLE_SSA)
105 bool last_cmd_was_goto;
107 last_cmd_was_goto = false;
111 /* get required compiler data */
118 /* prevent compiler warnings */
129 s4 savedregs_num = 0;
132 /* space to save used callee saved registers */
134 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
136 /* float register are saved on 2 4-byte stackslots */
137 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse) * 2;
139 cd->stackframesize = rd->memuse + savedregs_num;
142 #if defined(ENABLE_THREADS)
143 /* space to save argument of monitor_enter */
145 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
146 /* reserve 2 slots for long/double return values for monitorexit */
148 if (IS_2_WORD_TYPE(m->parseddesc->returntype.type))
149 cd->stackframesize += 2;
151 cd->stackframesize++;
155 /* create method header */
157 /* Keep stack of non-leaf functions 16-byte aligned. */
159 if (!jd->isleafmethod)
160 cd->stackframesize |= 0x3;
162 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
163 (void) dseg_add_unique_s4(cd, cd->stackframesize * 4); /* FrameSize */
165 #if defined(ENABLE_THREADS)
166 /* IsSync contains the offset relative to the stack pointer for the
167 argument of monitor_exit used in the exception handler. Since the
168 offset could be zero and give a wrong meaning of the flag it is
172 if (checksync && (m->flags & ACC_SYNCHRONIZED))
173 (void) dseg_add_unique_s4(cd, (rd->memuse + 1) * 4); /* IsSync */
176 (void) dseg_add_unique_s4(cd, 0); /* IsSync */
178 (void) dseg_add_unique_s4(cd, jd->isleafmethod); /* IsLeaf */
179 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
180 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
182 /* adds a reference for the length of the line number counter. We don't
183 know the size yet, since we evaluate the information during code
184 generation, to save one additional iteration over the whole
185 instructions. During code optimization the position could have changed
186 to the information gotten from the class file */
187 (void) dseg_addlinenumbertablesize(cd);
189 (void) dseg_add_unique_s4(cd, jd->exceptiontablelength); /* ExTableSize */
191 /* create exception table */
193 for (ex = jd->exceptiontable; ex != NULL; ex = ex->down) {
194 dseg_add_target(cd, ex->start);
195 dseg_add_target(cd, ex->end);
196 dseg_add_target(cd, ex->handler);
197 (void) dseg_add_unique_address(cd, ex->catchtype.any);
200 #if defined(ENABLE_PROFILING)
201 /* generate method profiling code */
203 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
204 /* count frequency */
206 M_MOV_IMM(code, REG_ITMP3);
207 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
211 /* create stack frame (if necessary) */
213 if (cd->stackframesize)
214 M_ASUB_IMM(cd->stackframesize * 4, REG_SP);
216 /* save return address and used callee saved registers */
218 p = cd->stackframesize;
219 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
220 p--; M_AST(rd->savintregs[i], REG_SP, p * 4);
222 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
223 p-=2; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 4);
226 /* take arguments out of register or stack frame */
231 for (p = 0, l = 0; p < md->paramcount; p++) {
232 t = md->paramtypes[p].type;
234 varindex = jd->local_map[l * 5 + t];
235 #if defined(ENABLE_SSA)
237 if (varindex != UNUSED)
238 varindex = ls->var_0[varindex];
239 if ((varindex != UNUSED) && (ls->lifetime[varindex].type == UNUSED))
244 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
247 if (varindex == UNUSED)
252 s1 = md->params[p].regoff;
254 if (IS_INT_LNG_TYPE(t)) { /* integer args */
255 if (!md->params[p].inmemory) { /* register arguments */
256 log_text("integer register argument");
258 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
259 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
261 else { /* reg arg -> spilled */
262 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
265 else { /* stack arguments */
266 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
267 emit_mov_membase_reg( /* + 4 for return address */
268 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4, var->vv.regoff);
269 /* + 4 for return address */
271 else { /* stack arg -> spilled */
272 if (!IS_2_WORD_TYPE(t)) {
273 #if defined(ENABLE_SSA)
274 /* no copy avoiding by now possible with SSA */
276 emit_mov_membase_reg( /* + 4 for return address */
277 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4,
279 emit_mov_reg_membase(
280 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
283 #endif /*defined(ENABLE_SSA)*/
284 /* reuse stack slot and avoid copying */
285 var->vv.regoff = cd->stackframesize + s1 + 1;
289 #if defined(ENABLE_SSA)
290 /* no copy avoiding by now possible with SSA */
292 emit_mov_membase_reg( /* + 4 for return address */
293 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4,
295 emit_mov_reg_membase(
296 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
297 emit_mov_membase_reg( /* + 4 for return address */
298 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4 + 4,
300 emit_mov_reg_membase(
301 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4 + 4);
304 #endif /*defined(ENABLE_SSA)*/
305 /* reuse stack slot and avoid copying */
306 var->vv.regoff = cd->stackframesize + s1 + 1;
311 else { /* floating args */
312 if (!md->params[p].inmemory) { /* register arguments */
313 log_text("There are no float argument registers!");
315 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
316 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
317 } else { /* reg arg -> spilled */
318 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 4 */
322 else { /* stack arguments */
323 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
326 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
328 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
333 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
335 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
338 } else { /* stack-arg -> spilled */
339 #if defined(ENABLE_SSA)
340 /* no copy avoiding by now possible with SSA */
342 emit_mov_membase_reg(
343 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4, REG_ITMP1);
344 emit_mov_reg_membase(
345 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
348 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
349 emit_fstps_membase(cd, REG_SP, var->vv.regoff * 4);
353 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
354 emit_fstpl_membase(cd, REG_SP, var->vv.regoff * 4);
358 #endif /*defined(ENABLE_SSA)*/
359 /* reuse stack slot and avoid copying */
360 var->vv.regoff = cd->stackframesize + s1 + 1;
366 /* call monitorenter function */
368 #if defined(ENABLE_THREADS)
369 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
372 if (m->flags & ACC_STATIC) {
373 M_MOV_IMM(&m->class->object.header, REG_ITMP1);
376 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 4 + 4);
379 M_ALD_MEM(REG_ITMP1, EXCEPTION_HARDWARE_NULLPOINTER);
382 M_AST(REG_ITMP1, REG_SP, s1 * 4);
383 M_AST(REG_ITMP1, REG_SP, 0 * 4);
384 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
390 emit_verbosecall_enter(jd);
395 #if defined(ENABLE_SSA)
396 /* with SSA the Header is Basic Block 0 - insert phi Moves if necessary */
398 codegen_emit_phi_moves(jd, ls->basicblocks[0]);
401 /* end of header generation */
403 /* create replacement points */
405 REPLACEMENT_POINTS_INIT(cd, jd);
407 /* walk through all basic blocks */
409 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
411 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
413 if (bptr->flags >= BBREACHED) {
414 /* branch resolving */
416 codegen_resolve_branchrefs(cd, bptr);
418 /* handle replacement points */
420 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
422 #if defined(ENABLE_REPLACEMENT)
423 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
424 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
426 disp = (s4) &(m->hitcountdown);
427 M_ISUB_IMM_MEMABS(1, disp);
433 /* copy interface registers to their destination */
438 #if defined(ENABLE_PROFILING)
439 /* generate basic block profiling code */
441 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
442 /* count frequency */
444 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
445 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
449 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
450 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
453 # if defined(ENABLE_SSA)
455 last_cmd_was_goto = false;
459 var = VAR(bptr->invars[len]);
460 if (bptr->type != BBTYPE_STD) {
461 if (!IS_2_WORD_TYPE(var->type)) {
462 if (bptr->type == BBTYPE_EXH) {
463 d = codegen_reg_of_var(0, var, REG_ITMP1);
464 M_INTMOVE(REG_ITMP1, d);
465 emit_store(jd, NULL, var, d);
469 log_text("copy interface registers(EXH, SBR): longs \
470 have to be in memory (begin 1)");
478 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
482 var = VAR(bptr->invars[len]);
483 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
484 if (!IS_2_WORD_TYPE(var->type)) {
485 if (bptr->type == BBTYPE_EXH) {
486 d = codegen_reg_of_var(0, var, REG_ITMP1);
487 M_INTMOVE(REG_ITMP1, d);
488 emit_store(jd, NULL, var, d);
492 log_text("copy interface registers: longs have to be in \
499 assert((var->flags & INOUT));
504 /* walk through all instructions */
509 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
510 if (iptr->line != currentline) {
511 dseg_addlinenumber(cd, iptr->line);
512 currentline = iptr->line;
515 MCODECHECK(1024); /* 1kB should be enough */
518 case ICMD_NOP: /* ... ==> ... */
519 case ICMD_POP: /* ..., value ==> ... */
520 case ICMD_POP2: /* ..., value, value ==> ... */
523 case ICMD_INLINE_START:
525 REPLACEMENT_POINT_INLINE_START(cd, iptr);
528 case ICMD_INLINE_BODY:
530 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
531 dseg_addlinenumber_inline_start(cd, iptr);
532 dseg_addlinenumber(cd, iptr->line);
535 case ICMD_INLINE_END:
537 dseg_addlinenumber_inline_end(cd, iptr);
538 dseg_addlinenumber(cd, iptr->line);
541 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
543 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
544 emit_nullpointer_check(cd, iptr, s1);
547 /* constant operations ************************************************/
549 case ICMD_ICONST: /* ... ==> ..., constant */
551 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
552 ICONST(d, iptr->sx.val.i);
553 emit_store_dst(jd, iptr, d);
556 case ICMD_LCONST: /* ... ==> ..., constant */
558 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
559 LCONST(d, iptr->sx.val.l);
560 emit_store_dst(jd, iptr, d);
563 case ICMD_FCONST: /* ... ==> ..., constant */
565 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
566 if (iptr->sx.val.f == 0.0) {
570 if (iptr->sx.val.i == 0x80000000) {
574 } else if (iptr->sx.val.f == 1.0) {
577 } else if (iptr->sx.val.f == 2.0) {
583 disp = dseg_add_float(cd, iptr->sx.val.f);
584 emit_mov_imm_reg(cd, 0, REG_ITMP1);
586 emit_flds_membase(cd, REG_ITMP1, disp);
588 emit_store_dst(jd, iptr, d);
591 case ICMD_DCONST: /* ... ==> ..., constant */
593 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
594 if (iptr->sx.val.d == 0.0) {
598 if (iptr->sx.val.l == 0x8000000000000000LL) {
602 } else if (iptr->sx.val.d == 1.0) {
605 } else if (iptr->sx.val.d == 2.0) {
611 disp = dseg_add_double(cd, iptr->sx.val.d);
612 emit_mov_imm_reg(cd, 0, REG_ITMP1);
614 emit_fldl_membase(cd, REG_ITMP1, disp);
616 emit_store_dst(jd, iptr, d);
619 case ICMD_ACONST: /* ... ==> ..., constant */
621 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
623 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
624 codegen_addpatchref(cd, PATCHER_aconst,
625 iptr->sx.val.c.ref, 0);
630 if (iptr->sx.val.anyptr == NULL)
633 M_MOV_IMM(iptr->sx.val.anyptr, d);
635 emit_store_dst(jd, iptr, d);
639 /* load/store/copy/move operations ************************************/
657 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
662 /* integer operations *************************************************/
664 case ICMD_INEG: /* ..., value ==> ..., - value */
666 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
667 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
670 emit_store_dst(jd, iptr, d);
673 case ICMD_LNEG: /* ..., value ==> ..., - value */
675 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
676 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
678 M_NEG(GET_LOW_REG(d));
679 M_IADDC_IMM(0, GET_HIGH_REG(d));
680 M_NEG(GET_HIGH_REG(d));
681 emit_store_dst(jd, iptr, d);
684 case ICMD_I2L: /* ..., value ==> ..., value */
686 s1 = emit_load_s1(jd, iptr, EAX);
687 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
690 M_LNGMOVE(EAX_EDX_PACKED, d);
691 emit_store_dst(jd, iptr, d);
694 case ICMD_L2I: /* ..., value ==> ..., value */
696 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
697 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
699 emit_store_dst(jd, iptr, d);
702 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
704 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
705 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
709 emit_store_dst(jd, iptr, d);
712 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
714 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
715 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
717 emit_store_dst(jd, iptr, d);
720 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
722 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
723 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
725 emit_store_dst(jd, iptr, d);
729 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
731 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
732 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
733 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
740 emit_store_dst(jd, iptr, d);
744 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
745 /* sx.val.i = constant */
747 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
748 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
750 /* `inc reg' is slower on p4's (regarding to ia32
751 optimization reference manual and benchmarks) and as
755 M_IADD_IMM(iptr->sx.val.i, d);
756 emit_store_dst(jd, iptr, d);
759 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
761 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
762 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
763 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
764 M_INTMOVE(s1, GET_LOW_REG(d));
765 M_IADD(s2, GET_LOW_REG(d));
766 /* don't use REG_ITMP1 */
767 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
768 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
769 M_INTMOVE(s1, GET_HIGH_REG(d));
770 M_IADDC(s2, GET_HIGH_REG(d));
771 emit_store_dst(jd, iptr, d);
774 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
775 /* sx.val.l = constant */
777 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
778 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
780 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
781 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
782 emit_store_dst(jd, iptr, d);
785 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
787 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
788 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
789 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
791 M_INTMOVE(s1, REG_ITMP1);
792 M_ISUB(s2, REG_ITMP1);
793 M_INTMOVE(REG_ITMP1, d);
799 emit_store_dst(jd, iptr, d);
802 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
803 /* sx.val.i = constant */
805 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
806 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
808 M_ISUB_IMM(iptr->sx.val.i, d);
809 emit_store_dst(jd, iptr, d);
812 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
814 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
815 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
816 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
817 if (s2 == GET_LOW_REG(d)) {
818 M_INTMOVE(s1, REG_ITMP1);
819 M_ISUB(s2, REG_ITMP1);
820 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
823 M_INTMOVE(s1, GET_LOW_REG(d));
824 M_ISUB(s2, GET_LOW_REG(d));
826 /* don't use REG_ITMP1 */
827 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
828 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
829 if (s2 == GET_HIGH_REG(d)) {
830 M_INTMOVE(s1, REG_ITMP2);
831 M_ISUBB(s2, REG_ITMP2);
832 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
835 M_INTMOVE(s1, GET_HIGH_REG(d));
836 M_ISUBB(s2, GET_HIGH_REG(d));
838 emit_store_dst(jd, iptr, d);
841 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
842 /* sx.val.l = constant */
844 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
845 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
847 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
848 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
849 emit_store_dst(jd, iptr, d);
852 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
854 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
855 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
856 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
863 emit_store_dst(jd, iptr, d);
866 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
867 /* sx.val.i = constant */
869 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
870 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
871 M_IMUL_IMM(s1, iptr->sx.val.i, d);
872 emit_store_dst(jd, iptr, d);
875 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
877 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
878 s2 = emit_load_s2_low(jd, iptr, EDX);
879 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
881 M_INTMOVE(s1, REG_ITMP2);
882 M_IMUL(s2, REG_ITMP2);
884 s1 = emit_load_s1_low(jd, iptr, EAX);
885 s2 = emit_load_s2_high(jd, iptr, EDX);
888 M_IADD(EDX, REG_ITMP2);
890 s1 = emit_load_s1_low(jd, iptr, EAX);
891 s2 = emit_load_s2_low(jd, iptr, EDX);
894 M_INTMOVE(EAX, GET_LOW_REG(d));
895 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
897 emit_store_dst(jd, iptr, d);
900 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
901 /* sx.val.l = constant */
903 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
904 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
905 ICONST(EAX, iptr->sx.val.l);
907 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
908 M_IADD(REG_ITMP2, EDX);
909 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
910 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
911 M_IADD(REG_ITMP2, EDX);
912 M_LNGMOVE(EAX_EDX_PACKED, d);
913 emit_store_dst(jd, iptr, d);
916 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
918 s1 = emit_load_s1(jd, iptr, EAX);
919 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
920 d = codegen_reg_of_dst(jd, iptr, EAX);
921 emit_arithmetic_check(cd, iptr, s2);
923 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
925 /* check as described in jvm spec */
927 M_CMP_IMM(0x80000000, EAX);
934 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
935 emit_store_dst(jd, iptr, d);
938 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
940 s1 = emit_load_s1(jd, iptr, EAX);
941 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
942 d = codegen_reg_of_dst(jd, iptr, EDX);
943 emit_arithmetic_check(cd, iptr, s2);
945 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
947 /* check as described in jvm spec */
949 M_CMP_IMM(0x80000000, EAX);
957 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
958 emit_store_dst(jd, iptr, d);
961 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
962 /* sx.val.i = constant */
964 /* TODO: optimize for `/ 2' */
965 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
966 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
970 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
971 M_SRA_IMM(iptr->sx.val.i, d);
972 emit_store_dst(jd, iptr, d);
975 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
976 /* sx.val.i = constant */
978 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
979 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
981 M_MOV(s1, REG_ITMP1);
985 M_AND_IMM(iptr->sx.val.i, d);
987 M_BGE(2 + 2 + 6 + 2);
988 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
990 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
992 emit_store_dst(jd, iptr, d);
995 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
996 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
998 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
999 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1001 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
1002 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
1003 /* XXX could be optimized */
1004 emit_arithmetic_check(cd, iptr, REG_ITMP3);
1006 bte = iptr->sx.s23.s3.bte;
1009 M_LST(s2, REG_SP, 2 * 4);
1011 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1012 M_LST(s1, REG_SP, 0 * 4);
1014 M_MOV_IMM(bte->fp, REG_ITMP3);
1016 emit_store_dst(jd, iptr, d);
1019 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1020 /* sx.val.i = constant */
1022 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1023 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1025 M_TEST(GET_HIGH_REG(d));
1027 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1028 M_IADDC_IMM(0, GET_HIGH_REG(d));
1029 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1030 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1031 emit_store_dst(jd, iptr, d);
1035 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1036 /* sx.val.l = constant */
1038 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1039 if (iptr->dst.var->flags & INMEMORY) {
1040 if (iptr->s1.var->flags & INMEMORY) {
1041 /* Alpha algorithm */
1043 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 4);
1045 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 4 + 4);
1051 /* TODO: hmm, don't know if this is always correct */
1053 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1055 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1061 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4, REG_ITMP1);
1062 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4 + 4, REG_ITMP2);
1064 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1065 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1066 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 4 + 4);
1067 emit_jcc(cd, CC_GE, disp);
1069 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4, REG_ITMP1);
1070 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4 + 4, REG_ITMP2);
1072 emit_neg_reg(cd, REG_ITMP1);
1073 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1074 emit_neg_reg(cd, REG_ITMP2);
1076 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1077 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1079 emit_neg_reg(cd, REG_ITMP1);
1080 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1081 emit_neg_reg(cd, REG_ITMP2);
1083 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 4);
1084 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 4 + 4);
1088 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1089 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1091 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1092 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1093 M_TEST(GET_LOW_REG(s1));
1099 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1101 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1102 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1103 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1104 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1107 emit_store_dst(jd, iptr, d);
1110 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1111 /* sx.val.i = constant */
1113 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1114 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1116 M_SLL_IMM(iptr->sx.val.i, d);
1117 emit_store_dst(jd, iptr, d);
1120 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1122 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1123 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1124 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1125 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1128 emit_store_dst(jd, iptr, d);
1131 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1132 /* sx.val.i = constant */
1134 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1135 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1137 M_SRA_IMM(iptr->sx.val.i, d);
1138 emit_store_dst(jd, iptr, d);
1141 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1143 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1144 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1145 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1146 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1149 emit_store_dst(jd, iptr, d);
1152 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1153 /* sx.val.i = constant */
1155 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1156 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1158 M_SRL_IMM(iptr->sx.val.i, d);
1159 emit_store_dst(jd, iptr, d);
1162 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1164 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1165 s2 = emit_load_s2(jd, iptr, ECX);
1166 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1169 M_TEST_IMM(32, ECX);
1171 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1172 M_CLR(GET_LOW_REG(d));
1173 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1174 M_SLL(GET_LOW_REG(d));
1175 emit_store_dst(jd, iptr, d);
1178 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1179 /* sx.val.i = constant */
1181 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1182 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1184 if (iptr->sx.val.i & 0x20) {
1185 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1186 M_CLR(GET_LOW_REG(d));
1187 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1191 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1193 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1195 emit_store_dst(jd, iptr, d);
1198 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1200 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1201 s2 = emit_load_s2(jd, iptr, ECX);
1202 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1205 M_TEST_IMM(32, ECX);
1207 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1208 M_SRA_IMM(31, GET_HIGH_REG(d));
1209 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1210 M_SRA(GET_HIGH_REG(d));
1211 emit_store_dst(jd, iptr, d);
1214 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1215 /* sx.val.i = constant */
1217 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1218 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1220 if (iptr->sx.val.i & 0x20) {
1221 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1222 M_SRA_IMM(31, GET_HIGH_REG(d));
1223 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1227 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1229 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1231 emit_store_dst(jd, iptr, d);
1234 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1236 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1237 s2 = emit_load_s2(jd, iptr, ECX);
1238 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1241 M_TEST_IMM(32, ECX);
1243 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1244 M_CLR(GET_HIGH_REG(d));
1245 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1246 M_SRL(GET_HIGH_REG(d));
1247 emit_store_dst(jd, iptr, d);
1250 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1251 /* sx.val.l = constant */
1253 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1254 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1256 if (iptr->sx.val.i & 0x20) {
1257 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1258 M_CLR(GET_HIGH_REG(d));
1259 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1263 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1265 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1267 emit_store_dst(jd, iptr, d);
1270 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1272 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1273 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1274 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1281 emit_store_dst(jd, iptr, d);
1284 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1285 /* sx.val.i = constant */
1287 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1288 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1290 M_AND_IMM(iptr->sx.val.i, d);
1291 emit_store_dst(jd, iptr, d);
1294 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1296 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1297 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1298 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1299 if (s2 == GET_LOW_REG(d))
1300 M_AND(s1, GET_LOW_REG(d));
1302 M_INTMOVE(s1, GET_LOW_REG(d));
1303 M_AND(s2, GET_LOW_REG(d));
1305 /* REG_ITMP1 probably contains low 32-bit of destination */
1306 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1307 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1308 if (s2 == GET_HIGH_REG(d))
1309 M_AND(s1, GET_HIGH_REG(d));
1311 M_INTMOVE(s1, GET_HIGH_REG(d));
1312 M_AND(s2, GET_HIGH_REG(d));
1314 emit_store_dst(jd, iptr, d);
1317 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1318 /* sx.val.l = constant */
1320 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1321 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1323 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1324 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1325 emit_store_dst(jd, iptr, d);
1328 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1330 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1331 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1332 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1339 emit_store_dst(jd, iptr, d);
1342 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1343 /* sx.val.i = constant */
1345 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1346 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1348 M_OR_IMM(iptr->sx.val.i, d);
1349 emit_store_dst(jd, iptr, d);
1352 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1354 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1355 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1356 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1357 if (s2 == GET_LOW_REG(d))
1358 M_OR(s1, GET_LOW_REG(d));
1360 M_INTMOVE(s1, GET_LOW_REG(d));
1361 M_OR(s2, GET_LOW_REG(d));
1363 /* REG_ITMP1 probably contains low 32-bit of destination */
1364 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1365 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1366 if (s2 == GET_HIGH_REG(d))
1367 M_OR(s1, GET_HIGH_REG(d));
1369 M_INTMOVE(s1, GET_HIGH_REG(d));
1370 M_OR(s2, GET_HIGH_REG(d));
1372 emit_store_dst(jd, iptr, d);
1375 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1376 /* sx.val.l = constant */
1378 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1379 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1381 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1382 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1383 emit_store_dst(jd, iptr, d);
1386 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1388 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1389 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1390 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1397 emit_store_dst(jd, iptr, d);
1400 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1401 /* sx.val.i = constant */
1403 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1404 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1406 M_XOR_IMM(iptr->sx.val.i, d);
1407 emit_store_dst(jd, iptr, d);
1410 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1412 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1413 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1414 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1415 if (s2 == GET_LOW_REG(d))
1416 M_XOR(s1, GET_LOW_REG(d));
1418 M_INTMOVE(s1, GET_LOW_REG(d));
1419 M_XOR(s2, GET_LOW_REG(d));
1421 /* REG_ITMP1 probably contains low 32-bit of destination */
1422 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1423 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1424 if (s2 == GET_HIGH_REG(d))
1425 M_XOR(s1, GET_HIGH_REG(d));
1427 M_INTMOVE(s1, GET_HIGH_REG(d));
1428 M_XOR(s2, GET_HIGH_REG(d));
1430 emit_store_dst(jd, iptr, d);
1433 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1434 /* sx.val.l = constant */
1436 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1437 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1439 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1440 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1441 emit_store_dst(jd, iptr, d);
1445 /* floating operations ************************************************/
1447 case ICMD_FNEG: /* ..., value ==> ..., - value */
1449 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1450 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1452 emit_store_dst(jd, iptr, d);
1455 case ICMD_DNEG: /* ..., value ==> ..., - value */
1457 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1458 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1460 emit_store_dst(jd, iptr, d);
1463 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1465 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1466 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1467 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1469 emit_store_dst(jd, iptr, d);
1472 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1474 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1475 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1476 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1478 emit_store_dst(jd, iptr, d);
1481 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1483 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1484 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1485 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1487 emit_store_dst(jd, iptr, d);
1490 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1492 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1493 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1494 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1496 emit_store_dst(jd, iptr, d);
1499 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1501 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1502 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1503 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1505 emit_store_dst(jd, iptr, d);
1508 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1510 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1511 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1512 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1514 emit_store_dst(jd, iptr, d);
1517 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1519 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1520 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1521 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1523 emit_store_dst(jd, iptr, d);
1526 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1528 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1529 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1530 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1532 emit_store_dst(jd, iptr, d);
1535 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1537 /* exchanged to skip fxch */
1538 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1539 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1540 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1541 /* emit_fxch(cd); */
1546 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1547 emit_store_dst(jd, iptr, d);
1548 emit_ffree_reg(cd, 0);
1552 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1554 /* exchanged to skip fxch */
1555 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1556 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1557 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1558 /* emit_fxch(cd); */
1563 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1564 emit_store_dst(jd, iptr, d);
1565 emit_ffree_reg(cd, 0);
1569 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1570 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1572 var = VAROP(iptr->s1);
1573 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1575 if (var->flags & INMEMORY) {
1576 emit_fildl_membase(cd, REG_SP, var->vv.regoff * 4);
1578 /* XXX not thread safe! */
1579 disp = dseg_add_unique_s4(cd, 0);
1580 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1582 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1583 emit_fildl_membase(cd, REG_ITMP1, disp);
1586 emit_store_dst(jd, iptr, d);
1589 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1590 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1592 var = VAROP(iptr->s1);
1593 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1594 if (var->flags & INMEMORY) {
1595 emit_fildll_membase(cd, REG_SP, var->vv.regoff * 4);
1598 log_text("L2F: longs have to be in memory");
1601 emit_store_dst(jd, iptr, d);
1604 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1606 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1607 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1609 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1612 /* Round to zero, 53-bit mode, exception masked */
1613 disp = dseg_add_s4(cd, 0x0e7f);
1614 emit_fldcw_membase(cd, REG_ITMP1, disp);
1616 var = VAROP(iptr->dst);
1617 var1 = VAROP(iptr->s1);
1619 if (var->flags & INMEMORY) {
1620 emit_fistpl_membase(cd, REG_SP, var->vv.regoff * 4);
1622 /* Round to nearest, 53-bit mode, exceptions masked */
1623 disp = dseg_add_s4(cd, 0x027f);
1624 emit_fldcw_membase(cd, REG_ITMP1, disp);
1626 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1627 REG_SP, var->vv.regoff * 4);
1630 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1632 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1635 /* XXX not thread safe! */
1636 disp = dseg_add_unique_s4(cd, 0);
1637 emit_fistpl_membase(cd, REG_ITMP1, disp);
1638 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1640 /* Round to nearest, 53-bit mode, exceptions masked */
1641 disp = dseg_add_s4(cd, 0x027f);
1642 emit_fldcw_membase(cd, REG_ITMP1, disp);
1644 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1647 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1648 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1651 emit_jcc(cd, CC_NE, disp);
1653 /* XXX: change this when we use registers */
1654 emit_flds_membase(cd, REG_SP, var1->vv.regoff * 4);
1655 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1656 emit_call_reg(cd, REG_ITMP1);
1658 if (var->flags & INMEMORY) {
1659 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1662 M_INTMOVE(REG_RESULT, var->vv.regoff);
1666 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1668 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1669 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1671 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1674 /* Round to zero, 53-bit mode, exception masked */
1675 disp = dseg_add_s4(cd, 0x0e7f);
1676 emit_fldcw_membase(cd, REG_ITMP1, disp);
1678 var = VAROP(iptr->dst);
1679 var1 = VAROP(iptr->s1);
1681 if (var->flags & INMEMORY) {
1682 emit_fistpl_membase(cd, REG_SP, var->vv.regoff * 4);
1684 /* Round to nearest, 53-bit mode, exceptions masked */
1685 disp = dseg_add_s4(cd, 0x027f);
1686 emit_fldcw_membase(cd, REG_ITMP1, disp);
1688 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1689 REG_SP, var->vv.regoff * 4);
1692 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1694 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1697 /* XXX not thread safe! */
1698 disp = dseg_add_unique_s4(cd, 0);
1699 emit_fistpl_membase(cd, REG_ITMP1, disp);
1700 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1702 /* Round to nearest, 53-bit mode, exceptions masked */
1703 disp = dseg_add_s4(cd, 0x027f);
1704 emit_fldcw_membase(cd, REG_ITMP1, disp);
1706 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1709 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1710 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1713 emit_jcc(cd, CC_NE, disp);
1715 /* XXX: change this when we use registers */
1716 emit_fldl_membase(cd, REG_SP, var1->vv.regoff * 4);
1717 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1718 emit_call_reg(cd, REG_ITMP1);
1720 if (var->flags & INMEMORY) {
1721 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1723 M_INTMOVE(REG_RESULT, var->vv.regoff);
1727 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1729 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1730 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1732 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1735 /* Round to zero, 53-bit mode, exception masked */
1736 disp = dseg_add_s4(cd, 0x0e7f);
1737 emit_fldcw_membase(cd, REG_ITMP1, disp);
1739 var = VAROP(iptr->dst);
1740 var1 = VAROP(iptr->s1);
1742 if (var->flags & INMEMORY) {
1743 emit_fistpll_membase(cd, REG_SP, var->vv.regoff * 4);
1745 /* Round to nearest, 53-bit mode, exceptions masked */
1746 disp = dseg_add_s4(cd, 0x027f);
1747 emit_fldcw_membase(cd, REG_ITMP1, disp);
1749 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1750 REG_SP, var->vv.regoff * 4 + 4);
1753 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1755 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1758 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1760 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4 + 4);
1762 emit_jcc(cd, CC_NE, disp);
1764 emit_alu_imm_membase(cd, ALU_CMP, 0,
1765 REG_SP, var->vv.regoff * 4);
1768 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1770 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1772 emit_jcc(cd, CC_NE, disp);
1774 /* XXX: change this when we use registers */
1775 emit_flds_membase(cd, REG_SP, var1->vv.regoff * 4);
1776 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1777 emit_call_reg(cd, REG_ITMP1);
1778 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1779 emit_mov_reg_membase(cd, REG_RESULT2,
1780 REG_SP, var->vv.regoff * 4 + 4);
1783 log_text("F2L: longs have to be in memory");
1788 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1790 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1791 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1793 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1796 /* Round to zero, 53-bit mode, exception masked */
1797 disp = dseg_add_s4(cd, 0x0e7f);
1798 emit_fldcw_membase(cd, REG_ITMP1, disp);
1800 var = VAROP(iptr->dst);
1801 var1 = VAROP(iptr->s1);
1803 if (var->flags & INMEMORY) {
1804 emit_fistpll_membase(cd, REG_SP, var->vv.regoff * 4);
1806 /* Round to nearest, 53-bit mode, exceptions masked */
1807 disp = dseg_add_s4(cd, 0x027f);
1808 emit_fldcw_membase(cd, REG_ITMP1, disp);
1810 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1811 REG_SP, var->vv.regoff * 4 + 4);
1814 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1816 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1819 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1821 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4 + 4);
1823 emit_jcc(cd, CC_NE, disp);
1825 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff * 4);
1828 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1830 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1832 emit_jcc(cd, CC_NE, disp);
1834 /* XXX: change this when we use registers */
1835 emit_fldl_membase(cd, REG_SP, var1->vv.regoff * 4);
1836 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1837 emit_call_reg(cd, REG_ITMP1);
1838 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1839 emit_mov_reg_membase(cd, REG_RESULT2,
1840 REG_SP, var->vv.regoff * 4 + 4);
1843 log_text("D2L: longs have to be in memory");
1848 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1850 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1851 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1853 emit_store_dst(jd, iptr, d);
1856 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1858 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1859 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1861 emit_store_dst(jd, iptr, d);
1864 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1867 /* exchanged to skip fxch */
1868 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1869 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1870 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1871 /* emit_fxch(cd); */
1874 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1875 emit_jcc(cd, CC_E, 6);
1876 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1878 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1879 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1880 emit_jcc(cd, CC_B, 3 + 5);
1881 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1882 emit_jmp_imm(cd, 3);
1883 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1884 emit_store_dst(jd, iptr, d);
1887 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1890 /* exchanged to skip fxch */
1891 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1892 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1893 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1894 /* emit_fxch(cd); */
1897 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1898 emit_jcc(cd, CC_E, 3);
1899 emit_movb_imm_reg(cd, 1, REG_AH);
1901 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1902 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1903 emit_jcc(cd, CC_B, 3 + 5);
1904 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1905 emit_jmp_imm(cd, 3);
1906 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1907 emit_store_dst(jd, iptr, d);
1911 /* memory operations **************************************************/
1913 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1915 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1916 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1917 /* implicit null-pointer check */
1918 M_ILD(d, s1, OFFSET(java_arrayheader, size));
1919 emit_store_dst(jd, iptr, d);
1922 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1924 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1925 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1926 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1927 /* implicit null-pointer check */
1928 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1929 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray, data[0]),
1931 emit_store_dst(jd, iptr, d);
1934 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1936 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1937 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1938 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1939 /* implicit null-pointer check */
1940 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1941 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray, data[0]),
1943 emit_store_dst(jd, iptr, d);
1946 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1948 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1949 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1950 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1951 /* implicit null-pointer check */
1952 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1953 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray, data[0]),
1955 emit_store_dst(jd, iptr, d);
1958 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1960 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1961 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1962 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1963 /* implicit null-pointer check */
1964 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1965 emit_mov_memindex_reg(cd, OFFSET(java_intarray, data[0]),
1967 emit_store_dst(jd, iptr, d);
1970 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1972 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1973 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1974 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1975 /* implicit null-pointer check */
1976 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1978 var = VAROP(iptr->dst);
1980 assert(var->flags & INMEMORY);
1981 emit_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]),
1982 s1, s2, 3, REG_ITMP3);
1983 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff * 4);
1984 emit_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]) + 4,
1985 s1, s2, 3, REG_ITMP3);
1986 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff * 4 + 4);
1989 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1991 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1992 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1993 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1994 /* implicit null-pointer check */
1995 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1996 emit_flds_memindex(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2);
1997 emit_store_dst(jd, iptr, d);
2000 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2002 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2003 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2004 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
2005 /* implicit null-pointer check */
2006 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2007 emit_fldl_memindex(cd, OFFSET(java_doublearray, data[0]), s1, s2,3);
2008 emit_store_dst(jd, iptr, d);
2011 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2013 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2014 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2015 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2016 /* implicit null-pointer check */
2017 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2018 emit_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]),
2020 emit_store_dst(jd, iptr, d);
2024 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2026 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2027 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2028 /* implicit null-pointer check */
2029 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2030 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2032 /* because EBP, ESI, EDI have no xH and xL nibbles */
2033 M_INTMOVE(s3, REG_ITMP3);
2036 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]),
2040 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2042 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2043 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2044 /* implicit null-pointer check */
2045 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2046 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2047 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]),
2051 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2053 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2054 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2055 /* implicit null-pointer check */
2056 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2057 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2058 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]),
2062 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2064 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2065 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2066 /* implicit null-pointer check */
2067 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2068 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2069 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]),
2073 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2075 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2076 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2077 /* implicit null-pointer check */
2078 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2080 var = VAROP(iptr->sx.s23.s3);
2082 assert(var->flags & INMEMORY);
2083 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff * 4, REG_ITMP3);
2084 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray, data[0])
2086 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff * 4 + 4, REG_ITMP3);
2087 emit_mov_reg_memindex(cd, REG_ITMP3,
2088 OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2091 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2093 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2094 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2095 /* implicit null-pointer check */
2096 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2097 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2098 emit_fstps_memindex(cd, OFFSET(java_floatarray, data[0]), s1, s2,2);
2101 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2103 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2104 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2105 /* implicit null-pointer check */
2106 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2107 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2108 emit_fstpl_memindex(cd, OFFSET(java_doublearray, data[0]),
2112 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2114 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2115 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2116 /* implicit null-pointer check */
2117 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2118 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2120 M_AST(s1, REG_SP, 0 * 4);
2121 M_AST(s3, REG_SP, 1 * 4);
2122 M_MOV_IMM(BUILTIN_canstore, REG_ITMP1);
2124 emit_exception_check(cd, iptr);
2126 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2127 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2128 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2129 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]),
2133 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2135 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2136 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2137 /* implicit null-pointer check */
2138 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2139 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2140 OFFSET(java_bytearray, data[0]), s1, s2, 0);
2143 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2145 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2146 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2147 /* implicit null-pointer check */
2148 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2149 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2150 OFFSET(java_chararray, data[0]), s1, s2, 1);
2153 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2155 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2156 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2157 /* implicit null-pointer check */
2158 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2159 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2160 OFFSET(java_shortarray, data[0]), s1, s2, 1);
2163 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2165 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2166 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2167 /* implicit null-pointer check */
2168 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2169 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2170 OFFSET(java_intarray, data[0]), s1, s2, 2);
2173 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2175 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2176 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2177 /* implicit null-pointer check */
2178 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2179 emit_mov_imm_memindex(cd,
2180 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2181 OFFSET(java_longarray, data[0]), s1, s2, 3);
2182 emit_mov_imm_memindex(cd,
2183 ((s4)iptr->sx.s23.s3.constval) >> 31,
2184 OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2187 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2189 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2190 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2191 /* implicit null-pointer check */
2192 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2193 emit_mov_imm_memindex(cd, 0,
2194 OFFSET(java_objectarray, data[0]), s1, s2, 2);
2198 case ICMD_GETSTATIC: /* ... ==> ..., value */
2200 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2201 uf = iptr->sx.s23.s3.uf;
2202 fieldtype = uf->fieldref->parseddesc.fd->type;
2205 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2209 fi = iptr->sx.s23.s3.fmiref->p.field;
2210 fieldtype = fi->type;
2211 disp = (ptrint) &(fi->value);
2213 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2214 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2217 M_MOV_IMM(disp, REG_ITMP1);
2218 switch (fieldtype) {
2221 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2222 M_ILD(d, REG_ITMP1, 0);
2225 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2226 M_LLD(d, REG_ITMP1, 0);
2229 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2230 M_FLD(d, REG_ITMP1, 0);
2233 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2234 M_DLD(d, REG_ITMP1, 0);
2237 emit_store_dst(jd, iptr, d);
2240 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2242 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2243 uf = iptr->sx.s23.s3.uf;
2244 fieldtype = uf->fieldref->parseddesc.fd->type;
2247 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2250 fi = iptr->sx.s23.s3.fmiref->p.field;
2251 fieldtype = fi->type;
2252 disp = (ptrint) &(fi->value);
2254 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2255 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2258 M_MOV_IMM(disp, REG_ITMP1);
2259 switch (fieldtype) {
2262 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2263 M_IST(s1, REG_ITMP1, 0);
2266 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2267 M_LST(s1, REG_ITMP1, 0);
2270 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2271 emit_fstps_membase(cd, REG_ITMP1, 0);
2274 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2275 emit_fstpl_membase(cd, REG_ITMP1, 0);
2280 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2281 /* val = value (in current instruction) */
2282 /* following NOP) */
2284 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2285 uf = iptr->sx.s23.s3.uf;
2286 fieldtype = uf->fieldref->parseddesc.fd->type;
2289 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2292 fi = iptr->sx.s23.s3.fmiref->p.field;
2293 fieldtype = fi->type;
2294 disp = (ptrint) &(fi->value);
2296 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2297 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2300 M_MOV_IMM(disp, REG_ITMP1);
2301 switch (fieldtype) {
2304 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
2307 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2308 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
2315 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2317 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2318 emit_nullpointer_check(cd, iptr, s1);
2320 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2321 unresolved_field *uf = iptr->sx.s23.s3.uf;
2323 fieldtype = uf->fieldref->parseddesc.fd->type;
2325 codegen_addpatchref(cd, PATCHER_getfield,
2326 iptr->sx.s23.s3.uf, 0);
2332 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2334 fieldtype = fi->type;
2338 switch (fieldtype) {
2341 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2342 M_ILD32(d, s1, disp);
2345 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2346 M_LLD32(d, s1, disp);
2349 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2350 M_FLD32(d, s1, disp);
2353 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2354 M_DLD32(d, s1, disp);
2357 emit_store_dst(jd, iptr, d);
2360 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2362 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2363 emit_nullpointer_check(cd, iptr, s1);
2365 /* must be done here because of code patching */
2367 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2368 unresolved_field *uf = iptr->sx.s23.s3.uf;
2370 fieldtype = uf->fieldref->parseddesc.fd->type;
2373 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2375 fieldtype = fi->type;
2378 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2379 if (IS_2_WORD_TYPE(fieldtype))
2380 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2382 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2385 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2387 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2388 unresolved_field *uf = iptr->sx.s23.s3.uf;
2390 codegen_addpatchref(cd, PATCHER_putfield, uf, 0);
2396 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2401 switch (fieldtype) {
2404 M_IST32(s2, s1, disp);
2407 M_LST32(s2, s1, disp);
2410 emit_fstps_membase32(cd, s1, disp);
2413 emit_fstpl_membase32(cd, s1, disp);
2418 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2419 /* val = value (in current instruction) */
2420 /* following NOP) */
2422 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2423 emit_nullpointer_check(cd, iptr, s1);
2425 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2426 unresolved_field *uf = iptr->sx.s23.s3.uf;
2428 fieldtype = uf->fieldref->parseddesc.fd->type;
2430 codegen_addpatchref(cd, PATCHER_putfieldconst,
2438 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2440 fieldtype = fi->type;
2445 switch (fieldtype) {
2448 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2451 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2452 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2460 /* branch operations **************************************************/
2462 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2464 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2465 M_INTMOVE(s1, REG_ITMP1_XPTR);
2467 #ifdef ENABLE_VERIFIER
2468 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2469 codegen_addpatchref(cd, PATCHER_athrow_areturn,
2470 iptr->sx.s23.s2.uc, 0);
2472 #endif /* ENABLE_VERIFIER */
2474 M_CALL_IMM(0); /* passing exception pc */
2475 M_POP(REG_ITMP2_XPC);
2477 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2481 case ICMD_GOTO: /* ... ==> ... */
2482 case ICMD_RET: /* ... ==> ... */
2484 #if defined(ENABLE_SSA)
2486 last_cmd_was_goto = true;
2488 /* In case of a Goto phimoves have to be inserted before the */
2491 codegen_emit_phi_moves(jd, bptr);
2494 emit_br(cd, iptr->dst.block);
2498 case ICMD_JSR: /* ... ==> ... */
2500 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
2504 case ICMD_IFNULL: /* ..., value ==> ... */
2505 case ICMD_IFNONNULL:
2507 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2509 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
2512 case ICMD_IFEQ: /* ..., value ==> ... */
2519 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2520 M_CMP_IMM(iptr->sx.val.i, s1);
2521 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
2524 case ICMD_IF_LEQ: /* ..., value ==> ... */
	/* 64-bit equality on a 32-bit machine: XOR both halves with the
	   immediate, OR the results - ZF is set iff the whole long is
	   equal. A zero immediate is special-cased with a plain OR of
	   the two halves. */
2526 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2527 if (iptr->sx.val.l == 0) {
2528 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2529 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2532 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2533 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2534 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2535 M_OR(REG_ITMP2, REG_ITMP1);
2537 emit_beq(cd, iptr->dst.block);
2540 case ICMD_IF_LLT: /* ..., value ==> ... */
	/* Compare against zero only needs the sign of the high word. */
2542 if (iptr->sx.val.l == 0) {
2543 /* If high 32-bit are less than zero, then the 64-bits
2545 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2547 emit_blt(cd, iptr->dst.block);
	/* General case: signed compare of the high words decides; equal
	   high words fall through to an unsigned low-word compare. */
2550 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2551 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2552 emit_blt(cd, iptr->dst.block);
2554 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2555 emit_bult(cd, iptr->dst.block);
2559 case ICMD_IF_LLE: /* ..., value ==> ... */
	/* value <= constant: high word strictly-less branches directly,
	   otherwise an unsigned low-word <= test decides. */
2561 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2562 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2563 emit_blt(cd, iptr->dst.block);
2565 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2566 emit_bule(cd, iptr->dst.block);
2569 case ICMD_IF_LNE: /* ..., value ==> ... */
	/* Same XOR/OR trick as ICMD_IF_LEQ, branching on not-equal. */
2571 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2572 if (iptr->sx.val.l == 0) {
2573 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2574 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2577 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2578 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2579 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2580 M_OR(REG_ITMP2, REG_ITMP1);
2582 emit_bne(cd, iptr->dst.block);
2585 case ICMD_IF_LGT: /* ..., value ==> ... */
	/* value > constant: signed high-word >, else unsigned low-word
	   > when the high words are equal. */
2587 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2588 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2589 emit_bgt(cd, iptr->dst.block);
2591 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2592 emit_bugt(cd, iptr->dst.block);
2595 case ICMD_IF_LGE: /* ..., value ==> ... */
	/* Against zero: the sign bit of the high word is sufficient. */
2597 if (iptr->sx.val.l == 0) {
2598 /* If high 32-bit are greater equal zero, then the
2600 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2602 emit_bge(cd, iptr->dst.block);
2605 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2606 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2607 emit_bgt(cd, iptr->dst.block);
2609 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2610 emit_buge(cd, iptr->dst.block);
2614 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2615 case ICMD_IF_ICMPNE:
2616 case ICMD_IF_ICMPLT:
2617 case ICMD_IF_ICMPGT:
2618 case ICMD_IF_ICMPGE:
2619 case ICMD_IF_ICMPLE:
	/* Two-operand int compare-and-branch; condition code is derived
	   from the opcode offset relative to ICMD_IF_ICMPEQ. */
2621 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2622 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2624 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);
2627 case ICMD_IF_ACMPEQ: /* ..., value, value ==> ... */
2628 case ICMD_IF_ACMPNE:
	/* Reference compare works exactly like the int compare above. */
2630 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2631 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2633 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);
2636 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
	/* 64-bit register equality: XOR the low halves, XOR the high
	   halves, OR the two results - ZF set iff fully equal. */
2638 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2639 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2640 M_INTMOVE(s1, REG_ITMP1);
2641 M_XOR(s2, REG_ITMP1);
2642 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2643 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2644 M_INTMOVE(s1, REG_ITMP2);
2645 M_XOR(s2, REG_ITMP2);
2646 M_OR(REG_ITMP1, REG_ITMP2);
2647 emit_beq(cd, iptr->dst.block);
2650 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
	/* Same as ICMD_IF_LCMPEQ, branching on not-equal. */
2652 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2653 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2654 M_INTMOVE(s1, REG_ITMP1);
2655 M_XOR(s2, REG_ITMP1);
2656 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2657 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2658 M_INTMOVE(s1, REG_ITMP2);
2659 M_XOR(s2, REG_ITMP2);
2660 M_OR(REG_ITMP1, REG_ITMP2);
2661 emit_bne(cd, iptr->dst.block);
2664 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
	/* Ordered 64-bit compares: a signed compare of the high words
	   decides; equal high words fall through to an unsigned compare
	   of the low words. Same scheme for GT/LE/GE below, with the
	   matching signed/unsigned branch pair. */
2666 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2667 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2669 emit_blt(cd, iptr->dst.block);
2670 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2671 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2674 emit_bult(cd, iptr->dst.block);
2677 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2679 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2680 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2682 emit_bgt(cd, iptr->dst.block);
2683 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2684 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2687 emit_bugt(cd, iptr->dst.block);
2690 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2692 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2693 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2695 emit_blt(cd, iptr->dst.block);
2696 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2697 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2700 emit_bule(cd, iptr->dst.block);
2703 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2705 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2706 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2708 emit_bgt(cd, iptr->dst.block);
2709 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2710 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2713 emit_buge(cd, iptr->dst.block);
2717 case ICMD_IRETURN: /* ..., retvalue ==> ... */
	/* Move the int result into REG_RESULT, then share the common
	   method epilogue (nowperformreturn) below. */
2719 REPLACEMENT_POINT_RETURN(cd, iptr);
2720 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2721 M_INTMOVE(s1, REG_RESULT);
2722 goto nowperformreturn;
2724 case ICMD_LRETURN: /* ..., retvalue ==> ... */
	/* Long result lives in the EDX:EAX register pair. */
2726 REPLACEMENT_POINT_RETURN(cd, iptr);
2727 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2728 M_LNGMOVE(s1, REG_RESULT_PACKED);
2729 goto nowperformreturn;
2731 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2733 REPLACEMENT_POINT_RETURN(cd, iptr);
2734 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2735 M_INTMOVE(s1, REG_RESULT);
2737 #ifdef ENABLE_VERIFIER
	/* Unresolved return type: add a patcher that verifies
	   assignability of the returned reference at first execution. */
2738 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2739 codegen_addpatchref(cd, PATCHER_athrow_areturn,
2740 iptr->sx.s23.s2.uc, 0);
2742 #endif /* ENABLE_VERIFIER */
2743 goto nowperformreturn;
2745 case ICMD_FRETURN: /* ..., retvalue ==> ... */
	/* Float/double results are returned on the x87 FPU stack. */
2748 REPLACEMENT_POINT_RETURN(cd, iptr);
2749 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2750 goto nowperformreturn;
2752 case ICMD_RETURN: /* ... ==> ... */
2754 REPLACEMENT_POINT_RETURN(cd, iptr);
	/* ---- common epilogue (nowperformreturn) ---- */
2760 p = cd->stackframesize;
2762 #if !defined(NDEBUG)
2763 emit_verbosecall_exit(jd);
2766 #if defined(ENABLE_THREADS)
	/* Synchronized method: call LOCK_monitor_exit on the stored
	   monitor object; the return value must be spilled around the
	   call and reloaded afterwards (switches keyed on the opcode). */
2767 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2768 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 4);
2770 /* we need to save the proper return value */
2771 switch (iptr->opc) {
2774 M_IST(REG_RESULT, REG_SP, rd->memuse * 4);
2778 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 4);
2782 emit_fstps_membase(cd, REG_SP, rd->memuse * 4);
2786 emit_fstpl_membase(cd, REG_SP, rd->memuse * 4);
2790 M_AST(REG_ITMP2, REG_SP, 0);
2791 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2794 /* and now restore the proper return value */
2795 switch (iptr->opc) {
2798 M_ILD(REG_RESULT, REG_SP, rd->memuse * 4);
2802 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 4);
2806 emit_flds_membase(cd, REG_SP, rd->memuse * 4);
2810 emit_fldl_membase(cd, REG_SP, rd->memuse * 4);
2816 /* restore saved registers */
2818 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2819 p--; M_ALD(rd->savintregs[i], REG_SP, p * 4);
2822 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2824 emit_fldl_membase(cd, REG_SP, p * 4);
	/* For float/double returns the x87 stack already holds the
	   return value, so restored FPU registers land one slot deeper
	   (see the commented-out fstp alternatives). */
2825 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2827 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2830 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2834 /* deallocate stack */
2836 if (cd->stackframesize)
2837 M_AADD_IMM(cd->stackframesize * 4, REG_SP);
2844 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2847 branch_target_t *table;
2849 table = iptr->dst.table;
2851 l = iptr->sx.s23.s2.tablelow;
2852 i = iptr->sx.s23.s3.tablehigh;
2854 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2855 M_INTMOVE(s1, REG_ITMP1);
	/* Normalize the index to 0..(high-low); a single unsigned
	   compare then catches both below- and above-range values and
	   branches to the default target (table[0]). */
2858 M_ISUB_IMM(l, REG_ITMP1);
2864 M_CMP_IMM(i - 1, REG_ITMP1);
2865 emit_bugt(cd, table[0].block);
2867 /* build jump table top down and use address of lowest entry */
2872 dseg_add_target(cd, table->block);
2876 /* length of dataseg after last dseg_addtarget is used
	/* The 0 immediate is relocated to the data-segment base; the
	   indexed load (scale 2 == *4, displacement -dseglen) fetches
	   the jump-table entry for the normalized index. */
2879 M_MOV_IMM(0, REG_ITMP2);
2881 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
2887 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2890 lookup_target_t *lookup;
2892 lookup = iptr->dst.lookup;
2894 i = iptr->sx.s23.s2.lookupcount;
2896 MCODECHECK((i<<2)+8);
2897 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
	/* Linear chain of compare-and-branch pairs, one per lookup
	   entry, followed by a branch to the default target. */
2900 M_CMP_IMM(lookup->value, s1);
2901 emit_beq(cd, lookup->target.block);
2905 emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
2910 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
2912 REPLACEMENT_POINT_FORGC_BUILTIN(cd, iptr);
2914 bte = iptr->sx.s23.s3.bte;
2918 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2920 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2921 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
2922 case ICMD_INVOKEINTERFACE:
2924 REPLACEMENT_POINT_INVOKE(cd, iptr);
	/* Unresolved calls only have the parsed method descriptor;
	   resolved calls can use the target methodinfo directly. */
2926 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2927 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
2931 lm = iptr->sx.s23.s3.fmiref->p.method;
2932 md = lm->parseddesc;
2936 s3 = md->paramcount;
2938 MCODECHECK((s3 << 1) + 64);
2940 /* copy arguments to registers or stack location */
2942 for (s3 = s3 - 1; s3 >= 0; s3--) {
2943 var = VAR(iptr->sx.s23.s2.args[s3]);
2945 /* Already Preallocated (ARGVAR) ? */
2946 if (var->flags & PREALLOC)
2948 if (IS_INT_LNG_TYPE(var->type)) {
	/* On i386 all int/long arguments are passed on the stack;
	   a register-allocated int argument is an internal error. */
2949 if (!md->params[s3].inmemory) {
2950 log_text("No integer argument registers available!");
2954 if (IS_2_WORD_TYPE(var->type)) {
2955 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
2956 M_LST(d, REG_SP, md->params[s3].regoff * 4);
2958 d = emit_load(jd, iptr, var, REG_ITMP1);
2959 M_IST(d, REG_SP, md->params[s3].regoff * 4);
2964 if (!md->params[s3].inmemory) {
2965 s1 = md->params[s3].regoff;
2966 d = emit_load(jd, iptr, var, s1);
2970 d = emit_load(jd, iptr, var, REG_FTMP1);
2971 if (IS_2_WORD_TYPE(var->type))
2972 M_DST(d, REG_SP, md->params[s3].regoff * 4);
2974 M_FST(d, REG_SP, md->params[s3].regoff * 4);
	/* Emit the actual call sequence, depending on the invoke kind. */
2979 switch (iptr->opc) {
2981 d = md->returntype.type;
	/* ICMD_BUILTIN: call the builtin's stub if it has one,
	   otherwise the function pointer directly. */
2983 if (bte->stub == NULL) {
2984 M_MOV_IMM(bte->fp, REG_ITMP1);
2986 M_MOV_IMM(bte->stub, REG_ITMP1);
2990 emit_exception_check(cd, iptr);
2993 case ICMD_INVOKESPECIAL:
	/* Explicit null check of the receiver (first stack slot). */
2994 M_ALD(REG_ITMP1, REG_SP, 0 * 4);
2995 emit_nullpointer_check(cd, iptr, REG_ITMP1);
2998 case ICMD_INVOKESTATIC:
	/* Unresolved: a patcher fills in the stub address later. */
3000 unresolved_method *um = iptr->sx.s23.s3.um;
3002 codegen_addpatchref(cd, PATCHER_invokestatic_special,
3006 d = md->returntype.type;
3009 disp = (ptrint) lm->stubroutine;
3010 d = lm->parseddesc->returntype.type;
3013 M_MOV_IMM(disp, REG_ITMP2);
3017 case ICMD_INVOKEVIRTUAL:
3018 M_ALD(REG_ITMP1, REG_SP, 0 * 4);
3019 emit_nullpointer_check(cd, iptr, s1);
3022 unresolved_method *um = iptr->sx.s23.s3.um;
3024 codegen_addpatchref(cd, PATCHER_invokevirtual, um, 0);
3027 d = md->returntype.type;
	/* Resolved virtual call: fetch the method pointer out of the
	   receiver's vftbl at the method's vtable index. */
3030 s1 = OFFSET(vftbl_t, table[0]) +
3031 sizeof(methodptr) * lm->vftblindex;
3032 d = md->returntype.type;
3035 M_ALD(REG_METHODPTR, REG_ITMP1,
3036 OFFSET(java_objectheader, vftbl));
3037 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3041 case ICMD_INVOKEINTERFACE:
3042 M_ALD(REG_ITMP1, REG_SP, 0 * 4);
3043 emit_nullpointer_check(cd, iptr, s1);
3046 unresolved_method *um = iptr->sx.s23.s3.um;
3048 codegen_addpatchref(cd, PATCHER_invokeinterface, um, 0);
3052 d = md->returntype.type;
	/* Resolved interface call: first load the per-interface table
	   from the vftbl (negative offset by interface index), then the
	   method slot within that table. */
3055 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3056 sizeof(methodptr) * lm->class->index;
3058 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3060 d = md->returntype.type;
3063 M_ALD(REG_METHODPTR, REG_ITMP1,
3064 OFFSET(java_objectheader, vftbl));
3065 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3066 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3071 /* store size of call code in replacement point */
3073 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3074 REPLACEMENT_POINT_FORGC_BUILTIN_RETURN(cd, iptr);
3076 /* d contains return type */
	/* Move the return value into the destination variable; longs
	   use the packed EDX:EAX pair. */
3078 if (d != TYPE_VOID) {
3079 #if defined(ENABLE_SSA)
3080 if ((ls == NULL) /* || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) */ ||
3081 (ls->lifetime[iptr->dst.varindex].type != UNUSED))
3082 /* a "living" stackslot */
3085 if (IS_INT_LNG_TYPE(d)) {
3086 if (IS_2_WORD_TYPE(d)) {
3087 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3088 M_LNGMOVE(REG_RESULT_PACKED, s1);
3091 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3092 M_INTMOVE(REG_RESULT, s1);
3096 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3098 emit_store_dst(jd, iptr, s1);
3104 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3106 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3107 /* object type cast-check */
3110 vftbl_t *supervftbl;
3113 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3119 super = iptr->sx.s23.s3.c.cls;
3120 superindex = super->index;
3121 supervftbl = super->vftbl;
	/* The class-hierarchy (baseval/diffval) test reads vftbl data
	   that may be rewritten on class loading, hence the critical
	   section around it. */
3124 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3125 CODEGEN_CRITICAL_SECTION_NEW;
3127 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3129 /* if class is not resolved, check which code to call */
	/* Unresolved: null skips everything (LABEL_1); a patched copy of
	   super->flags selects interface (fallthrough) or class
	   (LABEL_2) checkcast code at runtime. */
3131 if (super == NULL) {
3133 emit_label_beq(cd, BRANCH_LABEL_1);
3135 codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
3136 iptr->sx.s23.s3.c.ref, 0);
3138 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3139 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3140 emit_label_beq(cd, BRANCH_LABEL_2);
3143 /* interface checkcast code */
3145 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3146 if (super != NULL) {
3148 emit_label_beq(cd, BRANCH_LABEL_3);
3151 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3153 if (super == NULL) {
3154 codegen_addpatchref(cd, PATCHER_checkcast_interface,
3155 iptr->sx.s23.s3.c.ref,
	/* Interface test: the interface index must lie inside
	   interfacetablelength and the interface-table slot must be
	   non-NULL, otherwise a ClassCastException is raised. */
3160 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3161 M_ISUB_IMM32(superindex, REG_ITMP3);
3162 /* XXX do we need this one? */
3164 emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);
3166 M_ALD32(REG_ITMP3, REG_ITMP2,
3167 OFFSET(vftbl_t, interfacetable[0]) -
3168 superindex * sizeof(methodptr*));
3170 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);
3173 emit_label_br(cd, BRANCH_LABEL_4);
3175 emit_label(cd, BRANCH_LABEL_3);
3178 /* class checkcast code */
3180 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3181 if (super == NULL) {
3182 emit_label(cd, BRANCH_LABEL_2);
3186 emit_label_beq(cd, BRANCH_LABEL_5);
3189 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3191 if (super == NULL) {
3192 codegen_addpatchref(cd, PATCHER_checkcast_class,
3193 iptr->sx.s23.s3.c.ref,
3197 M_MOV_IMM(supervftbl, REG_ITMP3);
3199 CODEGEN_CRITICAL_SECTION_START;
	/* Relative-numbering subtype test: compute
	   objvftbl->baseval - supervftbl->baseval and compare it
	   (unsigned) against supervftbl->diffval. */
3201 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3203 /* if (s1 != REG_ITMP1) { */
3204 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3205 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3206 /* #if defined(ENABLE_THREADS) */
3207 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3209 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
3212 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3213 M_ISUB(REG_ITMP3, REG_ITMP2);
3214 M_MOV_IMM(supervftbl, REG_ITMP3);
3215 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3217 CODEGEN_CRITICAL_SECTION_END;
3221 M_CMP(REG_ITMP3, REG_ITMP2);
3222 emit_classcast_check(cd, iptr, BRANCH_ULE, REG_ITMP3, s1);
3225 emit_label(cd, BRANCH_LABEL_5);
3228 if (super == NULL) {
3229 emit_label(cd, BRANCH_LABEL_1);
3230 emit_label(cd, BRANCH_LABEL_4);
3233 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3236 /* array type cast-check */
	/* Array casts are delegated to BUILTIN_arraycheckcast, with a
	   patcher supplying the class argument if unresolved. */
3238 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3239 M_AST(s1, REG_SP, 0 * 4);
3241 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3242 codegen_addpatchref(cd, PATCHER_builtin_arraycheckcast,
3243 iptr->sx.s23.s3.c.ref, 0);
3246 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3247 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3250 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3252 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);
3254 d = codegen_reg_of_dst(jd, iptr, s1);
3258 emit_store_dst(jd, iptr, d);
3261 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3265 vftbl_t *supervftbl;
3268 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3274 super = iptr->sx.s23.s3.c.cls;
3275 superindex = super->index;
3276 supervftbl = super->vftbl;
	/* The class (baseval/diffval) test below reads mutable vftbl
	   data, so it needs a critical section. */
3279 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3280 CODEGEN_CRITICAL_SECTION_NEW;
3282 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3283 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
	/* Work on a copy of the object ref; d may alias REG_ITMP2. */
3286 M_INTMOVE(s1, REG_ITMP1);
3292 /* if class is not resolved, check which code to call */
	/* Unresolved: null yields false (LABEL_1); a patched copy of
	   super->flags dispatches to interface (fallthrough) or class
	   (LABEL_2) instanceof code. */
3294 if (super == NULL) {
3296 emit_label_beq(cd, BRANCH_LABEL_1);
3298 codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
3299 iptr->sx.s23.s3.c.ref, 0);
3301 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3302 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3303 emit_label_beq(cd, BRANCH_LABEL_2);
3306 /* interface instanceof code */
3308 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3309 if (super != NULL) {
3311 emit_label_beq(cd, BRANCH_LABEL_3);
3314 M_ALD(REG_ITMP1, s1, OFFSET(java_objectheader, vftbl));
3316 if (super == NULL) {
3317 codegen_addpatchref(cd, PATCHER_instanceof_interface,
3318 iptr->sx.s23.s3.c.ref, 0);
	/* Interface test: index within interfacetablelength and a
	   non-NULL table slot make the result true. */
3322 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3323 M_ISUB_IMM32(superindex, REG_ITMP3);
	/* disp = hand-counted byte size of the code skipped when the
	   index is out of range (instruction sizes in the comments). */
3326 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3327 6 /* jcc */ + 5 /* mov_imm_reg */);
3330 M_ALD32(REG_ITMP1, REG_ITMP1,
3331 OFFSET(vftbl_t, interfacetable[0]) -
3332 superindex * sizeof(methodptr*));
3334 /* emit_setcc_reg(cd, CC_A, d); */
3335 /* emit_jcc(cd, CC_BE, 5); */
3340 emit_label_br(cd, BRANCH_LABEL_4);
3342 emit_label(cd, BRANCH_LABEL_3);
3345 /* class instanceof code */
3347 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3348 if (super == NULL) {
3349 emit_label(cd, BRANCH_LABEL_2);
3353 emit_label_beq(cd, BRANCH_LABEL_5);
3356 M_ALD(REG_ITMP1, s1, OFFSET(java_objectheader, vftbl));
3358 if (super == NULL) {
3359 codegen_addpatchref(cd, PATCHER_instanceof_class,
3360 iptr->sx.s23.s3.c.ref, 0);
3363 M_MOV_IMM(supervftbl, REG_ITMP2);
3365 CODEGEN_CRITICAL_SECTION_START;
	/* Relative-numbering subtype test, same scheme as CHECKCAST:
	   objbase - superbase compared (unsigned) against superdiff. */
3367 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3368 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3369 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3371 CODEGEN_CRITICAL_SECTION_END;
3373 M_ISUB(REG_ITMP2, REG_ITMP1);
3374 M_CLR(d); /* may be REG_ITMP2 */
3375 M_CMP(REG_ITMP3, REG_ITMP1);
3380 emit_label(cd, BRANCH_LABEL_5);
3383 if (super == NULL) {
3384 emit_label(cd, BRANCH_LABEL_1);
3385 emit_label(cd, BRANCH_LABEL_4);
3388 emit_store_dst(jd, iptr, d);
3392 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3394 /* check for negative sizes and copy sizes to stack if necessary */
3396 MCODECHECK((iptr->s1.argcount << 1) + 64);
	/* Copy every dimension count into the outgoing frame starting
	   at slot 3 (slots 0-2 carry the builtin's arguments below). */
3398 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3399 /* copy SAVEDVAR sizes to stack */
3400 var = VAR(iptr->sx.s23.s2.args[s1]);
3402 /* Already Preallocated? */
3403 if (!(var->flags & PREALLOC)) {
3404 if (var->flags & INMEMORY) {
3405 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff * 4);
3406 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3409 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3413 /* is a patcher function set? */
3415 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3416 codegen_addpatchref(cd, PATCHER_builtin_multianewarray,
3417 iptr->sx.s23.s3.c.ref, 0);
3423 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3425 /* a0 = dimension count */
3427 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3429 /* a1 = arraydescriptor */
3431 M_IST_IMM(disp, REG_SP, 1 * 4);
3433 /* a2 = pointer to dimensions = stack pointer */
	/* Points at the dimension words stored above (SP + 12). */
3435 M_MOV(REG_SP, REG_ITMP1);
3436 M_AADD_IMM(3 * 4, REG_ITMP1);
3437 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3439 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3442 /* check for exception before result assignment */
3444 emit_exception_check(cd, iptr);
3446 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3447 M_INTMOVE(REG_RESULT, s1);
3448 emit_store_dst(jd, iptr, s1);
	/* default: any ICMD not handled above is an internal error. */
3452 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3457 } /* for instruction */
3461 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3464 #if defined(ENABLE_SSA)
3467 /* by edge splitting, in Blocks with phi moves there can only */
3468 /* be a goto as last command, no other Jump/Branch Command */
	/* Emit the block's phi moves here unless ICMD_GOTO already did. */
3470 if (!last_cmd_was_goto)
3471 codegen_emit_phi_moves(jd, bptr);
3476 /* At the end of a basic block we may have to append some nops,
3477 because the patcher stub calling code might be longer than the
3478 actual instruction. So codepatching does not change the
3479 following block unintentionally. */
3481 if (cd->mcodeptr < cd->lastmcodeptr) {
3482 while (cd->mcodeptr < cd->lastmcodeptr) {
3487 } /* if (bptr -> flags >= BBREACHED) */
3488 } /* for basic block */
	/* Finalize: line-number table, patcher stubs, replacement stubs. */
3490 dseg_createlinenumbertable(cd);
3492 /* generate stubs */
3494 emit_patcher_stubs(jd);
3495 REPLACEMENT_EMIT_STUBS(jd);
3497 /* everything's ok */
3502 /* codegen_emit_stub_compiler **************************************************
3504 Emit a stub routine which calls the compiler.
3506 *******************************************************************************/
3508 void codegen_emit_stub_compiler(jitdata *jd)
3513 /* get required compiler data */
3518 /* code for the stub */
	/* Load the methodinfo into REG_ITMP1 and transfer control to
	   asm_call_jit_compiler via REG_ITMP3, which JIT-compiles the
	   method on first invocation. */
3520 M_MOV_IMM(m, REG_ITMP1);
3521 M_MOV_IMM(asm_call_jit_compiler, REG_ITMP3);
3526 /* codegen_emit_stub_builtin ***************************************************
3528 Creates a stub routine which calls a builtin function.
3530 *******************************************************************************/
3532 void codegen_emit_stub_builtin(jitdata *jd, builtintable_entry *bte)
3541 /* get required compiler data */
3546 /* set some variables */
3550 /* calculate stack frame size */
	/* Frame holds the stackframeinfo plus 4 slots used both for the
	   builtin's arguments and the saved return value. */
3552 cd->stackframesize =
3553 sizeof(stackframeinfo) / SIZEOF_VOID_P +
3554 4; /* 4 arguments or return value */
	/* Forcing the two low bits gives a slot count of the form 4k+3;
	   with the pushed return address that keeps ESP 16-byte
	   aligned at the callee. */
3556 cd->stackframesize |= 0x3; /* keep stack 16-byte aligned */
3558 /* create method header */
3560 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3561 (void) dseg_add_unique_s4(cd, cd->stackframesize * 4); /* FrameSize */
3562 (void) dseg_add_unique_s4(cd, 0); /* IsSync */
3563 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3564 (void) dseg_add_unique_s4(cd, 0); /* IntSave */
3565 (void) dseg_add_unique_s4(cd, 0); /* FltSave */
3566 (void) dseg_addlinenumbertablesize(cd);
3567 (void) dseg_add_unique_s4(cd, 0); /* ExTableSize */
3569 /* generate stub code */
3571 M_ASUB_IMM(cd->stackframesize * 4, REG_SP);
3573 #if defined(ENABLE_GC_CACAO)
3574 /* Save callee saved integer registers in stackframeinfo (GC may
3575 need to recover them during a collection). */
3577 disp = cd->stackframesize * 4 - sizeof(stackframeinfo) +
3578 OFFSET(stackframeinfo, intregs);
3580 for (i = 0; i < INT_SAV_CNT; i++)
3581 M_AST(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3584 /* create dynamic stack info */
	/* Arguments for codegen_stub_builtin_enter:
	   slot 0 = stackframeinfo address (top of this frame),
	   slot 1 = 0, slot 2 = caller SP, slot 3 = return address. */
3586 M_MOV(REG_SP, REG_ITMP1);
3587 M_AADD_IMM(cd->stackframesize * 4, REG_ITMP1);
3588 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3590 M_IST_IMM(0, REG_SP, 1 * 4);
3593 M_MOV(REG_SP, REG_ITMP2);
3594 M_AADD_IMM(cd->stackframesize * 4 + SIZEOF_VOID_P, REG_ITMP2);
3595 M_AST(REG_ITMP2, REG_SP, 2 * 4);
3597 M_ALD(REG_ITMP3, REG_SP, cd->stackframesize * 4);
3598 M_AST(REG_ITMP3, REG_SP, 3 * 4);
3600 M_MOV_IMM(codegen_stub_builtin_enter, REG_ITMP1);
3603 /* builtins are allowed to have 4 arguments max */
3605 assert(md->paramcount <= 4);
3607 /* copy arguments into new stackframe */
	/* All arguments arrive on the caller's stack (i386 ABI); copy
	   them word-wise into this frame's outgoing argument area. */
3609 for (i = 0; i < md->paramcount; i++) {
3610 if (!md->params[i].inmemory) {
3611 log_text("No integer argument registers available!");
3614 } else { /* float/double in memory can be copied like int/longs */
3615 s1 = (md->params[i].regoff + cd->stackframesize + 1) * 4;
3616 s2 = md->params[i].regoff * 4;
3618 M_ILD(REG_ITMP1, REG_SP, s1);
3619 M_IST(REG_ITMP1, REG_SP, s2);
3620 if (IS_2_WORD_TYPE(md->paramtypes[i].type)) {
3621 M_ILD(REG_ITMP1, REG_SP, s1 + 4);
3622 M_IST(REG_ITMP1, REG_SP, s2 + 4);
3628 /* call the builtin function */
3630 M_MOV_IMM(bte->fp, REG_ITMP3);
3633 /* save return value */
	/* Spill the result into this frame across the exit call below;
	   longs use two slots, float/double come off the x87 stack. */
3635 if (md->returntype.type != TYPE_VOID) {
3636 if (IS_INT_LNG_TYPE(md->returntype.type)) {
3637 if (IS_2_WORD_TYPE(md->returntype.type))
3638 M_IST(REG_RESULT2, REG_SP, 2 * 4);
3639 M_IST(REG_RESULT, REG_SP, 1 * 4);
3642 if (IS_2_WORD_TYPE(md->returntype.type))
3643 emit_fstl_membase(cd, REG_SP, 1 * 4);
3645 emit_fsts_membase(cd, REG_SP, 1 * 4);
3649 /* remove native stackframe info */
3651 M_MOV(REG_SP, REG_ITMP1);
3652 M_AADD_IMM(cd->stackframesize * 4, REG_ITMP1);
3653 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3655 M_MOV_IMM(codegen_stub_builtin_exit, REG_ITMP1);
3658 /* restore return value */
3660 if (md->returntype.type != TYPE_VOID) {
3661 if (IS_INT_LNG_TYPE(md->returntype.type)) {
3662 if (IS_2_WORD_TYPE(md->returntype.type))
3663 M_ILD(REG_RESULT2, REG_SP, 2 * 4);
3664 M_ILD(REG_RESULT, REG_SP, 1 * 4);
3667 if (IS_2_WORD_TYPE(md->returntype.type))
3668 emit_fldl_membase(cd, REG_SP, 1 * 4);
3670 emit_flds_membase(cd, REG_SP, 1 * 4);
3674 #if defined(ENABLE_GC_CACAO)
3675 /* Restore callee saved integer registers from stackframeinfo (GC
3676 might have modified them during a collection). */
3678 disp = cd->stackframesize * 4 - sizeof(stackframeinfo) +
3679 OFFSET(stackframeinfo, intregs);
3681 for (i = 0; i < INT_SAV_CNT; i++)
3682 M_ALD(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3685 /* remove stackframe */
3687 M_AADD_IMM(cd->stackframesize * 4, REG_SP);
3692 /* codegen_emit_stub_native ****************************************************
3694 Emits a stub routine which calls a native method.
3696 *******************************************************************************/
3698 void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f)
3705 s4 i, j; /* count variables */
3710 /* get required compiler data */
3716 /* set some variables */
	/* JNI calling convention: static natives get two hidden leading
	   arguments (JNIEnv, class), instance natives one (JNIEnv). */
3719 nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;
3721 /* calculate stackframe size */
3723 cd->stackframesize =
3724 sizeof(stackframeinfo) / SIZEOF_VOID_P +
3725 sizeof(localref_table) / SIZEOF_VOID_P +
3726 1 + /* function pointer */
3727 4 + /* 4 arguments (start_native_call) */
3730 /* keep stack 16-byte aligned */
3732 cd->stackframesize |= 0x3;
3734 /* create method header */
3736 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3737 (void) dseg_add_unique_s4(cd, cd->stackframesize * 4); /* FrameSize */
3738 (void) dseg_add_unique_s4(cd, 0); /* IsSync */
3739 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3740 (void) dseg_add_unique_s4(cd, 0); /* IntSave */
3741 (void) dseg_add_unique_s4(cd, 0); /* FltSave */
3742 (void) dseg_addlinenumbertablesize(cd);
3743 (void) dseg_add_unique_s4(cd, 0); /* ExTableSize */
3745 #if defined(ENABLE_PROFILING)
3746 /* generate native method profiling code */
3748 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
3749 /* count frequency */
3751 M_MOV_IMM(code, REG_ITMP1);
3752 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
3756 /* calculate stackframe size for native function */
3758 M_ASUB_IMM(cd->stackframesize * 4, REG_SP);
3760 #if !defined(NDEBUG)
3761 emit_verbosecall_enter(jd);
3764 /* get function address (this must happen before the stackframeinfo) */
3766 #if !defined(WITH_STATIC_CLASSPATH)
	/* Lazy resolution: the patcher looks the native function up on
	   first call and overwrites the stored address. */
3768 codegen_addpatchref(cd, PATCHER_resolve_native, m, 0);
3771 M_AST_IMM((ptrint) f, REG_SP, 4 * 4);
3773 /* Mark the whole fpu stack as free for native functions (only for saved */
3774 /* register count == 0). */
3776 emit_ffree_reg(cd, 0);
3777 emit_ffree_reg(cd, 1);
3778 emit_ffree_reg(cd, 2);
3779 emit_ffree_reg(cd, 3);
3780 emit_ffree_reg(cd, 4);
3781 emit_ffree_reg(cd, 5);
3782 emit_ffree_reg(cd, 6);
3783 emit_ffree_reg(cd, 7);
3785 #if defined(ENABLE_GC_CACAO)
3786 /* remember callee saved int registers in stackframeinfo (GC may need to */
3787 /* recover them during a collection). */
3789 disp = cd->stackframesize * 4 - sizeof(stackframeinfo) +
3790 OFFSET(stackframeinfo, intregs);
3792 for (i = 0; i < INT_SAV_CNT; i++)
3793 M_AST(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3796 /* prepare data structures for native function call */
	/* codegen_start_native_call argument layout (same scheme as the
	   builtin stub): slot 0 = stackframeinfo address, slot 1 = 0,
	   slot 2 = caller SP, slot 3 = return address. */
3798 M_MOV(REG_SP, REG_ITMP1);
3799 M_AADD_IMM(cd->stackframesize * 4, REG_ITMP1);
3801 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3802 M_IST_IMM(0, REG_SP, 1 * 4);
3805 M_MOV(REG_SP, REG_ITMP2);
3806 M_AADD_IMM(cd->stackframesize * 4 + SIZEOF_VOID_P, REG_ITMP2);
3808 M_AST(REG_ITMP2, REG_SP, 2 * 4);
3809 M_ALD(REG_ITMP3, REG_SP, cd->stackframesize * 4);
3810 M_AST(REG_ITMP3, REG_SP, 3 * 4);
3811 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
	/* Reload the (possibly patched) native function address. */
3814 M_ALD(REG_ITMP3, REG_SP, 4 * 4);
3816 /* copy arguments into new stackframe */
	/* j indexes the native descriptor, which is shifted by the
	   hidden JNIEnv/class parameters relative to the Java one. */
3818 for (i = md->paramcount - 1, j = i + nativeparams; i >= 0; i--, j--) {
3819 t = md->paramtypes[i].type;
3821 if (!md->params[i].inmemory) {
3822 /* no integer argument registers */
3823 } else { /* float/double in memory can be copied like int/longs */
3824 s1 = (md->params[i].regoff + cd->stackframesize + 1) * 4;
3825 s2 = nmd->params[j].regoff * 4;
3827 M_ILD(REG_ITMP1, REG_SP, s1);
3828 M_IST(REG_ITMP1, REG_SP, s2);
3829 if (IS_2_WORD_TYPE(t)) {
3830 M_ILD(REG_ITMP1, REG_SP, s1 + 4);
3831 M_IST(REG_ITMP1, REG_SP, s2 + 4);
3836 /* if function is static, put class into second argument */
3838 if (m->flags & ACC_STATIC)
3839 M_AST_IMM(m->class, REG_SP, 1 * 4);
3841 /* put env into first argument */
3843 M_AST_IMM(_Jv_env, REG_SP, 0 * 4);
3845 /* call the native function */
3849 /* save return value */
	/* Spill the result across the finish-native-call sequence. */
3851 switch (md->returntype.type) {
3854 M_IST(REG_RESULT, REG_SP, 1 * 4);
3857 M_LST(REG_RESULT_PACKED, REG_SP, 1 * 4);
3860 emit_fsts_membase(cd, REG_SP, 1 * 4);
3863 emit_fstl_membase(cd, REG_SP, 1 * 4);
3869 #if !defined(NDEBUG)
3870 emit_verbosecall_exit(jd);
3873 /* remove native stackframe info */
3875 M_MOV(REG_SP, REG_ITMP1);
3876 M_AADD_IMM(cd->stackframesize * 4, REG_ITMP1);
3878 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3879 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
	/* codegen_finish_native_call's result (any pending exception)
	   is kept in REG_ITMP2 while the return value is reloaded. */
3881 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
3883 /* restore return value */
3885 switch (md->returntype.type) {
3888 M_ILD(REG_RESULT, REG_SP, 1 * 4);
3891 M_LLD(REG_RESULT_PACKED, REG_SP, 1 * 4);
3894 emit_flds_membase(cd, REG_SP, 1 * 4);
3897 emit_fldl_membase(cd, REG_SP, 1 * 4);
3903 #if defined(ENABLE_GC_CACAO)
3904 /* restore callee saved int registers from stackframeinfo (GC might have */
3905 /* modified them during a collection). */
3907 disp = cd->stackframesize * 4 - sizeof(stackframeinfo) +
3908 OFFSET(stackframeinfo, intregs);
3910 for (i = 0; i < INT_SAV_CNT; i++)
3911 M_ALD(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3914 M_AADD_IMM(cd->stackframesize * 4, REG_SP);
3916 /* check for exception */
3923 /* handle exception */
	/* Exception path: exception object to the XPTR register, the
	   return address (minus 2, presumably the size of the call
	   instruction - TODO confirm) becomes the exception PC, then
	   jump to the assembler exception handler. */
3925 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
3926 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
3927 M_ASUB_IMM(2, REG_ITMP2_XPC);
3929 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
3932 /* generate patcher stubs */
3934 emit_patcher_stubs(jd);
3939 * These are local overrides for various environment variables in Emacs.
3940 * Please do not remove this and leave it at the end of the file, where
3941 * Emacs will automagically detect them.
3942 * ---------------------------------------------------------------------
3945 * indent-tabs-mode: t
3949 * vim:noexpandtab:sw=4:ts=4: