1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
25 $Id: codegen.c 7908 2007-05-15 09:55:17Z christian $
37 #include "vm/jit/i386/md-abi.h"
39 #include "vm/jit/i386/codegen.h"
40 #include "vm/jit/i386/emit.h"
42 #include "mm/memory.h"
43 #include "native/jni.h"
44 #include "native/native.h"
46 #include "threads/lock-common.h"
48 #include "vm/builtin.h"
49 #include "vm/exceptions.h"
50 #include "vm/global.h"
51 #include "vm/stringlocal.h"
54 #include "vm/jit/asmpart.h"
55 #include "vm/jit/codegen-common.h"
56 #include "vm/jit/dseg.h"
57 #include "vm/jit/emit-common.h"
58 #include "vm/jit/jit.h"
59 #include "vm/jit/parse.h"
60 #include "vm/jit/patcher.h"
61 #include "vm/jit/reg.h"
62 #include "vm/jit/replace.h"
63 #include "vm/jit/stacktrace.h"
65 #if defined(ENABLE_SSA)
66 # include "vm/jit/optimizing/lsra.h"
67 # include "vm/jit/optimizing/ssa.h"
68 #elif defined(ENABLE_LSRA)
69 # include "vm/jit/allocator/lsra.h"
72 #include "vmcore/loader.h"
73 #include "vmcore/options.h"
74 #include "vmcore/utf8.h"
77 /* codegen_emit ****************************************************************
79 Generates machine code.
81 *******************************************************************************/
83 bool codegen_emit(jitdata *jd)
89 s4 len, s1, s2, s3, d, disp;
95 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
96 builtintable_entry *bte;
102 #if defined(ENABLE_SSA)
104 bool last_cmd_was_goto;
106 last_cmd_was_goto = false;
110 /* get required compiler data */
117 /* prevent compiler warnings */
128 s4 savedregs_num = 0;
131 /* space to save used callee saved registers */
133 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
135 /* float register are saved on 2 4-byte stackslots */
136 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse) * 2;
138 cd->stackframesize = rd->memuse + savedregs_num;
141 #if defined(ENABLE_THREADS)
142 /* space to save argument of monitor_enter */
144 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
145 /* reserve 2 slots for long/double return values for monitorexit */
147 if (IS_2_WORD_TYPE(m->parseddesc->returntype.type))
148 cd->stackframesize += 2;
150 cd->stackframesize++;
154 /* create method header */
156 /* Keep stack of non-leaf functions 16-byte aligned. */
158 if (!jd->isleafmethod)
159 cd->stackframesize |= 0x3;
161 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
162 (void) dseg_add_unique_s4(cd, cd->stackframesize * 4); /* FrameSize */
164 #if defined(ENABLE_THREADS)
165 /* IsSync contains the offset relative to the stack pointer for the
166 argument of monitor_exit used in the exception handler. Since the
167 offset could be zero and give a wrong meaning of the flag it is
171 if (checksync && (m->flags & ACC_SYNCHRONIZED))
172 (void) dseg_add_unique_s4(cd, (rd->memuse + 1) * 4); /* IsSync */
175 (void) dseg_add_unique_s4(cd, 0); /* IsSync */
177 (void) dseg_add_unique_s4(cd, jd->isleafmethod); /* IsLeaf */
178 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
179 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
181 /* adds a reference for the length of the line number counter. We don't
182 know the size yet, since we evaluate the information during code
183 generation, to save one additional iteration over the whole
184 instructions. During code optimization the position could have changed
185 to the information gotten from the class file */
186 (void) dseg_addlinenumbertablesize(cd);
188 (void) dseg_add_unique_s4(cd, jd->exceptiontablelength); /* ExTableSize */
190 /* create exception table */
192 for (ex = jd->exceptiontable; ex != NULL; ex = ex->down) {
193 dseg_add_target(cd, ex->start);
194 dseg_add_target(cd, ex->end);
195 dseg_add_target(cd, ex->handler);
196 (void) dseg_add_unique_address(cd, ex->catchtype.any);
199 #if defined(ENABLE_PROFILING)
200 /* generate method profiling code */
202 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
203 /* count frequency */
205 M_MOV_IMM(code, REG_ITMP3);
206 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
210 /* create stack frame (if necessary) */
212 if (cd->stackframesize)
213 M_ASUB_IMM(cd->stackframesize * 4, REG_SP);
215 /* save return address and used callee saved registers */
217 p = cd->stackframesize;
218 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
219 p--; M_AST(rd->savintregs[i], REG_SP, p * 4);
221 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
222 p-=2; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 4);
225 /* take arguments out of register or stack frame */
230 for (p = 0, l = 0; p < md->paramcount; p++) {
231 t = md->paramtypes[p].type;
233 varindex = jd->local_map[l * 5 + t];
234 #if defined(ENABLE_SSA)
236 if (varindex != UNUSED)
237 varindex = ls->var_0[varindex];
238 if ((varindex != UNUSED) && (ls->lifetime[varindex].type == UNUSED))
243 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
246 if (varindex == UNUSED)
251 s1 = md->params[p].regoff;
253 if (IS_INT_LNG_TYPE(t)) { /* integer args */
254 if (!md->params[p].inmemory) { /* register arguments */
255 log_text("integer register argument");
257 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
258 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
260 else { /* reg arg -> spilled */
261 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
264 else { /* stack arguments */
265 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
266 emit_mov_membase_reg( /* + 4 for return address */
267 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4, var->vv.regoff);
268 /* + 4 for return address */
270 else { /* stack arg -> spilled */
271 if (!IS_2_WORD_TYPE(t)) {
272 #if defined(ENABLE_SSA)
273 /* no copy avoiding by now possible with SSA */
275 emit_mov_membase_reg( /* + 4 for return address */
276 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4,
278 emit_mov_reg_membase(
279 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
282 #endif /*defined(ENABLE_SSA)*/
283 /* reuse stack slot and avoid copying */
284 var->vv.regoff = cd->stackframesize + s1 + 1;
288 #if defined(ENABLE_SSA)
289 /* no copy avoiding by now possible with SSA */
291 emit_mov_membase_reg( /* + 4 for return address */
292 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4,
294 emit_mov_reg_membase(
295 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
296 emit_mov_membase_reg( /* + 4 for return address */
297 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4 + 4,
299 emit_mov_reg_membase(
300 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4 + 4);
303 #endif /*defined(ENABLE_SSA)*/
304 /* reuse stack slot and avoid copying */
305 var->vv.regoff = cd->stackframesize + s1 + 1;
310 else { /* floating args */
311 if (!md->params[p].inmemory) { /* register arguments */
312 log_text("There are no float argument registers!");
314 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
315 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
316 } else { /* reg arg -> spilled */
317 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 4 */
321 else { /* stack arguments */
322 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
325 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
327 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
332 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
334 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
337 } else { /* stack-arg -> spilled */
338 #if defined(ENABLE_SSA)
339 /* no copy avoiding by now possible with SSA */
341 emit_mov_membase_reg(
342 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4, REG_ITMP1);
343 emit_mov_reg_membase(
344 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
347 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
348 emit_fstps_membase(cd, REG_SP, var->vv.regoff * 4);
352 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
353 emit_fstpl_membase(cd, REG_SP, var->vv.regoff * 4);
357 #endif /*defined(ENABLE_SSA)*/
358 /* reuse stack slot and avoid copying */
359 var->vv.regoff = cd->stackframesize + s1 + 1;
365 /* call monitorenter function */
367 #if defined(ENABLE_THREADS)
368 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
371 if (m->flags & ACC_STATIC) {
372 M_MOV_IMM(&m->class->object.header, REG_ITMP1);
375 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 4 + 4);
378 M_ALD_MEM(REG_ITMP1, EXCEPTION_HARDWARE_NULLPOINTER);
381 M_AST(REG_ITMP1, REG_SP, s1 * 4);
382 M_AST(REG_ITMP1, REG_SP, 0 * 4);
383 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
389 emit_verbosecall_enter(jd);
394 #if defined(ENABLE_SSA)
395 /* with SSA the Header is Basic Block 0 - insert phi Moves if necessary */
397 codegen_emit_phi_moves(jd, ls->basicblocks[0]);
400 /* end of header generation */
402 /* create replacement points */
404 REPLACEMENT_POINTS_INIT(cd, jd);
406 /* walk through all basic blocks */
408 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
410 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
412 if (bptr->flags >= BBREACHED) {
413 /* branch resolving */
415 codegen_resolve_branchrefs(cd, bptr);
417 /* handle replacement points */
419 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
421 #if defined(ENABLE_REPLACEMENT)
422 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
423 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
425 disp = (s4) &(m->hitcountdown);
426 M_ISUB_IMM_MEMABS(1, disp);
432 /* copy interface registers to their destination */
437 #if defined(ENABLE_PROFILING)
438 /* generate basic block profiling code */
440 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
441 /* count frequency */
443 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
444 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
448 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
449 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
452 # if defined(ENABLE_SSA)
454 last_cmd_was_goto = false;
458 var = VAR(bptr->invars[len]);
459 if (bptr->type != BBTYPE_STD) {
460 if (!IS_2_WORD_TYPE(var->type)) {
461 if (bptr->type == BBTYPE_EXH) {
462 d = codegen_reg_of_var(0, var, REG_ITMP1);
463 M_INTMOVE(REG_ITMP1, d);
464 emit_store(jd, NULL, var, d);
468 log_text("copy interface registers(EXH, SBR): longs \
469 have to be in memory (begin 1)");
477 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
481 var = VAR(bptr->invars[len]);
482 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
483 if (!IS_2_WORD_TYPE(var->type)) {
484 if (bptr->type == BBTYPE_EXH) {
485 d = codegen_reg_of_var(0, var, REG_ITMP1);
486 M_INTMOVE(REG_ITMP1, d);
487 emit_store(jd, NULL, var, d);
491 log_text("copy interface registers: longs have to be in \
498 assert((var->flags & INOUT));
503 /* walk through all instructions */
508 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
509 if (iptr->line != currentline) {
510 dseg_addlinenumber(cd, iptr->line);
511 currentline = iptr->line;
514 MCODECHECK(1024); /* 1kB should be enough */
517 case ICMD_NOP: /* ... ==> ... */
518 case ICMD_POP: /* ..., value ==> ... */
519 case ICMD_POP2: /* ..., value, value ==> ... */
522 case ICMD_INLINE_START:
524 REPLACEMENT_POINT_INLINE_START(cd, iptr);
527 case ICMD_INLINE_BODY:
529 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
530 dseg_addlinenumber_inline_start(cd, iptr);
531 dseg_addlinenumber(cd, iptr->line);
534 case ICMD_INLINE_END:
536 dseg_addlinenumber_inline_end(cd, iptr);
537 dseg_addlinenumber(cd, iptr->line);
540 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
542 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
543 emit_nullpointer_check(cd, iptr, s1);
546 /* constant operations ************************************************/
548 case ICMD_ICONST: /* ... ==> ..., constant */
550 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
551 ICONST(d, iptr->sx.val.i);
552 emit_store_dst(jd, iptr, d);
555 case ICMD_LCONST: /* ... ==> ..., constant */
557 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
558 LCONST(d, iptr->sx.val.l);
559 emit_store_dst(jd, iptr, d);
562 case ICMD_FCONST: /* ... ==> ..., constant */
564 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
565 if (iptr->sx.val.f == 0.0) {
569 if (iptr->sx.val.i == 0x80000000) {
573 } else if (iptr->sx.val.f == 1.0) {
576 } else if (iptr->sx.val.f == 2.0) {
582 disp = dseg_add_float(cd, iptr->sx.val.f);
583 emit_mov_imm_reg(cd, 0, REG_ITMP1);
585 emit_flds_membase(cd, REG_ITMP1, disp);
587 emit_store_dst(jd, iptr, d);
590 case ICMD_DCONST: /* ... ==> ..., constant */
592 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
593 if (iptr->sx.val.d == 0.0) {
597 if (iptr->sx.val.l == 0x8000000000000000LL) {
601 } else if (iptr->sx.val.d == 1.0) {
604 } else if (iptr->sx.val.d == 2.0) {
610 disp = dseg_add_double(cd, iptr->sx.val.d);
611 emit_mov_imm_reg(cd, 0, REG_ITMP1);
613 emit_fldl_membase(cd, REG_ITMP1, disp);
615 emit_store_dst(jd, iptr, d);
618 case ICMD_ACONST: /* ... ==> ..., constant */
620 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
622 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
623 codegen_addpatchref(cd, PATCHER_aconst,
624 iptr->sx.val.c.ref, 0);
629 if (iptr->sx.val.anyptr == NULL)
632 M_MOV_IMM(iptr->sx.val.anyptr, d);
634 emit_store_dst(jd, iptr, d);
638 /* load/store/copy/move operations ************************************/
656 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
661 /* integer operations *************************************************/
663 case ICMD_INEG: /* ..., value ==> ..., - value */
665 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
666 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
669 emit_store_dst(jd, iptr, d);
672 case ICMD_LNEG: /* ..., value ==> ..., - value */
674 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
675 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
677 M_NEG(GET_LOW_REG(d));
678 M_IADDC_IMM(0, GET_HIGH_REG(d));
679 M_NEG(GET_HIGH_REG(d));
680 emit_store_dst(jd, iptr, d);
683 case ICMD_I2L: /* ..., value ==> ..., value */
685 s1 = emit_load_s1(jd, iptr, EAX);
686 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
689 M_LNGMOVE(EAX_EDX_PACKED, d);
690 emit_store_dst(jd, iptr, d);
693 case ICMD_L2I: /* ..., value ==> ..., value */
695 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
696 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
698 emit_store_dst(jd, iptr, d);
701 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
703 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
704 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
708 emit_store_dst(jd, iptr, d);
711 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
713 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
714 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
716 emit_store_dst(jd, iptr, d);
719 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
721 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
722 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
724 emit_store_dst(jd, iptr, d);
728 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
730 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
731 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
732 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
739 emit_store_dst(jd, iptr, d);
743 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
744 /* sx.val.i = constant */
746 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
747 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
749 /* `inc reg' is slower on p4's (regarding to ia32
750 optimization reference manual and benchmarks) and as
754 M_IADD_IMM(iptr->sx.val.i, d);
755 emit_store_dst(jd, iptr, d);
758 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
760 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
761 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
762 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
763 M_INTMOVE(s1, GET_LOW_REG(d));
764 M_IADD(s2, GET_LOW_REG(d));
765 /* don't use REG_ITMP1 */
766 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
767 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
768 M_INTMOVE(s1, GET_HIGH_REG(d));
769 M_IADDC(s2, GET_HIGH_REG(d));
770 emit_store_dst(jd, iptr, d);
773 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
774 /* sx.val.l = constant */
776 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
777 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
779 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
780 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
781 emit_store_dst(jd, iptr, d);
784 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
786 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
787 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
788 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
790 M_INTMOVE(s1, REG_ITMP1);
791 M_ISUB(s2, REG_ITMP1);
792 M_INTMOVE(REG_ITMP1, d);
798 emit_store_dst(jd, iptr, d);
801 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
802 /* sx.val.i = constant */
804 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
805 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
807 M_ISUB_IMM(iptr->sx.val.i, d);
808 emit_store_dst(jd, iptr, d);
811 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
813 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
814 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
815 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
816 if (s2 == GET_LOW_REG(d)) {
817 M_INTMOVE(s1, REG_ITMP1);
818 M_ISUB(s2, REG_ITMP1);
819 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
822 M_INTMOVE(s1, GET_LOW_REG(d));
823 M_ISUB(s2, GET_LOW_REG(d));
825 /* don't use REG_ITMP1 */
826 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
827 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
828 if (s2 == GET_HIGH_REG(d)) {
829 M_INTMOVE(s1, REG_ITMP2);
830 M_ISUBB(s2, REG_ITMP2);
831 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
834 M_INTMOVE(s1, GET_HIGH_REG(d));
835 M_ISUBB(s2, GET_HIGH_REG(d));
837 emit_store_dst(jd, iptr, d);
840 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
841 /* sx.val.l = constant */
843 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
844 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
846 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
847 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
848 emit_store_dst(jd, iptr, d);
851 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
853 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
854 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
855 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
862 emit_store_dst(jd, iptr, d);
865 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
866 /* sx.val.i = constant */
868 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
869 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
870 M_IMUL_IMM(s1, iptr->sx.val.i, d);
871 emit_store_dst(jd, iptr, d);
874 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
876 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
877 s2 = emit_load_s2_low(jd, iptr, EDX);
878 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
880 M_INTMOVE(s1, REG_ITMP2);
881 M_IMUL(s2, REG_ITMP2);
883 s1 = emit_load_s1_low(jd, iptr, EAX);
884 s2 = emit_load_s2_high(jd, iptr, EDX);
887 M_IADD(EDX, REG_ITMP2);
889 s1 = emit_load_s1_low(jd, iptr, EAX);
890 s2 = emit_load_s2_low(jd, iptr, EDX);
893 M_INTMOVE(EAX, GET_LOW_REG(d));
894 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
896 emit_store_dst(jd, iptr, d);
899 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
900 /* sx.val.l = constant */
902 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
903 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
904 ICONST(EAX, iptr->sx.val.l);
906 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
907 M_IADD(REG_ITMP2, EDX);
908 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
909 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
910 M_IADD(REG_ITMP2, EDX);
911 M_LNGMOVE(EAX_EDX_PACKED, d);
912 emit_store_dst(jd, iptr, d);
915 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
917 s1 = emit_load_s1(jd, iptr, EAX);
918 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
919 d = codegen_reg_of_dst(jd, iptr, EAX);
920 emit_arithmetic_check(cd, iptr, s2);
922 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
924 /* check as described in jvm spec */
926 M_CMP_IMM(0x80000000, EAX);
933 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
934 emit_store_dst(jd, iptr, d);
937 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
939 s1 = emit_load_s1(jd, iptr, EAX);
940 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
941 d = codegen_reg_of_dst(jd, iptr, EDX);
942 emit_arithmetic_check(cd, iptr, s2);
944 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
946 /* check as described in jvm spec */
948 M_CMP_IMM(0x80000000, EAX);
956 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
957 emit_store_dst(jd, iptr, d);
960 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
961 /* sx.val.i = constant */
963 /* TODO: optimize for `/ 2' */
964 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
965 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
969 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
970 M_SRA_IMM(iptr->sx.val.i, d);
971 emit_store_dst(jd, iptr, d);
974 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
975 /* sx.val.i = constant */
977 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
978 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
980 M_MOV(s1, REG_ITMP1);
984 M_AND_IMM(iptr->sx.val.i, d);
986 M_BGE(2 + 2 + 6 + 2);
987 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
989 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
991 emit_store_dst(jd, iptr, d);
994 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
995 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
997 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
998 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1000 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
1001 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
1002 /* XXX could be optimized */
1003 emit_arithmetic_check(cd, iptr, REG_ITMP3);
1005 bte = iptr->sx.s23.s3.bte;
1008 M_LST(s2, REG_SP, 2 * 4);
1010 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1011 M_LST(s1, REG_SP, 0 * 4);
1013 M_MOV_IMM(bte->fp, REG_ITMP3);
1015 emit_store_dst(jd, iptr, d);
1018 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1019 /* sx.val.i = constant */
1021 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1022 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1024 M_TEST(GET_HIGH_REG(d));
1026 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1027 M_IADDC_IMM(0, GET_HIGH_REG(d));
1028 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1029 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1030 emit_store_dst(jd, iptr, d);
1034 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1035 /* sx.val.l = constant */
1037 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1038 if (iptr->dst.var->flags & INMEMORY) {
1039 if (iptr->s1.var->flags & INMEMORY) {
1040 /* Alpha algorithm */
1042 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 4);
1044 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 4 + 4);
1050 /* TODO: hmm, don't know if this is always correct */
1052 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1054 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1060 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4, REG_ITMP1);
1061 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4 + 4, REG_ITMP2);
1063 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1064 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1065 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 4 + 4);
1066 emit_jcc(cd, CC_GE, disp);
1068 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4, REG_ITMP1);
1069 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4 + 4, REG_ITMP2);
1071 emit_neg_reg(cd, REG_ITMP1);
1072 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1073 emit_neg_reg(cd, REG_ITMP2);
1075 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1076 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1078 emit_neg_reg(cd, REG_ITMP1);
1079 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1080 emit_neg_reg(cd, REG_ITMP2);
1082 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 4);
1083 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 4 + 4);
1087 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1088 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1090 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1091 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1092 M_TEST(GET_LOW_REG(s1));
1098 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1100 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1101 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1102 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1103 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1106 emit_store_dst(jd, iptr, d);
1109 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1110 /* sx.val.i = constant */
1112 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1113 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1115 M_SLL_IMM(iptr->sx.val.i, d);
1116 emit_store_dst(jd, iptr, d);
1119 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1121 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1122 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1123 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1124 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1127 emit_store_dst(jd, iptr, d);
1130 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1131 /* sx.val.i = constant */
1133 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1134 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1136 M_SRA_IMM(iptr->sx.val.i, d);
1137 emit_store_dst(jd, iptr, d);
1140 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1142 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1143 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1144 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1145 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1148 emit_store_dst(jd, iptr, d);
1151 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1152 /* sx.val.i = constant */
1154 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1155 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1157 M_SRL_IMM(iptr->sx.val.i, d);
1158 emit_store_dst(jd, iptr, d);
1161 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1163 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1164 s2 = emit_load_s2(jd, iptr, ECX);
1165 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1168 M_TEST_IMM(32, ECX);
1170 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1171 M_CLR(GET_LOW_REG(d));
1172 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1173 M_SLL(GET_LOW_REG(d));
1174 emit_store_dst(jd, iptr, d);
1177 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1178 /* sx.val.i = constant */
1180 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1181 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1183 if (iptr->sx.val.i & 0x20) {
1184 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1185 M_CLR(GET_LOW_REG(d));
1186 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1190 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1192 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1194 emit_store_dst(jd, iptr, d);
1197 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1199 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1200 s2 = emit_load_s2(jd, iptr, ECX);
1201 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1204 M_TEST_IMM(32, ECX);
1206 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1207 M_SRA_IMM(31, GET_HIGH_REG(d));
1208 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1209 M_SRA(GET_HIGH_REG(d));
1210 emit_store_dst(jd, iptr, d);
1213 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1214 /* sx.val.i = constant */
1216 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1217 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1219 if (iptr->sx.val.i & 0x20) {
1220 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1221 M_SRA_IMM(31, GET_HIGH_REG(d));
1222 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1226 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1228 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1230 emit_store_dst(jd, iptr, d);
1233 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1235 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1236 s2 = emit_load_s2(jd, iptr, ECX);
1237 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1240 M_TEST_IMM(32, ECX);
1242 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1243 M_CLR(GET_HIGH_REG(d));
1244 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1245 M_SRL(GET_HIGH_REG(d));
1246 emit_store_dst(jd, iptr, d);
1249 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1250 /* sx.val.l = constant */
1252 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1253 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1255 if (iptr->sx.val.i & 0x20) {
1256 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1257 M_CLR(GET_HIGH_REG(d));
1258 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1262 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1264 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1266 emit_store_dst(jd, iptr, d);
1269 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1271 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1272 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1273 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1280 emit_store_dst(jd, iptr, d);
1283 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1284 /* sx.val.i = constant */
/* Bitwise integer operations (AND / OR / XOR).  The 64-bit (L*) forms
   are synthesized from two independent 32-bit operations on the low
   and high register halves, since i386 has no 64-bit GPRs.
   NOTE(review): this excerpt elides several original lines (else
   branches / break statements between the visible lines). */
1286 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1287 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1289 M_AND_IMM(iptr->sx.val.i, d);
1290 emit_store_dst(jd, iptr, d);
1293 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1295 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1296 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1297 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
/* AND is commutative: if s2 already sits in the destination's low
   word, AND s1 straight into it and save the register move. */
1298 if (s2 == GET_LOW_REG(d))
1299 M_AND(s1, GET_LOW_REG(d));
1301 M_INTMOVE(s1, GET_LOW_REG(d));
1302 M_AND(s2, GET_LOW_REG(d));
1304 /* REG_ITMP1 probably contains low 32-bit of destination */
1305 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1306 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1307 if (s2 == GET_HIGH_REG(d))
1308 M_AND(s1, GET_HIGH_REG(d));
1310 M_INTMOVE(s1, GET_HIGH_REG(d));
1311 M_AND(s2, GET_HIGH_REG(d));
1313 emit_store_dst(jd, iptr, d);
1316 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1317 /* sx.val.l = constant */
1319 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1320 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
/* Low word gets the low 32 bits of the constant, high word the
   upper 32 bits (val.l >> 32). */
1322 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1323 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1324 emit_store_dst(jd, iptr, d);
1327 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1329 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1330 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1331 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1338 emit_store_dst(jd, iptr, d);
1341 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1342 /* sx.val.i = constant */
1344 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1345 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1347 M_OR_IMM(iptr->sx.val.i, d);
1348 emit_store_dst(jd, iptr, d);
1351 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1353 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1354 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1355 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
/* Same commutativity trick as ICMD_LAND above. */
1356 if (s2 == GET_LOW_REG(d))
1357 M_OR(s1, GET_LOW_REG(d));
1359 M_INTMOVE(s1, GET_LOW_REG(d));
1360 M_OR(s2, GET_LOW_REG(d));
1362 /* REG_ITMP1 probably contains low 32-bit of destination */
1363 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1364 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1365 if (s2 == GET_HIGH_REG(d))
1366 M_OR(s1, GET_HIGH_REG(d));
1368 M_INTMOVE(s1, GET_HIGH_REG(d));
1369 M_OR(s2, GET_HIGH_REG(d));
1371 emit_store_dst(jd, iptr, d);
1374 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1375 /* sx.val.l = constant */
1377 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1378 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1380 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1381 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1382 emit_store_dst(jd, iptr, d);
1385 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1387 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1388 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1389 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1396 emit_store_dst(jd, iptr, d);
1399 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1400 /* sx.val.i = constant */
1402 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1403 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1405 M_XOR_IMM(iptr->sx.val.i, d);
1406 emit_store_dst(jd, iptr, d);
1409 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1411 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1412 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1413 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1414 if (s2 == GET_LOW_REG(d))
1415 M_XOR(s1, GET_LOW_REG(d));
1417 M_INTMOVE(s1, GET_LOW_REG(d));
1418 M_XOR(s2, GET_LOW_REG(d));
1420 /* REG_ITMP1 probably contains low 32-bit of destination */
1421 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1422 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1423 if (s2 == GET_HIGH_REG(d))
1424 M_XOR(s1, GET_HIGH_REG(d));
1426 M_INTMOVE(s1, GET_HIGH_REG(d));
1427 M_XOR(s2, GET_HIGH_REG(d));
1429 emit_store_dst(jd, iptr, d);
1432 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1433 /* sx.val.l = constant */
1435 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1436 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1438 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1439 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1440 emit_store_dst(jd, iptr, d);
1444 /* floating operations ************************************************/
/* All FP ICMDs go through the x87 stack (REG_FTMP*); the actual
   arithmetic emit lines (fadd/fsub/fmul/fdiv...) sit between the
   load and store calls but are elided from this excerpt. */
1446 case ICMD_FNEG: /* ..., value ==> ..., - value */
1448 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1449 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1451 emit_store_dst(jd, iptr, d);
1454 case ICMD_DNEG: /* ..., value ==> ..., - value */
1456 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1457 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1459 emit_store_dst(jd, iptr, d);
1462 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1464 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1465 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1466 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1468 emit_store_dst(jd, iptr, d);
1471 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1473 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1474 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1475 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1477 emit_store_dst(jd, iptr, d);
1480 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1482 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1483 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1484 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1486 emit_store_dst(jd, iptr, d);
1489 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1491 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1492 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1493 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1495 emit_store_dst(jd, iptr, d);
1498 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1500 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1501 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1502 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1504 emit_store_dst(jd, iptr, d);
1507 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1509 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1510 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1511 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1513 emit_store_dst(jd, iptr, d);
1516 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1518 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1519 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1520 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1522 emit_store_dst(jd, iptr, d);
1525 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1527 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1528 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1529 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1531 emit_store_dst(jd, iptr, d);
1534 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1536 /* exchanged to skip fxch */
1537 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1538 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1539 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1540 /* emit_fxch(cd); */
/* Loop while FPREM reports a partial remainder: the backwards jump
   displacement is the hand-counted byte size of the loop body —
   keep it in sync if any instruction in the loop changes. */
1545 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1546 emit_store_dst(jd, iptr, d);
/* Pop the leftover divisor off the x87 stack. */
1547 emit_ffree_reg(cd, 0);
1551 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1553 /* exchanged to skip fxch */
1554 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1555 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1556 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1557 /* emit_fxch(cd); */
1562 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1563 emit_store_dst(jd, iptr, d);
1564 emit_ffree_reg(cd, 0);
1568 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1569 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1571 var = VAROP(iptr->s1);
1572 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
/* fild needs a memory operand: load directly from the stack slot,
   or spill a register source into a data-segment scratch word. */
1574 if (var->flags & INMEMORY) {
1575 emit_fildl_membase(cd, REG_SP, var->vv.regoff * 4);
1577 /* XXX not thread safe! */
1578 disp = dseg_add_unique_s4(cd, 0);
1579 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1581 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1582 emit_fildl_membase(cd, REG_ITMP1, disp);
1585 emit_store_dst(jd, iptr, d);
1588 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1589 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1591 var = VAROP(iptr->s1);
1592 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1593 if (var->flags & INMEMORY) {
1594 emit_fildll_membase(cd, REG_SP, var->vv.regoff * 4);
1597 log_text("L2F: longs have to be in memory");
1600 emit_store_dst(jd, iptr, d);
1603 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1605 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1606 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1608 emit_mov_imm_reg(cd, 0, REG_ITMP1);
/* Temporarily switch the x87 control word so fistp truncates
   (Java round-toward-zero), then restore round-to-nearest. */
1611 /* Round to zero, 53-bit mode, exception masked */
1612 disp = dseg_add_s4(cd, 0x0e7f);
1613 emit_fldcw_membase(cd, REG_ITMP1, disp);
1615 var = VAROP(iptr->dst);
1616 var1 = VAROP(iptr->s1);
1618 if (var->flags & INMEMORY) {
1619 emit_fistpl_membase(cd, REG_SP, var->vv.regoff * 4);
1621 /* Round to nearest, 53-bit mode, exceptions masked */
1622 disp = dseg_add_s4(cd, 0x027f);
1623 emit_fldcw_membase(cd, REG_ITMP1, disp);
/* 0x80000000 is the x87 "integer indefinite" result; on a match,
   fall into the slow path that calls asm_builtin_f2i for correct
   NaN/overflow semantics. */
1625 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1626 REG_SP, var->vv.regoff * 4);
1629 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1631 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1634 /* XXX not thread safe! */
1635 disp = dseg_add_unique_s4(cd, 0);
1636 emit_fistpl_membase(cd, REG_ITMP1, disp);
1637 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1639 /* Round to nearest, 53-bit mode, exceptions masked */
1640 disp = dseg_add_s4(cd, 0x027f);
1641 emit_fldcw_membase(cd, REG_ITMP1, disp);
1643 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
/* disp is the hand-counted byte length of the slow path below,
   used as a forward-jump displacement to skip it. */
1646 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1647 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1650 emit_jcc(cd, CC_NE, disp);
1652 /* XXX: change this when we use registers */
1653 emit_flds_membase(cd, REG_SP, var1->vv.regoff * 4);
1654 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1655 emit_call_reg(cd, REG_ITMP1);
1657 if (var->flags & INMEMORY) {
1658 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1661 M_INTMOVE(REG_RESULT, var->vv.regoff);
1665 case ICMD_D2I: /* ..., value ==> ..., (int) value */
/* Same scheme as ICMD_F2I, but with double loads (fldl) and the
   asm_builtin_d2i slow path. */
1667 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1668 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1670 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1673 /* Round to zero, 53-bit mode, exception masked */
1674 disp = dseg_add_s4(cd, 0x0e7f);
1675 emit_fldcw_membase(cd, REG_ITMP1, disp);
1677 var = VAROP(iptr->dst);
1678 var1 = VAROP(iptr->s1);
1680 if (var->flags & INMEMORY) {
1681 emit_fistpl_membase(cd, REG_SP, var->vv.regoff * 4);
1683 /* Round to nearest, 53-bit mode, exceptions masked */
1684 disp = dseg_add_s4(cd, 0x027f);
1685 emit_fldcw_membase(cd, REG_ITMP1, disp);
1687 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1688 REG_SP, var->vv.regoff * 4);
1691 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1693 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1696 /* XXX not thread safe! */
1697 disp = dseg_add_unique_s4(cd, 0);
1698 emit_fistpl_membase(cd, REG_ITMP1, disp);
1699 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1701 /* Round to nearest, 53-bit mode, exceptions masked */
1702 disp = dseg_add_s4(cd, 0x027f);
1703 emit_fldcw_membase(cd, REG_ITMP1, disp);
1705 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1708 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1709 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1712 emit_jcc(cd, CC_NE, disp);
1714 /* XXX: change this when we use registers */
1715 emit_fldl_membase(cd, REG_SP, var1->vv.regoff * 4);
1716 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1717 emit_call_reg(cd, REG_ITMP1);
1719 if (var->flags & INMEMORY) {
1720 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1722 M_INTMOVE(REG_RESULT, var->vv.regoff);
1726 case ICMD_F2L: /* ..., value ==> ..., (long) value */
/* Like F2I but produces a 64-bit result via fistpll; the indefinite
   check tests the high word (0x80000000) and the low word (0) before
   taking the asm_builtin_f2l slow path. */
1728 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1729 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1731 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1734 /* Round to zero, 53-bit mode, exception masked */
1735 disp = dseg_add_s4(cd, 0x0e7f);
1736 emit_fldcw_membase(cd, REG_ITMP1, disp);
1738 var = VAROP(iptr->dst);
1739 var1 = VAROP(iptr->s1);
1741 if (var->flags & INMEMORY) {
1742 emit_fistpll_membase(cd, REG_SP, var->vv.regoff * 4);
1744 /* Round to nearest, 53-bit mode, exceptions masked */
1745 disp = dseg_add_s4(cd, 0x027f);
1746 emit_fldcw_membase(cd, REG_ITMP1, disp);
1748 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1749 REG_SP, var->vv.regoff * 4 + 4);
1752 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1754 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1757 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1759 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4 + 4);
1761 emit_jcc(cd, CC_NE, disp);
1763 emit_alu_imm_membase(cd, ALU_CMP, 0,
1764 REG_SP, var->vv.regoff * 4);
1767 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1769 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1771 emit_jcc(cd, CC_NE, disp);
1773 /* XXX: change this when we use registers */
1774 emit_flds_membase(cd, REG_SP, var1->vv.regoff * 4);
1775 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1776 emit_call_reg(cd, REG_ITMP1);
1777 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1778 emit_mov_reg_membase(cd, REG_RESULT2,
1779 REG_SP, var->vv.regoff * 4 + 4);
1782 log_text("F2L: longs have to be in memory");
1787 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1789 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1790 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1792 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1795 /* Round to zero, 53-bit mode, exception masked */
1796 disp = dseg_add_s4(cd, 0x0e7f);
1797 emit_fldcw_membase(cd, REG_ITMP1, disp);
1799 var = VAROP(iptr->dst);
1800 var1 = VAROP(iptr->s1);
1802 if (var->flags & INMEMORY) {
1803 emit_fistpll_membase(cd, REG_SP, var->vv.regoff * 4);
1805 /* Round to nearest, 53-bit mode, exceptions masked */
1806 disp = dseg_add_s4(cd, 0x027f);
1807 emit_fldcw_membase(cd, REG_ITMP1, disp);
1809 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1810 REG_SP, var->vv.regoff * 4 + 4);
1813 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1815 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1818 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1820 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4 + 4);
1822 emit_jcc(cd, CC_NE, disp);
1824 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff * 4);
1827 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1829 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1831 emit_jcc(cd, CC_NE, disp);
1833 /* XXX: change this when we use registers */
1834 emit_fldl_membase(cd, REG_SP, var1->vv.regoff * 4);
1835 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1836 emit_call_reg(cd, REG_ITMP1);
1837 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1838 emit_mov_reg_membase(cd, REG_RESULT2,
1839 REG_SP, var->vv.regoff * 4 + 4);
1842 log_text("D2L: longs have to be in memory");
1847 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1849 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1850 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1852 emit_store_dst(jd, iptr, d);
1855 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1857 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1858 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1860 emit_store_dst(jd, iptr, d);
1863 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
/* Compare via FPU status word in AX; bit 0x400 (C2) flags an
   unordered (NaN) compare.  fcmpl maps unordered to -1 (GT path
   here because operands are loaded exchanged). */
1866 /* exchanged to skip fxch */
1867 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1868 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1869 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1870 /* emit_fxch(cd); */
1873 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1874 emit_jcc(cd, CC_E, 6);
1875 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
/* Three-way result: d = 0 / -1 / +1 selected by forward jumps over
   hand-counted instruction byte lengths. */
1877 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1878 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1879 emit_jcc(cd, CC_B, 3 + 5);
1880 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1881 emit_jmp_imm(cd, 3);
1882 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1883 emit_store_dst(jd, iptr, d);
1886 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
/* Same as FCMPL but NaN yields +1: on unordered, force the "below"
   condition by setting AH before the flag decode. */
1889 /* exchanged to skip fxch */
1890 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1891 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1892 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1893 /* emit_fxch(cd); */
1896 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1897 emit_jcc(cd, CC_E, 3);
1898 emit_movb_imm_reg(cd, 1, REG_AH);
1900 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1901 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1902 emit_jcc(cd, CC_B, 3 + 5);
1903 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1904 emit_jmp_imm(cd, 3);
1905 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1906 emit_store_dst(jd, iptr, d);
1910 /* memory operations **************************************************/
/* Array loads/stores.  Each arm does an implicit null-pointer check
   plus an explicit bounds check, then addresses the element as
   data[0] + index << log2(element size). */
1912 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1914 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1915 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1916 /* implicit null-pointer check */
1917 M_ILD(d, s1, OFFSET(java_arrayheader, size));
1918 emit_store_dst(jd, iptr, d);
1921 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1923 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1924 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1925 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1926 /* implicit null-pointer check */
1927 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1928 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray, data[0]),
1930 emit_store_dst(jd, iptr, d);
1933 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1935 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1936 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1937 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1938 /* implicit null-pointer check */
1939 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
/* char is unsigned 16-bit: zero-extending load. */
1940 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray, data[0]),
1942 emit_store_dst(jd, iptr, d);
1945 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1947 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1948 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1949 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1950 /* implicit null-pointer check */
1951 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
/* short is signed 16-bit: sign-extending load. */
1952 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray, data[0]),
1954 emit_store_dst(jd, iptr, d);
1957 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1959 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1960 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1961 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1962 /* implicit null-pointer check */
1963 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1964 emit_mov_memindex_reg(cd, OFFSET(java_intarray, data[0]),
1966 emit_store_dst(jd, iptr, d);
1969 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1971 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1972 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1973 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1974 /* implicit null-pointer check */
1975 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
/* Longs live in memory on i386: copy the element halves through
   REG_ITMP3 into the destination stack slot. */
1977 var = VAROP(iptr->dst);
1979 assert(var->flags & INMEMORY);
1980 emit_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]),
1981 s1, s2, 3, REG_ITMP3);
1982 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff * 4);
1983 emit_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]) + 4,
1984 s1, s2, 3, REG_ITMP3);
1985 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff * 4 + 4);
1988 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1990 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1991 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1992 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1993 /* implicit null-pointer check */
1994 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1995 emit_flds_memindex(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2);
1996 emit_store_dst(jd, iptr, d);
1999 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2001 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2002 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2003 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
2004 /* implicit null-pointer check */
2005 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2006 emit_fldl_memindex(cd, OFFSET(java_doublearray, data[0]), s1, s2,3);
2007 emit_store_dst(jd, iptr, d);
2010 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2012 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2013 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2014 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2015 /* implicit null-pointer check */
2016 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2017 emit_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]),
2019 emit_store_dst(jd, iptr, d);
2023 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2025 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2026 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2027 /* implicit null-pointer check */
2028 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2029 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
/* A byte store needs an 8-bit addressable register (AL..DL). */
2031 /* because EBP, ESI, EDI have no xH and xL nibbles */
2032 M_INTMOVE(s3, REG_ITMP3);
2035 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]),
2039 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2041 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2042 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2043 /* implicit null-pointer check */
2044 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2045 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2046 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]),
2050 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2052 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2053 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2054 /* implicit null-pointer check */
2055 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2056 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2057 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]),
2061 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2063 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2064 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2065 /* implicit null-pointer check */
2066 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2067 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2068 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]),
2072 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2074 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2075 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2076 /* implicit null-pointer check */
2077 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
/* Store the long value's two halves from its stack slot. */
2079 var = VAROP(iptr->sx.s23.s3);
2081 assert(var->flags & INMEMORY);
2082 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff * 4, REG_ITMP3);
2083 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray, data[0])
2085 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff * 4 + 4, REG_ITMP3);
2086 emit_mov_reg_memindex(cd, REG_ITMP3,
2087 OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2090 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2092 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2093 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2094 /* implicit null-pointer check */
2095 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2096 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2097 emit_fstps_memindex(cd, OFFSET(java_floatarray, data[0]), s1, s2,2);
2100 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2102 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2103 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2104 /* implicit null-pointer check */
2105 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2106 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2107 emit_fstpl_memindex(cd, OFFSET(java_doublearray, data[0]),
2111 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2113 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2114 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2115 /* implicit null-pointer check */
2116 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2117 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
/* Reference stores must pass the runtime covariance check
   (BUILTIN_canstore); the builtin call clobbers the temporaries,
   so all three operands are reloaded afterwards. */
2119 M_AST(s1, REG_SP, 0 * 4);
2120 M_AST(s3, REG_SP, 1 * 4);
2121 M_MOV_IMM(BUILTIN_canstore, REG_ITMP1);
2123 emit_exception_check(cd, iptr);
2125 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2126 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2127 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2128 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]),
2132 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2134 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2135 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2136 /* implicit null-pointer check */
2137 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2138 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2139 OFFSET(java_bytearray, data[0]), s1, s2, 0);
2142 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2144 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2145 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2146 /* implicit null-pointer check */
2147 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2148 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2149 OFFSET(java_chararray, data[0]), s1, s2, 1);
2152 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2154 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2155 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2156 /* implicit null-pointer check */
2157 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2158 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2159 OFFSET(java_shortarray, data[0]), s1, s2, 1);
2162 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2164 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2165 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2166 /* implicit null-pointer check */
2167 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2168 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2169 OFFSET(java_intarray, data[0]), s1, s2, 2);
2172 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2174 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2175 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2176 /* implicit null-pointer check */
2177 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2178 emit_mov_imm_memindex(cd,
2179 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2180 OFFSET(java_longarray, data[0]), s1, s2, 3);
/* NOTE(review): the high word is derived by arithmetic-shifting the
   low 32 bits ((s4)constval >> 31), i.e. it only reproduces the
   sign-extension of the low word — presumably only constants of
   that form reach this arm; verify against the ICMD optimizer. */
2181 emit_mov_imm_memindex(cd,
2182 ((s4)iptr->sx.s23.s3.constval) >> 31,
2183 OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2186 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
/* Only the null constant can be stored without a canstore check. */
2188 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2189 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2190 /* implicit null-pointer check */
2191 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2192 emit_mov_imm_memindex(cd, 0,
2193 OFFSET(java_objectarray, data[0]), s1, s2, 2);
/* Static and instance field access.  Unresolved fields get a patcher
   reference so the address/offset is filled in lazily; resolved
   static fields additionally get a PATCHER_clinit when their class
   is not yet initialized. */
2197 case ICMD_GETSTATIC: /* ... ==> ..., value */
2199 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2200 uf = iptr->sx.s23.s3.uf;
2201 fieldtype = uf->fieldref->parseddesc.fd->type;
2204 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2208 fi = iptr->sx.s23.s3.fmiref->p.field;
2209 fieldtype = fi->type;
2210 disp = (ptrint) &(fi->value);
2212 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2213 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
/* Load the field's absolute address, then a type-dependent load. */
2216 M_MOV_IMM(disp, REG_ITMP1);
2217 switch (fieldtype) {
2220 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2221 M_ILD(d, REG_ITMP1, 0);
2224 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2225 M_LLD(d, REG_ITMP1, 0);
2228 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2229 M_FLD(d, REG_ITMP1, 0);
2232 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2233 M_DLD(d, REG_ITMP1, 0);
2236 emit_store_dst(jd, iptr, d);
2239 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2241 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2242 uf = iptr->sx.s23.s3.uf;
2243 fieldtype = uf->fieldref->parseddesc.fd->type;
2246 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2249 fi = iptr->sx.s23.s3.fmiref->p.field;
2250 fieldtype = fi->type;
2251 disp = (ptrint) &(fi->value);
2253 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2254 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2257 M_MOV_IMM(disp, REG_ITMP1);
2258 switch (fieldtype) {
2261 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2262 M_IST(s1, REG_ITMP1, 0);
2265 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2266 M_LST(s1, REG_ITMP1, 0);
2269 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2270 emit_fstps_membase(cd, REG_ITMP1, 0);
2273 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2274 emit_fstpl_membase(cd, REG_ITMP1, 0);
2279 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2280 /* val = value (in current instruction) */
2281 /* following NOP) */
2283 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2284 uf = iptr->sx.s23.s3.uf;
2285 fieldtype = uf->fieldref->parseddesc.fd->type;
2288 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2291 fi = iptr->sx.s23.s3.fmiref->p.field;
2292 fieldtype = fi->type;
2293 disp = (ptrint) &(fi->value);
2295 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2296 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2299 M_MOV_IMM(disp, REG_ITMP1);
2300 switch (fieldtype) {
2303 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
/* NOTE(review): high word is (s4)constval >> 31 — sign-extension of
   the low word only; same pattern as ICMD_LASTORECONST. */
2306 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2307 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
2314 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2316 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2317 emit_nullpointer_check(cd, iptr, s1);
2319 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2320 unresolved_field *uf = iptr->sx.s23.s3.uf;
2322 fieldtype = uf->fieldref->parseddesc.fd->type;
2324 codegen_addpatchref(cd, PATCHER_getfield,
2325 iptr->sx.s23.s3.uf, 0);
2331 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2333 fieldtype = fi->type;
/* 32-bit displacement forms (M_*LD32) keep the instruction size
   fixed so the patcher can overwrite the offset in place. */
2337 switch (fieldtype) {
2340 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2341 M_ILD32(d, s1, disp);
2344 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2345 M_LLD32(d, s1, disp);
2348 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2349 M_FLD32(d, s1, disp);
2352 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2353 M_DLD32(d, s1, disp);
2356 emit_store_dst(jd, iptr, d);
2359 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2361 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2362 emit_nullpointer_check(cd, iptr, s1);
2364 /* must be done here because of code patching */
2366 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2367 unresolved_field *uf = iptr->sx.s23.s3.uf;
2369 fieldtype = uf->fieldref->parseddesc.fd->type;
2372 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2374 fieldtype = fi->type;
/* Load the value BEFORE adding the patch reference, so the patched
   store instruction starts at a known position. */
2377 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2378 if (IS_2_WORD_TYPE(fieldtype))
2379 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2381 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2384 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2386 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2387 unresolved_field *uf = iptr->sx.s23.s3.uf;
2389 codegen_addpatchref(cd, PATCHER_putfield, uf, 0);
2395 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2400 switch (fieldtype) {
2403 M_IST32(s2, s1, disp);
2406 M_LST32(s2, s1, disp);
2409 emit_fstps_membase32(cd, s1, disp);
2412 emit_fstpl_membase32(cd, s1, disp);
2417 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2418 /* val = value (in current instruction) */
2419 /* following NOP) */
2421 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2422 emit_nullpointer_check(cd, iptr, s1);
2424 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2425 unresolved_field *uf = iptr->sx.s23.s3.uf;
2427 fieldtype = uf->fieldref->parseddesc.fd->type;
2429 codegen_addpatchref(cd, PATCHER_putfieldconst,
2437 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2439 fieldtype = fi->type;
2444 switch (fieldtype) {
2447 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2450 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2451 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2459 /* branch operations **************************************************/
2461 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2463 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2464 M_INTMOVE(s1, REG_ITMP1_XPTR);
2466 #ifdef ENABLE_VERIFIER
2467 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2468 codegen_addpatchref(cd, PATCHER_athrow_areturn,
2469 iptr->sx.s23.s2.uc, 0);
2471 #endif /* ENABLE_VERIFIER */
/* call(0)/pop captures the current PC into REG_ITMP2_XPC for the
   exception handler search. */
2473 M_CALL_IMM(0); /* passing exception pc */
2474 M_POP(REG_ITMP2_XPC);
2476 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2480 case ICMD_GOTO: /* ... ==> ... */
2481 case ICMD_RET: /* ... ==> ... */
2483 #if defined(ENABLE_SSA)
2485 last_cmd_was_goto = true;
2487 /* In case of a Goto phimoves have to be inserted before the */
2490 codegen_emit_phi_moves(jd, bptr);
2493 emit_br(cd, iptr->dst.block);
2497 case ICMD_JSR: /* ... ==> ... */
2499 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
2503 case ICMD_IFNULL: /* ..., value ==> ... */
2504 case ICMD_IFNONNULL:
2506 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
/* Condition code derived from the opcode's offset from IFNULL. */
2508 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
2511 case ICMD_IFEQ: /* ..., value ==> ... */
2518 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2519 M_CMP_IMM(iptr->sx.val.i, s1);
2520 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
2523 case ICMD_IF_LEQ: /* ..., value ==> ... */
/* 64-bit equality against a constant: XOR both halves against the
   constant's halves and OR them — zero iff fully equal.  Comparing
   against 0 shortcuts to OR-ing the two halves. */
2525 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2526 if (iptr->sx.val.l == 0) {
2527 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2528 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2531 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2532 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2533 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2534 M_OR(REG_ITMP2, REG_ITMP1);
2536 emit_beq(cd, iptr->dst.block);
2539 case ICMD_IF_LLT: /* ..., value ==> ... */
2541 if (iptr->sx.val.l == 0) {
2542 /* If high 32-bit are less than zero, then the 64-bits
2544 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2546 emit_blt(cd, iptr->dst.block);
/* General case: signed compare on the high word, unsigned compare
   on the low word. */
2549 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2550 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2551 emit_blt(cd, iptr->dst.block);
2553 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2554 emit_bult(cd, iptr->dst.block);
2558 case ICMD_IF_LLE: /* ..., value ==> ... */
2560 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2561 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2562 emit_blt(cd, iptr->dst.block);
2564 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2565 emit_bule(cd, iptr->dst.block);
2568 case ICMD_IF_LNE: /* ..., value ==> ... */
2570 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2571 if (iptr->sx.val.l == 0) {
2572 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2573 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2576 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2577 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2578 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2579 M_OR(REG_ITMP2, REG_ITMP1);
2581 emit_bne(cd, iptr->dst.block);
2584 case ICMD_IF_LGT: /* ..., value ==> ... */
2586 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2587 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2588 emit_bgt(cd, iptr->dst.block);
2590 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2591 emit_bugt(cd, iptr->dst.block);
2594 case ICMD_IF_LGE: /* ..., value ==> ... */
2596 if (iptr->sx.val.l == 0) {
2597 /* If high 32-bit are greater equal zero, then the
2599 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2601 emit_bge(cd, iptr->dst.block);
2604 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2605 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2606 emit_bgt(cd, iptr->dst.block);
2608 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2609 emit_buge(cd, iptr->dst.block);
2613 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2614 case ICMD_IF_ICMPNE:
2615 case ICMD_IF_ICMPLT:
2616 case ICMD_IF_ICMPGT:
2617 case ICMD_IF_ICMPGE:
2618 case ICMD_IF_ICMPLE:
2620 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2621 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2623 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);
2626 case ICMD_IF_ACMPEQ: /* ..., value, value ==> ... */
2627 case ICMD_IF_ACMPNE:
2629 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2630 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2632 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);
2635 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
/* XOR the corresponding halves of both operands; both results are
   zero iff the 64-bit values are equal. */
2637 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2638 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2639 M_INTMOVE(s1, REG_ITMP1);
2640 M_XOR(s2, REG_ITMP1);
2641 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2642 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2643 M_INTMOVE(s1, REG_ITMP2);
2644 M_XOR(s2, REG_ITMP2);
2645 M_OR(REG_ITMP1, REG_ITMP2);
2646 emit_beq(cd, iptr->dst.block);
2649 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2651 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2652 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2653 M_INTMOVE(s1, REG_ITMP1);
2654 M_XOR(s2, REG_ITMP1);
2655 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2656 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2657 M_INTMOVE(s1, REG_ITMP2);
2658 M_XOR(s2, REG_ITMP2);
2659 M_OR(REG_ITMP1, REG_ITMP2);
2660 emit_bne(cd, iptr->dst.block);
2663 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2665 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2666 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2668 emit_blt(cd, iptr->dst.block);
2669 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2670 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2673 emit_bult(cd, iptr->dst.block);
2676 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2678 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2679 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2681 emit_bgt(cd, iptr->dst.block);
2682 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2683 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2686 emit_bugt(cd, iptr->dst.block);
2689 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2691 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2692 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2694 emit_blt(cd, iptr->dst.block);
2695 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2696 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2699 emit_bule(cd, iptr->dst.block);
2702 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2704 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2705 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2707 emit_bgt(cd, iptr->dst.block);
2708 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2709 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2712 emit_buge(cd, iptr->dst.block);
2716 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2718 REPLACEMENT_POINT_RETURN(cd, iptr);
2719 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2720 M_INTMOVE(s1, REG_RESULT);
2721 goto nowperformreturn;
2723 case ICMD_LRETURN: /* ..., retvalue ==> ... */
2725 REPLACEMENT_POINT_RETURN(cd, iptr);
2726 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2727 M_LNGMOVE(s1, REG_RESULT_PACKED);
2728 goto nowperformreturn;
2730 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2732 REPLACEMENT_POINT_RETURN(cd, iptr);
2733 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2734 M_INTMOVE(s1, REG_RESULT);
2736 #ifdef ENABLE_VERIFIER
2737 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2738 codegen_addpatchref(cd, PATCHER_athrow_areturn,
2739 iptr->sx.s23.s2.uc, 0);
2741 #endif /* ENABLE_VERIFIER */
2742 goto nowperformreturn;
2744 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2747 REPLACEMENT_POINT_RETURN(cd, iptr);
2748 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2749 goto nowperformreturn;
2751 case ICMD_RETURN: /* ... ==> ... */
2753 REPLACEMENT_POINT_RETURN(cd, iptr);
2759 p = cd->stackframesize;
2761 #if !defined(NDEBUG)
2762 emit_verbosecall_exit(jd);
2765 #if defined(ENABLE_THREADS)
2766 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2767 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 4);
2769 /* we need to save the proper return value */
2770 switch (iptr->opc) {
2773 M_IST(REG_RESULT, REG_SP, rd->memuse * 4);
2777 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 4);
2781 emit_fstps_membase(cd, REG_SP, rd->memuse * 4);
2785 emit_fstpl_membase(cd, REG_SP, rd->memuse * 4);
2789 M_AST(REG_ITMP2, REG_SP, 0);
2790 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2793 /* and now restore the proper return value */
2794 switch (iptr->opc) {
2797 M_ILD(REG_RESULT, REG_SP, rd->memuse * 4);
2801 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 4);
2805 emit_flds_membase(cd, REG_SP, rd->memuse * 4);
2809 emit_fldl_membase(cd, REG_SP, rd->memuse * 4);
2815 /* restore saved registers */
2817 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2818 p--; M_ALD(rd->savintregs[i], REG_SP, p * 4);
2821 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2823 emit_fldl_membase(cd, REG_SP, p * 4);
2824 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2826 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2829 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2833 /* deallocate stack */
2835 if (cd->stackframesize)
2836 M_AADD_IMM(cd->stackframesize * 4, REG_SP);
2843 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2846 branch_target_t *table;
2848 table = iptr->dst.table;
2850 l = iptr->sx.s23.s2.tablelow;
2851 i = iptr->sx.s23.s3.tablehigh;
2853 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2854 M_INTMOVE(s1, REG_ITMP1);
2857 M_ISUB_IMM(l, REG_ITMP1);
2863 M_CMP_IMM(i - 1, REG_ITMP1);
2864 emit_bugt(cd, table[0].block);
2866 /* build jump table top down and use address of lowest entry */
2871 dseg_add_target(cd, table->block);
2875 /* length of dataseg after last dseg_addtarget is used
2878 M_MOV_IMM(0, REG_ITMP2);
2880 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
2886 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2889 lookup_target_t *lookup;
2891 lookup = iptr->dst.lookup;
2893 i = iptr->sx.s23.s2.lookupcount;
2895 MCODECHECK((i<<2)+8);
2896 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2899 M_CMP_IMM(lookup->value, s1);
2900 emit_beq(cd, lookup->target.block);
2904 emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
2909 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
2911 bte = iptr->sx.s23.s3.bte;
2915 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2917 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2918 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
2919 case ICMD_INVOKEINTERFACE:
2921 REPLACEMENT_POINT_INVOKE(cd, iptr);
2923 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2924 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
2928 lm = iptr->sx.s23.s3.fmiref->p.method;
2929 md = lm->parseddesc;
2933 s3 = md->paramcount;
2935 MCODECHECK((s3 << 1) + 64);
2937 /* copy arguments to registers or stack location */
2939 for (s3 = s3 - 1; s3 >= 0; s3--) {
2940 var = VAR(iptr->sx.s23.s2.args[s3]);
2942 /* Already Preallocated (ARGVAR) ? */
2943 if (var->flags & PREALLOC)
2945 if (IS_INT_LNG_TYPE(var->type)) {
2946 if (!md->params[s3].inmemory) {
2947 log_text("No integer argument registers available!");
2951 if (IS_2_WORD_TYPE(var->type)) {
2952 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
2953 M_LST(d, REG_SP, md->params[s3].regoff * 4);
2955 d = emit_load(jd, iptr, var, REG_ITMP1);
2956 M_IST(d, REG_SP, md->params[s3].regoff * 4);
2961 if (!md->params[s3].inmemory) {
2962 s1 = md->params[s3].regoff;
2963 d = emit_load(jd, iptr, var, s1);
2967 d = emit_load(jd, iptr, var, REG_FTMP1);
2968 if (IS_2_WORD_TYPE(var->type))
2969 M_DST(d, REG_SP, md->params[s3].regoff * 4);
2971 M_FST(d, REG_SP, md->params[s3].regoff * 4);
2976 switch (iptr->opc) {
2978 disp = (ptrint) bte->fp;
2979 d = md->returntype.type;
2981 M_MOV_IMM(disp, REG_ITMP1);
2984 emit_exception_check(cd, iptr);
2987 case ICMD_INVOKESPECIAL:
2988 M_ALD(REG_ITMP1, REG_SP, 0 * 4);
2989 emit_nullpointer_check(cd, iptr, REG_ITMP1);
2992 case ICMD_INVOKESTATIC:
2994 unresolved_method *um = iptr->sx.s23.s3.um;
2996 codegen_addpatchref(cd, PATCHER_invokestatic_special,
3000 d = md->returntype.type;
3003 disp = (ptrint) lm->stubroutine;
3004 d = lm->parseddesc->returntype.type;
3007 M_MOV_IMM(disp, REG_ITMP2);
3011 case ICMD_INVOKEVIRTUAL:
3012 M_ALD(REG_ITMP1, REG_SP, 0 * 4);
3013 emit_nullpointer_check(cd, iptr, s1);
3016 unresolved_method *um = iptr->sx.s23.s3.um;
3018 codegen_addpatchref(cd, PATCHER_invokevirtual, um, 0);
3021 d = md->returntype.type;
3024 s1 = OFFSET(vftbl_t, table[0]) +
3025 sizeof(methodptr) * lm->vftblindex;
3026 d = md->returntype.type;
3029 M_ALD(REG_METHODPTR, REG_ITMP1,
3030 OFFSET(java_objectheader, vftbl));
3031 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3035 case ICMD_INVOKEINTERFACE:
3036 M_ALD(REG_ITMP1, REG_SP, 0 * 4);
3037 emit_nullpointer_check(cd, iptr, s1);
3040 unresolved_method *um = iptr->sx.s23.s3.um;
3042 codegen_addpatchref(cd, PATCHER_invokeinterface, um, 0);
3046 d = md->returntype.type;
3049 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3050 sizeof(methodptr) * lm->class->index;
3052 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3054 d = md->returntype.type;
3057 M_ALD(REG_METHODPTR, REG_ITMP1,
3058 OFFSET(java_objectheader, vftbl));
3059 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3060 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3065 /* store size of call code in replacement point */
3067 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3069 /* d contains return type */
3071 if (d != TYPE_VOID) {
3072 #if defined(ENABLE_SSA)
3073 if ((ls == NULL) /* || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) */ ||
3074 (ls->lifetime[iptr->dst.varindex].type != UNUSED))
3075 /* a "living" stackslot */
3078 if (IS_INT_LNG_TYPE(d)) {
3079 if (IS_2_WORD_TYPE(d)) {
3080 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3081 M_LNGMOVE(REG_RESULT_PACKED, s1);
3084 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3085 M_INTMOVE(REG_RESULT, s1);
3089 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3091 emit_store_dst(jd, iptr, s1);
3097 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3099 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3100 /* object type cast-check */
3103 vftbl_t *supervftbl;
3106 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3112 super = iptr->sx.s23.s3.c.cls;
3113 superindex = super->index;
3114 supervftbl = super->vftbl;
3117 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3118 CODEGEN_CRITICAL_SECTION_NEW;
3120 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3122 /* if class is not resolved, check which code to call */
3124 if (super == NULL) {
3126 emit_label_beq(cd, BRANCH_LABEL_1);
3128 codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
3129 iptr->sx.s23.s3.c.ref, 0);
3131 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3132 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3133 emit_label_beq(cd, BRANCH_LABEL_2);
3136 /* interface checkcast code */
3138 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3139 if (super != NULL) {
3141 emit_label_beq(cd, BRANCH_LABEL_3);
3144 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3146 if (super == NULL) {
3147 codegen_addpatchref(cd, PATCHER_checkcast_interface,
3148 iptr->sx.s23.s3.c.ref,
3153 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3154 M_ISUB_IMM32(superindex, REG_ITMP3);
3155 /* XXX do we need this one? */
3157 emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);
3159 M_ALD32(REG_ITMP3, REG_ITMP2,
3160 OFFSET(vftbl_t, interfacetable[0]) -
3161 superindex * sizeof(methodptr*));
3163 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);
3166 emit_label_br(cd, BRANCH_LABEL_4);
3168 emit_label(cd, BRANCH_LABEL_3);
3171 /* class checkcast code */
3173 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3174 if (super == NULL) {
3175 emit_label(cd, BRANCH_LABEL_2);
3179 emit_label_beq(cd, BRANCH_LABEL_5);
3182 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3184 if (super == NULL) {
3185 codegen_addpatchref(cd, PATCHER_checkcast_class,
3186 iptr->sx.s23.s3.c.ref,
3190 M_MOV_IMM(supervftbl, REG_ITMP3);
3192 CODEGEN_CRITICAL_SECTION_START;
3194 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3196 /* if (s1 != REG_ITMP1) { */
3197 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3198 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3199 /* #if defined(ENABLE_THREADS) */
3200 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3202 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
3205 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3206 M_ISUB(REG_ITMP3, REG_ITMP2);
3207 M_MOV_IMM(supervftbl, REG_ITMP3);
3208 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3210 CODEGEN_CRITICAL_SECTION_END;
3214 M_CMP(REG_ITMP3, REG_ITMP2);
3215 emit_classcast_check(cd, iptr, BRANCH_ULE, REG_ITMP3, s1);
3218 emit_label(cd, BRANCH_LABEL_5);
3221 if (super == NULL) {
3222 emit_label(cd, BRANCH_LABEL_1);
3223 emit_label(cd, BRANCH_LABEL_4);
3226 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3229 /* array type cast-check */
3231 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3232 M_AST(s1, REG_SP, 0 * 4);
3234 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3235 codegen_addpatchref(cd, PATCHER_builtin_arraycheckcast,
3236 iptr->sx.s23.s3.c.ref, 0);
3239 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3240 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3243 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3245 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);
3247 d = codegen_reg_of_dst(jd, iptr, s1);
3251 emit_store_dst(jd, iptr, d);
3254 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3258 vftbl_t *supervftbl;
3261 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3267 super = iptr->sx.s23.s3.c.cls;
3268 superindex = super->index;
3269 supervftbl = super->vftbl;
3272 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3273 CODEGEN_CRITICAL_SECTION_NEW;
3275 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3276 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
3279 M_INTMOVE(s1, REG_ITMP1);
3285 /* if class is not resolved, check which code to call */
3287 if (super == NULL) {
3289 emit_label_beq(cd, BRANCH_LABEL_1);
3291 codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
3292 iptr->sx.s23.s3.c.ref, 0);
3294 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3295 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3296 emit_label_beq(cd, BRANCH_LABEL_2);
3299 /* interface instanceof code */
3301 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3302 if (super != NULL) {
3304 emit_label_beq(cd, BRANCH_LABEL_3);
3307 M_ALD(REG_ITMP1, s1, OFFSET(java_objectheader, vftbl));
3309 if (super == NULL) {
3310 codegen_addpatchref(cd, PATCHER_instanceof_interface,
3311 iptr->sx.s23.s3.c.ref, 0);
3315 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3316 M_ISUB_IMM32(superindex, REG_ITMP3);
3319 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3320 6 /* jcc */ + 5 /* mov_imm_reg */);
3323 M_ALD32(REG_ITMP1, REG_ITMP1,
3324 OFFSET(vftbl_t, interfacetable[0]) -
3325 superindex * sizeof(methodptr*));
3327 /* emit_setcc_reg(cd, CC_A, d); */
3328 /* emit_jcc(cd, CC_BE, 5); */
3333 emit_label_br(cd, BRANCH_LABEL_4);
3335 emit_label(cd, BRANCH_LABEL_3);
3338 /* class instanceof code */
3340 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3341 if (super == NULL) {
3342 emit_label(cd, BRANCH_LABEL_2);
3346 emit_label_beq(cd, BRANCH_LABEL_5);
3349 M_ALD(REG_ITMP1, s1, OFFSET(java_objectheader, vftbl));
3351 if (super == NULL) {
3352 codegen_addpatchref(cd, PATCHER_instanceof_class,
3353 iptr->sx.s23.s3.c.ref, 0);
3356 M_MOV_IMM(supervftbl, REG_ITMP2);
3358 CODEGEN_CRITICAL_SECTION_START;
3360 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3361 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3362 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3364 CODEGEN_CRITICAL_SECTION_END;
3366 M_ISUB(REG_ITMP2, REG_ITMP1);
3367 M_CLR(d); /* may be REG_ITMP2 */
3368 M_CMP(REG_ITMP3, REG_ITMP1);
3373 emit_label(cd, BRANCH_LABEL_5);
3376 if (super == NULL) {
3377 emit_label(cd, BRANCH_LABEL_1);
3378 emit_label(cd, BRANCH_LABEL_4);
3381 emit_store_dst(jd, iptr, d);
3385 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3387 /* check for negative sizes and copy sizes to stack if necessary */
3389 MCODECHECK((iptr->s1.argcount << 1) + 64);
3391 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3392 /* copy SAVEDVAR sizes to stack */
3393 var = VAR(iptr->sx.s23.s2.args[s1]);
3395 /* Already Preallocated? */
3396 if (!(var->flags & PREALLOC)) {
3397 if (var->flags & INMEMORY) {
3398 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff * 4);
3399 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3402 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3406 /* is a patcher function set? */
3408 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3409 codegen_addpatchref(cd, PATCHER_builtin_multianewarray,
3410 iptr->sx.s23.s3.c.ref, 0);
3416 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3418 /* a0 = dimension count */
3420 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3422 /* a1 = arraydescriptor */
3424 M_IST_IMM(disp, REG_SP, 1 * 4);
3426 /* a2 = pointer to dimensions = stack pointer */
3428 M_MOV(REG_SP, REG_ITMP1);
3429 M_AADD_IMM(3 * 4, REG_ITMP1);
3430 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3432 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3435 /* check for exception before result assignment */
3437 emit_exception_check(cd, iptr);
3439 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3440 M_INTMOVE(REG_RESULT, s1);
3441 emit_store_dst(jd, iptr, s1);
3445 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3450 } /* for instruction */
3454 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3457 #if defined(ENABLE_SSA)
3460 /* by edge splitting, in Blocks with phi moves there can only */
3461 /* be a goto as last command, no other Jump/Branch Command */
3463 if (!last_cmd_was_goto)
3464 codegen_emit_phi_moves(jd, bptr);
3469 /* At the end of a basic block we may have to append some nops,
3470 because the patcher stub calling code might be longer than the
3471 actual instruction. So codepatching does not change the
3472 following block unintentionally. */
3474 if (cd->mcodeptr < cd->lastmcodeptr) {
3475 while (cd->mcodeptr < cd->lastmcodeptr) {
3480 } /* if (bptr -> flags >= BBREACHED) */
3481 } /* for basic block */
3483 dseg_createlinenumbertable(cd);
3485 /* generate stubs */
3487 emit_patcher_stubs(jd);
3488 REPLACEMENT_EMIT_STUBS(jd);
3490 /* everything's ok */
3495 /* codegen_emit_stub_compiler **************************************************
3497 Emit a stub routine which calls the compiler.
3499 *******************************************************************************/
/* codegen_emit_stub_compiler: emit the tiny per-method stub through which a
   method's first invocation is routed into the JIT compiler.  The stub only
   loads the method descriptor into REG_ITMP1 and the address of the
   asm_call_jit_compiler trampoline into REG_ITMP3.
   NOTE(review): this is a gapped numbered listing -- the lines that set up
   `m`, the final jump through REG_ITMP3, and the closing brace are elided. */
3501 void codegen_emit_stub_compiler(jitdata *jd)
3506 /* get required compiler data */
3511 /* code for the stub */
/* pass the method to compile in REG_ITMP1 (presumably `m` is the methodinfo
   taken from jd -- its assignment falls in an elided line; confirm) */
3513 M_MOV_IMM(m, REG_ITMP1);
/* load the JIT-compiler entry trampoline; the jump itself is elided */
3514 M_MOV_IMM(asm_call_jit_compiler, REG_ITMP3);
3519 /* codegen_emit_stub_native ****************************************************
3521 Emits a stub routine which calls a native method.
3523 *******************************************************************************/
/* codegen_emit_stub_native: emit a stub routine that calls the native
   function `f` implementing method `m` with the native descriptor `nmd`.
   The stub allocates a C stack frame, registers a stackframeinfo (via
   codegen_start_native_call) for stack walking, rewrites the Java argument
   list into the JNI convention (env [+ class for static methods] prepended),
   calls the native code, saves/restores the return value around the
   codegen_finish_native_call teardown, and forwards a pending exception to
   asm_handle_nat_exception.
   NOTE(review): gapped numbered listing -- variable declarations, the actual
   M_CALL sites, switch case labels, #else/#endif lines and closing braces
   fall in the elided ranges between the numbered lines. */
3525 void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f)
3532 s4 i, j; /* count variables */
3536 /* get required compiler data */
3542 /* set some variables */
/* JNI prepends hidden arguments: env, plus the class for static methods */
3545 nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;
3547 /* calculate stackframe size */
/* frame (in 4-byte slots): stackframeinfo + localref_table + saved function
   pointer + 4 outgoing args for start_native_call + (elided terms) */
3549 cd->stackframesize =
3550 sizeof(stackframeinfo) / SIZEOF_VOID_P +
3551 sizeof(localref_table) / SIZEOF_VOID_P +
3552 1 + /* function pointer */
3553 4 + /* 4 arguments (start_native_call) */
3556 /* keep stack 16-byte aligned */
/* NOTE(review): |= 0x3 forces slot count == 3 (mod 4); together with the
   pushed return address this presumably yields 16-byte alignment -- verify
   against the caller's convention */
3558 cd->stackframesize |= 0x3;
3560 /* create method header */
3562 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3563 (void) dseg_add_unique_s4(cd, cd->stackframesize * 4); /* FrameSize */
3564 (void) dseg_add_unique_s4(cd, 0); /* IsSync */
3565 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3566 (void) dseg_add_unique_s4(cd, 0); /* IntSave */
3567 (void) dseg_add_unique_s4(cd, 0); /* FltSave */
3568 (void) dseg_addlinenumbertablesize(cd);
3569 (void) dseg_add_unique_s4(cd, 0); /* ExTableSize */
3571 #if defined(ENABLE_PROFILING)
3572 /* generate native method profiling code */
3574 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
3575 /* count frequency */
/* increment codeinfo->frequency in place: code pointer -> ITMP1, then an
   add-immediate to the frequency field */
3577 M_MOV_IMM(code, REG_ITMP1);
3578 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
3582 /* calculate stackframe size for native function */
3584 M_ASUB_IMM(cd->stackframesize * 4, REG_SP);
3586 #if !defined(NDEBUG)
3587 emit_verbosecall_enter(jd);
3590 /* get function address (this must happen before the stackframeinfo) */
3592 #if !defined(WITH_STATIC_CLASSPATH)
/* with a dynamic classpath the native address is unknown at compile time;
   PATCHER_resolve_native fills it in at run time */
3594 codegen_addpatchref(cd, PATCHER_resolve_native, m, 0);
/* stash the (possibly patched) native function pointer in stack slot 4 */
3597 M_AST_IMM((ptrint) f, REG_SP, 4 * 4);
3599 /* Mark the whole fpu stack as free for native functions (only for saved */
3600 /* register count == 0). */
3602 emit_ffree_reg(cd, 0);
3603 emit_ffree_reg(cd, 1);
3604 emit_ffree_reg(cd, 2);
3605 emit_ffree_reg(cd, 3);
3606 emit_ffree_reg(cd, 4);
3607 emit_ffree_reg(cd, 5);
3608 emit_ffree_reg(cd, 6);
3609 emit_ffree_reg(cd, 7);
3611 /* prepare data structures for native function call */
/* arg0 = address of the stackframeinfo area at the top of this frame */
3613 M_MOV(REG_SP, REG_ITMP1);
3614 M_AADD_IMM(cd->stackframesize * 4, REG_ITMP1);
3616 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3617 M_IST_IMM(0, REG_SP, 1 * 4);
/* arg2 = caller's stack pointer (frame top plus one word) */
3620 M_MOV(REG_SP, REG_ITMP2);
3621 M_AADD_IMM(cd->stackframesize * 4 + SIZEOF_VOID_P, REG_ITMP2);
3623 M_AST(REG_ITMP2, REG_SP, 2 * 4);
/* arg3 = return address of this stub's caller */
3624 M_ALD(REG_ITMP3, REG_SP, cd->stackframesize * 4);
3625 M_AST(REG_ITMP3, REG_SP, 3 * 4);
/* NOTE(review): the M_CALL through REG_ITMP1 is in an elided line */
3626 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
/* reload the native function pointer saved in slot 4 above */
3629 M_ALD(REG_ITMP3, REG_SP, 4 * 4);
3631 /* copy arguments into new stackframe */
/* i walks the Java descriptor, j the native one (offset by the hidden
   env/class parameters), both from last argument to first */
3633 for (i = md->paramcount - 1, j = i + nativeparams; i >= 0; i--, j--) {
3634 t = md->paramtypes[i].type;
3636 if (!md->params[i].inmemory) {
3637 /* no integer argument registers */
3638 } else { /* float/double in memory can be copied like int/longs */
/* s1 = source slot in the caller's frame, s2 = destination slot in this
   frame's outgoing native argument area */
3639 s1 = (md->params[i].regoff + cd->stackframesize + 1) * 4;
3640 s2 = nmd->params[j].regoff * 4;
3642 M_ILD(REG_ITMP1, REG_SP, s1);
3643 M_IST(REG_ITMP1, REG_SP, s2);
/* two-word types (long/double) need the high word copied as well */
3644 if (IS_2_WORD_TYPE(t)) {
3645 M_ILD(REG_ITMP1, REG_SP, s1 + 4);
3646 M_IST(REG_ITMP1, REG_SP, s2 + 4);
3651 /* if function is static, put class into second argument */
3653 if (m->flags & ACC_STATIC)
3654 M_AST_IMM(m->class, REG_SP, 1 * 4);
3656 /* put env into first argument */
3658 M_AST_IMM(_Jv_env, REG_SP, 0 * 4);
3660 /* call the native function */
3664 /* save return value */
/* park the result in stack slots 1..2 so it survives the
   codegen_finish_native_call invocation below */
3666 switch (md->returntype.type) {
3669 M_IST(REG_RESULT, REG_SP, 1 * 4);
3672 M_LST(REG_RESULT_PACKED, REG_SP, 1 * 4);
3675 emit_fsts_membase(cd, REG_SP, 1 * 4);
3678 emit_fstl_membase(cd, REG_SP, 1 * 4);
3684 #if !defined(NDEBUG)
3685 emit_verbosecall_exit(jd);
3688 /* remove native stackframe info */
3690 M_MOV(REG_SP, REG_ITMP1);
3691 M_AADD_IMM(cd->stackframesize * 4, REG_ITMP1);
3693 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3694 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
/* NOTE(review): the call through REG_ITMP1 is elided; REG_RESULT here
   presumably holds codegen_finish_native_call's return value (the pending
   exception pointer), moved aside into ITMP2 -- confirm */
3696 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
3698 /* restore return value */
3700 switch (md->returntype.type) {
3703 M_ILD(REG_RESULT, REG_SP, 1 * 4);
3706 M_LLD(REG_RESULT_PACKED, REG_SP, 1 * 4);
3709 emit_flds_membase(cd, REG_SP, 1 * 4);
3712 emit_fldl_membase(cd, REG_SP, 1 * 4);
/* deallocate the stub's stack frame */
3718 M_AADD_IMM(cd->stackframesize * 4, REG_SP);
3720 /* check for exception */
3727 /* handle exception */
/* exception object -> XPTR; XPC = return address minus 2, presumably to
   point back into the call instruction for PC->method mapping -- confirm */
3729 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
3730 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
3731 M_ASUB_IMM(2, REG_ITMP2_XPC);
/* jump (elided) to the assembler native-exception handler */
3733 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
3736 /* generate patcher stubs */
3738 emit_patcher_stubs(jd);
3743 * These are local overrides for various environment variables in Emacs.
3744 * Please do not remove this and leave it at the end of the file, where
3745 * Emacs will automagically detect them.
3746 * ---------------------------------------------------------------------
3749 * indent-tabs-mode: t
3753 * vim:noexpandtab:sw=4:ts=4: