1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
36 #include "vm/jit/i386/md-abi.h"
38 #include "vm/jit/i386/codegen.h"
39 #include "vm/jit/i386/emit.h"
41 #include "mm/memory.h"
42 #include "native/jni.h"
43 #include "native/localref.h"
44 #include "native/native.h"
46 #include "threads/lock-common.h"
48 #include "vm/builtin.h"
49 #include "vm/exceptions.h"
50 #include "vm/global.h"
51 #include "vm/stringlocal.h"
54 #include "vm/jit/abi.h"
55 #include "vm/jit/asmpart.h"
56 #include "vm/jit/codegen-common.h"
57 #include "vm/jit/dseg.h"
58 #include "vm/jit/emit-common.h"
59 #include "vm/jit/jit.h"
60 #include "vm/jit/linenumbertable.h"
61 #include "vm/jit/parse.h"
62 #include "vm/jit/patcher-common.h"
63 #include "vm/jit/reg.h"
64 #include "vm/jit/replace.h"
65 #include "vm/jit/stacktrace.h"
67 #if defined(ENABLE_SSA)
68 # include "vm/jit/optimizing/lsra.h"
69 # include "vm/jit/optimizing/ssa.h"
70 #elif defined(ENABLE_LSRA)
71 # include "vm/jit/allocator/lsra.h"
74 #include "vmcore/loader.h"
75 #include "vmcore/options.h"
76 #include "vmcore/utf8.h"
79 /* codegen_emit ****************************************************************
81 Generates machine code.
83 *******************************************************************************/
85 bool codegen_emit(jitdata *jd)
91 s4 len, s1, s2, s3, d, disp;
92 int align_off; /* offset for alignment compensation */
97 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
98 builtintable_entry *bte;
101 unresolved_field *uf;
104 #if defined(ENABLE_SSA)
106 bool last_cmd_was_goto;
108 last_cmd_was_goto = false;
112 /* get required compiler data */
119 /* prevent compiler warnings */
130 s4 savedregs_num = 0;
133 /* space to save used callee saved registers */
135 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
136 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
138 cd->stackframesize = rd->memuse + savedregs_num;
141 #if defined(ENABLE_THREADS)
142 /* space to save argument of monitor_enter */
144 if (checksync && code_is_synchronized(code))
145 cd->stackframesize++;
148 /* create method header */
150 /* Keep stack of non-leaf functions 16-byte aligned. */
152 if (!code_is_leafmethod(code)) {
153 ALIGN_ODD(cd->stackframesize);
156 align_off = cd->stackframesize ? 4 : 0;
158 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
159 (void) dseg_add_unique_s4(
160 cd, cd->stackframesize * 8 + align_off); /* FrameSize */
162 code->synchronizedoffset = rd->memuse * 8;
164 /* REMOVEME: We still need it for exception handling in assembler. */
166 if (code_is_leafmethod(code))
167 (void) dseg_add_unique_s4(cd, 1); /* IsLeaf */
169 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
171 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
172 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
174 #if defined(ENABLE_PROFILING)
175 /* generate method profiling code */
177 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
178 /* count frequency */
180 M_MOV_IMM(code, REG_ITMP3);
181 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
185 /* create stack frame (if necessary) */
187 if (cd->stackframesize)
189 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
191 /* save return address and used callee saved registers */
193 p = cd->stackframesize;
194 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
195 p--; M_AST(rd->savintregs[i], REG_SP, p * 8);
197 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
198 p--; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 8);
201 /* take arguments out of register or stack frame */
206 for (p = 0, l = 0; p < md->paramcount; p++) {
207 t = md->paramtypes[p].type;
209 varindex = jd->local_map[l * 5 + t];
210 #if defined(ENABLE_SSA)
212 if (varindex != UNUSED)
213 varindex = ls->var_0[varindex];
214 if ((varindex != UNUSED) && (ls->lifetime[varindex].type == UNUSED))
219 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
222 if (varindex == UNUSED)
226 s1 = md->params[p].regoff;
229 if (IS_INT_LNG_TYPE(t)) { /* integer args */
230 if (!md->params[p].inmemory) { /* register arguments */
231 log_text("integer register argument");
233 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
234 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
236 else { /* reg arg -> spilled */
237 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
241 if (!(var->flags & INMEMORY)) {
243 cd->stackframesize * 8 + 4 + align_off + s1);
246 if (!IS_2_WORD_TYPE(t)) {
247 #if defined(ENABLE_SSA)
248 /* no copy avoiding by now possible with SSA */
250 emit_mov_membase_reg( /* + 4 for return address */
252 cd->stackframesize * 8 + s1 + 4 + align_off,
254 emit_mov_reg_membase(
255 cd, REG_ITMP1, REG_SP, var->vv.regoff);
258 #endif /*defined(ENABLE_SSA)*/
259 /* reuse stackslot */
260 var->vv.regoff = cd->stackframesize * 8 + 4 +
265 #if defined(ENABLE_SSA)
266 /* no copy avoiding by now possible with SSA */
268 emit_mov_membase_reg( /* + 4 for return address */
270 cd->stackframesize * 8 + s1 + 4 + align_off,
272 emit_mov_reg_membase(
273 cd, REG_ITMP1, REG_SP, var->vv.regoff);
274 emit_mov_membase_reg( /* + 4 for return address */
276 cd->stackframesize * 8 + s1 + 4 + 4 + align_off,
278 emit_mov_reg_membase(
279 cd, REG_ITMP1, REG_SP, var->vv.regoff + 4);
282 #endif /*defined(ENABLE_SSA)*/
283 /* reuse stackslot */
284 var->vv.regoff = cd->stackframesize * 8 + 8 + s1;
289 else { /* floating args */
290 if (!md->params[p].inmemory) { /* register arguments */
291 log_text("There are no float argument registers!");
293 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
294 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
295 } else { /* reg arg -> spilled */
296 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 8 */
300 else { /* stack arguments */
301 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
305 cd->stackframesize * 8 + s1 + 4 + align_off);
307 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
313 cd->stackframesize * 8 + s1 + 4 + align_off);
315 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
318 } else { /* stack-arg -> spilled */
319 #if defined(ENABLE_SSA)
320 /* no copy avoiding by now possible with SSA */
322 emit_mov_membase_reg(
324 cd->stackframesize * 8 + s1 + 4 + align_off,
326 emit_mov_reg_membase(
327 cd, REG_ITMP1, REG_SP, var->vv.regoff);
331 cd->stackframesize * 8 + s1 + 4 + align_off);
332 emit_fstps_membase(cd, REG_SP, var->vv.regoff);
337 cd->stackframesize * 8 + s1 + 4 + align_off);
338 emit_fstpl_membase(cd, REG_SP, var->vv.regoff);
342 #endif /*defined(ENABLE_SSA)*/
343 /* reuse stackslot */
344 var->vv.regoff = cd->stackframesize * 8 + 4 +
351 /* call monitorenter function */
353 #if defined(ENABLE_THREADS)
354 if (checksync && code_is_synchronized(code)) {
357 if (m->flags & ACC_STATIC) {
358 M_MOV_IMM(&m->class->object.header, REG_ITMP1);
361 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 8 + 4 + align_off);
364 M_ALD_MEM(REG_ITMP1, EXCEPTION_HARDWARE_NULLPOINTER);
367 M_AST(REG_ITMP1, REG_SP, s1 * 8);
368 M_AST(REG_ITMP1, REG_SP, 0 * 4);
369 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
375 emit_verbosecall_enter(jd);
380 #if defined(ENABLE_SSA)
381 /* with SSA the Header is Basic Block 0 - insert phi Moves if necessary */
383 codegen_emit_phi_moves(jd, ls->basicblocks[0]);
386 /* end of header generation */
388 /* create replacement points */
390 REPLACEMENT_POINTS_INIT(cd, jd);
392 /* walk through all basic blocks */
394 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
396 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
398 if (bptr->flags >= BBREACHED) {
399 /* branch resolving */
401 codegen_resolve_branchrefs(cd, bptr);
403 /* handle replacement points */
405 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
407 #if defined(ENABLE_REPLACEMENT)
408 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
409 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
411 disp = (s4) &(m->hitcountdown);
412 M_ISUB_IMM_MEMABS(1, disp);
418 /* copy interface registers to their destination */
423 #if defined(ENABLE_PROFILING)
424 /* generate basic block profiling code */
426 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
427 /* count frequency */
429 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
430 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
434 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
435 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
438 # if defined(ENABLE_SSA)
440 last_cmd_was_goto = false;
444 var = VAR(bptr->invars[len]);
445 if (bptr->type != BBTYPE_STD) {
446 if (!IS_2_WORD_TYPE(var->type)) {
447 if (bptr->type == BBTYPE_EXH) {
448 d = codegen_reg_of_var(0, var, REG_ITMP1);
449 M_INTMOVE(REG_ITMP1, d);
450 emit_store(jd, NULL, var, d);
454 log_text("copy interface registers(EXH, SBR): longs \
455 have to be in memory (begin 1)");
463 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
467 var = VAR(bptr->invars[len]);
468 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
469 if (!IS_2_WORD_TYPE(var->type)) {
470 if (bptr->type == BBTYPE_EXH) {
471 d = codegen_reg_of_var(0, var, REG_ITMP1);
472 M_INTMOVE(REG_ITMP1, d);
473 emit_store(jd, NULL, var, d);
477 log_text("copy interface registers: longs have to be in \
484 assert((var->flags & INOUT));
489 /* walk through all instructions */
494 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
495 if (iptr->line != currentline) {
496 linenumbertable_list_entry_add(cd, iptr->line);
497 currentline = iptr->line;
500 MCODECHECK(1024); /* 1kB should be enough */
503 case ICMD_NOP: /* ... ==> ... */
504 case ICMD_POP: /* ..., value ==> ... */
505 case ICMD_POP2: /* ..., value, value ==> ... */
508 case ICMD_INLINE_START:
510 REPLACEMENT_POINT_INLINE_START(cd, iptr);
513 case ICMD_INLINE_BODY:
515 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
516 linenumbertable_list_entry_add_inline_start(cd, iptr);
517 linenumbertable_list_entry_add(cd, iptr->line);
520 case ICMD_INLINE_END:
522 linenumbertable_list_entry_add_inline_end(cd, iptr);
523 linenumbertable_list_entry_add(cd, iptr->line);
526 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
528 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
529 emit_nullpointer_check(cd, iptr, s1);
532 /* constant operations ************************************************/
534 case ICMD_ICONST: /* ... ==> ..., constant */
536 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
537 ICONST(d, iptr->sx.val.i);
538 emit_store_dst(jd, iptr, d);
541 case ICMD_LCONST: /* ... ==> ..., constant */
543 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
544 LCONST(d, iptr->sx.val.l);
545 emit_store_dst(jd, iptr, d);
548 case ICMD_FCONST: /* ... ==> ..., constant */
550 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
551 if (iptr->sx.val.f == 0.0) {
555 if (iptr->sx.val.i == 0x80000000) {
559 } else if (iptr->sx.val.f == 1.0) {
562 } else if (iptr->sx.val.f == 2.0) {
568 disp = dseg_add_float(cd, iptr->sx.val.f);
569 emit_mov_imm_reg(cd, 0, REG_ITMP1);
571 emit_flds_membase(cd, REG_ITMP1, disp);
573 emit_store_dst(jd, iptr, d);
576 case ICMD_DCONST: /* ... ==> ..., constant */
578 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
579 if (iptr->sx.val.d == 0.0) {
583 if (iptr->sx.val.l == 0x8000000000000000LL) {
587 } else if (iptr->sx.val.d == 1.0) {
590 } else if (iptr->sx.val.d == 2.0) {
596 disp = dseg_add_double(cd, iptr->sx.val.d);
597 emit_mov_imm_reg(cd, 0, REG_ITMP1);
599 emit_fldl_membase(cd, REG_ITMP1, disp);
601 emit_store_dst(jd, iptr, d);
604 case ICMD_ACONST: /* ... ==> ..., constant */
606 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
608 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
609 patcher_add_patch_ref(jd, PATCHER_aconst,
610 iptr->sx.val.c.ref, 0);
615 if (iptr->sx.val.anyptr == NULL)
618 M_MOV_IMM(iptr->sx.val.anyptr, d);
620 emit_store_dst(jd, iptr, d);
624 /* load/store/copy/move operations ************************************/
642 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
647 /* integer operations *************************************************/
649 case ICMD_INEG: /* ..., value ==> ..., - value */
651 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
652 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
655 emit_store_dst(jd, iptr, d);
658 case ICMD_LNEG: /* ..., value ==> ..., - value */
660 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
661 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
663 M_NEG(GET_LOW_REG(d));
664 M_IADDC_IMM(0, GET_HIGH_REG(d));
665 M_NEG(GET_HIGH_REG(d));
666 emit_store_dst(jd, iptr, d);
669 case ICMD_I2L: /* ..., value ==> ..., value */
671 s1 = emit_load_s1(jd, iptr, EAX);
672 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
675 M_LNGMOVE(EAX_EDX_PACKED, d);
676 emit_store_dst(jd, iptr, d);
679 case ICMD_L2I: /* ..., value ==> ..., value */
681 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
682 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
684 emit_store_dst(jd, iptr, d);
687 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
689 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
690 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
694 emit_store_dst(jd, iptr, d);
697 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
699 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
700 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
702 emit_store_dst(jd, iptr, d);
705 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
707 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
708 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
710 emit_store_dst(jd, iptr, d);
714 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
716 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
717 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
718 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
725 emit_store_dst(jd, iptr, d);
729 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
730 /* sx.val.i = constant */
732 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
733 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
735 /* `inc reg' is slower on p4's (regarding to ia32
736 optimization reference manual and benchmarks) and as
740 M_IADD_IMM(iptr->sx.val.i, d);
741 emit_store_dst(jd, iptr, d);
744 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
746 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
747 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
748 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
749 M_INTMOVE(s1, GET_LOW_REG(d));
750 M_IADD(s2, GET_LOW_REG(d));
751 /* don't use REG_ITMP1 */
752 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
753 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
754 M_INTMOVE(s1, GET_HIGH_REG(d));
755 M_IADDC(s2, GET_HIGH_REG(d));
756 emit_store_dst(jd, iptr, d);
759 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
760 /* sx.val.l = constant */
762 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
763 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
765 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
766 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
767 emit_store_dst(jd, iptr, d);
770 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
772 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
773 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
774 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
776 M_INTMOVE(s1, REG_ITMP1);
777 M_ISUB(s2, REG_ITMP1);
778 M_INTMOVE(REG_ITMP1, d);
784 emit_store_dst(jd, iptr, d);
787 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
788 /* sx.val.i = constant */
790 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
791 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
793 M_ISUB_IMM(iptr->sx.val.i, d);
794 emit_store_dst(jd, iptr, d);
797 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
799 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
800 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
801 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
802 if (s2 == GET_LOW_REG(d)) {
803 M_INTMOVE(s1, REG_ITMP1);
804 M_ISUB(s2, REG_ITMP1);
805 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
808 M_INTMOVE(s1, GET_LOW_REG(d));
809 M_ISUB(s2, GET_LOW_REG(d));
811 /* don't use REG_ITMP1 */
812 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
813 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
814 if (s2 == GET_HIGH_REG(d)) {
815 M_INTMOVE(s1, REG_ITMP2);
816 M_ISUBB(s2, REG_ITMP2);
817 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
820 M_INTMOVE(s1, GET_HIGH_REG(d));
821 M_ISUBB(s2, GET_HIGH_REG(d));
823 emit_store_dst(jd, iptr, d);
826 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
827 /* sx.val.l = constant */
829 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
830 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
832 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
833 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
834 emit_store_dst(jd, iptr, d);
837 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
839 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
840 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
841 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
848 emit_store_dst(jd, iptr, d);
851 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
852 /* sx.val.i = constant */
854 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
855 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
856 M_IMUL_IMM(s1, iptr->sx.val.i, d);
857 emit_store_dst(jd, iptr, d);
860 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
862 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
863 s2 = emit_load_s2_low(jd, iptr, EDX);
864 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
866 M_INTMOVE(s1, REG_ITMP2);
867 M_IMUL(s2, REG_ITMP2);
869 s1 = emit_load_s1_low(jd, iptr, EAX);
870 s2 = emit_load_s2_high(jd, iptr, EDX);
873 M_IADD(EDX, REG_ITMP2);
875 s1 = emit_load_s1_low(jd, iptr, EAX);
876 s2 = emit_load_s2_low(jd, iptr, EDX);
879 M_INTMOVE(EAX, GET_LOW_REG(d));
880 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
882 emit_store_dst(jd, iptr, d);
885 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
886 /* sx.val.l = constant */
888 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
889 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
890 ICONST(EAX, iptr->sx.val.l);
892 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
893 M_IADD(REG_ITMP2, EDX);
894 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
895 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
896 M_IADD(REG_ITMP2, EDX);
897 M_LNGMOVE(EAX_EDX_PACKED, d);
898 emit_store_dst(jd, iptr, d);
901 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
903 s1 = emit_load_s1(jd, iptr, EAX);
904 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
905 d = codegen_reg_of_dst(jd, iptr, EAX);
906 emit_arithmetic_check(cd, iptr, s2);
908 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
910 /* check as described in jvm spec */
912 M_CMP_IMM(0x80000000, EAX);
919 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
920 emit_store_dst(jd, iptr, d);
923 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
925 s1 = emit_load_s1(jd, iptr, EAX);
926 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
927 d = codegen_reg_of_dst(jd, iptr, EDX);
928 emit_arithmetic_check(cd, iptr, s2);
930 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
932 /* check as described in jvm spec */
934 M_CMP_IMM(0x80000000, EAX);
942 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
943 emit_store_dst(jd, iptr, d);
946 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
947 /* sx.val.i = constant */
949 /* TODO: optimize for `/ 2' */
950 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
951 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
955 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
956 M_SRA_IMM(iptr->sx.val.i, d);
957 emit_store_dst(jd, iptr, d);
960 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
961 /* sx.val.i = constant */
963 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
964 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
966 M_MOV(s1, REG_ITMP1);
970 M_AND_IMM(iptr->sx.val.i, d);
972 M_BGE(2 + 2 + 6 + 2);
973 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
975 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
977 emit_store_dst(jd, iptr, d);
980 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
981 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
983 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
984 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
986 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
987 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
988 /* XXX could be optimized */
989 emit_arithmetic_check(cd, iptr, REG_ITMP3);
991 bte = iptr->sx.s23.s3.bte;
994 M_LST(s2, REG_SP, 2 * 4);
996 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
997 M_LST(s1, REG_SP, 0 * 4);
999 M_MOV_IMM(bte->fp, REG_ITMP3);
1001 emit_store_dst(jd, iptr, d);
1004 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1005 /* sx.val.i = constant */
1007 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1008 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1010 M_TEST(GET_HIGH_REG(d));
1012 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1013 M_IADDC_IMM(0, GET_HIGH_REG(d));
1014 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1015 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1016 emit_store_dst(jd, iptr, d);
1020 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1021 /* sx.val.l = constant */
1023 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1024 if (iptr->dst.var->flags & INMEMORY) {
1025 if (iptr->s1.var->flags & INMEMORY) {
1026 /* Alpha algorithm */
1028 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8);
1030 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1036 /* TODO: hmm, don't know if this is always correct */
1038 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1040 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1046 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1047 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1049 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1050 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1051 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1052 emit_jcc(cd, CC_GE, disp);
1054 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1055 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1057 emit_neg_reg(cd, REG_ITMP1);
1058 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1059 emit_neg_reg(cd, REG_ITMP2);
1061 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1062 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1064 emit_neg_reg(cd, REG_ITMP1);
1065 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1066 emit_neg_reg(cd, REG_ITMP2);
1068 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 8);
1069 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 8 + 4);
1073 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1074 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1076 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1077 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1078 M_TEST(GET_LOW_REG(s1));
1084 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1086 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1087 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1088 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1089 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1092 emit_store_dst(jd, iptr, d);
1095 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1096 /* sx.val.i = constant */
1098 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1099 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1101 M_SLL_IMM(iptr->sx.val.i, d);
1102 emit_store_dst(jd, iptr, d);
1105 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1107 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1108 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1109 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1110 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1113 emit_store_dst(jd, iptr, d);
1116 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1117 /* sx.val.i = constant */
1119 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1120 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1122 M_SRA_IMM(iptr->sx.val.i, d);
1123 emit_store_dst(jd, iptr, d);
1126 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1128 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1129 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1130 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1131 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1134 emit_store_dst(jd, iptr, d);
1137 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1138 /* sx.val.i = constant */
1140 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1141 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1143 M_SRL_IMM(iptr->sx.val.i, d);
1144 emit_store_dst(jd, iptr, d);
1147 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1149 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1150 s2 = emit_load_s2(jd, iptr, ECX);
1151 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1154 M_TEST_IMM(32, ECX);
1156 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1157 M_CLR(GET_LOW_REG(d));
1158 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1159 M_SLL(GET_LOW_REG(d));
1160 emit_store_dst(jd, iptr, d);
1163 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1164 /* sx.val.i = constant */
1166 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1167 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1169 if (iptr->sx.val.i & 0x20) {
1170 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1171 M_CLR(GET_LOW_REG(d));
1172 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1176 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1178 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1180 emit_store_dst(jd, iptr, d);
1183 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1185 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1186 s2 = emit_load_s2(jd, iptr, ECX);
1187 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1190 M_TEST_IMM(32, ECX);
1192 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1193 M_SRA_IMM(31, GET_HIGH_REG(d));
1194 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1195 M_SRA(GET_HIGH_REG(d));
1196 emit_store_dst(jd, iptr, d);
1199 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1200 /* sx.val.i = constant */
1202 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1203 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1205 if (iptr->sx.val.i & 0x20) {
1206 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1207 M_SRA_IMM(31, GET_HIGH_REG(d));
1208 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1212 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1214 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1216 emit_store_dst(jd, iptr, d);
1219 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1221 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1222 s2 = emit_load_s2(jd, iptr, ECX);
1223 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1226 M_TEST_IMM(32, ECX);
1228 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1229 M_CLR(GET_HIGH_REG(d));
1230 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1231 M_SRL(GET_HIGH_REG(d));
1232 emit_store_dst(jd, iptr, d);
1235 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1236 /* sx.val.l = constant */
1238 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1239 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1241 if (iptr->sx.val.i & 0x20) {
1242 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1243 M_CLR(GET_HIGH_REG(d));
1244 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1248 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1250 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1252 emit_store_dst(jd, iptr, d);
1255 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1257 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1258 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1259 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1266 emit_store_dst(jd, iptr, d);
1269 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1270 /* sx.val.i = constant */
1272 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1273 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1275 M_AND_IMM(iptr->sx.val.i, d);
1276 emit_store_dst(jd, iptr, d);
1279 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1281 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1282 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1283 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1284 if (s2 == GET_LOW_REG(d))
1285 M_AND(s1, GET_LOW_REG(d));
1287 M_INTMOVE(s1, GET_LOW_REG(d));
1288 M_AND(s2, GET_LOW_REG(d));
1290 /* REG_ITMP1 probably contains low 32-bit of destination */
1291 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1292 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1293 if (s2 == GET_HIGH_REG(d))
1294 M_AND(s1, GET_HIGH_REG(d));
1296 M_INTMOVE(s1, GET_HIGH_REG(d));
1297 M_AND(s2, GET_HIGH_REG(d));
1299 emit_store_dst(jd, iptr, d);
1302 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1303 /* sx.val.l = constant */
1305 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1306 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1308 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1309 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1310 emit_store_dst(jd, iptr, d);
1313 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1315 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1316 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1317 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1324 emit_store_dst(jd, iptr, d);
1327 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1328 /* sx.val.i = constant */
1330 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1331 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1333 M_OR_IMM(iptr->sx.val.i, d);
1334 emit_store_dst(jd, iptr, d);
1337 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1339 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1340 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1341 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1342 if (s2 == GET_LOW_REG(d))
1343 M_OR(s1, GET_LOW_REG(d));
1345 M_INTMOVE(s1, GET_LOW_REG(d));
1346 M_OR(s2, GET_LOW_REG(d));
1348 /* REG_ITMP1 probably contains low 32-bit of destination */
1349 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1350 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1351 if (s2 == GET_HIGH_REG(d))
1352 M_OR(s1, GET_HIGH_REG(d));
1354 M_INTMOVE(s1, GET_HIGH_REG(d));
1355 M_OR(s2, GET_HIGH_REG(d));
1357 emit_store_dst(jd, iptr, d);
1360 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1361 /* sx.val.l = constant */
1363 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1364 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1366 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1367 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1368 emit_store_dst(jd, iptr, d);
1371 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1373 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1374 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1375 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1382 emit_store_dst(jd, iptr, d);
1385 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1386 /* sx.val.i = constant */
1388 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1389 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1391 M_XOR_IMM(iptr->sx.val.i, d);
1392 emit_store_dst(jd, iptr, d);
1395 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1397 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1398 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1399 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1400 if (s2 == GET_LOW_REG(d))
1401 M_XOR(s1, GET_LOW_REG(d));
1403 M_INTMOVE(s1, GET_LOW_REG(d));
1404 M_XOR(s2, GET_LOW_REG(d));
1406 /* REG_ITMP1 probably contains low 32-bit of destination */
1407 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1408 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1409 if (s2 == GET_HIGH_REG(d))
1410 M_XOR(s1, GET_HIGH_REG(d));
1412 M_INTMOVE(s1, GET_HIGH_REG(d));
1413 M_XOR(s2, GET_HIGH_REG(d));
1415 emit_store_dst(jd, iptr, d);
1418 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1419 /* sx.val.l = constant */
1421 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1422 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1424 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1425 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1426 emit_store_dst(jd, iptr, d);
1430 /* floating operations ************************************************/
1432 case ICMD_FNEG: /* ..., value ==> ..., - value */
1434 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1435 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1437 emit_store_dst(jd, iptr, d);
1440 case ICMD_DNEG: /* ..., value ==> ..., - value */
1442 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1443 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1445 emit_store_dst(jd, iptr, d);
1448 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1450 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1451 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1452 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1454 emit_store_dst(jd, iptr, d);
1457 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1459 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1460 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1461 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1463 emit_store_dst(jd, iptr, d);
1466 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1468 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1469 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1470 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1472 emit_store_dst(jd, iptr, d);
1475 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1477 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1478 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1479 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1481 emit_store_dst(jd, iptr, d);
1484 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1486 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1487 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1488 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1490 emit_store_dst(jd, iptr, d);
1493 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1495 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1496 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1497 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1499 emit_store_dst(jd, iptr, d);
1502 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1504 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1505 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1506 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1508 emit_store_dst(jd, iptr, d);
1511 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1513 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1514 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1515 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1517 emit_store_dst(jd, iptr, d);
1520 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1522 /* exchanged to skip fxch */
1523 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1524 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1525 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1526 /* emit_fxch(cd); */
1531 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1532 emit_store_dst(jd, iptr, d);
1533 emit_ffree_reg(cd, 0);
1537 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1539 /* exchanged to skip fxch */
1540 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1541 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1542 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1543 /* emit_fxch(cd); */
1548 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1549 emit_store_dst(jd, iptr, d);
1550 emit_ffree_reg(cd, 0);
1554 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1555 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1557 var = VAROP(iptr->s1);
1558 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1560 if (var->flags & INMEMORY) {
1561 emit_fildl_membase(cd, REG_SP, var->vv.regoff);
1563 /* XXX not thread safe! */
1564 disp = dseg_add_unique_s4(cd, 0);
1565 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1567 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1568 emit_fildl_membase(cd, REG_ITMP1, disp);
1571 emit_store_dst(jd, iptr, d);
1574 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1575 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1577 var = VAROP(iptr->s1);
1578 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1579 if (var->flags & INMEMORY) {
1580 emit_fildll_membase(cd, REG_SP, var->vv.regoff);
1583 log_text("L2F: longs have to be in memory");
1586 emit_store_dst(jd, iptr, d);
1589 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1591 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1592 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1594 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1597 /* Round to zero, 53-bit mode, exception masked */
1598 disp = dseg_add_s4(cd, 0x0e7f);
1599 emit_fldcw_membase(cd, REG_ITMP1, disp);
1601 var = VAROP(iptr->dst);
1602 var1 = VAROP(iptr->s1);
1604 if (var->flags & INMEMORY) {
1605 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1607 /* Round to nearest, 53-bit mode, exceptions masked */
1608 disp = dseg_add_s4(cd, 0x027f);
1609 emit_fldcw_membase(cd, REG_ITMP1, disp);
1611 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1612 REG_SP, var->vv.regoff);
1615 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1617 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1620 /* XXX not thread safe! */
1621 disp = dseg_add_unique_s4(cd, 0);
1622 emit_fistpl_membase(cd, REG_ITMP1, disp);
1623 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1625 /* Round to nearest, 53-bit mode, exceptions masked */
1626 disp = dseg_add_s4(cd, 0x027f);
1627 emit_fldcw_membase(cd, REG_ITMP1, disp);
1629 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1632 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1633 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1636 emit_jcc(cd, CC_NE, disp);
1638 /* XXX: change this when we use registers */
1639 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1640 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1641 emit_call_reg(cd, REG_ITMP1);
1643 if (var->flags & INMEMORY) {
1644 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1647 M_INTMOVE(REG_RESULT, var->vv.regoff);
1651 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1653 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1654 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1656 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1659 /* Round to zero, 53-bit mode, exception masked */
1660 disp = dseg_add_s4(cd, 0x0e7f);
1661 emit_fldcw_membase(cd, REG_ITMP1, disp);
1663 var = VAROP(iptr->dst);
1664 var1 = VAROP(iptr->s1);
1666 if (var->flags & INMEMORY) {
1667 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1669 /* Round to nearest, 53-bit mode, exceptions masked */
1670 disp = dseg_add_s4(cd, 0x027f);
1671 emit_fldcw_membase(cd, REG_ITMP1, disp);
1673 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1674 REG_SP, var->vv.regoff);
1677 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1679 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1682 /* XXX not thread safe! */
1683 disp = dseg_add_unique_s4(cd, 0);
1684 emit_fistpl_membase(cd, REG_ITMP1, disp);
1685 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1687 /* Round to nearest, 53-bit mode, exceptions masked */
1688 disp = dseg_add_s4(cd, 0x027f);
1689 emit_fldcw_membase(cd, REG_ITMP1, disp);
1691 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1694 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1695 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1698 emit_jcc(cd, CC_NE, disp);
1700 /* XXX: change this when we use registers */
1701 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1702 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1703 emit_call_reg(cd, REG_ITMP1);
1705 if (var->flags & INMEMORY) {
1706 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1708 M_INTMOVE(REG_RESULT, var->vv.regoff);
1712 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1714 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1715 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1717 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1720 /* Round to zero, 53-bit mode, exception masked */
1721 disp = dseg_add_s4(cd, 0x0e7f);
1722 emit_fldcw_membase(cd, REG_ITMP1, disp);
1724 var = VAROP(iptr->dst);
1725 var1 = VAROP(iptr->s1);
1727 if (var->flags & INMEMORY) {
1728 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1730 /* Round to nearest, 53-bit mode, exceptions masked */
1731 disp = dseg_add_s4(cd, 0x027f);
1732 emit_fldcw_membase(cd, REG_ITMP1, disp);
1734 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1735 REG_SP, var->vv.regoff + 4);
1738 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1740 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1743 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1745 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1747 emit_jcc(cd, CC_NE, disp);
1749 emit_alu_imm_membase(cd, ALU_CMP, 0,
1750 REG_SP, var->vv.regoff);
1753 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1755 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1757 emit_jcc(cd, CC_NE, disp);
1759 /* XXX: change this when we use registers */
1760 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1761 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1762 emit_call_reg(cd, REG_ITMP1);
1763 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1764 emit_mov_reg_membase(cd, REG_RESULT2,
1765 REG_SP, var->vv.regoff + 4);
1768 log_text("F2L: longs have to be in memory");
1773 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1775 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1776 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1778 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1781 /* Round to zero, 53-bit mode, exception masked */
1782 disp = dseg_add_s4(cd, 0x0e7f);
1783 emit_fldcw_membase(cd, REG_ITMP1, disp);
1785 var = VAROP(iptr->dst);
1786 var1 = VAROP(iptr->s1);
1788 if (var->flags & INMEMORY) {
1789 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1791 /* Round to nearest, 53-bit mode, exceptions masked */
1792 disp = dseg_add_s4(cd, 0x027f);
1793 emit_fldcw_membase(cd, REG_ITMP1, disp);
1795 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1796 REG_SP, var->vv.regoff + 4);
1799 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1801 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1804 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1806 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1808 emit_jcc(cd, CC_NE, disp);
1810 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff);
1813 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1815 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1817 emit_jcc(cd, CC_NE, disp);
1819 /* XXX: change this when we use registers */
1820 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1821 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1822 emit_call_reg(cd, REG_ITMP1);
1823 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1824 emit_mov_reg_membase(cd, REG_RESULT2,
1825 REG_SP, var->vv.regoff + 4);
1828 log_text("D2L: longs have to be in memory");
1833 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1835 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1836 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1838 emit_store_dst(jd, iptr, d);
1841 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1843 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1844 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1846 emit_store_dst(jd, iptr, d);
1849 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1852 /* exchanged to skip fxch */
1853 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1854 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1855 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1856 /* emit_fxch(cd); */
1859 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1860 emit_jcc(cd, CC_E, 6);
1861 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1863 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1864 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1865 emit_jcc(cd, CC_B, 3 + 5);
1866 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1867 emit_jmp_imm(cd, 3);
1868 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1869 emit_store_dst(jd, iptr, d);
1872 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1875 /* exchanged to skip fxch */
1876 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1877 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1878 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1879 /* emit_fxch(cd); */
1882 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1883 emit_jcc(cd, CC_E, 3);
1884 emit_movb_imm_reg(cd, 1, REG_AH);
1886 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1887 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1888 emit_jcc(cd, CC_B, 3 + 5);
1889 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1890 emit_jmp_imm(cd, 3);
1891 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1892 emit_store_dst(jd, iptr, d);
1896 /* memory operations **************************************************/
1898 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1900 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1901 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1902 /* implicit null-pointer check */
1903 M_ILD(d, s1, OFFSET(java_array_t, size));
1904 emit_store_dst(jd, iptr, d);
1907 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1909 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1910 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1911 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1912 /* implicit null-pointer check */
1913 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1914 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray_t, data[0]),
1916 emit_store_dst(jd, iptr, d);
1919 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1921 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1922 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1923 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1924 /* implicit null-pointer check */
1925 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1926 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray_t, data[0]),
1928 emit_store_dst(jd, iptr, d);
1931 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1933 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1934 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1935 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1936 /* implicit null-pointer check */
1937 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1938 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray_t, data[0]),
1940 emit_store_dst(jd, iptr, d);
1943 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1945 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1946 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1947 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1948 /* implicit null-pointer check */
1949 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1950 emit_mov_memindex_reg(cd, OFFSET(java_intarray_t, data[0]),
1952 emit_store_dst(jd, iptr, d);
1955 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1957 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1958 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1959 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1960 /* implicit null-pointer check */
1961 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1963 var = VAROP(iptr->dst);
1965 assert(var->flags & INMEMORY);
1966 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]),
1967 s1, s2, 3, REG_ITMP3);
1968 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff);
1969 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]) + 4,
1970 s1, s2, 3, REG_ITMP3);
1971 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff + 4);
1974 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1976 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1977 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1978 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1979 /* implicit null-pointer check */
1980 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1981 emit_flds_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2, 2);
1982 emit_store_dst(jd, iptr, d);
1985 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
1987 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1988 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1989 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1990 /* implicit null-pointer check */
1991 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1992 emit_fldl_memindex(cd, OFFSET(java_doublearray_t, data[0]), s1, s2,3);
1993 emit_store_dst(jd, iptr, d);
1996 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
1998 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1999 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2000 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2001 /* implicit null-pointer check */
2002 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2003 emit_mov_memindex_reg(cd, OFFSET(java_objectarray_t, data[0]),
2005 emit_store_dst(jd, iptr, d);
2009 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2011 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2012 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2013 /* implicit null-pointer check */
2014 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2015 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2017 /* because EBP, ESI, EDI have no xH and xL nibbles */
2018 M_INTMOVE(s3, REG_ITMP3);
2021 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray_t, data[0]),
2025 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2027 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2028 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2029 /* implicit null-pointer check */
2030 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2031 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2032 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray_t, data[0]),
2036 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2038 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2039 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2040 /* implicit null-pointer check */
2041 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2042 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2043 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray_t, data[0]),
2047 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2049 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2050 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2051 /* implicit null-pointer check */
2052 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2053 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2054 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray_t, data[0]),
2058 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2060 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2061 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2062 /* implicit null-pointer check */
2063 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2065 var = VAROP(iptr->sx.s23.s3);
2067 assert(var->flags & INMEMORY);
2068 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff, REG_ITMP3);
2069 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray_t, data[0])
2071 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff + 4, REG_ITMP3);
2072 emit_mov_reg_memindex(cd, REG_ITMP3,
2073 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2076 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2078 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2079 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2080 /* implicit null-pointer check */
2081 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2082 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2083 emit_fstps_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2,2);
2086 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2088 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2089 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2090 /* implicit null-pointer check */
2091 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2092 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2093 emit_fstpl_memindex(cd, OFFSET(java_doublearray_t, data[0]),
2097 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2099 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2100 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2101 /* implicit null-pointer check */
2102 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2103 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2105 M_AST(s1, REG_SP, 0 * 4);
2106 M_AST(s3, REG_SP, 1 * 4);
2107 M_MOV_IMM(BUILTIN_FAST_canstore, REG_ITMP1);
2109 emit_arraystore_check(cd, iptr);
2111 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2112 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2113 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2114 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray_t, data[0]),
2118 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2120 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2121 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2122 /* implicit null-pointer check */
2123 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2124 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2125 OFFSET(java_bytearray_t, data[0]), s1, s2, 0);
2128 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2130 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2131 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2132 /* implicit null-pointer check */
2133 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2134 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2135 OFFSET(java_chararray_t, data[0]), s1, s2, 1);
2138 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2140 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2141 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2142 /* implicit null-pointer check */
2143 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2144 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2145 OFFSET(java_shortarray_t, data[0]), s1, s2, 1);
2148 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2150 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2151 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2152 /* implicit null-pointer check */
2153 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2154 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2155 OFFSET(java_intarray_t, data[0]), s1, s2, 2);
2158 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2160 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2161 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2162 /* implicit null-pointer check */
2163 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2164 emit_mov_imm_memindex(cd,
2165 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2166 OFFSET(java_longarray_t, data[0]), s1, s2, 3);
2167 emit_mov_imm_memindex(cd,
2168 ((s4)iptr->sx.s23.s3.constval) >> 31,
2169 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2172 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2174 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2175 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2176 /* implicit null-pointer check */
2177 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2178 emit_mov_imm_memindex(cd, 0,
2179 OFFSET(java_objectarray_t, data[0]), s1, s2, 2);
2183 case ICMD_GETSTATIC: /* ... ==> ..., value */
2185 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2186 uf = iptr->sx.s23.s3.uf;
2187 fieldtype = uf->fieldref->parseddesc.fd->type;
2190 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2194 fi = iptr->sx.s23.s3.fmiref->p.field;
2195 fieldtype = fi->type;
2196 disp = (intptr_t) fi->value;
2198 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2199 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->class, 0);
2202 M_MOV_IMM(disp, REG_ITMP1);
2203 switch (fieldtype) {
2206 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2207 M_ILD(d, REG_ITMP1, 0);
2210 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2211 M_LLD(d, REG_ITMP1, 0);
2214 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2215 M_FLD(d, REG_ITMP1, 0);
2218 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2219 M_DLD(d, REG_ITMP1, 0);
2222 emit_store_dst(jd, iptr, d);
2225 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2227 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2228 uf = iptr->sx.s23.s3.uf;
2229 fieldtype = uf->fieldref->parseddesc.fd->type;
2232 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2235 fi = iptr->sx.s23.s3.fmiref->p.field;
2236 fieldtype = fi->type;
2237 disp = (intptr_t) fi->value;
2239 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2240 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->class, 0);
2243 M_MOV_IMM(disp, REG_ITMP1);
2244 switch (fieldtype) {
2247 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2248 M_IST(s1, REG_ITMP1, 0);
2251 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2252 M_LST(s1, REG_ITMP1, 0);
2255 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2256 emit_fstps_membase(cd, REG_ITMP1, 0);
2259 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2260 emit_fstpl_membase(cd, REG_ITMP1, 0);
2265 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2266 /* val = value (in current instruction) */
2267 /* following NOP) */
2269 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2270 uf = iptr->sx.s23.s3.uf;
2271 fieldtype = uf->fieldref->parseddesc.fd->type;
2274 patcher_add_patch_ref(jd, PATCHER_get_putstatic, uf, 0);
2277 fi = iptr->sx.s23.s3.fmiref->p.field;
2278 fieldtype = fi->type;
2279 disp = (intptr_t) fi->value;
2281 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2282 patcher_add_patch_ref(jd, PATCHER_initialize_class, fi->class, 0);
2285 M_MOV_IMM(disp, REG_ITMP1);
2286 switch (fieldtype) {
2289 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
2292 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2293 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
2300 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2302 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2303 emit_nullpointer_check(cd, iptr, s1);
2305 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2306 uf = iptr->sx.s23.s3.uf;
2307 fieldtype = uf->fieldref->parseddesc.fd->type;
2310 patcher_add_patch_ref(jd, PATCHER_getfield,
2311 iptr->sx.s23.s3.uf, 0);
2314 fi = iptr->sx.s23.s3.fmiref->p.field;
2315 fieldtype = fi->type;
2319 switch (fieldtype) {
2322 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2323 M_ILD32(d, s1, disp);
2326 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2327 M_LLD32(d, s1, disp);
2330 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2331 M_FLD32(d, s1, disp);
2334 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2335 M_DLD32(d, s1, disp);
2338 emit_store_dst(jd, iptr, d);
2341 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2343 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2344 emit_nullpointer_check(cd, iptr, s1);
2346 /* must be done here because of code patching */
2348 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2349 uf = iptr->sx.s23.s3.uf;
2350 fieldtype = uf->fieldref->parseddesc.fd->type;
2353 fi = iptr->sx.s23.s3.fmiref->p.field;
2354 fieldtype = fi->type;
2357 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2358 if (IS_2_WORD_TYPE(fieldtype))
2359 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2361 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2364 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2366 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2368 uf = iptr->sx.s23.s3.uf;
2371 patcher_add_patch_ref(jd, PATCHER_putfield, uf, 0);
2375 fi = iptr->sx.s23.s3.fmiref->p.field;
2379 switch (fieldtype) {
2382 M_IST32(s2, s1, disp);
2385 M_LST32(s2, s1, disp);
2388 emit_fstps_membase32(cd, s1, disp);
2391 emit_fstpl_membase32(cd, s1, disp);
2396 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2397 /* val = value (in current instruction) */
2398 /* following NOP) */
2400 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2401 emit_nullpointer_check(cd, iptr, s1);
2403 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2404 uf = iptr->sx.s23.s3.uf;
2405 fieldtype = uf->fieldref->parseddesc.fd->type;
2408 patcher_add_patch_ref(jd, PATCHER_putfieldconst,
2412 fi = iptr->sx.s23.s3.fmiref->p.field;
2413 fieldtype = fi->type;
2417 switch (fieldtype) {
2420 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2423 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2424 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2432 /* branch operations **************************************************/
2434 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2436 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2437 M_INTMOVE(s1, REG_ITMP1_XPTR);
2439 #ifdef ENABLE_VERIFIER
2440 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2441 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2442 iptr->sx.s23.s2.uc, 0);
2444 #endif /* ENABLE_VERIFIER */
2446 M_CALL_IMM(0); /* passing exception pc */
2447 M_POP(REG_ITMP2_XPC);
2449 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2453 case ICMD_GOTO: /* ... ==> ... */
2454 case ICMD_RET: /* ... ==> ... */
2456 #if defined(ENABLE_SSA)
2458 last_cmd_was_goto = true;
2460 /* In case of a Goto phimoves have to be inserted before the */
2463 codegen_emit_phi_moves(jd, bptr);
2466 emit_br(cd, iptr->dst.block);
2470 case ICMD_JSR: /* ... ==> ... */
2472 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
2476 case ICMD_IFNULL: /* ..., value ==> ... */
2477 case ICMD_IFNONNULL:
2479 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2481 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
2484 case ICMD_IFEQ: /* ..., value ==> ... */
2491 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2492 M_CMP_IMM(iptr->sx.val.i, s1);
2493 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
2496 case ICMD_IF_LEQ: /* ..., value ==> ... */
2498 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2499 if (iptr->sx.val.l == 0) {
2500 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2501 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2504 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2505 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2506 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2507 M_OR(REG_ITMP2, REG_ITMP1);
2509 emit_beq(cd, iptr->dst.block);
2512 case ICMD_IF_LLT: /* ..., value ==> ... */
2514 if (iptr->sx.val.l == 0) {
2515 /* If high 32-bit are less than zero, then the 64-bits
2517 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2519 emit_blt(cd, iptr->dst.block);
2522 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
/* Conditional branches on 64-bit long values, synthesized from two
   32-bit compares: the high words are compared with a signed branch
   first, then the low words with an UNSIGNED branch (the low half of
   a two's-complement long is an unsigned quantity).
   NOTE(review): this listing is a lossy extraction -- case labels,
   braces, else-arms and break statements between the numbered lines
   are missing from this view; comments below describe only what the
   visible lines establish. */
2523 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2524 emit_blt(cd, iptr->dst.block);
2526 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2527 emit_bult(cd, iptr->dst.block);
2531 case ICMD_IF_LLE: /* ..., value ==> ... */
2533 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
/* high < const.high -> taken; high == const.high -> low decides (unsigned) */
2534 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2535 emit_blt(cd, iptr->dst.block);
2537 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2538 emit_bule(cd, iptr->dst.block);
2541 case ICMD_IF_LNE: /* ..., value ==> ... */
2543 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
/* comparison against 0 is cheap: OR the two halves and test the result */
2544 if (iptr->sx.val.l == 0) {
2545 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2546 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
/* general constant: XOR each half with the constant, OR the results --
   nonzero iff the value differs from the constant */
2549 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2550 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2551 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2552 M_OR(REG_ITMP2, REG_ITMP1);
2554 emit_bne(cd, iptr->dst.block);
2557 case ICMD_IF_LGT: /* ..., value ==> ... */
2559 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2560 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2561 emit_bgt(cd, iptr->dst.block);
2563 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2564 emit_bugt(cd, iptr->dst.block);
2567 case ICMD_IF_LGE: /* ..., value ==> ... */
/* >= 0 only needs the sign of the high word */
2569 if (iptr->sx.val.l == 0) {
2570 /* If high 32-bit are greater equal zero, then the
2572 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2574 emit_bge(cd, iptr->dst.block);
2577 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2578 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2579 emit_bgt(cd, iptr->dst.block);
2581 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2582 emit_buge(cd, iptr->dst.block);
/* 32-bit int compares map directly onto one CMP plus a condition-coded
   branch; the branch condition is derived from the ICMD opcode offset */
2586 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2587 case ICMD_IF_ICMPNE:
2588 case ICMD_IF_ICMPLT:
2589 case ICMD_IF_ICMPGT:
2590 case ICMD_IF_ICMPGE:
2591 case ICMD_IF_ICMPLE:
2593 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2594 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2596 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);
/* reference compares: 32-bit pointers, same scheme as int compares */
2599 case ICMD_IF_ACMPEQ: /* ..., value, value ==> ... */
2600 case ICMD_IF_ACMPNE:
2602 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2603 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2605 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);
/* long == long: XOR halves pairwise, OR the two difference words;
   zero result means equal */
2608 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2610 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2611 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2612 M_INTMOVE(s1, REG_ITMP1);
2613 M_XOR(s2, REG_ITMP1);
2614 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2615 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2616 M_INTMOVE(s1, REG_ITMP2);
2617 M_XOR(s2, REG_ITMP2);
2618 M_OR(REG_ITMP1, REG_ITMP2);
2619 emit_beq(cd, iptr->dst.block);
2622 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2624 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2625 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2626 M_INTMOVE(s1, REG_ITMP1);
2627 M_XOR(s2, REG_ITMP1);
2628 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2629 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2630 M_INTMOVE(s1, REG_ITMP2);
2631 M_XOR(s2, REG_ITMP2);
2632 M_OR(REG_ITMP1, REG_ITMP2);
2633 emit_bne(cd, iptr->dst.block);
/* long-vs-long ordering: signed compare on highs, unsigned on lows
   (elided lines presumably hold the second CMP and the high-equal
   fallthrough -- TODO confirm against the unextracted file) */
2636 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2638 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2639 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2641 emit_blt(cd, iptr->dst.block);
2642 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2643 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2646 emit_bult(cd, iptr->dst.block);
2649 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2651 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2652 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2654 emit_bgt(cd, iptr->dst.block);
2655 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2656 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2659 emit_bugt(cd, iptr->dst.block);
2662 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2664 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2665 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2667 emit_blt(cd, iptr->dst.block);
2668 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2669 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2672 emit_bule(cd, iptr->dst.block);
2675 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2677 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2678 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2680 emit_bgt(cd, iptr->dst.block);
2681 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2682 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2685 emit_buge(cd, iptr->dst.block);
/* Method returns: each xRETURN moves the value into the ABI result
   register(s) (EAX / EDX:EAX / FPU st(0)) and jumps to the shared
   epilogue at nowperformreturn.
   NOTE(review): lossy extraction -- break statements, case labels of
   the inner switches and some braces are missing from this view. */
2689 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2691 REPLACEMENT_POINT_RETURN(cd, iptr);
2692 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2693 M_INTMOVE(s1, REG_RESULT);
2694 goto nowperformreturn;
2696 case ICMD_LRETURN: /* ..., retvalue ==> ... */
2698 REPLACEMENT_POINT_RETURN(cd, iptr);
2699 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2700 M_LNGMOVE(s1, REG_RESULT_PACKED);
2701 goto nowperformreturn;
2703 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2705 REPLACEMENT_POINT_RETURN(cd, iptr);
2706 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2707 M_INTMOVE(s1, REG_RESULT);
/* unresolved return type: the verifier needs the class resolved before
   this code may run, so register a patcher reference */
2709 #ifdef ENABLE_VERIFIER
2710 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2711 patcher_add_patch_ref(jd, PATCHER_resolve_class,
2712 iptr->sx.s23.s2.uc, 0);
2714 #endif /* ENABLE_VERIFIER */
2715 goto nowperformreturn;
2717 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2720 REPLACEMENT_POINT_RETURN(cd, iptr);
2721 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2722 goto nowperformreturn;
2724 case ICMD_RETURN: /* ... ==> ... */
2726 REPLACEMENT_POINT_RETURN(cd, iptr);
/* ---- shared epilogue (nowperformreturn presumably starts near here;
   label itself is among the elided lines) ---- */
2732 p = cd->stackframesize;
2734 #if !defined(NDEBUG)
2735 emit_verbosecall_exit(jd);
/* synchronized methods: call LOCK_monitor_exit on the saved object;
   the return value must be spilled around the call because the lock
   call clobbers the result registers */
2738 #if defined(ENABLE_THREADS)
2739 if (checksync && code_is_synchronized(code)) {
2740 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 8);
2742 /* we need to save the proper return value */
2743 switch (iptr->opc) {
2746 M_IST(REG_RESULT, REG_SP, rd->memuse * 8);
2750 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2754 emit_fstps_membase(cd, REG_SP, rd->memuse * 8);
2758 emit_fstpl_membase(cd, REG_SP, rd->memuse * 8);
2762 M_AST(REG_ITMP2, REG_SP, 0);
2763 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2766 /* and now restore the proper return value */
2767 switch (iptr->opc) {
2770 M_ILD(REG_RESULT, REG_SP, rd->memuse * 8);
2774 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2778 emit_flds_membase(cd, REG_SP, rd->memuse * 8);
2782 emit_fldl_membase(cd, REG_SP, rd->memuse * 8);
2788 /* restore saved registers */
2790 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2791 p--; M_ALD(rd->savintregs[i], REG_SP, p * 8);
/* callee-saved FPU values come back via fld; for F/D returns the real
   result already occupies st(0), hence the opcode check */
2794 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2796 emit_fldl_membase(cd, REG_SP, p * 8);
2797 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2799 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2802 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2806 /* deallocate stack */
2808 if (cd->stackframesize)
2809 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
/* TABLESWITCH: range-check the (bias-adjusted) index with one unsigned
   compare, then jump indirectly through a jump table built in the data
   segment. NOTE(review): lossy extraction -- some emitting lines and
   braces between the numbered lines are missing from this view. */
2816 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2819 branch_target_t *table;
2821 table = iptr->dst.table;
2823 l = iptr->sx.s23.s2.tablelow;
2824 i = iptr->sx.s23.s3.tablehigh;
2826 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2827 M_INTMOVE(s1, REG_ITMP1);
/* bias index so the table is 0-based */
2830 M_ISUB_IMM(l, REG_ITMP1);
/* single unsigned compare catches both below-low and above-high;
   table[0] is the default target */
2836 M_CMP_IMM(i - 1, REG_ITMP1);
2837 emit_bugt(cd, table[0].block);
2839 /* build jump table top down and use address of lowest entry */
2844 dseg_add_target(cd, table->block);
2848 /* length of dataseg after last dseg_addtarget is used
/* the 0 immediate is presumably patched to the dseg base address --
   TODO confirm; scale 2 = index * 4 (32-bit entries) */
2851 M_MOV_IMM(0, REG_ITMP2);
2853 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
/* LOOKUPSWITCH: linear sequence of compare-and-branch pairs, one per
   (value, target) entry, ending in an unconditional jump to default */
2859 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2862 lookup_target_t *lookup;
2864 lookup = iptr->dst.lookup;
2866 i = iptr->sx.s23.s2.lookupcount;
2868 MCODECHECK((i<<2)+8);
2869 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2872 M_CMP_IMM(lookup->value, s1);
2873 emit_beq(cd, lookup->target.block);
2877 emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
/* BUILTIN and the four INVOKE* ICMDs share one code path: copy the
   arguments into their ABI locations, then dispatch per opcode to load
   the callee address, and finally move the return value to the
   destination variable.
   NOTE(review): lossy extraction -- case labels of the inner switch,
   else-arms, braces, break statements and the actual call instruction
   are missing from this view. */
2882 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
2884 REPLACEMENT_POINT_FORGC_BUILTIN(cd, iptr);
2886 bte = iptr->sx.s23.s3.bte;
2890 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2892 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2893 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
2894 case ICMD_INVOKEINTERFACE:
2896 REPLACEMENT_POINT_INVOKE(cd, iptr);
/* unresolved callee: only the parsed descriptor is available yet */
2898 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2899 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
2903 lm = iptr->sx.s23.s3.fmiref->p.method;
2904 md = lm->parseddesc;
2908 s3 = md->paramcount;
2910 MCODECHECK((s3 << 1) + 64);
2912 /* copy arguments to registers or stack location */
2914 for (s3 = s3 - 1; s3 >= 0; s3--) {
2915 var = VAR(iptr->sx.s23.s2.args[s3]);
2917 /* Already Preallocated (ARGVAR) ? */
2918 if (var->flags & PREALLOC)
2920 if (IS_INT_LNG_TYPE(var->type)) {
/* on i386 all int/long arguments are passed in memory; a register
   slot here would indicate a register-allocator bug */
2921 if (!md->params[s3].inmemory) {
2922 log_text("No integer argument registers available!");
2926 if (IS_2_WORD_TYPE(var->type)) {
2927 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
2928 M_LST(d, REG_SP, md->params[s3].regoff);
2930 d = emit_load(jd, iptr, var, REG_ITMP1);
2931 M_IST(d, REG_SP, md->params[s3].regoff);
/* float/double arguments */
2936 if (!md->params[s3].inmemory) {
2937 s1 = md->params[s3].regoff;
2938 d = emit_load(jd, iptr, var, s1);
2942 d = emit_load(jd, iptr, var, REG_FTMP1);
2943 if (IS_2_WORD_TYPE(var->type))
2944 M_DST(d, REG_SP, md->params[s3].regoff);
2946 M_FST(d, REG_SP, md->params[s3].regoff);
/* per-opcode dispatch: load callee address into a scratch register */
2951 switch (iptr->opc) {
2953 d = md->returntype.type;
/* builtins may be called directly (fp) or via a compiler stub */
2955 if (bte->stub == NULL) {
2956 M_MOV_IMM(bte->fp, REG_ITMP1);
2959 M_MOV_IMM(bte->stub, REG_ITMP1);
2964 case ICMD_INVOKESPECIAL:
/* implicit null check on the receiver (first stack argument) */
2965 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
2966 emit_nullpointer_check(cd, iptr, REG_ITMP1);
2969 case ICMD_INVOKESTATIC:
2971 unresolved_method *um = iptr->sx.s23.s3.um;
2973 patcher_add_patch_ref(jd, PATCHER_invokestatic_special,
2977 d = md->returntype.type;
2980 disp = (ptrint) lm->stubroutine;
2981 d = lm->parseddesc->returntype.type;
2984 M_MOV_IMM(disp, REG_ITMP2);
2988 case ICMD_INVOKEVIRTUAL:
2989 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
2990 emit_nullpointer_check(cd, iptr, s1);
2993 unresolved_method *um = iptr->sx.s23.s3.um;
2995 patcher_add_patch_ref(jd, PATCHER_invokevirtual, um, 0);
2998 d = md->returntype.type;
/* resolved: method address = vftbl->table[vftblindex] */
3001 s1 = OFFSET(vftbl_t, table[0]) +
3002 sizeof(methodptr) * lm->vftblindex;
3003 d = md->returntype.type;
3006 M_ALD(REG_METHODPTR, REG_ITMP1,
3007 OFFSET(java_object_t, vftbl));
3008 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3012 case ICMD_INVOKEINTERFACE:
3013 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3014 emit_nullpointer_check(cd, iptr, s1);
3017 unresolved_method *um = iptr->sx.s23.s3.um;
3019 patcher_add_patch_ref(jd, PATCHER_invokeinterface, um, 0);
3023 d = md->returntype.type;
/* interface dispatch: negative offset into the interface table,
   then index by the method's position within the interface */
3026 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3027 sizeof(methodptr) * lm->class->index;
3029 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3031 d = md->returntype.type;
3034 M_ALD(REG_METHODPTR, REG_ITMP1,
3035 OFFSET(java_object_t, vftbl));
3036 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3037 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3042 /* store size of call code in replacement point */
3044 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3045 REPLACEMENT_POINT_FORGC_BUILTIN_RETURN(cd, iptr);
3047 /* d contains return type */
3049 if (d != TYPE_VOID) {
3050 #if defined(ENABLE_SSA)
3051 if ((ls == NULL) /* || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) */ ||
3052 (ls->lifetime[iptr->dst.varindex].type != UNUSED))
3053 /* a "living" stackslot */
3056 if (IS_INT_LNG_TYPE(d)) {
3057 if (IS_2_WORD_TYPE(d)) {
3058 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3059 M_LNGMOVE(REG_RESULT_PACKED, s1);
3062 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3063 M_INTMOVE(REG_RESULT, s1);
3067 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3069 emit_store_dst(jd, iptr, s1);
/* CHECKCAST: three strategies -- interface check (walk the interface
   table), class check (subtype test via baseval/diffval range), and
   array check (runtime call to BUILTIN_arraycheckcast). When the
   target class is unresolved, code for BOTH the interface and class
   paths is emitted and patch-time flags select one via the
   BRANCH_LABEL_* skeleton.
   NOTE(review): lossy extraction -- else-arms, braces, null-checks
   and several emitted instructions are missing from this view. */
3075 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3077 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3078 /* object type cast-check */
3081 vftbl_t *supervftbl;
3084 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3090 super = iptr->sx.s23.s3.c.cls;
3091 superindex = super->index;
3092 supervftbl = super->vftbl;
/* the baseval/diffval subtype test reads vftbl fields that a class
   loader may update concurrently -> critical section */
3095 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3096 CODEGEN_CRITICAL_SECTION_NEW;
3098 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3100 /* if class is not resolved, check which code to call */
3102 if (super == NULL) {
3104 emit_label_beq(cd, BRANCH_LABEL_1);
3106 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3107 iptr->sx.s23.s3.c.ref, 0);
/* the 0 immediate is patched to super->flags once resolved */
3109 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3110 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3111 emit_label_beq(cd, BRANCH_LABEL_2);
3114 /* interface checkcast code */
3116 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3117 if (super != NULL) {
3119 emit_label_beq(cd, BRANCH_LABEL_3);
3122 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3124 if (super == NULL) {
3125 patcher_add_patch_ref(jd, PATCHER_checkcast_interface,
3126 iptr->sx.s23.s3.c.ref,
/* index must lie within interfacetablelength, and the table slot
   must be non-NULL, else -> ClassCastException */
3131 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3132 M_ISUB_IMM32(superindex, REG_ITMP3);
3133 /* XXX do we need this one? */
3135 emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);
3137 M_ALD32(REG_ITMP3, REG_ITMP2,
3138 OFFSET(vftbl_t, interfacetable[0]) -
3139 superindex * sizeof(methodptr*));
3141 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);
3144 emit_label_br(cd, BRANCH_LABEL_4);
3146 emit_label(cd, BRANCH_LABEL_3);
3149 /* class checkcast code */
3151 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3152 if (super == NULL) {
3153 emit_label(cd, BRANCH_LABEL_2);
3157 emit_label_beq(cd, BRANCH_LABEL_5);
3160 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3162 if (super == NULL) {
3163 patcher_add_patch_ref(jd, PATCHER_checkcast_class,
3164 iptr->sx.s23.s3.c.ref,
3168 M_MOV_IMM(supervftbl, REG_ITMP3);
3170 CODEGEN_CRITICAL_SECTION_START;
3172 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3174 /* if (s1 != REG_ITMP1) { */
3175 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3176 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3177 /* #if defined(ENABLE_THREADS) */
3178 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3180 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
/* subtype test: (obj.baseval - super.baseval) must be <= super.diffval
   (unsigned), otherwise throw ClassCastException */
3183 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3184 M_ISUB(REG_ITMP3, REG_ITMP2);
3185 M_MOV_IMM(supervftbl, REG_ITMP3);
3186 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3188 CODEGEN_CRITICAL_SECTION_END;
3192 M_CMP(REG_ITMP3, REG_ITMP2);
3193 emit_classcast_check(cd, iptr, BRANCH_ULE, REG_ITMP3, s1);
3196 emit_label(cd, BRANCH_LABEL_5);
3199 if (super == NULL) {
3200 emit_label(cd, BRANCH_LABEL_1);
3201 emit_label(cd, BRANCH_LABEL_4);
3204 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3207 /* array type cast-check */
/* array path: push (objectref, class) and call BUILTIN_arraycheckcast;
   a zero result means the cast fails */
3209 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3210 M_AST(s1, REG_SP, 0 * 4);
3212 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3213 patcher_add_patch_ref(jd, PATCHER_builtin_arraycheckcast,
3214 iptr->sx.s23.s3.c.ref, 0);
3217 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3218 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3221 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3223 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);
3225 d = codegen_reg_of_dst(jd, iptr, s1);
3229 emit_store_dst(jd, iptr, d);
/* INSTANCEOF: same interface/class split as CHECKCAST, but instead of
   throwing it materializes a 0/1 result in d.
   NOTE(review): lossy extraction -- else-arms, braces, the setcc/
   result-producing instructions and some null-checks are missing from
   this view. */
3232 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3236 vftbl_t *supervftbl;
3239 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3245 super = iptr->sx.s23.s3.c.cls;
3246 superindex = super->index;
3247 supervftbl = super->vftbl;
3250 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3251 CODEGEN_CRITICAL_SECTION_NEW;
3253 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3254 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
/* keep source and destination apart: d may alias REG_ITMP2 below */
3257 M_INTMOVE(s1, REG_ITMP1);
3263 /* if class is not resolved, check which code to call */
3265 if (super == NULL) {
3267 emit_label_beq(cd, BRANCH_LABEL_1);
3269 patcher_add_patch_ref(jd, PATCHER_checkcast_instanceof_flags,
3270 iptr->sx.s23.s3.c.ref, 0);
/* 0 immediate is patched to super->flags once resolved */
3272 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3273 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3274 emit_label_beq(cd, BRANCH_LABEL_2);
3277 /* interface instanceof code */
3279 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3280 if (super != NULL) {
3282 emit_label_beq(cd, BRANCH_LABEL_3);
3285 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3287 if (super == NULL) {
3288 patcher_add_patch_ref(jd, PATCHER_instanceof_interface,
3289 iptr->sx.s23.s3.c.ref, 0);
3293 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3294 M_ISUB_IMM32(superindex, REG_ITMP3);
/* disp = byte length of the skipped instruction sequence (hand
   counted opcode sizes) -- presumably used for a forward jcc over
   the table load; TODO confirm against the unextracted file */
3297 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3298 6 /* jcc */ + 5 /* mov_imm_reg */);
3301 M_ALD32(REG_ITMP1, REG_ITMP1,
3302 OFFSET(vftbl_t, interfacetable[0]) -
3303 superindex * sizeof(methodptr*));
3305 /* emit_setcc_reg(cd, CC_A, d); */
3306 /* emit_jcc(cd, CC_BE, 5); */
3311 emit_label_br(cd, BRANCH_LABEL_4);
3313 emit_label(cd, BRANCH_LABEL_3);
3316 /* class instanceof code */
3318 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3319 if (super == NULL) {
3320 emit_label(cd, BRANCH_LABEL_2);
3324 emit_label_beq(cd, BRANCH_LABEL_5);
3327 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3329 if (super == NULL) {
3330 patcher_add_patch_ref(jd, PATCHER_instanceof_class,
3331 iptr->sx.s23.s3.c.ref, 0);
3334 M_MOV_IMM(supervftbl, REG_ITMP2);
3336 CODEGEN_CRITICAL_SECTION_START;
/* subtype test: obj.baseval - super.baseval vs super.diffval */
3338 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3339 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3340 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3342 CODEGEN_CRITICAL_SECTION_END;
3344 M_ISUB(REG_ITMP2, REG_ITMP1);
3345 M_CLR(d); /* may be REG_ITMP2 */
3346 M_CMP(REG_ITMP3, REG_ITMP1);
3351 emit_label(cd, BRANCH_LABEL_5);
3354 if (super == NULL) {
3355 emit_label(cd, BRANCH_LABEL_1);
3356 emit_label(cd, BRANCH_LABEL_4);
3359 emit_store_dst(jd, iptr, d);
/* MULTIANEWARRAY: spill the dimension sizes to a stack block, then
   call BUILTIN_multianewarray(argcount, classinfo, dims-pointer).
   NOTE(review): lossy extraction -- else-arms, braces and the call
   instruction itself are missing from this view. */
3363 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3365 /* check for negative sizes and copy sizes to stack if necessary */
3367 MCODECHECK((iptr->s1.argcount << 1) + 64);
3369 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3370 /* copy SAVEDVAR sizes to stack */
3371 var = VAR(iptr->sx.s23.s2.args[s1]);
3373 /* Already Preallocated? */
3374 if (!(var->flags & PREALLOC)) {
3375 if (var->flags & INMEMORY) {
3376 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff);
/* dims block starts above the 3 outgoing argument slots */
3377 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3380 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3384 /* is a patcher function set? */
3386 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3387 patcher_add_patch_ref(jd, PATCHER_builtin_multianewarray,
3388 iptr->sx.s23.s3.c.ref, 0);
3394 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3396 /* a0 = dimension count */
3398 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3400 /* a1 = arraydescriptor */
3402 M_IST_IMM(disp, REG_SP, 1 * 4);
3404 /* a2 = pointer to dimensions = stack pointer */
3406 M_MOV(REG_SP, REG_ITMP1);
3407 M_AADD_IMM(3 * 4, REG_ITMP1);
3408 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3410 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3413 /* check for exception before result assignment */
3415 emit_exception_check(cd, iptr);
3417 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3418 M_INTMOVE(REG_RESULT, s1);
3419 emit_store_dst(jd, iptr, s1);
/* default case of the ICMD switch: unknown opcode is a JIT bug */
3423 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3428 } /* for instruction */
/* Tail of the per-basic-block loop: SSA phi moves, nop padding for
   patchable code, and finally the patcher trap stubs.
   NOTE(review): lossy extraction -- loop bodies and #endif lines
   between the numbered lines are missing from this view. */
3432 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3435 #if defined(ENABLE_SSA)
3438 /* by edge splitting, in Blocks with phi moves there can only */
3439 /* be a goto as last command, no other Jump/Branch Command */
3441 if (!last_cmd_was_goto)
3442 codegen_emit_phi_moves(jd, bptr);
3447 /* At the end of a basic block we may have to append some nops,
3448 because the patcher stub calling code might be longer than the
3449 actual instruction. So codepatching does not change the
3450 following block unintentionally. */
3452 if (cd->mcodeptr < cd->lastmcodeptr) {
3453 while (cd->mcodeptr < cd->lastmcodeptr) {
3458 } /* if (bptr -> flags >= BBREACHED) */
3459 } /* for basic block */
3461 /* generate stubs */
3463 emit_patcher_traps(jd);
3465 /* everything's ok */
3471 /* codegen_emit_stub_native ****************************************************
3473 Emits a stub routine which calls a native method.
3475 *******************************************************************************/
/* Emit the native-method stub: build an aligned stack frame, record a
   stackframeinfo via codegen_start_native_call, shuffle Java arguments
   into the native (JNI) layout, call the native function f, save and
   restore its return value around codegen_finish_native_call, and
   branch to asm_handle_nat_exception if an exception is pending.
   skipparams = number of extra leading native parameters (env and,
   for static methods, the class) not present in the Java signature.
   NOTE(review): lossy extraction -- local declarations, braces, #endif
   lines, the actual call instructions and the final return/jump are
   missing from this view. */
3477 void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f, int skipparams)
3483 int i, j; /* count variables */
3487 /* get required compiler data */
3493 /* set some variables */
3497 /* calculate stackframe size */
3499 cd->stackframesize =
3500 sizeof(stackframeinfo_t) / SIZEOF_VOID_P +
3501 sizeof(localref_table) / SIZEOF_VOID_P +
3502 4 + /* 4 arguments (start_native_call) */
3505 /* keep stack 16-byte aligned */
3507 ALIGN_ODD(cd->stackframesize);
3509 /* create method header */
3511 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
3512 (void) dseg_add_unique_s4(cd, cd->stackframesize * 8 + 4); /* FrameSize */
3513 (void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
3514 (void) dseg_add_unique_s4(cd, 0); /* IntSave */
3515 (void) dseg_add_unique_s4(cd, 0); /* FltSave */
3517 #if defined(ENABLE_PROFILING)
3518 /* generate native method profiling code */
3520 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
3521 /* count frequency */
3523 M_MOV_IMM(code, REG_ITMP1);
3524 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
3528 /* calculate stackframe size for native function */
3530 M_ASUB_IMM(cd->stackframesize * 8 + 4, REG_SP);
3532 /* Mark the whole fpu stack as free for native functions (only for saved */
3533 /* register count == 0). */
3535 emit_ffree_reg(cd, 0);
3536 emit_ffree_reg(cd, 1);
3537 emit_ffree_reg(cd, 2);
3538 emit_ffree_reg(cd, 3);
3539 emit_ffree_reg(cd, 4);
3540 emit_ffree_reg(cd, 5);
3541 emit_ffree_reg(cd, 6);
3542 emit_ffree_reg(cd, 7);
3544 #if defined(ENABLE_GC_CACAO)
3545 /* remember callee saved int registers in stackframeinfo (GC may need to */
3546 /* recover them during a collection). */
3548 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3549 OFFSET(stackframeinfo_t, intregs);
3551 for (i = 0; i < INT_SAV_CNT; i++)
3552 M_AST(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3555 /* prepare data structures for native function call */
3557 M_MOV(REG_SP, REG_ITMP1);
3558 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3559 M_IST_IMM(0, REG_SP, 1 * 4);
3562 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
3565 /* remember class argument */
/* codegen_start_native_call presumably returns the class in
   REG_RESULT for static methods -- TODO confirm */
3567 if (m->flags & ACC_STATIC)
3568 M_MOV(REG_RESULT, REG_ITMP3);
3570 /* Copy or spill arguments to new locations. */
/* i indexes the Java signature, j the native one (shifted by
   skipparams to leave room for env/class) */
3572 for (i = md->paramcount - 1, j = i + skipparams; i >= 0; i--, j--) {
3573 if (!md->params[i].inmemory)
/* +8 skips the return address and the saved frame word above the
   new frame -- TODO confirm exact layout */
3576 s1 = md->params[i].regoff + cd->stackframesize * 8 + 8;
3577 s2 = nmd->params[j].regoff;
3579 /* float/double in memory can be copied like int/longs */
3581 switch (md->paramtypes[i].type) {
3585 M_ILD(REG_ITMP1, REG_SP, s1);
3586 M_IST(REG_ITMP1, REG_SP, s2);
3590 M_LLD(REG_ITMP12_PACKED, REG_SP, s1);
3591 M_LST(REG_ITMP12_PACKED, REG_SP, s2);
3596 /* Handle native Java methods. */
3598 if (m->flags & ACC_NATIVE) {
3599 /* if function is static, put class into second argument */
3601 if (m->flags & ACC_STATIC)
3602 M_AST(REG_ITMP3, REG_SP, 1 * 4);
3604 /* put env into first argument */
3606 M_AST_IMM(_Jv_env, REG_SP, 0 * 4);
3609 /* Call the native function. */
/* indirect through the data segment so the address is patchable */
3611 disp = dseg_add_functionptr(cd, f);
3612 emit_mov_imm_reg(cd, 0, REG_ITMP3);
3614 M_ALD(REG_ITMP1, REG_ITMP3, disp);
3617 /* save return value */
/* spilled across codegen_finish_native_call, which clobbers the
   result registers */
3619 switch (md->returntype.type) {
3622 M_IST(REG_RESULT, REG_SP, 1 * 8);
3625 M_LST(REG_RESULT_PACKED, REG_SP, 1 * 8);
3628 emit_fsts_membase(cd, REG_SP, 1 * 8);
3631 emit_fstl_membase(cd, REG_SP, 1 * 8);
3637 /* remove native stackframe info */
3639 M_MOV(REG_SP, REG_ITMP1);
3640 M_AST(REG_ITMP1, REG_SP, 0 * 4);
3641 M_IST_IMM(0, REG_SP, 1 * 4);
3644 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
/* pending-exception pointer comes back in REG_RESULT; keep it in
   REG_ITMP2 while the real return value is restored */
3646 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
3648 /* restore return value */
3650 switch (md->returntype.type) {
3653 M_ILD(REG_RESULT, REG_SP, 1 * 8);
3656 M_LLD(REG_RESULT_PACKED, REG_SP, 1 * 8);
3659 emit_flds_membase(cd, REG_SP, 1 * 8);
3662 emit_fldl_membase(cd, REG_SP, 1 * 8);
3668 #if defined(ENABLE_GC_CACAO)
3669 /* restore callee saved int registers from stackframeinfo (GC might have */
3670 /* modified them during a collection). */
3672 disp = cd->stackframesize * 8 - sizeof(stackframeinfo_t) +
3673 OFFSET(stackframeinfo_t, intregs);
3675 for (i = 0; i < INT_SAV_CNT; i++)
3676 M_ALD(abi_registers_integer_saved[i], REG_SP, disp + i * 4);
3679 M_AADD_IMM(cd->stackframesize * 8 + 4, REG_SP);
3681 /* check for exception */
3688 /* handle exception */
3690 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
/* XPC = return address - 2: point back into the call instruction so
   the exception maps to the native call site */
3691 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
3692 M_ASUB_IMM(2, REG_ITMP2_XPC);
3694 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
3700 * These are local overrides for various environment variables in Emacs.
3701 * Please do not remove this and leave it at the end of the file, where
3702 * Emacs will automagically detect them.
3703 * ---------------------------------------------------------------------
3706 * indent-tabs-mode: t
3710 * vim:noexpandtab:sw=4:ts=4: