1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006, 2007 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
23 02110-1301, USA.
25 $Id: codegen.c 8318 2007-08-16 10:05:34Z michi $
38 #include "vm/jit/i386/md-abi.h"
40 #include "vm/jit/i386/codegen.h"
41 #include "vm/jit/i386/emit.h"
43 #include "mm/memory.h"
44 #include "native/jni.h"
45 #include "native/localref.h"
46 #include "native/native.h"
48 #include "threads/lock-common.h"
50 #include "vm/builtin.h"
51 #include "vm/exceptions.h"
52 #include "vm/global.h"
53 #include "vm/stringlocal.h"
56 #include "vm/jit/asmpart.h"
57 #include "vm/jit/codegen-common.h"
58 #include "vm/jit/dseg.h"
59 #include "vm/jit/emit-common.h"
60 #include "vm/jit/jit.h"
61 #include "vm/jit/parse.h"
62 #include "vm/jit/patcher.h"
63 #include "vm/jit/reg.h"
64 #include "vm/jit/replace.h"
65 #include "vm/jit/stacktrace.h"
67 #if defined(ENABLE_SSA)
68 # include "vm/jit/optimizing/lsra.h"
69 # include "vm/jit/optimizing/ssa.h"
70 #elif defined(ENABLE_LSRA)
71 # include "vm/jit/allocator/lsra.h"
74 #include "vmcore/loader.h"
75 #include "vmcore/options.h"
76 #include "vmcore/utf8.h"
79 /* codegen_emit ****************************************************************
81 Generates machine code.
83 *******************************************************************************/
85 bool codegen_emit(jitdata *jd)
91 s4 len, s1, s2, s3, d, disp;
97 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
98 builtintable_entry *bte;
101 unresolved_field *uf;
104 #if defined(ENABLE_SSA)
106 bool last_cmd_was_goto;
108 last_cmd_was_goto = false;
112 /* get required compiler data */
119 /* prevent compiler warnings */
130 s4 savedregs_num = 0;
133 /* space to save used callee saved registers */
135 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
136 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse);
138 cd->stackframesize = rd->memuse + savedregs_num;
141 #if defined(ENABLE_THREADS)
142 /* space to save argument of monitor_enter */
144 if (checksync && (m->flags & ACC_SYNCHRONIZED))
145 cd->stackframesize++;
148 /* create method header */
150 /* Keep stack of non-leaf functions 16-byte aligned. */
152 if (!jd->isleafmethod) {
153 ALIGN_ODD(cd->stackframesize); /* XXX this is wrong, +4 is missing */
156 (void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
157 (void) dseg_add_unique_s4(cd, cd->stackframesize * 8); /* FrameSize */
159 #if defined(ENABLE_THREADS)
160 /* IsSync contains the offset relative to the stack pointer for the
161 argument of monitor_exit used in the exception handler. Since the
162 offset could be zero and give a wrong meaning of the flag it is
166 if (checksync && (m->flags & ACC_SYNCHRONIZED))
167 (void) dseg_add_unique_s4(cd, (rd->memuse + 1) * 8); /* IsSync */
170 (void) dseg_add_unique_s4(cd, 0); /* IsSync */
172 (void) dseg_add_unique_s4(cd, jd->isleafmethod); /* IsLeaf */
173 (void) dseg_add_unique_s4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
174 (void) dseg_add_unique_s4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
176 /* adds a reference for the length of the line number counter. We don't
177 know the size yet, since we evaluate the information during code
178 generation, to save one additional iteration over the whole
179 instructions. During code optimization the position could have changed
180 to the information gotten from the class file */
181 (void) dseg_addlinenumbertablesize(cd);
183 (void) dseg_add_unique_s4(cd, jd->exceptiontablelength); /* ExTableSize */
185 /* create exception table */
187 for (ex = jd->exceptiontable; ex != NULL; ex = ex->down) {
188 dseg_add_target(cd, ex->start);
189 dseg_add_target(cd, ex->end);
190 dseg_add_target(cd, ex->handler);
191 (void) dseg_add_unique_address(cd, ex->catchtype.any);
194 #if defined(ENABLE_PROFILING)
195 /* generate method profiling code */
197 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
198 /* count frequency */
200 M_MOV_IMM(code, REG_ITMP3);
201 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
205 /* create stack frame (if necessary) */
207 if (cd->stackframesize)
208 M_ASUB_IMM(cd->stackframesize * 8, REG_SP);
210 /* save return address and used callee saved registers */
212 p = cd->stackframesize;
213 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
214 p--; M_AST(rd->savintregs[i], REG_SP, p * 8);
216 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
217 p--; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 8);
220 /* take arguments out of register or stack frame */
225 for (p = 0, l = 0; p < md->paramcount; p++) {
226 t = md->paramtypes[p].type;
228 varindex = jd->local_map[l * 5 + t];
229 #if defined(ENABLE_SSA)
231 if (varindex != UNUSED)
232 varindex = ls->var_0[varindex];
233 if ((varindex != UNUSED) && (ls->lifetime[varindex].type == UNUSED))
238 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
241 if (varindex == UNUSED)
245 s1 = md->params[p].regoff;
248 if (IS_INT_LNG_TYPE(t)) { /* integer args */
249 if (!md->params[p].inmemory) { /* register arguments */
250 log_text("integer register argument");
252 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
253 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
255 else { /* reg arg -> spilled */
256 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
260 if (!(var->flags & INMEMORY)) {
261 M_ILD(d, REG_SP, cd->stackframesize * 8 + 4 + s1);
264 if (!IS_2_WORD_TYPE(t)) {
265 #if defined(ENABLE_SSA)
266 /* no copy avoiding by now possible with SSA */
268 emit_mov_membase_reg( /* + 4 for return address */
269 cd, REG_SP, cd->stackframesize * 8 + s1 + 4,
271 emit_mov_reg_membase(
272 cd, REG_ITMP1, REG_SP, var->vv.regoff);
275 #endif /*defined(ENABLE_SSA)*/
276 /* reuse stackslot */
277 var->vv.regoff = cd->stackframesize * 8 + 4 + s1;
281 #if defined(ENABLE_SSA)
282 /* no copy avoiding by now possible with SSA */
284 emit_mov_membase_reg( /* + 4 for return address */
285 cd, REG_SP, cd->stackframesize * 8 + s1 + 4,
287 emit_mov_reg_membase(
288 cd, REG_ITMP1, REG_SP, var->vv.regoff);
289 emit_mov_membase_reg( /* + 4 for return address */
290 cd, REG_SP, cd->stackframesize * 8 + s1 + 4 + 4,
292 emit_mov_reg_membase(
293 cd, REG_ITMP1, REG_SP, var->vv.regoff + 4);
296 #endif /*defined(ENABLE_SSA)*/
297 /* reuse stackslot */
298 var->vv.regoff = cd->stackframesize * 8 + 4 + s1;
303 else { /* floating args */
304 if (!md->params[p].inmemory) { /* register arguments */
305 log_text("There are no float argument registers!");
307 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
308 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
309 } else { /* reg arg -> spilled */
310 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 8 */
314 else { /* stack arguments */
315 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
318 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
320 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
325 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
327 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
330 } else { /* stack-arg -> spilled */
331 #if defined(ENABLE_SSA)
332 /* no copy avoiding by now possible with SSA */
334 emit_mov_membase_reg(
335 cd, REG_SP, cd->stackframesize * 8 + s1 + 4, REG_ITMP1);
336 emit_mov_reg_membase(
337 cd, REG_ITMP1, REG_SP, var->vv.regoff);
340 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
341 emit_fstps_membase(cd, REG_SP, var->vv.regoff);
345 cd, REG_SP, cd->stackframesize * 8 + s1 + 4);
346 emit_fstpl_membase(cd, REG_SP, var->vv.regoff);
350 #endif /*defined(ENABLE_SSA)*/
351 /* reuse stackslot */
352 var->vv.regoff = cd->stackframesize * 8 + 4 + s1;
358 /* call monitorenter function */
360 #if defined(ENABLE_THREADS)
361 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
364 if (m->flags & ACC_STATIC) {
365 M_MOV_IMM(&m->class->object.header, REG_ITMP1);
368 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 8 + 4);
371 M_ALD_MEM(REG_ITMP1, EXCEPTION_HARDWARE_NULLPOINTER);
374 M_AST(REG_ITMP1, REG_SP, s1 * 8);
375 M_AST(REG_ITMP1, REG_SP, 0 * 4);
376 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
382 emit_verbosecall_enter(jd);
387 #if defined(ENABLE_SSA)
388 /* with SSA the Header is Basic Block 0 - insert phi Moves if necessary */
390 codegen_emit_phi_moves(jd, ls->basicblocks[0]);
393 /* end of header generation */
395 /* create replacement points */
397 REPLACEMENT_POINTS_INIT(cd, jd);
399 /* walk through all basic blocks */
401 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
403 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
405 if (bptr->flags >= BBREACHED) {
406 /* branch resolving */
408 codegen_resolve_branchrefs(cd, bptr);
410 /* handle replacement points */
412 REPLACEMENT_POINT_BLOCK_START(cd, bptr);
414 #if defined(ENABLE_REPLACEMENT)
415 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
416 if (cd->replacementpoint[-1].flags & RPLPOINT_FLAG_COUNTDOWN) {
418 disp = (s4) &(m->hitcountdown);
419 M_ISUB_IMM_MEMABS(1, disp);
425 /* copy interface registers to their destination */
430 #if defined(ENABLE_PROFILING)
431 /* generate basic block profiling code */
433 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
434 /* count frequency */
436 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
437 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
441 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
442 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
445 # if defined(ENABLE_SSA)
447 last_cmd_was_goto = false;
451 var = VAR(bptr->invars[len]);
452 if (bptr->type != BBTYPE_STD) {
453 if (!IS_2_WORD_TYPE(var->type)) {
454 if (bptr->type == BBTYPE_EXH) {
455 d = codegen_reg_of_var(0, var, REG_ITMP1);
456 M_INTMOVE(REG_ITMP1, d);
457 emit_store(jd, NULL, var, d);
461 log_text("copy interface registers(EXH, SBR): longs \
462 have to be in memory (begin 1)");
470 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
474 var = VAR(bptr->invars[len]);
475 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
476 if (!IS_2_WORD_TYPE(var->type)) {
477 if (bptr->type == BBTYPE_EXH) {
478 d = codegen_reg_of_var(0, var, REG_ITMP1);
479 M_INTMOVE(REG_ITMP1, d);
480 emit_store(jd, NULL, var, d);
484 log_text("copy interface registers: longs have to be in \
491 assert((var->flags & INOUT));
496 /* walk through all instructions */
501 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
502 if (iptr->line != currentline) {
503 dseg_addlinenumber(cd, iptr->line);
504 currentline = iptr->line;
507 MCODECHECK(1024); /* 1kB should be enough */
510 case ICMD_NOP: /* ... ==> ... */
511 case ICMD_POP: /* ..., value ==> ... */
512 case ICMD_POP2: /* ..., value, value ==> ... */
515 case ICMD_INLINE_START:
517 REPLACEMENT_POINT_INLINE_START(cd, iptr);
520 case ICMD_INLINE_BODY:
522 REPLACEMENT_POINT_INLINE_BODY(cd, iptr);
523 dseg_addlinenumber_inline_start(cd, iptr);
524 dseg_addlinenumber(cd, iptr->line);
527 case ICMD_INLINE_END:
529 dseg_addlinenumber_inline_end(cd, iptr);
530 dseg_addlinenumber(cd, iptr->line);
533 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
535 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
536 emit_nullpointer_check(cd, iptr, s1);
539 /* constant operations ************************************************/
541 case ICMD_ICONST: /* ... ==> ..., constant */
543 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
544 ICONST(d, iptr->sx.val.i);
545 emit_store_dst(jd, iptr, d);
548 case ICMD_LCONST: /* ... ==> ..., constant */
550 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
551 LCONST(d, iptr->sx.val.l);
552 emit_store_dst(jd, iptr, d);
555 case ICMD_FCONST: /* ... ==> ..., constant */
557 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
558 if (iptr->sx.val.f == 0.0) {
562 if (iptr->sx.val.i == 0x80000000) {
566 } else if (iptr->sx.val.f == 1.0) {
569 } else if (iptr->sx.val.f == 2.0) {
575 disp = dseg_add_float(cd, iptr->sx.val.f);
576 emit_mov_imm_reg(cd, 0, REG_ITMP1);
578 emit_flds_membase(cd, REG_ITMP1, disp);
580 emit_store_dst(jd, iptr, d);
583 case ICMD_DCONST: /* ... ==> ..., constant */
585 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
586 if (iptr->sx.val.d == 0.0) {
590 if (iptr->sx.val.l == 0x8000000000000000LL) {
594 } else if (iptr->sx.val.d == 1.0) {
597 } else if (iptr->sx.val.d == 2.0) {
603 disp = dseg_add_double(cd, iptr->sx.val.d);
604 emit_mov_imm_reg(cd, 0, REG_ITMP1);
606 emit_fldl_membase(cd, REG_ITMP1, disp);
608 emit_store_dst(jd, iptr, d);
611 case ICMD_ACONST: /* ... ==> ..., constant */
613 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
615 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
616 codegen_addpatchref(cd, PATCHER_aconst,
617 iptr->sx.val.c.ref, 0);
622 if (iptr->sx.val.anyptr == NULL)
625 M_MOV_IMM(iptr->sx.val.anyptr, d);
627 emit_store_dst(jd, iptr, d);
631 /* load/store/copy/move operations ************************************/
649 if (!(iptr->flags.bits & INS_FLAG_RETADDR))
654 /* integer operations *************************************************/
656 case ICMD_INEG: /* ..., value ==> ..., - value */
658 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
659 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
662 emit_store_dst(jd, iptr, d);
665 case ICMD_LNEG: /* ..., value ==> ..., - value */
667 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
668 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
670 M_NEG(GET_LOW_REG(d));
671 M_IADDC_IMM(0, GET_HIGH_REG(d));
672 M_NEG(GET_HIGH_REG(d));
673 emit_store_dst(jd, iptr, d);
676 case ICMD_I2L: /* ..., value ==> ..., value */
678 s1 = emit_load_s1(jd, iptr, EAX);
679 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
682 M_LNGMOVE(EAX_EDX_PACKED, d);
683 emit_store_dst(jd, iptr, d);
686 case ICMD_L2I: /* ..., value ==> ..., value */
688 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
689 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
691 emit_store_dst(jd, iptr, d);
694 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
696 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
697 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
701 emit_store_dst(jd, iptr, d);
704 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
706 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
707 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
709 emit_store_dst(jd, iptr, d);
712 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
714 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
715 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
717 emit_store_dst(jd, iptr, d);
721 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
723 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
724 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
725 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
732 emit_store_dst(jd, iptr, d);
736 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
737 /* sx.val.i = constant */
739 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
740 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
742 /* `inc reg' is slower on p4's (regarding to ia32
743 optimization reference manual and benchmarks) and as
747 M_IADD_IMM(iptr->sx.val.i, d);
748 emit_store_dst(jd, iptr, d);
751 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
753 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
754 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
755 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
756 M_INTMOVE(s1, GET_LOW_REG(d));
757 M_IADD(s2, GET_LOW_REG(d));
758 /* don't use REG_ITMP1 */
759 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
760 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
761 M_INTMOVE(s1, GET_HIGH_REG(d));
762 M_IADDC(s2, GET_HIGH_REG(d));
763 emit_store_dst(jd, iptr, d);
766 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
767 /* sx.val.l = constant */
769 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
770 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
772 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
773 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
774 emit_store_dst(jd, iptr, d);
777 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
779 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
780 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
781 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
783 M_INTMOVE(s1, REG_ITMP1);
784 M_ISUB(s2, REG_ITMP1);
785 M_INTMOVE(REG_ITMP1, d);
791 emit_store_dst(jd, iptr, d);
794 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
795 /* sx.val.i = constant */
797 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
798 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
800 M_ISUB_IMM(iptr->sx.val.i, d);
801 emit_store_dst(jd, iptr, d);
804 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
806 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
807 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
808 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
809 if (s2 == GET_LOW_REG(d)) {
810 M_INTMOVE(s1, REG_ITMP1);
811 M_ISUB(s2, REG_ITMP1);
812 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
815 M_INTMOVE(s1, GET_LOW_REG(d));
816 M_ISUB(s2, GET_LOW_REG(d));
818 /* don't use REG_ITMP1 */
819 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
820 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
821 if (s2 == GET_HIGH_REG(d)) {
822 M_INTMOVE(s1, REG_ITMP2);
823 M_ISUBB(s2, REG_ITMP2);
824 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
827 M_INTMOVE(s1, GET_HIGH_REG(d));
828 M_ISUBB(s2, GET_HIGH_REG(d));
830 emit_store_dst(jd, iptr, d);
833 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
834 /* sx.val.l = constant */
836 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
837 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
839 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
840 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
841 emit_store_dst(jd, iptr, d);
844 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
846 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
847 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
848 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
855 emit_store_dst(jd, iptr, d);
858 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
859 /* sx.val.i = constant */
861 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
862 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
863 M_IMUL_IMM(s1, iptr->sx.val.i, d);
864 emit_store_dst(jd, iptr, d);
867 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
869 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
870 s2 = emit_load_s2_low(jd, iptr, EDX);
871 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
873 M_INTMOVE(s1, REG_ITMP2);
874 M_IMUL(s2, REG_ITMP2);
876 s1 = emit_load_s1_low(jd, iptr, EAX);
877 s2 = emit_load_s2_high(jd, iptr, EDX);
880 M_IADD(EDX, REG_ITMP2);
882 s1 = emit_load_s1_low(jd, iptr, EAX);
883 s2 = emit_load_s2_low(jd, iptr, EDX);
886 M_INTMOVE(EAX, GET_LOW_REG(d));
887 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
889 emit_store_dst(jd, iptr, d);
892 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
893 /* sx.val.l = constant */
895 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
896 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
897 ICONST(EAX, iptr->sx.val.l);
899 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
900 M_IADD(REG_ITMP2, EDX);
901 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
902 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
903 M_IADD(REG_ITMP2, EDX);
904 M_LNGMOVE(EAX_EDX_PACKED, d);
905 emit_store_dst(jd, iptr, d);
908 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
910 s1 = emit_load_s1(jd, iptr, EAX);
911 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
912 d = codegen_reg_of_dst(jd, iptr, EAX);
913 emit_arithmetic_check(cd, iptr, s2);
915 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
917 /* check as described in jvm spec */
919 M_CMP_IMM(0x80000000, EAX);
926 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
927 emit_store_dst(jd, iptr, d);
930 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
932 s1 = emit_load_s1(jd, iptr, EAX);
933 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
934 d = codegen_reg_of_dst(jd, iptr, EDX);
935 emit_arithmetic_check(cd, iptr, s2);
937 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
939 /* check as described in jvm spec */
941 M_CMP_IMM(0x80000000, EAX);
949 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
950 emit_store_dst(jd, iptr, d);
953 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
954 /* sx.val.i = constant */
956 /* TODO: optimize for `/ 2' */
957 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
958 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
962 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
963 M_SRA_IMM(iptr->sx.val.i, d);
964 emit_store_dst(jd, iptr, d);
967 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
968 /* sx.val.i = constant */
970 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
971 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
973 M_MOV(s1, REG_ITMP1);
977 M_AND_IMM(iptr->sx.val.i, d);
979 M_BGE(2 + 2 + 6 + 2);
980 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
982 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
984 emit_store_dst(jd, iptr, d);
987 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
988 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
990 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
991 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
993 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
994 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
995 /* XXX could be optimized */
996 emit_arithmetic_check(cd, iptr, REG_ITMP3);
998 bte = iptr->sx.s23.s3.bte;
1001 M_LST(s2, REG_SP, 2 * 4);
1003 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1004 M_LST(s1, REG_SP, 0 * 4);
1006 M_MOV_IMM(bte->fp, REG_ITMP3);
1008 emit_store_dst(jd, iptr, d);
1011 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1012 /* sx.val.i = constant */
1014 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1015 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1017 M_TEST(GET_HIGH_REG(d));
1019 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1020 M_IADDC_IMM(0, GET_HIGH_REG(d));
1021 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1022 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1023 emit_store_dst(jd, iptr, d);
1027 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1028 /* sx.val.l = constant */
1030 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1031 if (iptr->dst.var->flags & INMEMORY) {
1032 if (iptr->s1.var->flags & INMEMORY) {
1033 /* Alpha algorithm */
1035 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8);
1037 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1043 /* TODO: hmm, don't know if this is always correct */
1045 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1047 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1053 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1054 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1056 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1057 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1058 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 8 + 4);
1059 emit_jcc(cd, CC_GE, disp);
1061 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8, REG_ITMP1);
1062 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 8 + 4, REG_ITMP2);
1064 emit_neg_reg(cd, REG_ITMP1);
1065 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1066 emit_neg_reg(cd, REG_ITMP2);
1068 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1069 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1071 emit_neg_reg(cd, REG_ITMP1);
1072 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1073 emit_neg_reg(cd, REG_ITMP2);
1075 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 8);
1076 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 8 + 4);
1080 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1081 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1083 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1084 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1085 M_TEST(GET_LOW_REG(s1));
1091 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1093 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1094 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1095 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1096 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1099 emit_store_dst(jd, iptr, d);
1102 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1103 /* sx.val.i = constant */
1105 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1106 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1108 M_SLL_IMM(iptr->sx.val.i, d);
1109 emit_store_dst(jd, iptr, d);
1112 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1114 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1115 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1116 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1117 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1120 emit_store_dst(jd, iptr, d);
1123 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1124 /* sx.val.i = constant */
1126 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1127 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1129 M_SRA_IMM(iptr->sx.val.i, d);
1130 emit_store_dst(jd, iptr, d);
1133 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1135 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1136 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1137 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1138 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1141 emit_store_dst(jd, iptr, d);
1144 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1145 /* sx.val.i = constant */
1147 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1148 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1150 M_SRL_IMM(iptr->sx.val.i, d);
1151 emit_store_dst(jd, iptr, d);
1154 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1156 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1157 s2 = emit_load_s2(jd, iptr, ECX);
1158 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1161 M_TEST_IMM(32, ECX);
1163 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1164 M_CLR(GET_LOW_REG(d));
1165 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1166 M_SLL(GET_LOW_REG(d));
1167 emit_store_dst(jd, iptr, d);
1170 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1171 /* sx.val.i = constant */
1173 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1174 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1176 if (iptr->sx.val.i & 0x20) {
1177 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1178 M_CLR(GET_LOW_REG(d));
1179 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1183 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1185 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1187 emit_store_dst(jd, iptr, d);
1190 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1192 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1193 s2 = emit_load_s2(jd, iptr, ECX);
1194 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1197 M_TEST_IMM(32, ECX);
1199 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1200 M_SRA_IMM(31, GET_HIGH_REG(d));
1201 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1202 M_SRA(GET_HIGH_REG(d));
1203 emit_store_dst(jd, iptr, d);
1206 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1207 /* sx.val.i = constant */
1209 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1210 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1212 if (iptr->sx.val.i & 0x20) {
1213 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1214 M_SRA_IMM(31, GET_HIGH_REG(d));
1215 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1219 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1221 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1223 emit_store_dst(jd, iptr, d);
1226 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1228 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1229 s2 = emit_load_s2(jd, iptr, ECX);
1230 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1233 M_TEST_IMM(32, ECX);
1235 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1236 M_CLR(GET_HIGH_REG(d));
1237 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1238 M_SRL(GET_HIGH_REG(d));
1239 emit_store_dst(jd, iptr, d);
1242 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1243 /* sx.val.l = constant */
1245 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1246 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1248 if (iptr->sx.val.i & 0x20) {
1249 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1250 M_CLR(GET_HIGH_REG(d));
1251 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1255 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1257 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1259 emit_store_dst(jd, iptr, d);
1262 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1264 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1265 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1266 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1273 emit_store_dst(jd, iptr, d);
1276 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1277 /* sx.val.i = constant */
1279 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1280 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1282 M_AND_IMM(iptr->sx.val.i, d);
1283 emit_store_dst(jd, iptr, d);
1286 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1288 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1289 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1290 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1291 if (s2 == GET_LOW_REG(d))
1292 M_AND(s1, GET_LOW_REG(d));
1294 M_INTMOVE(s1, GET_LOW_REG(d));
1295 M_AND(s2, GET_LOW_REG(d));
1297 /* REG_ITMP1 probably contains low 32-bit of destination */
1298 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1299 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1300 if (s2 == GET_HIGH_REG(d))
1301 M_AND(s1, GET_HIGH_REG(d));
1303 M_INTMOVE(s1, GET_HIGH_REG(d));
1304 M_AND(s2, GET_HIGH_REG(d));
1306 emit_store_dst(jd, iptr, d);
1309 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1310 /* sx.val.l = constant */
1312 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1313 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1315 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1316 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1317 emit_store_dst(jd, iptr, d);
1320 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1322 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1323 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1324 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1331 emit_store_dst(jd, iptr, d);
1334 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1335 /* sx.val.i = constant */
1337 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1338 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1340 M_OR_IMM(iptr->sx.val.i, d);
1341 emit_store_dst(jd, iptr, d);
1344 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1346 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1347 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1348 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1349 if (s2 == GET_LOW_REG(d))
1350 M_OR(s1, GET_LOW_REG(d));
1352 M_INTMOVE(s1, GET_LOW_REG(d));
1353 M_OR(s2, GET_LOW_REG(d));
1355 /* REG_ITMP1 probably contains low 32-bit of destination */
1356 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1357 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1358 if (s2 == GET_HIGH_REG(d))
1359 M_OR(s1, GET_HIGH_REG(d));
1361 M_INTMOVE(s1, GET_HIGH_REG(d));
1362 M_OR(s2, GET_HIGH_REG(d));
1364 emit_store_dst(jd, iptr, d);
1367 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1368 /* sx.val.l = constant */
1370 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1371 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1373 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1374 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1375 emit_store_dst(jd, iptr, d);
1378 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1380 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1381 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1382 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1389 emit_store_dst(jd, iptr, d);
1392 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1393 /* sx.val.i = constant */
1395 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1396 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1398 M_XOR_IMM(iptr->sx.val.i, d);
1399 emit_store_dst(jd, iptr, d);
1402 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1404 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1405 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1406 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1407 if (s2 == GET_LOW_REG(d))
1408 M_XOR(s1, GET_LOW_REG(d));
1410 M_INTMOVE(s1, GET_LOW_REG(d));
1411 M_XOR(s2, GET_LOW_REG(d));
1413 /* REG_ITMP1 probably contains low 32-bit of destination */
1414 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1415 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1416 if (s2 == GET_HIGH_REG(d))
1417 M_XOR(s1, GET_HIGH_REG(d));
1419 M_INTMOVE(s1, GET_HIGH_REG(d));
1420 M_XOR(s2, GET_HIGH_REG(d));
1422 emit_store_dst(jd, iptr, d);
1425 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1426 /* sx.val.l = constant */
1428 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1429 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1431 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1432 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1433 emit_store_dst(jd, iptr, d);
1437 /* floating operations ************************************************/
1439 case ICMD_FNEG: /* ..., value ==> ..., - value */
1441 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1442 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1444 emit_store_dst(jd, iptr, d);
1447 case ICMD_DNEG: /* ..., value ==> ..., - value */
1449 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1450 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1452 emit_store_dst(jd, iptr, d);
1455 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1457 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1458 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1459 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1461 emit_store_dst(jd, iptr, d);
1464 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1466 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1467 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1468 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1470 emit_store_dst(jd, iptr, d);
1473 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1475 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1476 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1477 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1479 emit_store_dst(jd, iptr, d);
1482 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1484 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1485 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1486 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1488 emit_store_dst(jd, iptr, d);
1491 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1493 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1494 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1495 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1497 emit_store_dst(jd, iptr, d);
1500 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1502 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1503 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1504 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1506 emit_store_dst(jd, iptr, d);
1509 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1511 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1512 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1513 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1515 emit_store_dst(jd, iptr, d);
1518 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1520 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1521 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1522 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1524 emit_store_dst(jd, iptr, d);
1527 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1529 /* exchanged to skip fxch */
1530 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1531 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1532 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1533 /* emit_fxch(cd); */
1538 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1539 emit_store_dst(jd, iptr, d);
1540 emit_ffree_reg(cd, 0);
1544 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1546 /* exchanged to skip fxch */
1547 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1548 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1549 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1550 /* emit_fxch(cd); */
1555 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1556 emit_store_dst(jd, iptr, d);
1557 emit_ffree_reg(cd, 0);
1561 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1562 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1564 var = VAROP(iptr->s1);
1565 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1567 if (var->flags & INMEMORY) {
1568 emit_fildl_membase(cd, REG_SP, var->vv.regoff);
1570 /* XXX not thread safe! */
1571 disp = dseg_add_unique_s4(cd, 0);
1572 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1574 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1575 emit_fildl_membase(cd, REG_ITMP1, disp);
1578 emit_store_dst(jd, iptr, d);
1581 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1582 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1584 var = VAROP(iptr->s1);
1585 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1586 if (var->flags & INMEMORY) {
1587 emit_fildll_membase(cd, REG_SP, var->vv.regoff);
1590 log_text("L2F: longs have to be in memory");
1593 emit_store_dst(jd, iptr, d);
1596 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1598 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1599 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1601 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1604 /* Round to zero, 53-bit mode, exception masked */
1605 disp = dseg_add_s4(cd, 0x0e7f);
1606 emit_fldcw_membase(cd, REG_ITMP1, disp);
1608 var = VAROP(iptr->dst);
1609 var1 = VAROP(iptr->s1);
1611 if (var->flags & INMEMORY) {
1612 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1614 /* Round to nearest, 53-bit mode, exceptions masked */
1615 disp = dseg_add_s4(cd, 0x027f);
1616 emit_fldcw_membase(cd, REG_ITMP1, disp);
1618 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1619 REG_SP, var->vv.regoff);
1622 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1624 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1627 /* XXX not thread safe! */
1628 disp = dseg_add_unique_s4(cd, 0);
1629 emit_fistpl_membase(cd, REG_ITMP1, disp);
1630 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1632 /* Round to nearest, 53-bit mode, exceptions masked */
1633 disp = dseg_add_s4(cd, 0x027f);
1634 emit_fldcw_membase(cd, REG_ITMP1, disp);
1636 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1639 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1640 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1643 emit_jcc(cd, CC_NE, disp);
1645 /* XXX: change this when we use registers */
1646 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1647 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1648 emit_call_reg(cd, REG_ITMP1);
1650 if (var->flags & INMEMORY) {
1651 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1654 M_INTMOVE(REG_RESULT, var->vv.regoff);
1658 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1660 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1661 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1663 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1666 /* Round to zero, 53-bit mode, exception masked */
1667 disp = dseg_add_s4(cd, 0x0e7f);
1668 emit_fldcw_membase(cd, REG_ITMP1, disp);
1670 var = VAROP(iptr->dst);
1671 var1 = VAROP(iptr->s1);
1673 if (var->flags & INMEMORY) {
1674 emit_fistpl_membase(cd, REG_SP, var->vv.regoff);
1676 /* Round to nearest, 53-bit mode, exceptions masked */
1677 disp = dseg_add_s4(cd, 0x027f);
1678 emit_fldcw_membase(cd, REG_ITMP1, disp);
1680 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1681 REG_SP, var->vv.regoff);
1684 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1686 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1689 /* XXX not thread safe! */
1690 disp = dseg_add_unique_s4(cd, 0);
1691 emit_fistpl_membase(cd, REG_ITMP1, disp);
1692 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1694 /* Round to nearest, 53-bit mode, exceptions masked */
1695 disp = dseg_add_s4(cd, 0x027f);
1696 emit_fldcw_membase(cd, REG_ITMP1, disp);
1698 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1701 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1702 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1705 emit_jcc(cd, CC_NE, disp);
1707 /* XXX: change this when we use registers */
1708 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1709 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1710 emit_call_reg(cd, REG_ITMP1);
1712 if (var->flags & INMEMORY) {
1713 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1715 M_INTMOVE(REG_RESULT, var->vv.regoff);
1719 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1721 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1722 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1724 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1727 /* Round to zero, 53-bit mode, exception masked */
1728 disp = dseg_add_s4(cd, 0x0e7f);
1729 emit_fldcw_membase(cd, REG_ITMP1, disp);
1731 var = VAROP(iptr->dst);
1732 var1 = VAROP(iptr->s1);
1734 if (var->flags & INMEMORY) {
1735 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1737 /* Round to nearest, 53-bit mode, exceptions masked */
1738 disp = dseg_add_s4(cd, 0x027f);
1739 emit_fldcw_membase(cd, REG_ITMP1, disp);
1741 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1742 REG_SP, var->vv.regoff + 4);
1745 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1747 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1750 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1752 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1754 emit_jcc(cd, CC_NE, disp);
1756 emit_alu_imm_membase(cd, ALU_CMP, 0,
1757 REG_SP, var->vv.regoff);
1760 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1762 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1764 emit_jcc(cd, CC_NE, disp);
1766 /* XXX: change this when we use registers */
1767 emit_flds_membase(cd, REG_SP, var1->vv.regoff);
1768 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1769 emit_call_reg(cd, REG_ITMP1);
1770 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1771 emit_mov_reg_membase(cd, REG_RESULT2,
1772 REG_SP, var->vv.regoff + 4);
1775 log_text("F2L: longs have to be in memory");
1780 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1782 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1783 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1785 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1788 /* Round to zero, 53-bit mode, exception masked */
1789 disp = dseg_add_s4(cd, 0x0e7f);
1790 emit_fldcw_membase(cd, REG_ITMP1, disp);
1792 var = VAROP(iptr->dst);
1793 var1 = VAROP(iptr->s1);
1795 if (var->flags & INMEMORY) {
1796 emit_fistpll_membase(cd, REG_SP, var->vv.regoff);
1798 /* Round to nearest, 53-bit mode, exceptions masked */
1799 disp = dseg_add_s4(cd, 0x027f);
1800 emit_fldcw_membase(cd, REG_ITMP1, disp);
1802 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1803 REG_SP, var->vv.regoff + 4);
1806 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1808 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1811 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1813 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff + 4);
1815 emit_jcc(cd, CC_NE, disp);
1817 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff);
1820 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff);
1822 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff);
1824 emit_jcc(cd, CC_NE, disp);
1826 /* XXX: change this when we use registers */
1827 emit_fldl_membase(cd, REG_SP, var1->vv.regoff);
1828 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1829 emit_call_reg(cd, REG_ITMP1);
1830 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff);
1831 emit_mov_reg_membase(cd, REG_RESULT2,
1832 REG_SP, var->vv.regoff + 4);
1835 log_text("D2L: longs have to be in memory");
1840 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1842 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1843 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1845 emit_store_dst(jd, iptr, d);
1848 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1850 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1851 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1853 emit_store_dst(jd, iptr, d);
1856 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1859 /* exchanged to skip fxch */
1860 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1861 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1862 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1863 /* emit_fxch(cd); */
1866 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1867 emit_jcc(cd, CC_E, 6);
1868 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1870 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1871 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1872 emit_jcc(cd, CC_B, 3 + 5);
1873 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1874 emit_jmp_imm(cd, 3);
1875 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1876 emit_store_dst(jd, iptr, d);
1879 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1882 /* exchanged to skip fxch */
1883 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1884 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1885 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1886 /* emit_fxch(cd); */
1889 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1890 emit_jcc(cd, CC_E, 3);
1891 emit_movb_imm_reg(cd, 1, REG_AH);
1893 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1894 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1895 emit_jcc(cd, CC_B, 3 + 5);
1896 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1897 emit_jmp_imm(cd, 3);
1898 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1899 emit_store_dst(jd, iptr, d);
1903 /* memory operations **************************************************/
1905 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1907 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1908 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1909 /* implicit null-pointer check */
1910 M_ILD(d, s1, OFFSET(java_array_t, size));
1911 emit_store_dst(jd, iptr, d);
1914 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1916 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1917 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1918 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1919 /* implicit null-pointer check */
1920 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1921 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray_t, data[0]),
1923 emit_store_dst(jd, iptr, d);
1926 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
1928 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1929 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1930 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1931 /* implicit null-pointer check */
1932 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1933 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray_t, data[0]),
1935 emit_store_dst(jd, iptr, d);
1938 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
1940 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1941 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1942 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1943 /* implicit null-pointer check */
1944 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1945 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray_t, data[0]),
1947 emit_store_dst(jd, iptr, d);
1950 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
1952 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1953 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1954 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1955 /* implicit null-pointer check */
1956 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1957 emit_mov_memindex_reg(cd, OFFSET(java_intarray_t, data[0]),
1959 emit_store_dst(jd, iptr, d);
1962 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
1964 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1965 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1966 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
1967 /* implicit null-pointer check */
1968 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1970 var = VAROP(iptr->dst);
1972 assert(var->flags & INMEMORY);
1973 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]),
1974 s1, s2, 3, REG_ITMP3);
1975 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff);
1976 emit_mov_memindex_reg(cd, OFFSET(java_longarray_t, data[0]) + 4,
1977 s1, s2, 3, REG_ITMP3);
1978 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff + 4);
1981 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
1983 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1984 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1985 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1986 /* implicit null-pointer check */
1987 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1988 emit_flds_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2, 2);
1989 emit_store_dst(jd, iptr, d);
1992 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
1994 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1995 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1996 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1997 /* implicit null-pointer check */
1998 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
1999 emit_fldl_memindex(cd, OFFSET(java_doublearray_t, data[0]), s1, s2,3);
2000 emit_store_dst(jd, iptr, d);
2003 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2005 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2006 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2007 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2008 /* implicit null-pointer check */
2009 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2010 emit_mov_memindex_reg(cd, OFFSET(java_objectarray_t, data[0]),
2012 emit_store_dst(jd, iptr, d);
2016 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2018 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2019 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2020 /* implicit null-pointer check */
2021 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2022 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2024 /* because EBP, ESI, EDI have no xH and xL nibbles */
2025 M_INTMOVE(s3, REG_ITMP3);
2028 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray_t, data[0]),
2032 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2034 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2035 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2036 /* implicit null-pointer check */
2037 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2038 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2039 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray_t, data[0]),
2043 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2045 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2046 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2047 /* implicit null-pointer check */
2048 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2049 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2050 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray_t, data[0]),
2054 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2056 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2057 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2058 /* implicit null-pointer check */
2059 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2060 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2061 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray_t, data[0]),
2065 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2067 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2068 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2069 /* implicit null-pointer check */
2070 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2072 var = VAROP(iptr->sx.s23.s3);
2074 assert(var->flags & INMEMORY);
2075 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff, REG_ITMP3);
2076 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray_t, data[0])
2078 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff + 4, REG_ITMP3);
2079 emit_mov_reg_memindex(cd, REG_ITMP3,
2080 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2083 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2085 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2086 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2087 /* implicit null-pointer check */
2088 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2089 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2090 emit_fstps_memindex(cd, OFFSET(java_floatarray_t, data[0]), s1, s2,2);
2093 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2095 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2096 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2097 /* implicit null-pointer check */
2098 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2099 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2100 emit_fstpl_memindex(cd, OFFSET(java_doublearray_t, data[0]),
2104 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2106 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2107 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2108 /* implicit null-pointer check */
2109 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2110 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2112 M_AST(s1, REG_SP, 0 * 4);
2113 M_AST(s3, REG_SP, 1 * 4);
2114 M_MOV_IMM(BUILTIN_canstore, REG_ITMP1);
2116 emit_exception_check(cd, iptr);
2118 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2119 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2120 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2121 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray_t, data[0]),
2125 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2127 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2128 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2129 /* implicit null-pointer check */
2130 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2131 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2132 OFFSET(java_bytearray_t, data[0]), s1, s2, 0);
2135 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2137 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2138 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2139 /* implicit null-pointer check */
2140 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2141 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2142 OFFSET(java_chararray_t, data[0]), s1, s2, 1);
2145 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2147 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2148 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2149 /* implicit null-pointer check */
2150 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2151 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2152 OFFSET(java_shortarray_t, data[0]), s1, s2, 1);
2155 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2157 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2158 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2159 /* implicit null-pointer check */
2160 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2161 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2162 OFFSET(java_intarray_t, data[0]), s1, s2, 2);
2165 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2167 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2168 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2169 /* implicit null-pointer check */
2170 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2171 emit_mov_imm_memindex(cd,
2172 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2173 OFFSET(java_longarray_t, data[0]), s1, s2, 3);
2174 emit_mov_imm_memindex(cd,
2175 ((s4)iptr->sx.s23.s3.constval) >> 31,
2176 OFFSET(java_longarray_t, data[0]) + 4, s1, s2, 3);
2179 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2181 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2182 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2183 /* implicit null-pointer check */
2184 emit_arrayindexoutofbounds_check(cd, iptr, s1, s2);
2185 emit_mov_imm_memindex(cd, 0,
2186 OFFSET(java_objectarray_t, data[0]), s1, s2, 2);
2190 case ICMD_GETSTATIC: /* ... ==> ..., value */
2192 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2193 uf = iptr->sx.s23.s3.uf;
2194 fieldtype = uf->fieldref->parseddesc.fd->type;
2197 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2201 fi = iptr->sx.s23.s3.fmiref->p.field;
2202 fieldtype = fi->type;
2203 disp = (intptr_t) fi->value;
2205 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2206 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2209 M_MOV_IMM(disp, REG_ITMP1);
2210 switch (fieldtype) {
2213 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2214 M_ILD(d, REG_ITMP1, 0);
2217 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2218 M_LLD(d, REG_ITMP1, 0);
2221 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2222 M_FLD(d, REG_ITMP1, 0);
2225 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2226 M_DLD(d, REG_ITMP1, 0);
2229 emit_store_dst(jd, iptr, d);
2232 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2234 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2235 uf = iptr->sx.s23.s3.uf;
2236 fieldtype = uf->fieldref->parseddesc.fd->type;
2239 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2242 fi = iptr->sx.s23.s3.fmiref->p.field;
2243 fieldtype = fi->type;
2244 disp = (intptr_t) fi->value;
2246 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2247 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2250 M_MOV_IMM(disp, REG_ITMP1);
2251 switch (fieldtype) {
2254 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2255 M_IST(s1, REG_ITMP1, 0);
2258 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2259 M_LST(s1, REG_ITMP1, 0);
2262 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2263 emit_fstps_membase(cd, REG_ITMP1, 0);
2266 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2267 emit_fstpl_membase(cd, REG_ITMP1, 0);
2272 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2273 /* val = value (in current instruction) */
2274 /* following NOP) */
2276 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2277 uf = iptr->sx.s23.s3.uf;
2278 fieldtype = uf->fieldref->parseddesc.fd->type;
2281 codegen_addpatchref(cd, PATCHER_get_putstatic, uf, 0);
2284 fi = iptr->sx.s23.s3.fmiref->p.field;
2285 fieldtype = fi->type;
2286 disp = (intptr_t) fi->value;
2288 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class))
2289 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2292 M_MOV_IMM(disp, REG_ITMP1);
2293 switch (fieldtype) {
2296 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
2299 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2300 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
2307 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2309 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2310 emit_nullpointer_check(cd, iptr, s1);
2312 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2313 uf = iptr->sx.s23.s3.uf;
2314 fieldtype = uf->fieldref->parseddesc.fd->type;
2317 codegen_addpatchref(cd, PATCHER_getfield,
2318 iptr->sx.s23.s3.uf, 0);
2321 fi = iptr->sx.s23.s3.fmiref->p.field;
2322 fieldtype = fi->type;
2326 switch (fieldtype) {
2329 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2330 M_ILD32(d, s1, disp);
2333 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2334 M_LLD32(d, s1, disp);
2337 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2338 M_FLD32(d, s1, disp);
2341 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2342 M_DLD32(d, s1, disp);
2345 emit_store_dst(jd, iptr, d);
2348 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2350 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2351 emit_nullpointer_check(cd, iptr, s1);
2353 /* must be done here because of code patching */
2355 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2356 uf = iptr->sx.s23.s3.uf;
2357 fieldtype = uf->fieldref->parseddesc.fd->type;
2360 fi = iptr->sx.s23.s3.fmiref->p.field;
2361 fieldtype = fi->type;
2364 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2365 if (IS_2_WORD_TYPE(fieldtype))
2366 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2368 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2371 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2373 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2375 uf = iptr->sx.s23.s3.uf;
2378 codegen_addpatchref(cd, PATCHER_putfield, uf, 0);
2382 fi = iptr->sx.s23.s3.fmiref->p.field;
2386 switch (fieldtype) {
2389 M_IST32(s2, s1, disp);
2392 M_LST32(s2, s1, disp);
2395 emit_fstps_membase32(cd, s1, disp);
2398 emit_fstpl_membase32(cd, s1, disp);
2403 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2404 /* val = value (in current instruction) */
2405 /* following NOP) */
2407 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2408 emit_nullpointer_check(cd, iptr, s1);
2410 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2411 uf = iptr->sx.s23.s3.uf;
2412 fieldtype = uf->fieldref->parseddesc.fd->type;
2415 codegen_addpatchref(cd, PATCHER_putfieldconst,
2419 fi = iptr->sx.s23.s3.fmiref->p.field;
2420 fieldtype = fi->type;
2424 switch (fieldtype) {
2427 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2430 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2431 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2439 /* branch operations **************************************************/
2441 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2443 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2444 M_INTMOVE(s1, REG_ITMP1_XPTR);
2446 #ifdef ENABLE_VERIFIER
2447 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2448 codegen_addpatchref(cd, PATCHER_athrow_areturn,
2449 iptr->sx.s23.s2.uc, 0);
2451 #endif /* ENABLE_VERIFIER */
2453 M_CALL_IMM(0); /* passing exception pc */
2454 M_POP(REG_ITMP2_XPC);
2456 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2460 case ICMD_GOTO: /* ... ==> ... */
2461 case ICMD_RET: /* ... ==> ... */
2463 #if defined(ENABLE_SSA)
2465 last_cmd_was_goto = true;
2467 /* In case of a Goto phimoves have to be inserted before the */
2470 codegen_emit_phi_moves(jd, bptr);
2473 emit_br(cd, iptr->dst.block);
2477 case ICMD_JSR: /* ... ==> ... */
2479 emit_br(cd, iptr->sx.s23.s3.jsrtarget.block);
2483 case ICMD_IFNULL: /* ..., value ==> ... */
2484 case ICMD_IFNONNULL:
2486 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2488 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFNULL, BRANCH_OPT_NONE);
2491 case ICMD_IFEQ: /* ..., value ==> ... */
2498 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2499 M_CMP_IMM(iptr->sx.val.i, s1);
2500 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IFEQ, BRANCH_OPT_NONE);
2503 case ICMD_IF_LEQ: /* ..., value ==> ... */
2505 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2506 if (iptr->sx.val.l == 0) {
2507 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2508 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2511 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2512 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2513 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2514 M_OR(REG_ITMP2, REG_ITMP1);
2516 emit_beq(cd, iptr->dst.block);
2519 case ICMD_IF_LLT: /* ..., value ==> ... */
2521 if (iptr->sx.val.l == 0) {
2522 /* If high 32-bit are less than zero, then the 64-bits
2524 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2526 emit_blt(cd, iptr->dst.block);
2529 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2530 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2531 emit_blt(cd, iptr->dst.block);
2533 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2534 emit_bult(cd, iptr->dst.block);
2538 case ICMD_IF_LLE: /* ..., value ==> ... */
2540 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2541 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2542 emit_blt(cd, iptr->dst.block);
2544 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2545 emit_bule(cd, iptr->dst.block);
2548 case ICMD_IF_LNE: /* ..., value ==> ... */
2550 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2551 if (iptr->sx.val.l == 0) {
2552 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2553 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2556 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2557 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2558 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2559 M_OR(REG_ITMP2, REG_ITMP1);
2561 emit_bne(cd, iptr->dst.block);
2564 case ICMD_IF_LGT: /* ..., value ==> ... */
2566 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2567 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2568 emit_bgt(cd, iptr->dst.block);
2570 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2571 emit_bugt(cd, iptr->dst.block);
2574 case ICMD_IF_LGE: /* ..., value ==> ... */
2576 if (iptr->sx.val.l == 0) {
2577 /* If high 32-bit are greater equal zero, then the
2579 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2581 emit_bge(cd, iptr->dst.block);
2584 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2585 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2586 emit_bgt(cd, iptr->dst.block);
2588 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2589 emit_buge(cd, iptr->dst.block);
2593 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2594 case ICMD_IF_ICMPNE:
2595 case ICMD_IF_ICMPLT:
2596 case ICMD_IF_ICMPGT:
2597 case ICMD_IF_ICMPGE:
2598 case ICMD_IF_ICMPLE:
2600 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2601 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2603 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ICMPEQ, BRANCH_OPT_NONE);
2606 case ICMD_IF_ACMPEQ: /* ..., value, value ==> ... */
2607 case ICMD_IF_ACMPNE:
2609 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2610 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2612 emit_bcc(cd, iptr->dst.block, iptr->opc - ICMD_IF_ACMPEQ, BRANCH_OPT_NONE);
2615 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2617 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2618 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2619 M_INTMOVE(s1, REG_ITMP1);
2620 M_XOR(s2, REG_ITMP1);
2621 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2622 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2623 M_INTMOVE(s1, REG_ITMP2);
2624 M_XOR(s2, REG_ITMP2);
2625 M_OR(REG_ITMP1, REG_ITMP2);
2626 emit_beq(cd, iptr->dst.block);
2629 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2631 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2632 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2633 M_INTMOVE(s1, REG_ITMP1);
2634 M_XOR(s2, REG_ITMP1);
2635 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2636 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2637 M_INTMOVE(s1, REG_ITMP2);
2638 M_XOR(s2, REG_ITMP2);
2639 M_OR(REG_ITMP1, REG_ITMP2);
2640 emit_bne(cd, iptr->dst.block);
2643 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2645 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2646 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2648 emit_blt(cd, iptr->dst.block);
2649 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2650 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2653 emit_bult(cd, iptr->dst.block);
2656 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2658 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2659 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2661 emit_bgt(cd, iptr->dst.block);
2662 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2663 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2666 emit_bugt(cd, iptr->dst.block);
2669 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2671 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2672 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2674 emit_blt(cd, iptr->dst.block);
2675 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2676 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2679 emit_bule(cd, iptr->dst.block);
2682 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2684 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2685 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2687 emit_bgt(cd, iptr->dst.block);
2688 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2689 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2692 emit_buge(cd, iptr->dst.block);
2696 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2698 REPLACEMENT_POINT_RETURN(cd, iptr);
2699 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2700 M_INTMOVE(s1, REG_RESULT);
2701 goto nowperformreturn;
2703 case ICMD_LRETURN: /* ..., retvalue ==> ... */
2705 REPLACEMENT_POINT_RETURN(cd, iptr);
2706 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
2707 M_LNGMOVE(s1, REG_RESULT_PACKED);
2708 goto nowperformreturn;
2710 case ICMD_ARETURN: /* ..., retvalue ==> ... */
2712 REPLACEMENT_POINT_RETURN(cd, iptr);
2713 s1 = emit_load_s1(jd, iptr, REG_RESULT);
2714 M_INTMOVE(s1, REG_RESULT);
2716 #ifdef ENABLE_VERIFIER
2717 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2718 codegen_addpatchref(cd, PATCHER_athrow_areturn,
2719 iptr->sx.s23.s2.uc, 0);
2721 #endif /* ENABLE_VERIFIER */
2722 goto nowperformreturn;
2724 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2727 REPLACEMENT_POINT_RETURN(cd, iptr);
2728 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
2729 goto nowperformreturn;
2731 case ICMD_RETURN: /* ... ==> ... */
2733 REPLACEMENT_POINT_RETURN(cd, iptr);
2739 p = cd->stackframesize;
2741 #if !defined(NDEBUG)
2742 emit_verbosecall_exit(jd);
2745 #if defined(ENABLE_THREADS)
2746 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2747 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 8);
2749 /* we need to save the proper return value */
2750 switch (iptr->opc) {
2753 M_IST(REG_RESULT, REG_SP, rd->memuse * 8);
2757 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2761 emit_fstps_membase(cd, REG_SP, rd->memuse * 8);
2765 emit_fstpl_membase(cd, REG_SP, rd->memuse * 8);
2769 M_AST(REG_ITMP2, REG_SP, 0);
2770 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
2773 /* and now restore the proper return value */
2774 switch (iptr->opc) {
2777 M_ILD(REG_RESULT, REG_SP, rd->memuse * 8);
2781 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 8);
2785 emit_flds_membase(cd, REG_SP, rd->memuse * 8);
2789 emit_fldl_membase(cd, REG_SP, rd->memuse * 8);
2795 /* restore saved registers */
2797 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
2798 p--; M_ALD(rd->savintregs[i], REG_SP, p * 8);
2801 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
2803 emit_fldl_membase(cd, REG_SP, p * 8);
2804 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
2806 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
2809 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
2813 /* deallocate stack */
2815 if (cd->stackframesize)
2816 M_AADD_IMM(cd->stackframesize * 8, REG_SP);
2823 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2826 branch_target_t *table;
2828 table = iptr->dst.table;
2830 l = iptr->sx.s23.s2.tablelow;
2831 i = iptr->sx.s23.s3.tablehigh;
2833 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2834 M_INTMOVE(s1, REG_ITMP1);
2837 M_ISUB_IMM(l, REG_ITMP1);
2843 M_CMP_IMM(i - 1, REG_ITMP1);
2844 emit_bugt(cd, table[0].block);
2846 /* build jump table top down and use address of lowest entry */
2851 dseg_add_target(cd, table->block);
2855 /* length of dataseg after last dseg_addtarget is used
2858 M_MOV_IMM(0, REG_ITMP2);
2860 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
2866 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2869 lookup_target_t *lookup;
2871 lookup = iptr->dst.lookup;
2873 i = iptr->sx.s23.s2.lookupcount;
2875 MCODECHECK((i<<2)+8);
2876 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2879 M_CMP_IMM(lookup->value, s1);
2880 emit_beq(cd, lookup->target.block);
2884 emit_br(cd, iptr->sx.s23.s3.lookupdefault.block);
2889 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
2891 bte = iptr->sx.s23.s3.bte;
2895 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2897 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2898 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
2899 case ICMD_INVOKEINTERFACE:
2901 REPLACEMENT_POINT_INVOKE(cd, iptr);
2903 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2904 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
2908 lm = iptr->sx.s23.s3.fmiref->p.method;
2909 md = lm->parseddesc;
2913 s3 = md->paramcount;
2915 MCODECHECK((s3 << 1) + 64);
2917 /* copy arguments to registers or stack location */
2919 for (s3 = s3 - 1; s3 >= 0; s3--) {
2920 var = VAR(iptr->sx.s23.s2.args[s3]);
2922 /* Already Preallocated (ARGVAR) ? */
2923 if (var->flags & PREALLOC)
2925 if (IS_INT_LNG_TYPE(var->type)) {
2926 if (!md->params[s3].inmemory) {
2927 log_text("No integer argument registers available!");
2931 if (IS_2_WORD_TYPE(var->type)) {
2932 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
2933 M_LST(d, REG_SP, md->params[s3].regoff);
2935 d = emit_load(jd, iptr, var, REG_ITMP1);
2936 M_IST(d, REG_SP, md->params[s3].regoff);
2941 if (!md->params[s3].inmemory) {
2942 s1 = md->params[s3].regoff;
2943 d = emit_load(jd, iptr, var, s1);
2947 d = emit_load(jd, iptr, var, REG_FTMP1);
2948 if (IS_2_WORD_TYPE(var->type))
2949 M_DST(d, REG_SP, md->params[s3].regoff);
2951 M_FST(d, REG_SP, md->params[s3].regoff);
2956 switch (iptr->opc) {
2958 disp = (ptrint) bte->fp;
2959 d = md->returntype.type;
2961 M_MOV_IMM(disp, REG_ITMP1);
2964 emit_exception_check(cd, iptr);
2967 case ICMD_INVOKESPECIAL:
2968 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
2969 emit_nullpointer_check(cd, iptr, REG_ITMP1);
2972 case ICMD_INVOKESTATIC:
2974 unresolved_method *um = iptr->sx.s23.s3.um;
2976 codegen_addpatchref(cd, PATCHER_invokestatic_special,
2980 d = md->returntype.type;
2983 disp = (ptrint) lm->stubroutine;
2984 d = lm->parseddesc->returntype.type;
2987 M_MOV_IMM(disp, REG_ITMP2);
2991 case ICMD_INVOKEVIRTUAL:
2992 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
2993 emit_nullpointer_check(cd, iptr, s1);
2996 unresolved_method *um = iptr->sx.s23.s3.um;
2998 codegen_addpatchref(cd, PATCHER_invokevirtual, um, 0);
3001 d = md->returntype.type;
3004 s1 = OFFSET(vftbl_t, table[0]) +
3005 sizeof(methodptr) * lm->vftblindex;
3006 d = md->returntype.type;
3009 M_ALD(REG_METHODPTR, REG_ITMP1,
3010 OFFSET(java_object_t, vftbl));
3011 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3015 case ICMD_INVOKEINTERFACE:
3016 M_ALD(REG_ITMP1, REG_SP, 0 * 8);
3017 emit_nullpointer_check(cd, iptr, s1);
3020 unresolved_method *um = iptr->sx.s23.s3.um;
3022 codegen_addpatchref(cd, PATCHER_invokeinterface, um, 0);
3026 d = md->returntype.type;
3029 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3030 sizeof(methodptr) * lm->class->index;
3032 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3034 d = md->returntype.type;
3037 M_ALD(REG_METHODPTR, REG_ITMP1,
3038 OFFSET(java_object_t, vftbl));
3039 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3040 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3045 /* store size of call code in replacement point */
3047 REPLACEMENT_POINT_INVOKE_RETURN(cd, iptr);
3049 /* d contains return type */
3051 if (d != TYPE_VOID) {
3052 #if defined(ENABLE_SSA)
3053 if ((ls == NULL) /* || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) */ ||
3054 (ls->lifetime[iptr->dst.varindex].type != UNUSED))
3055 /* a "living" stackslot */
3058 if (IS_INT_LNG_TYPE(d)) {
3059 if (IS_2_WORD_TYPE(d)) {
3060 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3061 M_LNGMOVE(REG_RESULT_PACKED, s1);
3064 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3065 M_INTMOVE(REG_RESULT, s1);
3069 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3071 emit_store_dst(jd, iptr, s1);
3077 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3079 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3080 /* object type cast-check */
3083 vftbl_t *supervftbl;
3086 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3092 super = iptr->sx.s23.s3.c.cls;
3093 superindex = super->index;
3094 supervftbl = super->vftbl;
3097 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3098 CODEGEN_CRITICAL_SECTION_NEW;
3100 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3102 /* if class is not resolved, check which code to call */
3104 if (super == NULL) {
3106 emit_label_beq(cd, BRANCH_LABEL_1);
3108 codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
3109 iptr->sx.s23.s3.c.ref, 0);
3111 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3112 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3113 emit_label_beq(cd, BRANCH_LABEL_2);
3116 /* interface checkcast code */
3118 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3119 if (super != NULL) {
3121 emit_label_beq(cd, BRANCH_LABEL_3);
3124 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3126 if (super == NULL) {
3127 codegen_addpatchref(cd, PATCHER_checkcast_interface,
3128 iptr->sx.s23.s3.c.ref,
3133 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3134 M_ISUB_IMM32(superindex, REG_ITMP3);
3135 /* XXX do we need this one? */
3137 emit_classcast_check(cd, iptr, BRANCH_LE, REG_ITMP3, s1);
3139 M_ALD32(REG_ITMP3, REG_ITMP2,
3140 OFFSET(vftbl_t, interfacetable[0]) -
3141 superindex * sizeof(methodptr*));
3143 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_ITMP3, s1);
3146 emit_label_br(cd, BRANCH_LABEL_4);
3148 emit_label(cd, BRANCH_LABEL_3);
3151 /* class checkcast code */
3153 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3154 if (super == NULL) {
3155 emit_label(cd, BRANCH_LABEL_2);
3159 emit_label_beq(cd, BRANCH_LABEL_5);
3162 M_ALD(REG_ITMP2, s1, OFFSET(java_object_t, vftbl));
3164 if (super == NULL) {
3165 codegen_addpatchref(cd, PATCHER_checkcast_class,
3166 iptr->sx.s23.s3.c.ref,
3170 M_MOV_IMM(supervftbl, REG_ITMP3);
3172 CODEGEN_CRITICAL_SECTION_START;
3174 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3176 /* if (s1 != REG_ITMP1) { */
3177 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3178 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3179 /* #if defined(ENABLE_THREADS) */
3180 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3182 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
3185 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3186 M_ISUB(REG_ITMP3, REG_ITMP2);
3187 M_MOV_IMM(supervftbl, REG_ITMP3);
3188 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3190 CODEGEN_CRITICAL_SECTION_END;
3194 M_CMP(REG_ITMP3, REG_ITMP2);
3195 emit_classcast_check(cd, iptr, BRANCH_ULE, REG_ITMP3, s1);
3198 emit_label(cd, BRANCH_LABEL_5);
3201 if (super == NULL) {
3202 emit_label(cd, BRANCH_LABEL_1);
3203 emit_label(cd, BRANCH_LABEL_4);
3206 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3209 /* array type cast-check */
3211 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3212 M_AST(s1, REG_SP, 0 * 4);
3214 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3215 codegen_addpatchref(cd, PATCHER_builtin_arraycheckcast,
3216 iptr->sx.s23.s3.c.ref, 0);
3219 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3220 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3223 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3225 emit_classcast_check(cd, iptr, BRANCH_EQ, REG_RESULT, s1);
3227 d = codegen_reg_of_dst(jd, iptr, s1);
3231 emit_store_dst(jd, iptr, d);
3234 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3238 vftbl_t *supervftbl;
3241 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3247 super = iptr->sx.s23.s3.c.cls;
3248 superindex = super->index;
3249 supervftbl = super->vftbl;
3252 if ((super == NULL) || !(super->flags & ACC_INTERFACE))
3253 CODEGEN_CRITICAL_SECTION_NEW;
3255 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3256 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
3259 M_INTMOVE(s1, REG_ITMP1);
3265 /* if class is not resolved, check which code to call */
3267 if (super == NULL) {
3269 emit_label_beq(cd, BRANCH_LABEL_1);
3271 codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
3272 iptr->sx.s23.s3.c.ref, 0);
3274 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3275 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3276 emit_label_beq(cd, BRANCH_LABEL_2);
3279 /* interface instanceof code */
3281 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3282 if (super != NULL) {
3284 emit_label_beq(cd, BRANCH_LABEL_3);
3287 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3289 if (super == NULL) {
3290 codegen_addpatchref(cd, PATCHER_instanceof_interface,
3291 iptr->sx.s23.s3.c.ref, 0);
3295 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3296 M_ISUB_IMM32(superindex, REG_ITMP3);
3299 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3300 6 /* jcc */ + 5 /* mov_imm_reg */);
3303 M_ALD32(REG_ITMP1, REG_ITMP1,
3304 OFFSET(vftbl_t, interfacetable[0]) -
3305 superindex * sizeof(methodptr*));
3307 /* emit_setcc_reg(cd, CC_A, d); */
3308 /* emit_jcc(cd, CC_BE, 5); */
3313 emit_label_br(cd, BRANCH_LABEL_4);
3315 emit_label(cd, BRANCH_LABEL_3);
3318 /* class instanceof code */
3320 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3321 if (super == NULL) {
3322 emit_label(cd, BRANCH_LABEL_2);
3326 emit_label_beq(cd, BRANCH_LABEL_5);
3329 M_ALD(REG_ITMP1, s1, OFFSET(java_object_t, vftbl));
3331 if (super == NULL) {
3332 codegen_addpatchref(cd, PATCHER_instanceof_class,
3333 iptr->sx.s23.s3.c.ref, 0);
3336 M_MOV_IMM(supervftbl, REG_ITMP2);
3338 CODEGEN_CRITICAL_SECTION_START;
3340 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3341 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3342 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3344 CODEGEN_CRITICAL_SECTION_END;
3346 M_ISUB(REG_ITMP2, REG_ITMP1);
3347 M_CLR(d); /* may be REG_ITMP2 */
3348 M_CMP(REG_ITMP3, REG_ITMP1);
3353 emit_label(cd, BRANCH_LABEL_5);
3356 if (super == NULL) {
3357 emit_label(cd, BRANCH_LABEL_1);
3358 emit_label(cd, BRANCH_LABEL_4);
3361 emit_store_dst(jd, iptr, d);
3365 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3367 /* check for negative sizes and copy sizes to stack if necessary */
3369 MCODECHECK((iptr->s1.argcount << 1) + 64);
3371 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3372 /* copy SAVEDVAR sizes to stack */
3373 var = VAR(iptr->sx.s23.s2.args[s1]);
3375 /* Already Preallocated? */
3376 if (!(var->flags & PREALLOC)) {
3377 if (var->flags & INMEMORY) {
3378 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff);
3379 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3382 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3386 /* is a patcher function set? */
3388 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3389 codegen_addpatchref(cd, PATCHER_builtin_multianewarray,
3390 iptr->sx.s23.s3.c.ref, 0);
3396 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3398 /* a0 = dimension count */
3400 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3402 /* a1 = arraydescriptor */
3404 M_IST_IMM(disp, REG_SP, 1 * 4);
3406 /* a2 = pointer to dimensions = stack pointer */
3408 M_MOV(REG_SP, REG_ITMP1);
3409 M_AADD_IMM(3 * 4, REG_ITMP1);
3410 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3412 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3415 /* check for exception before result assignment */
3417 emit_exception_check(cd, iptr);
3419 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3420 M_INTMOVE(REG_RESULT, s1);
3421 emit_store_dst(jd, iptr, s1);
3425 exceptions_throw_internalerror("Unknown ICMD %d during code generation",
3430 } /* for instruction */
3434 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3437 #if defined(ENABLE_SSA)
3440 /* by edge splitting, in Blocks with phi moves there can only */
3441 /* be a goto as last command, no other Jump/Branch Command */
3443 if (!last_cmd_was_goto)
3444 codegen_emit_phi_moves(jd, bptr);
3449 /* At the end of a basic block we may have to append some nops,
3450 because the patcher stub calling code might be longer than the
3451 actual instruction. So codepatching does not change the
3452 following block unintentionally. */
3454 if (cd->mcodeptr < cd->lastmcodeptr) {
3455 while (cd->mcodeptr < cd->lastmcodeptr) {
3460 } /* if (bptr -> flags >= BBREACHED) */
3461 } /* for basic block */
3463 dseg_createlinenumbertable(cd);
3465 /* generate stubs */
3467 emit_patcher_stubs(jd);
3469 /* everything's ok */
3474 /* codegen_emit_stub_compiler **************************************************
3476 Emit a stub routine which calls the compiler.
3478 *******************************************************************************/
/* Emit a tiny stub that transfers control to the JIT compiler for the
   stub's method.  NOTE(review): this view of the file is elided --
   the local declarations and the closing brace are not visible here,
   so the comments below annotate only the visible lines. */
void codegen_emit_stub_compiler(jitdata *jd)
/* get required compiler data */
/* code for the stub */
/* Load the methodinfo pointer into REG_ITMP1 -- presumably consumed by
   asm_call_jit_compiler to identify the method to compile; confirm
   against vm/jit/asmpart.  */
M_MOV_IMM(m, REG_ITMP1);
/* Load the address of the assembler compiler-entry trampoline; the
   (elided) following instruction presumably jumps through REG_ITMP3. */
M_MOV_IMM(asm_call_jit_compiler, REG_ITMP3);
3498 /* codegen_emit_stub_native ****************************************************
3500 Emits a stub routine which calls a native method.
3502 *******************************************************************************/
/* Emit a stub that wraps a native (JNI) method: it builds a stack
   frame, sets up the stackframeinfo/localref bookkeeping via
   codegen_start_native_call, shuffles the Java arguments into the
   native calling convention, invokes the native function f, tears the
   bookkeeping down again and forwards any pending exception.
   NOTE(review): this view of the file is elided -- declarations,
   braces, case labels and several statements are missing; the
   comments below only cover the code that is visible. */
void codegen_emit_stub_native(jitdata *jd, methoddesc *nmd, functionptr f)
s4 i, j; /* count variables */
/* get required compiler data */
/* set some variables */
/* A static native gets JNIEnv plus the class object prepended (2 extra
   args); an instance native only gets JNIEnv (1 extra arg). */
nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;
/* calculate stackframe size */
/* Frame size is counted in machine words (SIZEOF_VOID_P units); the
   (elided) remainder of this expression presumably adds space for the
   native argument area -- confirm against the unelided file. */
cd->stackframesize =
sizeof(stackframeinfo) / SIZEOF_VOID_P +
sizeof(localref_table) / SIZEOF_VOID_P +
1 + /* function pointer */
4 + /* 4 arguments (start_native_call) */
/* keep stack 16-byte aligned */
ALIGN_ODD(cd->stackframesize); /* XXX this is wrong, +4 is missing */
/* create method header */
(void) dseg_add_unique_address(cd, code); /* CodeinfoPointer */
(void) dseg_add_unique_s4(cd, cd->stackframesize * 8); /* FrameSize */
(void) dseg_add_unique_s4(cd, 0); /* IsSync */
(void) dseg_add_unique_s4(cd, 0); /* IsLeaf */
(void) dseg_add_unique_s4(cd, 0); /* IntSave */
(void) dseg_add_unique_s4(cd, 0); /* FltSave */
(void) dseg_addlinenumbertablesize(cd);
(void) dseg_add_unique_s4(cd, 0); /* ExTableSize */
#if defined(ENABLE_PROFILING)
/* generate native method profiling code */
if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
/* count frequency */
/* Bump codeinfo->frequency in place each time the stub runs. */
M_MOV_IMM(code, REG_ITMP1);
M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
/* calculate stackframe size for native function */
M_ASUB_IMM(cd->stackframesize * 8, REG_SP);
#if !defined(NDEBUG)
emit_verbosecall_enter(jd);
/* get function address (this must happen before the stackframeinfo) */
#if !defined(WITH_STATIC_CLASSPATH)
/* Function address is unknown at stub-generation time when dynamically
   resolving natives; the patcher fills it in on first execution. */
codegen_addpatchref(cd, PATCHER_resolve_native, m, 0);
/* Park the native function pointer in the frame slot at 4*4 so it can
   be reloaded after the start_native_call helper returns. */
M_AST_IMM((ptrint) f, REG_SP, 4 * 4);
/* Mark the whole fpu stack as free for native functions (only for saved */
/* register count == 0). */
emit_ffree_reg(cd, 0);
emit_ffree_reg(cd, 1);
emit_ffree_reg(cd, 2);
emit_ffree_reg(cd, 3);
emit_ffree_reg(cd, 4);
emit_ffree_reg(cd, 5);
emit_ffree_reg(cd, 6);
emit_ffree_reg(cd, 7);
/* prepare data structures for native function call */
/* arg0 = pointer to the stackframeinfo area (top of this frame). */
M_MOV(REG_SP, REG_ITMP1);
M_AADD_IMM(cd->stackframesize * 8, REG_ITMP1);
M_AST(REG_ITMP1, REG_SP, 0 * 4);
M_IST_IMM(0, REG_SP, 1 * 4);
/* arg2 = caller stack pointer (this frame's SP + frame + return slot). */
M_MOV(REG_SP, REG_ITMP2);
M_AADD_IMM(cd->stackframesize * 8 + SIZEOF_VOID_P, REG_ITMP2);
M_AST(REG_ITMP2, REG_SP, 2 * 4);
/* arg3 = return address of this stub, loaded from above the frame. */
M_ALD(REG_ITMP3, REG_SP, cd->stackframesize * 8);
M_AST(REG_ITMP3, REG_SP, 3 * 4);
M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
/* Reload the (possibly patched) native function pointer saved earlier. */
M_ALD(REG_ITMP3, REG_SP, 4 * 4);
/* copy arguments into new stackframe */
/* Walk the Java descriptor backwards; j tracks the shifted index in
   the native descriptor (offset by JNIEnv and, for statics, the class). */
for (i = md->paramcount - 1, j = i + nativeparams; i >= 0; i--, j--) {
t = md->paramtypes[i].type;
if (!md->params[i].inmemory) {
/* no integer argument registers */
/* float/double in memory can be copied like int/longs */
/* Source slot lives in the caller's frame: skip our frame + return
   address (the trailing +4). */
s1 = md->params[i].regoff + cd->stackframesize * 8 + 4;
s2 = nmd->params[j].regoff;
M_ILD(REG_ITMP1, REG_SP, s1);
M_IST(REG_ITMP1, REG_SP, s2);
/* Two-word types (long/double) need the second 32-bit half as well. */
if (IS_2_WORD_TYPE(t)) {
M_ILD(REG_ITMP1, REG_SP, s1 + 4);
M_IST(REG_ITMP1, REG_SP, s2 + 4);
/* if function is static, put class into second argument */
if (m->flags & ACC_STATIC)
M_AST_IMM(m->class, REG_SP, 1 * 4);
/* put env into first argument */
M_AST_IMM(_Jv_env, REG_SP, 0 * 4);
/* call the native function */
/* save return value */
/* Spill the native result to the frame so it survives the
   codegen_finish_native_call helper below. */
switch (md->returntype.type) {
M_IST(REG_RESULT, REG_SP, 1 * 8);
M_LST(REG_RESULT_PACKED, REG_SP, 1 * 8);
emit_fsts_membase(cd, REG_SP, 1 * 8);
emit_fstl_membase(cd, REG_SP, 1 * 8);
#if !defined(NDEBUG)
emit_verbosecall_exit(jd);
/* remove native stackframe info */
M_MOV(REG_SP, REG_ITMP1);
M_AADD_IMM(cd->stackframesize * 8, REG_ITMP1);
M_AST(REG_ITMP1, REG_SP, 0 * 4);
M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
/* Helper's result (presumably the pending exception object -- confirm
   in codegen-common) is moved out of the return register before the
   real return value is restored. */
M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
/* restore return value */
switch (md->returntype.type) {
M_ILD(REG_RESULT, REG_SP, 1 * 8);
M_LLD(REG_RESULT_PACKED, REG_SP, 1 * 8);
emit_flds_membase(cd, REG_SP, 1 * 8);
emit_fldl_membase(cd, REG_SP, 1 * 8);
/* Deallocate the stub's stack frame. */
M_AADD_IMM(cd->stackframesize * 8, REG_SP);
/* check for exception */
/* handle exception */
M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
/* Exception PC = return address; the -2 rewinds into the call
   instruction so the handler search attributes the fault to the
   call site -- NOTE(review): confirm the 2-byte adjustment against
   the exception handling code. */
M_ALD(REG_ITMP2_XPC, REG_SP, 0);
M_ASUB_IMM(2, REG_ITMP2_XPC);
M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
/* generate patcher stubs */
emit_patcher_stubs(jd);
3725 * These are local overrides for various environment variables in Emacs.
3726 * Please do not remove this and leave it at the end of the file, where
3727 * Emacs will automagically detect them.
3728 * ---------------------------------------------------------------------
3731 * indent-tabs-mode: t
3735 * vim:noexpandtab:sw=4:ts=4: