1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
25 Contact: cacao@cacaojvm.org
27 Authors: Andreas Krall
33 $Id: codegen.c 5982 2006-11-15 15:30:36Z twisti $
45 #include "vm/jit/i386/md-abi.h"
47 #include "vm/jit/i386/codegen.h"
48 #include "vm/jit/i386/emit.h"
50 #include "mm/memory.h"
51 #include "native/jni.h"
52 #include "native/native.h"
54 #if defined(ENABLE_THREADS)
55 # include "threads/native/lock.h"
58 #include "vm/builtin.h"
59 #include "vm/exceptions.h"
60 #include "vm/global.h"
61 #include "vm/loader.h"
62 #include "vm/options.h"
63 #include "vm/stringlocal.h"
66 #include "vm/jit/asmpart.h"
67 #include "vm/jit/codegen-common.h"
68 #include "vm/jit/dseg.h"
69 #include "vm/jit/emit-common.h"
70 #include "vm/jit/jit.h"
71 #include "vm/jit/parse.h"
72 #include "vm/jit/patcher.h"
73 #include "vm/jit/reg.h"
74 #include "vm/jit/replace.h"
76 #if defined(ENABLE_SSA)
77 # include "vm/jit/optimizing/lsra.h"
78 # include "vm/jit/optimizing/ssa.h"
79 #elif defined(ENABLE_LSRA)
80 # include "vm/jit/allocator/lsra.h"
84 /* codegen *********************************************************************
86 Generates machine code.
88 *******************************************************************************/
90 #if defined(ENABLE_SSA)
91 void cg_move(codegendata *cd, s4 type, s4 src_regoff, s4 src_flags,
92 s4 dst_regoff, s4 dst_flags);
93 void codegen_insert_phi_moves(jitdata *jd, basicblock *bptr);
96 bool codegen(jitdata *jd)
102 s4 len, s1, s2, s3, d, disp;
108 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
109 builtintable_entry *bte;
111 rplpoint *replacementpoint;
114 #if defined(ENABLE_SSA)
116 bool last_cmd_was_goto;
118 last_cmd_was_goto = false;
122 /* get required compiler data */
129 /* prevent compiler warnings */
139 s4 savedregs_num = 0;
142 /* space to save used callee saved registers */
144 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
146 /* float register are saved on 2 4-byte stackslots */
147 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse) * 2;
149 cd->stackframesize = rd->memuse + savedregs_num;
152 #if defined(ENABLE_THREADS)
153 /* space to save argument of monitor_enter */
155 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
156 /* reserve 2 slots for long/double return values for monitorexit */
158 if (IS_2_WORD_TYPE(m->parseddesc->returntype.type))
159 cd->stackframesize += 2;
161 cd->stackframesize++;
165 /* create method header */
167 /* Keep stack of non-leaf functions 16-byte aligned. */
169 if (!jd->isleafmethod)
170 cd->stackframesize |= 0x3;
172 (void) dseg_addaddress(cd, code); /* CodeinfoPointer */
173 (void) dseg_adds4(cd, cd->stackframesize * 4); /* FrameSize */
175 #if defined(ENABLE_THREADS)
176 /* IsSync contains the offset relative to the stack pointer for the
177 argument of monitor_exit used in the exception handler. Since the
178 offset could be zero and give a wrong meaning of the flag it is
182 if (checksync && (m->flags & ACC_SYNCHRONIZED))
183 (void) dseg_adds4(cd, (rd->memuse + 1) * 4); /* IsSync */
186 (void) dseg_adds4(cd, 0); /* IsSync */
188 (void) dseg_adds4(cd, jd->isleafmethod); /* IsLeaf */
189 (void) dseg_adds4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
190 (void) dseg_adds4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
192 /* adds a reference for the length of the line number counter. We don't
193 know the size yet, since we evaluate the information during code
194 generation, to save one additional iteration over the whole
195 instructions. During code optimization the position could have changed
196 to the information gotten from the class file */
197 (void) dseg_addlinenumbertablesize(cd);
199 (void) dseg_adds4(cd, jd->exceptiontablelength); /* ExTableSize */
201 /* create exception table */
203 for (ex = jd->exceptiontable; ex != NULL; ex = ex->down) {
204 dseg_addtarget(cd, ex->start);
205 dseg_addtarget(cd, ex->end);
206 dseg_addtarget(cd, ex->handler);
207 (void) dseg_addaddress(cd, ex->catchtype.any);
210 /* generate method profiling code */
212 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
213 /* count frequency */
215 M_MOV_IMM(code, REG_ITMP3);
216 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
219 /* create stack frame (if necessary) */
221 if (cd->stackframesize)
222 M_ASUB_IMM(cd->stackframesize * 4, REG_SP);
224 /* save return address and used callee saved registers */
226 p = cd->stackframesize;
227 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
228 p--; M_AST(rd->savintregs[i], REG_SP, p * 4);
230 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
231 p-=2; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 4);
234 /* take arguments out of register or stack frame */
239 for (p = 0, l = 0; p < md->paramcount; p++) {
240 t = md->paramtypes[p].type;
242 #if defined(ENABLE_SSA)
247 varindex = jd->local_map[l * 5 + t];
249 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
252 if (varindex == UNUSED)
257 s1 = md->params[p].regoff;
259 if (IS_INT_LNG_TYPE(t)) { /* integer args */
260 if (!md->params[p].inmemory) { /* register arguments */
261 log_text("integer register argument");
263 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
264 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
266 else { /* reg arg -> spilled */
267 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
270 else { /* stack arguments */
271 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
272 emit_mov_membase_reg( /* + 4 for return address */
273 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4, var->vv.regoff);
274 /* + 4 for return address */
276 else { /* stack arg -> spilled */
277 if (!IS_2_WORD_TYPE(t)) {
278 #if defined(ENABLE_SSA)
279 /* no copy avoiding by now possible with SSA */
281 emit_mov_membase_reg( /* + 4 for return address */
282 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4,
284 emit_mov_reg_membase(
285 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
288 #endif /*defined(ENABLE_SSA)*/
289 /* reuse stack slot and avoid copying */
290 var->vv.regoff = cd->stackframesize + s1 + 1;
294 #if defined(ENABLE_SSA)
295 /* no copy avoiding by now possible with SSA */
297 emit_mov_membase_reg( /* + 4 for return address */
298 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4,
300 emit_mov_reg_membase(
301 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
302 emit_mov_membase_reg( /* + 4 for return address */
303 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4 + 4,
305 emit_mov_reg_membase(
306 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4 + 4);
309 #endif /*defined(ENABLE_SSA)*/
310 /* reuse stack slot and avoid copying */
311 var->vv.regoff = cd->stackframesize + s1 + 1;
316 else { /* floating args */
317 if (!md->params[p].inmemory) { /* register arguments */
318 log_text("There are no float argument registers!");
320 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
321 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
322 } else { /* reg arg -> spilled */
323 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 4 */
327 else { /* stack arguments */
328 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
331 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
333 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
338 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
340 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
343 } else { /* stack-arg -> spilled */
344 #if defined(ENABLE_SSA)
345 /* no copy avoiding by now possible with SSA */
347 emit_mov_membase_reg(
348 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4, REG_ITMP1);
349 emit_mov_reg_membase(
350 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
353 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
354 emit_fstps_membase(cd, REG_SP, var->vv.regoff * 4);
358 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
359 emit_fstpl_membase(cd, REG_SP, var->vv.regoff * 4);
363 #endif /*defined(ENABLE_SSA)*/
364 /* reuse stack slot and avoid copying */
365 var->vv.regoff = cd->stackframesize + s1 + 1;
371 /* call monitorenter function */
373 #if defined(ENABLE_THREADS)
374 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
377 if (m->flags & ACC_STATIC) {
378 M_MOV_IMM(&m->class->object.header, REG_ITMP1);
381 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 4 + 4);
384 codegen_add_nullpointerexception_ref(cd);
387 M_AST(REG_ITMP1, REG_SP, s1 * 4);
388 M_AST(REG_ITMP1, REG_SP, 0 * 4);
389 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
395 if (JITDATA_HAS_FLAG_VERBOSECALL(jd))
396 emit_verbosecall_enter(jd);
401 /* create replacement points */
403 if (!replace_create_replacement_points(jd))
406 #if defined(ENABLE_SSA)
407 /* with SSA Header is Basic Block 0 - insert phi Moves if necessary */
409 codegen_insert_phi_moves(jd, ls->basicblocks[0]);
412 /* end of header generation */
414 replacementpoint = jd->code->rplpoints;
416 /* walk through all basic blocks */
418 for (bptr = jd->basicblocks; bptr != NULL; bptr = bptr->next) {
420 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
422 if (bptr->flags >= BBREACHED) {
423 /* branch resolving */
425 codegen_resolve_branchrefs(cd, bptr);
427 /* handle replacement points */
429 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
430 replacementpoint->pc = (u1*)bptr->mpc; /* will be resolved later */
434 assert(cd->lastmcodeptr <= cd->mcodeptr);
435 cd->lastmcodeptr = cd->mcodeptr + 5; /* 5 byte jmp patch */
438 /* copy interface registers to their destination */
444 /* generate basic block profiling code */
446 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
447 /* count frequency */
449 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
450 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
454 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
455 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
458 # if defined(ENABLE_SSA)
460 last_cmd_was_goto = false;
464 var = VAR(bptr->invars[len]);
465 if (bptr->type != BBTYPE_STD) {
466 if (!IS_2_WORD_TYPE(var->type)) {
467 if (bptr->type == BBTYPE_EXH) {
468 d = codegen_reg_of_var(0, var, REG_ITMP1);
469 M_INTMOVE(REG_ITMP1, d);
470 emit_store(jd, NULL, var, d);
474 log_text("copy interface registers(EXH, SBR): longs \
475 have to be in memory (begin 1)");
483 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
487 var = VAR(bptr->invars[len]);
488 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
489 if (!IS_2_WORD_TYPE(var->type)) {
490 if (bptr->type == BBTYPE_EXH) {
491 d = codegen_reg_of_var(0, var, REG_ITMP1);
492 M_INTMOVE(REG_ITMP1, d);
493 emit_store(jd, NULL, var, d);
497 log_text("copy interface registers: longs have to be in \
504 assert((var->flags & INOUT));
509 /* walk through all instructions */
514 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
515 if (iptr->line != currentline) {
516 dseg_addlinenumber(cd, iptr->line);
517 currentline = iptr->line;
520 MCODECHECK(1024); /* 1kB should be enough */
523 case ICMD_INLINE_START:
525 insinfo_inline *insinfo = iptr->sx.s23.s3.inlineinfo;
527 /* handle replacement point */
529 replacementpoint->pc = (u1*) (ptrint) (cd->mcodeptr - cd->mcodebase);
531 /* XXX assert(cd->lastmcodeptr <= cd->mcodeptr); */
532 cd->lastmcodeptr = cd->mcodeptr + 5; /* 5 byte jmp patch */
534 #if defined(ENABLE_THREADS)
535 if (insinfo->synchronize) {
536 /* add monitor enter code */
537 if (insinfo->method->flags & ACC_STATIC) {
538 M_MOV_IMM(&insinfo->method->class->object.header, REG_ITMP1);
539 M_AST(REG_ITMP1, REG_SP, 0 * 4);
542 /* nullpointer check must have been performed before */
543 /* (XXX not done, yet) */
544 var = VAR(insinfo->synclocal);
545 if (var->flags & INMEMORY) {
546 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff * 4, REG_ITMP1);
547 M_AST(REG_ITMP1, REG_SP, 0 * 4);
550 M_AST(var->vv.regoff, REG_SP, 0 * 4);
554 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
558 dseg_addlinenumber_inline_start(cd, iptr);
562 case ICMD_INLINE_END:
564 insinfo_inline *insinfo = iptr->sx.s23.s3.inlineinfo;
566 dseg_addlinenumber_inline_end(cd, iptr);
567 dseg_addlinenumber(cd, iptr->line);
569 #if defined(ENABLE_THREADS)
570 if (insinfo->synchronize) {
571 /* add monitor exit code */
572 if (insinfo->method->flags & ACC_STATIC) {
573 M_MOV_IMM(&insinfo->method->class->object.header, REG_ITMP1);
574 M_AST(REG_ITMP1, REG_SP, 0 * 4);
577 var = VAR(insinfo->synclocal);
578 if (var->flags & INMEMORY) {
579 M_ALD(REG_ITMP1, REG_SP, var->vv.regoff * 4);
580 M_AST(REG_ITMP1, REG_SP, 0 * 4);
583 M_AST(var->vv.regoff, REG_SP, 0 * 4);
587 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
594 case ICMD_NOP: /* ... ==> ... */
597 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
599 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
602 codegen_add_nullpointerexception_ref(cd);
605 /* constant operations ************************************************/
607 case ICMD_ICONST: /* ... ==> ..., constant */
609 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
610 ICONST(d, iptr->sx.val.i);
611 emit_store_dst(jd, iptr, d);
614 case ICMD_LCONST: /* ... ==> ..., constant */
616 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
617 LCONST(d, iptr->sx.val.l);
618 emit_store_dst(jd, iptr, d);
621 case ICMD_FCONST: /* ... ==> ..., constant */
623 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
624 if (iptr->sx.val.f == 0.0) {
628 if (iptr->sx.val.i == 0x80000000) {
632 } else if (iptr->sx.val.f == 1.0) {
635 } else if (iptr->sx.val.f == 2.0) {
641 disp = dseg_addfloat(cd, iptr->sx.val.f);
642 emit_mov_imm_reg(cd, 0, REG_ITMP1);
644 emit_flds_membase(cd, REG_ITMP1, disp);
646 emit_store_dst(jd, iptr, d);
649 case ICMD_DCONST: /* ... ==> ..., constant */
651 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
652 if (iptr->sx.val.d == 0.0) {
656 if (iptr->sx.val.l == 0x8000000000000000LL) {
660 } else if (iptr->sx.val.d == 1.0) {
663 } else if (iptr->sx.val.d == 2.0) {
669 disp = dseg_adddouble(cd, iptr->sx.val.d);
670 emit_mov_imm_reg(cd, 0, REG_ITMP1);
672 emit_fldl_membase(cd, REG_ITMP1, disp);
674 emit_store_dst(jd, iptr, d);
677 case ICMD_ACONST: /* ... ==> ..., constant */
679 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
681 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
682 codegen_addpatchref(cd, PATCHER_aconst,
683 iptr->sx.val.c.ref, 0);
685 if (opt_showdisassemble) {
686 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
692 if (iptr->sx.val.anyptr == NULL)
695 M_MOV_IMM(iptr->sx.val.anyptr, d);
697 emit_store_dst(jd, iptr, d);
701 /* load/store/copy/move operations ************************************/
716 emit_copy(jd, iptr, VAROP(iptr->s1), VAROP(iptr->dst));
720 /* pop operations *****************************************************/
722 /* attention: double and longs are only one entry in CACAO ICMDs */
724 case ICMD_POP: /* ..., value ==> ... */
725 case ICMD_POP2: /* ..., value, value ==> ... */
730 /* integer operations *************************************************/
732 case ICMD_INEG: /* ..., value ==> ..., - value */
734 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
735 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
738 emit_store_dst(jd, iptr, d);
741 case ICMD_LNEG: /* ..., value ==> ..., - value */
743 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
744 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
746 M_NEG(GET_LOW_REG(d));
747 M_IADDC_IMM(0, GET_HIGH_REG(d));
748 M_NEG(GET_HIGH_REG(d));
749 emit_store_dst(jd, iptr, d);
752 case ICMD_I2L: /* ..., value ==> ..., value */
754 s1 = emit_load_s1(jd, iptr, EAX);
755 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
758 M_LNGMOVE(EAX_EDX_PACKED, d);
759 emit_store_dst(jd, iptr, d);
762 case ICMD_L2I: /* ..., value ==> ..., value */
764 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
765 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
767 emit_store_dst(jd, iptr, d);
770 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
772 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
773 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
777 emit_store_dst(jd, iptr, d);
780 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
782 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
783 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
785 emit_store_dst(jd, iptr, d);
788 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
790 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
791 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
793 emit_store_dst(jd, iptr, d);
797 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
799 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
800 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
801 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
808 emit_store_dst(jd, iptr, d);
812 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
813 /* sx.val.i = constant */
815 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
816 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
818 /* `inc reg' is slower on p4's (regarding to ia32
819 optimization reference manual and benchmarks) and as
823 M_IADD_IMM(iptr->sx.val.i, d);
824 emit_store_dst(jd, iptr, d);
827 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
829 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
830 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
831 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
832 M_INTMOVE(s1, GET_LOW_REG(d));
833 M_IADD(s2, GET_LOW_REG(d));
834 /* don't use REG_ITMP1 */
835 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
836 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
837 M_INTMOVE(s1, GET_HIGH_REG(d));
838 M_IADDC(s2, GET_HIGH_REG(d));
839 emit_store_dst(jd, iptr, d);
842 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
843 /* sx.val.l = constant */
845 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
846 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
848 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
849 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
850 emit_store_dst(jd, iptr, d);
853 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
855 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
856 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
857 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
859 M_INTMOVE(s1, REG_ITMP1);
860 M_ISUB(s2, REG_ITMP1);
861 M_INTMOVE(REG_ITMP1, d);
867 emit_store_dst(jd, iptr, d);
870 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
871 /* sx.val.i = constant */
873 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
874 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
876 M_ISUB_IMM(iptr->sx.val.i, d);
877 emit_store_dst(jd, iptr, d);
880 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
882 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
883 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
884 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
885 if (s2 == GET_LOW_REG(d)) {
886 M_INTMOVE(s1, REG_ITMP1);
887 M_ISUB(s2, REG_ITMP1);
888 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
891 M_INTMOVE(s1, GET_LOW_REG(d));
892 M_ISUB(s2, GET_LOW_REG(d));
894 /* don't use REG_ITMP1 */
895 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
896 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
897 if (s2 == GET_HIGH_REG(d)) {
898 M_INTMOVE(s1, REG_ITMP2);
899 M_ISUBB(s2, REG_ITMP2);
900 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
903 M_INTMOVE(s1, GET_HIGH_REG(d));
904 M_ISUBB(s2, GET_HIGH_REG(d));
906 emit_store_dst(jd, iptr, d);
909 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
910 /* sx.val.l = constant */
912 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
913 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
915 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
916 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
917 emit_store_dst(jd, iptr, d);
920 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
922 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
923 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
924 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
931 emit_store_dst(jd, iptr, d);
934 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
935 /* sx.val.i = constant */
937 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
938 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
939 M_IMUL_IMM(s1, iptr->sx.val.i, d);
940 emit_store_dst(jd, iptr, d);
943 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
945 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
946 s2 = emit_load_s2_low(jd, iptr, EDX);
947 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
949 M_INTMOVE(s1, REG_ITMP2);
950 M_IMUL(s2, REG_ITMP2);
952 s1 = emit_load_s1_low(jd, iptr, EAX);
953 s2 = emit_load_s2_high(jd, iptr, EDX);
956 M_IADD(EDX, REG_ITMP2);
958 s1 = emit_load_s1_low(jd, iptr, EAX);
959 s2 = emit_load_s2_low(jd, iptr, EDX);
962 M_INTMOVE(EAX, GET_LOW_REG(d));
963 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
965 emit_store_dst(jd, iptr, d);
968 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
969 /* sx.val.l = constant */
971 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
972 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
973 ICONST(EAX, iptr->sx.val.l);
975 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
976 M_IADD(REG_ITMP2, EDX);
977 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
978 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
979 M_IADD(REG_ITMP2, EDX);
980 M_LNGMOVE(EAX_EDX_PACKED, d);
981 emit_store_dst(jd, iptr, d);
984 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
986 s1 = emit_load_s1(jd, iptr, EAX);
987 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
988 d = codegen_reg_of_dst(jd, iptr, EAX);
993 codegen_add_arithmeticexception_ref(cd);
996 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
998 /* check as described in jvm spec */
1000 M_CMP_IMM(0x80000000, EAX);
1007 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
1008 emit_store_dst(jd, iptr, d);
1011 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1013 s1 = emit_load_s1(jd, iptr, EAX);
1014 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1015 d = codegen_reg_of_dst(jd, iptr, EDX);
1020 codegen_add_arithmeticexception_ref(cd);
1023 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
1025 /* check as described in jvm spec */
1027 M_CMP_IMM(0x80000000, EAX);
1035 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
1036 emit_store_dst(jd, iptr, d);
1039 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1040 /* sx.val.i = constant */
1042 /* TODO: optimize for `/ 2' */
1043 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1044 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1048 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
1049 M_SRA_IMM(iptr->sx.val.i, d);
1050 emit_store_dst(jd, iptr, d);
1053 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1054 /* sx.val.i = constant */
1056 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1057 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1059 M_MOV(s1, REG_ITMP1);
1063 M_AND_IMM(iptr->sx.val.i, d);
1065 M_BGE(2 + 2 + 6 + 2);
1066 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
1068 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
1070 emit_store_dst(jd, iptr, d);
1073 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1074 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1076 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
1077 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1079 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
1080 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
1082 codegen_add_arithmeticexception_ref(cd);
1084 bte = iptr->sx.s23.s3.bte;
1087 M_LST(s2, REG_SP, 2 * 4);
1089 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1090 M_LST(s1, REG_SP, 0 * 4);
1092 M_MOV_IMM(bte->fp, REG_ITMP3);
1094 emit_store_dst(jd, iptr, d);
1097 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1098 /* sx.val.i = constant */
1100 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1101 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1103 M_TEST(GET_HIGH_REG(d));
1105 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1106 M_IADDC_IMM(0, GET_HIGH_REG(d));
1107 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1108 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1109 emit_store_dst(jd, iptr, d);
1113 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1114 /* sx.val.l = constant */
1116 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1117 if (iptr->dst.var->flags & INMEMORY) {
1118 if (iptr->s1.var->flags & INMEMORY) {
1119 /* Alpha algorithm */
1121 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 4);
1123 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 4 + 4);
1129 /* TODO: hmm, don't know if this is always correct */
1131 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1133 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1139 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4, REG_ITMP1);
1140 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4 + 4, REG_ITMP2);
1142 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1143 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1144 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 4 + 4);
1145 emit_jcc(cd, CC_GE, disp);
1147 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4, REG_ITMP1);
1148 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4 + 4, REG_ITMP2);
1150 emit_neg_reg(cd, REG_ITMP1);
1151 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1152 emit_neg_reg(cd, REG_ITMP2);
1154 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1155 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1157 emit_neg_reg(cd, REG_ITMP1);
1158 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1159 emit_neg_reg(cd, REG_ITMP2);
1161 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 4);
1162 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 4 + 4);
1166 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1167 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1169 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1170 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1171 M_TEST(GET_LOW_REG(s1));
1177 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1179 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1180 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1181 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1182 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1185 emit_store_dst(jd, iptr, d);
1188 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1189 /* sx.val.i = constant */
1191 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1192 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1194 M_SLL_IMM(iptr->sx.val.i, d);
1195 emit_store_dst(jd, iptr, d);
1198 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1200 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1201 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1202 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1203 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1206 emit_store_dst(jd, iptr, d);
1209 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1210 /* sx.val.i = constant */
1212 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1213 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1215 M_SRA_IMM(iptr->sx.val.i, d);
1216 emit_store_dst(jd, iptr, d);
1219 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1221 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1222 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1223 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1224 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1227 emit_store_dst(jd, iptr, d);
1230 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1231 /* sx.val.i = constant */
1233 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1234 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1236 M_SRL_IMM(iptr->sx.val.i, d);
1237 emit_store_dst(jd, iptr, d);
1240 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1242 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1243 s2 = emit_load_s2(jd, iptr, ECX);
1244 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1247 M_TEST_IMM(32, ECX);
1249 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1250 M_CLR(GET_LOW_REG(d));
1251 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1252 M_SLL(GET_LOW_REG(d));
1253 emit_store_dst(jd, iptr, d);
1256 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1257 /* sx.val.i = constant */
1259 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1260 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1262 if (iptr->sx.val.i & 0x20) {
1263 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1264 M_CLR(GET_LOW_REG(d));
1265 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1269 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1271 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1273 emit_store_dst(jd, iptr, d);
1276 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1278 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1279 s2 = emit_load_s2(jd, iptr, ECX);
1280 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1283 M_TEST_IMM(32, ECX);
1285 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1286 M_SRA_IMM(31, GET_HIGH_REG(d));
1287 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1288 M_SRA(GET_HIGH_REG(d));
1289 emit_store_dst(jd, iptr, d);
1292 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1293 /* sx.val.i = constant */
1295 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1296 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1298 if (iptr->sx.val.i & 0x20) {
1299 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1300 M_SRA_IMM(31, GET_HIGH_REG(d));
1301 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1305 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1307 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1309 emit_store_dst(jd, iptr, d);
1312 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1314 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1315 s2 = emit_load_s2(jd, iptr, ECX);
1316 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1319 M_TEST_IMM(32, ECX);
1321 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1322 M_CLR(GET_HIGH_REG(d));
1323 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1324 M_SRL(GET_HIGH_REG(d));
1325 emit_store_dst(jd, iptr, d);
1328 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1329 /* sx.val.l = constant */
1331 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1332 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1334 if (iptr->sx.val.i & 0x20) {
1335 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1336 M_CLR(GET_HIGH_REG(d));
1337 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1341 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1343 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1345 emit_store_dst(jd, iptr, d);
1348 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1350 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1351 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1352 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1359 emit_store_dst(jd, iptr, d);
1362 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1363 /* sx.val.i = constant */
1365 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1366 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1368 M_AND_IMM(iptr->sx.val.i, d);
1369 emit_store_dst(jd, iptr, d);
1372 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1374 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1375 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1376 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1377 if (s2 == GET_LOW_REG(d))
1378 M_AND(s1, GET_LOW_REG(d));
1380 M_INTMOVE(s1, GET_LOW_REG(d));
1381 M_AND(s2, GET_LOW_REG(d));
1383 /* REG_ITMP1 probably contains low 32-bit of destination */
1384 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1385 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1386 if (s2 == GET_HIGH_REG(d))
1387 M_AND(s1, GET_HIGH_REG(d));
1389 M_INTMOVE(s1, GET_HIGH_REG(d));
1390 M_AND(s2, GET_HIGH_REG(d));
1392 emit_store_dst(jd, iptr, d);
1395 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1396 /* sx.val.l = constant */
1398 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1399 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1401 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1402 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1403 emit_store_dst(jd, iptr, d);
1406 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1408 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1409 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1410 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1417 emit_store_dst(jd, iptr, d);
1420 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1421 /* sx.val.i = constant */
1423 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1424 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1426 M_OR_IMM(iptr->sx.val.i, d);
1427 emit_store_dst(jd, iptr, d);
1430 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1432 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1433 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1434 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1435 if (s2 == GET_LOW_REG(d))
1436 M_OR(s1, GET_LOW_REG(d));
1438 M_INTMOVE(s1, GET_LOW_REG(d));
1439 M_OR(s2, GET_LOW_REG(d));
1441 /* REG_ITMP1 probably contains low 32-bit of destination */
1442 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1443 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1444 if (s2 == GET_HIGH_REG(d))
1445 M_OR(s1, GET_HIGH_REG(d));
1447 M_INTMOVE(s1, GET_HIGH_REG(d));
1448 M_OR(s2, GET_HIGH_REG(d));
1450 emit_store_dst(jd, iptr, d);
1453 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1454 /* sx.val.l = constant */
1456 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1457 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1459 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1460 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1461 emit_store_dst(jd, iptr, d);
1464 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1466 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1467 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1468 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1475 emit_store_dst(jd, iptr, d);
1478 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1479 /* sx.val.i = constant */
1481 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1482 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1484 M_XOR_IMM(iptr->sx.val.i, d);
1485 emit_store_dst(jd, iptr, d);
1488 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1490 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1491 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1492 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1493 if (s2 == GET_LOW_REG(d))
1494 M_XOR(s1, GET_LOW_REG(d));
1496 M_INTMOVE(s1, GET_LOW_REG(d));
1497 M_XOR(s2, GET_LOW_REG(d));
1499 /* REG_ITMP1 probably contains low 32-bit of destination */
1500 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1501 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1502 if (s2 == GET_HIGH_REG(d))
1503 M_XOR(s1, GET_HIGH_REG(d));
1505 M_INTMOVE(s1, GET_HIGH_REG(d));
1506 M_XOR(s2, GET_HIGH_REG(d));
1508 emit_store_dst(jd, iptr, d);
1511 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1512 /* sx.val.l = constant */
1514 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1515 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1517 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1518 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1519 emit_store_dst(jd, iptr, d);
1523 /* floating operations ************************************************/
1525 case ICMD_FNEG: /* ..., value ==> ..., - value */
1527 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1528 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1530 emit_store_dst(jd, iptr, d);
1533 case ICMD_DNEG: /* ..., value ==> ..., - value */
1535 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1536 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1538 emit_store_dst(jd, iptr, d);
1541 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1543 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1544 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1545 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1547 emit_store_dst(jd, iptr, d);
1550 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1552 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1553 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1554 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1556 emit_store_dst(jd, iptr, d);
1559 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1561 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1562 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1563 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1565 emit_store_dst(jd, iptr, d);
1568 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1570 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1571 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1572 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1574 emit_store_dst(jd, iptr, d);
1577 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1579 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1580 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1581 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1583 emit_store_dst(jd, iptr, d);
1586 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1588 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1589 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1590 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1592 emit_store_dst(jd, iptr, d);
1595 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1597 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1598 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1599 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1601 emit_store_dst(jd, iptr, d);
1604 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1606 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1607 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1608 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1610 emit_store_dst(jd, iptr, d);
1613 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1615 /* exchanged to skip fxch */
1616 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1617 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1618 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1619 /* emit_fxch(cd); */
1624 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1625 emit_store_dst(jd, iptr, d);
1626 emit_ffree_reg(cd, 0);
1630 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1632 /* exchanged to skip fxch */
1633 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1634 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1635 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1636 /* emit_fxch(cd); */
1641 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1642 emit_store_dst(jd, iptr, d);
1643 emit_ffree_reg(cd, 0);
1647 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1648 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1650 var = VAROP(iptr->s1);
1651 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1653 if (var->flags & INMEMORY) {
1654 emit_fildl_membase(cd, REG_SP, var->vv.regoff * 4);
1656 disp = dseg_adds4(cd, 0);
1657 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1659 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1660 emit_fildl_membase(cd, REG_ITMP1, disp);
1663 emit_store_dst(jd, iptr, d);
1666 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1667 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1669 var = VAROP(iptr->s1);
1670 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1671 if (var->flags & INMEMORY) {
1672 emit_fildll_membase(cd, REG_SP, var->vv.regoff * 4);
1675 log_text("L2F: longs have to be in memory");
1678 emit_store_dst(jd, iptr, d);
1681 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1683 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1684 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1686 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1689 /* Round to zero, 53-bit mode, exception masked */
1690 disp = dseg_adds4(cd, 0x0e7f);
1691 emit_fldcw_membase(cd, REG_ITMP1, disp);
1693 var = VAROP(iptr->dst);
1694 var1 = VAROP(iptr->s1);
1696 if (var->flags & INMEMORY) {
1697 emit_fistpl_membase(cd, REG_SP, var->vv.regoff * 4);
1699 /* Round to nearest, 53-bit mode, exceptions masked */
1700 disp = dseg_adds4(cd, 0x027f);
1701 emit_fldcw_membase(cd, REG_ITMP1, disp);
1703 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1704 REG_SP, var->vv.regoff * 4);
1707 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1709 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1712 disp = dseg_adds4(cd, 0);
1713 emit_fistpl_membase(cd, REG_ITMP1, disp);
1714 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1716 /* Round to nearest, 53-bit mode, exceptions masked */
1717 disp = dseg_adds4(cd, 0x027f);
1718 emit_fldcw_membase(cd, REG_ITMP1, disp);
1720 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1723 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1724 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1727 emit_jcc(cd, CC_NE, disp);
1729 /* XXX: change this when we use registers */
1730 emit_flds_membase(cd, REG_SP, var1->vv.regoff * 4);
1731 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1732 emit_call_reg(cd, REG_ITMP1);
1734 if (var->flags & INMEMORY) {
1735 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1738 M_INTMOVE(REG_RESULT, var->vv.regoff);
1742 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1744 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1745 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1747 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1750 /* Round to zero, 53-bit mode, exception masked */
1751 disp = dseg_adds4(cd, 0x0e7f);
1752 emit_fldcw_membase(cd, REG_ITMP1, disp);
1754 var = VAROP(iptr->dst);
1755 var1 = VAROP(iptr->s1);
1757 if (var->flags & INMEMORY) {
1758 emit_fistpl_membase(cd, REG_SP, var->vv.regoff * 4);
1760 /* Round to nearest, 53-bit mode, exceptions masked */
1761 disp = dseg_adds4(cd, 0x027f);
1762 emit_fldcw_membase(cd, REG_ITMP1, disp);
1764 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1765 REG_SP, var->vv.regoff * 4);
1768 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1770 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1773 disp = dseg_adds4(cd, 0);
1774 emit_fistpl_membase(cd, REG_ITMP1, disp);
1775 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1777 /* Round to nearest, 53-bit mode, exceptions masked */
1778 disp = dseg_adds4(cd, 0x027f);
1779 emit_fldcw_membase(cd, REG_ITMP1, disp);
1781 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1784 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1785 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1788 emit_jcc(cd, CC_NE, disp);
1790 /* XXX: change this when we use registers */
1791 emit_fldl_membase(cd, REG_SP, var1->vv.regoff * 4);
1792 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1793 emit_call_reg(cd, REG_ITMP1);
1795 if (var->flags & INMEMORY) {
1796 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1798 M_INTMOVE(REG_RESULT, var->vv.regoff);
1802 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1804 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1805 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1807 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1810 /* Round to zero, 53-bit mode, exception masked */
1811 disp = dseg_adds4(cd, 0x0e7f);
1812 emit_fldcw_membase(cd, REG_ITMP1, disp);
1814 var = VAROP(iptr->dst);
1815 var1 = VAROP(iptr->s1);
1817 if (var->flags & INMEMORY) {
1818 emit_fistpll_membase(cd, REG_SP, var->vv.regoff * 4);
1820 /* Round to nearest, 53-bit mode, exceptions masked */
1821 disp = dseg_adds4(cd, 0x027f);
1822 emit_fldcw_membase(cd, REG_ITMP1, disp);
1824 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1825 REG_SP, var->vv.regoff * 4 + 4);
1828 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1830 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1833 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1835 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4 + 4);
1837 emit_jcc(cd, CC_NE, disp);
1839 emit_alu_imm_membase(cd, ALU_CMP, 0,
1840 REG_SP, var->vv.regoff * 4);
1843 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1845 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1847 emit_jcc(cd, CC_NE, disp);
1849 /* XXX: change this when we use registers */
1850 emit_flds_membase(cd, REG_SP, var1->vv.regoff * 4);
1851 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1852 emit_call_reg(cd, REG_ITMP1);
1853 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1854 emit_mov_reg_membase(cd, REG_RESULT2,
1855 REG_SP, var->vv.regoff * 4 + 4);
1858 log_text("F2L: longs have to be in memory");
1863 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1865 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1866 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1868 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1871 /* Round to zero, 53-bit mode, exception masked */
1872 disp = dseg_adds4(cd, 0x0e7f);
1873 emit_fldcw_membase(cd, REG_ITMP1, disp);
1875 var = VAROP(iptr->dst);
1876 var1 = VAROP(iptr->s1);
1878 if (var->flags & INMEMORY) {
1879 emit_fistpll_membase(cd, REG_SP, var->vv.regoff * 4);
1881 /* Round to nearest, 53-bit mode, exceptions masked */
1882 disp = dseg_adds4(cd, 0x027f);
1883 emit_fldcw_membase(cd, REG_ITMP1, disp);
1885 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1886 REG_SP, var->vv.regoff * 4 + 4);
1889 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1891 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1894 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1896 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4 + 4);
1898 emit_jcc(cd, CC_NE, disp);
1900 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff * 4);
1903 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1905 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1907 emit_jcc(cd, CC_NE, disp);
1909 /* XXX: change this when we use registers */
1910 emit_fldl_membase(cd, REG_SP, var1->vv.regoff * 4);
1911 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1912 emit_call_reg(cd, REG_ITMP1);
1913 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1914 emit_mov_reg_membase(cd, REG_RESULT2,
1915 REG_SP, var->vv.regoff * 4 + 4);
1918 log_text("D2L: longs have to be in memory");
1923 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1925 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1926 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1928 emit_store_dst(jd, iptr, d);
1931 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1933 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1934 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1936 emit_store_dst(jd, iptr, d);
1939 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1942 /* exchanged to skip fxch */
1943 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1944 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1945 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1946 /* emit_fxch(cd); */
1949 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1950 emit_jcc(cd, CC_E, 6);
1951 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1953 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1954 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1955 emit_jcc(cd, CC_B, 3 + 5);
1956 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1957 emit_jmp_imm(cd, 3);
1958 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1959 emit_store_dst(jd, iptr, d);
1962 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1965 /* exchanged to skip fxch */
1966 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1967 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1968 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1969 /* emit_fxch(cd); */
1972 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1973 emit_jcc(cd, CC_E, 3);
1974 emit_movb_imm_reg(cd, 1, REG_AH);
1976 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1977 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1978 emit_jcc(cd, CC_B, 3 + 5);
1979 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1980 emit_jmp_imm(cd, 3);
1981 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1982 emit_store_dst(jd, iptr, d);
1986 /* memory operations **************************************************/
1988 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1990 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1991 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1992 gen_nullptr_check(s1);
1993 M_ILD(d, s1, OFFSET(java_arrayheader, size));
1994 emit_store_dst(jd, iptr, d);
1997 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1999 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2000 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2001 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2002 if (INSTRUCTION_MUST_CHECK(iptr)) {
2003 gen_nullptr_check(s1);
2006 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray, data[0]),
2008 emit_store_dst(jd, iptr, d);
2011 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2013 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2014 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2015 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2016 if (INSTRUCTION_MUST_CHECK(iptr)) {
2017 gen_nullptr_check(s1);
2020 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray, data[0]),
2022 emit_store_dst(jd, iptr, d);
2025 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2027 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2028 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2029 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2030 if (INSTRUCTION_MUST_CHECK(iptr)) {
2031 gen_nullptr_check(s1);
2034 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray, data[0]),
2036 emit_store_dst(jd, iptr, d);
2039 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2041 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2042 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2043 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2044 if (INSTRUCTION_MUST_CHECK(iptr)) {
2045 gen_nullptr_check(s1);
2048 emit_mov_memindex_reg(cd, OFFSET(java_intarray, data[0]),
2050 emit_store_dst(jd, iptr, d);
2053 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2055 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2056 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2057 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
2058 if (INSTRUCTION_MUST_CHECK(iptr)) {
2059 gen_nullptr_check(s1);
2063 var = VAROP(iptr->dst);
2065 assert(var->flags & INMEMORY);
2066 emit_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]),
2067 s1, s2, 3, REG_ITMP3);
2068 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff * 4);
2069 emit_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]) + 4,
2070 s1, s2, 3, REG_ITMP3);
2071 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff * 4 + 4);
2074 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2076 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2077 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2078 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2079 if (INSTRUCTION_MUST_CHECK(iptr)) {
2080 gen_nullptr_check(s1);
2083 emit_flds_memindex(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2084 emit_store_dst(jd, iptr, d);
2087 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2089 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2090 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2091 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
2092 if (INSTRUCTION_MUST_CHECK(iptr)) {
2093 gen_nullptr_check(s1);
2096 emit_fldl_memindex(cd, OFFSET(java_doublearray, data[0]), s1, s2,3);
2097 emit_store_dst(jd, iptr, d);
2100 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2102 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2103 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2104 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2105 if (INSTRUCTION_MUST_CHECK(iptr)) {
2106 gen_nullptr_check(s1);
2109 emit_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]),
2111 emit_store_dst(jd, iptr, d);
2115 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2117 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2118 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2119 if (INSTRUCTION_MUST_CHECK(iptr)) {
2120 gen_nullptr_check(s1);
2123 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2125 /* because EBP, ESI, EDI have no xH and xL nibbles */
2126 M_INTMOVE(s3, REG_ITMP3);
2129 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]),
2133 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2135 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2136 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2137 if (INSTRUCTION_MUST_CHECK(iptr)) {
2138 gen_nullptr_check(s1);
2141 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2142 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]),
2146 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2148 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2149 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2150 if (INSTRUCTION_MUST_CHECK(iptr)) {
2151 gen_nullptr_check(s1);
2154 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2155 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]),
2159 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2161 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2162 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2163 if (INSTRUCTION_MUST_CHECK(iptr)) {
2164 gen_nullptr_check(s1);
2167 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2168 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]),
2172 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2174 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2175 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2176 if (INSTRUCTION_MUST_CHECK(iptr)) {
2177 gen_nullptr_check(s1);
2181 var = VAROP(iptr->sx.s23.s3);
2183 assert(var->flags & INMEMORY);
2184 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff * 4, REG_ITMP3);
2185 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray, data[0])
2187 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff * 4 + 4, REG_ITMP3);
2188 emit_mov_reg_memindex(cd, REG_ITMP3,
2189 OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2192 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2194 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2195 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2196 if (INSTRUCTION_MUST_CHECK(iptr)) {
2197 gen_nullptr_check(s1);
2200 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2201 emit_fstps_memindex(cd, OFFSET(java_floatarray, data[0]), s1, s2,2);
2204 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2206 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2207 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2208 if (INSTRUCTION_MUST_CHECK(iptr)) {
2209 gen_nullptr_check(s1);
2212 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2213 emit_fstpl_memindex(cd, OFFSET(java_doublearray, data[0]),
2217 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2219 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2220 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2221 if (INSTRUCTION_MUST_CHECK(iptr)) {
2222 gen_nullptr_check(s1);
2225 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2227 M_AST(s1, REG_SP, 0 * 4);
2228 M_AST(s3, REG_SP, 1 * 4);
2229 M_MOV_IMM(BUILTIN_canstore, REG_ITMP1);
2233 codegen_add_arraystoreexception_ref(cd);
2235 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2236 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2237 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2238 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]),
2242 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2244 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2245 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2246 if (INSTRUCTION_MUST_CHECK(iptr)) {
2247 gen_nullptr_check(s1);
2250 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2251 OFFSET(java_bytearray, data[0]), s1, s2, 0);
2254 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2256 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2257 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2258 if (INSTRUCTION_MUST_CHECK(iptr)) {
2259 gen_nullptr_check(s1);
2262 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2263 OFFSET(java_chararray, data[0]), s1, s2, 1);
2266 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2268 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2269 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2270 if (INSTRUCTION_MUST_CHECK(iptr)) {
2271 gen_nullptr_check(s1);
2274 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2275 OFFSET(java_shortarray, data[0]), s1, s2, 1);
2278 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2280 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2281 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2282 if (INSTRUCTION_MUST_CHECK(iptr)) {
2283 gen_nullptr_check(s1);
2286 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2287 OFFSET(java_intarray, data[0]), s1, s2, 2);
2290 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2292 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2293 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2294 if (INSTRUCTION_MUST_CHECK(iptr)) {
2295 gen_nullptr_check(s1);
2298 emit_mov_imm_memindex(cd,
2299 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2300 OFFSET(java_longarray, data[0]), s1, s2, 3);
2301 emit_mov_imm_memindex(cd,
2302 ((s4)iptr->sx.s23.s3.constval) >> 31,
2303 OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2306 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2308 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2309 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2310 if (INSTRUCTION_MUST_CHECK(iptr)) {
2311 gen_nullptr_check(s1);
2314 emit_mov_imm_memindex(cd, 0,
2315 OFFSET(java_objectarray, data[0]), s1, s2, 2);
2319 case ICMD_GETSTATIC: /* ... ==> ..., value */
2321 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2322 unresolved_field *uf = iptr->sx.s23.s3.uf;
2324 fieldtype = uf->fieldref->parseddesc.fd->type;
2326 codegen_addpatchref(cd, PATCHER_get_putstatic,
2327 iptr->sx.s23.s3.uf, 0);
2329 if (opt_showdisassemble) {
2330 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2337 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2339 fieldtype = fi->type;
2341 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class)) {
2342 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2344 if (opt_showdisassemble) {
2345 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2349 disp = (ptrint) &(fi->value);
2352 M_MOV_IMM(disp, REG_ITMP1);
2353 switch (fieldtype) {
2356 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2357 M_ILD(d, REG_ITMP1, 0);
2360 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2361 M_LLD(d, REG_ITMP1, 0);
2364 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2365 M_FLD(d, REG_ITMP1, 0);
2368 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2369 M_DLD(d, REG_ITMP1, 0);
2372 emit_store_dst(jd, iptr, d);
2375 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2377 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2378 unresolved_field *uf = iptr->sx.s23.s3.uf;
2380 fieldtype = uf->fieldref->parseddesc.fd->type;
2382 codegen_addpatchref(cd, PATCHER_get_putstatic,
2383 iptr->sx.s23.s3.uf, 0);
2385 if (opt_showdisassemble) {
2386 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2393 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2395 fieldtype = fi->type;
2397 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class)) {
2398 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2400 if (opt_showdisassemble) {
2401 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2405 disp = (ptrint) &(fi->value);
2408 M_MOV_IMM(disp, REG_ITMP1);
2409 switch (fieldtype) {
2412 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2413 M_IST(s1, REG_ITMP1, 0);
2416 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2417 M_LST(s1, REG_ITMP1, 0);
2420 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2421 emit_fstps_membase(cd, REG_ITMP1, 0);
2424 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2425 emit_fstpl_membase(cd, REG_ITMP1, 0);
2430 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2431 /* val = value (in current instruction) */
2432 /* following NOP) */
2434 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2435 unresolved_field *uf = iptr->sx.s23.s3.uf;
2437 fieldtype = uf->fieldref->parseddesc.fd->type;
2439 codegen_addpatchref(cd, PATCHER_get_putstatic,
2442 if (opt_showdisassemble) {
2443 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2450 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2452 fieldtype = fi->type;
2454 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class)) {
2455 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2457 if (opt_showdisassemble) {
2458 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2462 disp = (ptrint) &(fi->value);
2465 M_MOV_IMM(disp, REG_ITMP1);
2466 switch (fieldtype) {
2469 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
2472 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2473 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
2480 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2482 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2483 gen_nullptr_check(s1);
2485 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2486 unresolved_field *uf = iptr->sx.s23.s3.uf;
2488 fieldtype = uf->fieldref->parseddesc.fd->type;
2490 codegen_addpatchref(cd, PATCHER_getfield,
2491 iptr->sx.s23.s3.uf, 0);
2493 if (opt_showdisassemble) {
2494 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2501 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2503 fieldtype = fi->type;
2507 switch (fieldtype) {
2510 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2511 M_ILD32(d, s1, disp);
2514 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2515 M_LLD32(d, s1, disp);
2518 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2519 M_FLD32(d, s1, disp);
2522 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2523 M_DLD32(d, s1, disp);
2526 emit_store_dst(jd, iptr, d);
2529 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2531 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2532 gen_nullptr_check(s1);
2534 /* must be done here because of code patching */
2536 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2537 unresolved_field *uf = iptr->sx.s23.s3.uf;
2539 fieldtype = uf->fieldref->parseddesc.fd->type;
2542 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2544 fieldtype = fi->type;
2547 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2548 if (IS_2_WORD_TYPE(fieldtype))
2549 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2551 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2554 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2556 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2557 unresolved_field *uf = iptr->sx.s23.s3.uf;
2559 codegen_addpatchref(cd, PATCHER_putfield, uf, 0);
2561 if (opt_showdisassemble) {
2562 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2569 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2574 switch (fieldtype) {
2577 M_IST32(s2, s1, disp);
2580 M_LST32(s2, s1, disp);
2583 emit_fstps_membase32(cd, s1, disp);
2586 emit_fstpl_membase32(cd, s1, disp);
2591 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2592 /* val = value (in current instruction) */
2593 /* following NOP) */
2595 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2596 gen_nullptr_check(s1);
2598 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2599 unresolved_field *uf = iptr->sx.s23.s3.uf;
2601 fieldtype = uf->fieldref->parseddesc.fd->type;
2603 codegen_addpatchref(cd, PATCHER_putfieldconst,
2606 if (opt_showdisassemble) {
2607 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2615 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2617 fieldtype = fi->type;
2622 switch (fieldtype) {
2625 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2628 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2629 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2637 /* branch operations **************************************************/
2639 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2641 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2642 M_INTMOVE(s1, REG_ITMP1_XPTR);
2644 #ifdef ENABLE_VERIFIER
2645 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2646 codegen_addpatchref(cd, PATCHER_athrow_areturn,
2647 iptr->sx.s23.s2.uc, 0);
2649 if (opt_showdisassemble) {
2650 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2653 #endif /* ENABLE_VERIFIER */
2655 M_CALL_IMM(0); /* passing exception pc */
2656 M_POP(REG_ITMP2_XPC);
2658 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2662 case ICMD_INLINE_GOTO:
2664 M_COPY(src, iptr->dst.var);
2668 case ICMD_GOTO: /* ... ==> ... */
2669 case ICMD_RET: /* ... ==> ... */
2671 #if defined(ENABLE_SSA)
2673 last_cmd_was_goto = true;
2674 /* In case of a Goto phimoves have to be inserted before the */
2676 codegen_insert_phi_moves(jd, bptr);
2680 codegen_addreference(cd, iptr->dst.block);
2684 case ICMD_JSR: /* ... ==> ... */
2687 codegen_addreference(cd, iptr->sx.s23.s3.jsrtarget.block);
2690 case ICMD_IFNULL: /* ..., value ==> ... */
2692 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2695 codegen_addreference(cd, iptr->dst.block);
2698 case ICMD_IFNONNULL: /* ..., value ==> ... */
2700 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2703 codegen_addreference(cd, iptr->dst.block);
2706 case ICMD_IFEQ: /* ..., value ==> ... */
2708 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2709 M_CMP_IMM(iptr->sx.val.i, s1);
2711 codegen_addreference(cd, iptr->dst.block);
2714 case ICMD_IFLT: /* ..., value ==> ... */
2716 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2717 M_CMP_IMM(iptr->sx.val.i, s1);
2719 codegen_addreference(cd, iptr->dst.block);
2722 case ICMD_IFLE: /* ..., value ==> ... */
2724 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2725 M_CMP_IMM(iptr->sx.val.i, s1);
2727 codegen_addreference(cd, iptr->dst.block);
2730 case ICMD_IFNE: /* ..., value ==> ... */
2732 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2733 M_CMP_IMM(iptr->sx.val.i, s1);
2735 codegen_addreference(cd, iptr->dst.block);
2738 case ICMD_IFGT: /* ..., value ==> ... */
2740 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2741 M_CMP_IMM(iptr->sx.val.i, s1);
2743 codegen_addreference(cd, iptr->dst.block);
2746 case ICMD_IFGE: /* ..., value ==> ... */
2748 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2749 M_CMP_IMM(iptr->sx.val.i, s1);
2751 codegen_addreference(cd, iptr->dst.block);
2754 case ICMD_IF_LEQ: /* ..., value ==> ... */
2756 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2757 if (iptr->sx.val.l == 0) {
2758 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2759 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2762 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2763 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2764 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2765 M_OR(REG_ITMP2, REG_ITMP1);
2768 codegen_addreference(cd, iptr->dst.block);
2771 case ICMD_IF_LLT: /* ..., value ==> ... */
2773 if (iptr->sx.val.l == 0) {
2774 /* If high 32-bit are less than zero, then the 64-bits
2776 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2781 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2782 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2784 codegen_addreference(cd, iptr->dst.block);
2786 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2789 codegen_addreference(cd, iptr->dst.block);
2792 case ICMD_IF_LLE: /* ..., value ==> ... */
2794 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2795 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2797 codegen_addreference(cd, iptr->dst.block);
2799 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2801 codegen_addreference(cd, iptr->dst.block);
2804 case ICMD_IF_LNE: /* ..., value ==> ... */
2806 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2807 if (iptr->sx.val.l == 0) {
2808 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2809 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2812 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2813 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2814 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2815 M_OR(REG_ITMP2, REG_ITMP1);
2818 codegen_addreference(cd, iptr->dst.block);
2821 case ICMD_IF_LGT: /* ..., value ==> ... */
2823 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2824 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2826 codegen_addreference(cd, iptr->dst.block);
2828 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2830 codegen_addreference(cd, iptr->dst.block);
2833 case ICMD_IF_LGE: /* ..., value ==> ... */
2835 if (iptr->sx.val.l == 0) {
2836 /* If high 32-bit are greater equal zero, then the
2838 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2843 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2844 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2846 codegen_addreference(cd, iptr->dst.block);
2848 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2851 codegen_addreference(cd, iptr->dst.block);
2854 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2855 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2857 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2858 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2861 codegen_addreference(cd, iptr->dst.block);
2864 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2866 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2867 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2868 M_INTMOVE(s1, REG_ITMP1);
2869 M_XOR(s2, REG_ITMP1);
2870 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2871 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2872 M_INTMOVE(s1, REG_ITMP2);
2873 M_XOR(s2, REG_ITMP2);
2874 M_OR(REG_ITMP1, REG_ITMP2);
2876 codegen_addreference(cd, iptr->dst.block);
2879 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2880 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2882 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2883 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2886 codegen_addreference(cd, iptr->dst.block);
2889 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2891 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2892 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2893 M_INTMOVE(s1, REG_ITMP1);
2894 M_XOR(s2, REG_ITMP1);
2895 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2896 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2897 M_INTMOVE(s1, REG_ITMP2);
2898 M_XOR(s2, REG_ITMP2);
2899 M_OR(REG_ITMP1, REG_ITMP2);
2901 codegen_addreference(cd, iptr->dst.block);
2904 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2906 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2907 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2910 codegen_addreference(cd, iptr->dst.block);
2913 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2915 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2916 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2919 codegen_addreference(cd, iptr->dst.block);
2920 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2921 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2925 codegen_addreference(cd, iptr->dst.block);
2928 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2930 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2931 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2934 codegen_addreference(cd, iptr->dst.block);
2937 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2939 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2940 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2943 codegen_addreference(cd, iptr->dst.block);
2944 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2945 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2949 codegen_addreference(cd, iptr->dst.block);
2952 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2954 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2955 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2958 codegen_addreference(cd, iptr->dst.block);
2961 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2963 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2964 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2967 codegen_addreference(cd, iptr->dst.block);
2968 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2969 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2973 codegen_addreference(cd, iptr->dst.block);
2976 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2978 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2979 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2982 codegen_addreference(cd, iptr->dst.block);
2985 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2987 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2988 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2991 codegen_addreference(cd, iptr->dst.block);
2992 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2993 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2997 codegen_addreference(cd, iptr->dst.block);
3001 case ICMD_IRETURN: /* ..., retvalue ==> ... */
3003 s1 = emit_load_s1(jd, iptr, REG_RESULT);
3004 M_INTMOVE(s1, REG_RESULT);
3005 goto nowperformreturn;
3007 case ICMD_LRETURN: /* ..., retvalue ==> ... */
3009 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
3010 M_LNGMOVE(s1, REG_RESULT_PACKED);
3011 goto nowperformreturn;
3013 case ICMD_ARETURN: /* ..., retvalue ==> ... */
3015 s1 = emit_load_s1(jd, iptr, REG_RESULT);
3016 M_INTMOVE(s1, REG_RESULT);
3018 #ifdef ENABLE_VERIFIER
3019 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3020 codegen_addpatchref(cd, PATCHER_athrow_areturn,
3021 iptr->sx.s23.s2.uc, 0);
3023 if (opt_showdisassemble) {
3024 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3027 #endif /* ENABLE_VERIFIER */
3028 goto nowperformreturn;
3030 case ICMD_FRETURN: /* ..., retvalue ==> ... */
3033 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
3034 goto nowperformreturn;
3036 case ICMD_RETURN: /* ... ==> ... */
3042 /* handle replacement point */
3044 replacementpoint->pc = (u1*) (ptrint) (cd->mcodeptr - cd->mcodebase);
3046 /* XXX assert(cd->lastmcodeptr <= cd->mcodeptr); */
3047 cd->lastmcodeptr = cd->mcodeptr + 5; /* 5 byte jmp patch */
3049 p = cd->stackframesize;
3051 #if !defined(NDEBUG)
3052 if (JITDATA_HAS_FLAG_VERBOSECALL(jd))
3053 emit_verbosecall_exit(jd);
3056 #if defined(ENABLE_THREADS)
3057 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
3058 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 4);
3060 /* we need to save the proper return value */
3061 switch (iptr->opc) {
3064 M_IST(REG_RESULT, REG_SP, rd->memuse * 4);
3068 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 4);
3072 emit_fstps_membase(cd, REG_SP, rd->memuse * 4);
3076 emit_fstpl_membase(cd, REG_SP, rd->memuse * 4);
3080 M_AST(REG_ITMP2, REG_SP, 0);
3081 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
3084 /* and now restore the proper return value */
3085 switch (iptr->opc) {
3088 M_ILD(REG_RESULT, REG_SP, rd->memuse * 4);
3092 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 4);
3096 emit_flds_membase(cd, REG_SP, rd->memuse * 4);
3100 emit_fldl_membase(cd, REG_SP, rd->memuse * 4);
3106 /* restore saved registers */
3108 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
3109 p--; M_ALD(rd->savintregs[i], REG_SP, p * 4);
3112 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
3114 emit_fldl_membase(cd, REG_SP, p * 4);
3115 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
3117 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
3120 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
3124 /* deallocate stack */
3126 if (cd->stackframesize)
3127 M_AADD_IMM(cd->stackframesize * 4, REG_SP);
3134 case ICMD_TABLESWITCH: /* ..., index ==> ... */
3137 branch_target_t *table;
3139 table = iptr->dst.table;
3141 l = iptr->sx.s23.s2.tablelow;
3142 i = iptr->sx.s23.s3.tablehigh;
3144 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3145 M_INTMOVE(s1, REG_ITMP1);
3148 M_ISUB_IMM(l, REG_ITMP1);
3153 M_CMP_IMM(i - 1, REG_ITMP1);
3156 codegen_addreference(cd, table[0].block); /* default target */
3158 /* build jump table top down and use address of lowest entry */
3163 dseg_addtarget(cd, table->block);
3167 /* length of dataseg after last dseg_addtarget is used
3170 M_MOV_IMM(0, REG_ITMP2);
3172 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
3178 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
3181 lookup_target_t *lookup;
3183 lookup = iptr->dst.lookup;
3185 i = iptr->sx.s23.s2.lookupcount;
3187 MCODECHECK((i<<2)+8);
3188 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3191 M_CMP_IMM(lookup->value, s1);
3193 codegen_addreference(cd, lookup->target.block);
3199 codegen_addreference(cd, iptr->sx.s23.s3.lookupdefault.block);
3203 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
3205 bte = iptr->sx.s23.s3.bte;
3209 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3211 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3212 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
3213 case ICMD_INVOKEINTERFACE:
3215 /* handle replacement point */
3217 replacementpoint->pc = (u1*) (ptrint) (cd->mcodeptr - cd->mcodebase);
3219 /* XXX assert(cd->lastmcodeptr <= cd->mcodeptr); */
3220 cd->lastmcodeptr = cd->mcodeptr + 5; /* 5 byte jmp patch */
3222 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3223 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
3227 lm = iptr->sx.s23.s3.fmiref->p.method;
3228 md = lm->parseddesc;
3232 s3 = md->paramcount;
3234 MCODECHECK((s3 << 1) + 64);
3236 /* copy arguments to registers or stack location */
3238 for (s3 = s3 - 1; s3 >= 0; s3--) {
3239 var = VAR(iptr->sx.s23.s2.args[s3]);
3241 /* Already Preallocated (ARGVAR) ? */
3242 if (var->flags & PREALLOC)
3244 if (IS_INT_LNG_TYPE(var->type)) {
3245 if (!md->params[s3].inmemory) {
3246 log_text("No integer argument registers available!");
3250 if (IS_2_WORD_TYPE(var->type)) {
3251 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
3252 M_LST(d, REG_SP, md->params[s3].regoff * 4);
3254 d = emit_load(jd, iptr, var, REG_ITMP1);
3255 M_IST(d, REG_SP, md->params[s3].regoff * 4);
3260 if (!md->params[s3].inmemory) {
3261 s1 = rd->argfltregs[md->params[s3].regoff];
3262 d = emit_load(jd, iptr, var, s1);
3266 d = emit_load(jd, iptr, var, REG_FTMP1);
3267 if (IS_2_WORD_TYPE(var->type))
3268 M_DST(d, REG_SP, md->params[s3].regoff * 4);
3270 M_FST(d, REG_SP, md->params[s3].regoff * 4);
3275 switch (iptr->opc) {
3277 disp = (ptrint) bte->fp;
3278 d = md->returntype.type;
3280 M_MOV_IMM(disp, REG_ITMP1);
3284 if (INSTRUCTION_MUST_CHECK(iptr)) {
3287 codegen_add_fillinstacktrace_ref(cd);
3291 case ICMD_INVOKESPECIAL:
3292 M_ALD(REG_ITMP1, REG_SP, 0);
3295 codegen_add_nullpointerexception_ref(cd);
3299 case ICMD_INVOKESTATIC:
3301 unresolved_method *um = iptr->sx.s23.s3.um;
3303 codegen_addpatchref(cd, PATCHER_invokestatic_special,
3306 if (opt_showdisassemble) {
3307 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3311 d = md->returntype.type;
3314 disp = (ptrint) lm->stubroutine;
3315 d = lm->parseddesc->returntype.type;
3318 M_MOV_IMM(disp, REG_ITMP2);
3322 case ICMD_INVOKEVIRTUAL:
3323 M_ALD(REG_ITMP1, REG_SP, 0 * 4);
3324 gen_nullptr_check(REG_ITMP1);
3327 unresolved_method *um = iptr->sx.s23.s3.um;
3329 codegen_addpatchref(cd, PATCHER_invokevirtual, um, 0);
3331 if (opt_showdisassemble) {
3332 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3336 d = md->returntype.type;
3339 s1 = OFFSET(vftbl_t, table[0]) +
3340 sizeof(methodptr) * lm->vftblindex;
3341 d = md->returntype.type;
3344 M_ALD(REG_METHODPTR, REG_ITMP1,
3345 OFFSET(java_objectheader, vftbl));
3346 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3350 case ICMD_INVOKEINTERFACE:
3351 M_ALD(REG_ITMP1, REG_SP, 0 * 4);
3352 gen_nullptr_check(REG_ITMP1);
3355 unresolved_method *um = iptr->sx.s23.s3.um;
3357 codegen_addpatchref(cd, PATCHER_invokeinterface, um, 0);
3359 if (opt_showdisassemble) {
3360 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3365 d = md->returntype.type;
3368 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3369 sizeof(methodptr) * lm->class->index;
3371 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3373 d = md->returntype.type;
3376 M_ALD(REG_METHODPTR, REG_ITMP1,
3377 OFFSET(java_objectheader, vftbl));
3378 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3379 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3384 /* store size of call code in replacement point */
3386 if (iptr->opc != ICMD_BUILTIN)
3387 replacementpoint[-1].callsize = (cd->mcodeptr - cd->mcodebase)
3388 - (ptrint) replacementpoint[-1].pc;
3390 /* d contains return type */
3392 if (d != TYPE_VOID) {
3393 #if defined(ENABLE_SSA)
3394 if ((ls == NULL) || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) ||
3395 (ls->lifetime[-iptr->dst.varindex-1].type != -1))
3396 /* a "living" stackslot */
3399 if (IS_INT_LNG_TYPE(d)) {
3400 if (IS_2_WORD_TYPE(d)) {
3401 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3402 M_LNGMOVE(REG_RESULT_PACKED, s1);
3405 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3406 M_INTMOVE(REG_RESULT, s1);
3410 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3412 emit_store_dst(jd, iptr, s1);
3418 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3419 /* val.a: (classinfo*) superclass */
3421 /* superclass is an interface:
3423 * OK if ((sub == NULL) ||
3424 * (sub->vftbl->interfacetablelength > super->index) &&
3425 * (sub->vftbl->interfacetable[-super->index] != NULL));
3427 * superclass is a class:
3429 * OK if ((sub == NULL) || (0
3430 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3431 * super->vftbl->diffval));
3434 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3435 /* object type cast-check */
3438 vftbl_t *supervftbl;
3441 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3447 super = iptr->sx.s23.s3.c.cls;
3448 superindex = super->index;
3449 supervftbl = super->vftbl;
3452 #if defined(ENABLE_THREADS)
3453 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3455 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3457 /* calculate interface checkcast code size */
3459 s2 = 2; /* mov_membase_reg */
3460 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3462 s2 += (2 + 4 /* mov_membase32_reg */ + 2 + 4 /* sub imm32 */ +
3463 2 /* test */ + 6 /* jcc */ + 2 + 4 /* mov_membase32_reg */ +
3464 2 /* test */ + 6 /* jcc */);
3467 s2 += (opt_showdisassemble ? 5 : 0);
3469 /* calculate class checkcast code size */
3471 s3 = 2; /* mov_membase_reg */
3472 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3474 s3 += 5 /* mov_imm_reg */ + 2 + 4 /* mov_membase32_reg */;
3477 if (s1 != REG_ITMP1) {
3479 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3482 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3489 s3 += (2 + 4 /* mov_membase32_reg */ + 2 /* sub */ +
3490 5 /* mov_imm_reg */ + 2 /* mov_membase_reg */);
3491 CALCOFFSETBYTES(s3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3494 s3 += 2 /* cmp */ + 6 /* jcc */;
3497 s3 += (opt_showdisassemble ? 5 : 0);
3499 /* if class is not resolved, check which code to call */
3501 if (super == NULL) {
3503 M_BEQ(5 + (opt_showdisassemble ? 5 : 0) + 6 + 6 + s2 + 5 + s3);
3505 codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
3506 iptr->sx.s23.s3.c.ref, 0);
3508 if (opt_showdisassemble) {
3509 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3512 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3513 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3517 /* interface checkcast code */
3519 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3520 if (super != NULL) {
3525 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3527 if (super == NULL) {
3528 codegen_addpatchref(cd,
3529 PATCHER_checkcast_instanceof_interface,
3530 iptr->sx.s23.s3.c.ref,
3533 if (opt_showdisassemble) {
3534 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3539 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3540 M_ISUB_IMM32(superindex, REG_ITMP3);
3543 codegen_add_classcastexception_ref(cd, s1);
3544 M_ALD32(REG_ITMP3, REG_ITMP2,
3545 OFFSET(vftbl_t, interfacetable[0]) -
3546 superindex * sizeof(methodptr*));
3549 codegen_add_classcastexception_ref(cd, s1);
3555 /* class checkcast code */
3557 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3558 if (super != NULL) {
3563 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3565 if (super == NULL) {
3566 codegen_addpatchref(cd, PATCHER_checkcast_class,
3567 iptr->sx.s23.s3.c.ref,
3570 if (opt_showdisassemble) {
3571 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3575 M_MOV_IMM(supervftbl, REG_ITMP3);
3576 #if defined(ENABLE_THREADS)
3577 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3579 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3581 /* if (s1 != REG_ITMP1) { */
3582 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3583 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3584 /* #if defined(ENABLE_THREADS) */
3585 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3587 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
3590 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3591 M_ISUB(REG_ITMP3, REG_ITMP2);
3592 M_MOV_IMM(supervftbl, REG_ITMP3);
3593 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3594 #if defined(ENABLE_THREADS)
3595 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3599 M_CMP(REG_ITMP3, REG_ITMP2);
3600 M_BA(0); /* (u) REG_ITMP2 > (u) REG_ITMP3 -> jump */
3601 codegen_add_classcastexception_ref(cd, s1);
3604 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3607 /* array type cast-check */
3609 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3610 M_AST(s1, REG_SP, 0 * 4);
3612 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3613 codegen_addpatchref(cd, PATCHER_builtin_arraycheckcast,
3614 iptr->sx.s23.s3.c.ref, 0);
3616 if (opt_showdisassemble) {
3617 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3621 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3622 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3625 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3628 codegen_add_classcastexception_ref(cd, s1);
3630 d = codegen_reg_of_dst(jd, iptr, s1);
3634 emit_store_dst(jd, iptr, d);
3637 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3638 /* val.a: (classinfo*) superclass */
3640 /* superclass is an interface:
3642 * return (sub != NULL) &&
3643 * (sub->vftbl->interfacetablelength > super->index) &&
3644 * (sub->vftbl->interfacetable[-super->index] != NULL);
3646 * superclass is a class:
3648 * return ((sub != NULL) && (0
3649 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3650 * super->vftbl->diffvall));
3655 vftbl_t *supervftbl;
3658 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3664 super = iptr->sx.s23.s3.c.cls;
3665 superindex = super->index;
3666 supervftbl = super->vftbl;
3669 #if defined(ENABLE_THREADS)
3670 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3673 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3674 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
3676 M_INTMOVE(s1, REG_ITMP1);
3680 /* calculate interface instanceof code size */
3682 s2 = 2; /* mov_membase_reg */
3683 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3685 s2 += (2 + 4 /* mov_membase32_reg */ + 2 + 4 /* alu_imm32_reg */ +
3686 2 /* test */ + 6 /* jcc */ + 2 + 4 /* mov_membase32_reg */ +
3687 2 /* test */ + 6 /* jcc */ + 5 /* mov_imm_reg */);
3690 s2 += (opt_showdisassemble ? 5 : 0);
3692 /* calculate class instanceof code size */
3694 s3 = 2; /* mov_membase_reg */
3695 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3696 s3 += 5; /* mov_imm_reg */
3698 CALCOFFSETBYTES(s3, REG_ITMP1, OFFSET(vftbl_t, baseval));
3700 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3702 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, baseval));
3704 s3 += (2 /* alu_reg_reg */ + 2 /* alu_reg_reg */ +
3705 2 /* alu_reg_reg */ + 6 /* jcc */ + 5 /* mov_imm_reg */);
3708 s3 += (opt_showdisassemble ? 5 : 0);
3712 /* if class is not resolved, check which code to call */
3716 M_BEQ(5 + (opt_showdisassemble ? 5 : 0) + 6 + 6 + s2 + 5 + s3);
3718 codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
3719 iptr->sx.s23.s3.c.ref, 0);
3721 if (opt_showdisassemble) {
3722 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3725 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3726 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3730 /* interface instanceof code */
3732 if (!super || (super->flags & ACC_INTERFACE)) {
3738 M_ALD(REG_ITMP1, s1, OFFSET(java_objectheader, vftbl));
3741 codegen_addpatchref(cd,
3742 PATCHER_checkcast_instanceof_interface,
3743 iptr->sx.s23.s3.c.ref, 0);
3745 if (opt_showdisassemble) {
3746 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3751 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3752 M_ISUB_IMM32(superindex, REG_ITMP3);
3755 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3756 6 /* jcc */ + 5 /* mov_imm_reg */);
3759 M_ALD32(REG_ITMP1, REG_ITMP1,
3760 OFFSET(vftbl_t, interfacetable[0]) -
3761 superindex * sizeof(methodptr*));
3763 /* emit_setcc_reg(cd, CC_A, d); */
3764 /* emit_jcc(cd, CC_BE, 5); */
3772 /* class instanceof code */
3774 if (!super || !(super->flags & ACC_INTERFACE)) {
3780 M_ALD(REG_ITMP1, s1, OFFSET(java_objectheader, vftbl));
3783 codegen_addpatchref(cd, PATCHER_instanceof_class,
3784 iptr->sx.s23.s3.c.ref, 0);
3786 if (opt_showdisassemble) {
3787 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3791 M_MOV_IMM(supervftbl, REG_ITMP2);
3792 #if defined(ENABLE_THREADS)
3793 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3795 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3796 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3797 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3798 #if defined(ENABLE_THREADS)
3799 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3801 M_ISUB(REG_ITMP2, REG_ITMP1);
3802 M_CLR(d); /* may be REG_ITMP2 */
3803 M_CMP(REG_ITMP3, REG_ITMP1);
3807 emit_store_dst(jd, iptr, d);
3813 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3815 /* check for negative sizes and copy sizes to stack if necessary */
3817 MCODECHECK((iptr->s1.argcount << 1) + 64);
3819 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3820 /* copy SAVEDVAR sizes to stack */
3821 var = VAR(iptr->sx.s23.s2.args[s1]);
3823 /* Already Preallocated? */
3824 if (!(var->flags & PREALLOC)) {
3825 if (var->flags & INMEMORY) {
3826 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff * 4);
3827 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3830 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3834 /* is a patcher function set? */
3836 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3837 codegen_addpatchref(cd, PATCHER_builtin_multianewarray,
3838 iptr->sx.s23.s3.c.ref, 0);
3840 if (opt_showdisassemble) {
3841 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3848 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3850 /* a0 = dimension count */
3852 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3854 /* a1 = arraydescriptor */
3856 M_IST_IMM(disp, REG_SP, 1 * 4);
3858 /* a2 = pointer to dimensions = stack pointer */
3860 M_MOV(REG_SP, REG_ITMP1);
3861 M_AADD_IMM(3 * 4, REG_ITMP1);
3862 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3864 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3867 /* check for exception before result assignment */
3871 codegen_add_fillinstacktrace_ref(cd);
3873 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3874 M_INTMOVE(REG_RESULT, s1);
3875 emit_store_dst(jd, iptr, s1);
3880 new_internalerror("Unknown ICMD %d", iptr->opc);
3884 } /* for instruction */
3888 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3891 #if defined(ENABLE_SSA)
3893 /* by edge splitting, in Blocks with phi moves there can only */
3894 /* be a goto as last command, no other Jump/Branch Command */
3895 if (!last_cmd_was_goto)
3896 codegen_insert_phi_moves(jd, bptr);
3901 /* At the end of a basic block we may have to append some nops,
3902 because the patcher stub calling code might be longer than the
3903 actual instruction. So codepatching does not change the
3904 following block unintentionally. */
3906 if (cd->mcodeptr < cd->lastmcodeptr) {
3907 while (cd->mcodeptr < cd->lastmcodeptr) {
3912 } /* if (bptr -> flags >= BBREACHED) */
3913 } /* for basic block */
3915 dseg_createlinenumbertable(cd);
3918 /* generate exception and patcher stubs */
3920 emit_exception_stubs(jd);
3921 emit_patcher_stubs(jd);
3922 emit_replacement_stubs(jd);
3926 /* everything's ok */
3931 #if defined(ENABLE_SSA)
/* codegen_insert_phi_moves ***************************************************

   Emit the machine moves required by the phi functions of basic block
   bptr (SSA register allocation only).  For each recorded phi move the
   target (t_*) and source (s_*) lifetimes are resolved to their
   current location -- a register or a stack slot -- and cg_move() is
   called to emit the actual i386 move code.

   NOTE(review): this listing is an elided excerpt; the embedded
   original line numbers are non-consecutive, so statements (else
   branches, early returns, closing braces) are missing in between.
   The comments below only describe what the visible lines establish.

*******************************************************************************/

3932 void codegen_insert_phi_moves(jitdata *jd, basicblock *bptr) {
3933 /* look for phi moves */
3934 int t_a,s_a,i, type;
3935 int t_lt, s_lt; /* lifetime indices of phi_moves */
3936 s4 t_regoff, s_regoff, s_flags, t_flags;
/* Moves with the highest phi indices are emitted first; the conflict
   resolution pass relies on exactly this ordering (see comment). */
3945 /* Moves from phi functions with highest indices have to be */
3946 /* inserted first, since this is the order as is used for */
3947 /* conflict resolution */
3948 for(i = ls->num_phi_moves[bptr->nr] - 1; i >= 0 ; i--) {
/* t_a = target index, s_a = source index of this phi move */
3949 t_a = ls->phi_moves[bptr->nr][i][0];
3950 s_a = ls->phi_moves[bptr->nr][i][1];
3951 #if defined(SSA_DEBUG_VERBOSE)
3953 printf("BB %3i Move %3i <- %3i ", bptr->nr, t_a, s_a);
/* resolve the target lifetime: local variable vs. stack slot path
   (the branch structure between these lines is elided) */
3956 /* local var lifetimes */
3957 t_lt = ls->maxlifetimes + t_a;
3958 type = ls->lifetime[t_lt].type;
3962 type = ls->lifetime[t_lt].local_ss->s->type;
3963 /* stackslot lifetime */
3967 #if defined(SSA_DEBUG_VERBOSE)
/* NOTE(review): "where joined" is a typo for "were joined" in this
   debug output; left untouched here because it is a runtime string */
3969 printf("...returning - phi lifetimes where joined\n");
/* resolve the source lifetime the same way */
3975 /* local var lifetimes */
3976 s_lt = ls->maxlifetimes + s_a;
3977 type = ls->lifetime[s_lt].type;
3981 type = ls->lifetime[s_lt].type;
3982 /* stackslot lifetime */
3986 #if defined(SSA_DEBUG_VERBOSE)
3988 printf("...returning - phi lifetimes where joined\n");
/* fetch the target location: flags encode register vs. memory
   (IS_INMEMORY below), regoff is the register number or -- presumably
   -- the stack-slot index (cg_move scales it by 4); TODO confirm */
3994 t_flags = VAR(t_a)->flags;
3995 t_regoff = VAR(t_a)->vv.regoff;
3999 t_flags = ls->lifetime[t_lt].local_ss->s->flags;
4000 t_regoff = ls->lifetime[t_lt].local_ss->s->regoff;
/* fetch the source location analogously */
4004 /* local var move */
4005 s_flags = VAR(s_a)->flags;
4006 s_regoff = VAR(s_a)->vv.regoff;
4008 /* stackslot lifetime */
4009 s_flags = ls->lifetime[s_lt].local_ss->s->flags;
4010 s_regoff = ls->lifetime[s_lt].local_ss->s->regoff;
4014 #if defined(SSA_DEBUG_VERBOSE)
4016 printf("...returning - phi lifetimes where joined\n");
/* emit the actual move (mem<->mem, reg<->mem or reg<->reg) */
4021 cg_move(cd, type, s_regoff, s_flags, t_regoff, t_flags);
/* verbose trace of the emitted move: M = memory slot, R = register */
4023 #if defined(SSA_DEBUG_VERBOSE)
4024 if (compileverbose) {
4025 if (IS_INMEMORY(t_flags) && IS_INMEMORY(s_flags)) {
4027 printf("M%3i <- M%3i",t_regoff,s_regoff);
4029 else if (IS_INMEMORY(s_flags)) {
4031 printf("R%3i <- M%3i",t_regoff,s_regoff);
4033 else if (IS_INMEMORY(t_flags)) {
4035 printf("M%3i <- R%3i",t_regoff,s_regoff);
4039 printf("R%3i <- R%3i",t_regoff,s_regoff);
4043 #endif /* defined(SSA_DEBUG_VERBOSE) */
/* cg_move *********************************************************************

   Emit an i386 move of one value of the given type between two
   locations.  A location is a (regoff, flags) pair: IS_INMEMORY(flags)
   means a stack slot at regoff * 4 bytes above REG_SP, otherwise
   regoff names a register.

   memory -> memory moves go through the FPU (flt/dbl) or through
   REG_ITMP1 (int; longs as two 32-bit words).  When a register is
   involved, only 32-bit integer values are legal on i386 -- flt/dbl
   and long operands trigger the log_text() diagnostics below.

   NOTE(review): elided excerpt -- the embedded original line numbers
   skip, so some else branches and closing braces are missing here.

*******************************************************************************/

4047 void cg_move(codegendata *cd, s4 type, s4 src_regoff, s4 src_flags,
4048 s4 dst_regoff, s4 dst_flags) {
4049 if ((IS_INMEMORY(dst_flags)) && (IS_INMEMORY(src_flags))) {
/* memory -> memory: nothing to emit when both use the same slot */
4051 if (dst_regoff != src_regoff) {
4052 if (!IS_2_WORD_TYPE(type)) {
4053 if (IS_FLT_DBL_TYPE(type)) {
/* single-word float: fld/fstp through the FPU stack */
4054 emit_flds_membase(cd, REG_SP, src_regoff * 4);
4055 emit_fstps_membase(cd, REG_SP, dst_regoff * 4);
/* single-word int: bounce through REG_ITMP1 */
4057 emit_mov_membase_reg(cd, REG_SP, src_regoff * 4,
4059 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, dst_regoff * 4);
4061 } else { /* LONG OR DOUBLE */
4062 if (IS_FLT_DBL_TYPE(type)) {
/* double: 64-bit fld/fstp */
4063 emit_fldl_membase( cd, REG_SP, src_regoff * 4);
4064 emit_fstpl_membase(cd, REG_SP, dst_regoff * 4);
/* long: copy low then high 32-bit word via REG_ITMP1 */
4066 emit_mov_membase_reg(cd, REG_SP, src_regoff * 4,
4068 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, dst_regoff * 4);
4069 emit_mov_membase_reg(cd, REG_SP, src_regoff * 4 + 4,
4071 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP,
4072 dst_regoff * 4 + 4);
/* at least one side is a register: only 32-bit ints may live there */
4077 if (IS_FLT_DBL_TYPE(type)) {
4078 log_text("cg_move: flt/dbl type have to be in memory\n");
4081 if (IS_2_WORD_TYPE(type)) {
4082 log_text("cg_move: longs have to be in memory\n");
4085 if (IS_INMEMORY(src_flags)) {
/* load: stack slot -> register */
4087 emit_mov_membase_reg(cd, REG_SP, src_regoff * 4, dst_regoff);
4088 } else if (IS_INMEMORY(dst_flags)) {
/* store: register -> stack slot */
4090 emit_mov_reg_membase(cd, src_regoff, REG_SP, dst_regoff * 4);
/* register -> register */
4093 /* only ints can be in regs on i386 */
4094 M_INTMOVE(src_regoff,dst_regoff);
4098 #endif /* defined(ENABLE_SSA) */
4100 /* createcompilerstub **********************************************************
4102 Creates a stub routine which calls the compiler.
4104 *******************************************************************************/
/* Layout of a compiler stub: a data area of three pointers (entry
   point, unused slot, codeinfo -- see createcompilerstub) followed by
   COMPILERSTUB_CODESIZE bytes of machine code.

   All three macros are fully parenthesized so they expand safely
   inside larger expressions: the original unparenthesized forms
   (3 * SIZEOF_VOID_P and A + B) mis-expand under surrounding
   operators, e.g. 2 * COMPILERSTUB_SIZE or size arithmetic performed
   by an allocation macro such as CNEW. */
#define COMPILERSTUB_DATASIZE (3 * SIZEOF_VOID_P)
#define COMPILERSTUB_CODESIZE 12
#define COMPILERSTUB_SIZE (COMPILERSTUB_DATASIZE + COMPILERSTUB_CODESIZE)
/* createcompilerstub **********************************************************

   Allocate and fill a small stub for method m that, when executed,
   transfers control to asm_call_jit_compiler so the method gets
   JIT-compiled on first call.  The stub consists of
   COMPILERSTUB_DATASIZE bytes of data (addressed through d[]) followed
   by COMPILERSTUB_CODESIZE bytes of code; s is advanced past the data
   part before the code is emitted.

   NOTE(review): elided excerpt -- the declarations of d, cd, code and
   dumpsize, the jump through REG_ITMP3 and the trailing return are
   among the missing lines.

*******************************************************************************/

4112 u1 *createcompilerstub(methodinfo *m)
4114 u1 *s; /* memory to hold the stub */
4120 s = CNEW(u1, COMPILERSTUB_SIZE);
4122 /* set data pointer and code pointer */
/* skip the data area; machine code is emitted after it */
4125 s = s + COMPILERSTUB_DATASIZE;
4127 /* mark start of dump memory area */
/* dump_size()/dump_release() bracket the temporary allocations below
   (presumably a scratch arena allocator -- TODO confirm semantics) */
4129 dumpsize = dump_size();
4131 cd = DNEW(codegendata);
4134 /* Store the codeinfo pointer in the same place as in the
4135 methodheader for compiled methods. */
4137 code = code_codeinfo_new(m);
/* stub data segment: compiler entry point first, codeinfo in slot 2 */
4139 d[0] = (ptrint) asm_call_jit_compiler;
4141 d[2] = (ptrint) code;
4143 /* code for the stub */
4145 M_MOV_IMM(m, REG_ITMP1); /* method info */
/* load the compiler entry into REG_ITMP3 (the jump itself is elided) */
4146 M_MOV_IMM(asm_call_jit_compiler, REG_ITMP3);
4149 #if defined(ENABLE_STATISTICS)
4151 count_cstub_len += COMPILERSTUB_SIZE;
4154 /* release dump area */
4156 dump_release(dumpsize);
4162 /* createnativestub ************************************************************
4164 Creates a stub routine which calls a native method.
4166 *******************************************************************************/
/* createnativestub: emits the machine-code wrapper that bridges from the
   JVM calling convention to the C native function f for method jd->m,
   handling frame setup, JNI arguments, stackframeinfo registration and
   exception propagation.
   NOTE(review): this listing is elided — declarations, the actual
   M_CALL emissions, #endif lines and closing braces are missing;
   comments below describe only what the visible lines establish. */
4168 u1 *createnativestub(functionptr f, jitdata *jd, methoddesc *nmd)
4176 s4 i, j; /* count variables */
4180 /* get required compiler data */
4187 /* set some variables */
/* a static native gets two hidden JNI parameters (JNIEnv* and jclass),
   a non-static one only JNIEnv* */
4190 nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;
4192 /* calculate stackframe size */
/* frame slots (4 bytes each): stackframeinfo + localref_table + saved
   function pointer + 4 argument slots for the start_native_call helper;
   further summands (presumably the native argument area, nmd->memuse)
   are elided from this listing */
4194 cd->stackframesize =
4195 sizeof(stackframeinfo) / SIZEOF_VOID_P +
4196 sizeof(localref_table) / SIZEOF_VOID_P +
4197 1 + /* function pointer */
4198 4 * 4 + /* 4 arguments (start_native_call) */
4201 /* keep stack 16-byte aligned */
/* forces slot count == 3 (mod 4); together with the 4-byte return
   address the frame total stays a multiple of 16 */
4203 cd->stackframesize |= 0x3;
4205 /* create method header */
/* per-method data-segment header, same layout the runtime expects for
   compiled methods */
4207 (void) dseg_addaddress(cd, code); /* CodeinfoPointer */
4208 (void) dseg_adds4(cd, cd->stackframesize * 4); /* FrameSize */
4209 (void) dseg_adds4(cd, 0); /* IsSync */
4210 (void) dseg_adds4(cd, 0); /* IsLeaf */
4211 (void) dseg_adds4(cd, 0); /* IntSave */
4212 (void) dseg_adds4(cd, 0); /* FltSave */
4213 (void) dseg_addlinenumbertablesize(cd);
4214 (void) dseg_adds4(cd, 0); /* ExTableSize */
4216 /* generate native method profiling code */
4218 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
4219 /* count frequency */
/* increment code->frequency in memory */
4221 M_MOV_IMM(code, REG_ITMP1);
4222 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
4225 /* calculate stackframe size for native function */
/* allocate the stub's own frame */
4227 M_ASUB_IMM(cd->stackframesize * 4, REG_SP);
4229 #if !defined(NDEBUG)
4230 if (JITDATA_HAS_FLAG_VERBOSECALL(jd))
4231 emit_verbosecall_enter(jd);
4234 /* get function address (this must happen before the stackframeinfo) */
4236 #if !defined(WITH_STATIC_CLASSPATH)
/* without a static classpath the native address is resolved lazily by
   the patcher */
4238 codegen_addpatchref(cd, PATCHER_resolve_native, m, 0);
4240 if (opt_showdisassemble) {
/* NOTE(review): presumably padding so the patched site disassembles
   cleanly — confirm against the patcher code */
4241 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* store the (possibly patched) function pointer in frame slot 4 */
4246 M_AST_IMM((ptrint) f, REG_SP, 4 * 4);
4248 /* Mark the whole fpu stack as free for native functions (only for saved */
4249 /* register count == 0). */
4251 emit_ffree_reg(cd, 0);
4252 emit_ffree_reg(cd, 1);
4253 emit_ffree_reg(cd, 2);
4254 emit_ffree_reg(cd, 3);
4255 emit_ffree_reg(cd, 4);
4256 emit_ffree_reg(cd, 5);
4257 emit_ffree_reg(cd, 6);
4258 emit_ffree_reg(cd, 7);
4260 /* prepare data structures for native function call */
/* argument 0: SP + framesize, i.e. the address just above this frame */
4262 M_MOV(REG_SP, REG_ITMP1);
4263 M_AADD_IMM(cd->stackframesize * 4, REG_ITMP1);
4265 M_AST(REG_ITMP1, REG_SP, 0 * 4);
/* argument 1: constant 0 — NOTE(review): meaning not visible here,
   confirm against codegen_start_native_call's signature */
4266 M_IST_IMM(0, REG_SP, 1 * 4);
/* argument 2: one word past the frame end (past the return address) */
4269 M_MOV(REG_SP, REG_ITMP2);
4270 M_AADD_IMM(cd->stackframesize * 4 + SIZEOF_VOID_P, REG_ITMP2);
4272 M_AST(REG_ITMP2, REG_SP, 2 * 4);
/* argument 3: the word stored at SP + framesize (the return address) */
4273 M_ALD(REG_ITMP3, REG_SP, cd->stackframesize * 4);
4274 M_AST(REG_ITMP3, REG_SP, 3 * 4);
/* load helper address; the actual call emission is elided here */
4275 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
/* reload the function pointer saved in slot 4 (may have been patched) */
4278 M_ALD(REG_ITMP3, REG_SP, 4 * 4);
4280 /* copy arguments into new stackframe */
4282 for (i = md->paramcount - 1, j = i + nativeparams; i >= 0; i--, j--) {
4283 t = md->paramtypes[i].type;
4285 if (!md->params[i].inmemory) {
4286 /* no integer argument registers */
4287 } else { /* float/double in memory can be copied like int/longs */
/* source: caller's frame (+1 presumably skips the return address);
   destination: slot j of the native argument area, shifted by the
   hidden JNI parameter(s) */
4288 s1 = (md->params[i].regoff + cd->stackframesize + 1) * 4;
4289 s2 = nmd->params[j].regoff * 4;
4291 M_ILD(REG_ITMP1, REG_SP, s1);
4292 M_IST(REG_ITMP1, REG_SP, s2);
/* long/double: copy the second 32-bit word as well */
4293 if (IS_2_WORD_TYPE(t)) {
4294 M_ILD(REG_ITMP1, REG_SP, s1 + 4);
4295 M_IST(REG_ITMP1, REG_SP, s2 + 4);
4300 /* if function is static, put class into second argument */
4302 if (m->flags & ACC_STATIC)
4303 M_AST_IMM(m->class, REG_SP, 1 * 4);
4305 /* put env into first argument */
4307 M_AST_IMM(_Jv_env, REG_SP, 0 * 4);
4309 /* call the native function */
/* NOTE(review): the call through the loaded function pointer is elided
   from this listing */
4313 /* save return value */
/* spill the result to the stack so it survives the
   codegen_finish_native_call helper below */
4315 if (md->returntype.type != TYPE_VOID) {
4316 if (IS_INT_LNG_TYPE(md->returntype.type)) {
4317 if (IS_2_WORD_TYPE(md->returntype.type))
4318 M_IST(REG_RESULT2, REG_SP, 2 * 4);
4319 M_IST(REG_RESULT, REG_SP, 1 * 4);
/* float results live in st(0); store as double or single */
4322 if (IS_2_WORD_TYPE(md->returntype.type))
4323 emit_fstl_membase(cd, REG_SP, 1 * 4);
4325 emit_fsts_membase(cd, REG_SP, 1 * 4);
4329 #if !defined(NDEBUG)
4330 if (JITDATA_HAS_FLAG_VERBOSECALL(jd))
4331 emit_verbosecall_exit(jd);
4334 /* remove native stackframe info */
/* same datasp argument as for start_native_call above */
4336 M_MOV(REG_SP, REG_ITMP1);
4337 M_AADD_IMM(cd->stackframesize * 4, REG_ITMP1);
4339 M_AST(REG_ITMP1, REG_SP, 0 * 4);
/* helper address loaded; call emission elided */
4340 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
4342 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
4344 /* restore return value */
4346 if (md->returntype.type != TYPE_VOID) {
4347 if (IS_INT_LNG_TYPE(md->returntype.type)) {
4348 if (IS_2_WORD_TYPE(md->returntype.type))
4349 M_ILD(REG_RESULT2, REG_SP, 2 * 4);
4350 M_ILD(REG_RESULT, REG_SP, 1 * 4);
4353 if (IS_2_WORD_TYPE(md->returntype.type))
4354 emit_fldl_membase(cd, REG_SP, 1 * 4);
4356 emit_flds_membase(cd, REG_SP, 1 * 4);
/* pop the stub's frame */
4360 M_AADD_IMM(cd->stackframesize * 4, REG_SP);
4362 /* check for exception */
/* NOTE(review): the test/branch over the handler is elided here */
4369 /* handle exception */
/* REG_ITMP2 presumably holds the pending exception at this point
   (producer elided); pass it in the conventional exception register */
4371 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
/* fault PC = return address, rewound by 2 bytes — presumably the size
   of the indirect call opcode, so the XPC points into the call site */
4372 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
4373 M_ASUB_IMM(2, REG_ITMP2_XPC);
/* load the exception handler entry; the jump emission is elided */
4375 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
4379 /* generate patcher stubs */
4381 emit_patcher_stubs(jd);
4385 return code->entrypoint;
4390 * These are local overrides for various environment variables in Emacs.
4391 * Please do not remove this and leave it at the end of the file, where
4392 * Emacs will automagically detect them.
4393 * ---------------------------------------------------------------------
4396 * indent-tabs-mode: t
4400 * vim:noexpandtab:sw=4:ts=4: