1 /* src/vm/jit/i386/codegen.c - machine code generator for i386
3 Copyright (C) 1996-2005, 2006 R. Grafl, A. Krall, C. Kruegel,
4 C. Oates, R. Obermaisser, M. Platter, M. Probst, S. Ring,
5 E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich,
6 J. Wenninger, Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
25 Contact: cacao@cacaojvm.org
27 Authors: Andreas Krall
30 Changes: Joseph Wenninger
34 $Id: codegen.c 5653 2006-10-03 20:26:10Z edwin $
46 #include "vm/jit/i386/md-abi.h"
48 #include "vm/jit/i386/codegen.h"
49 #include "vm/jit/i386/emit.h"
51 #include "mm/memory.h"
52 #include "native/jni.h"
53 #include "native/native.h"
55 #if defined(ENABLE_THREADS)
56 # include "threads/native/lock.h"
59 #include "vm/builtin.h"
60 #include "vm/exceptions.h"
61 #include "vm/global.h"
62 #include "vm/loader.h"
63 #include "vm/options.h"
64 #include "vm/stringlocal.h"
67 #include "vm/jit/asmpart.h"
68 #include "vm/jit/codegen-common.h"
69 #include "vm/jit/dseg.h"
70 #include "vm/jit/emit-common.h"
71 #include "vm/jit/jit.h"
72 #include "vm/jit/parse.h"
73 #include "vm/jit/patcher.h"
74 #include "vm/jit/reg.h"
75 #include "vm/jit/replace.h"
77 #if defined(ENABLE_SSA)
78 # include "vm/jit/optimizing/lsra.h"
79 # include "vm/jit/optimizing/ssa.h"
80 #elif defined(ENABLE_LSRA)
81 # include "vm/jit/allocator/lsra.h"
85 /* codegen *********************************************************************
87 Generates machine code.
89 *******************************************************************************/
91 #if defined(ENABLE_SSA)
92 void cg_move(codegendata *cd, s4 type, s4 src_regoff, s4 src_flags,
93 s4 dst_regoff, s4 dst_flags);
94 void codegen_insert_phi_moves(jitdata *jd, basicblock *bptr);
97 bool codegen(jitdata *jd)
103 s4 len, s1, s2, s3, d, disp;
109 methodinfo *lm; /* local methodinfo for ICMD_INVOKE* */
110 builtintable_entry *bte;
112 rplpoint *replacementpoint;
115 #if defined(ENABLE_SSA)
117 bool last_cmd_was_goto;
119 last_cmd_was_goto = false;
123 /* get required compiler data */
130 /* prevent compiler warnings */
140 s4 savedregs_num = 0;
143 /* space to save used callee saved registers */
145 savedregs_num += (INT_SAV_CNT - rd->savintreguse);
147 /* float register are saved on 2 4-byte stackslots */
148 savedregs_num += (FLT_SAV_CNT - rd->savfltreguse) * 2;
150 cd->stackframesize = rd->memuse + savedregs_num;
153 #if defined(ENABLE_THREADS)
154 /* space to save argument of monitor_enter */
156 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
157 /* reserve 2 slots for long/double return values for monitorexit */
159 if (IS_2_WORD_TYPE(m->parseddesc->returntype.type))
160 cd->stackframesize += 2;
162 cd->stackframesize++;
166 /* create method header */
168 /* Keep stack of non-leaf functions 16-byte aligned. */
170 if (!jd->isleafmethod)
171 cd->stackframesize |= 0x3;
173 (void) dseg_addaddress(cd, code); /* CodeinfoPointer */
174 (void) dseg_adds4(cd, cd->stackframesize * 4); /* FrameSize */
176 #if defined(ENABLE_THREADS)
177 /* IsSync contains the offset relative to the stack pointer for the
178 argument of monitor_exit used in the exception handler. Since the
179 offset could be zero and give a wrong meaning of the flag it is
183 if (checksync && (m->flags & ACC_SYNCHRONIZED))
184 (void) dseg_adds4(cd, (rd->memuse + 1) * 4); /* IsSync */
187 (void) dseg_adds4(cd, 0); /* IsSync */
189 (void) dseg_adds4(cd, jd->isleafmethod); /* IsLeaf */
190 (void) dseg_adds4(cd, INT_SAV_CNT - rd->savintreguse); /* IntSave */
191 (void) dseg_adds4(cd, FLT_SAV_CNT - rd->savfltreguse); /* FltSave */
193 /* add a reference for the length of the line number table. We do not
194 know the size yet, since we evaluate the information during code
195 generation, to save one additional iteration over the whole
196 instructions. During code optimization the position could have changed
197 from the information taken from the class file */
198 (void) dseg_addlinenumbertablesize(cd);
200 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
202 /* create exception table */
204 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
205 dseg_addtarget(cd, ex->start);
206 dseg_addtarget(cd, ex->end);
207 dseg_addtarget(cd, ex->handler);
208 (void) dseg_addaddress(cd, ex->catchtype.any);
211 /* generate method profiling code */
213 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
214 /* count frequency */
216 M_MOV_IMM(code, REG_ITMP3);
217 M_IADD_IMM_MEMBASE(1, REG_ITMP3, OFFSET(codeinfo, frequency));
220 /* create stack frame (if necessary) */
222 if (cd->stackframesize)
223 M_ASUB_IMM(cd->stackframesize * 4, REG_SP);
225 /* save return address and used callee saved registers */
227 p = cd->stackframesize;
228 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
229 p--; M_AST(rd->savintregs[i], REG_SP, p * 4);
231 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
232 p-=2; emit_fld_reg(cd, rd->savfltregs[i]); emit_fstpl_membase(cd, REG_SP, p * 4);
235 /* take arguments out of register or stack frame */
240 for (p = 0, l = 0; p < md->paramcount; p++) {
241 t = md->paramtypes[p].type;
243 #if defined(ENABLE_SSA)
248 varindex = jd->local_map[l * 5 + t];
250 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
253 if (varindex == UNUSED)
258 s1 = md->params[p].regoff;
260 if (IS_INT_LNG_TYPE(t)) { /* integer args */
261 if (!md->params[p].inmemory) { /* register arguments */
262 log_text("integer register argument");
264 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
265 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff */
267 else { /* reg arg -> spilled */
268 /* rd->argintregs[md->params[p].regoff -> var->vv.regoff * 4 */
271 else { /* stack arguments */
272 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
273 emit_mov_membase_reg( /* + 4 for return address */
274 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4, var->vv.regoff);
275 /* + 4 for return address */
277 else { /* stack arg -> spilled */
278 if (!IS_2_WORD_TYPE(t)) {
279 #if defined(ENABLE_SSA)
280 /* no copy avoiding by now possible with SSA */
282 emit_mov_membase_reg( /* + 4 for return address */
283 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4,
285 emit_mov_reg_membase(
286 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
289 #endif /*defined(ENABLE_SSA)*/
290 /* reuse stack slot and avoid copying */
291 var->vv.regoff = cd->stackframesize + s1 + 1;
295 #if defined(ENABLE_SSA)
296 /* no copy avoiding by now possible with SSA */
298 emit_mov_membase_reg( /* + 4 for return address */
299 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4,
301 emit_mov_reg_membase(
302 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
303 emit_mov_membase_reg( /* + 4 for return address */
304 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4 + 4,
306 emit_mov_reg_membase(
307 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4 + 4);
310 #endif /*defined(ENABLE_SSA)*/
311 /* reuse stack slot and avoid copying */
312 var->vv.regoff = cd->stackframesize + s1 + 1;
317 else { /* floating args */
318 if (!md->params[p].inmemory) { /* register arguments */
319 log_text("There are no float argument registers!");
321 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
322 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff */
323 } else { /* reg arg -> spilled */
324 /* rd->argfltregs[md->params[p].regoff -> var->vv.regoff * 4 */
328 else { /* stack arguments */
329 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
332 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
334 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
339 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
341 /* emit_fstp_reg(cd, var->vv.regoff + fpu_st_offset); */
344 } else { /* stack-arg -> spilled */
345 #if defined(ENABLE_SSA)
346 /* no copy avoiding by now possible with SSA */
348 emit_mov_membase_reg(
349 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4, REG_ITMP1);
350 emit_mov_reg_membase(
351 cd, REG_ITMP1, REG_SP, var->vv.regoff * 4);
354 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
355 emit_fstps_membase(cd, REG_SP, var->vv.regoff * 4);
359 cd, REG_SP, (cd->stackframesize + s1) * 4 + 4);
360 emit_fstpl_membase(cd, REG_SP, var->vv.regoff * 4);
364 #endif /*defined(ENABLE_SSA)*/
365 /* reuse stack slot and avoid copying */
366 var->vv.regoff = cd->stackframesize + s1 + 1;
372 /* call monitorenter function */
374 #if defined(ENABLE_THREADS)
375 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
378 if (m->flags & ACC_STATIC) {
379 M_MOV_IMM(&m->class->object.header, REG_ITMP1);
382 M_ALD(REG_ITMP1, REG_SP, cd->stackframesize * 4 + 4);
385 codegen_add_nullpointerexception_ref(cd);
388 M_AST(REG_ITMP1, REG_SP, s1 * 4);
389 M_AST(REG_ITMP1, REG_SP, 0 * 4);
390 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
396 if (JITDATA_HAS_FLAG_VERBOSECALL(jd))
397 emit_verbosecall_enter(jd);
402 #if defined(ENABLE_SSA)
403 /* with SSA Header is Basic Block 0 - insert phi Moves if necessary */
405 codegen_insert_phi_moves(jd, ls->basicblocks[0]);
408 /* end of header generation */
410 replacementpoint = jd->code->rplpoints;
412 /* walk through all basic blocks */
413 for (bptr = jd->new_basicblocks; bptr != NULL; bptr = bptr->next) {
415 bptr->mpc = (s4) (cd->mcodeptr - cd->mcodebase);
417 if (bptr->flags >= BBREACHED) {
419 /* branch resolving */
422 for (brefs = bptr->branchrefs; brefs != NULL; brefs = brefs->next) {
423 gen_resolvebranch(cd->mcodebase + brefs->branchpos,
429 /* handle replacement points */
431 if (bptr->bitflags & BBFLAG_REPLACEMENT) {
432 replacementpoint->pc = (u1*)bptr->mpc; /* will be resolved later */
436 assert(cd->lastmcodeptr <= cd->mcodeptr);
437 cd->lastmcodeptr = cd->mcodeptr + 5; /* 5 byte jmp patch */
441 /* copy interface registers to their destination */
447 /* generate basic block profiling code */
449 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
450 /* count frequency */
452 M_MOV_IMM(code->bbfrequency, REG_ITMP3);
453 M_IADD_IMM_MEMBASE(1, REG_ITMP3, bptr->nr * 4);
457 #if defined(ENABLE_LSRA) || defined(ENABLE_SSA)
458 # if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
461 # if defined(ENABLE_SSA)
463 last_cmd_was_goto = false;
467 var = VAR(bptr->invars[len]);
468 if (bptr->type != BBTYPE_STD) {
469 if (!IS_2_WORD_TYPE(var->type)) {
470 if (bptr->type == BBTYPE_EXH) {
471 d = codegen_reg_of_var(0, var, REG_ITMP1);
472 M_INTMOVE(REG_ITMP1, d);
473 emit_store(jd, NULL, var, d);
477 log_text("copy interface registers(EXH, SBR): longs \
478 have to be in memory (begin 1)");
486 #endif /* defined(ENABLE_LSRA) || defined(ENABLE_SSA) */
490 var = VAR(bptr->invars[len]);
491 if ((len == bptr->indepth-1) && (bptr->type != BBTYPE_STD)) {
492 if (!IS_2_WORD_TYPE(var->type)) {
493 if (bptr->type == BBTYPE_EXH) {
494 d = codegen_reg_of_var(0, var, REG_ITMP1);
495 M_INTMOVE(REG_ITMP1, d);
496 emit_store(jd, NULL, var, d);
500 log_text("copy interface registers: longs have to be in \
507 assert((var->flags & INOUT));
512 /* walk through all instructions */
517 for (iptr = bptr->iinstr; len > 0; len--, iptr++) {
518 if (iptr->line != currentline) {
519 dseg_addlinenumber(cd, iptr->line);
520 currentline = iptr->line;
523 MCODECHECK(1024); /* 1kB should be enough */
526 case ICMD_INLINE_START:
529 insinfo_inline *insinfo = (insinfo_inline *) iptr->target;
530 #if defined(ENABLE_THREADS)
531 if (insinfo->synchronize) {
532 /* add monitor enter code */
533 if (insinfo->method->flags & ACC_STATIC) {
534 M_MOV_IMM(&insinfo->method->class->object.header, REG_ITMP1);
535 M_AST(REG_ITMP1, REG_SP, 0 * 4);
538 /* nullpointer check must have been performed before */
539 /* (XXX not done, yet) */
540 var = &(rd->locals[insinfo->synclocal][TYPE_ADR]);
541 if (var->flags & INMEMORY) {
542 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff * 4, REG_ITMP1);
543 M_AST(REG_ITMP1, REG_SP, 0 * 4);
546 M_AST(var->vv.regoff, REG_SP, 0 * 4);
550 M_MOV_IMM(LOCK_monitor_enter, REG_ITMP3);
554 dseg_addlinenumber_inline_start(cd, iptr);
559 case ICMD_INLINE_END:
562 insinfo_inline *insinfo = (insinfo_inline *) iptr->target;
564 dseg_addlinenumber_inline_end(cd, iptr);
565 dseg_addlinenumber(cd, iptr->line);
567 #if defined(ENABLE_THREADS)
568 if (insinfo->synchronize) {
569 /* add monitor exit code */
570 if (insinfo->method->flags & ACC_STATIC) {
571 M_MOV_IMM(&insinfo->method->class->object.header, REG_ITMP1);
572 M_AST(REG_ITMP1, REG_SP, 0 * 4);
575 var = &(rd->locals[insinfo->synclocal][TYPE_ADR]);
576 if (var->flags & INMEMORY) {
577 M_ALD(REG_ITMP1, REG_SP, var->vv.regoff * 4);
578 M_AST(REG_ITMP1, REG_SP, 0 * 4);
581 M_AST(var->vv.regoff, REG_SP, 0 * 4);
585 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
593 case ICMD_NOP: /* ... ==> ... */
596 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
598 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
601 codegen_add_nullpointerexception_ref(cd);
604 /* constant operations ************************************************/
606 case ICMD_ICONST: /* ... ==> ..., constant */
608 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
609 ICONST(d, iptr->sx.val.i);
610 emit_store_dst(jd, iptr, d);
613 case ICMD_LCONST: /* ... ==> ..., constant */
615 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
616 LCONST(d, iptr->sx.val.l);
617 emit_store_dst(jd, iptr, d);
620 case ICMD_FCONST: /* ... ==> ..., constant */
622 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
623 if (iptr->sx.val.f == 0.0) {
627 if (iptr->sx.val.i == 0x80000000) {
631 } else if (iptr->sx.val.f == 1.0) {
634 } else if (iptr->sx.val.f == 2.0) {
640 disp = dseg_addfloat(cd, iptr->sx.val.f);
641 emit_mov_imm_reg(cd, 0, REG_ITMP1);
643 emit_flds_membase(cd, REG_ITMP1, disp);
645 emit_store_dst(jd, iptr, d);
648 case ICMD_DCONST: /* ... ==> ..., constant */
650 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
651 if (iptr->sx.val.d == 0.0) {
655 if (iptr->sx.val.l == 0x8000000000000000LL) {
659 } else if (iptr->sx.val.d == 1.0) {
662 } else if (iptr->sx.val.d == 2.0) {
668 disp = dseg_adddouble(cd, iptr->sx.val.d);
669 emit_mov_imm_reg(cd, 0, REG_ITMP1);
671 emit_fldl_membase(cd, REG_ITMP1, disp);
673 emit_store_dst(jd, iptr, d);
676 case ICMD_ACONST: /* ... ==> ..., constant */
678 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
680 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
681 codegen_addpatchref(cd, PATCHER_aconst,
682 iptr->sx.val.c.ref, 0);
684 if (opt_showdisassemble) {
685 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
691 if (iptr->sx.val.anyptr == NULL)
694 M_MOV_IMM(iptr->sx.val.anyptr, d);
696 emit_store_dst(jd, iptr, d);
700 /* load/store/copy/move operations ************************************/
715 emit_copy(jd, iptr, VAROP(iptr->s1), VAROP(iptr->dst));
719 /* pop operations *****************************************************/
721 /* attention: double and longs are only one entry in CACAO ICMDs */
723 case ICMD_POP: /* ..., value ==> ... */
724 case ICMD_POP2: /* ..., value, value ==> ... */
729 /* integer operations *************************************************/
731 case ICMD_INEG: /* ..., value ==> ..., - value */
733 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
734 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
737 emit_store_dst(jd, iptr, d);
740 case ICMD_LNEG: /* ..., value ==> ..., - value */
742 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
743 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
745 M_NEG(GET_LOW_REG(d));
746 M_IADDC_IMM(0, GET_HIGH_REG(d));
747 M_NEG(GET_HIGH_REG(d));
748 emit_store_dst(jd, iptr, d);
751 case ICMD_I2L: /* ..., value ==> ..., value */
753 s1 = emit_load_s1(jd, iptr, EAX);
754 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
757 M_LNGMOVE(EAX_EDX_PACKED, d);
758 emit_store_dst(jd, iptr, d);
761 case ICMD_L2I: /* ..., value ==> ..., value */
763 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
764 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
766 emit_store_dst(jd, iptr, d);
769 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
771 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
772 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
776 emit_store_dst(jd, iptr, d);
779 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
781 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
782 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
784 emit_store_dst(jd, iptr, d);
787 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
789 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
790 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
792 emit_store_dst(jd, iptr, d);
796 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
798 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
799 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
800 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
807 emit_store_dst(jd, iptr, d);
811 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
812 /* sx.val.i = constant */
814 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
815 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
817 /* `inc reg' is slower on p4's (regarding to ia32
818 optimization reference manual and benchmarks) and as
822 M_IADD_IMM(iptr->sx.val.i, d);
823 emit_store_dst(jd, iptr, d);
826 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
828 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
829 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
830 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
831 M_INTMOVE(s1, GET_LOW_REG(d));
832 M_IADD(s2, GET_LOW_REG(d));
833 /* don't use REG_ITMP1 */
834 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
835 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
836 M_INTMOVE(s1, GET_HIGH_REG(d));
837 M_IADDC(s2, GET_HIGH_REG(d));
838 emit_store_dst(jd, iptr, d);
841 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
842 /* sx.val.l = constant */
844 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
845 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
847 M_IADD_IMM(iptr->sx.val.l, GET_LOW_REG(d));
848 M_IADDC_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
849 emit_store_dst(jd, iptr, d);
852 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
854 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
855 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
856 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
858 M_INTMOVE(s1, REG_ITMP1);
859 M_ISUB(s2, REG_ITMP1);
860 M_INTMOVE(REG_ITMP1, d);
866 emit_store_dst(jd, iptr, d);
869 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
870 /* sx.val.i = constant */
872 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
873 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
875 M_ISUB_IMM(iptr->sx.val.i, d);
876 emit_store_dst(jd, iptr, d);
879 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
881 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
882 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
883 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
884 if (s2 == GET_LOW_REG(d)) {
885 M_INTMOVE(s1, REG_ITMP1);
886 M_ISUB(s2, REG_ITMP1);
887 M_INTMOVE(REG_ITMP1, GET_LOW_REG(d));
890 M_INTMOVE(s1, GET_LOW_REG(d));
891 M_ISUB(s2, GET_LOW_REG(d));
893 /* don't use REG_ITMP1 */
894 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
895 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
896 if (s2 == GET_HIGH_REG(d)) {
897 M_INTMOVE(s1, REG_ITMP2);
898 M_ISUBB(s2, REG_ITMP2);
899 M_INTMOVE(REG_ITMP2, GET_HIGH_REG(d));
902 M_INTMOVE(s1, GET_HIGH_REG(d));
903 M_ISUBB(s2, GET_HIGH_REG(d));
905 emit_store_dst(jd, iptr, d);
908 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
909 /* sx.val.l = constant */
911 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
912 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
914 M_ISUB_IMM(iptr->sx.val.l, GET_LOW_REG(d));
915 M_ISUBB_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
916 emit_store_dst(jd, iptr, d);
919 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
921 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
922 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
923 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
930 emit_store_dst(jd, iptr, d);
933 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
934 /* sx.val.i = constant */
936 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
937 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
938 M_IMUL_IMM(s1, iptr->sx.val.i, d);
939 emit_store_dst(jd, iptr, d);
942 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
944 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
945 s2 = emit_load_s2_low(jd, iptr, EDX);
946 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
948 M_INTMOVE(s1, REG_ITMP2);
949 M_IMUL(s2, REG_ITMP2);
951 s1 = emit_load_s1_low(jd, iptr, EAX);
952 s2 = emit_load_s2_high(jd, iptr, EDX);
955 M_IADD(EDX, REG_ITMP2);
957 s1 = emit_load_s1_low(jd, iptr, EAX);
958 s2 = emit_load_s2_low(jd, iptr, EDX);
961 M_INTMOVE(EAX, GET_LOW_REG(d));
962 M_IADD(REG_ITMP2, GET_HIGH_REG(d));
964 emit_store_dst(jd, iptr, d);
967 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
968 /* sx.val.l = constant */
970 s1 = emit_load_s1_low(jd, iptr, REG_ITMP2);
971 d = codegen_reg_of_dst(jd, iptr, EAX_EDX_PACKED);
972 ICONST(EAX, iptr->sx.val.l);
974 M_IMUL_IMM(s1, iptr->sx.val.l >> 32, REG_ITMP2);
975 M_IADD(REG_ITMP2, EDX);
976 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
977 M_IMUL_IMM(s1, iptr->sx.val.l, REG_ITMP2);
978 M_IADD(REG_ITMP2, EDX);
979 M_LNGMOVE(EAX_EDX_PACKED, d);
980 emit_store_dst(jd, iptr, d);
983 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
985 s1 = emit_load_s1(jd, iptr, EAX);
986 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
987 d = codegen_reg_of_dst(jd, iptr, EAX);
992 codegen_add_arithmeticexception_ref(cd);
995 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
997 /* check as described in jvm spec */
999 M_CMP_IMM(0x80000000, EAX);
1006 M_INTMOVE(EAX, d); /* if INMEMORY then d is already EAX */
1007 emit_store_dst(jd, iptr, d);
1010 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1012 s1 = emit_load_s1(jd, iptr, EAX);
1013 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1014 d = codegen_reg_of_dst(jd, iptr, EDX);
1019 codegen_add_arithmeticexception_ref(cd);
1022 M_INTMOVE(s1, EAX); /* we need the first operand in EAX */
1024 /* check as described in jvm spec */
1026 M_CMP_IMM(0x80000000, EAX);
1034 M_INTMOVE(EDX, d); /* if INMEMORY then d is already EDX */
1035 emit_store_dst(jd, iptr, d);
1038 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1039 /* sx.val.i = constant */
1041 /* TODO: optimize for `/ 2' */
1042 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1043 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1047 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, d);/* 32-bit for jump off */
1048 M_SRA_IMM(iptr->sx.val.i, d);
1049 emit_store_dst(jd, iptr, d);
1052 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1053 /* sx.val.i = constant */
1055 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1056 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1058 M_MOV(s1, REG_ITMP1);
1062 M_AND_IMM(iptr->sx.val.i, d);
1064 M_BGE(2 + 2 + 6 + 2);
1065 M_MOV(s1, d); /* don't use M_INTMOVE, so we know the jump offset */
1067 M_AND_IMM32(iptr->sx.val.i, d); /* use 32-bit for jump offset */
1069 emit_store_dst(jd, iptr, d);
1072 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1073 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1075 s2 = emit_load_s2(jd, iptr, REG_ITMP12_PACKED);
1076 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1078 M_INTMOVE(GET_LOW_REG(s2), REG_ITMP3);
1079 M_OR(GET_HIGH_REG(s2), REG_ITMP3);
1081 codegen_add_arithmeticexception_ref(cd);
1083 bte = iptr->sx.s23.s3.bte;
1086 M_LST(s2, REG_SP, 2 * 4);
1088 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1089 M_LST(s1, REG_SP, 0 * 4);
1091 M_MOV_IMM(bte->fp, REG_ITMP3);
1093 emit_store_dst(jd, iptr, d);
1096 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1097 /* sx.val.i = constant */
1099 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1100 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1102 M_TEST(GET_HIGH_REG(d));
1104 M_IADD_IMM32((1 << iptr->sx.val.i) - 1, GET_LOW_REG(d));
1105 M_IADDC_IMM(0, GET_HIGH_REG(d));
1106 M_SRLD_IMM(iptr->sx.val.i, GET_HIGH_REG(d), GET_LOW_REG(d));
1107 M_SRA_IMM(iptr->sx.val.i, GET_HIGH_REG(d));
1108 emit_store_dst(jd, iptr, d);
1112 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1113 /* sx.val.l = constant */
1115 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1116 if (iptr->dst.var->flags & INMEMORY) {
1117 if (iptr->s1.var->flags & INMEMORY) {
1118 /* Alpha algorithm */
1120 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 4);
1122 CALCOFFSETBYTES(disp, REG_SP, iptr->s1.var->vv.regoff * 4 + 4);
1128 /* TODO: hmm, don't know if this is always correct */
1130 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l & 0x00000000ffffffff);
1132 CALCIMMEDIATEBYTES(disp, iptr->sx.val.l >> 32);
1138 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4, REG_ITMP1);
1139 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4 + 4, REG_ITMP2);
1141 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1142 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1143 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, iptr->s1.var->vv.regoff * 4 + 4);
1144 emit_jcc(cd, CC_GE, disp);
1146 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4, REG_ITMP1);
1147 emit_mov_membase_reg(cd, REG_SP, iptr->s1.var->vv.regoff * 4 + 4, REG_ITMP2);
1149 emit_neg_reg(cd, REG_ITMP1);
1150 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1151 emit_neg_reg(cd, REG_ITMP2);
1153 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l, REG_ITMP1);
1154 emit_alu_imm_reg(cd, ALU_AND, iptr->sx.val.l >> 32, REG_ITMP2);
1156 emit_neg_reg(cd, REG_ITMP1);
1157 emit_alu_imm_reg(cd, ALU_ADC, 0, REG_ITMP2);
1158 emit_neg_reg(cd, REG_ITMP2);
1160 emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst.var->vv.regoff * 4);
1161 emit_mov_reg_membase(cd, REG_ITMP2, REG_SP, iptr->dst.var->vv.regoff * 4 + 4);
1165 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1166 d = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
1168 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1169 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1170 M_TEST(GET_LOW_REG(s1));
1176 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1178 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1179 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1180 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1181 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1184 emit_store_dst(jd, iptr, d);
1187 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1188 /* sx.val.i = constant */
1190 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1191 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1193 M_SLL_IMM(iptr->sx.val.i, d);
1194 emit_store_dst(jd, iptr, d);
1197 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1199 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1200 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1201 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1202 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1205 emit_store_dst(jd, iptr, d);
1208 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1209 /* sx.val.i = constant */
1211 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1212 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1214 M_SRA_IMM(iptr->sx.val.i, d);
1215 emit_store_dst(jd, iptr, d);
1218 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1220 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1221 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1222 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1223 M_INTMOVE(s2, ECX); /* s2 may be equal to d */
1226 emit_store_dst(jd, iptr, d);
1229 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1230 /* sx.val.i = constant */
1232 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1233 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1235 M_SRL_IMM(iptr->sx.val.i, d);
1236 emit_store_dst(jd, iptr, d);
1239 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1241 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1242 s2 = emit_load_s2(jd, iptr, ECX);
1243 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1246 M_TEST_IMM(32, ECX);
1248 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1249 M_CLR(GET_LOW_REG(d));
1250 M_SLLD(GET_LOW_REG(d), GET_HIGH_REG(d));
1251 M_SLL(GET_LOW_REG(d));
1252 emit_store_dst(jd, iptr, d);
1255 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1256 /* sx.val.i = constant */
1258 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1259 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1261 if (iptr->sx.val.i & 0x20) {
1262 M_MOV(GET_LOW_REG(d), GET_HIGH_REG(d));
1263 M_CLR(GET_LOW_REG(d));
1264 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1268 M_SLLD_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d),
1270 M_SLL_IMM(iptr->sx.val.i & 0x3f, GET_LOW_REG(d));
1272 emit_store_dst(jd, iptr, d);
1275 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1277 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1278 s2 = emit_load_s2(jd, iptr, ECX);
1279 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1282 M_TEST_IMM(32, ECX);
1284 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1285 M_SRA_IMM(31, GET_HIGH_REG(d));
1286 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1287 M_SRA(GET_HIGH_REG(d));
1288 emit_store_dst(jd, iptr, d);
1291 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1292 /* sx.val.i = constant */
1294 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1295 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1297 if (iptr->sx.val.i & 0x20) {
1298 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1299 M_SRA_IMM(31, GET_HIGH_REG(d));
1300 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1304 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1306 M_SRA_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1308 emit_store_dst(jd, iptr, d);
1311 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1313 s1 = emit_load_s1(jd, iptr, REG_ITMP13_PACKED);
1314 s2 = emit_load_s2(jd, iptr, ECX);
1315 d = codegen_reg_of_dst(jd, iptr, REG_ITMP13_PACKED);
1318 M_TEST_IMM(32, ECX);
1320 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1321 M_CLR(GET_HIGH_REG(d));
1322 M_SRLD(GET_HIGH_REG(d), GET_LOW_REG(d));
1323 M_SRL(GET_HIGH_REG(d));
1324 emit_store_dst(jd, iptr, d);
1327 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1328 /* sx.val.l = constant */
1330 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1331 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1333 if (iptr->sx.val.i & 0x20) {
1334 M_MOV(GET_HIGH_REG(d), GET_LOW_REG(d));
1335 M_CLR(GET_HIGH_REG(d));
1336 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1340 M_SRLD_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d),
1342 M_SRL_IMM(iptr->sx.val.i & 0x3f, GET_HIGH_REG(d));
1344 emit_store_dst(jd, iptr, d);
1347 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1349 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1350 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1351 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1358 emit_store_dst(jd, iptr, d);
1361 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1362 /* sx.val.i = constant */
1364 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1365 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1367 M_AND_IMM(iptr->sx.val.i, d);
1368 emit_store_dst(jd, iptr, d);
1371 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1373 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1374 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1375 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1376 if (s2 == GET_LOW_REG(d))
1377 M_AND(s1, GET_LOW_REG(d));
1379 M_INTMOVE(s1, GET_LOW_REG(d));
1380 M_AND(s2, GET_LOW_REG(d));
1382 /* REG_ITMP1 probably contains low 32-bit of destination */
1383 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1384 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1385 if (s2 == GET_HIGH_REG(d))
1386 M_AND(s1, GET_HIGH_REG(d));
1388 M_INTMOVE(s1, GET_HIGH_REG(d));
1389 M_AND(s2, GET_HIGH_REG(d));
1391 emit_store_dst(jd, iptr, d);
1394 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1395 /* sx.val.l = constant */
1397 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1398 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1400 M_AND_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1401 M_AND_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1402 emit_store_dst(jd, iptr, d);
1405 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1407 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1408 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1409 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1416 emit_store_dst(jd, iptr, d);
1419 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1420 /* sx.val.i = constant */
1422 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1423 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1425 M_OR_IMM(iptr->sx.val.i, d);
1426 emit_store_dst(jd, iptr, d);
1429 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1431 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1432 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1433 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1434 if (s2 == GET_LOW_REG(d))
1435 M_OR(s1, GET_LOW_REG(d));
1437 M_INTMOVE(s1, GET_LOW_REG(d));
1438 M_OR(s2, GET_LOW_REG(d));
1440 /* REG_ITMP1 probably contains low 32-bit of destination */
1441 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1442 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1443 if (s2 == GET_HIGH_REG(d))
1444 M_OR(s1, GET_HIGH_REG(d));
1446 M_INTMOVE(s1, GET_HIGH_REG(d));
1447 M_OR(s2, GET_HIGH_REG(d));
1449 emit_store_dst(jd, iptr, d);
1452 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1453 /* sx.val.l = constant */
1455 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1456 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1458 M_OR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1459 M_OR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1460 emit_store_dst(jd, iptr, d);
1463 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1465 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1466 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
1467 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
1474 emit_store_dst(jd, iptr, d);
1477 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1478 /* sx.val.i = constant */
1480 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1481 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1483 M_XOR_IMM(iptr->sx.val.i, d);
1484 emit_store_dst(jd, iptr, d);
1487 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1489 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
1490 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
1491 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1492 if (s2 == GET_LOW_REG(d))
1493 M_XOR(s1, GET_LOW_REG(d));
1495 M_INTMOVE(s1, GET_LOW_REG(d));
1496 M_XOR(s2, GET_LOW_REG(d));
1498 /* REG_ITMP1 probably contains low 32-bit of destination */
1499 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
1500 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
1501 if (s2 == GET_HIGH_REG(d))
1502 M_XOR(s1, GET_HIGH_REG(d));
1504 M_INTMOVE(s1, GET_HIGH_REG(d));
1505 M_XOR(s2, GET_HIGH_REG(d));
1507 emit_store_dst(jd, iptr, d);
1510 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1511 /* sx.val.l = constant */
1513 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
1514 d = codegen_reg_of_dst(jd, iptr, REG_ITMP12_PACKED);
1516 M_XOR_IMM(iptr->sx.val.l, GET_LOW_REG(d));
1517 M_XOR_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(d));
1518 emit_store_dst(jd, iptr, d);
1522 /* floating operations ************************************************/
1524 case ICMD_FNEG: /* ..., value ==> ..., - value */
1526 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1527 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1529 emit_store_dst(jd, iptr, d);
1532 case ICMD_DNEG: /* ..., value ==> ..., - value */
1534 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1535 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1537 emit_store_dst(jd, iptr, d);
1540 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1542 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1543 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1544 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1546 emit_store_dst(jd, iptr, d);
1549 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1551 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1552 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1553 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1555 emit_store_dst(jd, iptr, d);
1558 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1560 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1561 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1562 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1564 emit_store_dst(jd, iptr, d);
1567 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1569 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1570 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1571 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1573 emit_store_dst(jd, iptr, d);
1576 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1578 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1579 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1580 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1582 emit_store_dst(jd, iptr, d);
1585 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1587 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1588 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1589 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1591 emit_store_dst(jd, iptr, d);
1594 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1596 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1597 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1598 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1600 emit_store_dst(jd, iptr, d);
1603 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1605 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1606 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1607 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1609 emit_store_dst(jd, iptr, d);
1612 case ICMD_FREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1614 /* exchanged to skip fxch */
1615 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1616 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1617 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1618 /* emit_fxch(cd); */
1623 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1624 emit_store_dst(jd, iptr, d);
1625 emit_ffree_reg(cd, 0);
1629 case ICMD_DREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1631 /* exchanged to skip fxch */
1632 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
1633 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1634 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1635 /* emit_fxch(cd); */
1640 emit_jcc(cd, CC_P, -(2 + 1 + 2 + 1 + 6));
1641 emit_store_dst(jd, iptr, d);
1642 emit_ffree_reg(cd, 0);
1646 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1647 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1649 var = VAROP(iptr->s1);
1650 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1652 if (var->flags & INMEMORY) {
1653 emit_fildl_membase(cd, REG_SP, var->vv.regoff * 4);
1655 disp = dseg_adds4(cd, 0);
1656 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1658 emit_mov_reg_membase(cd, var->vv.regoff, REG_ITMP1, disp);
1659 emit_fildl_membase(cd, REG_ITMP1, disp);
1662 emit_store_dst(jd, iptr, d);
1665 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1666 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1668 var = VAROP(iptr->s1);
1669 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
1670 if (var->flags & INMEMORY) {
1671 emit_fildll_membase(cd, REG_SP, var->vv.regoff * 4);
1674 log_text("L2F: longs have to be in memory");
1677 emit_store_dst(jd, iptr, d);
1680 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1682 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1683 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1685 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1688 /* Round to zero, 53-bit mode, exception masked */
1689 disp = dseg_adds4(cd, 0x0e7f);
1690 emit_fldcw_membase(cd, REG_ITMP1, disp);
1692 var = VAROP(iptr->dst);
1693 var1 = VAROP(iptr->s1);
1695 if (var->flags & INMEMORY) {
1696 emit_fistpl_membase(cd, REG_SP, var->vv.regoff * 4);
1698 /* Round to nearest, 53-bit mode, exceptions masked */
1699 disp = dseg_adds4(cd, 0x027f);
1700 emit_fldcw_membase(cd, REG_ITMP1, disp);
1702 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1703 REG_SP, var->vv.regoff * 4);
1706 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1708 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1711 disp = dseg_adds4(cd, 0);
1712 emit_fistpl_membase(cd, REG_ITMP1, disp);
1713 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1715 /* Round to nearest, 53-bit mode, exceptions masked */
1716 disp = dseg_adds4(cd, 0x027f);
1717 emit_fldcw_membase(cd, REG_ITMP1, disp);
1719 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1722 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1723 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1726 emit_jcc(cd, CC_NE, disp);
1728 /* XXX: change this when we use registers */
1729 emit_flds_membase(cd, REG_SP, var1->vv.regoff * 4);
1730 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2i, REG_ITMP1);
1731 emit_call_reg(cd, REG_ITMP1);
1733 if (var->flags & INMEMORY) {
1734 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1737 M_INTMOVE(REG_RESULT, var->vv.regoff);
1741 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1743 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1744 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1746 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1749 /* Round to zero, 53-bit mode, exception masked */
1750 disp = dseg_adds4(cd, 0x0e7f);
1751 emit_fldcw_membase(cd, REG_ITMP1, disp);
1753 var = VAROP(iptr->dst);
1754 var1 = VAROP(iptr->s1);
1756 if (var->flags & INMEMORY) {
1757 emit_fistpl_membase(cd, REG_SP, var->vv.regoff * 4);
1759 /* Round to nearest, 53-bit mode, exceptions masked */
1760 disp = dseg_adds4(cd, 0x027f);
1761 emit_fldcw_membase(cd, REG_ITMP1, disp);
1763 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1764 REG_SP, var->vv.regoff * 4);
1767 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1769 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1772 disp = dseg_adds4(cd, 0);
1773 emit_fistpl_membase(cd, REG_ITMP1, disp);
1774 emit_mov_membase_reg(cd, REG_ITMP1, disp, var->vv.regoff);
1776 /* Round to nearest, 53-bit mode, exceptions masked */
1777 disp = dseg_adds4(cd, 0x027f);
1778 emit_fldcw_membase(cd, REG_ITMP1, disp);
1780 emit_alu_imm_reg(cd, ALU_CMP, 0x80000000, var->vv.regoff);
1783 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1784 disp += 5 + 2 + ((REG_RESULT == var->vv.regoff) ? 0 : 2);
1787 emit_jcc(cd, CC_NE, disp);
1789 /* XXX: change this when we use registers */
1790 emit_fldl_membase(cd, REG_SP, var1->vv.regoff * 4);
1791 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2i, REG_ITMP1);
1792 emit_call_reg(cd, REG_ITMP1);
1794 if (var->flags & INMEMORY) {
1795 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1797 M_INTMOVE(REG_RESULT, var->vv.regoff);
1801 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1803 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1804 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1806 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1809 /* Round to zero, 53-bit mode, exception masked */
1810 disp = dseg_adds4(cd, 0x0e7f);
1811 emit_fldcw_membase(cd, REG_ITMP1, disp);
1813 var = VAROP(iptr->dst);
1814 var1 = VAROP(iptr->s1);
1816 if (var->flags & INMEMORY) {
1817 emit_fistpll_membase(cd, REG_SP, var->vv.regoff * 4);
1819 /* Round to nearest, 53-bit mode, exceptions masked */
1820 disp = dseg_adds4(cd, 0x027f);
1821 emit_fldcw_membase(cd, REG_ITMP1, disp);
1823 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1824 REG_SP, var->vv.regoff * 4 + 4);
1827 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1829 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1832 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1834 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4 + 4);
1836 emit_jcc(cd, CC_NE, disp);
1838 emit_alu_imm_membase(cd, ALU_CMP, 0,
1839 REG_SP, var->vv.regoff * 4);
1842 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1844 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1846 emit_jcc(cd, CC_NE, disp);
1848 /* XXX: change this when we use registers */
1849 emit_flds_membase(cd, REG_SP, var1->vv.regoff * 4);
1850 emit_mov_imm_reg(cd, (ptrint) asm_builtin_f2l, REG_ITMP1);
1851 emit_call_reg(cd, REG_ITMP1);
1852 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1853 emit_mov_reg_membase(cd, REG_RESULT2,
1854 REG_SP, var->vv.regoff * 4 + 4);
1857 log_text("F2L: longs have to be in memory");
1862 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1864 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1865 d = codegen_reg_of_dst(jd, iptr, REG_NULL);
1867 emit_mov_imm_reg(cd, 0, REG_ITMP1);
1870 /* Round to zero, 53-bit mode, exception masked */
1871 disp = dseg_adds4(cd, 0x0e7f);
1872 emit_fldcw_membase(cd, REG_ITMP1, disp);
1874 var = VAROP(iptr->dst);
1875 var1 = VAROP(iptr->s1);
1877 if (var->flags & INMEMORY) {
1878 emit_fistpll_membase(cd, REG_SP, var->vv.regoff * 4);
1880 /* Round to nearest, 53-bit mode, exceptions masked */
1881 disp = dseg_adds4(cd, 0x027f);
1882 emit_fldcw_membase(cd, REG_ITMP1, disp);
1884 emit_alu_imm_membase(cd, ALU_CMP, 0x80000000,
1885 REG_SP, var->vv.regoff * 4 + 4);
1888 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1890 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1893 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1895 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4 + 4);
1897 emit_jcc(cd, CC_NE, disp);
1899 emit_alu_imm_membase(cd, ALU_CMP, 0, REG_SP, var->vv.regoff * 4);
1902 CALCOFFSETBYTES(disp, REG_SP, var1->vv.regoff * 4);
1904 CALCOFFSETBYTES(disp, REG_SP, var->vv.regoff * 4);
1906 emit_jcc(cd, CC_NE, disp);
1908 /* XXX: change this when we use registers */
1909 emit_fldl_membase(cd, REG_SP, var1->vv.regoff * 4);
1910 emit_mov_imm_reg(cd, (ptrint) asm_builtin_d2l, REG_ITMP1);
1911 emit_call_reg(cd, REG_ITMP1);
1912 emit_mov_reg_membase(cd, REG_RESULT, REG_SP, var->vv.regoff * 4);
1913 emit_mov_reg_membase(cd, REG_RESULT2,
1914 REG_SP, var->vv.regoff * 4 + 4);
1917 log_text("D2L: longs have to be in memory");
1922 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1924 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1925 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1927 emit_store_dst(jd, iptr, d);
1930 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1932 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
1933 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
1935 emit_store_dst(jd, iptr, d);
1938 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1941 /* exchanged to skip fxch */
1942 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1943 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1944 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1945 /* emit_fxch(cd); */
1948 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as GT */
1949 emit_jcc(cd, CC_E, 6);
1950 emit_alu_imm_reg(cd, ALU_AND, 0x000000ff, EAX);
1952 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1953 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1954 emit_jcc(cd, CC_B, 3 + 5);
1955 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1956 emit_jmp_imm(cd, 3);
1957 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1958 emit_store_dst(jd, iptr, d);
1961 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1964 /* exchanged to skip fxch */
1965 s2 = emit_load_s1(jd, iptr, REG_FTMP1);
1966 s1 = emit_load_s2(jd, iptr, REG_FTMP2);
1967 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1968 /* emit_fxch(cd); */
1971 emit_test_imm_reg(cd, 0x400, EAX); /* unordered treat as LT */
1972 emit_jcc(cd, CC_E, 3);
1973 emit_movb_imm_reg(cd, 1, REG_AH);
1975 emit_mov_imm_reg(cd, 0, d); /* does not affect flags */
1976 emit_jcc(cd, CC_E, 6 + 3 + 5 + 3);
1977 emit_jcc(cd, CC_B, 3 + 5);
1978 emit_alu_imm_reg(cd, ALU_SUB, 1, d);
1979 emit_jmp_imm(cd, 3);
1980 emit_alu_imm_reg(cd, ALU_ADD, 1, d);
1981 emit_store_dst(jd, iptr, d);
1985 /* memory operations **************************************************/
1987 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., length */
1989 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1990 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
1991 gen_nullptr_check(s1);
1992 M_ILD(d, s1, OFFSET(java_arrayheader, size));
1993 emit_store_dst(jd, iptr, d);
1996 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
1998 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
1999 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2000 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2001 if (INSTRUCTION_MUST_CHECK(iptr)) {
2002 gen_nullptr_check(s1);
2005 emit_movsbl_memindex_reg(cd, OFFSET(java_bytearray, data[0]),
2007 emit_store_dst(jd, iptr, d);
2010 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2012 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2013 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2014 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2015 if (INSTRUCTION_MUST_CHECK(iptr)) {
2016 gen_nullptr_check(s1);
2019 emit_movzwl_memindex_reg(cd, OFFSET(java_chararray, data[0]),
2021 emit_store_dst(jd, iptr, d);
2024 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2026 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2027 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2028 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2029 if (INSTRUCTION_MUST_CHECK(iptr)) {
2030 gen_nullptr_check(s1);
2033 emit_movswl_memindex_reg(cd, OFFSET(java_shortarray, data[0]),
2035 emit_store_dst(jd, iptr, d);
2038 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2040 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2041 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2042 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2043 if (INSTRUCTION_MUST_CHECK(iptr)) {
2044 gen_nullptr_check(s1);
2047 emit_mov_memindex_reg(cd, OFFSET(java_intarray, data[0]),
2049 emit_store_dst(jd, iptr, d);
2052 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2054 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2055 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2056 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
2057 if (INSTRUCTION_MUST_CHECK(iptr)) {
2058 gen_nullptr_check(s1);
2062 var = VAROP(iptr->dst);
2064 assert(var->flags & INMEMORY);
2065 emit_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]),
2066 s1, s2, 3, REG_ITMP3);
2067 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff * 4);
2068 emit_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]) + 4,
2069 s1, s2, 3, REG_ITMP3);
2070 emit_mov_reg_membase(cd, REG_ITMP3, REG_SP, var->vv.regoff * 4 + 4);
2073 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2075 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2076 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2077 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2078 if (INSTRUCTION_MUST_CHECK(iptr)) {
2079 gen_nullptr_check(s1);
2082 emit_flds_memindex(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2083 emit_store_dst(jd, iptr, d);
2086 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2088 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2089 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2090 d = codegen_reg_of_dst(jd, iptr, REG_FTMP3);
2091 if (INSTRUCTION_MUST_CHECK(iptr)) {
2092 gen_nullptr_check(s1);
2095 emit_fldl_memindex(cd, OFFSET(java_doublearray, data[0]), s1, s2,3);
2096 emit_store_dst(jd, iptr, d);
2099 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2101 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2102 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2103 d = codegen_reg_of_dst(jd, iptr, REG_ITMP1);
2104 if (INSTRUCTION_MUST_CHECK(iptr)) {
2105 gen_nullptr_check(s1);
2108 emit_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]),
2110 emit_store_dst(jd, iptr, d);
2114 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2116 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2117 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2118 if (INSTRUCTION_MUST_CHECK(iptr)) {
2119 gen_nullptr_check(s1);
2122 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2124 /* because EBP, ESI, EDI have no xH and xL nibbles */
2125 M_INTMOVE(s3, REG_ITMP3);
2128 emit_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]),
2132 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2134 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2135 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2136 if (INSTRUCTION_MUST_CHECK(iptr)) {
2137 gen_nullptr_check(s1);
2140 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2141 emit_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]),
2145 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2147 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2148 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2149 if (INSTRUCTION_MUST_CHECK(iptr)) {
2150 gen_nullptr_check(s1);
2153 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2154 emit_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]),
2158 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2160 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2161 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2162 if (INSTRUCTION_MUST_CHECK(iptr)) {
2163 gen_nullptr_check(s1);
2166 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2167 emit_mov_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]),
2171 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2173 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2174 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2175 if (INSTRUCTION_MUST_CHECK(iptr)) {
2176 gen_nullptr_check(s1);
2180 var = VAROP(iptr->sx.s23.s3);
2182 assert(var->flags & INMEMORY);
2183 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff * 4, REG_ITMP3);
2184 emit_mov_reg_memindex(cd, REG_ITMP3, OFFSET(java_longarray, data[0])
2186 emit_mov_membase_reg(cd, REG_SP, var->vv.regoff * 4 + 4, REG_ITMP3);
2187 emit_mov_reg_memindex(cd, REG_ITMP3,
2188 OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2191 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2193 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2194 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2195 if (INSTRUCTION_MUST_CHECK(iptr)) {
2196 gen_nullptr_check(s1);
2199 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2200 emit_fstps_memindex(cd, OFFSET(java_floatarray, data[0]), s1, s2,2);
2203 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2205 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2206 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2207 if (INSTRUCTION_MUST_CHECK(iptr)) {
2208 gen_nullptr_check(s1);
2211 s3 = emit_load_s3(jd, iptr, REG_FTMP1);
2212 emit_fstpl_memindex(cd, OFFSET(java_doublearray, data[0]),
2216 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2218 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2219 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2220 if (INSTRUCTION_MUST_CHECK(iptr)) {
2221 gen_nullptr_check(s1);
2224 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2226 M_AST(s1, REG_SP, 0 * 4);
2227 M_AST(s3, REG_SP, 1 * 4);
2228 M_MOV_IMM(BUILTIN_canstore, REG_ITMP1);
2232 codegen_add_arraystoreexception_ref(cd);
2234 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2235 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2236 s3 = emit_load_s3(jd, iptr, REG_ITMP3);
2237 emit_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]),
2241 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2243 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2244 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2245 if (INSTRUCTION_MUST_CHECK(iptr)) {
2246 gen_nullptr_check(s1);
2249 emit_movb_imm_memindex(cd, iptr->sx.s23.s3.constval,
2250 OFFSET(java_bytearray, data[0]), s1, s2, 0);
2253 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2255 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2256 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2257 if (INSTRUCTION_MUST_CHECK(iptr)) {
2258 gen_nullptr_check(s1);
2261 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2262 OFFSET(java_chararray, data[0]), s1, s2, 1);
2265 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2267 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2268 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2269 if (INSTRUCTION_MUST_CHECK(iptr)) {
2270 gen_nullptr_check(s1);
2273 emit_movw_imm_memindex(cd, iptr->sx.s23.s3.constval,
2274 OFFSET(java_shortarray, data[0]), s1, s2, 1);
2277 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2279 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2280 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2281 if (INSTRUCTION_MUST_CHECK(iptr)) {
2282 gen_nullptr_check(s1);
2285 emit_mov_imm_memindex(cd, iptr->sx.s23.s3.constval,
2286 OFFSET(java_intarray, data[0]), s1, s2, 2);
2289 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2291 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2292 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2293 if (INSTRUCTION_MUST_CHECK(iptr)) {
2294 gen_nullptr_check(s1);
2297 emit_mov_imm_memindex(cd,
2298 (u4) (iptr->sx.s23.s3.constval & 0x00000000ffffffff),
2299 OFFSET(java_longarray, data[0]), s1, s2, 3);
2300 emit_mov_imm_memindex(cd,
2301 ((s4)iptr->sx.s23.s3.constval) >> 31,
2302 OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2305 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2307 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2308 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2309 if (INSTRUCTION_MUST_CHECK(iptr)) {
2310 gen_nullptr_check(s1);
2313 emit_mov_imm_memindex(cd, 0,
2314 OFFSET(java_objectarray, data[0]), s1, s2, 2);
2318 case ICMD_GETSTATIC: /* ... ==> ..., value */
2320 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2321 unresolved_field *uf = iptr->sx.s23.s3.uf;
2323 fieldtype = uf->fieldref->parseddesc.fd->type;
2325 codegen_addpatchref(cd, PATCHER_get_putstatic,
2326 iptr->sx.s23.s3.uf, 0);
2328 if (opt_showdisassemble) {
2329 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2336 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2338 fieldtype = fi->type;
2340 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class)) {
2341 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2343 if (opt_showdisassemble) {
2344 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2348 disp = (ptrint) &(fi->value);
2351 M_MOV_IMM(disp, REG_ITMP1);
2352 switch (fieldtype) {
2355 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2356 M_ILD(d, REG_ITMP1, 0);
2359 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2360 M_LLD(d, REG_ITMP1, 0);
2363 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2364 M_FLD(d, REG_ITMP1, 0);
2367 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2368 M_DLD(d, REG_ITMP1, 0);
2371 emit_store_dst(jd, iptr, d);
2374 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2376 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2377 unresolved_field *uf = iptr->sx.s23.s3.uf;
2379 fieldtype = uf->fieldref->parseddesc.fd->type;
2381 codegen_addpatchref(cd, PATCHER_get_putstatic,
2382 iptr->sx.s23.s3.uf, 0);
2384 if (opt_showdisassemble) {
2385 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2392 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2394 fieldtype = fi->type;
2396 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class)) {
2397 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2399 if (opt_showdisassemble) {
2400 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2404 disp = (ptrint) &(fi->value);
2407 M_MOV_IMM(disp, REG_ITMP1);
2408 switch (fieldtype) {
2411 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
2412 M_IST(s1, REG_ITMP1, 0);
2415 s1 = emit_load_s1(jd, iptr, REG_ITMP23_PACKED);
2416 M_LST(s1, REG_ITMP1, 0);
2419 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2420 emit_fstps_membase(cd, REG_ITMP1, 0);
2423 s1 = emit_load_s1(jd, iptr, REG_FTMP1);
2424 emit_fstpl_membase(cd, REG_ITMP1, 0);
2429 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2430 /* val = value (in current instruction) */
2431 /* following NOP) */
2433 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2434 unresolved_field *uf = iptr->sx.s23.s3.uf;
2436 fieldtype = uf->fieldref->parseddesc.fd->type;
2438 codegen_addpatchref(cd, PATCHER_get_putstatic,
2441 if (opt_showdisassemble) {
2442 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2449 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2451 fieldtype = fi->type;
2453 if (!CLASS_IS_OR_ALMOST_INITIALIZED(fi->class)) {
2454 codegen_addpatchref(cd, PATCHER_clinit, fi->class, 0);
2456 if (opt_showdisassemble) {
2457 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2461 disp = (ptrint) &(fi->value);
2464 M_MOV_IMM(disp, REG_ITMP1);
2465 switch (fieldtype) {
2468 M_IST_IMM(iptr->sx.s23.s2.constval, REG_ITMP1, 0);
2471 M_IST_IMM(iptr->sx.s23.s2.constval & 0xffffffff, REG_ITMP1, 0);
2472 M_IST_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, REG_ITMP1, 4);
2479 case ICMD_GETFIELD: /* .., objectref. ==> ..., value */
2481 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2482 gen_nullptr_check(s1);
2484 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2485 unresolved_field *uf = iptr->sx.s23.s3.uf;
2487 fieldtype = uf->fieldref->parseddesc.fd->type;
2489 codegen_addpatchref(cd, PATCHER_getfield,
2490 iptr->sx.s23.s3.uf, 0);
2492 if (opt_showdisassemble) {
2493 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2500 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2502 fieldtype = fi->type;
2506 switch (fieldtype) {
2509 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
2510 M_ILD32(d, s1, disp);
2513 d = codegen_reg_of_dst(jd, iptr, REG_ITMP23_PACKED);
2514 M_LLD32(d, s1, disp);
2517 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2518 M_FLD32(d, s1, disp);
2521 d = codegen_reg_of_dst(jd, iptr, REG_FTMP1);
2522 M_DLD32(d, s1, disp);
2525 emit_store_dst(jd, iptr, d);
2528 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2530 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2531 gen_nullptr_check(s1);
2533 /* must be done here because of code patching */
2535 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2536 unresolved_field *uf = iptr->sx.s23.s3.uf;
2538 fieldtype = uf->fieldref->parseddesc.fd->type;
2541 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2543 fieldtype = fi->type;
2546 if (!IS_FLT_DBL_TYPE(fieldtype)) {
2547 if (IS_2_WORD_TYPE(fieldtype))
2548 s2 = emit_load_s2(jd, iptr, REG_ITMP23_PACKED);
2550 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2553 s2 = emit_load_s2(jd, iptr, REG_FTMP2);
2555 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2556 unresolved_field *uf = iptr->sx.s23.s3.uf;
2558 codegen_addpatchref(cd, PATCHER_putfield, uf, 0);
2560 if (opt_showdisassemble) {
2561 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2568 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2573 switch (fieldtype) {
2576 M_IST32(s2, s1, disp);
2579 M_LST32(s2, s1, disp);
2582 emit_fstps_membase32(cd, s1, disp);
2585 emit_fstpl_membase32(cd, s1, disp);
2590 case ICMD_PUTFIELDCONST: /* ..., objectref ==> ... */
2591 /* val = value (in current instruction) */
2592 /* following NOP) */
2594 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2595 gen_nullptr_check(s1);
2597 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2598 unresolved_field *uf = iptr->sx.s23.s3.uf;
2600 fieldtype = uf->fieldref->parseddesc.fd->type;
2602 codegen_addpatchref(cd, PATCHER_putfieldconst,
2605 if (opt_showdisassemble) {
2606 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2614 fieldinfo *fi = iptr->sx.s23.s3.fmiref->p.field;
2616 fieldtype = fi->type;
2621 switch (fieldtype) {
2624 M_IST32_IMM(iptr->sx.s23.s2.constval, s1, disp);
2627 M_IST32_IMM(iptr->sx.s23.s2.constval & 0xffffffff, s1, disp);
2628 M_IST32_IMM(((s4)iptr->sx.s23.s2.constval) >> 31, s1, disp + 4);
2636 /* branch operations **************************************************/
2638 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2640 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2641 M_INTMOVE(s1, REG_ITMP1_XPTR);
2643 #ifdef ENABLE_VERIFIER
2644 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
2645 codegen_addpatchref(cd, PATCHER_athrow_areturn,
2646 iptr->sx.s23.s2.uc, 0);
2648 if (opt_showdisassemble) {
2649 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2652 #endif /* ENABLE_VERIFIER */
2654 M_CALL_IMM(0); /* passing exception pc */
2655 M_POP(REG_ITMP2_XPC);
2657 M_MOV_IMM(asm_handle_exception, REG_ITMP3);
2661 case ICMD_INLINE_GOTO:
2663 M_COPY(src, iptr->dst.var);
2667 case ICMD_GOTO: /* ... ==> ... */
2668 case ICMD_RET: /* ... ==> ... */
2670 #if defined(ENABLE_SSA)
2672 last_cmd_was_goto = true;
2673 /* In case of a Goto phimoves have to be inserted before the */
2675 codegen_insert_phi_moves(jd, bptr);
2679 codegen_addreference(cd, iptr->dst.block);
2683 case ICMD_JSR: /* ... ==> ... */
2686 codegen_addreference(cd, iptr->sx.s23.s3.jsrtarget.block);
2689 case ICMD_IFNULL: /* ..., value ==> ... */
2691 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2694 codegen_addreference(cd, iptr->dst.block);
2697 case ICMD_IFNONNULL: /* ..., value ==> ... */
2699 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2702 codegen_addreference(cd, iptr->dst.block);
2705 case ICMD_IFEQ: /* ..., value ==> ... */
2707 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2708 M_CMP_IMM(iptr->sx.val.i, s1);
2710 codegen_addreference(cd, iptr->dst.block);
2713 case ICMD_IFLT: /* ..., value ==> ... */
2715 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2716 M_CMP_IMM(iptr->sx.val.i, s1);
2718 codegen_addreference(cd, iptr->dst.block);
2721 case ICMD_IFLE: /* ..., value ==> ... */
2723 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2724 M_CMP_IMM(iptr->sx.val.i, s1);
2726 codegen_addreference(cd, iptr->dst.block);
2729 case ICMD_IFNE: /* ..., value ==> ... */
2731 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2732 M_CMP_IMM(iptr->sx.val.i, s1);
2734 codegen_addreference(cd, iptr->dst.block);
2737 case ICMD_IFGT: /* ..., value ==> ... */
2739 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2740 M_CMP_IMM(iptr->sx.val.i, s1);
2742 codegen_addreference(cd, iptr->dst.block);
2745 case ICMD_IFGE: /* ..., value ==> ... */
2747 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2748 M_CMP_IMM(iptr->sx.val.i, s1);
2750 codegen_addreference(cd, iptr->dst.block);
2753 case ICMD_IF_LEQ: /* ..., value ==> ... */
2755 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2756 if (iptr->sx.val.l == 0) {
2757 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2758 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2761 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2762 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2763 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2764 M_OR(REG_ITMP2, REG_ITMP1);
2767 codegen_addreference(cd, iptr->dst.block);
2770 case ICMD_IF_LLT: /* ..., value ==> ... */
2772 if (iptr->sx.val.l == 0) {
2773 /* If high 32-bit are less than zero, then the 64-bits
2775 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2780 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2781 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2783 codegen_addreference(cd, iptr->dst.block);
2785 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2788 codegen_addreference(cd, iptr->dst.block);
2791 case ICMD_IF_LLE: /* ..., value ==> ... */
2793 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2794 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2796 codegen_addreference(cd, iptr->dst.block);
2798 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2800 codegen_addreference(cd, iptr->dst.block);
2803 case ICMD_IF_LNE: /* ..., value ==> ... */
2805 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2806 if (iptr->sx.val.l == 0) {
2807 M_INTMOVE(GET_LOW_REG(s1), REG_ITMP1);
2808 M_OR(GET_HIGH_REG(s1), REG_ITMP1);
2811 M_LNGMOVE(s1, REG_ITMP12_PACKED);
2812 M_XOR_IMM(iptr->sx.val.l, REG_ITMP1);
2813 M_XOR_IMM(iptr->sx.val.l >> 32, REG_ITMP2);
2814 M_OR(REG_ITMP2, REG_ITMP1);
2817 codegen_addreference(cd, iptr->dst.block);
2820 case ICMD_IF_LGT: /* ..., value ==> ... */
2822 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2823 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2825 codegen_addreference(cd, iptr->dst.block);
2827 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2829 codegen_addreference(cd, iptr->dst.block);
2832 case ICMD_IF_LGE: /* ..., value ==> ... */
2834 if (iptr->sx.val.l == 0) {
2835 /* If high 32-bit are greater equal zero, then the
2837 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2842 s1 = emit_load_s1(jd, iptr, REG_ITMP12_PACKED);
2843 M_CMP_IMM(iptr->sx.val.l >> 32, GET_HIGH_REG(s1));
2845 codegen_addreference(cd, iptr->dst.block);
2847 M_CMP_IMM32(iptr->sx.val.l, GET_LOW_REG(s1));
2850 codegen_addreference(cd, iptr->dst.block);
2853 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2854 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2856 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2857 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2860 codegen_addreference(cd, iptr->dst.block);
2863 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2865 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2866 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2867 M_INTMOVE(s1, REG_ITMP1);
2868 M_XOR(s2, REG_ITMP1);
2869 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2870 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2871 M_INTMOVE(s1, REG_ITMP2);
2872 M_XOR(s2, REG_ITMP2);
2873 M_OR(REG_ITMP1, REG_ITMP2);
2875 codegen_addreference(cd, iptr->dst.block);
2878 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2879 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2881 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2882 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2885 codegen_addreference(cd, iptr->dst.block);
2888 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2890 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2891 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2892 M_INTMOVE(s1, REG_ITMP1);
2893 M_XOR(s2, REG_ITMP1);
2894 s1 = emit_load_s1_high(jd, iptr, REG_ITMP2);
2895 s2 = emit_load_s2_high(jd, iptr, REG_ITMP3);
2896 M_INTMOVE(s1, REG_ITMP2);
2897 M_XOR(s2, REG_ITMP2);
2898 M_OR(REG_ITMP1, REG_ITMP2);
2900 codegen_addreference(cd, iptr->dst.block);
2903 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2905 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2906 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2909 codegen_addreference(cd, iptr->dst.block);
2912 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2914 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2915 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2918 codegen_addreference(cd, iptr->dst.block);
2919 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2920 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2924 codegen_addreference(cd, iptr->dst.block);
2927 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2929 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2930 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2933 codegen_addreference(cd, iptr->dst.block);
2936 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2938 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2939 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2942 codegen_addreference(cd, iptr->dst.block);
2943 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2944 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2948 codegen_addreference(cd, iptr->dst.block);
2951 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2953 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2954 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2957 codegen_addreference(cd, iptr->dst.block);
2960 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2962 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2963 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2966 codegen_addreference(cd, iptr->dst.block);
2967 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2968 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2972 codegen_addreference(cd, iptr->dst.block);
2975 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2977 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
2978 s2 = emit_load_s2(jd, iptr, REG_ITMP2);
2981 codegen_addreference(cd, iptr->dst.block);
2984 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2986 s1 = emit_load_s1_high(jd, iptr, REG_ITMP1);
2987 s2 = emit_load_s2_high(jd, iptr, REG_ITMP2);
2990 codegen_addreference(cd, iptr->dst.block);
2991 s1 = emit_load_s1_low(jd, iptr, REG_ITMP1);
2992 s2 = emit_load_s2_low(jd, iptr, REG_ITMP2);
2996 codegen_addreference(cd, iptr->dst.block);
3000 case ICMD_IRETURN: /* ..., retvalue ==> ... */
3002 s1 = emit_load_s1(jd, iptr, REG_RESULT);
3003 M_INTMOVE(s1, REG_RESULT);
3004 goto nowperformreturn;
3006 case ICMD_LRETURN: /* ..., retvalue ==> ... */
3008 s1 = emit_load_s1(jd, iptr, REG_RESULT_PACKED);
3009 M_LNGMOVE(s1, REG_RESULT_PACKED);
3010 goto nowperformreturn;
3012 case ICMD_ARETURN: /* ..., retvalue ==> ... */
3014 s1 = emit_load_s1(jd, iptr, REG_RESULT);
3015 M_INTMOVE(s1, REG_RESULT);
3017 #ifdef ENABLE_VERIFIER
3018 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3019 codegen_addpatchref(cd, PATCHER_athrow_areturn,
3020 iptr->sx.s23.s2.uc, 0);
3022 if (opt_showdisassemble) {
3023 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3026 #endif /* ENABLE_VERIFIER */
3027 goto nowperformreturn;
3029 case ICMD_FRETURN: /* ..., retvalue ==> ... */
3032 s1 = emit_load_s1(jd, iptr, REG_FRESULT);
3033 goto nowperformreturn;
3035 case ICMD_RETURN: /* ... ==> ... */
3041 p = cd->stackframesize;
3043 #if !defined(NDEBUG)
3044 if (JITDATA_HAS_FLAG_VERBOSECALL(jd))
3045 emit_verbosecall_exit(jd);
3048 #if defined(ENABLE_THREADS)
3049 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
3050 M_ALD(REG_ITMP2, REG_SP, rd->memuse * 4);
3052 /* we need to save the proper return value */
3053 switch (iptr->opc) {
3056 M_IST(REG_RESULT, REG_SP, rd->memuse * 4);
3060 M_LST(REG_RESULT_PACKED, REG_SP, rd->memuse * 4);
3064 emit_fstps_membase(cd, REG_SP, rd->memuse * 4);
3068 emit_fstpl_membase(cd, REG_SP, rd->memuse * 4);
3072 M_AST(REG_ITMP2, REG_SP, 0);
3073 M_MOV_IMM(LOCK_monitor_exit, REG_ITMP3);
3076 /* and now restore the proper return value */
3077 switch (iptr->opc) {
3080 M_ILD(REG_RESULT, REG_SP, rd->memuse * 4);
3084 M_LLD(REG_RESULT_PACKED, REG_SP, rd->memuse * 4);
3088 emit_flds_membase(cd, REG_SP, rd->memuse * 4);
3092 emit_fldl_membase(cd, REG_SP, rd->memuse * 4);
3098 /* restore saved registers */
3100 for (i = INT_SAV_CNT - 1; i >= rd->savintreguse; i--) {
3101 p--; M_ALD(rd->savintregs[i], REG_SP, p * 4);
3104 for (i = FLT_SAV_CNT - 1; i >= rd->savfltreguse; i--) {
3106 emit_fldl_membase(cd, REG_SP, p * 4);
3107 if (iptr->opc == ICMD_FRETURN || iptr->opc == ICMD_DRETURN) {
3109 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset + 1); */
3112 /* emit_fstp_reg(cd, rd->savfltregs[i] + fpu_st_offset); */
3116 /* deallocate stack */
3118 if (cd->stackframesize)
3119 M_AADD_IMM(cd->stackframesize * 4, REG_SP);
3126 case ICMD_TABLESWITCH: /* ..., index ==> ... */
3129 branch_target_t *table;
3131 table = iptr->dst.table;
3133 l = iptr->sx.s23.s2.tablelow;
3134 i = iptr->sx.s23.s3.tablehigh;
3136 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3137 M_INTMOVE(s1, REG_ITMP1);
3140 M_ISUB_IMM(l, REG_ITMP1);
3145 M_CMP_IMM(i - 1, REG_ITMP1);
3148 codegen_addreference(cd, table[0].block); /* default target */
3150 /* build jump table top down and use address of lowest entry */
3155 dseg_addtarget(cd, table->block);
3159 /* length of dataseg after last dseg_addtarget is used
3162 M_MOV_IMM(0, REG_ITMP2);
3164 emit_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 2, REG_ITMP1);
3170 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
3173 lookup_target_t *lookup;
3175 lookup = iptr->dst.lookup;
3177 i = iptr->sx.s23.s2.lookupcount;
3179 MCODECHECK((i<<2)+8);
3180 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3183 M_CMP_IMM(lookup->value, s1);
3185 codegen_addreference(cd, lookup->target.block);
3191 codegen_addreference(cd, iptr->sx.s23.s3.lookupdefault.block);
3195 case ICMD_BUILTIN: /* ..., [arg1, [arg2 ...]] ==> ... */
3197 bte = iptr->sx.s23.s3.bte;
3201 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3203 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3204 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
3205 case ICMD_INVOKEINTERFACE:
3207 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3208 md = iptr->sx.s23.s3.um->methodref->parseddesc.md;
3212 lm = iptr->sx.s23.s3.fmiref->p.method;
3213 md = lm->parseddesc;
3217 s3 = md->paramcount;
3219 MCODECHECK((s3 << 1) + 64);
3221 /* copy arguments to registers or stack location */
3223 for (s3 = s3 - 1; s3 >= 0; s3--) {
3224 var = VAR(iptr->sx.s23.s2.args[s3]);
3226 /* Already Preallocated (ARGVAR) ? */
3227 if (var->flags & PREALLOC)
3229 if (IS_INT_LNG_TYPE(var->type)) {
3230 if (!md->params[s3].inmemory) {
3231 log_text("No integer argument registers available!");
3235 if (IS_2_WORD_TYPE(var->type)) {
3236 d = emit_load(jd, iptr, var, REG_ITMP12_PACKED);
3237 M_LST(d, REG_SP, md->params[s3].regoff * 4);
3239 d = emit_load(jd, iptr, var, REG_ITMP1);
3240 M_IST(d, REG_SP, md->params[s3].regoff * 4);
3245 if (!md->params[s3].inmemory) {
3246 s1 = rd->argfltregs[md->params[s3].regoff];
3247 d = emit_load(jd, iptr, var, s1);
3251 d = emit_load(jd, iptr, var, REG_FTMP1);
3252 if (IS_2_WORD_TYPE(var->type))
3253 M_DST(d, REG_SP, md->params[s3].regoff * 4);
3255 M_FST(d, REG_SP, md->params[s3].regoff * 4);
3260 switch (iptr->opc) {
3262 disp = (ptrint) bte->fp;
3263 d = md->returntype.type;
3265 M_MOV_IMM(disp, REG_ITMP1);
3269 if (INSTRUCTION_MUST_CHECK(iptr)) {
3272 codegen_add_fillinstacktrace_ref(cd);
3276 case ICMD_INVOKESPECIAL:
3277 M_ALD(REG_ITMP1, REG_SP, 0);
3280 codegen_add_nullpointerexception_ref(cd);
3284 case ICMD_INVOKESTATIC:
3286 unresolved_method *um = iptr->sx.s23.s3.um;
3288 codegen_addpatchref(cd, PATCHER_invokestatic_special,
3291 if (opt_showdisassemble) {
3292 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3296 d = md->returntype.type;
3299 disp = (ptrint) lm->stubroutine;
3300 d = lm->parseddesc->returntype.type;
3303 M_MOV_IMM(disp, REG_ITMP2);
3307 case ICMD_INVOKEVIRTUAL:
3308 M_ALD(REG_ITMP1, REG_SP, 0 * 4);
3309 gen_nullptr_check(REG_ITMP1);
3312 unresolved_method *um = iptr->sx.s23.s3.um;
3314 codegen_addpatchref(cd, PATCHER_invokevirtual, um, 0);
3316 if (opt_showdisassemble) {
3317 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3321 d = md->returntype.type;
3324 s1 = OFFSET(vftbl_t, table[0]) +
3325 sizeof(methodptr) * lm->vftblindex;
3326 d = md->returntype.type;
3329 M_ALD(REG_METHODPTR, REG_ITMP1,
3330 OFFSET(java_objectheader, vftbl));
3331 M_ALD32(REG_ITMP3, REG_METHODPTR, s1);
3335 case ICMD_INVOKEINTERFACE:
3336 M_ALD(REG_ITMP1, REG_SP, 0 * 4);
3337 gen_nullptr_check(REG_ITMP1);
3340 unresolved_method *um = iptr->sx.s23.s3.um;
3342 codegen_addpatchref(cd, PATCHER_invokeinterface, um, 0);
3344 if (opt_showdisassemble) {
3345 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3350 d = md->returntype.type;
3353 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3354 sizeof(methodptr) * lm->class->index;
3356 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3358 d = md->returntype.type;
3361 M_ALD(REG_METHODPTR, REG_ITMP1,
3362 OFFSET(java_objectheader, vftbl));
3363 M_ALD32(REG_METHODPTR, REG_METHODPTR, s1);
3364 M_ALD32(REG_ITMP3, REG_METHODPTR, s2);
3369 /* d contains return type */
3371 if (d != TYPE_VOID) {
3372 #if defined(ENABLE_SSA)
3373 if ((ls == NULL) || (!IS_TEMPVAR_INDEX(iptr->dst.varindex)) ||
3374 (ls->lifetime[-iptr->dst.varindex-1].type != -1))
3375 /* a "living" stackslot */
3378 if (IS_INT_LNG_TYPE(d)) {
3379 if (IS_2_WORD_TYPE(d)) {
3380 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT_PACKED);
3381 M_LNGMOVE(REG_RESULT_PACKED, s1);
3384 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3385 M_INTMOVE(REG_RESULT, s1);
3389 s1 = codegen_reg_of_dst(jd, iptr, REG_NULL);
3391 emit_store_dst(jd, iptr, s1);
3397 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3398 /* val.a: (classinfo*) superclass */
3400 /* superclass is an interface:
3402 * OK if ((sub == NULL) ||
3403 * (sub->vftbl->interfacetablelength > super->index) &&
3404 * (sub->vftbl->interfacetable[-super->index] != NULL));
3406 * superclass is a class:
3408 * OK if ((sub == NULL) || (0
3409 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3410 * super->vftbl->diffval));
3413 if (!(iptr->flags.bits & INS_FLAG_ARRAY)) {
3414 /* object type cast-check */
3417 vftbl_t *supervftbl;
3420 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3426 super = iptr->sx.s23.s3.c.cls;
3427 superindex = super->index;
3428 supervftbl = super->vftbl;
3431 #if defined(ENABLE_THREADS)
3432 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3434 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3436 /* calculate interface checkcast code size */
3438 s2 = 2; /* mov_membase_reg */
3439 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3441 s2 += (2 + 4 /* mov_membase32_reg */ + 2 + 4 /* sub imm32 */ +
3442 2 /* test */ + 6 /* jcc */ + 2 + 4 /* mov_membase32_reg */ +
3443 2 /* test */ + 6 /* jcc */);
3446 s2 += (opt_showdisassemble ? 5 : 0);
3448 /* calculate class checkcast code size */
3450 s3 = 2; /* mov_membase_reg */
3451 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3453 s3 += 5 /* mov_imm_reg */ + 2 + 4 /* mov_membase32_reg */;
3456 if (s1 != REG_ITMP1) {
3458 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3461 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3468 s3 += (2 + 4 /* mov_membase32_reg */ + 2 /* sub */ +
3469 5 /* mov_imm_reg */ + 2 /* mov_membase_reg */);
3470 CALCOFFSETBYTES(s3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3473 s3 += 2 /* cmp */ + 6 /* jcc */;
3476 s3 += (opt_showdisassemble ? 5 : 0);
3478 /* if class is not resolved, check which code to call */
3480 if (super == NULL) {
3482 M_BEQ(5 + (opt_showdisassemble ? 5 : 0) + 6 + 6 + s2 + 5 + s3);
3484 codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
3485 iptr->sx.s23.s3.c.ref, 0);
3487 if (opt_showdisassemble) {
3488 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3491 M_MOV_IMM(0, REG_ITMP2); /* super->flags */
3492 M_AND_IMM32(ACC_INTERFACE, REG_ITMP2);
3496 /* interface checkcast code */
3498 if ((super == NULL) || (super->flags & ACC_INTERFACE)) {
3499 if (super != NULL) {
3504 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3506 if (super == NULL) {
3507 codegen_addpatchref(cd,
3508 PATCHER_checkcast_instanceof_interface,
3509 iptr->sx.s23.s3.c.ref,
3512 if (opt_showdisassemble) {
3513 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3518 REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3519 M_ISUB_IMM32(superindex, REG_ITMP3);
3522 codegen_add_classcastexception_ref(cd, s1);
3523 M_ALD32(REG_ITMP3, REG_ITMP2,
3524 OFFSET(vftbl_t, interfacetable[0]) -
3525 superindex * sizeof(methodptr*));
3528 codegen_add_classcastexception_ref(cd, s1);
3534 /* class checkcast code */
3536 if ((super == NULL) || !(super->flags & ACC_INTERFACE)) {
3537 if (super != NULL) {
3542 M_ALD(REG_ITMP2, s1, OFFSET(java_objectheader, vftbl));
3544 if (super == NULL) {
3545 codegen_addpatchref(cd, PATCHER_checkcast_class,
3546 iptr->sx.s23.s3.c.ref,
3549 if (opt_showdisassemble) {
3550 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3554 M_MOV_IMM(supervftbl, REG_ITMP3);
3555 #if defined(ENABLE_THREADS)
3556 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3558 M_ILD32(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3560 /* if (s1 != REG_ITMP1) { */
3561 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1); */
3562 /* emit_mov_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3); */
3563 /* #if defined(ENABLE_THREADS) */
3564 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3566 /* emit_alu_reg_reg(cd, ALU_SUB, REG_ITMP1, REG_ITMP2); */
3569 M_ILD32(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, baseval));
3570 M_ISUB(REG_ITMP3, REG_ITMP2);
3571 M_MOV_IMM(supervftbl, REG_ITMP3);
3572 M_ILD(REG_ITMP3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3573 #if defined(ENABLE_THREADS)
3574 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3578 M_CMP(REG_ITMP3, REG_ITMP2);
3579 M_BA(0); /* (u) REG_ITMP2 > (u) REG_ITMP3 -> jump */
3580 codegen_add_classcastexception_ref(cd, s1);
3583 d = codegen_reg_of_dst(jd, iptr, REG_ITMP3);
3586 /* array type cast-check */
3588 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3589 M_AST(s1, REG_SP, 0 * 4);
3591 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3592 codegen_addpatchref(cd, PATCHER_builtin_arraycheckcast,
3593 iptr->sx.s23.s3.c.ref, 0);
3595 if (opt_showdisassemble) {
3596 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3600 M_AST_IMM(iptr->sx.s23.s3.c.cls, REG_SP, 1 * 4);
3601 M_MOV_IMM(BUILTIN_arraycheckcast, REG_ITMP3);
3604 s1 = emit_load_s1(jd, iptr, REG_ITMP2);
3607 codegen_add_classcastexception_ref(cd, s1);
3609 d = codegen_reg_of_dst(jd, iptr, s1);
3613 emit_store_dst(jd, iptr, d);
3616 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3617 /* val.a: (classinfo*) superclass */
3619 /* superclass is an interface:
3621 * return (sub != NULL) &&
3622 * (sub->vftbl->interfacetablelength > super->index) &&
3623 * (sub->vftbl->interfacetable[-super->index] != NULL);
3625 * superclass is a class:
3627 * return ((sub != NULL) && (0
3628 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3629 * super->vftbl->diffvall));
3634 vftbl_t *supervftbl;
3637 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3643 super = iptr->sx.s23.s3.c.cls;
3644 superindex = super->index;
3645 supervftbl = super->vftbl;
3648 #if defined(ENABLE_THREADS)
3649 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3652 s1 = emit_load_s1(jd, iptr, REG_ITMP1);
3653 d = codegen_reg_of_dst(jd, iptr, REG_ITMP2);
3655 M_INTMOVE(s1, REG_ITMP1);
3659 /* calculate interface instanceof code size */
3661 s2 = 2; /* mov_membase_reg */
3662 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3664 s2 += (2 + 4 /* mov_membase32_reg */ + 2 + 4 /* alu_imm32_reg */ +
3665 2 /* test */ + 6 /* jcc */ + 2 + 4 /* mov_membase32_reg */ +
3666 2 /* test */ + 6 /* jcc */ + 5 /* mov_imm_reg */);
3669 s2 += (opt_showdisassemble ? 5 : 0);
3671 /* calculate class instanceof code size */
3673 s3 = 2; /* mov_membase_reg */
3674 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3675 s3 += 5; /* mov_imm_reg */
3677 CALCOFFSETBYTES(s3, REG_ITMP1, OFFSET(vftbl_t, baseval));
3679 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3681 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, baseval));
3683 s3 += (2 /* alu_reg_reg */ + 2 /* alu_reg_reg */ +
3684 2 /* alu_reg_reg */ + 6 /* jcc */ + 5 /* mov_imm_reg */);
3687 s3 += (opt_showdisassemble ? 5 : 0);
3691 /* if class is not resolved, check which code to call */
3695 M_BEQ(5 + (opt_showdisassemble ? 5 : 0) + 6 + 6 + s2 + 5 + s3);
3697 codegen_addpatchref(cd, PATCHER_checkcast_instanceof_flags,
3698 iptr->sx.s23.s3.c.ref, 0);
3700 if (opt_showdisassemble) {
3701 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3704 M_MOV_IMM(0, REG_ITMP3); /* super->flags */
3705 M_AND_IMM32(ACC_INTERFACE, REG_ITMP3);
3709 /* interface instanceof code */
3711 if (!super || (super->flags & ACC_INTERFACE)) {
3717 M_ALD(REG_ITMP1, s1, OFFSET(java_objectheader, vftbl));
3720 codegen_addpatchref(cd,
3721 PATCHER_checkcast_instanceof_interface,
3722 iptr->sx.s23.s3.c.ref, 0);
3724 if (opt_showdisassemble) {
3725 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3730 REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3731 M_ISUB_IMM32(superindex, REG_ITMP3);
3734 disp = (2 + 4 /* mov_membase32_reg */ + 2 /* test */ +
3735 6 /* jcc */ + 5 /* mov_imm_reg */);
3738 M_ALD32(REG_ITMP1, REG_ITMP1,
3739 OFFSET(vftbl_t, interfacetable[0]) -
3740 superindex * sizeof(methodptr*));
3742 /* emit_setcc_reg(cd, CC_A, d); */
3743 /* emit_jcc(cd, CC_BE, 5); */
3751 /* class instanceof code */
3753 if (!super || !(super->flags & ACC_INTERFACE)) {
3759 M_ALD(REG_ITMP1, s1, OFFSET(java_objectheader, vftbl));
3762 codegen_addpatchref(cd, PATCHER_instanceof_class,
3763 iptr->sx.s23.s3.c.ref, 0);
3765 if (opt_showdisassemble) {
3766 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3770 M_MOV_IMM(supervftbl, REG_ITMP2);
3771 #if defined(ENABLE_THREADS)
3772 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3774 M_ILD(REG_ITMP1, REG_ITMP1, OFFSET(vftbl_t, baseval));
3775 M_ILD(REG_ITMP3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3776 M_ILD(REG_ITMP2, REG_ITMP2, OFFSET(vftbl_t, baseval));
3777 #if defined(ENABLE_THREADS)
3778 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3780 M_ISUB(REG_ITMP2, REG_ITMP1);
3781 M_CLR(d); /* may be REG_ITMP2 */
3782 M_CMP(REG_ITMP3, REG_ITMP1);
3786 emit_store_dst(jd, iptr, d);
3792 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3794 /* check for negative sizes and copy sizes to stack if necessary */
3796 MCODECHECK((iptr->s1.argcount << 1) + 64);
3798 for (s1 = iptr->s1.argcount; --s1 >= 0; ) {
3799 /* copy SAVEDVAR sizes to stack */
3800 var = VAR(iptr->sx.s23.s2.args[s1]);
3802 /* Already Preallocated? */
3803 if (!(var->flags & PREALLOC)) {
3804 if (var->flags & INMEMORY) {
3805 M_ILD(REG_ITMP1, REG_SP, var->vv.regoff * 4);
3806 M_IST(REG_ITMP1, REG_SP, (s1 + 3) * 4);
3809 M_IST(var->vv.regoff, REG_SP, (s1 + 3) * 4);
3813 /* is a patcher function set? */
3815 if (INSTRUCTION_IS_UNRESOLVED(iptr)) {
3816 codegen_addpatchref(cd, PATCHER_builtin_multianewarray,
3817 iptr->sx.s23.s3.c.ref, 0);
3819 if (opt_showdisassemble) {
3820 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3827 disp = (ptrint) iptr->sx.s23.s3.c.cls;
3829 /* a0 = dimension count */
3831 M_IST_IMM(iptr->s1.argcount, REG_SP, 0 * 4);
3833 /* a1 = arraydescriptor */
3835 M_IST_IMM(disp, REG_SP, 1 * 4);
3837 /* a2 = pointer to dimensions = stack pointer */
3839 M_MOV(REG_SP, REG_ITMP1);
3840 M_AADD_IMM(3 * 4, REG_ITMP1);
3841 M_AST(REG_ITMP1, REG_SP, 2 * 4);
3843 M_MOV_IMM(BUILTIN_multianewarray, REG_ITMP1);
3846 /* check for exception before result assignment */
3850 codegen_add_fillinstacktrace_ref(cd);
3852 s1 = codegen_reg_of_dst(jd, iptr, REG_RESULT);
3853 M_INTMOVE(REG_RESULT, s1);
3854 emit_store_dst(jd, iptr, s1);
3859 new_internalerror("Unknown ICMD %d", iptr->opc);
3863 } /* for instruction */
3867 #if defined(ENABLE_LSRA) && !defined(ENABLE_SSA)
3870 #if defined(ENABLE_SSA)
3872 /* by edge splitting, in Blocks with phi moves there can only */
3873 /* be a goto as last command, no other Jump/Branch Command */
3874 if (!last_cmd_was_goto)
3875 codegen_insert_phi_moves(jd, bptr);
3880 /* At the end of a basic block we may have to append some nops,
3881 because the patcher stub calling code might be longer than the
3882 actual instruction. So codepatching does not change the
3883 following block unintentionally. */
3885 if (cd->mcodeptr < cd->lastmcodeptr) {
3886 while (cd->mcodeptr < cd->lastmcodeptr) {
3891 } /* if (bptr -> flags >= BBREACHED) */
3892 } /* for basic block */
3894 dseg_createlinenumbertable(cd);
3897 /* generate exception and patcher stubs */
3899 emit_exception_stubs(jd);
3900 emit_patcher_stubs(jd);
3902 emit_replacement_stubs(jd);
3907 /* everything's ok */
3912 #if defined(ENABLE_SSA)
void codegen_insert_phi_moves(jitdata *jd, basicblock *bptr) {
	/* Insert the machine moves required by the SSA phi functions of
	   basic block bptr.  NOTE(review): `ls` (lsradata) and `cd`
	   (codegendata) are used below but set up on lines not visible
	   here — presumably derived from jd; confirm against full source. */
	/* look for phi moves */
	int t_a,s_a,i, type;
	int t_lt, s_lt; /* lifetime indices of phi_moves */
	/* resolved flags (register vs. memory) and offsets of the
	   target (t_) and source (s_) of one phi move */
	s4 t_regoff, s_regoff, s_flags, t_flags;
	/* Moves from phi functions with highest indices have to be */
	/* inserted first, since this is the order as is used for */
	/* conflict resolution */
	for(i = ls->num_phi_moves[bptr->nr] - 1; i >= 0 ; i--) {
		/* each phi move is stored as a (target, source) index pair */
		t_a = ls->phi_moves[bptr->nr][i][0];
		s_a = ls->phi_moves[bptr->nr][i][1];
#if defined(SSA_DEBUG_VERBOSE)
		printf("BB %3i Move %3i <- %3i ", bptr->nr, t_a, s_a);
		/* local var lifetimes */
		t_lt = ls->maxlifetimes + t_a;
		type = ls->lifetime[t_lt].type;
		/* NOTE(review): the two consecutive `type =` assignments look
		   like branches of an elided if/else (local var vs. stack
		   slot) — confirm against full source */
		type = ls->lifetime[t_lt].local_ss->s->type;
		/* stackslot lifetime */
#if defined(SSA_DEBUG_VERBOSE)
		printf("...returning - phi lifetimes where joined\n");
		/* local var lifetimes */
		s_lt = ls->maxlifetimes + s_a;
		type = ls->lifetime[s_lt].type;
		type = ls->lifetime[s_lt].type;
		/* stackslot lifetime */
#if defined(SSA_DEBUG_VERBOSE)
		printf("...returning - phi lifetimes where joined\n");
		/* resolve the target location (flags + offset) */
		t_flags = VAR(t_a)->flags;
		t_regoff = VAR(t_a)->vv.regoff;
		t_flags = ls->lifetime[t_lt].local_ss->s->flags;
		t_regoff = ls->lifetime[t_lt].local_ss->s->regoff;
		/* local var move */
		s_flags = VAR(s_a)->flags;
		s_regoff = VAR(s_a)->vv.regoff;
		/* stackslot lifetime */
		s_flags = ls->lifetime[s_lt].local_ss->s->flags;
		s_regoff = ls->lifetime[s_lt].local_ss->s->regoff;
#if defined(SSA_DEBUG_VERBOSE)
		printf("...returning - phi lifetimes where joined\n");
		/* emit the actual move for the resolved source/target */
		cg_move(cd, type, s_regoff, s_flags, t_regoff, t_flags);
#if defined(SSA_DEBUG_VERBOSE)
		if (compileverbose) {
			/* trace the move: M = in-memory slot, R = register */
			if (IS_INMEMORY(t_flags) && IS_INMEMORY(s_flags)) {
				printf("M%3i <- M%3i",t_regoff,s_regoff);
			else if (IS_INMEMORY(s_flags)) {
				printf("R%3i <- M%3i",t_regoff,s_regoff);
			else if (IS_INMEMORY(t_flags)) {
				printf("M%3i <- R%3i",t_regoff,s_regoff);
				printf("R%3i <- R%3i",t_regoff,s_regoff);
#endif /* defined(SSA_DEBUG_VERBOSE) */
/* cg_move *********************************************************************

   Emits an i386 move of one value of the given JIT `type` from the
   source location (src_regoff/src_flags) to the destination location
   (dst_regoff/dst_flags).  When the flags say INMEMORY the offset is a
   stack-slot index (scaled by 4 bytes off REG_SP), otherwise it is an
   integer register number.  On i386 float/double and long values must
   live in memory; register locations are ints only.

*******************************************************************************/

void cg_move(codegendata *cd, s4 type, s4 src_regoff, s4 src_flags,
			 s4 dst_regoff, s4 dst_flags) {
	if ((IS_INMEMORY(dst_flags)) && (IS_INMEMORY(src_flags))) {
		/* memory -> memory: identical slots need no code at all */
		if (dst_regoff != src_regoff) {
			if (!IS_2_WORD_TYPE(type)) {
				if (IS_FLT_DBL_TYPE(type)) {
					/* single-precision float: copy via the x87 stack */
					emit_flds_membase(cd, REG_SP, src_regoff * 4);
					emit_fstps_membase(cd, REG_SP, dst_regoff * 4);
					/* 32-bit int: bounce through REG_ITMP1 */
					emit_mov_membase_reg(cd, REG_SP, src_regoff * 4,
					emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, dst_regoff * 4);
			} else { /* LONG OR DOUBLE */
				if (IS_FLT_DBL_TYPE(type)) {
					/* double: copy via the x87 stack */
					emit_fldl_membase( cd, REG_SP, src_regoff * 4);
					emit_fstpl_membase(cd, REG_SP, dst_regoff * 4);
					/* long: copy low and high 32-bit words separately */
					emit_mov_membase_reg(cd, REG_SP, src_regoff * 4,
					emit_mov_reg_membase(cd, REG_ITMP1, REG_SP, dst_regoff * 4);
					emit_mov_membase_reg(cd, REG_SP, src_regoff * 4 + 4,
					emit_mov_reg_membase(cd, REG_ITMP1, REG_SP,
						dst_regoff * 4 + 4);
		/* at least one side is a register: only 32-bit ints allowed */
		if (IS_FLT_DBL_TYPE(type)) {
			log_text("cg_move: flt/dbl type have to be in memory\n");
		if (IS_2_WORD_TYPE(type)) {
			log_text("cg_move: longs have to be in memory\n");
		if (IS_INMEMORY(src_flags)) {
			/* memory -> register load */
			emit_mov_membase_reg(cd, REG_SP, src_regoff * 4, dst_regoff);
		} else if (IS_INMEMORY(dst_flags)) {
			/* register -> memory store */
			emit_mov_reg_membase(cd, src_regoff, REG_SP, dst_regoff * 4);
			/* only ints can be in regs on i386 */
			M_INTMOVE(src_regoff,dst_regoff);
4079 #endif /* defined(ENABLE_SSA) */
4081 /* createcompilerstub **********************************************************
4083 Creates a stub routine which calls the compiler.
4085 *******************************************************************************/
/* Layout of a compiler stub: a small data segment (three pointers:
   asm_call_jit_compiler, methodinfo, codeinfo) directly followed by
   COMPILERSTUB_CODESIZE bytes of machine code.  The replacement lists
   are parenthesized so the macros expand safely inside larger
   expressions (e.g. `2 * COMPILERSTUB_SIZE`). */

#define COMPILERSTUB_DATASIZE    (3 * SIZEOF_VOID_P)
#define COMPILERSTUB_CODESIZE    12

#define COMPILERSTUB_SIZE        (COMPILERSTUB_DATASIZE + COMPILERSTUB_CODESIZE)
u1 *createcompilerstub(methodinfo *m)
{
	u1 *s; /* memory to hold the stub */
	/* NOTE(review): `d`, `cd`, `code` and `dumpsize` are declared on
	   lines not visible here; `d` presumably points at the data
	   segment at the start of the allocation — confirm. */

	/* allocate one chunk covering data segment + stub code */
	s = CNEW(u1, COMPILERSTUB_SIZE);

	/* set data pointer and code pointer */
	/* advance s past the data segment so code is emitted after it */
	s = s + COMPILERSTUB_DATASIZE;

	/* mark start of dump memory area */
	dumpsize = dump_size();
	cd = DNEW(codegendata);

	/* Store the codeinfo pointer in the same place as in the
	   methodheader for compiled methods. */
	code = code_codeinfo_new(m);

	/* data segment: compiler entry point and per-method info */
	d[0] = (ptrint) asm_call_jit_compiler;
	d[2] = (ptrint) code;

	/* code for the stub */
	M_MOV_IMM(m, REG_ITMP1); /* method info */
	M_MOV_IMM(asm_call_jit_compiler, REG_ITMP3);
#if defined(ENABLE_STATISTICS)
	/* account the stub's footprint in the code-size statistics */
	count_cstub_len += COMPILERSTUB_SIZE;
	/* release dump area */
	dump_release(dumpsize);
4143 /* createnativestub ************************************************************
4145 Creates a stub routine which calls a native method.
4147 *******************************************************************************/
4149 u1 *createnativestub(functionptr f, jitdata *jd, methoddesc *nmd)
/* Generates the machine-code stub that bridges from JIT-compiled Java
   code into the native (JNI) function f.  The stub builds a C stack
   frame, registers a stackframeinfo/localref_table via
   codegen_start_native_call, forwards the arguments, calls the native
   function, tears the frame down via codegen_finish_native_call and
   forwards a pending exception to asm_handle_nat_exception.

   f   : address of the native function; may still be unresolved when
         built without WITH_STATIC_CLASSPATH (then a resolve_native
         patcher fixes it up lazily)
   jd  : jitdata of the method being wrapped (supplies m, cd, code, md)
   nmd : parameter descriptor of the NATIVE function, i.e. including
         the extra JNIEnv* (and jclass for static methods) parameters

   Returns the entry point of the generated stub (code->entrypoint). */
4157 s4 i, j; /* count variables */
4161 /* get required compiler data */
4168 /* set some variables */
/* number of hidden leading native parameters: JNIEnv* always,
   plus the jclass for static methods (JNI calling convention) */
4171 nativeparams = (m->flags & ACC_STATIC) ? 2 : 1;
4173 /* calculate stackframe size */
/* frame size is counted in 4-byte stack slots, not bytes */
4175 cd->stackframesize =
4176 sizeof(stackframeinfo) / SIZEOF_VOID_P +
4177 sizeof(localref_table) / SIZEOF_VOID_P +
4178 1 + /* function pointer */
4179 4 * 4 + /* 4 arguments (start_native_call) */
4182 /* keep stack 16-byte aligned */
/* force slot count to 3 (mod 4); together with the pushed return
   address this keeps ESP 16-byte aligned at the native call --
   NOTE(review): alignment reasoning inferred from the comment above,
   confirm against the prologue emitted outside this excerpt */
4184 cd->stackframesize |= 0x3;
4186 /* create method header */
/* data-segment method header layout must match the one written for
   JIT-compiled methods (see codegen for compiled methods) */
4188 (void) dseg_addaddress(cd, code); /* CodeinfoPointer */
4189 (void) dseg_adds4(cd, cd->stackframesize * 4); /* FrameSize */
4190 (void) dseg_adds4(cd, 0); /* IsSync */
4191 (void) dseg_adds4(cd, 0); /* IsLeaf */
4192 (void) dseg_adds4(cd, 0); /* IntSave */
4193 (void) dseg_adds4(cd, 0); /* FltSave */
4194 (void) dseg_addlinenumbertablesize(cd);
4195 (void) dseg_adds4(cd, 0); /* ExTableSize */
4197 /* generate native method profiling code */
4199 if (JITDATA_HAS_FLAG_INSTRUMENT(jd)) {
4200 /* count frequency */
/* increment code->frequency in memory on every stub entry */
4202 M_MOV_IMM(code, REG_ITMP1);
4203 M_IADD_IMM_MEMBASE(1, REG_ITMP1, OFFSET(codeinfo, frequency));
4206 /* calculate stackframe size for native function */
/* allocate the stub's stack frame (slots * 4 bytes) */
4208 M_ASUB_IMM(cd->stackframesize * 4, REG_SP);
4210 #if !defined(NDEBUG)
4211 if (JITDATA_HAS_FLAG_VERBOSECALL(jd))
4212 emit_verbosecall_enter(jd);
4215 /* get function address (this must happen before the stackframeinfo) */
4217 #if !defined(WITH_STATIC_CLASSPATH)
/* the native address may be unknown yet; patcher resolves it lazily */
4219 codegen_addpatchref(cd, PATCHER_resolve_native, m, 0);
4221 if (opt_showdisassemble) {
/* leave room so the disassembly stays readable when patched */
4222 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
/* stash the (possibly patched) native function pointer in the
   dedicated slot at offset 4*4 -- it is reloaded after
   codegen_start_native_call below */
4227 M_AST_IMM((ptrint) f, REG_SP, 4 * 4);
4229 /* Mark the whole fpu stack as free for native functions (only for saved */
4230 /* register count == 0). */
/* ffree st(0)..st(7): the x87 stack must be empty per the i386 ABI
   before calling foreign code */
4232 emit_ffree_reg(cd, 0);
4233 emit_ffree_reg(cd, 1);
4234 emit_ffree_reg(cd, 2);
4235 emit_ffree_reg(cd, 3);
4236 emit_ffree_reg(cd, 4);
4237 emit_ffree_reg(cd, 5);
4238 emit_ffree_reg(cd, 6);
4239 emit_ffree_reg(cd, 7);
4241 /* prepare data structures for native function call */
/* build the 4 outgoing arguments for codegen_start_native_call:
   arg0 = datasp (SP + framesize, i.e. top of this stub's frame),
   arg1 = 0,
   arg2 = datasp + SIZEOF_VOID_P,
   arg3 = value loaded from [SP + framesize]
   NOTE(review): presumably arg3 is the caller's return address and
   arg1 a null pv -- verify against codegen_start_native_call */
4243 M_MOV(REG_SP, REG_ITMP1);
4244 M_AADD_IMM(cd->stackframesize * 4, REG_ITMP1);
4246 M_AST(REG_ITMP1, REG_SP, 0 * 4);
4247 M_IST_IMM(0, REG_SP, 1 * 4);
4250 M_MOV(REG_SP, REG_ITMP2);
4251 M_AADD_IMM(cd->stackframesize * 4 + SIZEOF_VOID_P, REG_ITMP2);
4253 M_AST(REG_ITMP2, REG_SP, 2 * 4);
4254 M_ALD(REG_ITMP3, REG_SP, cd->stackframesize * 4);
4255 M_AST(REG_ITMP3, REG_SP, 3 * 4);
4256 M_MOV_IMM(codegen_start_native_call, REG_ITMP1);
/* reload the native function pointer saved at slot 4*4 above */
4259 M_ALD(REG_ITMP3, REG_SP, 4 * 4);
4261 /* copy arguments into new stackframe */
/* walk Java params right-to-left; j indexes the native descriptor,
   offset by the hidden JNIEnv*/jclass parameters */
4263 for (i = md->paramcount - 1, j = i + nativeparams; i >= 0; i--, j--) {
4264 t = md->paramtypes[i].type;
4266 if (!md->params[i].inmemory) {
4267 /* no integer argument registers */
4268 } else { /* float/double in memory can be copied like int/longs */
/* s1: source slot in the caller's frame (above our frame and the
   return address, hence the +1); s2: destination outgoing slot */
4269 s1 = (md->params[i].regoff + cd->stackframesize + 1) * 4;
4270 s2 = nmd->params[j].regoff * 4;
4272 M_ILD(REG_ITMP1, REG_SP, s1);
4273 M_IST(REG_ITMP1, REG_SP, s2);
/* long/double occupy two 4-byte slots; copy the high word too */
4274 if (IS_2_WORD_TYPE(t)) {
4275 M_ILD(REG_ITMP1, REG_SP, s1 + 4);
4276 M_IST(REG_ITMP1, REG_SP, s2 + 4);
4281 /* if function is static, put class into second argument */
/* JNI: static natives receive the jclass instead of a receiver */
4283 if (m->flags & ACC_STATIC)
4284 M_AST_IMM(m->class, REG_SP, 1 * 4);
4286 /* put env into first argument */
/* JNI: every native function gets JNIEnv* as its first argument */
4288 M_AST_IMM(_Jv_env, REG_SP, 0 * 4);
4290 /* call the native function */
4294 /* save return value */
/* spill the result to the stack: codegen_finish_native_call below
   would otherwise clobber the result registers */
4296 if (md->returntype.type != TYPE_VOID) {
4297 if (IS_INT_LNG_TYPE(md->returntype.type)) {
4298 if (IS_2_WORD_TYPE(md->returntype.type))
4299 M_IST(REG_RESULT2, REG_SP, 2 * 4);
4300 M_IST(REG_RESULT, REG_SP, 1 * 4);
/* float results come back on the x87 stack; store via fstp */
4303 if (IS_2_WORD_TYPE(md->returntype.type))
4304 emit_fstl_membase(cd, REG_SP, 1 * 4);
4306 emit_fsts_membase(cd, REG_SP, 1 * 4);
4310 #if !defined(NDEBUG)
4311 if (JITDATA_HAS_FLAG_VERBOSECALL(jd))
4312 emit_verbosecall_exit(jd);
4315 /* remove native stackframe info */
/* pass datasp (SP + framesize) to codegen_finish_native_call, which
   also yields any pending exception */
4317 M_MOV(REG_SP, REG_ITMP1);
4318 M_AADD_IMM(cd->stackframesize * 4, REG_ITMP1);
4320 M_AST(REG_ITMP1, REG_SP, 0 * 4);
4321 M_MOV_IMM(codegen_finish_native_call, REG_ITMP1);
/* keep the helper's result (the exception object, if any) out of the
   registers reloaded below; REG_ITMP3 cannot be used here */
4323 M_MOV(REG_RESULT, REG_ITMP2); /* REG_ITMP3 == REG_RESULT2 */
4325 /* restore return value */
4327 if (md->returntype.type != TYPE_VOID) {
4328 if (IS_INT_LNG_TYPE(md->returntype.type)) {
4329 if (IS_2_WORD_TYPE(md->returntype.type))
4330 M_ILD(REG_RESULT2, REG_SP, 2 * 4);
4331 M_ILD(REG_RESULT, REG_SP, 1 * 4);
/* reload float results back onto the x87 stack */
4334 if (IS_2_WORD_TYPE(md->returntype.type))
4335 emit_fldl_membase(cd, REG_SP, 1 * 4);
4337 emit_flds_membase(cd, REG_SP, 1 * 4);
/* tear down the stub's stack frame */
4341 M_AADD_IMM(cd->stackframesize * 4, REG_SP);
4343 /* check for exception */
4350 /* handle exception */
/* hand off to the exception asm: exception object in REG_ITMP1_XPTR,
   faulting PC in REG_ITMP2_XPC */
4352 M_MOV(REG_ITMP2, REG_ITMP1_XPTR);
4353 M_ALD(REG_ITMP2_XPC, REG_SP, 0);
/* rewind the return address so the XPC points back into the call
   site -- NOTE(review): the constant 2 presumably matches the size
   of the call encoding used here; confirm against the emitter */
4354 M_ASUB_IMM(2, REG_ITMP2_XPC);
4356 M_MOV_IMM(asm_handle_nat_exception, REG_ITMP3);
4360 /* generate patcher stubs */
4362 emit_patcher_stubs(jd);
4366 return code->entrypoint;
4371 * These are local overrides for various environment variables in Emacs.
4372 * Please do not remove this and leave it at the end of the file, where
4373 * Emacs will automagically detect them.
4374 * ---------------------------------------------------------------------
4377 * indent-tabs-mode: t
4381 * vim:noexpandtab:sw=4:ts=4: