1 /* jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003
4 Institut f. Computersprachen, TU Wien
5 R. Grafl, A. Krall, C. Kruegel, C. Oates, R. Obermaisser, M. Probst,
6 S. Ring, E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich,
9 This file is part of CACAO.
11 This program is free software; you can redistribute it and/or
12 modify it under the terms of the GNU General Public License as
13 published by the Free Software Foundation; either version 2, or (at
14 your option) any later version.
16 This program is distributed in the hope that it will be useful, but
17 WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 General Public License for more details.
21 You should have received a copy of the GNU General Public License
22 along with this program; if not, write to the Free Software
23 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
26 Contact: cacao@complang.tuwien.ac.at
28 Authors: Andreas Krall
31 $Id: codegen.c 1680 2004-12-04 12:02:08Z jowenn $
41 #include "native/native.h"
42 /* #include "native/jni.h" */
43 #include "vm/global.h"
44 #include "vm/builtin.h"
45 #include "vm/loader.h"
46 #include "vm/tables.h"
47 #include "vm/jit/asmpart.h"
48 #include "vm/jit/jit.h"
49 #include "vm/jit/reg.h"
50 #include "vm/jit/parse.h"
51 #include "vm/jit/x86_64/arch.h"
52 #include "vm/jit/x86_64/codegen.h"
53 #include "vm/jit/x86_64/emitfuncs.h"
54 #include "vm/jit/x86_64/types.h"
55 #include "vm/jit/x86_64/asmoffsets.h"
57 #include "vm/jit/stacktrace.inc"
60 /* register descripton - array ************************************************/
62 /* #define REG_RES 0 reserved register for OS or code generator */
63 /* #define REG_RET 1 return value register */
64 /* #define REG_EXC 2 exception value register (only old jit) */
65 /* #define REG_SAV 3 (callee) saved register */
66 /* #define REG_TMP 4 scratch temporary register (caller saved) */
67 /* #define REG_ARG 5 argument register (caller saved) */
69 /* #define REG_END -1 last entry in tables */
/* Usage class of each x86_64 integer register, indexed by hardware
   register number (rax, rcx, rdx, rbx, rsp, rbp, rsi, rdi, r8..r15).
   rax is the return register, rsp/r10/r11 are reserved/scratch for the
   code generator, rbx and r12-r15 are callee-saved, and rdi/rsi/rdx/
   rcx/r8/r9 carry arguments per the System V AMD64 calling convention.
   NOTE(review): the REG_END terminator and closing brace are not
   visible in this excerpt -- confirm they are present in the full file. */
71 static int nregdescint[] = {
72 	REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
73 	REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
/* Usage class of each x86_64 floating-point register (xmm0..xmm15).
   xmm0-xmm3 are argument registers, the rest are temporaries/reserved.
   The commented-out rows above the live initializer preserve an earlier
   allocation scheme that used callee-saved float registers (x86_64 has
   no callee-saved xmm registers in the SysV ABI, hence the change).
   NOTE(review): the REG_END terminator and closing brace are not
   visible in this excerpt -- confirm they are present in the full file. */
78 static int nregdescfloat[] = {
79 /* REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP, */
80 /* REG_RES, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV, REG_SAV, */
81 	REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
82 	REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
87 /* Include independent code generation stuff -- include after register */
88 /* descriptions to avoid extern definitions. */
90 #include "vm/jit/codegen.inc"
91 #include "vm/jit/reg.inc"
93 #include "vm/jit/lsra.inc"
97 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* Restart a thread that was interrupted inside a JIT critical section:
   look up the faulting instruction pointer in the critical-section
   table and, if it lies inside one, rewind RIP to the section's
   restart point so the section is re-executed atomically. */
98 void thread_restartcriticalsection(ucontext_t *uc)
/* Map the interrupted RIP to a critical-section restart address
   (thread_checkcritical presumably returns NULL when the RIP is not
   inside a registered critical section -- confirm in its definition). */
102 	critical = thread_checkcritical((void *) uc->uc_mcontext.gregs[REG_RIP]);
/* Redirect execution to the restart point by patching the saved RIP. */
105 		uc->uc_mcontext.gregs[REG_RIP] = (u8) critical;
110 /* NullPointerException signal handler for hardware null pointer check */
/* Signal handler that converts a hardware fault (SIGSEGV/SIGBUS from a
   null-pointer access) into a Java NullPointerException: it allocates
   the exception object and patches the saved register context so that,
   on return from the handler, execution resumes in
   asm_handle_exception with the exception pointer and faulting PC in
   the registers that routine expects. */
112 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
116 /* 	long faultaddr; */
/* Recover the machine context of the faulting thread from the opaque
   third sigaction argument. */
118 	struct ucontext *_uc = (struct ucontext *) _p;
119 	struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
120 	struct sigaction act;
121 	java_objectheader *xptr;
123 	/* Reset signal handler - necessary for SysV, does no harm for BSD */
126 /* 	instr = *((int*)(sigctx->rip)); */
127 /* 	faultaddr = sigctx->sc_regs[(instr >> 16) & 0x1f]; */
129 /* 	if (faultaddr == 0) { */
130 	act.sa_sigaction = (functionptr) catch_NullPointerException; /* reinstall handler */
131 	act.sa_flags = SA_SIGINFO;
132 	sigaction(sig, &act, NULL);
/* Unblock the signal so a subsequent fault of the same kind is
   delivered again (the kernel blocks it while the handler runs). */
135 	sigaddset(&nsig, sig);
136 	sigprocmask(SIG_UNBLOCK, &nsig, NULL);               /* unblock signal    */
/* Build the Java exception object to hand to the exception handler. */
138 	xptr = new_nullpointerexception();
/* Patch the saved context: rax carries the exception object, r10 the
   faulting PC, and RIP is redirected so sigreturn lands in the
   assembler exception dispatcher instead of re-executing the fault. */
140 	sigctx->rax = (u8) xptr;                             /* REG_ITMP1_XPTR    */
141 	sigctx->r10 = sigctx->rip;                           /* REG_ITMP2_XPC     */
142 	sigctx->rip = (u8) asm_handle_exception;
147 /* 		faultaddr += (long) ((instr << 16) >> 16); */
148 /* 		fprintf(stderr, "faulting address: 0x%08x\n", faultaddr); */
149 /* 		panic("Stack overflow"); */
154 /* ArithmeticException signal handler for hardware divide by zero check */
/* Signal handler that converts a hardware SIGFPE (integer divide by
   zero) into a Java ArithmeticException, using the same
   context-patching scheme as catch_NullPointerException above. */
156 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
/* Recover the machine context of the faulting thread. */
160 	struct ucontext *_uc = (struct ucontext *) _p;
161 	struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
162 	struct sigaction act;
163 	java_objectheader *xptr;
165 	/* Reset signal handler - necessary for SysV, does no harm for BSD */
167 	act.sa_sigaction = (functionptr) catch_ArithmeticException; /* reinstall handler */
168 	act.sa_flags = SA_SIGINFO;
169 	sigaction(sig, &act, NULL);
/* Unblock the signal so further SIGFPEs are delivered. */
172 	sigaddset(&nsig, sig);
173 	sigprocmask(SIG_UNBLOCK, &nsig, NULL);               /* unblock signal    */
/* Build the Java exception object. */
175 	xptr = new_arithmeticexception();
/* Patch the saved context: exception object in rax, faulting PC in
   r10, and resume execution in the assembler exception dispatcher. */
177 	sigctx->rax = (u8) xptr;                             /* REG_ITMP1_XPTR    */
178 	sigctx->r10 = sigctx->rip;                           /* REG_ITMP2_XPC     */
179 	sigctx->rip = (u8) asm_handle_exception;
/* Install the signal handlers that translate hardware faults into Java
   exceptions: SIGSEGV and SIGBUS both map to NullPointerException
   (null dereferences may raise either depending on the platform), and
   SIGFPE maps to ArithmeticException. Called once at VM startup. */
185 void init_exceptions(void)
187 	struct sigaction act;
189 	/* install signal handlers we need to convert to exceptions */
/* Start from an empty blocked-signal mask for all three handlers. */
190 	sigemptyset(&act.sa_mask);
194 		act.sa_sigaction = (functionptr) catch_NullPointerException;
195 		act.sa_flags = SA_SIGINFO;
196 		sigaction(SIGSEGV, &act, NULL);
/* SIGBUS also reported for some null accesses (e.g. on BSDs). */
200 		act.sa_sigaction = (functionptr) catch_NullPointerException;
201 		act.sa_flags = SA_SIGINFO;
202 		sigaction(SIGBUS, &act, NULL);
/* Hardware divide-by-zero check. */
206 	act.sa_sigaction = (functionptr) catch_ArithmeticException;
207 	act.sa_flags = SA_SIGINFO;
208 	sigaction(SIGFPE, &act, NULL);
212 /* function gen_mcode **********************************************************
214 generates machine code
216 *******************************************************************************/
218 void codegen(methodinfo *m, codegendata *cd, registerdata *rd)
220 s4 len, s1, s2, s3, d;
235 /* space to save used callee saved registers */
237 savedregs_num += (rd->savintregcnt - rd->maxsavintreguse);
238 savedregs_num += (rd->savfltregcnt - rd->maxsavfltreguse);
240 parentargs_base = rd->maxmemuse + savedregs_num;
242 #if defined(USE_THREADS) /* space to save argument of monitor_enter */
244 if (checksync && (m->flags & ACC_SYNCHRONIZED))
249 /* keep stack 16-byte aligned for calls into native code e.g. libc or jni */
250 /* (alignment problems with movaps) */
252 if (!(parentargs_base & 0x1)) {
256 /* create method header */
258 (void) dseg_addaddress(cd, m); /* MethodPointer */
259 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
261 #if defined(USE_THREADS)
263 /* IsSync contains the offset relative to the stack pointer for the
264 argument of monitor_exit used in the exception handler. Since the
265 offset could be zero and give a wrong meaning of the flag it is
269 if (checksync && (m->flags & ACC_SYNCHRONIZED))
270 (void) dseg_adds4(cd, (rd->maxmemuse + 1) * 8); /* IsSync */
275 (void) dseg_adds4(cd, 0); /* IsSync */
277 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
278 (void) dseg_adds4(cd, rd->savintregcnt - rd->maxsavintreguse);/* IntSave */
279 (void) dseg_adds4(cd, rd->savfltregcnt - rd->maxsavfltreguse);/* FltSave */
280 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
282 /* create exception table */
284 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
285 dseg_addtarget(cd, ex->start);
286 dseg_addtarget(cd, ex->end);
287 dseg_addtarget(cd, ex->handler);
288 (void) dseg_addaddress(cd, ex->catchtype);
291 /* initialize mcode variables */
293 cd->mcodeptr = (u1 *) cd->mcodebase;
294 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
295 MCODECHECK(128 + m->paramcount);
297 /* create stack frame (if necessary) */
299 if (parentargs_base) {
300 x86_64_alu_imm_reg(cd, X86_64_SUB, parentargs_base * 8, REG_SP);
303 /* save return address and used callee saved registers */
306 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
307 p--; x86_64_mov_reg_membase(cd, rd->savintregs[i], REG_SP, p * 8);
309 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
310 p--; x86_64_movq_reg_membase(cd, rd->savfltregs[i], REG_SP, p * 8);
313 /* save monitorenter argument */
315 #if defined(USE_THREADS)
316 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
317 if (m->flags & ACC_STATIC) {
318 x86_64_mov_imm_reg(cd, (s8) m->class, REG_ITMP1);
319 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, rd->maxmemuse * 8);
322 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, rd->maxmemuse * 8);
327 /* copy argument registers to stack and call trace function with pointer
328 to arguments on stack.
331 x86_64_alu_imm_reg(cd, X86_64_SUB, (6 + 8 + 1 + 1) * 8, REG_SP);
333 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 1 * 8);
334 x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 2 * 8);
335 x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 3 * 8);
336 x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 4 * 8);
337 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 5 * 8);
338 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 6 * 8);
340 x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 7 * 8);
341 x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 8 * 8);
342 x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 9 * 8);
343 x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 10 * 8);
344 /* x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 11 * 8); */
345 /* x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 12 * 8); */
346 /* x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 13 * 8); */
347 /* x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 14 * 8); */
349 for (p = 0, l = 0; p < m->paramcount; p++) {
350 t = m->paramtypes[p];
352 if (IS_FLT_DBL_TYPE(t)) {
353 for (s1 = (m->paramcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : m->paramcount - 2; s1 >= p; s1--) {
354 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
357 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
362 x86_64_mov_imm_reg(cd, (s8) m, REG_ITMP2);
363 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
364 x86_64_mov_imm_reg(cd, (s8) builtin_trace_args, REG_ITMP1);
365 x86_64_call_reg(cd, REG_ITMP1);
367 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[0]);
368 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[1]);
369 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[2]);
370 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[3]);
371 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[4]);
372 x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, rd->argintregs[5]);
374 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[0]);
375 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[1]);
376 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[2]);
377 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[3]);
378 /* x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[4]); */
379 /* x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[5]); */
380 /* x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[6]); */
381 /* x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, rd->argfltregs[7]); */
383 x86_64_alu_imm_reg(cd, X86_64_ADD, (6 + 8 + 1 + 1) * 8, REG_SP);
386 /* take arguments out of register or stack frame */
388 for (p = 0, l = 0, s1 = 0, s2 = 0; p < m->paramcount; p++) {
389 t = m->paramtypes[p];
390 var = &(rd->locals[l][t]);
392 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
395 if (IS_INT_LNG_TYPE(t)) {
402 if (IS_INT_LNG_TYPE(t)) { /* integer args */
403 if (s1 < INT_ARG_CNT) { /* register arguments */
404 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
405 M_INTMOVE(rd->argintregs[s1], var->regoff);
407 } else { /* reg arg -> spilled */
408 x86_64_mov_reg_membase(cd, rd->argintregs[s1], REG_SP, var->regoff * 8);
411 } else { /* stack arguments */
412 pa = s1 - INT_ARG_CNT;
413 if (s2 >= FLT_ARG_CNT) {
414 pa += s2 - FLT_ARG_CNT;
416 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
417 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff); /* + 8 for return address */
418 } else { /* stack arg -> spilled */
419 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1); /* + 8 for return address */
420 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, var->regoff * 8);
425 } else { /* floating args */
426 if (s2 < FLT_ARG_CNT) { /* register arguments */
427 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
428 M_FLTMOVE(rd->argfltregs[s2], var->regoff);
430 } else { /* reg arg -> spilled */
431 x86_64_movq_reg_membase(cd, rd->argfltregs[s2], REG_SP, var->regoff * 8);
434 } else { /* stack arguments */
435 pa = s2 - FLT_ARG_CNT;
436 if (s1 >= INT_ARG_CNT) {
437 pa += s1 - INT_ARG_CNT;
439 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
440 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff);
443 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
444 x86_64_movq_reg_membase(cd, REG_FTMP1, REG_SP, var->regoff * 8);
451 /* call monitorenter function */
453 #if defined(USE_THREADS)
454 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
455 s8 func_enter = (m->flags & ACC_STATIC) ?
456 (s8) builtin_staticmonitorenter : (s8) builtin_monitorenter;
457 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
458 x86_64_mov_imm_reg(cd, func_enter, REG_ITMP1);
459 x86_64_call_reg(cd, REG_ITMP1);
464 /* end of header generation */
466 /* walk through all basic blocks */
467 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
469 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
471 if (bptr->flags >= BBREACHED) {
473 /* branch resolving */
476 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
477 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
482 /* copy interface registers to their destination */
486 MCODECHECK(64 + len);
487 while (src != NULL) {
489 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
490 if (bptr->type == BBTYPE_SBR) {
491 d = reg_of_var(rd, src, REG_ITMP1);
492 x86_64_pop_reg(cd, d);
493 store_reg_to_var_int(src, d);
495 } else if (bptr->type == BBTYPE_EXH) {
496 d = reg_of_var(rd, src, REG_ITMP1);
497 M_INTMOVE(REG_ITMP1, d);
498 store_reg_to_var_int(src, d);
502 d = reg_of_var(rd, src, REG_ITMP1);
503 if ((src->varkind != STACKVAR)) {
505 if (IS_FLT_DBL_TYPE(s2)) {
506 s1 = rd->interfaces[len][s2].regoff;
507 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
511 x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
513 store_reg_to_var_flt(src, d);
516 s1 = rd->interfaces[len][s2].regoff;
517 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
521 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
523 store_reg_to_var_int(src, d);
530 /* walk through all instructions */
534 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
536 MCODECHECK(64); /* an instruction usually needs < 64 words */
539 case ICMD_NOP: /* ... ==> ... */
542 case ICMD_NULLCHECKPOP: /* ..., objectref ==> ... */
543 if (src->flags & INMEMORY) {
544 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
547 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
549 x86_64_jcc(cd, X86_64_CC_E, 0);
550 codegen_addxnullrefs(cd, cd->mcodeptr);
553 /* constant operations ************************************************/
555 case ICMD_ICONST: /* ... ==> ..., constant */
556 /* op1 = 0, val.i = constant */
558 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
559 if (iptr->val.i == 0) {
560 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
562 x86_64_movl_imm_reg(cd, iptr->val.i, d);
564 store_reg_to_var_int(iptr->dst, d);
567 case ICMD_ACONST: /* ... ==> ..., constant */
568 /* op1 = 0, val.a = constant */
570 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
571 if (iptr->val.a == 0) {
572 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
574 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, d);
576 store_reg_to_var_int(iptr->dst, d);
579 case ICMD_LCONST: /* ... ==> ..., constant */
580 /* op1 = 0, val.l = constant */
582 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
583 if (iptr->val.l == 0) {
584 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
586 x86_64_mov_imm_reg(cd, iptr->val.l, d);
588 store_reg_to_var_int(iptr->dst, d);
591 case ICMD_FCONST: /* ... ==> ..., constant */
592 /* op1 = 0, val.f = constant */
594 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
595 a = dseg_addfloat(cd, iptr->val.f);
596 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + a, d);
597 store_reg_to_var_flt(iptr->dst, d);
600 case ICMD_DCONST: /* ... ==> ..., constant */
601 /* op1 = 0, val.d = constant */
603 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
604 a = dseg_adddouble(cd, iptr->val.d);
605 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, d);
606 store_reg_to_var_flt(iptr->dst, d);
610 /* load/store operations **********************************************/
612 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
613 /* op1 = local variable */
615 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
616 if ((iptr->dst->varkind == LOCALVAR) &&
617 (iptr->dst->varnum == iptr->op1)) {
620 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
621 if (var->flags & INMEMORY) {
622 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
623 store_reg_to_var_int(iptr->dst, d);
626 if (iptr->dst->flags & INMEMORY) {
627 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
630 M_INTMOVE(var->regoff, d);
635 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
636 case ICMD_ALOAD: /* op1 = local variable */
638 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
639 if ((iptr->dst->varkind == LOCALVAR) &&
640 (iptr->dst->varnum == iptr->op1)) {
643 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
644 if (var->flags & INMEMORY) {
645 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
646 store_reg_to_var_int(iptr->dst, d);
649 if (iptr->dst->flags & INMEMORY) {
650 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
653 M_INTMOVE(var->regoff, d);
658 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
659 case ICMD_DLOAD: /* op1 = local variable */
661 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
662 if ((iptr->dst->varkind == LOCALVAR) &&
663 (iptr->dst->varnum == iptr->op1)) {
666 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
667 if (var->flags & INMEMORY) {
668 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
669 store_reg_to_var_flt(iptr->dst, d);
672 if (iptr->dst->flags & INMEMORY) {
673 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
676 M_FLTMOVE(var->regoff, d);
681 case ICMD_ISTORE: /* ..., value ==> ... */
682 case ICMD_LSTORE: /* op1 = local variable */
685 if ((src->varkind == LOCALVAR) &&
686 (src->varnum == iptr->op1)) {
689 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
690 if (var->flags & INMEMORY) {
691 var_to_reg_int(s1, src, REG_ITMP1);
692 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
695 var_to_reg_int(s1, src, var->regoff);
696 M_INTMOVE(s1, var->regoff);
700 case ICMD_FSTORE: /* ..., value ==> ... */
701 case ICMD_DSTORE: /* op1 = local variable */
703 if ((src->varkind == LOCALVAR) &&
704 (src->varnum == iptr->op1)) {
707 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
708 if (var->flags & INMEMORY) {
709 var_to_reg_flt(s1, src, REG_FTMP1);
710 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
713 var_to_reg_flt(s1, src, var->regoff);
714 M_FLTMOVE(s1, var->regoff);
719 /* pop/dup/swap operations ********************************************/
721 /* attention: double and longs are only one entry in CACAO ICMDs */
723 case ICMD_POP: /* ..., value ==> ... */
724 case ICMD_POP2: /* ..., value, value ==> ... */
727 case ICMD_DUP: /* ..., a ==> ..., a, a */
728 M_COPY(src, iptr->dst);
731 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
733 M_COPY(src, iptr->dst);
734 M_COPY(src->prev, iptr->dst->prev);
735 M_COPY(iptr->dst, iptr->dst->prev->prev);
738 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
740 M_COPY(src, iptr->dst);
741 M_COPY(src->prev, iptr->dst->prev);
742 M_COPY(src->prev->prev, iptr->dst->prev->prev);
743 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
746 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
748 M_COPY(src, iptr->dst);
749 M_COPY(src->prev, iptr->dst->prev);
752 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
754 M_COPY(src, iptr->dst);
755 M_COPY(src->prev, iptr->dst->prev);
756 M_COPY(src->prev->prev, iptr->dst->prev->prev);
757 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
758 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
761 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
763 M_COPY(src, iptr->dst);
764 M_COPY(src->prev, iptr->dst->prev);
765 M_COPY(src->prev->prev, iptr->dst->prev->prev);
766 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
767 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
768 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
771 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
773 M_COPY(src, iptr->dst->prev);
774 M_COPY(src->prev, iptr->dst);
778 /* integer operations *************************************************/
780 case ICMD_INEG: /* ..., value ==> ..., - value */
782 d = reg_of_var(rd, iptr->dst, REG_NULL);
783 if (iptr->dst->flags & INMEMORY) {
784 if (src->flags & INMEMORY) {
785 if (src->regoff == iptr->dst->regoff) {
786 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
789 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
790 x86_64_negl_reg(cd, REG_ITMP1);
791 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
795 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
796 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
800 if (src->flags & INMEMORY) {
801 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
802 x86_64_negl_reg(cd, d);
805 M_INTMOVE(src->regoff, iptr->dst->regoff);
806 x86_64_negl_reg(cd, iptr->dst->regoff);
811 case ICMD_LNEG: /* ..., value ==> ..., - value */
813 d = reg_of_var(rd, iptr->dst, REG_NULL);
814 if (iptr->dst->flags & INMEMORY) {
815 if (src->flags & INMEMORY) {
816 if (src->regoff == iptr->dst->regoff) {
817 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
820 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
821 x86_64_neg_reg(cd, REG_ITMP1);
822 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
826 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
827 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
831 if (src->flags & INMEMORY) {
832 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
833 x86_64_neg_reg(cd, iptr->dst->regoff);
836 M_INTMOVE(src->regoff, iptr->dst->regoff);
837 x86_64_neg_reg(cd, iptr->dst->regoff);
842 case ICMD_I2L: /* ..., value ==> ..., value */
844 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
845 if (src->flags & INMEMORY) {
846 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
849 x86_64_movslq_reg_reg(cd, src->regoff, d);
851 store_reg_to_var_int(iptr->dst, d);
854 case ICMD_L2I: /* ..., value ==> ..., value */
856 var_to_reg_int(s1, src, REG_ITMP1);
857 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
859 store_reg_to_var_int(iptr->dst, d);
862 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
864 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
865 if (src->flags & INMEMORY) {
866 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
869 x86_64_movsbq_reg_reg(cd, src->regoff, d);
871 store_reg_to_var_int(iptr->dst, d);
874 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
876 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
877 if (src->flags & INMEMORY) {
878 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
881 x86_64_movzwq_reg_reg(cd, src->regoff, d);
883 store_reg_to_var_int(iptr->dst, d);
886 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
888 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
889 if (src->flags & INMEMORY) {
890 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
893 x86_64_movswq_reg_reg(cd, src->regoff, d);
895 store_reg_to_var_int(iptr->dst, d);
899 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
901 d = reg_of_var(rd, iptr->dst, REG_NULL);
902 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
905 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
906 /* val.i = constant */
908 d = reg_of_var(rd, iptr->dst, REG_NULL);
909 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
912 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
914 d = reg_of_var(rd, iptr->dst, REG_NULL);
915 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
918 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
919 /* val.l = constant */
921 d = reg_of_var(rd, iptr->dst, REG_NULL);
922 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
925 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
927 d = reg_of_var(rd, iptr->dst, REG_NULL);
928 if (iptr->dst->flags & INMEMORY) {
929 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
930 if (src->prev->regoff == iptr->dst->regoff) {
931 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
932 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
935 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
936 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
937 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
940 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
941 M_INTMOVE(src->prev->regoff, REG_ITMP1);
942 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
943 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
945 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
946 if (src->prev->regoff == iptr->dst->regoff) {
947 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
950 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
951 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
952 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
956 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
957 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
961 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
962 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
963 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
965 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
966 M_INTMOVE(src->prev->regoff, d);
967 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
969 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
970 /* workaround for reg alloc */
971 if (src->regoff == iptr->dst->regoff) {
972 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
973 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
974 M_INTMOVE(REG_ITMP1, d);
977 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
978 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
982 /* workaround for reg alloc */
983 if (src->regoff == iptr->dst->regoff) {
984 M_INTMOVE(src->prev->regoff, REG_ITMP1);
985 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
986 M_INTMOVE(REG_ITMP1, d);
989 M_INTMOVE(src->prev->regoff, d);
990 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
996 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
997 /* val.i = constant */
999 d = reg_of_var(rd, iptr->dst, REG_NULL);
1000 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
1003 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1005 d = reg_of_var(rd, iptr->dst, REG_NULL);
1006 if (iptr->dst->flags & INMEMORY) {
1007 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1008 if (src->prev->regoff == iptr->dst->regoff) {
1009 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1010 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1013 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1014 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1015 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1018 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1019 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1020 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1021 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1023 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1024 if (src->prev->regoff == iptr->dst->regoff) {
1025 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1028 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1029 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1030 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1034 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1035 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1039 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1040 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1041 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1043 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1044 M_INTMOVE(src->prev->regoff, d);
1045 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1047 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1048 /* workaround for reg alloc */
1049 if (src->regoff == iptr->dst->regoff) {
1050 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1051 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1052 M_INTMOVE(REG_ITMP1, d);
1055 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1056 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1060 /* workaround for reg alloc */
1061 if (src->regoff == iptr->dst->regoff) {
1062 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1063 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1064 M_INTMOVE(REG_ITMP1, d);
1067 M_INTMOVE(src->prev->regoff, d);
1068 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1074 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1075 /* val.l = constant */
/* 64-bit subtract-immediate: delegated to the shared long-ALU-const emitter. */
1077 d = reg_of_var(rd, iptr->dst, REG_NULL);
1078 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1081 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
/* 32-bit multiply. Four-way dispatch on whether each operand lives in a
   register or in a stack slot (INMEMORY, at regoff*8 off REG_SP); when the
   destination is spilled the product is built in REG_ITMP1 and stored back. */
1083 d = reg_of_var(rd, iptr->dst, REG_NULL);
1084 if (iptr->dst->flags & INMEMORY) {
1085 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1086 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1087 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1088 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1090 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
/* imul is commutative, so the in-memory operand can be loaded first. */
1091 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1092 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1093 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1095 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1096 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1097 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1098 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* both operands in registers, destination spilled */
1101 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1102 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1103 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* destination in a register: multiply directly into dst->regoff */
1107 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1108 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1109 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1111 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1112 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1113 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1115 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1116 M_INTMOVE(src->regoff, iptr->dst->regoff);
1117 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
/* if dst aliases the second operand, avoid clobbering it with the move */
1120 if (src->regoff == iptr->dst->regoff) {
1121 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1124 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1125 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1131 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1132 /* val.i = constant */
/* 32-bit multiply by immediate, using the 3-operand imul form so no
   separate load of the constant is needed. */
1134 d = reg_of_var(rd, iptr->dst, REG_NULL);
1135 if (iptr->dst->flags & INMEMORY) {
1136 if (src->flags & INMEMORY) {
1137 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1138 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1141 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1142 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1146 if (src->flags & INMEMORY) {
1147 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
/* strength-reduce *2 to an add (cheaper than imul) */
1150 if (iptr->val.i == 2) {
1151 M_INTMOVE(src->regoff, iptr->dst->regoff);
1152 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1155 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
1161 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
/* 64-bit multiply: same four-way INMEMORY dispatch as ICMD_IMUL above,
   but with the quadword mov/imul forms. */
1163 d = reg_of_var(rd, iptr->dst, REG_NULL);
1164 if (iptr->dst->flags & INMEMORY) {
1165 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1166 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1167 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1168 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1170 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1171 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1172 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1173 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1175 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1176 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1177 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1178 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* both operands in registers, destination spilled */
1181 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1182 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1183 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* destination in a register */
1187 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1188 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1189 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1191 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1192 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1193 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1195 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1196 M_INTMOVE(src->regoff, iptr->dst->regoff);
1197 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
/* if dst aliases the second operand, avoid clobbering it with the move */
1200 if (src->regoff == iptr->dst->regoff) {
1201 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1204 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1205 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
1211 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1212 /* val.l = constant */
/* 64-bit multiply by constant. x86-64 imul only takes a 32-bit immediate,
   so a constant outside imm32 range is materialized in a register first. */
1214 d = reg_of_var(rd, iptr->dst, REG_NULL);
1215 if (iptr->dst->flags & INMEMORY) {
1216 if (src->flags & INMEMORY) {
1217 if (x86_64_is_imm32(iptr->val.l)) {
1218 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1221 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1222 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1224 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1227 if (x86_64_is_imm32(iptr->val.l)) {
1228 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1231 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1232 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1234 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1238 if (src->flags & INMEMORY) {
1239 if (x86_64_is_imm32(iptr->val.l)) {
1240 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1243 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1244 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1248 /* should match in many cases */
/* strength-reduce *2 to an add */
1249 if (iptr->val.l == 2) {
1250 M_INTMOVE(src->regoff, iptr->dst->regoff);
1251 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1254 if (x86_64_is_imm32(iptr->val.l)) {
1255 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1258 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1259 M_INTMOVE(src->regoff, iptr->dst->regoff);
1260 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
1267 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
/* 32-bit signed divide. Dividend goes to RAX (idiv's fixed input),
   divisor to REG_ITMP3. The INT_MIN / -1 case would fault in hardware,
   so it is detected inline and the idiv is skipped (result == dividend
   already in RAX), as required by the JVM spec. NOTE(review): the jcc
   displacements below are hand-counted byte lengths of the skipped
   instructions — they must stay in sync with any emitter change. */
1269 d = reg_of_var(rd, iptr->dst, REG_NULL);
1270 if (src->prev->flags & INMEMORY) {
1271 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1274 M_INTMOVE(src->prev->regoff, RAX);
1277 if (src->flags & INMEMORY) {
1278 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1281 M_INTMOVE(src->regoff, REG_ITMP3);
1285 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1286 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1287 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1288 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1290 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1292 x86_64_idivl_reg(cd, REG_ITMP3);
/* quotient is in RAX; RDX was clobbered by idiv and is restored below */
1294 if (iptr->dst->flags & INMEMORY) {
1295 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1296 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1299 M_INTMOVE(RAX, iptr->dst->regoff);
/* don't restore RDX if the result itself was allocated to RDX */
1301 if (iptr->dst->regoff != RDX) {
1302 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1307 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
/* 32-bit signed remainder: same shape as ICMD_IDIV, but the INT_MIN % -1
   fast path zeroes RDX (remainder 0) and the result is taken from RDX. */
1309 d = reg_of_var(rd, iptr->dst, REG_NULL);
1310 if (src->prev->flags & INMEMORY) {
1311 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1314 M_INTMOVE(src->prev->regoff, RAX);
1317 if (src->flags & INMEMORY) {
1318 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1321 M_INTMOVE(src->regoff, REG_ITMP3);
1325 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1326 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1327 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1328 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1329 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1331 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1333 x86_64_idivl_reg(cd, REG_ITMP3);
/* remainder is in RDX */
1335 if (iptr->dst->flags & INMEMORY) {
1336 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1337 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1340 M_INTMOVE(RDX, iptr->dst->regoff);
1342 if (iptr->dst->regoff != RDX) {
1343 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1348 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1349 /* val.i = constant */
/* Signed divide by 2^val.i via arithmetic shift. Plain SAR rounds toward
   negative infinity, Java idiv rounds toward zero, so for negative
   dividends (2^n - 1) is added first: lea computes value + bias, cmov
   selects it when value < 0 (CC_LE after cmp -1). */
1351 var_to_reg_int(s1, src, REG_ITMP1);
1352 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1353 M_INTMOVE(s1, REG_ITMP1);
1354 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1355 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1356 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1357 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1358 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1359 store_reg_to_var_int(iptr->dst, d);
1362 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1363 /* val.i = constant */
/* Signed remainder by a power of two; here val.i is already the mask
   (2^n - 1). Computes value - (value rounded toward zero to a multiple
   of 2^n): bias negative values, clear the low bits, subtract. */
1365 var_to_reg_int(s1, src, REG_ITMP1);
1366 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1367 M_INTMOVE(s1, REG_ITMP1);
1368 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1369 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1370 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1371 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1372 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1373 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1374 store_reg_to_var_int(iptr->dst, d);
1378 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
/* 64-bit signed divide: same structure as ICMD_IDIV, but the
   LONG_MIN sentinel does not fit an imm32, so it is loaded into
   REG_ITMP2 for the compare. The dividend is staged through REG_ITMP1
   here (presumably moved to RAX on an elided line) — the byte-counted
   jcc displacements must stay in sync with the emitted code. */
1380 d = reg_of_var(rd, iptr->dst, REG_NULL);
1381 if (src->prev->flags & INMEMORY) {
1382 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1385 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1388 if (src->flags & INMEMORY) {
1389 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1392 M_INTMOVE(src->regoff, REG_ITMP3);
1396 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1397 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1398 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1399 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1400 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1402 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1404 x86_64_idiv_reg(cd, REG_ITMP3);
/* quotient in RAX */
1406 if (iptr->dst->flags & INMEMORY) {
1407 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1408 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1411 M_INTMOVE(RAX, iptr->dst->regoff);
1413 if (iptr->dst->regoff != RDX) {
1414 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1419 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
/* 64-bit signed remainder: LONG_MIN % -1 fast path zeroes RDX and skips
   the idiv; result taken from RDX. */
1421 d = reg_of_var(rd, iptr->dst, REG_NULL);
1422 if (src->prev->flags & INMEMORY) {
1423 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1426 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1429 if (src->flags & INMEMORY) {
1430 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1433 M_INTMOVE(src->regoff, REG_ITMP3);
1437 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1438 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1439 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1440 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1441 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1442 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1444 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1446 x86_64_idiv_reg(cd, REG_ITMP3);
/* remainder in RDX */
1448 if (iptr->dst->flags & INMEMORY) {
1449 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1450 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1453 M_INTMOVE(RDX, iptr->dst->regoff);
1455 if (iptr->dst->regoff != RDX) {
1456 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1461 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1462 /* val.i = constant */
/* 64-bit divide by 2^val.i via arithmetic shift with round-toward-zero
   bias, mirroring ICMD_IDIVPOW2. NOTE(review): the bias
   (1 << iptr->val.i) - 1 is computed in 32-bit int arithmetic — for
   val.i >= 31 this overflows/truncates; confirm upstream guarantees a
   small shift count. */
1464 var_to_reg_int(s1, src, REG_ITMP1);
1465 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1466 M_INTMOVE(s1, REG_ITMP1);
1467 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1468 x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1469 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1470 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1471 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1472 store_reg_to_var_int(iptr->dst, d);
1475 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1476 /* val.l = constant */
/* 64-bit remainder by a power of two; val.i already holds the mask
   (2^n - 1), same bias-mask-subtract scheme as ICMD_IREMPOW2. */
1478 var_to_reg_int(s1, src, REG_ITMP1);
1479 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1480 M_INTMOVE(s1, REG_ITMP1);
1481 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1482 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1483 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1484 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1485 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1486 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1487 store_reg_to_var_int(iptr->dst, d);
/* Shift and bitwise-logical opcodes. All of these delegate to the shared
   emitters in emitfuncs.c, which handle the register/memory operand
   combinations: x86_64_emit_ishift/lshift (variable count),
   *_ishiftconst/lshiftconst (immediate count), and *_ialu(const)/
   *_lalu(const) for AND/OR/XOR. The 'l' variants operate on 64-bit longs,
   the 'i' variants on 32-bit ints; SHL/SAR/SHR map to Java <<, >>, >>>. */
1490 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1492 d = reg_of_var(rd, iptr->dst, REG_NULL);
1493 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1496 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1497 /* val.i = constant */
1499 d = reg_of_var(rd, iptr->dst, REG_NULL);
1500 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1503 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1505 d = reg_of_var(rd, iptr->dst, REG_NULL);
1506 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1509 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1510 /* val.i = constant */
1512 d = reg_of_var(rd, iptr->dst, REG_NULL);
1513 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1516 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1518 d = reg_of_var(rd, iptr->dst, REG_NULL);
1519 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1522 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1523 /* val.i = constant */
1525 d = reg_of_var(rd, iptr->dst, REG_NULL);
1526 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1529 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1531 d = reg_of_var(rd, iptr->dst, REG_NULL);
1532 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1535 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1536 /* val.i = constant */
1538 d = reg_of_var(rd, iptr->dst, REG_NULL);
1539 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1542 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1544 d = reg_of_var(rd, iptr->dst, REG_NULL);
1545 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1548 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1549 /* val.i = constant */
1551 d = reg_of_var(rd, iptr->dst, REG_NULL);
1552 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1555 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1557 d = reg_of_var(rd, iptr->dst, REG_NULL);
1558 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1561 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1562 /* val.l = constant */
1564 d = reg_of_var(rd, iptr->dst, REG_NULL);
1565 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1568 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1570 d = reg_of_var(rd, iptr->dst, REG_NULL);
1571 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1574 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1575 /* val.i = constant */
1577 d = reg_of_var(rd, iptr->dst, REG_NULL);
1578 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1581 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1583 d = reg_of_var(rd, iptr->dst, REG_NULL);
1584 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1587 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1588 /* val.l = constant */
1590 d = reg_of_var(rd, iptr->dst, REG_NULL);
1591 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1594 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1596 d = reg_of_var(rd, iptr->dst, REG_NULL);
1597 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1600 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1601 /* val.i = constant */
1603 d = reg_of_var(rd, iptr->dst, REG_NULL);
1604 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1607 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1609 d = reg_of_var(rd, iptr->dst, REG_NULL);
1610 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1613 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1614 /* val.l = constant */
1616 d = reg_of_var(rd, iptr->dst, REG_NULL);
1617 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1620 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1622 d = reg_of_var(rd, iptr->dst, REG_NULL);
1623 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1626 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1627 /* val.i = constant */
1629 d = reg_of_var(rd, iptr->dst, REG_NULL);
1630 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1633 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1635 d = reg_of_var(rd, iptr->dst, REG_NULL);
1636 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1639 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1640 /* val.l = constant */
1642 d = reg_of_var(rd, iptr->dst, REG_NULL);
1643 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
1647 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1648 /* op1 = variable, val.i = constant */
1650 /* using inc and dec is definitely faster than add -- tested */
/* In-place increment of a local variable (op1 indexes rd->locals).
   The local may live on the stack (INMEMORY, slot d*8 off REG_SP) or in
   a register; +-1 is peepholed to inc/dec, anything else uses addl. */
1653 var = &(rd->locals[iptr->op1][TYPE_INT]);
1655 if (var->flags & INMEMORY) {
1656 if (iptr->val.i == 1) {
1657 x86_64_incl_membase(cd, REG_SP, d * 8);
1659 } else if (iptr->val.i == -1) {
1660 x86_64_decl_membase(cd, REG_SP, d * 8);
1663 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1667 if (iptr->val.i == 1) {
1668 x86_64_incl_reg(cd, d);
1670 } else if (iptr->val.i == -1) {
1671 x86_64_decl_reg(cd, d);
1674 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1680 /* floating operations ************************************************/
1682 case ICMD_FNEG: /* ..., value ==> ..., - value */
/* Float negation: XOR the sign bit. The 0x80000000 mask is placed in the
   data segment and loaded RIP-relative; the displacement is computed from
   the current code position (mcodeptr + instruction length 9) back to the
   dseg entry 'a'. */
1684 var_to_reg_flt(s1, src, REG_FTMP1);
1685 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1686 a = dseg_adds4(cd, 0x80000000);
1688 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1689 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1690 store_reg_to_var_flt(iptr->dst, d);
1693 case ICMD_DNEG: /* ..., value ==> ..., - value */
/* Double negation: XOR the 64-bit sign bit, mask loaded from dseg. */
1695 var_to_reg_flt(s1, src, REG_FTMP1);
1696 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1697 a = dseg_adds8(cd, 0x8000000000000000);
1699 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1700 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1701 store_reg_to_var_flt(iptr->dst, d);
1704 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
/* SSE scalar add; addition is commutative so whichever source already
   aliases d is kept in place. */
1706 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1707 var_to_reg_flt(s2, src, REG_FTMP2);
1708 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1710 x86_64_addss_reg_reg(cd, s2, d);
1711 } else if (s2 == d) {
1712 x86_64_addss_reg_reg(cd, s1, d);
1715 x86_64_addss_reg_reg(cd, s2, d);
1717 store_reg_to_var_flt(iptr->dst, d);
1720 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1722 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1723 var_to_reg_flt(s2, src, REG_FTMP2);
1724 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1726 x86_64_addsd_reg_reg(cd, s2, d);
1727 } else if (s2 == d) {
1728 x86_64_addsd_reg_reg(cd, s1, d);
1731 x86_64_addsd_reg_reg(cd, s2, d);
1733 store_reg_to_var_flt(iptr->dst, d);
1736 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
/* Subtraction is not commutative: if d aliases s2, s2 is first rescued
   into REG_FTMP2 before d is overwritten with s1. */
1738 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1739 var_to_reg_flt(s2, src, REG_FTMP2);
1740 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1742 M_FLTMOVE(s2, REG_FTMP2);
1746 x86_64_subss_reg_reg(cd, s2, d);
1747 store_reg_to_var_flt(iptr->dst, d);
1750 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1752 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1753 var_to_reg_flt(s2, src, REG_FTMP2);
1754 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1756 M_FLTMOVE(s2, REG_FTMP2);
1760 x86_64_subsd_reg_reg(cd, s2, d);
1761 store_reg_to_var_flt(iptr->dst, d);
1764 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
/* Commutative, same aliasing strategy as FADD. */
1766 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1767 var_to_reg_flt(s2, src, REG_FTMP2);
1768 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1770 x86_64_mulss_reg_reg(cd, s2, d);
1771 } else if (s2 == d) {
1772 x86_64_mulss_reg_reg(cd, s1, d);
1775 x86_64_mulss_reg_reg(cd, s2, d);
1777 store_reg_to_var_flt(iptr->dst, d);
1780 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1782 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1783 var_to_reg_flt(s2, src, REG_FTMP2);
1784 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1786 x86_64_mulsd_reg_reg(cd, s2, d);
1787 } else if (s2 == d) {
1788 x86_64_mulsd_reg_reg(cd, s1, d);
1791 x86_64_mulsd_reg_reg(cd, s2, d);
1793 store_reg_to_var_flt(iptr->dst, d);
1796 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
/* Non-commutative, same aliasing rescue as FSUB. */
1798 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1799 var_to_reg_flt(s2, src, REG_FTMP2);
1800 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1802 M_FLTMOVE(s2, REG_FTMP2);
1806 x86_64_divss_reg_reg(cd, s2, d);
1807 store_reg_to_var_flt(iptr->dst, d);
1810 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1812 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1813 var_to_reg_flt(s2, src, REG_FTMP2);
1814 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1816 M_FLTMOVE(s2, REG_FTMP2);
1820 x86_64_divsd_reg_reg(cd, s2, d);
1821 store_reg_to_var_flt(iptr->dst, d);
1824 case ICMD_I2F: /* ..., value ==> ..., (float) value */
/* int -> float via cvtsi2ss (32-bit source form). */
1826 var_to_reg_int(s1, src, REG_ITMP1);
1827 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1828 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1829 store_reg_to_var_flt(iptr->dst, d);
1832 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1834 var_to_reg_int(s1, src, REG_ITMP1);
1835 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1836 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1837 store_reg_to_var_flt(iptr->dst, d);
1840 case ICMD_L2F: /* ..., value ==> ..., (float) value */
/* long -> float: quadword-source form of cvtsi2ss. */
1842 var_to_reg_int(s1, src, REG_ITMP1);
1843 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1844 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1845 store_reg_to_var_flt(iptr->dst, d);
1848 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1850 var_to_reg_int(s1, src, REG_ITMP1);
1851 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1852 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1853 store_reg_to_var_flt(iptr->dst, d);
1856 case ICMD_F2I: /* ..., value ==> ..., (int) value */
/* float -> int with truncation. cvttss2si yields 0x80000000 on overflow
   or NaN; that sentinel triggers a fallback call to asm_builtin_f2i,
   which produces the JVM-mandated saturating result. 'a' is the
   byte-counted length of the fallback sequence skipped by the jcc. */
1858 var_to_reg_flt(s1, src, REG_FTMP1);
1859 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1860 x86_64_cvttss2si_reg_reg(cd, s1, d);
1861 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1862 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1863 x86_64_jcc(cd, X86_64_CC_NE, a);
1864 M_FLTMOVE(s1, REG_FTMP1);
1865 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1866 x86_64_call_reg(cd, REG_ITMP2);
1867 M_INTMOVE(REG_RESULT, d);
1868 store_reg_to_var_int(iptr->dst, d);
1871 case ICMD_D2I: /* ..., value ==> ..., (int) value */
/* double -> int; same sentinel-and-fallback pattern as F2I. */
1873 var_to_reg_flt(s1, src, REG_FTMP1);
1874 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1875 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1876 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1877 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1878 x86_64_jcc(cd, X86_64_CC_NE, a);
1879 M_FLTMOVE(s1, REG_FTMP1);
1880 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1881 x86_64_call_reg(cd, REG_ITMP2);
1882 M_INTMOVE(REG_RESULT, d);
1883 store_reg_to_var_int(iptr->dst, d);
1886 case ICMD_F2L: /* ..., value ==> ..., (long) value */
/* float -> long; overflow/NaN sentinel is 0x8000000000000000, which
   needs a register compare since it exceeds imm32. */
1888 var_to_reg_flt(s1, src, REG_FTMP1);
1889 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1890 x86_64_cvttss2siq_reg_reg(cd, s1, d);
1891 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1892 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1893 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1894 x86_64_jcc(cd, X86_64_CC_NE, a);
1895 M_FLTMOVE(s1, REG_FTMP1);
1896 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
1897 x86_64_call_reg(cd, REG_ITMP2);
1898 M_INTMOVE(REG_RESULT, d);
1899 store_reg_to_var_int(iptr->dst, d);
1902 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1904 var_to_reg_flt(s1, src, REG_FTMP1);
1905 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1906 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1907 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1908 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1909 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1910 x86_64_jcc(cd, X86_64_CC_NE, a);
1911 M_FLTMOVE(s1, REG_FTMP1);
1912 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
1913 x86_64_call_reg(cd, REG_ITMP2);
1914 M_INTMOVE(REG_RESULT, d);
1915 store_reg_to_var_int(iptr->dst, d);
1918 case ICMD_F2D: /* ..., value ==> ..., (double) value */
/* Widening conversion, always exact — single instruction, no fallback. */
1920 var_to_reg_flt(s1, src, REG_FTMP1);
1921 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1922 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1923 store_reg_to_var_flt(iptr->dst, d);
1926 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1928 var_to_reg_flt(s1, src, REG_FTMP1);
1929 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1930 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1931 store_reg_to_var_flt(iptr->dst, d);
1934 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1935 /* == => 0, < => 1, > => -1 */
/* Branch-free compare: start from 0, ucomiss sets CF/ZF/PF, then cmovs
   pick 1 (REG_ITMP1) or -1 (REG_ITMP2). PF (unordered / NaN) maps to the
   GT result for the L-variant, per the result convention in the comment
   above. */
1937 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1938 var_to_reg_flt(s2, src, REG_FTMP2);
1939 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1940 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1941 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1942 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1943 x86_64_ucomiss_reg_reg(cd, s1, s2);
1944 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1945 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1946 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1947 store_reg_to_var_int(iptr->dst, d);
1950 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1951 /* == => 0, < => 1, > => -1 */
/* G-variant: identical except unordered (PF) maps to the LT result. */
1953 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1954 var_to_reg_flt(s2, src, REG_FTMP2);
1955 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1956 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1957 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1958 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1959 x86_64_ucomiss_reg_reg(cd, s1, s2);
1960 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1961 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1962 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1963 store_reg_to_var_int(iptr->dst, d);
1966 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1967 /* == => 0, < => 1, > => -1 */
/* Double-precision versions of the two cases above (ucomisd). */
1969 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1970 var_to_reg_flt(s2, src, REG_FTMP2);
1971 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1972 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1973 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1974 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1975 x86_64_ucomisd_reg_reg(cd, s1, s2);
1976 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1977 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1978 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1979 store_reg_to_var_int(iptr->dst, d);
1982 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1983 /* == => 0, < => 1, > => -1 */
1985 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1986 var_to_reg_flt(s2, src, REG_FTMP2);
1987 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1988 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1989 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1990 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1991 x86_64_ucomisd_reg_reg(cd, s1, s2);
1992 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1993 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1994 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1995 store_reg_to_var_int(iptr->dst, d);
1999 /* memory operations **************************************************/
2001 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
/* Load the 32-bit length field from the array header. */
2003 var_to_reg_int(s1, src, REG_ITMP1);
2004 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2005 gen_nullptr_check(s1);
2006 x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
2007 store_reg_to_var_int(iptr->dst, d);
/* Array loads: s1 = arrayref, s2 = index; element address is
   data[0]-offset + (s2 << scale) off s1. The scale encodes the element
   size (3 = 8 bytes, 2 = 4, 1 = 2, 0 = 1). The null check is elided
   when iptr->op1 != 0; a bounds check presumably sits on the elided
   lines inside each op1==0 branch. */
2010 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2012 var_to_reg_int(s1, src->prev, REG_ITMP1);
2013 var_to_reg_int(s2, src, REG_ITMP2);
2014 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2015 if (iptr->op1 == 0) {
2016 gen_nullptr_check(s1);
2019 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2020 store_reg_to_var_int(iptr->dst, d);
2023 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2025 var_to_reg_int(s1, src->prev, REG_ITMP1);
2026 var_to_reg_int(s2, src, REG_ITMP2);
2027 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2028 if (iptr->op1 == 0) {
2029 gen_nullptr_check(s1);
2032 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2033 store_reg_to_var_int(iptr->dst, d);
2036 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2038 var_to_reg_int(s1, src->prev, REG_ITMP1);
2039 var_to_reg_int(s2, src, REG_ITMP2);
2040 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2041 if (iptr->op1 == 0) {
2042 gen_nullptr_check(s1);
2045 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2046 store_reg_to_var_int(iptr->dst, d);
2049 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2051 var_to_reg_int(s1, src->prev, REG_ITMP1);
2052 var_to_reg_int(s2, src, REG_ITMP2);
2053 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2054 if (iptr->op1 == 0) {
2055 gen_nullptr_check(s1);
2058 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2059 store_reg_to_var_flt(iptr->dst, d);
2062 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2064 var_to_reg_int(s1, src->prev, REG_ITMP1);
2065 var_to_reg_int(s2, src, REG_ITMP2);
2066 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2067 if (iptr->op1 == 0) {
2068 gen_nullptr_check(s1);
2071 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2072 store_reg_to_var_flt(iptr->dst, d);
2075 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
/* char is unsigned 16-bit: zero-extending load */
2077 var_to_reg_int(s1, src->prev, REG_ITMP1);
2078 var_to_reg_int(s2, src, REG_ITMP2);
2079 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2080 if (iptr->op1 == 0) {
2081 gen_nullptr_check(s1);
2084 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2085 store_reg_to_var_int(iptr->dst, d);
2088 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
/* short is signed 16-bit: sign-extending load */
2090 var_to_reg_int(s1, src->prev, REG_ITMP1);
2091 var_to_reg_int(s2, src, REG_ITMP2);
2092 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2093 if (iptr->op1 == 0) {
2094 gen_nullptr_check(s1);
2097 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2098 store_reg_to_var_int(iptr->dst, d);
2101 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
/* byte is signed 8-bit: sign-extending load */
2103 var_to_reg_int(s1, src->prev, REG_ITMP1);
2104 var_to_reg_int(s2, src, REG_ITMP2);
2105 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2106 if (iptr->op1 == 0) {
2107 gen_nullptr_check(s1);
2110 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2111 store_reg_to_var_int(iptr->dst, d);
/* Array stores: s1 = arrayref, s2 = index, s3 = value. Addressing and
   null-check elision mirror the load cases above. NOTE(review): no
   array-store (type) check is visible in AASTORE here — confirm whether
   it is handled elsewhere or on elided lines. */
2115 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2117 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2118 var_to_reg_int(s2, src->prev, REG_ITMP2);
2119 if (iptr->op1 == 0) {
2120 gen_nullptr_check(s1);
2123 var_to_reg_int(s3, src, REG_ITMP3);
2124 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2127 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2129 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2130 var_to_reg_int(s2, src->prev, REG_ITMP2);
2131 if (iptr->op1 == 0) {
2132 gen_nullptr_check(s1);
2135 var_to_reg_int(s3, src, REG_ITMP3);
2136 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2139 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2141 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2142 var_to_reg_int(s2, src->prev, REG_ITMP2);
2143 if (iptr->op1 == 0) {
2144 gen_nullptr_check(s1);
2147 var_to_reg_int(s3, src, REG_ITMP3);
2148 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2151 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2153 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2154 var_to_reg_int(s2, src->prev, REG_ITMP2);
2155 if (iptr->op1 == 0) {
2156 gen_nullptr_check(s1);
2159 var_to_reg_flt(s3, src, REG_FTMP3);
2160 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2163 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2165 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2166 var_to_reg_int(s2, src->prev, REG_ITMP2);
2167 if (iptr->op1 == 0) {
2168 gen_nullptr_check(s1);
2171 var_to_reg_flt(s3, src, REG_FTMP3);
2172 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2175 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
/* char and short both store as 16-bit words */
2177 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2178 var_to_reg_int(s2, src->prev, REG_ITMP2);
2179 if (iptr->op1 == 0) {
2180 gen_nullptr_check(s1);
2183 var_to_reg_int(s3, src, REG_ITMP3);
2184 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2187 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2189 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2190 var_to_reg_int(s2, src->prev, REG_ITMP2);
2191 if (iptr->op1 == 0) {
2192 gen_nullptr_check(s1);
2195 var_to_reg_int(s3, src, REG_ITMP3);
2196 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2199 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2201 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2202 var_to_reg_int(s2, src->prev, REG_ITMP2);
2203 if (iptr->op1 == 0) {
2204 gen_nullptr_check(s1);
2207 var_to_reg_int(s3, src, REG_ITMP3);
2208 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2211 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2213 var_to_reg_int(s1, src->prev, REG_ITMP1);
2214 var_to_reg_int(s2, src, REG_ITMP2);
2215 if (iptr->op1 == 0) {
2216 gen_nullptr_check(s1);
2219 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2222 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2224 var_to_reg_int(s1, src->prev, REG_ITMP1);
2225 var_to_reg_int(s2, src, REG_ITMP2);
2226 if (iptr->op1 == 0) {
2227 gen_nullptr_check(s1);
2231 if (x86_64_is_imm32(iptr->val.l)) {
2232 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2235 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2236 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2240 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2242 var_to_reg_int(s1, src->prev, REG_ITMP1);
2243 var_to_reg_int(s2, src, REG_ITMP2);
2244 if (iptr->op1 == 0) {
2245 gen_nullptr_check(s1);
2248 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2251 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2253 var_to_reg_int(s1, src->prev, REG_ITMP1);
2254 var_to_reg_int(s2, src, REG_ITMP2);
2255 if (iptr->op1 == 0) {
2256 gen_nullptr_check(s1);
2259 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2262 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2264 var_to_reg_int(s1, src->prev, REG_ITMP1);
2265 var_to_reg_int(s2, src, REG_ITMP2);
2266 if (iptr->op1 == 0) {
2267 gen_nullptr_check(s1);
2270 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2273 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2275 var_to_reg_int(s1, src->prev, REG_ITMP1);
2276 var_to_reg_int(s2, src, REG_ITMP2);
2277 if (iptr->op1 == 0) {
2278 gen_nullptr_check(s1);
2281 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2285 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2286 /* op1 = type, val.a = field address */
2288 /* If the static fields' class is not yet initialized, we do it */
2289 /* now. The call code is generated later. */
2290 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2291 codegen_addclinitref(cd, cd->mcodeptr, ((fieldinfo *) iptr->val.a)->class);
2293 /* This is just for debugging purposes: patched code is very */
2294 /* hard to read. Here we patch the following 5 nop's instead, */
2295 /* so that the real code is left untouched. */
2296 if (showdisassemble) {
2305 /* This approach is much faster than moving the field address */
2306 /* inline into a register. */
2307 a = dseg_addaddress(cd, &(((fieldinfo *) iptr->val.a)->value));
2308 x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
2309 switch (iptr->op1) {
2311 var_to_reg_int(s2, src, REG_ITMP1);
2312 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2316 var_to_reg_int(s2, src, REG_ITMP1);
2317 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2320 var_to_reg_flt(s2, src, REG_FTMP1);
2321 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2324 var_to_reg_flt(s2, src, REG_FTMP1);
2325 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
2328 throw_cacao_exception_exit(string_java_lang_InternalError,
2329 "Unknown PUTSTATIC operand type %d",
2334 case ICMD_GETSTATIC: /* ... ==> ..., value */
2335 /* op1 = type, val.a = field address */
2337 /* If the static fields' class is not yet initialized, we do it */
2338 /* now. The call code is generated later. */
2339 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2340 codegen_addclinitref(cd, cd->mcodeptr, ((fieldinfo *) iptr->val.a)->class);
2342 /* This is just for debugging purposes: patched code is very */
2343 /* hard to read. Here we patch the following 5 nop's instead, */
2344 /* so that the real code is left untouched. */
2345 if (showdisassemble) {
2354 /* This approach is much faster than moving the field address */
2355 /* inline into a register. */
2356 a = dseg_addaddress(cd, &(((fieldinfo *) iptr->val.a)->value));
2357 x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
2358 switch (iptr->op1) {
2360 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2361 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2362 store_reg_to_var_int(iptr->dst, d);
2366 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2367 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2368 store_reg_to_var_int(iptr->dst, d);
2371 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2372 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2373 store_reg_to_var_flt(iptr->dst, d);
2376 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2377 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2378 store_reg_to_var_flt(iptr->dst, d);
2381 throw_cacao_exception_exit(string_java_lang_InternalError,
2382 "Unknown GETSTATIC operand type %d",
2387 case ICMD_PUTFIELD: /* ..., value ==> ... */
2388 /* op1 = type, val.a = fieldinfo pointer (offset read from it) */
2390 a = ((fieldinfo *)(iptr->val.a))->offset;
2391 var_to_reg_int(s1, src->prev, REG_ITMP1);
2392 switch (iptr->op1) {
2394 var_to_reg_int(s2, src, REG_ITMP2);
2395 gen_nullptr_check(s1);
2396 x86_64_movl_reg_membase(cd, s2, s1, a);
2400 var_to_reg_int(s2, src, REG_ITMP2);
2401 gen_nullptr_check(s1);
2402 x86_64_mov_reg_membase(cd, s2, s1, a);
2405 var_to_reg_flt(s2, src, REG_FTMP2);
2406 gen_nullptr_check(s1);
2407 x86_64_movss_reg_membase(cd, s2, s1, a);
2410 var_to_reg_flt(s2, src, REG_FTMP2);
2411 gen_nullptr_check(s1);
2412 x86_64_movsd_reg_membase(cd, s2, s1, a);
2415 throw_cacao_exception_exit(string_java_lang_InternalError,
2416 "Unknown PUTFIELD operand type %d",
2421 case ICMD_GETFIELD: /* ... ==> ..., value */
2422 /* op1 = type, val.a = fieldinfo pointer (offset read from it) */
2424 a = ((fieldinfo *)(iptr->val.a))->offset;
2425 var_to_reg_int(s1, src, REG_ITMP1);
2426 switch (iptr->op1) {
2428 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2429 gen_nullptr_check(s1);
2430 x86_64_movl_membase_reg(cd, s1, a, d);
2431 store_reg_to_var_int(iptr->dst, d);
2435 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2436 gen_nullptr_check(s1);
2437 x86_64_mov_membase_reg(cd, s1, a, d);
2438 store_reg_to_var_int(iptr->dst, d);
2441 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2442 gen_nullptr_check(s1);
2443 x86_64_movss_membase_reg(cd, s1, a, d);
2444 store_reg_to_var_flt(iptr->dst, d);
2447 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2448 gen_nullptr_check(s1);
2449 x86_64_movsd_membase_reg(cd, s1, a, d);
2450 store_reg_to_var_flt(iptr->dst, d);
2453 throw_cacao_exception_exit(string_java_lang_InternalError,
2454 "Unknown GETFIELD operand type %d",
2460 /* branch operations **************************************************/
2462 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2464 var_to_reg_int(s1, src, REG_ITMP1);
2465 M_INTMOVE(s1, REG_ITMP1_XPTR);
2467 x86_64_call_imm(cd, 0); /* passing exception pointer */
2468 x86_64_pop_reg(cd, REG_ITMP2_XPC);
2470 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
2471 x86_64_jmp_reg(cd, REG_ITMP3);
2475 case ICMD_GOTO: /* ... ==> ... */
2476 /* op1 = target JavaVM pc */
2478 x86_64_jmp_imm(cd, 0);
2479 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2483 case ICMD_JSR: /* ... ==> ... */
2484 /* op1 = target JavaVM pc */
2486 x86_64_call_imm(cd, 0);
2487 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2490 case ICMD_RET: /* ... ==> ... */
2491 /* op1 = local variable */
2493 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2494 var_to_reg_int(s1, var, REG_ITMP1);
2495 x86_64_jmp_reg(cd, s1);
2498 case ICMD_IFNULL: /* ..., value ==> ... */
2499 /* op1 = target JavaVM pc */
2501 if (src->flags & INMEMORY) {
2502 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2505 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2507 x86_64_jcc(cd, X86_64_CC_E, 0);
2508 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2511 case ICMD_IFNONNULL: /* ..., value ==> ... */
2512 /* op1 = target JavaVM pc */
2514 if (src->flags & INMEMORY) {
2515 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2518 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2520 x86_64_jcc(cd, X86_64_CC_NE, 0);
2521 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2524 case ICMD_IFEQ: /* ..., value ==> ... */
2525 /* op1 = target JavaVM pc, val.i = constant */
2527 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2530 case ICMD_IFLT: /* ..., value ==> ... */
2531 /* op1 = target JavaVM pc, val.i = constant */
2533 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2536 case ICMD_IFLE: /* ..., value ==> ... */
2537 /* op1 = target JavaVM pc, val.i = constant */
2539 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2542 case ICMD_IFNE: /* ..., value ==> ... */
2543 /* op1 = target JavaVM pc, val.i = constant */
2545 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2548 case ICMD_IFGT: /* ..., value ==> ... */
2549 /* op1 = target JavaVM pc, val.i = constant */
2551 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2554 case ICMD_IFGE: /* ..., value ==> ... */
2555 /* op1 = target JavaVM pc, val.i = constant */
2557 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
2560 case ICMD_IF_LEQ: /* ..., value ==> ... */
2561 /* op1 = target JavaVM pc, val.l = constant */
2563 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2566 case ICMD_IF_LLT: /* ..., value ==> ... */
2567 /* op1 = target JavaVM pc, val.l = constant */
2569 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2572 case ICMD_IF_LLE: /* ..., value ==> ... */
2573 /* op1 = target JavaVM pc, val.l = constant */
2575 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2578 case ICMD_IF_LNE: /* ..., value ==> ... */
2579 /* op1 = target JavaVM pc, val.l = constant */
2581 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2584 case ICMD_IF_LGT: /* ..., value ==> ... */
2585 /* op1 = target JavaVM pc, val.l = constant */
2587 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2590 case ICMD_IF_LGE: /* ..., value ==> ... */
2591 /* op1 = target JavaVM pc, val.l = constant */
2593 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
2596 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2597 /* op1 = target JavaVM pc */
2599 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2602 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2603 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2605 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2608 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2609 /* op1 = target JavaVM pc */
2611 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2614 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2615 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2617 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2620 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2621 /* op1 = target JavaVM pc */
2623 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2626 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2627 /* op1 = target JavaVM pc */
2629 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2632 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2633 /* op1 = target JavaVM pc */
2635 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2638 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2639 /* op1 = target JavaVM pc */
2641 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2644 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2645 /* op1 = target JavaVM pc */
2647 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2650 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2651 /* op1 = target JavaVM pc */
2653 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2656 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2657 /* op1 = target JavaVM pc */
2659 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2662 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2663 /* op1 = target JavaVM pc */
2665 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2668 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
2670 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2673 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2674 /* val.i = constant */
2676 var_to_reg_int(s1, src, REG_ITMP1);
2677 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2679 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2681 M_INTMOVE(s1, REG_ITMP1);
2684 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2686 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2687 x86_64_testl_reg_reg(cd, s1, s1);
2688 x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2689 store_reg_to_var_int(iptr->dst, d);
2692 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2693 /* val.i = constant */
2695 var_to_reg_int(s1, src, REG_ITMP1);
2696 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2698 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2700 M_INTMOVE(s1, REG_ITMP1);
2703 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2705 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2706 x86_64_testl_reg_reg(cd, s1, s1);
2707 x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2708 store_reg_to_var_int(iptr->dst, d);
2711 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2712 /* val.i = constant */
2714 var_to_reg_int(s1, src, REG_ITMP1);
2715 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2717 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2719 M_INTMOVE(s1, REG_ITMP1);
2722 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2724 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2725 x86_64_testl_reg_reg(cd, s1, s1);
2726 x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2727 store_reg_to_var_int(iptr->dst, d);
2730 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2731 /* val.i = constant */
2733 var_to_reg_int(s1, src, REG_ITMP1);
2734 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2736 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2738 M_INTMOVE(s1, REG_ITMP1);
2741 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2743 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2744 x86_64_testl_reg_reg(cd, s1, s1);
2745 x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2746 store_reg_to_var_int(iptr->dst, d);
2749 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2750 /* val.i = constant */
2752 var_to_reg_int(s1, src, REG_ITMP1);
2753 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2755 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2757 M_INTMOVE(s1, REG_ITMP1);
2760 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2762 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2763 x86_64_testl_reg_reg(cd, s1, s1);
2764 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2765 store_reg_to_var_int(iptr->dst, d);
2768 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2769 /* val.i = constant */
2771 var_to_reg_int(s1, src, REG_ITMP1);
2772 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2774 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2776 M_INTMOVE(s1, REG_ITMP1);
2779 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2781 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2782 x86_64_testl_reg_reg(cd, s1, s1);
2783 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2784 store_reg_to_var_int(iptr->dst, d);
2788 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2792 var_to_reg_int(s1, src, REG_RESULT);
2793 M_INTMOVE(s1, REG_RESULT);
2795 #if defined(USE_THREADS)
2796 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2797 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2798 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, rd->maxmemuse * 8);
2799 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2800 x86_64_call_reg(cd, REG_ITMP1);
2801 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_RESULT);
2805 goto nowperformreturn;
2807 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2810 var_to_reg_flt(s1, src, REG_FRESULT);
2811 M_FLTMOVE(s1, REG_FRESULT);
2813 #if defined(USE_THREADS)
2814 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2815 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2816 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, rd->maxmemuse * 8);
2817 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2818 x86_64_call_reg(cd, REG_ITMP1);
2819 x86_64_movq_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_FRESULT);
2823 goto nowperformreturn;
2825 case ICMD_RETURN: /* ... ==> ... */
2827 #if defined(USE_THREADS)
2828 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2829 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2830 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2831 x86_64_call_reg(cd, REG_ITMP1);
2839 p = parentargs_base;
2841 /* call trace function */
2843 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2845 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2846 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2848 x86_64_mov_imm_reg(cd, (s8) m, rd->argintregs[0]);
2849 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
2850 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2851 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2853 x86_64_mov_imm_reg(cd, (s8) builtin_displaymethodstop, REG_ITMP1);
2854 x86_64_call_reg(cd, REG_ITMP1);
2856 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2857 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2859 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2862 /* restore saved registers */
2863 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
2864 p--; x86_64_mov_membase_reg(cd, REG_SP, p * 8, rd->savintregs[i]);
2866 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
2867 p--; x86_64_movq_membase_reg(cd, REG_SP, p * 8, rd->savfltregs[i]);
2870 /* deallocate stack */
2871 if (parentargs_base) {
2872 x86_64_alu_imm_reg(cd, X86_64_ADD, parentargs_base * 8, REG_SP);
2881 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2886 tptr = (void **) iptr->target;
2888 s4ptr = iptr->val.a;
2889 l = s4ptr[1]; /* low */
2890 i = s4ptr[2]; /* high */
2892 var_to_reg_int(s1, src, REG_ITMP1);
2893 M_INTMOVE(s1, REG_ITMP1);
2895 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
2900 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
2901 x86_64_jcc(cd, X86_64_CC_A, 0);
2903 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
2904 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2906 /* build jump table top down and use address of lowest entry */
2908 /* s4ptr += 3 + i; */
2912 /* dseg_addtarget(cd, BlockPtrOfPC(*--s4ptr)); */
2913 dseg_addtarget(cd, (basicblock *) tptr[0]);
2917 /* length of dataseg after last dseg_addtarget is used by load */
2919 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
2920 dseg_adddata(cd, cd->mcodeptr);
2921 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
2922 x86_64_jmp_reg(cd, REG_ITMP1);
2928 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2930 s4 i, l, val, *s4ptr;
2933 tptr = (void **) iptr->target;
2935 s4ptr = iptr->val.a;
2936 l = s4ptr[0]; /* default */
2937 i = s4ptr[1]; /* count */
2939 MCODECHECK((i<<2)+8);
2940 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
2946 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
2947 x86_64_jcc(cd, X86_64_CC_E, 0);
2948 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[1]), cd->mcodeptr); */
2949 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2952 x86_64_jmp_imm(cd, 0);
2953 /* codegen_addreference(cd, BlockPtrOfPC(l), cd->mcodeptr); */
2955 tptr = (void **) iptr->target;
2956 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2963 case ICMD_BUILTIN3: /* ..., arg1, arg2, arg3 ==> ... */
2964 /* op1 = return type, val.a = function pointer*/
2968 case ICMD_BUILTIN2: /* ..., arg1, arg2 ==> ... */
2969 /* op1 = return type, val.a = function pointer*/
2973 case ICMD_BUILTIN1: /* ..., arg1 ==> ... */
2974 /* op1 = return type, val.a = function pointer*/
2978 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2979 /* op1 = arg count, val.a = method pointer */
2981 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2982 /* op1 = arg count, val.a = method pointer */
2984 case ICMD_INVOKEVIRTUAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2985 /* op1 = arg count, val.a = method pointer */
2987 case ICMD_INVOKEINTERFACE:/*.., objectref, [arg1, [arg2 ...]] ==> ... */
2988 /* op1 = arg count, val.a = method pointer */
2998 MCODECHECK((s3 << 1) + 64);
3005 /* copy arguments to registers or stack location */
3006 for (; --s3 >= 0; src = src->prev) {
3007 IS_INT_LNG_TYPE(src->type) ? iarg++ : farg++;
3013 s2 = (iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0 + (farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0;
3015 for (; --s3 >= 0; src = src->prev) {
3016 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
3017 if (src->varkind == ARGVAR) {
3018 if (IS_INT_LNG_TYPE(src->type)) {
3019 if (iarg >= INT_ARG_CNT) {
3023 if (farg >= FLT_ARG_CNT) {
3030 if (IS_INT_LNG_TYPE(src->type)) {
3031 if (iarg < INT_ARG_CNT) {
3032 s1 = rd->argintregs[iarg];
3033 var_to_reg_int(d, src, s1);
3037 var_to_reg_int(d, src, REG_ITMP1);
3039 x86_64_mov_reg_membase(cd, d, REG_SP, s2 * 8);
3043 if (farg < FLT_ARG_CNT) {
3044 s1 = rd->argfltregs[farg];
3045 var_to_reg_flt(d, src, s1);
3049 var_to_reg_flt(d, src, REG_FTMP1);
3051 x86_64_movq_reg_membase(cd, d, REG_SP, s2 * 8);
3057 switch (iptr->opc) {
3065 x86_64_mov_imm_reg(cd, a, REG_ITMP1);
3066 x86_64_call_reg(cd, REG_ITMP1);
3069 case ICMD_INVOKESTATIC:
3071 a = (s8) lm->stubroutine;
3074 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3075 x86_64_call_reg(cd, REG_ITMP2);
3078 case ICMD_INVOKESPECIAL:
3080 a = (s8) lm->stubroutine;
3083 gen_nullptr_check(rd->argintregs[0]); /* first argument contains pointer */
3084 x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); /* access memory for hardware nullptr */
3085 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3086 x86_64_call_reg(cd, REG_ITMP2);
3089 case ICMD_INVOKEVIRTUAL:
3093 gen_nullptr_check(rd->argintregs[0]);
3094 x86_64_mov_membase_reg(cd, rd->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3095 x86_64_mov_membase32_reg(cd, REG_ITMP2, OFFSET(vftbl_t, table[0]) + sizeof(methodptr) * lm->vftblindex, REG_ITMP1);
3096 x86_64_call_reg(cd, REG_ITMP1);
3099 case ICMD_INVOKEINTERFACE:
3104 gen_nullptr_check(rd->argintregs[0]);
3105 x86_64_mov_membase_reg(cd, rd->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3106 x86_64_mov_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, interfacetable[0]) - sizeof(methodptr) * ci->index, REG_ITMP2);
3107 x86_64_mov_membase32_reg(cd, REG_ITMP2, sizeof(methodptr) * (lm - ci->methods), REG_ITMP1);
3108 x86_64_call_reg(cd, REG_ITMP1);
3113 error("Unkown ICMD-Command: %d", iptr->opc);
3116 /* d contains return type */
3118 if (d != TYPE_VOID) {
3119 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3120 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3121 M_INTMOVE(REG_RESULT, s1);
3122 store_reg_to_var_int(iptr->dst, s1);
3125 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3126 M_FLTMOVE(REG_FRESULT, s1);
3127 store_reg_to_var_flt(iptr->dst, s1);
3134 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3136 /* op1: 0 == array, 1 == class */
3137 /* val.a: (classinfo*) superclass */
3139 /* superclass is an interface:
3141 * return (sub != NULL) &&
3142 * (sub->vftbl->interfacetablelength > super->index) &&
3143 * (sub->vftbl->interfacetable[-super->index] != NULL);
3145 * superclass is a class:
3147 * return ((sub != NULL) && (0
3148 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3149 * super->vftbl->diffval));
3153 classinfo *super = (classinfo*) iptr->val.a;
3155 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3156 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3159 var_to_reg_int(s1, src, REG_ITMP1);
3160 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3162 M_INTMOVE(s1, REG_ITMP1);
3165 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3166 if (iptr->op1) { /* class/interface */
3167 if (super->flags & ACC_INTERFACE) { /* interface */
3168 x86_64_test_reg_reg(cd, s1, s1);
3170 /* TODO: clean up this calculation */
3171 a = 3; /* mov_membase_reg */
3172 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3174 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3175 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3178 CALCIMMEDIATEBYTES(a, super->index);
3183 a += 3; /* mov_membase_reg */
3184 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3189 x86_64_jcc(cd, X86_64_CC_E, a);
3191 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3192 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength), REG_ITMP2);
3193 x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP2);
3194 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3196 /* TODO: clean up this calculation */
3198 a += 3; /* mov_membase_reg */
3199 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3204 x86_64_jcc(cd, X86_64_CC_LE, a);
3205 x86_64_mov_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP1);
3206 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3207 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3209 } else { /* class */
3210 x86_64_test_reg_reg(cd, s1, s1);
3212 /* TODO: clean up this calculation */
3213 a = 3; /* mov_membase_reg */
3214 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3216 a += 10; /* mov_imm_reg */
3218 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3219 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, baseval));
3221 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3222 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3224 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3225 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3232 x86_64_jcc(cd, X86_64_CC_E, a);
3234 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3235 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3236 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3237 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3239 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, baseval), REG_ITMP1);
3240 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP3);
3241 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3242 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3243 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3245 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP1);
3246 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3247 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
3248 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3252 panic("internal error: no inlined array instanceof");
3254 store_reg_to_var_int(iptr->dst, d);
3257 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3259 /* op1: 0 == array, 1 == class */
3260 /* val.a: (classinfo*) superclass */
3262 /* superclass is an interface:
3264 * OK if ((sub == NULL) ||
3265 * (sub->vftbl->interfacetablelength > super->index) &&
3266 * (sub->vftbl->interfacetable[-super->index] != NULL));
3268 * superclass is a class:
3270 * OK if ((sub == NULL) || (0
3271 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3272 * super->vftbl->diffval));
3276 classinfo *super = (classinfo*) iptr->val.a;
3278 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3279 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3281 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3282 var_to_reg_int(s1, src, d);
3283 if (iptr->op1) { /* class/interface */
3284 if (super->flags & ACC_INTERFACE) { /* interface */
3285 x86_64_test_reg_reg(cd, s1, s1);
3287 /* TODO: clean up this calculation */
3288 a = 3; /* mov_membase_reg */
3289 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3291 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3292 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3295 CALCIMMEDIATEBYTES(a, super->index);
3300 a += 3; /* mov_membase_reg */
3301 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3306 x86_64_jcc(cd, X86_64_CC_E, a);
3308 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
/* Tail of ICMD_CHECKCAST (the instruction's head lies above this view).    */
/* Interface branch: REG_ITMP1 holds the object's vftbl pointer; verify    */
/* that the interface with index super->index is implemented.              */
3309 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength), REG_ITMP2);
/* index must be strictly below interfacetablelength, else the cast fails */
3310 x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP2);
3311 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
/* branch target 0 is a placeholder; resolved later via the xcastrefs list */
3312 x86_64_jcc(cd, X86_64_CC_LE, 0);
3313 codegen_addxcastrefs(cd, cd->mcodeptr);
/* load the interface table entry; a NULL entry means "not implemented" */
3314 x86_64_mov_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP2);
3315 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3316 x86_64_jcc(cd, X86_64_CC_E, 0);
3317 codegen_addxcastrefs(cd, cd->mcodeptr);
3319 } else { /* class */
/* a NULL reference always passes a checkcast: skip the subtype test */
3320 x86_64_test_reg_reg(cd, s1, s1);
3322 /* TODO: clean up this calculation */
/* `a' accumulates the byte length of the subtype-test code emitted      */
/* below, so the NULL case can jump over it; the per-instruction sizes   */
/* are hand counted and CALCOFFSETBYTES adds the ModRM displacement      */
/* bytes required for the given base register/offset combination.        */
3323 a = 3; /* mov_membase_reg */
3324 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3325 a += 10; /* mov_imm_reg */
3326 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3327 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, baseval));
3329 if (d != REG_ITMP3) {
3330 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3331 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3332 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3333 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3337 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3338 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3340 a += 10; /* mov_imm_reg */
3341 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3342 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
/* NULL reference: jump over the `a' bytes of subtype-test code */
3348 x86_64_jcc(cd, X86_64_CC_E, a);
/* relative-numbering subtype test: the cast succeeds iff               */
/* (u32)(baseval(obj) - baseval(super)) <= diffval(super)               */
3350 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3351 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3352 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* baseval/diffval may be renumbered concurrently: critical section */
3353 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3355 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, baseval), REG_ITMP1);
3356 if (d != REG_ITMP3) {
3357 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP3);
3358 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3359 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3360 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3362 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP1);
/* d == REG_ITMP3: REG_ITMP2 is clobbered by the baseval load, so the  */
/* super->vftbl pointer has to be reloaded before reading diffval      */
3365 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP2);
3366 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
3367 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3368 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3369 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3370 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
/* unsigned compare implements the range check in one branch */
3373 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
3374 x86_64_jcc(cd, X86_64_CC_A, 0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3375 codegen_addxcastrefs(cd, cd->mcodeptr);
/* array checkcasts are expected to be inlined earlier in the pipeline */
3379 panic("internal error: no inlined array checkcast");
3382 store_reg_to_var_int(iptr->dst, d);
3385 case ICMD_CHECKASIZE: /* ..., size ==> ..., size */
/* compare the array size against 0, in memory or in a register */
3387 if (src->flags & INMEMORY) {
3388 x86_64_alul_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
3391 x86_64_testl_reg_reg(cd, src->regoff, src->regoff);
/* negative size -> NegativeArraySizeException stub (branch patched later) */
3393 x86_64_jcc(cd, X86_64_CC_L, 0);
3394 codegen_addxcheckarefs(cd, cd->mcodeptr);
3397 case ICMD_CHECKEXCEPTION: /* ... ==> ... */
/* a zero REG_RESULT after a builtin call signals a pending exception */
3399 x86_64_test_reg_reg(cd, REG_RESULT, REG_RESULT);
3400 x86_64_jcc(cd, X86_64_CC_E, 0);
3401 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3404 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3405 /* op1 = dimension, val.a = array descriptor */
3407 /* check for negative sizes and copy sizes to stack if necessary */
3409 MCODECHECK((iptr->op1 << 1) + 64);
/* walk the op1 dimension sizes from the top of the stack downwards */
3411 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3412 var_to_reg_int(s2, src, REG_ITMP1);
3413 x86_64_testl_reg_reg(cd, s2, s2);
/* any negative dimension -> NegativeArraySizeException stub */
3414 x86_64_jcc(cd, X86_64_CC_L, 0);
3415 codegen_addxcheckarefs(cd, cd->mcodeptr);
3417 /* copy sizes to stack (argument numbers >= INT_ARG_CNT) */
3419 if (src->varkind != ARGVAR) {
3420 x86_64_mov_reg_membase(cd, s2, REG_SP, (s1 + INT_ARG_CNT) * 8);
3424 /* a0 = dimension count */
3425 x86_64_mov_imm_reg(cd, iptr->op1, rd->argintregs[0]);
3427 /* a1 = arraydescriptor */
3428 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, rd->argintregs[1]);
3430 /* a2 = pointer to dimensions = stack pointer */
3431 x86_64_mov_reg_reg(cd, REG_SP, rd->argintregs[2]);
/* call the builtin; it returns the new array reference in REG_RESULT */
3433 x86_64_mov_imm_reg(cd, (s8) builtin_nmultianewarray, REG_ITMP1);
3434 x86_64_call_reg(cd, REG_ITMP1);
3436 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3437 M_INTMOVE(REG_RESULT, s1);
3438 store_reg_to_var_int(iptr->dst, s1);
/* default: unknown opcode -> abort compilation with an InternalError */
3442 throw_cacao_exception_exit(string_java_lang_InternalError,
3443 "Unknown ICMD %d", iptr->opc);
3446 } /* for instruction */
3448 /* copy values to interface registers */
/* At the end of a basic block, move every remaining stack value into    */
/* the interface register (or spill slot) the register allocator         */
/* assigned for the block boundary.                                      */
3450 src = bptr->outstack;
3451 len = bptr->outdepth;
3452 MCODECHECK(64 + len);
/* STACKVAR values already live in their interface locations */
3455 if ((src->varkind != STACKVAR)) {
3457 if (IS_FLT_DBL_TYPE(s2)) {
3458 var_to_reg_flt(s1, src, REG_FTMP1);
/* move into the interface register or spill to its stack slot */
3459 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3460 M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3463 x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
/* integer/reference values take the same two paths */
3467 var_to_reg_int(s1, src, REG_ITMP1);
3468 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3469 M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3472 x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3478 } /* if (bptr -> flags >= BBREACHED) */
3479 } /* for basic block */
3483 /* generate bound check stubs */
/* Common pattern for every exception stub generator below: resolve the  */
/* forward branch recorded during code generation, compute the faulting  */
/* program counter (REG_ITMP2_XPC) as data-segment base (patched in by   */
/* dseg_adddata) plus the branch position, then either jump into an      */
/* already emitted shared stub (xcodeptr) or emit a new stub body that   */
/* allocates the exception object and enters asm_handle_exception.       */
3485 u1 *xcodeptr = NULL;
3488 for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3489 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3491 cd->mcodeptr - cd->mcodebase);
3495 /* move index register into REG_ITMP1 */
3496 x86_64_mov_reg_reg(cd, bref->reg, REG_ITMP1); /* 3 bytes */
/* XPC = data segment pointer (imm patched by dseg_adddata) + offset */
3498 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3499 dseg_adddata(cd, cd->mcodeptr);
3500 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3501 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
/* reuse the shared stub body if one was already emitted */
3503 if (xcodeptr != NULL) {
3504 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3507 xcodeptr = cd->mcodeptr;
/* save XPC across the allocation call; 2*8 keeps the stack aligned */
3509 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3510 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
/* pass the failing index and create the exception object */
3512 x86_64_mov_reg_reg(cd, REG_ITMP1, rd->argintregs[0]);
3513 x86_64_mov_imm_reg(cd, (s8) new_arrayindexoutofboundsexception, REG_ITMP3);
3514 x86_64_call_reg(cd, REG_ITMP3);
3516 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3517 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3519 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3520 x86_64_jmp_reg(cd, REG_ITMP3);
3524 /* generate negative array size check stubs */
3528 for (bref = cd->xcheckarefs; bref != NULL; bref = bref->next) {
/* without exception handlers every ref can share one stub: branch    */
/* straight to the stub's (10 + 10 + 3)-byte XPC-computation preamble */
3529 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3530 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3532 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3536 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3538 cd->mcodeptr - cd->mcodebase);
3542 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3543 dseg_adddata(cd, cd->mcodeptr);
3544 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3545 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3547 if (xcodeptr != NULL) {
3548 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3551 xcodeptr = cd->mcodeptr;
3553 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3554 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3556 x86_64_mov_imm_reg(cd, (s8) new_negativearraysizeexception, REG_ITMP3);
3557 x86_64_call_reg(cd, REG_ITMP3);
3559 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3560 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3562 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3563 x86_64_jmp_reg(cd, REG_ITMP3);
3567 /* generate cast check stubs */
3571 for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3572 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3573 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3575 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3579 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3581 cd->mcodeptr - cd->mcodebase);
3585 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3586 dseg_adddata(cd, cd->mcodeptr);
3587 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3588 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3590 if (xcodeptr != NULL) {
3591 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3594 xcodeptr = cd->mcodeptr;
3596 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3597 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3599 x86_64_mov_imm_reg(cd, (s8) new_classcastexception, REG_ITMP3);
3600 x86_64_call_reg(cd, REG_ITMP3);
3602 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3603 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3605 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3606 x86_64_jmp_reg(cd, REG_ITMP3);
3610 /* generate divide by zero check stubs */
3614 for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
3615 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3616 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3618 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3622 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3624 cd->mcodeptr - cd->mcodebase);
3628 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3629 dseg_adddata(cd, cd->mcodeptr);
3630 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3631 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3633 if (xcodeptr != NULL) {
3634 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3637 xcodeptr = cd->mcodeptr;
3639 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3640 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3642 x86_64_mov_imm_reg(cd, (u8) new_arithmeticexception, REG_ITMP3);
3643 x86_64_call_reg(cd, REG_ITMP3);
3645 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3646 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3648 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3649 x86_64_jmp_reg(cd, REG_ITMP3);
3653 /* generate exception check stubs */
3657 for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
3658 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3659 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3661 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3665 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3667 cd->mcodeptr - cd->mcodebase);
3671 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3672 dseg_adddata(cd, cd->mcodeptr);
3673 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3674 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3676 if (xcodeptr != NULL) {
3677 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3680 xcodeptr = cd->mcodeptr;
/* fetch the pending exception and clear its slot: via the thread-   */
/* local exceptionptr (threaded build) or the global _exceptionptr   */
3682 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3683 x86_64_alu_imm_reg(cd, X86_64_SUB, 8, REG_SP);
3684 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0);
3685 x86_64_mov_imm_reg(cd, (u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3686 x86_64_call_reg(cd, REG_ITMP1);
3687 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
3688 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
3689 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
3690 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);
3691 x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
3693 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
3694 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP1_XPTR);
3695 x86_64_mov_imm_membase(cd, 0, REG_ITMP3, 0);
3698 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3699 x86_64_jmp_reg(cd, REG_ITMP3);
3703 /* generate null pointer check stubs */
3707 for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
3708 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3709 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3711 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3715 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3717 cd->mcodeptr - cd->mcodebase);
3721 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3722 dseg_adddata(cd, cd->mcodeptr);
3723 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3724 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3726 if (xcodeptr != NULL) {
3727 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3730 xcodeptr = cd->mcodeptr;
3732 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3733 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3735 x86_64_mov_imm_reg(cd, (s8) new_nullpointerexception, REG_ITMP3);
3736 x86_64_call_reg(cd, REG_ITMP3);
3738 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3739 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3741 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3742 x86_64_jmp_reg(cd, REG_ITMP3);
3746 /* generate put/getstatic stub call code */
/* For each recorded clinit reference, overwrite the original code with   */
/* a `call rel32' into a stub that passes the overwritten bytes, the      */
/* class, and the patch position to asm_check_clinit, which initializes   */
/* the class and restores the original machine code.                      */
3754 tmpcd = DNEW(codegendata);
3756 for (cref = cd->clinitrefs; cref != NULL; cref = cref->next) {
3757 /* Get machine code which is patched back in later. A */
3758 /* `call rel32' is 5 bytes long. */
3759 xcodeptr = cd->mcodebase + cref->branchpos;
3761 mcode = *((u4 *) (xcodeptr + 1));
3765 /* patch in `call rel32' to call the following code */
3766 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
3767 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
3769 /* Save current stack pointer into a temporary register. */
3770 x86_64_mov_reg_reg(cd, REG_SP, REG_ITMP1);
3772 /* Push machine code bytes to patch onto the stack. */
3773 x86_64_push_imm(cd, (u1) xmcode);
3774 x86_64_push_imm(cd, (u4) mcode);
3776 x86_64_push_imm(cd, (u8) cref->class);
3778 /* Push previously saved stack pointer onto stack. */
3779 x86_64_push_reg(cd, REG_ITMP1);
3781 x86_64_mov_imm_reg(cd, (u8) asm_check_clinit, REG_ITMP1);
3782 x86_64_jmp_reg(cd, REG_ITMP1);
/* copy the generated code into its final executable memory area */
3787 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
3791 /* function createcompilerstub *************************************************
3793 creates a stub routine which calls the compiler
3795 *******************************************************************************/
3797 #define COMPSTUBSIZE 23 /* 10 + 10 + 3 bytes: two mov_imm_reg + jmp_reg */
/* createcompilerstub: allocate and fill a tiny trampoline that passes   */
/* the methodinfo to asm_call_jit_compiler; the stub is overwritten by   */
/* the real entry point once the method has been compiled.               */
3799 u1 *createcompilerstub(methodinfo *m)
3801 u1 *s = CNEW(u1, COMPSTUBSIZE); /* memory to hold the stub */
3805 /* mark start of dump memory area */
3807 dumpsize = dump_size();
3809 cd = DNEW(codegendata);
3812 /* code for the stub */
3813 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1); /* pass method to compiler */
3814 x86_64_mov_imm_reg(cd, (u8) asm_call_jit_compiler, REG_ITMP3);/* load address */
3815 x86_64_jmp_reg(cd, REG_ITMP3); /* jump to compiler */
3817 #if defined(STATISTICS)
3819 count_cstub_len += COMPSTUBSIZE;
3822 /* release dump area */
3824 dump_release(dumpsize);
3830 /* function removecompilerstub *************************************************
3832 deletes a compilerstub from memory (simply by freeing it)
3834 *******************************************************************************/
3836 void removecompilerstub(u1 *stub)
/* free the COMPSTUBSIZE bytes allocated by createcompilerstub */
3838 CFREE(stub, COMPSTUBSIZE);
3842 /* function: createnativestub **************************************************
3844 creates a stub routine which calls a native method
3846 *******************************************************************************/
3848 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3849 /* static java_objectheader **(*callgetexceptionptrptr)() = builtin_get_exceptionptrptr; */
3852 #define NATIVESTUBSIZE 420
/* createnativestub: build a wrapper around a native (JNI-style) method. */
/* It shifts the Java argument registers to make room for the env (and,  */
/* for static methods, the class) argument, optionally resolves the      */
/* native function lazily, calls it, and checks afterwards whether the   */
/* call left a pending exception.                                        */
3854 u1 *createnativestub(functionptr f, methodinfo *m)
3856 u1 *s = CNEW(u1, NATIVESTUBSIZE); /* memory to hold the stub */
3857 s4 stackframesize; /* size of stackframe if needed */
3860 t_inlining_globals *id;
/* patch positions used when the native function is resolved lazily */
3863 void **callAddrPatchPos=0;
3865 void **jmpInstrPatchPos=0;
3867 /* mark start of dump memory area */
3869 dumpsize = dump_size();
3871 cd = DNEW(codegendata);
3872 rd = DNEW(registerdata);
3873 id = DNEW(t_inlining_globals);
3875 /* setup registers before using it */
3877 inlining_setup(m, id);
3878 reg_setup(m, rd, id);
3880 /* set some required variables which are normally set by codegen_setup */
3883 cd->clinitrefs = NULL;
3885 descriptor2types(m); /* set paramcount and paramtypes */
3887 /* if function is static, check for initialized */
3889 if (m->flags & ACC_STATIC) {
3890 /* if class isn't yet initialized, do it */
3891 if (!m->class->initialized) {
3892 codegen_addclinitref(cd, cd->mcodeptr, m->class);
/* save all integer and float argument registers across the calls below */
3897 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
3899 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 1 * 8);
3900 x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 2 * 8);
3901 x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 3 * 8);
3902 x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 4 * 8);
3903 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 5 * 8);
3904 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 6 * 8);
3906 x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 7 * 8);
3907 x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 8 * 8);
3908 x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 9 * 8);
3909 x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 10 * 8);
3910 /* x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 11 * 8); */
3911 /* x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 12 * 8); */
3912 /* x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 13 * 8); */
3913 /* x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 14 * 8); */
3918 /* show integer hex code for float arguments */
/* for tracing, float args are moved into integer registers; the other  */
/* integer args are shifted right one slot to make room                 */
3919 for (p = 0, l = 0; p < m->paramcount; p++) {
3920 if (IS_FLT_DBL_TYPE(m->paramtypes[p])) {
3921 for (s1 = (m->paramcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : m->paramcount - 2; s1 >= p; s1--) {
3922 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
3925 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
/* call the argument tracer with the method pointer on the stack */
3930 x86_64_mov_imm_reg(cd, (s8) m, REG_ITMP1);
3931 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, 0 * 8);
3932 x86_64_mov_imm_reg(cd, (s8) builtin_trace_args, REG_ITMP1);
3933 x86_64_call_reg(cd, REG_ITMP1);
3935 /* call method to resolve native function if needed */
3936 #ifndef STATIC_CLASSPATH
3937 if (f==0) { /* only if not already resolved */
/* jmp displacement starts at 0; codegen_resolve_native patches it so  */
/* later invocations skip this resolution block entirely               */
3938 x86_64_jmp_imm(cd,0);
3939 jmpInstrPos=cd->mcodeptr-4; /*needed to patch a jump over this block*/
3940 x86_64_mov_imm_reg(cd,(u8)m,rd->argintregs[0]);
3941 x86_64_mov_imm_reg(cd,0,rd->argintregs[1]);
3942 callAddrPatchPos=cd->mcodeptr-8; /* at this position the place is specified where the native function address should be patched into */
3943 x86_64_mov_imm_reg(cd,0,rd->argintregs[2]);
3944 jmpInstrPatchPos=cd->mcodeptr-8;
3945 x86_64_mov_imm_reg(cd,jmpInstrPos,rd->argintregs[3]);
3946 x86_64_mov_imm_reg(cd,(s8)codegen_resolve_native,REG_ITMP1);
3947 x86_64_call_reg(cd,REG_ITMP1);
/* store the jump distance for codegen_resolve_native to patch in */
3948 *(jmpInstrPatchPos)=cd->mcodeptr-jmpInstrPos-1; /*=opcode jmp_imm size*/
/* restore the argument registers saved above */
3952 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[0]);
3953 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[1]);
3954 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[2]);
3955 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[3]);
3956 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[4]);
3957 x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, rd->argintregs[5]);
3959 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[0]);
3960 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[1]);
3961 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[2]);
3962 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[3]);
3963 /* x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[4]); */
3964 /* x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[5]); */
3965 /* x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[6]); */
3966 /* x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, rd->argfltregs[7]); */
3968 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
3972 x86_64_alu_imm_reg(cd, X86_64_SUB, 7 * 8, REG_SP); /* keep stack 16-byte aligned */
3974 /* save callee saved float registers */
3975 x86_64_movq_reg_membase(cd, XMM15, REG_SP, 0 * 8);
3976 x86_64_movq_reg_membase(cd, XMM14, REG_SP, 1 * 8);
3977 x86_64_movq_reg_membase(cd, XMM13, REG_SP, 2 * 8);
3978 x86_64_movq_reg_membase(cd, XMM12, REG_SP, 3 * 8);
3979 x86_64_movq_reg_membase(cd, XMM11, REG_SP, 4 * 8);
3980 x86_64_movq_reg_membase(cd, XMM10, REG_SP, 5 * 8);
3983 /* save argument registers on stack -- if we have to */
/* a static method needs 2 extra leading C args (env, class), a        */
/* non-static one needs 1 (env); overflowing args go to the stack      */
3984 if ((m->flags & ACC_STATIC && m->paramcount > (INT_ARG_CNT - 2)) || m->paramcount > (INT_ARG_CNT - 1)) {
3986 s4 paramshiftcnt = (m->flags & ACC_STATIC) ? 2 : 1;
3987 s4 stackparamcnt = (m->paramcount > INT_ARG_CNT) ? m->paramcount - INT_ARG_CNT : 0;
3989 stackframesize = stackparamcnt + paramshiftcnt;
3991 /* keep stack 16-byte aligned */
3992 if (!(stackframesize & 0x1))
3995 x86_64_alu_imm_reg(cd, X86_64_SUB, stackframesize * 8, REG_SP);
3997 /* copy stack arguments into new stack frame -- if any */
3998 for (i = 0; i < stackparamcnt; i++) {
3999 x86_64_mov_membase_reg(cd, REG_SP, (stackparamcnt + 1 + i) * 8, REG_ITMP1);
4000 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
/* spill the register arguments displaced by the shift below */
4003 if (m->flags & ACC_STATIC) {
4004 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 1 * 8);
4005 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 0 * 8);
4008 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 0 * 8);
4012 /* keep stack 16-byte aligned */
4013 x86_64_alu_imm_reg(cd, X86_64_SUB, 8, REG_SP);
/* shift the register arguments up to free slot(s) for env (and class) */
4017 if (m->flags & ACC_STATIC) {
4018 x86_64_mov_reg_reg(cd, rd->argintregs[3], rd->argintregs[5]);
4019 x86_64_mov_reg_reg(cd, rd->argintregs[2], rd->argintregs[4]);
4020 x86_64_mov_reg_reg(cd, rd->argintregs[1], rd->argintregs[3]);
4021 x86_64_mov_reg_reg(cd, rd->argintregs[0], rd->argintregs[2]);
4023 /* put class into second argument register */
4024 x86_64_mov_imm_reg(cd, (u8) m->class, rd->argintregs[1]);
4027 x86_64_mov_reg_reg(cd, rd->argintregs[4], rd->argintregs[5]);
4028 x86_64_mov_reg_reg(cd, rd->argintregs[3], rd->argintregs[4]);
4029 x86_64_mov_reg_reg(cd, rd->argintregs[2], rd->argintregs[3]);
4030 x86_64_mov_reg_reg(cd, rd->argintregs[1], rd->argintregs[2]);
4031 x86_64_mov_reg_reg(cd, rd->argintregs[0], rd->argintregs[1]);
4034 /* put env into first argument register */
4035 x86_64_mov_imm_reg(cd, (u8) &env, rd->argintregs[0]);
/* call the native function; the 8-byte address immediate of this mov  */
/* is the location lazy resolution patches (see below)                 */
4037 x86_64_mov_imm_reg(cd, (u8) f, REG_ITMP1);
4038 #ifndef STATIC_CLASSPATH
/* record where the resolved native address must be written */
4040 (*callAddrPatchPos)=cd->mcodeptr-8;
4042 x86_64_call_reg(cd, REG_ITMP1);
4044 /* remove stackframe if there is one */
4045 if (stackframesize) {
4046 x86_64_alu_imm_reg(cd, X86_64_ADD, stackframesize * 8, REG_SP);
/* verbose tracing of the method result (int and float variants) */
4050 x86_64_alu_imm_reg(cd, X86_64_SUB, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4052 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
4053 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
4055 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
4056 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
4057 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
4058 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
4060 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
4061 x86_64_call_reg(cd, REG_ITMP1);
4063 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
4064 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
4066 x86_64_alu_imm_reg(cd, X86_64_ADD, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4070 /* restore callee saved registers */
4071 x86_64_movq_membase_reg(cd, REG_SP, 0 * 8, XMM15);
4072 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, XMM14);
4073 x86_64_movq_membase_reg(cd, REG_SP, 2 * 8, XMM13);
4074 x86_64_movq_membase_reg(cd, REG_SP, 3 * 8, XMM12);
4075 x86_64_movq_membase_reg(cd, REG_SP, 4 * 8, XMM11);
4076 x86_64_movq_membase_reg(cd, REG_SP, 5 * 8, XMM10);
4078 x86_64_alu_imm_reg(cd, X86_64_ADD, 7 * 8, REG_SP); /* keep stack 16-byte aligned */
/* check whether the native call left a pending exception: thread-    */
/* local slot (threaded build) or the global _exceptionptr            */
4081 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4082 x86_64_push_reg(cd, REG_RESULT);
4083 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4084 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4085 x86_64_call_reg(cd, REG_ITMP3);
4086 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
4087 x86_64_pop_reg(cd, REG_RESULT);
4089 x86_64_mov_imm_reg(cd, (s8) &_exceptionptr, REG_ITMP3);
4090 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP3);
/* no exception pending: 1-byte jump over the normal return below */
4092 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
4093 x86_64_jcc(cd, X86_64_CC_NE, 1);
/* exception path: fetch the exception object and clear its slot */
4097 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4098 x86_64_push_reg(cd, REG_ITMP3);
4099 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4100 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4101 x86_64_call_reg(cd, REG_ITMP3);
4102 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
4103 x86_64_pop_reg(cd, REG_ITMP1_XPTR);
4105 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
4106 x86_64_mov_imm_reg(cd, (s8) &_exceptionptr, REG_ITMP3);
4107 x86_64_alu_reg_reg(cd, X86_64_XOR, REG_ITMP2, REG_ITMP2);
4108 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_ITMP3, 0); /* clear exception pointer */
/* XPC = return address minus the size of the call instruction */
4111 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC); /* get return address from stack */
4112 x86_64_alu_imm_reg(cd, X86_64_SUB, 3, REG_ITMP2_XPC); /* callq */
4114 x86_64_mov_imm_reg(cd, (s8) asm_handle_nat_exception, REG_ITMP3);
4115 x86_64_jmp_reg(cd, REG_ITMP3);
/* patch the clinit reference recorded above (static methods only); */
/* same mechanism as the put/getstatic stubs in codegen()           */
4124 tmpcd = DNEW(codegendata);
4126 /* there can only be one clinit ref entry */
4127 cref = cd->clinitrefs;
4130 /* Get machine code which is patched back in later. A */
4131 /* `call rel32' is 5 bytes long. */
4132 xcodeptr = cd->mcodebase + cref->branchpos;
4134 mcode = *((u4 *) (xcodeptr + 1));
4136 /* patch in `call rel32' to call the following code */
4137 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4138 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4140 /* Save current stack pointer into a temporary register. */
4141 x86_64_mov_reg_reg(cd, REG_SP, REG_ITMP1);
4143 /* Push machine code bytes to patch onto the stack. */
4144 x86_64_push_imm(cd, (u1) xmcode);
4145 x86_64_push_imm(cd, (u4) mcode);
4147 x86_64_push_imm(cd, (u8) cref->class);
4149 /* Push previously saved stack pointer onto stack. */
4150 x86_64_push_reg(cd, REG_ITMP1);
4152 x86_64_mov_imm_reg(cd, (u8) asm_check_clinit, REG_ITMP1);
4153 x86_64_jmp_reg(cd, REG_ITMP1);
4159 static int stubprinted;
/* NOTE(review): "%d" is paired with a long argument here -- should be  */
/* "%ld" to avoid undefined behavior; confirm before changing output.   */
4161 printf("stubsize: %d\n", ((long) cd->mcodeptr - (long) s));
4166 #if defined(STATISTICS)
4168 count_nstub_len += NATIVESTUBSIZE;
4171 /* release dump area */
4173 dump_release(dumpsize);
4179 /* function: removenativestub **************************************************
4181 removes a previously created native-stub from memory
4183 *******************************************************************************/
4185 void removenativestub(u1 *stub)
/* free the NATIVESTUBSIZE bytes allocated by createnativestub */
4187 CFREE(stub, NATIVESTUBSIZE);
4192 * These are local overrides for various environment variables in Emacs.
4193 * Please do not remove this and leave it at the end of the file, where
4194 * Emacs will automagically detect them.
4195 * ---------------------------------------------------------------------
4198 * indent-tabs-mode: t