1 /* jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003
4 Institut f. Computersprachen, TU Wien
5 R. Grafl, A. Krall, C. Kruegel, C. Oates, R. Obermaisser, M. Probst,
6 S. Ring, E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich,
9 This file is part of CACAO.
11 This program is free software; you can redistribute it and/or
12 modify it under the terms of the GNU General Public License as
13 published by the Free Software Foundation; either version 2, or (at
14 your option) any later version.
16 This program is distributed in the hope that it will be useful, but
17 WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 General Public License for more details.
21 You should have received a copy of the GNU General Public License
22 along with this program; if not, write to the Free Software
23 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
26 Contact: cacao@complang.tuwien.ac.at
28 Authors: Andreas Krall
31 $Id: codegen.c 1701 2004-12-06 14:31:45Z twisti $
41 #include "native/native.h"
42 /* #include "native/jni.h" */
43 #include "vm/global.h"
44 #include "vm/builtin.h"
45 #include "vm/loader.h"
46 #include "vm/tables.h"
47 #include "vm/jit/asmpart.h"
48 #include "vm/jit/jit.h"
49 #include "vm/jit/reg.h"
50 #include "vm/jit/parse.h"
51 #include "vm/jit/x86_64/arch.h"
52 #include "vm/jit/x86_64/codegen.h"
53 #include "vm/jit/x86_64/emitfuncs.h"
54 #include "vm/jit/x86_64/types.h"
55 #include "vm/jit/x86_64/asmoffsets.h"
58 /* register description - array ***********************************************/
60 /* #define REG_RES 0 reserved register for OS or code generator */
61 /* #define REG_RET 1 return value register */
62 /* #define REG_EXC 2 exception value register (only old jit) */
63 /* #define REG_SAV 3 (callee) saved register */
64 /* #define REG_TMP 4 scratch temporary register (caller saved) */
65 /* #define REG_ARG 5 argument register (caller saved) */
67 /* #define REG_END -1 last entry in tables */
/* Usage class of each x86_64 integer register, indexed by hardware
   register number (rax, rcx, rdx, rbx, rsp, rbp, rsi, rdi, r8..r15);
   values are the REG_* classes documented above.  NOTE(review): the
   terminating REG_END entry / closing brace lies outside this view. */
69 static int nregdescint[] = {
70 REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
71 REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
/* Usage class of each SSE register (xmm0..xmm15).  The commented-out
   rows preserve an earlier allocation that kept callee-saved float
   registers; the active rows mark the upper registers REG_TMP instead
   (the SysV x86_64 ABI has no callee-saved xmm registers). */
76 static int nregdescfloat[] = {
77 /* REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP, */
78 /* REG_RES, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV, REG_SAV, */
79 REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
80 REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
85 /* Include independent code generation stuff -- include after register */
86 /* descriptions to avoid extern definitions. */
88 #include "vm/jit/codegen.inc"
89 #include "vm/jit/reg.inc"
91 #include "vm/jit/lsra.inc"
95 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* Called with the ucontext of a thread that was stopped asynchronously.
   Checks whether the interrupted instruction pointer lies inside a JIT
   critical section and, if so, rewrites RIP to the address returned by
   thread_checkcritical so the section is restarted.  NOTE(review): the
   declaration of `critical` and the `if (critical)` guard around the
   RIP rewrite fall in lines not visible here — confirm against the
   full source. */
96 void thread_restartcriticalsection(ucontext_t *uc)
100 critical = thread_checkcritical((void *) uc->uc_mcontext.gregs[REG_RIP]);
103 uc->uc_mcontext.gregs[REG_RIP] = (u8) critical;
108 /* NullPointerException signal handler for hardware null pointer check */
/* SIGSEGV/SIGBUS handler: converts a hardware fault on a null-pointer
   access into a Java NullPointerException.  It reinstalls itself,
   unblocks the signal, allocates the exception object, and then edits
   the saved machine context so that execution resumes in
   asm_handle_exception with the exception pointer and faulting PC in
   the registers the exception path expects. */
110 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
114 /* long faultaddr; */
116 struct ucontext *_uc = (struct ucontext *) _p;
117 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
118 struct sigaction act;
119 java_objectheader *xptr;
121 /* Reset signal handler - necessary for SysV, does no harm for BSD */
124 /* instr = *((int*)(sigctx->rip)); */
125 /* faultaddr = sigctx->sc_regs[(instr >> 16) & 0x1f]; */
127 /* if (faultaddr == 0) { */
128 act.sa_sigaction = (functionptr) catch_NullPointerException; /* reinstall handler */
129 act.sa_flags = SA_SIGINFO;
130 sigaction(sig, &act, NULL);
/* NOTE(review): the declaration and sigemptyset() of `nsig` are in
   lines not visible in this view. */
133 sigaddset(&nsig, sig);
134 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
136 xptr = new_nullpointerexception();
/* Redirect the interrupted thread into the JIT exception machinery:
   exception object in rax, faulting pc in r10, resume at
   asm_handle_exception. */
138 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
139 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
140 sigctx->rip = (u8) asm_handle_exception;
145 /* faultaddr += (long) ((instr << 16) >> 16); */
146 /* fprintf(stderr, "faulting address: 0x%08x\n", faultaddr); */
147 /* panic("Stack overflow"); */
152 /* ArithmeticException signal handler for hardware divide by zero check */
/* SIGFPE handler: converts a hardware divide fault into a Java
   ArithmeticException.  Mirrors catch_NullPointerException: reinstall
   the handler, unblock the signal, build the exception object, then
   patch the saved context to resume in asm_handle_exception. */
154 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
158 struct ucontext *_uc = (struct ucontext *) _p;
159 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
160 struct sigaction act;
161 java_objectheader *xptr;
163 /* Reset signal handler - necessary for SysV, does no harm for BSD */
165 act.sa_sigaction = (functionptr) catch_ArithmeticException; /* reinstall handler */
166 act.sa_flags = SA_SIGINFO;
167 sigaction(sig, &act, NULL);
/* NOTE(review): declaration and sigemptyset() of `nsig` are in lines
   not visible in this view. */
170 sigaddset(&nsig, sig);
171 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
173 xptr = new_arithmeticexception();
/* Exception pointer in rax, faulting pc in r10, resume at
   asm_handle_exception (same contract as the null-pointer handler). */
175 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
176 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
177 sigctx->rip = (u8) asm_handle_exception;
/* Install the signal handlers that turn hardware faults into Java
   exceptions: SIGSEGV and SIGBUS both map to NullPointerException
   (hardware null-pointer check), SIGFPE maps to ArithmeticException
   (hardware divide-by-zero check).  All handlers use SA_SIGINFO so
   they receive the machine context they need to patch. */
183 void init_exceptions(void)
185 struct sigaction act;
187 /* install signal handlers we need to convert to exceptions */
188 sigemptyset(&act.sa_mask);
/* null-pointer faults via page protection */
192 act.sa_sigaction = (functionptr) catch_NullPointerException;
193 act.sa_flags = SA_SIGINFO;
194 sigaction(SIGSEGV, &act, NULL);
/* some platforms raise SIGBUS instead of SIGSEGV for the same fault */
198 act.sa_sigaction = (functionptr) catch_NullPointerException;
199 act.sa_flags = SA_SIGINFO;
200 sigaction(SIGBUS, &act, NULL);
/* integer divide-by-zero */
204 act.sa_sigaction = (functionptr) catch_ArithmeticException;
205 act.sa_flags = SA_SIGINFO;
206 sigaction(SIGFPE, &act, NULL);
210 /* function codegen ************************************************************
212 generates machine code
214 *******************************************************************************/
216 void codegen(methodinfo *m, codegendata *cd, registerdata *rd)
218 s4 len, s1, s2, s3, d;
233 /* space to save used callee saved registers */
235 savedregs_num += (rd->savintregcnt - rd->maxsavintreguse);
236 savedregs_num += (rd->savfltregcnt - rd->maxsavfltreguse);
238 parentargs_base = rd->maxmemuse + savedregs_num;
240 #if defined(USE_THREADS) /* space to save argument of monitor_enter */
242 if (checksync && (m->flags & ACC_SYNCHRONIZED))
247 /* keep stack 16-byte aligned for calls into native code e.g. libc or jni */
248 /* (alignment problems with movaps) */
250 if (!(parentargs_base & 0x1)) {
254 /* create method header */
256 (void) dseg_addaddress(cd, m); /* MethodPointer */
257 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
259 #if defined(USE_THREADS)
261 /* IsSync contains the offset relative to the stack pointer for the
262 argument of monitor_exit used in the exception handler. Since the
263 offset could be zero and give a wrong meaning of the flag it is
267 if (checksync && (m->flags & ACC_SYNCHRONIZED))
268 (void) dseg_adds4(cd, (rd->maxmemuse + 1) * 8); /* IsSync */
273 (void) dseg_adds4(cd, 0); /* IsSync */
275 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
276 (void) dseg_adds4(cd, rd->savintregcnt - rd->maxsavintreguse);/* IntSave */
277 (void) dseg_adds4(cd, rd->savfltregcnt - rd->maxsavfltreguse);/* FltSave */
278 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
280 /* create exception table */
282 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
283 dseg_addtarget(cd, ex->start);
284 dseg_addtarget(cd, ex->end);
285 dseg_addtarget(cd, ex->handler);
286 (void) dseg_addaddress(cd, ex->catchtype);
289 /* initialize mcode variables */
291 cd->mcodeptr = (u1 *) cd->mcodebase;
292 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
293 MCODECHECK(128 + m->paramcount);
295 /* create stack frame (if necessary) */
297 if (parentargs_base) {
298 x86_64_alu_imm_reg(cd, X86_64_SUB, parentargs_base * 8, REG_SP);
301 /* save return address and used callee saved registers */
304 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
305 p--; x86_64_mov_reg_membase(cd, rd->savintregs[i], REG_SP, p * 8);
307 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
308 p--; x86_64_movq_reg_membase(cd, rd->savfltregs[i], REG_SP, p * 8);
311 /* save monitorenter argument */
313 #if defined(USE_THREADS)
314 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
315 if (m->flags & ACC_STATIC) {
316 x86_64_mov_imm_reg(cd, (s8) m->class, REG_ITMP1);
317 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, rd->maxmemuse * 8);
320 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, rd->maxmemuse * 8);
325 /* copy argument registers to stack and call trace function with pointer
326 to arguments on stack.
329 x86_64_alu_imm_reg(cd, X86_64_SUB, (6 + 8 + 1 + 1) * 8, REG_SP);
331 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 1 * 8);
332 x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 2 * 8);
333 x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 3 * 8);
334 x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 4 * 8);
335 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 5 * 8);
336 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 6 * 8);
338 x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 7 * 8);
339 x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 8 * 8);
340 x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 9 * 8);
341 x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 10 * 8);
342 /* x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 11 * 8); */
343 /* x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 12 * 8); */
344 /* x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 13 * 8); */
345 /* x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 14 * 8); */
347 for (p = 0, l = 0; p < m->paramcount; p++) {
348 t = m->paramtypes[p];
350 if (IS_FLT_DBL_TYPE(t)) {
351 for (s1 = (m->paramcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : m->paramcount - 2; s1 >= p; s1--) {
352 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
355 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
360 x86_64_mov_imm_reg(cd, (s8) m, REG_ITMP2);
361 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
362 x86_64_mov_imm_reg(cd, (s8) builtin_trace_args, REG_ITMP1);
363 x86_64_call_reg(cd, REG_ITMP1);
365 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[0]);
366 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[1]);
367 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[2]);
368 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[3]);
369 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[4]);
370 x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, rd->argintregs[5]);
372 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[0]);
373 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[1]);
374 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[2]);
375 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[3]);
376 /* x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[4]); */
377 /* x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[5]); */
378 /* x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[6]); */
379 /* x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, rd->argfltregs[7]); */
381 x86_64_alu_imm_reg(cd, X86_64_ADD, (6 + 8 + 1 + 1) * 8, REG_SP);
384 /* take arguments out of register or stack frame */
386 for (p = 0, l = 0, s1 = 0, s2 = 0; p < m->paramcount; p++) {
387 t = m->paramtypes[p];
388 var = &(rd->locals[l][t]);
390 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
393 if (IS_INT_LNG_TYPE(t)) {
400 if (IS_INT_LNG_TYPE(t)) { /* integer args */
401 if (s1 < INT_ARG_CNT) { /* register arguments */
402 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
403 M_INTMOVE(rd->argintregs[s1], var->regoff);
405 } else { /* reg arg -> spilled */
406 x86_64_mov_reg_membase(cd, rd->argintregs[s1], REG_SP, var->regoff * 8);
409 } else { /* stack arguments */
410 pa = s1 - INT_ARG_CNT;
411 if (s2 >= FLT_ARG_CNT) {
412 pa += s2 - FLT_ARG_CNT;
414 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
415 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff); /* + 8 for return address */
416 } else { /* stack arg -> spilled */
417 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1); /* + 8 for return address */
418 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, var->regoff * 8);
423 } else { /* floating args */
424 if (s2 < FLT_ARG_CNT) { /* register arguments */
425 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
426 M_FLTMOVE(rd->argfltregs[s2], var->regoff);
428 } else { /* reg arg -> spilled */
429 x86_64_movq_reg_membase(cd, rd->argfltregs[s2], REG_SP, var->regoff * 8);
432 } else { /* stack arguments */
433 pa = s2 - FLT_ARG_CNT;
434 if (s1 >= INT_ARG_CNT) {
435 pa += s1 - INT_ARG_CNT;
437 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
438 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff);
441 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
442 x86_64_movq_reg_membase(cd, REG_FTMP1, REG_SP, var->regoff * 8);
449 /* call monitorenter function */
451 #if defined(USE_THREADS)
452 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
453 s8 func_enter = (m->flags & ACC_STATIC) ?
454 (s8) builtin_staticmonitorenter : (s8) builtin_monitorenter;
455 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
456 x86_64_mov_imm_reg(cd, func_enter, REG_ITMP1);
457 x86_64_call_reg(cd, REG_ITMP1);
462 /* end of header generation */
464 /* walk through all basic blocks */
465 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
467 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
469 if (bptr->flags >= BBREACHED) {
471 /* branch resolving */
474 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
475 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
480 /* copy interface registers to their destination */
484 MCODECHECK(64 + len);
485 while (src != NULL) {
487 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
488 if (bptr->type == BBTYPE_SBR) {
489 d = reg_of_var(rd, src, REG_ITMP1);
490 x86_64_pop_reg(cd, d);
491 store_reg_to_var_int(src, d);
493 } else if (bptr->type == BBTYPE_EXH) {
494 d = reg_of_var(rd, src, REG_ITMP1);
495 M_INTMOVE(REG_ITMP1, d);
496 store_reg_to_var_int(src, d);
500 d = reg_of_var(rd, src, REG_ITMP1);
501 if ((src->varkind != STACKVAR)) {
503 if (IS_FLT_DBL_TYPE(s2)) {
504 s1 = rd->interfaces[len][s2].regoff;
505 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
509 x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
511 store_reg_to_var_flt(src, d);
514 s1 = rd->interfaces[len][s2].regoff;
515 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
519 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
521 store_reg_to_var_int(src, d);
528 /* walk through all instructions */
532 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
534 MCODECHECK(64); /* an instruction usually needs < 64 words */
537 case ICMD_NOP: /* ... ==> ... */
540 case ICMD_NULLCHECKPOP: /* ..., objectref ==> ... */
541 if (src->flags & INMEMORY) {
542 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
545 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
547 x86_64_jcc(cd, X86_64_CC_E, 0);
548 codegen_addxnullrefs(cd, cd->mcodeptr);
551 /* constant operations ************************************************/
553 case ICMD_ICONST: /* ... ==> ..., constant */
554 /* op1 = 0, val.i = constant */
556 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
557 if (iptr->val.i == 0) {
558 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
560 x86_64_movl_imm_reg(cd, iptr->val.i, d);
562 store_reg_to_var_int(iptr->dst, d);
565 case ICMD_ACONST: /* ... ==> ..., constant */
566 /* op1 = 0, val.a = constant */
568 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
569 if (iptr->val.a == 0) {
570 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
572 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, d);
574 store_reg_to_var_int(iptr->dst, d);
577 case ICMD_LCONST: /* ... ==> ..., constant */
578 /* op1 = 0, val.l = constant */
580 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
581 if (iptr->val.l == 0) {
582 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
584 x86_64_mov_imm_reg(cd, iptr->val.l, d);
586 store_reg_to_var_int(iptr->dst, d);
589 case ICMD_FCONST: /* ... ==> ..., constant */
590 /* op1 = 0, val.f = constant */
592 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
593 a = dseg_addfloat(cd, iptr->val.f);
594 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + a, d);
595 store_reg_to_var_flt(iptr->dst, d);
598 case ICMD_DCONST: /* ... ==> ..., constant */
599 /* op1 = 0, val.d = constant */
601 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
602 a = dseg_adddouble(cd, iptr->val.d);
603 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, d);
604 store_reg_to_var_flt(iptr->dst, d);
608 /* load/store operations **********************************************/
610 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
611 /* op1 = local variable */
613 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
614 if ((iptr->dst->varkind == LOCALVAR) &&
615 (iptr->dst->varnum == iptr->op1)) {
618 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
619 if (var->flags & INMEMORY) {
620 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
621 store_reg_to_var_int(iptr->dst, d);
624 if (iptr->dst->flags & INMEMORY) {
625 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
628 M_INTMOVE(var->regoff, d);
633 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
634 case ICMD_ALOAD: /* op1 = local variable */
636 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
637 if ((iptr->dst->varkind == LOCALVAR) &&
638 (iptr->dst->varnum == iptr->op1)) {
641 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
642 if (var->flags & INMEMORY) {
643 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
644 store_reg_to_var_int(iptr->dst, d);
647 if (iptr->dst->flags & INMEMORY) {
648 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
651 M_INTMOVE(var->regoff, d);
656 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
657 case ICMD_DLOAD: /* op1 = local variable */
659 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
660 if ((iptr->dst->varkind == LOCALVAR) &&
661 (iptr->dst->varnum == iptr->op1)) {
664 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
665 if (var->flags & INMEMORY) {
666 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
667 store_reg_to_var_flt(iptr->dst, d);
670 if (iptr->dst->flags & INMEMORY) {
671 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
674 M_FLTMOVE(var->regoff, d);
679 case ICMD_ISTORE: /* ..., value ==> ... */
680 case ICMD_LSTORE: /* op1 = local variable */
683 if ((src->varkind == LOCALVAR) &&
684 (src->varnum == iptr->op1)) {
687 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
688 if (var->flags & INMEMORY) {
689 var_to_reg_int(s1, src, REG_ITMP1);
690 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
693 var_to_reg_int(s1, src, var->regoff);
694 M_INTMOVE(s1, var->regoff);
698 case ICMD_FSTORE: /* ..., value ==> ... */
699 case ICMD_DSTORE: /* op1 = local variable */
701 if ((src->varkind == LOCALVAR) &&
702 (src->varnum == iptr->op1)) {
705 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
706 if (var->flags & INMEMORY) {
707 var_to_reg_flt(s1, src, REG_FTMP1);
708 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
711 var_to_reg_flt(s1, src, var->regoff);
712 M_FLTMOVE(s1, var->regoff);
717 /* pop/dup/swap operations ********************************************/
719 /* attention: double and longs are only one entry in CACAO ICMDs */
721 case ICMD_POP: /* ..., value ==> ... */
722 case ICMD_POP2: /* ..., value, value ==> ... */
725 case ICMD_DUP: /* ..., a ==> ..., a, a */
726 M_COPY(src, iptr->dst);
729 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
731 M_COPY(src, iptr->dst);
732 M_COPY(src->prev, iptr->dst->prev);
733 M_COPY(iptr->dst, iptr->dst->prev->prev);
736 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
738 M_COPY(src, iptr->dst);
739 M_COPY(src->prev, iptr->dst->prev);
740 M_COPY(src->prev->prev, iptr->dst->prev->prev);
741 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
744 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
746 M_COPY(src, iptr->dst);
747 M_COPY(src->prev, iptr->dst->prev);
750 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
752 M_COPY(src, iptr->dst);
753 M_COPY(src->prev, iptr->dst->prev);
754 M_COPY(src->prev->prev, iptr->dst->prev->prev);
755 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
756 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
759 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
761 M_COPY(src, iptr->dst);
762 M_COPY(src->prev, iptr->dst->prev);
763 M_COPY(src->prev->prev, iptr->dst->prev->prev);
764 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
765 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
766 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
769 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
771 M_COPY(src, iptr->dst->prev);
772 M_COPY(src->prev, iptr->dst);
776 /* integer operations *************************************************/
778 case ICMD_INEG: /* ..., value ==> ..., - value */
780 d = reg_of_var(rd, iptr->dst, REG_NULL);
781 if (iptr->dst->flags & INMEMORY) {
782 if (src->flags & INMEMORY) {
783 if (src->regoff == iptr->dst->regoff) {
784 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
787 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
788 x86_64_negl_reg(cd, REG_ITMP1);
789 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
793 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
794 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
798 if (src->flags & INMEMORY) {
799 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
800 x86_64_negl_reg(cd, d);
803 M_INTMOVE(src->regoff, iptr->dst->regoff);
804 x86_64_negl_reg(cd, iptr->dst->regoff);
809 case ICMD_LNEG: /* ..., value ==> ..., - value */
811 d = reg_of_var(rd, iptr->dst, REG_NULL);
812 if (iptr->dst->flags & INMEMORY) {
813 if (src->flags & INMEMORY) {
814 if (src->regoff == iptr->dst->regoff) {
815 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
818 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
819 x86_64_neg_reg(cd, REG_ITMP1);
820 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
824 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
825 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
829 if (src->flags & INMEMORY) {
830 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
831 x86_64_neg_reg(cd, iptr->dst->regoff);
834 M_INTMOVE(src->regoff, iptr->dst->regoff);
835 x86_64_neg_reg(cd, iptr->dst->regoff);
840 case ICMD_I2L: /* ..., value ==> ..., value */
842 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
843 if (src->flags & INMEMORY) {
844 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
847 x86_64_movslq_reg_reg(cd, src->regoff, d);
849 store_reg_to_var_int(iptr->dst, d);
852 case ICMD_L2I: /* ..., value ==> ..., value */
854 var_to_reg_int(s1, src, REG_ITMP1);
855 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
857 store_reg_to_var_int(iptr->dst, d);
860 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
862 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
863 if (src->flags & INMEMORY) {
864 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
867 x86_64_movsbq_reg_reg(cd, src->regoff, d);
869 store_reg_to_var_int(iptr->dst, d);
872 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
874 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
875 if (src->flags & INMEMORY) {
876 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
879 x86_64_movzwq_reg_reg(cd, src->regoff, d);
881 store_reg_to_var_int(iptr->dst, d);
884 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
886 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
887 if (src->flags & INMEMORY) {
888 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
891 x86_64_movswq_reg_reg(cd, src->regoff, d);
893 store_reg_to_var_int(iptr->dst, d);
897 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
899 d = reg_of_var(rd, iptr->dst, REG_NULL);
900 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
903 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
904 /* val.i = constant */
906 d = reg_of_var(rd, iptr->dst, REG_NULL);
907 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
910 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
912 d = reg_of_var(rd, iptr->dst, REG_NULL);
913 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
916 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
917 /* val.l = constant */
919 d = reg_of_var(rd, iptr->dst, REG_NULL);
920 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
923 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
925 d = reg_of_var(rd, iptr->dst, REG_NULL);
926 if (iptr->dst->flags & INMEMORY) {
927 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
928 if (src->prev->regoff == iptr->dst->regoff) {
929 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
930 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
933 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
934 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
935 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
938 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
939 M_INTMOVE(src->prev->regoff, REG_ITMP1);
940 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
941 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
943 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
944 if (src->prev->regoff == iptr->dst->regoff) {
945 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
948 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
949 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
950 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
954 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
955 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
959 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
960 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
961 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
963 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
964 M_INTMOVE(src->prev->regoff, d);
965 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
967 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
968 /* workaround for reg alloc */
969 if (src->regoff == iptr->dst->regoff) {
970 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
971 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
972 M_INTMOVE(REG_ITMP1, d);
975 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
976 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
980 /* workaround for reg alloc */
981 if (src->regoff == iptr->dst->regoff) {
982 M_INTMOVE(src->prev->regoff, REG_ITMP1);
983 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
984 M_INTMOVE(REG_ITMP1, d);
987 M_INTMOVE(src->prev->regoff, d);
988 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
994 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
995 /* val.i = constant */
997 d = reg_of_var(rd, iptr->dst, REG_NULL);
998 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
1001 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1003 d = reg_of_var(rd, iptr->dst, REG_NULL);
1004 if (iptr->dst->flags & INMEMORY) {
1005 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1006 if (src->prev->regoff == iptr->dst->regoff) {
1007 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1008 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1011 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1012 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1013 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1016 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1017 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1018 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1019 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1021 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1022 if (src->prev->regoff == iptr->dst->regoff) {
1023 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1026 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1027 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1028 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1032 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1033 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1037 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1038 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1039 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1041 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1042 M_INTMOVE(src->prev->regoff, d);
1043 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1045 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1046 /* workaround for reg alloc */
1047 if (src->regoff == iptr->dst->regoff) {
1048 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1049 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1050 M_INTMOVE(REG_ITMP1, d);
1053 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1054 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1058 /* workaround for reg alloc */
1059 if (src->regoff == iptr->dst->regoff) {
1060 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1061 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1062 M_INTMOVE(REG_ITMP1, d);
1065 M_INTMOVE(src->prev->regoff, d);
1066 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1072 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1073 /* val.l = constant */
1075 d = reg_of_var(rd, iptr->dst, REG_NULL);
1076 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
/* 32-bit multiply.  Open-coded rather than using the emit helpers: each of
   the four (stack, stack) / (stack, reg) / (reg, stack) / (reg, reg) operand
   placements is handled, first for a stack-resident destination (REG_ITMP1
   used as scratch), then for a register destination. */
1079 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1081 d = reg_of_var(rd, iptr->dst, REG_NULL);
1082 if (iptr->dst->flags & INMEMORY) {
1083 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
/* both operands on the stack: load val1, multiply by the val2 slot, spill */
1084 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1085 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1086 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1088 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1089 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1090 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1091 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1093 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1094 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1095 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1096 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* both operands already in registers; compute in ITMP1, then spill */
1099 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1100 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1101 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* destination lives in a register: multiply straight into it */
1105 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1106 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1107 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1109 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1110 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1111 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1113 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1114 M_INTMOVE(src->regoff, iptr->dst->regoff);
1115 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
/* if val2 already sits in the destination register, multiplication is
   commutative, so avoid the move that would clobber it */
1118 if (src->regoff == iptr->dst->regoff) {
1119 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1122 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1123 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
/* 32-bit multiply by constant: uses the three-operand imul-with-immediate
   form; multiply-by-2 is strength-reduced to an add (cheaper). */
1129 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1130 /* val.i = constant */
1132 d = reg_of_var(rd, iptr->dst, REG_NULL);
1133 if (iptr->dst->flags & INMEMORY) {
1134 if (src->flags & INMEMORY) {
1135 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1136 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1139 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1140 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1144 if (src->flags & INMEMORY) {
1145 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1148 if (iptr->val.i == 2) {
/* x * 2 as x + x: add is faster than imul */
1149 M_INTMOVE(src->regoff, iptr->dst->regoff);
1150 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1153 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
/* 64-bit multiply: same operand-placement matrix as ICMD_IMUL above, but
   using the quad-word (non-"l") mov/imul emitters. */
1159 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1161 d = reg_of_var(rd, iptr->dst, REG_NULL);
1162 if (iptr->dst->flags & INMEMORY) {
1163 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1164 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1165 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1166 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1168 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1169 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1170 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1171 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1173 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1174 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1175 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1176 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1179 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1180 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1181 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* register destination */
1185 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1186 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1187 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1189 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1190 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1191 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1193 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1194 M_INTMOVE(src->regoff, iptr->dst->regoff);
1195 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
/* commutativity trick as in IMUL: don't clobber val2 if it is already
   in the destination register */
1198 if (src->regoff == iptr->dst->regoff) {
1199 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1202 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1203 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
/* 64-bit multiply by constant.  imul's immediate form only takes a sign-
   extended 32-bit operand, so constants outside imm32 range are first
   materialized with mov_imm_reg and multiplied register-to-register. */
1209 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1210 /* val.l = constant */
1212 d = reg_of_var(rd, iptr->dst, REG_NULL);
1213 if (iptr->dst->flags & INMEMORY) {
1214 if (src->flags & INMEMORY) {
1215 if (x86_64_is_imm32(iptr->val.l)) {
1216 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1219 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1220 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1222 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1225 if (x86_64_is_imm32(iptr->val.l)) {
1226 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1229 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1230 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1232 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1236 if (src->flags & INMEMORY) {
1237 if (x86_64_is_imm32(iptr->val.l)) {
1238 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1241 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1242 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1246 /* should match in many cases */
1247 if (iptr->val.l == 2) {
/* NOTE(review): x * 2 strength-reduced via 32-bit add (alul) even though
   this is a long multiply — presumably relies on later sign behavior;
   verify against the 64-bit alu emitter. */
1248 M_INTMOVE(src->regoff, iptr->dst->regoff);
1249 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1252 if (x86_64_is_imm32(iptr->val.l)) {
1253 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1256 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1257 M_INTMOVE(src->regoff, iptr->dst->regoff);
1258 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
/* 32-bit divide.  x86 idiv requires the dividend in EDX:EAX and clobbers
   both; the JVM-spec corner case INT_MIN / -1 (which would trap with #DE
   overflow on x86) is detected up front and skipped past the idiv with
   hand-counted jump offsets — the byte counts in the comments below must
   match the exact encodings emitted, so do not reorder these lines. */
1265 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1267 d = reg_of_var(rd, iptr->dst, REG_NULL);
1268 if (src->prev->flags & INMEMORY) {
1269 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1272 M_INTMOVE(src->prev->regoff, RAX);
1275 if (src->flags & INMEMORY) {
1276 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1279 M_INTMOVE(src->regoff, REG_ITMP3);
1283 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1284 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1285 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1286 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1288 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1290 x86_64_idivl_reg(cd, REG_ITMP3);
1292 if (iptr->dst->flags & INMEMORY) {
/* quotient is left in EAX by idiv */
1293 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1294 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1297 M_INTMOVE(RAX, iptr->dst->regoff);
/* only restore %rdx if the result wasn't allocated there */
1299 if (iptr->dst->regoff != RDX) {
1300 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
/* 32-bit remainder: identical shape to IDIV, but the corner case also
   zeroes EDX (INT_MIN % -1 == 0) before skipping idiv, and the result is
   taken from EDX instead of EAX. */
1305 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1307 d = reg_of_var(rd, iptr->dst, REG_NULL);
1308 if (src->prev->flags & INMEMORY) {
1309 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1312 M_INTMOVE(src->prev->regoff, RAX);
1315 if (src->flags & INMEMORY) {
1316 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1319 M_INTMOVE(src->regoff, REG_ITMP3);
1323 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1324 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1325 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1326 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1327 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1329 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1331 x86_64_idivl_reg(cd, REG_ITMP3);
1333 if (iptr->dst->flags & INMEMORY) {
/* remainder is left in EDX by idiv */
1334 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1335 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1338 M_INTMOVE(RDX, iptr->dst->regoff);
1340 if (iptr->dst->regoff != RDX) {
1341 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
/* Division by a power of two via arithmetic shift.  A plain SAR rounds
   toward negative infinity, but Java division truncates toward zero, so
   negative dividends are biased by (2^n - 1) first: the cmov selects the
   biased value (lea result) when the value is negative (<= -1). */
1346 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1347 /* val.i = constant */
1349 var_to_reg_int(s1, src, REG_ITMP1);
1350 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1351 M_INTMOVE(s1, REG_ITMP1);
1352 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1353 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1354 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1355 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1356 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1357 store_reg_to_var_int(iptr->dst, d);
/* Remainder by a power of two.  Here val.i is the mask (divisor - 1):
   for negative values the biased copy is rounded down to a multiple of
   the divisor via AND with ~mask, and subtracting that from the original
   leaves a remainder with the sign of the dividend, as Java requires. */
1360 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1361 /* val.i = constant */
1363 var_to_reg_int(s1, src, REG_ITMP1);
1364 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1365 M_INTMOVE(s1, REG_ITMP1);
1366 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1367 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1368 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1369 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1370 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1371 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1372 store_reg_to_var_int(iptr->dst, d);
/* 64-bit divide: same structure as ICMD_IDIV but with quad-word emitters.
   The LONG_MIN sentinel does not fit in an imm32 CMP, so it is loaded
   into REG_ITMP2 first.  Note the dividend is staged in REG_ITMP1 here
   (not directly RAX as in the 32-bit case); the jcc byte offsets are
   hand-counted against the exact instruction encodings — do not reorder. */
1376 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1378 d = reg_of_var(rd, iptr->dst, REG_NULL);
1379 if (src->prev->flags & INMEMORY) {
1380 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1383 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1386 if (src->flags & INMEMORY) {
1387 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1390 M_INTMOVE(src->regoff, REG_ITMP3);
1394 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1395 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1396 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1397 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1398 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1400 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1402 x86_64_idiv_reg(cd, REG_ITMP3);
1404 if (iptr->dst->flags & INMEMORY) {
/* quotient in RAX */
1405 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1406 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1409 M_INTMOVE(RAX, iptr->dst->regoff);
1411 if (iptr->dst->regoff != RDX) {
1412 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
/* 64-bit remainder: LONG_MIN % -1 short-circuits to 0 (the xor of RDX)
   before the idiv; result comes from RDX. */
1417 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1419 d = reg_of_var(rd, iptr->dst, REG_NULL);
1420 if (src->prev->flags & INMEMORY) {
1421 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1424 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1427 if (src->flags & INMEMORY) {
1428 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1431 M_INTMOVE(src->regoff, REG_ITMP3);
1435 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1436 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1437 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1438 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1439 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1440 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1442 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1444 x86_64_idiv_reg(cd, REG_ITMP3);
1446 if (iptr->dst->flags & INMEMORY) {
/* remainder in RDX */
1447 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1448 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1451 M_INTMOVE(RDX, iptr->dst->regoff);
1453 if (iptr->dst->regoff != RDX) {
1454 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
/* Long division by a power of two: 64-bit variant of IDIVPOW2 (bias
   negative dividends by 2^n - 1, then arithmetic shift). */
1459 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1460 /* val.i = constant */
1462 var_to_reg_int(s1, src, REG_ITMP1);
1463 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1464 M_INTMOVE(s1, REG_ITMP1);
1465 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
/* NOTE(review): the bias is computed with a 32-bit `1 <<` even though this
   is a 64-bit divide — for val.i >= 31 this overflows/is UB in the compiler;
   looks like it should be `(1LL << iptr->val.i) - 1`.  Confirm the range of
   val.i produced by the optimizer before relying on this. */
1466 x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1467 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1468 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1469 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1470 store_reg_to_var_int(iptr->dst, d);
/* Long remainder by a power of two: 64-bit variant of IREMPOW2; val.i is
   the mask (divisor - 1), and the AND/SUB sequence yields a remainder with
   the sign of the dividend as Java requires. */
1473 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1474 /* val.l = constant */
1476 var_to_reg_int(s1, src, REG_ITMP1);
1477 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1478 M_INTMOVE(s1, REG_ITMP1);
1479 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1480 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1481 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1482 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1483 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1484 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1485 store_reg_to_var_int(iptr->dst, d);
/* Shift and bitwise-logical opcodes.  These all delegate to the shared
   emitters in emitfuncs.c: the i*/l* prefix picks 32- vs 64-bit width,
   and the X86_64_* argument selects the concrete instruction.  SHL/SAR/SHR
   map to Java <<, >> (arithmetic) and >>> (logical) respectively. */
1488 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1490 d = reg_of_var(rd, iptr->dst, REG_NULL);
1491 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1494 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1495 /* val.i = constant */
1497 d = reg_of_var(rd, iptr->dst, REG_NULL);
1498 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1501 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1503 d = reg_of_var(rd, iptr->dst, REG_NULL);
1504 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1507 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1508 /* val.i = constant */
1510 d = reg_of_var(rd, iptr->dst, REG_NULL);
1511 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1514 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1516 d = reg_of_var(rd, iptr->dst, REG_NULL);
1517 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1520 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1521 /* val.i = constant */
1523 d = reg_of_var(rd, iptr->dst, REG_NULL);
1524 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1527 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1529 d = reg_of_var(rd, iptr->dst, REG_NULL);
1530 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1533 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1534 /* val.i = constant */
1536 d = reg_of_var(rd, iptr->dst, REG_NULL);
1537 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1540 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1542 d = reg_of_var(rd, iptr->dst, REG_NULL);
1543 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1546 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1547 /* val.i = constant */
1549 d = reg_of_var(rd, iptr->dst, REG_NULL);
1550 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1553 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1555 d = reg_of_var(rd, iptr->dst, REG_NULL);
1556 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1559 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1560 /* val.l = constant */
1562 d = reg_of_var(rd, iptr->dst, REG_NULL);
1563 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1566 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1568 d = reg_of_var(rd, iptr->dst, REG_NULL);
1569 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1572 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1573 /* val.i = constant */
1575 d = reg_of_var(rd, iptr->dst, REG_NULL);
1576 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1579 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1581 d = reg_of_var(rd, iptr->dst, REG_NULL);
1582 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1585 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1586 /* val.l = constant */
1588 d = reg_of_var(rd, iptr->dst, REG_NULL);
1589 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1592 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1594 d = reg_of_var(rd, iptr->dst, REG_NULL);
1595 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1598 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1599 /* val.i = constant */
1601 d = reg_of_var(rd, iptr->dst, REG_NULL);
1602 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1605 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1607 d = reg_of_var(rd, iptr->dst, REG_NULL);
1608 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1611 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1612 /* val.l = constant */
1614 d = reg_of_var(rd, iptr->dst, REG_NULL);
1615 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1618 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1620 d = reg_of_var(rd, iptr->dst, REG_NULL);
1621 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1624 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1625 /* val.i = constant */
1627 d = reg_of_var(rd, iptr->dst, REG_NULL);
1628 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1631 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1633 d = reg_of_var(rd, iptr->dst, REG_NULL);
1634 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1637 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1638 /* val.l = constant */
1640 d = reg_of_var(rd, iptr->dst, REG_NULL);
1641 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
/* Increment a local int variable in place.  +1/-1 are emitted as inc/dec
   (measured faster than add here); otherwise a 32-bit add-immediate.  The
   local may live either in its stack slot or in a register. */
1645 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1646 /* op1 = variable, val.i = constant */
1648 /* using inc and dec is definitely faster than add -- tested */
1651 var = &(rd->locals[iptr->op1][TYPE_INT]);
1653 if (var->flags & INMEMORY) {
1654 if (iptr->val.i == 1) {
1655 x86_64_incl_membase(cd, REG_SP, d * 8);
1657 } else if (iptr->val.i == -1) {
1658 x86_64_decl_membase(cd, REG_SP, d * 8);
1661 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1665 if (iptr->val.i == 1) {
1666 x86_64_incl_reg(cd, d);
1668 } else if (iptr->val.i == -1) {
1669 x86_64_decl_reg(cd, d);
1672 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1678 /* floating operations ************************************************/
/* Float negate: flip the IEEE sign bit by XORing with a 0x80000000 mask
   kept in the data segment.  The RIP-relative displacement subtracts the
   current code position; the `+ 9` is the length of the movss instruction
   being emitted, since RIP-relative addressing is from the *next*
   instruction. */
1680 case ICMD_FNEG: /* ..., value ==> ..., - value */
1682 var_to_reg_flt(s1, src, REG_FTMP1);
1683 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1684 a = dseg_adds4(cd, 0x80000000);
1686 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1687 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1688 store_reg_to_var_flt(iptr->dst, d);
/* Double negate: same sign-bit trick with the 64-bit mask and xorpd. */
1691 case ICMD_DNEG: /* ..., value ==> ..., - value */
1693 var_to_reg_flt(s1, src, REG_FTMP1);
1694 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1695 a = dseg_adds8(cd, 0x8000000000000000);
1697 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1698 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1699 store_reg_to_var_flt(iptr->dst, d);
/* SSE scalar float/double arithmetic.  For the commutative ops (add, mul)
   the code avoids a move when either source already occupies the
   destination register; for the non-commutative ops (sub, div) only val1
   may coincide with the destination, and s2 is first parked in FTMP2. */
1702 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1704 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1705 var_to_reg_flt(s2, src, REG_FTMP2);
1706 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1708 x86_64_addss_reg_reg(cd, s2, d);
1709 } else if (s2 == d) {
/* addition is commutative, so adding s1 into d == s2 is fine */
1710 x86_64_addss_reg_reg(cd, s1, d);
1713 x86_64_addss_reg_reg(cd, s2, d);
1715 store_reg_to_var_flt(iptr->dst, d);
1718 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1720 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1721 var_to_reg_flt(s2, src, REG_FTMP2);
1722 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1724 x86_64_addsd_reg_reg(cd, s2, d);
1725 } else if (s2 == d) {
1726 x86_64_addsd_reg_reg(cd, s1, d);
1729 x86_64_addsd_reg_reg(cd, s2, d);
1731 store_reg_to_var_flt(iptr->dst, d);
1734 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1736 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1737 var_to_reg_flt(s2, src, REG_FTMP2);
1738 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
/* subtraction is not commutative: keep s2 safe in FTMP2 so that moving
   s1 into d cannot clobber it */
1740 M_FLTMOVE(s2, REG_FTMP2);
1744 x86_64_subss_reg_reg(cd, s2, d);
1745 store_reg_to_var_flt(iptr->dst, d);
1748 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1750 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1751 var_to_reg_flt(s2, src, REG_FTMP2);
1752 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1754 M_FLTMOVE(s2, REG_FTMP2);
1758 x86_64_subsd_reg_reg(cd, s2, d);
1759 store_reg_to_var_flt(iptr->dst, d);
1762 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1764 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1765 var_to_reg_flt(s2, src, REG_FTMP2);
1766 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1768 x86_64_mulss_reg_reg(cd, s2, d);
1769 } else if (s2 == d) {
1770 x86_64_mulss_reg_reg(cd, s1, d);
1773 x86_64_mulss_reg_reg(cd, s2, d);
1775 store_reg_to_var_flt(iptr->dst, d);
1778 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1780 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1781 var_to_reg_flt(s2, src, REG_FTMP2);
1782 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1784 x86_64_mulsd_reg_reg(cd, s2, d);
1785 } else if (s2 == d) {
1786 x86_64_mulsd_reg_reg(cd, s1, d);
1789 x86_64_mulsd_reg_reg(cd, s2, d);
1791 store_reg_to_var_flt(iptr->dst, d);
1794 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1796 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1797 var_to_reg_flt(s2, src, REG_FTMP2);
1798 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
/* non-commutative, same FTMP2 protection as FSUB */
1800 M_FLTMOVE(s2, REG_FTMP2);
1804 x86_64_divss_reg_reg(cd, s2, d);
1805 store_reg_to_var_flt(iptr->dst, d);
1808 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1810 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1811 var_to_reg_flt(s2, src, REG_FTMP2);
1812 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1814 M_FLTMOVE(s2, REG_FTMP2);
1818 x86_64_divsd_reg_reg(cd, s2, d);
1819 store_reg_to_var_flt(iptr->dst, d);
/* Integer-to-float conversions: single cvtsi2ss/sd instruction; the "q"
   variants take a 64-bit source for long operands. */
1822 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1824 var_to_reg_int(s1, src, REG_ITMP1);
1825 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1826 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1827 store_reg_to_var_flt(iptr->dst, d);
1830 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1832 var_to_reg_int(s1, src, REG_ITMP1);
1833 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1834 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1835 store_reg_to_var_flt(iptr->dst, d);
1838 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1840 var_to_reg_int(s1, src, REG_ITMP1);
1841 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1842 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1843 store_reg_to_var_flt(iptr->dst, d);
1846 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1848 var_to_reg_int(s1, src, REG_ITMP1);
1849 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1850 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1851 store_reg_to_var_flt(iptr->dst, d);
/* Float-to-integer conversions.  cvttss2si/cvttsd2si truncate, but on
   overflow/NaN they produce the "integer indefinite" value 0x80000000;
   when that sentinel is seen, an out-of-line asm builtin is called to
   produce the Java-specified saturated result.  `a` is the hand-counted
   byte length of the fallback sequence that the jcc skips over — keep it
   in sync with the emitted instructions. */
1854 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1856 var_to_reg_flt(s1, src, REG_FTMP1);
1857 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1858 x86_64_cvttss2si_reg_reg(cd, s1, d);
1859 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1860 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1861 x86_64_jcc(cd, X86_64_CC_NE, a);
1862 M_FLTMOVE(s1, REG_FTMP1);
1863 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1864 x86_64_call_reg(cd, REG_ITMP2);
1865 M_INTMOVE(REG_RESULT, d);
1866 store_reg_to_var_int(iptr->dst, d);
1869 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1871 var_to_reg_flt(s1, src, REG_FTMP1);
1872 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1873 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1874 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1875 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1876 x86_64_jcc(cd, X86_64_CC_NE, a);
1877 M_FLTMOVE(s1, REG_FTMP1);
1878 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1879 x86_64_call_reg(cd, REG_ITMP2);
1880 M_INTMOVE(REG_RESULT, d);
1881 store_reg_to_var_int(iptr->dst, d);
/* For long results the sentinel is 0x8000000000000000, which needs a
   register (no imm64 cmp), hence the extra mov_imm_reg. */
1884 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1886 var_to_reg_flt(s1, src, REG_FTMP1);
1887 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1888 x86_64_cvttss2siq_reg_reg(cd, s1, d);
1889 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1890 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1891 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1892 x86_64_jcc(cd, X86_64_CC_NE, a);
1893 M_FLTMOVE(s1, REG_FTMP1);
1894 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
1895 x86_64_call_reg(cd, REG_ITMP2);
1896 M_INTMOVE(REG_RESULT, d);
1897 store_reg_to_var_int(iptr->dst, d);
1900 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1902 var_to_reg_flt(s1, src, REG_FTMP1);
1903 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1904 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1905 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1906 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1907 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1908 x86_64_jcc(cd, X86_64_CC_NE, a);
1909 M_FLTMOVE(s1, REG_FTMP1);
1910 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
1911 x86_64_call_reg(cd, REG_ITMP2);
1912 M_INTMOVE(REG_RESULT, d);
1913 store_reg_to_var_int(iptr->dst, d);
/* float <-> double widening/narrowing: single SSE conversion */
1916 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1918 var_to_reg_flt(s1, src, REG_FTMP1);
1919 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1920 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1921 store_reg_to_var_flt(iptr->dst, d);
1924 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1926 var_to_reg_flt(s1, src, REG_FTMP1);
1927 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1928 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1929 store_reg_to_var_flt(iptr->dst, d);
/* Floating-point comparisons.  Branch-free: d starts at 0 (equal), and
   cmov picks 1 or -1 from ITMP1/ITMP2 based on ucomiss/ucomisd flags.
   The P (parity) cmov handles the unordered (NaN) case: FCMPL/DCMPL pick
   the "greater" result, FCMPG/DCMPG the "less" result — that is the only
   difference between the L and G variants.
   NOTE(review): the "== => 0, < => 1, > => -1" result encoding in the
   original comments is inverted relative to the JVM's fcmp convention
   (< => -1, > => 1); presumably the ucomiss operand order compensates —
   verify against the emitter before changing anything here. */
1932 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1933 /* == => 0, < => 1, > => -1 */
1935 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1936 var_to_reg_flt(s2, src, REG_FTMP2);
1937 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1938 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1939 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1940 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1941 x86_64_ucomiss_reg_reg(cd, s1, s2);
1942 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1943 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1944 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1945 store_reg_to_var_int(iptr->dst, d);
1948 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1949 /* == => 0, < => 1, > => -1 */
1951 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1952 var_to_reg_flt(s2, src, REG_FTMP2);
1953 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1954 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1955 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1956 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1957 x86_64_ucomiss_reg_reg(cd, s1, s2);
1958 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1959 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1960 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1961 store_reg_to_var_int(iptr->dst, d);
1964 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1965 /* == => 0, < => 1, > => -1 */
1967 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1968 var_to_reg_flt(s2, src, REG_FTMP2);
1969 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1970 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1971 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1972 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1973 x86_64_ucomisd_reg_reg(cd, s1, s2);
1974 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1975 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1976 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1977 store_reg_to_var_int(iptr->dst, d);
1980 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1981 /* == => 0, < => 1, > => -1 */
1983 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1984 var_to_reg_flt(s2, src, REG_FTMP2);
1985 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1986 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1987 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1988 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1989 x86_64_ucomisd_reg_reg(cd, s1, s2);
1990 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1991 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1992 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1993 store_reg_to_var_int(iptr->dst, d);
1997 /* memory operations **************************************************/
/* Array length: 32-bit load of the size field in the array header. */
1999 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
2001 var_to_reg_int(s1, src, REG_ITMP1);
2002 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2003 gen_nullptr_check(s1);
2004 x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
2005 store_reg_to_var_int(iptr->dst, d);
/* Array loads.  s1 = arrayref, s2 = index; the memindex addressing mode
   scales the index by the element size (shift 0/1/2/3 for 1/2/4/8-byte
   elements).  The null check is skipped when the verifier/optimizer has
   set op1 != 0 (checks already proven unnecessary); sub-int element types
   use zero- or sign-extending loads per Java semantics (char unsigned,
   short/byte signed). */
2008 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2010 var_to_reg_int(s1, src->prev, REG_ITMP1);
2011 var_to_reg_int(s2, src, REG_ITMP2);
2012 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2013 if (iptr->op1 == 0) {
2014 gen_nullptr_check(s1);
2017 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2018 store_reg_to_var_int(iptr->dst, d);
2021 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2023 var_to_reg_int(s1, src->prev, REG_ITMP1);
2024 var_to_reg_int(s2, src, REG_ITMP2);
2025 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2026 if (iptr->op1 == 0) {
2027 gen_nullptr_check(s1);
2030 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2031 store_reg_to_var_int(iptr->dst, d);
2034 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2036 var_to_reg_int(s1, src->prev, REG_ITMP1);
2037 var_to_reg_int(s2, src, REG_ITMP2);
2038 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2039 if (iptr->op1 == 0) {
2040 gen_nullptr_check(s1);
2043 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2044 store_reg_to_var_int(iptr->dst, d);
2047 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2049 var_to_reg_int(s1, src->prev, REG_ITMP1);
2050 var_to_reg_int(s2, src, REG_ITMP2);
2051 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2052 if (iptr->op1 == 0) {
2053 gen_nullptr_check(s1);
2056 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2057 store_reg_to_var_flt(iptr->dst, d);
2060 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2062 var_to_reg_int(s1, src->prev, REG_ITMP1);
2063 var_to_reg_int(s2, src, REG_ITMP2);
2064 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2065 if (iptr->op1 == 0) {
2066 gen_nullptr_check(s1);
2069 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2070 store_reg_to_var_flt(iptr->dst, d);
2073 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2075 var_to_reg_int(s1, src->prev, REG_ITMP1);
2076 var_to_reg_int(s2, src, REG_ITMP2);
2077 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2078 if (iptr->op1 == 0) {
2079 gen_nullptr_check(s1);
2082 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2083 store_reg_to_var_int(iptr->dst, d);
2086 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2088 var_to_reg_int(s1, src->prev, REG_ITMP1);
2089 var_to_reg_int(s2, src, REG_ITMP2);
2090 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2091 if (iptr->op1 == 0) {
2092 gen_nullptr_check(s1);
2095 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2096 store_reg_to_var_int(iptr->dst, d);
2099 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2101 var_to_reg_int(s1, src->prev, REG_ITMP1);
2102 var_to_reg_int(s2, src, REG_ITMP2);
2103 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2104 if (iptr->op1 == 0) {
2105 gen_nullptr_check(s1);
2108 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2109 store_reg_to_var_int(iptr->dst, d);
/* Array stores.  Operand stack (top to bottom) is value, index, arrayref;
   the scale shift matches the element size as in the loads above.  As
   with loads, the null check is elided when op1 != 0.
   NOTE(review): unlike a fully-checked AASTORE, no dynamic type (array
   store) check is visible here — presumably handled elsewhere or elided;
   confirm before relying on it. */
2113 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2115 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2116 var_to_reg_int(s2, src->prev, REG_ITMP2);
2117 if (iptr->op1 == 0) {
2118 gen_nullptr_check(s1);
2121 var_to_reg_int(s3, src, REG_ITMP3);
2122 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2125 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2127 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2128 var_to_reg_int(s2, src->prev, REG_ITMP2);
2129 if (iptr->op1 == 0) {
2130 gen_nullptr_check(s1);
2133 var_to_reg_int(s3, src, REG_ITMP3);
2134 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2137 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2139 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2140 var_to_reg_int(s2, src->prev, REG_ITMP2);
2141 if (iptr->op1 == 0) {
2142 gen_nullptr_check(s1);
2145 var_to_reg_int(s3, src, REG_ITMP3);
2146 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2149 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2151 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2152 var_to_reg_int(s2, src->prev, REG_ITMP2);
2153 if (iptr->op1 == 0) {
2154 gen_nullptr_check(s1);
2157 var_to_reg_flt(s3, src, REG_FTMP3);
2158 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2161 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2163 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2164 var_to_reg_int(s2, src->prev, REG_ITMP2);
2165 if (iptr->op1 == 0) {
2166 gen_nullptr_check(s1);
2169 var_to_reg_flt(s3, src, REG_FTMP3);
2170 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
/* char and short both store 16 bits; truncation makes sign irrelevant */
2173 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2175 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2176 var_to_reg_int(s2, src->prev, REG_ITMP2);
2177 if (iptr->op1 == 0) {
2178 gen_nullptr_check(s1);
2181 var_to_reg_int(s3, src, REG_ITMP3);
2182 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2185 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2187 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2188 var_to_reg_int(s2, src->prev, REG_ITMP2);
2189 if (iptr->op1 == 0) {
2190 gen_nullptr_check(s1);
2193 var_to_reg_int(s3, src, REG_ITMP3);
2194 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2197 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2199 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2200 var_to_reg_int(s2, src->prev, REG_ITMP2);
2201 if (iptr->op1 == 0) {
2202 gen_nullptr_check(s1);
2205 var_to_reg_int(s3, src, REG_ITMP3);
2206 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2209 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2211 var_to_reg_int(s1, src->prev, REG_ITMP1);
2212 var_to_reg_int(s2, src, REG_ITMP2);
2213 if (iptr->op1 == 0) {
2214 gen_nullptr_check(s1);
2217 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2220 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2222 var_to_reg_int(s1, src->prev, REG_ITMP1);
2223 var_to_reg_int(s2, src, REG_ITMP2);
2224 if (iptr->op1 == 0) {
2225 gen_nullptr_check(s1);
2229 if (x86_64_is_imm32(iptr->val.l)) {
2230 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2233 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2234 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2238 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2240 var_to_reg_int(s1, src->prev, REG_ITMP1);
2241 var_to_reg_int(s2, src, REG_ITMP2);
2242 if (iptr->op1 == 0) {
2243 gen_nullptr_check(s1);
2246 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2249 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2251 var_to_reg_int(s1, src->prev, REG_ITMP1);
2252 var_to_reg_int(s2, src, REG_ITMP2);
2253 if (iptr->op1 == 0) {
2254 gen_nullptr_check(s1);
2257 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2260 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2262 var_to_reg_int(s1, src->prev, REG_ITMP1);
2263 var_to_reg_int(s2, src, REG_ITMP2);
2264 if (iptr->op1 == 0) {
2265 gen_nullptr_check(s1);
2268 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2271 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2273 var_to_reg_int(s1, src->prev, REG_ITMP1);
2274 var_to_reg_int(s2, src, REG_ITMP2);
2275 if (iptr->op1 == 0) {
2276 gen_nullptr_check(s1);
2279 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2283 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2284 /* op1 = type, val.a = field address */
2286 /* If the static fields' class is not yet initialized, we do it */
2287 /* now. The call code is generated later. */
2288 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2289 codegen_addclinitref(cd, cd->mcodeptr, ((fieldinfo *) iptr->val.a)->class);
2291 /* This is just for debugging purposes. It is very difficult to */
2292 /* read patched code. Here we patch the following 5 nop's */
2293 /* so that the real code stays untouched. */
2294 if (showdisassemble) {
2303 /* This approach is much faster than moving the field address */
2304 /* inline into a register. */
2305 a = dseg_addaddress(cd, &(((fieldinfo *) iptr->val.a)->value));
2306 x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
2307 switch (iptr->op1) {
2309 var_to_reg_int(s2, src, REG_ITMP1);
2310 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2314 var_to_reg_int(s2, src, REG_ITMP1);
2315 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2318 var_to_reg_flt(s2, src, REG_FTMP1);
2319 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2322 var_to_reg_flt(s2, src, REG_FTMP1);
2323 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
2326 throw_cacao_exception_exit(string_java_lang_InternalError,
2327 "Unknown PUTSTATIC operand type %d",
2332 case ICMD_GETSTATIC: /* ... ==> ..., value */
2333 /* op1 = type, val.a = field address */
2335 /* If the static fields' class is not yet initialized, we do it */
2336 /* now. The call code is generated later. */
2337 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2338 codegen_addclinitref(cd, cd->mcodeptr, ((fieldinfo *) iptr->val.a)->class);
2340 /* This is just for debugging purposes. It is very difficult to */
2341 /* read patched code. Here we patch the following 5 nop's */
2342 /* so that the real code stays untouched. */
2343 if (showdisassemble) {
2352 /* This approach is much faster than moving the field address */
2353 /* inline into a register. */
2354 a = dseg_addaddress(cd, &(((fieldinfo *) iptr->val.a)->value));
2355 x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
2356 switch (iptr->op1) {
2358 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2359 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2360 store_reg_to_var_int(iptr->dst, d);
2364 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2365 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2366 store_reg_to_var_int(iptr->dst, d);
2369 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2370 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2371 store_reg_to_var_flt(iptr->dst, d);
2374 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2375 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2376 store_reg_to_var_flt(iptr->dst, d);
2379 throw_cacao_exception_exit(string_java_lang_InternalError,
2380 "Unknown GETSTATIC operand type %d",
2385 case ICMD_PUTFIELD: /* ..., value ==> ... */
2386 /* op1 = type, val.i = field offset */
2388 a = ((fieldinfo *)(iptr->val.a))->offset;
2389 var_to_reg_int(s1, src->prev, REG_ITMP1);
2390 switch (iptr->op1) {
2392 var_to_reg_int(s2, src, REG_ITMP2);
2393 gen_nullptr_check(s1);
2394 x86_64_movl_reg_membase(cd, s2, s1, a);
2398 var_to_reg_int(s2, src, REG_ITMP2);
2399 gen_nullptr_check(s1);
2400 x86_64_mov_reg_membase(cd, s2, s1, a);
2403 var_to_reg_flt(s2, src, REG_FTMP2);
2404 gen_nullptr_check(s1);
2405 x86_64_movss_reg_membase(cd, s2, s1, a);
2408 var_to_reg_flt(s2, src, REG_FTMP2);
2409 gen_nullptr_check(s1);
2410 x86_64_movsd_reg_membase(cd, s2, s1, a);
2413 throw_cacao_exception_exit(string_java_lang_InternalError,
2414 "Unknown PUTFIELD operand type %d",
2419 case ICMD_GETFIELD: /* ... ==> ..., value */
2420 /* op1 = type, val.i = field offset */
2422 a = ((fieldinfo *)(iptr->val.a))->offset;
2423 var_to_reg_int(s1, src, REG_ITMP1);
2424 switch (iptr->op1) {
2426 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2427 gen_nullptr_check(s1);
2428 x86_64_movl_membase_reg(cd, s1, a, d);
2429 store_reg_to_var_int(iptr->dst, d);
2433 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2434 gen_nullptr_check(s1);
2435 x86_64_mov_membase_reg(cd, s1, a, d);
2436 store_reg_to_var_int(iptr->dst, d);
2439 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2440 gen_nullptr_check(s1);
2441 x86_64_movss_membase_reg(cd, s1, a, d);
2442 store_reg_to_var_flt(iptr->dst, d);
2445 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2446 gen_nullptr_check(s1);
2447 x86_64_movsd_membase_reg(cd, s1, a, d);
2448 store_reg_to_var_flt(iptr->dst, d);
2451 throw_cacao_exception_exit(string_java_lang_InternalError,
2452 "Unknown GETFIELD operand type %d",
2458 /* branch operations **************************************************/
2460 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2462 var_to_reg_int(s1, src, REG_ITMP1);
2463 M_INTMOVE(s1, REG_ITMP1_XPTR);
2465 x86_64_call_imm(cd, 0); /* passing exception pointer */
2466 x86_64_pop_reg(cd, REG_ITMP2_XPC);
2468 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
2469 x86_64_jmp_reg(cd, REG_ITMP3);
2473 case ICMD_GOTO: /* ... ==> ... */
2474 /* op1 = target JavaVM pc */
2476 x86_64_jmp_imm(cd, 0);
2477 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2481 case ICMD_JSR: /* ... ==> ... */
2482 /* op1 = target JavaVM pc */
2484 x86_64_call_imm(cd, 0);
2485 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2488 case ICMD_RET: /* ... ==> ... */
2489 /* op1 = local variable */
2491 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2492 var_to_reg_int(s1, var, REG_ITMP1);
2493 x86_64_jmp_reg(cd, s1);
2496 case ICMD_IFNULL: /* ..., value ==> ... */
2497 /* op1 = target JavaVM pc */
2499 if (src->flags & INMEMORY) {
2500 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2503 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2505 x86_64_jcc(cd, X86_64_CC_E, 0);
2506 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2509 case ICMD_IFNONNULL: /* ..., value ==> ... */
2510 /* op1 = target JavaVM pc */
2512 if (src->flags & INMEMORY) {
2513 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2516 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2518 x86_64_jcc(cd, X86_64_CC_NE, 0);
2519 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2522 case ICMD_IFEQ: /* ..., value ==> ... */
2523 /* op1 = target JavaVM pc, val.i = constant */
2525 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2528 case ICMD_IFLT: /* ..., value ==> ... */
2529 /* op1 = target JavaVM pc, val.i = constant */
2531 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2534 case ICMD_IFLE: /* ..., value ==> ... */
2535 /* op1 = target JavaVM pc, val.i = constant */
2537 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2540 case ICMD_IFNE: /* ..., value ==> ... */
2541 /* op1 = target JavaVM pc, val.i = constant */
2543 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2546 case ICMD_IFGT: /* ..., value ==> ... */
2547 /* op1 = target JavaVM pc, val.i = constant */
2549 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2552 case ICMD_IFGE: /* ..., value ==> ... */
2553 /* op1 = target JavaVM pc, val.i = constant */
2555 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
2558 case ICMD_IF_LEQ: /* ..., value ==> ... */
2559 /* op1 = target JavaVM pc, val.l = constant */
2561 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2564 case ICMD_IF_LLT: /* ..., value ==> ... */
2565 /* op1 = target JavaVM pc, val.l = constant */
2567 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2570 case ICMD_IF_LLE: /* ..., value ==> ... */
2571 /* op1 = target JavaVM pc, val.l = constant */
2573 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2576 case ICMD_IF_LNE: /* ..., value ==> ... */
2577 /* op1 = target JavaVM pc, val.l = constant */
2579 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2582 case ICMD_IF_LGT: /* ..., value ==> ... */
2583 /* op1 = target JavaVM pc, val.l = constant */
2585 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2588 case ICMD_IF_LGE: /* ..., value ==> ... */
2589 /* op1 = target JavaVM pc, val.l = constant */
2591 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
2594 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2595 /* op1 = target JavaVM pc */
2597 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2600 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2601 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2603 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2606 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2607 /* op1 = target JavaVM pc */
2609 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2612 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2613 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2615 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2618 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2619 /* op1 = target JavaVM pc */
2621 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2624 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2625 /* op1 = target JavaVM pc */
2627 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2630 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2631 /* op1 = target JavaVM pc */
2633 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2636 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2637 /* op1 = target JavaVM pc */
2639 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2642 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2643 /* op1 = target JavaVM pc */
2645 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2648 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2649 /* op1 = target JavaVM pc */
2651 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2654 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2655 /* op1 = target JavaVM pc */
2657 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2660 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2661 /* op1 = target JavaVM pc */
2663 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2666 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
2668 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2671 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2672 /* val.i = constant */
2674 var_to_reg_int(s1, src, REG_ITMP1);
2675 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2677 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2679 M_INTMOVE(s1, REG_ITMP1);
2682 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2684 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2685 x86_64_testl_reg_reg(cd, s1, s1);
2686 x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2687 store_reg_to_var_int(iptr->dst, d);
2690 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2691 /* val.i = constant */
2693 var_to_reg_int(s1, src, REG_ITMP1);
2694 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2696 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2698 M_INTMOVE(s1, REG_ITMP1);
2701 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2703 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2704 x86_64_testl_reg_reg(cd, s1, s1);
2705 x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2706 store_reg_to_var_int(iptr->dst, d);
2709 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2710 /* val.i = constant */
2712 var_to_reg_int(s1, src, REG_ITMP1);
2713 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2715 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2717 M_INTMOVE(s1, REG_ITMP1);
2720 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2722 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2723 x86_64_testl_reg_reg(cd, s1, s1);
2724 x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2725 store_reg_to_var_int(iptr->dst, d);
2728 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2729 /* val.i = constant */
2731 var_to_reg_int(s1, src, REG_ITMP1);
2732 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2734 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2736 M_INTMOVE(s1, REG_ITMP1);
2739 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2741 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2742 x86_64_testl_reg_reg(cd, s1, s1);
2743 x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2744 store_reg_to_var_int(iptr->dst, d);
2747 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2748 /* val.i = constant */
2750 var_to_reg_int(s1, src, REG_ITMP1);
2751 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2753 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2755 M_INTMOVE(s1, REG_ITMP1);
2758 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2760 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2761 x86_64_testl_reg_reg(cd, s1, s1);
2762 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2763 store_reg_to_var_int(iptr->dst, d);
2766 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2767 /* val.i = constant */
2769 var_to_reg_int(s1, src, REG_ITMP1);
2770 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2772 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2774 M_INTMOVE(s1, REG_ITMP1);
2777 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2779 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2780 x86_64_testl_reg_reg(cd, s1, s1);
2781 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2782 store_reg_to_var_int(iptr->dst, d);
2786 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2790 var_to_reg_int(s1, src, REG_RESULT);
2791 M_INTMOVE(s1, REG_RESULT);
2793 #if defined(USE_THREADS)
2794 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2795 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2796 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, rd->maxmemuse * 8);
2797 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2798 x86_64_call_reg(cd, REG_ITMP1);
2799 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_RESULT);
2803 goto nowperformreturn;
2805 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2808 var_to_reg_flt(s1, src, REG_FRESULT);
2809 M_FLTMOVE(s1, REG_FRESULT);
2811 #if defined(USE_THREADS)
2812 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2813 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2814 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, rd->maxmemuse * 8);
2815 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2816 x86_64_call_reg(cd, REG_ITMP1);
2817 x86_64_movq_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_FRESULT);
2821 goto nowperformreturn;
2823 case ICMD_RETURN: /* ... ==> ... */
2825 #if defined(USE_THREADS)
2826 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2827 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2828 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2829 x86_64_call_reg(cd, REG_ITMP1);
2837 p = parentargs_base;
2839 /* call trace function */
2841 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2843 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2844 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2846 x86_64_mov_imm_reg(cd, (s8) m, rd->argintregs[0]);
2847 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
2848 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2849 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2851 x86_64_mov_imm_reg(cd, (s8) builtin_displaymethodstop, REG_ITMP1);
2852 x86_64_call_reg(cd, REG_ITMP1);
2854 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2855 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2857 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2860 /* restore saved registers */
2861 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
2862 p--; x86_64_mov_membase_reg(cd, REG_SP, p * 8, rd->savintregs[i]);
2864 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
2865 p--; x86_64_movq_membase_reg(cd, REG_SP, p * 8, rd->savfltregs[i]);
2868 /* deallocate stack */
2869 if (parentargs_base) {
2870 x86_64_alu_imm_reg(cd, X86_64_ADD, parentargs_base * 8, REG_SP);
2879 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2884 tptr = (void **) iptr->target;
2886 s4ptr = iptr->val.a;
2887 l = s4ptr[1]; /* low */
2888 i = s4ptr[2]; /* high */
2890 var_to_reg_int(s1, src, REG_ITMP1);
2891 M_INTMOVE(s1, REG_ITMP1);
2893 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
2898 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
2899 x86_64_jcc(cd, X86_64_CC_A, 0);
2901 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
2902 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2904 /* build jump table top down and use address of lowest entry */
2906 /* s4ptr += 3 + i; */
2910 /* dseg_addtarget(cd, BlockPtrOfPC(*--s4ptr)); */
2911 dseg_addtarget(cd, (basicblock *) tptr[0]);
2915 /* length of dataseg after last dseg_addtarget is used by load */
2917 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
2918 dseg_adddata(cd, cd->mcodeptr);
2919 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
2920 x86_64_jmp_reg(cd, REG_ITMP1);
2926 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2928 s4 i, l, val, *s4ptr;
2931 tptr = (void **) iptr->target;
2933 s4ptr = iptr->val.a;
2934 l = s4ptr[0]; /* default */
2935 i = s4ptr[1]; /* count */
2937 MCODECHECK((i<<2)+8);
2938 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
2944 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
2945 x86_64_jcc(cd, X86_64_CC_E, 0);
2946 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[1]), cd->mcodeptr); */
2947 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2950 x86_64_jmp_imm(cd, 0);
2951 /* codegen_addreference(cd, BlockPtrOfPC(l), cd->mcodeptr); */
2953 tptr = (void **) iptr->target;
2954 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2961 case ICMD_BUILTIN3: /* ..., arg1, arg2, arg3 ==> ... */
2962 /* op1 = return type, val.a = function pointer*/
2966 case ICMD_BUILTIN2: /* ..., arg1, arg2 ==> ... */
2967 /* op1 = return type, val.a = function pointer*/
2971 case ICMD_BUILTIN1: /* ..., arg1 ==> ... */
2972 /* op1 = return type, val.a = function pointer*/
2976 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2977 /* op1 = arg count, val.a = method pointer */
2979 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2980 /* op1 = arg count, val.a = method pointer */
2982 case ICMD_INVOKEVIRTUAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2983 /* op1 = arg count, val.a = method pointer */
2985 case ICMD_INVOKEINTERFACE:/*.., objectref, [arg1, [arg2 ...]] ==> ... */
2986 /* op1 = arg count, val.a = method pointer */
2996 MCODECHECK((s3 << 1) + 64);
3003 /* copy arguments to registers or stack location */
3004 for (; --s3 >= 0; src = src->prev) {
3005 IS_INT_LNG_TYPE(src->type) ? iarg++ : farg++;
3011 s2 = (iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0 + (farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0;
3013 for (; --s3 >= 0; src = src->prev) {
3014 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
3015 if (src->varkind == ARGVAR) {
3016 if (IS_INT_LNG_TYPE(src->type)) {
3017 if (iarg >= INT_ARG_CNT) {
3021 if (farg >= FLT_ARG_CNT) {
3028 if (IS_INT_LNG_TYPE(src->type)) {
3029 if (iarg < INT_ARG_CNT) {
3030 s1 = rd->argintregs[iarg];
3031 var_to_reg_int(d, src, s1);
3035 var_to_reg_int(d, src, REG_ITMP1);
3037 x86_64_mov_reg_membase(cd, d, REG_SP, s2 * 8);
3041 if (farg < FLT_ARG_CNT) {
3042 s1 = rd->argfltregs[farg];
3043 var_to_reg_flt(d, src, s1);
3047 var_to_reg_flt(d, src, REG_FTMP1);
3049 x86_64_movq_reg_membase(cd, d, REG_SP, s2 * 8);
3055 switch (iptr->opc) {
3063 x86_64_mov_imm_reg(cd, a, REG_ITMP1);
3064 x86_64_call_reg(cd, REG_ITMP1);
3067 case ICMD_INVOKESTATIC:
3069 a = (s8) lm->stubroutine;
3072 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3073 x86_64_call_reg(cd, REG_ITMP2);
3076 case ICMD_INVOKESPECIAL:
3078 a = (s8) lm->stubroutine;
3081 gen_nullptr_check(rd->argintregs[0]); /* first argument contains pointer */
3082 x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); /* access memory for hardware nullptr */
3083 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3084 x86_64_call_reg(cd, REG_ITMP2);
3087 case ICMD_INVOKEVIRTUAL:
3091 gen_nullptr_check(rd->argintregs[0]);
3092 x86_64_mov_membase_reg(cd, rd->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3093 x86_64_mov_membase32_reg(cd, REG_ITMP2, OFFSET(vftbl_t, table[0]) + sizeof(methodptr) * lm->vftblindex, REG_ITMP1);
3094 x86_64_call_reg(cd, REG_ITMP1);
3097 case ICMD_INVOKEINTERFACE:
3102 gen_nullptr_check(rd->argintregs[0]);
3103 x86_64_mov_membase_reg(cd, rd->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3104 x86_64_mov_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, interfacetable[0]) - sizeof(methodptr) * ci->index, REG_ITMP2);
3105 x86_64_mov_membase32_reg(cd, REG_ITMP2, sizeof(methodptr) * (lm - ci->methods), REG_ITMP1);
3106 x86_64_call_reg(cd, REG_ITMP1);
3111 error("Unkown ICMD-Command: %d", iptr->opc);
3114 /* d contains return type */
3116 if (d != TYPE_VOID) {
3117 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3118 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3119 M_INTMOVE(REG_RESULT, s1);
3120 store_reg_to_var_int(iptr->dst, s1);
3123 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3124 M_FLTMOVE(REG_FRESULT, s1);
3125 store_reg_to_var_flt(iptr->dst, s1);
3132 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3134 /* op1: 0 == array, 1 == class */
3135 /* val.a: (classinfo*) superclass */
3137 /* superclass is an interface:
3139 * return (sub != NULL) &&
3140 * (sub->vftbl->interfacetablelength > super->index) &&
3141 * (sub->vftbl->interfacetable[-super->index] != NULL);
3143 * superclass is a class:
3145 * return ((sub != NULL) && (0
3146 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3147 * super->vftbl->diffval));
3151 classinfo *super = (classinfo*) iptr->val.a;
3153 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3154 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3157 var_to_reg_int(s1, src, REG_ITMP1);
3158 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3160 M_INTMOVE(s1, REG_ITMP1);
3163 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3164 if (iptr->op1) { /* class/interface */
3165 if (super->flags & ACC_INTERFACE) { /* interface */
3166 x86_64_test_reg_reg(cd, s1, s1);
3168 /* TODO: clean up this calculation */
3169 a = 3; /* mov_membase_reg */
3170 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3172 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3173 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3176 CALCIMMEDIATEBYTES(a, super->index);
3181 a += 3; /* mov_membase_reg */
3182 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3187 x86_64_jcc(cd, X86_64_CC_E, a);
3189 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3190 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength), REG_ITMP2);
3191 x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP2);
3192 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3194 /* TODO: clean up this calculation */
3196 a += 3; /* mov_membase_reg */
3197 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3202 x86_64_jcc(cd, X86_64_CC_LE, a);
3203 x86_64_mov_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP1);
3204 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3205 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3207 } else { /* class */
3208 x86_64_test_reg_reg(cd, s1, s1);
3210 /* TODO: clean up this calculation */
3211 a = 3; /* mov_membase_reg */
3212 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3214 a += 10; /* mov_imm_reg */
3216 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3217 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, baseval));
3219 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3220 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3222 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3223 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3230 x86_64_jcc(cd, X86_64_CC_E, a);
3232 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3233 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3234 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3235 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3237 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, baseval), REG_ITMP1);
3238 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP3);
3239 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3240 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3241 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3243 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP1);
3244 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3245 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
3246 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3250 panic("internal error: no inlined array instanceof");
3252 store_reg_to_var_int(iptr->dst, d);
3255 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3257 /* op1: 0 == array, 1 == class */
3258 /* val.a: (classinfo*) superclass */
3260 /* superclass is an interface:
3262 * OK if ((sub == NULL) ||
3263 * (sub->vftbl->interfacetablelength > super->index) &&
3264 * (sub->vftbl->interfacetable[-super->index] != NULL));
3266 * superclass is a class:
3268 * OK if ((sub == NULL) || (0
3269 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3270 * super->vftbl->diffval));
3274 classinfo *super = (classinfo*) iptr->val.a;
3276 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3277 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3279 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3280 var_to_reg_int(s1, src, d);
3281 if (iptr->op1) { /* class/interface */
3282 if (super->flags & ACC_INTERFACE) { /* interface */
3283 x86_64_test_reg_reg(cd, s1, s1);
3285 /* TODO: clean up this calculation */
3286 a = 3; /* mov_membase_reg */
3287 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3289 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3290 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3293 CALCIMMEDIATEBYTES(a, super->index);
3298 a += 3; /* mov_membase_reg */
3299 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3304 x86_64_jcc(cd, X86_64_CC_E, a);
3306 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3307 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength), REG_ITMP2);
3308 x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP2);
3309 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3310 x86_64_jcc(cd, X86_64_CC_LE, 0);
3311 codegen_addxcastrefs(cd, cd->mcodeptr);
3312 x86_64_mov_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP2);
3313 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3314 x86_64_jcc(cd, X86_64_CC_E, 0);
3315 codegen_addxcastrefs(cd, cd->mcodeptr);
3317 } else { /* class */
3318 x86_64_test_reg_reg(cd, s1, s1);
3320 /* TODO: clean up this calculation */
3321 a = 3; /* mov_membase_reg */
3322 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3323 a += 10; /* mov_imm_reg */
3324 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3325 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, baseval));
3327 if (d != REG_ITMP3) {
3328 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3329 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3330 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3331 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3335 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3336 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3338 a += 10; /* mov_imm_reg */
3339 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3340 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3346 x86_64_jcc(cd, X86_64_CC_E, a);
3348 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3349 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3350 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3351 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3353 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, baseval), REG_ITMP1);
3354 if (d != REG_ITMP3) {
3355 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP3);
3356 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3357 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3358 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3360 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP1);
3363 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP2);
3364 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
3365 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3366 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3367 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3368 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3371 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
3372 x86_64_jcc(cd, X86_64_CC_A, 0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3373 codegen_addxcastrefs(cd, cd->mcodeptr);
3377 panic("internal error: no inlined array checkcast");
3380 store_reg_to_var_int(iptr->dst, d);
3383 case ICMD_CHECKASIZE: /* ..., size ==> ..., size */
3385 if (src->flags & INMEMORY) {
3386 x86_64_alul_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
3389 x86_64_testl_reg_reg(cd, src->regoff, src->regoff);
3391 x86_64_jcc(cd, X86_64_CC_L, 0);
3392 codegen_addxcheckarefs(cd, cd->mcodeptr);
3395 case ICMD_CHECKEXCEPTION: /* ... ==> ... */
3397 x86_64_test_reg_reg(cd, REG_RESULT, REG_RESULT);
3398 x86_64_jcc(cd, X86_64_CC_E, 0);
3399 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3402 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3403 /* op1 = dimension, val.a = array descriptor */
3405 /* check for negative sizes and copy sizes to stack if necessary */
3407 MCODECHECK((iptr->op1 << 1) + 64);
3409 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3410 var_to_reg_int(s2, src, REG_ITMP1);
3411 x86_64_testl_reg_reg(cd, s2, s2);
3412 x86_64_jcc(cd, X86_64_CC_L, 0);
3413 codegen_addxcheckarefs(cd, cd->mcodeptr);
3415 /* copy sizes to stack (argument numbers >= INT_ARG_CNT) */
3417 if (src->varkind != ARGVAR) {
3418 x86_64_mov_reg_membase(cd, s2, REG_SP, (s1 + INT_ARG_CNT) * 8);
3422 /* a0 = dimension count */
3423 x86_64_mov_imm_reg(cd, iptr->op1, rd->argintregs[0]);
3425 /* a1 = arraydescriptor */
3426 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, rd->argintregs[1]);
3428 /* a2 = pointer to dimensions = stack pointer */
3429 x86_64_mov_reg_reg(cd, REG_SP, rd->argintregs[2]);
3431 x86_64_mov_imm_reg(cd, (s8) builtin_nmultianewarray, REG_ITMP1);
3432 x86_64_call_reg(cd, REG_ITMP1);
3434 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3435 M_INTMOVE(REG_RESULT, s1);
3436 store_reg_to_var_int(iptr->dst, s1);
3440 throw_cacao_exception_exit(string_java_lang_InternalError,
3441 "Unknown ICMD %d", iptr->opc);
3444 } /* for instruction */
3446 /* copy values to interface registers */
3448 src = bptr->outstack;
3449 len = bptr->outdepth;
3450 MCODECHECK(64 + len);
3453 if ((src->varkind != STACKVAR)) {
3455 if (IS_FLT_DBL_TYPE(s2)) {
3456 var_to_reg_flt(s1, src, REG_FTMP1);
3457 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3458 M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3461 x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3465 var_to_reg_int(s1, src, REG_ITMP1);
3466 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3467 M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3470 x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3476 } /* if (bptr -> flags >= BBREACHED) */
3477 } /* for basic block */
3481 /* generate bound check stubs */
3483 u1 *xcodeptr = NULL;
3486 for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3487 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3489 cd->mcodeptr - cd->mcodebase);
3493 /* move index register into REG_ITMP1 */
3494 x86_64_mov_reg_reg(cd, bref->reg, REG_ITMP1); /* 3 bytes */
3496 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3497 dseg_adddata(cd, cd->mcodeptr);
3498 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3499 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3501 if (xcodeptr != NULL) {
3502 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3505 xcodeptr = cd->mcodeptr;
3507 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3508 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3510 x86_64_mov_reg_reg(cd, REG_ITMP1, rd->argintregs[0]);
3511 x86_64_mov_imm_reg(cd, (s8) new_arrayindexoutofboundsexception, REG_ITMP3);
3512 x86_64_call_reg(cd, REG_ITMP3);
3514 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3515 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3517 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3518 x86_64_jmp_reg(cd, REG_ITMP3);
3522 /* generate negative array size check stubs */
3526 for (bref = cd->xcheckarefs; bref != NULL; bref = bref->next) {
3527 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3528 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3530 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3534 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3536 cd->mcodeptr - cd->mcodebase);
3540 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3541 dseg_adddata(cd, cd->mcodeptr);
3542 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3543 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3545 if (xcodeptr != NULL) {
3546 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3549 xcodeptr = cd->mcodeptr;
3551 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3552 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3554 x86_64_mov_imm_reg(cd, (s8) new_negativearraysizeexception, REG_ITMP3);
3555 x86_64_call_reg(cd, REG_ITMP3);
3557 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3558 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3560 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3561 x86_64_jmp_reg(cd, REG_ITMP3);
3565 /* generate cast check stubs */
3569 for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3570 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3571 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3573 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3577 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3579 cd->mcodeptr - cd->mcodebase);
3583 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3584 dseg_adddata(cd, cd->mcodeptr);
3585 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3586 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3588 if (xcodeptr != NULL) {
3589 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3592 xcodeptr = cd->mcodeptr;
3594 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3595 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3597 x86_64_mov_imm_reg(cd, (s8) new_classcastexception, REG_ITMP3);
3598 x86_64_call_reg(cd, REG_ITMP3);
3600 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3601 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3603 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3604 x86_64_jmp_reg(cd, REG_ITMP3);
3608 /* generate divide by zero check stubs */
3612 for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
3613 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3614 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3616 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3620 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3622 cd->mcodeptr - cd->mcodebase);
3626 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3627 dseg_adddata(cd, cd->mcodeptr);
3628 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3629 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3631 if (xcodeptr != NULL) {
3632 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3635 xcodeptr = cd->mcodeptr;
3637 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3638 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3640 x86_64_mov_imm_reg(cd, (u8) new_arithmeticexception, REG_ITMP3);
3641 x86_64_call_reg(cd, REG_ITMP3);
3643 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3644 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3646 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3647 x86_64_jmp_reg(cd, REG_ITMP3);
3651 /* generate exception check stubs */
3655 for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
3656 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3657 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3659 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3663 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3665 cd->mcodeptr - cd->mcodebase);
3669 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3670 dseg_adddata(cd, cd->mcodeptr);
3671 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3672 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3674 if (xcodeptr != NULL) {
3675 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3678 xcodeptr = cd->mcodeptr;
3680 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3681 x86_64_alu_imm_reg(cd, X86_64_SUB, 8, REG_SP);
3682 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0);
3683 x86_64_mov_imm_reg(cd, (u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3684 x86_64_call_reg(cd, REG_ITMP1);
3685 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
3686 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
3687 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
3688 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);
3689 x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
3691 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
3692 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP1_XPTR);
3693 x86_64_mov_imm_membase(cd, 0, REG_ITMP3, 0);
3696 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3697 x86_64_jmp_reg(cd, REG_ITMP3);
3701 /* generate null pointer check stubs */
3705 for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
3706 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3707 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3709 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3713 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3715 cd->mcodeptr - cd->mcodebase);
3719 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3720 dseg_adddata(cd, cd->mcodeptr);
3721 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3722 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3724 if (xcodeptr != NULL) {
3725 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3728 xcodeptr = cd->mcodeptr;
3730 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3731 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3733 x86_64_mov_imm_reg(cd, (s8) new_nullpointerexception, REG_ITMP3);
3734 x86_64_call_reg(cd, REG_ITMP3);
3736 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3737 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3739 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3740 x86_64_jmp_reg(cd, REG_ITMP3);
3744 /* generate put/getstatic stub call code */
3752 tmpcd = DNEW(codegendata);
3754 for (cref = cd->clinitrefs; cref != NULL; cref = cref->next) {
3755 /* Get machine code which is patched back in later. A */
3756 /* `call rel32' is 5 bytes long. */
3757 xcodeptr = cd->mcodebase + cref->branchpos;
3759 mcode = *((u4 *) (xcodeptr + 1));
3763 /* patch in `call rel32' to call the following code */
3764 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
3765 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
3767 /* Save current stack pointer into a temporary register. */
3768 x86_64_mov_reg_reg(cd, REG_SP, REG_ITMP1);
3770 /* Push machine code bytes to patch onto the stack. */
3771 x86_64_push_imm(cd, (u1) xmcode);
3772 x86_64_push_imm(cd, (u4) mcode);
3774 x86_64_push_imm(cd, (u8) cref->class);
3776 /* Push previously saved stack pointer onto stack. */
3777 x86_64_push_reg(cd, REG_ITMP1);
3779 x86_64_mov_imm_reg(cd, (u8) asm_check_clinit, REG_ITMP1);
3780 x86_64_jmp_reg(cd, REG_ITMP1);
3785 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
3789 /* function createcompilerstub *************************************************
3791 creates a stub routine which calls the compiler
3793 *******************************************************************************/
3795 #define COMPSTUBSIZE 23
3797 u1 *createcompilerstub(methodinfo *m)
/* Builds a tiny trampoline for a not-yet-compiled method: it loads the   */
/* methodinfo pointer m into REG_ITMP1 and jumps to asm_call_jit_compiler,*/
/* which will compile m and transfer control to the generated code.       */
/* Presumably returns s, the CNEW'd COMPSTUBSIZE-byte stub buffer — the   */
/* return statement is not visible in this excerpt (TODO confirm).        */
/* NOTE(review): this excerpt has lines elided (opening brace, the        */
/* declarations of cd/dumpsize, the #endif of the STATISTICS block).      */
/* Code below is kept byte-identical.                                     */
3799 u1 *s = CNEW(u1, COMPSTUBSIZE); /* memory to hold the stub */
3803 /* mark start of dump memory area */
3805 dumpsize = dump_size();
3807 cd = DNEW(codegendata);
/* NOTE(review): cd's mcodeptr presumably points into s at this point —   */
/* the setup lines are elided here; verify against the full source.       */
3810 /* code for the stub */
3811 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1); /* pass method to compiler */
3812 x86_64_mov_imm_reg(cd, (u8) asm_call_jit_compiler, REG_ITMP3);/* load address */
3813 x86_64_jmp_reg(cd, REG_ITMP3); /* jump to compiler */
3815 #if defined(STATISTICS)
3817 count_cstub_len += COMPSTUBSIZE;
3820 /* release dump area */
3822 dump_release(dumpsize);
3828 /* function removecompilerstub *************************************************
3830 deletes a compilerstub from memory (simply by freeing it)
3832 *******************************************************************************/
3834 void removecompilerstub(u1 *stub)
/* Releases the COMPSTUBSIZE bytes previously allocated by                */
/* createcompilerstub.  stub must be a pointer returned by that function. */
/* NOTE(review): the surrounding braces are elided in this excerpt.       */
3836 CFREE(stub, COMPSTUBSIZE);
3840 /* function: createnativestub **************************************************
3842 creates a stub routine which calls a native method
3844 *******************************************************************************/
3846 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3847 /* static java_objectheader **(*callgetexceptionptrptr)() = builtin_get_exceptionptrptr; */
3850 #define NATIVESTUBSIZE 420
3852 u1 *createnativestub(functionptr f, methodinfo *m)
/* Builds a stub that calls the native implementation f of Java method m. */
/* When STATIC_CLASSPATH is not defined, f may be 0: the stub then        */
/* resolves the native address lazily via codegen_resolve_native and      */
/* patches the call target and a skip-jump into itself (see the           */
/* callAddrPatchPos / jmpInstrPatchPos bookkeeping below).                */
/* Presumably returns s, the CNEW'd NATIVESTUBSIZE-byte stub buffer —     */
/* the return statement is not visible in this excerpt (TODO confirm).    */
/* NOTE(review): many lines are elided in this excerpt (braces,           */
/* #else/#endif pairs, declarations of cd/rd/dumpsize/p/l/s1/i/jmpInstrPos*/
/* /xcodeptr/mcode/xmcode/cref/tmpcd, trailing return).  The code below   */
/* is kept byte-identical; comments only have been added/fixed.           */
3854 u1 *s = CNEW(u1, NATIVESTUBSIZE); /* memory to hold the stub */
3855 s4 stackframesize; /* size of stackframe if needed */
3858 t_inlining_globals *id;
3861 void **callAddrPatchPos=0;
3863 void **jmpInstrPatchPos=0;
3865 /* mark start of dump memory area */
3867 dumpsize = dump_size();
3869 cd = DNEW(codegendata);
3870 rd = DNEW(registerdata);
3871 id = DNEW(t_inlining_globals);
3873 /* setup registers before using it */
3875 inlining_setup(m, id);
3876 reg_setup(m, rd, id);
3878 /* set some required variables which are normally set by codegen_setup */
3881 cd->clinitrefs = NULL;
3883 descriptor2types(m); /* set paramcount and paramtypes */
3885 /* if function is static, check for initialized */
3887 if (m->flags & ACC_STATIC) {
3888 /* if class isn't yet initialized, do it */
3889 if (!m->class->initialized) {
3890 codegen_addclinitref(cd, cd->mcodeptr, m->class);
/* Spill all integer and float argument registers to a temporary stack    */
/* area so they survive the trace/resolve calls below; they are reloaded  */
/* from the same slots further down.                                      */
3895 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
3897 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 1 * 8);
3898 x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 2 * 8);
3899 x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 3 * 8);
3900 x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 4 * 8);
3901 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 5 * 8);
3902 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 6 * 8);
3904 x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 7 * 8);
3905 x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 8 * 8);
3906 x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 9 * 8);
3907 x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 10 * 8);
3908 /* x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 11 * 8); */
3909 /* x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 12 * 8); */
3910 /* x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 13 * 8); */
3911 /* x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 14 * 8); */
3916 /* show integer hex code for float arguments */
3917 for (p = 0, l = 0; p < m->paramcount; p++) {
3918 if (IS_FLT_DBL_TYPE(m->paramtypes[p])) {
/* shift the integer argument registers up by one to open a slot, then    */
/* copy the float argument's bits into it for builtin_trace_args          */
3919 for (s1 = (m->paramcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : m->paramcount - 2; s1 >= p; s1--) {
3920 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
3923 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
/* trace entry: pass m on the stack and call builtin_trace_args           */
3928 x86_64_mov_imm_reg(cd, (s8) m, REG_ITMP1);
3929 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, 0 * 8);
3930 x86_64_mov_imm_reg(cd, (s8) builtin_trace_args, REG_ITMP1);
3931 x86_64_call_reg(cd, REG_ITMP1);
3933 /* call method to resolve native function if needed */
3934 #ifndef STATIC_CLASSPATH
3935 if (f==0) { /* only if not already resolved */
/* jmp_imm(0) is a placeholder; its rel32 is patched below so that once   */
/* resolved, subsequent calls jump straight over this resolver block      */
3936 x86_64_jmp_imm(cd,0);
3937 jmpInstrPos=cd->mcodeptr-4; /*needed to patch a jump over this block*/
3938 x86_64_mov_imm_reg(cd,(u8)m,rd->argintregs[0]);
3939 x86_64_mov_imm_reg(cd,0,rd->argintregs[1]);
3940 callAddrPatchPos=cd->mcodeptr-8; /* position where the native function address gets patched in */
3941 x86_64_mov_imm_reg(cd,0,rd->argintregs[2]);
3942 jmpInstrPatchPos=cd->mcodeptr-8;
3943 x86_64_mov_imm_reg(cd,jmpInstrPos,rd->argintregs[3]);
3944 x86_64_mov_imm_reg(cd,(s8)codegen_resolve_native,REG_ITMP1);
3945 x86_64_call_reg(cd,REG_ITMP1);
3946 *(jmpInstrPatchPos)=cd->mcodeptr-jmpInstrPos-1; /*=opcode jmp_imm size*/
/* reload the argument registers spilled above                            */
3950 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[0]);
3951 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[1]);
3952 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[2]);
3953 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[3]);
3954 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[4]);
3955 x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, rd->argintregs[5]);
3957 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[0]);
3958 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[1]);
3959 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[2]);
3960 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[3]);
3961 /* x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[4]); */
3962 /* x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[5]); */
3963 /* x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[6]); */
3964 /* x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, rd->argfltregs[7]); */
3966 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
3970 x86_64_alu_imm_reg(cd, X86_64_SUB, 7 * 8, REG_SP); /* keep stack 16-byte aligned */
3972 /* save callee saved float registers */
3973 x86_64_movq_reg_membase(cd, XMM15, REG_SP, 0 * 8);
3974 x86_64_movq_reg_membase(cd, XMM14, REG_SP, 1 * 8);
3975 x86_64_movq_reg_membase(cd, XMM13, REG_SP, 2 * 8);
3976 x86_64_movq_reg_membase(cd, XMM12, REG_SP, 3 * 8);
3977 x86_64_movq_reg_membase(cd, XMM11, REG_SP, 4 * 8);
3978 x86_64_movq_reg_membase(cd, XMM10, REG_SP, 5 * 8);
3981 /* save argument registers on stack -- if we have to */
/* extra slots are needed because two (static: env+class) or one (env)    */
/* hidden arguments are inserted in front of the Java arguments below     */
3982 if ((m->flags & ACC_STATIC && m->paramcount > (INT_ARG_CNT - 2)) || m->paramcount > (INT_ARG_CNT - 1)) {
3984 s4 paramshiftcnt = (m->flags & ACC_STATIC) ? 2 : 1;
3985 s4 stackparamcnt = (m->paramcount > INT_ARG_CNT) ? m->paramcount - INT_ARG_CNT : 0;
3987 stackframesize = stackparamcnt + paramshiftcnt;
3989 /* keep stack 16-byte aligned */
3990 if (!(stackframesize & 0x1))
3993 x86_64_alu_imm_reg(cd, X86_64_SUB, stackframesize * 8, REG_SP);
3995 /* copy stack arguments into new stack frame -- if any */
3996 for (i = 0; i < stackparamcnt; i++) {
3997 x86_64_mov_membase_reg(cd, REG_SP, (stackparamcnt + 1 + i) * 8, REG_ITMP1);
3998 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
4001 if (m->flags & ACC_STATIC) {
4002 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 1 * 8);
4003 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 0 * 8);
4006 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 0 * 8);
4010 /* keep stack 16-byte aligned */
4011 x86_64_alu_imm_reg(cd, X86_64_SUB, 8, REG_SP);
/* shift the integer argument registers up to make room for the hidden    */
/* leading arguments: env (and the class pointer for static methods) —    */
/* presumably the JNI calling convention; confirm against native/native.h */
4015 if (m->flags & ACC_STATIC) {
4016 x86_64_mov_reg_reg(cd, rd->argintregs[3], rd->argintregs[5]);
4017 x86_64_mov_reg_reg(cd, rd->argintregs[2], rd->argintregs[4]);
4018 x86_64_mov_reg_reg(cd, rd->argintregs[1], rd->argintregs[3]);
4019 x86_64_mov_reg_reg(cd, rd->argintregs[0], rd->argintregs[2]);
4021 /* put class into second argument register */
4022 x86_64_mov_imm_reg(cd, (u8) m->class, rd->argintregs[1]);
4025 x86_64_mov_reg_reg(cd, rd->argintregs[4], rd->argintregs[5]);
4026 x86_64_mov_reg_reg(cd, rd->argintregs[3], rd->argintregs[4]);
4027 x86_64_mov_reg_reg(cd, rd->argintregs[2], rd->argintregs[3]);
4028 x86_64_mov_reg_reg(cd, rd->argintregs[1], rd->argintregs[2]);
4029 x86_64_mov_reg_reg(cd, rd->argintregs[0], rd->argintregs[1]);
4032 /* put env into first argument register */
4033 x86_64_mov_imm_reg(cd, (u8) &env, rd->argintregs[0]);
/* the 8-byte immediate of this mov is the patch target recorded above    */
/* when the native address was not yet resolved (f == 0)                  */
4035 x86_64_mov_imm_reg(cd, (u8) f, REG_ITMP1);
4036 #ifndef STATIC_CLASSPATH
4038 (*callAddrPatchPos)=cd->mcodeptr-8;
4040 x86_64_call_reg(cd, REG_ITMP1);
4042 /* remove stackframe if there is one */
4043 if (stackframesize) {
4044 x86_64_alu_imm_reg(cd, X86_64_ADD, stackframesize * 8, REG_SP);
/* trace exit: preserve the int/float results across the                  */
/* builtin_displaymethodstop call                                         */
4048 x86_64_alu_imm_reg(cd, X86_64_SUB, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4050 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
4051 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
4053 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
4054 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
4055 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
4056 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
4058 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
4059 x86_64_call_reg(cd, REG_ITMP1);
4061 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
4062 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
4064 x86_64_alu_imm_reg(cd, X86_64_ADD, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4068 /* restore callee saved registers */
4069 x86_64_movq_membase_reg(cd, REG_SP, 0 * 8, XMM15);
4070 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, XMM14);
4071 x86_64_movq_membase_reg(cd, REG_SP, 2 * 8, XMM13);
4072 x86_64_movq_membase_reg(cd, REG_SP, 3 * 8, XMM12);
4073 x86_64_movq_membase_reg(cd, REG_SP, 4 * 8, XMM11);
4074 x86_64_movq_membase_reg(cd, REG_SP, 5 * 8, XMM10);
4076 x86_64_alu_imm_reg(cd, X86_64_ADD, 7 * 8, REG_SP); /* keep stack 16-byte aligned */
/* fetch the pending-exception pointer (per-thread via                    */
/* builtin_get_exceptionptrptr, otherwise the global _exceptionptr) and   */
/* test it; if non-NULL fall into the exception-forwarding code below     */
4079 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4080 x86_64_push_reg(cd, REG_RESULT);
4081 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4082 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4083 x86_64_call_reg(cd, REG_ITMP3);
4084 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
4085 x86_64_pop_reg(cd, REG_RESULT);
4087 x86_64_mov_imm_reg(cd, (s8) &_exceptionptr, REG_ITMP3);
4088 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP3);
4090 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
4091 x86_64_jcc(cd, X86_64_CC_NE, 1);
/* exception path: move the exception into REG_ITMP1_XPTR, clear the      */
/* exception-pointer slot, compute the faulting PC from the return        */
/* address, and jump to asm_handle_nat_exception                          */
4095 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4096 x86_64_push_reg(cd, REG_ITMP3);
4097 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4098 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4099 x86_64_call_reg(cd, REG_ITMP3);
4100 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
4101 x86_64_pop_reg(cd, REG_ITMP1_XPTR);
4103 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
4104 x86_64_mov_imm_reg(cd, (s8) &_exceptionptr, REG_ITMP3);
4105 x86_64_alu_reg_reg(cd, X86_64_XOR, REG_ITMP2, REG_ITMP2);
4106 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_ITMP3, 0); /* clear exception pointer */
4109 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC); /* get return address from stack */
4110 x86_64_alu_imm_reg(cd, X86_64_SUB, 3, REG_ITMP2_XPC); /* callq */
4112 x86_64_mov_imm_reg(cd, (s8) asm_handle_nat_exception, REG_ITMP3);
4113 x86_64_jmp_reg(cd, REG_ITMP3);
/* clinit stub patching: if a clinit ref was added above (static method   */
/* of an uninitialized class), patch a `call rel32' over the recorded     */
/* position and emit code that pushes the overwritten machine-code bytes, */
/* the class, and the saved SP, then jumps to asm_check_clinit            */
4122 tmpcd = DNEW(codegendata);
4124 /* there can only be one clinit ref entry */
4125 cref = cd->clinitrefs;
4128 /* Get machine code which is patched back in later. A */
4129 /* `call rel32' is 5 bytes long. */
4130 xcodeptr = cd->mcodebase + cref->branchpos;
4132 mcode = *((u4 *) (xcodeptr + 1));
4134 /* patch in `call rel32' to call the following code */
4135 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4136 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4138 /* Save current stack pointer into a temporary register. */
4139 x86_64_mov_reg_reg(cd, REG_SP, REG_ITMP1);
4141 /* Push machine code bytes to patch onto the stack. */
4142 x86_64_push_imm(cd, (u1) xmcode);
4143 x86_64_push_imm(cd, (u4) mcode);
4145 x86_64_push_imm(cd, (u8) cref->class);
4147 /* Push previously saved stack pointer onto stack. */
4148 x86_64_push_reg(cd, REG_ITMP1);
4150 x86_64_mov_imm_reg(cd, (u8) asm_check_clinit, REG_ITMP1);
4151 x86_64_jmp_reg(cd, REG_ITMP1);
/* debug output of the generated stub size (guarded by stubprinted;       */
/* surrounding condition elided in this excerpt)                          */
4157 static int stubprinted;
4159 printf("stubsize: %d\n", ((long) cd->mcodeptr - (long) s));
4164 #if defined(STATISTICS)
4166 count_nstub_len += NATIVESTUBSIZE;
4169 /* release dump area */
4171 dump_release(dumpsize);
4177 /* function: removenativestub **************************************************
4179 removes a previously created native-stub from memory
4181 *******************************************************************************/
4183 void removenativestub(u1 *stub)
/* Releases the NATIVESTUBSIZE bytes previously allocated by              */
/* createnativestub.  stub must be a pointer returned by that function.   */
/* NOTE(review): the surrounding braces are elided in this excerpt.       */
4185 CFREE(stub, NATIVESTUBSIZE);
4190 * These are local overrides for various environment variables in Emacs.
4191 * Please do not remove this and leave it at the end of the file, where
4192 * Emacs will automagically detect them.
4193 * ---------------------------------------------------------------------
4196 * indent-tabs-mode: t