1 /* jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003
4 Institut f. Computersprachen, TU Wien
5 R. Grafl, A. Krall, C. Kruegel, C. Oates, R. Obermaisser, M. Probst,
6 S. Ring, E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich,
9 This file is part of CACAO.
11 This program is free software; you can redistribute it and/or
12 modify it under the terms of the GNU General Public License as
13 published by the Free Software Foundation; either version 2, or (at
14 your option) any later version.
16 This program is distributed in the hope that it will be useful, but
17 WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 General Public License for more details.
21 You should have received a copy of the GNU General Public License
22 along with this program; if not, write to the Free Software
23 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
26 Contact: cacao@complang.tuwien.ac.at
28 Authors: Andreas Krall
31 $Id: codegen.c 1353 2004-07-26 22:31:24Z twisti $
40 #include <sys/ucontext.h>
49 #include "jit/parse.h"
50 #include "jit/x86_64/codegen.h"
51 #include "jit/x86_64/emitfuncs.h"
52 #include "jit/x86_64/types.h"
54 /* include independent code generation stuff */
55 #include "jit/codegen.inc"
56 #include "jit/reg.inc"
59 /* register description - array ************************************************/
61 /* #define REG_RES 0 reserved register for OS or code generator */
62 /* #define REG_RET 1 return value register */
63 /* #define REG_EXC 2 exception value register (only old jit) */
64 /* #define REG_SAV 3 (callee) saved register */
65 /* #define REG_TMP 4 scratch temporary register (caller saved) */
66 /* #define REG_ARG 5 argument register (caller saved) */
68 /* #define REG_END -1 last entry in tables */
71 REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
72 REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
/* Float register usage map, presumably indexed by XMM register number
   (xmm0 upward) -- TODO confirm against reg.inc.  First row: four
   argument registers followed by four scratch temporaries; second row:
   three reserved registers and five more temporaries.  The commented-out
   rows show an earlier mapping that used callee-saved float registers.
   NOTE(review): the initializer's closing lines (REG_END terminator) are
   not visible in this chunk. */
77 int nregdescfloat[] = {
78 /* REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP, */
79 /* REG_RES, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV, REG_SAV, */
80 REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
81 REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
86 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* thread_restartcriticalsection **********************************************

   Called when a thread is stopped inside JIT code: asks the threading
   subsystem (thread_checkcritical) whether the interrupted instruction
   pointer (RIP saved in the ucontext) lies inside a registered critical
   section, and if so rewrites the saved RIP so the thread restarts that
   section from its beginning when resumed.

   NOTE(review): the declaration of `critical`, the surrounding braces and
   the (expected) null check on the thread_checkcritical result are in
   lines missing from this chunk -- confirm against the full source before
   relying on the exact control flow.

*******************************************************************************/
87 void thread_restartcriticalsection(ucontext_t *uc)
91 critical = thread_checkcritical((void *) uc->uc_mcontext.gregs[REG_RIP]);
94 uc->uc_mcontext.gregs[REG_RIP] = (u8) critical;
99 /* NullPointerException signal handler for hardware null pointer check */
/* catch_NullPointerException *************************************************

   SIGSEGV/SIGBUS handler (installed by init_exceptions): converts a
   hardware fault on a null dereference into a Java NullPointerException.
   It rewrites the saved register state so that, on return from the
   signal, execution resumes in asm_handle_exception with the exception
   object and the faulting PC in the registers that routine expects.

   NOTE(review): several original lines are missing from this chunk
   (function braces, the declaration of `nsig`, and possibly a
   sigemptyset(&nsig) / sigemptyset(&act.sa_mask) -- confirm against the
   full source).  Annotations below cover only the visible lines.

*******************************************************************************/
101 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
105 /* long faultaddr; */
107 struct ucontext *_uc = (struct ucontext *) _p;
108 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
109 struct sigaction act;
110 java_objectheader *xptr;
112 /* Reset signal handler - necessary for SysV, does no harm for BSD */
115 /* instr = *((int*)(sigctx->rip)); */
116 /* faultaddr = sigctx->sc_regs[(instr >> 16) & 0x1f]; */
118 /* if (faultaddr == 0) { */
/* re-register ourselves for the next fault (SysV resets the handler) */
119 act.sa_sigaction = (void *) catch_NullPointerException; /* reinstall handler */
120 act.sa_flags = SA_SIGINFO;
121 sigaction(sig, &act, NULL);
/* unblock the signal so a later null dereference can fault again;
   NOTE(review): no sigemptyset(&nsig) is visible here -- presumably in a
   missing line, verify before assuming nsig is initialized */
124 sigaddset(&nsig, sig);
125 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
127 xptr = new_exception(string_java_lang_NullPointerException);
/* hand off to the JIT's exception dispatcher: exception object in RAX,
   faulting PC in R10, then "return" into asm_handle_exception */
129 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
130 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
131 sigctx->rip = (u8) asm_handle_exception;
136 /* faultaddr += (long) ((instr << 16) >> 16); */
137 /* fprintf(stderr, "faulting address: 0x%08x\n", faultaddr); */
138 /* panic("Stack overflow"); */
143 /* ArithmeticException signal handler for hardware divide by zero check */
/* catch_ArithmeticException **************************************************

   SIGFPE handler (installed by init_exceptions): converts a hardware
   divide fault into a Java ArithmeticException ("/ by zero") by the same
   mechanism as catch_NullPointerException -- exception object in RAX,
   faulting PC in R10, resume at asm_handle_exception.

   NOTE(review): function braces, the `nsig` declaration and any
   sigemptyset calls are in lines missing from this chunk -- confirm
   against the full source.

*******************************************************************************/
145 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
149 struct ucontext *_uc = (struct ucontext *) _p;
150 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
151 struct sigaction act;
152 java_objectheader *xptr;
154 /* Reset signal handler - necessary for SysV, does no harm for BSD */
156 act.sa_sigaction = (void *) catch_ArithmeticException; /* reinstall handler */
157 act.sa_flags = SA_SIGINFO;
158 sigaction(sig, &act, NULL);
/* NOTE(review): no sigemptyset(&nsig) visible -- presumably in a missing
   line; verify nsig is initialized before this point */
161 sigaddset(&nsig, sig);
162 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
164 xptr = new_exception_message(string_java_lang_ArithmeticException,
165 string_java_lang_ArithmeticException_message);
/* hand off to the JIT's exception dispatcher (same protocol as the
   NullPointerException handler above) */
167 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
168 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
169 sigctx->rip = (u8) asm_handle_exception;
/* init_exceptions ************************************************************

   Installs the signal handlers that turn hardware faults into Java
   exceptions: SIGSEGV and SIGBUS both map to the NullPointerException
   handler, SIGFPE maps to the ArithmeticException handler.

   NOTE(review): function braces and several lines are missing from this
   chunk; no sigemptyset(&act.sa_mask) is visible before the sigaction
   calls -- confirm against the full source that act.sa_mask is
   initialized.

*******************************************************************************/
175 void init_exceptions(void)
177 struct sigaction act;
179 /* install signal handlers we need to convert to exceptions */
/* hardware null pointer check: faulting load/store raises SIGSEGV */
183 act.sa_sigaction = (void *) catch_NullPointerException;
184 act.sa_flags = SA_SIGINFO;
185 sigaction(SIGSEGV, &act, NULL);
/* some platforms deliver SIGBUS instead of SIGSEGV for the same fault */
189 act.sa_sigaction = (void *) catch_NullPointerException;
190 act.sa_flags = SA_SIGINFO;
191 sigaction(SIGBUS, &act, NULL);
/* hardware divide-by-zero check raises SIGFPE */
195 act.sa_sigaction = (void *) catch_ArithmeticException;
196 act.sa_flags = SA_SIGINFO;
197 sigaction(SIGFPE, &act, NULL);
201 /* function gen_mcode **********************************************************
203 generates machine code
205 *******************************************************************************/
207 void codegen(methodinfo *m)
209 s4 len, s1, s2, s3, d;
220 /* keep code size smaller */
230 /* space to save used callee saved registers */
232 savedregs_num += (r->savintregcnt - r->maxsavintreguse);
233 savedregs_num += (r->savfltregcnt - r->maxsavfltreguse);
235 parentargs_base = r->maxmemuse + savedregs_num;
237 #if defined(USE_THREADS) /* space to save argument of monitor_enter */
239 if (checksync && (m->flags & ACC_SYNCHRONIZED))
244 /* keep stack 16-byte aligned for calls into libc */
246 if (!m->isleafmethod || runverbose) {
247 if ((parentargs_base % 2) == 0) {
252 /* create method header */
254 (void) dseg_addaddress(m, m); /* MethodPointer */
255 (void) dseg_adds4(m, parentargs_base * 8); /* FrameSize */
257 #if defined(USE_THREADS)
259 /* IsSync contains the offset relative to the stack pointer for the
260 argument of monitor_exit used in the exception handler. Since the
261 offset could be zero and give a wrong meaning of the flag it is
265 if (checksync && (m->flags & ACC_SYNCHRONIZED))
266 (void) dseg_adds4(m, (r->maxmemuse + 1) * 8); /* IsSync */
271 (void) dseg_adds4(m, 0); /* IsSync */
273 (void) dseg_adds4(m, m->isleafmethod); /* IsLeaf */
274 (void) dseg_adds4(m, r->savintregcnt - r->maxsavintreguse);/* IntSave */
275 (void) dseg_adds4(m, r->savfltregcnt - r->maxsavfltreguse);/* FltSave */
276 (void) dseg_adds4(m, m->exceptiontablelength); /* ExTableSize */
278 /* create exception table */
280 for (ex = m->exceptiontable; ex != NULL; ex = ex->down) {
281 dseg_addtarget(m, ex->start);
282 dseg_addtarget(m, ex->end);
283 dseg_addtarget(m, ex->handler);
284 (void) dseg_addaddress(m, ex->catchtype);
287 /* initialize mcode variables */
289 cd->mcodeptr = (u1 *) cd->mcodebase;
290 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
291 MCODECHECK(128 + m->paramcount);
293 /* create stack frame (if necessary) */
295 if (parentargs_base) {
296 x86_64_alu_imm_reg(cd, X86_64_SUB, parentargs_base * 8, REG_SP);
299 /* save return address and used callee saved registers */
302 for (i = r->savintregcnt - 1; i >= r->maxsavintreguse; i--) {
303 p--; x86_64_mov_reg_membase(cd, r->savintregs[i], REG_SP, p * 8);
305 for (i = r->savfltregcnt - 1; i >= r->maxsavfltreguse; i--) {
306 p--; x86_64_movq_reg_membase(cd, r->savfltregs[i], REG_SP, p * 8);
309 /* save monitorenter argument */
311 #if defined(USE_THREADS)
312 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
313 if (m->flags & ACC_STATIC) {
314 x86_64_mov_imm_reg(cd, (s8) m->class, REG_ITMP1);
315 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, r->maxmemuse * 8);
318 x86_64_mov_reg_membase(cd, r->argintregs[0], REG_SP, r->maxmemuse * 8);
323 /* copy argument registers to stack and call trace function with pointer
324 to arguments on stack.
327 x86_64_alu_imm_reg(cd, X86_64_SUB, (6 + 8 + 1 + 1) * 8, REG_SP);
329 x86_64_mov_reg_membase(cd, r->argintregs[0], REG_SP, 1 * 8);
330 x86_64_mov_reg_membase(cd, r->argintregs[1], REG_SP, 2 * 8);
331 x86_64_mov_reg_membase(cd, r->argintregs[2], REG_SP, 3 * 8);
332 x86_64_mov_reg_membase(cd, r->argintregs[3], REG_SP, 4 * 8);
333 x86_64_mov_reg_membase(cd, r->argintregs[4], REG_SP, 5 * 8);
334 x86_64_mov_reg_membase(cd, r->argintregs[5], REG_SP, 6 * 8);
336 x86_64_movq_reg_membase(cd, r->argfltregs[0], REG_SP, 7 * 8);
337 x86_64_movq_reg_membase(cd, r->argfltregs[1], REG_SP, 8 * 8);
338 x86_64_movq_reg_membase(cd, r->argfltregs[2], REG_SP, 9 * 8);
339 x86_64_movq_reg_membase(cd, r->argfltregs[3], REG_SP, 10 * 8);
340 /* x86_64_movq_reg_membase(cd, r->argfltregs[4], REG_SP, 11 * 8); */
341 /* x86_64_movq_reg_membase(cd, r->argfltregs[5], REG_SP, 12 * 8); */
342 /* x86_64_movq_reg_membase(cd, r->argfltregs[6], REG_SP, 13 * 8); */
343 /* x86_64_movq_reg_membase(cd, r->argfltregs[7], REG_SP, 14 * 8); */
345 for (p = 0, l = 0; p < m->paramcount; p++) {
346 t = m->paramtypes[p];
348 if (IS_FLT_DBL_TYPE(t)) {
349 for (s1 = (m->paramcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : m->paramcount - 2; s1 >= p; s1--) {
350 x86_64_mov_reg_reg(cd, r->argintregs[s1], r->argintregs[s1 + 1]);
353 x86_64_movd_freg_reg(cd, r->argfltregs[l], r->argintregs[p]);
358 x86_64_mov_imm_reg(cd, (s8) m, REG_ITMP2);
359 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
360 x86_64_mov_imm_reg(cd, (s8) builtin_trace_args, REG_ITMP1);
361 x86_64_call_reg(cd, REG_ITMP1);
363 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, r->argintregs[0]);
364 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, r->argintregs[1]);
365 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, r->argintregs[2]);
366 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, r->argintregs[3]);
367 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, r->argintregs[4]);
368 x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, r->argintregs[5]);
370 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, r->argfltregs[0]);
371 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, r->argfltregs[1]);
372 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, r->argfltregs[2]);
373 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, r->argfltregs[3]);
374 /* x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, r->argfltregs[4]); */
375 /* x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, r->argfltregs[5]); */
376 /* x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, r->argfltregs[6]); */
377 /* x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, r->argfltregs[7]); */
379 x86_64_alu_imm_reg(cd, X86_64_ADD, (6 + 8 + 1 + 1) * 8, REG_SP);
382 /* take arguments out of register or stack frame */
384 for (p = 0, l = 0, s1 = 0, s2 = 0; p < m->paramcount; p++) {
385 t = m->paramtypes[p];
386 var = &(r->locals[l][t]);
388 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
391 if (IS_INT_LNG_TYPE(t)) {
398 if (IS_INT_LNG_TYPE(t)) { /* integer args */
399 if (s1 < INT_ARG_CNT) { /* register arguments */
400 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
401 M_INTMOVE(r->argintregs[s1], var->regoff);
403 } else { /* reg arg -> spilled */
404 x86_64_mov_reg_membase(cd, r->argintregs[s1], REG_SP, var->regoff * 8);
407 } else { /* stack arguments */
408 pa = s1 - INT_ARG_CNT;
409 if (s2 >= FLT_ARG_CNT) {
410 pa += s2 - FLT_ARG_CNT;
412 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
413 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff); /* + 8 for return address */
414 } else { /* stack arg -> spilled */
415 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1); /* + 8 for return address */
416 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, var->regoff * 8);
421 } else { /* floating args */
422 if (s2 < FLT_ARG_CNT) { /* register arguments */
423 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
424 M_FLTMOVE(r->argfltregs[s2], var->regoff);
426 } else { /* reg arg -> spilled */
427 x86_64_movq_reg_membase(cd, r->argfltregs[s2], REG_SP, var->regoff * 8);
430 } else { /* stack arguments */
431 pa = s2 - FLT_ARG_CNT;
432 if (s1 >= INT_ARG_CNT) {
433 pa += s1 - INT_ARG_CNT;
435 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
436 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff);
439 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
440 x86_64_movq_reg_membase(cd, REG_FTMP1, REG_SP, var->regoff * 8);
447 /* call monitorenter function */
449 #if defined(USE_THREADS)
450 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
451 s8 func_enter = (m->flags & ACC_STATIC) ?
452 (s8) builtin_staticmonitorenter : (s8) builtin_monitorenter;
453 x86_64_mov_membase_reg(cd, REG_SP, r->maxmemuse * 8, r->argintregs[0]);
454 x86_64_mov_imm_reg(cd, func_enter, REG_ITMP1);
455 x86_64_call_reg(cd, REG_ITMP1);
460 /* end of header generation */
462 /* walk through all basic blocks */
463 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
465 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
467 if (bptr->flags >= BBREACHED) {
469 /* branch resolving */
472 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
473 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
478 /* copy interface registers to their destination */
482 MCODECHECK(64 + len);
483 while (src != NULL) {
485 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
486 if (bptr->type == BBTYPE_SBR) {
487 d = reg_of_var(m, src, REG_ITMP1);
488 x86_64_pop_reg(cd, d);
489 store_reg_to_var_int(src, d);
491 } else if (bptr->type == BBTYPE_EXH) {
492 d = reg_of_var(m, src, REG_ITMP1);
493 M_INTMOVE(REG_ITMP1, d);
494 store_reg_to_var_int(src, d);
498 d = reg_of_var(m, src, REG_ITMP1);
499 if ((src->varkind != STACKVAR)) {
501 if (IS_FLT_DBL_TYPE(s2)) {
502 s1 = r->interfaces[len][s2].regoff;
503 if (!(r->interfaces[len][s2].flags & INMEMORY)) {
507 x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
509 store_reg_to_var_flt(src, d);
512 s1 = r->interfaces[len][s2].regoff;
513 if (!(r->interfaces[len][s2].flags & INMEMORY)) {
517 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
519 store_reg_to_var_int(src, d);
526 /* walk through all instructions */
530 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
532 MCODECHECK(64); /* an instruction usually needs < 64 words */
535 case ICMD_NOP: /* ... ==> ... */
538 case ICMD_NULLCHECKPOP: /* ..., objectref ==> ... */
539 if (src->flags & INMEMORY) {
540 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
543 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
545 x86_64_jcc(cd, X86_64_CC_E, 0);
546 codegen_addxnullrefs(m, cd->mcodeptr);
549 /* constant operations ************************************************/
551 case ICMD_ICONST: /* ... ==> ..., constant */
552 /* op1 = 0, val.i = constant */
554 d = reg_of_var(m, iptr->dst, REG_ITMP1);
555 if (iptr->val.i == 0) {
556 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
558 x86_64_movl_imm_reg(cd, iptr->val.i, d);
560 store_reg_to_var_int(iptr->dst, d);
563 case ICMD_ACONST: /* ... ==> ..., constant */
564 /* op1 = 0, val.a = constant */
566 d = reg_of_var(m, iptr->dst, REG_ITMP1);
567 if (iptr->val.a == 0) {
568 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
570 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, d);
572 store_reg_to_var_int(iptr->dst, d);
575 case ICMD_LCONST: /* ... ==> ..., constant */
576 /* op1 = 0, val.l = constant */
578 d = reg_of_var(m, iptr->dst, REG_ITMP1);
579 if (iptr->val.l == 0) {
580 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
582 x86_64_mov_imm_reg(cd, iptr->val.l, d);
584 store_reg_to_var_int(iptr->dst, d);
587 case ICMD_FCONST: /* ... ==> ..., constant */
588 /* op1 = 0, val.f = constant */
590 d = reg_of_var(m, iptr->dst, REG_FTMP1);
591 a = dseg_addfloat(m, iptr->val.f);
592 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + a, d);
593 store_reg_to_var_flt(iptr->dst, d);
596 case ICMD_DCONST: /* ... ==> ..., constant */
597 /* op1 = 0, val.d = constant */
599 d = reg_of_var(m, iptr->dst, REG_FTMP1);
600 a = dseg_adddouble(m, iptr->val.d);
601 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, d);
602 store_reg_to_var_flt(iptr->dst, d);
606 /* load/store operations **********************************************/
608 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
609 /* op1 = local variable */
611 d = reg_of_var(m, iptr->dst, REG_ITMP1);
612 if ((iptr->dst->varkind == LOCALVAR) &&
613 (iptr->dst->varnum == iptr->op1)) {
616 var = &(r->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
617 if (var->flags & INMEMORY) {
618 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
619 store_reg_to_var_int(iptr->dst, d);
622 if (iptr->dst->flags & INMEMORY) {
623 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
626 M_INTMOVE(var->regoff, d);
631 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
632 case ICMD_ALOAD: /* op1 = local variable */
634 d = reg_of_var(m, iptr->dst, REG_ITMP1);
635 if ((iptr->dst->varkind == LOCALVAR) &&
636 (iptr->dst->varnum == iptr->op1)) {
639 var = &(r->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
640 if (var->flags & INMEMORY) {
641 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
642 store_reg_to_var_int(iptr->dst, d);
645 if (iptr->dst->flags & INMEMORY) {
646 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
649 M_INTMOVE(var->regoff, d);
654 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
655 case ICMD_DLOAD: /* op1 = local variable */
657 d = reg_of_var(m, iptr->dst, REG_FTMP1);
658 if ((iptr->dst->varkind == LOCALVAR) &&
659 (iptr->dst->varnum == iptr->op1)) {
662 var = &(r->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
663 if (var->flags & INMEMORY) {
664 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
665 store_reg_to_var_flt(iptr->dst, d);
668 if (iptr->dst->flags & INMEMORY) {
669 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
672 M_FLTMOVE(var->regoff, d);
677 case ICMD_ISTORE: /* ..., value ==> ... */
678 case ICMD_LSTORE: /* op1 = local variable */
681 if ((src->varkind == LOCALVAR) &&
682 (src->varnum == iptr->op1)) {
685 var = &(r->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
686 if (var->flags & INMEMORY) {
687 var_to_reg_int(s1, src, REG_ITMP1);
688 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
691 var_to_reg_int(s1, src, var->regoff);
692 M_INTMOVE(s1, var->regoff);
696 case ICMD_FSTORE: /* ..., value ==> ... */
697 case ICMD_DSTORE: /* op1 = local variable */
699 if ((src->varkind == LOCALVAR) &&
700 (src->varnum == iptr->op1)) {
703 var = &(r->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
704 if (var->flags & INMEMORY) {
705 var_to_reg_flt(s1, src, REG_FTMP1);
706 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
709 var_to_reg_flt(s1, src, var->regoff);
710 M_FLTMOVE(s1, var->regoff);
715 /* pop/dup/swap operations ********************************************/
717 /* attention: double and longs are only one entry in CACAO ICMDs */
719 case ICMD_POP: /* ..., value ==> ... */
720 case ICMD_POP2: /* ..., value, value ==> ... */
723 case ICMD_DUP: /* ..., a ==> ..., a, a */
724 M_COPY(src, iptr->dst);
727 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
729 M_COPY(src, iptr->dst);
730 M_COPY(src->prev, iptr->dst->prev);
731 M_COPY(iptr->dst, iptr->dst->prev->prev);
734 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
736 M_COPY(src, iptr->dst);
737 M_COPY(src->prev, iptr->dst->prev);
738 M_COPY(src->prev->prev, iptr->dst->prev->prev);
739 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
742 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
744 M_COPY(src, iptr->dst);
745 M_COPY(src->prev, iptr->dst->prev);
748 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
750 M_COPY(src, iptr->dst);
751 M_COPY(src->prev, iptr->dst->prev);
752 M_COPY(src->prev->prev, iptr->dst->prev->prev);
753 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
754 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
757 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
759 M_COPY(src, iptr->dst);
760 M_COPY(src->prev, iptr->dst->prev);
761 M_COPY(src->prev->prev, iptr->dst->prev->prev);
762 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
763 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
764 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
767 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
769 M_COPY(src, iptr->dst->prev);
770 M_COPY(src->prev, iptr->dst);
774 /* integer operations *************************************************/
776 case ICMD_INEG: /* ..., value ==> ..., - value */
778 d = reg_of_var(m, iptr->dst, REG_NULL);
779 if (iptr->dst->flags & INMEMORY) {
780 if (src->flags & INMEMORY) {
781 if (src->regoff == iptr->dst->regoff) {
782 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
785 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
786 x86_64_negl_reg(cd, REG_ITMP1);
787 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
791 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
792 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
796 if (src->flags & INMEMORY) {
797 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
798 x86_64_negl_reg(cd, d);
801 M_INTMOVE(src->regoff, iptr->dst->regoff);
802 x86_64_negl_reg(cd, iptr->dst->regoff);
807 case ICMD_LNEG: /* ..., value ==> ..., - value */
809 d = reg_of_var(m, iptr->dst, REG_NULL);
810 if (iptr->dst->flags & INMEMORY) {
811 if (src->flags & INMEMORY) {
812 if (src->regoff == iptr->dst->regoff) {
813 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
816 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
817 x86_64_neg_reg(cd, REG_ITMP1);
818 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
822 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
823 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
827 if (src->flags & INMEMORY) {
828 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
829 x86_64_neg_reg(cd, iptr->dst->regoff);
832 M_INTMOVE(src->regoff, iptr->dst->regoff);
833 x86_64_neg_reg(cd, iptr->dst->regoff);
838 case ICMD_I2L: /* ..., value ==> ..., value */
840 d = reg_of_var(m, iptr->dst, REG_ITMP3);
841 if (src->flags & INMEMORY) {
842 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
845 x86_64_movslq_reg_reg(cd, src->regoff, d);
847 store_reg_to_var_int(iptr->dst, d);
850 case ICMD_L2I: /* ..., value ==> ..., value */
852 var_to_reg_int(s1, src, REG_ITMP1);
853 d = reg_of_var(m, iptr->dst, REG_ITMP3);
855 store_reg_to_var_int(iptr->dst, d);
858 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
860 d = reg_of_var(m, iptr->dst, REG_ITMP3);
861 if (src->flags & INMEMORY) {
862 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
865 x86_64_movsbq_reg_reg(cd, src->regoff, d);
867 store_reg_to_var_int(iptr->dst, d);
870 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
872 d = reg_of_var(m, iptr->dst, REG_ITMP3);
873 if (src->flags & INMEMORY) {
874 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
877 x86_64_movzwq_reg_reg(cd, src->regoff, d);
879 store_reg_to_var_int(iptr->dst, d);
882 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
884 d = reg_of_var(m, iptr->dst, REG_ITMP3);
885 if (src->flags & INMEMORY) {
886 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
889 x86_64_movswq_reg_reg(cd, src->regoff, d);
891 store_reg_to_var_int(iptr->dst, d);
895 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
897 d = reg_of_var(m, iptr->dst, REG_NULL);
898 x86_64_emit_ialu(m, X86_64_ADD, src, iptr);
901 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
902 /* val.i = constant */
904 d = reg_of_var(m, iptr->dst, REG_NULL);
905 x86_64_emit_ialuconst(m, X86_64_ADD, src, iptr);
908 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
910 d = reg_of_var(m, iptr->dst, REG_NULL);
911 x86_64_emit_lalu(m, X86_64_ADD, src, iptr);
914 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
915 /* val.l = constant */
917 d = reg_of_var(m, iptr->dst, REG_NULL);
918 x86_64_emit_laluconst(m, X86_64_ADD, src, iptr);
921 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
923 d = reg_of_var(m, iptr->dst, REG_NULL);
924 if (iptr->dst->flags & INMEMORY) {
925 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
926 if (src->prev->regoff == iptr->dst->regoff) {
927 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
928 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
931 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
932 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
933 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
936 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
937 M_INTMOVE(src->prev->regoff, REG_ITMP1);
938 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
939 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
941 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
942 if (src->prev->regoff == iptr->dst->regoff) {
943 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
946 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
947 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
948 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
952 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
953 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
957 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
958 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
959 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
961 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
962 M_INTMOVE(src->prev->regoff, d);
963 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
965 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
966 /* workaround for reg alloc */
967 if (src->regoff == iptr->dst->regoff) {
968 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
969 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
970 M_INTMOVE(REG_ITMP1, d);
973 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
974 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
978 /* workaround for reg alloc */
979 if (src->regoff == iptr->dst->regoff) {
980 M_INTMOVE(src->prev->regoff, REG_ITMP1);
981 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
982 M_INTMOVE(REG_ITMP1, d);
985 M_INTMOVE(src->prev->regoff, d);
986 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
992 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
993 /* val.i = constant */
995 d = reg_of_var(m, iptr->dst, REG_NULL);
996 x86_64_emit_ialuconst(m, X86_64_SUB, src, iptr);
999 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1001 d = reg_of_var(m, iptr->dst, REG_NULL);
1002 if (iptr->dst->flags & INMEMORY) {
1003 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1004 if (src->prev->regoff == iptr->dst->regoff) {
1005 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1006 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1009 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1010 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1011 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1014 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1015 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1016 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1017 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1019 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1020 if (src->prev->regoff == iptr->dst->regoff) {
1021 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1024 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1025 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1026 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1030 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1031 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1035 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1036 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1037 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1039 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1040 M_INTMOVE(src->prev->regoff, d);
1041 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1043 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1044 /* workaround for reg alloc */
1045 if (src->regoff == iptr->dst->regoff) {
1046 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1047 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1048 M_INTMOVE(REG_ITMP1, d);
1051 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1052 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1056 /* workaround for reg alloc */
1057 if (src->regoff == iptr->dst->regoff) {
1058 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1059 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1060 M_INTMOVE(REG_ITMP1, d);
1063 M_INTMOVE(src->prev->regoff, d);
1064 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1070 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1071 /* val.l = constant */
1073 d = reg_of_var(m, iptr->dst, REG_NULL);
1074 x86_64_emit_laluconst(m, X86_64_SUB, src, iptr);
1077 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
/* 32-bit multiply. The four branches below cover every combination of the
   two source operands living in a register or in a stack slot (INMEMORY);
   stack slot n is addressed as REG_SP + n*8. When the destination is also
   a stack slot the product is computed in REG_ITMP1 and spilled. */
1079 d = reg_of_var(m, iptr->dst, REG_NULL);
1080 if (iptr->dst->flags & INMEMORY) {
1081 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1082 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1083 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1084 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1086 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1087 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1088 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1089 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1091 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1092 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1093 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1094 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* both sources in registers, destination in memory */
1097 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1098 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1099 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* destination is a register: multiply directly into it */
1103 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1104 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1105 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1107 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1108 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1109 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1111 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1112 M_INTMOVE(src->regoff, iptr->dst->regoff);
1113 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
/* all three in registers; avoid clobbering src before it is used when
   it aliases the destination */
1116 if (src->regoff == iptr->dst->regoff) {
1117 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1120 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1121 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1127 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1128 /* val.i = constant */
/* 32-bit multiply by immediate via the 3-operand imul form
   (imul $imm, src, dst), with a strength-reduced add for *2. */
1130 d = reg_of_var(m, iptr->dst, REG_NULL);
1131 if (iptr->dst->flags & INMEMORY) {
1132 if (src->flags & INMEMORY) {
1133 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1134 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1137 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1138 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1142 if (src->flags & INMEMORY) {
1143 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
/* x * 2 is cheaper as x + x than as an imul */
1146 if (iptr->val.i == 2) {
1147 M_INTMOVE(src->regoff, iptr->dst->regoff);
1148 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1151 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
1157 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
/* 64-bit multiply; same register/stack dispatch matrix as ICMD_IMUL but
   using the quad-word mov/imul emitters. */
1159 d = reg_of_var(m, iptr->dst, REG_NULL);
1160 if (iptr->dst->flags & INMEMORY) {
1161 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1162 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1163 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1164 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1166 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1167 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1168 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1169 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1171 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1172 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1173 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1174 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* both sources in registers, destination spilled */
1177 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1178 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1179 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
/* destination is a register */
1183 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1184 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1185 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1187 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1188 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1189 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1191 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1192 M_INTMOVE(src->regoff, iptr->dst->regoff);
1193 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
/* all registers; guard the aliasing case src == dst */
1196 if (src->regoff == iptr->dst->regoff) {
1197 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1200 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1201 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
1207 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1208 /* val.l = constant */
/* 64-bit multiply by constant. imul's immediate form only encodes 32-bit
   immediates, so constants outside imm32 range are first materialized
   with a 64-bit mov and multiplied register-to-register. */
1210 d = reg_of_var(m, iptr->dst, REG_NULL);
1211 if (iptr->dst->flags & INMEMORY) {
1212 if (src->flags & INMEMORY) {
1213 if (x86_64_is_imm32(iptr->val.l)) {
1214 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1217 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1218 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1220 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1223 if (x86_64_is_imm32(iptr->val.l)) {
1224 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1227 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1228 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1230 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1234 if (src->flags & INMEMORY) {
1235 if (x86_64_is_imm32(iptr->val.l)) {
1236 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1239 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1240 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1244 /* should match in many cases */
1245 if (iptr->val.l == 2) {
1246 M_INTMOVE(src->regoff, iptr->dst->regoff);
1247 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1250 if (x86_64_is_imm32(iptr->val.l)) {
1251 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1254 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1255 M_INTMOVE(src->regoff, iptr->dst->regoff);
1256 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
1263 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
/* 32-bit signed division. Dividend goes to RAX (idiv requires it), the
   divisor to REG_ITMP3. The JVM-spec corner case
   Integer.MIN_VALUE / -1 must yield MIN_VALUE instead of trapping, so
   the idiv is skipped entirely when both match.
   NOTE(review): the jcc displacements are hand-counted byte lengths of
   the skipped instructions ("4 bytes", "6 bytes") and must stay in sync
   with the emitters — do not reorder these lines. */
1265 d = reg_of_var(m, iptr->dst, REG_NULL);
1266 if (src->prev->flags & INMEMORY) {
1267 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1270 M_INTMOVE(src->prev->regoff, RAX);
1273 if (src->flags & INMEMORY) {
1274 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1277 M_INTMOVE(src->regoff, REG_ITMP3);
1281 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1282 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1283 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1284 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1286 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1288 x86_64_idivl_reg(cd, REG_ITMP3);
1290 if (iptr->dst->flags & INMEMORY) {
1291 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1292 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1295 M_INTMOVE(RAX, iptr->dst->regoff);
/* only restore RDX if the quotient was not allocated to it */
1297 if (iptr->dst->regoff != RDX) {
1298 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1303 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
/* 32-bit signed remainder; same setup as ICMD_IDIV but the result is
   taken from RDX. For MIN_VALUE % -1 the spec requires 0, so RDX is
   zeroed (xor) before the idiv is skipped.
   NOTE(review): jcc displacements are hand-counted instruction byte
   lengths — keep them consistent with the emitted code. */
1305 d = reg_of_var(m, iptr->dst, REG_NULL);
1306 if (src->prev->flags & INMEMORY) {
1307 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1310 M_INTMOVE(src->prev->regoff, RAX);
1313 if (src->flags & INMEMORY) {
1314 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1317 M_INTMOVE(src->regoff, REG_ITMP3);
1321 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1322 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1323 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1324 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1325 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1327 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1329 x86_64_idivl_reg(cd, REG_ITMP3);
1331 if (iptr->dst->flags & INMEMORY) {
1332 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1333 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1336 M_INTMOVE(RDX, iptr->dst->regoff);
/* only restore RDX if the remainder was not allocated to it */
1338 if (iptr->dst->regoff != RDX) {
1339 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1344 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1345 /* val.i = constant */
/* 32-bit signed division by 2^val.i, rounded toward zero: negative
   dividends are biased by (2^shift - 1) before the arithmetic shift
   (cmp -1 / lea / cmovle selects the biased value when value <= -1). */
1347 var_to_reg_int(s1, src, REG_ITMP1);
1348 d = reg_of_var(m, iptr->dst, REG_ITMP3);
1349 M_INTMOVE(s1, REG_ITMP1);
1350 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1351 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1352 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1353 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1354 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1355 store_reg_to_var_int(iptr->dst, d);
1358 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1359 /* val.i = constant */
/* 32-bit remainder by a power of two; val.i holds the mask (2^k - 1).
   Computes value - (rounded-toward-zero multiple of 2^k): for negative
   values the biased copy is masked down and subtracted, preserving the
   sign of the dividend as the JVM requires. */
1361 var_to_reg_int(s1, src, REG_ITMP1);
1362 d = reg_of_var(m, iptr->dst, REG_ITMP3);
1363 M_INTMOVE(s1, REG_ITMP1);
1364 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1365 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1366 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1367 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1368 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1369 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1370 store_reg_to_var_int(iptr->dst, d);
1374 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
/* 64-bit signed division. Mirrors ICMD_IDIV: dividend staged through
   REG_ITMP1, Long.MIN_VALUE / -1 corner case skips the idiv (the
   0x8000000000000000 sentinel does not fit an imm32, hence the mov to
   REG_ITMP2 before the cmp). jcc displacements are hand-counted byte
   lengths of the skipped instructions. */
1376 d = reg_of_var(m, iptr->dst, REG_NULL);
1377 if (src->prev->flags & INMEMORY) {
1378 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1381 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1384 if (src->flags & INMEMORY) {
1385 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1388 M_INTMOVE(src->regoff, REG_ITMP3);
1392 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1393 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1394 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1395 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1396 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1398 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1400 x86_64_idiv_reg(cd, REG_ITMP3);
1402 if (iptr->dst->flags & INMEMORY) {
1403 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1404 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1407 M_INTMOVE(RAX, iptr->dst->regoff);
/* only restore RDX if the quotient was not allocated to it */
1409 if (iptr->dst->regoff != RDX) {
1410 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1415 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
/* 64-bit signed remainder; result in RDX. Long.MIN_VALUE % -1 must be 0,
   so RDX is zeroed before the idiv is skipped for that operand pair.
   Byte-counted jcc displacements as in ICMD_LDIV. */
1417 d = reg_of_var(m, iptr->dst, REG_NULL);
1418 if (src->prev->flags & INMEMORY) {
1419 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1422 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1425 if (src->flags & INMEMORY) {
1426 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1429 M_INTMOVE(src->regoff, REG_ITMP3);
1433 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1434 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1435 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1436 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1437 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1438 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1440 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1442 x86_64_idiv_reg(cd, REG_ITMP3);
1444 if (iptr->dst->flags & INMEMORY) {
1445 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1446 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1449 M_INTMOVE(RDX, iptr->dst->regoff);
/* only restore RDX if the remainder was not allocated to it */
1451 if (iptr->dst->regoff != RDX) {
1452 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1457 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1458 /* val.i = constant */
/* 64-bit signed division by 2^val.i, rounded toward zero: negative
   dividends are biased by (2^shift - 1) before the arithmetic shift
   (cmp -1 / lea / cmovle selects the biased value when value <= -1).
   BUGFIX: the bias must be computed with a 64-bit shift. The previous
   (1 << iptr->val.i) shifted a 32-bit int; for long division the shift
   count can reach 63, and shifting an int by >= 31 is undefined
   behavior in C and yields a wrong bias constant. */
1460 var_to_reg_int(s1, src, REG_ITMP1);
1461 d = reg_of_var(m, iptr->dst, REG_ITMP3);
1462 M_INTMOVE(s1, REG_ITMP1);
1463 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1464 x86_64_lea_membase_reg(cd, REG_ITMP1, ((s8) 1 << iptr->val.i) - 1, REG_ITMP2);
1465 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1466 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1467 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1468 store_reg_to_var_int(iptr->dst, d);
1471 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1472 /* val.l = constant */
/* 64-bit remainder by a power of two; the constant holds the mask
   (2^k - 1). Same bias/mask/subtract scheme as ICMD_IREMPOW2.
   NOTE(review): the header comment says the constant is in val.l, but
   the code below reads iptr->val.i — masks >= 2^31 would be truncated
   (the lea/and displacement encoding is 32-bit anyway). Confirm against
   the instruction builder which union member is actually set. */
1474 var_to_reg_int(s1, src, REG_ITMP1);
1475 d = reg_of_var(m, iptr->dst, REG_ITMP3);
1476 M_INTMOVE(s1, REG_ITMP1);
1477 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1478 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1479 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1480 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1481 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1482 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1483 store_reg_to_var_int(iptr->dst, d);
/* Shift operations: all register/stack dispatch is delegated to the
   shared emitters — x86_64_emit_ishift(const) for 32-bit values,
   x86_64_emit_lshift(const) for 64-bit. SHL = <<, SAR = signed >>,
   SHR = unsigned >>>. */
1486 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1488 d = reg_of_var(m, iptr->dst, REG_NULL);
1489 x86_64_emit_ishift(m, X86_64_SHL, src, iptr);
1492 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1493 /* val.i = constant */
1495 d = reg_of_var(m, iptr->dst, REG_NULL);
1496 x86_64_emit_ishiftconst(m, X86_64_SHL, src, iptr);
1499 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1501 d = reg_of_var(m, iptr->dst, REG_NULL);
1502 x86_64_emit_ishift(m, X86_64_SAR, src, iptr);
1505 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1506 /* val.i = constant */
1508 d = reg_of_var(m, iptr->dst, REG_NULL);
1509 x86_64_emit_ishiftconst(m, X86_64_SAR, src, iptr);
1512 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1514 d = reg_of_var(m, iptr->dst, REG_NULL);
1515 x86_64_emit_ishift(m, X86_64_SHR, src, iptr);
1518 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1519 /* val.i = constant */
1521 d = reg_of_var(m, iptr->dst, REG_NULL);
1522 x86_64_emit_ishiftconst(m, X86_64_SHR, src, iptr);
1525 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1527 d = reg_of_var(m, iptr->dst, REG_NULL);
1528 x86_64_emit_lshift(m, X86_64_SHL, src, iptr);
1531 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1532 /* val.i = constant */
1534 d = reg_of_var(m, iptr->dst, REG_NULL);
1535 x86_64_emit_lshiftconst(m, X86_64_SHL, src, iptr);
1538 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1540 d = reg_of_var(m, iptr->dst, REG_NULL);
1541 x86_64_emit_lshift(m, X86_64_SAR, src, iptr);
1544 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1545 /* val.i = constant */
1547 d = reg_of_var(m, iptr->dst, REG_NULL);
1548 x86_64_emit_lshiftconst(m, X86_64_SAR, src, iptr);
1551 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1553 d = reg_of_var(m, iptr->dst, REG_NULL);
1554 x86_64_emit_lshift(m, X86_64_SHR, src, iptr);
1557 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1558 /* val.l = constant */
1560 d = reg_of_var(m, iptr->dst, REG_NULL);
1561 x86_64_emit_lshiftconst(m, X86_64_SHR, src, iptr);
/* Bitwise AND/OR/XOR: delegated to the shared ALU emitters —
   x86_64_emit_ialu(const) for 32-bit, x86_64_emit_lalu(const) for
   64-bit operands. */
1564 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1566 d = reg_of_var(m, iptr->dst, REG_NULL);
1567 x86_64_emit_ialu(m, X86_64_AND, src, iptr);
1570 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1571 /* val.i = constant */
1573 d = reg_of_var(m, iptr->dst, REG_NULL);
1574 x86_64_emit_ialuconst(m, X86_64_AND, src, iptr);
1577 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1579 d = reg_of_var(m, iptr->dst, REG_NULL);
1580 x86_64_emit_lalu(m, X86_64_AND, src, iptr);
1583 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1584 /* val.l = constant */
1586 d = reg_of_var(m, iptr->dst, REG_NULL);
1587 x86_64_emit_laluconst(m, X86_64_AND, src, iptr);
1590 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1592 d = reg_of_var(m, iptr->dst, REG_NULL);
1593 x86_64_emit_ialu(m, X86_64_OR, src, iptr);
1596 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1597 /* val.i = constant */
1599 d = reg_of_var(m, iptr->dst, REG_NULL);
1600 x86_64_emit_ialuconst(m, X86_64_OR, src, iptr);
1603 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1605 d = reg_of_var(m, iptr->dst, REG_NULL);
1606 x86_64_emit_lalu(m, X86_64_OR, src, iptr);
1609 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1610 /* val.l = constant */
1612 d = reg_of_var(m, iptr->dst, REG_NULL);
1613 x86_64_emit_laluconst(m, X86_64_OR, src, iptr);
1616 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1618 d = reg_of_var(m, iptr->dst, REG_NULL);
1619 x86_64_emit_ialu(m, X86_64_XOR, src, iptr);
1622 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1623 /* val.i = constant */
1625 d = reg_of_var(m, iptr->dst, REG_NULL);
1626 x86_64_emit_ialuconst(m, X86_64_XOR, src, iptr);
1629 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1631 d = reg_of_var(m, iptr->dst, REG_NULL);
1632 x86_64_emit_lalu(m, X86_64_XOR, src, iptr);
1635 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1636 /* val.l = constant */
1638 d = reg_of_var(m, iptr->dst, REG_NULL);
1639 x86_64_emit_laluconst(m, X86_64_XOR, src, iptr);
1643 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1644 /* op1 = variable, val.i = constant */
/* In-place increment of local variable op1. inc/dec are used for the
   common +/-1 case, a 32-bit add-immediate otherwise; the local lives
   either in a stack slot (REG_SP + d*8) or in register d.
   NOTE(review): d is used below as the local's regoff but its
   assignment is not present in this listing — confirm d = var->regoff
   is set between these lines upstream. */
1646 var = &(r->locals[iptr->op1][TYPE_INT]);
1648 if (var->flags & INMEMORY) {
1649 if (iptr->val.i == 1) {
1650 x86_64_incl_membase(cd, REG_SP, d * 8);
1652 } else if (iptr->val.i == -1) {
1653 x86_64_decl_membase(cd, REG_SP, d * 8);
1656 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1660 if (iptr->val.i == 1) {
1661 x86_64_incl_reg(cd, d);
1663 } else if (iptr->val.i == -1) {
1664 x86_64_decl_reg(cd, d);
1667 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1673 /* floating operations ************************************************/
1675 case ICMD_FNEG: /* ..., value ==> ..., - value */
/* Negate by XOR-ing the IEEE sign bit: the 0x80000000 mask is placed in
   the data segment and loaded RIP-relative.
   NOTE(review): the "+ 9" is the byte length of the movss instruction
   being emitted (RIP displacement is relative to the *next*
   instruction) — confirm against the emitter before touching. */
1677 var_to_reg_flt(s1, src, REG_FTMP1);
1678 d = reg_of_var(m, iptr->dst, REG_FTMP3);
1679 a = dseg_adds4(m, 0x80000000);
1681 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1682 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1683 store_reg_to_var_flt(iptr->dst, d);
1686 case ICMD_DNEG: /* ..., value ==> ..., - value */
/* Double negate: same scheme with the 64-bit sign mask and xorpd. */
1688 var_to_reg_flt(s1, src, REG_FTMP1);
1689 d = reg_of_var(m, iptr->dst, REG_FTMP3);
1690 a = dseg_adds8(m, 0x8000000000000000);
1692 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1693 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1694 store_reg_to_var_flt(iptr->dst, d);
1697 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
/* Commutative SSE add: if either source already sits in the destination
   register, add the other one into it directly; otherwise move s1 to d
   first (move elided from this listing) and add s2. */
1699 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1700 var_to_reg_flt(s2, src, REG_FTMP2);
1701 d = reg_of_var(m, iptr->dst, REG_FTMP3);
1703 x86_64_addss_reg_reg(cd, s2, d);
1704 } else if (s2 == d) {
1705 x86_64_addss_reg_reg(cd, s1, d);
1708 x86_64_addss_reg_reg(cd, s2, d);
1710 store_reg_to_var_flt(iptr->dst, d);
1713 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
/* Double-precision variant of ICMD_FADD (addsd). */
1715 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1716 var_to_reg_flt(s2, src, REG_FTMP2);
1717 d = reg_of_var(m, iptr->dst, REG_FTMP3);
1719 x86_64_addsd_reg_reg(cd, s2, d);
1720 } else if (s2 == d) {
1721 x86_64_addsd_reg_reg(cd, s1, d);
1724 x86_64_addsd_reg_reg(cd, s2, d);
1726 store_reg_to_var_flt(iptr->dst, d);
1729 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
/* Non-commutative: s2 is parked in REG_FTMP2 so it survives the move of
   s1 into d (move elided from this listing), then subtracted. */
1731 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1732 var_to_reg_flt(s2, src, REG_FTMP2);
1733 d = reg_of_var(m, iptr->dst, REG_FTMP3);
1735 M_FLTMOVE(s2, REG_FTMP2);
1739 x86_64_subss_reg_reg(cd, s2, d);
1740 store_reg_to_var_flt(iptr->dst, d);
1743 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
/* Double-precision variant of ICMD_FSUB (subsd). */
1745 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1746 var_to_reg_flt(s2, src, REG_FTMP2);
1747 d = reg_of_var(m, iptr->dst, REG_FTMP3);
1749 M_FLTMOVE(s2, REG_FTMP2);
1753 x86_64_subsd_reg_reg(cd, s2, d);
1754 store_reg_to_var_flt(iptr->dst, d);
1757 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
/* Commutative SSE multiply; same destination-aliasing strategy as
   ICMD_FADD. */
1759 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1760 var_to_reg_flt(s2, src, REG_FTMP2);
1761 d = reg_of_var(m, iptr->dst, REG_FTMP3);
1763 x86_64_mulss_reg_reg(cd, s2, d);
1764 } else if (s2 == d) {
1765 x86_64_mulss_reg_reg(cd, s1, d);
1768 x86_64_mulss_reg_reg(cd, s2, d);
1770 store_reg_to_var_flt(iptr->dst, d);
1773 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
/* Double-precision variant (mulsd). */
1775 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1776 var_to_reg_flt(s2, src, REG_FTMP2);
1777 d = reg_of_var(m, iptr->dst, REG_FTMP3);
1779 x86_64_mulsd_reg_reg(cd, s2, d);
1780 } else if (s2 == d) {
1781 x86_64_mulsd_reg_reg(cd, s1, d);
1784 x86_64_mulsd_reg_reg(cd, s2, d);
1786 store_reg_to_var_flt(iptr->dst, d);
1789 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
/* Non-commutative: divisor s2 parked in REG_FTMP2 as in ICMD_FSUB. */
1791 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1792 var_to_reg_flt(s2, src, REG_FTMP2);
1793 d = reg_of_var(m, iptr->dst, REG_FTMP3);
1795 M_FLTMOVE(s2, REG_FTMP2);
1799 x86_64_divss_reg_reg(cd, s2, d);
1800 store_reg_to_var_flt(iptr->dst, d);
1803 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
/* Double-precision variant (divsd). */
1805 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1806 var_to_reg_flt(s2, src, REG_FTMP2);
1807 d = reg_of_var(m, iptr->dst, REG_FTMP3);
1809 M_FLTMOVE(s2, REG_FTMP2);
1813 x86_64_divsd_reg_reg(cd, s2, d);
1814 store_reg_to_var_flt(iptr->dst, d);
/* int/long -> float/double conversions: single cvtsi2ss/sd instruction
   (the 'q' variants take a 64-bit integer source). These conversions are
   exact or correctly rounded in hardware — no corner-case fixup needed. */
1817 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1819 var_to_reg_int(s1, src, REG_ITMP1);
1820 d = reg_of_var(m, iptr->dst, REG_FTMP1);
1821 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1822 store_reg_to_var_flt(iptr->dst, d);
1825 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1827 var_to_reg_int(s1, src, REG_ITMP1);
1828 d = reg_of_var(m, iptr->dst, REG_FTMP1);
1829 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1830 store_reg_to_var_flt(iptr->dst, d);
1833 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1835 var_to_reg_int(s1, src, REG_ITMP1);
1836 d = reg_of_var(m, iptr->dst, REG_FTMP1);
1837 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1838 store_reg_to_var_flt(iptr->dst, d);
1841 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1843 var_to_reg_int(s1, src, REG_ITMP1);
1844 d = reg_of_var(m, iptr->dst, REG_FTMP1);
1845 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1846 store_reg_to_var_flt(iptr->dst, d);
/* float/double -> int/long conversions. cvttss2si/cvttsd2si return the
   integer indefinite value (0x80000000 / 0x8000000000000000) on NaN or
   overflow, which does not match Java semantics; when that sentinel is
   produced, the slow-path builtin (asm_builtin_f2i etc.) is called to
   compute the Java-correct result.
   NOTE(review): 'a' is the hand-computed byte length of the skipped
   fixup sequence (optional 5-byte FLTMOVE + 10-byte mov imm64 + 3-byte
   call + optional 3-byte INTMOVE) — must stay in sync with the
   emitters. */
1849 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1851 var_to_reg_flt(s1, src, REG_FTMP1);
1852 d = reg_of_var(m, iptr->dst, REG_ITMP1);
1853 x86_64_cvttss2si_reg_reg(cd, s1, d);
1854 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1855 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1856 x86_64_jcc(cd, X86_64_CC_NE, a);
1857 M_FLTMOVE(s1, REG_FTMP1);
1858 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1859 x86_64_call_reg(cd, REG_ITMP2);
1860 M_INTMOVE(REG_RESULT, d);
1861 store_reg_to_var_int(iptr->dst, d);
1864 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1866 var_to_reg_flt(s1, src, REG_FTMP1);
1867 d = reg_of_var(m, iptr->dst, REG_ITMP1);
1868 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1869 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1870 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1871 x86_64_jcc(cd, X86_64_CC_NE, a);
1872 M_FLTMOVE(s1, REG_FTMP1);
1873 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1874 x86_64_call_reg(cd, REG_ITMP2);
1875 M_INTMOVE(REG_RESULT, d);
1876 store_reg_to_var_int(iptr->dst, d);
1879 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1881 var_to_reg_flt(s1, src, REG_FTMP1);
1882 d = reg_of_var(m, iptr->dst, REG_ITMP1);
1883 x86_64_cvttss2siq_reg_reg(cd, s1, d);
1884 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1885 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1886 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1887 x86_64_jcc(cd, X86_64_CC_NE, a);
1888 M_FLTMOVE(s1, REG_FTMP1);
1889 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
1890 x86_64_call_reg(cd, REG_ITMP2);
1891 M_INTMOVE(REG_RESULT, d);
1892 store_reg_to_var_int(iptr->dst, d);
1895 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1897 var_to_reg_flt(s1, src, REG_FTMP1);
1898 d = reg_of_var(m, iptr->dst, REG_ITMP1);
1899 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1900 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1901 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1902 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1903 x86_64_jcc(cd, X86_64_CC_NE, a);
1904 M_FLTMOVE(s1, REG_FTMP1);
1905 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
1906 x86_64_call_reg(cd, REG_ITMP2);
1907 M_INTMOVE(REG_RESULT, d);
1908 store_reg_to_var_int(iptr->dst, d);
/* float <-> double widening/narrowing: single SSE conversion, no corner
   cases (widening is exact; narrowing rounds per IEEE). */
1911 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1913 var_to_reg_flt(s1, src, REG_FTMP1);
1914 d = reg_of_var(m, iptr->dst, REG_FTMP3);
1915 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1916 store_reg_to_var_flt(iptr->dst, d);
1919 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1921 var_to_reg_flt(s1, src, REG_FTMP1);
1922 d = reg_of_var(m, iptr->dst, REG_FTMP3);
1923 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1924 store_reg_to_var_flt(iptr->dst, d);
/* Floating-point compares. Result starts at 0 (equal); ucomiss/ucomisd
   sets CF/ZF/PF, and cmov picks +/-1 from REG_ITMP1/REG_ITMP2. An
   unordered compare (NaN) raises PF; the L-variants treat it one way,
   the G-variants the other, per the JVM fcmpl/fcmpg distinction.
   NOTE(review): the stack-comment convention here ("< => 1, > => -1")
   and the ucomiss operand order are interdependent — verify against
   x86_64_ucomiss_reg_reg's operand convention before changing either. */
1927 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1928 /* == => 0, < => 1, > => -1 */
1930 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1931 var_to_reg_flt(s2, src, REG_FTMP2);
1932 d = reg_of_var(m, iptr->dst, REG_ITMP3);
1933 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1934 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1935 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1936 x86_64_ucomiss_reg_reg(cd, s1, s2);
1937 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1938 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1939 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1940 store_reg_to_var_int(iptr->dst, d);
1943 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1944 /* == => 0, < => 1, > => -1 */
1946 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1947 var_to_reg_flt(s2, src, REG_FTMP2);
1948 d = reg_of_var(m, iptr->dst, REG_ITMP3);
1949 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1950 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1951 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1952 x86_64_ucomiss_reg_reg(cd, s1, s2);
1953 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1954 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1955 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1956 store_reg_to_var_int(iptr->dst, d);
1959 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1960 /* == => 0, < => 1, > => -1 */
1962 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1963 var_to_reg_flt(s2, src, REG_FTMP2);
1964 d = reg_of_var(m, iptr->dst, REG_ITMP3);
1965 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1966 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1967 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1968 x86_64_ucomisd_reg_reg(cd, s1, s2);
1969 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1970 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1971 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1972 store_reg_to_var_int(iptr->dst, d);
1975 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1976 /* == => 0, < => 1, > => -1 */
1978 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1979 var_to_reg_flt(s2, src, REG_FTMP2);
1980 d = reg_of_var(m, iptr->dst, REG_ITMP3);
1981 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1982 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1983 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1984 x86_64_ucomisd_reg_reg(cd, s1, s2);
1985 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1986 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1987 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1988 store_reg_to_var_int(iptr->dst, d);
1992 /* memory operations **************************************************/
1994 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
/* Load the 32-bit length field from the array header after a null
   check on the reference. */
1996 var_to_reg_int(s1, src, REG_ITMP1);
1997 d = reg_of_var(m, iptr->dst, REG_ITMP3);
1998 gen_nullptr_check(s1);
1999 x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
2000 store_reg_to_var_int(iptr->dst, d);
/* Array element loads: s1 = array ref, s2 = index; element address is
   base + data-offset + index << log2(elemsize). iptr->op1 == 0 guards
   the null-pointer check (presumably op1 != 0 means the reference was
   proven non-null upstream — confirm in the optimizer). Sub-int
   element types are widened with sign/zero extension as Java requires
   (char zero-extends, short/byte sign-extend). */
2003 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2005 var_to_reg_int(s1, src->prev, REG_ITMP1);
2006 var_to_reg_int(s2, src, REG_ITMP2);
2007 d = reg_of_var(m, iptr->dst, REG_ITMP3);
2008 if (iptr->op1 == 0) {
2009 gen_nullptr_check(s1);
2012 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2013 store_reg_to_var_int(iptr->dst, d);
2016 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2018 var_to_reg_int(s1, src->prev, REG_ITMP1);
2019 var_to_reg_int(s2, src, REG_ITMP2);
2020 d = reg_of_var(m, iptr->dst, REG_ITMP3);
2021 if (iptr->op1 == 0) {
2022 gen_nullptr_check(s1);
2025 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2026 store_reg_to_var_int(iptr->dst, d);
2029 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2031 var_to_reg_int(s1, src->prev, REG_ITMP1);
2032 var_to_reg_int(s2, src, REG_ITMP2);
2033 d = reg_of_var(m, iptr->dst, REG_ITMP3);
2034 if (iptr->op1 == 0) {
2035 gen_nullptr_check(s1);
2038 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2039 store_reg_to_var_int(iptr->dst, d);
2042 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2044 var_to_reg_int(s1, src->prev, REG_ITMP1);
2045 var_to_reg_int(s2, src, REG_ITMP2);
2046 d = reg_of_var(m, iptr->dst, REG_FTMP3);
2047 if (iptr->op1 == 0) {
2048 gen_nullptr_check(s1);
2051 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2052 store_reg_to_var_flt(iptr->dst, d);
2055 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2057 var_to_reg_int(s1, src->prev, REG_ITMP1);
2058 var_to_reg_int(s2, src, REG_ITMP2);
2059 d = reg_of_var(m, iptr->dst, REG_FTMP3);
2060 if (iptr->op1 == 0) {
2061 gen_nullptr_check(s1);
2064 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2065 store_reg_to_var_flt(iptr->dst, d);
2068 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2070 var_to_reg_int(s1, src->prev, REG_ITMP1);
2071 var_to_reg_int(s2, src, REG_ITMP2);
2072 d = reg_of_var(m, iptr->dst, REG_ITMP3);
2073 if (iptr->op1 == 0) {
2074 gen_nullptr_check(s1);
2077 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2078 store_reg_to_var_int(iptr->dst, d);
2081 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2083 var_to_reg_int(s1, src->prev, REG_ITMP1);
2084 var_to_reg_int(s2, src, REG_ITMP2);
2085 d = reg_of_var(m, iptr->dst, REG_ITMP3);
2086 if (iptr->op1 == 0) {
2087 gen_nullptr_check(s1);
2090 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2091 store_reg_to_var_int(iptr->dst, d);
2094 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2096 var_to_reg_int(s1, src->prev, REG_ITMP1);
2097 var_to_reg_int(s2, src, REG_ITMP2);
2098 d = reg_of_var(m, iptr->dst, REG_ITMP3);
2099 if (iptr->op1 == 0) {
2100 gen_nullptr_check(s1);
2103 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2104 store_reg_to_var_int(iptr->dst, d);
/* Array element stores: s1 = array ref, s2 = index, s3 = value; same
   scaled-index addressing as the loads. char and short both store the
   low 16 bits (movw), byte the low 8 (movb).
   NOTE(review): no dynamic array-store type check is emitted for
   AASTORE here — confirm how this JIT handles ArrayStoreException. */
2108 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2110 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2111 var_to_reg_int(s2, src->prev, REG_ITMP2);
2112 if (iptr->op1 == 0) {
2113 gen_nullptr_check(s1);
2116 var_to_reg_int(s3, src, REG_ITMP3);
2117 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2120 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2122 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2123 var_to_reg_int(s2, src->prev, REG_ITMP2);
2124 if (iptr->op1 == 0) {
2125 gen_nullptr_check(s1);
2128 var_to_reg_int(s3, src, REG_ITMP3);
2129 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2132 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2134 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2135 var_to_reg_int(s2, src->prev, REG_ITMP2);
2136 if (iptr->op1 == 0) {
2137 gen_nullptr_check(s1);
2140 var_to_reg_int(s3, src, REG_ITMP3);
2141 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2144 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2146 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2147 var_to_reg_int(s2, src->prev, REG_ITMP2);
2148 if (iptr->op1 == 0) {
2149 gen_nullptr_check(s1);
2152 var_to_reg_flt(s3, src, REG_FTMP3);
2153 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2156 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2158 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2159 var_to_reg_int(s2, src->prev, REG_ITMP2);
2160 if (iptr->op1 == 0) {
2161 gen_nullptr_check(s1);
2164 var_to_reg_flt(s3, src, REG_FTMP3);
2165 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2168 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2170 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2171 var_to_reg_int(s2, src->prev, REG_ITMP2);
2172 if (iptr->op1 == 0) {
2173 gen_nullptr_check(s1);
2176 var_to_reg_int(s3, src, REG_ITMP3);
2177 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2180 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2182 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2183 var_to_reg_int(s2, src->prev, REG_ITMP2);
2184 if (iptr->op1 == 0) {
2185 gen_nullptr_check(s1);
2188 var_to_reg_int(s3, src, REG_ITMP3);
2189 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2192 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2194 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2195 var_to_reg_int(s2, src->prev, REG_ITMP2);
2196 if (iptr->op1 == 0) {
2197 gen_nullptr_check(s1);
2200 var_to_reg_int(s3, src, REG_ITMP3);
2201 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2205 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2206 /* op1 = type, val.a = field address */
2208 /* if class isn't yet initialized, do it */
2209 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2210 /* call helper function which patches this code */
2211 x86_64_mov_imm_reg(cd, (s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2212 x86_64_mov_imm_reg(cd, (s8) asm_check_clinit, REG_ITMP2);
2213 x86_64_call_reg(cd, REG_ITMP2);
2216 a = dseg_addaddress(m, &(((fieldinfo *) iptr->val.a)->value));
2217 /* x86_64_mov_imm_reg(cd, 0, REG_ITMP2); */
2218 /* dseg_adddata(m, cd->mcodeptr); */
2219 /* x86_64_mov_membase_reg(cd, REG_ITMP2, a, REG_ITMP2); */
2220 x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
2221 switch (iptr->op1) {
2223 var_to_reg_int(s2, src, REG_ITMP1);
2224 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2228 var_to_reg_int(s2, src, REG_ITMP1);
2229 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2232 var_to_reg_flt(s2, src, REG_FTMP1);
2233 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2236 var_to_reg_flt(s2, src, REG_FTMP1);
2237 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
2239 default: panic("internal error");
2243 case ICMD_GETSTATIC: /* ... ==> ..., value */
2244 /* op1 = type, val.a = field address */
2246 /* if class isn't yet initialized, do it */
2247 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2248 /* call helper function which patches this code */
2249 x86_64_mov_imm_reg(cd, (s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2250 x86_64_mov_imm_reg(cd, (s8) asm_check_clinit, REG_ITMP2);
2251 x86_64_call_reg(cd, REG_ITMP2);
2254 a = dseg_addaddress(m, &(((fieldinfo *) iptr->val.a)->value));
2255 /* x86_64_mov_imm_reg(cd, 0, REG_ITMP2); */
2256 /* dseg_adddata(m, cd->mcodeptr); */
2257 /* x86_64_mov_membase_reg(cd, REG_ITMP2, a, REG_ITMP2); */
2258 x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
2259 switch (iptr->op1) {
2261 d = reg_of_var(m, iptr->dst, REG_ITMP1);
2262 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2263 store_reg_to_var_int(iptr->dst, d);
2267 d = reg_of_var(m, iptr->dst, REG_ITMP1);
2268 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2269 store_reg_to_var_int(iptr->dst, d);
2272 d = reg_of_var(m, iptr->dst, REG_ITMP1);
2273 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2274 store_reg_to_var_flt(iptr->dst, d);
2277 d = reg_of_var(m, iptr->dst, REG_ITMP1);
2278 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2279 store_reg_to_var_flt(iptr->dst, d);
2281 default: panic("internal error");
2285 case ICMD_PUTFIELD: /* ..., value ==> ... */
2286 /* op1 = type, val.i = field offset */
2288 /* if class isn't yet initialized, do it */
2289 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2290 /* call helper function which patches this code */
2291 x86_64_mov_imm_reg(cd, (s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2292 x86_64_mov_imm_reg(cd, (s8) asm_check_clinit, REG_ITMP2);
2293 x86_64_call_reg(cd, REG_ITMP2);
2296 a = ((fieldinfo *)(iptr->val.a))->offset;
2297 var_to_reg_int(s1, src->prev, REG_ITMP1);
2298 switch (iptr->op1) {
2300 var_to_reg_int(s2, src, REG_ITMP2);
2301 gen_nullptr_check(s1);
2302 x86_64_movl_reg_membase(cd, s2, s1, a);
2306 var_to_reg_int(s2, src, REG_ITMP2);
2307 gen_nullptr_check(s1);
2308 x86_64_mov_reg_membase(cd, s2, s1, a);
2311 var_to_reg_flt(s2, src, REG_FTMP2);
2312 gen_nullptr_check(s1);
2313 x86_64_movss_reg_membase(cd, s2, s1, a);
2316 var_to_reg_flt(s2, src, REG_FTMP2);
2317 gen_nullptr_check(s1);
2318 x86_64_movsd_reg_membase(cd, s2, s1, a);
2320 default: panic ("internal error");
2324 case ICMD_GETFIELD: /* ... ==> ..., value */
2325 /* op1 = type, val.i = field offset */
2327 a = ((fieldinfo *)(iptr->val.a))->offset;
2328 var_to_reg_int(s1, src, REG_ITMP1);
2329 switch (iptr->op1) {
2331 d = reg_of_var(m, iptr->dst, REG_ITMP1);
2332 gen_nullptr_check(s1);
2333 x86_64_movl_membase_reg(cd, s1, a, d);
2334 store_reg_to_var_int(iptr->dst, d);
2338 d = reg_of_var(m, iptr->dst, REG_ITMP1);
2339 gen_nullptr_check(s1);
2340 x86_64_mov_membase_reg(cd, s1, a, d);
2341 store_reg_to_var_int(iptr->dst, d);
2344 d = reg_of_var(m, iptr->dst, REG_FTMP1);
2345 gen_nullptr_check(s1);
2346 x86_64_movss_membase_reg(cd, s1, a, d);
2347 store_reg_to_var_flt(iptr->dst, d);
2350 d = reg_of_var(m, iptr->dst, REG_FTMP1);
2351 gen_nullptr_check(s1);
2352 x86_64_movsd_membase_reg(cd, s1, a, d);
2353 store_reg_to_var_flt(iptr->dst, d);
2355 default: panic ("internal error");
2360 /* branch operations **************************************************/
2362 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2364 var_to_reg_int(s1, src, REG_ITMP1);
2365 M_INTMOVE(s1, REG_ITMP1_XPTR);
2367 x86_64_call_imm(cd, 0); /* passing exception pointer */
2368 x86_64_pop_reg(cd, REG_ITMP2_XPC);
2370 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
2371 x86_64_jmp_reg(cd, REG_ITMP3);
2375 case ICMD_GOTO: /* ... ==> ... */
2376 /* op1 = target JavaVM pc */
2378 x86_64_jmp_imm(cd, 0);
2379 codegen_addreference(m, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2383 case ICMD_JSR: /* ... ==> ... */
2384 /* op1 = target JavaVM pc */
2386 x86_64_call_imm(cd, 0);
2387 codegen_addreference(m, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2390 case ICMD_RET: /* ... ==> ... */
2391 /* op1 = local variable */
2393 var = &(r->locals[iptr->op1][TYPE_ADR]);
2394 var_to_reg_int(s1, var, REG_ITMP1);
2395 x86_64_jmp_reg(cd, s1);
2398 case ICMD_IFNULL: /* ..., value ==> ... */
2399 /* op1 = target JavaVM pc */
2401 if (src->flags & INMEMORY) {
2402 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2405 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2407 x86_64_jcc(cd, X86_64_CC_E, 0);
2408 codegen_addreference(m, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2411 case ICMD_IFNONNULL: /* ..., value ==> ... */
2412 /* op1 = target JavaVM pc */
2414 if (src->flags & INMEMORY) {
2415 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2418 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2420 x86_64_jcc(cd, X86_64_CC_NE, 0);
2421 codegen_addreference(m, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2424 case ICMD_IFEQ: /* ..., value ==> ... */
2425 /* op1 = target JavaVM pc, val.i = constant */
2427 x86_64_emit_ifcc(m, X86_64_CC_E, src, iptr);
2430 case ICMD_IFLT: /* ..., value ==> ... */
2431 /* op1 = target JavaVM pc, val.i = constant */
2433 x86_64_emit_ifcc(m, X86_64_CC_L, src, iptr);
2436 case ICMD_IFLE: /* ..., value ==> ... */
2437 /* op1 = target JavaVM pc, val.i = constant */
2439 x86_64_emit_ifcc(m, X86_64_CC_LE, src, iptr);
2442 case ICMD_IFNE: /* ..., value ==> ... */
2443 /* op1 = target JavaVM pc, val.i = constant */
2445 x86_64_emit_ifcc(m, X86_64_CC_NE, src, iptr);
2448 case ICMD_IFGT: /* ..., value ==> ... */
2449 /* op1 = target JavaVM pc, val.i = constant */
2451 x86_64_emit_ifcc(m, X86_64_CC_G, src, iptr);
2454 case ICMD_IFGE: /* ..., value ==> ... */
2455 /* op1 = target JavaVM pc, val.i = constant */
2457 x86_64_emit_ifcc(m, X86_64_CC_GE, src, iptr);
2460 case ICMD_IF_LEQ: /* ..., value ==> ... */
2461 /* op1 = target JavaVM pc, val.l = constant */
2463 x86_64_emit_if_lcc(m, X86_64_CC_E, src, iptr);
2466 case ICMD_IF_LLT: /* ..., value ==> ... */
2467 /* op1 = target JavaVM pc, val.l = constant */
2469 x86_64_emit_if_lcc(m, X86_64_CC_L, src, iptr);
2472 case ICMD_IF_LLE: /* ..., value ==> ... */
2473 /* op1 = target JavaVM pc, val.l = constant */
2475 x86_64_emit_if_lcc(m, X86_64_CC_LE, src, iptr);
2478 case ICMD_IF_LNE: /* ..., value ==> ... */
2479 /* op1 = target JavaVM pc, val.l = constant */
2481 x86_64_emit_if_lcc(m, X86_64_CC_NE, src, iptr);
2484 case ICMD_IF_LGT: /* ..., value ==> ... */
2485 /* op1 = target JavaVM pc, val.l = constant */
2487 x86_64_emit_if_lcc(m, X86_64_CC_G, src, iptr);
2490 case ICMD_IF_LGE: /* ..., value ==> ... */
2491 /* op1 = target JavaVM pc, val.l = constant */
2493 x86_64_emit_if_lcc(m, X86_64_CC_GE, src, iptr);
2496 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2497 /* op1 = target JavaVM pc */
2499 x86_64_emit_if_icmpcc(m, X86_64_CC_E, src, iptr);
2502 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2503 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2505 x86_64_emit_if_lcmpcc(m, X86_64_CC_E, src, iptr);
2508 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2509 /* op1 = target JavaVM pc */
2511 x86_64_emit_if_icmpcc(m, X86_64_CC_NE, src, iptr);
2514 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2515 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2517 x86_64_emit_if_lcmpcc(m, X86_64_CC_NE, src, iptr);
2520 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2521 /* op1 = target JavaVM pc */
2523 x86_64_emit_if_icmpcc(m, X86_64_CC_L, src, iptr);
2526 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2527 /* op1 = target JavaVM pc */
2529 x86_64_emit_if_lcmpcc(m, X86_64_CC_L, src, iptr);
2532 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2533 /* op1 = target JavaVM pc */
2535 x86_64_emit_if_icmpcc(m, X86_64_CC_G, src, iptr);
2538 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2539 /* op1 = target JavaVM pc */
2541 x86_64_emit_if_lcmpcc(m, X86_64_CC_G, src, iptr);
2544 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2545 /* op1 = target JavaVM pc */
2547 x86_64_emit_if_icmpcc(m, X86_64_CC_LE, src, iptr);
2550 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2551 /* op1 = target JavaVM pc */
2553 x86_64_emit_if_lcmpcc(m, X86_64_CC_LE, src, iptr);
2556 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2557 /* op1 = target JavaVM pc */
2559 x86_64_emit_if_icmpcc(m, X86_64_CC_GE, src, iptr);
2562 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2563 /* op1 = target JavaVM pc */
2565 x86_64_emit_if_lcmpcc(m, X86_64_CC_GE, src, iptr);
2568 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
2570 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2573 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2574 /* val.i = constant */
2576 var_to_reg_int(s1, src, REG_ITMP1);
2577 d = reg_of_var(m, iptr->dst, REG_ITMP3);
2579 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2581 M_INTMOVE(s1, REG_ITMP1);
2584 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2586 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2587 x86_64_testl_reg_reg(cd, s1, s1);
2588 x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2589 store_reg_to_var_int(iptr->dst, d);
2592 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2593 /* val.i = constant */
2595 var_to_reg_int(s1, src, REG_ITMP1);
2596 d = reg_of_var(m, iptr->dst, REG_ITMP3);
2598 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2600 M_INTMOVE(s1, REG_ITMP1);
2603 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2605 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2606 x86_64_testl_reg_reg(cd, s1, s1);
2607 x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2608 store_reg_to_var_int(iptr->dst, d);
2611 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2612 /* val.i = constant */
2614 var_to_reg_int(s1, src, REG_ITMP1);
2615 d = reg_of_var(m, iptr->dst, REG_ITMP3);
2617 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2619 M_INTMOVE(s1, REG_ITMP1);
2622 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2624 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2625 x86_64_testl_reg_reg(cd, s1, s1);
2626 x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2627 store_reg_to_var_int(iptr->dst, d);
2630 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2631 /* val.i = constant */
2633 var_to_reg_int(s1, src, REG_ITMP1);
2634 d = reg_of_var(m, iptr->dst, REG_ITMP3);
2636 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2638 M_INTMOVE(s1, REG_ITMP1);
2641 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2643 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2644 x86_64_testl_reg_reg(cd, s1, s1);
2645 x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2646 store_reg_to_var_int(iptr->dst, d);
2649 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2650 /* val.i = constant */
2652 var_to_reg_int(s1, src, REG_ITMP1);
2653 d = reg_of_var(m, iptr->dst, REG_ITMP3);
2655 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2657 M_INTMOVE(s1, REG_ITMP1);
2660 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2662 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2663 x86_64_testl_reg_reg(cd, s1, s1);
2664 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2665 store_reg_to_var_int(iptr->dst, d);
2668 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2669 /* val.i = constant */
2671 var_to_reg_int(s1, src, REG_ITMP1);
2672 d = reg_of_var(m, iptr->dst, REG_ITMP3);
2674 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2676 M_INTMOVE(s1, REG_ITMP1);
2679 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2681 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2682 x86_64_testl_reg_reg(cd, s1, s1);
2683 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2684 store_reg_to_var_int(iptr->dst, d);
2688 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2692 var_to_reg_int(s1, src, REG_RESULT);
2693 M_INTMOVE(s1, REG_RESULT);
2695 #if defined(USE_THREADS)
2696 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2697 x86_64_mov_membase_reg(cd, REG_SP, r->maxmemuse * 8, r->argintregs[0]);
2698 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, r->maxmemuse * 8);
2699 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2700 x86_64_call_reg(cd, REG_ITMP1);
2701 x86_64_mov_membase_reg(cd, REG_SP, r->maxmemuse * 8, REG_RESULT);
2705 goto nowperformreturn;
2707 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2710 var_to_reg_flt(s1, src, REG_FRESULT);
2711 M_FLTMOVE(s1, REG_FRESULT);
2713 #if defined(USE_THREADS)
2714 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2715 x86_64_mov_membase_reg(cd, REG_SP, r->maxmemuse * 8, r->argintregs[0]);
2716 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, r->maxmemuse * 8);
2717 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2718 x86_64_call_reg(cd, REG_ITMP1);
2719 x86_64_movq_membase_reg(cd, REG_SP, r->maxmemuse * 8, REG_FRESULT);
2723 goto nowperformreturn;
2725 case ICMD_RETURN: /* ... ==> ... */
2727 #if defined(USE_THREADS)
2728 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2729 x86_64_mov_membase_reg(cd, REG_SP, r->maxmemuse * 8, r->argintregs[0]);
2730 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2731 x86_64_call_reg(cd, REG_ITMP1);
2739 p = parentargs_base;
2741 /* call trace function */
2743 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2745 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2746 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2748 x86_64_mov_imm_reg(cd, (s8) m, r->argintregs[0]);
2749 x86_64_mov_reg_reg(cd, REG_RESULT, r->argintregs[1]);
2750 M_FLTMOVE(REG_FRESULT, r->argfltregs[0]);
2751 M_FLTMOVE(REG_FRESULT, r->argfltregs[1]);
2753 x86_64_mov_imm_reg(cd, (s8) builtin_displaymethodstop, REG_ITMP1);
2754 x86_64_call_reg(cd, REG_ITMP1);
2756 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2757 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2759 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2762 /* restore saved registers */
2763 for (i = r->savintregcnt - 1; i >= r->maxsavintreguse; i--) {
2764 p--; x86_64_mov_membase_reg(cd, REG_SP, p * 8, r->savintregs[i]);
2766 for (i = r->savfltregcnt - 1; i >= r->maxsavfltreguse; i--) {
2767 p--; x86_64_movq_membase_reg(cd, REG_SP, p * 8, r->savfltregs[i]);
2770 /* deallocate stack */
2771 if (parentargs_base) {
2772 x86_64_alu_imm_reg(cd, X86_64_ADD, parentargs_base * 8, REG_SP);
2781 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2786 tptr = (void **) iptr->target;
2788 s4ptr = iptr->val.a;
2789 l = s4ptr[1]; /* low */
2790 i = s4ptr[2]; /* high */
2792 var_to_reg_int(s1, src, REG_ITMP1);
2793 M_INTMOVE(s1, REG_ITMP1);
2795 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
2800 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
2801 x86_64_jcc(cd, X86_64_CC_A, 0);
2803 /* codegen_addreference(m, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
2804 codegen_addreference(m, (basicblock *) tptr[0], cd->mcodeptr);
2806 /* build jump table top down and use address of lowest entry */
2808 /* s4ptr += 3 + i; */
2812 /* dseg_addtarget(m, BlockPtrOfPC(*--s4ptr)); */
2813 dseg_addtarget(m, (basicblock *) tptr[0]);
2817 /* length of dataseg after last dseg_addtarget is used by load */
2819 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
2820 dseg_adddata(m, cd->mcodeptr);
2821 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
2822 x86_64_jmp_reg(cd, REG_ITMP1);
2828 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2830 s4 i, l, val, *s4ptr;
2833 tptr = (void **) iptr->target;
2835 s4ptr = iptr->val.a;
2836 l = s4ptr[0]; /* default */
2837 i = s4ptr[1]; /* count */
2839 MCODECHECK((i<<2)+8);
2840 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
2846 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
2847 x86_64_jcc(cd, X86_64_CC_E, 0);
2848 /* codegen_addreference(m, BlockPtrOfPC(s4ptr[1]), cd->mcodeptr); */
2849 codegen_addreference(m, (basicblock *) tptr[0], cd->mcodeptr);
2852 x86_64_jmp_imm(cd, 0);
2853 /* codegen_addreference(m, BlockPtrOfPC(l), cd->mcodeptr); */
2855 tptr = (void **) iptr->target;
2856 codegen_addreference(m, (basicblock *) tptr[0], cd->mcodeptr);
2863 case ICMD_BUILTIN3: /* ..., arg1, arg2, arg3 ==> ... */
2864 /* op1 = return type, val.a = function pointer*/
2868 case ICMD_BUILTIN2: /* ..., arg1, arg2 ==> ... */
2869 /* op1 = return type, val.a = function pointer*/
2873 case ICMD_BUILTIN1: /* ..., arg1 ==> ... */
2874 /* op1 = return type, val.a = function pointer*/
2878 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2879 /* op1 = arg count, val.a = method pointer */
2881 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2882 /* op1 = arg count, val.a = method pointer */
2884 case ICMD_INVOKEVIRTUAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2885 /* op1 = arg count, val.a = method pointer */
2887 case ICMD_INVOKEINTERFACE:/*.., objectref, [arg1, [arg2 ...]] ==> ... */
2888 /* op1 = arg count, val.a = method pointer */
2898 MCODECHECK((s3 << 1) + 64);
2905 /* copy arguments to registers or stack location */
2906 for (; --s3 >= 0; src = src->prev) {
2907 IS_INT_LNG_TYPE(src->type) ? iarg++ : farg++;
2913 s2 = (iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0 + (farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0;
2915 for (; --s3 >= 0; src = src->prev) {
2916 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
2917 if (src->varkind == ARGVAR) {
2918 if (IS_INT_LNG_TYPE(src->type)) {
2919 if (iarg >= INT_ARG_CNT) {
2923 if (farg >= FLT_ARG_CNT) {
2930 if (IS_INT_LNG_TYPE(src->type)) {
2931 if (iarg < INT_ARG_CNT) {
2932 s1 = r->argintregs[iarg];
2933 var_to_reg_int(d, src, s1);
2937 var_to_reg_int(d, src, REG_ITMP1);
2939 x86_64_mov_reg_membase(cd, d, REG_SP, s2 * 8);
2943 if (farg < FLT_ARG_CNT) {
2944 s1 = r->argfltregs[farg];
2945 var_to_reg_flt(d, src, s1);
2949 var_to_reg_flt(d, src, REG_FTMP1);
2951 x86_64_movq_reg_membase(cd, d, REG_SP, s2 * 8);
2957 switch (iptr->opc) {
2965 x86_64_mov_imm_reg(cd, a, REG_ITMP1);
2966 x86_64_call_reg(cd, REG_ITMP1);
2969 case ICMD_INVOKESTATIC:
2971 a = (s8) lm->stubroutine;
2974 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
2975 x86_64_call_reg(cd, REG_ITMP2);
2978 case ICMD_INVOKESPECIAL:
2980 a = (s8) lm->stubroutine;
2983 gen_nullptr_check(r->argintregs[0]); /* first argument contains pointer */
2984 x86_64_mov_membase_reg(cd, r->argintregs[0], 0, REG_ITMP2); /* access memory for hardware nullptr */
2985 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
2986 x86_64_call_reg(cd, REG_ITMP2);
2989 case ICMD_INVOKEVIRTUAL:
2993 gen_nullptr_check(r->argintregs[0]);
2994 x86_64_mov_membase_reg(cd, r->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
2995 x86_64_mov_membase32_reg(cd, REG_ITMP2, OFFSET(vftbl_t, table[0]) + sizeof(methodptr) * lm->vftblindex, REG_ITMP1);
2996 x86_64_call_reg(cd, REG_ITMP1);
2999 case ICMD_INVOKEINTERFACE:
3004 gen_nullptr_check(r->argintregs[0]);
3005 x86_64_mov_membase_reg(cd, r->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3006 x86_64_mov_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, interfacetable[0]) - sizeof(methodptr) * ci->index, REG_ITMP2);
3007 x86_64_mov_membase32_reg(cd, REG_ITMP2, sizeof(methodptr) * (lm - ci->methods), REG_ITMP1);
3008 x86_64_call_reg(cd, REG_ITMP1);
3013 error("Unkown ICMD-Command: %d", iptr->opc);
3016 /* d contains return type */
3018 if (d != TYPE_VOID) {
3019 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3020 s1 = reg_of_var(m, iptr->dst, REG_RESULT);
3021 M_INTMOVE(REG_RESULT, s1);
3022 store_reg_to_var_int(iptr->dst, s1);
3025 s1 = reg_of_var(m, iptr->dst, REG_FRESULT);
3026 M_FLTMOVE(REG_FRESULT, s1);
3027 store_reg_to_var_flt(iptr->dst, s1);
3034 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3036 /* op1: 0 == array, 1 == class */
3037 /* val.a: (classinfo*) superclass */
3039 /* superclass is an interface:
3041 * return (sub != NULL) &&
3042 * (sub->vftbl->interfacetablelength > super->index) &&
3043 * (sub->vftbl->interfacetable[-super->index] != NULL);
3045 * superclass is a class:
3047 * return ((sub != NULL) && (0
3048 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3049 * super->vftbl->diffvall));
3053 classinfo *super = (classinfo*) iptr->val.a;
3055 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3056 codegen_threadcritrestart(m, cd->mcodeptr - cd->mcodebase);
3059 var_to_reg_int(s1, src, REG_ITMP1);
3060 d = reg_of_var(m, iptr->dst, REG_ITMP3);
3062 M_INTMOVE(s1, REG_ITMP1);
3065 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3066 if (iptr->op1) { /* class/interface */
3067 if (super->flags & ACC_INTERFACE) { /* interface */
3068 x86_64_test_reg_reg(cd, s1, s1);
3070 /* TODO: clean up this calculation */
3071 a = 3; /* mov_membase_reg */
3072 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3074 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3075 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3078 CALCIMMEDIATEBYTES(a, super->index);
3083 a += 3; /* mov_membase_reg */
3084 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3089 x86_64_jcc(cd, X86_64_CC_E, a);
3091 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3092 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength), REG_ITMP2);
3093 x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP2);
3094 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3096 /* TODO: clean up this calculation */
3098 a += 3; /* mov_membase_reg */
3099 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3104 x86_64_jcc(cd, X86_64_CC_LE, a);
3105 x86_64_mov_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP1);
3106 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3107 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3109 } else { /* class */
3110 x86_64_test_reg_reg(cd, s1, s1);
3112 /* TODO: clean up this calculation */
3113 a = 3; /* mov_membase_reg */
3114 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3116 a += 10; /* mov_imm_reg */
3118 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3119 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, baseval));
3121 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3122 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3124 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3125 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3132 x86_64_jcc(cd, X86_64_CC_E, a);
3134 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3135 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3136 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3137 codegen_threadcritstart(m, cd->mcodeptr - cd->mcodebase);
3139 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, baseval), REG_ITMP1);
3140 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP3);
3141 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3142 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3143 codegen_threadcritstop(m, cd->mcodeptr - cd->mcodebase);
3145 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP1);
3146 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3147 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
3148 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3152 panic("internal error: no inlined array instanceof");
3154 store_reg_to_var_int(iptr->dst, d);
3157 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3159 /* op1: 0 == array, 1 == class */
3160 /* val.a: (classinfo*) superclass */
3162 /* superclass is an interface:
3164 * OK if ((sub == NULL) ||
3165 * (sub->vftbl->interfacetablelength > super->index) &&
3166 * (sub->vftbl->interfacetable[-super->index] != NULL));
3168 * superclass is a class:
3170 * OK if ((sub == NULL) || (0
3171 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3172 * super->vftbl->diffvall));
3176 classinfo *super = (classinfo*) iptr->val.a;
3178 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3179 codegen_threadcritrestart(m, cd->mcodeptr - cd->mcodebase);
3181 d = reg_of_var(m, iptr->dst, REG_ITMP3);
3182 var_to_reg_int(s1, src, d);
3183 if (iptr->op1) { /* class/interface */
3184 if (super->flags & ACC_INTERFACE) { /* interface */
3185 x86_64_test_reg_reg(cd, s1, s1);
3187 /* TODO: clean up this calculation */
3188 a = 3; /* mov_membase_reg */
3189 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3191 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3192 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3195 CALCIMMEDIATEBYTES(a, super->index);
3200 a += 3; /* mov_membase_reg */
3201 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3206 x86_64_jcc(cd, X86_64_CC_E, a);
3208 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3209 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength), REG_ITMP2);
3210 x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP2);
3211 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3212 x86_64_jcc(cd, X86_64_CC_LE, 0);
3213 codegen_addxcastrefs(m, cd->mcodeptr);
3214 x86_64_mov_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP2);
3215 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3216 x86_64_jcc(cd, X86_64_CC_E, 0);
3217 codegen_addxcastrefs(m, cd->mcodeptr);
3219 } else { /* class */
3220 x86_64_test_reg_reg(cd, s1, s1);
3222 /* TODO: clean up this calculation */
3223 a = 3; /* mov_membase_reg */
3224 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3225 a += 10; /* mov_imm_reg */
3226 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3227 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, baseval));
3229 if (d != REG_ITMP3) {
3230 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3231 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3232 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3233 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3237 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3238 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3240 a += 10; /* mov_imm_reg */
3241 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3242 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3248 x86_64_jcc(cd, X86_64_CC_E, a);
3250 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3251 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3252 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3253 codegen_threadcritstart(m, cd->mcodeptr - cd->mcodebase);
3255 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, baseval), REG_ITMP1);
3256 if (d != REG_ITMP3) {
3257 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP3);
3258 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3259 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3260 codegen_threadcritstop(m, cd->mcodeptr - cd->mcodebase);
3262 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP1);
3265 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP2);
3266 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
3267 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3268 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3269 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3270 codegen_threadcritstop(m, cd->mcodeptr - cd->mcodebase);
3273 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
3274 x86_64_jcc(cd, X86_64_CC_A, 0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3275 codegen_addxcastrefs(m, cd->mcodeptr);
3279 panic("internal error: no inlined array checkcast");
3282 store_reg_to_var_int(iptr->dst, d);
3285 case ICMD_CHECKASIZE: /* ..., size ==> ..., size */
3287 if (src->flags & INMEMORY) {
3288 x86_64_alul_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
3291 x86_64_testl_reg_reg(cd, src->regoff, src->regoff);
3293 x86_64_jcc(cd, X86_64_CC_L, 0);
3294 codegen_addxcheckarefs(m, cd->mcodeptr);
3297 case ICMD_CHECKEXCEPTION: /* ... ==> ... */
3299 x86_64_test_reg_reg(cd, REG_RESULT, REG_RESULT);
3300 x86_64_jcc(cd, X86_64_CC_E, 0);
3301 codegen_addxexceptionrefs(m, cd->mcodeptr);
3304 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3305 /* op1 = dimension, val.a = array descriptor */
3307 /* check for negative sizes and copy sizes to stack if necessary */
3309 MCODECHECK((iptr->op1 << 1) + 64);
3311 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3312 var_to_reg_int(s2, src, REG_ITMP1);
3313 x86_64_testl_reg_reg(cd, s2, s2);
3314 x86_64_jcc(cd, X86_64_CC_L, 0);
3315 codegen_addxcheckarefs(m, cd->mcodeptr);
3317 /* copy sizes to stack (argument numbers >= INT_ARG_CNT) */
3319 if (src->varkind != ARGVAR) {
3320 x86_64_mov_reg_membase(cd, s2, REG_SP, (s1 + INT_ARG_CNT) * 8);
3324 /* a0 = dimension count */
3325 x86_64_mov_imm_reg(cd, iptr->op1, r->argintregs[0]);
3327 /* a1 = arraydescriptor */
3328 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, r->argintregs[1]);
3330 /* a2 = pointer to dimensions = stack pointer */
3331 x86_64_mov_reg_reg(cd, REG_SP, r->argintregs[2]);
3333 x86_64_mov_imm_reg(cd, (s8) builtin_nmultianewarray, REG_ITMP1);
3334 x86_64_call_reg(cd, REG_ITMP1);
3336 s1 = reg_of_var(m, iptr->dst, REG_RESULT);
3337 M_INTMOVE(REG_RESULT, s1);
3338 store_reg_to_var_int(iptr->dst, s1);
3341 default: error("Unknown pseudo command: %d", iptr->opc);
3344 } /* for instruction */
3346 /* copy values to interface registers */
3348 src = bptr->outstack;
3349 len = bptr->outdepth;
3350 MCODECHECK(64 + len);
3353 if ((src->varkind != STACKVAR)) {
3355 if (IS_FLT_DBL_TYPE(s2)) {
3356 var_to_reg_flt(s1, src, REG_FTMP1);
3357 if (!(r->interfaces[len][s2].flags & INMEMORY)) {
3358 M_FLTMOVE(s1, r->interfaces[len][s2].regoff);
3361 x86_64_movq_reg_membase(cd, s1, REG_SP, r->interfaces[len][s2].regoff * 8);
3365 var_to_reg_int(s1, src, REG_ITMP1);
3366 if (!(r->interfaces[len][s2].flags & INMEMORY)) {
3367 M_INTMOVE(s1, r->interfaces[len][s2].regoff);
3370 x86_64_mov_reg_membase(cd, s1, REG_SP, r->interfaces[len][s2].regoff * 8);
3376 } /* if (bptr -> flags >= BBREACHED) */
3377 } /* for basic block */
3381 /* generate bound check stubs */
3383 u1 *xcodeptr = NULL;
3386 for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3387 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3389 cd->mcodeptr - cd->mcodebase);
3393 /* move index register into REG_ITMP1 */
3394 x86_64_mov_reg_reg(cd, bref->reg, REG_ITMP1); /* 3 bytes */
3396 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3397 dseg_adddata(m, cd->mcodeptr);
3398 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3399 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3401 if (xcodeptr != NULL) {
3402 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3405 xcodeptr = cd->mcodeptr;
3407 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3408 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3409 x86_64_mov_imm_reg(cd, (s8) string_java_lang_ArrayIndexOutOfBoundsException, r->argintregs[0]);
3410 x86_64_mov_reg_reg(cd, REG_ITMP1, r->argintregs[1]);
3411 x86_64_mov_imm_reg(cd, (s8) new_exception_int, REG_ITMP3);
3412 x86_64_call_reg(cd, REG_ITMP3);
3413 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3414 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3416 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3417 x86_64_jmp_reg(cd, REG_ITMP3);
3421 /* generate negative array size check stubs */
3425 for (bref = cd->xcheckarefs; bref != NULL; bref = bref->next) {
3426 if ((m->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3427 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3429 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3433 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3435 cd->mcodeptr - cd->mcodebase);
3439 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3440 dseg_adddata(m, cd->mcodeptr);
3441 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3442 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3444 if (xcodeptr != NULL) {
3445 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3448 xcodeptr = cd->mcodeptr;
3450 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3451 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3452 x86_64_mov_imm_reg(cd, (s8) string_java_lang_NegativeArraySizeException, r->argintregs[0]);
3453 x86_64_mov_imm_reg(cd, (s8) new_exception, REG_ITMP3);
3454 x86_64_call_reg(cd, REG_ITMP3);
3455 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3456 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3458 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3459 x86_64_jmp_reg(cd, REG_ITMP3);
3463 /* generate cast check stubs */
3467 for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3468 if ((m->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3469 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3471 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3475 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3477 cd->mcodeptr - cd->mcodebase);
3481 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3482 dseg_adddata(m, cd->mcodeptr);
3483 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3484 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3486 if (xcodeptr != NULL) {
3487 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3490 xcodeptr = cd->mcodeptr;
3492 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3493 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3494 x86_64_mov_imm_reg(cd, (s8) string_java_lang_ClassCastException, r->argintregs[0]);
3495 x86_64_mov_imm_reg(cd, (s8) new_exception, REG_ITMP3);
3496 x86_64_call_reg(cd, REG_ITMP3);
3497 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3498 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3500 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3501 x86_64_jmp_reg(cd, REG_ITMP3);
3505 /* generate divide by zero check stubs */
3509 for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
3510 if ((m->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3511 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3513 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3517 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3519 cd->mcodeptr - cd->mcodebase);
3523 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3524 dseg_adddata(m, cd->mcodeptr);
3525 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3526 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3528 if (xcodeptr != NULL) {
3529 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3532 xcodeptr = cd->mcodeptr;
3534 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3535 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3536 x86_64_mov_imm_reg(cd, (u8) string_java_lang_ArithmeticException, r->argintregs[0]);
3537 x86_64_mov_imm_reg(cd, (u8) string_java_lang_ArithmeticException_message, r->argintregs[1]);
3538 x86_64_mov_imm_reg(cd, (u8) new_exception, REG_ITMP3);
3539 x86_64_call_reg(cd, REG_ITMP3);
3540 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3541 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3543 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3544 x86_64_jmp_reg(cd, REG_ITMP3);
3548 /* generate exception check stubs */
3552 for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
3553 if ((m->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3554 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3556 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3560 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3562 cd->mcodeptr - cd->mcodebase);
3566 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3567 dseg_adddata(m, cd->mcodeptr);
3568 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3569 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3571 if (xcodeptr != NULL) {
3572 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3575 xcodeptr = cd->mcodeptr;
3577 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3578 x86_64_alu_imm_reg(cd, X86_64_SUB, 8, REG_SP);
3579 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0);
3580 x86_64_mov_imm_reg(cd, (u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3581 x86_64_call_reg(cd, REG_ITMP1);
3582 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
3583 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
3584 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
3585 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);
3586 x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
3588 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
3589 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP1_XPTR);
3590 x86_64_mov_imm_membase(cd, 0, REG_ITMP3, 0);
3593 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3594 x86_64_jmp_reg(cd, REG_ITMP3);
3598 /* generate null pointer check stubs */
3602 for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
3603 if ((m->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3604 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3606 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3610 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3612 cd->mcodeptr - cd->mcodebase);
3616 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3617 dseg_adddata(m, cd->mcodeptr);
3618 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3619 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3621 if (xcodeptr != NULL) {
3622 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3625 xcodeptr = cd->mcodeptr;
3627 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3628 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3629 x86_64_mov_imm_reg(cd, (s8) string_java_lang_NullPointerException, r->argintregs[0]);
3630 x86_64_mov_imm_reg(cd, (s8) new_exception, REG_ITMP3);
3631 x86_64_call_reg(cd, REG_ITMP3);
3632 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3633 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3635 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3636 x86_64_jmp_reg(cd, REG_ITMP3);
3641 codegen_finish(m, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
3645 /* function createcompilerstub *************************************************
3647 creates a stub routine which calls the compiler
3649 *******************************************************************************/
3651 #define COMPSTUBSIZE 23

/* createcompilerstub: build a COMPSTUBSIZE-byte trampoline for method m.
   The stub loads the method pointer into REG_ITMP1 and jumps to
   asm_call_jit_compiler, which compiles m on first invocation and patches
   the call site.  Returns the stub address.
   NOTE(review): this excerpt elides the codegendata setup/teardown lines
   and the final `return s;` -- confirm against the full file. */
3653 u1 *createcompilerstub(methodinfo *m)
3655 u1 *s = CNEW(u1, COMPSTUBSIZE); /* memory to hold the stub */
3658 /* setup codegendata structure */
3661 cd = m->codegendata;
3664 /* code for the stub */
3665 x86_64_mov_imm_reg(cd, (s8) m, REG_ITMP1); /* pass method pointer to compiler */
3666 x86_64_mov_imm_reg(cd, (s8) asm_call_jit_compiler, REG_ITMP3);/* load address */
3667 x86_64_jmp_reg(cd, REG_ITMP3); /* jump to compiler */
3669 /* free codegendata memory */
3672 #if defined(STATISTICS)
3674 count_cstub_len += COMPSTUBSIZE;
3681 /* function removecompilerstub *************************************************
3683 deletes a compilerstub from memory (simply by freeing it)
3685 *******************************************************************************/
/* removecompilerstub: free the memory of a stub previously allocated by
   createcompilerstub (fixed size COMPSTUBSIZE). */
3687 void removecompilerstub(u1 *stub)
3689 CFREE(stub, COMPSTUBSIZE);
3693 /* function: createnativestub **************************************************
3695 creates a stub routine which calls a native method
3697 *******************************************************************************/
3699 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3700 /* static java_objectheader **(*callgetexceptionptrptr)() = builtin_get_exceptionptrptr; */
3703 #define NATIVESTUBSIZE 420

/* createnativestub: generate a stub that bridges the JIT calling
 * convention to the native function f implementing method m.
 *
 * As visible in this excerpt the stub:
 *   - for a static method whose class is not yet initialized, emits a
 *     call to asm_check_clinit (which patches this code);
 *   - spills all argument registers, calls builtin_trace_args, and
 *     reloads them (call tracing; the enclosing condition is elided);
 *   - saves XMM10..XMM15 in a 7*8-byte frame;
 *   - shifts the integer argument registers right by one slot (two for
 *     static methods) to make room for the environment pointer in
 *     argintregs[0] and, for static methods, the class in argintregs[1];
 *     overflowing stack arguments are copied into a fresh frame;
 *   - calls f, optionally reports the result via
 *     builtin_displaymethodstop, restores the XMM registers;
 *   - checks the pending exception pointer (per-thread via
 *     builtin_get_exceptionptrptr with native threads, else the global
 *     _exceptionptr), clears it, and jumps to asm_handle_nat_exception
 *     with REG_ITMP1_XPTR = exception and REG_ITMP2_XPC = faulting PC.
 *
 * Returns the stub address (return statement elided from this excerpt).
 */
3705 u1 *createnativestub(functionptr f, methodinfo *m)
3707 u1 *s = CNEW(u1, NATIVESTUBSIZE); /* memory to hold the stub */
/* BUGFIX: stackframesize was only assigned inside the argument-spill
   branch below but is read unconditionally at `if (stackframesize)`
   after the native call -- initialize to 0 so the no-frame path does
   not read an indeterminate value (undefined behavior). */
3708 s4 stackframesize = 0; /* size of stackframe if needed */
3712 /* setup codegendata structure */
3715 /* initialize registers before using them */
3718 /* keep code size smaller */
3719 r = m->registerdata;
3720 cd = m->codegendata;
3724 descriptor2types(m); /* set paramcount and paramtypes */
3726 /* if function is static, check for initialized */
3728 if (m->flags & ACC_STATIC) {
3729 /* if class isn't yet initialized, do it */
3730 if (!m->class->initialized) {
3731 /* call helper function which patches this code */
3732 x86_64_mov_imm_reg(cd, (u8) m->class, REG_ITMP1);
3733 x86_64_mov_imm_reg(cd, (u8) asm_check_clinit, REG_ITMP2);
3734 x86_64_call_reg(cd, REG_ITMP2);

/* Call tracing: spill every argument register (method pointer goes in
   slot 0), call builtin_trace_args, then reload all registers. */
3741 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
3743 x86_64_mov_reg_membase(cd, r->argintregs[0], REG_SP, 1 * 8);
3744 x86_64_mov_reg_membase(cd, r->argintregs[1], REG_SP, 2 * 8);
3745 x86_64_mov_reg_membase(cd, r->argintregs[2], REG_SP, 3 * 8);
3746 x86_64_mov_reg_membase(cd, r->argintregs[3], REG_SP, 4 * 8);
3747 x86_64_mov_reg_membase(cd, r->argintregs[4], REG_SP, 5 * 8);
3748 x86_64_mov_reg_membase(cd, r->argintregs[5], REG_SP, 6 * 8);
3750 x86_64_movq_reg_membase(cd, r->argfltregs[0], REG_SP, 7 * 8);
3751 x86_64_movq_reg_membase(cd, r->argfltregs[1], REG_SP, 8 * 8);
3752 x86_64_movq_reg_membase(cd, r->argfltregs[2], REG_SP, 9 * 8);
3753 x86_64_movq_reg_membase(cd, r->argfltregs[3], REG_SP, 10 * 8);
3754 /* x86_64_movq_reg_membase(cd, r->argfltregs[4], REG_SP, 11 * 8); */
3755 /* x86_64_movq_reg_membase(cd, r->argfltregs[5], REG_SP, 12 * 8); */
3756 /* x86_64_movq_reg_membase(cd, r->argfltregs[6], REG_SP, 13 * 8); */
3757 /* x86_64_movq_reg_membase(cd, r->argfltregs[7], REG_SP, 14 * 8); */
3759 /* show integer hex code for float arguments */
3760 for (p = 0, l = 0; p < m->paramcount; p++) {
3761 if (IS_FLT_DBL_TYPE(m->paramtypes[p])) {
/* shift the remaining integer args up to open the slot, then move the
   float argument's bits into the integer register for printing */
3762 for (s1 = (m->paramcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : m->paramcount - 2; s1 >= p; s1--) {
3763 x86_64_mov_reg_reg(cd, r->argintregs[s1], r->argintregs[s1 + 1]);
3766 x86_64_movd_freg_reg(cd, r->argfltregs[l], r->argintregs[p]);
3771 x86_64_mov_imm_reg(cd, (s8) m, REG_ITMP1);
3772 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, 0 * 8);
3773 x86_64_mov_imm_reg(cd, (s8) builtin_trace_args, REG_ITMP1);
3774 x86_64_call_reg(cd, REG_ITMP1);
3776 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, r->argintregs[0]);
3777 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, r->argintregs[1]);
3778 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, r->argintregs[2]);
3779 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, r->argintregs[3]);
3780 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, r->argintregs[4]);
3781 x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, r->argintregs[5]);
3783 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, r->argfltregs[0]);
3784 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, r->argfltregs[1]);
3785 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, r->argfltregs[2]);
3786 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, r->argfltregs[3]);
3787 /* x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, r->argfltregs[4]); */
3788 /* x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, r->argfltregs[5]); */
3789 /* x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, r->argfltregs[6]); */
3790 /* x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, r->argfltregs[7]); */
3792 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
3796 x86_64_alu_imm_reg(cd, X86_64_SUB, 7 * 8, REG_SP); /* keep stack 16-byte aligned */
3798 /* save callee saved float registers */
3799 x86_64_movq_reg_membase(cd, XMM15, REG_SP, 0 * 8);
3800 x86_64_movq_reg_membase(cd, XMM14, REG_SP, 1 * 8);
3801 x86_64_movq_reg_membase(cd, XMM13, REG_SP, 2 * 8);
3802 x86_64_movq_reg_membase(cd, XMM12, REG_SP, 3 * 8);
3803 x86_64_movq_reg_membase(cd, XMM11, REG_SP, 4 * 8);
3804 x86_64_movq_reg_membase(cd, XMM10, REG_SP, 5 * 8);
3807 /* save argument registers on stack -- if we have to */
3808 if ((m->flags & ACC_STATIC && m->paramcount > (INT_ARG_CNT - 2)) || m->paramcount > (INT_ARG_CNT - 1)) {
/* paramshiftcnt: slots opened for env (and class for static methods) */
3810 s4 paramshiftcnt = (m->flags & ACC_STATIC) ? 2 : 1;
3811 s4 stackparamcnt = (m->paramcount > INT_ARG_CNT) ? m->paramcount - INT_ARG_CNT : 0;
3813 stackframesize = stackparamcnt + paramshiftcnt;
3815 /* keep stack 16-byte aligned */
3816 if ((stackframesize % 2) == 0) stackframesize++;
3818 x86_64_alu_imm_reg(cd, X86_64_SUB, stackframesize * 8, REG_SP);
3820 /* copy stack arguments into new stack frame -- if any */
3821 for (i = 0; i < stackparamcnt; i++) {
/* NOTE(review): the source displacement uses stackparamcnt, but the
   incoming stack arguments sit above this frame (stackframesize*8),
   the XMM save area (7*8), and the return address -- verify this
   offset against the original file; it looks wrong whenever
   paramshiftcnt or the alignment bump makes the sizes differ. */
3822 x86_64_mov_membase_reg(cd, REG_SP, (stackparamcnt + 1 + i) * 8, REG_ITMP1);
3823 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
/* spill the overflowing register argument(s) into the new frame */
3826 if (m->flags & ACC_STATIC) {
3827 x86_64_mov_reg_membase(cd, r->argintregs[5], REG_SP, 1 * 8);
3828 x86_64_mov_reg_membase(cd, r->argintregs[4], REG_SP, 0 * 8);
3831 x86_64_mov_reg_membase(cd, r->argintregs[5], REG_SP, 0 * 8);
/* else path (no frame needed; `else` keyword elided in this excerpt) */
3835 /* keep stack 16-byte aligned -- this is essential for x86_64 */
3836 x86_64_alu_imm_reg(cd, X86_64_SUB, 8, REG_SP);
/* shift integer argument registers right to open slot(s) at the front */
3840 if (m->flags & ACC_STATIC) {
3841 x86_64_mov_reg_reg(cd, r->argintregs[3], r->argintregs[5]);
3842 x86_64_mov_reg_reg(cd, r->argintregs[2], r->argintregs[4]);
3843 x86_64_mov_reg_reg(cd, r->argintregs[1], r->argintregs[3]);
3844 x86_64_mov_reg_reg(cd, r->argintregs[0], r->argintregs[2]);
3846 /* put class into second argument register */
3847 x86_64_mov_imm_reg(cd, (u8) m->class, r->argintregs[1]);
3850 x86_64_mov_reg_reg(cd, r->argintregs[4], r->argintregs[5]);
3851 x86_64_mov_reg_reg(cd, r->argintregs[3], r->argintregs[4]);
3852 x86_64_mov_reg_reg(cd, r->argintregs[2], r->argintregs[3]);
3853 x86_64_mov_reg_reg(cd, r->argintregs[1], r->argintregs[2]);
3854 x86_64_mov_reg_reg(cd, r->argintregs[0], r->argintregs[1]);
3857 /* put env into first argument register */
3858 x86_64_mov_imm_reg(cd, (u8) &env, r->argintregs[0]);
/* call the native function */
3860 x86_64_mov_imm_reg(cd, (u8) f, REG_ITMP1);
3861 x86_64_call_reg(cd, REG_ITMP1);
3863 /* remove stackframe if there is one */
3864 if (stackframesize) {
3865 x86_64_alu_imm_reg(cd, X86_64_ADD, stackframesize * 8, REG_SP);
/* trace exit: preserve both results across builtin_displaymethodstop */
3869 x86_64_alu_imm_reg(cd, X86_64_SUB, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
3871 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
3872 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
3874 x86_64_mov_imm_reg(cd, (u8) m, r->argintregs[0]);
3875 x86_64_mov_reg_reg(cd, REG_RESULT, r->argintregs[1]);
3876 M_FLTMOVE(REG_FRESULT, r->argfltregs[0]);
3877 M_FLTMOVE(REG_FRESULT, r->argfltregs[1]);
3879 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
3880 x86_64_call_reg(cd, REG_ITMP1);
3882 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
3883 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
3885 x86_64_alu_imm_reg(cd, X86_64_ADD, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
3889 /* restore callee saved registers */
3890 x86_64_movq_membase_reg(cd, REG_SP, 0 * 8, XMM15);
3891 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, XMM14);
3892 x86_64_movq_membase_reg(cd, REG_SP, 2 * 8, XMM13);
3893 x86_64_movq_membase_reg(cd, REG_SP, 3 * 8, XMM12);
3894 x86_64_movq_membase_reg(cd, REG_SP, 4 * 8, XMM11);
3895 x86_64_movq_membase_reg(cd, REG_SP, 5 * 8, XMM10);
3897 x86_64_alu_imm_reg(cd, X86_64_ADD, 7 * 8, REG_SP); /* keep stack 16-byte aligned */
/* load the pending exception pointer into REG_ITMP3: per-thread with
   native threads, otherwise from the global _exceptionptr */
3900 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3901 x86_64_push_reg(cd, REG_RESULT);
3902 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
3903 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
3904 x86_64_call_reg(cd, REG_ITMP3);
3905 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
3906 x86_64_pop_reg(cd, REG_RESULT);
3908 x86_64_mov_imm_reg(cd, (s8) &_exceptionptr, REG_ITMP3);
3909 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP3);
3911 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3912 x86_64_jcc(cd, X86_64_CC_NE, 1);
/* exception pending: fetch it into REG_ITMP1_XPTR and clear the slot */
3916 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3917 x86_64_push_reg(cd, REG_ITMP3);
3918 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
3919 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
3920 x86_64_call_reg(cd, REG_ITMP3);
3921 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
3922 x86_64_pop_reg(cd, REG_ITMP1_XPTR);
3924 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
3925 x86_64_mov_imm_reg(cd, (s8) &_exceptionptr, REG_ITMP3);
3926 x86_64_alu_reg_reg(cd, X86_64_XOR, REG_ITMP2, REG_ITMP2);
3927 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_ITMP3, 0); /* clear exception pointer */
3930 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC); /* get return address from stack */
3931 x86_64_alu_imm_reg(cd, X86_64_SUB, 3, REG_ITMP2_XPC); /* callq */
3933 x86_64_mov_imm_reg(cd, (s8) asm_handle_nat_exception, REG_ITMP3);
3934 x86_64_jmp_reg(cd, REG_ITMP3);
/* debug: report generated stub size once */
3938 static int stubprinted;
/* BUGFIX: the difference is computed as long -- "%d" expects int and is
   undefined behavior on a long argument; use "%ld". */
3940 printf("stubsize: %ld\n", ((long) cd->mcodeptr - (long) s));
3945 /* free codegendata memory */
3948 #if defined(STATISTICS)
3950 count_nstub_len += NATIVESTUBSIZE;
3957 /* function: removenativestub **************************************************
3959 removes a previously created native-stub from memory
3961 *******************************************************************************/
/* removenativestub: free the memory of a stub previously created by
   createnativestub (fixed size NATIVESTUBSIZE). */
3963 void removenativestub(u1 *stub)
3965 CFREE(stub, NATIVESTUBSIZE);
3970 * These are local overrides for various environment variables in Emacs.
3971 * Please do not remove this and leave it at the end of the file, where
3972 * Emacs will automagically detect them.
3973 * ---------------------------------------------------------------------
3976 * indent-tabs-mode: t