1 /* jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996-2005 R. Grafl, A. Krall, C. Kruegel, C. Oates,
4 R. Obermaisser, M. Platter, M. Probst, S. Ring, E. Steiner,
5 C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich, J. Wenninger,
6 Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
22 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
25 Contact: cacao@complang.tuwien.ac.at
27 Authors: Andreas Krall
30 $Id: codegen.c 1735 2004-12-07 14:33:27Z twisti $
40 #include "native/native.h"
41 /* #include "native/jni.h" */
42 #include "vm/global.h"
43 #include "vm/builtin.h"
44 #include "vm/loader.h"
45 #include "vm/tables.h"
46 #include "vm/jit/asmpart.h"
47 #include "vm/jit/jit.h"
48 #include "vm/jit/reg.h"
49 #include "vm/jit/parse.h"
50 #include "vm/jit/x86_64/arch.h"
51 #include "vm/jit/x86_64/codegen.h"
52 #include "vm/jit/x86_64/emitfuncs.h"
53 #include "vm/jit/x86_64/types.h"
54 #include "vm/jit/x86_64/asmoffsets.h"
57 /* register description - array ************************************************/
59 /* #define REG_RES 0 reserved register for OS or code generator */
60 /* #define REG_RET 1 return value register */
61 /* #define REG_EXC 2 exception value register (only old jit) */
62 /* #define REG_SAV 3 (callee) saved register */
63 /* #define REG_TMP 4 scratch temporary register (caller saved) */
64 /* #define REG_ARG 5 argument register (caller saved) */
66 /* #define REG_END -1 last entry in tables */
/* Usage class of each x86_64 integer register (indexed by register number,
   rax..r15), using the REG_* classes described above: rax = return value,
   rsp = reserved (stack pointer), rbp and r12-r15 = callee saved, the rest
   argument/temporary registers per the SysV AMD64 calling convention.
   NOTE(review): the closing terminator of this initializer (REG_END and the
   closing brace) is not visible in this excerpt. */
68 static int nregdescint[] = {
69 REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
70 REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
/* Usage class of each SSE floating-point register (xmm0..xmm15).  The
   commented-out rows show an alternative classification that reserved
   callee-saved float registers; the active rows treat all non-argument,
   non-reserved registers as caller-saved temporaries (SysV AMD64 has no
   callee-saved xmm registers).
   NOTE(review): the closing terminator of this initializer is not visible
   in this excerpt. */
75 static int nregdescfloat[] = {
76 /* REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP, */
77 /* REG_RES, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV, REG_SAV, */
78 REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
79 REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
84 /* Include independent code generation stuff -- include after register */
85 /* descriptions to avoid extern definitions. */
87 #include "vm/jit/codegen.inc"
88 #include "vm/jit/reg.inc"
90 #include "vm/jit/lsra.inc"
94 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* thread_restartcriticalsection: if the interrupted instruction pointer
   (RIP in the saved ucontext) lies inside a registered critical section,
   rewrite RIP so the thread restarts at the address returned by
   thread_checkcritical.
   NOTE(review): the declaration of `critical`, the surrounding braces and
   the guard that skips the RIP rewrite when thread_checkcritical returns
   no match are not visible in this excerpt — confirm against the full
   source. */
95 void thread_restartcriticalsection(ucontext_t *uc)
99 critical = thread_checkcritical((void *) uc->uc_mcontext.gregs[REG_RIP]);
102 uc->uc_mcontext.gregs[REG_RIP] = (u8) critical;
107 /* NullPointerException signal handler for hardware null pointer check */
/* SIGSEGV/SIGBUS handler: converts a hardware fault from an implicit null
   pointer check into a Java NullPointerException.  It reinstalls itself,
   unblocks the signal, allocates the exception object, and then rewrites
   the saved register context so that execution resumes in
   asm_handle_exception with the exception pointer in rax (REG_ITMP1_XPTR)
   and the faulting PC in r10 (REG_ITMP2_XPC).
   NOTE(review): the declaration of `nsig` (sigset_t) and its
   sigemptyset() initialization, plus the function braces, are not visible
   in this excerpt — confirm against the full source. */
109 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
113 /* long faultaddr; */
115 struct ucontext *_uc = (struct ucontext *) _p;
116 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
117 struct sigaction act;
118 java_objectheader *xptr;
120 /* Reset signal handler - necessary for SysV, does no harm for BSD */
123 /* instr = *((int*)(sigctx->rip)); */
124 /* faultaddr = sigctx->sc_regs[(instr >> 16) & 0x1f]; */
126 /* if (faultaddr == 0) { */
127 act.sa_sigaction = (functionptr) catch_NullPointerException; /* reinstall handler */
128 act.sa_flags = SA_SIGINFO;
129 sigaction(sig, &act, NULL);
132 sigaddset(&nsig, sig);
133 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
135 xptr = new_nullpointerexception();
/* Redirect the interrupted thread into the assembler exception handler:
   rax carries the exception object, r10 the original faulting PC. */
137 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
138 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
139 sigctx->rip = (u8) asm_handle_exception;
144 /* faultaddr += (long) ((instr << 16) >> 16); */
145 /* fprintf(stderr, "faulting address: 0x%08x\n", faultaddr); */
146 /* panic("Stack overflow"); */
151 /* ArithmeticException signal handler for hardware divide by zero check */
/* SIGFPE handler: converts a hardware divide-by-zero fault into a Java
   ArithmeticException.  Mirrors catch_NullPointerException: reinstall the
   handler, unblock the signal, allocate the exception, then rewrite the
   saved context so execution resumes in asm_handle_exception with the
   exception pointer in rax (REG_ITMP1_XPTR) and the faulting PC in r10
   (REG_ITMP2_XPC).
   NOTE(review): the declaration of `nsig` (sigset_t) and its
   sigemptyset() initialization, plus the function braces, are not visible
   in this excerpt — confirm against the full source. */
153 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
157 struct ucontext *_uc = (struct ucontext *) _p;
158 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
159 struct sigaction act;
160 java_objectheader *xptr;
162 /* Reset signal handler - necessary for SysV, does no harm for BSD */
164 act.sa_sigaction = (functionptr) catch_ArithmeticException; /* reinstall handler */
165 act.sa_flags = SA_SIGINFO;
166 sigaction(sig, &act, NULL);
169 sigaddset(&nsig, sig);
170 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
172 xptr = new_arithmeticexception();
/* Redirect the interrupted thread into the assembler exception handler. */
174 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
175 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
176 sigctx->rip = (u8) asm_handle_exception;
/* Install the signal handlers that turn hardware faults into Java
   exceptions: SIGSEGV and SIGBUS -> catch_NullPointerException,
   SIGFPE -> catch_ArithmeticException.  All handlers are registered with
   SA_SIGINFO so they receive the siginfo_t/ucontext arguments they need.
   NOTE(review): the function braces and any conditional guards (e.g. a
   checknull/check flag around the SIGSEGV registration) are not visible in
   this excerpt — confirm against the full source. */
182 void init_exceptions(void)
184 struct sigaction act;
186 /* install signal handlers we need to convert to exceptions */
187 sigemptyset(&act.sa_mask);
/* null pointer check via SIGSEGV (general protection / page fault) */
191 act.sa_sigaction = (functionptr) catch_NullPointerException;
192 act.sa_flags = SA_SIGINFO;
193 sigaction(SIGSEGV, &act, NULL);
/* some platforms deliver the same fault as SIGBUS */
197 act.sa_sigaction = (functionptr) catch_NullPointerException;
198 act.sa_flags = SA_SIGINFO;
199 sigaction(SIGBUS, &act, NULL);
/* hardware divide-by-zero check via SIGFPE */
203 act.sa_sigaction = (functionptr) catch_ArithmeticException;
204 act.sa_flags = SA_SIGINFO;
205 sigaction(SIGFPE, &act, NULL);
209 /* function gen_mcode **********************************************************
211 generates machine code
213 *******************************************************************************/
215 void codegen(methodinfo *m, codegendata *cd, registerdata *rd)
217 s4 len, s1, s2, s3, d;
232 /* space to save used callee saved registers */
234 savedregs_num += (rd->savintregcnt - rd->maxsavintreguse);
235 savedregs_num += (rd->savfltregcnt - rd->maxsavfltreguse);
237 parentargs_base = rd->maxmemuse + savedregs_num;
239 #if defined(USE_THREADS) /* space to save argument of monitor_enter */
241 if (checksync && (m->flags & ACC_SYNCHRONIZED))
246 /* keep stack 16-byte aligned for calls into native code e.g. libc or jni */
247 /* (alignment problems with movaps) */
249 if (!(parentargs_base & 0x1)) {
253 /* create method header */
255 (void) dseg_addaddress(cd, m); /* MethodPointer */
256 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
258 #if defined(USE_THREADS)
260 /* IsSync contains the offset relative to the stack pointer for the
261 argument of monitor_exit used in the exception handler. Since the
262 offset could be zero and give a wrong meaning of the flag it is
266 if (checksync && (m->flags & ACC_SYNCHRONIZED))
267 (void) dseg_adds4(cd, (rd->maxmemuse + 1) * 8); /* IsSync */
272 (void) dseg_adds4(cd, 0); /* IsSync */
274 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
275 (void) dseg_adds4(cd, rd->savintregcnt - rd->maxsavintreguse);/* IntSave */
276 (void) dseg_adds4(cd, rd->savfltregcnt - rd->maxsavfltreguse);/* FltSave */
277 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
279 /* create exception table */
281 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
282 dseg_addtarget(cd, ex->start);
283 dseg_addtarget(cd, ex->end);
284 dseg_addtarget(cd, ex->handler);
285 (void) dseg_addaddress(cd, ex->catchtype);
288 /* initialize mcode variables */
290 cd->mcodeptr = (u1 *) cd->mcodebase;
291 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
292 MCODECHECK(128 + m->paramcount);
294 /* create stack frame (if necessary) */
296 if (parentargs_base) {
297 x86_64_alu_imm_reg(cd, X86_64_SUB, parentargs_base * 8, REG_SP);
300 /* save return address and used callee saved registers */
303 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
304 p--; x86_64_mov_reg_membase(cd, rd->savintregs[i], REG_SP, p * 8);
306 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
307 p--; x86_64_movq_reg_membase(cd, rd->savfltregs[i], REG_SP, p * 8);
310 /* save monitorenter argument */
312 #if defined(USE_THREADS)
313 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
314 if (m->flags & ACC_STATIC) {
315 x86_64_mov_imm_reg(cd, (s8) m->class, REG_ITMP1);
316 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, rd->maxmemuse * 8);
319 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, rd->maxmemuse * 8);
324 /* copy argument registers to stack and call trace function with pointer
325 to arguments on stack.
328 x86_64_alu_imm_reg(cd, X86_64_SUB, (6 + 8 + 1 + 1) * 8, REG_SP);
330 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 1 * 8);
331 x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 2 * 8);
332 x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 3 * 8);
333 x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 4 * 8);
334 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 5 * 8);
335 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 6 * 8);
337 x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 7 * 8);
338 x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 8 * 8);
339 x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 9 * 8);
340 x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 10 * 8);
341 /* x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 11 * 8); */
342 /* x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 12 * 8); */
343 /* x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 13 * 8); */
344 /* x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 14 * 8); */
346 for (p = 0, l = 0; p < m->paramcount; p++) {
347 t = m->paramtypes[p];
349 if (IS_FLT_DBL_TYPE(t)) {
350 for (s1 = (m->paramcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : m->paramcount - 2; s1 >= p; s1--) {
351 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
354 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
359 x86_64_mov_imm_reg(cd, (s8) m, REG_ITMP2);
360 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
361 x86_64_mov_imm_reg(cd, (s8) builtin_trace_args, REG_ITMP1);
362 x86_64_call_reg(cd, REG_ITMP1);
364 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[0]);
365 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[1]);
366 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[2]);
367 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[3]);
368 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[4]);
369 x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, rd->argintregs[5]);
371 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[0]);
372 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[1]);
373 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[2]);
374 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[3]);
375 /* x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[4]); */
376 /* x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[5]); */
377 /* x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[6]); */
378 /* x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, rd->argfltregs[7]); */
380 x86_64_alu_imm_reg(cd, X86_64_ADD, (6 + 8 + 1 + 1) * 8, REG_SP);
383 /* take arguments out of register or stack frame */
385 for (p = 0, l = 0, s1 = 0, s2 = 0; p < m->paramcount; p++) {
386 t = m->paramtypes[p];
387 var = &(rd->locals[l][t]);
389 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
392 if (IS_INT_LNG_TYPE(t)) {
399 if (IS_INT_LNG_TYPE(t)) { /* integer args */
400 if (s1 < INT_ARG_CNT) { /* register arguments */
401 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
402 M_INTMOVE(rd->argintregs[s1], var->regoff);
404 } else { /* reg arg -> spilled */
405 x86_64_mov_reg_membase(cd, rd->argintregs[s1], REG_SP, var->regoff * 8);
408 } else { /* stack arguments */
409 pa = s1 - INT_ARG_CNT;
410 if (s2 >= FLT_ARG_CNT) {
411 pa += s2 - FLT_ARG_CNT;
413 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
414 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff); /* + 8 for return address */
415 } else { /* stack arg -> spilled */
416 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1); /* + 8 for return address */
417 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, var->regoff * 8);
422 } else { /* floating args */
423 if (s2 < FLT_ARG_CNT) { /* register arguments */
424 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
425 M_FLTMOVE(rd->argfltregs[s2], var->regoff);
427 } else { /* reg arg -> spilled */
428 x86_64_movq_reg_membase(cd, rd->argfltregs[s2], REG_SP, var->regoff * 8);
431 } else { /* stack arguments */
432 pa = s2 - FLT_ARG_CNT;
433 if (s1 >= INT_ARG_CNT) {
434 pa += s1 - INT_ARG_CNT;
436 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
437 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff);
440 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
441 x86_64_movq_reg_membase(cd, REG_FTMP1, REG_SP, var->regoff * 8);
448 /* call monitorenter function */
450 #if defined(USE_THREADS)
451 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
452 s8 func_enter = (m->flags & ACC_STATIC) ?
453 (s8) builtin_staticmonitorenter : (s8) builtin_monitorenter;
454 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
455 x86_64_mov_imm_reg(cd, func_enter, REG_ITMP1);
456 x86_64_call_reg(cd, REG_ITMP1);
461 /* end of header generation */
463 /* walk through all basic blocks */
464 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
466 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
468 if (bptr->flags >= BBREACHED) {
470 /* branch resolving */
473 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
474 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
479 /* copy interface registers to their destination */
483 MCODECHECK(64 + len);
484 while (src != NULL) {
486 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
487 if (bptr->type == BBTYPE_SBR) {
488 d = reg_of_var(rd, src, REG_ITMP1);
489 x86_64_pop_reg(cd, d);
490 store_reg_to_var_int(src, d);
492 } else if (bptr->type == BBTYPE_EXH) {
493 d = reg_of_var(rd, src, REG_ITMP1);
494 M_INTMOVE(REG_ITMP1, d);
495 store_reg_to_var_int(src, d);
499 d = reg_of_var(rd, src, REG_ITMP1);
500 if ((src->varkind != STACKVAR)) {
502 if (IS_FLT_DBL_TYPE(s2)) {
503 s1 = rd->interfaces[len][s2].regoff;
504 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
508 x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
510 store_reg_to_var_flt(src, d);
513 s1 = rd->interfaces[len][s2].regoff;
514 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
518 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
520 store_reg_to_var_int(src, d);
527 /* walk through all instructions */
531 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
533 MCODECHECK(64); /* an instruction usually needs < 64 words */
536 case ICMD_NOP: /* ... ==> ... */
539 case ICMD_NULLCHECKPOP: /* ..., objectref ==> ... */
540 if (src->flags & INMEMORY) {
541 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
544 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
546 x86_64_jcc(cd, X86_64_CC_E, 0);
547 codegen_addxnullrefs(cd, cd->mcodeptr);
550 /* constant operations ************************************************/
552 case ICMD_ICONST: /* ... ==> ..., constant */
553 /* op1 = 0, val.i = constant */
555 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
556 if (iptr->val.i == 0) {
557 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
559 x86_64_movl_imm_reg(cd, iptr->val.i, d);
561 store_reg_to_var_int(iptr->dst, d);
564 case ICMD_ACONST: /* ... ==> ..., constant */
565 /* op1 = 0, val.a = constant */
567 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
568 if (iptr->val.a == 0) {
569 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
571 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, d);
573 store_reg_to_var_int(iptr->dst, d);
576 case ICMD_LCONST: /* ... ==> ..., constant */
577 /* op1 = 0, val.l = constant */
579 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
580 if (iptr->val.l == 0) {
581 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
583 x86_64_mov_imm_reg(cd, iptr->val.l, d);
585 store_reg_to_var_int(iptr->dst, d);
588 case ICMD_FCONST: /* ... ==> ..., constant */
589 /* op1 = 0, val.f = constant */
591 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
592 a = dseg_addfloat(cd, iptr->val.f);
593 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + a, d);
594 store_reg_to_var_flt(iptr->dst, d);
597 case ICMD_DCONST: /* ... ==> ..., constant */
598 /* op1 = 0, val.d = constant */
600 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
601 a = dseg_adddouble(cd, iptr->val.d);
602 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, d);
603 store_reg_to_var_flt(iptr->dst, d);
607 /* load/store operations **********************************************/
609 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
610 /* op1 = local variable */
612 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
613 if ((iptr->dst->varkind == LOCALVAR) &&
614 (iptr->dst->varnum == iptr->op1)) {
617 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
618 if (var->flags & INMEMORY) {
619 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
620 store_reg_to_var_int(iptr->dst, d);
623 if (iptr->dst->flags & INMEMORY) {
624 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
627 M_INTMOVE(var->regoff, d);
632 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
633 case ICMD_ALOAD: /* op1 = local variable */
635 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
636 if ((iptr->dst->varkind == LOCALVAR) &&
637 (iptr->dst->varnum == iptr->op1)) {
640 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
641 if (var->flags & INMEMORY) {
642 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
643 store_reg_to_var_int(iptr->dst, d);
646 if (iptr->dst->flags & INMEMORY) {
647 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
650 M_INTMOVE(var->regoff, d);
655 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
656 case ICMD_DLOAD: /* op1 = local variable */
658 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
659 if ((iptr->dst->varkind == LOCALVAR) &&
660 (iptr->dst->varnum == iptr->op1)) {
663 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
664 if (var->flags & INMEMORY) {
665 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
666 store_reg_to_var_flt(iptr->dst, d);
669 if (iptr->dst->flags & INMEMORY) {
670 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
673 M_FLTMOVE(var->regoff, d);
678 case ICMD_ISTORE: /* ..., value ==> ... */
679 case ICMD_LSTORE: /* op1 = local variable */
682 if ((src->varkind == LOCALVAR) &&
683 (src->varnum == iptr->op1)) {
686 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
687 if (var->flags & INMEMORY) {
688 var_to_reg_int(s1, src, REG_ITMP1);
689 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
692 var_to_reg_int(s1, src, var->regoff);
693 M_INTMOVE(s1, var->regoff);
697 case ICMD_FSTORE: /* ..., value ==> ... */
698 case ICMD_DSTORE: /* op1 = local variable */
700 if ((src->varkind == LOCALVAR) &&
701 (src->varnum == iptr->op1)) {
704 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
705 if (var->flags & INMEMORY) {
706 var_to_reg_flt(s1, src, REG_FTMP1);
707 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
710 var_to_reg_flt(s1, src, var->regoff);
711 M_FLTMOVE(s1, var->regoff);
716 /* pop/dup/swap operations ********************************************/
718 /* attention: double and longs are only one entry in CACAO ICMDs */
720 case ICMD_POP: /* ..., value ==> ... */
721 case ICMD_POP2: /* ..., value, value ==> ... */
724 case ICMD_DUP: /* ..., a ==> ..., a, a */
725 M_COPY(src, iptr->dst);
728 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
730 M_COPY(src, iptr->dst);
731 M_COPY(src->prev, iptr->dst->prev);
732 M_COPY(iptr->dst, iptr->dst->prev->prev);
735 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
737 M_COPY(src, iptr->dst);
738 M_COPY(src->prev, iptr->dst->prev);
739 M_COPY(src->prev->prev, iptr->dst->prev->prev);
740 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
743 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
745 M_COPY(src, iptr->dst);
746 M_COPY(src->prev, iptr->dst->prev);
749 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
751 M_COPY(src, iptr->dst);
752 M_COPY(src->prev, iptr->dst->prev);
753 M_COPY(src->prev->prev, iptr->dst->prev->prev);
754 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
755 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
758 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
760 M_COPY(src, iptr->dst);
761 M_COPY(src->prev, iptr->dst->prev);
762 M_COPY(src->prev->prev, iptr->dst->prev->prev);
763 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
764 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
765 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
768 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
770 M_COPY(src, iptr->dst->prev);
771 M_COPY(src->prev, iptr->dst);
775 /* integer operations *************************************************/
777 case ICMD_INEG: /* ..., value ==> ..., - value */
779 d = reg_of_var(rd, iptr->dst, REG_NULL);
780 if (iptr->dst->flags & INMEMORY) {
781 if (src->flags & INMEMORY) {
782 if (src->regoff == iptr->dst->regoff) {
783 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
786 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
787 x86_64_negl_reg(cd, REG_ITMP1);
788 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
792 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
793 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
797 if (src->flags & INMEMORY) {
798 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
799 x86_64_negl_reg(cd, d);
802 M_INTMOVE(src->regoff, iptr->dst->regoff);
803 x86_64_negl_reg(cd, iptr->dst->regoff);
808 case ICMD_LNEG: /* ..., value ==> ..., - value */
810 d = reg_of_var(rd, iptr->dst, REG_NULL);
811 if (iptr->dst->flags & INMEMORY) {
812 if (src->flags & INMEMORY) {
813 if (src->regoff == iptr->dst->regoff) {
814 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
817 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
818 x86_64_neg_reg(cd, REG_ITMP1);
819 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
823 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
824 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
828 if (src->flags & INMEMORY) {
829 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
830 x86_64_neg_reg(cd, iptr->dst->regoff);
833 M_INTMOVE(src->regoff, iptr->dst->regoff);
834 x86_64_neg_reg(cd, iptr->dst->regoff);
839 case ICMD_I2L: /* ..., value ==> ..., value */
841 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
842 if (src->flags & INMEMORY) {
843 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
846 x86_64_movslq_reg_reg(cd, src->regoff, d);
848 store_reg_to_var_int(iptr->dst, d);
851 case ICMD_L2I: /* ..., value ==> ..., value */
853 var_to_reg_int(s1, src, REG_ITMP1);
854 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
856 store_reg_to_var_int(iptr->dst, d);
859 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
861 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
862 if (src->flags & INMEMORY) {
863 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
866 x86_64_movsbq_reg_reg(cd, src->regoff, d);
868 store_reg_to_var_int(iptr->dst, d);
871 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
873 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
874 if (src->flags & INMEMORY) {
875 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
878 x86_64_movzwq_reg_reg(cd, src->regoff, d);
880 store_reg_to_var_int(iptr->dst, d);
883 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
885 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
886 if (src->flags & INMEMORY) {
887 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
890 x86_64_movswq_reg_reg(cd, src->regoff, d);
892 store_reg_to_var_int(iptr->dst, d);
896 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
898 d = reg_of_var(rd, iptr->dst, REG_NULL);
899 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
902 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
903 /* val.i = constant */
905 d = reg_of_var(rd, iptr->dst, REG_NULL);
906 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
909 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
911 d = reg_of_var(rd, iptr->dst, REG_NULL);
912 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
915 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
916 /* val.l = constant */
918 d = reg_of_var(rd, iptr->dst, REG_NULL);
919 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
922 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
924 d = reg_of_var(rd, iptr->dst, REG_NULL);
925 if (iptr->dst->flags & INMEMORY) {
926 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
927 if (src->prev->regoff == iptr->dst->regoff) {
928 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
929 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
932 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
933 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
934 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
937 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
938 M_INTMOVE(src->prev->regoff, REG_ITMP1);
939 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
940 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
942 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
943 if (src->prev->regoff == iptr->dst->regoff) {
944 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
947 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
948 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
949 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
953 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
954 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
958 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
959 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
960 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
962 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
963 M_INTMOVE(src->prev->regoff, d);
964 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
966 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
967 /* workaround for reg alloc */
968 if (src->regoff == iptr->dst->regoff) {
969 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
970 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
971 M_INTMOVE(REG_ITMP1, d);
974 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
975 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
979 /* workaround for reg alloc */
980 if (src->regoff == iptr->dst->regoff) {
981 M_INTMOVE(src->prev->regoff, REG_ITMP1);
982 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
983 M_INTMOVE(REG_ITMP1, d);
986 M_INTMOVE(src->prev->regoff, d);
987 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
993 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
994 /* val.i = constant */
996 d = reg_of_var(rd, iptr->dst, REG_NULL);
997 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
1000 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1002 d = reg_of_var(rd, iptr->dst, REG_NULL);
1003 if (iptr->dst->flags & INMEMORY) {
1004 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1005 if (src->prev->regoff == iptr->dst->regoff) {
1006 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1007 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1010 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1011 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1012 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1015 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1016 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1017 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1018 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1020 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1021 if (src->prev->regoff == iptr->dst->regoff) {
1022 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1025 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1026 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1027 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1031 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1032 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1036 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1037 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1038 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1040 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1041 M_INTMOVE(src->prev->regoff, d);
1042 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1044 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1045 /* workaround for reg alloc */
1046 if (src->regoff == iptr->dst->regoff) {
1047 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1048 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1049 M_INTMOVE(REG_ITMP1, d);
1052 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1053 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1057 /* workaround for reg alloc */
1058 if (src->regoff == iptr->dst->regoff) {
1059 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1060 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1061 M_INTMOVE(REG_ITMP1, d);
1064 M_INTMOVE(src->prev->regoff, d);
1065 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1071 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1072 /* val.l = constant */
1074 d = reg_of_var(rd, iptr->dst, REG_NULL);
1075 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1078 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1080 d = reg_of_var(rd, iptr->dst, REG_NULL);
1081 if (iptr->dst->flags & INMEMORY) {
1082 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1083 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1084 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1085 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1087 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1088 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1089 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1090 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1092 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1093 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1094 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1095 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1098 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1099 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1100 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1104 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1105 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1106 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1108 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1109 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1110 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1112 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1113 M_INTMOVE(src->regoff, iptr->dst->regoff);
1114 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1117 if (src->regoff == iptr->dst->regoff) {
1118 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1121 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1122 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1128 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1129 /* val.i = constant */
1131 d = reg_of_var(rd, iptr->dst, REG_NULL);
1132 if (iptr->dst->flags & INMEMORY) {
1133 if (src->flags & INMEMORY) {
1134 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1135 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1138 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1139 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1143 if (src->flags & INMEMORY) {
1144 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1147 if (iptr->val.i == 2) {
1148 M_INTMOVE(src->regoff, iptr->dst->regoff);
1149 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1152 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
1158 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1160 d = reg_of_var(rd, iptr->dst, REG_NULL);
1161 if (iptr->dst->flags & INMEMORY) {
1162 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1163 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1164 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1165 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1167 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1168 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1169 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1170 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1172 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1173 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1174 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1175 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1178 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1179 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1180 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1184 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1185 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1186 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1188 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1189 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1190 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1192 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1193 M_INTMOVE(src->regoff, iptr->dst->regoff);
1194 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1197 if (src->regoff == iptr->dst->regoff) {
1198 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1201 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1202 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
1208 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1209 /* val.l = constant */
1211 d = reg_of_var(rd, iptr->dst, REG_NULL);
1212 if (iptr->dst->flags & INMEMORY) {
1213 if (src->flags & INMEMORY) {
1214 if (x86_64_is_imm32(iptr->val.l)) {
1215 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1218 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1219 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1221 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1224 if (x86_64_is_imm32(iptr->val.l)) {
1225 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1228 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1229 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1231 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1235 if (src->flags & INMEMORY) {
1236 if (x86_64_is_imm32(iptr->val.l)) {
1237 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1240 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1241 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1245 /* should match in many cases */
1246 if (iptr->val.l == 2) {
1247 M_INTMOVE(src->regoff, iptr->dst->regoff);
1248 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1251 if (x86_64_is_imm32(iptr->val.l)) {
1252 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1255 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1256 M_INTMOVE(src->regoff, iptr->dst->regoff);
1257 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
1264 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1266 d = reg_of_var(rd, iptr->dst, REG_NULL);
1267 if (src->prev->flags & INMEMORY) {
1268 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1271 M_INTMOVE(src->prev->regoff, RAX);
1274 if (src->flags & INMEMORY) {
1275 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1278 M_INTMOVE(src->regoff, REG_ITMP3);
1282 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1283 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1284 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1285 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1287 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1289 x86_64_idivl_reg(cd, REG_ITMP3);
1291 if (iptr->dst->flags & INMEMORY) {
1292 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1293 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1296 M_INTMOVE(RAX, iptr->dst->regoff);
1298 if (iptr->dst->regoff != RDX) {
1299 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1304 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1306 d = reg_of_var(rd, iptr->dst, REG_NULL);
1307 if (src->prev->flags & INMEMORY) {
1308 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1311 M_INTMOVE(src->prev->regoff, RAX);
1314 if (src->flags & INMEMORY) {
1315 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1318 M_INTMOVE(src->regoff, REG_ITMP3);
1322 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1323 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1324 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1325 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1326 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1328 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1330 x86_64_idivl_reg(cd, REG_ITMP3);
1332 if (iptr->dst->flags & INMEMORY) {
1333 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1334 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1337 M_INTMOVE(RDX, iptr->dst->regoff);
1339 if (iptr->dst->regoff != RDX) {
1340 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1345 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1346 /* val.i = constant */
1348 var_to_reg_int(s1, src, REG_ITMP1);
1349 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1350 M_INTMOVE(s1, REG_ITMP1);
1351 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1352 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1353 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1354 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1355 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1356 store_reg_to_var_int(iptr->dst, d);
1359 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1360 /* val.i = constant */
1362 var_to_reg_int(s1, src, REG_ITMP1);
1363 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1364 M_INTMOVE(s1, REG_ITMP1);
1365 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1366 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1367 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1368 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1369 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1370 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1371 store_reg_to_var_int(iptr->dst, d);
1375 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1377 d = reg_of_var(rd, iptr->dst, REG_NULL);
1378 if (src->prev->flags & INMEMORY) {
1379 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1382 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1385 if (src->flags & INMEMORY) {
1386 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1389 M_INTMOVE(src->regoff, REG_ITMP3);
1393 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1394 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1395 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1396 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1397 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1399 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1401 x86_64_idiv_reg(cd, REG_ITMP3);
1403 if (iptr->dst->flags & INMEMORY) {
1404 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1405 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1408 M_INTMOVE(RAX, iptr->dst->regoff);
1410 if (iptr->dst->regoff != RDX) {
1411 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1416 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1418 d = reg_of_var(rd, iptr->dst, REG_NULL);
1419 if (src->prev->flags & INMEMORY) {
1420 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1423 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1426 if (src->flags & INMEMORY) {
1427 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1430 M_INTMOVE(src->regoff, REG_ITMP3);
1434 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1435 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1436 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1437 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1438 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1439 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1441 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1443 x86_64_idiv_reg(cd, REG_ITMP3);
1445 if (iptr->dst->flags & INMEMORY) {
1446 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1447 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1450 M_INTMOVE(RDX, iptr->dst->regoff);
1452 if (iptr->dst->regoff != RDX) {
1453 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1458 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1459 /* val.i = constant */
1461 var_to_reg_int(s1, src, REG_ITMP1);
1462 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1463 M_INTMOVE(s1, REG_ITMP1);
1464 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1465 x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1466 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1467 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1468 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1469 store_reg_to_var_int(iptr->dst, d);
1472 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1473 /* val.l = constant */
1475 var_to_reg_int(s1, src, REG_ITMP1);
1476 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1477 M_INTMOVE(s1, REG_ITMP1);
1478 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1479 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1480 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1481 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1482 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1483 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1484 store_reg_to_var_int(iptr->dst, d);
1487 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1489 d = reg_of_var(rd, iptr->dst, REG_NULL);
1490 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1493 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1494 /* val.i = constant */
1496 d = reg_of_var(rd, iptr->dst, REG_NULL);
1497 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1500 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1502 d = reg_of_var(rd, iptr->dst, REG_NULL);
1503 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1506 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1507 /* val.i = constant */
1509 d = reg_of_var(rd, iptr->dst, REG_NULL);
1510 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1513 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1515 d = reg_of_var(rd, iptr->dst, REG_NULL);
1516 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1519 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1520 /* val.i = constant */
1522 d = reg_of_var(rd, iptr->dst, REG_NULL);
1523 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1526 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1528 d = reg_of_var(rd, iptr->dst, REG_NULL);
1529 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1532 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1533 /* val.i = constant */
1535 d = reg_of_var(rd, iptr->dst, REG_NULL);
1536 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1539 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1541 d = reg_of_var(rd, iptr->dst, REG_NULL);
1542 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1545 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1546 /* val.i = constant */
1548 d = reg_of_var(rd, iptr->dst, REG_NULL);
1549 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1552 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1554 d = reg_of_var(rd, iptr->dst, REG_NULL);
1555 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1558 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1559 /* val.l = constant */
1561 d = reg_of_var(rd, iptr->dst, REG_NULL);
1562 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1565 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1567 d = reg_of_var(rd, iptr->dst, REG_NULL);
1568 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1571 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1572 /* val.i = constant */
1574 d = reg_of_var(rd, iptr->dst, REG_NULL);
1575 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1578 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1580 d = reg_of_var(rd, iptr->dst, REG_NULL);
1581 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1584 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1585 /* val.l = constant */
1587 d = reg_of_var(rd, iptr->dst, REG_NULL);
1588 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1591 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1593 d = reg_of_var(rd, iptr->dst, REG_NULL);
1594 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1597 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1598 /* val.i = constant */
1600 d = reg_of_var(rd, iptr->dst, REG_NULL);
1601 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1604 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1606 d = reg_of_var(rd, iptr->dst, REG_NULL);
1607 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1610 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1611 /* val.l = constant */
1613 d = reg_of_var(rd, iptr->dst, REG_NULL);
1614 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1617 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1619 d = reg_of_var(rd, iptr->dst, REG_NULL);
1620 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1623 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1624 /* val.i = constant */
1626 d = reg_of_var(rd, iptr->dst, REG_NULL);
1627 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1630 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1632 d = reg_of_var(rd, iptr->dst, REG_NULL);
1633 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1636 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1637 /* val.l = constant */
1639 d = reg_of_var(rd, iptr->dst, REG_NULL);
1640 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
1644 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1645 /* op1 = variable, val.i = constant */
1647 /* using inc and dec is definitely faster than add -- tested */
1650 var = &(rd->locals[iptr->op1][TYPE_INT]);
1652 if (var->flags & INMEMORY) {
1653 if (iptr->val.i == 1) {
1654 x86_64_incl_membase(cd, REG_SP, d * 8);
1656 } else if (iptr->val.i == -1) {
1657 x86_64_decl_membase(cd, REG_SP, d * 8);
1660 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1664 if (iptr->val.i == 1) {
1665 x86_64_incl_reg(cd, d);
1667 } else if (iptr->val.i == -1) {
1668 x86_64_decl_reg(cd, d);
1671 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1677 /* floating operations ************************************************/
1679 case ICMD_FNEG: /* ..., value ==> ..., - value */
1681 var_to_reg_flt(s1, src, REG_FTMP1);
1682 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1683 a = dseg_adds4(cd, 0x80000000);
1685 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1686 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1687 store_reg_to_var_flt(iptr->dst, d);
1690 case ICMD_DNEG: /* ..., value ==> ..., - value */
1692 var_to_reg_flt(s1, src, REG_FTMP1);
1693 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1694 a = dseg_adds8(cd, 0x8000000000000000);
1696 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1697 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1698 store_reg_to_var_flt(iptr->dst, d);
1701 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1703 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1704 var_to_reg_flt(s2, src, REG_FTMP2);
1705 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1707 x86_64_addss_reg_reg(cd, s2, d);
1708 } else if (s2 == d) {
1709 x86_64_addss_reg_reg(cd, s1, d);
1712 x86_64_addss_reg_reg(cd, s2, d);
1714 store_reg_to_var_flt(iptr->dst, d);
1717 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1719 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1720 var_to_reg_flt(s2, src, REG_FTMP2);
1721 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1723 x86_64_addsd_reg_reg(cd, s2, d);
1724 } else if (s2 == d) {
1725 x86_64_addsd_reg_reg(cd, s1, d);
1728 x86_64_addsd_reg_reg(cd, s2, d);
1730 store_reg_to_var_flt(iptr->dst, d);
1733 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1735 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1736 var_to_reg_flt(s2, src, REG_FTMP2);
1737 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1739 M_FLTMOVE(s2, REG_FTMP2);
1743 x86_64_subss_reg_reg(cd, s2, d);
1744 store_reg_to_var_flt(iptr->dst, d);
1747 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1749 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1750 var_to_reg_flt(s2, src, REG_FTMP2);
1751 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1753 M_FLTMOVE(s2, REG_FTMP2);
1757 x86_64_subsd_reg_reg(cd, s2, d);
1758 store_reg_to_var_flt(iptr->dst, d);
1761 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1763 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1764 var_to_reg_flt(s2, src, REG_FTMP2);
1765 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1767 x86_64_mulss_reg_reg(cd, s2, d);
1768 } else if (s2 == d) {
1769 x86_64_mulss_reg_reg(cd, s1, d);
1772 x86_64_mulss_reg_reg(cd, s2, d);
1774 store_reg_to_var_flt(iptr->dst, d);
1777 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1779 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1780 var_to_reg_flt(s2, src, REG_FTMP2);
1781 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1783 x86_64_mulsd_reg_reg(cd, s2, d);
1784 } else if (s2 == d) {
1785 x86_64_mulsd_reg_reg(cd, s1, d);
1788 x86_64_mulsd_reg_reg(cd, s2, d);
1790 store_reg_to_var_flt(iptr->dst, d);
1793 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1795 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1796 var_to_reg_flt(s2, src, REG_FTMP2);
1797 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1799 M_FLTMOVE(s2, REG_FTMP2);
1803 x86_64_divss_reg_reg(cd, s2, d);
1804 store_reg_to_var_flt(iptr->dst, d);
1807 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1809 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1810 var_to_reg_flt(s2, src, REG_FTMP2);
1811 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1813 M_FLTMOVE(s2, REG_FTMP2);
1817 x86_64_divsd_reg_reg(cd, s2, d);
1818 store_reg_to_var_flt(iptr->dst, d);
1821 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1823 var_to_reg_int(s1, src, REG_ITMP1);
1824 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1825 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1826 store_reg_to_var_flt(iptr->dst, d);
1829 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1831 var_to_reg_int(s1, src, REG_ITMP1);
1832 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1833 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1834 store_reg_to_var_flt(iptr->dst, d);
1837 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1839 var_to_reg_int(s1, src, REG_ITMP1);
1840 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1841 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1842 store_reg_to_var_flt(iptr->dst, d);
1845 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1847 var_to_reg_int(s1, src, REG_ITMP1);
1848 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1849 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1850 store_reg_to_var_flt(iptr->dst, d);
1853 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1855 var_to_reg_flt(s1, src, REG_FTMP1);
1856 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1857 x86_64_cvttss2si_reg_reg(cd, s1, d);
1858 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1859 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1860 x86_64_jcc(cd, X86_64_CC_NE, a);
1861 M_FLTMOVE(s1, REG_FTMP1);
1862 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1863 x86_64_call_reg(cd, REG_ITMP2);
1864 M_INTMOVE(REG_RESULT, d);
1865 store_reg_to_var_int(iptr->dst, d);
1868 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1870 var_to_reg_flt(s1, src, REG_FTMP1);
1871 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1872 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1873 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1874 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1875 x86_64_jcc(cd, X86_64_CC_NE, a);
1876 M_FLTMOVE(s1, REG_FTMP1);
1877 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1878 x86_64_call_reg(cd, REG_ITMP2);
1879 M_INTMOVE(REG_RESULT, d);
1880 store_reg_to_var_int(iptr->dst, d);
1883 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1885 var_to_reg_flt(s1, src, REG_FTMP1);
1886 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1887 x86_64_cvttss2siq_reg_reg(cd, s1, d);
1888 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1889 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1890 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1891 x86_64_jcc(cd, X86_64_CC_NE, a);
1892 M_FLTMOVE(s1, REG_FTMP1);
1893 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
1894 x86_64_call_reg(cd, REG_ITMP2);
1895 M_INTMOVE(REG_RESULT, d);
1896 store_reg_to_var_int(iptr->dst, d);
1899 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1901 var_to_reg_flt(s1, src, REG_FTMP1);
1902 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1903 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1904 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1905 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1906 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1907 x86_64_jcc(cd, X86_64_CC_NE, a);
1908 M_FLTMOVE(s1, REG_FTMP1);
1909 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
1910 x86_64_call_reg(cd, REG_ITMP2);
1911 M_INTMOVE(REG_RESULT, d);
1912 store_reg_to_var_int(iptr->dst, d);
1915 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1917 var_to_reg_flt(s1, src, REG_FTMP1);
1918 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1919 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1920 store_reg_to_var_flt(iptr->dst, d);
1923 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1925 var_to_reg_flt(s1, src, REG_FTMP1);
1926 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1927 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1928 store_reg_to_var_flt(iptr->dst, d);
1931 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1932 /* == => 0, < => 1, > => -1 */
1934 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1935 var_to_reg_flt(s2, src, REG_FTMP2);
1936 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1937 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1938 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1939 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1940 x86_64_ucomiss_reg_reg(cd, s1, s2);
1941 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1942 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1943 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1944 store_reg_to_var_int(iptr->dst, d);
1947 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1948 /* == => 0, < => 1, > => -1 */
1950 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1951 var_to_reg_flt(s2, src, REG_FTMP2);
1952 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1953 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1954 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1955 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1956 x86_64_ucomiss_reg_reg(cd, s1, s2);
1957 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1958 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1959 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1960 store_reg_to_var_int(iptr->dst, d);
1963 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1964 /* == => 0, < => 1, > => -1 */
1966 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1967 var_to_reg_flt(s2, src, REG_FTMP2);
1968 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1969 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1970 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1971 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1972 x86_64_ucomisd_reg_reg(cd, s1, s2);
1973 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1974 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1975 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1976 store_reg_to_var_int(iptr->dst, d);
1979 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1980 /* == => 0, < => 1, > => -1 */
1982 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1983 var_to_reg_flt(s2, src, REG_FTMP2);
1984 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1985 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1986 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1987 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1988 x86_64_ucomisd_reg_reg(cd, s1, s2);
1989 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1990 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1991 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1992 store_reg_to_var_int(iptr->dst, d);
1996 /* memory operations **************************************************/
1998 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
2000 var_to_reg_int(s1, src, REG_ITMP1);
2001 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2002 gen_nullptr_check(s1);
2003 x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
2004 store_reg_to_var_int(iptr->dst, d);
2007 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2009 var_to_reg_int(s1, src->prev, REG_ITMP1);
2010 var_to_reg_int(s2, src, REG_ITMP2);
2011 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2012 if (iptr->op1 == 0) {
2013 gen_nullptr_check(s1);
2016 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2017 store_reg_to_var_int(iptr->dst, d);
2020 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2022 var_to_reg_int(s1, src->prev, REG_ITMP1);
2023 var_to_reg_int(s2, src, REG_ITMP2);
2024 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2025 if (iptr->op1 == 0) {
2026 gen_nullptr_check(s1);
2029 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2030 store_reg_to_var_int(iptr->dst, d);
2033 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2035 var_to_reg_int(s1, src->prev, REG_ITMP1);
2036 var_to_reg_int(s2, src, REG_ITMP2);
2037 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2038 if (iptr->op1 == 0) {
2039 gen_nullptr_check(s1);
2042 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2043 store_reg_to_var_int(iptr->dst, d);
2046 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2048 var_to_reg_int(s1, src->prev, REG_ITMP1);
2049 var_to_reg_int(s2, src, REG_ITMP2);
2050 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2051 if (iptr->op1 == 0) {
2052 gen_nullptr_check(s1);
2055 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2056 store_reg_to_var_flt(iptr->dst, d);
2059 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2061 var_to_reg_int(s1, src->prev, REG_ITMP1);
2062 var_to_reg_int(s2, src, REG_ITMP2);
2063 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2064 if (iptr->op1 == 0) {
2065 gen_nullptr_check(s1);
2068 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2069 store_reg_to_var_flt(iptr->dst, d);
2072 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2074 var_to_reg_int(s1, src->prev, REG_ITMP1);
2075 var_to_reg_int(s2, src, REG_ITMP2);
2076 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2077 if (iptr->op1 == 0) {
2078 gen_nullptr_check(s1);
2081 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2082 store_reg_to_var_int(iptr->dst, d);
2085 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2087 var_to_reg_int(s1, src->prev, REG_ITMP1);
2088 var_to_reg_int(s2, src, REG_ITMP2);
2089 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2090 if (iptr->op1 == 0) {
2091 gen_nullptr_check(s1);
2094 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2095 store_reg_to_var_int(iptr->dst, d);
2098 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2100 var_to_reg_int(s1, src->prev, REG_ITMP1);
2101 var_to_reg_int(s2, src, REG_ITMP2);
2102 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2103 if (iptr->op1 == 0) {
2104 gen_nullptr_check(s1);
2107 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2108 store_reg_to_var_int(iptr->dst, d);
2112 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2114 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2115 var_to_reg_int(s2, src->prev, REG_ITMP2);
2116 if (iptr->op1 == 0) {
2117 gen_nullptr_check(s1);
2120 var_to_reg_int(s3, src, REG_ITMP3);
2121 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2124 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2126 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2127 var_to_reg_int(s2, src->prev, REG_ITMP2);
2128 if (iptr->op1 == 0) {
2129 gen_nullptr_check(s1);
2132 var_to_reg_int(s3, src, REG_ITMP3);
2133 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2136 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2138 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2139 var_to_reg_int(s2, src->prev, REG_ITMP2);
2140 if (iptr->op1 == 0) {
2141 gen_nullptr_check(s1);
2144 var_to_reg_int(s3, src, REG_ITMP3);
2145 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2148 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2150 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2151 var_to_reg_int(s2, src->prev, REG_ITMP2);
2152 if (iptr->op1 == 0) {
2153 gen_nullptr_check(s1);
2156 var_to_reg_flt(s3, src, REG_FTMP3);
2157 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2160 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2162 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2163 var_to_reg_int(s2, src->prev, REG_ITMP2);
2164 if (iptr->op1 == 0) {
2165 gen_nullptr_check(s1);
2168 var_to_reg_flt(s3, src, REG_FTMP3);
2169 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2172 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2174 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2175 var_to_reg_int(s2, src->prev, REG_ITMP2);
2176 if (iptr->op1 == 0) {
2177 gen_nullptr_check(s1);
2180 var_to_reg_int(s3, src, REG_ITMP3);
2181 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2184 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2186 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2187 var_to_reg_int(s2, src->prev, REG_ITMP2);
2188 if (iptr->op1 == 0) {
2189 gen_nullptr_check(s1);
2192 var_to_reg_int(s3, src, REG_ITMP3);
2193 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2196 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2198 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2199 var_to_reg_int(s2, src->prev, REG_ITMP2);
2200 if (iptr->op1 == 0) {
2201 gen_nullptr_check(s1);
2204 var_to_reg_int(s3, src, REG_ITMP3);
2205 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2208 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2210 var_to_reg_int(s1, src->prev, REG_ITMP1);
2211 var_to_reg_int(s2, src, REG_ITMP2);
2212 if (iptr->op1 == 0) {
2213 gen_nullptr_check(s1);
2216 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2219 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2221 var_to_reg_int(s1, src->prev, REG_ITMP1);
2222 var_to_reg_int(s2, src, REG_ITMP2);
2223 if (iptr->op1 == 0) {
2224 gen_nullptr_check(s1);
2228 if (x86_64_is_imm32(iptr->val.l)) {
2229 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2232 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2233 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2237 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2239 var_to_reg_int(s1, src->prev, REG_ITMP1);
2240 var_to_reg_int(s2, src, REG_ITMP2);
2241 if (iptr->op1 == 0) {
2242 gen_nullptr_check(s1);
2245 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2248 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2250 var_to_reg_int(s1, src->prev, REG_ITMP1);
2251 var_to_reg_int(s2, src, REG_ITMP2);
2252 if (iptr->op1 == 0) {
2253 gen_nullptr_check(s1);
2256 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2259 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2261 var_to_reg_int(s1, src->prev, REG_ITMP1);
2262 var_to_reg_int(s2, src, REG_ITMP2);
2263 if (iptr->op1 == 0) {
2264 gen_nullptr_check(s1);
2267 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2270 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2272 var_to_reg_int(s1, src->prev, REG_ITMP1);
2273 var_to_reg_int(s2, src, REG_ITMP2);
2274 if (iptr->op1 == 0) {
2275 gen_nullptr_check(s1);
2278 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2282 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2283 /* op1 = type, val.a = field address */
2285 /* If the static fields' class is not yet initialized, we do it */
2286 /* now. The call code is generated later. */
2287 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2288 codegen_addclinitref(cd, cd->mcodeptr, ((fieldinfo *) iptr->val.a)->class);
2290 /* This is just for debugging purposes. Is very difficult to */
2291 /* read patched code. Here we patch the following 5 nop's */
2292 /* so that the real code keeps untouched. */
2293 if (showdisassemble) {
2302 /* This approach is much faster than moving the field address */
2303 /* inline into a register. */
2304 a = dseg_addaddress(cd, &(((fieldinfo *) iptr->val.a)->value));
2305 x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
2306 switch (iptr->op1) {
2308 var_to_reg_int(s2, src, REG_ITMP1);
2309 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2313 var_to_reg_int(s2, src, REG_ITMP1);
2314 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2317 var_to_reg_flt(s2, src, REG_FTMP1);
2318 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2321 var_to_reg_flt(s2, src, REG_FTMP1);
2322 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
2325 throw_cacao_exception_exit(string_java_lang_InternalError,
2326 "Unknown PUTSTATIC operand type %d",
2331 case ICMD_GETSTATIC: /* ... ==> ..., value */
2332 /* op1 = type, val.a = field address */
2334 /* If the static fields' class is not yet initialized, we do it */
2335 /* now. The call code is generated later. */
2336 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2337 codegen_addclinitref(cd, cd->mcodeptr, ((fieldinfo *) iptr->val.a)->class);
2339 /* This is just for debugging purposes. Is very difficult to */
2340 /* read patched code. Here we patch the following 5 nop's */
2341 /* so that the real code keeps untouched. */
2342 if (showdisassemble) {
2351 /* This approach is much faster than moving the field address */
2352 /* inline into a register. */
2353 a = dseg_addaddress(cd, &(((fieldinfo *) iptr->val.a)->value));
2354 x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
2355 switch (iptr->op1) {
2357 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2358 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2359 store_reg_to_var_int(iptr->dst, d);
2363 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2364 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2365 store_reg_to_var_int(iptr->dst, d);
2368 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2369 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2370 store_reg_to_var_flt(iptr->dst, d);
2373 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2374 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2375 store_reg_to_var_flt(iptr->dst, d);
2378 throw_cacao_exception_exit(string_java_lang_InternalError,
2379 "Unknown GETSTATIC operand type %d",
2384 case ICMD_PUTFIELD: /* ..., value ==> ... */
2385 /* op1 = type, val.i = field offset */
2387 a = ((fieldinfo *)(iptr->val.a))->offset;
2388 var_to_reg_int(s1, src->prev, REG_ITMP1);
2389 switch (iptr->op1) {
2391 var_to_reg_int(s2, src, REG_ITMP2);
2392 gen_nullptr_check(s1);
2393 x86_64_movl_reg_membase(cd, s2, s1, a);
2397 var_to_reg_int(s2, src, REG_ITMP2);
2398 gen_nullptr_check(s1);
2399 x86_64_mov_reg_membase(cd, s2, s1, a);
2402 var_to_reg_flt(s2, src, REG_FTMP2);
2403 gen_nullptr_check(s1);
2404 x86_64_movss_reg_membase(cd, s2, s1, a);
2407 var_to_reg_flt(s2, src, REG_FTMP2);
2408 gen_nullptr_check(s1);
2409 x86_64_movsd_reg_membase(cd, s2, s1, a);
2412 throw_cacao_exception_exit(string_java_lang_InternalError,
2413 "Unknown PUTFIELD operand type %d",
2418 case ICMD_GETFIELD: /* ... ==> ..., value */
2419 /* op1 = type, val.i = field offset */
2421 a = ((fieldinfo *)(iptr->val.a))->offset;
2422 var_to_reg_int(s1, src, REG_ITMP1);
2423 switch (iptr->op1) {
2425 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2426 gen_nullptr_check(s1);
2427 x86_64_movl_membase_reg(cd, s1, a, d);
2428 store_reg_to_var_int(iptr->dst, d);
2432 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2433 gen_nullptr_check(s1);
2434 x86_64_mov_membase_reg(cd, s1, a, d);
2435 store_reg_to_var_int(iptr->dst, d);
2438 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2439 gen_nullptr_check(s1);
2440 x86_64_movss_membase_reg(cd, s1, a, d);
2441 store_reg_to_var_flt(iptr->dst, d);
2444 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2445 gen_nullptr_check(s1);
2446 x86_64_movsd_membase_reg(cd, s1, a, d);
2447 store_reg_to_var_flt(iptr->dst, d);
2450 throw_cacao_exception_exit(string_java_lang_InternalError,
2451 "Unknown GETFIELD operand type %d",
2457 /* branch operations **************************************************/
2459 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2461 var_to_reg_int(s1, src, REG_ITMP1);
2462 M_INTMOVE(s1, REG_ITMP1_XPTR);
2464 x86_64_call_imm(cd, 0); /* passing exception pointer */
2465 x86_64_pop_reg(cd, REG_ITMP2_XPC);
2467 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
2468 x86_64_jmp_reg(cd, REG_ITMP3);
2472 case ICMD_GOTO: /* ... ==> ... */
2473 /* op1 = target JavaVM pc */
2475 x86_64_jmp_imm(cd, 0);
2476 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2480 case ICMD_JSR: /* ... ==> ... */
2481 /* op1 = target JavaVM pc */
2483 x86_64_call_imm(cd, 0);
2484 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2487 case ICMD_RET: /* ... ==> ... */
2488 /* op1 = local variable */
2490 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2491 var_to_reg_int(s1, var, REG_ITMP1);
2492 x86_64_jmp_reg(cd, s1);
2495 case ICMD_IFNULL: /* ..., value ==> ... */
2496 /* op1 = target JavaVM pc */
2498 if (src->flags & INMEMORY) {
2499 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2502 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2504 x86_64_jcc(cd, X86_64_CC_E, 0);
2505 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2508 case ICMD_IFNONNULL: /* ..., value ==> ... */
2509 /* op1 = target JavaVM pc */
2511 if (src->flags & INMEMORY) {
2512 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2515 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2517 x86_64_jcc(cd, X86_64_CC_NE, 0);
2518 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2521 case ICMD_IFEQ: /* ..., value ==> ... */
2522 /* op1 = target JavaVM pc, val.i = constant */
2524 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2527 case ICMD_IFLT: /* ..., value ==> ... */
2528 /* op1 = target JavaVM pc, val.i = constant */
2530 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2533 case ICMD_IFLE: /* ..., value ==> ... */
2534 /* op1 = target JavaVM pc, val.i = constant */
2536 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2539 case ICMD_IFNE: /* ..., value ==> ... */
2540 /* op1 = target JavaVM pc, val.i = constant */
2542 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2545 case ICMD_IFGT: /* ..., value ==> ... */
2546 /* op1 = target JavaVM pc, val.i = constant */
2548 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2551 case ICMD_IFGE: /* ..., value ==> ... */
2552 /* op1 = target JavaVM pc, val.i = constant */
2554 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
2557 case ICMD_IF_LEQ: /* ..., value ==> ... */
2558 /* op1 = target JavaVM pc, val.l = constant */
2560 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2563 case ICMD_IF_LLT: /* ..., value ==> ... */
2564 /* op1 = target JavaVM pc, val.l = constant */
2566 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2569 case ICMD_IF_LLE: /* ..., value ==> ... */
2570 /* op1 = target JavaVM pc, val.l = constant */
2572 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2575 case ICMD_IF_LNE: /* ..., value ==> ... */
2576 /* op1 = target JavaVM pc, val.l = constant */
2578 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2581 case ICMD_IF_LGT: /* ..., value ==> ... */
2582 /* op1 = target JavaVM pc, val.l = constant */
2584 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2587 case ICMD_IF_LGE: /* ..., value ==> ... */
2588 /* op1 = target JavaVM pc, val.l = constant */
2590 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
2593 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2594 /* op1 = target JavaVM pc */
2596 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2599 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2600 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2602 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2605 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2606 /* op1 = target JavaVM pc */
2608 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2611 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2612 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2614 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2617 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2618 /* op1 = target JavaVM pc */
2620 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2623 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2624 /* op1 = target JavaVM pc */
2626 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2629 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2630 /* op1 = target JavaVM pc */
2632 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2635 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2636 /* op1 = target JavaVM pc */
2638 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2641 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2642 /* op1 = target JavaVM pc */
2644 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2647 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2648 /* op1 = target JavaVM pc */
2650 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2653 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2654 /* op1 = target JavaVM pc */
2656 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2659 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2660 /* op1 = target JavaVM pc */
2662 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2665 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
2667 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2670 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2671 /* val.i = constant */
2673 var_to_reg_int(s1, src, REG_ITMP1);
2674 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2676 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2678 M_INTMOVE(s1, REG_ITMP1);
2681 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2683 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2684 x86_64_testl_reg_reg(cd, s1, s1);
2685 x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2686 store_reg_to_var_int(iptr->dst, d);
2689 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2690 /* val.i = constant */
2692 var_to_reg_int(s1, src, REG_ITMP1);
2693 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2695 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2697 M_INTMOVE(s1, REG_ITMP1);
2700 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2702 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2703 x86_64_testl_reg_reg(cd, s1, s1);
2704 x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2705 store_reg_to_var_int(iptr->dst, d);
2708 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2709 /* val.i = constant */
2711 var_to_reg_int(s1, src, REG_ITMP1);
2712 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2714 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2716 M_INTMOVE(s1, REG_ITMP1);
2719 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2721 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2722 x86_64_testl_reg_reg(cd, s1, s1);
2723 x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2724 store_reg_to_var_int(iptr->dst, d);
2727 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2728 /* val.i = constant */
2730 var_to_reg_int(s1, src, REG_ITMP1);
2731 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2733 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2735 M_INTMOVE(s1, REG_ITMP1);
2738 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2740 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2741 x86_64_testl_reg_reg(cd, s1, s1);
2742 x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2743 store_reg_to_var_int(iptr->dst, d);
2746 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2747 /* val.i = constant */
2749 var_to_reg_int(s1, src, REG_ITMP1);
2750 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2752 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2754 M_INTMOVE(s1, REG_ITMP1);
2757 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2759 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2760 x86_64_testl_reg_reg(cd, s1, s1);
2761 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2762 store_reg_to_var_int(iptr->dst, d);
2765 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2766 /* val.i = constant */
2768 var_to_reg_int(s1, src, REG_ITMP1);
2769 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2771 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2773 M_INTMOVE(s1, REG_ITMP1);
2776 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2778 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2779 x86_64_testl_reg_reg(cd, s1, s1);
2780 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2781 store_reg_to_var_int(iptr->dst, d);
2785 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2789 var_to_reg_int(s1, src, REG_RESULT);
2790 M_INTMOVE(s1, REG_RESULT);
2792 #if defined(USE_THREADS)
2793 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2794 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2795 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, rd->maxmemuse * 8);
2796 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2797 x86_64_call_reg(cd, REG_ITMP1);
2798 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_RESULT);
2802 goto nowperformreturn;
2804 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2807 var_to_reg_flt(s1, src, REG_FRESULT);
2808 M_FLTMOVE(s1, REG_FRESULT);
2810 #if defined(USE_THREADS)
2811 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2812 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2813 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, rd->maxmemuse * 8);
2814 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2815 x86_64_call_reg(cd, REG_ITMP1);
2816 x86_64_movq_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_FRESULT);
2820 goto nowperformreturn;
2822 case ICMD_RETURN: /* ... ==> ... */
2824 #if defined(USE_THREADS)
2825 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2826 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2827 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2828 x86_64_call_reg(cd, REG_ITMP1);
2836 p = parentargs_base;
2838 /* call trace function */
2840 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2842 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2843 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2845 x86_64_mov_imm_reg(cd, (s8) m, rd->argintregs[0]);
2846 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
2847 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2848 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2850 x86_64_mov_imm_reg(cd, (s8) builtin_displaymethodstop, REG_ITMP1);
2851 x86_64_call_reg(cd, REG_ITMP1);
2853 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2854 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2856 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2859 /* restore saved registers */
2860 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
2861 p--; x86_64_mov_membase_reg(cd, REG_SP, p * 8, rd->savintregs[i]);
2863 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
2864 p--; x86_64_movq_membase_reg(cd, REG_SP, p * 8, rd->savfltregs[i]);
2867 /* deallocate stack */
2868 if (parentargs_base) {
2869 x86_64_alu_imm_reg(cd, X86_64_ADD, parentargs_base * 8, REG_SP);
2878 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2883 tptr = (void **) iptr->target;
2885 s4ptr = iptr->val.a;
2886 l = s4ptr[1]; /* low */
2887 i = s4ptr[2]; /* high */
2889 var_to_reg_int(s1, src, REG_ITMP1);
2890 M_INTMOVE(s1, REG_ITMP1);
2892 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
2897 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
2898 x86_64_jcc(cd, X86_64_CC_A, 0);
2900 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
2901 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2903 /* build jump table top down and use address of lowest entry */
2905 /* s4ptr += 3 + i; */
2909 /* dseg_addtarget(cd, BlockPtrOfPC(*--s4ptr)); */
2910 dseg_addtarget(cd, (basicblock *) tptr[0]);
2914 /* length of dataseg after last dseg_addtarget is used by load */
2916 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
2917 dseg_adddata(cd, cd->mcodeptr);
2918 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
2919 x86_64_jmp_reg(cd, REG_ITMP1);
2925 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2927 s4 i, l, val, *s4ptr;
2930 tptr = (void **) iptr->target;
2932 s4ptr = iptr->val.a;
2933 l = s4ptr[0]; /* default */
2934 i = s4ptr[1]; /* count */
2936 MCODECHECK((i<<2)+8);
2937 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
2943 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
2944 x86_64_jcc(cd, X86_64_CC_E, 0);
2945 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[1]), cd->mcodeptr); */
2946 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2949 x86_64_jmp_imm(cd, 0);
2950 /* codegen_addreference(cd, BlockPtrOfPC(l), cd->mcodeptr); */
2952 tptr = (void **) iptr->target;
2953 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2960 case ICMD_BUILTIN3: /* ..., arg1, arg2, arg3 ==> ... */
2961 /* op1 = return type, val.a = function pointer*/
2965 case ICMD_BUILTIN2: /* ..., arg1, arg2 ==> ... */
2966 /* op1 = return type, val.a = function pointer*/
2970 case ICMD_BUILTIN1: /* ..., arg1 ==> ... */
2971 /* op1 = return type, val.a = function pointer*/
2975 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2976 /* op1 = arg count, val.a = method pointer */
2978 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2979 /* op1 = arg count, val.a = method pointer */
2981 case ICMD_INVOKEVIRTUAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2982 /* op1 = arg count, val.a = method pointer */
2984 case ICMD_INVOKEINTERFACE:/*.., objectref, [arg1, [arg2 ...]] ==> ... */
2985 /* op1 = arg count, val.a = method pointer */
2995 MCODECHECK((s3 << 1) + 64);
3002 /* copy arguments to registers or stack location */
3003 for (; --s3 >= 0; src = src->prev) {
3004 IS_INT_LNG_TYPE(src->type) ? iarg++ : farg++;
3010 s2 = (iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0 + (farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0;
3012 for (; --s3 >= 0; src = src->prev) {
3013 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
3014 if (src->varkind == ARGVAR) {
3015 if (IS_INT_LNG_TYPE(src->type)) {
3016 if (iarg >= INT_ARG_CNT) {
3020 if (farg >= FLT_ARG_CNT) {
3027 if (IS_INT_LNG_TYPE(src->type)) {
3028 if (iarg < INT_ARG_CNT) {
3029 s1 = rd->argintregs[iarg];
3030 var_to_reg_int(d, src, s1);
3034 var_to_reg_int(d, src, REG_ITMP1);
3036 x86_64_mov_reg_membase(cd, d, REG_SP, s2 * 8);
3040 if (farg < FLT_ARG_CNT) {
3041 s1 = rd->argfltregs[farg];
3042 var_to_reg_flt(d, src, s1);
3046 var_to_reg_flt(d, src, REG_FTMP1);
3048 x86_64_movq_reg_membase(cd, d, REG_SP, s2 * 8);
3054 switch (iptr->opc) {
3062 x86_64_mov_imm_reg(cd, a, REG_ITMP1);
3063 x86_64_call_reg(cd, REG_ITMP1);
3066 case ICMD_INVOKESTATIC:
3068 a = (s8) lm->stubroutine;
3071 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3072 x86_64_call_reg(cd, REG_ITMP2);
3075 case ICMD_INVOKESPECIAL:
3077 a = (s8) lm->stubroutine;
3080 gen_nullptr_check(rd->argintregs[0]); /* first argument contains pointer */
3081 x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); /* access memory for hardware nullptr */
3082 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3083 x86_64_call_reg(cd, REG_ITMP2);
3086 case ICMD_INVOKEVIRTUAL:
3090 gen_nullptr_check(rd->argintregs[0]);
3091 x86_64_mov_membase_reg(cd, rd->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3092 x86_64_mov_membase32_reg(cd, REG_ITMP2, OFFSET(vftbl_t, table[0]) + sizeof(methodptr) * lm->vftblindex, REG_ITMP1);
3093 x86_64_call_reg(cd, REG_ITMP1);
3096 case ICMD_INVOKEINTERFACE:
3101 gen_nullptr_check(rd->argintregs[0]);
3102 x86_64_mov_membase_reg(cd, rd->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3103 x86_64_mov_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, interfacetable[0]) - sizeof(methodptr) * ci->index, REG_ITMP2);
3104 x86_64_mov_membase32_reg(cd, REG_ITMP2, sizeof(methodptr) * (lm - ci->methods), REG_ITMP1);
3105 x86_64_call_reg(cd, REG_ITMP1);
3110 error("Unkown ICMD-Command: %d", iptr->opc);
3113 /* d contains return type */
3115 if (d != TYPE_VOID) {
3116 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3117 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3118 M_INTMOVE(REG_RESULT, s1);
3119 store_reg_to_var_int(iptr->dst, s1);
3122 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3123 M_FLTMOVE(REG_FRESULT, s1);
3124 store_reg_to_var_flt(iptr->dst, s1);
3131 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3133 /* op1: 0 == array, 1 == class */
3134 /* val.a: (classinfo*) superclass */
3136 /* superclass is an interface:
3138 * return (sub != NULL) &&
3139 * (sub->vftbl->interfacetablelength > super->index) &&
3140 * (sub->vftbl->interfacetable[-super->index] != NULL);
3142 * superclass is a class:
3144 * return ((sub != NULL) && (0
3145 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3146 * super->vftbl->diffvall));
3150 classinfo *super = (classinfo*) iptr->val.a;
3152 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3153 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3156 var_to_reg_int(s1, src, REG_ITMP1);
3157 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3159 M_INTMOVE(s1, REG_ITMP1);
3162 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3163 if (iptr->op1) { /* class/interface */
3164 if (super->flags & ACC_INTERFACE) { /* interface */
3165 x86_64_test_reg_reg(cd, s1, s1);
3167 /* TODO: clean up this calculation */
3168 a = 3; /* mov_membase_reg */
3169 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3171 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3172 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3175 CALCIMMEDIATEBYTES(a, super->index);
3180 a += 3; /* mov_membase_reg */
3181 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3186 x86_64_jcc(cd, X86_64_CC_E, a);
3188 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3189 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength), REG_ITMP2);
3190 x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP2);
3191 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3193 /* TODO: clean up this calculation */
3195 a += 3; /* mov_membase_reg */
3196 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3201 x86_64_jcc(cd, X86_64_CC_LE, a);
3202 x86_64_mov_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP1);
3203 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3204 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3206 } else { /* class */
3207 x86_64_test_reg_reg(cd, s1, s1);
3209 /* TODO: clean up this calculation */
3210 a = 3; /* mov_membase_reg */
3211 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3213 a += 10; /* mov_imm_reg */
3215 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3216 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, baseval));
3218 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3219 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3221 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3222 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3229 x86_64_jcc(cd, X86_64_CC_E, a);
3231 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3232 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3233 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3234 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3236 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, baseval), REG_ITMP1);
3237 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP3);
3238 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3239 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3240 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3242 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP1);
3243 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3244 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
3245 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3249 panic("internal error: no inlined array instanceof");
3251 store_reg_to_var_int(iptr->dst, d);
3254 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3256 /* op1: 0 == array, 1 == class */
3257 /* val.a: (classinfo*) superclass */
3259 /* superclass is an interface:
3261 * OK if ((sub == NULL) ||
3262 * (sub->vftbl->interfacetablelength > super->index) &&
3263 * (sub->vftbl->interfacetable[-super->index] != NULL));
3265 * superclass is a class:
3267 * OK if ((sub == NULL) || (0
3268 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3269 * super->vftbl->diffvall));
3273 classinfo *super = (classinfo*) iptr->val.a;
3275 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3276 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3278 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3279 var_to_reg_int(s1, src, d);
3280 if (iptr->op1) { /* class/interface */
3281 if (super->flags & ACC_INTERFACE) { /* interface */
3282 x86_64_test_reg_reg(cd, s1, s1);
3284 /* TODO: clean up this calculation */
3285 a = 3; /* mov_membase_reg */
3286 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3288 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3289 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3292 CALCIMMEDIATEBYTES(a, super->index);
3297 a += 3; /* mov_membase_reg */
3298 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3303 x86_64_jcc(cd, X86_64_CC_E, a);
3305 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3306 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength), REG_ITMP2);
3307 x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP2);
3308 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3309 x86_64_jcc(cd, X86_64_CC_LE, 0);
3310 codegen_addxcastrefs(cd, cd->mcodeptr);
3311 x86_64_mov_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP2);
3312 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3313 x86_64_jcc(cd, X86_64_CC_E, 0);
3314 codegen_addxcastrefs(cd, cd->mcodeptr);
3316 } else { /* class */
3317 x86_64_test_reg_reg(cd, s1, s1);
3319 /* TODO: clean up this calculation */
3320 a = 3; /* mov_membase_reg */
3321 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3322 a += 10; /* mov_imm_reg */
3323 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3324 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, baseval));
3326 if (d != REG_ITMP3) {
3327 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3328 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3329 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3330 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3334 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3335 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3337 a += 10; /* mov_imm_reg */
3338 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3339 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3345 x86_64_jcc(cd, X86_64_CC_E, a);
3347 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3348 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3349 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3350 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3352 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, baseval), REG_ITMP1);
3353 if (d != REG_ITMP3) {
3354 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP3);
3355 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3356 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3357 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3359 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP1);
3362 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP2);
3363 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
3364 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3365 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3366 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3367 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3370 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
3371 x86_64_jcc(cd, X86_64_CC_A, 0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3372 codegen_addxcastrefs(cd, cd->mcodeptr);
3376 panic("internal error: no inlined array checkcast");
3379 store_reg_to_var_int(iptr->dst, d);
3382 case ICMD_CHECKASIZE: /* ..., size ==> ..., size */
3384 if (src->flags & INMEMORY) {
3385 x86_64_alul_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
3388 x86_64_testl_reg_reg(cd, src->regoff, src->regoff);
3390 x86_64_jcc(cd, X86_64_CC_L, 0);
3391 codegen_addxcheckarefs(cd, cd->mcodeptr);
3394 case ICMD_CHECKEXCEPTION: /* ... ==> ... */
3396 x86_64_test_reg_reg(cd, REG_RESULT, REG_RESULT);
3397 x86_64_jcc(cd, X86_64_CC_E, 0);
3398 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3401 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3402 /* op1 = dimension, val.a = array descriptor */
3404 /* check for negative sizes and copy sizes to stack if necessary */
3406 MCODECHECK((iptr->op1 << 1) + 64);
3408 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3409 var_to_reg_int(s2, src, REG_ITMP1);
3410 x86_64_testl_reg_reg(cd, s2, s2);
3411 x86_64_jcc(cd, X86_64_CC_L, 0);
3412 codegen_addxcheckarefs(cd, cd->mcodeptr);
3414 /* copy sizes to stack (argument numbers >= INT_ARG_CNT) */
3416 if (src->varkind != ARGVAR) {
3417 x86_64_mov_reg_membase(cd, s2, REG_SP, (s1 + INT_ARG_CNT) * 8);
3421 /* a0 = dimension count */
3422 x86_64_mov_imm_reg(cd, iptr->op1, rd->argintregs[0]);
3424 /* a1 = arraydescriptor */
3425 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, rd->argintregs[1]);
3427 /* a2 = pointer to dimensions = stack pointer */
3428 x86_64_mov_reg_reg(cd, REG_SP, rd->argintregs[2]);
3430 x86_64_mov_imm_reg(cd, (s8) builtin_nmultianewarray, REG_ITMP1);
3431 x86_64_call_reg(cd, REG_ITMP1);
3433 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3434 M_INTMOVE(REG_RESULT, s1);
3435 store_reg_to_var_int(iptr->dst, s1);
3439 throw_cacao_exception_exit(string_java_lang_InternalError,
3440 "Unknown ICMD %d", iptr->opc);
3443 } /* for instruction */
3445 /* copy values to interface registers */
3447 src = bptr->outstack;
3448 len = bptr->outdepth;
3449 MCODECHECK(64 + len);
3452 if ((src->varkind != STACKVAR)) {
3454 if (IS_FLT_DBL_TYPE(s2)) {
3455 var_to_reg_flt(s1, src, REG_FTMP1);
3456 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3457 M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3460 x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3464 var_to_reg_int(s1, src, REG_ITMP1);
3465 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3466 M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3469 x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3475 } /* if (bptr -> flags >= BBREACHED) */
3476 } /* for basic block */
3480 /* generate bound check stubs */
3482 u1 *xcodeptr = NULL;
3485 for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3486 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3488 cd->mcodeptr - cd->mcodebase);
3492 /* move index register into REG_ITMP1 */
3493 x86_64_mov_reg_reg(cd, bref->reg, REG_ITMP1); /* 3 bytes */
3495 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3496 dseg_adddata(cd, cd->mcodeptr);
3497 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3498 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3500 if (xcodeptr != NULL) {
3501 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3504 xcodeptr = cd->mcodeptr;
3506 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3507 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3509 x86_64_mov_reg_reg(cd, REG_ITMP1, rd->argintregs[0]);
3510 x86_64_mov_imm_reg(cd, (s8) new_arrayindexoutofboundsexception, REG_ITMP3);
3511 x86_64_call_reg(cd, REG_ITMP3);
3513 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3514 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3516 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3517 x86_64_jmp_reg(cd, REG_ITMP3);
3521 /* generate negative array size check stubs */
3525 for (bref = cd->xcheckarefs; bref != NULL; bref = bref->next) {
3526 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3527 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3529 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3533 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3535 cd->mcodeptr - cd->mcodebase);
3539 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3540 dseg_adddata(cd, cd->mcodeptr);
3541 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3542 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3544 if (xcodeptr != NULL) {
3545 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3548 xcodeptr = cd->mcodeptr;
3550 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3551 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3553 x86_64_mov_imm_reg(cd, (s8) new_negativearraysizeexception, REG_ITMP3);
3554 x86_64_call_reg(cd, REG_ITMP3);
3556 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3557 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3559 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3560 x86_64_jmp_reg(cd, REG_ITMP3);
3564 /* generate cast check stubs */
3568 for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3569 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3570 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3572 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3576 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3578 cd->mcodeptr - cd->mcodebase);
3582 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3583 dseg_adddata(cd, cd->mcodeptr);
3584 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3585 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3587 if (xcodeptr != NULL) {
3588 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3591 xcodeptr = cd->mcodeptr;
3593 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3594 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3596 x86_64_mov_imm_reg(cd, (s8) new_classcastexception, REG_ITMP3);
3597 x86_64_call_reg(cd, REG_ITMP3);
3599 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3600 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3602 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3603 x86_64_jmp_reg(cd, REG_ITMP3);
3607 /* generate divide by zero check stubs */
3611 for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
3612 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3613 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3615 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3619 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3621 cd->mcodeptr - cd->mcodebase);
3625 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3626 dseg_adddata(cd, cd->mcodeptr);
3627 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3628 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3630 if (xcodeptr != NULL) {
3631 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3634 xcodeptr = cd->mcodeptr;
3636 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3637 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3639 x86_64_mov_imm_reg(cd, (u8) new_arithmeticexception, REG_ITMP3);
3640 x86_64_call_reg(cd, REG_ITMP3);
3642 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3643 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3645 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3646 x86_64_jmp_reg(cd, REG_ITMP3);
3650 /* generate exception check stubs */
3654 for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
3655 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3656 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3658 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3662 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3664 cd->mcodeptr - cd->mcodebase);
3668 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3669 dseg_adddata(cd, cd->mcodeptr);
3670 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3671 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3673 if (xcodeptr != NULL) {
3674 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3677 xcodeptr = cd->mcodeptr;
3679 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3680 x86_64_alu_imm_reg(cd, X86_64_SUB, 8, REG_SP);
3681 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0);
3682 x86_64_mov_imm_reg(cd, (u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3683 x86_64_call_reg(cd, REG_ITMP1);
3684 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
3685 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
3686 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
3687 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);
3688 x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
3690 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
3691 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP1_XPTR);
3692 x86_64_mov_imm_membase(cd, 0, REG_ITMP3, 0);
3695 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3696 x86_64_jmp_reg(cd, REG_ITMP3);
3700 /* generate null pointer check stubs */
3704 for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
3705 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3706 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3708 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3712 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3714 cd->mcodeptr - cd->mcodebase);
3718 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3719 dseg_adddata(cd, cd->mcodeptr);
3720 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3721 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3723 if (xcodeptr != NULL) {
3724 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3727 xcodeptr = cd->mcodeptr;
3729 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3730 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3732 x86_64_mov_imm_reg(cd, (s8) new_nullpointerexception, REG_ITMP3);
3733 x86_64_call_reg(cd, REG_ITMP3);
3735 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3736 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3738 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3739 x86_64_jmp_reg(cd, REG_ITMP3);
3743 /* generate put/getstatic stub call code */
3751 tmpcd = DNEW(codegendata);
3753 for (cref = cd->clinitrefs; cref != NULL; cref = cref->next) {
3754 /* Get machine code which is patched back in later. A */
3755 /* `call rel32' is 5 bytes long. */
3756 xcodeptr = cd->mcodebase + cref->branchpos;
3758 mcode = *((u4 *) (xcodeptr + 1));
3762 /* patch in `call rel32' to call the following code */
3763 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
3764 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
3766 /* Save current stack pointer into a temporary register. */
3767 x86_64_mov_reg_reg(cd, REG_SP, REG_ITMP1);
3769 /* Push machine code bytes to patch onto the stack. */
3770 x86_64_push_imm(cd, (u1) xmcode);
3771 x86_64_push_imm(cd, (u4) mcode);
3773 x86_64_push_imm(cd, (u8) cref->class);
3775 /* Push previously saved stack pointer onto stack. */
3776 x86_64_push_reg(cd, REG_ITMP1);
3778 x86_64_mov_imm_reg(cd, (u8) asm_check_clinit, REG_ITMP1);
3779 x86_64_jmp_reg(cd, REG_ITMP1);
3784 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
3788 /* function createcompilerstub *************************************************
3790 creates a stub routine which calls the compiler
3792 *******************************************************************************/
3794 #define COMPSTUBSIZE 23
/* Build a tiny trampoline for a not-yet-compiled method: on first call it
   loads the methodinfo pointer into REG_ITMP1 and jumps to the assembler
   entry asm_call_jit_compiler, which invokes the JIT and then re-dispatches
   to the freshly compiled code.  Returns a pointer to the stub's code. */
u1 *createcompilerstub(methodinfo *m)
	u1 *s = CNEW(u1, COMPSTUBSIZE);     /* memory to hold the stub        */

	/* mark start of dump memory area */

	dumpsize = dump_size();

	cd = DNEW(codegendata);             /* temporary codegendata on the dump area */

	/* code for the stub */
	x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1);  /* pass method to compiler */
	x86_64_mov_imm_reg(cd, (u8) asm_call_jit_compiler, REG_ITMP3);/* load address */
	x86_64_jmp_reg(cd, REG_ITMP3);      /* jump to compiler               */

#if defined(STATISTICS)
	count_cstub_len += COMPSTUBSIZE;

	/* release dump area */

	dump_release(dumpsize);
3827 /* function removecompilerstub *************************************************
3829 deletes a compilerstub from memory (simply by freeing it)
3831 *******************************************************************************/
/* Free a compiler stub previously allocated by createcompilerstub().
   COMPSTUBSIZE must match the size passed to CNEW at creation time. */
void removecompilerstub(u1 *stub)
	CFREE(stub, COMPSTUBSIZE);
3839 /* function: createnativestub **************************************************
3841 creates a stub routine which calls a native method
3843 *******************************************************************************/
3845 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3846 /* static java_objectheader **(*callgetexceptionptrptr)() = builtin_get_exceptionptrptr; */
3849 #define NATIVESTUBSIZE 420
/* Build a machine-code stub that bridges from the Java calling convention
   to the native implementation f of method m.  The stub:
     - registers a class-initialization patch for uninitialized static classes,
     - optionally traces the call (verbose mode), saving/restoring all
       argument registers around builtin_trace_args,
     - lazily resolves the native function via codegen_resolve_native when
       f == 0 (non-STATIC_CLASSPATH builds), patching the call target and a
       skip-jump in place,
     - shifts the Java arguments right to make room for the JNI env pointer
       (and the class pointer for static methods),
     - calls the native function and afterwards checks for a pending
       exception, dispatching to asm_handle_nat_exception if one is set.
   Returns a pointer to the stub's code. */
u1 *createnativestub(functionptr f, methodinfo *m)
	u1 *s = CNEW(u1, NATIVESTUBSIZE);   /* memory to hold the stub        */
	s4 stackframesize;                  /* size of stackframe if needed   */
	t_inlining_globals *id;
	void **callAddrPatchPos=0;          /* slot that receives the resolved native address */
	void **jmpInstrPatchPos=0;          /* slot that receives the jump displacement over the resolver block */

	/* mark start of dump memory area */

	dumpsize = dump_size();

	cd = DNEW(codegendata);
	rd = DNEW(registerdata);
	id = DNEW(t_inlining_globals);

	/* setup registers before using it */

	inlining_setup(m, id);
	reg_setup(m, rd, id);

	/* set some required variables which are normally set by codegen_setup */

	cd->clinitrefs = NULL;

	descriptor2types(m);                /* set paramcount and paramtypes  */

	/* if function is static, check for initialized */

	if (m->flags & ACC_STATIC) {
		/* if class isn't yet initialized, do it */
		if (!m->class->initialized) {
			codegen_addclinitref(cd, cd->mcodeptr, m->class);

	/* Save all integer and float argument registers (plus one slot for the
	   methodinfo pointer) before calling the trace helper, which clobbers
	   the argument registers. */
	x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);

	x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 1 * 8);
	x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 2 * 8);
	x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 3 * 8);
	x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 4 * 8);
	x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 5 * 8);
	x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 6 * 8);

	x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 7 * 8);
	x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 8 * 8);
	x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 9 * 8);
	x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 10 * 8);
	/* x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 11 * 8); */
	/* x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 12 * 8); */
	/* x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 13 * 8); */
	/* x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 14 * 8); */

	/* show integer hex code for float arguments */
	for (p = 0, l = 0; p < m->paramcount; p++) {
		if (IS_FLT_DBL_TYPE(m->paramtypes[p])) {
			/* shift the following integer arguments up by one register to
			   make room for the float's raw bits */
			for (s1 = (m->paramcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : m->paramcount - 2; s1 >= p; s1--) {
				x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);

			/* move raw float bits into the integer register for printing */
			x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);

	x86_64_mov_imm_reg(cd, (s8) m, REG_ITMP1);
	x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, 0 * 8);
	x86_64_mov_imm_reg(cd, (s8) builtin_trace_args, REG_ITMP1);
	x86_64_call_reg(cd, REG_ITMP1);

	/* call method to resolve native function if needed */
#ifndef STATIC_CLASSPATH
	if (f==0) { /* only if not already resolved */
		/* Emit a jmp with displacement 0; once resolution has run, the
		   displacement is patched so later calls skip this whole block. */
		x86_64_jmp_imm(cd,0);
		jmpInstrPos=cd->mcodeptr-4; /*needed to patch a jump over this block*/
		x86_64_mov_imm_reg(cd,(u8)m,rd->argintregs[0]);
		x86_64_mov_imm_reg(cd,0,rd->argintregs[1]);
		callAddrPatchPos=cd->mcodeptr-8; /* at this position the place is specified where the native function address should be patched into*/
		x86_64_mov_imm_reg(cd,0,rd->argintregs[2]);
		jmpInstrPatchPos=cd->mcodeptr-8;
		x86_64_mov_imm_reg(cd,jmpInstrPos,rd->argintregs[3]);
		x86_64_mov_imm_reg(cd,(s8)codegen_resolve_native,REG_ITMP1);
		x86_64_call_reg(cd,REG_ITMP1);
		*(jmpInstrPatchPos)=cd->mcodeptr-jmpInstrPos-1; /*=opcode jmp_imm size*/

	/* restore the argument registers saved before the trace call */
	x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[0]);
	x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[1]);
	x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[2]);
	x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[3]);
	x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[4]);
	x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, rd->argintregs[5]);

	x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[0]);
	x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[1]);
	x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[2]);
	x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[3]);
	/* x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[4]); */
	/* x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[5]); */
	/* x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[6]); */
	/* x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, rd->argfltregs[7]); */

	x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);

	x86_64_alu_imm_reg(cd, X86_64_SUB, 7 * 8, REG_SP);    /* keep stack 16-byte aligned */

	/* save callee saved float registers */
	x86_64_movq_reg_membase(cd, XMM15, REG_SP, 0 * 8);
	x86_64_movq_reg_membase(cd, XMM14, REG_SP, 1 * 8);
	x86_64_movq_reg_membase(cd, XMM13, REG_SP, 2 * 8);
	x86_64_movq_reg_membase(cd, XMM12, REG_SP, 3 * 8);
	x86_64_movq_reg_membase(cd, XMM11, REG_SP, 4 * 8);
	x86_64_movq_reg_membase(cd, XMM10, REG_SP, 5 * 8);

	/* save argument registers on stack -- if we have to */
	/* static methods lose 2 register slots (env + class), instance methods
	   lose 1 (env), so the overflow threshold differs */
	if ((m->flags & ACC_STATIC && m->paramcount > (INT_ARG_CNT - 2)) || m->paramcount > (INT_ARG_CNT - 1)) {
		s4 paramshiftcnt = (m->flags & ACC_STATIC) ? 2 : 1;
		s4 stackparamcnt = (m->paramcount > INT_ARG_CNT) ? m->paramcount - INT_ARG_CNT : 0;

		stackframesize = stackparamcnt + paramshiftcnt;

		/* keep stack 16-byte aligned */
		if (!(stackframesize & 0x1))

		x86_64_alu_imm_reg(cd, X86_64_SUB, stackframesize * 8, REG_SP);

		/* copy stack arguments into new stack frame -- if any */
		for (i = 0; i < stackparamcnt; i++) {
			x86_64_mov_membase_reg(cd, REG_SP, (stackparamcnt + 1 + i) * 8, REG_ITMP1);
			x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);

		/* spill the register arguments displaced by env (and class) */
		if (m->flags & ACC_STATIC) {
			x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 1 * 8);
			x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 0 * 8);

			x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 0 * 8);

		/* keep stack 16-byte aligned */
		x86_64_alu_imm_reg(cd, X86_64_SUB, 8, REG_SP);

	/* shift integer argument registers right to make room for the JNI env
	   pointer (and, for static methods, the class pointer) */
	if (m->flags & ACC_STATIC) {
		x86_64_mov_reg_reg(cd, rd->argintregs[3], rd->argintregs[5]);
		x86_64_mov_reg_reg(cd, rd->argintregs[2], rd->argintregs[4]);
		x86_64_mov_reg_reg(cd, rd->argintregs[1], rd->argintregs[3]);
		x86_64_mov_reg_reg(cd, rd->argintregs[0], rd->argintregs[2]);

		/* put class into second argument register */
		x86_64_mov_imm_reg(cd, (u8) m->class, rd->argintregs[1]);

		x86_64_mov_reg_reg(cd, rd->argintregs[4], rd->argintregs[5]);
		x86_64_mov_reg_reg(cd, rd->argintregs[3], rd->argintregs[4]);
		x86_64_mov_reg_reg(cd, rd->argintregs[2], rd->argintregs[3]);
		x86_64_mov_reg_reg(cd, rd->argintregs[1], rd->argintregs[2]);
		x86_64_mov_reg_reg(cd, rd->argintregs[0], rd->argintregs[1]);

	/* put env into first argument register */
	x86_64_mov_imm_reg(cd, (u8) &env, rd->argintregs[0]);

	x86_64_mov_imm_reg(cd, (u8) f, REG_ITMP1);
#ifndef STATIC_CLASSPATH
	/* remember where the 8-byte immediate of the mov above sits, so the
	   resolver can patch the real native address into it later */
	(*callAddrPatchPos)=cd->mcodeptr-8;

	x86_64_call_reg(cd, REG_ITMP1);

	/* remove stackframe if there is one */
	if (stackframesize) {
		x86_64_alu_imm_reg(cd, X86_64_ADD, stackframesize * 8, REG_SP);

	x86_64_alu_imm_reg(cd, X86_64_SUB, 3 * 8, REG_SP);    /* keep stack 16-byte aligned */

	/* preserve both possible return values across the trace-stop call */
	x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
	x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);

	x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
	x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
	/* NOTE(review): the float result is passed in both float argument
	   slots -- presumably one is read as float and one as double by
	   builtin_displaymethodstop; confirm against its signature. */
	M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
	M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);

	x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
	x86_64_call_reg(cd, REG_ITMP1);

	x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
	x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);

	x86_64_alu_imm_reg(cd, X86_64_ADD, 3 * 8, REG_SP);    /* keep stack 16-byte aligned */

	/* restore callee saved registers */
	x86_64_movq_membase_reg(cd, REG_SP, 0 * 8, XMM15);
	x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, XMM14);
	x86_64_movq_membase_reg(cd, REG_SP, 2 * 8, XMM13);
	x86_64_movq_membase_reg(cd, REG_SP, 3 * 8, XMM12);
	x86_64_movq_membase_reg(cd, REG_SP, 4 * 8, XMM11);
	x86_64_movq_membase_reg(cd, REG_SP, 5 * 8, XMM10);

	x86_64_alu_imm_reg(cd, X86_64_ADD, 7 * 8, REG_SP);    /* keep stack 16-byte aligned */

	/* check for a pending exception set by the native code */
#if defined(USE_THREADS) && defined(NATIVE_THREADS)
	x86_64_push_reg(cd, REG_RESULT);
	/* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
	x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
	x86_64_call_reg(cd, REG_ITMP3);
	x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
	x86_64_pop_reg(cd, REG_RESULT);

	x86_64_mov_imm_reg(cd, (s8) &_exceptionptr, REG_ITMP3);
	x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP3);

	x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
	x86_64_jcc(cd, X86_64_CC_NE, 1);        /* exception set: skip the 1-byte ret */

	/* exception path: fetch and clear the exception pointer, then hand
	   control to the assembler exception handler */
#if defined(USE_THREADS) && defined(NATIVE_THREADS)
	x86_64_push_reg(cd, REG_ITMP3);
	/* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
	x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
	x86_64_call_reg(cd, REG_ITMP3);
	x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
	x86_64_pop_reg(cd, REG_ITMP1_XPTR);

	x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
	x86_64_mov_imm_reg(cd, (s8) &_exceptionptr, REG_ITMP3);
	x86_64_alu_reg_reg(cd, X86_64_XOR, REG_ITMP2, REG_ITMP2);
	x86_64_mov_reg_membase(cd, REG_ITMP2, REG_ITMP3, 0);  /* clear exception pointer */

	x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC); /* get return address from stack */
	x86_64_alu_imm_reg(cd, X86_64_SUB, 3, REG_ITMP2_XPC); /* callq */

	x86_64_mov_imm_reg(cd, (s8) asm_handle_nat_exception, REG_ITMP3);
	x86_64_jmp_reg(cd, REG_ITMP3);

	/* patch in the class-initialization stub for static methods */
	tmpcd = DNEW(codegendata);

	/* there can only be one clinit ref entry */
	cref = cd->clinitrefs;

	/* Get machine code which is patched back in later. A                 */
	/* `call rel32' is 5 bytes long.                                      */
	xcodeptr = cd->mcodebase + cref->branchpos;

	mcode = *((u4 *) (xcodeptr + 1));

	/* patch in `call rel32' to call the following code */
	tmpcd->mcodeptr = xcodeptr;         /* set dummy mcode pointer        */
	x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));

	/* Save current stack pointer into a temporary register. */
	x86_64_mov_reg_reg(cd, REG_SP, REG_ITMP1);

	/* Push machine code bytes to patch onto the stack. */
	x86_64_push_imm(cd, (u1) xmcode);
	x86_64_push_imm(cd, (u4) mcode);

	x86_64_push_imm(cd, (u8) cref->class);

	/* Push previously saved stack pointer onto stack. */
	x86_64_push_reg(cd, REG_ITMP1);

	x86_64_mov_imm_reg(cd, (u8) asm_check_clinit, REG_ITMP1);
	x86_64_jmp_reg(cd, REG_ITMP1);

	static int stubprinted;
	/* NOTE(review): "%d" is used with a long-typed difference; on LP64
	   this mismatches the format specifier -- should be "%ld". */
	printf("stubsize: %d\n", ((long) cd->mcodeptr - (long) s));

#if defined(STATISTICS)
	count_nstub_len += NATIVESTUBSIZE;

	/* release dump area */

	dump_release(dumpsize);
4176 /* function: removenativestub **************************************************
4178 removes a previously created native-stub from memory
4180 *******************************************************************************/
/* Free a native stub previously allocated by createnativestub().
   NATIVESTUBSIZE must match the size passed to CNEW at creation time. */
void removenativestub(u1 *stub)
	CFREE(stub, NATIVESTUBSIZE);
4189 * These are local overrides for various environment variables in Emacs.
4190 * Please do not remove this and leave it at the end of the file, where
4191 * Emacs will automagically detect them.
4192 * ---------------------------------------------------------------------
4195 * indent-tabs-mode: t