1 /* jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003
4 Institut f. Computersprachen, TU Wien
5 R. Grafl, A. Krall, C. Kruegel, C. Oates, R. Obermaisser, M. Probst,
6 S. Ring, E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich,
9 This file is part of CACAO.
11 This program is free software; you can redistribute it and/or
12 modify it under the terms of the GNU General Public License as
13 published by the Free Software Foundation; either version 2, or (at
14 your option) any later version.
16 This program is distributed in the hope that it will be useful, but
17 WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 General Public License for more details.
21 You should have received a copy of the GNU General Public License
22 along with this program; if not, write to the Free Software
23 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
26 Contact: cacao@complang.tuwien.ac.at
28 Authors: Andreas Krall
31 $Id: codegen.c 1451 2004-11-05 14:14:15Z twisti $
40 #include <sys/ucontext.h>
49 #include "jit/parse.h"
50 #include "jit/x86_64/codegen.h"
51 #include "jit/x86_64/emitfuncs.h"
52 #include "jit/x86_64/types.h"
55 /* register description - array **********************************************/
57 /* #define REG_RES 0 reserved register for OS or code generator */
58 /* #define REG_RET 1 return value register */
59 /* #define REG_EXC 2 exception value register (only old jit) */
60 /* #define REG_SAV 3 (callee) saved register */
61 /* #define REG_TMP 4 scratch temporary register (caller saved) */
62 /* #define REG_ARG 5 argument register (caller saved) */
64 /* #define REG_END -1 last entry in tables */
67 REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
68 REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
73 int nregdescfloat[] = {
74 /* REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP, */
75 /* REG_RES, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV, REG_SAV, */
76 REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
77 REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
82 /* include independent code generation stuff -- include after register */
83 /* descriptions to avoid extern definitions */
85 #include "jit/codegen.inc"
86 #include "jit/reg.inc"
89 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
90 void thread_restartcriticalsection(ucontext_t *uc)
94 critical = thread_checkcritical((void *) uc->uc_mcontext.gregs[REG_RIP]);
97 uc->uc_mcontext.gregs[REG_RIP] = (u8) critical;
102 /* NullPointerException signal handler for hardware null pointer check */
104 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
108 /* long faultaddr; */
110 struct ucontext *_uc = (struct ucontext *) _p;
111 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
112 struct sigaction act;
113 java_objectheader *xptr;
115 /* Reset signal handler - necessary for SysV, does no harm for BSD */
118 /* instr = *((int*)(sigctx->rip)); */
119 /* faultaddr = sigctx->sc_regs[(instr >> 16) & 0x1f]; */
121 /* if (faultaddr == 0) { */
122 act.sa_sigaction = (void *) catch_NullPointerException; /* reinstall handler */
123 act.sa_flags = SA_SIGINFO;
124 sigaction(sig, &act, NULL);
127 sigaddset(&nsig, sig);
128 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
130 xptr = new_nullpointerexception();
132 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
133 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
134 sigctx->rip = (u8) asm_handle_exception;
139 /* faultaddr += (long) ((instr << 16) >> 16); */
140 /* fprintf(stderr, "faulting address: 0x%08x\n", faultaddr); */
141 /* panic("Stack overflow"); */
146 /* ArithmeticException signal handler for hardware divide by zero check */
148 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
152 struct ucontext *_uc = (struct ucontext *) _p;
153 struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
154 struct sigaction act;
155 java_objectheader *xptr;
157 /* Reset signal handler - necessary for SysV, does no harm for BSD */
159 act.sa_sigaction = (void *) catch_ArithmeticException; /* reinstall handler */
160 act.sa_flags = SA_SIGINFO;
161 sigaction(sig, &act, NULL);
164 sigaddset(&nsig, sig);
165 sigprocmask(SIG_UNBLOCK, &nsig, NULL); /* unblock signal */
167 xptr = new_arithmeticexception();
169 sigctx->rax = (u8) xptr; /* REG_ITMP1_XPTR */
170 sigctx->r10 = sigctx->rip; /* REG_ITMP2_XPC */
171 sigctx->rip = (u8) asm_handle_exception;
177 void init_exceptions(void)
179 struct sigaction act;
181 /* install signal handlers we need to convert to exceptions */
182 sigemptyset(&act.sa_mask);
186 act.sa_sigaction = (void *) catch_NullPointerException;
187 act.sa_flags = SA_SIGINFO;
188 sigaction(SIGSEGV, &act, NULL);
192 act.sa_sigaction = (void *) catch_NullPointerException;
193 act.sa_flags = SA_SIGINFO;
194 sigaction(SIGBUS, &act, NULL);
198 act.sa_sigaction = (void *) catch_ArithmeticException;
199 act.sa_flags = SA_SIGINFO;
200 sigaction(SIGFPE, &act, NULL);
204 /* function gen_mcode **********************************************************
206 generates machine code
208 *******************************************************************************/
210 void codegen(methodinfo *m, codegendata *cd, registerdata *rd)
212 s4 len, s1, s2, s3, d;
227 /* space to save used callee saved registers */
229 savedregs_num += (rd->savintregcnt - rd->maxsavintreguse);
230 savedregs_num += (rd->savfltregcnt - rd->maxsavfltreguse);
232 parentargs_base = rd->maxmemuse + savedregs_num;
234 #if defined(USE_THREADS) /* space to save argument of monitor_enter */
236 if (checksync && (m->flags & ACC_SYNCHRONIZED))
241 /* keep stack 16-byte aligned for calls into native code e.g. libc or jni */
242 /* (alignment problems with movaps) */
244 if (!(parentargs_base & 0x1)) {
248 /* create method header */
250 (void) dseg_addaddress(cd, m); /* MethodPointer */
251 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
253 #if defined(USE_THREADS)
255 /* IsSync contains the offset relative to the stack pointer for the
256 argument of monitor_exit used in the exception handler. Since the
257 offset could be zero and give a wrong meaning of the flag it is
261 if (checksync && (m->flags & ACC_SYNCHRONIZED))
262 (void) dseg_adds4(cd, (rd->maxmemuse + 1) * 8); /* IsSync */
267 (void) dseg_adds4(cd, 0); /* IsSync */
269 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
270 (void) dseg_adds4(cd, rd->savintregcnt - rd->maxsavintreguse);/* IntSave */
271 (void) dseg_adds4(cd, rd->savfltregcnt - rd->maxsavfltreguse);/* FltSave */
272 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
274 /* create exception table */
276 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
277 dseg_addtarget(cd, ex->start);
278 dseg_addtarget(cd, ex->end);
279 dseg_addtarget(cd, ex->handler);
280 (void) dseg_addaddress(cd, ex->catchtype);
283 /* initialize mcode variables */
285 cd->mcodeptr = (u1 *) cd->mcodebase;
286 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
287 MCODECHECK(128 + m->paramcount);
289 /* create stack frame (if necessary) */
291 if (parentargs_base) {
292 x86_64_alu_imm_reg(cd, X86_64_SUB, parentargs_base * 8, REG_SP);
295 /* save return address and used callee saved registers */
298 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
299 p--; x86_64_mov_reg_membase(cd, rd->savintregs[i], REG_SP, p * 8);
301 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
302 p--; x86_64_movq_reg_membase(cd, rd->savfltregs[i], REG_SP, p * 8);
305 /* save monitorenter argument */
307 #if defined(USE_THREADS)
308 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
309 if (m->flags & ACC_STATIC) {
310 x86_64_mov_imm_reg(cd, (s8) m->class, REG_ITMP1);
311 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, rd->maxmemuse * 8);
314 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, rd->maxmemuse * 8);
319 /* copy argument registers to stack and call trace function with pointer
320 to arguments on stack.
323 x86_64_alu_imm_reg(cd, X86_64_SUB, (6 + 8 + 1 + 1) * 8, REG_SP);
325 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 1 * 8);
326 x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 2 * 8);
327 x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 3 * 8);
328 x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 4 * 8);
329 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 5 * 8);
330 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 6 * 8);
332 x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 7 * 8);
333 x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 8 * 8);
334 x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 9 * 8);
335 x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 10 * 8);
336 /* x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 11 * 8); */
337 /* x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 12 * 8); */
338 /* x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 13 * 8); */
339 /* x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 14 * 8); */
341 for (p = 0, l = 0; p < m->paramcount; p++) {
342 t = m->paramtypes[p];
344 if (IS_FLT_DBL_TYPE(t)) {
345 for (s1 = (m->paramcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : m->paramcount - 2; s1 >= p; s1--) {
346 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
349 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
354 x86_64_mov_imm_reg(cd, (s8) m, REG_ITMP2);
355 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
356 x86_64_mov_imm_reg(cd, (s8) builtin_trace_args, REG_ITMP1);
357 x86_64_call_reg(cd, REG_ITMP1);
359 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[0]);
360 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[1]);
361 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[2]);
362 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[3]);
363 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[4]);
364 x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, rd->argintregs[5]);
366 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[0]);
367 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[1]);
368 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[2]);
369 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[3]);
370 /* x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[4]); */
371 /* x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[5]); */
372 /* x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[6]); */
373 /* x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, rd->argfltregs[7]); */
375 x86_64_alu_imm_reg(cd, X86_64_ADD, (6 + 8 + 1 + 1) * 8, REG_SP);
378 /* take arguments out of register or stack frame */
380 for (p = 0, l = 0, s1 = 0, s2 = 0; p < m->paramcount; p++) {
381 t = m->paramtypes[p];
382 var = &(rd->locals[l][t]);
384 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
387 if (IS_INT_LNG_TYPE(t)) {
394 if (IS_INT_LNG_TYPE(t)) { /* integer args */
395 if (s1 < INT_ARG_CNT) { /* register arguments */
396 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
397 M_INTMOVE(rd->argintregs[s1], var->regoff);
399 } else { /* reg arg -> spilled */
400 x86_64_mov_reg_membase(cd, rd->argintregs[s1], REG_SP, var->regoff * 8);
403 } else { /* stack arguments */
404 pa = s1 - INT_ARG_CNT;
405 if (s2 >= FLT_ARG_CNT) {
406 pa += s2 - FLT_ARG_CNT;
408 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
409 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff); /* + 8 for return address */
410 } else { /* stack arg -> spilled */
411 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1); /* + 8 for return address */
412 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, var->regoff * 8);
417 } else { /* floating args */
418 if (s2 < FLT_ARG_CNT) { /* register arguments */
419 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
420 M_FLTMOVE(rd->argfltregs[s2], var->regoff);
422 } else { /* reg arg -> spilled */
423 x86_64_movq_reg_membase(cd, rd->argfltregs[s2], REG_SP, var->regoff * 8);
426 } else { /* stack arguments */
427 pa = s2 - FLT_ARG_CNT;
428 if (s1 >= INT_ARG_CNT) {
429 pa += s1 - INT_ARG_CNT;
431 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
432 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff);
435 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
436 x86_64_movq_reg_membase(cd, REG_FTMP1, REG_SP, var->regoff * 8);
443 /* call monitorenter function */
445 #if defined(USE_THREADS)
446 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
447 s8 func_enter = (m->flags & ACC_STATIC) ?
448 (s8) builtin_staticmonitorenter : (s8) builtin_monitorenter;
449 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
450 x86_64_mov_imm_reg(cd, func_enter, REG_ITMP1);
451 x86_64_call_reg(cd, REG_ITMP1);
456 /* end of header generation */
458 /* walk through all basic blocks */
459 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
461 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
463 if (bptr->flags >= BBREACHED) {
465 /* branch resolving */
468 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
469 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
474 /* copy interface registers to their destination */
478 MCODECHECK(64 + len);
479 while (src != NULL) {
481 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
482 if (bptr->type == BBTYPE_SBR) {
483 d = reg_of_var(rd, src, REG_ITMP1);
484 x86_64_pop_reg(cd, d);
485 store_reg_to_var_int(src, d);
487 } else if (bptr->type == BBTYPE_EXH) {
488 d = reg_of_var(rd, src, REG_ITMP1);
489 M_INTMOVE(REG_ITMP1, d);
490 store_reg_to_var_int(src, d);
494 d = reg_of_var(rd, src, REG_ITMP1);
495 if ((src->varkind != STACKVAR)) {
497 if (IS_FLT_DBL_TYPE(s2)) {
498 s1 = rd->interfaces[len][s2].regoff;
499 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
503 x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
505 store_reg_to_var_flt(src, d);
508 s1 = rd->interfaces[len][s2].regoff;
509 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
513 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
515 store_reg_to_var_int(src, d);
522 /* walk through all instructions */
526 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
528 MCODECHECK(64); /* an instruction usually needs < 64 words */
531 case ICMD_NOP: /* ... ==> ... */
534 case ICMD_NULLCHECKPOP: /* ..., objectref ==> ... */
535 if (src->flags & INMEMORY) {
536 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
539 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
541 x86_64_jcc(cd, X86_64_CC_E, 0);
542 codegen_addxnullrefs(cd, cd->mcodeptr);
545 /* constant operations ************************************************/
547 case ICMD_ICONST: /* ... ==> ..., constant */
548 /* op1 = 0, val.i = constant */
550 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
551 if (iptr->val.i == 0) {
552 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
554 x86_64_movl_imm_reg(cd, iptr->val.i, d);
556 store_reg_to_var_int(iptr->dst, d);
559 case ICMD_ACONST: /* ... ==> ..., constant */
560 /* op1 = 0, val.a = constant */
562 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
563 if (iptr->val.a == 0) {
564 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
566 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, d);
568 store_reg_to_var_int(iptr->dst, d);
571 case ICMD_LCONST: /* ... ==> ..., constant */
572 /* op1 = 0, val.l = constant */
574 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
575 if (iptr->val.l == 0) {
576 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
578 x86_64_mov_imm_reg(cd, iptr->val.l, d);
580 store_reg_to_var_int(iptr->dst, d);
583 case ICMD_FCONST: /* ... ==> ..., constant */
584 /* op1 = 0, val.f = constant */
586 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
587 a = dseg_addfloat(cd, iptr->val.f);
588 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + a, d);
589 store_reg_to_var_flt(iptr->dst, d);
592 case ICMD_DCONST: /* ... ==> ..., constant */
593 /* op1 = 0, val.d = constant */
595 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
596 a = dseg_adddouble(cd, iptr->val.d);
597 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, d);
598 store_reg_to_var_flt(iptr->dst, d);
602 /* load/store operations **********************************************/
604 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
605 /* op1 = local variable */
607 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
608 if ((iptr->dst->varkind == LOCALVAR) &&
609 (iptr->dst->varnum == iptr->op1)) {
612 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
613 if (var->flags & INMEMORY) {
614 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
615 store_reg_to_var_int(iptr->dst, d);
618 if (iptr->dst->flags & INMEMORY) {
619 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
622 M_INTMOVE(var->regoff, d);
627 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
628 case ICMD_ALOAD: /* op1 = local variable */
630 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
631 if ((iptr->dst->varkind == LOCALVAR) &&
632 (iptr->dst->varnum == iptr->op1)) {
635 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
636 if (var->flags & INMEMORY) {
637 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
638 store_reg_to_var_int(iptr->dst, d);
641 if (iptr->dst->flags & INMEMORY) {
642 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
645 M_INTMOVE(var->regoff, d);
650 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
651 case ICMD_DLOAD: /* op1 = local variable */
653 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
654 if ((iptr->dst->varkind == LOCALVAR) &&
655 (iptr->dst->varnum == iptr->op1)) {
658 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
659 if (var->flags & INMEMORY) {
660 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
661 store_reg_to_var_flt(iptr->dst, d);
664 if (iptr->dst->flags & INMEMORY) {
665 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
668 M_FLTMOVE(var->regoff, d);
673 case ICMD_ISTORE: /* ..., value ==> ... */
674 case ICMD_LSTORE: /* op1 = local variable */
677 if ((src->varkind == LOCALVAR) &&
678 (src->varnum == iptr->op1)) {
681 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
682 if (var->flags & INMEMORY) {
683 var_to_reg_int(s1, src, REG_ITMP1);
684 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
687 var_to_reg_int(s1, src, var->regoff);
688 M_INTMOVE(s1, var->regoff);
692 case ICMD_FSTORE: /* ..., value ==> ... */
693 case ICMD_DSTORE: /* op1 = local variable */
695 if ((src->varkind == LOCALVAR) &&
696 (src->varnum == iptr->op1)) {
699 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
700 if (var->flags & INMEMORY) {
701 var_to_reg_flt(s1, src, REG_FTMP1);
702 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
705 var_to_reg_flt(s1, src, var->regoff);
706 M_FLTMOVE(s1, var->regoff);
711 /* pop/dup/swap operations ********************************************/
713 /* attention: double and longs are only one entry in CACAO ICMDs */
715 case ICMD_POP: /* ..., value ==> ... */
716 case ICMD_POP2: /* ..., value, value ==> ... */
719 case ICMD_DUP: /* ..., a ==> ..., a, a */
720 M_COPY(src, iptr->dst);
723 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
725 M_COPY(src, iptr->dst);
726 M_COPY(src->prev, iptr->dst->prev);
727 M_COPY(iptr->dst, iptr->dst->prev->prev);
730 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
732 M_COPY(src, iptr->dst);
733 M_COPY(src->prev, iptr->dst->prev);
734 M_COPY(src->prev->prev, iptr->dst->prev->prev);
735 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
738 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
740 M_COPY(src, iptr->dst);
741 M_COPY(src->prev, iptr->dst->prev);
744 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
746 M_COPY(src, iptr->dst);
747 M_COPY(src->prev, iptr->dst->prev);
748 M_COPY(src->prev->prev, iptr->dst->prev->prev);
749 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
750 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
753 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
755 M_COPY(src, iptr->dst);
756 M_COPY(src->prev, iptr->dst->prev);
757 M_COPY(src->prev->prev, iptr->dst->prev->prev);
758 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
759 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
760 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
763 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
765 M_COPY(src, iptr->dst->prev);
766 M_COPY(src->prev, iptr->dst);
770 /* integer operations *************************************************/
772 case ICMD_INEG: /* ..., value ==> ..., - value */
774 d = reg_of_var(rd, iptr->dst, REG_NULL);
775 if (iptr->dst->flags & INMEMORY) {
776 if (src->flags & INMEMORY) {
777 if (src->regoff == iptr->dst->regoff) {
778 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
781 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
782 x86_64_negl_reg(cd, REG_ITMP1);
783 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
787 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
788 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
792 if (src->flags & INMEMORY) {
793 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
794 x86_64_negl_reg(cd, d);
797 M_INTMOVE(src->regoff, iptr->dst->regoff);
798 x86_64_negl_reg(cd, iptr->dst->regoff);
803 case ICMD_LNEG: /* ..., value ==> ..., - value */
805 d = reg_of_var(rd, iptr->dst, REG_NULL);
806 if (iptr->dst->flags & INMEMORY) {
807 if (src->flags & INMEMORY) {
808 if (src->regoff == iptr->dst->regoff) {
809 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
812 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
813 x86_64_neg_reg(cd, REG_ITMP1);
814 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
818 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
819 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
823 if (src->flags & INMEMORY) {
824 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
825 x86_64_neg_reg(cd, iptr->dst->regoff);
828 M_INTMOVE(src->regoff, iptr->dst->regoff);
829 x86_64_neg_reg(cd, iptr->dst->regoff);
834 case ICMD_I2L: /* ..., value ==> ..., value */
836 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
837 if (src->flags & INMEMORY) {
838 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
841 x86_64_movslq_reg_reg(cd, src->regoff, d);
843 store_reg_to_var_int(iptr->dst, d);
846 case ICMD_L2I: /* ..., value ==> ..., value */
848 var_to_reg_int(s1, src, REG_ITMP1);
849 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
851 store_reg_to_var_int(iptr->dst, d);
854 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
856 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
857 if (src->flags & INMEMORY) {
858 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
861 x86_64_movsbq_reg_reg(cd, src->regoff, d);
863 store_reg_to_var_int(iptr->dst, d);
866 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
868 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
869 if (src->flags & INMEMORY) {
870 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
873 x86_64_movzwq_reg_reg(cd, src->regoff, d);
875 store_reg_to_var_int(iptr->dst, d);
878 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
880 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
881 if (src->flags & INMEMORY) {
882 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
885 x86_64_movswq_reg_reg(cd, src->regoff, d);
887 store_reg_to_var_int(iptr->dst, d);
891 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
893 d = reg_of_var(rd, iptr->dst, REG_NULL);
894 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
897 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
898 /* val.i = constant */
900 d = reg_of_var(rd, iptr->dst, REG_NULL);
901 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
904 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
906 d = reg_of_var(rd, iptr->dst, REG_NULL);
907 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
910 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
911 /* val.l = constant */
913 d = reg_of_var(rd, iptr->dst, REG_NULL);
914 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
917 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
919 d = reg_of_var(rd, iptr->dst, REG_NULL);
920 if (iptr->dst->flags & INMEMORY) {
921 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
922 if (src->prev->regoff == iptr->dst->regoff) {
923 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
924 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
927 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
928 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
929 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
932 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
933 M_INTMOVE(src->prev->regoff, REG_ITMP1);
934 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
935 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
937 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
938 if (src->prev->regoff == iptr->dst->regoff) {
939 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
942 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
943 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
944 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
948 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
949 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
953 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
954 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
955 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
957 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
958 M_INTMOVE(src->prev->regoff, d);
959 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
961 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
962 /* workaround for reg alloc */
963 if (src->regoff == iptr->dst->regoff) {
964 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
965 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
966 M_INTMOVE(REG_ITMP1, d);
969 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
970 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
974 /* workaround for reg alloc */
975 if (src->regoff == iptr->dst->regoff) {
976 M_INTMOVE(src->prev->regoff, REG_ITMP1);
977 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
978 M_INTMOVE(REG_ITMP1, d);
981 M_INTMOVE(src->prev->regoff, d);
982 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
988 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
989 /* val.i = constant */
991 d = reg_of_var(rd, iptr->dst, REG_NULL);
992 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
995 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
997 d = reg_of_var(rd, iptr->dst, REG_NULL);
998 if (iptr->dst->flags & INMEMORY) {
999 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1000 if (src->prev->regoff == iptr->dst->regoff) {
1001 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1002 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1005 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1006 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1007 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1010 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1011 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1012 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1013 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1015 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1016 if (src->prev->regoff == iptr->dst->regoff) {
1017 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1020 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1021 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1022 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1026 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1027 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1031 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1032 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1033 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1035 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1036 M_INTMOVE(src->prev->regoff, d);
1037 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1039 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1040 /* workaround for reg alloc */
1041 if (src->regoff == iptr->dst->regoff) {
1042 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1043 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1044 M_INTMOVE(REG_ITMP1, d);
1047 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1048 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1052 /* workaround for reg alloc */
1053 if (src->regoff == iptr->dst->regoff) {
1054 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1055 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1056 M_INTMOVE(REG_ITMP1, d);
1059 M_INTMOVE(src->prev->regoff, d);
1060 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1066 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1067 /* val.l = constant */
1069 d = reg_of_var(rd, iptr->dst, REG_NULL);
1070 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1073 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1075 d = reg_of_var(rd, iptr->dst, REG_NULL);
1076 if (iptr->dst->flags & INMEMORY) {
1077 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1078 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1079 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1080 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1082 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1083 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1084 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1085 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1087 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1088 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1089 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1090 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1093 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1094 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1095 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1099 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1100 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1101 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1103 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1104 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1105 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1107 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1108 M_INTMOVE(src->regoff, iptr->dst->regoff);
1109 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1112 if (src->regoff == iptr->dst->regoff) {
1113 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1116 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1117 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1123 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1124 /* val.i = constant */
1126 d = reg_of_var(rd, iptr->dst, REG_NULL);
1127 if (iptr->dst->flags & INMEMORY) {
1128 if (src->flags & INMEMORY) {
1129 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1130 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1133 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1134 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1138 if (src->flags & INMEMORY) {
1139 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1142 if (iptr->val.i == 2) {
1143 M_INTMOVE(src->regoff, iptr->dst->regoff);
1144 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1147 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
1153 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1155 d = reg_of_var(rd, iptr->dst, REG_NULL);
1156 if (iptr->dst->flags & INMEMORY) {
1157 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1158 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1159 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1160 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1162 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1163 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1164 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1165 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1167 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1168 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1169 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1170 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1173 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1174 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1175 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1179 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1180 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1181 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1183 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1184 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1185 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1187 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1188 M_INTMOVE(src->regoff, iptr->dst->regoff);
1189 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1192 if (src->regoff == iptr->dst->regoff) {
1193 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1196 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1197 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
1203 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1204 /* val.l = constant */
1206 d = reg_of_var(rd, iptr->dst, REG_NULL);
1207 if (iptr->dst->flags & INMEMORY) {
1208 if (src->flags & INMEMORY) {
1209 if (x86_64_is_imm32(iptr->val.l)) {
1210 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1213 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1214 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1216 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1219 if (x86_64_is_imm32(iptr->val.l)) {
1220 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1223 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1224 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1226 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1230 if (src->flags & INMEMORY) {
1231 if (x86_64_is_imm32(iptr->val.l)) {
1232 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1235 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1236 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1240 /* should match in many cases */
1241 if (iptr->val.l == 2) {
1242 M_INTMOVE(src->regoff, iptr->dst->regoff);
1243 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1246 if (x86_64_is_imm32(iptr->val.l)) {
1247 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1250 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1251 M_INTMOVE(src->regoff, iptr->dst->regoff);
1252 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
1259 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1261 d = reg_of_var(rd, iptr->dst, REG_NULL);
1262 if (src->prev->flags & INMEMORY) {
1263 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1266 M_INTMOVE(src->prev->regoff, RAX);
1269 if (src->flags & INMEMORY) {
1270 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1273 M_INTMOVE(src->regoff, REG_ITMP3);
1277 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1278 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1279 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1280 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1282 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1284 x86_64_idivl_reg(cd, REG_ITMP3);
1286 if (iptr->dst->flags & INMEMORY) {
1287 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1288 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1291 M_INTMOVE(RAX, iptr->dst->regoff);
1293 if (iptr->dst->regoff != RDX) {
1294 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1299 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1301 d = reg_of_var(rd, iptr->dst, REG_NULL);
1302 if (src->prev->flags & INMEMORY) {
1303 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1306 M_INTMOVE(src->prev->regoff, RAX);
1309 if (src->flags & INMEMORY) {
1310 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1313 M_INTMOVE(src->regoff, REG_ITMP3);
1317 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1318 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1319 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1320 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1321 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1323 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1325 x86_64_idivl_reg(cd, REG_ITMP3);
1327 if (iptr->dst->flags & INMEMORY) {
1328 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1329 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1332 M_INTMOVE(RDX, iptr->dst->regoff);
1334 if (iptr->dst->regoff != RDX) {
1335 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1340 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1341 /* val.i = constant */
1343 var_to_reg_int(s1, src, REG_ITMP1);
1344 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1345 M_INTMOVE(s1, REG_ITMP1);
1346 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1347 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1348 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1349 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1350 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1351 store_reg_to_var_int(iptr->dst, d);
1354 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1355 /* val.i = constant */
1357 var_to_reg_int(s1, src, REG_ITMP1);
1358 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1359 M_INTMOVE(s1, REG_ITMP1);
1360 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1361 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1362 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1363 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1364 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1365 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1366 store_reg_to_var_int(iptr->dst, d);
1370 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1372 d = reg_of_var(rd, iptr->dst, REG_NULL);
1373 if (src->prev->flags & INMEMORY) {
1374 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1377 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1380 if (src->flags & INMEMORY) {
1381 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1384 M_INTMOVE(src->regoff, REG_ITMP3);
1388 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1389 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1390 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1391 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1392 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1394 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1396 x86_64_idiv_reg(cd, REG_ITMP3);
1398 if (iptr->dst->flags & INMEMORY) {
1399 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1400 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1403 M_INTMOVE(RAX, iptr->dst->regoff);
1405 if (iptr->dst->regoff != RDX) {
1406 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1411 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1413 d = reg_of_var(rd, iptr->dst, REG_NULL);
1414 if (src->prev->flags & INMEMORY) {
1415 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1418 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1421 if (src->flags & INMEMORY) {
1422 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1425 M_INTMOVE(src->regoff, REG_ITMP3);
1429 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1430 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1431 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1432 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1433 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1434 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1436 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1438 x86_64_idiv_reg(cd, REG_ITMP3);
1440 if (iptr->dst->flags & INMEMORY) {
1441 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1442 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1445 M_INTMOVE(RDX, iptr->dst->regoff);
1447 if (iptr->dst->regoff != RDX) {
1448 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1453 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1454 /* val.i = constant */
1456 var_to_reg_int(s1, src, REG_ITMP1);
1457 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1458 M_INTMOVE(s1, REG_ITMP1);
1459 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1460 x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1461 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1462 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1463 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1464 store_reg_to_var_int(iptr->dst, d);
1467 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1468 /* val.l = constant */
1470 var_to_reg_int(s1, src, REG_ITMP1);
1471 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1472 M_INTMOVE(s1, REG_ITMP1);
1473 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1474 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1475 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1476 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1477 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1478 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1479 store_reg_to_var_int(iptr->dst, d);
1482 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1484 d = reg_of_var(rd, iptr->dst, REG_NULL);
1485 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1488 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1489 /* val.i = constant */
1491 d = reg_of_var(rd, iptr->dst, REG_NULL);
1492 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1495 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1497 d = reg_of_var(rd, iptr->dst, REG_NULL);
1498 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1501 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1502 /* val.i = constant */
1504 d = reg_of_var(rd, iptr->dst, REG_NULL);
1505 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1508 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1510 d = reg_of_var(rd, iptr->dst, REG_NULL);
1511 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1514 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1515 /* val.i = constant */
1517 d = reg_of_var(rd, iptr->dst, REG_NULL);
1518 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1521 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1523 d = reg_of_var(rd, iptr->dst, REG_NULL);
1524 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1527 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1528 /* val.i = constant */
1530 d = reg_of_var(rd, iptr->dst, REG_NULL);
1531 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1534 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1536 d = reg_of_var(rd, iptr->dst, REG_NULL);
1537 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1540 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1541 /* val.i = constant */
1543 d = reg_of_var(rd, iptr->dst, REG_NULL);
1544 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1547 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1549 d = reg_of_var(rd, iptr->dst, REG_NULL);
1550 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1553 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1554 /* val.l = constant */
1556 d = reg_of_var(rd, iptr->dst, REG_NULL);
1557 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1560 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1562 d = reg_of_var(rd, iptr->dst, REG_NULL);
1563 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1566 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1567 /* val.i = constant */
1569 d = reg_of_var(rd, iptr->dst, REG_NULL);
1570 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1573 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1575 d = reg_of_var(rd, iptr->dst, REG_NULL);
1576 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1579 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1580 /* val.l = constant */
1582 d = reg_of_var(rd, iptr->dst, REG_NULL);
1583 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1586 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1588 d = reg_of_var(rd, iptr->dst, REG_NULL);
1589 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1592 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1593 /* val.i = constant */
1595 d = reg_of_var(rd, iptr->dst, REG_NULL);
1596 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1599 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1601 d = reg_of_var(rd, iptr->dst, REG_NULL);
1602 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1605 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1606 /* val.l = constant */
1608 d = reg_of_var(rd, iptr->dst, REG_NULL);
1609 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1612 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1614 d = reg_of_var(rd, iptr->dst, REG_NULL);
1615 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1618 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1619 /* val.i = constant */
1621 d = reg_of_var(rd, iptr->dst, REG_NULL);
1622 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1625 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1627 d = reg_of_var(rd, iptr->dst, REG_NULL);
1628 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1631 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1632 /* val.l = constant */
1634 d = reg_of_var(rd, iptr->dst, REG_NULL);
1635 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
1639 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1640 /* op1 = variable, val.i = constant */
1642 /* using inc and dec is definitely faster than add -- tested */
1645 var = &(rd->locals[iptr->op1][TYPE_INT]);
1647 if (var->flags & INMEMORY) {
1648 if (iptr->val.i == 1) {
1649 x86_64_incl_membase(cd, REG_SP, d * 8);
1651 } else if (iptr->val.i == -1) {
1652 x86_64_decl_membase(cd, REG_SP, d * 8);
1655 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1659 if (iptr->val.i == 1) {
1660 x86_64_incl_reg(cd, d);
1662 } else if (iptr->val.i == -1) {
1663 x86_64_decl_reg(cd, d);
1666 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1672 /* floating operations ************************************************/
1674 case ICMD_FNEG: /* ..., value ==> ..., - value */
1676 var_to_reg_flt(s1, src, REG_FTMP1);
1677 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1678 a = dseg_adds4(cd, 0x80000000);
1680 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1681 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1682 store_reg_to_var_flt(iptr->dst, d);
1685 case ICMD_DNEG: /* ..., value ==> ..., - value */
1687 var_to_reg_flt(s1, src, REG_FTMP1);
1688 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1689 a = dseg_adds8(cd, 0x8000000000000000);
1691 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1692 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1693 store_reg_to_var_flt(iptr->dst, d);
1696 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1698 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1699 var_to_reg_flt(s2, src, REG_FTMP2);
1700 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1702 x86_64_addss_reg_reg(cd, s2, d);
1703 } else if (s2 == d) {
1704 x86_64_addss_reg_reg(cd, s1, d);
1707 x86_64_addss_reg_reg(cd, s2, d);
1709 store_reg_to_var_flt(iptr->dst, d);
1712 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1714 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1715 var_to_reg_flt(s2, src, REG_FTMP2);
1716 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1718 x86_64_addsd_reg_reg(cd, s2, d);
1719 } else if (s2 == d) {
1720 x86_64_addsd_reg_reg(cd, s1, d);
1723 x86_64_addsd_reg_reg(cd, s2, d);
1725 store_reg_to_var_flt(iptr->dst, d);
1728 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1730 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1731 var_to_reg_flt(s2, src, REG_FTMP2);
1732 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1734 M_FLTMOVE(s2, REG_FTMP2);
1738 x86_64_subss_reg_reg(cd, s2, d);
1739 store_reg_to_var_flt(iptr->dst, d);
1742 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1744 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1745 var_to_reg_flt(s2, src, REG_FTMP2);
1746 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1748 M_FLTMOVE(s2, REG_FTMP2);
1752 x86_64_subsd_reg_reg(cd, s2, d);
1753 store_reg_to_var_flt(iptr->dst, d);
1756 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1758 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1759 var_to_reg_flt(s2, src, REG_FTMP2);
1760 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1762 x86_64_mulss_reg_reg(cd, s2, d);
1763 } else if (s2 == d) {
1764 x86_64_mulss_reg_reg(cd, s1, d);
1767 x86_64_mulss_reg_reg(cd, s2, d);
1769 store_reg_to_var_flt(iptr->dst, d);
1772 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1774 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1775 var_to_reg_flt(s2, src, REG_FTMP2);
1776 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1778 x86_64_mulsd_reg_reg(cd, s2, d);
1779 } else if (s2 == d) {
1780 x86_64_mulsd_reg_reg(cd, s1, d);
1783 x86_64_mulsd_reg_reg(cd, s2, d);
1785 store_reg_to_var_flt(iptr->dst, d);
1788 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1790 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1791 var_to_reg_flt(s2, src, REG_FTMP2);
1792 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1794 M_FLTMOVE(s2, REG_FTMP2);
1798 x86_64_divss_reg_reg(cd, s2, d);
1799 store_reg_to_var_flt(iptr->dst, d);
1802 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1804 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1805 var_to_reg_flt(s2, src, REG_FTMP2);
1806 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1808 M_FLTMOVE(s2, REG_FTMP2);
1812 x86_64_divsd_reg_reg(cd, s2, d);
1813 store_reg_to_var_flt(iptr->dst, d);
1816 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1818 var_to_reg_int(s1, src, REG_ITMP1);
1819 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1820 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1821 store_reg_to_var_flt(iptr->dst, d);
1824 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1826 var_to_reg_int(s1, src, REG_ITMP1);
1827 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1828 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1829 store_reg_to_var_flt(iptr->dst, d);
1832 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1834 var_to_reg_int(s1, src, REG_ITMP1);
1835 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1836 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1837 store_reg_to_var_flt(iptr->dst, d);
1840 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1842 var_to_reg_int(s1, src, REG_ITMP1);
1843 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1844 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1845 store_reg_to_var_flt(iptr->dst, d);
1848 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1850 var_to_reg_flt(s1, src, REG_FTMP1);
1851 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1852 x86_64_cvttss2si_reg_reg(cd, s1, d);
1853 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1854 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1855 x86_64_jcc(cd, X86_64_CC_NE, a);
1856 M_FLTMOVE(s1, REG_FTMP1);
1857 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1858 x86_64_call_reg(cd, REG_ITMP2);
1859 M_INTMOVE(REG_RESULT, d);
1860 store_reg_to_var_int(iptr->dst, d);
1863 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1865 var_to_reg_flt(s1, src, REG_FTMP1);
1866 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1867 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1868 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1869 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1870 x86_64_jcc(cd, X86_64_CC_NE, a);
1871 M_FLTMOVE(s1, REG_FTMP1);
1872 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1873 x86_64_call_reg(cd, REG_ITMP2);
1874 M_INTMOVE(REG_RESULT, d);
1875 store_reg_to_var_int(iptr->dst, d);
1878 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1880 var_to_reg_flt(s1, src, REG_FTMP1);
1881 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1882 x86_64_cvttss2siq_reg_reg(cd, s1, d);
1883 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1884 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1885 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1886 x86_64_jcc(cd, X86_64_CC_NE, a);
1887 M_FLTMOVE(s1, REG_FTMP1);
1888 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
1889 x86_64_call_reg(cd, REG_ITMP2);
1890 M_INTMOVE(REG_RESULT, d);
1891 store_reg_to_var_int(iptr->dst, d);
1894 case ICMD_D2L: /* ..., value ==> ..., (long) value */
1896 var_to_reg_flt(s1, src, REG_FTMP1);
1897 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1898 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1899 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1900 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
1901 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1902 x86_64_jcc(cd, X86_64_CC_NE, a);
1903 M_FLTMOVE(s1, REG_FTMP1);
1904 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
1905 x86_64_call_reg(cd, REG_ITMP2);
1906 M_INTMOVE(REG_RESULT, d);
1907 store_reg_to_var_int(iptr->dst, d);
1910 case ICMD_F2D: /* ..., value ==> ..., (double) value */
1912 var_to_reg_flt(s1, src, REG_FTMP1);
1913 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1914 x86_64_cvtss2sd_reg_reg(cd, s1, d);
1915 store_reg_to_var_flt(iptr->dst, d);
1918 case ICMD_D2F: /* ..., value ==> ..., (float) value */
1920 var_to_reg_flt(s1, src, REG_FTMP1);
1921 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1922 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1923 store_reg_to_var_flt(iptr->dst, d);
1926 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1927 /* == => 0, < => 1, > => -1 */
1929 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1930 var_to_reg_flt(s2, src, REG_FTMP2);
1931 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1932 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1933 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1934 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1935 x86_64_ucomiss_reg_reg(cd, s1, s2);
1936 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1937 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1938 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1939 store_reg_to_var_int(iptr->dst, d);
1942 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1943 /* == => 0, < => 1, > => -1 */
1945 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1946 var_to_reg_flt(s2, src, REG_FTMP2);
1947 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1948 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1949 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1950 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1951 x86_64_ucomiss_reg_reg(cd, s1, s2);
1952 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1953 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1954 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1955 store_reg_to_var_int(iptr->dst, d);
1958 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
1959 /* == => 0, < => 1, > => -1 */
1961 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1962 var_to_reg_flt(s2, src, REG_FTMP2);
1963 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1964 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1965 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1966 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1967 x86_64_ucomisd_reg_reg(cd, s1, s2);
1968 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1969 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1970 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
1971 store_reg_to_var_int(iptr->dst, d);
1974 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
1975 /* == => 0, < => 1, > => -1 */
1977 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1978 var_to_reg_flt(s2, src, REG_FTMP2);
1979 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1980 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1981 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1982 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1983 x86_64_ucomisd_reg_reg(cd, s1, s2);
1984 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1985 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1986 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
1987 store_reg_to_var_int(iptr->dst, d);
1991 /* memory operations **************************************************/
1993 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
1995 var_to_reg_int(s1, src, REG_ITMP1);
1996 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1997 gen_nullptr_check(s1);
1998 x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
1999 store_reg_to_var_int(iptr->dst, d);
2002 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2004 var_to_reg_int(s1, src->prev, REG_ITMP1);
2005 var_to_reg_int(s2, src, REG_ITMP2);
2006 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2007 if (iptr->op1 == 0) {
2008 gen_nullptr_check(s1);
2011 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2012 store_reg_to_var_int(iptr->dst, d);
2015 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2017 var_to_reg_int(s1, src->prev, REG_ITMP1);
2018 var_to_reg_int(s2, src, REG_ITMP2);
2019 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2020 if (iptr->op1 == 0) {
2021 gen_nullptr_check(s1);
2024 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2025 store_reg_to_var_int(iptr->dst, d);
2028 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2030 var_to_reg_int(s1, src->prev, REG_ITMP1);
2031 var_to_reg_int(s2, src, REG_ITMP2);
2032 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2033 if (iptr->op1 == 0) {
2034 gen_nullptr_check(s1);
2037 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2038 store_reg_to_var_int(iptr->dst, d);
2041 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2043 var_to_reg_int(s1, src->prev, REG_ITMP1);
2044 var_to_reg_int(s2, src, REG_ITMP2);
2045 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2046 if (iptr->op1 == 0) {
2047 gen_nullptr_check(s1);
2050 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2051 store_reg_to_var_flt(iptr->dst, d);
2054 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2056 var_to_reg_int(s1, src->prev, REG_ITMP1);
2057 var_to_reg_int(s2, src, REG_ITMP2);
2058 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2059 if (iptr->op1 == 0) {
2060 gen_nullptr_check(s1);
2063 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2064 store_reg_to_var_flt(iptr->dst, d);
2067 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2069 var_to_reg_int(s1, src->prev, REG_ITMP1);
2070 var_to_reg_int(s2, src, REG_ITMP2);
2071 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2072 if (iptr->op1 == 0) {
2073 gen_nullptr_check(s1);
2076 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2077 store_reg_to_var_int(iptr->dst, d);
2080 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2082 var_to_reg_int(s1, src->prev, REG_ITMP1);
2083 var_to_reg_int(s2, src, REG_ITMP2);
2084 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2085 if (iptr->op1 == 0) {
2086 gen_nullptr_check(s1);
2089 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2090 store_reg_to_var_int(iptr->dst, d);
2093 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2095 var_to_reg_int(s1, src->prev, REG_ITMP1);
2096 var_to_reg_int(s2, src, REG_ITMP2);
2097 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2098 if (iptr->op1 == 0) {
2099 gen_nullptr_check(s1);
2102 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2103 store_reg_to_var_int(iptr->dst, d);
2107 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2109 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2110 var_to_reg_int(s2, src->prev, REG_ITMP2);
2111 if (iptr->op1 == 0) {
2112 gen_nullptr_check(s1);
2115 var_to_reg_int(s3, src, REG_ITMP3);
2116 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2119 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2121 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2122 var_to_reg_int(s2, src->prev, REG_ITMP2);
2123 if (iptr->op1 == 0) {
2124 gen_nullptr_check(s1);
2127 var_to_reg_int(s3, src, REG_ITMP3);
2128 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2131 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2133 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2134 var_to_reg_int(s2, src->prev, REG_ITMP2);
2135 if (iptr->op1 == 0) {
2136 gen_nullptr_check(s1);
2139 var_to_reg_int(s3, src, REG_ITMP3);
2140 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2143 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2145 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2146 var_to_reg_int(s2, src->prev, REG_ITMP2);
2147 if (iptr->op1 == 0) {
2148 gen_nullptr_check(s1);
2151 var_to_reg_flt(s3, src, REG_FTMP3);
2152 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2155 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2157 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2158 var_to_reg_int(s2, src->prev, REG_ITMP2);
2159 if (iptr->op1 == 0) {
2160 gen_nullptr_check(s1);
2163 var_to_reg_flt(s3, src, REG_FTMP3);
2164 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2167 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2169 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2170 var_to_reg_int(s2, src->prev, REG_ITMP2);
2171 if (iptr->op1 == 0) {
2172 gen_nullptr_check(s1);
2175 var_to_reg_int(s3, src, REG_ITMP3);
2176 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2179 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2181 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2182 var_to_reg_int(s2, src->prev, REG_ITMP2);
2183 if (iptr->op1 == 0) {
2184 gen_nullptr_check(s1);
2187 var_to_reg_int(s3, src, REG_ITMP3);
2188 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2191 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2193 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2194 var_to_reg_int(s2, src->prev, REG_ITMP2);
2195 if (iptr->op1 == 0) {
2196 gen_nullptr_check(s1);
2199 var_to_reg_int(s3, src, REG_ITMP3);
2200 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2203 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2205 var_to_reg_int(s1, src->prev, REG_ITMP1);
2206 var_to_reg_int(s2, src, REG_ITMP2);
2207 if (iptr->op1 == 0) {
2208 gen_nullptr_check(s1);
2211 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2214 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2216 var_to_reg_int(s1, src->prev, REG_ITMP1);
2217 var_to_reg_int(s2, src, REG_ITMP2);
2218 if (iptr->op1 == 0) {
2219 gen_nullptr_check(s1);
2223 if (x86_64_is_imm32(iptr->val.l)) {
2224 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2227 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2228 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2232 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2234 var_to_reg_int(s1, src->prev, REG_ITMP1);
2235 var_to_reg_int(s2, src, REG_ITMP2);
2236 if (iptr->op1 == 0) {
2237 gen_nullptr_check(s1);
2240 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2243 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2245 var_to_reg_int(s1, src->prev, REG_ITMP1);
2246 var_to_reg_int(s2, src, REG_ITMP2);
2247 if (iptr->op1 == 0) {
2248 gen_nullptr_check(s1);
2251 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2254 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2256 var_to_reg_int(s1, src->prev, REG_ITMP1);
2257 var_to_reg_int(s2, src, REG_ITMP2);
2258 if (iptr->op1 == 0) {
2259 gen_nullptr_check(s1);
2262 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2265 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2267 var_to_reg_int(s1, src->prev, REG_ITMP1);
2268 var_to_reg_int(s2, src, REG_ITMP2);
2269 if (iptr->op1 == 0) {
2270 gen_nullptr_check(s1);
2273 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2277 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2278 /* op1 = type, val.a = field address */
2280 /* if class isn't yet initialized, do it */
2281 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2282 /* call helper function which patches this code */
2283 x86_64_mov_imm_reg(cd, (s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2284 x86_64_mov_imm_reg(cd, (s8) asm_check_clinit, REG_ITMP2);
2285 x86_64_call_reg(cd, REG_ITMP2);
2288 a = dseg_addaddress(cd, &(((fieldinfo *) iptr->val.a)->value));
2289 x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
2290 switch (iptr->op1) {
2292 var_to_reg_int(s2, src, REG_ITMP1);
2293 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2297 var_to_reg_int(s2, src, REG_ITMP1);
2298 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2301 var_to_reg_flt(s2, src, REG_FTMP1);
2302 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2305 var_to_reg_flt(s2, src, REG_FTMP1);
2306 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
2308 default: panic("internal error");
2312 case ICMD_GETSTATIC: /* ... ==> ..., value */
2313 /* op1 = type, val.a = field address */
2315 /* if class isn't yet initialized, do it */
2316 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2317 /* call helper function which patches this code */
2318 x86_64_mov_imm_reg(cd, (s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2319 x86_64_mov_imm_reg(cd, (s8) asm_check_clinit, REG_ITMP2);
2320 x86_64_call_reg(cd, REG_ITMP2);
2323 a = dseg_addaddress(cd, &(((fieldinfo *) iptr->val.a)->value));
2324 x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
2325 switch (iptr->op1) {
2327 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2328 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2329 store_reg_to_var_int(iptr->dst, d);
2333 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2334 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2335 store_reg_to_var_int(iptr->dst, d);
2338 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2339 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2340 store_reg_to_var_flt(iptr->dst, d);
2343 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2344 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2345 store_reg_to_var_flt(iptr->dst, d);
2347 default: panic("internal error");
2351 case ICMD_PUTFIELD: /* ..., value ==> ... */
2352 /* op1 = type, val.i = field offset */
2354 /* if class isn't yet initialized, do it */
2355 if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2356 /* call helper function which patches this code */
2357 x86_64_mov_imm_reg(cd, (s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2358 x86_64_mov_imm_reg(cd, (s8) asm_check_clinit, REG_ITMP2);
2359 x86_64_call_reg(cd, REG_ITMP2);
2362 a = ((fieldinfo *)(iptr->val.a))->offset;
2363 var_to_reg_int(s1, src->prev, REG_ITMP1);
2364 switch (iptr->op1) {
2366 var_to_reg_int(s2, src, REG_ITMP2);
2367 gen_nullptr_check(s1);
2368 x86_64_movl_reg_membase(cd, s2, s1, a);
2372 var_to_reg_int(s2, src, REG_ITMP2);
2373 gen_nullptr_check(s1);
2374 x86_64_mov_reg_membase(cd, s2, s1, a);
2377 var_to_reg_flt(s2, src, REG_FTMP2);
2378 gen_nullptr_check(s1);
2379 x86_64_movss_reg_membase(cd, s2, s1, a);
2382 var_to_reg_flt(s2, src, REG_FTMP2);
2383 gen_nullptr_check(s1);
2384 x86_64_movsd_reg_membase(cd, s2, s1, a);
2386 default: panic ("internal error");
2390 case ICMD_GETFIELD: /* ... ==> ..., value */
2391 /* op1 = type, val.i = field offset */
2393 a = ((fieldinfo *)(iptr->val.a))->offset;
2394 var_to_reg_int(s1, src, REG_ITMP1);
2395 switch (iptr->op1) {
2397 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2398 gen_nullptr_check(s1);
2399 x86_64_movl_membase_reg(cd, s1, a, d);
2400 store_reg_to_var_int(iptr->dst, d);
2404 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2405 gen_nullptr_check(s1);
2406 x86_64_mov_membase_reg(cd, s1, a, d);
2407 store_reg_to_var_int(iptr->dst, d);
2410 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2411 gen_nullptr_check(s1);
2412 x86_64_movss_membase_reg(cd, s1, a, d);
2413 store_reg_to_var_flt(iptr->dst, d);
2416 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2417 gen_nullptr_check(s1);
2418 x86_64_movsd_membase_reg(cd, s1, a, d);
2419 store_reg_to_var_flt(iptr->dst, d);
2421 default: panic ("internal error");
2426 /* branch operations **************************************************/
2428 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2430 var_to_reg_int(s1, src, REG_ITMP1);
2431 M_INTMOVE(s1, REG_ITMP1_XPTR);
2433 x86_64_call_imm(cd, 0); /* passing exception pointer */
2434 x86_64_pop_reg(cd, REG_ITMP2_XPC);
2436 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
2437 x86_64_jmp_reg(cd, REG_ITMP3);
2441 case ICMD_GOTO: /* ... ==> ... */
2442 /* op1 = target JavaVM pc */
2444 x86_64_jmp_imm(cd, 0);
2445 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2449 case ICMD_JSR: /* ... ==> ... */
2450 /* op1 = target JavaVM pc */
2452 x86_64_call_imm(cd, 0);
2453 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2456 case ICMD_RET: /* ... ==> ... */
2457 /* op1 = local variable */
2459 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2460 var_to_reg_int(s1, var, REG_ITMP1);
2461 x86_64_jmp_reg(cd, s1);
2464 case ICMD_IFNULL: /* ..., value ==> ... */
2465 /* op1 = target JavaVM pc */
2467 if (src->flags & INMEMORY) {
2468 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2471 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2473 x86_64_jcc(cd, X86_64_CC_E, 0);
2474 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2477 case ICMD_IFNONNULL: /* ..., value ==> ... */
2478 /* op1 = target JavaVM pc */
2480 if (src->flags & INMEMORY) {
2481 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2484 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2486 x86_64_jcc(cd, X86_64_CC_NE, 0);
2487 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2490 case ICMD_IFEQ: /* ..., value ==> ... */
2491 /* op1 = target JavaVM pc, val.i = constant */
2493 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2496 case ICMD_IFLT: /* ..., value ==> ... */
2497 /* op1 = target JavaVM pc, val.i = constant */
2499 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2502 case ICMD_IFLE: /* ..., value ==> ... */
2503 /* op1 = target JavaVM pc, val.i = constant */
2505 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2508 case ICMD_IFNE: /* ..., value ==> ... */
2509 /* op1 = target JavaVM pc, val.i = constant */
2511 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2514 case ICMD_IFGT: /* ..., value ==> ... */
2515 /* op1 = target JavaVM pc, val.i = constant */
2517 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2520 case ICMD_IFGE: /* ..., value ==> ... */
2521 /* op1 = target JavaVM pc, val.i = constant */
2523 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
2526 case ICMD_IF_LEQ: /* ..., value ==> ... */
2527 /* op1 = target JavaVM pc, val.l = constant */
2529 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2532 case ICMD_IF_LLT: /* ..., value ==> ... */
2533 /* op1 = target JavaVM pc, val.l = constant */
2535 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2538 case ICMD_IF_LLE: /* ..., value ==> ... */
2539 /* op1 = target JavaVM pc, val.l = constant */
2541 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2544 case ICMD_IF_LNE: /* ..., value ==> ... */
2545 /* op1 = target JavaVM pc, val.l = constant */
2547 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2550 case ICMD_IF_LGT: /* ..., value ==> ... */
2551 /* op1 = target JavaVM pc, val.l = constant */
2553 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2556 case ICMD_IF_LGE: /* ..., value ==> ... */
2557 /* op1 = target JavaVM pc, val.l = constant */
2559 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
2562 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2563 /* op1 = target JavaVM pc */
2565 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2568 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2569 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2571 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2574 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2575 /* op1 = target JavaVM pc */
2577 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2580 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2581 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2583 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2586 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2587 /* op1 = target JavaVM pc */
2589 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2592 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2593 /* op1 = target JavaVM pc */
2595 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2598 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2599 /* op1 = target JavaVM pc */
2601 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2604 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2605 /* op1 = target JavaVM pc */
2607 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2610 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2611 /* op1 = target JavaVM pc */
2613 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2616 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2617 /* op1 = target JavaVM pc */
2619 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2622 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2623 /* op1 = target JavaVM pc */
2625 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2628 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2629 /* op1 = target JavaVM pc */
2631 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2634 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
2636 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2639 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2640 /* val.i = constant */
2642 var_to_reg_int(s1, src, REG_ITMP1);
2643 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2645 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2647 M_INTMOVE(s1, REG_ITMP1);
2650 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2652 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2653 x86_64_testl_reg_reg(cd, s1, s1);
2654 x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2655 store_reg_to_var_int(iptr->dst, d);
2658 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2659 /* val.i = constant */
2661 var_to_reg_int(s1, src, REG_ITMP1);
2662 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2664 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2666 M_INTMOVE(s1, REG_ITMP1);
2669 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2671 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2672 x86_64_testl_reg_reg(cd, s1, s1);
2673 x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2674 store_reg_to_var_int(iptr->dst, d);
2677 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2678 /* val.i = constant */
2680 var_to_reg_int(s1, src, REG_ITMP1);
2681 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2683 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2685 M_INTMOVE(s1, REG_ITMP1);
2688 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2690 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2691 x86_64_testl_reg_reg(cd, s1, s1);
2692 x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2693 store_reg_to_var_int(iptr->dst, d);
2696 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2697 /* val.i = constant */
2699 var_to_reg_int(s1, src, REG_ITMP1);
2700 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2702 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2704 M_INTMOVE(s1, REG_ITMP1);
2707 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2709 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2710 x86_64_testl_reg_reg(cd, s1, s1);
2711 x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2712 store_reg_to_var_int(iptr->dst, d);
2715 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2716 /* val.i = constant */
2718 var_to_reg_int(s1, src, REG_ITMP1);
2719 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2721 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2723 M_INTMOVE(s1, REG_ITMP1);
2726 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2728 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2729 x86_64_testl_reg_reg(cd, s1, s1);
2730 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2731 store_reg_to_var_int(iptr->dst, d);
2734 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2735 /* val.i = constant */
2737 var_to_reg_int(s1, src, REG_ITMP1);
2738 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2740 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2742 M_INTMOVE(s1, REG_ITMP1);
2745 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2747 x86_64_movl_imm_reg(cd, s3, REG_ITMP2);
2748 x86_64_testl_reg_reg(cd, s1, s1);
2749 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2750 store_reg_to_var_int(iptr->dst, d);
2754 case ICMD_IRETURN: /* ..., retvalue ==> ... */
2758 var_to_reg_int(s1, src, REG_RESULT);
2759 M_INTMOVE(s1, REG_RESULT);
2761 #if defined(USE_THREADS)
2762 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2763 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2764 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, rd->maxmemuse * 8);
2765 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2766 x86_64_call_reg(cd, REG_ITMP1);
2767 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_RESULT);
2771 goto nowperformreturn;
2773 case ICMD_FRETURN: /* ..., retvalue ==> ... */
2776 var_to_reg_flt(s1, src, REG_FRESULT);
2777 M_FLTMOVE(s1, REG_FRESULT);
2779 #if defined(USE_THREADS)
2780 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2781 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2782 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, rd->maxmemuse * 8);
2783 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2784 x86_64_call_reg(cd, REG_ITMP1);
2785 x86_64_movq_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_FRESULT);
2789 goto nowperformreturn;
2791 case ICMD_RETURN: /* ... ==> ... */
2793 #if defined(USE_THREADS)
2794 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2795 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2796 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2797 x86_64_call_reg(cd, REG_ITMP1);
2805 p = parentargs_base;
2807 /* call trace function */
2809 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2811 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2812 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2814 x86_64_mov_imm_reg(cd, (s8) m, rd->argintregs[0]);
2815 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
2816 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2817 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2819 x86_64_mov_imm_reg(cd, (s8) builtin_displaymethodstop, REG_ITMP1);
2820 x86_64_call_reg(cd, REG_ITMP1);
2822 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2823 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2825 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2828 /* restore saved registers */
2829 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
2830 p--; x86_64_mov_membase_reg(cd, REG_SP, p * 8, rd->savintregs[i]);
2832 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
2833 p--; x86_64_movq_membase_reg(cd, REG_SP, p * 8, rd->savfltregs[i]);
2836 /* deallocate stack */
2837 if (parentargs_base) {
2838 x86_64_alu_imm_reg(cd, X86_64_ADD, parentargs_base * 8, REG_SP);
2847 case ICMD_TABLESWITCH: /* ..., index ==> ... */
2852 tptr = (void **) iptr->target;
2854 s4ptr = iptr->val.a;
2855 l = s4ptr[1]; /* low */
2856 i = s4ptr[2]; /* high */
2858 var_to_reg_int(s1, src, REG_ITMP1);
2859 M_INTMOVE(s1, REG_ITMP1);
2861 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
2866 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
2867 x86_64_jcc(cd, X86_64_CC_A, 0);
2869 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
2870 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2872 /* build jump table top down and use address of lowest entry */
2874 /* s4ptr += 3 + i; */
2878 /* dseg_addtarget(cd, BlockPtrOfPC(*--s4ptr)); */
2879 dseg_addtarget(cd, (basicblock *) tptr[0]);
2883 /* length of dataseg after last dseg_addtarget is used by load */
2885 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
2886 dseg_adddata(cd, cd->mcodeptr);
2887 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
2888 x86_64_jmp_reg(cd, REG_ITMP1);
2894 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
2896 s4 i, l, val, *s4ptr;
2899 tptr = (void **) iptr->target;
2901 s4ptr = iptr->val.a;
2902 l = s4ptr[0]; /* default */
2903 i = s4ptr[1]; /* count */
2905 MCODECHECK((i<<2)+8);
2906 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
2912 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
2913 x86_64_jcc(cd, X86_64_CC_E, 0);
2914 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[1]), cd->mcodeptr); */
2915 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2918 x86_64_jmp_imm(cd, 0);
2919 /* codegen_addreference(cd, BlockPtrOfPC(l), cd->mcodeptr); */
2921 tptr = (void **) iptr->target;
2922 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2929 case ICMD_BUILTIN3: /* ..., arg1, arg2, arg3 ==> ... */
2930 /* op1 = return type, val.a = function pointer*/
2934 case ICMD_BUILTIN2: /* ..., arg1, arg2 ==> ... */
2935 /* op1 = return type, val.a = function pointer*/
2939 case ICMD_BUILTIN1: /* ..., arg1 ==> ... */
2940 /* op1 = return type, val.a = function pointer*/
2944 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
2945 /* op1 = arg count, val.a = method pointer */
2947 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2948 /* op1 = arg count, val.a = method pointer */
2950 case ICMD_INVOKEVIRTUAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2951 /* op1 = arg count, val.a = method pointer */
2953 case ICMD_INVOKEINTERFACE:/*.., objectref, [arg1, [arg2 ...]] ==> ... */
2954 /* op1 = arg count, val.a = method pointer */
2964 MCODECHECK((s3 << 1) + 64);
2971 /* copy arguments to registers or stack location */
2972 for (; --s3 >= 0; src = src->prev) {
2973 IS_INT_LNG_TYPE(src->type) ? iarg++ : farg++;
2979 s2 = (iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0 + (farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0;
2981 for (; --s3 >= 0; src = src->prev) {
2982 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
2983 if (src->varkind == ARGVAR) {
2984 if (IS_INT_LNG_TYPE(src->type)) {
2985 if (iarg >= INT_ARG_CNT) {
2989 if (farg >= FLT_ARG_CNT) {
2996 if (IS_INT_LNG_TYPE(src->type)) {
2997 if (iarg < INT_ARG_CNT) {
2998 s1 = rd->argintregs[iarg];
2999 var_to_reg_int(d, src, s1);
3003 var_to_reg_int(d, src, REG_ITMP1);
3005 x86_64_mov_reg_membase(cd, d, REG_SP, s2 * 8);
3009 if (farg < FLT_ARG_CNT) {
3010 s1 = rd->argfltregs[farg];
3011 var_to_reg_flt(d, src, s1);
3015 var_to_reg_flt(d, src, REG_FTMP1);
3017 x86_64_movq_reg_membase(cd, d, REG_SP, s2 * 8);
3023 switch (iptr->opc) {
3031 x86_64_mov_imm_reg(cd, a, REG_ITMP1);
3032 x86_64_call_reg(cd, REG_ITMP1);
3035 case ICMD_INVOKESTATIC:
3037 a = (s8) lm->stubroutine;
3040 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3041 x86_64_call_reg(cd, REG_ITMP2);
3044 case ICMD_INVOKESPECIAL:
3046 a = (s8) lm->stubroutine;
3049 gen_nullptr_check(rd->argintregs[0]); /* first argument contains pointer */
3050 x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); /* access memory for hardware nullptr */
3051 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3052 x86_64_call_reg(cd, REG_ITMP2);
3055 case ICMD_INVOKEVIRTUAL:
3059 gen_nullptr_check(rd->argintregs[0]);
3060 x86_64_mov_membase_reg(cd, rd->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3061 x86_64_mov_membase32_reg(cd, REG_ITMP2, OFFSET(vftbl_t, table[0]) + sizeof(methodptr) * lm->vftblindex, REG_ITMP1);
3062 x86_64_call_reg(cd, REG_ITMP1);
3065 case ICMD_INVOKEINTERFACE:
3070 gen_nullptr_check(rd->argintregs[0]);
3071 x86_64_mov_membase_reg(cd, rd->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3072 x86_64_mov_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, interfacetable[0]) - sizeof(methodptr) * ci->index, REG_ITMP2);
3073 x86_64_mov_membase32_reg(cd, REG_ITMP2, sizeof(methodptr) * (lm - ci->methods), REG_ITMP1);
3074 x86_64_call_reg(cd, REG_ITMP1);
3079 error("Unkown ICMD-Command: %d", iptr->opc);
3082 /* d contains return type */
3084 if (d != TYPE_VOID) {
3085 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3086 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3087 M_INTMOVE(REG_RESULT, s1);
3088 store_reg_to_var_int(iptr->dst, s1);
3091 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3092 M_FLTMOVE(REG_FRESULT, s1);
3093 store_reg_to_var_flt(iptr->dst, s1);
3100 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3102 /* op1: 0 == array, 1 == class */
3103 /* val.a: (classinfo*) superclass */
3105 /* superclass is an interface:
3107 * return (sub != NULL) &&
3108 * (sub->vftbl->interfacetablelength > super->index) &&
3109 * (sub->vftbl->interfacetable[-super->index] != NULL);
3111 * superclass is a class:
3113 * return ((sub != NULL) && (0
3114 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3115 * super->vftbl->diffvall));
3119 classinfo *super = (classinfo*) iptr->val.a;
3121 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3122 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3125 var_to_reg_int(s1, src, REG_ITMP1);
3126 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3128 M_INTMOVE(s1, REG_ITMP1);
3131 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3132 if (iptr->op1) { /* class/interface */
3133 if (super->flags & ACC_INTERFACE) { /* interface */
3134 x86_64_test_reg_reg(cd, s1, s1);
3136 /* TODO: clean up this calculation */
3137 a = 3; /* mov_membase_reg */
3138 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3140 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3141 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3144 CALCIMMEDIATEBYTES(a, super->index);
3149 a += 3; /* mov_membase_reg */
3150 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3155 x86_64_jcc(cd, X86_64_CC_E, a);
3157 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3158 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength), REG_ITMP2);
3159 x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP2);
3160 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3162 /* TODO: clean up this calculation */
3164 a += 3; /* mov_membase_reg */
3165 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3170 x86_64_jcc(cd, X86_64_CC_LE, a);
3171 x86_64_mov_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP1);
3172 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3173 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3175 } else { /* class */
3176 x86_64_test_reg_reg(cd, s1, s1);
3178 /* TODO: clean up this calculation */
3179 a = 3; /* mov_membase_reg */
3180 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3182 a += 10; /* mov_imm_reg */
3184 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3185 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, baseval));
3187 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3188 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3190 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3191 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3198 x86_64_jcc(cd, X86_64_CC_E, a);
3200 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3201 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3202 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3203 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3205 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, baseval), REG_ITMP1);
3206 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP3);
3207 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3208 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3209 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3211 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP1);
3212 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3213 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
3214 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3218 panic("internal error: no inlined array instanceof");
3220 store_reg_to_var_int(iptr->dst, d);
3223 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3225 /* op1: 0 == array, 1 == class */
3226 /* val.a: (classinfo*) superclass */
3228 /* superclass is an interface:
3230 * OK if ((sub == NULL) ||
3231 * (sub->vftbl->interfacetablelength > super->index) &&
3232 * (sub->vftbl->interfacetable[-super->index] != NULL));
3234 * superclass is a class:
3236 * OK if ((sub == NULL) || (0
3237 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3238 * super->vftbl->diffvall));
3242 classinfo *super = (classinfo*) iptr->val.a;
3244 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3245 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3247 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3248 var_to_reg_int(s1, src, d);
3249 if (iptr->op1) { /* class/interface */
3250 if (super->flags & ACC_INTERFACE) { /* interface */
3251 x86_64_test_reg_reg(cd, s1, s1);
3253 /* TODO: clean up this calculation */
3254 a = 3; /* mov_membase_reg */
3255 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3257 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3258 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3261 CALCIMMEDIATEBYTES(a, super->index);
3266 a += 3; /* mov_membase_reg */
3267 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3272 x86_64_jcc(cd, X86_64_CC_E, a);
3274 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3275 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength), REG_ITMP2);
3276 x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP2);
3277 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3278 x86_64_jcc(cd, X86_64_CC_LE, 0);
3279 codegen_addxcastrefs(cd, cd->mcodeptr);
3280 x86_64_mov_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP2);
3281 x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3282 x86_64_jcc(cd, X86_64_CC_E, 0);
3283 codegen_addxcastrefs(cd, cd->mcodeptr);
3285 } else { /* class */
3286 x86_64_test_reg_reg(cd, s1, s1);
3288 /* TODO: clean up this calculation */
3289 a = 3; /* mov_membase_reg */
3290 CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3291 a += 10; /* mov_imm_reg */
3292 a += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3293 CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, baseval));
3295 if (d != REG_ITMP3) {
3296 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3297 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3298 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3299 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3303 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3304 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3306 a += 10; /* mov_imm_reg */
3307 a += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3308 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3314 x86_64_jcc(cd, X86_64_CC_E, a);
3316 x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3317 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3318 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3319 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3321 x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, baseval), REG_ITMP1);
3322 if (d != REG_ITMP3) {
3323 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP3);
3324 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3325 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3326 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3328 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP1);
3331 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP2);
3332 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
3333 x86_64_mov_imm_reg(cd, (s8) super->vftbl, REG_ITMP2);
3334 x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3335 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3336 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3339 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
3340 x86_64_jcc(cd, X86_64_CC_A, 0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3341 codegen_addxcastrefs(cd, cd->mcodeptr);
3345 panic("internal error: no inlined array checkcast");
3348 store_reg_to_var_int(iptr->dst, d);
3351 case ICMD_CHECKASIZE: /* ..., size ==> ..., size */
3353 if (src->flags & INMEMORY) {
3354 x86_64_alul_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
3357 x86_64_testl_reg_reg(cd, src->regoff, src->regoff);
3359 x86_64_jcc(cd, X86_64_CC_L, 0);
3360 codegen_addxcheckarefs(cd, cd->mcodeptr);
3363 case ICMD_CHECKEXCEPTION: /* ... ==> ... */
3365 x86_64_test_reg_reg(cd, REG_RESULT, REG_RESULT);
3366 x86_64_jcc(cd, X86_64_CC_E, 0);
3367 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3370 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3371 /* op1 = dimension, val.a = array descriptor */
3373 /* check for negative sizes and copy sizes to stack if necessary */
3375 MCODECHECK((iptr->op1 << 1) + 64);
3377 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3378 var_to_reg_int(s2, src, REG_ITMP1);
3379 x86_64_testl_reg_reg(cd, s2, s2);
3380 x86_64_jcc(cd, X86_64_CC_L, 0);
3381 codegen_addxcheckarefs(cd, cd->mcodeptr);
3383 /* copy sizes to stack (argument numbers >= INT_ARG_CNT) */
3385 if (src->varkind != ARGVAR) {
3386 x86_64_mov_reg_membase(cd, s2, REG_SP, (s1 + INT_ARG_CNT) * 8);
3390 /* a0 = dimension count */
3391 x86_64_mov_imm_reg(cd, iptr->op1, rd->argintregs[0]);
3393 /* a1 = arraydescriptor */
3394 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, rd->argintregs[1]);
3396 /* a2 = pointer to dimensions = stack pointer */
3397 x86_64_mov_reg_reg(cd, REG_SP, rd->argintregs[2]);
3399 x86_64_mov_imm_reg(cd, (s8) builtin_nmultianewarray, REG_ITMP1);
3400 x86_64_call_reg(cd, REG_ITMP1);
3402 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3403 M_INTMOVE(REG_RESULT, s1);
3404 store_reg_to_var_int(iptr->dst, s1);
3407 default: error("Unknown pseudo command: %d", iptr->opc);
3410 } /* for instruction */
3412 /* copy values to interface registers */
3414 src = bptr->outstack;
3415 len = bptr->outdepth;
3416 MCODECHECK(64 + len);
3419 if ((src->varkind != STACKVAR)) {
3421 if (IS_FLT_DBL_TYPE(s2)) {
3422 var_to_reg_flt(s1, src, REG_FTMP1);
3423 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3424 M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3427 x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3431 var_to_reg_int(s1, src, REG_ITMP1);
3432 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3433 M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3436 x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3442 } /* if (bptr -> flags >= BBREACHED) */
3443 } /* for basic block */
3447 /* generate bound check stubs */
3449 u1 *xcodeptr = NULL;
3452 for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3453 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3455 cd->mcodeptr - cd->mcodebase);
3459 /* move index register into REG_ITMP1 */
3460 x86_64_mov_reg_reg(cd, bref->reg, REG_ITMP1); /* 3 bytes */
3462 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3463 dseg_adddata(cd, cd->mcodeptr);
3464 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3465 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3467 if (xcodeptr != NULL) {
3468 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3471 xcodeptr = cd->mcodeptr;
3473 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3474 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3476 x86_64_mov_reg_reg(cd, REG_ITMP1, rd->argintregs[0]);
3477 x86_64_mov_imm_reg(cd, (s8) new_arrayindexoutofboundsexception, REG_ITMP3);
3478 x86_64_call_reg(cd, REG_ITMP3);
3480 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3481 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3483 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3484 x86_64_jmp_reg(cd, REG_ITMP3);
3488 /* generate negative array size check stubs */
3492 for (bref = cd->xcheckarefs; bref != NULL; bref = bref->next) {
3493 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3494 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3496 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3500 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3502 cd->mcodeptr - cd->mcodebase);
3506 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3507 dseg_adddata(cd, cd->mcodeptr);
3508 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3509 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3511 if (xcodeptr != NULL) {
3512 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3515 xcodeptr = cd->mcodeptr;
3517 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3518 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3520 x86_64_mov_imm_reg(cd, (s8) new_negativearraysizeexception, REG_ITMP3);
3521 x86_64_call_reg(cd, REG_ITMP3);
3523 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3524 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3526 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3527 x86_64_jmp_reg(cd, REG_ITMP3);
3531 /* generate cast check stubs */
3535 for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3536 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3537 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3539 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3543 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3545 cd->mcodeptr - cd->mcodebase);
3549 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3550 dseg_adddata(cd, cd->mcodeptr);
3551 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3552 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3554 if (xcodeptr != NULL) {
3555 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3558 xcodeptr = cd->mcodeptr;
3560 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3561 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3563 x86_64_mov_imm_reg(cd, (s8) new_classcastexception, REG_ITMP3);
3564 x86_64_call_reg(cd, REG_ITMP3);
3566 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3567 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3569 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3570 x86_64_jmp_reg(cd, REG_ITMP3);
3574 /* generate divide by zero check stubs */
3578 for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
3579 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3580 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3582 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3586 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3588 cd->mcodeptr - cd->mcodebase);
3592 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3593 dseg_adddata(cd, cd->mcodeptr);
3594 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3595 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3597 if (xcodeptr != NULL) {
3598 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3601 xcodeptr = cd->mcodeptr;
3603 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3604 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3606 x86_64_mov_imm_reg(cd, (u8) new_arithmeticexception, REG_ITMP3);
3607 x86_64_call_reg(cd, REG_ITMP3);
3609 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3610 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3612 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3613 x86_64_jmp_reg(cd, REG_ITMP3);
3617 /* generate exception check stubs */
3621 for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
3622 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3623 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3625 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3629 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3631 cd->mcodeptr - cd->mcodebase);
3635 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3636 dseg_adddata(cd, cd->mcodeptr);
3637 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3638 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3640 if (xcodeptr != NULL) {
3641 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3644 xcodeptr = cd->mcodeptr;
3646 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3647 x86_64_alu_imm_reg(cd, X86_64_SUB, 8, REG_SP);
3648 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0);
3649 x86_64_mov_imm_reg(cd, (u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3650 x86_64_call_reg(cd, REG_ITMP1);
3651 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
3652 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
3653 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
3654 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);
3655 x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
3657 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
3658 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP1_XPTR);
3659 x86_64_mov_imm_membase(cd, 0, REG_ITMP3, 0);
3662 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3663 x86_64_jmp_reg(cd, REG_ITMP3);
3667 /* generate null pointer check stubs */
3671 for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
3672 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3673 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3675 xcodeptr - cd->mcodebase - (10 + 10 + 3));
3679 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3681 cd->mcodeptr - cd->mcodebase);
3685 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3686 dseg_adddata(cd, cd->mcodeptr);
3687 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
3688 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
3690 if (xcodeptr != NULL) {
3691 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3694 xcodeptr = cd->mcodeptr;
3696 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3697 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3699 x86_64_mov_imm_reg(cd, (s8) new_nullpointerexception, REG_ITMP3);
3700 x86_64_call_reg(cd, REG_ITMP3);
3702 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3703 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3705 x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
3706 x86_64_jmp_reg(cd, REG_ITMP3);
3711 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
3715 /* function createcompilerstub *************************************************
3717 creates a stub routine which calls the compiler
3719 *******************************************************************************/
3721 #define COMPSTUBSIZE 23
/* createcompilerstub: build a tiny trampoline (COMPSTUBSIZE bytes) for a
 * not-yet-compiled method.  The emitted code loads the methodinfo pointer
 * into REG_ITMP1 and jumps to asm_call_jit_compiler, which compiles the
 * method on first invocation.  Ownership of the CNEW'ed stub memory passes
 * to the caller; release it with removecompilerstub().
 * NOTE(review): this copy of the file is missing lines (the opening brace,
 * the final `return s;`, `#endif`s) -- compare against the repository
 * original before editing.
 */
3723 u1 *createcompilerstub(methodinfo *m)
3725 u1 *s = CNEW(u1, COMPSTUBSIZE); /* memory to hold the stub */
/* Temporary codegen context so the x86_64_* emitters have somewhere to
 * write.  NOTE(review): codegen_setup() below is commented out, so cd is
 * presumably pointed at `s` by lines elided from this copy -- confirm. */
3726 codegendata *cd = NEW(codegendata);
3728 /* code/data memory setup is done inline here instead of via: */
3729 //codegen_setup(m, cd, NULL);
3732 /* code for the stub */
3733 x86_64_mov_imm_reg(cd, (s8) m, REG_ITMP1); /* pass method to compiler */
3734 x86_64_mov_imm_reg(cd, (s8) asm_call_jit_compiler, REG_ITMP3);/* load address */
3735 x86_64_jmp_reg(cd, REG_ITMP3); /* jump to compiler */
3737 #if defined(STATISTICS)
/* bookkeeping only: accumulate total bytes spent on compiler stubs */
3739 count_cstub_len += COMPSTUBSIZE;
3742 /* free the temporary codegen context; the stub code itself stays live */
3743 //codegen_close(m, cd);
3745 FREE(cd, codegendata);
3751 /* function removecompilerstub *************************************************
3753 deletes a compilerstub from memory (simply by freeing it)
3755 *******************************************************************************/
/* removecompilerstub: release the COMPSTUBSIZE bytes allocated by
 * createcompilerstub().  The stub must no longer be reachable (i.e. every
 * caller has been patched to the compiled code) before calling this. */
3757 void removecompilerstub(u1 *stub)
3759 CFREE(stub, COMPSTUBSIZE); /* size must match the CNEW in createcompilerstub */
3763 /* function: createnativestub **************************************************
3765 creates a stub routine which calls a native method
3767 *******************************************************************************/
3769 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3770 /* static java_objectheader **(*callgetexceptionptrptr)() = builtin_get_exceptionptrptr; */
3773 #define NATIVESTUBSIZE 420
/* createnativestub: build a stub (NATIVESTUBSIZE bytes) that bridges from
 * the JIT calling convention to the native (JNI-style) function f for
 * method m.  The emitted code:
 *   - for a static method of a not-yet-initialized class, first calls
 *     asm_check_clinit, which initializes the class and patches this code;
 *   - optionally traces the arguments via builtin_trace_args (the guarding
 *     `if (runverbose)` lines appear to be elided from this copy);
 *   - saves callee-saved XMM registers, shifts the integer argument
 *     registers right by one slot (two for static methods) to make room
 *     for the JNIEnv pointer (and the class pointer for static methods);
 *   - calls f, then checks the pending-exception pointer (_exceptionptr,
 *     or the per-thread pointer from builtin_get_exceptionptrptr under
 *     native threads) and, if set, forwards to asm_handle_nat_exception.
 * Returns the CNEW'ed stub; release with removenativestub().
 * NOTE(review): many lines (braces, `else`, `#endif`, `return s;`) are
 * missing from this copy -- compare against the repository original.
 */
3775 u1 *createnativestub(functionptr f, methodinfo *m)
3777 u1 *s = CNEW(u1, NATIVESTUBSIZE); /* memory to hold the stub */
3778 s4 stackframesize; /* size of stackframe if needed */
3779 codegendata *cd = NEW(codegendata);
3780 registerdata *rd = NEW(registerdata);
/* NOTE(review): `id` is allocated here but only cd and rd are FREE'd at
 * the end of the visible lines -- possible leak unless freed in elided
 * lines or retained by reg_setup(); verify. */
3781 t_inlining_globals *id = NEW(t_inlining_globals);
3783 /* setup registers before using it */
3784 inlining_setup(m, id);
3785 reg_setup(m, rd, id);
3789 descriptor2types(m); /* set paramcount and paramtypes */
3791 /* if function is static, check for initialized */
3793 if (m->flags & ACC_STATIC) {
3794 /* if class isn't yet initialized, do it */
3795 if (!m->class->initialized) {
3796 /* call helper function which patches this code */
3797 x86_64_mov_imm_reg(cd, (u8) m->class, REG_ITMP1);
3798 x86_64_mov_imm_reg(cd, (u8) asm_check_clinit, REG_ITMP2);
3799 x86_64_call_reg(cd, REG_ITMP2);
/* --- argument tracing: spill all argument registers, call
 * builtin_trace_args, then reload them.  The +1 slot holds m. --- */
3806 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
3808 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 1 * 8);
3809 x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 2 * 8);
3810 x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 3 * 8);
3811 x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 4 * 8);
3812 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 5 * 8);
3813 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 6 * 8);
3815 x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 7 * 8);
3816 x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 8 * 8);
3817 x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 9 * 8);
3818 x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 10 * 8);
3819 /* x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 11 * 8); */
3820 /* x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 12 * 8); */
3821 /* x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 13 * 8); */
3822 /* x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 14 * 8); */
3824 /* show integer hex code for float arguments */
/* copy each float argument's raw bits into the integer register slot the
 * tracer expects, shifting later integer args right to make room */
3825 for (p = 0, l = 0; p < m->paramcount; p++) {
3826 if (IS_FLT_DBL_TYPE(m->paramtypes[p])) {
3827 for (s1 = (m->paramcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : m->paramcount - 2; s1 >= p; s1--) {
3828 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
3831 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
3836 x86_64_mov_imm_reg(cd, (s8) m, REG_ITMP1);
3837 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, 0 * 8); /* slot 0 = method */
3838 x86_64_mov_imm_reg(cd, (s8) builtin_trace_args, REG_ITMP1);
3839 x86_64_call_reg(cd, REG_ITMP1);
/* reload the argument registers spilled above */
3841 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[0]);
3842 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[1]);
3843 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[2]);
3844 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[3]);
3845 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[4]);
3846 x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, rd->argintregs[5]);
3848 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[0]);
3849 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[1]);
3850 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[2]);
3851 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[3]);
3852 /* x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[4]); */
3853 /* x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[5]); */
3854 /* x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[6]); */
3855 /* x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, rd->argfltregs[7]); */
3857 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
/* --- end of tracing block --- */
3861 x86_64_alu_imm_reg(cd, X86_64_SUB, 7 * 8, REG_SP); /* keep stack 16-byte aligned */
3863 /* save callee saved float registers */
3864 x86_64_movq_reg_membase(cd, XMM15, REG_SP, 0 * 8);
3865 x86_64_movq_reg_membase(cd, XMM14, REG_SP, 1 * 8);
3866 x86_64_movq_reg_membase(cd, XMM13, REG_SP, 2 * 8);
3867 x86_64_movq_reg_membase(cd, XMM12, REG_SP, 3 * 8);
3868 x86_64_movq_reg_membase(cd, XMM11, REG_SP, 4 * 8);
3869 x86_64_movq_reg_membase(cd, XMM10, REG_SP, 5 * 8);
3872 /* save argument registers on stack -- if we have to */
/* needed when shifting args right (by 1, or 2 for static) would push the
 * last register argument(s) out of the register set */
3873 if ((m->flags & ACC_STATIC && m->paramcount > (INT_ARG_CNT - 2)) || m->paramcount > (INT_ARG_CNT - 1)) {
3875 s4 paramshiftcnt = (m->flags & ACC_STATIC) ? 2 : 1;
3876 s4 stackparamcnt = (m->paramcount > INT_ARG_CNT) ? m->paramcount - INT_ARG_CNT : 0;
3878 stackframesize = stackparamcnt + paramshiftcnt;
3880 /* keep stack 16-byte aligned */
/* NOTE(review): the statement bumping stackframesize when it is even is
 * elided from this copy (embedded numbering jumps 3881 -> 3884) */
3881 if (!(stackframesize & 0x1))
3884 x86_64_alu_imm_reg(cd, X86_64_SUB, stackframesize * 8, REG_SP);
3886 /* copy stack arguments into new stack frame -- if any */
3887 for (i = 0; i < stackparamcnt; i++) {
3888 x86_64_mov_membase_reg(cd, REG_SP, (stackparamcnt + 1 + i) * 8, REG_ITMP1);
3889 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
/* spill the register argument(s) displaced by the shift */
3892 if (m->flags & ACC_STATIC) {
3893 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 1 * 8);
3894 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 0 * 8);
3897 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 0 * 8);
3901 /* keep stack 16-byte aligned */
3902 x86_64_alu_imm_reg(cd, X86_64_SUB, 8, REG_SP);
/* shift integer argument registers to make room for env (and class) */
3906 if (m->flags & ACC_STATIC) {
3907 x86_64_mov_reg_reg(cd, rd->argintregs[3], rd->argintregs[5]);
3908 x86_64_mov_reg_reg(cd, rd->argintregs[2], rd->argintregs[4]);
3909 x86_64_mov_reg_reg(cd, rd->argintregs[1], rd->argintregs[3]);
3910 x86_64_mov_reg_reg(cd, rd->argintregs[0], rd->argintregs[2]);
3912 /* put class into second argument register */
3913 x86_64_mov_imm_reg(cd, (u8) m->class, rd->argintregs[1]);
3916 x86_64_mov_reg_reg(cd, rd->argintregs[4], rd->argintregs[5]);
3917 x86_64_mov_reg_reg(cd, rd->argintregs[3], rd->argintregs[4]);
3918 x86_64_mov_reg_reg(cd, rd->argintregs[2], rd->argintregs[3]);
3919 x86_64_mov_reg_reg(cd, rd->argintregs[1], rd->argintregs[2]);
3920 x86_64_mov_reg_reg(cd, rd->argintregs[0], rd->argintregs[1]);
3923 /* put env into first argument register */
3924 x86_64_mov_imm_reg(cd, (u8) &env, rd->argintregs[0]);
/* call the native function itself */
3926 x86_64_mov_imm_reg(cd, (u8) f, REG_ITMP1);
3927 x86_64_call_reg(cd, REG_ITMP1);
3929 /* remove stackframe if there is one */
3930 if (stackframesize) {
3931 x86_64_alu_imm_reg(cd, X86_64_ADD, stackframesize * 8, REG_SP);
/* --- exit tracing: report return value via builtin_displaymethodstop,
 * preserving REG_RESULT / REG_FRESULT around the call --- */
3935 x86_64_alu_imm_reg(cd, X86_64_SUB, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
3937 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
3938 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
/* NOTE(review): cast is (u8) here but (s8) at the tracing entry above --
 * harmless on x86_64, but inconsistent */
3940 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
3941 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
3942 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
3943 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
3945 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
3946 x86_64_call_reg(cd, REG_ITMP1);
3948 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
3949 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
3951 x86_64_alu_imm_reg(cd, X86_64_ADD, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
3955 /* restore callee saved registers */
3956 x86_64_movq_membase_reg(cd, REG_SP, 0 * 8, XMM15);
3957 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, XMM14);
3958 x86_64_movq_membase_reg(cd, REG_SP, 2 * 8, XMM13);
3959 x86_64_movq_membase_reg(cd, REG_SP, 3 * 8, XMM12);
3960 x86_64_movq_membase_reg(cd, REG_SP, 4 * 8, XMM11);
3961 x86_64_movq_membase_reg(cd, REG_SP, 5 * 8, XMM10);
3963 x86_64_alu_imm_reg(cd, X86_64_ADD, 7 * 8, REG_SP); /* keep stack 16-byte aligned */
/* --- pending-exception check: load the exception pointer into REG_ITMP3
 * (thread-local with native threads, global _exceptionptr otherwise) --- */
3966 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3967 x86_64_push_reg(cd, REG_RESULT); /* REG_RESULT is clobbered by the helper call */
3968 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
3969 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
3970 x86_64_call_reg(cd, REG_ITMP3);
3971 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
3972 x86_64_pop_reg(cd, REG_RESULT);
3974 x86_64_mov_imm_reg(cd, (s8) &_exceptionptr, REG_ITMP3);
3975 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP3);
3977 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
/* jcc offset 1 skips the plain `ret` that follows in elided lines when an
 * exception is pending -- presumably; confirm against original */
3978 x86_64_jcc(cd, X86_64_CC_NE, 1);
/* --- exception path: fetch+clear the pending exception, compute the
 * faulting PC from the return address, jump to the exception handler --- */
3982 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3983 x86_64_push_reg(cd, REG_ITMP3);
3984 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
3985 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
3986 x86_64_call_reg(cd, REG_ITMP3);
3987 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0); /* clear per-thread exception ptr */
3988 x86_64_pop_reg(cd, REG_ITMP1_XPTR);
3990 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
3991 x86_64_mov_imm_reg(cd, (s8) &_exceptionptr, REG_ITMP3);
3992 x86_64_alu_reg_reg(cd, X86_64_XOR, REG_ITMP2, REG_ITMP2);
3993 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_ITMP3, 0); /* clear exception pointer */
3996 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC); /* get return address from stack */
3997 x86_64_alu_imm_reg(cd, X86_64_SUB, 3, REG_ITMP2_XPC); /* callq */
3999 x86_64_mov_imm_reg(cd, (s8) asm_handle_nat_exception, REG_ITMP3);
4000 x86_64_jmp_reg(cd, REG_ITMP3);
/* one-shot debug print of the generated stub size (guard lines elided) */
4004 static int stubprinted;
/* NOTE(review): "%d" with a long argument is a format-specifier mismatch
 * (UB per C99 fprintf); should be "%ld" -- fix when editing the original */
4006 printf("stubsize: %d\n", ((long) cd->mcodeptr - (long) s));
4011 #if defined(STATISTICS)
/* bookkeeping only: accumulate total bytes spent on native stubs */
4013 count_nstub_len += NATIVESTUBSIZE;
/* free the temporary codegen/register data; stub code in `s` stays live */
4016 FREE(cd, codegendata);
4017 FREE(rd, registerdata);
4023 /* function: removenativestub **************************************************
4025 removes a previously created native-stub from memory
4027 *******************************************************************************/
/* removenativestub: release the NATIVESTUBSIZE bytes allocated by
 * createnativestub().  The stub must no longer be reachable before
 * calling this. */
4029 void removenativestub(u1 *stub)
4031 CFREE(stub, NATIVESTUBSIZE); /* size must match the CNEW in createnativestub */
4036 * These are local overrides for various environment variables in Emacs.
4037 * Please do not remove this and leave it at the end of the file, where
4038 * Emacs will automagically detect them.
4039 * ---------------------------------------------------------------------
4042 * indent-tabs-mode: t