1 /* vm/jit/x86_64/codegen.c - machine code generator for x86_64
3 Copyright (C) 1996-2005 R. Grafl, A. Krall, C. Kruegel, C. Oates,
4 R. Obermaisser, M. Platter, M. Probst, S. Ring, E. Steiner,
5 C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich, J. Wenninger,
6 Institut f. Computersprachen - TU Wien
8 This file is part of CACAO.
10 This program is free software; you can redistribute it and/or
11 modify it under the terms of the GNU General Public License as
12 published by the Free Software Foundation; either version 2, or (at
13 your option) any later version.
15 This program is distributed in the hope that it will be useful, but
16 WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
25 Contact: cacao@complang.tuwien.ac.at
27 Authors: Andreas Krall
30 $Id: codegen.c 2442 2005-05-11 12:05:08Z twisti $
40 #include "cacao/cacao.h"
41 #include "native/native.h"
42 #include "vm/global.h"
43 #include "vm/builtin.h"
44 #include "vm/loader.h"
45 #include "vm/tables.h"
46 #include "vm/jit/asmpart.h"
47 #include "vm/jit/jit.h"
48 #include "vm/jit/reg.h"
49 #include "vm/jit/parse.h"
50 #include "vm/jit/patcher.h"
51 #include "vm/jit/x86_64/arch.h"
52 #include "vm/jit/x86_64/codegen.h"
53 #include "vm/jit/x86_64/emitfuncs.h"
54 #include "vm/jit/x86_64/types.h"
55 #include "vm/jit/x86_64/asmoffsets.h"
56 #include "vm/jit/helper.h"
57 #include "vm/statistics.h"
/* register description - array ***********************************************/
61 /* #define REG_RES 0 reserved register for OS or code generator */
62 /* #define REG_RET 1 return value register */
63 /* #define REG_EXC 2 exception value register (only old jit) */
64 /* #define REG_SAV 3 (callee) saved register */
65 /* #define REG_TMP 4 scratch temporary register (caller saved) */
66 /* #define REG_ARG 5 argument register (caller saved) */
68 /* #define REG_END -1 last entry in tables */
/* nregdescint: allocator usage class for each x86_64 integer register,  */
/* in hardware encoding order (rax, rcx, rdx, rbx, rsp, rbp, rsi, rdi,   */
/* r8..r15). Classes are the REG_* constants documented above.           */
/* NOTE(review): the table's trailing rows / closing brace are not       */
/* visible in this chunk -- confirm against the full file.               */
static int nregdescint[] = {
    REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
    REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
/* nregdescfloat: allocator usage class for each SSE register            */
/* (xmm0..xmm15). xmm0-xmm7 carry arguments per the SysV x86_64 ABI.     */
/* NOTE(review): the closing brace / any trailing entries of this table  */
/* are not visible in this chunk -- confirm against the full file.       */
static int nregdescfloat[] = {
    REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG,
    REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
84 /* Include independent code generation stuff -- include after register */
85 /* descriptions to avoid extern definitions. */
87 #include "vm/jit/codegen.inc"
88 #include "vm/jit/reg.inc"
90 #include "vm/jit/lsra.inc"
94 #define JWNATIVEDEBUG(x) true
96 #define JWNATIVEDEBUG(x) x
98 void codegen_dummy_func() { log_text("codegen_dummy_func"); }
#if defined(USE_THREADS) && defined(NATIVE_THREADS)
/* thread_restartcriticalsection: if the PC saved in the signal context   */
/* lies inside a JIT critical section, rewrite the saved RIP so that the  */
/* section is restarted when the thread resumes.                          */
/* NOTE(review): the function's braces, the declaration of `critical`,    */
/* and presumably a NULL check on it are in lines missing from this       */
/* chunk -- confirm against the full file.                                */
void thread_restartcriticalsection(ucontext_t *uc)
	/* map the interrupted PC to its critical-section restart address */
	critical = thread_checkcritical((void *) uc->uc_mcontext.gregs[REG_RIP]);
	/* resume execution at the restart address instead of the old PC */
	uc->uc_mcontext.gregs[REG_RIP] = (u8) critical;
113 /* NullPointerException signal handler for hardware null pointer check */
/* catch_NullPointerException ************************************************

   SIGSEGV/SIGBUS handler: converts a hardware null-pointer fault raised by
   JIT-generated code into a Java NullPointerException by loading the
   exception object/class into the JIT's exception registers (rax =
   REG_ITMP1_XPTR, r10 = REG_ITMP2_XPC) and redirecting the saved RIP into
   the exception-handling assembler stub.

   NOTE(review): this chunk is missing lines -- the function braces, the
   sigset_t `nsig` declaration and its sigemptyset() call, and the
   preprocessor conditional that selects between the two restart sequences
   at the end. Confirm against the full file.

*******************************************************************************/

void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
	struct ucontext *_uc = (struct ucontext *) _p;
	struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
	struct sigaction act;
	java_objectheader *xptr;

	/* Reset signal handler - necessary for SysV, does no harm for BSD */

	act.sa_sigaction = catch_NullPointerException;    /* reinstall handler */
	act.sa_flags = SA_SIGINFO;
	sigaction(sig, &act, NULL);

	/* re-enable this signal so nested faults are still caught */
	sigaddset(&nsig, sig);
	sigprocmask(SIG_UNBLOCK, &nsig, NULL);            /* unblock signal    */

	/* allocate the Java exception object to hand to the JIT code */
	xptr = new_nullpointerexception();

	/* first variant: pre-built exception object, generic handler stub */
	sigctx->rax = (u8) xptr;                          /* REG_ITMP1_XPTR    */
	sigctx->r10 = sigctx->rip;                        /* REG_ITMP2_XPC     */
	sigctx->rip = (u8) asm_handle_exception;

	/* second variant (presumably the #else branch): pass the class name   */
	/* and let the stub construct and throw the exception itself           */
	sigctx->rax = (u8) string_java_lang_NullPointerException;
	sigctx->r10 = sigctx->rip;                        /* REG_ITMP2_XPC     */
	sigctx->rip = (u8) asm_throw_and_handle_exception;
150 /* ArithmeticException signal handler for hardware divide by zero check */
/* catch_ArithmeticException *************************************************

   SIGFPE handler: converts a hardware divide-by-zero fault raised by
   JIT-generated code into a Java ArithmeticException, using the same
   register protocol as the null-pointer handler (rax = REG_ITMP1_XPTR,
   r10 = REG_ITMP2_XPC) before redirecting RIP into the assembler stub.

   NOTE(review): this chunk is missing lines -- the function braces and
   the sigset_t `nsig` declaration/sigemptyset() call. Confirm against
   the full file.

*******************************************************************************/

void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
	struct ucontext *_uc = (struct ucontext *) _p;
	struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
	struct sigaction act;
	java_objectheader *xptr;

	/* Reset signal handler - necessary for SysV, does no harm for BSD */

	act.sa_sigaction = catch_ArithmeticException;     /* reinstall handler */
	act.sa_flags = SA_SIGINFO;
	sigaction(sig, &act, NULL);

	/* re-enable this signal so nested faults are still caught */
	sigaddset(&nsig, sig);
	sigprocmask(SIG_UNBLOCK, &nsig, NULL);            /* unblock signal    */

	/* allocate the Java exception object to hand to the JIT code */
	xptr = new_arithmeticexception();

	sigctx->rax = (u8) xptr;                          /* REG_ITMP1_XPTR    */
	sigctx->r10 = sigctx->rip;                        /* REG_ITMP2_XPC     */
	sigctx->rip = (u8) asm_throw_and_handle_hardware_arithmetic_exception;
/* init_exceptions ***********************************************************

   Installs the SA_SIGINFO signal handlers that turn hardware faults in
   JIT-generated code into Java exceptions: SIGSEGV and SIGBUS both map to
   catch_NullPointerException, SIGFPE to catch_ArithmeticException.

   NOTE(review): the function braces and any preprocessor guards around
   the individual sigaction() calls are in lines missing from this chunk
   -- confirm against the full file.

*******************************************************************************/

void init_exceptions(void)
	struct sigaction act;

	/* install signal handlers we need to convert to exceptions */
	sigemptyset(&act.sa_mask);

	/* hardware null-pointer check (faulting load/store) */
	act.sa_sigaction = catch_NullPointerException;
	act.sa_flags = SA_SIGINFO;
	sigaction(SIGSEGV, &act, NULL);

	/* some platforms deliver SIGBUS instead of SIGSEGV for the same fault */
	act.sa_sigaction = catch_NullPointerException;
	act.sa_flags = SA_SIGINFO;
	sigaction(SIGBUS, &act, NULL);

	/* hardware divide-by-zero check */
	act.sa_sigaction = catch_ArithmeticException;
	act.sa_flags = SA_SIGINFO;
	sigaction(SIGFPE, &act, NULL);
210 /* function gen_mcode **********************************************************
212 generates machine code
214 *******************************************************************************/
216 void codegen(methodinfo *m, codegendata *cd, registerdata *rd)
218 s4 len, s1, s2, s3, d;
234 /* space to save used callee saved registers */
236 savedregs_num += (rd->savintregcnt - rd->maxsavintreguse);
237 savedregs_num += (rd->savfltregcnt - rd->maxsavfltreguse);
239 parentargs_base = rd->maxmemuse + savedregs_num;
241 #if defined(USE_THREADS) /* space to save argument of monitor_enter */
243 if (checksync && (m->flags & ACC_SYNCHRONIZED))
248 /* Keep stack of non-leaf functions 16-byte aligned for calls into native */
249 /* code e.g. libc or jni (alignment problems with movaps). */
251 if (!m->isleafmethod || runverbose)
252 parentargs_base |= 0x1;
254 /* create method header */
256 (void) dseg_addaddress(cd, m); /* MethodPointer */
257 (void) dseg_adds4(cd, parentargs_base * 8); /* FrameSize */
259 #if defined(USE_THREADS)
261 /* IsSync contains the offset relative to the stack pointer for the
262 argument of monitor_exit used in the exception handler. Since the
263 offset could be zero and give a wrong meaning of the flag it is
267 if (checksync && (m->flags & ACC_SYNCHRONIZED))
268 (void) dseg_adds4(cd, (rd->maxmemuse + 1) * 8); /* IsSync */
273 (void) dseg_adds4(cd, 0); /* IsSync */
275 (void) dseg_adds4(cd, m->isleafmethod); /* IsLeaf */
276 (void) dseg_adds4(cd, rd->savintregcnt - rd->maxsavintreguse);/* IntSave */
277 (void) dseg_adds4(cd, rd->savfltregcnt - rd->maxsavfltreguse);/* FltSave */
279 (void) dseg_addlinenumbertablesize(cd);
281 (void) dseg_adds4(cd, cd->exceptiontablelength); /* ExTableSize */
283 /* create exception table */
285 for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
286 dseg_addtarget(cd, ex->start);
287 dseg_addtarget(cd, ex->end);
288 dseg_addtarget(cd, ex->handler);
289 (void) dseg_addaddress(cd, ex->catchtype.cls);
292 /* initialize mcode variables */
294 cd->mcodeptr = (u1 *) cd->mcodebase;
295 cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
296 MCODECHECK(128 + m->paramcount);
298 /* create stack frame (if necessary) */
300 if (parentargs_base) {
301 x86_64_alu_imm_reg(cd, X86_64_SUB, parentargs_base * 8, REG_SP);
304 /* save used callee saved registers */
307 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
308 p--; x86_64_mov_reg_membase(cd, rd->savintregs[i], REG_SP, p * 8);
310 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
311 p--; x86_64_movq_reg_membase(cd, rd->savfltregs[i], REG_SP, p * 8);
314 /* take arguments out of register or stack frame */
316 for (p = 0, l = 0, s1 = 0, s2 = 0; p < m->paramcount; p++) {
317 t = m->paramtypes[p];
318 var = &(rd->locals[l][t]);
320 if (IS_2_WORD_TYPE(t)) /* increment local counter for 2 word types */
323 if (IS_INT_LNG_TYPE(t))
329 if (IS_INT_LNG_TYPE(t)) { /* integer args */
330 if (s1 < INT_ARG_CNT) { /* register arguments */
331 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
332 M_INTMOVE(rd->argintregs[s1], var->regoff);
334 } else { /* reg arg -> spilled */
335 x86_64_mov_reg_membase(cd, rd->argintregs[s1], REG_SP, var->regoff * 8);
338 } else { /* stack arguments */
339 pa = s1 - INT_ARG_CNT;
340 if (s2 >= FLT_ARG_CNT) {
341 pa += s2 - FLT_ARG_CNT;
343 if (!(var->flags & INMEMORY)) { /* stack arg -> register */
344 x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff); /* + 8 for return address */
345 } else { /* stack arg -> spilled */
346 /* x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1); */ /* + 8 for return address */
347 /* x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, var->regoff * 8); */
348 var->regoff = parentargs_base + pa + 1;
353 } else { /* floating args */
354 if (s2 < FLT_ARG_CNT) { /* register arguments */
355 if (!(var->flags & INMEMORY)) { /* reg arg -> register */
356 M_FLTMOVE(rd->argfltregs[s2], var->regoff);
358 } else { /* reg arg -> spilled */
359 x86_64_movq_reg_membase(cd, rd->argfltregs[s2], REG_SP, var->regoff * 8);
362 } else { /* stack arguments */
363 pa = s2 - FLT_ARG_CNT;
364 if (s1 >= INT_ARG_CNT) {
365 pa += s1 - INT_ARG_CNT;
367 if (!(var->flags & INMEMORY)) { /* stack-arg -> register */
368 x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff);
371 /* x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1); */
372 /* x86_64_movq_reg_membase(cd, REG_FTMP1, REG_SP, var->regoff * 8); */
373 var->regoff = parentargs_base + pa + 1;
380 /* save monitorenter argument */
382 #if defined(USE_THREADS)
383 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
384 /* stack offset for monitor argument */
389 M_LSUB_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT) * 8, REG_SP);
391 for (p = 0; p < INT_ARG_CNT; p++)
392 M_LST(rd->argintregs[p], REG_SP, p * 8);
394 for (p = 0; p < FLT_ARG_CNT; p++)
395 M_DST(rd->argfltregs[p], REG_SP, (INT_ARG_CNT + p) * 8);
397 if (m->isleafmethod) {
398 for (p = 0; p < INT_TMP_CNT; p++)
399 M_LST(rd->tmpintregs[p], REG_SP, (INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
401 for (p = 0; p < FLT_TMP_CNT; p++)
402 M_DST(rd->tmpfltregs[p], REG_SP, (INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
405 s1 += INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT;
408 /* decide which monitor enter function to call */
410 if (m->flags & ACC_STATIC) {
411 x86_64_mov_imm_reg(cd, (ptrint) m->class, REG_ITMP1);
412 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, s1 * 8);
413 M_INTMOVE(REG_ITMP1, rd->argintregs[0]);
414 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_staticmonitorenter, REG_ITMP1);
415 x86_64_call_reg(cd, REG_ITMP1);
418 x86_64_test_reg_reg(cd, rd->argintregs[0], rd->argintregs[0]);
419 x86_64_jcc(cd, X86_64_CC_Z, 0);
420 codegen_addxnullrefs(cd, cd->mcodeptr);
421 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, s1 * 8);
422 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_monitorenter, REG_ITMP1);
423 x86_64_call_reg(cd, REG_ITMP1);
427 for (p = 0; p < INT_ARG_CNT; p++)
428 M_LLD(rd->argintregs[p], REG_SP, p * 8);
430 for (p = 0; p < FLT_ARG_CNT; p++)
431 M_DLD(rd->argfltregs[p], REG_SP, (INT_ARG_CNT + p) * 8);
434 if (m->isleafmethod) {
435 for (p = 0; p < INT_TMP_CNT; p++)
436 M_LLD(rd->tmpintregs[p], REG_SP, (INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
438 for (p = 0; p < FLT_TMP_CNT; p++)
439 M_DLD(rd->tmpfltregs[p], REG_SP, (INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
442 M_LADD_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT) * 8, REG_SP);
447 /* Copy argument registers to stack and call trace function with pointer */
448 /* to arguments on stack. */
450 if (runverbose || opt_stat) {
451 M_LSUB_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT + 1 + 1) * 8, REG_SP);
453 /* save integer argument registers */
455 for (p = 0; p < INT_ARG_CNT; p++)
456 M_LST(rd->argintregs[p], REG_SP, (1 + p) * 8);
458 /* save float argument registers */
460 for (p = 0; p < FLT_ARG_CNT; p++)
461 M_DST(rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
463 /* save temporary registers for leaf methods */
465 if (m->isleafmethod) {
466 for (p = 0; p < INT_TMP_CNT; p++)
467 M_LST(rd->tmpintregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
469 for (p = 0; p < FLT_TMP_CNT; p++)
470 M_DST(rd->tmpfltregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
474 /* show integer hex code for float arguments */
476 for (p = 0, l = 0; p < m->paramcount && p < INT_ARG_CNT; p++) {
477 t = m->paramtypes[p];
479 /* if the paramtype is a float, we have to right shift all */
480 /* following integer registers */
482 if (IS_FLT_DBL_TYPE(t)) {
483 for (s1 = INT_ARG_CNT - 2; s1 >= p; s1--) {
484 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
487 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
492 x86_64_mov_imm_reg(cd, (ptrint) m, REG_ITMP2);
493 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
494 x86_64_mov_imm_reg(cd, (ptrint) builtin_trace_args, REG_ITMP1);
495 x86_64_call_reg(cd, REG_ITMP1);
498 x86_64_mov_imm_reg(cd,(u8)compiledinvokation,REG_ITMP1);
499 x86_64_call_reg(cd,REG_ITMP1);
502 /* restore integer argument registers */
504 for (p = 0; p < INT_ARG_CNT; p++)
505 M_LLD(rd->argintregs[p], REG_SP, (1 + p) * 8);
507 /* restore float argument registers */
509 for (p = 0; p < FLT_ARG_CNT; p++)
510 M_DLD(rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
512 /* restore temporary registers for leaf methods */
514 if (m->isleafmethod) {
515 for (p = 0; p < INT_TMP_CNT; p++)
516 M_LLD(rd->tmpintregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + p) * 8);
518 for (p = 0; p < FLT_TMP_CNT; p++)
519 M_DLD(rd->tmpfltregs[p], REG_SP, (1 + INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + p) * 8);
522 M_LADD_IMM((INT_ARG_CNT + FLT_ARG_CNT + INT_TMP_CNT + FLT_TMP_CNT + 1 + 1) * 8, REG_SP);
527 /* end of header generation */
529 /* walk through all basic blocks */
530 for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
532 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
534 if (bptr->flags >= BBREACHED) {
536 /* branch resolving */
539 for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
540 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos,
545 /* copy interface registers to their destination */
549 MCODECHECK(64 + len);
553 while (src != NULL) {
555 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
556 if (bptr->type == BBTYPE_SBR) {
557 /* d = reg_of_var(rd, src, REG_ITMP1); */
558 if (!(src->flags & INMEMORY))
562 x86_64_pop_reg(cd, d);
563 store_reg_to_var_int(src, d);
565 } else if (bptr->type == BBTYPE_EXH) {
566 /* d = reg_of_var(rd, src, REG_ITMP1); */
567 if (!(src->flags & INMEMORY))
571 M_INTMOVE(REG_ITMP1, d);
572 store_reg_to_var_int(src, d);
581 while (src != NULL) {
583 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
584 if (bptr->type == BBTYPE_SBR) {
585 d = reg_of_var(rd, src, REG_ITMP1);
586 x86_64_pop_reg(cd, d);
587 store_reg_to_var_int(src, d);
589 } else if (bptr->type == BBTYPE_EXH) {
590 d = reg_of_var(rd, src, REG_ITMP1);
591 M_INTMOVE(REG_ITMP1, d);
592 store_reg_to_var_int(src, d);
596 d = reg_of_var(rd, src, REG_ITMP1);
597 if ((src->varkind != STACKVAR)) {
599 if (IS_FLT_DBL_TYPE(s2)) {
600 s1 = rd->interfaces[len][s2].regoff;
601 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
605 x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
607 store_reg_to_var_flt(src, d);
610 s1 = rd->interfaces[len][s2].regoff;
611 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
615 x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
617 store_reg_to_var_int(src, d);
626 /* walk through all instructions */
631 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
632 if (iptr->line != currentline) {
633 dseg_addlinenumber(cd, iptr->line, cd->mcodeptr);
634 /*printf("%s : %d\n",m->name->text,iptr->line);*/
635 currentline = iptr->line;
638 MCODECHECK(128); /* XXX are 128 bytes enough? */
641 case ICMD_INLINE_START: /* internal ICMDs */
642 case ICMD_INLINE_END:
645 case ICMD_NOP: /* ... ==> ... */
648 case ICMD_CHECKNULL: /* ..., objectref ==> ..., objectref */
649 if (src->flags & INMEMORY) {
650 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
653 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
655 x86_64_jcc(cd, X86_64_CC_Z, 0);
656 codegen_addxnullrefs(cd, cd->mcodeptr);
659 /* constant operations ************************************************/
661 case ICMD_ICONST: /* ... ==> ..., constant */
662 /* op1 = 0, val.i = constant */
664 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
665 if (iptr->val.i == 0) {
666 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
668 x86_64_movl_imm_reg(cd, iptr->val.i, d);
670 store_reg_to_var_int(iptr->dst, d);
673 case ICMD_ACONST: /* ... ==> ..., constant */
674 /* op1 = 0, val.a = constant */
676 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
677 if (iptr->val.a == 0) {
678 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
680 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, d);
682 store_reg_to_var_int(iptr->dst, d);
685 case ICMD_LCONST: /* ... ==> ..., constant */
686 /* op1 = 0, val.l = constant */
688 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
689 if (iptr->val.l == 0) {
690 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
692 x86_64_mov_imm_reg(cd, iptr->val.l, d);
694 store_reg_to_var_int(iptr->dst, d);
697 case ICMD_FCONST: /* ... ==> ..., constant */
698 /* op1 = 0, val.f = constant */
700 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
701 a = dseg_addfloat(cd, iptr->val.f);
702 x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + a, d);
703 store_reg_to_var_flt(iptr->dst, d);
706 case ICMD_DCONST: /* ... ==> ..., constant */
707 /* op1 = 0, val.d = constant */
709 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
710 a = dseg_adddouble(cd, iptr->val.d);
711 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, d);
712 store_reg_to_var_flt(iptr->dst, d);
716 /* load/store operations **********************************************/
718 case ICMD_ILOAD: /* ... ==> ..., content of local variable */
719 /* op1 = local variable */
721 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
722 if ((iptr->dst->varkind == LOCALVAR) &&
723 (iptr->dst->varnum == iptr->op1)) {
726 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
727 if (var->flags & INMEMORY) {
728 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
729 store_reg_to_var_int(iptr->dst, d);
732 if (iptr->dst->flags & INMEMORY) {
733 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
736 M_INTMOVE(var->regoff, d);
741 case ICMD_LLOAD: /* ... ==> ..., content of local variable */
742 case ICMD_ALOAD: /* op1 = local variable */
744 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
745 if ((iptr->dst->varkind == LOCALVAR) &&
746 (iptr->dst->varnum == iptr->op1)) {
749 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
750 if (var->flags & INMEMORY) {
751 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
752 store_reg_to_var_int(iptr->dst, d);
755 if (iptr->dst->flags & INMEMORY) {
756 x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
759 M_INTMOVE(var->regoff, d);
764 case ICMD_FLOAD: /* ... ==> ..., content of local variable */
765 case ICMD_DLOAD: /* op1 = local variable */
767 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
768 if ((iptr->dst->varkind == LOCALVAR) &&
769 (iptr->dst->varnum == iptr->op1)) {
772 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
773 if (var->flags & INMEMORY) {
774 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
775 store_reg_to_var_flt(iptr->dst, d);
778 if (iptr->dst->flags & INMEMORY) {
779 x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
782 M_FLTMOVE(var->regoff, d);
787 case ICMD_ISTORE: /* ..., value ==> ... */
788 case ICMD_LSTORE: /* op1 = local variable */
791 if ((src->varkind == LOCALVAR) &&
792 (src->varnum == iptr->op1)) {
795 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
796 if (var->flags & INMEMORY) {
797 var_to_reg_int(s1, src, REG_ITMP1);
798 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
801 var_to_reg_int(s1, src, var->regoff);
802 M_INTMOVE(s1, var->regoff);
806 case ICMD_FSTORE: /* ..., value ==> ... */
807 case ICMD_DSTORE: /* op1 = local variable */
809 if ((src->varkind == LOCALVAR) &&
810 (src->varnum == iptr->op1)) {
813 var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
814 if (var->flags & INMEMORY) {
815 var_to_reg_flt(s1, src, REG_FTMP1);
816 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
819 var_to_reg_flt(s1, src, var->regoff);
820 M_FLTMOVE(s1, var->regoff);
825 /* pop/dup/swap operations ********************************************/
827 /* attention: double and longs are only one entry in CACAO ICMDs */
829 case ICMD_POP: /* ..., value ==> ... */
830 case ICMD_POP2: /* ..., value, value ==> ... */
833 case ICMD_DUP: /* ..., a ==> ..., a, a */
834 M_COPY(src, iptr->dst);
837 case ICMD_DUP_X1: /* ..., a, b ==> ..., b, a, b */
839 M_COPY(src, iptr->dst);
840 M_COPY(src->prev, iptr->dst->prev);
841 M_COPY(iptr->dst, iptr->dst->prev->prev);
844 case ICMD_DUP_X2: /* ..., a, b, c ==> ..., c, a, b, c */
846 M_COPY(src, iptr->dst);
847 M_COPY(src->prev, iptr->dst->prev);
848 M_COPY(src->prev->prev, iptr->dst->prev->prev);
849 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
852 case ICMD_DUP2: /* ..., a, b ==> ..., a, b, a, b */
854 M_COPY(src, iptr->dst);
855 M_COPY(src->prev, iptr->dst->prev);
858 case ICMD_DUP2_X1: /* ..., a, b, c ==> ..., b, c, a, b, c */
860 M_COPY(src, iptr->dst);
861 M_COPY(src->prev, iptr->dst->prev);
862 M_COPY(src->prev->prev, iptr->dst->prev->prev);
863 M_COPY(iptr->dst, iptr->dst->prev->prev->prev);
864 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
867 case ICMD_DUP2_X2: /* ..., a, b, c, d ==> ..., c, d, a, b, c, d */
869 M_COPY(src, iptr->dst);
870 M_COPY(src->prev, iptr->dst->prev);
871 M_COPY(src->prev->prev, iptr->dst->prev->prev);
872 M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
873 M_COPY(iptr->dst, iptr->dst->prev->prev->prev->prev);
874 M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev->prev);
877 case ICMD_SWAP: /* ..., a, b ==> ..., b, a */
879 M_COPY(src, iptr->dst->prev);
880 M_COPY(src->prev, iptr->dst);
884 /* integer operations *************************************************/
886 case ICMD_INEG: /* ..., value ==> ..., - value */
888 d = reg_of_var(rd, iptr->dst, REG_NULL);
889 if (iptr->dst->flags & INMEMORY) {
890 if (src->flags & INMEMORY) {
891 if (src->regoff == iptr->dst->regoff) {
892 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
895 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
896 x86_64_negl_reg(cd, REG_ITMP1);
897 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
901 x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
902 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
906 if (src->flags & INMEMORY) {
907 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
908 x86_64_negl_reg(cd, d);
911 M_INTMOVE(src->regoff, iptr->dst->regoff);
912 x86_64_negl_reg(cd, iptr->dst->regoff);
917 case ICMD_LNEG: /* ..., value ==> ..., - value */
919 d = reg_of_var(rd, iptr->dst, REG_NULL);
920 if (iptr->dst->flags & INMEMORY) {
921 if (src->flags & INMEMORY) {
922 if (src->regoff == iptr->dst->regoff) {
923 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
926 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
927 x86_64_neg_reg(cd, REG_ITMP1);
928 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
932 x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
933 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
937 if (src->flags & INMEMORY) {
938 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
939 x86_64_neg_reg(cd, iptr->dst->regoff);
942 M_INTMOVE(src->regoff, iptr->dst->regoff);
943 x86_64_neg_reg(cd, iptr->dst->regoff);
948 case ICMD_I2L: /* ..., value ==> ..., value */
950 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
951 if (src->flags & INMEMORY) {
952 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
955 x86_64_movslq_reg_reg(cd, src->regoff, d);
957 store_reg_to_var_int(iptr->dst, d);
960 case ICMD_L2I: /* ..., value ==> ..., value */
962 var_to_reg_int(s1, src, REG_ITMP1);
963 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
965 store_reg_to_var_int(iptr->dst, d);
968 case ICMD_INT2BYTE: /* ..., value ==> ..., value */
970 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
971 if (src->flags & INMEMORY) {
972 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
975 x86_64_movsbq_reg_reg(cd, src->regoff, d);
977 store_reg_to_var_int(iptr->dst, d);
980 case ICMD_INT2CHAR: /* ..., value ==> ..., value */
982 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
983 if (src->flags & INMEMORY) {
984 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
987 x86_64_movzwq_reg_reg(cd, src->regoff, d);
989 store_reg_to_var_int(iptr->dst, d);
992 case ICMD_INT2SHORT: /* ..., value ==> ..., value */
994 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
995 if (src->flags & INMEMORY) {
996 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
999 x86_64_movswq_reg_reg(cd, src->regoff, d);
1001 store_reg_to_var_int(iptr->dst, d);
1005 case ICMD_IADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1007 d = reg_of_var(rd, iptr->dst, REG_NULL);
1008 x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
1011 case ICMD_IADDCONST: /* ..., value ==> ..., value + constant */
1012 /* val.i = constant */
1014 d = reg_of_var(rd, iptr->dst, REG_NULL);
1015 x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
1018 case ICMD_LADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1020 d = reg_of_var(rd, iptr->dst, REG_NULL);
1021 x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
1024 case ICMD_LADDCONST: /* ..., value ==> ..., value + constant */
1025 /* val.l = constant */
1027 d = reg_of_var(rd, iptr->dst, REG_NULL);
1028 x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
1031 case ICMD_ISUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1033 d = reg_of_var(rd, iptr->dst, REG_NULL);
1034 if (iptr->dst->flags & INMEMORY) {
1035 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1036 if (src->prev->regoff == iptr->dst->regoff) {
1037 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1038 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1041 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1042 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1043 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1046 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1047 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1048 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1049 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1051 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1052 if (src->prev->regoff == iptr->dst->regoff) {
1053 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1056 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1057 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1058 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1062 x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1063 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1067 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1068 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1069 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1071 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1072 M_INTMOVE(src->prev->regoff, d);
1073 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1075 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1076 /* workaround for reg alloc */
1077 if (src->regoff == iptr->dst->regoff) {
1078 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1079 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1080 M_INTMOVE(REG_ITMP1, d);
1083 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1084 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
1088 /* workaround for reg alloc */
1089 if (src->regoff == iptr->dst->regoff) {
1090 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1091 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1092 M_INTMOVE(REG_ITMP1, d);
1095 M_INTMOVE(src->prev->regoff, d);
1096 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
1102 case ICMD_ISUBCONST: /* ..., value ==> ..., value + constant */
1103 /* val.i = constant */
1105 d = reg_of_var(rd, iptr->dst, REG_NULL);
1106 x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
1109 case ICMD_LSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1111 d = reg_of_var(rd, iptr->dst, REG_NULL);
1112 if (iptr->dst->flags & INMEMORY) {
1113 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1114 if (src->prev->regoff == iptr->dst->regoff) {
1115 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1116 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1119 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1120 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1121 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1124 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1125 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1126 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1127 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1129 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1130 if (src->prev->regoff == iptr->dst->regoff) {
1131 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1134 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1135 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1136 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1140 x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1141 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1145 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1146 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1147 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1149 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1150 M_INTMOVE(src->prev->regoff, d);
1151 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1153 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1154 /* workaround for reg alloc */
1155 if (src->regoff == iptr->dst->regoff) {
1156 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1157 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1158 M_INTMOVE(REG_ITMP1, d);
1161 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1162 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1166 /* workaround for reg alloc */
1167 if (src->regoff == iptr->dst->regoff) {
1168 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1169 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1170 M_INTMOVE(REG_ITMP1, d);
1173 M_INTMOVE(src->prev->regoff, d);
1174 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1180 case ICMD_LSUBCONST: /* ..., value ==> ..., value - constant */
1181 /* val.l = constant */
1183 d = reg_of_var(rd, iptr->dst, REG_NULL);
1184 x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1187 case ICMD_IMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1189 d = reg_of_var(rd, iptr->dst, REG_NULL);
1190 if (iptr->dst->flags & INMEMORY) {
1191 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1192 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1193 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1194 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1196 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1197 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1198 x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1199 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1201 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1202 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1203 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1204 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1207 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1208 x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1209 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1213 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1214 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1215 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1217 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1218 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1219 x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1221 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1222 M_INTMOVE(src->regoff, iptr->dst->regoff);
1223 x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1226 if (src->regoff == iptr->dst->regoff) {
1227 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1230 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1231 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1237 case ICMD_IMULCONST: /* ..., value ==> ..., value * constant */
1238 /* val.i = constant */
1240 d = reg_of_var(rd, iptr->dst, REG_NULL);
1241 if (iptr->dst->flags & INMEMORY) {
1242 if (src->flags & INMEMORY) {
1243 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1244 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1247 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1248 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1252 if (src->flags & INMEMORY) {
1253 x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1256 if (iptr->val.i == 2) {
1257 M_INTMOVE(src->regoff, iptr->dst->regoff);
1258 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1261 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff); /* 3 cycles */
1267 case ICMD_LMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1269 d = reg_of_var(rd, iptr->dst, REG_NULL);
1270 if (iptr->dst->flags & INMEMORY) {
1271 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1272 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1273 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1274 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1276 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1277 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1278 x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1279 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1281 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1282 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1283 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1284 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1287 x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1288 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1289 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1293 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1294 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1295 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1297 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1298 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1299 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1301 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1302 M_INTMOVE(src->regoff, iptr->dst->regoff);
1303 x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1306 if (src->regoff == iptr->dst->regoff) {
1307 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1310 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1311 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
1317 case ICMD_LMULCONST: /* ..., value ==> ..., value * constant */
1318 /* val.l = constant */
1320 d = reg_of_var(rd, iptr->dst, REG_NULL);
1321 if (iptr->dst->flags & INMEMORY) {
1322 if (src->flags & INMEMORY) {
1323 if (IS_IMM32(iptr->val.l)) {
1324 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1327 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1328 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1330 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1333 if (IS_IMM32(iptr->val.l)) {
1334 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1337 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1338 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1340 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1344 if (src->flags & INMEMORY) {
1345 if (IS_IMM32(iptr->val.l)) {
1346 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1349 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1350 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1354 /* should match in many cases */
1355 if (iptr->val.l == 2) {
1356 M_INTMOVE(src->regoff, iptr->dst->regoff);
1357 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1360 if (IS_IMM32(iptr->val.l)) {
1361 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff); /* 4 cycles */
1364 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1365 M_INTMOVE(src->regoff, iptr->dst->regoff);
1366 x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
1373 case ICMD_IDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1375 d = reg_of_var(rd, iptr->dst, REG_NULL);
1376 if (src->prev->flags & INMEMORY) {
1377 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1380 M_INTMOVE(src->prev->regoff, RAX);
1383 if (src->flags & INMEMORY) {
1384 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1387 M_INTMOVE(src->regoff, REG_ITMP3);
1391 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1392 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1393 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1394 x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3); /* 6 bytes */
1396 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1398 x86_64_idivl_reg(cd, REG_ITMP3);
1400 if (iptr->dst->flags & INMEMORY) {
1401 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1402 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1405 M_INTMOVE(RAX, iptr->dst->regoff);
1407 if (iptr->dst->regoff != RDX) {
1408 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1413 case ICMD_IREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1414 d = reg_of_var(rd, iptr->dst, REG_NULL);
1415 if (src->prev->flags & INMEMORY) {
1416 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1419 M_INTMOVE(src->prev->regoff, RAX);
1422 if (src->flags & INMEMORY) {
1423 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1426 M_INTMOVE(src->regoff, REG_ITMP3);
1430 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1432 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX); /* check as described in jvm spec */
1433 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1436 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1437 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1438 x86_64_jcc(cd, X86_64_CC_E, 1 + 3); /* 6 bytes */
1441 x86_64_idivl_reg(cd, REG_ITMP3);
1443 if (iptr->dst->flags & INMEMORY) {
1444 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1445 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1448 M_INTMOVE(RDX, iptr->dst->regoff);
1450 if (iptr->dst->regoff != RDX) {
1451 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1456 case ICMD_IDIVPOW2: /* ..., value ==> ..., value >> constant */
1457 /* val.i = constant */
1459 var_to_reg_int(s1, src, REG_ITMP1);
1460 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1461 M_INTMOVE(s1, REG_ITMP1);
1462 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1463 x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1464 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1465 x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1466 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1467 store_reg_to_var_int(iptr->dst, d);
1470 case ICMD_IREMPOW2: /* ..., value ==> ..., value % constant */
1471 /* val.i = constant */
1473 var_to_reg_int(s1, src, REG_ITMP1);
1474 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1475 M_INTMOVE(s1, REG_ITMP1);
1476 x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1477 x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1478 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1479 x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1480 x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1481 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1482 store_reg_to_var_int(iptr->dst, d);
1486 case ICMD_LDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1488 d = reg_of_var(rd, iptr->dst, REG_NULL);
1489 if (src->prev->flags & INMEMORY) {
1490 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1493 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1496 if (src->flags & INMEMORY) {
1497 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1500 M_INTMOVE(src->regoff, REG_ITMP3);
1504 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1505 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1506 x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1507 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1508 x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3); /* 6 bytes */
1510 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1512 x86_64_idiv_reg(cd, REG_ITMP3);
1514 if (iptr->dst->flags & INMEMORY) {
1515 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1516 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1519 M_INTMOVE(RAX, iptr->dst->regoff);
1521 if (iptr->dst->regoff != RDX) {
1522 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1527 case ICMD_LREM: /* ..., val1, val2 ==> ..., val1 % val2 */
1529 d = reg_of_var(rd, iptr->dst, REG_NULL);
1530 if (src->prev->flags & INMEMORY) {
1531 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1534 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1537 if (src->flags & INMEMORY) {
1538 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1541 M_INTMOVE(src->regoff, REG_ITMP3);
1545 x86_64_mov_reg_reg(cd, RDX, REG_ITMP2); /* save %rdx, cause it's an argument register */
1547 x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2); /* check as described in jvm spec */
1548 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1549 x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1552 x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX); /* 2 bytes */
1553 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3); /* 4 bytes */
1554 x86_64_jcc(cd, X86_64_CC_E, 2 + 3); /* 6 bytes */
1557 x86_64_idiv_reg(cd, REG_ITMP3);
1559 if (iptr->dst->flags & INMEMORY) {
1560 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1561 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1564 M_INTMOVE(RDX, iptr->dst->regoff);
1566 if (iptr->dst->regoff != RDX) {
1567 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX); /* restore %rdx */
1572 case ICMD_LDIVPOW2: /* ..., value ==> ..., value >> constant */
1573 /* val.i = constant */
1575 var_to_reg_int(s1, src, REG_ITMP1);
1576 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1577 M_INTMOVE(s1, REG_ITMP1);
1578 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1579 x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1580 x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1581 x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1582 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1583 store_reg_to_var_int(iptr->dst, d);
1586 case ICMD_LREMPOW2: /* ..., value ==> ..., value % constant */
1587 /* val.l = constant */
1589 var_to_reg_int(s1, src, REG_ITMP1);
1590 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1591 M_INTMOVE(s1, REG_ITMP1);
1592 x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1593 x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1594 x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1595 x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1596 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1597 x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1598 store_reg_to_var_int(iptr->dst, d);
1601 case ICMD_ISHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1603 d = reg_of_var(rd, iptr->dst, REG_NULL);
1604 x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1607 case ICMD_ISHLCONST: /* ..., value ==> ..., value << constant */
1608 /* val.i = constant */
1610 d = reg_of_var(rd, iptr->dst, REG_NULL);
1611 x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1614 case ICMD_ISHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1616 d = reg_of_var(rd, iptr->dst, REG_NULL);
1617 x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1620 case ICMD_ISHRCONST: /* ..., value ==> ..., value >> constant */
1621 /* val.i = constant */
1623 d = reg_of_var(rd, iptr->dst, REG_NULL);
1624 x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1627 case ICMD_IUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1629 d = reg_of_var(rd, iptr->dst, REG_NULL);
1630 x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1633 case ICMD_IUSHRCONST: /* ..., value ==> ..., value >>> constant */
1634 /* val.i = constant */
1636 d = reg_of_var(rd, iptr->dst, REG_NULL);
1637 x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1640 case ICMD_LSHL: /* ..., val1, val2 ==> ..., val1 << val2 */
1642 d = reg_of_var(rd, iptr->dst, REG_NULL);
1643 x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1646 case ICMD_LSHLCONST: /* ..., value ==> ..., value << constant */
1647 /* val.i = constant */
1649 d = reg_of_var(rd, iptr->dst, REG_NULL);
1650 x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1653 case ICMD_LSHR: /* ..., val1, val2 ==> ..., val1 >> val2 */
1655 d = reg_of_var(rd, iptr->dst, REG_NULL);
1656 x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1659 case ICMD_LSHRCONST: /* ..., value ==> ..., value >> constant */
1660 /* val.i = constant */
1662 d = reg_of_var(rd, iptr->dst, REG_NULL);
1663 x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1666 case ICMD_LUSHR: /* ..., val1, val2 ==> ..., val1 >>> val2 */
1668 d = reg_of_var(rd, iptr->dst, REG_NULL);
1669 x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1672 case ICMD_LUSHRCONST: /* ..., value ==> ..., value >>> constant */
1673 /* val.l = constant */
1675 d = reg_of_var(rd, iptr->dst, REG_NULL);
1676 x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1679 case ICMD_IAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1681 d = reg_of_var(rd, iptr->dst, REG_NULL);
1682 x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1685 case ICMD_IANDCONST: /* ..., value ==> ..., value & constant */
1686 /* val.i = constant */
1688 d = reg_of_var(rd, iptr->dst, REG_NULL);
1689 x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1692 case ICMD_LAND: /* ..., val1, val2 ==> ..., val1 & val2 */
1694 d = reg_of_var(rd, iptr->dst, REG_NULL);
1695 x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1698 case ICMD_LANDCONST: /* ..., value ==> ..., value & constant */
1699 /* val.l = constant */
1701 d = reg_of_var(rd, iptr->dst, REG_NULL);
1702 x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1705 case ICMD_IOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1707 d = reg_of_var(rd, iptr->dst, REG_NULL);
1708 x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1711 case ICMD_IORCONST: /* ..., value ==> ..., value | constant */
1712 /* val.i = constant */
1714 d = reg_of_var(rd, iptr->dst, REG_NULL);
1715 x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1718 case ICMD_LOR: /* ..., val1, val2 ==> ..., val1 | val2 */
1720 d = reg_of_var(rd, iptr->dst, REG_NULL);
1721 x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1724 case ICMD_LORCONST: /* ..., value ==> ..., value | constant */
1725 /* val.l = constant */
1727 d = reg_of_var(rd, iptr->dst, REG_NULL);
1728 x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1731 case ICMD_IXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1733 d = reg_of_var(rd, iptr->dst, REG_NULL);
1734 x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1737 case ICMD_IXORCONST: /* ..., value ==> ..., value ^ constant */
1738 /* val.i = constant */
1740 d = reg_of_var(rd, iptr->dst, REG_NULL);
1741 x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1744 case ICMD_LXOR: /* ..., val1, val2 ==> ..., val1 ^ val2 */
1746 d = reg_of_var(rd, iptr->dst, REG_NULL);
1747 x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1750 case ICMD_LXORCONST: /* ..., value ==> ..., value ^ constant */
1751 /* val.l = constant */
1753 d = reg_of_var(rd, iptr->dst, REG_NULL);
1754 x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
1758 case ICMD_IINC: /* ..., value ==> ..., value + constant */
1759 /* op1 = variable, val.i = constant */
1761 /* using inc and dec is definitely faster than add -- tested */
1764 var = &(rd->locals[iptr->op1][TYPE_INT]);
1766 if (var->flags & INMEMORY) {
1767 if (iptr->val.i == 1) {
1768 x86_64_incl_membase(cd, REG_SP, d * 8);
1770 } else if (iptr->val.i == -1) {
1771 x86_64_decl_membase(cd, REG_SP, d * 8);
1774 x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1778 if (iptr->val.i == 1) {
1779 x86_64_incl_reg(cd, d);
1781 } else if (iptr->val.i == -1) {
1782 x86_64_decl_reg(cd, d);
1785 x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1791 /* floating operations ************************************************/
1793 case ICMD_FNEG: /* ..., value ==> ..., - value */
1795 var_to_reg_flt(s1, src, REG_FTMP1);
1796 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1797 a = dseg_adds4(cd, 0x80000000);
1799 x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1800 x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1801 store_reg_to_var_flt(iptr->dst, d);
1804 case ICMD_DNEG: /* ..., value ==> ..., - value */
1806 var_to_reg_flt(s1, src, REG_FTMP1);
1807 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1808 a = dseg_adds8(cd, 0x8000000000000000);
1810 x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1811 x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1812 store_reg_to_var_flt(iptr->dst, d);
1815 case ICMD_FADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1817 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1818 var_to_reg_flt(s2, src, REG_FTMP2);
1819 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1821 x86_64_addss_reg_reg(cd, s2, d);
1822 } else if (s2 == d) {
1823 x86_64_addss_reg_reg(cd, s1, d);
1826 x86_64_addss_reg_reg(cd, s2, d);
1828 store_reg_to_var_flt(iptr->dst, d);
1831 case ICMD_DADD: /* ..., val1, val2 ==> ..., val1 + val2 */
1833 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1834 var_to_reg_flt(s2, src, REG_FTMP2);
1835 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1837 x86_64_addsd_reg_reg(cd, s2, d);
1838 } else if (s2 == d) {
1839 x86_64_addsd_reg_reg(cd, s1, d);
1842 x86_64_addsd_reg_reg(cd, s2, d);
1844 store_reg_to_var_flt(iptr->dst, d);
1847 case ICMD_FSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1849 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1850 var_to_reg_flt(s2, src, REG_FTMP2);
1851 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1853 M_FLTMOVE(s2, REG_FTMP2);
1857 x86_64_subss_reg_reg(cd, s2, d);
1858 store_reg_to_var_flt(iptr->dst, d);
1861 case ICMD_DSUB: /* ..., val1, val2 ==> ..., val1 - val2 */
1863 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1864 var_to_reg_flt(s2, src, REG_FTMP2);
1865 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1867 M_FLTMOVE(s2, REG_FTMP2);
1871 x86_64_subsd_reg_reg(cd, s2, d);
1872 store_reg_to_var_flt(iptr->dst, d);
1875 case ICMD_FMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1877 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1878 var_to_reg_flt(s2, src, REG_FTMP2);
1879 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1881 x86_64_mulss_reg_reg(cd, s2, d);
1882 } else if (s2 == d) {
1883 x86_64_mulss_reg_reg(cd, s1, d);
1886 x86_64_mulss_reg_reg(cd, s2, d);
1888 store_reg_to_var_flt(iptr->dst, d);
1891 case ICMD_DMUL: /* ..., val1, val2 ==> ..., val1 * val2 */
1893 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1894 var_to_reg_flt(s2, src, REG_FTMP2);
1895 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1897 x86_64_mulsd_reg_reg(cd, s2, d);
1898 } else if (s2 == d) {
1899 x86_64_mulsd_reg_reg(cd, s1, d);
1902 x86_64_mulsd_reg_reg(cd, s2, d);
1904 store_reg_to_var_flt(iptr->dst, d);
1907 case ICMD_FDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1909 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1910 var_to_reg_flt(s2, src, REG_FTMP2);
1911 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1913 M_FLTMOVE(s2, REG_FTMP2);
1917 x86_64_divss_reg_reg(cd, s2, d);
1918 store_reg_to_var_flt(iptr->dst, d);
1921 case ICMD_DDIV: /* ..., val1, val2 ==> ..., val1 / val2 */
1923 var_to_reg_flt(s1, src->prev, REG_FTMP1);
1924 var_to_reg_flt(s2, src, REG_FTMP2);
1925 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1927 M_FLTMOVE(s2, REG_FTMP2);
1931 x86_64_divsd_reg_reg(cd, s2, d);
1932 store_reg_to_var_flt(iptr->dst, d);
1935 case ICMD_I2F: /* ..., value ==> ..., (float) value */
1937 var_to_reg_int(s1, src, REG_ITMP1);
1938 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1939 x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1940 store_reg_to_var_flt(iptr->dst, d);
1943 case ICMD_I2D: /* ..., value ==> ..., (double) value */
1945 var_to_reg_int(s1, src, REG_ITMP1);
1946 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1947 x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1948 store_reg_to_var_flt(iptr->dst, d);
1951 case ICMD_L2F: /* ..., value ==> ..., (float) value */
1953 var_to_reg_int(s1, src, REG_ITMP1);
1954 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1955 x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1956 store_reg_to_var_flt(iptr->dst, d);
1959 case ICMD_L2D: /* ..., value ==> ..., (double) value */
1961 var_to_reg_int(s1, src, REG_ITMP1);
1962 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1963 x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1964 store_reg_to_var_flt(iptr->dst, d);
1967 case ICMD_F2I: /* ..., value ==> ..., (int) value */
1969 var_to_reg_flt(s1, src, REG_FTMP1);
1970 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1971 x86_64_cvttss2si_reg_reg(cd, s1, d);
1972 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1973 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1974 x86_64_jcc(cd, X86_64_CC_NE, a);
1975 M_FLTMOVE(s1, REG_FTMP1);
1976 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1977 x86_64_call_reg(cd, REG_ITMP2);
1978 M_INTMOVE(REG_RESULT, d);
1979 store_reg_to_var_int(iptr->dst, d);
1982 case ICMD_D2I: /* ..., value ==> ..., (int) value */
1984 var_to_reg_flt(s1, src, REG_FTMP1);
1985 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1986 x86_64_cvttsd2si_reg_reg(cd, s1, d);
1987 x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d); /* corner cases */
1988 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1989 x86_64_jcc(cd, X86_64_CC_NE, a);
1990 M_FLTMOVE(s1, REG_FTMP1);
1991 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1992 x86_64_call_reg(cd, REG_ITMP2);
1993 M_INTMOVE(REG_RESULT, d);
1994 store_reg_to_var_int(iptr->dst, d);
1997 case ICMD_F2L: /* ..., value ==> ..., (long) value */
1999 var_to_reg_flt(s1, src, REG_FTMP1);
2000 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2001 x86_64_cvttss2siq_reg_reg(cd, s1, d);
2002 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
2003 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
2004 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2005 x86_64_jcc(cd, X86_64_CC_NE, a);
2006 M_FLTMOVE(s1, REG_FTMP1);
2007 x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
2008 x86_64_call_reg(cd, REG_ITMP2);
2009 M_INTMOVE(REG_RESULT, d);
2010 store_reg_to_var_int(iptr->dst, d);
2013 case ICMD_D2L: /* ..., value ==> ..., (long) value */
2015 var_to_reg_flt(s1, src, REG_FTMP1);
2016 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2017 x86_64_cvttsd2siq_reg_reg(cd, s1, d);
2018 x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
2019 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d); /* corner cases */
2020 a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2021 x86_64_jcc(cd, X86_64_CC_NE, a);
2022 M_FLTMOVE(s1, REG_FTMP1);
2023 x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
2024 x86_64_call_reg(cd, REG_ITMP2);
2025 M_INTMOVE(REG_RESULT, d);
2026 store_reg_to_var_int(iptr->dst, d);
2029 case ICMD_F2D: /* ..., value ==> ..., (double) value */
2031 var_to_reg_flt(s1, src, REG_FTMP1);
2032 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2033 x86_64_cvtss2sd_reg_reg(cd, s1, d);
2034 store_reg_to_var_flt(iptr->dst, d);
2037 case ICMD_D2F: /* ..., value ==> ..., (float) value */
2039 var_to_reg_flt(s1, src, REG_FTMP1);
2040 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2041 x86_64_cvtsd2ss_reg_reg(cd, s1, d);
2042 store_reg_to_var_flt(iptr->dst, d);
2045 case ICMD_FCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
2046 /* == => 0, < => 1, > => -1 */
2048 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2049 var_to_reg_flt(s2, src, REG_FTMP2);
2050 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2051 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
2052 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
2053 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
2054 x86_64_ucomiss_reg_reg(cd, s1, s2);
2055 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
2056 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
2057 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
2058 store_reg_to_var_int(iptr->dst, d);
2061 case ICMD_FCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
2062 /* == => 0, < => 1, > => -1 */
2064 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2065 var_to_reg_flt(s2, src, REG_FTMP2);
2066 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2067 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
2068 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
2069 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
2070 x86_64_ucomiss_reg_reg(cd, s1, s2);
2071 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
2072 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
2073 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
2074 store_reg_to_var_int(iptr->dst, d);
2077 case ICMD_DCMPL: /* ..., val1, val2 ==> ..., val1 fcmpl val2 */
2078 /* == => 0, < => 1, > => -1 */
2080 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2081 var_to_reg_flt(s2, src, REG_FTMP2);
2082 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2083 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
2084 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
2085 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
2086 x86_64_ucomisd_reg_reg(cd, s1, s2);
2087 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
2088 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
2089 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d); /* treat unordered as GT */
2090 store_reg_to_var_int(iptr->dst, d);
2093 case ICMD_DCMPG: /* ..., val1, val2 ==> ..., val1 fcmpg val2 */
2094 /* == => 0, < => 1, > => -1 */
2096 var_to_reg_flt(s1, src->prev, REG_FTMP1);
2097 var_to_reg_flt(s2, src, REG_FTMP2);
2098 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2099 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
2100 x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
2101 x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
2102 x86_64_ucomisd_reg_reg(cd, s1, s2);
2103 x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
2104 x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
2105 x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d); /* treat unordered as LT */
2106 store_reg_to_var_int(iptr->dst, d);
2110 /* memory operations **************************************************/
2112 case ICMD_ARRAYLENGTH: /* ..., arrayref ==> ..., (int) length */
2114 var_to_reg_int(s1, src, REG_ITMP1);
2115 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2116 gen_nullptr_check(s1);
2117 x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
2118 store_reg_to_var_int(iptr->dst, d);
2121 case ICMD_AALOAD: /* ..., arrayref, index ==> ..., value */
2123 var_to_reg_int(s1, src->prev, REG_ITMP1);
2124 var_to_reg_int(s2, src, REG_ITMP2);
2125 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2126 if (iptr->op1 == 0) {
2127 gen_nullptr_check(s1);
2130 x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2131 store_reg_to_var_int(iptr->dst, d);
/* Array element loads: push arrayref[index]. Each case brings the array
   reference (s1) and index (s2) into integer temporaries, emits a null-
   pointer check only when iptr->op1 == 0, then loads the element with a
   scaled memindex addressing mode. The scale argument encodes the element
   size: 3 = 8 bytes (long/double/ref), 2 = 4 bytes (int/float),
   1 = 2 bytes (char/short), 0 = 1 byte.
   NOTE(review): the array bounds-check and break lines between these
   cases are not visible in this extract — confirm against the full file. */
2134 case ICMD_LALOAD: /* ..., arrayref, index ==> ..., value */
2136 var_to_reg_int(s1, src->prev, REG_ITMP1);
2137 var_to_reg_int(s2, src, REG_ITMP2);
2138 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2139 if (iptr->op1 == 0) {
2140 gen_nullptr_check(s1);
2143 x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2144 store_reg_to_var_int(iptr->dst, d);
2147 case ICMD_IALOAD: /* ..., arrayref, index ==> ..., value */
2149 var_to_reg_int(s1, src->prev, REG_ITMP1);
2150 var_to_reg_int(s2, src, REG_ITMP2);
2151 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2152 if (iptr->op1 == 0) {
2153 gen_nullptr_check(s1);
2156 x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2157 store_reg_to_var_int(iptr->dst, d);
2160 case ICMD_FALOAD: /* ..., arrayref, index ==> ..., value */
2162 var_to_reg_int(s1, src->prev, REG_ITMP1);
2163 var_to_reg_int(s2, src, REG_ITMP2);
/* Float/double results go to an XMM temporary (REG_FTMP3), not an
   integer one. */
2164 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2165 if (iptr->op1 == 0) {
2166 gen_nullptr_check(s1);
2169 x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2170 store_reg_to_var_flt(iptr->dst, d);
2173 case ICMD_DALOAD: /* ..., arrayref, index ==> ..., value */
2175 var_to_reg_int(s1, src->prev, REG_ITMP1);
2176 var_to_reg_int(s2, src, REG_ITMP2);
2177 d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2178 if (iptr->op1 == 0) {
2179 gen_nullptr_check(s1);
2182 x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2183 store_reg_to_var_flt(iptr->dst, d);
2186 case ICMD_CALOAD: /* ..., arrayref, index ==> ..., value */
2188 var_to_reg_int(s1, src->prev, REG_ITMP1);
2189 var_to_reg_int(s2, src, REG_ITMP2);
2190 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2191 if (iptr->op1 == 0) {
2192 gen_nullptr_check(s1);
/* movzwq: Java char is unsigned 16-bit, so zero-extend to 64 bits. */
2195 x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2196 store_reg_to_var_int(iptr->dst, d);
2199 case ICMD_SALOAD: /* ..., arrayref, index ==> ..., value */
2201 var_to_reg_int(s1, src->prev, REG_ITMP1);
2202 var_to_reg_int(s2, src, REG_ITMP2);
2203 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2204 if (iptr->op1 == 0) {
2205 gen_nullptr_check(s1);
/* movswq: Java short is signed 16-bit, so sign-extend to 64 bits. */
2208 x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2209 store_reg_to_var_int(iptr->dst, d);
2212 case ICMD_BALOAD: /* ..., arrayref, index ==> ..., value */
2214 var_to_reg_int(s1, src->prev, REG_ITMP1);
2215 var_to_reg_int(s2, src, REG_ITMP2);
2216 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2217 if (iptr->op1 == 0) {
2218 gen_nullptr_check(s1);
/* movsbq: Java byte is signed 8-bit, sign-extend to 64 bits. */
2221 x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2222 store_reg_to_var_int(iptr->dst, d);
/* Array element stores: arrayref[index] = value. Mirror image of the
   load cases above: arrayref (s1) and index (s2) come from deeper stack
   slots (src->prev->prev / src->prev), the value (s3) from the top.
   Null-pointer check again only when iptr->op1 == 0; store width is
   selected by the mov variant and the scale factor.
   NOTE(review): no array-store type check is visible for AASTORE in this
   extract — presumably handled elsewhere or elided; confirm. */
2226 case ICMD_AASTORE: /* ..., arrayref, index, value ==> ... */
2228 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2229 var_to_reg_int(s2, src->prev, REG_ITMP2);
2230 if (iptr->op1 == 0) {
2231 gen_nullptr_check(s1);
2234 var_to_reg_int(s3, src, REG_ITMP3);
2235 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2238 case ICMD_LASTORE: /* ..., arrayref, index, value ==> ... */
2240 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2241 var_to_reg_int(s2, src->prev, REG_ITMP2);
2242 if (iptr->op1 == 0) {
2243 gen_nullptr_check(s1);
2246 var_to_reg_int(s3, src, REG_ITMP3);
2247 x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2250 case ICMD_IASTORE: /* ..., arrayref, index, value ==> ... */
2252 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2253 var_to_reg_int(s2, src->prev, REG_ITMP2);
2254 if (iptr->op1 == 0) {
2255 gen_nullptr_check(s1);
2258 var_to_reg_int(s3, src, REG_ITMP3);
2259 x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2262 case ICMD_FASTORE: /* ..., arrayref, index, value ==> ... */
2264 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2265 var_to_reg_int(s2, src->prev, REG_ITMP2);
2266 if (iptr->op1 == 0) {
2267 gen_nullptr_check(s1);
/* Float values are fetched into an XMM temporary before the store. */
2270 var_to_reg_flt(s3, src, REG_FTMP3);
2271 x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2274 case ICMD_DASTORE: /* ..., arrayref, index, value ==> ... */
2276 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2277 var_to_reg_int(s2, src->prev, REG_ITMP2);
2278 if (iptr->op1 == 0) {
2279 gen_nullptr_check(s1);
2282 var_to_reg_flt(s3, src, REG_FTMP3);
2283 x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2286 case ICMD_CASTORE: /* ..., arrayref, index, value ==> ... */
2288 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2289 var_to_reg_int(s2, src->prev, REG_ITMP2);
2290 if (iptr->op1 == 0) {
2291 gen_nullptr_check(s1);
2294 var_to_reg_int(s3, src, REG_ITMP3);
/* char and short both store 16 bits (movw); signedness only matters on
   load, not store. */
2295 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2298 case ICMD_SASTORE: /* ..., arrayref, index, value ==> ... */
2300 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2301 var_to_reg_int(s2, src->prev, REG_ITMP2);
2302 if (iptr->op1 == 0) {
2303 gen_nullptr_check(s1);
2306 var_to_reg_int(s3, src, REG_ITMP3);
2307 x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2310 case ICMD_BASTORE: /* ..., arrayref, index, value ==> ... */
2312 var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2313 var_to_reg_int(s2, src->prev, REG_ITMP2);
2314 if (iptr->op1 == 0) {
2315 gen_nullptr_check(s1);
2318 var_to_reg_int(s3, src, REG_ITMP3);
2319 x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
/* Array stores of constant values: arrayref[index] = imm. Same shape as
   the ICMD_xASTORE cases but the value comes from iptr->val instead of a
   stack slot, so only arrayref (s1) and index (s2) are loaded. */
2322 case ICMD_IASTORECONST: /* ..., arrayref, index ==> ... */
2324 var_to_reg_int(s1, src->prev, REG_ITMP1);
2325 var_to_reg_int(s2, src, REG_ITMP2);
2326 if (iptr->op1 == 0) {
2327 gen_nullptr_check(s1);
2330 x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2333 case ICMD_LASTORECONST: /* ..., arrayref, index ==> ... */
2335 var_to_reg_int(s1, src->prev, REG_ITMP1);
2336 var_to_reg_int(s2, src, REG_ITMP2);
2337 if (iptr->op1 == 0) {
2338 gen_nullptr_check(s1);
/* x86-64 has no 64-bit immediate store: a long constant that fits in a
   sign-extended imm32 is stored in one instruction; otherwise it is
   written as two 32-bit halves (low word at the element, high word at
   element offset + 4). */
2342 if (IS_IMM32(iptr->val.l)) {
2343 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2346 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2347 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2351 case ICMD_AASTORECONST: /* ..., arrayref, index ==> ... */
2353 var_to_reg_int(s1, src->prev, REG_ITMP1);
2354 var_to_reg_int(s2, src, REG_ITMP2);
2355 if (iptr->op1 == 0) {
2356 gen_nullptr_check(s1);
/* Only the null reference is stored as an immediate (0); no store check
   is needed for null. */
2359 x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2362 case ICMD_BASTORECONST: /* ..., arrayref, index ==> ... */
2364 var_to_reg_int(s1, src->prev, REG_ITMP1);
2365 var_to_reg_int(s2, src, REG_ITMP2);
2366 if (iptr->op1 == 0) {
2367 gen_nullptr_check(s1);
2370 x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2373 case ICMD_CASTORECONST: /* ..., arrayref, index ==> ... */
2375 var_to_reg_int(s1, src->prev, REG_ITMP1);
2376 var_to_reg_int(s2, src, REG_ITMP2);
2377 if (iptr->op1 == 0) {
2378 gen_nullptr_check(s1);
2381 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2384 case ICMD_SASTORECONST: /* ..., arrayref, index ==> ... */
2386 var_to_reg_int(s1, src->prev, REG_ITMP1);
2387 var_to_reg_int(s2, src, REG_ITMP2);
2388 if (iptr->op1 == 0) {
2389 gen_nullptr_check(s1);
2392 x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
/* Static field access. Two situations per case:
   - field unresolved (no fieldinfo yet): register a PATCHER_get_putstatic
     patch reference so the runtime resolves the field at the patch site;
     NOPs are emitted so disassembly stays aligned when requested.
   - field resolved but class not initialized: register a PATCHER_clinit
     reference so <clinit> runs before first access.
   In both cases the field's value address is placed in the data segment
   and loaded RIP-relative into REG_ITMP2 (the +7 accounts for the length
   of the mov instruction being emitted — displacement is relative to the
   *next* instruction). The per-comment claim that this beats a 10-byte
   mov imm64 is the original author's; kept as-is. */
2396 case ICMD_GETSTATIC: /* ... ==> ..., value */
2397 /* op1 = type, val.a = field address */
2400 codegen_addpatchref(cd, cd->mcodeptr,
2401 PATCHER_get_putstatic,
2402 (unresolved_field *) iptr->target);
2404 if (showdisassemble) {
2405 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2411 fieldinfo *fi = iptr->val.a;
2413 if (!fi->class->initialized) {
2414 codegen_addpatchref(cd, cd->mcodeptr,
2415 PATCHER_clinit, fi->class);
2417 if (showdisassemble) {
2418 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2422 a = (ptrint) &(fi->value);
2425 /* This approach is much faster than moving the field address */
2426 /* inline into a register. */
2427 a = dseg_addaddress(cd, a);
2428 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP2);
/* Dispatch on the field's JavaVM type: 32-bit int, 64-bit long/ref,
   float, double. */
2429 switch (iptr->op1) {
2431 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2432 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2433 store_reg_to_var_int(iptr->dst, d);
2437 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2438 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2439 store_reg_to_var_int(iptr->dst, d);
2442 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2443 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2444 store_reg_to_var_flt(iptr->dst, d);
2447 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2448 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2449 store_reg_to_var_flt(iptr->dst, d);
/* PUTSTATIC: identical resolution/clinit machinery, but the stack value
   (s2) is stored through REG_ITMP2 instead of loaded. */
2454 case ICMD_PUTSTATIC: /* ..., value ==> ... */
2455 /* op1 = type, val.a = field address */
2458 codegen_addpatchref(cd, cd->mcodeptr,
2459 PATCHER_get_putstatic,
2460 (unresolved_field *) iptr->target);
2462 if (showdisassemble) {
2463 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2469 fieldinfo *fi = iptr->val.a;
2471 if (!fi->class->initialized) {
2472 codegen_addpatchref(cd, cd->mcodeptr,
2473 PATCHER_clinit, fi->class);
2475 if (showdisassemble) {
2476 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2480 a = (ptrint) &(fi->value);
2483 /* This approach is much faster than moving the field address */
2484 /* inline into a register. */
2485 a = dseg_addaddress(cd, a);
2486 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP2);
2487 switch (iptr->op1) {
2489 var_to_reg_int(s2, src, REG_ITMP1);
2490 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2494 var_to_reg_int(s2, src, REG_ITMP1);
2495 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2498 var_to_reg_flt(s2, src, REG_FTMP1);
2499 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2502 var_to_reg_flt(s2, src, REG_FTMP1);
2503 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
/* Store a constant to a static field. The constant lives in the current
   instruction; the field descriptor lives in the following NOP
   instruction (iptr[1]), hence the iptr[1].val.a / iptr[1].target reads.
   Resolution and <clinit> patching follow the GETSTATIC pattern; the
   field address is again loaded RIP-relative from the data segment. */
2508 case ICMD_PUTSTATICCONST: /* ... ==> ... */
2509 /* val = value (in current instruction) */
2510 /* op1 = type, val.a = field address (in */
2511 /* following NOP) */
2513 if (!iptr[1].val.a) {
2514 codegen_addpatchref(cd, cd->mcodeptr,
2515 PATCHER_get_putstatic,
2516 (unresolved_field *) iptr[1].target);
2518 if (showdisassemble) {
2519 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2525 fieldinfo *fi = iptr[1].val.a;
2527 if (!fi->class->initialized) {
2528 codegen_addpatchref(cd, cd->mcodeptr,
2529 PATCHER_clinit, fi->class);
2531 if (showdisassemble) {
2532 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2536 a = (ptrint) &(fi->value);
2539 /* This approach is much faster than moving the field address */
2540 /* inline into a register. */
2541 a = dseg_addaddress(cd, a);
2542 x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP1);
2543 switch (iptr->op1) {
2546 x86_64_movl_imm_membase(cd, iptr->val.i, REG_ITMP1, 0);
/* 64-bit constants: single store when the value fits a sign-extended
   imm32, otherwise split into low/high 32-bit halves at offsets 0/4. */
2551 if (IS_IMM32(iptr->val.l)) {
2552 x86_64_mov_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2554 x86_64_movl_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2555 x86_64_movl_imm_membase(cd, iptr->val.l >> 32, REG_ITMP1, 4);
/* Instance field access. The object reference (s1) is null-checked
   unconditionally. If the field is unresolved, a PATCHER_get_putfield
   reference is registered and the offset `a` is patched later; the
   *_membase32 emitters are used here, which — presumably to keep the
   displacement a fixed 32-bit field the patcher can overwrite in place —
   always emit a 4-byte displacement (TODO confirm against emitfuncs). */
2561 case ICMD_GETFIELD: /* ... ==> ..., value */
2562 /* op1 = type, val.i = field offset */
2564 var_to_reg_int(s1, src, REG_ITMP1);
2565 gen_nullptr_check(s1);
2568 codegen_addpatchref(cd, cd->mcodeptr,
2569 PATCHER_get_putfield,
2570 (unresolved_field *) iptr->target);
2572 if (showdisassemble) {
2573 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2579 a = ((fieldinfo *) (iptr->val.a))->offset;
2582 switch (iptr->op1) {
2584 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2585 x86_64_movl_membase32_reg(cd, s1, a, d);
2586 store_reg_to_var_int(iptr->dst, d);
2590 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2591 x86_64_mov_membase32_reg(cd, s1, a, d);
2592 store_reg_to_var_int(iptr->dst, d);
2595 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2596 x86_64_movss_membase32_reg(cd, s1, a, d);
2597 store_reg_to_var_flt(iptr->dst, d);
2600 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2601 x86_64_movsd_membase32_reg(cd, s1, a, d);
2602 store_reg_to_var_flt(iptr->dst, d);
/* PUTFIELD: object ref from src->prev, value from src; the value's
   register class (int vs float) is chosen before the patch point. */
2607 case ICMD_PUTFIELD: /* ..., objectref, value ==> ... */
2608 /* op1 = type, val.i = field offset */
2610 var_to_reg_int(s1, src->prev, REG_ITMP1);
2611 gen_nullptr_check(s1);
2612 if (IS_INT_LNG_TYPE(iptr->op1)) {
2613 var_to_reg_int(s2, src, REG_ITMP2);
2615 var_to_reg_flt(s2, src, REG_FTMP2);
2619 codegen_addpatchref(cd, cd->mcodeptr,
2620 PATCHER_get_putfield,
2621 (unresolved_field *) iptr->target);
2623 if (showdisassemble) {
2624 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2630 a = ((fieldinfo *) (iptr->val.a))->offset;
2633 switch (iptr->op1) {
2635 x86_64_movl_reg_membase32(cd, s2, s1, a);
2639 x86_64_mov_reg_membase32(cd, s2, s1, a);
2642 x86_64_movss_reg_membase32(cd, s2, s1, a);
2645 x86_64_movsd_reg_membase32(cd, s2, s1, a);
/* Store a constant into an instance field. As with PUTSTATICCONST, the
   field descriptor lives in the following NOP (iptr[1]); the constant is
   in this instruction. Uses PATCHER_putfieldconst for unresolved fields. */
2650 case ICMD_PUTFIELDCONST: /* ..., objectref, value ==> ... */
2651 /* val = value (in current instruction) */
2652 /* op1 = type, val.a = field address (in */
2653 /* following NOP) */
2655 var_to_reg_int(s1, src, REG_ITMP1);
2656 gen_nullptr_check(s1);
2658 if (!iptr[1].val.a) {
2659 codegen_addpatchref(cd, cd->mcodeptr,
2660 PATCHER_putfieldconst,
2661 (unresolved_field *) iptr[1].target);
2663 if (showdisassemble) {
2664 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
2670 a = ((fieldinfo *) (iptr[1].val.a))->offset;
2673 switch (iptr->op1) {
2676 x86_64_movl_imm_membase32(cd, iptr->val.i, s1, a);
2681 /* We can only optimize the move, if the class is resolved. */
2682 /* Otherwise we don't know what to patch. */
/* Resolved + imm32-representable long: single 64-bit store; otherwise
   two 32-bit halves at a and a+4 (the split is patchable: both stores
   have fixed-size 32-bit displacements). */
2683 if (iptr[1].val.a && IS_IMM32(iptr->val.l)) {
2684 x86_64_mov_imm_membase32(cd, iptr->val.l, s1, a);
2686 x86_64_movl_imm_membase32(cd, iptr->val.l, s1, a);
2687 x86_64_movl_imm_membase32(cd, iptr->val.l >> 32, s1, a + 4);
2694 /* branch operations **************************************************/
/* ATHROW: move the exception object into the designated exception-
   pointer register, then use a call-to-next-instruction + pop trick to
   materialize the faulting PC into REG_ITMP2_XPC before jumping to the
   common assembler exception handler. */
2696 case ICMD_ATHROW: /* ..., objectref ==> ... (, objectref) */
2698 var_to_reg_int(s1, src, REG_ITMP1);
2699 M_INTMOVE(s1, REG_ITMP1_XPTR);
2701 x86_64_call_imm(cd, 0); /* passing exception pointer */
2702 x86_64_pop_reg(cd, REG_ITMP2_XPC);
2704 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
2705 x86_64_jmp_reg(cd, REG_ITMP3);
/* GOTO / JSR: emit the jump/call with a 0 displacement and register a
   forward reference so the target is filled in once its block address
   is known. */
2708 case ICMD_GOTO: /* ... ==> ... */
2709 /* op1 = target JavaVM pc */
2711 x86_64_jmp_imm(cd, 0);
2712 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2715 case ICMD_JSR: /* ... ==> ... */
2716 /* op1 = target JavaVM pc */
2718 x86_64_call_imm(cd, 0);
2719 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
/* RET: indirect jump through the return address stored in the local
   variable slot of type TYPE_ADR. */
2722 case ICMD_RET: /* ... ==> ... */
2723 /* op1 = local variable */
2725 var = &(rd->locals[iptr->op1][TYPE_ADR]);
2726 var_to_reg_int(s1, var, REG_ITMP1);
2727 x86_64_jmp_reg(cd, s1);
/* IFNULL / IFNONNULL: compare against zero either in the stack slot
   (INMEMORY) or via test reg,reg, then conditional jump with a forward
   reference. */
2730 case ICMD_IFNULL: /* ..., value ==> ... */
2731 /* op1 = target JavaVM pc */
2733 if (src->flags & INMEMORY) {
2734 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2737 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2739 x86_64_jcc(cd, X86_64_CC_E, 0);
2740 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2743 case ICMD_IFNONNULL: /* ..., value ==> ... */
2744 /* op1 = target JavaVM pc */
2746 if (src->flags & INMEMORY) {
2747 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2750 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2752 x86_64_jcc(cd, X86_64_CC_NE, 0);
2753 codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
/* Compare-and-branch family. All of the real work is delegated to the
   emit helpers in emitfuncs.c; each case only selects the condition code:
   - x86_64_emit_ifcc      : 32-bit compare of value against val.i
   - x86_64_emit_if_lcc    : 64-bit compare of value against val.l
   - x86_64_emit_if_icmpcc : 32-bit compare of two stack values
   - x86_64_emit_if_lcmpcc : 64-bit compare of two stack values
     (also used for ACMPEQ/ACMPNE, since references are 64-bit). */
2756 case ICMD_IFEQ: /* ..., value ==> ... */
2757 /* op1 = target JavaVM pc, val.i = constant */
2759 x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2762 case ICMD_IFLT: /* ..., value ==> ... */
2763 /* op1 = target JavaVM pc, val.i = constant */
2765 x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2768 case ICMD_IFLE: /* ..., value ==> ... */
2769 /* op1 = target JavaVM pc, val.i = constant */
2771 x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2774 case ICMD_IFNE: /* ..., value ==> ... */
2775 /* op1 = target JavaVM pc, val.i = constant */
2777 x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2780 case ICMD_IFGT: /* ..., value ==> ... */
2781 /* op1 = target JavaVM pc, val.i = constant */
2783 x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2786 case ICMD_IFGE: /* ..., value ==> ... */
2787 /* op1 = target JavaVM pc, val.i = constant */
2789 x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
2792 case ICMD_IF_LEQ: /* ..., value ==> ... */
2793 /* op1 = target JavaVM pc, val.l = constant */
2795 x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2798 case ICMD_IF_LLT: /* ..., value ==> ... */
2799 /* op1 = target JavaVM pc, val.l = constant */
2801 x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2804 case ICMD_IF_LLE: /* ..., value ==> ... */
2805 /* op1 = target JavaVM pc, val.l = constant */
2807 x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2810 case ICMD_IF_LNE: /* ..., value ==> ... */
2811 /* op1 = target JavaVM pc, val.l = constant */
2813 x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2816 case ICMD_IF_LGT: /* ..., value ==> ... */
2817 /* op1 = target JavaVM pc, val.l = constant */
2819 x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2822 case ICMD_IF_LGE: /* ..., value ==> ... */
2823 /* op1 = target JavaVM pc, val.l = constant */
2825 x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
2828 case ICMD_IF_ICMPEQ: /* ..., value, value ==> ... */
2829 /* op1 = target JavaVM pc */
2831 x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2834 case ICMD_IF_LCMPEQ: /* ..., value, value ==> ... */
2835 case ICMD_IF_ACMPEQ: /* op1 = target JavaVM pc */
2837 x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2840 case ICMD_IF_ICMPNE: /* ..., value, value ==> ... */
2841 /* op1 = target JavaVM pc */
2843 x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2846 case ICMD_IF_LCMPNE: /* ..., value, value ==> ... */
2847 case ICMD_IF_ACMPNE: /* op1 = target JavaVM pc */
2849 x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2852 case ICMD_IF_ICMPLT: /* ..., value, value ==> ... */
2853 /* op1 = target JavaVM pc */
2855 x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2858 case ICMD_IF_LCMPLT: /* ..., value, value ==> ... */
2859 /* op1 = target JavaVM pc */
2861 x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2864 case ICMD_IF_ICMPGT: /* ..., value, value ==> ... */
2865 /* op1 = target JavaVM pc */
2867 x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2870 case ICMD_IF_LCMPGT: /* ..., value, value ==> ... */
2871 /* op1 = target JavaVM pc */
2873 x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2876 case ICMD_IF_ICMPLE: /* ..., value, value ==> ... */
2877 /* op1 = target JavaVM pc */
2879 x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2882 case ICMD_IF_LCMPLE: /* ..., value, value ==> ... */
2883 /* op1 = target JavaVM pc */
2885 x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2888 case ICMD_IF_ICMPGE: /* ..., value, value ==> ... */
2889 /* op1 = target JavaVM pc */
2891 x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2894 case ICMD_IF_LCMPGE: /* ..., value, value ==> ... */
2895 /* op1 = target JavaVM pc */
2897 x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2900 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST */
/* Branchless select: when the next instruction is ELSE_ICONST, the pair
   compiles to "d = else_const; tmp = if_const; test s1; cmovcc tmp -> d"
   — a conditional move instead of a branch. The M_INTMOVE into REG_ITMP1
   on the ELSE path guards the case where s1 would be clobbered by the
   destination load (s1 may alias d). Only the condition code differs
   between the six cases below. */
2902 case ICMD_ELSE_ICONST: /* handled by IFxx_ICONST */
2905 case ICMD_IFEQ_ICONST: /* ..., value ==> ..., constant */
2906 /* val.i = constant */
2908 var_to_reg_int(s1, src, REG_ITMP1);
2909 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2910 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2912 M_INTMOVE(s1, REG_ITMP1);
2915 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2917 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2918 x86_64_testl_reg_reg(cd, s1, s1);
2919 x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2920 store_reg_to_var_int(iptr->dst, d);
2923 case ICMD_IFNE_ICONST: /* ..., value ==> ..., constant */
2924 /* val.i = constant */
2926 var_to_reg_int(s1, src, REG_ITMP1);
2927 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2928 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2930 M_INTMOVE(s1, REG_ITMP1);
2933 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2935 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2936 x86_64_testl_reg_reg(cd, s1, s1);
2937 x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2938 store_reg_to_var_int(iptr->dst, d);
2941 case ICMD_IFLT_ICONST: /* ..., value ==> ..., constant */
2942 /* val.i = constant */
2944 var_to_reg_int(s1, src, REG_ITMP1);
2945 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2946 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2948 M_INTMOVE(s1, REG_ITMP1);
2951 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2953 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2954 x86_64_testl_reg_reg(cd, s1, s1);
2955 x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2956 store_reg_to_var_int(iptr->dst, d);
2959 case ICMD_IFGE_ICONST: /* ..., value ==> ..., constant */
2960 /* val.i = constant */
2962 var_to_reg_int(s1, src, REG_ITMP1);
2963 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2964 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2966 M_INTMOVE(s1, REG_ITMP1);
2969 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2971 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2972 x86_64_testl_reg_reg(cd, s1, s1);
2973 x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2974 store_reg_to_var_int(iptr->dst, d);
2977 case ICMD_IFGT_ICONST: /* ..., value ==> ..., constant */
2978 /* val.i = constant */
2980 var_to_reg_int(s1, src, REG_ITMP1);
2981 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2982 if (iptr[1].opc == ICMD_ELSE_ICONST) {
2984 M_INTMOVE(s1, REG_ITMP1);
2987 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2989 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2990 x86_64_testl_reg_reg(cd, s1, s1);
2991 x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2992 store_reg_to_var_int(iptr->dst, d);
2995 case ICMD_IFLE_ICONST: /* ..., value ==> ..., constant */
2996 /* val.i = constant */
2998 var_to_reg_int(s1, src, REG_ITMP1);
2999 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3000 if (iptr[1].opc == ICMD_ELSE_ICONST) {
3002 M_INTMOVE(s1, REG_ITMP1);
3005 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
3007 x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
3008 x86_64_testl_reg_reg(cd, s1, s1);
3009 x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
3010 store_reg_to_var_int(iptr->dst, d);
/* Method returns. Value-returning variants move the result into the ABI
   result register (RAX / XMM0) and fall into the shared epilogue
   (nowperformreturn): optional exit trace, optional monitorexit for
   synchronized methods (saving/restoring the live return value around
   the builtin call), callee-saved register restore, and stack-frame
   deallocation. */
3014 case ICMD_IRETURN: /* ..., retvalue ==> ... */
3018 var_to_reg_int(s1, src, REG_RESULT);
3019 M_INTMOVE(s1, REG_RESULT);
3021 goto nowperformreturn;
3023 case ICMD_FRETURN: /* ..., retvalue ==> ... */
3026 var_to_reg_flt(s1, src, REG_FRESULT);
3027 M_FLTMOVE(s1, REG_FRESULT);
3029 goto nowperformreturn;
3031 case ICMD_RETURN: /* ... ==> ... */
3037 p = parentargs_base;
3039 /* call trace function */
/* Spill REG_RESULT/REG_FRESULT to the stack so the trace call can't
   clobber them, pass them (plus the methodinfo) as arguments, then
   reload after builtin_displaymethodstop returns. */
3041 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3043 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
3044 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
3046 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
3047 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
3048 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
3049 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
3051 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
3052 x86_64_call_reg(cd, REG_ITMP1);
3054 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
3055 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
3057 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3060 #if defined(USE_THREADS)
3061 if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
/* The monitor object was stashed at rd->maxmemuse * 8 by the prologue
   (not visible in this extract — TODO confirm); its slot is reused to
   preserve the return value across builtin_monitorexit. */
3062 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
3064 /* we need to save the proper return value */
3065 switch (iptr->opc) {
3069 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, rd->maxmemuse * 8);
3073 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, rd->maxmemuse * 8);
3077 x86_64_mov_imm_reg(cd, (ptrint) builtin_monitorexit, REG_ITMP1);
3078 x86_64_call_reg(cd, REG_ITMP1);
3080 /* and now restore the proper return value */
3081 switch (iptr->opc) {
3085 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_RESULT);
3089 x86_64_movq_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_FRESULT);
3095 /* restore saved registers */
3096 for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
3097 p--; x86_64_mov_membase_reg(cd, REG_SP, p * 8, rd->savintregs[i]);
3099 for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
3100 p--; x86_64_movq_membase_reg(cd, REG_SP, p * 8, rd->savfltregs[i]);
3103 /* deallocate stack */
3104 if (parentargs_base) {
3105 x86_64_alu_imm_reg(cd, X86_64_ADD, parentargs_base * 8, REG_SP);
/* TABLESWITCH: normalize the index by subtracting `low`, do one unsigned
   compare against (high - low) — a single CC_A branch catches both
   below-range and above-range — and jump to the default block on
   overflow. The jump table itself is built into the data segment
   (dseg_addtarget) and indexed with an 8-byte scaled load relative to
   the dseg base materialized by dseg_adddata. */
3113 case ICMD_TABLESWITCH: /* ..., index ==> ... */
3118 tptr = (void **) iptr->target;
3120 s4ptr = iptr->val.a;
3121 l = s4ptr[1]; /* low */
3122 i = s4ptr[2]; /* high */
3124 var_to_reg_int(s1, src, REG_ITMP1);
3125 M_INTMOVE(s1, REG_ITMP1);
3127 x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
3132 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
3133 x86_64_jcc(cd, X86_64_CC_A, 0);
3135 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
3136 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3138 /* build jump table top down and use address of lowest entry */
3140 /* s4ptr += 3 + i; */
3144 /* dseg_addtarget(cd, BlockPtrOfPC(*--s4ptr)); */
3145 dseg_addtarget(cd, (basicblock *) tptr[0]);
3149 /* length of dataseg after last dseg_addtarget is used by load */
/* mov imm 0 into REG_ITMP2 is a placeholder; dseg_adddata records the
   site so the dseg base address is filled in later. */
3151 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
3152 dseg_adddata(cd, cd->mcodeptr);
3153 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
3154 x86_64_jmp_reg(cd, REG_ITMP1);
/* LOOKUPSWITCH: compiled as a linear chain of compare+je, one per
   key/target pair, followed by an unconditional jump to the default
   block. MCODECHECK reserves code-buffer space proportional to the
   pair count before emitting. */
3159 case ICMD_LOOKUPSWITCH: /* ..., key ==> ... */
3161 s4 i, l, val, *s4ptr;
3164 tptr = (void **) iptr->target;
3166 s4ptr = iptr->val.a;
3167 l = s4ptr[0]; /* default */
3168 i = s4ptr[1]; /* count */
3170 MCODECHECK((i<<2)+8);
3171 var_to_reg_int(s1, src, REG_ITMP1); /* reg compare should always be faster */
3177 x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
3178 x86_64_jcc(cd, X86_64_CC_E, 0);
3179 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[1]), cd->mcodeptr); */
3180 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3183 x86_64_jmp_imm(cd, 0);
3184 /* codegen_addreference(cd, BlockPtrOfPC(l), cd->mcodeptr); */
3186 tptr = (void **) iptr->target;
3187 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
/* Builtin and method invocation. All variants share one argument-
   marshalling loop (int args to argintregs / float args to argfltregs,
   overflow onto the stack), then dispatch on opcode to emit the actual
   call: direct for builtins/static/special, vftbl-indexed for virtual,
   interfacetable-indexed for interface. Unresolved callees get a patcher
   reference. After the call, the declared return type `d` routes the
   result from REG_RESULT / REG_FRESULT into the destination variable. */
3192 case ICMD_BUILTIN3: /* ..., arg1, arg2, arg3 ==> ... */
3193 /* op1 = return type, val.a = function pointer*/
3197 case ICMD_BUILTIN2: /* ..., arg1, arg2 ==> ... */
3198 /* op1 = return type, val.a = function pointer*/
3202 case ICMD_BUILTIN1: /* ..., arg1 ==> ... */
3203 /* op1 = return type, val.a = function pointer*/
3207 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ... */
3208 /* op1 = arg count, val.a = method pointer */
3210 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3211 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer */
3212 case ICMD_INVOKEINTERFACE:
3222 MCODECHECK((s3 << 1) + 64);
3224 /* copy arguments to registers or stack location ******************/
3226 /* count integer and float arguments */
3231 for (s2 = s3, tmpsrc = src; --s2 >= 0; tmpsrc = tmpsrc->prev) {
3232 IS_INT_LNG_TYPE(tmpsrc->type) ? iarg++ : farg++;
3235 /* calculate amount of arguments to be on stack */
/* NOTE(review): C precedence bug. `?:` binds tighter than intended
   here, so this parses as
     s2 = A ? iarg-INT_ARG_CNT : ((0 + B) ? farg-FLT_ARG_CNT : 0);
   i.e. when integer args overflow their registers, the float stack-arg
   count is ignored entirely instead of being added. The intended value
   is the SUM of both overflows; parenthesize both ternaries. Cannot be
   fixed safely in this extract (enclosing function is incomplete). */
3237 s2 = (iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0 +
3238 (farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0;
3240 for (; --s3 >= 0; src = src->prev) {
3241 /* decrement the current argument type */
3242 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
3244 if (src->varkind == ARGVAR) {
3245 if (IS_INT_LNG_TYPE(src->type)) {
3246 if (iarg >= INT_ARG_CNT) {
3250 if (farg >= FLT_ARG_CNT) {
3257 if (IS_INT_LNG_TYPE(src->type)) {
3258 if (iarg < INT_ARG_CNT) {
3259 s1 = rd->argintregs[iarg];
3260 var_to_reg_int(d, src, s1);
3264 var_to_reg_int(d, src, REG_ITMP1);
3266 x86_64_mov_reg_membase(cd, d, REG_SP, s2 * 8);
3270 if (farg < FLT_ARG_CNT) {
3271 s1 = rd->argfltregs[farg];
3272 var_to_reg_flt(d, src, s1);
3276 var_to_reg_flt(d, src, REG_FTMP1);
3278 x86_64_movq_reg_membase(cd, d, REG_SP, s2 * 8);
3284 switch (iptr->opc) {
/* Builtins: the function pointer may need patching (iptr->val.fp with
   the patcher id in iptr->target), then a direct call through ITMP1. */
3291 codegen_addpatchref(cd, cd->mcodeptr,
3292 iptr->val.fp, iptr->target);
3294 if (showdisassemble) {
3295 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3304 x86_64_mov_imm_reg(cd, a, REG_ITMP1);
3305 x86_64_call_reg(cd, REG_ITMP1);
/* INVOKESPECIAL: explicit null check on the receiver via test + jz to
   the shared null-pointer exception stub. */
3308 case ICMD_INVOKESPECIAL:
3309 x86_64_test_reg_reg(cd, rd->argintregs[0], rd->argintregs[0]);
3310 x86_64_jcc(cd, X86_64_CC_Z, 0);
3311 codegen_addxnullrefs(cd, cd->mcodeptr);
3313 /* first argument contains pointer */
3314 /* gen_nullptr_check(rd->argintregs[0]); */
3316 /* access memory for hardware nullptr */
3317 /* x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); */
/* INVOKESTATIC (and resolved SPECIAL): call the method's stub routine
   directly; unresolved methods go through the patcher. */
3321 case ICMD_INVOKESTATIC:
3323 unresolved_method *um = iptr->target;
3325 codegen_addpatchref(cd, cd->mcodeptr,
3326 PATCHER_invokestatic_special, um);
3328 if (showdisassemble) {
3329 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3333 d = um->methodref->parseddesc.md->returntype.type;
3336 a = (ptrint) lm->stubroutine;
3337 d = lm->parseddesc->returntype.type;
3340 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3341 x86_64_call_reg(cd, REG_ITMP2);
/* INVOKEVIRTUAL: load the receiver's vftbl, index it by the method's
   vftbl slot, call indirect. */
3344 case ICMD_INVOKEVIRTUAL:
3345 gen_nullptr_check(rd->argintregs[0]);
3348 unresolved_method *um = iptr->target;
3350 codegen_addpatchref(cd, cd->mcodeptr,
3351 PATCHER_invokevirtual, um);
3353 if (showdisassemble) {
3354 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3358 d = um->methodref->parseddesc.md->returntype.type;
3361 s1 = OFFSET(vftbl_t, table[0]) +
3362 sizeof(methodptr) * lm->vftblindex;
3363 d = lm->parseddesc->returntype.type;
3366 x86_64_mov_membase_reg(cd, rd->argintregs[0],
3367 OFFSET(java_objectheader, vftbl),
3369 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP1);
3370 x86_64_call_reg(cd, REG_ITMP1);
/* INVOKEINTERFACE: two indirections — vftbl, then the per-interface
   table at a negative offset (s1), then the method slot (s2). */
3373 case ICMD_INVOKEINTERFACE:
3374 gen_nullptr_check(rd->argintregs[0]);
3377 unresolved_method *um = iptr->target;
3379 codegen_addpatchref(cd, cd->mcodeptr,
3380 PATCHER_invokeinterface, um);
3382 if (showdisassemble) {
3383 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3388 d = um->methodref->parseddesc.md->returntype.type;
3391 s1 = OFFSET(vftbl_t, interfacetable[0]) -
3392 sizeof(methodptr) * lm->class->index;
3394 s2 = sizeof(methodptr) * (lm - lm->class->methods);
3396 d = lm->parseddesc->returntype.type;
3399 x86_64_mov_membase_reg(cd, rd->argintregs[0],
3400 OFFSET(java_objectheader, vftbl),
3402 x86_64_mov_membase32_reg(cd, REG_ITMP2, s1, REG_ITMP2);
3403 x86_64_mov_membase32_reg(cd, REG_ITMP2, s2, REG_ITMP1);
3404 x86_64_call_reg(cd, REG_ITMP1);
3408 /* d contains return type */
3410 if (d != TYPE_VOID) {
3411 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3412 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3413 M_INTMOVE(REG_RESULT, s1);
3414 store_reg_to_var_int(iptr->dst, s1);
3417 s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3418 M_FLTMOVE(REG_FRESULT, s1);
3419 store_reg_to_var_flt(iptr->dst, s1);
/* CHECKCAST. Emits both an interface-check path and a class-check path;
   which ones are emitted (and how they are skipped over) depends on
   whether `super` is resolved and whether it is an interface. Because
   the forward jumps that skip a path must know that path's byte length
   BEFORE it is emitted, s2 (interface path size) and s3 (class path
   size) are computed by hand from the instruction encodings — any change
   to the emitted sequences must update these size formulas in lockstep.
   Comment-only annotation here; the logic is too size/order-coupled (and
   this extract too incomplete) to rewrite safely. */
3426 case ICMD_CHECKCAST: /* ..., objectref ==> ..., objectref */
3428 /* op1: 0 == array, 1 == class */
3429 /* val.a: (classinfo *) superclass */
3431 /* superclass is an interface:
3433 * OK if ((sub == NULL) ||
3434 * (sub->vftbl->interfacetablelength > super->index) &&
3435 * (sub->vftbl->interfacetable[-super->index] != NULL));
3437 * superclass is a class:
3439 * OK if ((sub == NULL) || (0
3440 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3441 * super->vftbl->diffval));
3446 vftbl_t *supervftbl;
3449 super = (classinfo *) iptr->val.a;
3456 superindex = super->index;
3457 supervftbl = super->vftbl;
3460 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3461 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3463 var_to_reg_int(s1, src, REG_ITMP1);
3465 /* calculate interface checkcast code size */
3467 s2 = 3; /* mov_membase_reg */
3468 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3470 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub imm32 */ +
3471 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3472 3 /* test */ + 6 /* jcc */;
3475 s2 += (showdisassemble ? 5 : 0);
3477 /* calculate class checkcast code size */
3479 s3 = 3; /* mov_membase_reg */
3480 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3481 s3 += 10 /* mov_imm_reg */ + 3 + 4 /* movl_membase32_reg */;
3484 if (s1 != REG_ITMP1) {
3485 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3486 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3487 a += 3; /* movl_membase_reg - only if REG_ITMP3 == R11 */
3488 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3494 s3 += 3 + 4 /* movl_membase32_reg */ + 3 /* sub */ +
3495 10 /* mov_imm_reg */ + 3 /* movl_membase_reg */;
3496 CALCOFFSETBYTES(s3, REG_ITMP3, OFFSET(vftbl_t, diffval));
3499 s3 += 3 /* cmp */ + 6 /* jcc */;
3502 s3 += (showdisassemble ? 5 : 0);
3504 /* if class is not resolved, check which code to call */
/* Unresolved super: null skips everything; otherwise the patched-in
   super->flags decide at runtime whether to fall into the interface
   path or jump over it (by s2 + 5) into the class path. */
3507 x86_64_test_reg_reg(cd, s1, s1);
3508 x86_64_jcc(cd, X86_64_CC_Z, 6 + (showdisassemble ? 5 : 0) + 7 + 6 + s2 + 5 + s3);
3510 codegen_addpatchref(cd, cd->mcodeptr,
3511 PATCHER_checkcast_instanceof_flags,
3512 (constant_classref *) iptr->target);
3514 if (showdisassemble) {
3515 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3518 x86_64_movl_imm_reg(cd, 0, REG_ITMP2); /* super->flags */
3519 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP2);
3520 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3523 /* interface checkcast code */
/* Check interfacetablelength > superindex and that the interface table
   entry is non-NULL; either failure jumps to the ClassCastException
   stub via codegen_addxcastrefs. */
3525 if (!super || (super->flags & ACC_INTERFACE)) {
3527 x86_64_test_reg_reg(cd, s1, s1);
3528 x86_64_jcc(cd, X86_64_CC_Z, s2);
3531 x86_64_mov_membase_reg(cd, s1,
3532 OFFSET(java_objectheader, vftbl),
3536 codegen_addpatchref(cd, cd->mcodeptr,
3537 PATCHER_checkcast_instanceof_interface,
3538 (constant_classref *) iptr->target);
3540 if (showdisassemble) {
3541 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3545 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3546 OFFSET(vftbl_t, interfacetablelength),
3548 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3549 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3550 x86_64_jcc(cd, X86_64_CC_LE, 0);
3551 codegen_addxcastrefs(cd, cd->mcodeptr);
3552 x86_64_mov_membase32_reg(cd, REG_ITMP2,
3553 OFFSET(vftbl_t, interfacetable[0]) -
3554 superindex * sizeof(methodptr*),
3556 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3557 x86_64_jcc(cd, X86_64_CC_E, 0);
3558 codegen_addxcastrefs(cd, cd->mcodeptr);
3561 x86_64_jmp_imm(cd, s3);
3564 /* class checkcast code */
/* Subtype test via the baseval/diffval scheme: the cast succeeds iff
   (unsigned)(sub.baseval - super.baseval) <= super.diffval, hence the
   single unsigned CC_A branch to the cast-exception stub. The reads of
   baseval/diffval are bracketed by the thread-critical section because
   the hierarchy numbering can be renumbered concurrently. */
3566 if (!super || !(super->flags & ACC_INTERFACE)) {
3568 x86_64_test_reg_reg(cd, s1, s1);
3569 x86_64_jcc(cd, X86_64_CC_Z, s3);
3572 x86_64_mov_membase_reg(cd, s1,
3573 OFFSET(java_objectheader, vftbl),
3577 codegen_addpatchref(cd, cd->mcodeptr,
3578 PATCHER_checkcast_class,
3579 (constant_classref *) iptr->target);
3581 if (showdisassemble) {
3582 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3586 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP3);
3587 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3588 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3590 x86_64_movl_membase32_reg(cd, REG_ITMP2,
3591 OFFSET(vftbl_t, baseval),
3593 /* if (s1 != REG_ITMP1) { */
3594 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3595 /* OFFSET(vftbl_t, baseval), */
3597 /* x86_64_movl_membase_reg(cd, REG_ITMP3, */
3598 /* OFFSET(vftbl_t, diffval), */
3600 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3601 /* codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase); */
3603 /* x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP1, REG_ITMP2); */
3606 x86_64_movl_membase32_reg(cd, REG_ITMP3,
3607 OFFSET(vftbl_t, baseval),
3609 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP2);
3610 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP3);
3611 x86_64_movl_membase_reg(cd, REG_ITMP3,
3612 OFFSET(vftbl_t, diffval),
3615 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3616 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3618 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP2);
3619 x86_64_jcc(cd, X86_64_CC_A, 0); /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3620 codegen_addxcastrefs(cd, cd->mcodeptr);
3622 d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3624 store_reg_to_var_int(iptr->dst, d);
3625 /* if (iptr->dst->flags & INMEMORY) { */
3626 /* x86_64_mov_reg_membase(cd, s1, REG_SP, iptr->dst->regoff * 8); */
3628 /* M_INTMOVE(s1, iptr->dst->regoff); */
3633 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult */
3635 /* op1: 0 == array, 1 == class */
3636 /* val.a: (classinfo *) superclass */
3638 /* superclass is an interface:
3640 * return (sub != NULL) &&
3641 * (sub->vftbl->interfacetablelength > super->index) &&
3642 * (sub->vftbl->interfacetable[-super->index] != NULL);
3644 * superclass is a class:
3646 * return ((sub != NULL) && (0
3647 * <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3648 * super->vftbl->diffvall));
3653 vftbl_t *supervftbl;
3656 super = (classinfo *) iptr->val.a;
3663 superindex = super->index;
3664 supervftbl = super->vftbl;
3667 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3668 codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3671 var_to_reg_int(s1, src, REG_ITMP1);
3672 d = reg_of_var(rd, iptr->dst, REG_ITMP2);
3674 M_INTMOVE(s1, REG_ITMP1);
3678 /* calculate interface instanceof code size */
3680 s2 = 3; /* mov_membase_reg */
3681 CALCOFFSETBYTES(s2, s1, OFFSET(java_objectheader, vftbl));
3682 s2 += 3 + 4 /* movl_membase32_reg */ + 3 + 4 /* sub_imm32 */ +
3683 3 /* test */ + 6 /* jcc */ + 3 + 4 /* mov_membase32_reg */ +
3684 3 /* test */ + 4 /* setcc */;
3687 s2 += (showdisassemble ? 5 : 0);
3689 /* calculate class instanceof code size */
3691 s3 = 3; /* mov_membase_reg */
3692 CALCOFFSETBYTES(s3, s1, OFFSET(java_objectheader, vftbl));
3693 s3 += 10; /* mov_imm_reg */
3694 s3 += 2; /* movl_membase_reg - only if REG_ITMP1 == RAX */
3695 CALCOFFSETBYTES(s3, REG_ITMP1, OFFSET(vftbl_t, baseval));
3696 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3697 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, baseval));
3698 s3 += 3; /* movl_membase_reg - only if REG_ITMP2 == R10 */
3699 CALCOFFSETBYTES(s3, REG_ITMP2, OFFSET(vftbl_t, diffval));
3700 s3 += 3 /* sub */ + 3 /* xor */ + 3 /* cmp */ + 4 /* setcc */;
3703 s3 += (showdisassemble ? 5 : 0);
3705 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3707 /* if class is not resolved, check which code to call */
3710 x86_64_test_reg_reg(cd, s1, s1);
3711 x86_64_jcc(cd, X86_64_CC_Z, (6 + (showdisassemble ? 5 : 0) +
3712 7 + 6 + s2 + 5 + s3));
3714 codegen_addpatchref(cd, cd->mcodeptr,
3715 PATCHER_checkcast_instanceof_flags,
3716 (constant_classref *) iptr->target);
3718 if (showdisassemble) {
3719 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3722 x86_64_movl_imm_reg(cd, 0, REG_ITMP3); /* super->flags */
3723 x86_64_alul_imm_reg(cd, X86_64_AND, ACC_INTERFACE, REG_ITMP3);
3724 x86_64_jcc(cd, X86_64_CC_Z, s2 + 5);
3727 /* interface instanceof code */
3729 if (!super || (super->flags & ACC_INTERFACE)) {
3731 x86_64_test_reg_reg(cd, s1, s1);
3732 x86_64_jcc(cd, X86_64_CC_Z, s2);
3735 x86_64_mov_membase_reg(cd, s1,
3736 OFFSET(java_objectheader, vftbl),
3739 codegen_addpatchref(cd, cd->mcodeptr,
3740 PATCHER_checkcast_instanceof_interface,
3741 (constant_classref *) iptr->target);
3743 if (showdisassemble) {
3744 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3748 x86_64_movl_membase32_reg(cd, REG_ITMP1,
3749 OFFSET(vftbl_t, interfacetablelength),
3751 x86_64_alu_imm32_reg(cd, X86_64_SUB, superindex, REG_ITMP3);
3752 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3754 a = 3 + 4 /* mov_membase32_reg */ + 3 /* test */ + 4 /* setcc */;
3756 x86_64_jcc(cd, X86_64_CC_LE, a);
3757 x86_64_mov_membase32_reg(cd, REG_ITMP1,
3758 OFFSET(vftbl_t, interfacetable[0]) -
3759 superindex * sizeof(methodptr*),
3761 x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3762 x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3765 x86_64_jmp_imm(cd, s3);
3768 /* class instanceof code */
3770 if (!super || !(super->flags & ACC_INTERFACE)) {
3772 x86_64_test_reg_reg(cd, s1, s1);
3773 x86_64_jcc(cd, X86_64_CC_E, s3);
3776 x86_64_mov_membase_reg(cd, s1,
3777 OFFSET(java_objectheader, vftbl),
3781 codegen_addpatchref(cd, cd->mcodeptr,
3782 PATCHER_instanceof_class,
3783 (constant_classref *) iptr->target);
3785 if (showdisassemble) {
3786 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3790 x86_64_mov_imm_reg(cd, (ptrint) supervftbl, REG_ITMP2);
3791 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3792 codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3794 x86_64_movl_membase_reg(cd, REG_ITMP1,
3795 OFFSET(vftbl_t, baseval),
3797 x86_64_movl_membase_reg(cd, REG_ITMP2,
3798 OFFSET(vftbl_t, diffval),
3800 x86_64_movl_membase_reg(cd, REG_ITMP2,
3801 OFFSET(vftbl_t, baseval),
3803 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3804 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3806 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
3807 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d); /* may be REG_ITMP2 */
3808 x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP1);
3809 x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3811 store_reg_to_var_int(iptr->dst, d);
3815 case ICMD_CHECKASIZE: /* ..., size ==> ..., size */
3817 if (src->flags & INMEMORY) {
3818 x86_64_alul_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
3821 x86_64_testl_reg_reg(cd, src->regoff, src->regoff);
3823 x86_64_jcc(cd, X86_64_CC_L, 0);
3824 codegen_addxcheckarefs(cd, cd->mcodeptr);
3827 case ICMD_CHECKEXCEPTION: /* ... ==> ... */
3829 x86_64_test_reg_reg(cd, REG_RESULT, REG_RESULT);
3830 x86_64_jcc(cd, X86_64_CC_E, 0);
3831 codegen_addxexceptionrefs(cd, cd->mcodeptr);
3834 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref */
3835 /* op1 = dimension, val.a = array descriptor */
3837 /* check for negative sizes and copy sizes to stack if necessary */
3839 MCODECHECK((iptr->op1 << 1) + 64);
3841 for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3842 var_to_reg_int(s2, src, REG_ITMP1);
3843 x86_64_testl_reg_reg(cd, s2, s2);
3844 x86_64_jcc(cd, X86_64_CC_L, 0);
3845 codegen_addxcheckarefs(cd, cd->mcodeptr);
3847 /* copy SAVEDVAR sizes to stack */
3849 if (src->varkind != ARGVAR) {
3850 x86_64_mov_reg_membase(cd, s2, REG_SP, s1 * 8);
3854 /* is a patcher function set? */
3857 codegen_addpatchref(cd, cd->mcodeptr,
3858 (functionptr) (ptrint) iptr->target,
3861 if (showdisassemble) {
3862 M_NOP; M_NOP; M_NOP; M_NOP; M_NOP;
3868 a = (ptrint) iptr->val.a;
3871 /* a0 = dimension count */
3873 x86_64_mov_imm_reg(cd, iptr->op1, rd->argintregs[0]);
3875 /* a1 = arrayvftbl */
3877 x86_64_mov_imm_reg(cd, (ptrint) iptr->val.a, rd->argintregs[1]);
3879 /* a2 = pointer to dimensions = stack pointer */
3881 x86_64_mov_reg_reg(cd, REG_SP, rd->argintregs[2]);
3883 x86_64_mov_imm_reg(cd, (ptrint) BUILTIN_multianewarray, REG_ITMP1);
3884 x86_64_call_reg(cd, REG_ITMP1);
3886 s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3887 M_INTMOVE(REG_RESULT, s1);
3888 store_reg_to_var_int(iptr->dst, s1);
3892 throw_cacao_exception_exit(string_java_lang_InternalError,
3893 "Unknown ICMD %d", iptr->opc);
3896 } /* for instruction */
3898 /* copy values to interface registers */
3900 src = bptr->outstack;
3901 len = bptr->outdepth;
3902 MCODECHECK(64 + len);
3908 if ((src->varkind != STACKVAR)) {
3910 if (IS_FLT_DBL_TYPE(s2)) {
3911 var_to_reg_flt(s1, src, REG_FTMP1);
3912 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3913 M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3916 x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3920 var_to_reg_int(s1, src, REG_ITMP1);
3921 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3922 M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3925 x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3931 } /* if (bptr -> flags >= BBREACHED) */
3932 } /* for basic block */
3934 codegen_createlinenumbertable(cd);
3938 /* generate bound check stubs */
3940 u1 *xcodeptr = NULL;
3943 for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3944 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3946 cd->mcodeptr - cd->mcodebase);
3950 /* move index register into REG_ITMP1 */
3951 x86_64_mov_reg_reg(cd, bref->reg, REG_ITMP1); /* 3 bytes */
3953 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
3954 dseg_adddata(cd, cd->mcodeptr);
3955 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
3956 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
3958 if (xcodeptr != NULL) {
3959 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3962 xcodeptr = cd->mcodeptr;
3965 /*create stackinfo -- begin*/
3966 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
3967 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
3968 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
3969 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
3970 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
3971 x86_64_call_reg(cd,REG_ITMP3);
3972 /*create stackinfo -- end*/
3974 x86_64_mov_reg_reg(cd, REG_ITMP1, rd->argintregs[0]);
3975 x86_64_mov_imm_reg(cd, (ptrint) new_arrayindexoutofboundsexception, REG_ITMP3);
3976 x86_64_call_reg(cd, REG_ITMP3);
3978 /*remove stackinfo -- begin*/
3979 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
3980 x86_64_call_reg(cd,REG_ITMP3);
3981 /*remove stackinfo -- end*/
3983 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
3984 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3986 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
3987 x86_64_jmp_reg(cd, REG_ITMP3);
3991 /* generate negative array size check stubs */
3995 for (bref = cd->xcheckarefs; bref != NULL; bref = bref->next) {
3996 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3997 gen_resolvebranch(cd->mcodebase + bref->branchpos,
3999 xcodeptr - cd->mcodebase - (10 + 10 + 3));
4003 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4005 cd->mcodeptr - cd->mcodebase);
4009 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
4010 dseg_adddata(cd, cd->mcodeptr);
4011 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
4012 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
4014 if (xcodeptr != NULL) {
4015 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
4018 xcodeptr = cd->mcodeptr;
4021 /*create stackinfo -- begin*/
4022 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
4023 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
4024 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
4025 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
4026 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
4027 x86_64_call_reg(cd,REG_ITMP3);
4028 /*create stackinfo -- end*/
4030 x86_64_mov_imm_reg(cd, (u8) new_negativearraysizeexception, REG_ITMP3);
4031 x86_64_call_reg(cd, REG_ITMP3);
4033 /*remove stackinfo -- begin*/
4034 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
4035 x86_64_call_reg(cd,REG_ITMP3);
4036 /*remove stackinfo -- end*/
4038 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
4039 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
4041 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
4042 x86_64_jmp_reg(cd, REG_ITMP3);
4046 /* generate cast check stubs */
4050 for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
4051 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
4052 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4054 xcodeptr - cd->mcodebase - (10 + 10 + 3));
4058 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4060 cd->mcodeptr - cd->mcodebase);
4064 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
4065 dseg_adddata(cd, cd->mcodeptr);
4066 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
4067 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
4069 if (xcodeptr != NULL) {
4070 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
4073 xcodeptr = cd->mcodeptr;
4075 /*create stackinfo -- begin*/
4076 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
4077 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
4078 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
4079 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
4080 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
4081 x86_64_call_reg(cd,REG_ITMP3);
4082 /*create stackinfo -- end*/
4085 x86_64_mov_imm_reg(cd, (u8) new_classcastexception, REG_ITMP3);
4086 x86_64_call_reg(cd, REG_ITMP3);
4088 /*remove stackinfo -- begin*/
4089 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
4090 x86_64_call_reg(cd,REG_ITMP3);
4091 /*remove stackinfo -- end*/
4093 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
4094 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
4096 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
4097 x86_64_jmp_reg(cd, REG_ITMP3);
4101 /* generate divide by zero check stubs */
4105 for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
4106 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
4107 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4109 xcodeptr - cd->mcodebase - (10 + 10 + 3));
4113 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4115 cd->mcodeptr - cd->mcodebase);
4119 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
4120 dseg_adddata(cd, cd->mcodeptr);
4121 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3); /* 10 bytes */
4122 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes */
4124 if (xcodeptr != NULL) {
4125 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
4128 xcodeptr = cd->mcodeptr;
4130 /*create stackinfo -- begin*/
4131 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
4132 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
4133 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
4134 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
4135 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
4136 x86_64_call_reg(cd,REG_ITMP3);
4137 /*create stackinfo -- end*/
4139 x86_64_mov_imm_reg(cd, (u8) new_arithmeticexception, REG_ITMP3);
4140 x86_64_call_reg(cd, REG_ITMP3);
4142 /*remove stackinfo -- begin*/
4143 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
4144 x86_64_call_reg(cd,REG_ITMP3);
4145 /*remove stackinfo -- end*/
4147 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
4148 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
4150 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
4151 x86_64_jmp_reg(cd, REG_ITMP3);
4155 /* generate exception check stubs */
4159 for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
4160 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
4161 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4163 xcodeptr - cd->mcodebase - (10 + 10 + 3));
4167 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4169 cd->mcodeptr - cd->mcodebase);
4173 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
4174 dseg_adddata(cd, cd->mcodeptr);
4175 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
4176 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
4178 if (xcodeptr != NULL) {
4179 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
4182 xcodeptr = cd->mcodeptr;
4186 x86_64_alu_imm_reg(cd, X86_64_SUB, 4*8, REG_SP);
4187 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3*8);
4188 x86_64_mov_imm_membase(cd, 0, REG_SP, 2*8);
4189 x86_64_mov_imm_membase(cd, 0, REG_SP, 1*8);
4190 x86_64_mov_imm_membase(cd, 0, REG_SP, 0*8);
4191 x86_64_mov_imm_reg(cd,(u8) asm_prepare_native_stackinfo,REG_ITMP1);
4192 x86_64_call_reg(cd,REG_ITMP1);
4195 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4196 x86_64_mov_imm_reg(cd, (u8) &builtin_get_exceptionptrptr, REG_ITMP1);
4197 x86_64_call_reg(cd, REG_ITMP1);
4198 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
4199 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
4200 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
4202 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4203 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP1_XPTR);
4204 x86_64_mov_imm_membase(cd, 0, REG_ITMP3, 0);
4206 x86_64_mov_reg_reg(cd,REG_ITMP1_XPTR,RDI);
4207 x86_64_mov_imm_reg(cd,(u8) helper_fillin_stacktrace_always,REG_ITMP1);
4208 x86_64_call_reg(cd,REG_ITMP1);
4209 x86_64_mov_reg_reg(cd,REG_RESULT,REG_ITMP1_XPTR);
4211 x86_64_mov_imm_reg(cd,(u8) asm_remove_native_stackinfo,REG_ITMP2);
4212 x86_64_call_reg(cd,REG_ITMP2);
4214 x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
4215 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);
4216 x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
4219 x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
4220 x86_64_jmp_reg(cd, REG_ITMP3);
4224 /* generate null pointer check stubs */
4228 for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
4229 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
4230 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4232 xcodeptr - cd->mcodebase - (10 + 10 + 3));
4236 gen_resolvebranch(cd->mcodebase + bref->branchpos,
4238 cd->mcodeptr - cd->mcodebase);
4242 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC); /* 10 bytes */
4243 dseg_adddata(cd, cd->mcodeptr);
4244 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1); /* 10 bytes */
4245 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes */
4247 if (xcodeptr != NULL) {
4248 x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
4251 xcodeptr = cd->mcodeptr;
4253 /*create stackinfo -- begin*/
4254 x86_64_alu_imm_reg(cd, X86_64_SUB, 4 * 8, REG_SP);
4255 x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 3 * 8);
4256 x86_64_mov_imm_membase(cd,0,REG_SP,2*8);
4257 x86_64_mov_imm_membase(cd,0,REG_SP,1*8);
4258 x86_64_mov_imm_reg(cd,(ptrint)asm_prepare_native_stackinfo,REG_ITMP3);
4259 x86_64_call_reg(cd,REG_ITMP3);
4260 /*create stackinfo -- end*/
4263 x86_64_mov_imm_reg(cd, (ptrint) new_nullpointerexception, REG_ITMP3);
4264 x86_64_call_reg(cd, REG_ITMP3);
4266 /*remove stackinfo -- begin*/
4267 x86_64_mov_imm_reg(cd,(ptrint)asm_remove_native_stackinfo,REG_ITMP3);
4268 x86_64_call_reg(cd,REG_ITMP3);
4269 /*remove stackinfo -- end*/
4271 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, REG_ITMP2_XPC);
4272 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
4274 x86_64_mov_imm_reg(cd, (ptrint) asm_handle_exception, REG_ITMP3);
4275 x86_64_jmp_reg(cd, REG_ITMP3);
4279 /* generate code patching stub call code */
4286 tmpcd = DNEW(codegendata);
4288 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
4289 /* check size of code segment */
4291 MCODECHECK(2 * 8 + 128);
4293 /* Get machine code which is patched back in later. A */
4294 /* `call rel32' is 5 bytes long (but read 8 bytes). */
4296 xcodeptr = cd->mcodebase + pref->branchpos;
4297 mcode = *((ptrint *) xcodeptr);
4299 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4300 /* create a virtual java_objectheader */
4302 *((ptrint *) (cd->mcodeptr + 0)) = 0; /* vftbl */
4303 *((ptrint *) (cd->mcodeptr + 8)) = (ptrint) get_dummyLR(); /* monitorPtr */
4308 /* patch in `call rel32' to call the following code */
4310 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4311 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4313 /* move pointer to java_objectheader onto stack */
4315 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4316 x86_64_call_imm(cd, 0);
4317 x86_64_alu_imm_membase(cd, X86_64_SUB, 5 + 2 * 8, REG_SP, 0);
4319 x86_64_push_imm(cd, 0);
4322 /* move machine code bytes and classinfo pointer into registers */
4324 x86_64_mov_imm_reg(cd, (ptrint) mcode, REG_ITMP3);
4325 x86_64_push_reg(cd, REG_ITMP3);
4326 x86_64_mov_imm_reg(cd, (ptrint) pref->ref, REG_ITMP3);
4327 x86_64_push_reg(cd, REG_ITMP3);
4329 x86_64_mov_imm_reg(cd, (ptrint) pref->patcher, REG_ITMP3);
4330 x86_64_push_reg(cd, REG_ITMP3);
4332 x86_64_mov_imm_reg(cd, (ptrint) asm_wrapper_patcher, REG_ITMP3);
4333 x86_64_jmp_reg(cd, REG_ITMP3);
4338 codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
4342 /* function createcompilerstub *************************************************
4344 creates a stub routine which calls the compiler
4346 *******************************************************************************/
4348 #define COMPSTUBSIZE 23
/* createcompilerstub *********************************************************

   Builds a small trampoline stub for a not-yet-compiled method.  The stub
   loads the methodinfo pointer into REG_ITMP1 and jumps to
   asm_call_jit_compiler (presumably that routine then invokes the JIT for
   this method -- TODO confirm against asmpart).  The stub occupies
   COMPSTUBSIZE bytes allocated with CNEW and is later released by
   removecompilerstub.

   NOTE(review): this view of the function is elided (allocation of cd,
   braces and the return statement are not visible here).

*******************************************************************************/
4350 u1 *createcompilerstub(methodinfo *m)
4352 u1 *s = CNEW(u1, COMPSTUBSIZE); /* memory to hold the stub */
4356 /* mark start of dump memory area */
4358 dumpsize = dump_size();
/* temporary codegendata lives in the dump area and is released below */
4360 cd = DNEW(codegendata);
4363 /* code for the stub */
4365 x86_64_mov_imm_reg(cd, (ptrint) m, REG_ITMP1); /* pass method to compiler */
4366 x86_64_mov_imm_reg(cd, (ptrint) asm_call_jit_compiler, REG_ITMP3);
/* tail-jump: the stub never returns here, control continues in the
   JIT entry code with REG_ITMP1 holding the methodinfo pointer */
4367 x86_64_jmp_reg(cd, REG_ITMP3);
4369 #if defined(STATISTICS)
/* account the fixed stub size in the code-size statistics */
4371 count_cstub_len += COMPSTUBSIZE;
4374 /* release dump area */
4376 dump_release(dumpsize);
4382 /* function removecompilerstub *************************************************
4384 deletes a compilerstub from memory (simply by freeing it)
4386 *******************************************************************************/
/* removecompilerstub *********************************************************

   Frees the COMPSTUBSIZE bytes that createcompilerstub allocated for the
   given stub.  The caller must ensure the stub is no longer reachable
   (i.e. no vftbl/method table entry still points at it).

*******************************************************************************/
4388 void removecompilerstub(u1 *stub)
4390 CFREE(stub, COMPSTUBSIZE);
4394 /* function: createnativestub **************************************************
4396 creates a stub routine which calls a native method
4398 *******************************************************************************/
4400 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
4401 /* static java_objectheader **(*callgetexceptionptrptr)() = builtin_get_exceptionptrptr; */
4404 #define NATIVESTUBSIZE 1024 /* keep this size high enough! */
4405 #define NATIVESTUB_DATA_SIZE (7*8)
4407 u1 *createnativestub(functionptr f, methodinfo *m)
4409 u1 *s; /* pointer to stub memory */
4412 t_inlining_globals *id;
4414 s4 stackframesize; /* size of stackframe if needed */
4416 s4 iargs; /* count of integer arguments */
4417 s4 fargs; /* count of float arguments */
4421 bool require_clinit_call;
4423 void **callAddrPatchPos=0;
4425 void **jmpInstrPatchPos=0;
4427 /* initialize variables */
4432 /* mark start of dump memory area */
4434 dumpsize = dump_size();
4436 cd = DNEW(codegendata);
4437 rd = DNEW(registerdata);
4438 id = DNEW(t_inlining_globals);
4440 /* setup registers before using it */
4442 inlining_setup(m, id);
4443 reg_setup(m, rd, id);
4445 /* set paramcount and paramtypes */
4447 method_descriptor2types(m);
4449 /* count integer and float arguments */
4451 tptr = m->paramtypes;
4452 for (i = 0; i < m->paramcount; i++) {
4453 IS_INT_LNG_TYPE(*tptr++) ? iargs++ : fargs++;
4456 stubsize=NATIVESTUBSIZE;
4457 require_clinit_call= ((m->flags & ACC_STATIC) && !m->class->initialized);
4458 if (require_clinit_call) stubsize+=NATIVESTUB_DATA_SIZE;
4459 s = CNEW(u1, stubsize); /* memory to hold the stub */
4461 if (require_clinit_call) {
4462 cs = (u8*) (s+NATIVESTUB_DATA_SIZE);
4463 *(cs - 7) = 0; /* extable size,padding */
4464 *(cs - 6) = 0; /* line number table start */
4465 *(cs - 5) = 0; /* line number table size */
4466 *(cs - 4) = 0; /* padding,fltsave */
4467 *(cs - 3) = 0; /* intsave=0,isleaf=0 */
4468 *(cs - 2) = 0x0000000000000000; /* frame size=0 (stack misalignment) issync=0 */
4470 *(cs - 2) = 0x0000000100000000; /* frame size=1 issync=0 */
4472 *(cs - 1) = (u8) m; /* method pointer */
4477 /* set some required variables which are normally set by codegen_setup */
4478 cd->mcodebase = (u1*)cs;
4479 cd->mcodeptr = (u1*)cs;
4480 cd->patchrefs = NULL;
4482 /* if function is static, check for initialized */
4484 if (require_clinit_call) {
4485 codegen_addpatchref(cd, cd->mcodeptr, PATCHER_clinit, m->class);
4488 if (JWNATIVEDEBUG(runverbose)) {
4491 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4493 /* save integer and float argument registers */
4495 for (i = 0; i < INT_ARG_CNT; i++) {
4496 x86_64_mov_reg_membase(cd, rd->argintregs[i], REG_SP, (1 + i) * 8);
4499 for (i = 0; i < FLT_ARG_CNT; i++) {
4500 x86_64_movq_reg_membase(cd, rd->argfltregs[i], REG_SP, (1 + INT_ARG_CNT + i) * 8);
4503 /* show integer hex code for float arguments */
4505 for (i = 0, l = 0; i < m->paramcount && i < INT_ARG_CNT; i++) {
4506 /* if the paramtype is a float, we have to right shift all */
4507 /* following integer registers */
4509 if (IS_FLT_DBL_TYPE(m->paramtypes[i])) {
4510 for (s1 = INT_ARG_CNT - 2; s1 >= i; s1--) {
4511 x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
4514 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[i]);
4519 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1);
4520 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, 0 * 8);
4521 x86_64_mov_imm_reg(cd, (u8) builtin_trace_args, REG_ITMP1);
4522 x86_64_call_reg(cd, REG_ITMP1);
4524 /* restore integer and float argument registers */
4526 for (i = 0; i < INT_ARG_CNT; i++) {
4527 x86_64_mov_membase_reg(cd, REG_SP, (1 + i) * 8, rd->argintregs[i]);
4530 for (i = 0; i < FLT_ARG_CNT; i++) {
4531 x86_64_movq_membase_reg(cd, REG_SP, (1 + INT_ARG_CNT + i) * 8, rd->argfltregs[i]);
4534 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4537 /* 4 == additional size needed for native stack frame information*/
4538 x86_64_alu_imm_reg(cd, X86_64_SUB, (4+INT_ARG_CNT + FLT_ARG_CNT+1) * 8, REG_SP);
4540 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 0 * 8);
4541 x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 1 * 8);
4542 x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 2 * 8);
4543 x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 3 * 8);
4544 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 4 * 8);
4545 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 5 * 8);
4547 x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 6 * 8);
4548 x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 7 * 8);
4549 x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 8 * 8);
4550 x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 9 * 8);
4551 x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 10 * 8);
4552 x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 11 * 8);
4553 x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 12 * 8);
4554 x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 13 * 8);
4558 0*8 void *oldThreadspecificHeadValue;
4559 1*8 void **addressOfThreadspecificHead;
4560 2*8 methodinfo *method;
4561 3*8 void *beginOfJavaStackframe; only used if != 0
4562 4*8 void *returnToFromNative;
4565 /* CREATE DYNAMIC STACK INFO -- BEGIN offsets:15,16,17,18*/
4566 x86_64_mov_imm_membase(cd, 0, REG_SP, 18*8);
4567 x86_64_mov_imm_membase(cd, (ptrint)m, REG_SP, 17*8);
4568 /*x86_64_mov_imm_membase(cd, 0, REG_SP, 17*8);*/
4570 x86_64_mov_imm_reg(cd, (u8) builtin_asm_get_stackframeinfo,REG_ITMP1);
4571 x86_64_call_reg(cd,REG_ITMP1);
4573 x86_64_mov_reg_membase(cd,REG_RESULT,REG_SP,16*8);
4574 x86_64_mov_membase_reg(cd,REG_RESULT,0,REG_ITMP2);
4575 x86_64_mov_reg_membase(cd,REG_ITMP2,REG_SP,15*8);
4576 x86_64_mov_reg_reg(cd,REG_SP,REG_ITMP2);
4577 x86_64_alu_imm_reg(cd, X86_64_ADD, (1+INT_ARG_CNT + FLT_ARG_CNT) * 8, REG_ITMP2);
4578 x86_64_mov_reg_membase(cd,REG_ITMP2,REG_RESULT,0);
4582 i386_mov_imm_membase(cd,0,REG_SP,stackframesize-4);
4583 i386_mov_imm_membase(cd, (s4) m, REG_SP,stackframesize-8);
4584 i386_mov_imm_reg(cd, (s4) builtin_asm_get_stackframeinfo, REG_ITMP1);
4585 i386_call_reg(cd, REG_ITMP1);
4586 i386_mov_reg_membase(cd, REG_RESULT,REG_SP,stackframesize-12); /*save thread specific pointer*/
4587 i386_mov_membase_reg(cd, REG_RESULT,0,REG_ITMP2);
4588 i386_mov_reg_membase(cd, REG_ITMP2,REG_SP,stackframesize-16); /*save previous value of memory adress pointed to by thread specific pointer*/
4589 i386_mov_reg_reg(cd, REG_SP,REG_ITMP2);
4590 i386_alu_imm_reg(cd, I386_ADD,stackframesize-16,REG_ITMP2);
4591 i386_mov_reg_membase(cd, REG_ITMP2,REG_RESULT,0);
4593 /* CREATE DYNAMIC STACK INFO -- END*/
4596 x86_64_mov_imm_reg(cd,(u8)nativeinvokation,REG_ITMP1);
4597 x86_64_call_reg(cd,REG_ITMP1);
4600 #if !defined(STATIC_CLASSPATH)
4601 /* call method to resolve native function if needed */
4603 /* needed to patch a jump over this block */
4604 x86_64_jmp_imm(cd, 0);
4605 jmpInstrPos = cd->mcodeptr - 4;
4608 x86_64_mov_imm_reg(cd, (ptrint) m, rd->argintregs[0]);
4610 x86_64_mov_imm_reg(cd, 0, rd->argintregs[1]);
4611 callAddrPatchPos = cd->mcodeptr - 8; /* at this position the place is specified where the native function adress should be patched into*/
4613 x86_64_mov_imm_reg(cd, 0, rd->argintregs[2]);
4614 jmpInstrPatchPos = cd->mcodeptr - 8;
4616 x86_64_mov_imm_reg(cd, (ptrint) jmpInstrPos, rd->argintregs[3]);
4618 x86_64_mov_imm_reg(cd, (ptrint) codegen_resolve_native, REG_ITMP1);
4619 x86_64_call_reg(cd, REG_ITMP1);
4621 *(jmpInstrPatchPos) = cd->mcodeptr - jmpInstrPos - 1-3; /*=opcode jmp_imm size*/
4628 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, rd->argintregs[0]);
4629 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[1]);
4630 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[2]);
4631 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[3]);
4632 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[4]);
4633 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[5]);
4635 x86_64_movq_membase_reg(cd, REG_SP, 6 * 8, rd->argfltregs[0]);
4636 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[1]);
4637 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[2]);
4638 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[3]);
4639 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[4]);
4640 x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[5]);
4641 x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[6]);
4642 x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[7]);
4644 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT+1) * 8, REG_SP);
4646 /* save argument registers on stack -- if we have to */
4648 if ((((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) || iargs > (INT_ARG_CNT - 1)) ||
4649 (fargs > FLT_ARG_CNT)) {
4656 /* do we need to shift integer argument register onto stack? */
4658 if ((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) {
4659 /* do we need to shift 2 arguments? */
4660 if (iargs > (INT_ARG_CNT - 1)) {
4667 } else if (iargs > (INT_ARG_CNT - 1)) {
4671 /* calculate required stack space */
4673 stackparamcnt += (iargs > INT_ARG_CNT) ? iargs - INT_ARG_CNT : 0;
4674 stackparamcnt += (fargs > FLT_ARG_CNT) ? fargs - FLT_ARG_CNT : 0;
4677 stackframesize = stackparamcnt + paramshiftcnt;
4679 /* keep stack 16-byte aligned */
4680 if (!(stackframesize & 0x1))
4683 x86_64_alu_imm_reg(cd, X86_64_SUB, stackframesize * 8, REG_SP);
4685 /* shift integer arguments if required */
4687 if ((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) {
4688 /* do we need to shift 2 arguments? */
4689 if (iargs > (INT_ARG_CNT - 1))
4690 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 1 * 8);
4692 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 0 * 8);
4694 } else if (iargs > (INT_ARG_CNT - 1)) {
4695 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 0 * 8);
4698 /* copy stack arguments into new stack frame -- if any */
4699 for (i = 0; i < stackparamcnt; i++) {
4700 x86_64_mov_membase_reg(cd, REG_SP, (stackframesize + 1 + i+4) * 8, REG_ITMP1); /* 4==additional size for stackrace data*/
4701 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
4705 /* keep stack 16-byte aligned */
4706 x86_64_alu_imm_reg(cd, X86_64_SUB, 1 * 8, REG_SP);
4710 /* shift integer arguments for `env' and `class' arguments */
4712 if (m->flags & ACC_STATIC) {
4713 /* shift iargs count if less than INT_ARG_CNT, or all */
4714 for (i = (iargs < (INT_ARG_CNT - 2)) ? iargs : (INT_ARG_CNT - 2); i >= 0; i--) {
4715 x86_64_mov_reg_reg(cd, rd->argintregs[i], rd->argintregs[i + 2]);
4718 /* put class into second argument register */
4719 x86_64_mov_imm_reg(cd, (u8) m->class, rd->argintregs[1]);
4722 /* shift iargs count if less than INT_ARG_CNT, or all */
4723 for (i = (iargs < (INT_ARG_CNT - 1)) ? iargs : (INT_ARG_CNT - 1); i >= 0; i--) {
4724 x86_64_mov_reg_reg(cd, rd->argintregs[i], rd->argintregs[i + 1]);
4728 /* put env into first argument register */
4729 x86_64_mov_imm_reg(cd, (u8) &env, rd->argintregs[0]);
4731 /* do the native function call */
4732 x86_64_mov_imm_reg(cd, (u8) f, REG_ITMP1);
4733 #if !defined(STATIC_CLASSPATH)
4735 (*callAddrPatchPos) = cd->mcodeptr - 8;
4737 x86_64_call_reg(cd, REG_ITMP1);
4739 /* remove stackframe if there is one */
4740 if (stackframesize) {
4741 x86_64_alu_imm_reg(cd, X86_64_ADD, stackframesize * 8, REG_SP);
4744 /*REMOVE DYNAMIC STACK INFO -BEGIN */
4745 x86_64_mov_reg_membase(cd,REG_RESULT,REG_SP,2*8);
4746 x86_64_mov_membase_reg(cd,REG_SP,0*8,REG_ITMP2);
4747 x86_64_mov_membase_reg(cd,REG_SP,1*8,REG_RESULT);
4748 x86_64_mov_reg_membase(cd,REG_ITMP2,REG_RESULT,0);
4749 x86_64_mov_membase_reg(cd,REG_SP,2*8,REG_RESULT);
4751 i386_push_reg(cd, REG_RESULT2);
4752 i386_mov_membase_reg(cd, REG_SP,stackframesize-12,REG_ITMP2); /*old value*/
4753 i386_mov_membase_reg(cd, REG_SP,stackframesize-8,REG_RESULT2); /*pointer*/
4754 i386_mov_reg_membase(cd, REG_ITMP2,REG_RESULT2,0);
4755 i386_pop_reg(cd, REG_RESULT2);
4757 /*REMOVE DYNAMIC STACK INFO -END */
4759 x86_64_alu_imm_reg(cd, X86_64_ADD, 4 * 8, REG_SP);
4761 if (JWNATIVEDEBUG(runverbose)) {
4762 x86_64_alu_imm_reg(cd, X86_64_SUB, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4764 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
4765 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
4767 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
4768 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
4769 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
4770 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
4772 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
4773 x86_64_call_reg(cd, REG_ITMP1);
4775 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
4776 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
4778 x86_64_alu_imm_reg(cd, X86_64_ADD, 3 * 8, REG_SP); /* keep stack 16-byte aligned */
4781 /* check for exception */
4783 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4784 x86_64_push_reg(cd, REG_RESULT);
4785 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4786 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4787 x86_64_call_reg(cd, REG_ITMP3);
4788 x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
4789 x86_64_pop_reg(cd, REG_RESULT);
4791 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4792 x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP3);
4794 x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
4795 x86_64_jcc(cd, X86_64_CC_NE, 1);
4799 /* handle exception */
4801 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4802 x86_64_push_reg(cd, REG_ITMP3);
4803 /* x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4804 x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4805 x86_64_call_reg(cd, REG_ITMP3);
4806 x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
4807 x86_64_pop_reg(cd, REG_ITMP1_XPTR);
4809 x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
4810 x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4811 x86_64_alu_reg_reg(cd, X86_64_XOR, REG_ITMP2, REG_ITMP2);
4812 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_ITMP3, 0); /* clear exception pointer */
4815 x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC); /* get return address from stack */
4816 x86_64_alu_imm_reg(cd, X86_64_SUB, 3, REG_ITMP2_XPC); /* callq */
4818 x86_64_mov_imm_reg(cd, (u8) asm_handle_nat_exception, REG_ITMP3);
4819 x86_64_jmp_reg(cd, REG_ITMP3);
4822 /* patch in a <clinit> call if required ***********************************/
4830 tmpcd = DNEW(codegendata);
4832 /* there can only be one patch ref entry */
4833 pref = cd->patchrefs;
4836 /* Get machine code which is patched back in later. A */
4837 /* `call rel32' is 5 bytes long (but read 8 bytes). */
4839 xcodeptr = cd->mcodebase + pref->branchpos;
4840 mcode = *((ptrint *) xcodeptr);
4842 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4843 /* create a virtual java_objectheader */
4845 *((ptrint *) (cd->mcodeptr + 0)) = 0; /* vftbl */
4846 *((ptrint *) (cd->mcodeptr + 8)) = (ptrint) get_dummyLR(); /* monitorPtr */
4851 /* patch in `call rel32' to call the following code */
4853 tmpcd->mcodeptr = xcodeptr; /* set dummy mcode pointer */
4854 x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4856 /* move pointer to java_objectheader onto stack */
4858 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4859 x86_64_call_imm(cd, 0);
4860 x86_64_alu_imm_membase(cd, X86_64_SUB, 5 + 2 * 8, REG_SP, 0);
4862 x86_64_push_imm(cd, 0);
4865 /* move machine code bytes and classinfo pointer into registers */
4867 x86_64_mov_imm_reg(cd, (ptrint) mcode, REG_ITMP3);
4868 x86_64_push_reg(cd, REG_ITMP3);
4869 x86_64_mov_imm_reg(cd, (ptrint) pref->ref, REG_ITMP3);
4870 x86_64_push_reg(cd, REG_ITMP3);
4872 x86_64_mov_imm_reg(cd, (ptrint) pref->patcher, REG_ITMP3);
4873 x86_64_push_reg(cd, REG_ITMP3);
4875 x86_64_mov_imm_reg(cd, (ptrint) asm_wrapper_patcher, REG_ITMP3);
4876 x86_64_jmp_reg(cd, REG_ITMP3);
4878 codegen_insertmethod((functionptr) cs, (functionptr) cd->mcodeptr);
4879 /*printf("codegen_insertmethod (nativestub) %s.%s: %p %p\n",
4880 m->class->name->text,m->name->text,cs,cd->mcodeptr);*/
4884 /*printf("(nativestub) %s.%s: %p %p\n",m->class->name->text,m->name->text,cs,cd->mcodeptr);*/
4886 /* Check if the stub size is big enough to hold the whole stub generated. */
4887 /* If not, this can lead into unpredictable crashes, because of heap */
4889 if ((s4) (cd->mcodeptr - s) > stubsize) {
4890 throw_cacao_exception_exit(string_java_lang_InternalError,
4891 "Native stub size %d is to small for current stub size %d",
4892 stubsize, (s4) (cd->mcodeptr - s));
4896 #if defined(STATISTICS)
4898 count_nstub_len += stubsize;
4901 /* release dump area */
4903 dump_release(dumpsize);
4909 /* function: removenativestub **************************************************
4911 removes a previously created native-stub from memory
4913 *******************************************************************************/
void removenativestub(u1 *stub)
	/* stubs are allocated with a fixed size, so release exactly
	   NATIVESTUBSIZE bytes starting at the stub's base address */
	CFREE(stub, NATIVESTUBSIZE);
4922 * These are local overrides for various environment variables in Emacs.
4923 * Please do not remove this and leave it at the end of the file, where
4924 * Emacs will automagically detect them.
4925 * ---------------------------------------------------------------------
4928 * indent-tabs-mode: t