Patched the s1 == REG_ITMP1 (formerly d == REG_ITMP3) optimization back in.
[cacao.git] / src / vm / jit / x86_64 / codegen.c
1 /* vm/jit/x86_64/codegen.c - machine code generator for x86_64
2
3    Copyright (C) 1996-2005 R. Grafl, A. Krall, C. Kruegel, C. Oates,
4    R. Obermaisser, M. Platter, M. Probst, S. Ring, E. Steiner,
5    C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich, J. Wenninger,
6    Institut f. Computersprachen - TU Wien
7
8    This file is part of CACAO.
9
10    This program is free software; you can redistribute it and/or
11    modify it under the terms of the GNU General Public License as
12    published by the Free Software Foundation; either version 2, or (at
13    your option) any later version.
14
15    This program is distributed in the hope that it will be useful, but
16    WITHOUT ANY WARRANTY; without even the implied warranty of
17    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18    General Public License for more details.
19
20    You should have received a copy of the GNU General Public License
21    along with this program; if not, write to the Free Software
22    Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
23    02111-1307, USA.
24
25    Contact: cacao@complang.tuwien.ac.at
26
27    Authors: Andreas Krall
28             Christian Thalinger
29
30    $Id: codegen.c 2179 2005-04-01 13:28:16Z twisti $
31
32 */
33
34
35 #define _GNU_SOURCE
36
37 #include <stdio.h>
38 #include <ucontext.h>
39
40 #include "cacao/cacao.h"
41 #include "native/native.h"
42 #include "vm/global.h"
43 #include "vm/builtin.h"
44 #include "vm/loader.h"
45 #include "vm/tables.h"
46 #include "vm/jit/asmpart.h"
47 #include "vm/jit/jit.h"
48 #include "vm/jit/reg.h"
49 #include "vm/jit/parse.h"
50 #include "vm/jit/x86_64/arch.h"
51 #include "vm/jit/x86_64/codegen.h"
52 #include "vm/jit/x86_64/emitfuncs.h"
53 #include "vm/jit/x86_64/types.h"
54 #include "vm/jit/x86_64/asmoffsets.h"
55
56
57 /* register description - array ***********************************************/
58
59 /* #define REG_RES   0         reserved register for OS or code generator     */
60 /* #define REG_RET   1         return value register                          */
61 /* #define REG_EXC   2         exception value register (only old jit)        */
62 /* #define REG_SAV   3         (callee) saved register                        */
63 /* #define REG_TMP   4         scratch temporary register (caller saved)      */
64 /* #define REG_ARG   5         argument register (caller saved)               */
65
66 /* #define REG_END   -1        last entry in tables                           */
67
68 static int nregdescint[] = {
69     REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
70     REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
71     REG_END
72 };
73
74
75 static int nregdescfloat[] = {
76     REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG,
77     REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
78     REG_END
79 };
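
/* The table index is the hardware register number (0-15): rax, rcx, rdx, rbx,
   rsp, rbp, rsi, rdi, r8-r15 for the integer table and xmm0-xmm15 for the
   float table.  So rax is the return register, rcx/rdx/rsi/rdi/r8/r9 are the
   six integer argument registers, rsp is reserved, and r10/r11 are reserved
   for the code generator (the signal handlers below use rax and r10 as
   REG_ITMP1_XPTR and REG_ITMP2_XPC).  xmm0-xmm7 are the eight float argument
   registers and xmm8-xmm10 are reserved, presumably as the REG_FTMP scratch
   registers. */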
80
81
82 /* Include independent code generation stuff -- include after register        */
83 /* descriptions to avoid extern definitions.                                  */
84
85 #include "vm/jit/codegen.inc"
86 #include "vm/jit/reg.inc"
87 #ifdef LSRA
88 #include "vm/jit/lsra.inc"
89 #endif
90
91
92 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
93 void thread_restartcriticalsection(ucontext_t *uc)
94 {
95         void *critical;
96
97         critical = thread_checkcritical((void *) uc->uc_mcontext.gregs[REG_RIP]);
98
99         if (critical)
100                 uc->uc_mcontext.gregs[REG_RIP] = (u8) critical;
101 }
102 #endif
103
104
105 /* NullPointerException signal handler for hardware null pointer check */
106
107 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
108 {
109         sigset_t nsig;
110
111         struct ucontext *_uc = (struct ucontext *) _p;
112         struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
113         struct sigaction act;
114         java_objectheader *xptr;
115
116         /* Reset signal handler - necessary for SysV, does no harm for BSD */
117         
118         act.sa_sigaction = catch_NullPointerException;       /* reinstall handler */
119         act.sa_flags = SA_SIGINFO;
120         sigaction(sig, &act, NULL);
121         
122         sigemptyset(&nsig);
123         sigaddset(&nsig, sig);
124         sigprocmask(SIG_UNBLOCK, &nsig, NULL);               /* unblock signal    */
125
126         xptr = new_nullpointerexception();
127
128         sigctx->rax = (u8) xptr;                             /* REG_ITMP1_XPTR    */
129         sigctx->r10 = sigctx->rip;                           /* REG_ITMP2_XPC     */
130         sigctx->rip = (u8) asm_handle_exception;
131
132         return;
133 }
134
135
136 /* ArithmeticException signal handler for hardware divide by zero check */
137
138 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
139 {
140         sigset_t nsig;
141
142         struct ucontext *_uc = (struct ucontext *) _p;
143         struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
144         struct sigaction act;
145         java_objectheader *xptr;
146
147         /* Reset signal handler - necessary for SysV, does no harm for BSD */
148
149         act.sa_sigaction = catch_ArithmeticException;        /* reinstall handler */
150         act.sa_flags = SA_SIGINFO;
151         sigaction(sig, &act, NULL);
152
153         sigemptyset(&nsig);
154         sigaddset(&nsig, sig);
155         sigprocmask(SIG_UNBLOCK, &nsig, NULL);               /* unblock signal    */
156
157         xptr = new_arithmeticexception();
158
159         sigctx->rax = (u8) xptr;                             /* REG_ITMP1_XPTR    */
160         sigctx->r10 = sigctx->rip;                           /* REG_ITMP2_XPC     */
161         sigctx->rip = (u8) asm_handle_exception;
162
163         return;
164 }
165
166
167 void init_exceptions(void)
168 {
169         struct sigaction act;
170
171         /* install signal handlers we need to convert to exceptions */
172         sigemptyset(&act.sa_mask);
173
174         if (!checknull) {
175 #if defined(SIGSEGV)
176                 act.sa_sigaction = catch_NullPointerException;
177                 act.sa_flags = SA_SIGINFO;
178                 sigaction(SIGSEGV, &act, NULL);
179 #endif
180
181 #if defined(SIGBUS)
182                 act.sa_sigaction = catch_NullPointerException;
183                 act.sa_flags = SA_SIGINFO;
184                 sigaction(SIGBUS, &act, NULL);
185 #endif
186         }
187
188         act.sa_sigaction = catch_ArithmeticException;
189         act.sa_flags = SA_SIGINFO;
190         sigaction(SIGFPE, &act, NULL);
191 }
192
193
194 /* function codegen ************************************************************
195
196         generates machine code
197
198 *******************************************************************************/
199
200 void codegen(methodinfo *m, codegendata *cd, registerdata *rd)
201 {
202         s4 len, s1, s2, s3, d;
203         s8 a;
204         s4 parentargs_base;
205         stackptr        src;
206         varinfo        *var;
207         basicblock     *bptr;
208         instruction    *iptr;
209         exceptiontable *ex;
210
211         {
212         s4 i, p, pa, t, l;
213         s4 savedregs_num;
214
215         savedregs_num = 0;
216
217         /* space to save used callee saved registers */
218
219         savedregs_num += (rd->savintregcnt - rd->maxsavintreguse);
220         savedregs_num += (rd->savfltregcnt - rd->maxsavfltreguse);
221
222         parentargs_base = rd->maxmemuse + savedregs_num;
223
224 #if defined(USE_THREADS)           /* space to save argument of monitor_enter */
225
226         if (checksync && (m->flags & ACC_SYNCHRONIZED))
227                 parentargs_base++;
228
229 #endif
230
231         /* Keep stack of non-leaf functions 16-byte aligned for calls into native */
232         /* code e.g. libc or jni (alignment problems with movaps).                */
233
234         if (!m->isleafmethod || runverbose) {
235                 parentargs_base |= 0x1;
236         }
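
        /* A sketch of the arithmetic behind the |= 0x1: the caller's CALL
           pushes an 8-byte return address, so on entry RSP == 8 (mod 16).
           Forcing parentargs_base to be odd (e.g. 4 -> 5 slots) makes the
           frame an odd number of 8-byte slots, so after the SUB below RSP is
           16-byte aligned again at every call site inside the method, which
           the SysV ABI (and movaps spills in libc/JNI code) expects. */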
237
238         /* create method header */
239
240         (void) dseg_addaddress(cd, m);                          /* MethodPointer  */
241         (void) dseg_adds4(cd, parentargs_base * 8);             /* FrameSize      */
242
243 #if defined(USE_THREADS)
244
245         /* IsSync contains the offset relative to the stack pointer for the
246            argument of monitor_exit used in the exception handler. Since an
247            offset of zero would be indistinguishable from "not synchronized",
248            the stored value is offset by one slot.
249         */
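
        /* Concretely: the monitor argument itself is stored at offset
           rd->maxmemuse * 8 (see the monitorenter code below).  If that
           offset happened to be 0, IsSync would read as "not synchronized",
           so (rd->maxmemuse + 1) * 8 is stored instead and the consumer
           presumably subtracts the extra slot again. */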
250
251         if (checksync && (m->flags & ACC_SYNCHRONIZED))
252                 (void) dseg_adds4(cd, (rd->maxmemuse + 1) * 8);     /* IsSync         */
253         else
254
255 #endif
256
257                 (void) dseg_adds4(cd, 0);                           /* IsSync         */
258                                                
259         (void) dseg_adds4(cd, m->isleafmethod);                 /* IsLeaf         */
260         (void) dseg_adds4(cd, rd->savintregcnt - rd->maxsavintreguse);/* IntSave  */
261         (void) dseg_adds4(cd, rd->savfltregcnt - rd->maxsavfltreguse);/* FltSave  */
262         (void) dseg_adds4(cd, cd->exceptiontablelength);        /* ExTableSize    */
263
264         /* create exception table */
265
266         for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
267                 dseg_addtarget(cd, ex->start);
268                 dseg_addtarget(cd, ex->end);
269                 dseg_addtarget(cd, ex->handler);
270                 (void) dseg_addaddress(cd, ex->catchtype);
271         }
272         
273         /* initialize mcode variables */
274         
275         cd->mcodeptr = (u1 *) cd->mcodebase;
276         cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
277         MCODECHECK(128 + m->paramcount);
278
279         /* create stack frame (if necessary) */
280
281         if (parentargs_base) {
282                 x86_64_alu_imm_reg(cd, X86_64_SUB, parentargs_base * 8, REG_SP);
283         }
284
285         /* save used callee saved registers */
286
287         p = parentargs_base;
288         for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
289                 p--; x86_64_mov_reg_membase(cd, rd->savintregs[i], REG_SP, p * 8);
290         }
291         for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
292                 p--; x86_64_movq_reg_membase(cd, rd->savfltregs[i], REG_SP, p * 8);
293         }
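
        /* Rough frame layout after the code above, one 8-byte slot each
           (alignment padding not shown), offsets relative to the new REG_SP:

             (parentargs_base + 1 + pa) * 8   stack arguments of this method
             parentargs_base * 8              return address
             just below the return address    saved callee-saved registers
             rd->maxmemuse * 8                monitorenter argument (if synchronized)
             0 ... rd->maxmemuse * 8 - 8      spilled locals and temporaries */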
294
295         /* take arguments out of register or stack frame */
296
297         for (p = 0, l = 0, s1 = 0, s2 = 0; p < m->paramcount; p++) {
298                 t = m->paramtypes[p];
299                 var = &(rd->locals[l][t]);
300                 l++;
301                 if (IS_2_WORD_TYPE(t))    /* increment local counter for 2 word types */
302                         l++;
303                 if (var->type < 0) {
304                         if (IS_INT_LNG_TYPE(t)) {
305                                 s1++;
306                         } else {
307                                 s2++;
308                         }
309                         continue;
310                 }
311                 if (IS_INT_LNG_TYPE(t)) {                    /* integer args          */
312                         if (s1 < INT_ARG_CNT) {                  /* register arguments    */
313                                 if (!(var->flags & INMEMORY)) {      /* reg arg -> register   */
314                                         M_INTMOVE(rd->argintregs[s1], var->regoff);
315
316                                 } else {                             /* reg arg -> spilled    */
317                                     x86_64_mov_reg_membase(cd, rd->argintregs[s1], REG_SP, var->regoff * 8);
318                                 }
319
320                         } else {                                 /* stack arguments       */
321                                 pa = s1 - INT_ARG_CNT;
322                                 if (s2 >= FLT_ARG_CNT) {
323                                         pa += s2 - FLT_ARG_CNT;
324                                 }
325                                 if (!(var->flags & INMEMORY)) {      /* stack arg -> register */
326                                         x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff);    /* + 8 for return address */
327                                 } else {                             /* stack arg -> spilled  */
328                                         x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1);    /* + 8 for return address */
329                                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, var->regoff * 8);
330                                 }
331                         }
332                         s1++;
333
334                 } else {                                     /* floating args         */
335                         if (s2 < FLT_ARG_CNT) {                  /* register arguments    */
336                                 if (!(var->flags & INMEMORY)) {      /* reg arg -> register   */
337                                         M_FLTMOVE(rd->argfltregs[s2], var->regoff);
338
339                                 } else {                                         /* reg arg -> spilled    */
340                                         x86_64_movq_reg_membase(cd, rd->argfltregs[s2], REG_SP, var->regoff * 8);
341                                 }
342
343                         } else {                                 /* stack arguments       */
344                                 pa = s2 - FLT_ARG_CNT;
345                                 if (s1 >= INT_ARG_CNT) {
346                                         pa += s1 - INT_ARG_CNT;
347                                 }
348                                 if (!(var->flags & INMEMORY)) {      /* stack-arg -> register */
349                                         x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff);
350
351                                 } else {
352                                         x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
353                                         x86_64_movq_reg_membase(cd, REG_FTMP1, REG_SP, var->regoff * 8);
354                                 }
355                         }
356                         s2++;
357                 }
358         }  /* end for */
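
        /* Worked example for the stack-argument index pa, assuming
           INT_ARG_CNT = 6 and FLT_ARG_CNT = 8 as implied by the register
           descriptors: for a method taking seven ints followed by nine
           floats, the 7th int has s1 = 6, s2 = 0, hence pa = 0; the 9th
           float has s1 = 7, s2 = 8, hence pa = (8 - 8) + (7 - 6) = 1.
           pa is simply this argument's index among all arguments that
           overflowed onto the caller's stack, and the "+ 8" skips the
           return address. */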
359
360         /* save monitorenter argument */
361
362 #if defined(USE_THREADS)
363         if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
364                 u8 func_enter;
365
366                 if (m->flags & ACC_STATIC) {
367                         func_enter = (u8) builtin_staticmonitorenter;
368                         x86_64_mov_imm_reg(cd, (s8) m->class, REG_ITMP1);
369                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, rd->maxmemuse * 8);
370
371                 } else {
372                         func_enter = (u8) builtin_monitorenter;
373                         x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, rd->maxmemuse * 8);
374                 }
375
376                 /* call monitorenter function */
377
378                 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
379                 x86_64_mov_imm_reg(cd, func_enter, REG_ITMP1);
380                 x86_64_call_reg(cd, REG_ITMP1);
381         }
382 #endif
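
        /* The lock object - the receiver, or the class object for static
           methods - is first written to the reserved slot at
           rd->maxmemuse * 8 (the slot the IsSync header entry refers to, so
           the exception handler can call monitor_exit on it) and then
           reloaded into the first argument register for the builtin call. */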
383
384         /* Copy argument registers to stack and call trace function with pointer  */
385         /* to arguments on stack.                                                 */
386
387         if (runverbose) {
388                 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1 + 1) * 8, REG_SP);
389
390                 /* save integer argument registers */
391
392                 for (p = 0; p < INT_ARG_CNT; p++) {
393                         x86_64_mov_reg_membase(cd, rd->argintregs[p], REG_SP, (1 + p) * 8);
394                 }
395
396                 /* save float argument registers */
397
398                 for (p = 0; p < FLT_ARG_CNT; p++) {
399                         x86_64_movq_reg_membase(cd, rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
400                 }
401
402                 /* show integer hex code for float arguments */
403
404                 for (p = 0, l = 0; p < m->paramcount && p < INT_ARG_CNT; p++) {
405                         t = m->paramtypes[p];
406
407                         /* if the paramtype is a float, we have to shift all following   */
408                         /* integer argument registers up by one position                 */
409
410                         if (IS_FLT_DBL_TYPE(t)) {
411                                 for (s1 = INT_ARG_CNT - 2; s1 >= p; s1--) {
412                                         x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
413                                 }
414
415                                 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
416                                 l++;
417                         }
418                 }
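
                /* Presumably the shifting exists because builtin_trace_args
                   expects the Java arguments in declaration order in the
                   integer argument registers: moving the remaining integer
                   registers up by one frees argintregs[p] so that the
                   float's raw bits can be placed in that position. */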
419
420                 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP2);
421                 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
422                 x86_64_mov_imm_reg(cd, (u8) builtin_trace_args, REG_ITMP1);
423                 x86_64_call_reg(cd, REG_ITMP1);
424
425                 /* restore integer argument registers */
426
427                 for (p = 0; p < INT_ARG_CNT; p++) {
428                         x86_64_mov_membase_reg(cd, REG_SP, (1 + p) * 8, rd->argintregs[p]);
429                 }
430
431                 /* restore float argument registers */
432
433                 for (p = 0; p < FLT_ARG_CNT; p++) {
434                         x86_64_movq_membase_reg(cd, REG_SP, (1 + INT_ARG_CNT + p) * 8, rd->argfltregs[p]);
435                 }
436
437                 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1 + 1) * 8, REG_SP);
438         }
439
440         }
441
442         /* end of header generation */
443
444         /* walk through all basic blocks */
445         for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
446
447                 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
448
449                 if (bptr->flags >= BBREACHED) {
450
451                         /* branch resolving */
452
453                         branchref *bref;
454                         for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
455                                 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos, 
456                                                                   bref->branchpos,
457                                                                   bptr->mpc);
458                         }
459
460                 /* copy interface registers to their destination */
461
462                 src = bptr->instack;
463                 len = bptr->indepth;
464                 MCODECHECK(64 + len);
465
466 #ifdef LSRA
467                 if (opt_lsra) {
468                         while (src != NULL) {
469                                 len--;
470                                 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
471                                         if (bptr->type == BBTYPE_SBR) {
472                                                 /* d = reg_of_var(rd, src, REG_ITMP1); */
473                                                 if (!(src->flags & INMEMORY))
474                                                         d = src->regoff;
475                                                 else
476                                                         d = REG_ITMP1;
477                                                 x86_64_pop_reg(cd, d);
478                                                 store_reg_to_var_int(src, d);
479
480                                         } else if (bptr->type == BBTYPE_EXH) {
481                                                 /* d = reg_of_var(rd, src, REG_ITMP1); */
482                                                 if (!(src->flags & INMEMORY))
483                                                         d = src->regoff;
484                                                 else
485                                                         d = REG_ITMP1;
486                                                 M_INTMOVE(REG_ITMP1, d);
487                                                 store_reg_to_var_int(src, d);
488                                         }
489                                 }
490                                 src = src->prev;
491                         }
492                         
493                 } else {
494 #endif
495
496         while (src != NULL) {
497                         len--;
498                         if ((len == 0) && (bptr->type != BBTYPE_STD)) {
499                                 if (bptr->type == BBTYPE_SBR) {
500                                         d = reg_of_var(rd, src, REG_ITMP1);
501                                         x86_64_pop_reg(cd, d);
502                                         store_reg_to_var_int(src, d);
503
504                                 } else if (bptr->type == BBTYPE_EXH) {
505                                         d = reg_of_var(rd, src, REG_ITMP1);
506                                         M_INTMOVE(REG_ITMP1, d);
507                                         store_reg_to_var_int(src, d);
508                                 }
509
510                         } else {
511                                 d = reg_of_var(rd, src, REG_ITMP1);
512                                 if (src->varkind != STACKVAR) {
513                                         s2 = src->type;
514                                         if (IS_FLT_DBL_TYPE(s2)) {
515                                                 s1 = rd->interfaces[len][s2].regoff;
516                                                 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
517                                                         M_FLTMOVE(s1, d);
518
519                                                 } else {
520                                                         x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
521                                                 }
522                                                 store_reg_to_var_flt(src, d);
523
524                                         } else {
525                                                 s1 = rd->interfaces[len][s2].regoff;
526                                                 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
527                                                         M_INTMOVE(s1, d);
528
529                                                 } else {
530                                                         x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
531                                                 }
532                                                 store_reg_to_var_int(src, d);
533                                         }
534                                 }
535                         }
536                         src = src->prev;
537                 }
538 #ifdef LSRA
539                 }
540 #endif
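
                /* Interface registers carry the Java operand stack across
                   basic-block boundaries: rd->interfaces[depth][type] is
                   either a real register or a stack slot, and the code above
                   copies each entry into whatever this block's own stack
                   representation uses.  Special cases: a BBTYPE_SBR block
                   pops the JSR return address, and a BBTYPE_EXH (exception
                   handler) block receives the exception object in REG_ITMP1. */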
541                 /* walk through all instructions */
542                 
543                 src = bptr->instack;
544                 len = bptr->icount;
545                 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
546
547                         MCODECHECK(64);   /* an instruction usually needs < 64 words      */
548                         switch (iptr->opc) {
549                         case ICMD_INLINE_START: /* internal ICMDs                         */
550                         case ICMD_INLINE_END:
551                                 break;
552
553                         case ICMD_NOP:    /* ...  ==> ...                                 */
554                                 break;
555
556                         case ICMD_NULLCHECKPOP: /* ..., objectref  ==> ...                */
557                                 if (src->flags & INMEMORY) {
558                                         x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
559
560                                 } else {
561                                         x86_64_test_reg_reg(cd, src->regoff, src->regoff);
562                                 }
563                                 x86_64_jcc(cd, X86_64_CC_E, 0);
564                                 codegen_addxnullrefs(cd, cd->mcodeptr);
565                                 break;
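
                                /* The jcc above is emitted with a zero
                                   displacement and its position recorded via
                                   codegen_addxnullrefs; presumably it is
                                   patched later to branch to the
                                   NullPointerException throw code emitted at
                                   the end of the method. */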
566
567                 /* constant operations ************************************************/
568
569                 case ICMD_ICONST:     /* ...  ==> ..., constant                       */
570                                       /* op1 = 0, val.i = constant                    */
571
572                         d = reg_of_var(rd, iptr->dst, REG_ITMP1);
573                         if (iptr->val.i == 0) {
574                                 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
575                         } else {
576                                 x86_64_movl_imm_reg(cd, iptr->val.i, d);
577                         }
578                         store_reg_to_var_int(iptr->dst, d);
579                         break;
580
581                 case ICMD_ACONST:     /* ...  ==> ..., constant                       */
582                                       /* op1 = 0, val.a = constant                    */
583
584                         d = reg_of_var(rd, iptr->dst, REG_ITMP1);
585                         if (iptr->val.a == 0) {
586                                 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
587                         } else {
588                                 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, d);
589                         }
590                         store_reg_to_var_int(iptr->dst, d);
591                         break;
592
593                 case ICMD_LCONST:     /* ...  ==> ..., constant                       */
594                                       /* op1 = 0, val.l = constant                    */
595
596                         d = reg_of_var(rd, iptr->dst, REG_ITMP1);
597                         if (iptr->val.l == 0) {
598                                 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
599                         } else {
600                                 x86_64_mov_imm_reg(cd, iptr->val.l, d);
601                         }
602                         store_reg_to_var_int(iptr->dst, d);
603                         break;
604
605                 case ICMD_FCONST:     /* ...  ==> ..., constant                       */
606                                       /* op1 = 0, val.f = constant                    */
607
608                         d = reg_of_var(rd, iptr->dst, REG_FTMP1);
609                         a = dseg_addfloat(cd, iptr->val.f);
610                         x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + a, d);
611                         store_reg_to_var_flt(iptr->dst, d);
612                         break;
613                 
614                 case ICMD_DCONST:     /* ...  ==> ..., constant                       */
615                                       /* op1 = 0, val.d = constant                    */
616
617                         d = reg_of_var(rd, iptr->dst, REG_FTMP1);
618                         a = dseg_adddouble(cd, iptr->val.d);
619                         x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, d);
620                         store_reg_to_var_flt(iptr->dst, d);
621                         break;
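
                /* Displacement arithmetic for the two constant loads above:
                   dseg_addfloat/dseg_adddouble return a negative offset 'a'
                   relative to the start of the method code (the data segment
                   ends up directly below the code entry point), and
                   RIP-relative addressing is relative to the end of the
                   current instruction.  The subtracted 8/9 is the encoded
                   length of the load itself (9 when a REX prefix is present,
                   e.g. for xmm8-xmm15), so the operand points at the dseg
                   entry. */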
622
623
624                 /* load/store operations **********************************************/
625
626                 case ICMD_ILOAD:      /* ...  ==> ..., content of local variable      */
627                                       /* op1 = local variable                         */
628
629                         d = reg_of_var(rd, iptr->dst, REG_ITMP1);
630                         if ((iptr->dst->varkind == LOCALVAR) &&
631                             (iptr->dst->varnum == iptr->op1)) {
632                                 break;
633                         }
634                         var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
635                         if (var->flags & INMEMORY) {
636                                 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
637                                 store_reg_to_var_int(iptr->dst, d);
638
639                         } else {
640                                 if (iptr->dst->flags & INMEMORY) {
641                                         x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
642
643                                 } else {
644                                         M_INTMOVE(var->regoff, d);
645                                 }
646                         }
647                         break;
648
649                 case ICMD_LLOAD:      /* ...  ==> ..., content of local variable      */
650                 case ICMD_ALOAD:      /* op1 = local variable                         */
651
652                         d = reg_of_var(rd, iptr->dst, REG_ITMP1);
653                         if ((iptr->dst->varkind == LOCALVAR) &&
654                             (iptr->dst->varnum == iptr->op1)) {
655                                 break;
656                         }
657                         var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
658                         if (var->flags & INMEMORY) {
659                                 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
660                                 store_reg_to_var_int(iptr->dst, d);
661
662                         } else {
663                                 if (iptr->dst->flags & INMEMORY) {
664                                         x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
665
666                                 } else {
667                                         M_INTMOVE(var->regoff, d);
668                                 }
669                         }
670                         break;
671
672                 case ICMD_FLOAD:      /* ...  ==> ..., content of local variable      */
673                 case ICMD_DLOAD:      /* op1 = local variable                         */
674
675                         d = reg_of_var(rd, iptr->dst, REG_FTMP1);
676                         if ((iptr->dst->varkind == LOCALVAR) &&
677                             (iptr->dst->varnum == iptr->op1)) {
678                                 break;
679                         }
680                         var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
681                         if (var->flags & INMEMORY) {
682                                 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
683                                 store_reg_to_var_flt(iptr->dst, d);
684
685                         } else {
686                                 if (iptr->dst->flags & INMEMORY) {
687                                         x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
688
689                                 } else {
690                                         M_FLTMOVE(var->regoff, d);
691                                 }
692                         }
693                         break;
694
695                 case ICMD_ISTORE:     /* ..., value  ==> ...                          */
696                 case ICMD_LSTORE:     /* op1 = local variable                         */
697                 case ICMD_ASTORE:
698
699                         if ((src->varkind == LOCALVAR) &&
700                             (src->varnum == iptr->op1)) {
701                                 break;
702                         }
703                         var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
704                         if (var->flags & INMEMORY) {
705                                 var_to_reg_int(s1, src, REG_ITMP1);
706                                 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
707
708                         } else {
709                                 var_to_reg_int(s1, src, var->regoff);
710                                 M_INTMOVE(s1, var->regoff);
711                         }
712                         break;
713
714                 case ICMD_FSTORE:     /* ..., value  ==> ...                          */
715                 case ICMD_DSTORE:     /* op1 = local variable                         */
716
717                         if ((src->varkind == LOCALVAR) &&
718                             (src->varnum == iptr->op1)) {
719                                 break;
720                         }
721                         var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
722                         if (var->flags & INMEMORY) {
723                                 var_to_reg_flt(s1, src, REG_FTMP1);
724                                 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
725
726                         } else {
727                                 var_to_reg_flt(s1, src, var->regoff);
728                                 M_FLTMOVE(s1, var->regoff);
729                         }
730                         break;
731
732
733                 /* pop/dup/swap operations ********************************************/
734
735                 /* attention: doubles and longs occupy only one stack entry in CACAO ICMDs */
736
737                 case ICMD_POP:        /* ..., value  ==> ...                          */
738                 case ICMD_POP2:       /* ..., value, value  ==> ...                   */
739                         break;
740
741                 case ICMD_DUP:        /* ..., a ==> ..., a, a                         */
742                         M_COPY(src, iptr->dst);
743                         break;
744
745                 case ICMD_DUP_X1:     /* ..., a, b ==> ..., b, a, b                   */
746
747                         M_COPY(src,       iptr->dst);
748                         M_COPY(src->prev, iptr->dst->prev);
749                         M_COPY(iptr->dst, iptr->dst->prev->prev);
750                         break;
751
752                 case ICMD_DUP_X2:     /* ..., a, b, c ==> ..., c, a, b, c             */
753
754                         M_COPY(src,             iptr->dst);
755                         M_COPY(src->prev,       iptr->dst->prev);
756                         M_COPY(src->prev->prev, iptr->dst->prev->prev);
757                         M_COPY(iptr->dst,       iptr->dst->prev->prev->prev);
758                         break;
759
760                 case ICMD_DUP2:       /* ..., a, b ==> ..., a, b, a, b                */
761
762                         M_COPY(src,       iptr->dst);
763                         M_COPY(src->prev, iptr->dst->prev);
764                         break;
765
766                 case ICMD_DUP2_X1:    /* ..., a, b, c ==> ..., b, c, a, b, c          */
767
768                         M_COPY(src,             iptr->dst);
769                         M_COPY(src->prev,       iptr->dst->prev);
770                         M_COPY(src->prev->prev, iptr->dst->prev->prev);
771                         M_COPY(iptr->dst,       iptr->dst->prev->prev->prev);
772                         M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
773                         break;
774
775                 case ICMD_DUP2_X2:    /* ..., a, b, c, d ==> ..., c, d, a, b, c, d    */
776
777                         M_COPY(src,                   iptr->dst);
778                         M_COPY(src->prev,             iptr->dst->prev);
779                         M_COPY(src->prev->prev,       iptr->dst->prev->prev);
780                         M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
781                         M_COPY(iptr->dst,             iptr->dst->prev->prev->prev->prev);
782                         M_COPY(iptr->dst->prev,       iptr->dst->prev->prev->prev->prev->prev);
783                         break;
784
785                 case ICMD_SWAP:       /* ..., a, b ==> ..., b, a                      */
786
787                         M_COPY(src,       iptr->dst->prev);
788                         M_COPY(src->prev, iptr->dst);
789                         break;
790
791
792                 /* integer operations *************************************************/
793
794                 case ICMD_INEG:       /* ..., value  ==> ..., - value                 */
795
796                         d = reg_of_var(rd, iptr->dst, REG_NULL);
797                         if (iptr->dst->flags & INMEMORY) {
798                                 if (src->flags & INMEMORY) {
799                                         if (src->regoff == iptr->dst->regoff) {
800                                                 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
801
802                                         } else {
803                                                 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
804                                                 x86_64_negl_reg(cd, REG_ITMP1);
805                                                 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
806                                         }
807
808                                 } else {
809                                         x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
810                                         x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
811                                 }
812
813                         } else {
814                                 if (src->flags & INMEMORY) {
815                                         x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
816                                         x86_64_negl_reg(cd, iptr->dst->regoff);
817
818                                 } else {
819                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
820                                         x86_64_negl_reg(cd, iptr->dst->regoff);
821                                 }
822                         }
823                         break;
824
825                 case ICMD_LNEG:       /* ..., value  ==> ..., - value                 */
826
827                         d = reg_of_var(rd, iptr->dst, REG_NULL);
828                         if (iptr->dst->flags & INMEMORY) {
829                                 if (src->flags & INMEMORY) {
830                                         if (src->regoff == iptr->dst->regoff) {
831                                                 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
832
833                                         } else {
834                                                 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
835                                                 x86_64_neg_reg(cd, REG_ITMP1);
836                                                 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
837                                         }
838
839                                 } else {
840                                         x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
841                                         x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
842                                 }
843
844                         } else {
845                                 if (src->flags & INMEMORY) {
846                                         x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
847                                         x86_64_neg_reg(cd, iptr->dst->regoff);
848
849                                 } else {
850                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
851                                         x86_64_neg_reg(cd, iptr->dst->regoff);
852                                 }
853                         }
854                         break;
855
856                 case ICMD_I2L:        /* ..., value  ==> ..., value                   */
857
858                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
859                         if (src->flags & INMEMORY) {
860                                 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
861
862                         } else {
863                                 x86_64_movslq_reg_reg(cd, src->regoff, d);
864                         }
865                         store_reg_to_var_int(iptr->dst, d);
866                         break;
867
868                 case ICMD_L2I:        /* ..., value  ==> ..., value                   */
869
870                         var_to_reg_int(s1, src, REG_ITMP1);
871                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
872                         M_INTMOVE(s1, d);
873                         store_reg_to_var_int(iptr->dst, d);
874                         break;
875
876                 case ICMD_INT2BYTE:   /* ..., value  ==> ..., value                   */
877
878                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
879                         if (src->flags & INMEMORY) {
880                                 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
881
882                         } else {
883                                 x86_64_movsbq_reg_reg(cd, src->regoff, d);
884                         }
885                         store_reg_to_var_int(iptr->dst, d);
886                         break;
887
888                 case ICMD_INT2CHAR:   /* ..., value  ==> ..., value                   */
889
890                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
891                         if (src->flags & INMEMORY) {
892                                 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
893
894                         } else {
895                                 x86_64_movzwq_reg_reg(cd, src->regoff, d);
896                         }
897                         store_reg_to_var_int(iptr->dst, d);
898                         break;
899
900                 case ICMD_INT2SHORT:  /* ..., value  ==> ..., value                   */
901
902                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
903                         if (src->flags & INMEMORY) {
904                                 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
905
906                         } else {
907                                 x86_64_movswq_reg_reg(cd, src->regoff, d);
908                         }
909                         store_reg_to_var_int(iptr->dst, d);
910                         break;
911
912
913                 case ICMD_IADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
914
915                         d = reg_of_var(rd, iptr->dst, REG_NULL);
916                         x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
917                         break;
918
919                 case ICMD_IADDCONST:  /* ..., value  ==> ..., value + constant        */
920                                       /* val.i = constant                             */
921
922                         d = reg_of_var(rd, iptr->dst, REG_NULL);
923                         x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
924                         break;
925
926                 case ICMD_LADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
927
928                         d = reg_of_var(rd, iptr->dst, REG_NULL);
929                         x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
930                         break;
931
932                 case ICMD_LADDCONST:  /* ..., value  ==> ..., value + constant        */
933                                       /* val.l = constant                             */
934
935                         d = reg_of_var(rd, iptr->dst, REG_NULL);
936                         x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
937                         break;
938
939                 case ICMD_ISUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
940
941                         d = reg_of_var(rd, iptr->dst, REG_NULL);
942                         if (iptr->dst->flags & INMEMORY) {
943                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
944                                         if (src->prev->regoff == iptr->dst->regoff) {
945                                                 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
946                                                 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
947
948                                         } else {
949                                                 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
950                                                 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
951                                                 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
952                                         }
953
954                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
955                                         M_INTMOVE(src->prev->regoff, REG_ITMP1);
956                                         x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
957                                         x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
958
959                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
960                                         if (src->prev->regoff == iptr->dst->regoff) {
961                                                 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
962
963                                         } else {
964                                                 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
965                                                 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
966                                                 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
967                                         }
968
969                                 } else {
970                                         x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
971                                         x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
972                                 }
973
974                         } else {
975                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
976                                         x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
977                                         x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
978
979                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
980                                         M_INTMOVE(src->prev->regoff, d);
981                                         x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
982
983                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
984                                         /* workaround for reg alloc */
985                                         if (src->regoff == iptr->dst->regoff) {
986                                                 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
987                                                 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
988                                                 M_INTMOVE(REG_ITMP1, d);
989
990                                         } else {
991                                                 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
992                                                 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
993                                         }
994
995                                 } else {
996                                         /* workaround for reg alloc */
997                                         if (src->regoff == iptr->dst->regoff) {
998                                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
999                                                 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1000                                                 M_INTMOVE(REG_ITMP1, d);
1001
1002                                         } else {
1003                                                 M_INTMOVE(src->prev->regoff, d);
1004                                                 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
1005                                         }
1006                                 }
1007                         }
1008                         break;
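
                        /* The "workaround for reg alloc" cases above cover the
                           situation where the allocator assigned the subtrahend
                           src the same register as the destination: loading
                           src->prev into d first would clobber src before the
                           destructive SUB, so the result is built in REG_ITMP1
                           and only then moved to d. */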
1009
1010                 case ICMD_ISUBCONST:  /* ..., value  ==> ..., value - constant        */
1011                                       /* val.i = constant                             */
1012
1013                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1014                         x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
1015                         break;
1016
1017                 case ICMD_LSUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
1018
1019                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1020                         if (iptr->dst->flags & INMEMORY) {
1021                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1022                                         if (src->prev->regoff == iptr->dst->regoff) {
1023                                                 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1024                                                 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1025
1026                                         } else {
1027                                                 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1028                                                 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1029                                                 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1030                                         }
1031
1032                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1033                                         M_INTMOVE(src->prev->regoff, REG_ITMP1);
1034                                         x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1035                                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1036
1037                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1038                                         if (src->prev->regoff == iptr->dst->regoff) {
1039                                                 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1040
1041                                         } else {
1042                                                 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1043                                                 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1044                                                 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1045                                         }
1046
1047                                 } else {
1048                                         x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1049                                         x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1050                                 }
1051
1052                         } else {
1053                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1054                                         x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1055                                         x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1056
1057                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1058                                         M_INTMOVE(src->prev->regoff, d);
1059                                         x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1060
1061                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1062                                         /* workaround for reg alloc */
1063                                         if (src->regoff == iptr->dst->regoff) {
1064                                                 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1065                                                 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1066                                                 M_INTMOVE(REG_ITMP1, d);
1067
1068                                         } else {
1069                                                 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1070                                                 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1071                                         }
1072
1073                                 } else {
1074                                         /* workaround for reg alloc */
1075                                         if (src->regoff == iptr->dst->regoff) {
1076                                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1077                                                 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1078                                                 M_INTMOVE(REG_ITMP1, d);
1079
1080                                         } else {
1081                                                 M_INTMOVE(src->prev->regoff, d);
1082                                                 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1083                                         }
1084                                 }
1085                         }
1086                         break;
1087
1088                 case ICMD_LSUBCONST:  /* ..., value  ==> ..., value - constant        */
1089                                       /* val.l = constant                             */
1090
1091                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1092                         x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1093                         break;
1094
1095                 case ICMD_IMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1096
1097                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1098                         if (iptr->dst->flags & INMEMORY) {
1099                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1100                                         x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1101                                         x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1102                                         x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1103
1104                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1105                                         x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1106                                         x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1107                                         x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1108
1109                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1110                                         x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1111                                         x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1112                                         x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1113
1114                                 } else {
1115                                         M_INTMOVE(src->prev->regoff, REG_ITMP1);
1116                                         x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1117                                         x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1118                                 }
1119
1120                         } else {
1121                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1122                                         x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1123                                         x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1124
1125                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1126                                         M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1127                                         x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1128
1129                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1130                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
1131                                         x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1132
1133                                 } else {
1134                                         if (src->regoff == iptr->dst->regoff) {
1135                                                 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1136
1137                                         } else {
1138                                                 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1139                                                 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1140                                         }
1141                                 }
1142                         }
1143                         break;
1144
1145                 case ICMD_IMULCONST:  /* ..., value  ==> ..., value * constant        */
1146                                       /* val.i = constant                             */
1147
1148                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1149                         if (iptr->dst->flags & INMEMORY) {
1150                                 if (src->flags & INMEMORY) {
1151                                         x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1152                                         x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1153
1154                                 } else {
1155                                         x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1156                                         x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1157                                 }
1158
1159                         } else {
1160                                 if (src->flags & INMEMORY) {
1161                                         x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1162
1163                                 } else {
1164                                         if (iptr->val.i == 2) {
1165                                                 M_INTMOVE(src->regoff, iptr->dst->regoff);
1166                                                 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
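                                                         /* val * 2 is strength-reduced to an add of the   */
                                                         /* register to itself; other constants take the   */
                                                         /* imull below (the "3 cycles" noted there).      */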
1167
1168                                         } else {
1169                                                 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff);    /* 3 cycles */
1170                                         }
1171                                 }
1172                         }
1173                         break;
1174
1175                 case ICMD_LMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1176
1177                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1178                         if (iptr->dst->flags & INMEMORY) {
1179                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1180                                         x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1181                                         x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1182                                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1183
1184                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1185                                         x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1186                                         x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1187                                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1188
1189                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1190                                         x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1191                                         x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1192                                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1193
1194                                 } else {
1195                                         x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1196                                         x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1197                                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1198                                 }
1199
1200                         } else {
1201                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1202                                         x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1203                                         x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1204
1205                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1206                                         M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1207                                         x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1208
1209                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1210                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
1211                                         x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1212
1213                                 } else {
1214                                         if (src->regoff == iptr->dst->regoff) {
1215                                                 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1216
1217                                         } else {
1218                                                 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1219                                                 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
1220                                         }
1221                                 }
1222                         }
1223                         break;
1224
1225                 case ICMD_LMULCONST:  /* ..., value  ==> ..., value * constant        */
1226                                       /* val.l = constant                             */
1227
1228                         d = reg_of_var(rd, iptr->dst, REG_NULL);
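                             /* imul only encodes a sign-extended 32-bit immediate, so         */
                             /* IS_IMM32 decides whether the constant can be used directly or  */
                             /* must first be materialized with a 64-bit mov and multiplied    */
                             /* via the reg,reg or reg,mem form.                               */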
1229                         if (iptr->dst->flags & INMEMORY) {
1230                                 if (src->flags & INMEMORY) {
1231                                         if (IS_IMM32(iptr->val.l)) {
1232                                                 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1233
1234                                         } else {
1235                                                 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1236                                                 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1237                                         }
1238                                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1239                                         
1240                                 } else {
1241                                         if (IS_IMM32(iptr->val.l)) {
1242                                                 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1243
1244                                         } else {
1245                                                 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1246                                                 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1247                                         }
1248                                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1249                                 }
1250
1251                         } else {
1252                                 if (src->flags & INMEMORY) {
1253                                         if (IS_IMM32(iptr->val.l)) {
1254                                                 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1255
1256                                         } else {
1257                                                 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1258                                                 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1259                                         }
1260
1261                                 } else {
1262                                         /* multiplying by 2 is a common case */
1263                                         if (iptr->val.l == 2) {
1264                                                 M_INTMOVE(src->regoff, iptr->dst->regoff);
1265                                                 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1266
1267                                         } else {
1268                                                 if (IS_IMM32(iptr->val.l)) {
1269                                                         x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff);    /* 4 cycles */
1270
1271                                                 } else {
1272                                                         x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1273                                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
1274                                                         x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
1275                                                 }
1276                                         }
1277                                 }
1278                         }
1279                         break;
1280
1281                 case ICMD_IDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1282
1283                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1284                         if (src->prev->flags & INMEMORY) {
1285                                 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1286
1287                         } else {
1288                                 M_INTMOVE(src->prev->regoff, RAX);
1289                         }
1290                         
1291                         if (src->flags & INMEMORY) {
1292                                 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1293
1294                         } else {
1295                                 M_INTMOVE(src->regoff, REG_ITMP3);
1296                         }
1297                         gen_div_check(src);
1298
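                             /* %rdx is saved first because cltd/idivl clobber it and it is an */
                             /* argument register that may hold a live value; it is restored   */
                             /* on every path below.  The check implements the JVM rule that   */
                             /* Integer.MIN_VALUE / -1 yields Integer.MIN_VALUE: idivl would   */
                             /* raise #DE for that operand pair, so the division is skipped    */
                             /* and %eax, which already contains MIN_VALUE, is the result.     */
                             /* The displacements (4 + 6, 1 + 3) are the hand-counted byte     */
                             /* lengths of the skipped instructions.                           */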
1299                         x86_64_mov_reg_reg(cd, RDX, REG_ITMP2);    /* save %rdx, cause it's an argument register */
1300                         x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX);    /* check as described in jvm spec */
1301                         x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1302                         x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3);      /* 4 bytes */
1303                         x86_64_jcc(cd, X86_64_CC_E, 1 + 3);                      /* 6 bytes */
1304
1305                         x86_64_cltd(cd);
1306                         x86_64_idivl_reg(cd, REG_ITMP3);
1307
1308                         if (iptr->dst->flags & INMEMORY) {
1309                                 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1310                                 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX);    /* restore %rdx */
1311
1312                         } else {
1313                                 M_INTMOVE(RAX, iptr->dst->regoff);
1314
1315                                 if (iptr->dst->regoff != RDX) {
1316                                         x86_64_mov_reg_reg(cd, REG_ITMP2, RDX);    /* restore %rdx */
1317                                 }
1318                         }
1319                         break;
1320
1321                 case ICMD_IREM:       /* ..., val1, val2  ==> ..., val1 % val2        */
1322
1323                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1324                         if (src->prev->flags & INMEMORY) {
1325                                 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1326
1327                         } else {
1328                                 M_INTMOVE(src->prev->regoff, RAX);
1329                         }
1330                         
1331                         if (src->flags & INMEMORY) {
1332                                 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1333
1334                         } else {
1335                                 M_INTMOVE(src->regoff, REG_ITMP3);
1336                         }
1337                         gen_div_check(src);
1338
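                             /* Same guard as ICMD_IDIV, but for the remainder the corner case */
                             /* Integer.MIN_VALUE % -1 must give 0, so %edx is cleared up      */
                             /* front and the idivl skipped; %rdx is saved unconditionally     */
                             /* because both paths end at the restore below.                   */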
1339                         x86_64_mov_reg_reg(cd, RDX, REG_ITMP2);    /* save %rdx, cause it's an argument register */
1340
1341                         x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX);    /* check as described in jvm spec */
1342                         x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1343
1344
1345                         x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX);           /* 2 bytes */
1346                         x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3);      /* 4 bytes */
1347                         x86_64_jcc(cd, X86_64_CC_E, 1 + 3);                      /* 6 bytes */
1348
1349                         x86_64_cltd(cd);
1350                         x86_64_idivl_reg(cd, REG_ITMP3);
1351
1352                         if (iptr->dst->flags & INMEMORY) {
1353                                 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1354                                 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX);    /* restore %rdx */
1355
1356                         } else {
1357                                 M_INTMOVE(RDX, iptr->dst->regoff);
1358
1359                                 if (iptr->dst->regoff != RDX) {
1360                                         x86_64_mov_reg_reg(cd, REG_ITMP2, RDX);    /* restore %rdx */
1361                                 }
1362                         }
1363                         break;
1364
1365                 case ICMD_IDIVPOW2:   /* ..., value  ==> ..., value >> constant       */
1366                                       /* val.i = constant                             */
1367
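                             /* Signed division by 2^n via arithmetic shift: a plain sar       */
                             /* rounds toward minus infinity, so negative dividends first get  */
                             /* a bias of (2^n - 1) added (selected branchlessly with cmov) so */
                             /* the shift truncates toward zero, e.g. -7 / 4:                  */
                             /* (-7 + 3) >> 2 == -1, whereas -7 >> 2 == -2.  val.i holds the   */
                             /* shift count n.                                                 */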
1368                         var_to_reg_int(s1, src, REG_ITMP1);
1369                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1370                         M_INTMOVE(s1, REG_ITMP1);
1371                         x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1372                         x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1373                         x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1374                         x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1375                         x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1376                         store_reg_to_var_int(iptr->dst, d);
1377                         break;
1378
1379                 case ICMD_IREMPOW2:   /* ..., value  ==> ..., value % constant        */
1380                                       /* val.i = constant                             */
1381
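                             /* Remainder for a power-of-two modulus: val.i holds the mask     */
                             /* (2^n - 1) here.  The biased value (x, or x + mask for          */
                             /* negative x) is rounded down to a multiple of 2^n by the and    */
                             /* and subtracted from x, e.g. -7 % 4:                            */
                             /* -7 - ((-7 + 3) & ~3) == -7 - (-4) == -3.                       */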
1382                         var_to_reg_int(s1, src, REG_ITMP1);
1383                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1384                         M_INTMOVE(s1, REG_ITMP1);
1385                         x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1386                         x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1387                         x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1388                         x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1389                         x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1390                         x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1391                         store_reg_to_var_int(iptr->dst, d);
1392                         break;
1393
1394
1395                 case ICMD_LDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1396
1397                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1398                         if (src->prev->flags & INMEMORY) {
1399                                 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1400
1401                         } else {
1402                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1403                         }
1404                         
1405                         if (src->flags & INMEMORY) {
1406                                 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1407
1408                         } else {
1409                                 M_INTMOVE(src->regoff, REG_ITMP3);
1410                         }
1411                         gen_div_check(src);
1412
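                             /* Same Long.MIN_VALUE / -1 guard as ICMD_IDIV; cmp has no 64-bit */
                             /* immediate form, so the constant is materialized in REG_ITMP2   */
                             /* first, and %rdx is saved only after that compare.              */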
1413                         x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2);    /* check as described in jvm spec */
1414                         x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1415                         x86_64_mov_reg_reg(cd, RDX, REG_ITMP2);    /* save %rdx, cause it's an argument register */
1416                         x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1417                         x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3);          /* 4 bytes */
1418                         x86_64_jcc(cd, X86_64_CC_E, 2 + 3);                         /* 6 bytes */
1419
1420                         x86_64_cqto(cd);
1421                         x86_64_idiv_reg(cd, REG_ITMP3);
1422
1423                         if (iptr->dst->flags & INMEMORY) {
1424                                 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1425                                 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX);    /* restore %rdx */
1426
1427                         } else {
1428                                 M_INTMOVE(RAX, iptr->dst->regoff);
1429
1430                                 if (iptr->dst->regoff != RDX) {
1431                                         x86_64_mov_reg_reg(cd, REG_ITMP2, RDX);    /* restore %rdx */
1432                                 }
1433                         }
1434                         break;
1435
1436                 case ICMD_LREM:       /* ..., val1, val2  ==> ..., val1 % val2        */
1437
1438                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1439                         if (src->prev->flags & INMEMORY) {
1440                                 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1441
1442                         } else {
1443                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1444                         }
1445                         
1446                         if (src->flags & INMEMORY) {
1447                                 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1448
1449                         } else {
1450                                 M_INTMOVE(src->regoff, REG_ITMP3);
1451                         }
1452                         gen_div_check(src);
1453
1454                         x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2);    /* check as described in jvm spec */
1455                         x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1456                         x86_64_mov_reg_reg(cd, RDX, REG_ITMP2);    /* save %rdx, cause it's an argument register (after the imm64 check, which uses REG_ITMP2) */
1457
1458                         x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1459
1460
1461                         x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX);              /* 2 bytes */
1462                         x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3);          /* 4 bytes */
1463                         x86_64_jcc(cd, X86_64_CC_E, 2 + 3);                         /* 6 bytes */
1464
1465                         x86_64_cqto(cd);
1466                         x86_64_idiv_reg(cd, REG_ITMP3);
1467
1468                         if (iptr->dst->flags & INMEMORY) {
1469                                 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1470                                 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX);    /* restore %rdx */
1471
1472                         } else {
1473                                 M_INTMOVE(RDX, iptr->dst->regoff);
1474
1475                                 if (iptr->dst->regoff != RDX) {
1476                                         x86_64_mov_reg_reg(cd, REG_ITMP2, RDX);    /* restore %rdx */
1477                                 }
1478                         }
1479                         break;
1480
1481                 case ICMD_LDIVPOW2:   /* ..., value  ==> ..., value >> constant       */
1482                                       /* val.i = constant                             */
1483
1484                         var_to_reg_int(s1, src, REG_ITMP1);
1485                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1486                         M_INTMOVE(s1, REG_ITMP1);
1487                         x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1488                         x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1489                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1490                         x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1491                         x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1492                         store_reg_to_var_int(iptr->dst, d);
1493                         break;
1494
1495                 case ICMD_LREMPOW2:   /* ..., value  ==> ..., value % constant        */
1496                                       /* val.l = constant                             */
1497
1498                         var_to_reg_int(s1, src, REG_ITMP1);
1499                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1500                         M_INTMOVE(s1, REG_ITMP1);
1501                         x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1502                         x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1503                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1504                         x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1505                         x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1506                         x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1507                         store_reg_to_var_int(iptr->dst, d);
1508                         break;
1509
1510                 case ICMD_ISHL:       /* ..., val1, val2  ==> ..., val1 << val2       */
1511
1512                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1513                         x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1514                         break;
1515
1516                 case ICMD_ISHLCONST:  /* ..., value  ==> ..., value << constant       */
1517                                       /* val.i = constant                             */
1518
1519                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1520                         x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1521                         break;
1522
1523                 case ICMD_ISHR:       /* ..., val1, val2  ==> ..., val1 >> val2       */
1524
1525                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1526                         x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1527                         break;
1528
1529                 case ICMD_ISHRCONST:  /* ..., value  ==> ..., value >> constant       */
1530                                       /* val.i = constant                             */
1531
1532                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1533                         x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1534                         break;
1535
1536                 case ICMD_IUSHR:      /* ..., val1, val2  ==> ..., val1 >>> val2      */
1537
1538                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1539                         x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1540                         break;
1541
1542                 case ICMD_IUSHRCONST: /* ..., value  ==> ..., value >>> constant      */
1543                                       /* val.i = constant                             */
1544
1545                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1546                         x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1547                         break;
1548
1549                 case ICMD_LSHL:       /* ..., val1, val2  ==> ..., val1 << val2       */
1550
1551                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1552                         x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1553                         break;
1554
1555                 case ICMD_LSHLCONST:  /* ..., value  ==> ..., value << constant       */
1556                                       /* val.i = constant                             */
1557
1558                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1559                         x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1560                         break;
1561
1562                 case ICMD_LSHR:       /* ..., val1, val2  ==> ..., val1 >> val2       */
1563
1564                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1565                         x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1566                         break;
1567
1568                 case ICMD_LSHRCONST:  /* ..., value  ==> ..., value >> constant       */
1569                                       /* val.i = constant                             */
1570
1571                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1572                         x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1573                         break;
1574
1575                 case ICMD_LUSHR:      /* ..., val1, val2  ==> ..., val1 >>> val2      */
1576
1577                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1578                         x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1579                         break;
1580
1581                 case ICMD_LUSHRCONST: /* ..., value  ==> ..., value >>> constant      */
1582                                       /* val.i = constant                             */
1583
1584                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1585                         x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1586                         break;
1587
1588                 case ICMD_IAND:       /* ..., val1, val2  ==> ..., val1 & val2        */
1589
1590                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1591                         x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1592                         break;
1593
1594                 case ICMD_IANDCONST:  /* ..., value  ==> ..., value & constant        */
1595                                       /* val.i = constant                             */
1596
1597                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1598                         x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1599                         break;
1600
1601                 case ICMD_LAND:       /* ..., val1, val2  ==> ..., val1 & val2        */
1602
1603                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1604                         x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1605                         break;
1606
1607                 case ICMD_LANDCONST:  /* ..., value  ==> ..., value & constant        */
1608                                       /* val.l = constant                             */
1609
1610                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1611                         x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1612                         break;
1613
1614                 case ICMD_IOR:        /* ..., val1, val2  ==> ..., val1 | val2        */
1615
1616                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1617                         x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1618                         break;
1619
1620                 case ICMD_IORCONST:   /* ..., value  ==> ..., value | constant        */
1621                                       /* val.i = constant                             */
1622
1623                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1624                         x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1625                         break;
1626
1627                 case ICMD_LOR:        /* ..., val1, val2  ==> ..., val1 | val2        */
1628
1629                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1630                         x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1631                         break;
1632
1633                 case ICMD_LORCONST:   /* ..., value  ==> ..., value | constant        */
1634                                       /* val.l = constant                             */
1635
1636                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1637                         x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1638                         break;
1639
1640                 case ICMD_IXOR:       /* ..., val1, val2  ==> ..., val1 ^ val2        */
1641
1642                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1643                         x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1644                         break;
1645
1646                 case ICMD_IXORCONST:  /* ..., value  ==> ..., value ^ constant        */
1647                                       /* val.i = constant                             */
1648
1649                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1650                         x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1651                         break;
1652
1653                 case ICMD_LXOR:       /* ..., val1, val2  ==> ..., val1 ^ val2        */
1654
1655                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1656                         x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1657                         break;
1658
1659                 case ICMD_LXORCONST:  /* ..., value  ==> ..., value ^ constant        */
1660                                       /* val.l = constant                             */
1661
1662                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1663                         x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
1664                         break;
1665
1666
1667                 case ICMD_IINC:       /* ..., value  ==> ..., value + constant        */
1668                                       /* op1 = variable, val.i = constant             */
1669
1670                         /* using inc and dec is definitely faster than add -- tested      */
1671                         /* with sieve                                                     */
1672
1673                         var = &(rd->locals[iptr->op1][TYPE_INT]);
1674                         d = var->regoff;
1675                         if (var->flags & INMEMORY) {
1676                                 if (iptr->val.i == 1) {
1677                                         x86_64_incl_membase(cd, REG_SP, d * 8);
1678  
1679                                 } else if (iptr->val.i == -1) {
1680                                         x86_64_decl_membase(cd, REG_SP, d * 8);
1681
1682                                 } else {
1683                                         x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1684                                 }
1685
1686                         } else {
1687                                 if (iptr->val.i == 1) {
1688                                         x86_64_incl_reg(cd, d);
1689  
1690                                 } else if (iptr->val.i == -1) {
1691                                         x86_64_decl_reg(cd, d);
1692
1693                                 } else {
1694                                         x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1695                                 }
1696                         }
1697                         break;
1698
1699
1700                 /* floating operations ************************************************/
1701
1702                 case ICMD_FNEG:       /* ..., value  ==> ..., - value                 */
1703
1704                         var_to_reg_flt(s1, src, REG_FTMP1);
1705                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1706                         a = dseg_adds4(cd, 0x80000000);
1707                         M_FLTMOVE(s1, d);
1708                         x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
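                             /* The sign bit is flipped by xoring with a mask kept in the data */
                             /* segment.  dseg_adds4 appears to return the (negative) offset   */
                             /* of the constant relative to the code base, and the             */
                             /* displacement above makes the load rip-relative: the 9 is       */
                             /* presumably the encoded length of the movss, so the offset is   */
                             /* taken from the following instruction, as rip-relative          */
                             /* addressing requires.  ICMD_DNEG below does the same with a     */
                             /* 64-bit mask and xorpd.                                         */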
1709                         x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1710                         store_reg_to_var_flt(iptr->dst, d);
1711                         break;
1712
1713                 case ICMD_DNEG:       /* ..., value  ==> ..., - value                 */
1714
1715                         var_to_reg_flt(s1, src, REG_FTMP1);
1716                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1717                         a = dseg_adds8(cd, 0x8000000000000000LL);
1718                         M_FLTMOVE(s1, d);
1719                         x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1720                         x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1721                         store_reg_to_var_flt(iptr->dst, d);
1722                         break;
1723
1724                 case ICMD_FADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
1725
1726                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1727                         var_to_reg_flt(s2, src, REG_FTMP2);
1728                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1729                         if (s1 == d) {
1730                                 x86_64_addss_reg_reg(cd, s2, d);
1731                         } else if (s2 == d) {
1732                                 x86_64_addss_reg_reg(cd, s1, d);
1733                         } else {
1734                                 M_FLTMOVE(s1, d);
1735                                 x86_64_addss_reg_reg(cd, s2, d);
1736                         }
1737                         store_reg_to_var_flt(iptr->dst, d);
1738                         break;
1739
1740                 case ICMD_DADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
1741
1742                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1743                         var_to_reg_flt(s2, src, REG_FTMP2);
1744                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1745                         if (s1 == d) {
1746                                 x86_64_addsd_reg_reg(cd, s2, d);
1747                         } else if (s2 == d) {
1748                                 x86_64_addsd_reg_reg(cd, s1, d);
1749                         } else {
1750                                 M_FLTMOVE(s1, d);
1751                                 x86_64_addsd_reg_reg(cd, s2, d);
1752                         }
1753                         store_reg_to_var_flt(iptr->dst, d);
1754                         break;
1755
1756                 case ICMD_FSUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
1757
1758                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1759                         var_to_reg_flt(s2, src, REG_FTMP2);
1760                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1761                         if (s2 == d) {
1762                                 M_FLTMOVE(s2, REG_FTMP2);
1763                                 s2 = REG_FTMP2;
1764                         }
1765                         M_FLTMOVE(s1, d);
1766                         x86_64_subss_reg_reg(cd, s2, d);
1767                         store_reg_to_var_flt(iptr->dst, d);
1768                         break;
1769
1770                 case ICMD_DSUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
1771
1772                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1773                         var_to_reg_flt(s2, src, REG_FTMP2);
1774                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1775                         if (s2 == d) {
1776                                 M_FLTMOVE(s2, REG_FTMP2);
1777                                 s2 = REG_FTMP2;
1778                         }
1779                         M_FLTMOVE(s1, d);
1780                         x86_64_subsd_reg_reg(cd, s2, d);
1781                         store_reg_to_var_flt(iptr->dst, d);
1782                         break;
1783
1784                 case ICMD_FMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1785
1786                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1787                         var_to_reg_flt(s2, src, REG_FTMP2);
1788                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1789                         if (s1 == d) {
1790                                 x86_64_mulss_reg_reg(cd, s2, d);
1791                         } else if (s2 == d) {
1792                                 x86_64_mulss_reg_reg(cd, s1, d);
1793                         } else {
1794                                 M_FLTMOVE(s1, d);
1795                                 x86_64_mulss_reg_reg(cd, s2, d);
1796                         }
1797                         store_reg_to_var_flt(iptr->dst, d);
1798                         break;
1799
1800                 case ICMD_DMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1801
1802                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1803                         var_to_reg_flt(s2, src, REG_FTMP2);
1804                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1805                         if (s1 == d) {
1806                                 x86_64_mulsd_reg_reg(cd, s2, d);
1807                         } else if (s2 == d) {
1808                                 x86_64_mulsd_reg_reg(cd, s1, d);
1809                         } else {
1810                                 M_FLTMOVE(s1, d);
1811                                 x86_64_mulsd_reg_reg(cd, s2, d);
1812                         }
1813                         store_reg_to_var_flt(iptr->dst, d);
1814                         break;
1815
1816                 case ICMD_FDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1817
1818                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1819                         var_to_reg_flt(s2, src, REG_FTMP2);
1820                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1821                         if (s2 == d) {
1822                                 M_FLTMOVE(s2, REG_FTMP2);
1823                                 s2 = REG_FTMP2;
1824                         }
1825                         M_FLTMOVE(s1, d);
1826                         x86_64_divss_reg_reg(cd, s2, d);
1827                         store_reg_to_var_flt(iptr->dst, d);
1828                         break;
1829
1830                 case ICMD_DDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1831
1832                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1833                         var_to_reg_flt(s2, src, REG_FTMP2);
1834                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1835                         if (s2 == d) {
1836                                 M_FLTMOVE(s2, REG_FTMP2);
1837                                 s2 = REG_FTMP2;
1838                         }
1839                         M_FLTMOVE(s1, d);
1840                         x86_64_divsd_reg_reg(cd, s2, d);
1841                         store_reg_to_var_flt(iptr->dst, d);
1842                         break;
1843
1844                 case ICMD_I2F:       /* ..., value  ==> ..., (float) value            */
1845
1846                         var_to_reg_int(s1, src, REG_ITMP1);
1847                         d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1848                         x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1849                         store_reg_to_var_flt(iptr->dst, d);
1850                         break;
1851
1852                 case ICMD_I2D:       /* ..., value  ==> ..., (double) value           */
1853
1854                         var_to_reg_int(s1, src, REG_ITMP1);
1855                         d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1856                         x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1857                         store_reg_to_var_flt(iptr->dst, d);
1858                         break;
1859
1860                 case ICMD_L2F:       /* ..., value  ==> ..., (float) value            */
1861
1862                         var_to_reg_int(s1, src, REG_ITMP1);
1863                         d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1864                         x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1865                         store_reg_to_var_flt(iptr->dst, d);
1866                         break;
1867                         
1868                 case ICMD_L2D:       /* ..., value  ==> ..., (double) value           */
1869
1870                         var_to_reg_int(s1, src, REG_ITMP1);
1871                         d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1872                         x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1873                         store_reg_to_var_flt(iptr->dst, d);
1874                         break;
1875                         
1876                 case ICMD_F2I:       /* ..., value  ==> ..., (int) value              */
1877
1878                         var_to_reg_flt(s1, src, REG_FTMP1);
1879                         d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1880                         x86_64_cvttss2si_reg_reg(cd, s1, d);
1881                         x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d);    /* corner cases */
1882                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1883                         x86_64_jcc(cd, X86_64_CC_NE, a);
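                             /* cvttss2si yields 0x80000000, the "integer indefinite" value,   */
                             /* for NaN and out-of-range inputs; only then does execution fall */
                             /* through to the asm_builtin_f2i call below, which is expected   */
                             /* to implement the exact JVM conversion (0 for NaN, saturation   */
                             /* to MIN/MAX).  a is the byte count of the skipped call          */
                             /* sequence.  D2I, F2L and D2L below follow the same pattern.     */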
1884                         M_FLTMOVE(s1, REG_FTMP1);
1885                         x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1886                         x86_64_call_reg(cd, REG_ITMP2);
1887                         M_INTMOVE(REG_RESULT, d);
1888                         store_reg_to_var_int(iptr->dst, d);
1889                         break;
1890
1891                 case ICMD_D2I:       /* ..., value  ==> ..., (int) value              */
1892
1893                         var_to_reg_flt(s1, src, REG_FTMP1);
1894                         d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1895                         x86_64_cvttsd2si_reg_reg(cd, s1, d);
1896                         x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d);    /* corner cases */
1897                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1898                         x86_64_jcc(cd, X86_64_CC_NE, a);
1899                         M_FLTMOVE(s1, REG_FTMP1);
1900                         x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1901                         x86_64_call_reg(cd, REG_ITMP2);
1902                         M_INTMOVE(REG_RESULT, d);
1903                         store_reg_to_var_int(iptr->dst, d);
1904                         break;
1905
1906                 case ICMD_F2L:       /* ..., value  ==> ..., (long) value             */
1907
1908                         var_to_reg_flt(s1, src, REG_FTMP1);
1909                         d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1910                         x86_64_cvttss2siq_reg_reg(cd, s1, d);
1911                         x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2);
1912                         x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d);     /* corner cases */
1913                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1914                         x86_64_jcc(cd, X86_64_CC_NE, a);
1915                         M_FLTMOVE(s1, REG_FTMP1);
1916                         x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
1917                         x86_64_call_reg(cd, REG_ITMP2);
1918                         M_INTMOVE(REG_RESULT, d);
1919                         store_reg_to_var_int(iptr->dst, d);
1920                         break;
1921
1922                 case ICMD_D2L:       /* ..., value  ==> ..., (long) value             */
1923
1924                         var_to_reg_flt(s1, src, REG_FTMP1);
1925                         d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1926                         x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1927                         x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2);
1928                         x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d);     /* corner cases */
1929                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1930                         x86_64_jcc(cd, X86_64_CC_NE, a);
1931                         M_FLTMOVE(s1, REG_FTMP1);
1932                         x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
1933                         x86_64_call_reg(cd, REG_ITMP2);
1934                         M_INTMOVE(REG_RESULT, d);
1935                         store_reg_to_var_int(iptr->dst, d);
1936                         break;
1937
1938                 case ICMD_F2D:       /* ..., value  ==> ..., (double) value           */
1939
1940                         var_to_reg_flt(s1, src, REG_FTMP1);
1941                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1942                         x86_64_cvtss2sd_reg_reg(cd, s1, d);
1943                         store_reg_to_var_flt(iptr->dst, d);
1944                         break;
1945
1946                 case ICMD_D2F:       /* ..., value  ==> ..., (float) value            */
1947
1948                         var_to_reg_flt(s1, src, REG_FTMP1);
1949                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1950                         x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1951                         store_reg_to_var_flt(iptr->dst, d);
1952                         break;
1953
1954                 case ICMD_FCMPL:      /* ..., val1, val2  ==> ..., val1 fcmpl val2    */
1955                                       /* == => 0, > => 1, < => -1, unordered => -1   */
1956
1957                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1958                         var_to_reg_flt(s2, src, REG_FTMP2);
1959                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
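                             /* ucomiss compares val2 against val1 and reports unordered (NaN) */
                             /* through PF.  d is cleared for the equal case, then the cmovs   */
                             /* pick 1 for val1 > val2 and -1 for val1 < val2 without          */
                             /* branching; the final cmov on PF supplies the NaN result        */
                             /* (-1 for fcmpl here, 1 for fcmpg in the next case).             */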
1960                         x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1961                         x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1962                         x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1963                         x86_64_ucomiss_reg_reg(cd, s1, s2);
1964                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1965                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1966                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d);    /* unordered (NaN) => -1, as fcmpl requires */
1967                         store_reg_to_var_int(iptr->dst, d);
1968                         break;
1969
1970                 case ICMD_FCMPG:      /* ..., val1, val2  ==> ..., val1 fcmpg val2    */
1971                                       /* == => 0, > => 1, < => -1, unordered => 1    */
1972
1973                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1974                         var_to_reg_flt(s2, src, REG_FTMP2);
1975                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1976                         x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1977                         x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1978                         x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1979                         x86_64_ucomiss_reg_reg(cd, s1, s2);
1980                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1981                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1982                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d);    /* unordered (NaN) => 1, as fcmpg requires */
1983                         store_reg_to_var_int(iptr->dst, d);
1984                         break;
1985
1986                 case ICMD_DCMPL:      /* ..., val1, val2  ==> ..., val1 fcmpl val2    */
1987                                       /* == => 0, > => 1, < => -1, unordered => -1   */
1988
1989                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1990                         var_to_reg_flt(s2, src, REG_FTMP2);
1991                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1992                         x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1993                         x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1994                         x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1995                         x86_64_ucomisd_reg_reg(cd, s1, s2);
1996                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1997                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1998                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d);    /* unordered (NaN) => -1, as dcmpl requires */
1999                         store_reg_to_var_int(iptr->dst, d);
2000                         break;
2001
2002                 case ICMD_DCMPG:      /* ..., val1, val2  ==> ..., val1 fcmpg val2    */
2003                                       /* == => 0, > => 1, < => -1, unordered => 1    */
2004
2005                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
2006                         var_to_reg_flt(s2, src, REG_FTMP2);
2007                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2008                         x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
2009                         x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
2010                         x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
2011                         x86_64_ucomisd_reg_reg(cd, s1, s2);
2012                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
2013                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
2014                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d);    /* unordered (NaN) => 1, as dcmpg requires */
2015                         store_reg_to_var_int(iptr->dst, d);
2016                         break;
2017
2018
2019                 /* memory operations **************************************************/
2020
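                     /* The array cases below follow one pattern: fetch the array reference    */
                     /* and index into registers, emit the null-pointer and bounds checks      */
                     /* unless iptr->op1 is nonzero (which presumably marks accesses where     */
                     /* the checks were proven unnecessary), then access the element with      */
                     /* scaled-index addressing, base + (index << scale) + offset of the data  */
                     /* field, where the scale is log2 of the element size (3 for              */
                     /* refs/longs/doubles, 2 for ints/floats, 1 for chars/shorts, 0 for       */
                     /* bytes).                                                                */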
2021                 case ICMD_ARRAYLENGTH: /* ..., arrayref  ==> ..., (int) length        */
2022
2023                         var_to_reg_int(s1, src, REG_ITMP1);
2024                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2025                         gen_nullptr_check(s1);
2026                         x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
2027                         store_reg_to_var_int(iptr->dst, d);
2028                         break;
2029
2030                 case ICMD_AALOAD:     /* ..., arrayref, index  ==> ..., value         */
2031
2032                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2033                         var_to_reg_int(s2, src, REG_ITMP2);
2034                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2035                         if (iptr->op1 == 0) {
2036                                 gen_nullptr_check(s1);
2037                                 gen_bound_check;
2038                         }
2039                         x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2040                         store_reg_to_var_int(iptr->dst, d);
2041                         break;
2042
2043                 case ICMD_LALOAD:     /* ..., arrayref, index  ==> ..., value         */
2044
2045                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2046                         var_to_reg_int(s2, src, REG_ITMP2);
2047                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2048                         if (iptr->op1 == 0) {
2049                                 gen_nullptr_check(s1);
2050                                 gen_bound_check;
2051                         }
2052                         x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2053                         store_reg_to_var_int(iptr->dst, d);
2054                         break;
2055
2056                 case ICMD_IALOAD:     /* ..., arrayref, index  ==> ..., value         */
2057
2058                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2059                         var_to_reg_int(s2, src, REG_ITMP2);
2060                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2061                         if (iptr->op1 == 0) {
2062                                 gen_nullptr_check(s1);
2063                                 gen_bound_check;
2064                         }
2065                         x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2066                         store_reg_to_var_int(iptr->dst, d);
2067                         break;
2068
2069                 case ICMD_FALOAD:     /* ..., arrayref, index  ==> ..., value         */
2070
2071                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2072                         var_to_reg_int(s2, src, REG_ITMP2);
2073                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2074                         if (iptr->op1 == 0) {
2075                                 gen_nullptr_check(s1);
2076                                 gen_bound_check;
2077                         }
2078                         x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2079                         store_reg_to_var_flt(iptr->dst, d);
2080                         break;
2081
2082                 case ICMD_DALOAD:     /* ..., arrayref, index  ==> ..., value         */
2083
2084                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2085                         var_to_reg_int(s2, src, REG_ITMP2);
2086                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2087                         if (iptr->op1 == 0) {
2088                                 gen_nullptr_check(s1);
2089                                 gen_bound_check;
2090                         }
2091                         x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2092                         store_reg_to_var_flt(iptr->dst, d);
2093                         break;
2094
2095                 case ICMD_CALOAD:     /* ..., arrayref, index  ==> ..., value         */
2096
2097                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2098                         var_to_reg_int(s2, src, REG_ITMP2);
2099                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2100                         if (iptr->op1 == 0) {
2101                                 gen_nullptr_check(s1);
2102                                 gen_bound_check;
2103                         }
2104                         x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2105                         store_reg_to_var_int(iptr->dst, d);
2106                         break;                  
2107
2108                 case ICMD_SALOAD:     /* ..., arrayref, index  ==> ..., value         */
2109
2110                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2111                         var_to_reg_int(s2, src, REG_ITMP2);
2112                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2113                         if (iptr->op1 == 0) {
2114                                 gen_nullptr_check(s1);
2115                                 gen_bound_check;
2116                         }
2117                         x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2118                         store_reg_to_var_int(iptr->dst, d);
2119                         break;
2120
2121                 case ICMD_BALOAD:     /* ..., arrayref, index  ==> ..., value         */
2122
2123                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2124                         var_to_reg_int(s2, src, REG_ITMP2);
2125                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2126                         if (iptr->op1 == 0) {
2127                                 gen_nullptr_check(s1);
2128                                 gen_bound_check;
2129                         }
2130                         x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2131                         store_reg_to_var_int(iptr->dst, d);
2132                         break;
2133
2134
2135                 case ICMD_AASTORE:    /* ..., arrayref, index, value  ==> ...         */
2136
2137                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2138                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2139                         if (iptr->op1 == 0) {
2140                                 gen_nullptr_check(s1);
2141                                 gen_bound_check;
2142                         }
2143                         var_to_reg_int(s3, src, REG_ITMP3);
2144                         x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2145                         break;
2146
2147                 case ICMD_LASTORE:    /* ..., arrayref, index, value  ==> ...         */
2148
2149                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2150                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2151                         if (iptr->op1 == 0) {
2152                                 gen_nullptr_check(s1);
2153                                 gen_bound_check;
2154                         }
2155                         var_to_reg_int(s3, src, REG_ITMP3);
2156                         x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2157                         break;
2158
2159                 case ICMD_IASTORE:    /* ..., arrayref, index, value  ==> ...         */
2160
2161                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2162                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2163                         if (iptr->op1 == 0) {
2164                                 gen_nullptr_check(s1);
2165                                 gen_bound_check;
2166                         }
2167                         var_to_reg_int(s3, src, REG_ITMP3);
2168                         x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2169                         break;
2170
2171                 case ICMD_FASTORE:    /* ..., arrayref, index, value  ==> ...         */
2172
2173                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2174                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2175                         if (iptr->op1 == 0) {
2176                                 gen_nullptr_check(s1);
2177                                 gen_bound_check;
2178                         }
2179                         var_to_reg_flt(s3, src, REG_FTMP3);
2180                         x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2181                         break;
2182
2183                 case ICMD_DASTORE:    /* ..., arrayref, index, value  ==> ...         */
2184
2185                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2186                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2187                         if (iptr->op1 == 0) {
2188                                 gen_nullptr_check(s1);
2189                                 gen_bound_check;
2190                         }
2191                         var_to_reg_flt(s3, src, REG_FTMP3);
2192                         x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2193                         break;
2194
2195                 case ICMD_CASTORE:    /* ..., arrayref, index, value  ==> ...         */
2196
2197                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2198                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2199                         if (iptr->op1 == 0) {
2200                                 gen_nullptr_check(s1);
2201                                 gen_bound_check;
2202                         }
2203                         var_to_reg_int(s3, src, REG_ITMP3);
2204                         x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2205                         break;
2206
2207                 case ICMD_SASTORE:    /* ..., arrayref, index, value  ==> ...         */
2208
2209                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2210                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2211                         if (iptr->op1 == 0) {
2212                                 gen_nullptr_check(s1);
2213                                 gen_bound_check;
2214                         }
2215                         var_to_reg_int(s3, src, REG_ITMP3);
2216                         x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2217                         break;
2218
2219                 case ICMD_BASTORE:    /* ..., arrayref, index, value  ==> ...         */
2220
2221                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2222                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2223                         if (iptr->op1 == 0) {
2224                                 gen_nullptr_check(s1);
2225                                 gen_bound_check;
2226                         }
2227                         var_to_reg_int(s3, src, REG_ITMP3);
2228                         x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2229                         break;
2230
2231                 case ICMD_IASTORECONST: /* ..., arrayref, index  ==> ...              */
2232
2233                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2234                         var_to_reg_int(s2, src, REG_ITMP2);
2235                         if (iptr->op1 == 0) {
2236                                 gen_nullptr_check(s1);
2237                                 gen_bound_check;
2238                         }
2239                         x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2240                         break;
2241
2242                 case ICMD_LASTORECONST: /* ..., arrayref, index  ==> ...              */
2243
2244                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2245                         var_to_reg_int(s2, src, REG_ITMP2);
2246                         if (iptr->op1 == 0) {
2247                                 gen_nullptr_check(s1);
2248                                 gen_bound_check;
2249                         }
2250
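                             /* x86_64 stores only accept a sign-extended 32-bit immediate,    */
                             /* so a constant that does not fit into 32 bits is written as     */
                             /* two separate 32-bit halves (low word, then high word at +4).   */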
2251                         if (IS_IMM32(iptr->val.l)) {
2252                                 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2253
2254                         } else {
2255                                 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2256                                 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2257                         }
2258                         break;
2259
2260                 case ICMD_AASTORECONST: /* ..., arrayref, index  ==> ...              */
2261
2262                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2263                         var_to_reg_int(s2, src, REG_ITMP2);
2264                         if (iptr->op1 == 0) {
2265                                 gen_nullptr_check(s1);
2266                                 gen_bound_check;
2267                         }
2268                         x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2269                         break;
2270
2271                 case ICMD_BASTORECONST: /* ..., arrayref, index  ==> ...              */
2272
2273                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2274                         var_to_reg_int(s2, src, REG_ITMP2);
2275                         if (iptr->op1 == 0) {
2276                                 gen_nullptr_check(s1);
2277                                 gen_bound_check;
2278                         }
2279                         x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2280                         break;
2281
2282                 case ICMD_CASTORECONST:   /* ..., arrayref, index  ==> ...            */
2283
2284                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2285                         var_to_reg_int(s2, src, REG_ITMP2);
2286                         if (iptr->op1 == 0) {
2287                                 gen_nullptr_check(s1);
2288                                 gen_bound_check;
2289                         }
2290                         x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2291                         break;
2292
2293                 case ICMD_SASTORECONST:   /* ..., arrayref, index  ==> ...            */
2294
2295                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2296                         var_to_reg_int(s2, src, REG_ITMP2);
2297                         if (iptr->op1 == 0) {
2298                                 gen_nullptr_check(s1);
2299                                 gen_bound_check;
2300                         }
2301                         x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2302                         break;
2303
2304
2305                 case ICMD_PUTSTATIC:  /* ..., value  ==> ...                          */
2306                                       /* op1 = type, val.a = field address            */
2307
2308                         /* If the static fields' class is not yet initialized, we do it   */
2309                         /* now. The call code is generated later.                         */
2310                         if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2311                                 codegen_addclinitref(cd, cd->mcodeptr, ((fieldinfo *) iptr->val.a)->class);
2312
2313                                 /* This is just for debugging purposes, since patched code is */
2314                                 /* very difficult to read. Here the following 5 nops get      */
2315                                 /* patched instead, so that the real code stays untouched.    */
2316                                 if (showdisassemble) {
2317                                         x86_64_nop(cd); x86_64_nop(cd); x86_64_nop(cd);
2318                                         x86_64_nop(cd); x86_64_nop(cd);
2319                                 }
2320                         }
2321
2322                         /* This approach is much faster than loading the field address    */
2323                         /* as an inline 64-bit immediate into a register.                 */
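                             /* The field address sits in the data segment and is loaded       */
                             /* RIP-relative: a is the dseg offset relative to mcodebase, and  */
                             /* mcodeptr + 7 should correspond to the address of the next      */
                             /* instruction (the emitted mov is presumably 7 bytes long), so   */
                             /* the resulting displacement is relative to the new RIP.         */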
2324                         a = dseg_addaddress(cd, &(((fieldinfo *) iptr->val.a)->value));
2325                         x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
2326                         switch (iptr->op1) {
2327                         case TYPE_INT:
2328                                 var_to_reg_int(s2, src, REG_ITMP1);
2329                                 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2330                                 break;
2331                         case TYPE_LNG:
2332                         case TYPE_ADR:
2333                                 var_to_reg_int(s2, src, REG_ITMP1);
2334                                 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2335                                 break;
2336                         case TYPE_FLT:
2337                                 var_to_reg_flt(s2, src, REG_FTMP1);
2338                                 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2339                                 break;
2340                         case TYPE_DBL:
2341                                 var_to_reg_flt(s2, src, REG_FTMP1);
2342                                 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
2343                                 break;
2344                         }
2345                         break;
2346
2347                 case ICMD_PUTSTATICCONST: /* ...  ==> ...                             */
2348                                           /* val = value (in current instruction)     */
2349                                           /* op1 = type, val.a = field address (in    */
2350                                           /* following NOP)                           */
2351
2352                         /* If the static fields' class is not yet initialized, we do it   */
2353                         /* now. The call code is generated later.                         */
2354                         if (!((fieldinfo *) iptr[1].val.a)->class->initialized) {
2355                                 codegen_addclinitref(cd, cd->mcodeptr, ((fieldinfo *) iptr[1].val.a)->class);
2356
2357                                 /* This is just for debugging purposes, since patched code is */
2358                                 /* very difficult to read. Here the following 5 nops get      */
2359                                 /* patched instead, so that the real code stays untouched.    */
2360                                 if (showdisassemble) {
2361                                         x86_64_nop(cd); x86_64_nop(cd); x86_64_nop(cd);
2362                                         x86_64_nop(cd); x86_64_nop(cd);
2363                                 }
2364                         }
2365
2366                         /* This approach is much faster than loading the field address    */
2367                         /* as an inline 64-bit immediate into a register.                 */
2368                         a = dseg_addaddress(cd, &(((fieldinfo *) iptr[1].val.a)->value));
2369                         x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP1);
2370                         switch (iptr->op1) {
2371                         case TYPE_INT:
2372                         case TYPE_FLT:
2373                                 x86_64_movl_imm_membase(cd, iptr->val.i, REG_ITMP1, 0);
2374                                 break;
2375                         case TYPE_LNG:
2376                         case TYPE_ADR:
2377                         case TYPE_DBL:
2378                                 if (IS_IMM32(iptr->val.l)) {
2379                                         x86_64_mov_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2380                                 } else {
2381                                         x86_64_movl_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2382                                         x86_64_movl_imm_membase(cd, iptr->val.l >> 32, REG_ITMP1, 4);
2383                                 }
2384                                 break;
2385                         }
2386                         break;
2387
2388                 case ICMD_GETSTATIC:  /* ...  ==> ..., value                          */
2389                                       /* op1 = type, val.a = field address            */
2390
2391                         /* If the static fields' class is not yet initialized, we do it   */
2392                         /* now. The call code is generated later.                         */
2393                         if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2394                                 codegen_addclinitref(cd, cd->mcodeptr, ((fieldinfo *) iptr->val.a)->class);
2395
2396                                 /* This is just for debugging purposes, since patched code is */
2397                                 /* very difficult to read. Here the following 5 nops get      */
2398                                 /* patched instead, so that the real code stays untouched.    */
2399                                 if (showdisassemble) {
2400                                         x86_64_nop(cd); x86_64_nop(cd); x86_64_nop(cd);
2401                                         x86_64_nop(cd); x86_64_nop(cd);
2402                                 }
2403                         }
2404
2405                         /* This approach is much faster than loading the field address    */
2406                         /* as an inline 64-bit immediate into a register.                 */
2407                         a = dseg_addaddress(cd, &(((fieldinfo *) iptr->val.a)->value));
2408                         x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
2409                         switch (iptr->op1) {
2410                         case TYPE_INT:
2411                                 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2412                                 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2413                                 store_reg_to_var_int(iptr->dst, d);
2414                                 break;
2415                         case TYPE_LNG:
2416                         case TYPE_ADR:
2417                                 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2418                                 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2419                                 store_reg_to_var_int(iptr->dst, d);
2420                                 break;
2421                         case TYPE_FLT:
2422                                 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2423                                 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2424                                 store_reg_to_var_flt(iptr->dst, d);
2425                                 break;
2426                         case TYPE_DBL:                          
2427                                 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2428                                 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2429                                 store_reg_to_var_flt(iptr->dst, d);
2430                                 break;
2431                         }
2432                         break;
2433
2434                 case ICMD_PUTFIELD:   /* ..., objectref, value  ==> ...               */
2435                                       /* op1 = type, val.a = fieldinfo pointer       */
2436
2437                         a = ((fieldinfo *)(iptr->val.a))->offset;
2438                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2439                         gen_nullptr_check(s1);
2440                         switch (iptr->op1) {
2441                         case TYPE_INT:
2442                                 var_to_reg_int(s2, src, REG_ITMP2);
2443                                 x86_64_movl_reg_membase(cd, s2, s1, a);
2444                                 break;
2445                         case TYPE_LNG:
2446                         case TYPE_ADR:
2447                                 var_to_reg_int(s2, src, REG_ITMP2);
2448                                 x86_64_mov_reg_membase(cd, s2, s1, a);
2449                                 break;
2450                         case TYPE_FLT:
2451                                 var_to_reg_flt(s2, src, REG_FTMP2);
2452                                 x86_64_movss_reg_membase(cd, s2, s1, a);
2453                                 break;
2454                         case TYPE_DBL:
2455                                 var_to_reg_flt(s2, src, REG_FTMP2);
2456                                 x86_64_movsd_reg_membase(cd, s2, s1, a);
2457                                 break;
2458                         }
2459                         break;
2460
2461                 case ICMD_PUTFIELDCONST:  /* ..., objectref, value  ==> ...           */
2462                                           /* val = value (in current instruction)     */
2463                                           /* op1 = type, val.a = field address (in    */
2464                                           /* following NOP)                           */
2465
2466                         a = ((fieldinfo *) iptr[1].val.a)->offset;
2467                         var_to_reg_int(s1, src, REG_ITMP1);
2468                         gen_nullptr_check(s1);
2469                         switch (iptr->op1) {
2470                         case TYPE_INT:
2471                         case TYPE_FLT:
2472                                 x86_64_movl_imm_membase(cd, iptr->val.i, s1, a);
2473                                 break;
2474                         case TYPE_LNG:
2475                         case TYPE_ADR:
2476                         case TYPE_DBL:
2477                                 if (IS_IMM32(iptr->val.l)) {
2478                                         x86_64_mov_imm_membase(cd, iptr->val.l, s1, a);
2479                                 } else {
2480                                         x86_64_movl_imm_membase(cd, iptr->val.l, s1, a);
2481                                         x86_64_movl_imm_membase(cd, iptr->val.l >> 32, s1, a + 4);
2482                                 }
2483                                 break;
2484                         }
2485                         break;
2486
2487                 case ICMD_GETFIELD:   /* ...  ==> ..., value                          */
2488                                       /* op1 = type, val.a = fieldinfo pointer       */
2489
2490                         a = ((fieldinfo *)(iptr->val.a))->offset;
2491                         var_to_reg_int(s1, src, REG_ITMP1);
2492                         gen_nullptr_check(s1);
2493                         switch (iptr->op1) {
2494                         case TYPE_INT:
2495                                 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2496                                 x86_64_movl_membase_reg(cd, s1, a, d);
2497                                 store_reg_to_var_int(iptr->dst, d);
2498                                 break;
2499                         case TYPE_LNG:
2500                         case TYPE_ADR:
2501                                 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2502                                 x86_64_mov_membase_reg(cd, s1, a, d);
2503                                 store_reg_to_var_int(iptr->dst, d);
2504                                 break;
2505                         case TYPE_FLT:
2506                                 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2507                                 x86_64_movss_membase_reg(cd, s1, a, d);
2508                                 store_reg_to_var_flt(iptr->dst, d);
2509                                 break;
2510                         case TYPE_DBL:                          
2511                                 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2512                                 x86_64_movsd_membase_reg(cd, s1, a, d);
2513                                 store_reg_to_var_flt(iptr->dst, d);
2514                                 break;
2515                         }
2516                         break;
2517
2518
2519                 /* branch operations **************************************************/
2520
2521                 case ICMD_ATHROW:       /* ..., objectref ==> ... (, objectref)       */
2522
2523                         var_to_reg_int(s1, src, REG_ITMP1);
2524                         M_INTMOVE(s1, REG_ITMP1_XPTR);
2525
2526                         x86_64_call_imm(cd, 0); /* passing exception pointer              */
2527                         x86_64_pop_reg(cd, REG_ITMP2_XPC);
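                             /* The call with displacement 0 targets the next instruction, so  */
                             /* the pop leaves the current PC in REG_ITMP2_XPC for the          */
                             /* exception handling code entered below.                          */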
2528
2529                         x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
2530                         x86_64_jmp_reg(cd, REG_ITMP3);
2531                         break;
2532
2533                 case ICMD_GOTO:         /* ... ==> ...                                */
2534                                         /* op1 = target JavaVM pc                     */
2535
2536                         x86_64_jmp_imm(cd, 0);
2537                         codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2538                         break;
2539
2540                 case ICMD_JSR:          /* ... ==> ...                                */
2541                                         /* op1 = target JavaVM pc                     */
2542
2543                         x86_64_call_imm(cd, 0);
2544                         codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2545                         break;
2546                         
2547                 case ICMD_RET:          /* ... ==> ...                                */
2548                                         /* op1 = local variable                       */
2549
2550                         var = &(rd->locals[iptr->op1][TYPE_ADR]);
2551                         var_to_reg_int(s1, var, REG_ITMP1);
2552                         x86_64_jmp_reg(cd, s1);
2553                         break;
2554
2555                 case ICMD_IFNULL:       /* ..., value ==> ...                         */
2556                                         /* op1 = target JavaVM pc                     */
2557
2558                         if (src->flags & INMEMORY) {
2559                                 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2560
2561                         } else {
2562                                 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2563                         }
2564                         x86_64_jcc(cd, X86_64_CC_E, 0);
2565                         codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2566                         break;
2567
2568                 case ICMD_IFNONNULL:    /* ..., value ==> ...                         */
2569                                         /* op1 = target JavaVM pc                     */
2570
2571                         if (src->flags & INMEMORY) {
2572                                 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2573
2574                         } else {
2575                                 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2576                         }
2577                         x86_64_jcc(cd, X86_64_CC_NE, 0);
2578                         codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2579                         break;
2580
2581                 case ICMD_IFEQ:         /* ..., value ==> ...                         */
2582                                         /* op1 = target JavaVM pc, val.i = constant   */
2583
2584                         x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2585                         break;
2586
2587                 case ICMD_IFLT:         /* ..., value ==> ...                         */
2588                                         /* op1 = target JavaVM pc, val.i = constant   */
2589
2590                         x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2591                         break;
2592
2593                 case ICMD_IFLE:         /* ..., value ==> ...                         */
2594                                         /* op1 = target JavaVM pc, val.i = constant   */
2595
2596                         x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2597                         break;
2598
2599                 case ICMD_IFNE:         /* ..., value ==> ...                         */
2600                                         /* op1 = target JavaVM pc, val.i = constant   */
2601
2602                         x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2603                         break;
2604
2605                 case ICMD_IFGT:         /* ..., value ==> ...                         */
2606                                         /* op1 = target JavaVM pc, val.i = constant   */
2607
2608                         x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2609                         break;
2610
2611                 case ICMD_IFGE:         /* ..., value ==> ...                         */
2612                                         /* op1 = target JavaVM pc, val.i = constant   */
2613
2614                         x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
2615                         break;
2616
2617                 case ICMD_IF_LEQ:       /* ..., value ==> ...                         */
2618                                         /* op1 = target JavaVM pc, val.l = constant   */
2619
2620                         x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2621                         break;
2622
2623                 case ICMD_IF_LLT:       /* ..., value ==> ...                         */
2624                                         /* op1 = target JavaVM pc, val.l = constant   */
2625
2626                         x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2627                         break;
2628
2629                 case ICMD_IF_LLE:       /* ..., value ==> ...                         */
2630                                         /* op1 = target JavaVM pc, val.l = constant   */
2631
2632                         x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2633                         break;
2634
2635                 case ICMD_IF_LNE:       /* ..., value ==> ...                         */
2636                                         /* op1 = target JavaVM pc, val.l = constant   */
2637
2638                         x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2639                         break;
2640
2641                 case ICMD_IF_LGT:       /* ..., value ==> ...                         */
2642                                         /* op1 = target JavaVM pc, val.l = constant   */
2643
2644                         x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2645                         break;
2646
2647                 case ICMD_IF_LGE:       /* ..., value ==> ...                         */
2648                                         /* op1 = target JavaVM pc, val.l = constant   */
2649
2650                         x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
2651                         break;
2652
2653                 case ICMD_IF_ICMPEQ:    /* ..., value, value ==> ...                  */
2654                                         /* op1 = target JavaVM pc                     */
2655
2656                         x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2657                         break;
2658
2659                 case ICMD_IF_LCMPEQ:    /* ..., value, value ==> ...                  */
2660                 case ICMD_IF_ACMPEQ:    /* op1 = target JavaVM pc                     */
2661
2662                         x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2663                         break;
2664
2665                 case ICMD_IF_ICMPNE:    /* ..., value, value ==> ...                  */
2666                                         /* op1 = target JavaVM pc                     */
2667
2668                         x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2669                         break;
2670
2671                 case ICMD_IF_LCMPNE:    /* ..., value, value ==> ...                  */
2672                 case ICMD_IF_ACMPNE:    /* op1 = target JavaVM pc                     */
2673
2674                         x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2675                         break;
2676
2677                 case ICMD_IF_ICMPLT:    /* ..., value, value ==> ...                  */
2678                                         /* op1 = target JavaVM pc                     */
2679
2680                         x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2681                         break;
2682
2683                 case ICMD_IF_LCMPLT:    /* ..., value, value ==> ...                  */
2684                                         /* op1 = target JavaVM pc                     */
2685
2686                         x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2687                         break;
2688
2689                 case ICMD_IF_ICMPGT:    /* ..., value, value ==> ...                  */
2690                                         /* op1 = target JavaVM pc                     */
2691
2692                         x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2693                         break;
2694
2695                 case ICMD_IF_LCMPGT:    /* ..., value, value ==> ...                  */
2696                                         /* op1 = target JavaVM pc                     */
2697
2698                         x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2699                         break;
2700
2701                 case ICMD_IF_ICMPLE:    /* ..., value, value ==> ...                  */
2702                                         /* op1 = target JavaVM pc                     */
2703
2704                         x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2705                         break;
2706
2707                 case ICMD_IF_LCMPLE:    /* ..., value, value ==> ...                  */
2708                                         /* op1 = target JavaVM pc                     */
2709
2710                         x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2711                         break;
2712
2713                 case ICMD_IF_ICMPGE:    /* ..., value, value ==> ...                  */
2714                                         /* op1 = target JavaVM pc                     */
2715
2716                         x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2717                         break;
2718
2719                 case ICMD_IF_LCMPGE:    /* ..., value, value ==> ...                  */
2720                                         /* op1 = target JavaVM pc                     */
2721
2722                         x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2723                         break;
2724
2725                 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST                           */
2726
2727                 case ICMD_ELSE_ICONST:  /* handled by IFxx_ICONST                     */
2728                         break;
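                     /* The IFxx_ICONST cases below select between two constants with a     */
                     /* conditional move: when a following ELSE_ICONST provides the         */
                     /* alternative value, d is preloaded with it, the IF constant is       */
                     /* placed in REG_ITMP2, and the cmov overwrites d when the condition   */
                     /* holds. If the source register equals d, the value is rescued into   */
                     /* REG_ITMP1 first so that preloading d does not clobber it.           */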
2729
2730                 case ICMD_IFEQ_ICONST:  /* ..., value ==> ..., constant               */
2731                                         /* val.i = constant                           */
2732
2733                         var_to_reg_int(s1, src, REG_ITMP1);
2734                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2735                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2736                                 if (s1 == d) {
2737                                         M_INTMOVE(s1, REG_ITMP1);
2738                                         s1 = REG_ITMP1;
2739                                 }
2740                                 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2741                         }
2742                         x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2743                         x86_64_testl_reg_reg(cd, s1, s1);
2744                         x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2745                         store_reg_to_var_int(iptr->dst, d);
2746                         break;
2747
2748                 case ICMD_IFNE_ICONST:  /* ..., value ==> ..., constant               */
2749                                         /* val.i = constant                           */
2750
2751                         var_to_reg_int(s1, src, REG_ITMP1);
2752                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2753                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2754                                 if (s1 == d) {
2755                                         M_INTMOVE(s1, REG_ITMP1);
2756                                         s1 = REG_ITMP1;
2757                                 }
2758                                 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2759                         }
2760                         x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2761                         x86_64_testl_reg_reg(cd, s1, s1);
2762                         x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2763                         store_reg_to_var_int(iptr->dst, d);
2764                         break;
2765
2766                 case ICMD_IFLT_ICONST:  /* ..., value ==> ..., constant               */
2767                                         /* val.i = constant                           */
2768
2769                         var_to_reg_int(s1, src, REG_ITMP1);
2770                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2771                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2772                                 if (s1 == d) {
2773                                         M_INTMOVE(s1, REG_ITMP1);
2774                                         s1 = REG_ITMP1;
2775                                 }
2776                                 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2777                         }
2778                         x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2779                         x86_64_testl_reg_reg(cd, s1, s1);
2780                         x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2781                         store_reg_to_var_int(iptr->dst, d);
2782                         break;
2783
2784                 case ICMD_IFGE_ICONST:  /* ..., value ==> ..., constant               */
2785                                         /* val.i = constant                           */
2786
2787                         var_to_reg_int(s1, src, REG_ITMP1);
2788                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2789                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2790                                 if (s1 == d) {
2791                                         M_INTMOVE(s1, REG_ITMP1);
2792                                         s1 = REG_ITMP1;
2793                                 }
2794                                 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2795                         }
2796                         x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2797                         x86_64_testl_reg_reg(cd, s1, s1);
2798                         x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2799                         store_reg_to_var_int(iptr->dst, d);
2800                         break;
2801
2802                 case ICMD_IFGT_ICONST:  /* ..., value ==> ..., constant               */
2803                                         /* val.i = constant                           */
2804
2805                         var_to_reg_int(s1, src, REG_ITMP1);
2806                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2807                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2808                                 if (s1 == d) {
2809                                         M_INTMOVE(s1, REG_ITMP1);
2810                                         s1 = REG_ITMP1;
2811                                 }
2812                                 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2813                         }
2814                         x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2815                         x86_64_testl_reg_reg(cd, s1, s1);
2816                         x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2817                         store_reg_to_var_int(iptr->dst, d);
2818                         break;
2819
2820                 case ICMD_IFLE_ICONST:  /* ..., value ==> ..., constant               */
2821                                         /* val.i = constant                           */
2822
2823                         var_to_reg_int(s1, src, REG_ITMP1);
2824                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2825                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2826                                 if (s1 == d) {
2827                                         M_INTMOVE(s1, REG_ITMP1);
2828                                         s1 = REG_ITMP1;
2829                                 }
2830                                 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2831                         }
2832                         x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2833                         x86_64_testl_reg_reg(cd, s1, s1);
2834                         x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2835                         store_reg_to_var_int(iptr->dst, d);
2836                         break;
2837
2838
2839                 case ICMD_IRETURN:      /* ..., retvalue ==> ...                      */
2840                 case ICMD_LRETURN:
2841                 case ICMD_ARETURN:
2842
2843                         var_to_reg_int(s1, src, REG_RESULT);
2844                         M_INTMOVE(s1, REG_RESULT);
2845
2846                         goto nowperformreturn;
2847
2848                 case ICMD_FRETURN:      /* ..., retvalue ==> ...                      */
2849                 case ICMD_DRETURN:
2850
2851                         var_to_reg_flt(s1, src, REG_FRESULT);
2852                         M_FLTMOVE(s1, REG_FRESULT);
2853
2854                         goto nowperformreturn;
2855
2856                 case ICMD_RETURN:      /* ...  ==> ...                                */
2857
2858 nowperformreturn:
2859                         {
2860                         s4 i, p;
2861                         
2862                         p = parentargs_base;
2863                         
2864                         /* call trace function */
2865                         if (runverbose) {
2866                                 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2867
2868                                 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2869                                 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2870
2871                                 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
2872                                 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
2873                                 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2874                                 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2875
2876                                 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
2877                                 x86_64_call_reg(cd, REG_ITMP1);
2878
2879                                 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2880                                 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2881
2882                                 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2883                         }
2884
2885 #if defined(USE_THREADS)
2886                         if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2887                                 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
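                                     /* rd->maxmemuse * 8 is assumed to be the stack slot where the */
                                     /* monitor object was saved on method entry; the same slot is  */
                                     /* reused below to preserve the return value across the call.  */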
2888         
2889                                 /* we need to save the proper return value */
2890                                 switch (iptr->opc) {
2891                                 case ICMD_IRETURN:
2892                                 case ICMD_ARETURN:
2893                                 case ICMD_LRETURN:
2894                                         x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, rd->maxmemuse * 8);
2895                                         break;
2896                                 case ICMD_FRETURN:
2897                                 case ICMD_DRETURN:
2898                                         x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, rd->maxmemuse * 8);
2899                                         break;
2900                                 }
2901
2902                                 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2903                                 x86_64_call_reg(cd, REG_ITMP1);
2904
2905                                 /* and now restore the proper return value */
2906                                 switch (iptr->opc) {
2907                                 case ICMD_IRETURN:
2908                                 case ICMD_ARETURN:
2909                                 case ICMD_LRETURN:
2910                                         x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_RESULT);
2911                                         break;
2912                                 case ICMD_FRETURN:
2913                                 case ICMD_DRETURN:
2914                                         x86_64_movq_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_FRESULT);
2915                                         break;
2916                                 }
2917                         }
2918 #endif
2919
2920                         /* restore saved registers                                        */
2921                         for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
2922                                 p--; x86_64_mov_membase_reg(cd, REG_SP, p * 8, rd->savintregs[i]);
2923                         }
2924                         for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
2925                                 p--; x86_64_movq_membase_reg(cd, REG_SP, p * 8, rd->savfltregs[i]);
2926                         }
2927
2928                         /* deallocate stack                                               */
2929                         if (parentargs_base) {
2930                                 x86_64_alu_imm_reg(cd, X86_64_ADD, parentargs_base * 8, REG_SP);
2931                         }
2932
2933                         x86_64_ret(cd);
2934                         }
2935                         break;
2936
2937
2938                 case ICMD_TABLESWITCH:  /* ..., index ==> ...                         */
2939                         {
2940                                 s4 i, l, *s4ptr;
2941                                 void **tptr;
2942
2943                                 tptr = (void **) iptr->target;
2944
2945                                 s4ptr = iptr->val.a;
2946                                 l = s4ptr[1];                          /* low     */
2947                                 i = s4ptr[2];                          /* high    */
2948
2949                                 var_to_reg_int(s1, src, REG_ITMP1);
2950                                 M_INTMOVE(s1, REG_ITMP1);
2951                                 if (l != 0) {
2952                                         x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
2953                                 }
2954                                 i = i - l + 1;
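                                     /* After subtracting the low bound, a single unsigned      */
                                     /* compare and branch-if-above catches both index < low    */
                                     /* and index > high in one branch.                          */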
2955
2956                 /* range check */
2957                                 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
2958                                 x86_64_jcc(cd, X86_64_CC_A, 0);
2959
2960                 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
2961                                 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2962
2963                                 /* build jump table top down and use address of lowest entry */
2964
2965                 /* s4ptr += 3 + i; */
2966                                 tptr += i;
2967
2968                                 while (--i >= 0) {
2969                                         /* dseg_addtarget(cd, BlockPtrOfPC(*--s4ptr)); */
2970                                         dseg_addtarget(cd, (basicblock *) tptr[0]); 
2971                                         --tptr;
2972                                 }
2973
2974                                 /* the dataseg length after the last dseg_addtarget is used by the load below */
2975
2976                                 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
2977                                 dseg_adddata(cd, cd->mcodeptr);
2978                                 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
2979                                 x86_64_jmp_reg(cd, REG_ITMP1);
2980                         }
2981                         break;
2982
2983
2984                 case ICMD_LOOKUPSWITCH: /* ..., key ==> ...                           */
2985                         {
2986                                 s4 i, l, val, *s4ptr;
2987                                 void **tptr;
2988
2989                                 tptr = (void **) iptr->target;
2990
2991                                 s4ptr = iptr->val.a;
2992                                 l = s4ptr[0];                          /* default  */
2993                                 i = s4ptr[1];                          /* count    */
2994                         
2995                                 MCODECHECK((i<<2)+8);
2996                                 var_to_reg_int(s1, src, REG_ITMP1);    /* reg compare should always be faster */
2997                                 while (--i >= 0) {
2998                                         s4ptr += 2;
2999                                         ++tptr;
3000
3001                                         val = s4ptr[0];
3002                                         x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
3003                                         x86_64_jcc(cd, X86_64_CC_E, 0);
3004                                         /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[1]), cd->mcodeptr); */
3005                                         codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr); 
3006                                 }
3007
3008                                 x86_64_jmp_imm(cd, 0);
3009                                 /* codegen_addreference(cd, BlockPtrOfPC(l), cd->mcodeptr); */
3010                         
3011                                 tptr = (void **) iptr->target;
3012                                 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3013                         }
3014                         break;
3015
3016
3017                 case ICMD_BUILTIN3:     /* ..., arg1, arg2, arg3 ==> ...              */
3018                                         /* op1 = return type, val.a = function pointer*/
3019                         s3 = 3;
3020                         goto gen_method;
3021
3022                 case ICMD_BUILTIN2:     /* ..., arg1, arg2 ==> ...                    */
3023                                         /* op1 = return type, val.a = function pointer*/
3024                         s3 = 2;
3025                         goto gen_method;
3026
3027                 case ICMD_BUILTIN1:     /* ..., arg1 ==> ...                          */
3028                                         /* op1 = return type, val.a = function pointer*/
3029                         s3 = 1;
3030                         goto gen_method;
3031
3032                 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ...            */
3033                                         /* op1 = arg count, val.a = method pointer    */
3034
3035                 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3036                 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer    */
3037                 case ICMD_INVOKEINTERFACE:
3038
3039                         s3 = iptr->op1;
3040
3041 gen_method: {
3042                         methodinfo   *lm;
3043                         classinfo    *ci;
3044                         stackptr      tmpsrc;
3045                         s4 iarg, farg;
3046
3047                         MCODECHECK((s3 << 1) + 64);
3048
3049                         tmpsrc = src;
3050                         s2 = s3;
3051                         iarg = 0;
3052                         farg = 0;
3053
3054                         /* copy arguments to registers or stack location ******************/
3055
3056                         /* count integer and float arguments */
3057
3058                         for (; --s3 >= 0; src = src->prev) {
3059                                 IS_INT_LNG_TYPE(src->type) ? iarg++ : farg++;
3060                         }
3061
3062                         src = tmpsrc;
3063                         s3 = s2;
3064
3065                         /* calculate amount of arguments to be on stack */
3066
3067                         s2 = ((iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0) +
3068                                 ((farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0);
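                             /* i.e. s2 = number of argument slots that do not fit into the    */
                             /* integer or float argument registers and therefore have to be   */
                             /* passed on the stack.                                           */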
3069
3070                         for (; --s3 >= 0; src = src->prev) {
3071                                 /* decrement the current argument type */
3072                                 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
3073
3074                                 if (src->varkind == ARGVAR) {
3075                                         if (IS_INT_LNG_TYPE(src->type)) {
3076                                                 if (iarg >= INT_ARG_CNT) {
3077                                                         s2--;
3078                                                 }
3079                                         } else {
3080                                                 if (farg >= FLT_ARG_CNT) {
3081                                                         s2--;
3082                                                 }
3083                                         }
3084                                         continue;
3085                                 }
3086
3087                                 if (IS_INT_LNG_TYPE(src->type)) {
3088                                         if (iarg < INT_ARG_CNT) {
3089                                                 s1 = rd->argintregs[iarg];
3090                                                 var_to_reg_int(d, src, s1);
3091                                                 M_INTMOVE(d, s1);
3092
3093                                         } else {
3094                                                 var_to_reg_int(d, src, REG_ITMP1);
3095                                                 s2--;
3096                                                 x86_64_mov_reg_membase(cd, d, REG_SP, s2 * 8);
3097                                         }
3098
3099                                 } else {
3100                                         if (farg < FLT_ARG_CNT) {
3101                                                 s1 = rd->argfltregs[farg];
3102                                                 var_to_reg_flt(d, src, s1);
3103                                                 M_FLTMOVE(d, s1);
3104
3105                                         } else {
3106                                                 var_to_reg_flt(d, src, REG_FTMP1);
3107                                                 s2--;
3108                                                 x86_64_movq_reg_membase(cd, d, REG_SP, s2 * 8);
3109                                         }
3110                                 }
3111                         } /* end of for */
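                             /* Second pass (above): ARGVAR sources already live in their      */
                             /* argument location and are skipped; all other arguments are     */
                             /* moved into argintregs/argfltregs or stored to the reserved     */
                             /* stack slots, with s2 counting down through those slots.        */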
3112
3113                         lm = iptr->val.a;
3114                         switch (iptr->opc) {
3115                         case ICMD_BUILTIN3:
3116                         case ICMD_BUILTIN2:
3117                         case ICMD_BUILTIN1:
3118                                 a = (s8) lm;
3119                                 d = iptr->op1;
3120
3121                                 x86_64_mov_imm_reg(cd, a, REG_ITMP1);
3122                                 x86_64_call_reg(cd, REG_ITMP1);
3123                                 break;
3124
3125                         case ICMD_INVOKESTATIC:
3126                                 a = (s8) lm->stubroutine;
3127                                 d = lm->returntype;
3128
3129                                 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3130                                 x86_64_call_reg(cd, REG_ITMP2);
3131                                 break;
3132
3133                         case ICMD_INVOKESPECIAL:
3134                                 a = (s8) lm->stubroutine;
3135                                 d = lm->returntype;
3136
3137                                 gen_nullptr_check(rd->argintregs[0]);    /* first argument is the `this' pointer */
3138                                 x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); /* touch memory to trigger the hardware null-pointer check */
3139                                 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3140                                 x86_64_call_reg(cd, REG_ITMP2);
3141                                 break;
3142
3143                         case ICMD_INVOKEVIRTUAL:
3144                                 d = lm->returntype;
3145
3146                                 gen_nullptr_check(rd->argintregs[0]);
3147                                 x86_64_mov_membase_reg(cd, rd->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3148                                 x86_64_mov_membase32_reg(cd, REG_ITMP2, OFFSET(vftbl_t, table[0]) + sizeof(methodptr) * lm->vftblindex, REG_ITMP1);
3149                                 x86_64_call_reg(cd, REG_ITMP1);
3150                                 break;
3151
3152                         case ICMD_INVOKEINTERFACE:
3153                                 ci = lm->class;
3154                                 d = lm->returntype;
3155
3156                                 gen_nullptr_check(rd->argintregs[0]);
3157                                 x86_64_mov_membase_reg(cd, rd->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3158                                 x86_64_mov_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, interfacetable[0]) - sizeof(methodptr) * ci->index, REG_ITMP2);
3159                                 x86_64_mov_membase32_reg(cd, REG_ITMP2, sizeof(methodptr) * (lm - ci->methods), REG_ITMP1);
3160                                 x86_64_call_reg(cd, REG_ITMP1);
3161                                 break;
3162                         }
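                             /* The cases above differ only in how the target address ends */
                             /* up in the call register: builtins and static/special calls */
                             /* load an immediate (builtin address or stubroutine),        */
                             /* virtual calls index the object's vftbl, and interface      */
                             /* calls add one more indirection, roughly:                   */
                             /*                                                            */
                             /*     vftbl_t   *v = obj->vftbl;                             */
                             /*     methodptr *t = v->interfacetable[-ci->index];          */
                             /*     target       = t[lm - ci->methods];                    */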
3163
3164                         /* d contains return type */
3165
3166                         if (d != TYPE_VOID) {
3167                                 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3168                                         s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3169                                         M_INTMOVE(REG_RESULT, s1);
3170                                         store_reg_to_var_int(iptr->dst, s1);
3171
3172                                 } else {
3173                                         s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3174                                         M_FLTMOVE(REG_FRESULT, s1);
3175                                         store_reg_to_var_flt(iptr->dst, s1);
3176                                 }
3177                         }
3178                         }
3179                         break;
3180
3181
3182                 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult            */
3183
3184                                       /* op1:   0 == array, 1 == class                */
3185                                       /* val.a: (classinfo*) superclass               */
3186
3187 /*          superclass is an interface:
3188  *
3189  *          return (sub != NULL) &&
3190  *                 (sub->vftbl->interfacetablelength > super->index) &&
3191  *                 (sub->vftbl->interfacetable[-super->index] != NULL);
3192  *
3193  *          superclass is a class:
3194  *
3195  *          return ((sub != NULL) && (0
3196  *                  <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3197  *                  super->vftbl->diffval));
3198  */
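/*          In the generated code below d is cleared first, so a null
 *          reference falls through the forward jcc (whose byte distance `a'
 *          is counted by hand from the emitted instruction sizes) and leaves
 *          0 in the result; on success setcc stores the outcome of the test.
 */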
3199
3200                         {
3201                         classinfo *super = (classinfo *) iptr->val.a;
3202                         
3203 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3204             codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3205 #endif
3206
3207                         var_to_reg_int(s1, src, REG_ITMP1);
3208                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3209                         if (s1 == d) {
3210                                 M_INTMOVE(s1, REG_ITMP1);
3211                                 s1 = REG_ITMP1;
3212                         }
3213                         x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3214                         if (iptr->op1) {                               /* class/interface */
3215                                 if (super->flags & ACC_INTERFACE) {        /* interface       */
3216                                         x86_64_test_reg_reg(cd, s1, s1);
3217
3218                                         /* TODO: clean up this calculation */
3219                                         a = 3;    /* mov_membase_reg */
3220                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3221
3222                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3223                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3224                                         
3225                                         a += 3;    /* sub */
3226                                         CALCIMMEDIATEBYTES(a, super->index);
3227                                         
3228                                         a += 3;    /* test */
3229
3230                                         a += 6;    /* jcc */
3231                                         a += 3;    /* mov_membase_reg */
3232                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3233
3234                                         a += 3;    /* test */
3235                                         a += 4;    /* setcc */
3236
3237                                         x86_64_jcc(cd, X86_64_CC_E, a);
3238
3239                                         x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3240                                         x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength), REG_ITMP2);
3241                                         x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP2);
3242                                         x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3243
3244                                         /* TODO: clean up this calculation */
3245                                         a = 0;
3246                                         a += 3;    /* mov_membase_reg */
3247                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3248
3249                                         a += 3;    /* test */
3250                                         a += 4;    /* setcc */
3251
3252                                         x86_64_jcc(cd, X86_64_CC_LE, a);
3253                                         x86_64_mov_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP1);
3254                                         x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3255                                         x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3256
3257                                 } else {                                   /* class           */
3258                                         x86_64_test_reg_reg(cd, s1, s1);
3259
3260                                         /* TODO: clean up this calculation */
3261                                         a = 3;    /* mov_membase_reg */
3262                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3263
3264                                         a += 10;   /* mov_imm_reg */
3265
3266                                         a += 2;    /* movl_membase_reg - only if REG_ITMP1 == RAX */
3267                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, baseval));
3268                                         
3269                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3270                                         CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3271                                         
3272                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3273                                         CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3274                                         
3275                                         a += 3;    /* sub */
3276                                         a += 3;    /* xor */
3277                                         a += 3;    /* cmp */
3278                                         a += 4;    /* setcc */
3279
3280                                         x86_64_jcc(cd, X86_64_CC_E, a);
3281
3282                                         x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3283                                         x86_64_mov_imm_reg(cd, (ptrint) super->vftbl, REG_ITMP2);
3284 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3285                                         codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3286 #endif
3287                                         x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, baseval), REG_ITMP1);
3288                                         x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP3);
3289                                         x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3290 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3291                     codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3292 #endif
3293                                         x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP1);
3294                                         x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3295                                         x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
3296                                         x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3297                                 }
3298                         }
3299                         else
3300                                 panic("internal error: no inlined array instanceof");
3301                         }
3302                         store_reg_to_var_int(iptr->dst, d);
3303                         break;
3304
3305                 case ICMD_CHECKCAST:  /* ..., objectref ==> ..., objectref            */
3306
3307                                       /* op1:   0 == array, 1 == class                */
3308                                       /* val.a: (classinfo*) superclass               */
3309
3310                         /*  superclass is an interface:
3311                          *      
3312                          *  OK if ((sub == NULL) ||
3313                          *         (sub->vftbl->interfacetablelength > super->index) &&
3314                          *         (sub->vftbl->interfacetable[-super->index] != NULL));
3315                          *      
3316                          *  superclass is a class:
3317                          *      
3318                          *  OK if ((sub == NULL) || (0
3319                          *         <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3320                          *         super->vftbl->diffval));
3321                          */
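                             /* Unlike INSTANCEOF, a failing test produces no value: the   */
                             /* conditional jumps below are emitted with displacement 0    */
                             /* and recorded via codegen_addxcastrefs, so they are later   */
                             /* resolved to the cast check stubs at the end of the method  */
                             /* which raise the ClassCastException.                        */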
3322
3323                         {
3324                         classinfo *super = (classinfo *) iptr->val.a;
3325                         
3326 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3327             codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3328 #endif
3329                         var_to_reg_int(s1, src, REG_ITMP1);
3330                         if (iptr->op1) {                               /* class/interface */
3331                                 if (super->flags & ACC_INTERFACE) {        /* interface       */
3332                                         x86_64_test_reg_reg(cd, s1, s1);
3333
3334                                         /* TODO: clean up this calculation */
3335                                         a = 3;    /* mov_membase_reg */
3336                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3337
3338                                         a += 3;    /* movl_membase_reg - if REG_ITMP3 == R10 */
3339                                         CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3340
3341                                         a += 3;    /* sub */
3342                                         CALCIMMEDIATEBYTES(a, super->index);
3343
3344                                         a += 3;    /* test */
3345                                         a += 6;    /* jcc */
3346
3347                                         a += 3;    /* mov_membase_reg */
3348                                         CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3349
3350                                         a += 3;    /* test */
3351                                         a += 6;    /* jcc */
3352
3353                                         x86_64_jcc(cd, X86_64_CC_E, a);
3354
3355                                         x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP2);
3356                                         x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, interfacetablelength), REG_ITMP3);
3357                                         x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP3);
3358                                         x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3359                                         x86_64_jcc(cd, X86_64_CC_LE, 0);
3360                                         codegen_addxcastrefs(cd, cd->mcodeptr);
3361                                         x86_64_mov_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP3);
3362                                         x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3363                                         x86_64_jcc(cd, X86_64_CC_E, 0);
3364                                         codegen_addxcastrefs(cd, cd->mcodeptr);
3365
3366                                 } else {                                   /* class           */
3367                                         x86_64_test_reg_reg(cd, s1, s1);
3368
3369                                         /* TODO: clean up this calculation */
3370                                         a = 3;     /* mov_membase_reg */
3371                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3372                                         a += 10;   /* mov_imm_reg */
3373                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3374                                         CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3375
3376                                         if (s1 != REG_ITMP1) {
3377                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP3 == R11 */
3378                                                 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3379                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP3 == R11 */
3380                                                 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3381                                                 a += 3;    /* sub */
3382
3383                                         } else {
3384                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP3 == R11 */
3385                                                 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3386                                                 a += 3;    /* sub */
3387                                                 a += 10;   /* mov_imm_reg */
3388                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP3 == R11 */
3389                                                 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3390                                         }
3391
3392                                         a += 3;    /* cmp */
3393                                         a += 6;    /* jcc */
3394
3395                                         x86_64_jcc(cd, X86_64_CC_E, a);
3396
3397                                         x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP2);
3398                                         x86_64_mov_imm_reg(cd, (ptrint) super->vftbl, REG_ITMP3);
3399 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3400                                         codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3401 #endif
3402                                         x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP2);
3403                                         if (s1 != REG_ITMP1) {
3404                                                 x86_64_movl_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1);
3405                                                 x86_64_movl_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3);
3406 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3407                                                 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3408 #endif
3409                                                 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP1, REG_ITMP2);
3410
3411                                         } else {
3412                                                 x86_64_movl_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP3);
3413                                                 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP2);
3414                                                 x86_64_mov_imm_reg(cd, (ptrint) super->vftbl, REG_ITMP3);
3415                                                 x86_64_movl_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3);
3416                                         }
3417 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3418                                         codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3419 #endif
3420                                         x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP2);
3421                                         x86_64_jcc(cd, X86_64_CC_A, 0);    /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3422                                         codegen_addxcastrefs(cd, cd->mcodeptr);
3423                                 }
3424
3425                         } else
3426                                 panic("internal error: no inlined array checkcast");
3427                         }
3428                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3429                         M_INTMOVE(s1, d);
3430                         store_reg_to_var_int(iptr->dst, d);
3431 /*                      if (iptr->dst->flags & INMEMORY) { */
3432 /*                              x86_64_mov_reg_membase(cd, s1, REG_SP, iptr->dst->regoff * 8); */
3433 /*                      } else { */
3434 /*                              M_INTMOVE(s1, iptr->dst->regoff); */
3435 /*                      } */
3436                         break;
3437
3438                 case ICMD_CHECKASIZE:  /* ..., size ==> ..., size                     */
3439
3440                         if (src->flags & INMEMORY) {
3441                                 x86_64_alul_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
3442                                 
3443                         } else {
3444                                 x86_64_testl_reg_reg(cd, src->regoff, src->regoff);
3445                         }
3446                         x86_64_jcc(cd, X86_64_CC_L, 0);
3447                         codegen_addxcheckarefs(cd, cd->mcodeptr);
3448                         break;
3449
3450                 case ICMD_CHECKEXCEPTION:    /* ... ==> ...                           */
3451
3452                         x86_64_test_reg_reg(cd, REG_RESULT, REG_RESULT);
3453                         x86_64_jcc(cd, X86_64_CC_E, 0);
3454                         codegen_addxexceptionrefs(cd, cd->mcodeptr);
3455                         break;
3456
3457                 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref  */
3458                                          /* op1 = dimension, val.a = array descriptor */
3459
3460                         /* check for negative sizes and copy sizes to stack if necessary  */
3461
3462                         MCODECHECK((iptr->op1 << 1) + 64);
3463
3464                         for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3465                                 var_to_reg_int(s2, src, REG_ITMP1);
3466                                 x86_64_testl_reg_reg(cd, s2, s2);
3467                                 x86_64_jcc(cd, X86_64_CC_L, 0);
3468                                 codegen_addxcheckarefs(cd, cd->mcodeptr);
3469
3470                                 /* copy SAVEDVAR sizes to stack */
3471
3472                                 if (src->varkind != ARGVAR) {
3473                                         x86_64_mov_reg_membase(cd, s2, REG_SP, s1 * 8);
3474                                 }
3475                         }
3476
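                             /* The sizes are expected to end up in consecutive stack      */
                             /* slots (ARGVAR sizes presumably live there already), so the */
                             /* stack pointer itself can be passed as the dimension array. */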
3477                         /* a0 = dimension count */
3478                         x86_64_mov_imm_reg(cd, iptr->op1, rd->argintregs[0]);
3479
3480                         /* a1 = arraydescriptor */
3481                         x86_64_mov_imm_reg(cd, (u8) iptr->val.a, rd->argintregs[1]);
3482
3483                         /* a2 = pointer to dimensions = stack pointer */
3484                         x86_64_mov_reg_reg(cd, REG_SP, rd->argintregs[2]);
3485
3486                         x86_64_mov_imm_reg(cd, (u8) builtin_nmultianewarray, REG_ITMP1);
3487                         x86_64_call_reg(cd, REG_ITMP1);
3488
3489                         s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3490                         M_INTMOVE(REG_RESULT, s1);
3491                         store_reg_to_var_int(iptr->dst, s1);
3492                         break;
3493
3494                 default:
3495                         throw_cacao_exception_exit(string_java_lang_InternalError,
3496                                                                            "Unknown ICMD %d", iptr->opc);
3497         } /* switch */
3498                 
3499         } /* for instruction */
3500                 
3501         /* copy values to interface registers */
3502
3503         src = bptr->outstack;
3504         len = bptr->outdepth;
3505         MCODECHECK(64 + len);
3506 #ifdef LSRA
3507         if (!opt_lsra)
3508 #endif
3509         while (src) {
3510                 len--;
3511                 if ((src->varkind != STACKVAR)) {
3512                         s2 = src->type;
3513                         if (IS_FLT_DBL_TYPE(s2)) {
3514                                 var_to_reg_flt(s1, src, REG_FTMP1);
3515                                 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3516                                         M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3517
3518                                 } else {
3519                                         x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3520                                 }
3521
3522                         } else {
3523                                 var_to_reg_int(s1, src, REG_ITMP1);
3524                                 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3525                                         M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3526
3527                                 } else {
3528                                         x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3529                                 }
3530                         }
3531                 }
3532                 src = src->prev;
3533         }
3534         } /* if (bptr -> flags >= BBREACHED) */
3535         } /* for basic block */
3536
3537         {
3538
3539         /* generate bound check stubs */
3540
3541         u1 *xcodeptr = NULL;
3542         branchref *bref;
3543
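        /* Every branchref is resolved to point at the code emitted here.  The   */
        /* stubs rebuild the faulting PC in REG_ITMP2_XPC from the data segment  */
        /* base (loaded via dseg_adddata) plus the recorded branch position, and */
        /* all references of one kind share the exception-creating tail once     */
        /* xcodeptr has been set.                                                */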
3544         for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3545                 gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3546                                   bref->branchpos,
3547                                                   cd->mcodeptr - cd->mcodebase);
3548
3549                 MCODECHECK(100);
3550
3551                 /* move index register into REG_ITMP1 */
3552                 x86_64_mov_reg_reg(cd, bref->reg, REG_ITMP1);             /* 3 bytes  */
3553
3554                 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC);                 /* 10 bytes */
3555                 dseg_adddata(cd, cd->mcodeptr);
3556                 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3);   /* 10 bytes */
3557                 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes  */
3558
3559                 if (xcodeptr != NULL) {
3560                         x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3561
3562                 } else {
3563                         xcodeptr = cd->mcodeptr;
3564
3565                         x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3566                         x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3567
3568                         x86_64_mov_reg_reg(cd, REG_ITMP1, rd->argintregs[0]);
3569                         x86_64_mov_imm_reg(cd, (u8) new_arrayindexoutofboundsexception, REG_ITMP3);
3570                         x86_64_call_reg(cd, REG_ITMP3);
3571
3572                         x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3573                         x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3574
3575                         x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3576                         x86_64_jmp_reg(cd, REG_ITMP3);
3577                 }
3578         }
3579
3580         /* generate negative array size check stubs */
3581
3582         xcodeptr = NULL;
3583         
3584         for (bref = cd->xcheckarefs; bref != NULL; bref = bref->next) {
3585                 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3586                         gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3587                                                           bref->branchpos,
3588                                                           xcodeptr - cd->mcodebase - (10 + 10 + 3));
3589                         continue;
3590                 }
3591
3592                 gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3593                                   bref->branchpos,
3594                                                   cd->mcodeptr - cd->mcodebase);
3595
3596                 MCODECHECK(100);
3597
3598                 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC);                 /* 10 bytes */
3599                 dseg_adddata(cd, cd->mcodeptr);
3600                 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3);   /* 10 bytes */
3601                 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes  */
3602
3603                 if (xcodeptr != NULL) {
3604                         x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3605
3606                 } else {
3607                         xcodeptr = cd->mcodeptr;
3608
3609                         x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3610                         x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3611
3612                         x86_64_mov_imm_reg(cd, (u8) new_negativearraysizeexception, REG_ITMP3);
3613                         x86_64_call_reg(cd, REG_ITMP3);
3614
3615                         x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3616                         x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3617
3618                         x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3619                         x86_64_jmp_reg(cd, REG_ITMP3);
3620                 }
3621         }
3622
3623         /* generate cast check stubs */
3624
3625         xcodeptr = NULL;
3626         
3627         for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3628                 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3629                         gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3630                                                           bref->branchpos,
3631                                                           xcodeptr - cd->mcodebase - (10 + 10 + 3));
3632                         continue;
3633                 }
3634
3635                 gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3636                                   bref->branchpos,
3637                                                   cd->mcodeptr - cd->mcodebase);
3638
3639                 MCODECHECK(100);
3640
3641                 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC);                 /* 10 bytes */
3642                 dseg_adddata(cd, cd->mcodeptr);
3643                 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3);   /* 10 bytes */
3644                 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes  */
3645
3646                 if (xcodeptr != NULL) {
3647                         x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3648                 
3649                 } else {
3650                         xcodeptr = cd->mcodeptr;
3651
3652                         x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3653                         x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3654
3655                         x86_64_mov_imm_reg(cd, (u8) new_classcastexception, REG_ITMP3);
3656                         x86_64_call_reg(cd, REG_ITMP3);
3657
3658                         x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3659                         x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3660
3661                         x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3662                         x86_64_jmp_reg(cd, REG_ITMP3);
3663                 }
3664         }
3665
3666         /* generate divide by zero check stubs */
3667
3668         xcodeptr = NULL;
3669         
3670         for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
3671                 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3672                         gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3673                                                           bref->branchpos,
3674                                                           xcodeptr - cd->mcodebase - (10 + 10 + 3));
3675                         continue;
3676                 }
3677
3678                 gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3679                                   bref->branchpos,
3680                                                   cd->mcodeptr - cd->mcodebase);
3681
3682                 MCODECHECK(100);
3683
3684                 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC);                 /* 10 bytes */
3685                 dseg_adddata(cd, cd->mcodeptr);
3686                 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3);   /* 10 bytes */
3687                 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes  */
3688
3689                 if (xcodeptr != NULL) {
3690                         x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3691                 
3692                 } else {
3693                         xcodeptr = cd->mcodeptr;
3694
3695                         x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3696                         x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3697
3698                         x86_64_mov_imm_reg(cd, (u8) new_arithmeticexception, REG_ITMP3);
3699                         x86_64_call_reg(cd, REG_ITMP3);
3700
3701                         x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3702                         x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3703
3704                         x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3705                         x86_64_jmp_reg(cd, REG_ITMP3);
3706                 }
3707         }
3708
3709         /* generate exception check stubs */
3710
3711         xcodeptr = NULL;
3712         
3713         for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
3714                 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3715                         gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3716                                                           bref->branchpos,
3717                                                           xcodeptr - cd->mcodebase - (10 + 10 + 3));
3718                         continue;
3719                 }
3720
3721                 gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3722                                   bref->branchpos,
3723                                                   cd->mcodeptr - cd->mcodebase);
3724
3725                 MCODECHECK(100);
3726
3727                 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC);                 /* 10 bytes */
3728                 dseg_adddata(cd, cd->mcodeptr);
3729                 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1);   /* 10 bytes */
3730                 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes  */
3731
3732                 if (xcodeptr != NULL) {
3733                         x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3734                 
3735                 } else {
3736                         xcodeptr = cd->mcodeptr;
3737
3738 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3739                         x86_64_alu_imm_reg(cd, X86_64_SUB, 8, REG_SP);
3740                         x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0);
3741                         x86_64_mov_imm_reg(cd, (u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3742                         x86_64_call_reg(cd, REG_ITMP1);
3743                         x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
3744                         x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
3745                         x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
3746                         x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);
3747                         x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
3748 #else
3749                         x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
3750                         x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP1_XPTR);
3751                         x86_64_mov_imm_membase(cd, 0, REG_ITMP3, 0);
3752 #endif
3753
3754                         x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3755                         x86_64_jmp_reg(cd, REG_ITMP3);
3756                 }
3757         }
3758
3759         /* generate null pointer check stubs */
3760
3761         xcodeptr = NULL;
3762         
3763         for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
3764                 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3765                         gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3766                                                           bref->branchpos,
3767                                                           xcodeptr - cd->mcodebase - (10 + 10 + 3));
3768                         continue;
3769                 }
3770
3771                 gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3772                                   bref->branchpos,
3773                                                   cd->mcodeptr - cd->mcodebase);
3774
3775                 MCODECHECK(100);
3776
3777                 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC);                 /* 10 bytes */
3778                 dseg_adddata(cd, cd->mcodeptr);
3779                 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1);   /* 10 bytes */
3780                 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes  */
3781
3782                 if (xcodeptr != NULL) {
3783                         x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3784                 
3785                 } else {
3786                         xcodeptr = cd->mcodeptr;
3787
3788                         x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3789                         x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3790
3791                         x86_64_mov_imm_reg(cd, (u8) new_nullpointerexception, REG_ITMP3);
3792                         x86_64_call_reg(cd, REG_ITMP3);
3793
3794                         x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3795                         x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3796
3797                         x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3798                         x86_64_jmp_reg(cd, REG_ITMP3);
3799                 }
3800         }
3801
3802         /* generate put/getstatic stub call code */
3803
3804         {
3805                 clinitref   *cref;
3806                 codegendata *tmpcd;
3807                 u1           xmcode;
3808                 u4           mcode;
3809
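                /* For every unresolved put/getstatic the 5 bytes at the patch    */
                /* position (1 opcode byte + 4 operand bytes) are saved, a        */
                /* `call rel32' into the code emitted below is written over them, */
                /* and asm_check_clinit is expected to initialize the class and   */
                /* patch the saved bytes back in.                                 */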
3810                 tmpcd = DNEW(codegendata);
3811
3812                 for (cref = cd->clinitrefs; cref != NULL; cref = cref->next) {
3813                         /* Get machine code which is patched back in later. A             */
3814                         /* `call rel32' is 5 bytes long.                                  */
3815                         xcodeptr = cd->mcodebase + cref->branchpos;
3816                         xmcode = *xcodeptr;
3817                         mcode = *((u4 *) (xcodeptr + 1));
3818
3819                         MCODECHECK(50);
3820
3821                         /* patch in `call rel32' to call the following code               */
3822                         tmpcd->mcodeptr = xcodeptr;     /* set dummy mcode pointer        */
3823                         x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
3824
3825                         /* Push machine code bytes to patch onto the stack.               */
3826                         x86_64_push_imm(cd, (u1) xmcode);
3827                         x86_64_push_imm(cd, (u4) mcode);
3828
3829                         x86_64_push_imm(cd, (u8) cref->class);
3830
3831                         x86_64_mov_imm_reg(cd, (u8) asm_check_clinit, REG_ITMP1);
3832                         x86_64_jmp_reg(cd, REG_ITMP1);
3833                 }
3834         }
3835         }
3836
3837         codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
3838 }
3839
3840
3841 /* function createcompilerstub *************************************************
3842
3843         creates a stub routine which calls the compiler
3844         
3845 *******************************************************************************/
3846
3847 #define COMPSTUBSIZE    23
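/* The stub is two `mov imm64, reg' instructions (10 bytes each) followed by an */
/* indirect `jmp *reg' (3 bytes), which accounts for the 23 bytes above.        */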
3848
3849 u1 *createcompilerstub(methodinfo *m)
3850 {
3851         u1 *s = CNEW(u1, COMPSTUBSIZE);     /* memory to hold the stub            */
3852         codegendata *cd;
3853         s4 dumpsize;
3854
3855         /* mark start of dump memory area */
3856
3857         dumpsize = dump_size();
3858
3859         cd = DNEW(codegendata);
3860         cd->mcodeptr = s;
3861
3862         /* code for the stub */
3863         x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1); /* pass method to compiler     */
3864         x86_64_mov_imm_reg(cd, (u8) asm_call_jit_compiler, REG_ITMP3);/* load address */
3865         x86_64_jmp_reg(cd, REG_ITMP3);      /* jump to compiler                   */
3866
3867 #if defined(STATISTICS)
3868         if (opt_stat)
3869                 count_cstub_len += COMPSTUBSIZE;
3870 #endif
3871
3872         /* release dump area */
3873
3874         dump_release(dumpsize);
3875
3876         return s;
3877 }
3878
3879
3880 /* function removecompilerstub *************************************************
3881
3882      deletes a compilerstub from memory  (simply by freeing it)
3883
3884 *******************************************************************************/
3885
3886 void removecompilerstub(u1 *stub) 
3887 {
3888         CFREE(stub, COMPSTUBSIZE);
3889 }
3890
3891
3892 /* function: createnativestub **************************************************
3893
3894         creates a stub routine which calls a native method
3895
3896 *******************************************************************************/
3897
3898 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3899 /* static java_objectheader **(*callgetexceptionptrptr)() = builtin_get_exceptionptrptr; */
3900 /* #endif */
3901
3902 #define NATIVESTUBSIZE    700           /* keep this size high enough!        */
3903
3904 u1 *createnativestub(functionptr f, methodinfo *m)
3905 {
3906         u1                 *s;              /* pointer to stub memory             */
3907         codegendata        *cd;
3908         registerdata       *rd;
3909         t_inlining_globals *id;
3910         s4                  dumpsize;
3911         s4                  stackframesize; /* size of stackframe if needed       */
3912         u1                 *tptr;
3913         s4                  iargs;          /* count of integer arguments         */
3914         s4                  fargs;          /* count of float arguments           */
3915         s4                  i;              /* counter                            */
3916
3917         void **callAddrPatchPos = NULL;
3918         u1 *jmpInstrPos = NULL;
3919         void **jmpInstrPatchPos = NULL;
3920
3921         /* initialize variables */
3922
3923         iargs = 0;
3924         fargs = 0;
3925
3926         /* mark start of dump memory area */
3927
3928         dumpsize = dump_size();
3929
3930         cd = DNEW(codegendata);
3931         rd = DNEW(registerdata);
3932         id = DNEW(t_inlining_globals);
3933
3934         /* setup registers before using it */
3935
3936         /* set up registers before using them */
3937         reg_setup(m, rd, id);
3938
3939         /* set paramcount and paramtypes      */
3940
3941         descriptor2types(m);
3942
3943         /* count integer and float arguments */
3944
3945         tptr = m->paramtypes;
3946         for (i = 0; i < m->paramcount; i++) {
3947                 IS_INT_LNG_TYPE(*tptr++) ? iargs++ : fargs++;
3948         }
3949
3950         s = CNEW(u1, NATIVESTUBSIZE);       /* memory to hold the stub            */
3951
3952         /* set some required variables which are normally set by codegen_setup */
3953         cd->mcodebase = s;
3954         cd->mcodeptr = s;
3955         cd->clinitrefs = NULL;
3956
3957         /* if function is static, check for initialized */
3958
3959         if ((m->flags & ACC_STATIC) && !m->class->initialized) {
3960                 codegen_addclinitref(cd, cd->mcodeptr, m->class);
3961         }
3962
3963         if (runverbose) {
3964                 s4 l, s1;
3965
3966                 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
3967
3968                 /* save integer and float argument registers */
3969
3970                 for (i = 0; i < INT_ARG_CNT; i++) {
3971                         x86_64_mov_reg_membase(cd, rd->argintregs[i], REG_SP, (1 + i) * 8);
3972                 }
3973
3974                 for (i = 0; i < FLT_ARG_CNT; i++) {
3975                         x86_64_movq_reg_membase(cd, rd->argfltregs[i], REG_SP, (1 + INT_ARG_CNT + i) * 8);
3976                 }
3977
3978                 /* show integer hex code for float arguments */
3979
3980                 for (i = 0, l = 0; i < m->paramcount && i < INT_ARG_CNT; i++) {
3981                         /* if the paramtype is a float, we have to shift all following    */
3982                         /* integer argument registers one position to the right           */
3983
3984                         if (IS_FLT_DBL_TYPE(m->paramtypes[i])) {
3985                                 for (s1 = INT_ARG_CNT - 2; s1 >= i; s1--) {
3986                                         x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
3987                                 }
3988
3989                                 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[i]);
3990                                 l++;
3991                         }
3992                 }
3993
3994                 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1);
3995                 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, 0 * 8);
3996                 x86_64_mov_imm_reg(cd, (u8) builtin_trace_args, REG_ITMP1);
3997                 x86_64_call_reg(cd, REG_ITMP1);
3998
3999                 /* restore integer and float argument registers */
4000
4001                 for (i = 0; i < INT_ARG_CNT; i++) {
4002                         x86_64_mov_membase_reg(cd, REG_SP, (1 + i) * 8, rd->argintregs[i]);
4003                 }
4004
4005                 for (i = 0; i < FLT_ARG_CNT; i++) {
4006                         x86_64_movq_membase_reg(cd, REG_SP, (1 + INT_ARG_CNT + i) * 8, rd->argfltregs[i]);
4007                 }
4008
4009                 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4010         }
4011
4012 #if !defined(STATIC_CLASSPATH)
4013         /* call method to resolve native function if needed */
4014         if (f == NULL) {
4015                 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4016
4017                 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 1 * 8);
4018                 x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 2 * 8);
4019                 x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 3 * 8);
4020                 x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 4 * 8);
4021                 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 5 * 8);
4022                 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 6 * 8);
4023
4024                 x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 7 * 8);
4025                 x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 8 * 8);
4026                 x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 9 * 8);
4027                 x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 10 * 8);
4028                 x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 11 * 8);
4029                 x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 12 * 8);
4030                 x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 13 * 8);
4031                 x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 14 * 8);
4032
4033                 /* needed to patch a jump over this block */
4034                 x86_64_jmp_imm(cd, 0);
4035                 jmpInstrPos = cd->mcodeptr - 4;
4036
4037                 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
4038
4039                 x86_64_mov_imm_reg(cd, 0, rd->argintregs[1]);
4040                 callAddrPatchPos = cd->mcodeptr - 8; /* the immediate of the mov above is later filled with the position where the native function address has to be patched in */
4041
4042                 x86_64_mov_imm_reg(cd, 0, rd->argintregs[2]);
4043                 jmpInstrPatchPos = cd->mcodeptr - 8;
4044
4045                 x86_64_mov_imm_reg(cd, (u8) jmpInstrPos, rd->argintregs[3]);
4046
4047                 x86_64_mov_imm_reg(cd, (u8) codegen_resolve_native, REG_ITMP1);
4048                 x86_64_call_reg(cd, REG_ITMP1);
4049
4050                 *(jmpInstrPatchPos) = (void *) (cd->mcodeptr - jmpInstrPos - 1); /* jump distance for patching the jmp above, passed to codegen_resolve_native */
4051
4052                 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[0]);
4053                 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[1]);
4054                 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[2]);
4055                 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[3]);
4056                 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[4]);
4057                 x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, rd->argintregs[5]);
4058
4059                 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[0]);
4060                 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[1]);
4061                 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[2]);
4062                 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[3]);
4063                 x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[4]);
4064                 x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[5]);
4065                 x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[6]);
4066                 x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, rd->argfltregs[7]);
4067
4068                 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4069         }
4070 #endif
4071
4072         /* save argument registers on stack -- if we have to */
4073
4074         if ((((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) || iargs > (INT_ARG_CNT - 1)) ||
4075                 (fargs > FLT_ARG_CNT)) {
4076                 s4 paramshiftcnt;
4077                 s4 stackparamcnt;
4078
4079                 paramshiftcnt = 0;
4080                 stackparamcnt = 0;
4081
4082                 /* do we need to shift integer argument register onto stack? */
4083
4084                 if ((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) {
4085                         /* do we need to shift 2 arguments? */
4086                         if (iargs > (INT_ARG_CNT - 1)) {
4087                                 paramshiftcnt = 2;
4088
4089                         } else {
4090                                 paramshiftcnt = 1;
4091                         }
4092
4093                 } else if (iargs > (INT_ARG_CNT - 1)) {
4094                         paramshiftcnt = 1;
4095                 }
4096
4097                 /* calculate required stack space */
4098
4099                 stackparamcnt += (iargs > INT_ARG_CNT) ? iargs - INT_ARG_CNT : 0;
4100                 stackparamcnt += (fargs > FLT_ARG_CNT) ? fargs - FLT_ARG_CNT : 0;
4101
4102                 stackframesize = stackparamcnt + paramshiftcnt;
4103
4104                 /* keep stack 16-byte aligned */
4105                 if (!(stackframesize & 0x1))
4106                         stackframesize++;
4107
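                /* The call into this stub pushed an 8-byte return address, so an */
                /* odd number of 8-byte slots keeps RSP 16-byte aligned at the    */
                /* native call; the else branch below allocates its single slot   */
                /* for the same reason.                                           */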
4108                 x86_64_alu_imm_reg(cd, X86_64_SUB, stackframesize * 8, REG_SP);
4109
4110                 /* shift integer arguments if required */
4111
4112                 if ((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) {
4113                         /* do we need to shift 2 arguments? */
4114                         if (iargs > (INT_ARG_CNT - 1))
4115                                 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 1 * 8);
4116
4117                         x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 0 * 8);
4118
4119                 } else if (iargs > (INT_ARG_CNT - 1)) {
4120                         x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 0 * 8);
4121                 }
4122
4123                 /* copy stack arguments into new stack frame -- if any */
4124                 for (i = 0; i < stackparamcnt; i++) {
4125                         x86_64_mov_membase_reg(cd, REG_SP, (stackframesize + 1 + i) * 8, REG_ITMP1);
4126                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
4127                 }
4128
4129         } else {
4130                 /* keep stack 16-byte aligned */
4131                 x86_64_alu_imm_reg(cd, X86_64_SUB, 1 * 8, REG_SP);
4132                 stackframesize = 1;
4133         }
4134
4135         /* shift integer arguments for `env' and `class' arguments */
4136
4137         if (m->flags & ACC_STATIC) {
4138                 /* move the (at most INT_ARG_CNT - 2) register arguments two positions up */
4139                 for (i = (iargs < (INT_ARG_CNT - 2)) ? iargs : (INT_ARG_CNT - 2); i >= 0; i--) {
4140                         x86_64_mov_reg_reg(cd, rd->argintregs[i], rd->argintregs[i + 2]);
4141                 }
4142
4143                 /* put class into second argument register */
4144                 x86_64_mov_imm_reg(cd, (u8) m->class, rd->argintregs[1]);
4145
4146         } else {
4147                 /* move the (at most INT_ARG_CNT - 1) register arguments one position up */
4148                 for (i = (iargs < (INT_ARG_CNT - 1)) ? iargs : (INT_ARG_CNT - 1); i >= 0; i--) {
4149                         x86_64_mov_reg_reg(cd, rd->argintregs[i], rd->argintregs[i + 1]);
4150                 }
4151         }
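        /* The native function thus receives the environment pointer as its      */
        /* first argument and, for static methods, the class as its second       */
        /* (JNI-style); the spill of the last one or two integer argument        */
        /* registers above made room for this shift.                             */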
4152
4153         /* put env into first argument register */
4154         x86_64_mov_imm_reg(cd, (u8) &env, rd->argintregs[0]);
4155
4156         /* do the native function call */
4157         x86_64_mov_imm_reg(cd, (u8) f, REG_ITMP1);
4158 #if !defined(STATIC_CLASSPATH)
4159         if (f == NULL)
4160                 (*callAddrPatchPos) = cd->mcodeptr - 8;
4161 #endif
4162         x86_64_call_reg(cd, REG_ITMP1);
4163
4164         /* remove stackframe if there is one */
4165         if (stackframesize) {
4166                 x86_64_alu_imm_reg(cd, X86_64_ADD, stackframesize * 8, REG_SP);
4167         }
4168
4169         if (runverbose) {
4170                 x86_64_alu_imm_reg(cd, X86_64_SUB, 3 * 8, REG_SP);    /* keep stack 16-byte aligned */
4171
4172                 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
4173                 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
4174
4175                 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
4176                 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
4177                 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
4178                 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
4179
4180                 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
4181                 x86_64_call_reg(cd, REG_ITMP1);
4182
4183                 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
4184                 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
4185
4186                 x86_64_alu_imm_reg(cd, X86_64_ADD, 3 * 8, REG_SP);    /* keep stack 16-byte aligned */
4187         }
4188
4189         /* check for exception */
4190
4191 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4192         x86_64_push_reg(cd, REG_RESULT);
4193 /*      x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4194         x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4195         x86_64_call_reg(cd, REG_ITMP3);
4196         x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
4197         x86_64_pop_reg(cd, REG_RESULT);
4198 #else
4199         x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4200         x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP3);
4201 #endif
4202         x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
4203         x86_64_jcc(cd, X86_64_CC_NE, 1);
4204
4205         x86_64_ret(cd);
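        /* If an exception is pending, the jcc above skips the single-byte `ret' */
        /* and execution continues in the handler code below.                    */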
4206
4207         /* handle exception */
4208
4209 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4210         x86_64_push_reg(cd, REG_ITMP3);
4211 /*      x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4212         x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4213         x86_64_call_reg(cd, REG_ITMP3);
4214         x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
4215         x86_64_pop_reg(cd, REG_ITMP1_XPTR);
4216 #else
4217         x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
4218         x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4219         x86_64_alu_reg_reg(cd, X86_64_XOR, REG_ITMP2, REG_ITMP2);
4220         x86_64_mov_reg_membase(cd, REG_ITMP2, REG_ITMP3, 0);    /* clear exception pointer */
4221 #endif
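        /* In both variants the exception object ends up in REG_ITMP1_XPTR and the */
        /* stored exception pointer is reset to NULL before control leaves the     */
        /* stub.                                                                   */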
4222
4223         x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);    /* get return address from stack */
4224         x86_64_alu_imm_reg(cd, X86_64_SUB, 3, REG_ITMP2_XPC);    /* return address - 3: point XPC back at the `callq' */
4225
4226         x86_64_mov_imm_reg(cd, (u8) asm_handle_nat_exception, REG_ITMP3);
4227         x86_64_jmp_reg(cd, REG_ITMP3);
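        /* Control is transferred to asm_handle_nat_exception with the exception   */
        /* object in REG_ITMP1_XPTR and the adjusted return address in             */
        /* REG_ITMP2_XPC; that routine is expected to dispatch the exception to    */
        /* the calling Java frame.                                                  */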
4228
4229
4230         /* patch in a clinit call if required *************************************/
4231
4232         {
4233                 u1          *xcodeptr;
4234                 clinitref   *cref;
4235                 codegendata *tmpcd;
4236                 u1           xmcode;
4237                 u4           mcode;
4238
4239                 tmpcd = DNEW(codegendata);
4240
4241                 /* there can only be one clinit ref entry                             */
4242                 cref = cd->clinitrefs;
4243
4244                 if (cref) {
4245                         /* Get machine code which is patched back in later. A             */
4246                         /* `call rel32' is 5 bytes long.                                  */
4247                         xcodeptr = cd->mcodebase + cref->branchpos;
4248                         xmcode = *xcodeptr;
4249                         mcode = *((u4 *) (xcodeptr + 1));
4250
4251                         /* patch in `call rel32' to call the following code               */
4252                         tmpcd->mcodeptr = xcodeptr;     /* set dummy mcode pointer        */
4253                         x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4254
4255                         /* Push machine code bytes to patch onto the stack.               */
4256                         x86_64_push_imm(cd, (u1) xmcode);
4257                         x86_64_push_imm(cd, (u4) mcode);
4258
4259                         x86_64_push_imm(cd, (u8) cref->class);
4260
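                        /* asm_check_clinit is expected to initialize the pushed class   */
                        /* and then write the saved xmcode/mcode bytes back over the     */
                        /* patched-in `call rel32' (the "patched back in later" above),  */
                        /* so later executions run the original instruction again.       */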
4261                         x86_64_mov_imm_reg(cd, (u8) asm_check_clinit, REG_ITMP1);
4262                         x86_64_jmp_reg(cd, REG_ITMP1);
4263                 }
4264         }
4265
4266         /* Check if the stub buffer is big enough to hold the whole generated     */
4267         /* stub. If it is not, the overflow corrupts the heap and can lead to     */
4268         /* unpredictable crashes.                                                  */
4269         if ((s4) (cd->mcodeptr - s) > NATIVESTUBSIZE) {
4270                 throw_cacao_exception_exit(string_java_lang_InternalError,
4271                                                                    "Native stub size %d is too small for current stub size %d",
4272                                                                    NATIVESTUBSIZE, (s4) (cd->mcodeptr - s));
4273         }
4274
4275 #if defined(STATISTICS)
4276         if (opt_stat)
4277                 count_nstub_len += NATIVESTUBSIZE;
4278 #endif
4279
4280         /* release dump area */
4281
4282         dump_release(dumpsize);
4283
4284         return s;
4285 }
4286
4287
4288 /* function: removenativestub **************************************************
4289
4290     removes a previously created native stub from memory
4291     
4292 *******************************************************************************/
4293
4294 void removenativestub(u1 *stub)
4295 {
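        /* the stub was allocated as a fixed NATIVESTUBSIZE block (see the size    */
        /* check in the stub generator above), so the same size is freed here      */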
4296         CFREE(stub, NATIVESTUBSIZE);
4297 }
4298
4299
4300 /*
4301  * These are local overrides for various environment variables in Emacs.
4302  * Please do not remove this and leave it at the end of the file, where
4303  * Emacs will automagically detect them.
4304  * ---------------------------------------------------------------------
4305  * Local variables:
4306  * mode: c
4307  * indent-tabs-mode: t
4308  * c-basic-offset: 4
4309  * tab-width: 4
4310  * End:
4311  */