1 /* vm/jit/x86_64/codegen.c - machine code generator for x86_64
2
3    Copyright (C) 1996-2005 R. Grafl, A. Krall, C. Kruegel, C. Oates,
4    R. Obermaisser, M. Platter, M. Probst, S. Ring, E. Steiner,
5    C. Thalinger, D. Thuernbeck, P. Tomsich, C. Ullrich, J. Wenninger,
6    Institut f. Computersprachen - TU Wien
7
8    This file is part of CACAO.
9
10    This program is free software; you can redistribute it and/or
11    modify it under the terms of the GNU General Public License as
12    published by the Free Software Foundation; either version 2, or (at
13    your option) any later version.
14
15    This program is distributed in the hope that it will be useful, but
16    WITHOUT ANY WARRANTY; without even the implied warranty of
17    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18    General Public License for more details.
19
20    You should have received a copy of the GNU General Public License
21    along with this program; if not, write to the Free Software
22    Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
23    02111-1307, USA.
24
25    Contact: cacao@complang.tuwien.ac.at
26
27    Authors: Andreas Krall
28             Christian Thalinger
29
30    $Id: codegen.c 2237 2005-04-06 12:12:40Z twisti $
31
32 */
33
34
35 #define _GNU_SOURCE
36
37 #include <stdio.h>
38 #include <ucontext.h>
39
40 #include "cacao/cacao.h"
41 #include "native/native.h"
42 #include "vm/global.h"
43 #include "vm/builtin.h"
44 #include "vm/loader.h"
45 #include "vm/tables.h"
46 #include "vm/jit/asmpart.h"
47 #include "vm/jit/jit.h"
48 #include "vm/jit/reg.h"
49 #include "vm/jit/parse.h"
50 #include "vm/jit/x86_64/arch.h"
51 #include "vm/jit/x86_64/codegen.h"
52 #include "vm/jit/x86_64/emitfuncs.h"
53 #include "vm/jit/x86_64/types.h"
54 #include "vm/jit/x86_64/asmoffsets.h"
55
56
57 /* register description - array ***********************************************/
58
59 /* #define REG_RES   0         reserved register for OS or code generator     */
60 /* #define REG_RET   1         return value register                          */
61 /* #define REG_EXC   2         exception value register (only old jit)        */
62 /* #define REG_SAV   3         (callee) saved register                        */
63 /* #define REG_TMP   4         scratch temporary register (caller saved)      */
64 /* #define REG_ARG   5         argument register (caller saved)               */
65
66 /* #define REG_END   -1        last entry in tables                           */
67
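/* integer registers in hardware encoding order:                              */
/*   rax, rcx, rdx, rbx, rsp, rbp, rsi, rdi, r8, r9, r10, r11, r12 ... r15    */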
68 static int nregdescint[] = {
69     REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
70     REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
71     REG_END
72 };
73
74
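/* floating point registers xmm0 (index 0) through xmm15 (index 15)           */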
75 static int nregdescfloat[] = {
76     REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_ARG,
77     REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
78     REG_END
79 };
80
81
82 /* Include independent code generation stuff -- include after register        */
83 /* descriptions to avoid extern definitions.                                  */
84
85 #include "vm/jit/codegen.inc"
86 #include "vm/jit/reg.inc"
87 #ifdef LSRA
88 #include "vm/jit/lsra.inc"
89 #endif
90
91
92 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
93 void thread_restartcriticalsection(ucontext_t *uc)
94 {
95         void *critical;
96
97         critical = thread_checkcritical((void *) uc->uc_mcontext.gregs[REG_RIP]);
98
99         if (critical)
100                 uc->uc_mcontext.gregs[REG_RIP] = (u8) critical;
101 }
102 #endif
103
104
105 /* NullPointerException signal handler for hardware null pointer check */
106
107 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
108 {
109         sigset_t nsig;
110
111         struct ucontext *_uc = (struct ucontext *) _p;
112         struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
113         struct sigaction act;
114         java_objectheader *xptr;
115
116         /* Reset signal handler - necessary for SysV, does no harm for BSD */
117         
118         act.sa_sigaction = catch_NullPointerException;       /* reinstall handler */
119         act.sa_flags = SA_SIGINFO;
120         sigaction(sig, &act, NULL);
121         
122         sigemptyset(&nsig);
123         sigaddset(&nsig, sig);
124         sigprocmask(SIG_UNBLOCK, &nsig, NULL);               /* unblock signal    */
125
126         xptr = new_nullpointerexception();
127
128         sigctx->rax = (u8) xptr;                             /* REG_ITMP1_XPTR    */
129         sigctx->r10 = sigctx->rip;                           /* REG_ITMP2_XPC     */
130         sigctx->rip = (u8) asm_handle_exception;
131
132         return;
133 }
134
135
136 /* ArithmeticException signal handler for hardware divide by zero check */
137
138 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
139 {
140         sigset_t nsig;
141
142         struct ucontext *_uc = (struct ucontext *) _p;
143         struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
144         struct sigaction act;
145         java_objectheader *xptr;
146
147         /* Reset signal handler - necessary for SysV, does no harm for BSD */
148
149         act.sa_sigaction = catch_ArithmeticException;        /* reinstall handler */
150         act.sa_flags = SA_SIGINFO;
151         sigaction(sig, &act, NULL);
152
153         sigemptyset(&nsig);
154         sigaddset(&nsig, sig);
155         sigprocmask(SIG_UNBLOCK, &nsig, NULL);               /* unblock signal    */
156
157         xptr = new_arithmeticexception();
158
159         sigctx->rax = (u8) xptr;                             /* REG_ITMP1_XPTR    */
160         sigctx->r10 = sigctx->rip;                           /* REG_ITMP2_XPC     */
161         sigctx->rip = (u8) asm_handle_exception;
162
163         return;
164 }
165
166
167 void init_exceptions(void)
168 {
169         struct sigaction act;
170
171         /* install signal handlers we need to convert to exceptions */
172         sigemptyset(&act.sa_mask);
173
174         if (!checknull) {
175 #if defined(SIGSEGV)
176                 act.sa_sigaction = catch_NullPointerException;
177                 act.sa_flags = SA_SIGINFO;
178                 sigaction(SIGSEGV, &act, NULL);
179 #endif
180
181 #if defined(SIGBUS)
182                 act.sa_sigaction = catch_NullPointerException;
183                 act.sa_flags = SA_SIGINFO;
184                 sigaction(SIGBUS, &act, NULL);
185 #endif
186         }
187
188         act.sa_sigaction = catch_ArithmeticException;
189         act.sa_flags = SA_SIGINFO;
190         sigaction(SIGFPE, &act, NULL);
191 }
192
193
194 /* function codegen ************************************************************
195
196         generates machine code
197
198 *******************************************************************************/
199
200 void codegen(methodinfo *m, codegendata *cd, registerdata *rd)
201 {
202         s4 len, s1, s2, s3, d;
203         s8 a;
204         s4 parentargs_base;
205         stackptr        src;
206         varinfo        *var;
207         basicblock     *bptr;
208         instruction    *iptr;
209         exceptiontable *ex;
210
211         {
212         s4 i, p, pa, t, l;
213         s4 savedregs_num;
214
215         savedregs_num = 0;
216
217         /* space to save used callee saved registers */
218
219         savedregs_num += (rd->savintregcnt - rd->maxsavintreguse);
220         savedregs_num += (rd->savfltregcnt - rd->maxsavfltreguse);
221
222         parentargs_base = rd->maxmemuse + savedregs_num;
223
224 #if defined(USE_THREADS)           /* space to save argument of monitor_enter */
225
226         if (checksync && (m->flags & ACC_SYNCHRONIZED))
227                 parentargs_base++;
228
229 #endif
230
231         /* Keep stack of non-leaf functions 16-byte aligned for calls into native */
232         /* code e.g. libc or jni (alignment problems with movaps).                */
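        /* (An odd slot count plus the caller's 8-byte return address keeps the   */
        /* stack pointer on a 16-byte boundary while this method runs.)           */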
233
234         if (!m->isleafmethod || runverbose) {
235                 parentargs_base |= 0x1;
236         }
237
238         /* create method header */
239
240         (void) dseg_addaddress(cd, m);                          /* MethodPointer  */
241         (void) dseg_adds4(cd, parentargs_base * 8);             /* FrameSize      */
242
243 #if defined(USE_THREADS)
244
245         /* IsSync contains the offset relative to the stack pointer for the
246            argument of monitor_exit used in the exception handler. Since the
247            offset could be zero and give a wrong meaning of the flag it is
248            offset by one.
249         */
250
251         if (checksync && (m->flags & ACC_SYNCHRONIZED))
252                 (void) dseg_adds4(cd, (rd->maxmemuse + 1) * 8);     /* IsSync         */
253         else
254
255 #endif
256
257                 (void) dseg_adds4(cd, 0);                           /* IsSync         */
258                                                
259         (void) dseg_adds4(cd, m->isleafmethod);                 /* IsLeaf         */
260         (void) dseg_adds4(cd, rd->savintregcnt - rd->maxsavintreguse);/* IntSave  */
261         (void) dseg_adds4(cd, rd->savfltregcnt - rd->maxsavfltreguse);/* FltSave  */
262         (void) dseg_adds4(cd, cd->exceptiontablelength);        /* ExTableSize    */
263
264         /* create exception table */
265
266         for (ex = cd->exceptiontable; ex != NULL; ex = ex->down) {
267                 dseg_addtarget(cd, ex->start);
268                 dseg_addtarget(cd, ex->end);
269                 dseg_addtarget(cd, ex->handler);
270                 (void) dseg_addaddress(cd, ex->catchtype.cls);
271         }
272         
273         /* initialize mcode variables */
274         
275         cd->mcodeptr = (u1 *) cd->mcodebase;
276         cd->mcodeend = (s4 *) (cd->mcodebase + cd->mcodesize);
277         MCODECHECK(128 + m->paramcount);
278
279         /* create stack frame (if necessary) */
280
281         if (parentargs_base) {
282                 x86_64_alu_imm_reg(cd, X86_64_SUB, parentargs_base * 8, REG_SP);
283         }
284
285         /* save used callee saved registers */
286
287         p = parentargs_base;
288         for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
289                 p--; x86_64_mov_reg_membase(cd, rd->savintregs[i], REG_SP, p * 8);
290         }
291         for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
292                 p--; x86_64_movq_reg_membase(cd, rd->savfltregs[i], REG_SP, p * 8);
293         }
294
295         /* take arguments out of register or stack frame */
296
297         for (p = 0, l = 0, s1 = 0, s2 = 0; p < m->paramcount; p++) {
298                 t = m->paramtypes[p];
299                 var = &(rd->locals[l][t]);
300                 l++;
301                 if (IS_2_WORD_TYPE(t))    /* increment local counter for 2 word types */
302                         l++;
303                 if (var->type < 0) {
304                         if (IS_INT_LNG_TYPE(t)) {
305                                 s1++;
306                         } else {
307                                 s2++;
308                         }
309                         continue;
310                 }
311                 if (IS_INT_LNG_TYPE(t)) {                    /* integer args          */
312                         if (s1 < INT_ARG_CNT) {                  /* register arguments    */
313                                 if (!(var->flags & INMEMORY)) {      /* reg arg -> register   */
314                                         M_INTMOVE(rd->argintregs[s1], var->regoff);
315
316                                 } else {                             /* reg arg -> spilled    */
317                                     x86_64_mov_reg_membase(cd, rd->argintregs[s1], REG_SP, var->regoff * 8);
318                                 }
319
320                         } else {                                 /* stack arguments       */
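                                /* pa = number of arguments already passed on    */
                                /* the stack; integer and float stack arguments  */
                                /* share one area                                 */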
321                                 pa = s1 - INT_ARG_CNT;
322                                 if (s2 >= FLT_ARG_CNT) {
323                                         pa += s2 - FLT_ARG_CNT;
324                                 }
325                                 if (!(var->flags & INMEMORY)) {      /* stack arg -> register */
326                                         x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff);    /* + 8 for return address */
327                                 } else {                             /* stack arg -> spilled  */
328                                         x86_64_mov_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1);    /* + 8 for return address */
329                                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, var->regoff * 8);
330                                 }
331                         }
332                         s1++;
333
334                 } else {                                     /* floating args         */
335                         if (s2 < FLT_ARG_CNT) {                  /* register arguments    */
336                                 if (!(var->flags & INMEMORY)) {      /* reg arg -> register   */
337                                         M_FLTMOVE(rd->argfltregs[s2], var->regoff);
338
339                                 } else {                                         /* reg arg -> spilled    */
340                                         x86_64_movq_reg_membase(cd, rd->argfltregs[s2], REG_SP, var->regoff * 8);
341                                 }
342
343                         } else {                                 /* stack arguments       */
344                                 pa = s2 - FLT_ARG_CNT;
345                                 if (s1 >= INT_ARG_CNT) {
346                                         pa += s1 - INT_ARG_CNT;
347                                 }
348                                 if (!(var->flags & INMEMORY)) {      /* stack-arg -> register */
349                                         x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, var->regoff);
350
351                                 } else {
352                                         x86_64_movq_membase_reg(cd, REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
353                                         x86_64_movq_reg_membase(cd, REG_FTMP1, REG_SP, var->regoff * 8);
354                                 }
355                         }
356                         s2++;
357                 }
358         }  /* end for */
359
360         /* save monitorenter argument */
361
362 #if defined(USE_THREADS)
363         if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
364                 u8 func_enter;
365
366                 if (m->flags & ACC_STATIC) {
367                         func_enter = (u8) builtin_staticmonitorenter;
368                         x86_64_mov_imm_reg(cd, (s8) m->class, REG_ITMP1);
369                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, rd->maxmemuse * 8);
370
371                 } else {
372                         func_enter = (u8) builtin_monitorenter;
373                         x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, rd->maxmemuse * 8);
374                 }
375
376                 /* call monitorenter function */
377
378                 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
379                 x86_64_mov_imm_reg(cd, func_enter, REG_ITMP1);
380                 x86_64_call_reg(cd, REG_ITMP1);
381         }
382 #endif
383
384         /* Copy argument registers to stack and call trace function with pointer  */
385         /* to arguments on stack.                                                 */
386
387         if (runverbose) {
388                 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1 + 1) * 8, REG_SP);
389
390                 /* save integer argument registers */
391
392                 for (p = 0; p < INT_ARG_CNT; p++) {
393                         x86_64_mov_reg_membase(cd, rd->argintregs[p], REG_SP, (1 + p) * 8);
394                 }
395
396                 /* save float argument registers */
397
398                 for (p = 0; p < FLT_ARG_CNT; p++) {
399                         x86_64_movq_reg_membase(cd, rd->argfltregs[p], REG_SP, (1 + INT_ARG_CNT + p) * 8);
400                 }
401
402                 /* make the bit pattern of float arguments visible in integer registers */
403
404                 for (p = 0, l = 0; p < m->paramcount && p < INT_ARG_CNT; p++) {
405                         t = m->paramtypes[p];
406
407                         /* if the parameter is a float/double, shift the following integer */
408                         /* argument registers one slot right to make room for its bits     */
409
410                         if (IS_FLT_DBL_TYPE(t)) {
411                                 for (s1 = INT_ARG_CNT - 2; s1 >= p; s1--) {
412                                         x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
413                                 }
414
415                                 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[p]);
416                                 l++;
417                         }
418                 }
419
420                 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP2);
421                 x86_64_mov_reg_membase(cd, REG_ITMP2, REG_SP, 0 * 8);
422                 x86_64_mov_imm_reg(cd, (u8) builtin_trace_args, REG_ITMP1);
423                 x86_64_call_reg(cd, REG_ITMP1);
424
425                 /* restore integer argument registers */
426
427                 for (p = 0; p < INT_ARG_CNT; p++) {
428                         x86_64_mov_membase_reg(cd, REG_SP, (1 + p) * 8, rd->argintregs[p]);
429                 }
430
431                 /* restore float argument registers */
432
433                 for (p = 0; p < FLT_ARG_CNT; p++) {
434                         x86_64_movq_membase_reg(cd, REG_SP, (1 + INT_ARG_CNT + p) * 8, rd->argfltregs[p]);
435                 }
436
437                 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1 + 1) * 8, REG_SP);
438         }
439
440         }
441
442         /* end of header generation */
443
444         /* walk through all basic blocks */
445         for (bptr = m->basicblocks; bptr != NULL; bptr = bptr->next) {
446
447                 bptr->mpc = (u4) ((u1 *) cd->mcodeptr - cd->mcodebase);
448
449                 if (bptr->flags >= BBREACHED) {
450
451                         /* branch resolving */
452
453                         branchref *bref;
454                         for (bref = bptr->branchrefs; bref != NULL; bref = bref->next) {
455                                 gen_resolvebranch((u1 *) cd->mcodebase + bref->branchpos, 
456                                                                   bref->branchpos,
457                                                                   bptr->mpc);
458                         }
459
460                 /* copy interface registers to their destination */
461
462                 src = bptr->instack;
463                 len = bptr->indepth;
464                 MCODECHECK(64 + len);
465
466 #ifdef LSRA
467                 if (opt_lsra) {
468                         while (src != NULL) {
469                                 len--;
470                                 if ((len == 0) && (bptr->type != BBTYPE_STD)) {
471                                         if (bptr->type == BBTYPE_SBR) {
472                                                 /*                                      d = reg_of_var(rd, src, REG_ITMP1); */
473                                                 if (!(src->flags & INMEMORY))
474                                                         d = src->regoff;
475                                                 else
476                                                         d = REG_ITMP1;
477                                                 x86_64_pop_reg(cd, d);
478                                                 store_reg_to_var_int(src, d);
479
480                                         } else if (bptr->type == BBTYPE_EXH) {
481                                                 /*                                      d = reg_of_var(rd, src, REG_ITMP1); */
482                                                 if (!(src->flags & INMEMORY))
483                                                         d = src->regoff;
484                                                 else
485                                                         d = REG_ITMP1;
486                                                 M_INTMOVE(REG_ITMP1, d);
487                                                 store_reg_to_var_int(src, d);
488                                         }
489                                 }
490                                 src = src->prev;
491                         }
492                         
493                 } else {
494 #endif
495
496         while (src != NULL) {
497                         len--;
498                         if ((len == 0) && (bptr->type != BBTYPE_STD)) {
499                                 if (bptr->type == BBTYPE_SBR) {
500                                         d = reg_of_var(rd, src, REG_ITMP1);
501                                         x86_64_pop_reg(cd, d);
502                                         store_reg_to_var_int(src, d);
503
504                                 } else if (bptr->type == BBTYPE_EXH) {
505                                         d = reg_of_var(rd, src, REG_ITMP1);
506                                         M_INTMOVE(REG_ITMP1, d);
507                                         store_reg_to_var_int(src, d);
508                                 }
509
510                         } else {
511                                 d = reg_of_var(rd, src, REG_ITMP1);
512                                 if ((src->varkind != STACKVAR)) {
513                                         s2 = src->type;
514                                         if (IS_FLT_DBL_TYPE(s2)) {
515                                                 s1 = rd->interfaces[len][s2].regoff;
516                                                 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
517                                                         M_FLTMOVE(s1, d);
518
519                                                 } else {
520                                                         x86_64_movq_membase_reg(cd, REG_SP, s1 * 8, d);
521                                                 }
522                                                 store_reg_to_var_flt(src, d);
523
524                                         } else {
525                                                 s1 = rd->interfaces[len][s2].regoff;
526                                                 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
527                                                         M_INTMOVE(s1, d);
528
529                                                 } else {
530                                                         x86_64_mov_membase_reg(cd, REG_SP, s1 * 8, d);
531                                                 }
532                                                 store_reg_to_var_int(src, d);
533                                         }
534                                 }
535                         }
536                         src = src->prev;
537                 }
538 #ifdef LSRA
539                 }
540 #endif
541                 /* walk through all instructions */
542                 
543                 src = bptr->instack;
544                 len = bptr->icount;
545                 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
546
547                         MCODECHECK(64);   /* an instruction usually needs < 64 words      */
548                         switch (iptr->opc) {
549                         case ICMD_INLINE_START: /* internal ICMDs                         */
550                         case ICMD_INLINE_END:
551                                 break;
552
553                         case ICMD_NOP:    /* ...  ==> ...                                 */
554                                 break;
555
556                         case ICMD_NULLCHECKPOP: /* ..., objectref  ==> ...                */
557                                 if (src->flags & INMEMORY) {
558                                         x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
559
560                                 } else {
561                                         x86_64_test_reg_reg(cd, src->regoff, src->regoff);
562                                 }
563                                 x86_64_jcc(cd, X86_64_CC_E, 0);
564                                 codegen_addxnullrefs(cd, cd->mcodeptr);
565                                 break;
566
567                 /* constant operations ************************************************/
568
569                 case ICMD_ICONST:     /* ...  ==> ..., constant                       */
570                                       /* op1 = 0, val.i = constant                    */
571
572                         d = reg_of_var(rd, iptr->dst, REG_ITMP1);
573                         if (iptr->val.i == 0) {
574                                 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
575                         } else {
576                                 x86_64_movl_imm_reg(cd, iptr->val.i, d);
577                         }
578                         store_reg_to_var_int(iptr->dst, d);
579                         break;
580
581                 case ICMD_ACONST:     /* ...  ==> ..., constant                       */
582                                       /* op1 = 0, val.a = constant                    */
583
584                         d = reg_of_var(rd, iptr->dst, REG_ITMP1);
585                         if (iptr->val.a == 0) {
586                                 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
587                         } else {
588                                 x86_64_mov_imm_reg(cd, (s8) iptr->val.a, d);
589                         }
590                         store_reg_to_var_int(iptr->dst, d);
591                         break;
592
593                 case ICMD_LCONST:     /* ...  ==> ..., constant                       */
594                                       /* op1 = 0, val.l = constant                    */
595
596                         d = reg_of_var(rd, iptr->dst, REG_ITMP1);
597                         if (iptr->val.l == 0) {
598                                 x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
599                         } else {
600                                 x86_64_mov_imm_reg(cd, iptr->val.l, d);
601                         }
602                         store_reg_to_var_int(iptr->dst, d);
603                         break;
604
605                 case ICMD_FCONST:     /* ...  ==> ..., constant                       */
606                                       /* op1 = 0, val.f = constant                    */
607
608                         d = reg_of_var(rd, iptr->dst, REG_FTMP1);
609                         a = dseg_addfloat(cd, iptr->val.f);
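                        /* a is a negative offset: the data segment lies just below          */
                        /* mcodebase.  The displacement is computed from the end of this     */
                        /* load (8 bytes, or 9 with the extra REX byte for xmm8..xmm15),     */
                        /* because RIP-relative addressing uses the next instruction's PC.   */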
610                         x86_64_movdl_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + ((d > 7) ? 9 : 8)) - (s8) cd->mcodebase) + a, d);
611                         store_reg_to_var_flt(iptr->dst, d);
612                         break;
613                 
614                 case ICMD_DCONST:     /* ...  ==> ..., constant                       */
615                                       /* op1 = 0, val.d = constant                    */
616
617                         d = reg_of_var(rd, iptr->dst, REG_FTMP1);
618                         a = dseg_adddouble(cd, iptr->val.d);
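                        /* same RIP-relative scheme as FCONST; the 64-bit load always        */
                        /* carries a REX.W prefix, so it is 9 bytes for every register       */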
619                         x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, d);
620                         store_reg_to_var_flt(iptr->dst, d);
621                         break;
622
623
624                 /* load/store operations **********************************************/
625
626                 case ICMD_ILOAD:      /* ...  ==> ..., content of local variable      */
627                                       /* op1 = local variable                         */
628
629                         d = reg_of_var(rd, iptr->dst, REG_ITMP1);
630                         if ((iptr->dst->varkind == LOCALVAR) &&
631                             (iptr->dst->varnum == iptr->op1)) {
632                                 break;
633                         }
634                         var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
635                         if (var->flags & INMEMORY) {
636                                 x86_64_movl_membase_reg(cd, REG_SP, var->regoff * 8, d);
637                                 store_reg_to_var_int(iptr->dst, d);
638
639                         } else {
640                                 if (iptr->dst->flags & INMEMORY) {
641                                         x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
642
643                                 } else {
644                                         M_INTMOVE(var->regoff, d);
645                                 }
646                         }
647                         break;
648
649                 case ICMD_LLOAD:      /* ...  ==> ..., content of local variable      */
650                 case ICMD_ALOAD:      /* op1 = local variable                         */
651
652                         d = reg_of_var(rd, iptr->dst, REG_ITMP1);
653                         if ((iptr->dst->varkind == LOCALVAR) &&
654                             (iptr->dst->varnum == iptr->op1)) {
655                                 break;
656                         }
657                         var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
658                         if (var->flags & INMEMORY) {
659                                 x86_64_mov_membase_reg(cd, REG_SP, var->regoff * 8, d);
660                                 store_reg_to_var_int(iptr->dst, d);
661
662                         } else {
663                                 if (iptr->dst->flags & INMEMORY) {
664                                         x86_64_mov_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
665
666                                 } else {
667                                         M_INTMOVE(var->regoff, d);
668                                 }
669                         }
670                         break;
671
672                 case ICMD_FLOAD:      /* ...  ==> ..., content of local variable      */
673                 case ICMD_DLOAD:      /* op1 = local variable                         */
674
675                         d = reg_of_var(rd, iptr->dst, REG_FTMP1);
676                         if ((iptr->dst->varkind == LOCALVAR) &&
677                             (iptr->dst->varnum == iptr->op1)) {
678                                 break;
679                         }
680                         var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
681                         if (var->flags & INMEMORY) {
682                                 x86_64_movq_membase_reg(cd, REG_SP, var->regoff * 8, d);
683                                 store_reg_to_var_flt(iptr->dst, d);
684
685                         } else {
686                                 if (iptr->dst->flags & INMEMORY) {
687                                         x86_64_movq_reg_membase(cd, var->regoff, REG_SP, iptr->dst->regoff * 8);
688
689                                 } else {
690                                         M_FLTMOVE(var->regoff, d);
691                                 }
692                         }
693                         break;
694
695                 case ICMD_ISTORE:     /* ..., value  ==> ...                          */
696                 case ICMD_LSTORE:     /* op1 = local variable                         */
697                 case ICMD_ASTORE:
698
699                         if ((src->varkind == LOCALVAR) &&
700                             (src->varnum == iptr->op1)) {
701                                 break;
702                         }
703                         var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
704                         if (var->flags & INMEMORY) {
705                                 var_to_reg_int(s1, src, REG_ITMP1);
706                                 x86_64_mov_reg_membase(cd, s1, REG_SP, var->regoff * 8);
707
708                         } else {
709                                 var_to_reg_int(s1, src, var->regoff);
710                                 M_INTMOVE(s1, var->regoff);
711                         }
712                         break;
713
714                 case ICMD_FSTORE:     /* ..., value  ==> ...                          */
715                 case ICMD_DSTORE:     /* op1 = local variable                         */
716
717                         if ((src->varkind == LOCALVAR) &&
718                             (src->varnum == iptr->op1)) {
719                                 break;
720                         }
721                         var = &(rd->locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
722                         if (var->flags & INMEMORY) {
723                                 var_to_reg_flt(s1, src, REG_FTMP1);
724                                 x86_64_movq_reg_membase(cd, s1, REG_SP, var->regoff * 8);
725
726                         } else {
727                                 var_to_reg_flt(s1, src, var->regoff);
728                                 M_FLTMOVE(s1, var->regoff);
729                         }
730                         break;
731
732
733                 /* pop/dup/swap operations ********************************************/
734
735                 /* attention: doubles and longs occupy only one entry in CACAO ICMDs  */
736
737                 case ICMD_POP:        /* ..., value  ==> ...                          */
738                 case ICMD_POP2:       /* ..., value, value  ==> ...                   */
739                         break;
740
741                 case ICMD_DUP:        /* ..., a ==> ..., a, a                         */
742                         M_COPY(src, iptr->dst);
743                         break;
744
745                 case ICMD_DUP_X1:     /* ..., a, b ==> ..., b, a, b                   */
746
747                         M_COPY(src,       iptr->dst);
748                         M_COPY(src->prev, iptr->dst->prev);
749                         M_COPY(iptr->dst, iptr->dst->prev->prev);
750                         break;
751
752                 case ICMD_DUP_X2:     /* ..., a, b, c ==> ..., c, a, b, c             */
753
754                         M_COPY(src,             iptr->dst);
755                         M_COPY(src->prev,       iptr->dst->prev);
756                         M_COPY(src->prev->prev, iptr->dst->prev->prev);
757                         M_COPY(iptr->dst,       iptr->dst->prev->prev->prev);
758                         break;
759
760                 case ICMD_DUP2:       /* ..., a, b ==> ..., a, b, a, b                */
761
762                         M_COPY(src,       iptr->dst);
763                         M_COPY(src->prev, iptr->dst->prev);
764                         break;
765
766                 case ICMD_DUP2_X1:    /* ..., a, b, c ==> ..., b, c, a, b, c          */
767
768                         M_COPY(src,             iptr->dst);
769                         M_COPY(src->prev,       iptr->dst->prev);
770                         M_COPY(src->prev->prev, iptr->dst->prev->prev);
771                         M_COPY(iptr->dst,       iptr->dst->prev->prev->prev);
772                         M_COPY(iptr->dst->prev, iptr->dst->prev->prev->prev->prev);
773                         break;
774
775                 case ICMD_DUP2_X2:    /* ..., a, b, c, d ==> ..., c, d, a, b, c, d    */
776
777                         M_COPY(src,                   iptr->dst);
778                         M_COPY(src->prev,             iptr->dst->prev);
779                         M_COPY(src->prev->prev,       iptr->dst->prev->prev);
780                         M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
781                         M_COPY(iptr->dst,             iptr->dst->prev->prev->prev->prev);
782                         M_COPY(iptr->dst->prev,       iptr->dst->prev->prev->prev->prev->prev);
783                         break;
784
785                 case ICMD_SWAP:       /* ..., a, b ==> ..., b, a                      */
786
787                         M_COPY(src,       iptr->dst->prev);
788                         M_COPY(src->prev, iptr->dst);
789                         break;
790
791
792                 /* integer operations *************************************************/
793
794                 case ICMD_INEG:       /* ..., value  ==> ..., - value                 */
795
796                         d = reg_of_var(rd, iptr->dst, REG_NULL);
797                         if (iptr->dst->flags & INMEMORY) {
798                                 if (src->flags & INMEMORY) {
799                                         if (src->regoff == iptr->dst->regoff) {
800                                                 x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
801
802                                         } else {
803                                                 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
804                                                 x86_64_negl_reg(cd, REG_ITMP1);
805                                                 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
806                                         }
807
808                                 } else {
809                                         x86_64_movl_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
810                                         x86_64_negl_membase(cd, REG_SP, iptr->dst->regoff * 8);
811                                 }
812
813                         } else {
814                                 if (src->flags & INMEMORY) {
815                                         x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
816                                         x86_64_negl_reg(cd, d);
817
818                                 } else {
819                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
820                                         x86_64_negl_reg(cd, iptr->dst->regoff);
821                                 }
822                         }
823                         break;
824
825                 case ICMD_LNEG:       /* ..., value  ==> ..., - value                 */
826
827                         d = reg_of_var(rd, iptr->dst, REG_NULL);
828                         if (iptr->dst->flags & INMEMORY) {
829                                 if (src->flags & INMEMORY) {
830                                         if (src->regoff == iptr->dst->regoff) {
831                                                 x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
832
833                                         } else {
834                                                 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
835                                                 x86_64_neg_reg(cd, REG_ITMP1);
836                                                 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
837                                         }
838
839                                 } else {
840                                         x86_64_mov_reg_membase(cd, src->regoff, REG_SP, iptr->dst->regoff * 8);
841                                         x86_64_neg_membase(cd, REG_SP, iptr->dst->regoff * 8);
842                                 }
843
844                         } else {
845                                 if (src->flags & INMEMORY) {
846                                         x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
847                                         x86_64_neg_reg(cd, iptr->dst->regoff);
848
849                                 } else {
850                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
851                                         x86_64_neg_reg(cd, iptr->dst->regoff);
852                                 }
853                         }
854                         break;
855
856                 case ICMD_I2L:        /* ..., value  ==> ..., value                   */
857
858                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
859                         if (src->flags & INMEMORY) {
860                                 x86_64_movslq_membase_reg(cd, REG_SP, src->regoff * 8, d);
861
862                         } else {
863                                 x86_64_movslq_reg_reg(cd, src->regoff, d);
864                         }
865                         store_reg_to_var_int(iptr->dst, d);
866                         break;
867
868                 case ICMD_L2I:        /* ..., value  ==> ..., value                   */
869
870                         var_to_reg_int(s1, src, REG_ITMP1);
871                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
872                         M_INTMOVE(s1, d);
873                         store_reg_to_var_int(iptr->dst, d);
874                         break;
875
876                 case ICMD_INT2BYTE:   /* ..., value  ==> ..., value                   */
877
878                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
879                         if (src->flags & INMEMORY) {
880                                 x86_64_movsbq_membase_reg(cd, REG_SP, src->regoff * 8, d);
881
882                         } else {
883                                 x86_64_movsbq_reg_reg(cd, src->regoff, d);
884                         }
885                         store_reg_to_var_int(iptr->dst, d);
886                         break;
887
888                 case ICMD_INT2CHAR:   /* ..., value  ==> ..., value                   */
889
890                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
891                         if (src->flags & INMEMORY) {
892                                 x86_64_movzwq_membase_reg(cd, REG_SP, src->regoff * 8, d);
893
894                         } else {
895                                 x86_64_movzwq_reg_reg(cd, src->regoff, d);
896                         }
897                         store_reg_to_var_int(iptr->dst, d);
898                         break;
899
900                 case ICMD_INT2SHORT:  /* ..., value  ==> ..., value                   */
901
902                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
903                         if (src->flags & INMEMORY) {
904                                 x86_64_movswq_membase_reg(cd, REG_SP, src->regoff * 8, d);
905
906                         } else {
907                                 x86_64_movswq_reg_reg(cd, src->regoff, d);
908                         }
909                         store_reg_to_var_int(iptr->dst, d);
910                         break;
911
912
913                 case ICMD_IADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
914
915                         d = reg_of_var(rd, iptr->dst, REG_NULL);
916                         x86_64_emit_ialu(cd, X86_64_ADD, src, iptr);
917                         break;
918
919                 case ICMD_IADDCONST:  /* ..., value  ==> ..., value + constant        */
920                                       /* val.i = constant                             */
921
922                         d = reg_of_var(rd, iptr->dst, REG_NULL);
923                         x86_64_emit_ialuconst(cd, X86_64_ADD, src, iptr);
924                         break;
925
926                 case ICMD_LADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
927
928                         d = reg_of_var(rd, iptr->dst, REG_NULL);
929                         x86_64_emit_lalu(cd, X86_64_ADD, src, iptr);
930                         break;
931
932                 case ICMD_LADDCONST:  /* ..., value  ==> ..., value + constant        */
933                                       /* val.l = constant                             */
934
935                         d = reg_of_var(rd, iptr->dst, REG_NULL);
936                         x86_64_emit_laluconst(cd, X86_64_ADD, src, iptr);
937                         break;
938
939                 case ICMD_ISUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
940
941                         d = reg_of_var(rd, iptr->dst, REG_NULL);
942                         if (iptr->dst->flags & INMEMORY) {
943                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
944                                         if (src->prev->regoff == iptr->dst->regoff) {
945                                                 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
946                                                 x86_64_alul_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
947
948                                         } else {
949                                                 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
950                                                 x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
951                                                 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
952                                         }
953
954                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
955                                         M_INTMOVE(src->prev->regoff, REG_ITMP1);
956                                         x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
957                                         x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
958
959                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
960                                         if (src->prev->regoff == iptr->dst->regoff) {
961                                                 x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
962
963                                         } else {
964                                                 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
965                                                 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
966                                                 x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
967                                         }
968
969                                 } else {
970                                         x86_64_movl_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
971                                         x86_64_alul_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
972                                 }
973
974                         } else {
975                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
976                                         x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
977                                         x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
978
979                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
980                                         M_INTMOVE(src->prev->regoff, d);
981                                         x86_64_alul_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
982
983                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
984                                         /* workaround for reg alloc */
985                                         if (src->regoff == iptr->dst->regoff) {
986                                                 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
987                                                 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
988                                                 M_INTMOVE(REG_ITMP1, d);
989
990                                         } else {
991                                                 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
992                                                 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
993                                         }
994
995                                 } else {
996                                         /* workaround for reg alloc */
997                                         if (src->regoff == iptr->dst->regoff) {
998                                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
999                                                 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1000                                                 M_INTMOVE(REG_ITMP1, d);
1001
1002                                         } else {
1003                                                 M_INTMOVE(src->prev->regoff, d);
1004                                                 x86_64_alul_reg_reg(cd, X86_64_SUB, src->regoff, d);
1005                                         }
1006                                 }
1007                         }
1008                         break;
1009
1010                 case ICMD_ISUBCONST:  /* ..., value  ==> ..., value + constant        */
1011                                       /* val.i = constant                             */
1012
1013                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1014                         x86_64_emit_ialuconst(cd, X86_64_SUB, src, iptr);
1015                         break;
1016
1017                 case ICMD_LSUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
1018
1019                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1020                         if (iptr->dst->flags & INMEMORY) {
1021                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1022                                         if (src->prev->regoff == iptr->dst->regoff) {
1023                                                 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1024                                                 x86_64_alu_reg_membase(cd, X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1025
1026                                         } else {
1027                                                 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1028                                                 x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1029                                                 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1030                                         }
1031
1032                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1033                                         M_INTMOVE(src->prev->regoff, REG_ITMP1);
1034                                         x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1035                                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1036
1037                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1038                                         if (src->prev->regoff == iptr->dst->regoff) {
1039                                                 x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1040
1041                                         } else {
1042                                                 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1043                                                 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1044                                                 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1045                                         }
1046
1047                                 } else {
1048                                         x86_64_mov_reg_membase(cd, src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1049                                         x86_64_alu_reg_membase(cd, X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1050                                 }
1051
1052                         } else {
1053                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1054                                         x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1055                                         x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1056
1057                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1058                                         M_INTMOVE(src->prev->regoff, d);
1059                                         x86_64_alu_membase_reg(cd, X86_64_SUB, REG_SP, src->regoff * 8, d);
1060
1061                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1062                                         /* workaround for reg alloc */
1063                                         if (src->regoff == iptr->dst->regoff) {
1064                                                 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1065                                                 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1066                                                 M_INTMOVE(REG_ITMP1, d);
1067
1068                                         } else {
1069                                                 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, d);
1070                                                 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1071                                         }
1072
1073                                 } else {
1074                                         /* workaround for reg alloc */
1075                                         if (src->regoff == iptr->dst->regoff) {
1076                                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1077                                                 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, REG_ITMP1);
1078                                                 M_INTMOVE(REG_ITMP1, d);
1079
1080                                         } else {
1081                                                 M_INTMOVE(src->prev->regoff, d);
1082                                                 x86_64_alu_reg_reg(cd, X86_64_SUB, src->regoff, d);
1083                                         }
1084                                 }
1085                         }
1086                         break;
1087
1088                 case ICMD_LSUBCONST:  /* ..., value  ==> ..., value - constant        */
1089                                       /* val.l = constant                             */
1090
1091                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1092                         x86_64_emit_laluconst(cd, X86_64_SUB, src, iptr);
1093                         break;
1094
1095                 case ICMD_IMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1096
1097                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1098                         if (iptr->dst->flags & INMEMORY) {
1099                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1100                                         x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1101                                         x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1102                                         x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1103
1104                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1105                                         x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1106                                         x86_64_imull_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1107                                         x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1108
1109                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1110                                         x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1111                                         x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1112                                         x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1113
1114                                 } else {
1115                                         M_INTMOVE(src->prev->regoff, REG_ITMP1);
1116                                         x86_64_imull_reg_reg(cd, src->regoff, REG_ITMP1);
1117                                         x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1118                                 }
1119
1120                         } else {
1121                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1122                                         x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1123                                         x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1124
1125                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1126                                         M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1127                                         x86_64_imull_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1128
1129                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1130                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
1131                                         x86_64_imull_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1132
1133                                 } else {
1134                                         if (src->regoff == iptr->dst->regoff) {
1135                                                 x86_64_imull_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1136
1137                                         } else {
1138                                                 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1139                                                 x86_64_imull_reg_reg(cd, src->regoff, iptr->dst->regoff);
1140                                         }
1141                                 }
1142                         }
1143                         break;
1144
1145                 case ICMD_IMULCONST:  /* ..., value  ==> ..., value * constant        */
1146                                       /* val.i = constant                             */
1147
1148                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1149                         if (iptr->dst->flags & INMEMORY) {
1150                                 if (src->flags & INMEMORY) {
1151                                         x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1152                                         x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1153
1154                                 } else {
1155                                         x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, REG_ITMP1);
1156                                         x86_64_movl_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1157                                 }
1158
1159                         } else {
1160                                 if (src->flags & INMEMORY) {
1161                                         x86_64_imull_imm_membase_reg(cd, iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1162
1163                                 } else {
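                                         /* strength-reduce multiplication by 2 to an
                                            add, which is cheaper than imul */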
1164                                         if (iptr->val.i == 2) {
1165                                                 M_INTMOVE(src->regoff, iptr->dst->regoff);
1166                                                 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1167
1168                                         } else {
1169                                                 x86_64_imull_imm_reg_reg(cd, iptr->val.i, src->regoff, iptr->dst->regoff);    /* 3 cycles */
1170                                         }
1171                                 }
1172                         }
1173                         break;
1174
1175                 case ICMD_LMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1176
1177                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1178                         if (iptr->dst->flags & INMEMORY) {
1179                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1180                                         x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1181                                         x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1182                                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1183
1184                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1185                                         x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1186                                         x86_64_imul_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1187                                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1188
1189                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1190                                         x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1191                                         x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1192                                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1193
1194                                 } else {
1195                                         x86_64_mov_reg_reg(cd, src->prev->regoff, REG_ITMP1);
1196                                         x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1197                                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1198                                 }
1199
1200                         } else {
1201                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1202                                         x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1203                                         x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1204
1205                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1206                                         M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1207                                         x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1208
1209                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1210                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
1211                                         x86_64_imul_membase_reg(cd, REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1212
1213                                 } else {
1214                                         if (src->regoff == iptr->dst->regoff) {
1215                                                 x86_64_imul_reg_reg(cd, src->prev->regoff, iptr->dst->regoff);
1216
1217                                         } else {
1218                                                 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1219                                                 x86_64_imul_reg_reg(cd, src->regoff, iptr->dst->regoff);
1220                                         }
1221                                 }
1222                         }
1223                         break;
1224
1225                 case ICMD_LMULCONST:  /* ..., value  ==> ..., value * constant        */
1226                                       /* val.l = constant                             */
1227
1228                         d = reg_of_var(rd, iptr->dst, REG_NULL);
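                         /* imul accepts at most a sign-extended 32-bit immediate, so
                            constants outside that range are first loaded into a
                            register with a 64-bit mov. */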
1229                         if (iptr->dst->flags & INMEMORY) {
1230                                 if (src->flags & INMEMORY) {
1231                                         if (IS_IMM32(iptr->val.l)) {
1232                                                 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1233
1234                                         } else {
1235                                                 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1236                                                 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP1);
1237                                         }
1238                                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1239                                         
1240                                 } else {
1241                                         if (IS_IMM32(iptr->val.l)) {
1242                                                 x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, REG_ITMP1);
1243
1244                                         } else {
1245                                                 x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1246                                                 x86_64_imul_reg_reg(cd, src->regoff, REG_ITMP1);
1247                                         }
1248                                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1249                                 }
1250
1251                         } else {
1252                                 if (src->flags & INMEMORY) {
1253                                         if (IS_IMM32(iptr->val.l)) {
1254                                                 x86_64_imul_imm_membase_reg(cd, iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1255
1256                                         } else {
1257                                                 x86_64_mov_imm_reg(cd, iptr->val.l, iptr->dst->regoff);
1258                                                 x86_64_imul_membase_reg(cd, REG_SP, src->regoff * 8, iptr->dst->regoff);
1259                                         }
1260
1261                                 } else {
1262                                         /* multiplication by 2 is common enough to deserve a special case */
1263                                         if (iptr->val.l == 2) {
1264                                                 M_INTMOVE(src->regoff, iptr->dst->regoff);
1265                                                 x86_64_alul_reg_reg(cd, X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1266
1267                                         } else {
1268                                                 if (IS_IMM32(iptr->val.l)) {
1269                                                         x86_64_imul_imm_reg_reg(cd, iptr->val.l, src->regoff, iptr->dst->regoff);    /* 4 cycles */
1270
1271                                                 } else {
1272                                                         x86_64_mov_imm_reg(cd, iptr->val.l, REG_ITMP1);
1273                                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
1274                                                         x86_64_imul_reg_reg(cd, REG_ITMP1, iptr->dst->regoff);
1275                                                 }
1276                                         }
1277                                 }
1278                         }
1279                         break;
1280
1281                 case ICMD_IDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1282
1283                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1284                         if (src->prev->flags & INMEMORY) {
1285                                 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1286
1287                         } else {
1288                                 M_INTMOVE(src->prev->regoff, RAX);
1289                         }
1290                         
1291                         if (src->flags & INMEMORY) {
1292                                 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1293
1294                         } else {
1295                                 M_INTMOVE(src->regoff, REG_ITMP3);
1296                         }
1297                         gen_div_check(src);
1298
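                         /* Overflow handling: if the dividend is 0x80000000
                            (Integer.MIN_VALUE) and the divisor is -1, idiv would raise
                            a divide error, while the JVM spec defines the quotient as
                            MIN_VALUE -- which is already in %eax, so the %rdx save and
                            the cltd/idiv sequence are skipped.  The jump displacements
                            are the hand-counted byte lengths of the skipped
                            instructions (see the trailing comments).  %rdx needs saving
                            because it is an argument register and idiv uses %edx:%eax;
                            the zero-divisor case is handled by gen_div_check above. */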
1299                         x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX);    /* check as described in jvm spec */
1300                         x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1301                         x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3);      /* 4 bytes */
1302                         x86_64_jcc(cd, X86_64_CC_E, 3 + 1 + 3);                  /* 6 bytes */
1303
1304                         x86_64_mov_reg_reg(cd, RDX, REG_ITMP2);    /* save %rdx, since it is an argument register */
1305                         x86_64_cltd(cd);
1306                         x86_64_idivl_reg(cd, REG_ITMP3);
1307
1308                         if (iptr->dst->flags & INMEMORY) {
1309                                 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1310                                 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX);    /* restore %rdx */
1311
1312                         } else {
1313                                 M_INTMOVE(RAX, iptr->dst->regoff);
1314
1315                                 if (iptr->dst->regoff != RDX) {
1316                                         x86_64_mov_reg_reg(cd, REG_ITMP2, RDX);    /* restore %rdx */
1317                                 }
1318                         }
1319                         break;
1320
1321                 case ICMD_IREM:       /* ..., val1, val2  ==> ..., val1 % val2        */
1322
1323                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1324                         if (src->prev->flags & INMEMORY) {
1325                                 x86_64_movl_membase_reg(cd, REG_SP, src->prev->regoff * 8, RAX);
1326
1327                         } else {
1328                                 M_INTMOVE(src->prev->regoff, RAX);
1329                         }
1330                         
1331                         if (src->flags & INMEMORY) {
1332                                 x86_64_movl_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1333
1334                         } else {
1335                                 M_INTMOVE(src->regoff, REG_ITMP3);
1336                         }
1337                         gen_div_check(src);
1338
1339                         x86_64_mov_reg_reg(cd, RDX, REG_ITMP2);    /* save %rdx, since it is an argument register */
1340
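                         /* Overflow handling: for MIN_VALUE % -1 idiv would fault, but
                            the JVM spec defines the remainder as 0, so %edx is cleared
                            and the cltd/idiv pair is skipped.  As above, the jump
                            displacements are the byte lengths of the skipped
                            instructions. */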
1341                         x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, RAX);    /* check as described in jvm spec */
1342                         x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1343
1344
1345                         x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX);           /* 2 bytes */
1346                         x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3);      /* 4 bytes */
1347                         x86_64_jcc(cd, X86_64_CC_E, 1 + 3);                      /* 6 bytes */
1348
1349                         x86_64_cltd(cd);
1350                         x86_64_idivl_reg(cd, REG_ITMP3);
1351
1352                         if (iptr->dst->flags & INMEMORY) {
1353                                 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1354                                 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX);    /* restore %rdx */
1355
1356                         } else {
1357                                 M_INTMOVE(RDX, iptr->dst->regoff);
1358
1359                                 if (iptr->dst->regoff != RDX) {
1360                                         x86_64_mov_reg_reg(cd, REG_ITMP2, RDX);    /* restore %rdx */
1361                                 }
1362                         }
1363                         break;
1364
1365                 case ICMD_IDIVPOW2:   /* ..., value  ==> ..., value >> constant       */
1366                                       /* val.i = constant                             */
1367
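                         /* Signed division by 2^val.i: an arithmetic shift alone would
                            round towards negative infinity, so for negative values the
                            bias (2^val.i - 1) is added first (lea computes value +
                            bias, cmovle selects it when value <= -1) and the result is
                            then shifted right, giving the round-towards-zero quotient
                            the JVM spec requires.  ICMD_LDIVPOW2 below is the 64-bit
                            variant of the same sequence. */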
1368                         var_to_reg_int(s1, src, REG_ITMP1);
1369                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1370                         M_INTMOVE(s1, REG_ITMP1);
1371                         x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1372                         x86_64_leal_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1373                         x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1374                         x86_64_shiftl_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1375                         x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1376                         store_reg_to_var_int(iptr->dst, d);
1377                         break;
1378
1379                 case ICMD_IREMPOW2:   /* ..., value  ==> ..., value % constant        */
1380                                       /* val.i = constant                             */
1381
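                         /* Remainder by a power of two; val.i is expected to hold the
                            mask 2^n - 1 (note the AND with ~val.i below).  The biased
                            value (value + mask for negative inputs, value itself
                            otherwise) is rounded down to a multiple of 2^n and
                            subtracted from the original value, yielding a remainder
                            with the sign of the dividend.  ICMD_LREMPOW2 below follows
                            the same pattern with 64-bit instructions. */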
1382                         var_to_reg_int(s1, src, REG_ITMP1);
1383                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1384                         M_INTMOVE(s1, REG_ITMP1);
1385                         x86_64_alul_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1386                         x86_64_leal_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1387                         x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1388                         x86_64_alul_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1389                         x86_64_alul_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1390                         x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1391                         store_reg_to_var_int(iptr->dst, d);
1392                         break;
1393
1394
1395                 case ICMD_LDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1396
1397                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1398                         if (src->prev->flags & INMEMORY) {
1399                                 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1400
1401                         } else {
1402                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1403                         }
1404                         
1405                         if (src->flags & INMEMORY) {
1406                                 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1407
1408                         } else {
1409                                 M_INTMOVE(src->regoff, REG_ITMP3);
1410                         }
1411                         gen_div_check(src);
1412
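                         /* 64-bit variant of the IDIV overflow check: cmp has no
                            64-bit immediate form, so Long.MIN_VALUE is first
                            materialized in REG_ITMP2.  If the dividend is MIN_VALUE and
                            the divisor is -1 the division is skipped and %rax already
                            holds the required result. */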
1413                         x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2);    /* check as described in jvm spec */
1414                         x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1415                         x86_64_jcc(cd, X86_64_CC_NE, 4 + 6);
1416                         x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3);          /* 4 bytes */
1417                         x86_64_jcc(cd, X86_64_CC_E, 3 + 2 + 3);                     /* 6 bytes */
1418
1419                         x86_64_mov_reg_reg(cd, RDX, REG_ITMP2);    /* save %rdx, since it is an argument register */
1420                         x86_64_cqto(cd);
1421                         x86_64_idiv_reg(cd, REG_ITMP3);
1422
1423                         if (iptr->dst->flags & INMEMORY) {
1424                                 x86_64_mov_reg_membase(cd, RAX, REG_SP, iptr->dst->regoff * 8);
1425                                 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX);    /* restore %rdx */
1426
1427                         } else {
1428                                 M_INTMOVE(RAX, iptr->dst->regoff);
1429
1430                                 if (iptr->dst->regoff != RDX) {
1431                                         x86_64_mov_reg_reg(cd, REG_ITMP2, RDX);    /* restore %rdx */
1432                                 }
1433                         }
1434                         break;
1435
1436                 case ICMD_LREM:       /* ..., val1, val2  ==> ..., val1 % val2        */
1437
1438                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1439                         if (src->prev->flags & INMEMORY) {
1440                                 x86_64_mov_membase_reg(cd, REG_SP, src->prev->regoff * 8, REG_ITMP1);
1441
1442                         } else {
1443                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1444                         }
1445                         
1446                         if (src->flags & INMEMORY) {
1447                                 x86_64_mov_membase_reg(cd, REG_SP, src->regoff * 8, REG_ITMP3);
1448
1449                         } else {
1450                                 M_INTMOVE(src->regoff, REG_ITMP3);
1451                         }
1452                         gen_div_check(src);
1453
1454                         x86_64_mov_reg_reg(cd, RDX, REG_ITMP2);    /* save %rdx, since it is an argument register */
1455
1456                         x86_64_mov_imm_reg(cd, 0x8000000000000000LL, REG_ITMP2);    /* check as described in jvm spec */
1457                         x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
1458                         x86_64_jcc(cd, X86_64_CC_NE, 2 + 4 + 6);
1459
1460
1461                         x86_64_alul_reg_reg(cd, X86_64_XOR, RDX, RDX);              /* 2 bytes */
1462                         x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP3);          /* 4 bytes */
1463                         x86_64_jcc(cd, X86_64_CC_E, 2 + 3);                         /* 6 bytes */
1464
1465                         x86_64_cqto(cd);
1466                         x86_64_idiv_reg(cd, REG_ITMP3);
1467
1468                         if (iptr->dst->flags & INMEMORY) {
1469                                 x86_64_mov_reg_membase(cd, RDX, REG_SP, iptr->dst->regoff * 8);
1470                                 x86_64_mov_reg_reg(cd, REG_ITMP2, RDX);    /* restore %rdx */
1471
1472                         } else {
1473                                 M_INTMOVE(RDX, iptr->dst->regoff);
1474
1475                                 if (iptr->dst->regoff != RDX) {
1476                                         x86_64_mov_reg_reg(cd, REG_ITMP2, RDX);    /* restore %rdx */
1477                                 }
1478                         }
1479                         break;
1480
1481                 case ICMD_LDIVPOW2:   /* ..., value  ==> ..., value >> constant       */
1482                                       /* val.i = constant                             */
1483
1484                         var_to_reg_int(s1, src, REG_ITMP1);
1485                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1486                         M_INTMOVE(s1, REG_ITMP1);
1487                         x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1488                         x86_64_lea_membase_reg(cd, REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1489                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1490                         x86_64_shift_imm_reg(cd, X86_64_SAR, iptr->val.i, REG_ITMP1);
1491                         x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1492                         store_reg_to_var_int(iptr->dst, d);
1493                         break;
1494
1495                 case ICMD_LREMPOW2:   /* ..., value  ==> ..., value % constant        */
1496                                       /* val.l = constant                             */
1497
1498                         var_to_reg_int(s1, src, REG_ITMP1);
1499                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1500                         M_INTMOVE(s1, REG_ITMP1);
1501                         x86_64_alu_imm_reg(cd, X86_64_CMP, -1, REG_ITMP1);
1502                         x86_64_lea_membase_reg(cd, REG_ITMP1, iptr->val.i, REG_ITMP2);
1503                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1504                         x86_64_alu_imm_reg(cd, X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1505                         x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP2, REG_ITMP1);
1506                         x86_64_mov_reg_reg(cd, REG_ITMP1, d);
1507                         store_reg_to_var_int(iptr->dst, d);
1508                         break;
1509
1510                 case ICMD_ISHL:       /* ..., val1, val2  ==> ..., val1 << val2       */
1511
1512                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1513                         x86_64_emit_ishift(cd, X86_64_SHL, src, iptr);
1514                         break;
1515
1516                 case ICMD_ISHLCONST:  /* ..., value  ==> ..., value << constant       */
1517                                       /* val.i = constant                             */
1518
1519                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1520                         x86_64_emit_ishiftconst(cd, X86_64_SHL, src, iptr);
1521                         break;
1522
1523                 case ICMD_ISHR:       /* ..., val1, val2  ==> ..., val1 >> val2       */
1524
1525                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1526                         x86_64_emit_ishift(cd, X86_64_SAR, src, iptr);
1527                         break;
1528
1529                 case ICMD_ISHRCONST:  /* ..., value  ==> ..., value >> constant       */
1530                                       /* val.i = constant                             */
1531
1532                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1533                         x86_64_emit_ishiftconst(cd, X86_64_SAR, src, iptr);
1534                         break;
1535
1536                 case ICMD_IUSHR:      /* ..., val1, val2  ==> ..., val1 >>> val2      */
1537
1538                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1539                         x86_64_emit_ishift(cd, X86_64_SHR, src, iptr);
1540                         break;
1541
1542                 case ICMD_IUSHRCONST: /* ..., value  ==> ..., value >>> constant      */
1543                                       /* val.i = constant                             */
1544
1545                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1546                         x86_64_emit_ishiftconst(cd, X86_64_SHR, src, iptr);
1547                         break;
1548
1549                 case ICMD_LSHL:       /* ..., val1, val2  ==> ..., val1 << val2       */
1550
1551                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1552                         x86_64_emit_lshift(cd, X86_64_SHL, src, iptr);
1553                         break;
1554
1555                 case ICMD_LSHLCONST:  /* ..., value  ==> ..., value << constant       */
1556                                       /* val.i = constant                             */
1557
1558                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1559                         x86_64_emit_lshiftconst(cd, X86_64_SHL, src, iptr);
1560                         break;
1561
1562                 case ICMD_LSHR:       /* ..., val1, val2  ==> ..., val1 >> val2       */
1563
1564                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1565                         x86_64_emit_lshift(cd, X86_64_SAR, src, iptr);
1566                         break;
1567
1568                 case ICMD_LSHRCONST:  /* ..., value  ==> ..., value >> constant       */
1569                                       /* val.i = constant                             */
1570
1571                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1572                         x86_64_emit_lshiftconst(cd, X86_64_SAR, src, iptr);
1573                         break;
1574
1575                 case ICMD_LUSHR:      /* ..., val1, val2  ==> ..., val1 >>> val2      */
1576
1577                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1578                         x86_64_emit_lshift(cd, X86_64_SHR, src, iptr);
1579                         break;
1580
1581                 case ICMD_LUSHRCONST: /* ..., value  ==> ..., value >>> constant      */
1582                                       /* val.l = constant                             */
1583
1584                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1585                         x86_64_emit_lshiftconst(cd, X86_64_SHR, src, iptr);
1586                         break;
1587
1588                 case ICMD_IAND:       /* ..., val1, val2  ==> ..., val1 & val2        */
1589
1590                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1591                         x86_64_emit_ialu(cd, X86_64_AND, src, iptr);
1592                         break;
1593
1594                 case ICMD_IANDCONST:  /* ..., value  ==> ..., value & constant        */
1595                                       /* val.i = constant                             */
1596
1597                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1598                         x86_64_emit_ialuconst(cd, X86_64_AND, src, iptr);
1599                         break;
1600
1601                 case ICMD_LAND:       /* ..., val1, val2  ==> ..., val1 & val2        */
1602
1603                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1604                         x86_64_emit_lalu(cd, X86_64_AND, src, iptr);
1605                         break;
1606
1607                 case ICMD_LANDCONST:  /* ..., value  ==> ..., value & constant        */
1608                                       /* val.l = constant                             */
1609
1610                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1611                         x86_64_emit_laluconst(cd, X86_64_AND, src, iptr);
1612                         break;
1613
1614                 case ICMD_IOR:        /* ..., val1, val2  ==> ..., val1 | val2        */
1615
1616                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1617                         x86_64_emit_ialu(cd, X86_64_OR, src, iptr);
1618                         break;
1619
1620                 case ICMD_IORCONST:   /* ..., value  ==> ..., value | constant        */
1621                                       /* val.i = constant                             */
1622
1623                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1624                         x86_64_emit_ialuconst(cd, X86_64_OR, src, iptr);
1625                         break;
1626
1627                 case ICMD_LOR:        /* ..., val1, val2  ==> ..., val1 | val2        */
1628
1629                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1630                         x86_64_emit_lalu(cd, X86_64_OR, src, iptr);
1631                         break;
1632
1633                 case ICMD_LORCONST:   /* ..., value  ==> ..., value | constant        */
1634                                       /* val.l = constant                             */
1635
1636                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1637                         x86_64_emit_laluconst(cd, X86_64_OR, src, iptr);
1638                         break;
1639
1640                 case ICMD_IXOR:       /* ..., val1, val2  ==> ..., val1 ^ val2        */
1641
1642                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1643                         x86_64_emit_ialu(cd, X86_64_XOR, src, iptr);
1644                         break;
1645
1646                 case ICMD_IXORCONST:  /* ..., value  ==> ..., value ^ constant        */
1647                                       /* val.i = constant                             */
1648
1649                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1650                         x86_64_emit_ialuconst(cd, X86_64_XOR, src, iptr);
1651                         break;
1652
1653                 case ICMD_LXOR:       /* ..., val1, val2  ==> ..., val1 ^ val2        */
1654
1655                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1656                         x86_64_emit_lalu(cd, X86_64_XOR, src, iptr);
1657                         break;
1658
1659                 case ICMD_LXORCONST:  /* ..., value  ==> ..., value ^ constant        */
1660                                       /* val.l = constant                             */
1661
1662                         d = reg_of_var(rd, iptr->dst, REG_NULL);
1663                         x86_64_emit_laluconst(cd, X86_64_XOR, src, iptr);
1664                         break;
1665
1666
1667                 case ICMD_IINC:       /* ..., value  ==> ..., value + constant        */
1668                                       /* op1 = variable, val.i = constant             */
1669
1670                         /* using inc and dec is definitely faster than add -- tested      */
1671                         /* with sieve                                                     */
1672
1673                         var = &(rd->locals[iptr->op1][TYPE_INT]);
1674                         d = var->regoff;
1675                         if (var->flags & INMEMORY) {
1676                                 if (iptr->val.i == 1) {
1677                                         x86_64_incl_membase(cd, REG_SP, d * 8);
1678  
1679                                 } else if (iptr->val.i == -1) {
1680                                         x86_64_decl_membase(cd, REG_SP, d * 8);
1681
1682                                 } else {
1683                                         x86_64_alul_imm_membase(cd, X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1684                                 }
1685
1686                         } else {
1687                                 if (iptr->val.i == 1) {
1688                                         x86_64_incl_reg(cd, d);
1689  
1690                                 } else if (iptr->val.i == -1) {
1691                                         x86_64_decl_reg(cd, d);
1692
1693                                 } else {
1694                                         x86_64_alul_imm_reg(cd, X86_64_ADD, iptr->val.i, d);
1695                                 }
1696                         }
1697                         break;
1698
1699
1700                 /* floating operations ************************************************/
1701
1702                 case ICMD_FNEG:       /* ..., value  ==> ..., - value                 */
1703
1704                         var_to_reg_flt(s1, src, REG_FTMP1);
1705                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1706                         a = dseg_adds4(cd, 0x80000000);
1707                         M_FLTMOVE(s1, d);
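                         /* The sign-bit mask was added to the data segment above; the
                            data segment is placed directly below the generated code, so
                            the RIP-relative displacement is the (negative) dseg offset
                            a minus the distance from mcodebase to the end of this
                            9-byte movss instruction (RIP points past it).  ICMD_DNEG
                            below uses the same scheme with a 64-bit mask. */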
1708                         x86_64_movss_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1709                         x86_64_xorps_reg_reg(cd, REG_FTMP2, d);
1710                         store_reg_to_var_flt(iptr->dst, d);
1711                         break;
1712
1713                 case ICMD_DNEG:       /* ..., value  ==> ..., - value                 */
1714
1715                         var_to_reg_flt(s1, src, REG_FTMP1);
1716                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1717                         a = dseg_adds8(cd, 0x8000000000000000);
1718                         M_FLTMOVE(s1, d);
1719                         x86_64_movd_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 9) - (s8) cd->mcodebase) + a, REG_FTMP2);
1720                         x86_64_xorpd_reg_reg(cd, REG_FTMP2, d);
1721                         store_reg_to_var_flt(iptr->dst, d);
1722                         break;
1723
1724                 case ICMD_FADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
1725
1726                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1727                         var_to_reg_flt(s2, src, REG_FTMP2);
1728                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1729                         if (s1 == d) {
1730                                 x86_64_addss_reg_reg(cd, s2, d);
1731                         } else if (s2 == d) {
1732                                 x86_64_addss_reg_reg(cd, s1, d);
1733                         } else {
1734                                 M_FLTMOVE(s1, d);
1735                                 x86_64_addss_reg_reg(cd, s2, d);
1736                         }
1737                         store_reg_to_var_flt(iptr->dst, d);
1738                         break;
1739
1740                 case ICMD_DADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
1741
1742                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1743                         var_to_reg_flt(s2, src, REG_FTMP2);
1744                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1745                         if (s1 == d) {
1746                                 x86_64_addsd_reg_reg(cd, s2, d);
1747                         } else if (s2 == d) {
1748                                 x86_64_addsd_reg_reg(cd, s1, d);
1749                         } else {
1750                                 M_FLTMOVE(s1, d);
1751                                 x86_64_addsd_reg_reg(cd, s2, d);
1752                         }
1753                         store_reg_to_var_flt(iptr->dst, d);
1754                         break;
1755
1756                 case ICMD_FSUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
1757
1758                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1759                         var_to_reg_flt(s2, src, REG_FTMP2);
1760                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
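                         /* If the subtrahend already occupies the destination register
                            it is copied to REG_FTMP2 first, so that s1 can be moved
                            into d before the subtraction.  DSUB, FDIV and DDIV below
                            handle the non-commutative case the same way. */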
1761                         if (s2 == d) {
1762                                 M_FLTMOVE(s2, REG_FTMP2);
1763                                 s2 = REG_FTMP2;
1764                         }
1765                         M_FLTMOVE(s1, d);
1766                         x86_64_subss_reg_reg(cd, s2, d);
1767                         store_reg_to_var_flt(iptr->dst, d);
1768                         break;
1769
1770                 case ICMD_DSUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
1771
1772                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1773                         var_to_reg_flt(s2, src, REG_FTMP2);
1774                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1775                         if (s2 == d) {
1776                                 M_FLTMOVE(s2, REG_FTMP2);
1777                                 s2 = REG_FTMP2;
1778                         }
1779                         M_FLTMOVE(s1, d);
1780                         x86_64_subsd_reg_reg(cd, s2, d);
1781                         store_reg_to_var_flt(iptr->dst, d);
1782                         break;
1783
1784                 case ICMD_FMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1785
1786                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1787                         var_to_reg_flt(s2, src, REG_FTMP2);
1788                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1789                         if (s1 == d) {
1790                                 x86_64_mulss_reg_reg(cd, s2, d);
1791                         } else if (s2 == d) {
1792                                 x86_64_mulss_reg_reg(cd, s1, d);
1793                         } else {
1794                                 M_FLTMOVE(s1, d);
1795                                 x86_64_mulss_reg_reg(cd, s2, d);
1796                         }
1797                         store_reg_to_var_flt(iptr->dst, d);
1798                         break;
1799
1800                 case ICMD_DMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1801
1802                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1803                         var_to_reg_flt(s2, src, REG_FTMP2);
1804                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1805                         if (s1 == d) {
1806                                 x86_64_mulsd_reg_reg(cd, s2, d);
1807                         } else if (s2 == d) {
1808                                 x86_64_mulsd_reg_reg(cd, s1, d);
1809                         } else {
1810                                 M_FLTMOVE(s1, d);
1811                                 x86_64_mulsd_reg_reg(cd, s2, d);
1812                         }
1813                         store_reg_to_var_flt(iptr->dst, d);
1814                         break;
1815
1816                 case ICMD_FDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1817
1818                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1819                         var_to_reg_flt(s2, src, REG_FTMP2);
1820                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1821                         if (s2 == d) {
1822                                 M_FLTMOVE(s2, REG_FTMP2);
1823                                 s2 = REG_FTMP2;
1824                         }
1825                         M_FLTMOVE(s1, d);
1826                         x86_64_divss_reg_reg(cd, s2, d);
1827                         store_reg_to_var_flt(iptr->dst, d);
1828                         break;
1829
1830                 case ICMD_DDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1831
1832                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1833                         var_to_reg_flt(s2, src, REG_FTMP2);
1834                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1835                         if (s2 == d) {
1836                                 M_FLTMOVE(s2, REG_FTMP2);
1837                                 s2 = REG_FTMP2;
1838                         }
1839                         M_FLTMOVE(s1, d);
1840                         x86_64_divsd_reg_reg(cd, s2, d);
1841                         store_reg_to_var_flt(iptr->dst, d);
1842                         break;
1843
1844                 case ICMD_I2F:       /* ..., value  ==> ..., (float) value            */
1845
1846                         var_to_reg_int(s1, src, REG_ITMP1);
1847                         d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1848                         x86_64_cvtsi2ss_reg_reg(cd, s1, d);
1849                         store_reg_to_var_flt(iptr->dst, d);
1850                         break;
1851
1852                 case ICMD_I2D:       /* ..., value  ==> ..., (double) value           */
1853
1854                         var_to_reg_int(s1, src, REG_ITMP1);
1855                         d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1856                         x86_64_cvtsi2sd_reg_reg(cd, s1, d);
1857                         store_reg_to_var_flt(iptr->dst, d);
1858                         break;
1859
1860                 case ICMD_L2F:       /* ..., value  ==> ..., (float) value            */
1861
1862                         var_to_reg_int(s1, src, REG_ITMP1);
1863                         d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1864                         x86_64_cvtsi2ssq_reg_reg(cd, s1, d);
1865                         store_reg_to_var_flt(iptr->dst, d);
1866                         break;
1867                         
1868                 case ICMD_L2D:       /* ..., value  ==> ..., (double) value           */
1869
1870                         var_to_reg_int(s1, src, REG_ITMP1);
1871                         d = reg_of_var(rd, iptr->dst, REG_FTMP1);
1872                         x86_64_cvtsi2sdq_reg_reg(cd, s1, d);
1873                         store_reg_to_var_flt(iptr->dst, d);
1874                         break;
1875                         
1876                 case ICMD_F2I:       /* ..., value  ==> ..., (int) value              */
1877
1878                         var_to_reg_flt(s1, src, REG_FTMP1);
1879                         d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1880                         x86_64_cvttss2si_reg_reg(cd, s1, d);
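                         /* cvttss2si returns 0x80000000 both for Integer.MIN_VALUE and
                            for NaN or out-of-range inputs, so in that case the slower
                            asm_builtin_f2i is called to produce the exact result the
                            JVM spec demands.  'a' below is the byte length of that
                            fallback sequence, which the jcc skips when the fast
                            conversion is already valid.  D2I, F2L and D2L use the same
                            pattern. */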
1881                         x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d);    /* corner cases */
1882                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1883                         x86_64_jcc(cd, X86_64_CC_NE, a);
1884                         M_FLTMOVE(s1, REG_FTMP1);
1885                         x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2i, REG_ITMP2);
1886                         x86_64_call_reg(cd, REG_ITMP2);
1887                         M_INTMOVE(REG_RESULT, d);
1888                         store_reg_to_var_int(iptr->dst, d);
1889                         break;
1890
1891                 case ICMD_D2I:       /* ..., value  ==> ..., (int) value              */
1892
1893                         var_to_reg_flt(s1, src, REG_FTMP1);
1894                         d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1895                         x86_64_cvttsd2si_reg_reg(cd, s1, d);
1896                         x86_64_alul_imm_reg(cd, X86_64_CMP, 0x80000000, d);    /* corner cases */
1897                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1898                         x86_64_jcc(cd, X86_64_CC_NE, a);
1899                         M_FLTMOVE(s1, REG_FTMP1);
1900                         x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2i, REG_ITMP2);
1901                         x86_64_call_reg(cd, REG_ITMP2);
1902                         M_INTMOVE(REG_RESULT, d);
1903                         store_reg_to_var_int(iptr->dst, d);
1904                         break;
1905
1906                 case ICMD_F2L:       /* ..., value  ==> ..., (long) value             */
1907
1908                         var_to_reg_flt(s1, src, REG_FTMP1);
1909                         d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1910                         x86_64_cvttss2siq_reg_reg(cd, s1, d);
1911                         x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1912                         x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d);     /* corner cases */
1913                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1914                         x86_64_jcc(cd, X86_64_CC_NE, a);
1915                         M_FLTMOVE(s1, REG_FTMP1);
1916                         x86_64_mov_imm_reg(cd, (s8) asm_builtin_f2l, REG_ITMP2);
1917                         x86_64_call_reg(cd, REG_ITMP2);
1918                         M_INTMOVE(REG_RESULT, d);
1919                         store_reg_to_var_int(iptr->dst, d);
1920                         break;
1921
1922                 case ICMD_D2L:       /* ..., value  ==> ..., (long) value             */
1923
1924                         var_to_reg_flt(s1, src, REG_FTMP1);
1925                         d = reg_of_var(rd, iptr->dst, REG_ITMP1);
1926                         x86_64_cvttsd2siq_reg_reg(cd, s1, d);
1927                         x86_64_mov_imm_reg(cd, 0x8000000000000000, REG_ITMP2);
1928                         x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, d);     /* corner cases */
1929                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1930                         x86_64_jcc(cd, X86_64_CC_NE, a);
1931                         M_FLTMOVE(s1, REG_FTMP1);
1932                         x86_64_mov_imm_reg(cd, (s8) asm_builtin_d2l, REG_ITMP2);
1933                         x86_64_call_reg(cd, REG_ITMP2);
1934                         M_INTMOVE(REG_RESULT, d);
1935                         store_reg_to_var_int(iptr->dst, d);
1936                         break;
1937
1938                 case ICMD_F2D:       /* ..., value  ==> ..., (double) value           */
1939
1940                         var_to_reg_flt(s1, src, REG_FTMP1);
1941                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1942                         x86_64_cvtss2sd_reg_reg(cd, s1, d);
1943                         store_reg_to_var_flt(iptr->dst, d);
1944                         break;
1945
1946                 case ICMD_D2F:       /* ..., value  ==> ..., (float) value            */
1947
1948                         var_to_reg_flt(s1, src, REG_FTMP1);
1949                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
1950                         x86_64_cvtsd2ss_reg_reg(cd, s1, d);
1951                         store_reg_to_var_flt(iptr->dst, d);
1952                         break;
1953
1954                 case ICMD_FCMPL:      /* ..., val1, val2  ==> ..., val1 fcmpl val2    */
1955                                           /* == => 0, < => 1, > => -1 */
1956
1957                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1958                         var_to_reg_flt(s2, src, REG_FTMP2);
1959                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1960                         x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1961                         x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1962                         x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
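                         /* ucomiss sets the parity flag for an unordered (NaN)
                            comparison; FCMPL maps that case to -1 while FCMPG below
                            maps it to +1, as the fcmpl/fcmpg distinction in the JVM
                            spec requires.  The result (-1/0/1) is materialized
                            branch-free with cmov. */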
1963                         x86_64_ucomiss_reg_reg(cd, s1, s2);
1964                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1965                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1966                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d);    /* treat unordered as GT */
1967                         store_reg_to_var_int(iptr->dst, d);
1968                         break;
1969
1970                 case ICMD_FCMPG:      /* ..., val1, val2  ==> ..., val1 fcmpg val2    */
1971                                           /* == => 0, < => 1, > => -1 */
1972
1973                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1974                         var_to_reg_flt(s2, src, REG_FTMP2);
1975                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1976                         x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1977                         x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1978                         x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1979                         x86_64_ucomiss_reg_reg(cd, s1, s2);
1980                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1981                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1982                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d);    /* treat unordered as LT */
1983                         store_reg_to_var_int(iptr->dst, d);
1984                         break;
1985
1986                 case ICMD_DCMPL:      /* ..., val1, val2  ==> ..., val1 fcmpl val2    */
1987                                           /* == => 0, < => 1, > => -1 */
1988
1989                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1990                         var_to_reg_flt(s2, src, REG_FTMP2);
1991                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
1992                         x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
1993                         x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
1994                         x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
1995                         x86_64_ucomisd_reg_reg(cd, s1, s2);
1996                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
1997                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
1998                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP2, d);    /* treat unordered as GT */
1999                         store_reg_to_var_int(iptr->dst, d);
2000                         break;
2001
2002                 case ICMD_DCMPG:      /* ..., val1, val2  ==> ..., val1 fcmpg val2    */
2003                                           /* == => 0, < => 1, > => -1 */
2004
2005                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
2006                         var_to_reg_flt(s2, src, REG_FTMP2);
2007                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2008                         x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
2009                         x86_64_mov_imm_reg(cd, 1, REG_ITMP1);
2010                         x86_64_mov_imm_reg(cd, -1, REG_ITMP2);
2011                         x86_64_ucomisd_reg_reg(cd, s1, s2);
2012                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_B, REG_ITMP1, d);
2013                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_A, REG_ITMP2, d);
2014                         x86_64_cmovcc_reg_reg(cd, X86_64_CC_P, REG_ITMP1, d);    /* treat unordered as LT */
2015                         store_reg_to_var_int(iptr->dst, d);
2016                         break;
2017
2018
2019                 /* memory operations **************************************************/
2020
2021                 case ICMD_ARRAYLENGTH: /* ..., arrayref  ==> ..., (int) length        */
2022
2023                         var_to_reg_int(s1, src, REG_ITMP1);
2024                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2025                         gen_nullptr_check(s1);
2026                         x86_64_movl_membase_reg(cd, s1, OFFSET(java_arrayheader, size), d);
2027                         store_reg_to_var_int(iptr->dst, d);
2028                         break;
2029
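                 /* Array loads: the null-pointer and bounds checks are only emitted
                    when iptr->op1 == 0; a non-zero op1 marks accesses whose checks have
                    been removed (e.g. by bound-check elimination).  The scale argument
                    of the memindex addressing mode is the log2 of the element size
                    (3 for references/longs/doubles, 2 for ints/floats, 1 for
                    chars/shorts, 0 for bytes). */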
2030                 case ICMD_AALOAD:     /* ..., arrayref, index  ==> ..., value         */
2031
2032                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2033                         var_to_reg_int(s2, src, REG_ITMP2);
2034                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2035                         if (iptr->op1 == 0) {
2036                                 gen_nullptr_check(s1);
2037                                 gen_bound_check;
2038                         }
2039                         x86_64_mov_memindex_reg(cd, OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2040                         store_reg_to_var_int(iptr->dst, d);
2041                         break;
2042
2043                 case ICMD_LALOAD:     /* ..., arrayref, index  ==> ..., value         */
2044
2045                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2046                         var_to_reg_int(s2, src, REG_ITMP2);
2047                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2048                         if (iptr->op1 == 0) {
2049                                 gen_nullptr_check(s1);
2050                                 gen_bound_check;
2051                         }
2052                         x86_64_mov_memindex_reg(cd, OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2053                         store_reg_to_var_int(iptr->dst, d);
2054                         break;
2055
2056                 case ICMD_IALOAD:     /* ..., arrayref, index  ==> ..., value         */
2057
2058                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2059                         var_to_reg_int(s2, src, REG_ITMP2);
2060                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2061                         if (iptr->op1 == 0) {
2062                                 gen_nullptr_check(s1);
2063                                 gen_bound_check;
2064                         }
2065                         x86_64_movl_memindex_reg(cd, OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2066                         store_reg_to_var_int(iptr->dst, d);
2067                         break;
2068
2069                 case ICMD_FALOAD:     /* ..., arrayref, index  ==> ..., value         */
2070
2071                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2072                         var_to_reg_int(s2, src, REG_ITMP2);
2073                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2074                         if (iptr->op1 == 0) {
2075                                 gen_nullptr_check(s1);
2076                                 gen_bound_check;
2077                         }
2078                         x86_64_movss_memindex_reg(cd, OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2079                         store_reg_to_var_flt(iptr->dst, d);
2080                         break;
2081
2082                 case ICMD_DALOAD:     /* ..., arrayref, index  ==> ..., value         */
2083
2084                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2085                         var_to_reg_int(s2, src, REG_ITMP2);
2086                         d = reg_of_var(rd, iptr->dst, REG_FTMP3);
2087                         if (iptr->op1 == 0) {
2088                                 gen_nullptr_check(s1);
2089                                 gen_bound_check;
2090                         }
2091                         x86_64_movsd_memindex_reg(cd, OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2092                         store_reg_to_var_flt(iptr->dst, d);
2093                         break;
2094
2095                 case ICMD_CALOAD:     /* ..., arrayref, index  ==> ..., value         */
2096
2097                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2098                         var_to_reg_int(s2, src, REG_ITMP2);
2099                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2100                         if (iptr->op1 == 0) {
2101                                 gen_nullptr_check(s1);
2102                                 gen_bound_check;
2103                         }
2104                         x86_64_movzwq_memindex_reg(cd, OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2105                         store_reg_to_var_int(iptr->dst, d);
2106                         break;                  
2107
2108                 case ICMD_SALOAD:     /* ..., arrayref, index  ==> ..., value         */
2109
2110                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2111                         var_to_reg_int(s2, src, REG_ITMP2);
2112                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2113                         if (iptr->op1 == 0) {
2114                                 gen_nullptr_check(s1);
2115                                 gen_bound_check;
2116                         }
2117                         x86_64_movswq_memindex_reg(cd, OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2118                         store_reg_to_var_int(iptr->dst, d);
2119                         break;
2120
2121                 case ICMD_BALOAD:     /* ..., arrayref, index  ==> ..., value         */
2122
2123                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2124                         var_to_reg_int(s2, src, REG_ITMP2);
2125                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2126                         if (iptr->op1 == 0) {
2127                                 gen_nullptr_check(s1);
2128                                 gen_bound_check;
2129                         }
2130                         x86_64_movsbq_memindex_reg(cd, OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2131                         store_reg_to_var_int(iptr->dst, d);
2132                         break;
2133
2134
2135                 case ICMD_AASTORE:    /* ..., arrayref, index, value  ==> ...         */
2136
2137                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2138                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2139                         if (iptr->op1 == 0) {
2140                                 gen_nullptr_check(s1);
2141                                 gen_bound_check;
2142                         }
2143                         var_to_reg_int(s3, src, REG_ITMP3);
2144                         x86_64_mov_reg_memindex(cd, s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2145                         break;
2146
2147                 case ICMD_LASTORE:    /* ..., arrayref, index, value  ==> ...         */
2148
2149                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2150                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2151                         if (iptr->op1 == 0) {
2152                                 gen_nullptr_check(s1);
2153                                 gen_bound_check;
2154                         }
2155                         var_to_reg_int(s3, src, REG_ITMP3);
2156                         x86_64_mov_reg_memindex(cd, s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2157                         break;
2158
2159                 case ICMD_IASTORE:    /* ..., arrayref, index, value  ==> ...         */
2160
2161                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2162                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2163                         if (iptr->op1 == 0) {
2164                                 gen_nullptr_check(s1);
2165                                 gen_bound_check;
2166                         }
2167                         var_to_reg_int(s3, src, REG_ITMP3);
2168                         x86_64_movl_reg_memindex(cd, s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2169                         break;
2170
2171                 case ICMD_FASTORE:    /* ..., arrayref, index, value  ==> ...         */
2172
2173                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2174                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2175                         if (iptr->op1 == 0) {
2176                                 gen_nullptr_check(s1);
2177                                 gen_bound_check;
2178                         }
2179                         var_to_reg_flt(s3, src, REG_FTMP3);
2180                         x86_64_movss_reg_memindex(cd, s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2181                         break;
2182
2183                 case ICMD_DASTORE:    /* ..., arrayref, index, value  ==> ...         */
2184
2185                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2186                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2187                         if (iptr->op1 == 0) {
2188                                 gen_nullptr_check(s1);
2189                                 gen_bound_check;
2190                         }
2191                         var_to_reg_flt(s3, src, REG_FTMP3);
2192                         x86_64_movsd_reg_memindex(cd, s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2193                         break;
2194
2195                 case ICMD_CASTORE:    /* ..., arrayref, index, value  ==> ...         */
2196
2197                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2198                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2199                         if (iptr->op1 == 0) {
2200                                 gen_nullptr_check(s1);
2201                                 gen_bound_check;
2202                         }
2203                         var_to_reg_int(s3, src, REG_ITMP3);
2204                         x86_64_movw_reg_memindex(cd, s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2205                         break;
2206
2207                 case ICMD_SASTORE:    /* ..., arrayref, index, value  ==> ...         */
2208
2209                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2210                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2211                         if (iptr->op1 == 0) {
2212                                 gen_nullptr_check(s1);
2213                                 gen_bound_check;
2214                         }
2215                         var_to_reg_int(s3, src, REG_ITMP3);
2216                         x86_64_movw_reg_memindex(cd, s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2217                         break;
2218
2219                 case ICMD_BASTORE:    /* ..., arrayref, index, value  ==> ...         */
2220
2221                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2222                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2223                         if (iptr->op1 == 0) {
2224                                 gen_nullptr_check(s1);
2225                                 gen_bound_check;
2226                         }
2227                         var_to_reg_int(s3, src, REG_ITMP3);
2228                         x86_64_movb_reg_memindex(cd, s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2229                         break;
2230
2231                 case ICMD_IASTORECONST: /* ..., arrayref, index  ==> ...              */
2232
2233                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2234                         var_to_reg_int(s2, src, REG_ITMP2);
2235                         if (iptr->op1 == 0) {
2236                                 gen_nullptr_check(s1);
2237                                 gen_bound_check;
2238                         }
2239                         x86_64_movl_imm_memindex(cd, iptr->val.i, OFFSET(java_intarray, data[0]), s1, s2, 2);
2240                         break;
2241
2242                 case ICMD_LASTORECONST: /* ..., arrayref, index  ==> ...              */
2243
2244                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2245                         var_to_reg_int(s2, src, REG_ITMP2);
2246                         if (iptr->op1 == 0) {
2247                                 gen_nullptr_check(s1);
2248                                 gen_bound_check;
2249                         }
2250
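                        /* x86_64 has no 64 bit immediate store: a value that fits into a */
                        /* sign-extended 32 bit immediate is written with one mov,        */
                        /* otherwise the low and high halves are stored separately.       */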
2251                         if (IS_IMM32(iptr->val.l)) {
2252                                 x86_64_mov_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2253
2254                         } else {
2255                                 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l & 0x00000000ffffffff), OFFSET(java_longarray, data[0]), s1, s2, 3);
2256                                 x86_64_movl_imm_memindex(cd, (u4) (iptr->val.l >> 32), OFFSET(java_longarray, data[0]) + 4, s1, s2, 3);
2257                         }
2258                         break;
2259
2260                 case ICMD_AASTORECONST: /* ..., arrayref, index  ==> ...              */
2261
2262                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2263                         var_to_reg_int(s2, src, REG_ITMP2);
2264                         if (iptr->op1 == 0) {
2265                                 gen_nullptr_check(s1);
2266                                 gen_bound_check;
2267                         }
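                        /* the constant stored here is always the null reference (0)      */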
2268                         x86_64_mov_imm_memindex(cd, 0, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2269                         break;
2270
2271                 case ICMD_BASTORECONST: /* ..., arrayref, index  ==> ...              */
2272
2273                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2274                         var_to_reg_int(s2, src, REG_ITMP2);
2275                         if (iptr->op1 == 0) {
2276                                 gen_nullptr_check(s1);
2277                                 gen_bound_check;
2278                         }
2279                         x86_64_movb_imm_memindex(cd, iptr->val.i, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2280                         break;
2281
2282                 case ICMD_CASTORECONST:   /* ..., arrayref, index  ==> ...            */
2283
2284                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2285                         var_to_reg_int(s2, src, REG_ITMP2);
2286                         if (iptr->op1 == 0) {
2287                                 gen_nullptr_check(s1);
2288                                 gen_bound_check;
2289                         }
2290                         x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_chararray, data[0]), s1, s2, 1);
2291                         break;
2292
2293                 case ICMD_SASTORECONST:   /* ..., arrayref, index  ==> ...            */
2294
2295                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2296                         var_to_reg_int(s2, src, REG_ITMP2);
2297                         if (iptr->op1 == 0) {
2298                                 gen_nullptr_check(s1);
2299                                 gen_bound_check;
2300                         }
2301                         x86_64_movw_imm_memindex(cd, iptr->val.i, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2302                         break;
2303
2304
2305                 case ICMD_PUTSTATIC:  /* ..., value  ==> ...                          */
2306                                       /* op1 = type, val.a = field address            */
2307
2308                         /* If the static fields' class is not yet initialized, register  */
2309                         /* a patcher reference here; the call code is generated later.   */
2310                         if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2311                                 codegen_addpatchref(cd, cd->mcodeptr, asm_check_clinit, ((fieldinfo *) iptr->val.a)->class);
2312
2313                                 /* These nops are only for debugging: patched code is hard to */
2314                                 /* read, so we emit 5 nops here and let the patcher overwrite */
2315                                 /* them, leaving the real code untouched.                     */
2316                                 if (showdisassemble) {
2317                                         x86_64_nop(cd); x86_64_nop(cd); x86_64_nop(cd);
2318                                         x86_64_nop(cd); x86_64_nop(cd);
2319                                 }
2320                         }
2321
2322                         /* This approach is much faster than moving the field address     */
2323                         /* inline into a register. */
2324                         a = dseg_addaddress(cd, &(((fieldinfo *) iptr->val.a)->value));
2325                         x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
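                        /* The data segment lies below the code, so the RIP-relative      */
                        /* displacement is the (negative) dseg offset a minus the code    */
                        /* offset of the next instruction; the +7 is the length of this   */
                        /* mov instruction itself.                                        */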
2326                         switch (iptr->op1) {
2327                         case TYPE_INT:
2328                                 var_to_reg_int(s2, src, REG_ITMP1);
2329                                 x86_64_movl_reg_membase(cd, s2, REG_ITMP2, 0);
2330                                 break;
2331                         case TYPE_LNG:
2332                         case TYPE_ADR:
2333                                 var_to_reg_int(s2, src, REG_ITMP1);
2334                                 x86_64_mov_reg_membase(cd, s2, REG_ITMP2, 0);
2335                                 break;
2336                         case TYPE_FLT:
2337                                 var_to_reg_flt(s2, src, REG_FTMP1);
2338                                 x86_64_movss_reg_membase(cd, s2, REG_ITMP2, 0);
2339                                 break;
2340                         case TYPE_DBL:
2341                                 var_to_reg_flt(s2, src, REG_FTMP1);
2342                                 x86_64_movsd_reg_membase(cd, s2, REG_ITMP2, 0);
2343                                 break;
2344                         }
2345                         break;
2346
2347                 case ICMD_PUTSTATICCONST: /* ...  ==> ...                             */
2348                                           /* val = value (in current instruction)     */
2349                                           /* op1 = type, val.a = field address (in    */
2350                                           /* following NOP)                           */
2351
2352                         /* If the static fields' class is not yet initialized, register  */
2353                         /* a patcher reference here; the call code is generated later.   */
2354                         if (!((fieldinfo *) iptr[1].val.a)->class->initialized) {
2355                                 codegen_addpatchref(cd, cd->mcodeptr, asm_check_clinit, ((fieldinfo *) iptr[1].val.a)->class);
2356
2357                                 /* These nops are only for debugging: patched code is hard to */
2358                                 /* read, so we emit 5 nops here and let the patcher overwrite */
2359                                 /* them, leaving the real code untouched.                     */
2360                                 if (showdisassemble) {
2361                                         x86_64_nop(cd); x86_64_nop(cd); x86_64_nop(cd);
2362                                         x86_64_nop(cd); x86_64_nop(cd);
2363                                 }
2364                         }
2365
2366                         /* This approach is much faster than moving the field address     */
2367                         /* inline into a register. */
2368                         a = dseg_addaddress(cd, &(((fieldinfo *) iptr[1].val.a)->value));
2369                         x86_64_mov_membase_reg(cd, RIP, -(((ptrint) cd->mcodeptr + 7) - (ptrint) cd->mcodebase) + a, REG_ITMP1);
2370                         switch (iptr->op1) {
2371                         case TYPE_INT:
2372                         case TYPE_FLT:
2373                                 x86_64_movl_imm_membase(cd, iptr->val.i, REG_ITMP1, 0);
2374                                 break;
2375                         case TYPE_LNG:
2376                         case TYPE_ADR:
2377                         case TYPE_DBL:
2378                                 if (IS_IMM32(iptr->val.l)) {
2379                                         x86_64_mov_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2380                                 } else {
2381                                         x86_64_movl_imm_membase(cd, iptr->val.l, REG_ITMP1, 0);
2382                                         x86_64_movl_imm_membase(cd, iptr->val.l >> 32, REG_ITMP1, 4);
2383                                 }
2384                                 break;
2385                         }
2386                         break;
2387
2388                 case ICMD_GETSTATIC:  /* ...  ==> ..., value                          */
2389                                       /* op1 = type, val.a = field address            */
2390
2391                         /* If the static fields' class is not yet initialized, register  */
2392                         /* a patcher reference here; the call code is generated later.   */
2393                         if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2394                                 codegen_addpatchref(cd, cd->mcodeptr, asm_check_clinit, ((fieldinfo *) iptr->val.a)->class);
2395
2396                                 /* These nops are only for debugging: patched code is hard to */
2397                                 /* read, so we emit 5 nops here and let the patcher overwrite */
2398                                 /* them, leaving the real code untouched.                     */
2399                                 if (showdisassemble) {
2400                                         x86_64_nop(cd); x86_64_nop(cd); x86_64_nop(cd);
2401                                         x86_64_nop(cd); x86_64_nop(cd);
2402                                 }
2403                         }
2404
2405                         /* This approach is much faster than moving the field address     */
2406                         /* inline into a register. */
2407                         a = dseg_addaddress(cd, &(((fieldinfo *) iptr->val.a)->value));
2408                         x86_64_mov_membase_reg(cd, RIP, -(((s8) cd->mcodeptr + 7) - (s8) cd->mcodebase) + a, REG_ITMP2);
2409                         switch (iptr->op1) {
2410                         case TYPE_INT:
2411                                 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2412                                 x86_64_movl_membase_reg(cd, REG_ITMP2, 0, d);
2413                                 store_reg_to_var_int(iptr->dst, d);
2414                                 break;
2415                         case TYPE_LNG:
2416                         case TYPE_ADR:
2417                                 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2418                                 x86_64_mov_membase_reg(cd, REG_ITMP2, 0, d);
2419                                 store_reg_to_var_int(iptr->dst, d);
2420                                 break;
2421                         case TYPE_FLT:
2422                                 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2423                                 x86_64_movss_membase_reg(cd, REG_ITMP2, 0, d);
2424                                 store_reg_to_var_flt(iptr->dst, d);
2425                                 break;
2426                         case TYPE_DBL:                          
2427                                 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2428                                 x86_64_movsd_membase_reg(cd, REG_ITMP2, 0, d);
2429                                 store_reg_to_var_flt(iptr->dst, d);
2430                                 break;
2431                         }
2432                         break;
2433
2434                 case ICMD_PUTFIELD:   /* ..., objectref, value  ==> ...               */
2435                                       /* op1 = type, val.i = field offset             */
2436
2437                         a = ((fieldinfo *)(iptr->val.a))->offset;
2438                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2439                         gen_nullptr_check(s1);
2440                         switch (iptr->op1) {
2441                         case TYPE_INT:
2442                                 var_to_reg_int(s2, src, REG_ITMP2);
2443                                 x86_64_movl_reg_membase(cd, s2, s1, a);
2444                                 break;
2445                         case TYPE_LNG:
2446                         case TYPE_ADR:
2447                                 var_to_reg_int(s2, src, REG_ITMP2);
2448                                 x86_64_mov_reg_membase(cd, s2, s1, a);
2449                                 break;
2450                         case TYPE_FLT:
2451                                 var_to_reg_flt(s2, src, REG_FTMP2);
2452                                 x86_64_movss_reg_membase(cd, s2, s1, a);
2453                                 break;
2454                         case TYPE_DBL:
2455                                 var_to_reg_flt(s2, src, REG_FTMP2);
2456                                 x86_64_movsd_reg_membase(cd, s2, s1, a);
2457                                 break;
2458                         }
2459                         break;
2460
2461                 case ICMD_PUTFIELDCONST:  /* ..., objectref, value  ==> ...           */
2462                                           /* val = value (in current instruction)     */
2463                                           /* op1 = type, val.a = field address (in    */
2464                                           /* following NOP)                           */
2465
2466                         a = ((fieldinfo *) iptr[1].val.a)->offset;
2467                         var_to_reg_int(s1, src, REG_ITMP1);
2468                         gen_nullptr_check(s1);
2469                         switch (iptr->op1) {
2470                         case TYPE_INT:
2471                         case TYPE_FLT:
2472                                 x86_64_movl_imm_membase(cd, iptr->val.i, s1, a);
2473                                 break;
2474                         case TYPE_LNG:
2475                         case TYPE_ADR:
2476                         case TYPE_DBL:
2477                                 if (IS_IMM32(iptr->val.l)) {
2478                                         x86_64_mov_imm_membase(cd, iptr->val.l, s1, a);
2479                                 } else {
2480                                         x86_64_movl_imm_membase(cd, iptr->val.l, s1, a);
2481                                         x86_64_movl_imm_membase(cd, iptr->val.l >> 32, s1, a + 4);
2482                                 }
2483                                 break;
2484                         }
2485                         break;
2486
2487                 case ICMD_GETFIELD:   /* ...  ==> ..., value                          */
2488                                       /* op1 = type, val.i = field offset             */
2489
2490                         a = ((fieldinfo *)(iptr->val.a))->offset;
2491                         var_to_reg_int(s1, src, REG_ITMP1);
2492                         gen_nullptr_check(s1);
2493                         switch (iptr->op1) {
2494                         case TYPE_INT:
2495                                 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2496                                 x86_64_movl_membase_reg(cd, s1, a, d);
2497                                 store_reg_to_var_int(iptr->dst, d);
2498                                 break;
2499                         case TYPE_LNG:
2500                         case TYPE_ADR:
2501                                 d = reg_of_var(rd, iptr->dst, REG_ITMP1);
2502                                 x86_64_mov_membase_reg(cd, s1, a, d);
2503                                 store_reg_to_var_int(iptr->dst, d);
2504                                 break;
2505                         case TYPE_FLT:
2506                                 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2507                                 x86_64_movss_membase_reg(cd, s1, a, d);
2508                                 store_reg_to_var_flt(iptr->dst, d);
2509                                 break;
2510                         case TYPE_DBL:                          
2511                                 d = reg_of_var(rd, iptr->dst, REG_FTMP1);
2512                                 x86_64_movsd_membase_reg(cd, s1, a, d);
2513                                 store_reg_to_var_flt(iptr->dst, d);
2514                                 break;
2515                         }
2516                         break;
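                /* Instance field accesses address the object directly at the compile-   */
                /* time field offset; gen_nullptr_check guards the dereference, and      */
                /* with explicit checks disabled the runtime relies on the hardware      */
                /* null pointer exception instead.                                       */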
2517
2518
2519                 /* branch operations **************************************************/
2520
2521                 case ICMD_ATHROW:       /* ..., objectref ==> ... (, objectref)       */
2522
2523                         var_to_reg_int(s1, src, REG_ITMP1);
2524                         M_INTMOVE(s1, REG_ITMP1_XPTR);
2525
2526                         x86_64_call_imm(cd, 0);             /* push the current PC...     */
2527                         x86_64_pop_reg(cd, REG_ITMP2_XPC);  /* ...and pop it as the XPC   */
2528
2529                         x86_64_mov_imm_reg(cd, (s8) asm_handle_exception, REG_ITMP3);
2530                         x86_64_jmp_reg(cd, REG_ITMP3);
2531                         break;
2532
2533                 case ICMD_GOTO:         /* ... ==> ...                                */
2534                                         /* op1 = target JavaVM pc                     */
2535
2536                         x86_64_jmp_imm(cd, 0);
2537                         codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2538                         break;
2539
2540                 case ICMD_JSR:          /* ... ==> ...                                */
2541                                         /* op1 = target JavaVM pc                     */
2542
2543                         x86_64_call_imm(cd, 0);
2544                         codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2545                         break;
2546                         
2547                 case ICMD_RET:          /* ... ==> ...                                */
2548                                         /* op1 = local variable                       */
2549
2550                         var = &(rd->locals[iptr->op1][TYPE_ADR]);
2551                         var_to_reg_int(s1, var, REG_ITMP1);
2552                         x86_64_jmp_reg(cd, s1);
2553                         break;
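                /* JSR branches with a call so that the return address is pushed; RET    */
                /* returns with an indirect jump through the address kept in the local   */
                /* variable.                                                             */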
2554
2555                 case ICMD_IFNULL:       /* ..., value ==> ...                         */
2556                                         /* op1 = target JavaVM pc                     */
2557
2558                         if (src->flags & INMEMORY) {
2559                                 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2560
2561                         } else {
2562                                 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2563                         }
2564                         x86_64_jcc(cd, X86_64_CC_E, 0);
2565                         codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2566                         break;
2567
2568                 case ICMD_IFNONNULL:    /* ..., value ==> ...                         */
2569                                         /* op1 = target JavaVM pc                     */
2570
2571                         if (src->flags & INMEMORY) {
2572                                 x86_64_alu_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
2573
2574                         } else {
2575                                 x86_64_test_reg_reg(cd, src->regoff, src->regoff);
2576                         }
2577                         x86_64_jcc(cd, X86_64_CC_NE, 0);
2578                         codegen_addreference(cd, BlockPtrOfPC(iptr->op1), cd->mcodeptr);
2579                         break;
2580
2581                 case ICMD_IFEQ:         /* ..., value ==> ...                         */
2582                                         /* op1 = target JavaVM pc, val.i = constant   */
2583
2584                         x86_64_emit_ifcc(cd, X86_64_CC_E, src, iptr);
2585                         break;
2586
2587                 case ICMD_IFLT:         /* ..., value ==> ...                         */
2588                                         /* op1 = target JavaVM pc, val.i = constant   */
2589
2590                         x86_64_emit_ifcc(cd, X86_64_CC_L, src, iptr);
2591                         break;
2592
2593                 case ICMD_IFLE:         /* ..., value ==> ...                         */
2594                                         /* op1 = target JavaVM pc, val.i = constant   */
2595
2596                         x86_64_emit_ifcc(cd, X86_64_CC_LE, src, iptr);
2597                         break;
2598
2599                 case ICMD_IFNE:         /* ..., value ==> ...                         */
2600                                         /* op1 = target JavaVM pc, val.i = constant   */
2601
2602                         x86_64_emit_ifcc(cd, X86_64_CC_NE, src, iptr);
2603                         break;
2604
2605                 case ICMD_IFGT:         /* ..., value ==> ...                         */
2606                                         /* op1 = target JavaVM pc, val.i = constant   */
2607
2608                         x86_64_emit_ifcc(cd, X86_64_CC_G, src, iptr);
2609                         break;
2610
2611                 case ICMD_IFGE:         /* ..., value ==> ...                         */
2612                                         /* op1 = target JavaVM pc, val.i = constant   */
2613
2614                         x86_64_emit_ifcc(cd, X86_64_CC_GE, src, iptr);
2615                         break;
2616
2617                 case ICMD_IF_LEQ:       /* ..., value ==> ...                         */
2618                                         /* op1 = target JavaVM pc, val.l = constant   */
2619
2620                         x86_64_emit_if_lcc(cd, X86_64_CC_E, src, iptr);
2621                         break;
2622
2623                 case ICMD_IF_LLT:       /* ..., value ==> ...                         */
2624                                         /* op1 = target JavaVM pc, val.l = constant   */
2625
2626                         x86_64_emit_if_lcc(cd, X86_64_CC_L, src, iptr);
2627                         break;
2628
2629                 case ICMD_IF_LLE:       /* ..., value ==> ...                         */
2630                                         /* op1 = target JavaVM pc, val.l = constant   */
2631
2632                         x86_64_emit_if_lcc(cd, X86_64_CC_LE, src, iptr);
2633                         break;
2634
2635                 case ICMD_IF_LNE:       /* ..., value ==> ...                         */
2636                                         /* op1 = target JavaVM pc, val.l = constant   */
2637
2638                         x86_64_emit_if_lcc(cd, X86_64_CC_NE, src, iptr);
2639                         break;
2640
2641                 case ICMD_IF_LGT:       /* ..., value ==> ...                         */
2642                                         /* op1 = target JavaVM pc, val.l = constant   */
2643
2644                         x86_64_emit_if_lcc(cd, X86_64_CC_G, src, iptr);
2645                         break;
2646
2647                 case ICMD_IF_LGE:       /* ..., value ==> ...                         */
2648                                         /* op1 = target JavaVM pc, val.l = constant   */
2649
2650                         x86_64_emit_if_lcc(cd, X86_64_CC_GE, src, iptr);
2651                         break;
2652
2653                 case ICMD_IF_ICMPEQ:    /* ..., value, value ==> ...                  */
2654                                         /* op1 = target JavaVM pc                     */
2655
2656                         x86_64_emit_if_icmpcc(cd, X86_64_CC_E, src, iptr);
2657                         break;
2658
2659                 case ICMD_IF_LCMPEQ:    /* ..., value, value ==> ...                  */
2660                 case ICMD_IF_ACMPEQ:    /* op1 = target JavaVM pc                     */
2661
2662                         x86_64_emit_if_lcmpcc(cd, X86_64_CC_E, src, iptr);
2663                         break;
2664
2665                 case ICMD_IF_ICMPNE:    /* ..., value, value ==> ...                  */
2666                                         /* op1 = target JavaVM pc                     */
2667
2668                         x86_64_emit_if_icmpcc(cd, X86_64_CC_NE, src, iptr);
2669                         break;
2670
2671                 case ICMD_IF_LCMPNE:    /* ..., value, value ==> ...                  */
2672                 case ICMD_IF_ACMPNE:    /* op1 = target JavaVM pc                     */
2673
2674                         x86_64_emit_if_lcmpcc(cd, X86_64_CC_NE, src, iptr);
2675                         break;
2676
2677                 case ICMD_IF_ICMPLT:    /* ..., value, value ==> ...                  */
2678                                         /* op1 = target JavaVM pc                     */
2679
2680                         x86_64_emit_if_icmpcc(cd, X86_64_CC_L, src, iptr);
2681                         break;
2682
2683                 case ICMD_IF_LCMPLT:    /* ..., value, value ==> ...                  */
2684                                         /* op1 = target JavaVM pc                     */
2685
2686                         x86_64_emit_if_lcmpcc(cd, X86_64_CC_L, src, iptr);
2687                         break;
2688
2689                 case ICMD_IF_ICMPGT:    /* ..., value, value ==> ...                  */
2690                                         /* op1 = target JavaVM pc                     */
2691
2692                         x86_64_emit_if_icmpcc(cd, X86_64_CC_G, src, iptr);
2693                         break;
2694
2695                 case ICMD_IF_LCMPGT:    /* ..., value, value ==> ...                  */
2696                                         /* op1 = target JavaVM pc                     */
2697
2698                         x86_64_emit_if_lcmpcc(cd, X86_64_CC_G, src, iptr);
2699                         break;
2700
2701                 case ICMD_IF_ICMPLE:    /* ..., value, value ==> ...                  */
2702                                         /* op1 = target JavaVM pc                     */
2703
2704                         x86_64_emit_if_icmpcc(cd, X86_64_CC_LE, src, iptr);
2705                         break;
2706
2707                 case ICMD_IF_LCMPLE:    /* ..., value, value ==> ...                  */
2708                                         /* op1 = target JavaVM pc                     */
2709
2710                         x86_64_emit_if_lcmpcc(cd, X86_64_CC_LE, src, iptr);
2711                         break;
2712
2713                 case ICMD_IF_ICMPGE:    /* ..., value, value ==> ...                  */
2714                                         /* op1 = target JavaVM pc                     */
2715
2716                         x86_64_emit_if_icmpcc(cd, X86_64_CC_GE, src, iptr);
2717                         break;
2718
2719                 case ICMD_IF_LCMPGE:    /* ..., value, value ==> ...                  */
2720                                         /* op1 = target JavaVM pc                     */
2721
2722                         x86_64_emit_if_lcmpcc(cd, X86_64_CC_GE, src, iptr);
2723                         break;
2724
2725                 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST                           */
2726
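                /* The IFxx_ICONST opcodes are compiled branch-free: if an ELSE_ICONST   */
                /* follows, its constant is loaded into the destination first, the IF    */
                /* constant goes into ITMP2, and a conditional move selects between      */
                /* them after testing the value. If the source is allocated to the       */
                /* destination register it is copied to ITMP1 beforehand so the ELSE     */
                /* load does not clobber it.                                             */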
2727                 case ICMD_ELSE_ICONST:  /* handled by IFxx_ICONST                     */
2728                         break;
2729
2730                 case ICMD_IFEQ_ICONST:  /* ..., value ==> ..., constant               */
2731                                         /* val.i = constant                           */
2732
2733                         var_to_reg_int(s1, src, REG_ITMP1);
2734                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2735                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2736                                 if (s1 == d) {
2737                                         M_INTMOVE(s1, REG_ITMP1);
2738                                         s1 = REG_ITMP1;
2739                                 }
2740                                 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2741                         }
2742                         x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2743                         x86_64_testl_reg_reg(cd, s1, s1);
2744                         x86_64_cmovccl_reg_reg(cd, X86_64_CC_E, REG_ITMP2, d);
2745                         store_reg_to_var_int(iptr->dst, d);
2746                         break;
2747
2748                 case ICMD_IFNE_ICONST:  /* ..., value ==> ..., constant               */
2749                                         /* val.i = constant                           */
2750
2751                         var_to_reg_int(s1, src, REG_ITMP1);
2752                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2753                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2754                                 if (s1 == d) {
2755                                         M_INTMOVE(s1, REG_ITMP1);
2756                                         s1 = REG_ITMP1;
2757                                 }
2758                                 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2759                         }
2760                         x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2761                         x86_64_testl_reg_reg(cd, s1, s1);
2762                         x86_64_cmovccl_reg_reg(cd, X86_64_CC_NE, REG_ITMP2, d);
2763                         store_reg_to_var_int(iptr->dst, d);
2764                         break;
2765
2766                 case ICMD_IFLT_ICONST:  /* ..., value ==> ..., constant               */
2767                                         /* val.i = constant                           */
2768
2769                         var_to_reg_int(s1, src, REG_ITMP1);
2770                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2771                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2772                                 if (s1 == d) {
2773                                         M_INTMOVE(s1, REG_ITMP1);
2774                                         s1 = REG_ITMP1;
2775                                 }
2776                                 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2777                         }
2778                         x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2779                         x86_64_testl_reg_reg(cd, s1, s1);
2780                         x86_64_cmovccl_reg_reg(cd, X86_64_CC_L, REG_ITMP2, d);
2781                         store_reg_to_var_int(iptr->dst, d);
2782                         break;
2783
2784                 case ICMD_IFGE_ICONST:  /* ..., value ==> ..., constant               */
2785                                         /* val.i = constant                           */
2786
2787                         var_to_reg_int(s1, src, REG_ITMP1);
2788                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2789                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2790                                 if (s1 == d) {
2791                                         M_INTMOVE(s1, REG_ITMP1);
2792                                         s1 = REG_ITMP1;
2793                                 }
2794                                 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2795                         }
2796                         x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2797                         x86_64_testl_reg_reg(cd, s1, s1);
2798                         x86_64_cmovccl_reg_reg(cd, X86_64_CC_GE, REG_ITMP2, d);
2799                         store_reg_to_var_int(iptr->dst, d);
2800                         break;
2801
2802                 case ICMD_IFGT_ICONST:  /* ..., value ==> ..., constant               */
2803                                         /* val.i = constant                           */
2804
2805                         var_to_reg_int(s1, src, REG_ITMP1);
2806                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2807                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2808                                 if (s1 == d) {
2809                                         M_INTMOVE(s1, REG_ITMP1);
2810                                         s1 = REG_ITMP1;
2811                                 }
2812                                 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2813                         }
2814                         x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2815                         x86_64_testl_reg_reg(cd, s1, s1);
2816                         x86_64_cmovccl_reg_reg(cd, X86_64_CC_G, REG_ITMP2, d);
2817                         store_reg_to_var_int(iptr->dst, d);
2818                         break;
2819
2820                 case ICMD_IFLE_ICONST:  /* ..., value ==> ..., constant               */
2821                                         /* val.i = constant                           */
2822
2823                         var_to_reg_int(s1, src, REG_ITMP1);
2824                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
2825                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2826                                 if (s1 == d) {
2827                                         M_INTMOVE(s1, REG_ITMP1);
2828                                         s1 = REG_ITMP1;
2829                                 }
2830                                 x86_64_movl_imm_reg(cd, iptr[1].val.i, d);
2831                         }
2832                         x86_64_movl_imm_reg(cd, iptr->val.i, REG_ITMP2);
2833                         x86_64_testl_reg_reg(cd, s1, s1);
2834                         x86_64_cmovccl_reg_reg(cd, X86_64_CC_LE, REG_ITMP2, d);
2835                         store_reg_to_var_int(iptr->dst, d);
2836                         break;
2837
2838
2839                 case ICMD_IRETURN:      /* ..., retvalue ==> ...                      */
2840                 case ICMD_LRETURN:
2841                 case ICMD_ARETURN:
2842
2843                         var_to_reg_int(s1, src, REG_RESULT);
2844                         M_INTMOVE(s1, REG_RESULT);
2845
2846                         goto nowperformreturn;
2847
2848                 case ICMD_FRETURN:      /* ..., retvalue ==> ...                      */
2849                 case ICMD_DRETURN:
2850
2851                         var_to_reg_flt(s1, src, REG_FRESULT);
2852                         M_FLTMOVE(s1, REG_FRESULT);
2853
2854                         goto nowperformreturn;
2855
2856                 case ICMD_RETURN:      /* ...  ==> ...                                */
2857
2858 nowperformreturn:
2859                         {
2860                         s4 i, p;
2861                         
2862                         p = parentargs_base;
2863                         
2864                         /* call trace function */
2865                         if (runverbose) {
2866                                 x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
2867
2868                                 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
2869                                 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
2870
2871                                 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
2872                                 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
2873                                 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
2874                                 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
2875
2876                                 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
2877                                 x86_64_call_reg(cd, REG_ITMP1);
2878
2879                                 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
2880                                 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
2881
2882                                 x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
2883                         }
2884
2885 #if defined(USE_THREADS)
2886                         if (checksync && (m->flags & ACC_SYNCHRONIZED)) {
2887                                 x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, rd->argintregs[0]);
2888         
2889                                 /* we need to save the proper return value */
2890                                 switch (iptr->opc) {
2891                                 case ICMD_IRETURN:
2892                                 case ICMD_ARETURN:
2893                                 case ICMD_LRETURN:
2894                                         x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, rd->maxmemuse * 8);
2895                                         break;
2896                                 case ICMD_FRETURN:
2897                                 case ICMD_DRETURN:
2898                                         x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, rd->maxmemuse * 8);
2899                                         break;
2900                                 }
2901
2902                                 x86_64_mov_imm_reg(cd, (u8) builtin_monitorexit, REG_ITMP1);
2903                                 x86_64_call_reg(cd, REG_ITMP1);
2904
2905                                 /* and now restore the proper return value */
2906                                 switch (iptr->opc) {
2907                                 case ICMD_IRETURN:
2908                                 case ICMD_ARETURN:
2909                                 case ICMD_LRETURN:
2910                                         x86_64_mov_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_RESULT);
2911                                         break;
2912                                 case ICMD_FRETURN:
2913                                 case ICMD_DRETURN:
2914                                         x86_64_movq_membase_reg(cd, REG_SP, rd->maxmemuse * 8, REG_FRESULT);
2915                                         break;
2916                                 }
2917                         }
2918 #endif
2919
2920                         /* restore saved registers                                        */
2921                         for (i = rd->savintregcnt - 1; i >= rd->maxsavintreguse; i--) {
2922                                 p--; x86_64_mov_membase_reg(cd, REG_SP, p * 8, rd->savintregs[i]);
2923                         }
2924                         for (i = rd->savfltregcnt - 1; i >= rd->maxsavfltreguse; i--) {
2925                                 p--; x86_64_movq_membase_reg(cd, REG_SP, p * 8, rd->savfltregs[i]);
2926                         }
2927
2928                         /* deallocate stack                                               */
2929                         if (parentargs_base) {
2930                                 x86_64_alu_imm_reg(cd, X86_64_ADD, parentargs_base * 8, REG_SP);
2931                         }
2932
2933                         x86_64_ret(cd);
2934                         }
2935                         break;
2936
2937
2938                 case ICMD_TABLESWITCH:  /* ..., index ==> ...                         */
2939                         {
2940                                 s4 i, l, *s4ptr;
2941                                 void **tptr;
2942
2943                                 tptr = (void **) iptr->target;
2944
2945                                 s4ptr = iptr->val.a;
2946                                 l = s4ptr[1];                          /* low     */
2947                                 i = s4ptr[2];                          /* high    */
2948
2949                                 var_to_reg_int(s1, src, REG_ITMP1);
2950                                 M_INTMOVE(s1, REG_ITMP1);
2951                                 if (l != 0) {
2952                                         x86_64_alul_imm_reg(cd, X86_64_SUB, l, REG_ITMP1);
2953                                 }
2954                                 i = i - l + 1;
2955
2956                 /* range check: one unsigned compare catches index < low and index > high */
2957                                 x86_64_alul_imm_reg(cd, X86_64_CMP, i - 1, REG_ITMP1);
2958                                 x86_64_jcc(cd, X86_64_CC_A, 0);
2959
2960                 /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[0]), cd->mcodeptr); */
2961                                 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
2962
2963                                 /* build jump table top down and use address of lowest entry */
2964
2965                 /* s4ptr += 3 + i; */
2966                                 tptr += i;
2967
2968                                 while (--i >= 0) {
2969                                         /* dseg_addtarget(cd, BlockPtrOfPC(*--s4ptr)); */
2970                                         dseg_addtarget(cd, (basicblock *) tptr[0]); 
2971                                         --tptr;
2972                                 }
2973
2974                                 /* length of dataseg after last dseg_addtarget is used by load */
2975
2976                                 x86_64_mov_imm_reg(cd, 0, REG_ITMP2);
2977                                 dseg_adddata(cd, cd->mcodeptr);
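                                /* dseg_adddata records this position so that the immediate    */
                                /* loaded above is later patched with the base address used    */
                                /* for data segment references; -(dseglen) is then the         */
                                /* (negative) offset of the lowest table entry, and the        */
                                /* scaled load plus indirect jump dispatch to the selected     */
                                /* block.                                                      */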
2978                                 x86_64_mov_memindex_reg(cd, -(cd->dseglen), REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
2979                                 x86_64_jmp_reg(cd, REG_ITMP1);
2980                         }
2981                         break;
2982
2983
2984                 case ICMD_LOOKUPSWITCH: /* ..., key ==> ...                           */
2985                         {
2986                                 s4 i, l, val, *s4ptr;
2987                                 void **tptr;
2988
2989                                 tptr = (void **) iptr->target;
2990
2991                                 s4ptr = iptr->val.a;
2992                                 l = s4ptr[0];                          /* default  */
2993                                 i = s4ptr[1];                          /* count    */
2994                         
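                                /* A lookupswitch is compiled as a linear sequence of          */
                                /* compare-and-branch pairs followed by an unconditional       */
                                /* jump to the default target.                                 */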
2995                                 MCODECHECK((i<<2)+8);
2996                                 var_to_reg_int(s1, src, REG_ITMP1);    /* reg compare should always be faster */
2997                                 while (--i >= 0) {
2998                                         s4ptr += 2;
2999                                         ++tptr;
3000
3001                                         val = s4ptr[0];
3002                                         x86_64_alul_imm_reg(cd, X86_64_CMP, val, s1);
3003                                         x86_64_jcc(cd, X86_64_CC_E, 0);
3004                                         /* codegen_addreference(cd, BlockPtrOfPC(s4ptr[1]), cd->mcodeptr); */
3005                                         codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr); 
3006                                 }
3007
3008                                 x86_64_jmp_imm(cd, 0);
3009                                 /* codegen_addreference(cd, BlockPtrOfPC(l), cd->mcodeptr); */
3010                         
3011                                 tptr = (void **) iptr->target;
3012                                 codegen_addreference(cd, (basicblock *) tptr[0], cd->mcodeptr);
3013                         }
3014                         break;
3015
3016
3017                 case ICMD_BUILTIN3:     /* ..., arg1, arg2, arg3 ==> ...              */
3018                                         /* op1 = return type, val.a = function pointer*/
3019                         s3 = 3;
3020                         goto gen_method;
3021
3022                 case ICMD_BUILTIN2:     /* ..., arg1, arg2 ==> ...                    */
3023                                         /* op1 = return type, val.a = function pointer*/
3024                         s3 = 2;
3025                         goto gen_method;
3026
3027                 case ICMD_BUILTIN1:     /* ..., arg1 ==> ...                          */
3028                                         /* op1 = return type, val.a = function pointer*/
3029                         s3 = 1;
3030                         goto gen_method;
3031
3032                 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ...            */
3033                                         /* op1 = arg count, val.a = method pointer    */
3034
3035                 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3036                 case ICMD_INVOKEVIRTUAL:/* op1 = arg count, val.a = method pointer    */
3037                 case ICMD_INVOKEINTERFACE:
3038
3039                         s3 = iptr->op1;
3040
3041 gen_method: {
3042                         methodinfo *lm;
3043                         classinfo  *ci;
3044                         stackptr    tmpsrc;
3045                         s4          iarg;
3046                         s4          farg;
3047
3048                         MCODECHECK((s3 << 1) + 64);
3049
3050                         /* copy arguments to registers or stack location ******************/
3051
3052                         /* count integer and float arguments */
3053
3054                         iarg = 0;
3055                         farg = 0;
3056
3057                         for (s2 = s3, tmpsrc = src; --s2 >= 0; tmpsrc = tmpsrc->prev) {
3058                                 IS_INT_LNG_TYPE(tmpsrc->type) ? iarg++ : farg++;
3059                         }
3060
3061                         /* calculate amount of arguments to be on stack */
3062
3063                         s2 = ((iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0) +
3064                                 ((farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0);
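                        /* s2 now holds the number of 8 byte stack slots needed for the   */
                        /* overflow arguments; the loop below decrements it for every     */
                        /* overflow argument, so after the decrement it names the slot    */
                        /* the argument is stored in (ARGVAR arguments need no copy and   */
                        /* are only counted).                                             */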
3065
3066                         for (; --s3 >= 0; src = src->prev) {
3067                                 /* decrement the current argument type */
3068                                 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
3069
3070                                 if (src->varkind == ARGVAR) {
3071                                         if (IS_INT_LNG_TYPE(src->type)) {
3072                                                 if (iarg >= INT_ARG_CNT) {
3073                                                         s2--;
3074                                                 }
3075                                         } else {
3076                                                 if (farg >= FLT_ARG_CNT) {
3077                                                         s2--;
3078                                                 }
3079                                         }
3080                                         continue;
3081                                 }
3082
3083                                 if (IS_INT_LNG_TYPE(src->type)) {
3084                                         if (iarg < INT_ARG_CNT) {
3085                                                 s1 = rd->argintregs[iarg];
3086                                                 var_to_reg_int(d, src, s1);
3087                                                 M_INTMOVE(d, s1);
3088
3089                                         } else {
3090                                                 var_to_reg_int(d, src, REG_ITMP1);
3091                                                 s2--;
3092                                                 x86_64_mov_reg_membase(cd, d, REG_SP, s2 * 8);
3093                                         }
3094
3095                                 } else {
3096                                         if (farg < FLT_ARG_CNT) {
3097                                                 s1 = rd->argfltregs[farg];
3098                                                 var_to_reg_flt(d, src, s1);
3099                                                 M_FLTMOVE(d, s1);
3100
3101                                         } else {
3102                                                 var_to_reg_flt(d, src, REG_FTMP1);
3103                                                 s2--;
3104                                                 x86_64_movq_reg_membase(cd, d, REG_SP, s2 * 8);
3105                                         }
3106                                 }
3107                         } /* end of for */
3108
3109                         lm = iptr->val.a;
3110                         switch (iptr->opc) {
3111                         case ICMD_BUILTIN3:
3112                         case ICMD_BUILTIN2:
3113                         case ICMD_BUILTIN1:
3114                                 a = (ptrint) lm;
3115                                 d = iptr->op1;
3116
3117                                 if (a == (ptrint) BUILTIN_new)
3118                                         a = (ptrint) asm_builtin_new;
3119
3120
3121                                 x86_64_mov_imm_reg(cd, a, REG_ITMP1);
3122                                 x86_64_call_reg(cd, REG_ITMP1);
3123                                 break;
3124
3125                         case ICMD_INVOKESTATIC:
3126                                 a = (ptrint) lm->stubroutine;
3127                                 d = lm->returntype;
3128
3129                                 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3130                                 x86_64_call_reg(cd, REG_ITMP2);
3131                                 break;
3132
3133                         case ICMD_INVOKESPECIAL:
3134                                 /* methodinfo* is not resolved, call the assembler function */
3135
3136                                 if (!lm) {
3137                                         unresolved_method *um = iptr->target;
3138
3139                                         codegen_addpatchref(cd, cd->mcodeptr, asm_invokespecial, um);
3140
3141 /*                                      if (showdisassemble) { */
3142 /*                                              x86_64_nop(cd); x86_64_nop(cd); x86_64_nop(cd); */
3143 /*                                              x86_64_nop(cd); x86_64_nop(cd); */
3144 /*                                      } */
3145
3146                                         a = 0;
3147                                         d = um->methodref->parseddesc.md->returntype.type;
3148
3149                                 } else {
3150                                         a = (ptrint) lm->stubroutine;
3151                                         d = lm->parseddesc->returntype.type;
3152                                 }
3153
3154                                 gen_nullptr_check(rd->argintregs[0]);    /* first argument contains the object pointer */
3155                                 x86_64_mov_membase_reg(cd, rd->argintregs[0], 0, REG_ITMP2); /* touch memory to trigger a hardware null-pointer exception */
3156                                 x86_64_mov_imm_reg(cd, a, REG_ITMP2);
3157                                 x86_64_call_reg(cd, REG_ITMP2);
3158                                 break;
3159
3160                         case ICMD_INVOKEVIRTUAL:
3161                                 d = lm->returntype;
3162
3163                                 gen_nullptr_check(rd->argintregs[0]);
3164                                 x86_64_mov_membase_reg(cd, rd->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3165                                 x86_64_mov_membase32_reg(cd, REG_ITMP2, OFFSET(vftbl_t, table[0]) + sizeof(methodptr) * lm->vftblindex, REG_ITMP1);
3166                                 x86_64_call_reg(cd, REG_ITMP1);
3167                                 break;
3168
3169                         case ICMD_INVOKEINTERFACE:
3170                                 ci = lm->class;
3171                                 d = lm->returntype;
3172
3173                                 gen_nullptr_check(rd->argintregs[0]);
3174                                 x86_64_mov_membase_reg(cd, rd->argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3175                                 x86_64_mov_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, interfacetable[0]) - sizeof(methodptr) * ci->index, REG_ITMP2);
3176                                 x86_64_mov_membase32_reg(cd, REG_ITMP2, sizeof(methodptr) * (lm - ci->methods), REG_ITMP1);
3177                                 x86_64_call_reg(cd, REG_ITMP1);
3178                                 break;
3179                         }
3180
3181                         /* d contains return type */
3182
3183                         if (d != TYPE_VOID) {
3184                                 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3185                                         s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3186                                         M_INTMOVE(REG_RESULT, s1);
3187                                         store_reg_to_var_int(iptr->dst, s1);
3188
3189                                 } else {
3190                                         s1 = reg_of_var(rd, iptr->dst, REG_FRESULT);
3191                                         M_FLTMOVE(REG_FRESULT, s1);
3192                                         store_reg_to_var_flt(iptr->dst, s1);
3193                                 }
3194                         }
3195                         }
3196                         break;
3197
3198
3199                 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult            */
3200
3201                                       /* op1:   0 == array, 1 == class                */
3202                                       /* val.a: (classinfo*) superclass               */
3203
3204 /*          superclass is an interface:
3205  *
3206  *          return (sub != NULL) &&
3207  *                 (sub->vftbl->interfacetablelength > super->index) &&
3208  *                 (sub->vftbl->interfacetable[-super->index] != NULL);
3209  *
3210  *          superclass is a class:
3211  *
3212  *          return ((sub != NULL) && (0
3213  *                  <= (sub->vftbl->baseval - super->vftbl->baseval) <=
 3214  *                  super->vftbl->diffval));
3215  */
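/*          Worked example of the class case (relative numbering scheme,
 *          values are illustrative only): with super->vftbl->baseval == 5
 *          and diffval == 3 every subtype has a baseval in [5, 8], so the
 *          unsigned comparison (sub_baseval - 5) <= 3 decides the test in
 *          constant time.
 */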
3216
3217                         {
3218                         classinfo *super = (classinfo *) iptr->val.a;
3219                         
3220 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3221             codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3222 #endif
3223
3224                         var_to_reg_int(s1, src, REG_ITMP1);
3225                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3226                         if (s1 == d) {
3227                                 M_INTMOVE(s1, REG_ITMP1);
3228                                 s1 = REG_ITMP1;
3229                         }
3230                         x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3231                         if (iptr->op1) {                               /* class/interface */
3232                                 if (super->flags & ACC_INTERFACE) {        /* interface       */
3233                                         x86_64_test_reg_reg(cd, s1, s1);
3234
3235                                         /* TODO: clean up this calculation */
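                                        /* a accumulates the encoded byte length of the     */
                                        /* instructions emitted below, so the conditional   */
                                        /* jump can skip them when the object reference is  */
                                        /* NULL                                             */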
3236                                         a = 3;    /* mov_membase_reg */
3237                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3238
3239                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3240                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength));
3241                                         
3242                                         a += 3;    /* sub */
3243                                         CALCIMMEDIATEBYTES(a, super->index);
3244                                         
3245                                         a += 3;    /* test */
3246
3247                                         a += 6;    /* jcc */
3248                                         a += 3;    /* mov_membase_reg */
3249                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3250
3251                                         a += 3;    /* test */
3252                                         a += 4;    /* setcc */
3253
3254                                         x86_64_jcc(cd, X86_64_CC_E, a);
3255
3256                                         x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3257                                         x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetablelength), REG_ITMP2);
3258                                         x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP2);
3259                                         x86_64_test_reg_reg(cd, REG_ITMP2, REG_ITMP2);
3260
3261                                         /* TODO: clean up this calculation */
3262                                         a = 0;
3263                                         a += 3;    /* mov_membase_reg */
3264                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3265
3266                                         a += 3;    /* test */
3267                                         a += 4;    /* setcc */
3268
3269                                         x86_64_jcc(cd, X86_64_CC_LE, a);
3270                                         x86_64_mov_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP1);
3271                                         x86_64_test_reg_reg(cd, REG_ITMP1, REG_ITMP1);
3272                                         x86_64_setcc_reg(cd, X86_64_CC_NE, d);
3273
3274                                 } else {                                   /* class           */
3275                                         x86_64_test_reg_reg(cd, s1, s1);
3276
3277                                         /* TODO: clean up this calculation */
3278                                         a = 3;    /* mov_membase_reg */
3279                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3280
3281                                         a += 10;   /* mov_imm_reg */
3282
3283                                         a += 2;    /* movl_membase_reg - only if REG_ITMP1 == RAX */
3284                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl_t, baseval));
3285                                         
3286                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3287                                         CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3288                                         
3289                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3290                                         CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, diffval));
3291                                         
3292                                         a += 3;    /* sub */
3293                                         a += 3;    /* xor */
3294                                         a += 3;    /* cmp */
3295                                         a += 4;    /* setcc */
3296
3297                                         x86_64_jcc(cd, X86_64_CC_E, a);
3298
3299                                         x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3300                                         x86_64_mov_imm_reg(cd, (ptrint) super->vftbl, REG_ITMP2);
3301 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3302                                         codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3303 #endif
3304                                         x86_64_movl_membase_reg(cd, REG_ITMP1, OFFSET(vftbl_t, baseval), REG_ITMP1);
3305                                         x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP3);
3306                                         x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, diffval), REG_ITMP2);
3307 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3308                     codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3309 #endif
3310                                         x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP1);
3311                                         x86_64_alu_reg_reg(cd, X86_64_XOR, d, d);
3312                                         x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP2, REG_ITMP1);
3313                                         x86_64_setcc_reg(cd, X86_64_CC_BE, d);
3314                                 }
3315                         }
3316                         else
3317                                 panic("internal error: no inlined array instanceof");
3318                         }
3319                         store_reg_to_var_int(iptr->dst, d);
3320                         break;
3321
3322                 case ICMD_CHECKCAST:  /* ..., objectref ==> ..., objectref            */
3323
3324                                       /* op1:   0 == array, 1 == class                */
3325                                       /* val.a: (classinfo*) superclass               */
3326
3327                         /*  superclass is an interface:
3328                          *      
3329                          *  OK if ((sub == NULL) ||
3330                          *         (sub->vftbl->interfacetablelength > super->index) &&
3331                          *         (sub->vftbl->interfacetable[-super->index] != NULL));
3332                          *      
3333                          *  superclass is a class:
3334                          *      
3335                          *  OK if ((sub == NULL) || (0
3336                          *         <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3337                          *         super->vftbl->diffval));
3338                          */
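                        /* Same subtype tests as ICMD_INSTANCEOF above, but instead of      */
                        /* producing a boolean result a failing test branches to a cast     */
                        /* check stub (codegen_addxcastrefs) which raises a                 */
                        /* ClassCastException.                                              */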
3339
3340                         {
3341                         classinfo *super = (classinfo *) iptr->val.a;
3342                         
3343 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3344             codegen_threadcritrestart(cd, cd->mcodeptr - cd->mcodebase);
3345 #endif
3346                         var_to_reg_int(s1, src, REG_ITMP1);
3347                         if (iptr->op1) {                               /* class/interface */
3348                                 if (super->flags & ACC_INTERFACE) {        /* interface       */
3349                                         x86_64_test_reg_reg(cd, s1, s1);
3350
3351                                         /* TODO: clean up this calculation */
3352                                         a = 3;    /* mov_membase_reg */
3353                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3354
3355                                         a += 3;    /* movl_membase_reg - if REG_ITMP3 == R10 */
3356                                         CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, interfacetablelength));
3357
3358                                         a += 3;    /* sub */
3359                                         CALCIMMEDIATEBYTES(a, super->index);
3360
3361                                         a += 3;    /* test */
3362                                         a += 6;    /* jcc */
3363
3364                                         a += 3;    /* mov_membase_reg */
3365                                         CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*));
3366
3367                                         a += 3;    /* test */
3368                                         a += 6;    /* jcc */
3369
3370                                         x86_64_jcc(cd, X86_64_CC_E, a);
3371
3372                                         x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP2);
3373                                         x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, interfacetablelength), REG_ITMP3);
3374                                         x86_64_alu_imm_reg(cd, X86_64_SUB, super->index, REG_ITMP3);
3375                                         x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3376                                         x86_64_jcc(cd, X86_64_CC_LE, 0);
3377                                         codegen_addxcastrefs(cd, cd->mcodeptr);
3378                                         x86_64_mov_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP3);
3379                                         x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
3380                                         x86_64_jcc(cd, X86_64_CC_E, 0);
3381                                         codegen_addxcastrefs(cd, cd->mcodeptr);
3382
3383                                 } else {                                   /* class           */
3384                                         x86_64_test_reg_reg(cd, s1, s1);
3385
3386                                         /* TODO: clean up this calculation */
3387                                         a = 3;     /* mov_membase_reg */
3388                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3389                                         a += 10;   /* mov_imm_reg */
3390                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3391                                         CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl_t, baseval));
3392
3393                                         if (s1 != REG_ITMP1) {
3394                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP3 == R11 */
3395                                                 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3396                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP3 == R11 */
3397                                                 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3398                                                 a += 3;    /* sub */
3399
3400                                         } else {
3401                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP3 == R11 */
3402                                                 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, baseval));
3403                                                 a += 3;    /* sub */
3404                                                 a += 10;   /* mov_imm_reg */
3405                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP3 == R11 */
3406                                                 CALCOFFSETBYTES(a, REG_ITMP3, OFFSET(vftbl_t, diffval));
3407                                         }
3408
3409                                         a += 3;    /* cmp */
3410                                         a += 6;    /* jcc */
3411
3412                                         x86_64_jcc(cd, X86_64_CC_E, a);
3413
3414                                         x86_64_mov_membase_reg(cd, s1, OFFSET(java_objectheader, vftbl), REG_ITMP2);
3415                                         x86_64_mov_imm_reg(cd, (ptrint) super->vftbl, REG_ITMP3);
3416 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3417                                         codegen_threadcritstart(cd, cd->mcodeptr - cd->mcodebase);
3418 #endif
3419                                         x86_64_movl_membase_reg(cd, REG_ITMP2, OFFSET(vftbl_t, baseval), REG_ITMP2);
3420                                         if (s1 != REG_ITMP1) {
3421                                                 x86_64_movl_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP1);
3422                                                 x86_64_movl_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3);
3423 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3424                                                 codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3425 #endif
3426                                                 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP1, REG_ITMP2);
3427
3428                                         } else {
3429                                                 x86_64_movl_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, baseval), REG_ITMP3);
3430                                                 x86_64_alu_reg_reg(cd, X86_64_SUB, REG_ITMP3, REG_ITMP2);
3431                                                 x86_64_mov_imm_reg(cd, (ptrint) super->vftbl, REG_ITMP3);
3432                                                 x86_64_movl_membase_reg(cd, REG_ITMP3, OFFSET(vftbl_t, diffval), REG_ITMP3);
3433                                         }
3434 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3435                                         codegen_threadcritstop(cd, cd->mcodeptr - cd->mcodebase);
3436 #endif
3437                                         x86_64_alu_reg_reg(cd, X86_64_CMP, REG_ITMP3, REG_ITMP2);
3438                                         x86_64_jcc(cd, X86_64_CC_A, 0);    /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3439                                         codegen_addxcastrefs(cd, cd->mcodeptr);
3440                                 }
3441
3442                         } else
3443                                 panic("internal error: no inlined array checkcast");
3444                         }
3445                         d = reg_of_var(rd, iptr->dst, REG_ITMP3);
3446                         M_INTMOVE(s1, d);
3447                         store_reg_to_var_int(iptr->dst, d);
3448 /*                      if (iptr->dst->flags & INMEMORY) { */
3449 /*                              x86_64_mov_reg_membase(cd, s1, REG_SP, iptr->dst->regoff * 8); */
3450 /*                      } else { */
3451 /*                              M_INTMOVE(s1, iptr->dst->regoff); */
3452 /*                      } */
3453                         break;
3454
3455                 case ICMD_CHECKASIZE:  /* ..., size ==> ..., size                     */
3456
3457                         if (src->flags & INMEMORY) {
3458                                 x86_64_alul_imm_membase(cd, X86_64_CMP, 0, REG_SP, src->regoff * 8);
3459                                 
3460                         } else {
3461                                 x86_64_testl_reg_reg(cd, src->regoff, src->regoff);
3462                         }
3463                         x86_64_jcc(cd, X86_64_CC_L, 0);
3464                         codegen_addxcheckarefs(cd, cd->mcodeptr);
3465                         break;
3466
3467                 case ICMD_CHECKEXCEPTION:    /* ... ==> ...                           */
3468
3469                         x86_64_test_reg_reg(cd, REG_RESULT, REG_RESULT);
3470                         x86_64_jcc(cd, X86_64_CC_E, 0);
3471                         codegen_addxexceptionrefs(cd, cd->mcodeptr);
3472                         break;
3473
3474                 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref  */
3475                                          /* op1 = dimension, val.a = array descriptor */
3476
3477                         /* check for negative sizes and copy sizes to stack if necessary  */
3478
3479                         MCODECHECK((iptr->op1 << 1) + 64);
3480
3481                         for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3482                                 var_to_reg_int(s2, src, REG_ITMP1);
3483                                 x86_64_testl_reg_reg(cd, s2, s2);
3484                                 x86_64_jcc(cd, X86_64_CC_L, 0);
3485                                 codegen_addxcheckarefs(cd, cd->mcodeptr);
3486
3487                                 /* copy SAVEDVAR sizes to stack */
3488
3489                                 if (src->varkind != ARGVAR) {
3490                                         x86_64_mov_reg_membase(cd, s2, REG_SP, s1 * 8);
3491                                 }
3492                         }
3493
3494                         /* a0 = dimension count */
3495                         x86_64_mov_imm_reg(cd, iptr->op1, rd->argintregs[0]);
3496
3497                         /* a1 = arraydescriptor */
3498                         x86_64_mov_imm_reg(cd, (u8) iptr->val.a, rd->argintregs[1]);
3499
3500                         /* a2 = pointer to dimensions = stack pointer */
3501                         x86_64_mov_reg_reg(cd, REG_SP, rd->argintregs[2]);
3502
3503                         x86_64_mov_imm_reg(cd, (u8) builtin_nmultianewarray, REG_ITMP1);
3504                         x86_64_call_reg(cd, REG_ITMP1);
3505
3506                         s1 = reg_of_var(rd, iptr->dst, REG_RESULT);
3507                         M_INTMOVE(REG_RESULT, s1);
3508                         store_reg_to_var_int(iptr->dst, s1);
3509                         break;
3510
3511                 default:
3512                         throw_cacao_exception_exit(string_java_lang_InternalError,
3513                                                                            "Unknown ICMD %d", iptr->opc);
3514         } /* switch */
3515                 
3516         } /* for instruction */
3517                 
3518         /* copy values to interface registers */
3519
3520         src = bptr->outstack;
3521         len = bptr->outdepth;
3522         MCODECHECK(64 + len);
3523 #ifdef LSRA
3524         if (!opt_lsra)
3525 #endif
3526         while (src) {
3527                 len--;
3528                 if (src->varkind != STACKVAR) {
3529                         s2 = src->type;
3530                         if (IS_FLT_DBL_TYPE(s2)) {
3531                                 var_to_reg_flt(s1, src, REG_FTMP1);
3532                                 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3533                                         M_FLTMOVE(s1, rd->interfaces[len][s2].regoff);
3534
3535                                 } else {
3536                                         x86_64_movq_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3537                                 }
3538
3539                         } else {
3540                                 var_to_reg_int(s1, src, REG_ITMP1);
3541                                 if (!(rd->interfaces[len][s2].flags & INMEMORY)) {
3542                                         M_INTMOVE(s1, rd->interfaces[len][s2].regoff);
3543
3544                                 } else {
3545                                         x86_64_mov_reg_membase(cd, s1, REG_SP, rd->interfaces[len][s2].regoff * 8);
3546                                 }
3547                         }
3548                 }
3549                 src = src->prev;
3550         }
3551         } /* if (bptr -> flags >= BBREACHED) */
3552         } /* for basic block */
3553
3554         {
3555
3556         /* generate bound check stubs */
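        /* All exception stubs below follow the same pattern: materialize the  */
        /* faulting PC in REG_ITMP2_XPC (code base patched in via dseg_adddata */
        /* plus the branch offset), build the exception object via a helper    */
        /* call and jump to asm_handle_exception.  Only the first stub emits   */
        /* the full body; later references reuse it via a jump.                */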
3557
3558         u1 *xcodeptr = NULL;
3559         branchref *bref;
3560
3561         for (bref = cd->xboundrefs; bref != NULL; bref = bref->next) {
3562                 gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3563                                   bref->branchpos,
3564                                                   cd->mcodeptr - cd->mcodebase);
3565
3566                 MCODECHECK(100);
3567
3568                 /* move index register into REG_ITMP1 */
3569                 x86_64_mov_reg_reg(cd, bref->reg, REG_ITMP1);             /* 3 bytes  */
3570
3571                 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC);                 /* 10 bytes */
3572                 dseg_adddata(cd, cd->mcodeptr);
3573                 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3);   /* 10 bytes */
3574                 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes  */
3575
3576                 if (xcodeptr != NULL) {
3577                         x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3578
3579                 } else {
3580                         xcodeptr = cd->mcodeptr;
3581
3582                         x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3583                         x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3584
3585                         x86_64_mov_reg_reg(cd, REG_ITMP1, rd->argintregs[0]);
3586                         x86_64_mov_imm_reg(cd, (u8) new_arrayindexoutofboundsexception, REG_ITMP3);
3587                         x86_64_call_reg(cd, REG_ITMP3);
3588
3589                         x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3590                         x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3591
3592                         x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3593                         x86_64_jmp_reg(cd, REG_ITMP3);
3594                 }
3595         }
3596
3597         /* generate negative array size check stubs */
3598
3599         xcodeptr = NULL;
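        /* When the method has no exception table the exact XPC is irrelevant, */
        /* so later branches may reuse the complete first stub; the constant   */
        /* (10 + 10 + 3) is the byte length of its mov/mov/add XPC prologue    */
        /* (see the size comments on those instructions above).                */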
3600         
3601         for (bref = cd->xcheckarefs; bref != NULL; bref = bref->next) {
3602                 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3603                         gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3604                                                           bref->branchpos,
3605                                                           xcodeptr - cd->mcodebase - (10 + 10 + 3));
3606                         continue;
3607                 }
3608
3609                 gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3610                                   bref->branchpos,
3611                                                   cd->mcodeptr - cd->mcodebase);
3612
3613                 MCODECHECK(100);
3614
3615                 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC);                 /* 10 bytes */
3616                 dseg_adddata(cd, cd->mcodeptr);
3617                 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3);   /* 10 bytes */
3618                 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes  */
3619
3620                 if (xcodeptr != NULL) {
3621                         x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3622
3623                 } else {
3624                         xcodeptr = cd->mcodeptr;
3625
3626                         x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3627                         x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3628
3629                         x86_64_mov_imm_reg(cd, (u8) new_negativearraysizeexception, REG_ITMP3);
3630                         x86_64_call_reg(cd, REG_ITMP3);
3631
3632                         x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3633                         x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3634
3635                         x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3636                         x86_64_jmp_reg(cd, REG_ITMP3);
3637                 }
3638         }
3639
3640         /* generate cast check stubs */
3641
3642         xcodeptr = NULL;
3643         
3644         for (bref = cd->xcastrefs; bref != NULL; bref = bref->next) {
3645                 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3646                         gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3647                                                           bref->branchpos,
3648                                                           xcodeptr - cd->mcodebase - (10 + 10 + 3));
3649                         continue;
3650                 }
3651
3652                 gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3653                                   bref->branchpos,
3654                                                   cd->mcodeptr - cd->mcodebase);
3655
3656                 MCODECHECK(100);
3657
3658                 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC);                 /* 10 bytes */
3659                 dseg_adddata(cd, cd->mcodeptr);
3660                 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3);   /* 10 bytes */
3661                 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes  */
3662
3663                 if (xcodeptr != NULL) {
3664                         x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3665                 
3666                 } else {
3667                         xcodeptr = cd->mcodeptr;
3668
3669                         x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3670                         x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3671
3672                         x86_64_mov_imm_reg(cd, (u8) new_classcastexception, REG_ITMP3);
3673                         x86_64_call_reg(cd, REG_ITMP3);
3674
3675                         x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3676                         x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3677
3678                         x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3679                         x86_64_jmp_reg(cd, REG_ITMP3);
3680                 }
3681         }
3682
3683         /* generate divide by zero check stubs */
3684
3685         xcodeptr = NULL;
3686         
3687         for (bref = cd->xdivrefs; bref != NULL; bref = bref->next) {
3688                 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3689                         gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3690                                                           bref->branchpos,
3691                                                           xcodeptr - cd->mcodebase - (10 + 10 + 3));
3692                         continue;
3693                 }
3694
3695                 gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3696                                   bref->branchpos,
3697                                                   cd->mcodeptr - cd->mcodebase);
3698
3699                 MCODECHECK(100);
3700
3701                 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC);                 /* 10 bytes */
3702                 dseg_adddata(cd, cd->mcodeptr);
3703                 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP3);   /* 10 bytes */
3704                 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC); /* 3 bytes  */
3705
3706                 if (xcodeptr != NULL) {
3707                         x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3708                 
3709                 } else {
3710                         xcodeptr = cd->mcodeptr;
3711
3712                         x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3713                         x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3714
3715                         x86_64_mov_imm_reg(cd, (u8) new_arithmeticexception, REG_ITMP3);
3716                         x86_64_call_reg(cd, REG_ITMP3);
3717
3718                         x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3719                         x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3720
3721                         x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3722                         x86_64_jmp_reg(cd, REG_ITMP3);
3723                 }
3724         }
3725
3726         /* generate exception check stubs */
3727
3728         xcodeptr = NULL;
3729         
3730         for (bref = cd->xexceptionrefs; bref != NULL; bref = bref->next) {
3731                 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3732                         gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3733                                                           bref->branchpos,
3734                                                           xcodeptr - cd->mcodebase - (10 + 10 + 3));
3735                         continue;
3736                 }
3737
3738                 gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3739                                   bref->branchpos,
3740                                                   cd->mcodeptr - cd->mcodebase);
3741
3742                 MCODECHECK(100);
3743
3744                 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC);                 /* 10 bytes */
3745                 dseg_adddata(cd, cd->mcodeptr);
3746                 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1);   /* 10 bytes */
3747                 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes  */
3748
3749                 if (xcodeptr != NULL) {
3750                         x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3751                 
3752                 } else {
3753                         xcodeptr = cd->mcodeptr;
3754
3755 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3756                         x86_64_alu_imm_reg(cd, X86_64_SUB, 8, REG_SP);
3757                         x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0);
3758                         x86_64_mov_imm_reg(cd, (u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3759                         x86_64_call_reg(cd, REG_ITMP1);
3760                         x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
3761                         x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
3762                         x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
3763                         x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);
3764                         x86_64_alu_imm_reg(cd, X86_64_ADD, 8, REG_SP);
3765 #else
3766                         x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
3767                         x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP1_XPTR);
3768                         x86_64_mov_imm_membase(cd, 0, REG_ITMP3, 0);
3769 #endif
3770
3771                         x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3772                         x86_64_jmp_reg(cd, REG_ITMP3);
3773                 }
3774         }
3775
3776         /* generate null pointer check stubs */
3777
3778         xcodeptr = NULL;
3779         
3780         for (bref = cd->xnullrefs; bref != NULL; bref = bref->next) {
3781                 if ((cd->exceptiontablelength == 0) && (xcodeptr != NULL)) {
3782                         gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3783                                                           bref->branchpos,
3784                                                           xcodeptr - cd->mcodebase - (10 + 10 + 3));
3785                         continue;
3786                 }
3787
3788                 gen_resolvebranch(cd->mcodebase + bref->branchpos, 
3789                                   bref->branchpos,
3790                                                   cd->mcodeptr - cd->mcodebase);
3791
3792                 MCODECHECK(100);
3793
3794                 x86_64_mov_imm_reg(cd, 0, REG_ITMP2_XPC);                 /* 10 bytes */
3795                 dseg_adddata(cd, cd->mcodeptr);
3796                 x86_64_mov_imm_reg(cd, bref->branchpos - 6, REG_ITMP1);   /* 10 bytes */
3797                 x86_64_alu_reg_reg(cd, X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC); /* 3 bytes  */
3798
3799                 if (xcodeptr != NULL) {
3800                         x86_64_jmp_imm(cd, xcodeptr - cd->mcodeptr - 5);
3801                 
3802                 } else {
3803                         xcodeptr = cd->mcodeptr;
3804
3805                         x86_64_alu_imm_reg(cd, X86_64_SUB, 2 * 8, REG_SP);
3806                         x86_64_mov_reg_membase(cd, REG_ITMP2_XPC, REG_SP, 0 * 8);
3807
3808                         x86_64_mov_imm_reg(cd, (u8) new_nullpointerexception, REG_ITMP3);
3809                         x86_64_call_reg(cd, REG_ITMP3);
3810
3811                         x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_ITMP2_XPC);
3812                         x86_64_alu_imm_reg(cd, X86_64_ADD, 2 * 8, REG_SP);
3813
3814                         x86_64_mov_imm_reg(cd, (u8) asm_handle_exception, REG_ITMP3);
3815                         x86_64_jmp_reg(cd, REG_ITMP3);
3816                 }
3817         }
3818
3819         /* generate put/getstatic stub call code */
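        /* For every patch reference the original code position is overwritten */
        /* with a `call rel32' into the stub below; the stub loads the patch   */
        /* reference and the 8 saved code bytes into registers and jumps to    */
        /* the asm wrapper (pref->asmwrapper), which resolves the target and   */
        /* writes the saved bytes back.                                        */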
3820
3821         {
3822                 patchref    *pref;
3823                 codegendata *tmpcd;
3824                 u8           mcode;
3825
3826                 tmpcd = DNEW(codegendata);
3827
3828                 for (pref = cd->patchrefs; pref != NULL; pref = pref->next) {
3829                         /* Get the machine code which is patched back in later. The      */
3830                         /* patched-in `call rel32' is 5 bytes long, but we read (and     */
3831                         /* later restore) 8 bytes.                                       */
3831                         xcodeptr = cd->mcodebase + pref->branchpos;
3832                         mcode = *((ptrint *) xcodeptr);
3833
3834                         MCODECHECK(50);
3835
3836                         /* patch in `call rel32' to call the following code               */
3837                         tmpcd->mcodeptr = xcodeptr;     /* set dummy mcode pointer        */
3838                         x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
3839
3840                         /* move classinfo pointer and machine code bytes into registers */
3841                         x86_64_mov_imm_reg(cd, (ptrint) pref->ref, REG_ITMP1);
3842                         x86_64_mov_imm_reg(cd, (ptrint) mcode, REG_ITMP2);
3843
3844                         x86_64_mov_imm_reg(cd, (ptrint) pref->asmwrapper, REG_ITMP3);
3845                         x86_64_jmp_reg(cd, REG_ITMP3);
3846                 }
3847         }
3848         }
3849
3850         codegen_finish(m, cd, (s4) ((u1 *) cd->mcodeptr - cd->mcodebase));
3851 }
3852
3853
3854 /* function createcompilerstub *************************************************
3855
3856         creates a stub routine which calls the compiler
3857         
3858 *******************************************************************************/
3859
3860 #define COMPSTUBSIZE    23
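/* 23 bytes presumably break down as: two `mov imm64, reg' instructions      */
/* (10 bytes each) plus one `jmp reg' (3 bytes) -- matching the code         */
/* emitted below                                                             */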
3861
3862 u1 *createcompilerstub(methodinfo *m)
3863 {
3864         u1 *s = CNEW(u1, COMPSTUBSIZE);     /* memory to hold the stub            */
3865         codegendata *cd;
3866         s4 dumpsize;
3867
3868         /* mark start of dump memory area */
3869
3870         dumpsize = dump_size();
3871
3872         cd = DNEW(codegendata);
3873         cd->mcodeptr = s;
3874
3875         /* code for the stub */
3876         x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1); /* pass method to compiler     */
3877         x86_64_mov_imm_reg(cd, (u8) asm_call_jit_compiler, REG_ITMP3);/* load address */
3878         x86_64_jmp_reg(cd, REG_ITMP3);      /* jump to compiler                   */
3879
3880 #if defined(STATISTICS)
3881         if (opt_stat)
3882                 count_cstub_len += COMPSTUBSIZE;
3883 #endif
3884
3885         /* release dump area */
3886
3887         dump_release(dumpsize);
3888
3889         return s;
3890 }
3891
3892
3893 /* function removecompilerstub *************************************************
3894
3895      deletes a compiler stub from memory (simply by freeing it)
3896
3897 *******************************************************************************/
3898
3899 void removecompilerstub(u1 *stub) 
3900 {
3901         CFREE(stub, COMPSTUBSIZE);
3902 }
3903
3904
3905 /* function: createnativestub **************************************************
3906
3907         creates a stub routine which calls a native method
3908
3909 *******************************************************************************/
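/* Overview: the stub shifts the Java arguments to make room for the JNI     */
/* `env' (and `class' for static methods) parameters, calls the native       */
/* function, and afterwards checks the thread's exception pointer,           */
/* forwarding a pending exception to asm_handle_nat_exception.               */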
3910
3911 /* #if defined(USE_THREADS) && defined(NATIVE_THREADS) */
3912 /* static java_objectheader **(*callgetexceptionptrptr)() = builtin_get_exceptionptrptr; */
3913 /* #endif */
3914
3915 #define NATIVESTUBSIZE    700           /* keep this size high enough!        */
3916
3917 u1 *createnativestub(functionptr f, methodinfo *m)
3918 {
3919         u1                 *s;              /* pointer to stub memory             */
3920         codegendata        *cd;
3921         registerdata       *rd;
3922         t_inlining_globals *id;
3923         s4                  dumpsize;
3924         s4                  stackframesize; /* size of stackframe if needed       */
3925         u1                 *tptr;
3926         s4                  iargs;          /* count of integer arguments         */
3927         s4                  fargs;          /* count of float arguments           */
3928         s4                  i;              /* counter                            */
3929
3930         void **callAddrPatchPos=0;
3931         u1 *jmpInstrPos=0;
3932         void **jmpInstrPatchPos=0;
3933
3934         /* initialize variables */
3935
3936         iargs = 0;
3937         fargs = 0;
3938
3939         /* mark start of dump memory area */
3940
3941         dumpsize = dump_size();
3942
3943         cd = DNEW(codegendata);
3944         rd = DNEW(registerdata);
3945         id = DNEW(t_inlining_globals);
3946
3947         /* setup registers before using it */
3948
3949         inlining_setup(m, id);
3950         reg_setup(m, rd, id);
3951
3952         /* set paramcount and paramtypes      */
3953
3954         method_descriptor2types(m);
3955
3956         /* count integer and float arguments */
3957
3958         tptr = m->paramtypes;
3959         for (i = 0; i < m->paramcount; i++) {
3960                 IS_INT_LNG_TYPE(*tptr++) ? iargs++ : fargs++;
3961         }
3962
3963         s = CNEW(u1, NATIVESTUBSIZE);       /* memory to hold the stub            */
3964
3965         /* set some required variables which are normally set by codegen_setup */
3966         cd->mcodebase = s;
3967         cd->mcodeptr = s;
3968         cd->patchrefs = NULL;
3969
3970         /* if function is static, check for initialized */
3971
3972         if ((m->flags & ACC_STATIC) && !m->class->initialized) {
3973                 codegen_addpatchref(cd, cd->mcodeptr, asm_check_clinit, m->class);
3974         }
3975
3976         if (runverbose) {
3977                 s4 l, s1;
3978
3979                 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
3980
3981                 /* save integer and float argument registers */
3982
3983                 for (i = 0; i < INT_ARG_CNT; i++) {
3984                         x86_64_mov_reg_membase(cd, rd->argintregs[i], REG_SP, (1 + i) * 8);
3985                 }
3986
3987                 for (i = 0; i < FLT_ARG_CNT; i++) {
3988                         x86_64_movq_reg_membase(cd, rd->argfltregs[i], REG_SP, (1 + INT_ARG_CNT + i) * 8);
3989                 }
3990
3991                 /* show integer hex code for float arguments */
3992
3993                 for (i = 0, l = 0; i < m->paramcount && i < INT_ARG_CNT; i++) {
3994                         /* if the paramtype is a float, we have to right shift all        */
3995                         /* following integer registers                                    */
3996
3997                         if (IS_FLT_DBL_TYPE(m->paramtypes[i])) {
3998                                 for (s1 = INT_ARG_CNT - 2; s1 >= i; s1--) {
3999                                         x86_64_mov_reg_reg(cd, rd->argintregs[s1], rd->argintregs[s1 + 1]);
4000                                 }
4001
4002                                 x86_64_movd_freg_reg(cd, rd->argfltregs[l], rd->argintregs[i]);
4003                                 l++;
4004                         }
4005                 }
4006
4007                 x86_64_mov_imm_reg(cd, (u8) m, REG_ITMP1);
4008                 x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, 0 * 8);
4009                 x86_64_mov_imm_reg(cd, (u8) builtin_trace_args, REG_ITMP1);
4010                 x86_64_call_reg(cd, REG_ITMP1);
4011
4012                 /* restore integer and float argument registers */
4013
4014                 for (i = 0; i < INT_ARG_CNT; i++) {
4015                         x86_64_mov_membase_reg(cd, REG_SP, (1 + i) * 8, rd->argintregs[i]);
4016                 }
4017
4018                 for (i = 0; i < FLT_ARG_CNT; i++) {
4019                         x86_64_movq_membase_reg(cd, REG_SP, (1 + INT_ARG_CNT + i) * 8, rd->argfltregs[i]);
4020                 }
4021
4022                 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4023         }
4024
4025 #if !defined(STATIC_CLASSPATH)
4026         /* call method to resolve native function if needed */
4027         if (f == NULL) {
4028                 x86_64_alu_imm_reg(cd, X86_64_SUB, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4029
4030                 x86_64_mov_reg_membase(cd, rd->argintregs[0], REG_SP, 1 * 8);
4031                 x86_64_mov_reg_membase(cd, rd->argintregs[1], REG_SP, 2 * 8);
4032                 x86_64_mov_reg_membase(cd, rd->argintregs[2], REG_SP, 3 * 8);
4033                 x86_64_mov_reg_membase(cd, rd->argintregs[3], REG_SP, 4 * 8);
4034                 x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 5 * 8);
4035                 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 6 * 8);
4036
4037                 x86_64_movq_reg_membase(cd, rd->argfltregs[0], REG_SP, 7 * 8);
4038                 x86_64_movq_reg_membase(cd, rd->argfltregs[1], REG_SP, 8 * 8);
4039                 x86_64_movq_reg_membase(cd, rd->argfltregs[2], REG_SP, 9 * 8);
4040                 x86_64_movq_reg_membase(cd, rd->argfltregs[3], REG_SP, 10 * 8);
4041                 x86_64_movq_reg_membase(cd, rd->argfltregs[4], REG_SP, 11 * 8);
4042                 x86_64_movq_reg_membase(cd, rd->argfltregs[5], REG_SP, 12 * 8);
4043                 x86_64_movq_reg_membase(cd, rd->argfltregs[6], REG_SP, 13 * 8);
4044                 x86_64_movq_reg_membase(cd, rd->argfltregs[7], REG_SP, 14 * 8);
4045
4046                 /* needed to patch a jump over this block */
4047                 x86_64_jmp_imm(cd, 0);
4048                 jmpInstrPos = cd->mcodeptr - 4;
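                /* The 32-bit displacement of this forward jump is still 0; its      */
                /* position is handed to codegen_resolve_native below so that, once  */
                /* the native address has been resolved, the resolver block can be   */
                /* skipped on subsequent calls.                                      */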
4049
4050                 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
4051
4052                 x86_64_mov_imm_reg(cd, 0, rd->argintregs[1]);
4053                 callAddrPatchPos = cd->mcodeptr - 8; /* remember where the native function address has to be patched in */
4054
4055                 x86_64_mov_imm_reg(cd, 0, rd->argintregs[2]);
4056                 jmpInstrPatchPos = cd->mcodeptr - 8;
4057
4058                 x86_64_mov_imm_reg(cd, (u8) jmpInstrPos, rd->argintregs[3]);
4059
4060                 x86_64_mov_imm_reg(cd, (u8) codegen_resolve_native, REG_ITMP1);
4061                 x86_64_call_reg(cd, REG_ITMP1);
4062
4063                 *(jmpInstrPatchPos) = cd->mcodeptr - jmpInstrPos - 1; /*=opcode jmp_imm size*/
4064
4065                 x86_64_mov_membase_reg(cd, REG_SP, 1 * 8, rd->argintregs[0]);
4066                 x86_64_mov_membase_reg(cd, REG_SP, 2 * 8, rd->argintregs[1]);
4067                 x86_64_mov_membase_reg(cd, REG_SP, 3 * 8, rd->argintregs[2]);
4068                 x86_64_mov_membase_reg(cd, REG_SP, 4 * 8, rd->argintregs[3]);
4069                 x86_64_mov_membase_reg(cd, REG_SP, 5 * 8, rd->argintregs[4]);
4070                 x86_64_mov_membase_reg(cd, REG_SP, 6 * 8, rd->argintregs[5]);
4071
4072                 x86_64_movq_membase_reg(cd, REG_SP, 7 * 8, rd->argfltregs[0]);
4073                 x86_64_movq_membase_reg(cd, REG_SP, 8 * 8, rd->argfltregs[1]);
4074                 x86_64_movq_membase_reg(cd, REG_SP, 9 * 8, rd->argfltregs[2]);
4075                 x86_64_movq_membase_reg(cd, REG_SP, 10 * 8, rd->argfltregs[3]);
4076                 x86_64_movq_membase_reg(cd, REG_SP, 11 * 8, rd->argfltregs[4]);
4077                 x86_64_movq_membase_reg(cd, REG_SP, 12 * 8, rd->argfltregs[5]);
4078                 x86_64_movq_membase_reg(cd, REG_SP, 13 * 8, rd->argfltregs[6]);
4079                 x86_64_movq_membase_reg(cd, REG_SP, 14 * 8, rd->argfltregs[7]);
4080
4081                 x86_64_alu_imm_reg(cd, X86_64_ADD, (INT_ARG_CNT + FLT_ARG_CNT + 1) * 8, REG_SP);
4082         }
4083 #endif
4084
4085         /* save argument registers on stack -- if we have to */
4086
4087         if ((((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) || iargs > (INT_ARG_CNT - 1)) ||
4088                 (fargs > FLT_ARG_CNT)) {
4089                 s4 paramshiftcnt;
4090                 s4 stackparamcnt;
4091
4092                 paramshiftcnt = 0;
4093                 stackparamcnt = 0;
4094
4095                 /* do we need to shift integer argument register onto stack? */
4096
4097                 if ((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) {
4098                         /* do we need to shift 2 arguments? */
4099                         if (iargs > (INT_ARG_CNT - 1)) {
4100                                 paramshiftcnt = 2;
4101
4102                         } else {
4103                                 paramshiftcnt = 1;
4104                         }
4105
4106                 } else if (iargs > (INT_ARG_CNT - 1)) {
4107                         paramshiftcnt = 1;
4108                 }
4109
4110                 /* calculate required stack space */
4111
4112                 stackparamcnt += (iargs > INT_ARG_CNT) ? iargs - INT_ARG_CNT : 0;
4113                 stackparamcnt += (fargs > FLT_ARG_CNT) ? fargs - FLT_ARG_CNT : 0;
4114
4115                 stackframesize = stackparamcnt + paramshiftcnt;
4116
4117                 /* keep stack 16-byte aligned */
4118                 if (!(stackframesize & 0x1))
4119                         stackframesize++;
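                /* the return address already occupies 8 bytes, so an odd number of  */
                /* 8-byte slots brings REG_SP back to the 16-byte alignment the      */
                /* x86_64 ABI requires at call sites                                 */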
4120
4121                 x86_64_alu_imm_reg(cd, X86_64_SUB, stackframesize * 8, REG_SP);
4122
4123                 /* shift integer arguments if required */
4124
4125                 if ((m->flags & ACC_STATIC) && iargs > (INT_ARG_CNT - 2)) {
4126                         /* do we need to shift 2 arguments? */
4127                         if (iargs > (INT_ARG_CNT - 1))
4128                                 x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 1 * 8);
4129
4130                         x86_64_mov_reg_membase(cd, rd->argintregs[4], REG_SP, 0 * 8);
4131
4132                 } else if (iargs > (INT_ARG_CNT - 1)) {
4133                         x86_64_mov_reg_membase(cd, rd->argintregs[5], REG_SP, 0 * 8);
4134                 }
4135
4136                 /* copy stack arguments into new stack frame -- if any */
4137                 for (i = 0; i < stackparamcnt; i++) {
4138                         x86_64_mov_membase_reg(cd, REG_SP, (stackframesize + 1 + i) * 8, REG_ITMP1);
4139                         x86_64_mov_reg_membase(cd, REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
4140                 }
4141
4142         } else {
4143                 /* keep stack 16-byte aligned */
4144                 x86_64_alu_imm_reg(cd, X86_64_SUB, 1 * 8, REG_SP);
4145                 stackframesize = 1;
4146         }
4147
4148         /* shift integer arguments for `env' and `class' arguments */
4149
4150         if (m->flags & ACC_STATIC) {
4151                 /* shift the integer argument registers up by two to make room for env and class */
4152                 for (i = (iargs < (INT_ARG_CNT - 2)) ? iargs : (INT_ARG_CNT - 2); i >= 0; i--) {
4153                         x86_64_mov_reg_reg(cd, rd->argintregs[i], rd->argintregs[i + 2]);
4154                 }
4155
4156                 /* put class into second argument register */
4157                 x86_64_mov_imm_reg(cd, (u8) m->class, rd->argintregs[1]);
4158
4159         } else {
4160                 /* shift the integer argument registers up by one to make room for env */
4161                 for (i = (iargs < (INT_ARG_CNT - 1)) ? iargs : (INT_ARG_CNT - 1); i >= 0; i--) {
4162                         x86_64_mov_reg_reg(cd, rd->argintregs[i], rd->argintregs[i + 1]);
4163                 }
4164         }
4165
4166         /* put env into first argument register */
4167         x86_64_mov_imm_reg(cd, (u8) &env, rd->argintregs[0]);
4168
4169         /* do the native function call */
4170         x86_64_mov_imm_reg(cd, (u8) f, REG_ITMP1);
4171 #if !defined(STATIC_CLASSPATH)
4172         if (f == NULL)
4173                 (*callAddrPatchPos) = cd->mcodeptr - 8;
4174 #endif
4175         x86_64_call_reg(cd, REG_ITMP1);
4176
4177         /* remove stackframe if there is one */
4178         if (stackframesize) {
4179                 x86_64_alu_imm_reg(cd, X86_64_ADD, stackframesize * 8, REG_SP);
4180         }
4181
4182         if (runverbose) {
4183                 x86_64_alu_imm_reg(cd, X86_64_SUB, 3 * 8, REG_SP);    /* keep stack 16-byte aligned */
4184
4185                 x86_64_mov_reg_membase(cd, REG_RESULT, REG_SP, 0 * 8);
4186                 x86_64_movq_reg_membase(cd, REG_FRESULT, REG_SP, 1 * 8);
4187
4188                 x86_64_mov_imm_reg(cd, (u8) m, rd->argintregs[0]);
4189                 x86_64_mov_reg_reg(cd, REG_RESULT, rd->argintregs[1]);
4190                 M_FLTMOVE(REG_FRESULT, rd->argfltregs[0]);
4191                 M_FLTMOVE(REG_FRESULT, rd->argfltregs[1]);
4192
4193                 x86_64_mov_imm_reg(cd, (u8) builtin_displaymethodstop, REG_ITMP1);
4194                 x86_64_call_reg(cd, REG_ITMP1);
4195
4196                 x86_64_mov_membase_reg(cd, REG_SP, 0 * 8, REG_RESULT);
4197                 x86_64_movq_membase_reg(cd, REG_SP, 1 * 8, REG_FRESULT);
4198
4199                 x86_64_alu_imm_reg(cd, X86_64_ADD, 3 * 8, REG_SP);    /* keep stack 16-byte aligned */
4200         }
4201
4202         /* check for exception */
4203
4204 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4205         x86_64_push_reg(cd, REG_RESULT);
4206 /*      x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4207         x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4208         x86_64_call_reg(cd, REG_ITMP3);
4209         x86_64_mov_membase_reg(cd, REG_RESULT, 0, REG_ITMP3);
4210         x86_64_pop_reg(cd, REG_RESULT);
4211 #else
4212         x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4213         x86_64_mov_membase_reg(cd, REG_ITMP3, 0, REG_ITMP3);
4214 #endif
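	/* REG_ITMP3 now holds the exception pointer (or NULL). The conditional
	   jump below skips forward exactly 1 byte -- the size of the `ret'
	   opcode -- so a pending exception falls through into the handler code
	   that follows. */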
4215         x86_64_test_reg_reg(cd, REG_ITMP3, REG_ITMP3);
4216         x86_64_jcc(cd, X86_64_CC_NE, 1);
4217
4218         x86_64_ret(cd);
4219
4220         /* handle exception */
4221
4222 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
4223         x86_64_push_reg(cd, REG_ITMP3);
4224 /*      x86_64_call_mem(cd, (u8) &callgetexceptionptrptr); */
4225         x86_64_mov_imm_reg(cd, (u8) builtin_get_exceptionptrptr, REG_ITMP3);
4226         x86_64_call_reg(cd, REG_ITMP3);
4227         x86_64_mov_imm_membase(cd, 0, REG_RESULT, 0);
4228         x86_64_pop_reg(cd, REG_ITMP1_XPTR);
4229 #else
4230         x86_64_mov_reg_reg(cd, REG_ITMP3, REG_ITMP1_XPTR);
4231         x86_64_mov_imm_reg(cd, (u8) &_exceptionptr, REG_ITMP3);
4232         x86_64_alu_reg_reg(cd, X86_64_XOR, REG_ITMP2, REG_ITMP2);
4233         x86_64_mov_reg_membase(cd, REG_ITMP2, REG_ITMP3, 0);    /* clear exception pointer */
4234 #endif
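	/* In the threaded case the exception-slot pointer returned by the first
	   builtin_get_exceptionptrptr call lived in REG_RESULT, which had to be
	   restored to the native's return value, so the builtin is called a
	   second time here just to clear the slot; the exception object itself
	   is preserved across the call via the push/pop of REG_ITMP3. */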
4235
4236         x86_64_mov_membase_reg(cd, REG_SP, 0, REG_ITMP2_XPC);    /* get return address from stack */
4237         x86_64_alu_imm_reg(cd, X86_64_SUB, 3, REG_ITMP2_XPC);    /* point XPC back into the `callq' that invoked the stub */
4238
4239         x86_64_mov_imm_reg(cd, (u8) asm_handle_nat_exception, REG_ITMP3);
4240         x86_64_jmp_reg(cd, REG_ITMP3);
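	/* asm_handle_nat_exception is entered with the exception object in
	   REG_ITMP1_XPTR and the exception-raising PC in REG_ITMP2_XPC, as set
	   up above; it forwards the exception to the stub's caller. */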
4241
4242
4243         /* patch in a <clinit> call if required ***********************************/
4244
4245         {
4246                 u1          *xcodeptr;
4247                 patchref    *pref;
4248                 codegendata *tmpcd;
4249                 u8           mcode;
4250
4251                 tmpcd = DNEW(codegendata);
4252
4253                 /* there can only be one patch ref entry                              */
4254                 pref = cd->patchrefs;
4255
4256                 if (pref) {
4257                         /* Save the machine code that is patched back in later. A        */
4258                         /* `call rel32' is 5 bytes long, but a full 8 bytes are read.    */
4259                         xcodeptr = cd->mcodebase + pref->branchpos;
4260                         mcode = *((ptrint *) xcodeptr);
4261
4262                         /* patch in `call rel32' to call the following code               */
4263                         tmpcd->mcodeptr = xcodeptr;     /* set dummy mcode pointer        */
4264                         x86_64_call_imm(tmpcd, cd->mcodeptr - (xcodeptr + 5));
4265
4266                         /* move classinfo pointer and machine code bytes into registers */
4267                         x86_64_mov_imm_reg(cd, (ptrint) pref->ref, REG_ITMP1);
4268                         x86_64_mov_imm_reg(cd, (ptrint) mcode, REG_ITMP2);
4269
4270                         x86_64_mov_imm_reg(cd, (ptrint) pref->asmwrapper, REG_ITMP3);
4271                         x86_64_jmp_reg(cd, REG_ITMP3);
4272                 }
4273         }
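	/* Sketch of the lazy patching scheme used here: the 8 bytes at the
	   patch position are saved in `mcode', and a 5-byte `call rel32'
	   (opcode 0xE8) is written over them through the throw-away `tmpcd',
	   whose mcodeptr is aimed directly at the patch site.  The patched call
	   lands on the mov/jmp sequence generated above, which loads the
	   classinfo pointer, the saved code bytes and the wrapper address into
	   registers and jumps to the wrapper; the wrapper performs the
	   initialization and writes the saved 8 bytes back.

	       before patching:  <8 original code bytes>            (saved in `mcode')
	       after patching:   E8 <rel32 to this code> <last 3 original bytes>
	*/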
4274
4275         /* Check that the reserved stub size is big enough to hold the whole      */
4276         /* generated stub. If it is not, the overflow corrupts the heap and       */
4277         /* leads to unpredictable crashes.                                        */
4278         if ((s4) (cd->mcodeptr - s) > NATIVESTUBSIZE) {
4279                 throw_cacao_exception_exit(string_java_lang_InternalError,
4280                                                                    "Native stub size %d is too small for generated stub size %d",
4281                                                                    NATIVESTUBSIZE, (s4) (cd->mcodeptr - s));
4282         }
4283
4284 #if defined(STATISTICS)
4285         if (opt_stat)
4286                 count_nstub_len += NATIVESTUBSIZE;
4287 #endif
4288
4289         /* release dump area */
4290
4291         dump_release(dumpsize);
4292
4293         return s;
4294 }
4295
4296
4297 /* function: removenativestub **************************************************
4298
4299     removes a previously created native stub from memory
4300
4301 *******************************************************************************/
4302
4303 void removenativestub(u1 *stub)
4304 {
4305         CFREE(stub, NATIVESTUBSIZE);
4306 }
4307
4308
4309 /*
4310  * These are local overrides for various environment variables in Emacs.
4311  * Please do not remove this and leave it at the end of the file, where
4312  * Emacs will automagically detect them.
4313  * ---------------------------------------------------------------------
4314  * Local variables:
4315  * mode: c
4316  * indent-tabs-mode: t
4317  * c-basic-offset: 4
4318  * tab-width: 4
4319  * End:
4320  */