1 /* x86_64/ngen.c ***************************************************************
2
3         Copyright (c) 1997 A. Krall, R. Grafl, M. Gschwind, M. Probst
4
5         See file COPYRIGHT for information on usage and disclaimer of warranties
6
7         Contains the code generator for the x86_64 processor.
8         This module generates x86_64 machine code for a sequence of
9         pseudo commands (ICMDs).
10
11         Authors: Andreas  Krall      EMAIL: cacao@complang.tuwien.ac.at
12                  Reinhard Grafl      EMAIL: cacao@complang.tuwien.ac.at
13                  Christian Thalinger EMAIL: cacao@complang.tuwien.ac.at
14
15         Last Change: $Id: ngen.c 546 2003-11-01 19:21:58Z twisti $
16
17 *******************************************************************************/
18
19 #include "jitdef.h"   /* phil */
20 #include "methodtable.c"
21
22 /* additional functions and macros to generate code ***************************/
23
24 #define BlockPtrOfPC(pc)  ((basicblock *) iptr->target)
25
26
27 #ifdef STATISTICS
28 #define COUNT_SPILLS count_spills++
29 #else
30 #define COUNT_SPILLS
31 #endif
32
33
34 #define CALCOFFSETBYTES(var, reg, val) \
35     if ((s4) (val) < -128 || (s4) (val) > 127) (var) += 4; \
36     else if ((s4) (val) != 0) (var) += 1; \
37     else if ((reg) == RBP || (reg) == RSP || (reg) == R12 || (reg) == R13) (var) += 1;
38
39
40 #define CALCREGOFFBYTES(var, val) \
41     if ((val) > 15) (var) += 4; \
42     else if ((val) != 0) (var) += 1;
43
44
45 #define CALCIMMEDIATEBYTES(var, val) \
46     if ((s4) (val) < -128 || (s4) (val) > 127) (var) += 4; \
47     else (var) += 1;
48
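/* Note on the size estimates above: a displacement or immediate outside the
   range -128..127 needs a full 32-bit field, otherwise one byte is enough.
   A zero displacement still costs one byte when the base register is RBP or
   R13 (the mod=00 encoding of these registers means RIP-relative/disp32
   addressing) or RSP or R12 (which always require a SIB byte).
   CALCREGOFFBYTES apparently works on stack-slot indices, i.e. offsets of
   val * 8 bytes, which is why the one-byte limit is already reached above
   15 slots. */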
49
50 /* gen_nullptr_check(objreg) */
51
52 #ifdef SOFTNULLPTRCHECK
53 #define gen_nullptr_check(objreg) \
54         if (checknull) { \
55         x86_64_test_reg_reg((objreg), (objreg)); \
56         x86_64_jcc(X86_64_CC_E, 0); \
57             mcode_addxnullrefs(mcodeptr); \
58         }
59 #else
60 #define gen_nullptr_check(objreg)
61 #endif
62
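/* When SOFTNULLPTRCHECK is defined and checknull is set, the null check is
   emitted explicitly as a register test plus a conditional branch into the
   exception code; otherwise a null dereference simply traps and is converted
   into a NullPointerException by the SIGSEGV/SIGBUS handlers installed in
   init_exceptions() below. */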
63
64 /* MCODECHECK(icnt) */
65
66 #define MCODECHECK(icnt) \
67         if ((mcodeptr + (icnt)) > (u1*) mcodeend) mcodeptr = (u1*) mcode_increase((u1*) mcodeptr)
68
69 /* M_INTMOVE:
70     generates an integer move from register a to register b.
71     If a and b are the same integer register, no code is generated.
72 */
73
74 #define M_INTMOVE(reg,dreg) \
75     if ((reg) != (dreg)) { \
76         x86_64_mov_reg_reg((reg),(dreg)); \
77     }
78
79
80 /* M_FLTMOVE:
81     generates a floating-point move from register a to register b.
82     If a and b are the same floating-point register, no code is generated.
83 */
84
85 #define M_FLTMOVE(reg,dreg) \
86     if ((reg) != (dreg)) { \
87         x86_64_movq_reg_reg((reg),(dreg)); \
88     }
89
90
91 /* var_to_reg_xxx:
92     These macros generate code to fetch data from a pseudo-register
93     into a real register.
94     If the pseudo-register has actually been assigned to a real
95     register, no code is emitted, since following operations
96     can use this register directly.
97
98     v: pseudo-register to be fetched from
99     tempnr: temporary register to be used if v is actually spilled to ram
100
101     return: the register number where the operand can be found after
102             fetching (this will be either tempnr or the register
103             number already assigned to v)
104 */
105
106 #define var_to_reg_int(regnr,v,tempnr) \
107     if ((v)->flags & INMEMORY) { \
108         COUNT_SPILLS; \
109         if ((v)->type == TYPE_INT) { \
110             x86_64_movl_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
111         } else { \
112             x86_64_mov_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
113         } \
114         regnr = tempnr; \
115     } else { \
116         regnr = (v)->regoff; \
117     }
118
119
120
121 #define var_to_reg_flt(regnr,v,tempnr) \
122     if ((v)->flags & INMEMORY) { \
123         COUNT_SPILLS; \
124         if ((v)->type == TYPE_FLT) { \
125             x86_64_movlps_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
126         } else { \
127             x86_64_movlpd_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
128         } \
129 /*        x86_64_movq_membase_reg(REG_SP, (v)->regoff * 8, tempnr);*/ \
130         regnr = tempnr; \
131     } else { \
132         regnr = (v)->regoff; \
133     }
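/* Note: x86_64_movlps/movlpd presumably emit MOVLPS/MOVLPD, which load only
   the low 64 bits of the XMM register and leave the upper half unchanged,
   while the commented-out MOVQ form would zero the upper half; either is
   sufficient here, since only the low quadword of a spilled float/double is
   ever used. */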
134
135
136 /* reg_of_var:
137     This function determines the register to which the result of an operation
138     should go, when the result is ultimately intended to be stored in
139     pseudoregister v.
140     If v is assigned to an actual register, this register is returned.
141     Otherwise (when v is spilled) this function returns tempregnum.
142     If not already done, regoff and flags are set in the stack location.
143 */
144
145 static int reg_of_var(stackptr v, int tempregnum)
146 {
147         varinfo      *var;
148
149         switch (v->varkind) {
150                 case TEMPVAR:
151                         if (!(v->flags & INMEMORY))
152                                 return(v->regoff);
153                         break;
154                 case STACKVAR:
155                         var = &(interfaces[v->varnum][v->type]);
156                         v->regoff = var->regoff;
157                         if (!(var->flags & INMEMORY))
158                                 return(var->regoff);
159                         break;
160                 case LOCALVAR:
161                         var = &(locals[v->varnum][v->type]);
162                         v->regoff = var->regoff;
163                         if (!(var->flags & INMEMORY))
164                                 return(var->regoff);
165                         break;
166                 case ARGVAR:
167                         v->regoff = v->varnum;
168                         if (IS_FLT_DBL_TYPE(v->type)) {
169                                 if (v->varnum < fltreg_argnum) {
170                                         v->regoff = argfltregs[v->varnum];
171                                         return(argfltregs[v->varnum]);
172                                 }
173                         } else {
174                                 if (v->varnum < intreg_argnum) {
175                                         v->regoff = argintregs[v->varnum];
176                                         return(argintregs[v->varnum]);
177                                 }
178                         }
179                         v->regoff -= intreg_argnum;
180                         break;
181                 }
182         v->flags |= INMEMORY;
183         return tempregnum;
184 }
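/* Note: ARGVAR pseudo-registers that fit into the argument registers are
   mapped directly onto argintregs/argfltregs; arguments passed on the stack
   fall through to the INMEMORY case, with regoff rebased by intreg_argnum.
   Whenever tempregnum is returned, the INMEMORY flag is set so the caller
   knows the result still has to be stored back. */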
185
186
187 /* store_reg_to_var_xxx:
188     These macros generate the code to store the result of an operation
189     back into a spilled pseudo-variable.
190     If the pseudo-variable has not been spilled in the first place,
191     no code is generated.
192
193     sptr ......... pseudo-variable (stack element)
194     tempregnum ... number of the temporary register as returned by
195                    reg_of_var
196 */
197
198 #define store_reg_to_var_int(sptr, tempregnum) \
199     if ((sptr)->flags & INMEMORY) { \
200         COUNT_SPILLS; \
201         x86_64_mov_reg_membase(tempregnum, REG_SP, (sptr)->regoff * 8); \
202     }
203
204
205 #define store_reg_to_var_flt(sptr, tempregnum) \
206     if ((sptr)->flags & INMEMORY) { \
207          COUNT_SPILLS; \
208          x86_64_movq_reg_membase(tempregnum, REG_SP, (sptr)->regoff * 8); \
209     }
210
211
212 /* NullPointerException signal handler for hardware null pointer check */
213
214 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
215 {
216         sigset_t nsig;
217 /*      int      instr; */
218 /*      long     faultaddr; */
219
220         struct ucontext *_uc = (struct ucontext *) _p;
221         struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
222
223         /* Reset signal handler - necessary for SysV, does no harm for BSD */
224
225         
226 /*      instr = *((int*)(sigctx->rip)); */
227 /*      faultaddr = sigctx->sc_regs[(instr >> 16) & 0x1f]; */
228
229 /*      if (faultaddr == 0) { */
230                 signal(sig, (void *) catch_NullPointerException);          /* reinstall handler */
231                 sigemptyset(&nsig);
232                 sigaddset(&nsig, sig);
233                 sigprocmask(SIG_UNBLOCK, &nsig, NULL);                     /* unblock signal    */
234                 sigctx->rax = (long) proto_java_lang_NullPointerException; /* REG_ITMP1_XPTR    */
235                 sigctx->r10 = sigctx->rip;                                 /* REG_ITMP2_XPC     */
236                 sigctx->rip = (long) asm_handle_exception;
237
238                 return;
239
240 /*      } else { */
241 /*              faultaddr += (long) ((instr << 16) >> 16); */
242 /*              fprintf(stderr, "faulting address: 0x%08x\n", faultaddr); */
243 /*              panic("Stack overflow"); */
244 /*      } */
245 }
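/* The handler hands the exception over to the generated code by placing the
   exception object in RAX (REG_ITMP1_XPTR) and the faulting PC in R10
   (REG_ITMP2_XPC) and then resuming execution at asm_handle_exception, which
   is expected to pick the exception up from exactly these registers. */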
246
247
248 /* ArithmeticException signal handler for hardware divide by zero check */
249
250 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
251 {
252         sigset_t nsig;
253
254         struct ucontext *_uc = (struct ucontext *) _p;
255         struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
256
257         classinfo *c;
258         java_objectheader *p;
259         methodinfo *m;
260
261         /* Reset signal handler - necessary for SysV, does no harm for BSD */
262
263         signal(sig, (void *) catch_ArithmeticException);     /* reinstall handler */
264         sigemptyset(&nsig);
265         sigaddset(&nsig, sig);
266         sigprocmask(SIG_UNBLOCK, &nsig, NULL);               /* unblock signal    */
267
268         c = loader_load(utf_new_char("java/lang/ArithmeticException"));
269         p = builtin_new(c);
270         m = class_findmethod(c, 
271                                                  utf_new_char("<init>"), 
272                                                  utf_new_char("(Ljava/lang/String;)V"));
273
274         asm_calljavamethod(m, p, javastring_new_char("/ by zero"), NULL, NULL);
275
276         sigctx->rax = (long) p;                              /* REG_ITMP1_XPTR    */
277         sigctx->r10 = sigctx->rip;                           /* REG_ITMP2_XPC     */
278         sigctx->rip = (long) asm_handle_exception;
279
280         return;
281 }
282
283
284 void init_exceptions(void)
285 {
286         /* install signal handlers we need to convert to exceptions */
287
288         if (!checknull) {
289 #if defined(SIGSEGV)
290                 signal(SIGSEGV, (void *) catch_NullPointerException);
291 #endif
292
293 #if defined(SIGBUS)
294                 signal(SIGBUS, (void *) catch_NullPointerException);
295 #endif
296         }
297
298         signal(SIGFPE, (void *) catch_ArithmeticException);
299 }
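/* Note: the SIGSEGV/SIGBUS handlers are only installed when checknull is off,
   i.e. when no explicit (software) null checks are generated; SIGFPE turns a
   hardware divide-by-zero into a java.lang.ArithmeticException. */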
300
301
302 /* function gen_mcode **********************************************************
303
304         generates machine code
305
306 *******************************************************************************/
307
308 u1          *mcodeptr;
309
310 static void gen_mcode()
311 {
312         int  len, s1, s2, s3, d /*, bbs */;
313         s8   a;
314         stackptr    src;
315         varinfo     *var;
316 /*      varinfo     *dst; */
317         basicblock  *bptr;
318         instruction *iptr;
319
320         xtable *ex;
321
322         {
323         int p, pa, t, l, r;
324
325         savedregs_num = 0;
326
327         /* space to save used callee saved registers */
328
329         savedregs_num += (savintregcnt - maxsavintreguse);
330         savedregs_num += (savfltregcnt - maxsavfltreguse);
331
332         parentargs_base = maxmemuse + savedregs_num;
333
334 #ifdef USE_THREADS                 /* space to save argument of monitor_enter */
335
336         if (checksync && (method->flags & ACC_SYNCHRONIZED))
337                 parentargs_base++;
338
339 #endif
340
341     /* keep stack 16-byte aligned for calls into libc */
342
343         if (!isleafmethod || runverbose) {
344                 if ((parentargs_base % 2) == 0) {
345                         parentargs_base++;
346                 }
347         }
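        /* (Under the System V AMD64 ABI the caller's call instruction leaves
           the stack pointer at 8 mod 16 on method entry, so an odd number of
           8-byte stack slots restores 16-byte alignment at subsequent calls;
           hence the adjustment above when parentargs_base is even.) */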
348
349         /* create method header */
350
351         (void) dseg_addaddress(method);                         /* MethodPointer  */
352         (void) dseg_adds4(parentargs_base * 8);                 /* FrameSize      */
353
354 #ifdef USE_THREADS
355
356         /* IsSync contains the offset, relative to the stack pointer, of the
357            argument of monitor_exit used in the exception handler. Since an
358            offset of zero would be indistinguishable from "not synchronized",
359            the stored value is offset by one.
360         */
361
362         if (checksync && (method->flags & ACC_SYNCHRONIZED))
363                 (void) dseg_adds4((maxmemuse + 1) * 8);             /* IsSync         */
364         else
365
366 #endif
367
368         (void) dseg_adds4(0);                                   /* IsSync         */
369                                                
370         (void) dseg_adds4(isleafmethod);                        /* IsLeaf         */
371         (void) dseg_adds4(savintregcnt - maxsavintreguse);      /* IntSave        */
372         (void) dseg_adds4(savfltregcnt - maxsavfltreguse);      /* FltSave        */
373         (void) dseg_adds4(exceptiontablelength);                /* ExTableSize    */
374
375         /* create exception table */
376
377         for (ex = extable; ex != NULL; ex = ex->down) {
378
379 #ifdef LOOP_DEBUG       
380                 if (ex->start != NULL)
381                         printf("adding start - %d - ", ex->start->debug_nr);
382                 else {
383                         printf("PANIC - start is NULL");
384                         exit(-1);
385                 }
386 #endif
387
388                 dseg_addtarget(ex->start);
389
390 #ifdef LOOP_DEBUG                       
391                 if (ex->end != NULL)
392                         printf("adding end - %d - ", ex->end->debug_nr);
393                 else {
394                         printf("PANIC - end is NULL");
395                         exit(-1);
396                 }
397 #endif
398
399                 dseg_addtarget(ex->end);
400
401 #ifdef LOOP_DEBUG               
402                 if (ex->handler != NULL)
403                         printf("adding handler - %d\n", ex->handler->debug_nr);
404                 else {
405                         printf("PANIC - handler is NULL");
406                         exit(-1);
407                 }
408 #endif
409
410                 dseg_addtarget(ex->handler);
411            
412                 (void) dseg_addaddress(ex->catchtype);
413         }
414         
415         /* initialize mcode variables */
416         
417         mcodeptr = (u1*) mcodebase;
418         mcodeend = (s4*) (mcodebase + mcodesize);
419         MCODECHECK(128 + mparamcount);
420
421         /* create stack frame (if necessary) */
422
423         if (parentargs_base) {
424                 x86_64_alu_imm_reg(X86_64_SUB, parentargs_base * 8, REG_SP);
425         }
426
427         /* save return address and used callee saved registers */
428
429         p = parentargs_base;
430         for (r = savintregcnt - 1; r >= maxsavintreguse; r--) {
431                 p--; x86_64_mov_reg_membase(savintregs[r], REG_SP, p * 8);
432         }
433         for (r = savfltregcnt - 1; r >= maxsavfltreguse; r--) {
434                 p--; x86_64_movq_reg_membase(savfltregs[r], REG_SP, p * 8);
435         }
436
437         /* save monitorenter argument */
438
439 #ifdef USE_THREADS
440         if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
441                 if (method->flags & ACC_STATIC) {
442                         x86_64_mov_imm_reg((s8) class, REG_ITMP1);
443                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, maxmemuse * 8);
444
445                 } else {
446                         x86_64_mov_reg_membase(argintregs[0], REG_SP, maxmemuse * 8);
447                 }
448         }                       
449 #endif
450
451         /* copy argument registers to stack and call trace function with pointer
452            to arguments on stack.
453         */
454         if (runverbose) {
455                 x86_64_alu_imm_reg(X86_64_SUB, (6 + 8 + 1 + 1) * 8, REG_SP);
456
457                 x86_64_mov_reg_membase(argintregs[0], REG_SP, 1 * 8);
458                 x86_64_mov_reg_membase(argintregs[1], REG_SP, 2 * 8);
459                 x86_64_mov_reg_membase(argintregs[2], REG_SP, 3 * 8);
460                 x86_64_mov_reg_membase(argintregs[3], REG_SP, 4 * 8);
461                 x86_64_mov_reg_membase(argintregs[4], REG_SP, 5 * 8);
462                 x86_64_mov_reg_membase(argintregs[5], REG_SP, 6 * 8);
463
464                 x86_64_movq_reg_membase(argfltregs[0], REG_SP, 7 * 8);
465                 x86_64_movq_reg_membase(argfltregs[1], REG_SP, 8 * 8);
466                 x86_64_movq_reg_membase(argfltregs[2], REG_SP, 9 * 8);
467                 x86_64_movq_reg_membase(argfltregs[3], REG_SP, 10 * 8);
468 /*              x86_64_movq_reg_membase(argfltregs[4], REG_SP, 11 * 8); */
469 /*              x86_64_movq_reg_membase(argfltregs[5], REG_SP, 12 * 8); */
470 /*              x86_64_movq_reg_membase(argfltregs[6], REG_SP, 13 * 8); */
471 /*              x86_64_movq_reg_membase(argfltregs[7], REG_SP, 14 * 8); */
472
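                /* The loop below apparently packs all Java arguments into the
                   integer argument registers for the call to builtin_trace_args:
                   each float/double argument is moved into the integer register
                   at its position and the following integer argument registers
                   are shifted up by one. */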
473                 for (p = 0, l = 0; p < mparamcount; p++) {
474                         t = mparamtypes[p];
475
476                         if (IS_FLT_DBL_TYPE(t)) {
477                                 for (s1 = (mparamcount > intreg_argnum) ? intreg_argnum - 2 : mparamcount - 2; s1 >= p; s1--) {
478                                         x86_64_mov_reg_reg(argintregs[s1], argintregs[s1 + 1]);
479                                 }
480
481                                 x86_64_movd_freg_reg(argfltregs[l], argintregs[p]);
482                                 l++;
483                         }
484                 }
485
486                 x86_64_mov_imm_reg((s8) method, REG_ITMP2);
487                 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, 0 * 8);
488                 x86_64_mov_imm_reg((s8) builtin_trace_args, REG_ITMP1);
489                 x86_64_call_reg(REG_ITMP1);
490
491                 x86_64_mov_membase_reg(REG_SP, 1 * 8, argintregs[0]);
492                 x86_64_mov_membase_reg(REG_SP, 2 * 8, argintregs[1]);
493                 x86_64_mov_membase_reg(REG_SP, 3 * 8, argintregs[2]);
494                 x86_64_mov_membase_reg(REG_SP, 4 * 8, argintregs[3]);
495                 x86_64_mov_membase_reg(REG_SP, 5 * 8, argintregs[4]);
496                 x86_64_mov_membase_reg(REG_SP, 6 * 8, argintregs[5]);
497
498                 x86_64_movq_membase_reg(REG_SP, 7 * 8, argfltregs[0]);
499                 x86_64_movq_membase_reg(REG_SP, 8 * 8, argfltregs[1]);
500                 x86_64_movq_membase_reg(REG_SP, 9 * 8, argfltregs[2]);
501                 x86_64_movq_membase_reg(REG_SP, 10 * 8, argfltregs[3]);
502 /*              x86_64_movq_membase_reg(REG_SP, 11 * 8, argfltregs[4]); */
503 /*              x86_64_movq_membase_reg(REG_SP, 12 * 8, argfltregs[5]); */
504 /*              x86_64_movq_membase_reg(REG_SP, 13 * 8, argfltregs[6]); */
505 /*              x86_64_movq_membase_reg(REG_SP, 14 * 8, argfltregs[7]); */
506
507                 x86_64_alu_imm_reg(X86_64_ADD, (6 + 8 + 1 + 1) * 8, REG_SP);
508         }
509
510         /* take arguments out of register or stack frame */
511
512         for (p = 0, l = 0, s1 = 0, s2 = 0; p < mparamcount; p++) {
513                 t = mparamtypes[p];
514                 var = &(locals[l][t]);
515                 l++;
516                 if (IS_2_WORD_TYPE(t))    /* increment local counter for 2 word types */
517                         l++;
518                 if (var->type < 0) {
519                         if (IS_INT_LNG_TYPE(t)) {
520                                 s1++;
521                         } else {
522                                 s2++;
523                         }
524                         continue;
525                 }
526                 r = var->regoff; 
527                 if (IS_INT_LNG_TYPE(t)) {                    /* integer args          */
528                         if (s1 < intreg_argnum) {                /* register arguments    */
529                                 if (!(var->flags & INMEMORY)) {      /* reg arg -> register   */
530                                         M_INTMOVE(argintregs[s1], r);
531
532                                 } else {                             /* reg arg -> spilled    */
533                                     x86_64_mov_reg_membase(argintregs[s1], REG_SP, r * 8);
534                                 }
535                         } else {                                 /* stack arguments       */
536                                 pa = s1 - intreg_argnum;
537                                 if (s2 >= fltreg_argnum) {
538                                         pa += s2 - fltreg_argnum;
539                                 }
540                                 if (!(var->flags & INMEMORY)) {      /* stack arg -> register */ 
541                                         x86_64_mov_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, r);    /* + 8 for return address */
542                                 } else {                             /* stack arg -> spilled  */
543                                         x86_64_mov_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1);    /* + 8 for return address */
544                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, r * 8);
545                                 }
546                         }
547                         s1++;
548
549                 } else {                                     /* floating args         */   
550                         if (s2 < fltreg_argnum) {                /* register arguments    */
551                                 if (!(var->flags & INMEMORY)) {      /* reg arg -> register   */
552                                         M_FLTMOVE(argfltregs[s2], r);
553
554                                 } else {                                         /* reg arg -> spilled    */
555                                         x86_64_movq_reg_membase(argfltregs[s2], REG_SP, r * 8);
556                                 }
557
558                         } else {                                 /* stack arguments       */
559                                 pa = s2 - fltreg_argnum;
560                                 if (s1 >= intreg_argnum) {
561                                         pa += s1 - intreg_argnum;
562                                 }
563                                 if (!(var->flags & INMEMORY)) {      /* stack-arg -> register */
564                                         x86_64_movq_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, r);
565
566                                 } else {
567                                         x86_64_movq_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
568                                         x86_64_movq_reg_membase(REG_FTMP1, REG_SP, r * 8);
569                                 }
570                         }
571                         s2++;
572                 }
573         }  /* end for */
574
575         /* call monitorenter function */
576
577 #ifdef USE_THREADS
578         if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
579                 x86_64_mov_membase_reg(REG_SP, 8 * maxmemuse, argintregs[0]);
580                 x86_64_mov_imm_reg((s8) builtin_monitorenter, REG_ITMP1);
581                 x86_64_call_reg(REG_ITMP1);
582         }                       
583 #endif
584         }
585
586         /* end of header generation */
587
588         /* walk through all basic blocks */
589         for (/* bbs = block_count, */ bptr = block; /* --bbs >= 0 */ bptr != NULL; bptr = bptr->next) {
590
591                 bptr->mpc = (int)((u1*) mcodeptr - mcodebase);
592
593                 if (bptr->flags >= BBREACHED) {
594
595                 /* branch resolving */
596
597                 branchref *brefs;
598                 for (brefs = bptr->branchrefs; brefs != NULL; brefs = brefs->next) {
599                         gen_resolvebranch((u1*) mcodebase + brefs->branchpos, 
600                                           brefs->branchpos, bptr->mpc);
601                 }
602
603                 /* copy interface registers to their destination */
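                /* (At a basic-block boundary the operand stack is described by
                   the interface registers (interfaces[depth][type]); their
                   contents are moved into this block's own stack slots here.
                   For BBTYPE_SBR blocks the subroutine return address is popped
                   off the machine stack, for BBTYPE_EXH blocks the exception
                   pointer is expected in REG_ITMP1.) */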
604
605                 src = bptr->instack;
606                 len = bptr->indepth;
607                 MCODECHECK(64+len);
608                 while (src != NULL) {
609                         len--;
610                         if ((len == 0) && (bptr->type != BBTYPE_STD)) {
611                                 if (bptr->type == BBTYPE_SBR) {
612                                         d = reg_of_var(src, REG_ITMP1);
613                                         x86_64_pop_reg(d);
614                                         store_reg_to_var_int(src, d);
615
616                                 } else if (bptr->type == BBTYPE_EXH) {
617                                         d = reg_of_var(src, REG_ITMP1);
618                                         M_INTMOVE(REG_ITMP1, d);
619                                         store_reg_to_var_int(src, d);
620                                 }
621
622                         } else {
623                                 d = reg_of_var(src, REG_ITMP1);
624                                 if ((src->varkind != STACKVAR)) {
625                                         s2 = src->type;
626                                         if (IS_FLT_DBL_TYPE(s2)) {
627                                                 s1 = interfaces[len][s2].regoff;
628                                                 if (!(interfaces[len][s2].flags & INMEMORY)) {
629                                                         M_FLTMOVE(s1, d);
630
631                                                 } else {
632                                                         x86_64_movq_membase_reg(REG_SP, s1 * 8, d);
633                                                 }
634                                                 store_reg_to_var_flt(src, d);
635
636                                         } else {
637                                                 s1 = interfaces[len][s2].regoff;
638                                                 if (!(interfaces[len][s2].flags & INMEMORY)) {
639                                                         M_INTMOVE(s1, d);
640
641                                                 } else {
642                                                         x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
643                                                 }
644                                                 store_reg_to_var_int(src, d);
645                                         }
646                                 }
647                         }
648                         src = src->prev;
649                 }
650
651                 /* walk through all instructions */
652                 
653                 src = bptr->instack;
654                 len = bptr->icount;
655                 for (iptr = bptr->iinstr;
656                     len > 0;
657                     src = iptr->dst, len--, iptr++) {
658
659         MCODECHECK(64);           /* an instruction usually needs < 64 bytes      */
660         switch (iptr->opc) {
661
662                 case ICMD_NOP:        /* ...  ==> ...                                 */
663                         break;
664
665                 case ICMD_NULLCHECKPOP: /* ..., objectref  ==> ...                    */
666                         if (src->flags & INMEMORY) {
667                                 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
668
669                         } else {
670                                 x86_64_test_reg_reg(src->regoff, src->regoff);
671                         }
672                         x86_64_jcc(X86_64_CC_E, 0);
673                         mcode_addxnullrefs(mcodeptr);
674                         break;
675
676                 /* constant operations ************************************************/
677
678                 case ICMD_ICONST:     /* ...  ==> ..., constant                       */
679                                       /* op1 = 0, val.i = constant                    */
680
681 /*                      d = reg_of_var(iptr->dst, REG_ITMP1); */
682 /*                      if (iptr->dst->flags & INMEMORY) { */
683 /*                              x86_64_movl_imm_membase(iptr->val.i, REG_SP, iptr->dst->regoff * 8); */
684
685 /*                      } else { */
686 /*                              x86_64_movl_imm_reg(iptr->val.i, d); */
687 /*                      } */
688                         d = reg_of_var(iptr->dst, REG_ITMP1);
689                         if (iptr->val.i == 0) {
690                                 x86_64_alu_reg_reg(X86_64_XOR, d, d);
691                         } else {
692                                 x86_64_movl_imm_reg(iptr->val.i, d);
693                         }
694                         store_reg_to_var_int(iptr->dst, d);
695                         break;
696
697                 case ICMD_ACONST:     /* ...  ==> ..., constant                       */
698                                       /* op1 = 0, val.a = constant                    */
699
700                         d = reg_of_var(iptr->dst, REG_ITMP1);
701                         if (iptr->val.a == 0) {
702                                 x86_64_alu_reg_reg(X86_64_XOR, d, d);
703                         } else {
704                                 x86_64_mov_imm_reg((s8) iptr->val.a, d);
705                         }
706                         store_reg_to_var_int(iptr->dst, d);
707                         break;
708
709                 case ICMD_LCONST:     /* ...  ==> ..., constant                       */
710                                       /* op1 = 0, val.l = constant                    */
711
712                         d = reg_of_var(iptr->dst, REG_ITMP1);
713                         if (iptr->val.l == 0) {
714                                 x86_64_alu_reg_reg(X86_64_XOR, d, d);
715                         } else {
716                                 x86_64_mov_imm_reg(iptr->val.l, d);
717                         }
718                         store_reg_to_var_int(iptr->dst, d);
719                         break;
720
721                 case ICMD_FCONST:     /* ...  ==> ..., constant                       */
722                                       /* op1 = 0, val.f = constant                    */
723
724                         d = reg_of_var(iptr->dst, REG_FTMP1);
725                         a = dseg_addfloat(iptr->val.f);
726                         x86_64_movdl_membase_reg(RIP, -(((s8) mcodeptr + ((d > 7) ? 9 : 8)) - (s8) mcodebase) + a, d);
727                         store_reg_to_var_flt(iptr->dst, d);
728                         break;
729                 
730                 case ICMD_DCONST:     /* ...  ==> ..., constant                       */
731                                       /* op1 = 0, val.d = constant                    */
732
733                         d = reg_of_var(iptr->dst, REG_FTMP1);
734                         a = dseg_adddouble(iptr->val.d);
735                         x86_64_movd_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, d);
736                         store_reg_to_var_flt(iptr->dst, d);
737                         break;
738
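                /* (The float/double constant is placed in the data segment,
                   which CACAO lays out directly below the method's code, so
                   the dseg offset a is negative.  An x86_64 RIP-relative
                   displacement is relative to the end of the instruction,
                   which is why the instruction length (8 bytes, or 9 when a
                   REX prefix is needed, e.g. for xmm8..xmm15) is subtracted
                   together with the current distance from mcodebase.) */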
739
740                 /* load/store operations **********************************************/
741
742                 case ICMD_ILOAD:      /* ...  ==> ..., content of local variable      */
743                                       /* op1 = local variable                         */
744
745                         d = reg_of_var(iptr->dst, REG_ITMP1);
746                         if ((iptr->dst->varkind == LOCALVAR) &&
747                             (iptr->dst->varnum == iptr->op1)) {
748                                 break;
749                         }
750                         var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
751                         if (var->flags & INMEMORY) {
752                                 x86_64_movl_membase_reg(REG_SP, var->regoff * 8, d);
753                                 store_reg_to_var_int(iptr->dst, d);
754
755                         } else {
756                                 if (iptr->dst->flags & INMEMORY) {
757                                         x86_64_mov_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
758
759                                 } else {
760                                         M_INTMOVE(var->regoff, d);
761                                 }
762                         }
763                         break;
764
765                 case ICMD_LLOAD:      /* ...  ==> ..., content of local variable      */
766                 case ICMD_ALOAD:      /* op1 = local variable                         */
767
768                         d = reg_of_var(iptr->dst, REG_ITMP1);
769                         if ((iptr->dst->varkind == LOCALVAR) &&
770                             (iptr->dst->varnum == iptr->op1)) {
771                                 break;
772                         }
773                         var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
774                         if (var->flags & INMEMORY) {
775                                 x86_64_mov_membase_reg(REG_SP, var->regoff * 8, d);
776                                 store_reg_to_var_int(iptr->dst, d);
777
778                         } else {
779                                 if (iptr->dst->flags & INMEMORY) {
780                                         x86_64_mov_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
781
782                                 } else {
783                                         M_INTMOVE(var->regoff, d);
784                                 }
785                         }
786                         break;
787
788                 case ICMD_FLOAD:      /* ...  ==> ..., content of local variable      */
789                 case ICMD_DLOAD:      /* op1 = local variable                         */
790
791                         d = reg_of_var(iptr->dst, REG_FTMP1);
792                         if ((iptr->dst->varkind == LOCALVAR) &&
793                             (iptr->dst->varnum == iptr->op1)) {
794                                 break;
795                         }
796                         var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
797                         if (var->flags & INMEMORY) {
798                                 x86_64_movq_membase_reg(REG_SP, var->regoff * 8, d);
799                                 store_reg_to_var_flt(iptr->dst, d);
800
801                         } else {
802                                 if (iptr->dst->flags & INMEMORY) {
803                                         x86_64_movq_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
804
805                                 } else {
806                                         M_FLTMOVE(var->regoff, d);
807                                 }
808                         }
809                         break;
810
811                 case ICMD_ISTORE:     /* ..., value  ==> ...                          */
812                 case ICMD_LSTORE:     /* op1 = local variable                         */
813                 case ICMD_ASTORE:
814
815                         if ((src->varkind == LOCALVAR) &&
816                             (src->varnum == iptr->op1)) {
817                                 break;
818                         }
819                         var = &(locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
820                         if (var->flags & INMEMORY) {
821                                 var_to_reg_int(s1, src, REG_ITMP1);
822                                 x86_64_mov_reg_membase(s1, REG_SP, var->regoff * 8);
823
824                         } else {
825                                 var_to_reg_int(s1, src, var->regoff);
826                                 M_INTMOVE(s1, var->regoff);
827                         }
828                         break;
829
830                 case ICMD_FSTORE:     /* ..., value  ==> ...                          */
831                 case ICMD_DSTORE:     /* op1 = local variable                         */
832
833                         if ((src->varkind == LOCALVAR) &&
834                             (src->varnum == iptr->op1)) {
835                                 break;
836                         }
837                         var = &(locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
838                         if (var->flags & INMEMORY) {
839                                 var_to_reg_flt(s1, src, REG_FTMP1);
840                                 x86_64_movq_reg_membase(s1, REG_SP, var->regoff * 8);
841
842                         } else {
843                                 var_to_reg_flt(s1, src, var->regoff);
844                                 M_FLTMOVE(s1, var->regoff);
845                         }
846                         break;
847
848
849                 /* pop/dup/swap operations ********************************************/
850
851                 /* attention: doubles and longs occupy only one stack entry in CACAO ICMDs */
852
853                 case ICMD_POP:        /* ..., value  ==> ...                          */
854                 case ICMD_POP2:       /* ..., value, value  ==> ...                   */
855                         break;
856
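/* M_COPY copies one stack element to another; if source and destination
   already share the same register or stack slot, no code is generated. */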
857 #define M_COPY(from,to) \
858                 d = reg_of_var(to, REG_ITMP1); \
859                         if ((from->regoff != to->regoff) || \
860                             ((from->flags ^ to->flags) & INMEMORY)) { \
861                                 if (IS_FLT_DBL_TYPE(from->type)) { \
862                                         var_to_reg_flt(s1, from, d); \
863                                         M_FLTMOVE(s1, d); \
864                                         store_reg_to_var_flt(to, d); \
865                                 } else { \
866                                         var_to_reg_int(s1, from, d); \
867                                         M_INTMOVE(s1, d); \
868                                         store_reg_to_var_int(to, d); \
869                                 } \
870                         }
871
872                 case ICMD_DUP:        /* ..., a ==> ..., a, a                         */
873                         M_COPY(src, iptr->dst);
874                         break;
875
876                 case ICMD_DUP_X1:     /* ..., a, b ==> ..., b, a, b                   */
877
878                         M_COPY(src,       iptr->dst->prev->prev);
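                        /* fall through to ICMD_DUP2 for the two top copies */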
879
880                 case ICMD_DUP2:       /* ..., a, b ==> ..., a, b, a, b                */
881
882                         M_COPY(src,       iptr->dst);
883                         M_COPY(src->prev, iptr->dst->prev);
884                         break;
885
886                 case ICMD_DUP2_X1:    /* ..., a, b, c ==> ..., b, c, a, b, c          */
887
888                         M_COPY(src->prev,       iptr->dst->prev->prev->prev->prev);
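                        /* fall through to ICMD_DUP_X2 for the remaining copies */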
889
890                 case ICMD_DUP_X2:     /* ..., a, b, c ==> ..., c, a, b, c             */
891
892                         M_COPY(src,             iptr->dst);
893                         M_COPY(src->prev,       iptr->dst->prev);
894                         M_COPY(src->prev->prev, iptr->dst->prev->prev);
895                         M_COPY(src, iptr->dst->prev->prev->prev);
896                         break;
897
898                 case ICMD_DUP2_X2:    /* ..., a, b, c, d ==> ..., c, d, a, b, c, d    */
899
900                         M_COPY(src,                   iptr->dst);
901                         M_COPY(src->prev,             iptr->dst->prev);
902                         M_COPY(src->prev->prev,       iptr->dst->prev->prev);
903                         M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
904                         M_COPY(src,       iptr->dst->prev->prev->prev->prev);
905                         M_COPY(src->prev, iptr->dst->prev->prev->prev->prev->prev);
906                         break;
907
908                 case ICMD_SWAP:       /* ..., a, b ==> ..., b, a                      */
909
910                         M_COPY(src, iptr->dst->prev);
911                         M_COPY(src->prev, iptr->dst);
912                         break;
913
914
915                 /* integer operations *************************************************/
916
917                 case ICMD_INEG:       /* ..., value  ==> ..., - value                 */
918
919                         d = reg_of_var(iptr->dst, REG_NULL);
920                         if (iptr->dst->flags & INMEMORY) {
921                                 if (src->flags & INMEMORY) {
922                                         if (src->regoff == iptr->dst->regoff) {
923                                                 x86_64_negl_membase(REG_SP, iptr->dst->regoff * 8);
924
925                                         } else {
926                                                 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
927                                                 x86_64_negl_reg(REG_ITMP1);
928                                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
929                                         }
930
931                                 } else {
932                                         x86_64_movl_reg_membase(src->regoff, REG_SP, iptr->dst->regoff * 8);
933                                         x86_64_negl_membase(REG_SP, iptr->dst->regoff * 8);
934                                 }
935
936                         } else {
937                                 if (src->flags & INMEMORY) {
938                                         x86_64_movl_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
939                                         x86_64_negl_reg(d);
940
941                                 } else {
942                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
943                                         x86_64_negl_reg(iptr->dst->regoff);
944                                 }
945                         }
946                         break;
947
948                 case ICMD_LNEG:       /* ..., value  ==> ..., - value                 */
949
950                         d = reg_of_var(iptr->dst, REG_NULL);
951                         if (iptr->dst->flags & INMEMORY) {
952                                 if (src->flags & INMEMORY) {
953                                         if (src->regoff == iptr->dst->regoff) {
954                                                 x86_64_neg_membase(REG_SP, iptr->dst->regoff * 8);
955
956                                         } else {
957                                                 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
958                                                 x86_64_neg_reg(REG_ITMP1);
959                                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
960                                         }
961
962                                 } else {
963                                         x86_64_mov_reg_membase(src->regoff, REG_SP, iptr->dst->regoff * 8);
964                                         x86_64_neg_membase(REG_SP, iptr->dst->regoff * 8);
965                                 }
966
967                         } else {
968                                 if (src->flags & INMEMORY) {
969                                         x86_64_mov_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
970                                         x86_64_neg_reg(iptr->dst->regoff);
971
972                                 } else {
973                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
974                                         x86_64_neg_reg(iptr->dst->regoff);
975                                 }
976                         }
977                         break;
978
979                 case ICMD_I2L:        /* ..., value  ==> ..., value                   */
980
981                         d = reg_of_var(iptr->dst, REG_ITMP3);
982                         if (src->flags & INMEMORY) {
983                                 x86_64_movslq_membase_reg(REG_SP, src->regoff * 8, d);
984
985                         } else {
986                                 x86_64_movslq_reg_reg(src->regoff, d);
987                         }
988                         store_reg_to_var_int(iptr->dst, d);
989                         break;
990
991                 case ICMD_L2I:        /* ..., value  ==> ..., value                   */
992
993                         var_to_reg_int(s1, src, REG_ITMP1);
994                         d = reg_of_var(iptr->dst, REG_ITMP3);
995                         M_INTMOVE(s1, d);
996                         store_reg_to_var_int(iptr->dst, d);
997                         break;
998
999                 case ICMD_INT2BYTE:   /* ..., value  ==> ..., value                   */
1000
1001                         d = reg_of_var(iptr->dst, REG_ITMP3);
1002                         if (src->flags & INMEMORY) {
1003                                 x86_64_movsbq_membase_reg(REG_SP, src->regoff * 8, d);
1004
1005                         } else {
1006                                 x86_64_movsbq_reg_reg(src->regoff, d);
1007                         }
1008                         store_reg_to_var_int(iptr->dst, d);
1009                         break;
1010
1011                 case ICMD_INT2CHAR:   /* ..., value  ==> ..., value                   */
1012
1013                         d = reg_of_var(iptr->dst, REG_ITMP3);
1014                         if (src->flags & INMEMORY) {
1015                                 x86_64_movzwq_membase_reg(REG_SP, src->regoff * 8, d);
1016
1017                         } else {
1018                                 x86_64_movzwq_reg_reg(src->regoff, d);
1019                         }
1020                         store_reg_to_var_int(iptr->dst, d);
1021                         break;
1022
1023                 case ICMD_INT2SHORT:  /* ..., value  ==> ..., value                   */
1024
1025                         d = reg_of_var(iptr->dst, REG_ITMP3);
1026                         if (src->flags & INMEMORY) {
1027                                 x86_64_movswq_membase_reg(REG_SP, src->regoff * 8, d);
1028
1029                         } else {
1030                                 x86_64_movswq_reg_reg(src->regoff, d);
1031                         }
1032                         store_reg_to_var_int(iptr->dst, d);
1033                         break;
1034
1035
1036                 case ICMD_IADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
1037
1038                         d = reg_of_var(iptr->dst, REG_NULL);
1039                         x86_64_emit_ialu(X86_64_ADD, src, iptr);
1040                         break;
1041
1042                 case ICMD_IADDCONST:  /* ..., value  ==> ..., value + constant        */
1043                                       /* val.i = constant                             */
1044
1045                         d = reg_of_var(iptr->dst, REG_NULL);
1046                         x86_64_emit_ialuconst(X86_64_ADD, src, iptr);
1047                         break;
1048
1049                 case ICMD_LADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
1050
1051                         d = reg_of_var(iptr->dst, REG_NULL);
1052                         x86_64_emit_lalu(X86_64_ADD, src, iptr);
1053                         break;
1054
1055                 case ICMD_LADDCONST:  /* ..., value  ==> ..., value + constant        */
1056                                       /* val.l = constant                             */
1057
1058                         d = reg_of_var(iptr->dst, REG_NULL);
1059                         x86_64_emit_laluconst(X86_64_ADD, src, iptr);
1060                         break;
1061
1062                 case ICMD_ISUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
1063
1064                         d = reg_of_var(iptr->dst, REG_NULL);
1065                         if (iptr->dst->flags & INMEMORY) {
1066                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1067                                         if (src->prev->regoff == iptr->dst->regoff) {
1068                                                 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1069                                                 x86_64_alul_reg_membase(X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1070
1071                                         } else {
1072                                                 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1073                                                 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1074                                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1075                                         }
1076
1077                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1078                                         M_INTMOVE(src->prev->regoff, REG_ITMP1);
1079                                         x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1080                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1081
1082                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1083                                         if (src->prev->regoff == iptr->dst->regoff) {
1084                                                 x86_64_alul_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1085
1086                                         } else {
1087                                                 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1088                                                 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1089                                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1090                                         }
1091
1092                                 } else {
1093                                         x86_64_movl_reg_membase(src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1094                                         x86_64_alul_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1095                                 }
1096
1097                         } else {
1098                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1099                                         x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, d);
1100                                         x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1101
1102                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1103                                         M_INTMOVE(src->prev->regoff, d);
1104                                         x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1105
1106                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1107                                         /* workaround for reg alloc */
1108                                         if (src->regoff == iptr->dst->regoff) {
1109                                                 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1110                                                 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1111                                                 M_INTMOVE(REG_ITMP1, d);
1112
1113                                         } else {
1114                                                 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, d);
1115                                                 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, d);
1116                                         }
1117
1118                                 } else {
1119                                         /* workaround for reg alloc */
1120                                         if (src->regoff == iptr->dst->regoff) {
1121                                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1122                                                 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1123                                                 M_INTMOVE(REG_ITMP1, d);
1124
1125                                         } else {
1126                                                 M_INTMOVE(src->prev->regoff, d);
1127                                                 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, d);
1128                                         }
1129                                 }
1130                         }
1131                         break;
1132
1133                 case ICMD_ISUBCONST:  /* ..., value  ==> ..., value - constant        */
1134                                       /* val.i = constant                             */
1135
1136                         d = reg_of_var(iptr->dst, REG_NULL);
1137                         x86_64_emit_ialuconst(X86_64_SUB, src, iptr);
1138                         break;
1139
1140                 case ICMD_LSUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
1141
1142                         d = reg_of_var(iptr->dst, REG_NULL);
1143                         if (iptr->dst->flags & INMEMORY) {
1144                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1145                                         if (src->prev->regoff == iptr->dst->regoff) {
1146                                                 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1147                                                 x86_64_alu_reg_membase(X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1148
1149                                         } else {
1150                                                 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1151                                                 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1152                                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1153                                         }
1154
1155                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1156                                         M_INTMOVE(src->prev->regoff, REG_ITMP1);
1157                                         x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1158                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1159
1160                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1161                                         if (src->prev->regoff == iptr->dst->regoff) {
1162                                                 x86_64_alu_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1163
1164                                         } else {
1165                                                 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1166                                                 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1167                                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1168                                         }
1169
1170                                 } else {
1171                                         x86_64_mov_reg_membase(src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1172                                         x86_64_alu_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1173                                 }
1174
1175                         } else {
1176                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1177                                         x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, d);
1178                                         x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1179
1180                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1181                                         M_INTMOVE(src->prev->regoff, d);
1182                                         x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1183
1184                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1185                                         /* workaround for reg alloc */
1186                                         if (src->regoff == iptr->dst->regoff) {
1187                                                 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1188                                                 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1189                                                 M_INTMOVE(REG_ITMP1, d);
1190
1191                                         } else {
1192                                                 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, d);
1193                                                 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, d);
1194                                         }
1195
1196                                 } else {
1197                                         /* workaround for reg alloc */
1198                                         if (src->regoff == iptr->dst->regoff) {
1199                                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1200                                                 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1201                                                 M_INTMOVE(REG_ITMP1, d);
1202
1203                                         } else {
1204                                                 M_INTMOVE(src->prev->regoff, d);
1205                                                 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, d);
1206                                         }
1207                                 }
1208                         }
1209                         break;
1210
1211                 case ICMD_LSUBCONST:  /* ..., value  ==> ..., value - constant        */
1212                                       /* val.l = constant                             */
1213
1214                         d = reg_of_var(iptr->dst, REG_NULL);
1215                         x86_64_emit_laluconst(X86_64_SUB, src, iptr);
1216                         break;
1217
1218                 case ICMD_IMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1219
1220                         d = reg_of_var(iptr->dst, REG_NULL);
1221                         if (iptr->dst->flags & INMEMORY) {
1222                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1223                                         x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1224                                         x86_64_imull_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1225                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1226
1227                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1228                                         x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1229                                         x86_64_imull_reg_reg(src->prev->regoff, REG_ITMP1);
1230                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1231
1232                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1233                                         x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1234                                         x86_64_imull_reg_reg(src->regoff, REG_ITMP1);
1235                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1236
1237                                 } else {
1238                                         M_INTMOVE(src->prev->regoff, REG_ITMP1);
1239                                         x86_64_imull_reg_reg(src->regoff, REG_ITMP1);
1240                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1241                                 }
1242
1243                         } else {
1244                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1245                                         x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1246                                         x86_64_imull_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1247
1248                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1249                                         M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1250                                         x86_64_imull_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1251
1252                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1253                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
1254                                         x86_64_imull_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1255
1256                                 } else {
1257                                         if (src->regoff == iptr->dst->regoff) {
1258                                                 x86_64_imull_reg_reg(src->prev->regoff, iptr->dst->regoff);
1259
1260                                         } else {
1261                                                 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1262                                                 x86_64_imull_reg_reg(src->regoff, iptr->dst->regoff);
1263                                         }
1264                                 }
1265                         }
1266                         break;
1267
1268                 case ICMD_IMULCONST:  /* ..., value  ==> ..., value * constant        */
1269                                       /* val.i = constant                             */
1270
1271                         d = reg_of_var(iptr->dst, REG_NULL);
1272                         if (iptr->dst->flags & INMEMORY) {
1273                                 if (src->flags & INMEMORY) {
1274                                         x86_64_imull_imm_membase_reg(iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1275                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1276
1277                                 } else {
1278                                         x86_64_imull_imm_reg_reg(iptr->val.i, src->regoff, REG_ITMP1);
1279                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1280                                 }
1281
1282                         } else {
1283                                 if (src->flags & INMEMORY) {
1284                                         x86_64_imull_imm_membase_reg(iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1285
1286                                 } else {
1287                                         if (iptr->val.i == 2) {
1288                                                 M_INTMOVE(src->regoff, iptr->dst->regoff);
1289                                                 x86_64_alul_reg_reg(X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1290
1291                                         } else {
1292                                                 x86_64_imull_imm_reg_reg(iptr->val.i, src->regoff, iptr->dst->regoff);    /* 3 cycles */
1293                                         }
1294                                 }
1295                         }
1296                         break;
1297
1298                 case ICMD_LMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1299
1300                         d = reg_of_var(iptr->dst, REG_NULL);
1301                         if (iptr->dst->flags & INMEMORY) {
1302                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1303                                         x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1304                                         x86_64_imul_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1305                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1306
1307                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1308                                         x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1309                                         x86_64_imul_reg_reg(src->prev->regoff, REG_ITMP1);
1310                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1311
1312                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1313                                         x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1314                                         x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1315                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1316
1317                                 } else {
1318                                         x86_64_mov_reg_reg(src->prev->regoff, REG_ITMP1);
1319                                         x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1320                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1321                                 }
1322
1323                         } else {
1324                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1325                                         x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1326                                         x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1327
1328                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1329                                         M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1330                                         x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1331
1332                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1333                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
1334                                         x86_64_imul_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1335
1336                                 } else {
1337                                         if (src->regoff == iptr->dst->regoff) {
1338                                                 x86_64_imul_reg_reg(src->prev->regoff, iptr->dst->regoff);
1339
1340                                         } else {
1341                                                 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1342                                                 x86_64_imul_reg_reg(src->regoff, iptr->dst->regoff);
1343                                         }
1344                                 }
1345                         }
1346                         break;
1347
1348                 case ICMD_LMULCONST:  /* ..., value  ==> ..., value * constant        */
1349                                       /* val.l = constant                             */
1350
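                        /* imul only encodes a sign-extended 32-bit immediate, so
                           constants outside that range are first loaded into
                           REG_ITMP1 with a 64-bit move.                              */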
1351                         d = reg_of_var(iptr->dst, REG_NULL);
1352                         if (iptr->dst->flags & INMEMORY) {
1353                                 if (src->flags & INMEMORY) {
1354                                         if (x86_64_is_imm32(iptr->val.l)) {
1355                                                 x86_64_imul_imm_membase_reg(iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1356
1357                                         } else {
1358                                                 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1359                                                 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1360                                         }
1361                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1362                                         
1363                                 } else {
1364                                         if (x86_64_is_imm32(iptr->val.l)) {
1365                                                 x86_64_imul_imm_reg_reg(iptr->val.l, src->regoff, REG_ITMP1);
1366
1367                                         } else {
1368                                                 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1369                                                 x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1370                                         }
1371                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1372                                 }
1373
1374                         } else {
1375                                 if (src->flags & INMEMORY) {
1376                                         x86_64_imul_imm_membase_reg(iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1377
1378                                 } else {
1379                                         if (iptr->val.l == 2) {
1380                                                 M_INTMOVE(src->regoff, iptr->dst->regoff);
1381                                                 x86_64_alu_reg_reg(X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1382
1383                                         } else {
1384                                                 x86_64_imul_imm_reg_reg(iptr->val.l, src->regoff, iptr->dst->regoff);    /* 4 cycles */
1385                                         }
1386                                 }
1387                         }
1388                         break;
1389
1390                 case ICMD_IDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1391
1392                         d = reg_of_var(iptr->dst, REG_NULL);
1393                         if (src->prev->flags & INMEMORY) {
1394                                 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, RAX);
1395
1396                         } else {
1397                                 M_INTMOVE(src->prev->regoff, RAX);
1398                         }
1399                         
1400                         if (src->flags & INMEMORY) {
1401                                 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1402
1403                         } else {
1404                                 M_INTMOVE(src->regoff, REG_ITMP3);
1405                         }
1406
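                        /* 0x80000000 / -1 overflows and idiv would fault; the JVM
                           spec defines the result as 0x80000000, which is already in
                           RAX, so the cltd/idiv sequence is skipped in that case.    */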
1407                         x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, RAX);    /* check as described in jvm spec */
1408                         x86_64_jcc(X86_64_CC_NE, 4 + 6);
1409                         x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP3);      /* 4 bytes */
1410                         x86_64_jcc(X86_64_CC_E, 3 + 1 + 3);                  /* 6 bytes */
1411
1412                         x86_64_mov_reg_reg(RDX, REG_ITMP2);    /* save %rdx, because it's an argument register */
1413                         x86_64_cltd();
1414                         x86_64_idivl_reg(REG_ITMP3);
1415
1416                         if (iptr->dst->flags & INMEMORY) {
1417                                 x86_64_mov_reg_membase(RAX, REG_SP, iptr->dst->regoff * 8);
1418                                 x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1419
1420                         } else {
1421                                 M_INTMOVE(RAX, iptr->dst->regoff);
1422
1423                                 if (iptr->dst->regoff != RDX) {
1424                                         x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1425                                 }
1426                         }
1427                         break;
1428
1429                 case ICMD_IREM:       /* ..., val1, val2  ==> ..., val1 % val2        */
1430
1431                         d = reg_of_var(iptr->dst, REG_NULL);
1432                         if (src->prev->flags & INMEMORY) {
1433                                 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, RAX);
1434
1435                         } else {
1436                                 M_INTMOVE(src->prev->regoff, RAX);
1437                         }
1438                         
1439                         if (src->flags & INMEMORY) {
1440                                 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1441
1442                         } else {
1443                                 M_INTMOVE(src->regoff, REG_ITMP3);
1444                         }
1445
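                        /* for 0x80000000 % -1 the result is 0: %rdx is cleared in
                           that path and the division itself is skipped.             */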
1446                         x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, RAX);    /* check as described in jvm spec */
1447                         x86_64_jcc(X86_64_CC_NE, 2 + 4 + 6);
1448                         x86_64_alul_reg_reg(X86_64_XOR, RDX, RDX);           /* 2 bytes */
1449                         x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP3);      /* 4 bytes */
1450                         x86_64_jcc(X86_64_CC_E, 3 + 1 + 3);                  /* 6 bytes */
1451
1452                         x86_64_mov_reg_reg(RDX, REG_ITMP2);    /* save %rdx, because it's an argument register */
1453                         x86_64_cltd();
1454                         x86_64_idivl_reg(REG_ITMP3);
1455
1456                         if (iptr->dst->flags & INMEMORY) {
1457                                 x86_64_mov_reg_membase(RDX, REG_SP, iptr->dst->regoff * 8);
1458                                 x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1459
1460                         } else {
1461                                 M_INTMOVE(RDX, iptr->dst->regoff);
1462
1463                                 if (iptr->dst->regoff != RDX) {
1464                                         x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1465                                 }
1466                         }
1467                         break;
1468
1469                 case ICMD_IDIVPOW2:   /* ..., value  ==> ..., value >> constant       */
1470                                       /* val.i = constant                             */
1471
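                        /* signed division by 2^val.i: an arithmetic shift alone
                           rounds toward negative infinity, so negative values get
                           the bias (2^val.i - 1) added first (branch-free via cmov)
                           to round toward zero as the JVM requires.                  */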
1472                         var_to_reg_int(s1, src, REG_ITMP1);
1473                         d = reg_of_var(iptr->dst, REG_ITMP3);
1474                         M_INTMOVE(s1, REG_ITMP1);
1475                         x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1476                         x86_64_leal_membase_reg(REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1477                         x86_64_cmovccl_reg_reg(X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1478                         x86_64_shiftl_imm_reg(X86_64_SAR, iptr->val.i, REG_ITMP1);
1479                         x86_64_mov_reg_reg(REG_ITMP1, d);
1480                         store_reg_to_var_int(iptr->dst, d);
1481                         break;
1482
1483                 case ICMD_IREMPOW2:   /* ..., value  ==> ..., value % constant        */
1484                                       /* val.i = constant                             */
1485
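                        /* remainder by a power of two; val.i presumably holds the
                           mask 2^n - 1: the (biased, if negative) value is rounded
                           down to a multiple of 2^n and subtracted from the original,
                           so the result keeps the sign of the dividend.              */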
1486                         var_to_reg_int(s1, src, REG_ITMP1);
1487                         d = reg_of_var(iptr->dst, REG_ITMP3);
1488                         M_INTMOVE(s1, REG_ITMP1);
1489                         x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1490                         x86_64_leal_membase_reg(REG_ITMP1, iptr->val.i, REG_ITMP2);
1491                         x86_64_cmovccl_reg_reg(X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1492                         x86_64_alul_imm_reg(X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1493                         x86_64_alul_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
1494                         x86_64_mov_reg_reg(REG_ITMP1, d);
1495                         store_reg_to_var_int(iptr->dst, d);
1496                         break;
1497
1498
1499                 case ICMD_LDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1500
1501                         d = reg_of_var(iptr->dst, REG_NULL);
1502                         if (src->prev->flags & INMEMORY) {
1503                                 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1504
1505                         } else {
1506                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1507                         }
1508                         
1509                         if (src->flags & INMEMORY) {
1510                                 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1511
1512                         } else {
1513                                 M_INTMOVE(src->regoff, REG_ITMP3);
1514                         }
1515
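                        /* same overflow guard as ICMD_IDIV, for the 64-bit case:
                           LONG_MIN / -1 would fault in idiv, so cqto/idiv are
                           skipped and the dividend, which is also the required
                           result, is left in place.                                  */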
1516                         x86_64_mov_imm_reg(0x8000000000000000LL, REG_ITMP2);    /* check as described in jvm spec */
1517                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
1518                         x86_64_jcc(X86_64_CC_NE, 4 + 6);
1519                         x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP3);          /* 4 bytes */
1520                         x86_64_jcc(X86_64_CC_E, 3 + 2 + 3);                     /* 6 bytes */
1521
1522                         x86_64_mov_reg_reg(RDX, REG_ITMP2);    /* save %rdx, because it's an argument register */
1523                         x86_64_cqto();
1524                         x86_64_idiv_reg(REG_ITMP3);
1525
1526                         if (iptr->dst->flags & INMEMORY) {
1527                                 x86_64_mov_reg_membase(RAX, REG_SP, iptr->dst->regoff * 8);
1528                                 x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1529
1530                         } else {
1531                                 M_INTMOVE(RAX, iptr->dst->regoff);
1532
1533                                 if (iptr->dst->regoff != RDX) {
1534                                         x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1535                                 }
1536                         }
1537                         break;
1538
1539                 case ICMD_LREM:       /* ..., val1, val2  ==> ..., val1 % val2        */
1540
1541                         d = reg_of_var(iptr->dst, REG_NULL);
1542                         if (src->prev->flags & INMEMORY) {
1543                                 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1544
1545                         } else {
1546                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1547                         }
1548                         
1549                         if (src->flags & INMEMORY) {
1550                                 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1551
1552                         } else {
1553                                 M_INTMOVE(src->regoff, REG_ITMP3);
1554                         }
1555
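                        /* 64-bit variant of the ICMD_IREM guard: LONG_MIN % -1 is 0,
                           so %rdx is cleared and the division skipped.               */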
1556                         x86_64_mov_imm_reg(0x8000000000000000LL, REG_ITMP2);    /* check as described in jvm spec */
1557                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
1558                         x86_64_jcc(X86_64_CC_NE, 2 + 4 + 6);
1559                         x86_64_alul_reg_reg(X86_64_XOR, RDX, RDX);              /* 2 bytes */
1560                         x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP3);          /* 4 bytes */
1561                         x86_64_jcc(X86_64_CC_E, 3 + 2 + 3);                     /* 6 bytes */
1562
1563                         x86_64_mov_reg_reg(RDX, REG_ITMP2);    /* save %rdx, because it's an argument register */
1564                         x86_64_cqto();
1565                         x86_64_idiv_reg(REG_ITMP3);
1566
1567                         if (iptr->dst->flags & INMEMORY) {
1568                                 x86_64_mov_reg_membase(RDX, REG_SP, iptr->dst->regoff * 8);
1569                                 x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1570
1571                         } else {
1572                                 M_INTMOVE(RDX, iptr->dst->regoff);
1573
1574                                 if (iptr->dst->regoff != RDX) {
1575                                         x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1576                                 }
1577                         }
1578                         break;
1579
1580                 case ICMD_LDIVPOW2:   /* ..., value  ==> ..., value >> constant       */
1581                                       /* val.i = constant                             */
1582
1583                         var_to_reg_int(s1, src, REG_ITMP1);
1584                         d = reg_of_var(iptr->dst, REG_ITMP3);
1585                         M_INTMOVE(s1, REG_ITMP1);
1586                         x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1587                         x86_64_lea_membase_reg(REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1588                         x86_64_cmovcc_reg_reg(X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1589                         x86_64_shift_imm_reg(X86_64_SAR, iptr->val.i, REG_ITMP1);
1590                         x86_64_mov_reg_reg(REG_ITMP1, d);
1591                         store_reg_to_var_int(iptr->dst, d);
1592                         break;
1593
1594                 case ICMD_LREMPOW2:   /* ..., value  ==> ..., value % constant        */
1595                                       /* val.l = constant                             */
1596
1597                         var_to_reg_int(s1, src, REG_ITMP1);
1598                         d = reg_of_var(iptr->dst, REG_ITMP3);
1599                         M_INTMOVE(s1, REG_ITMP1);
1600                         x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1601                         x86_64_lea_membase_reg(REG_ITMP1, iptr->val.i, REG_ITMP2);
1602                         x86_64_cmovcc_reg_reg(X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1603                         x86_64_alu_imm_reg(X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1604                         x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
1605                         x86_64_mov_reg_reg(REG_ITMP1, d);
1606                         store_reg_to_var_int(iptr->dst, d);
1607                         break;
1608
1609                 case ICMD_ISHL:       /* ..., val1, val2  ==> ..., val1 << val2       */
1610
1611                         d = reg_of_var(iptr->dst, REG_NULL);
1612                         x86_64_emit_ishift(X86_64_SHL, src, iptr);
1613                         break;
1614
1615                 case ICMD_ISHLCONST:  /* ..., value  ==> ..., value << constant       */
1616                                       /* val.i = constant                             */
1617
1618                         d = reg_of_var(iptr->dst, REG_NULL);
1619                         x86_64_emit_ishiftconst(X86_64_SHL, src, iptr);
1620                         break;
1621
1622                 case ICMD_ISHR:       /* ..., val1, val2  ==> ..., val1 >> val2       */
1623
1624                         d = reg_of_var(iptr->dst, REG_NULL);
1625                         x86_64_emit_ishift(X86_64_SAR, src, iptr);
1626                         break;
1627
1628                 case ICMD_ISHRCONST:  /* ..., value  ==> ..., value >> constant       */
1629                                       /* val.i = constant                             */
1630
1631                         d = reg_of_var(iptr->dst, REG_NULL);
1632                         x86_64_emit_ishiftconst(X86_64_SAR, src, iptr);
1633                         break;
1634
1635                 case ICMD_IUSHR:      /* ..., val1, val2  ==> ..., val1 >>> val2      */
1636
1637                         d = reg_of_var(iptr->dst, REG_NULL);
1638                         x86_64_emit_ishift(X86_64_SHR, src, iptr);
1639                         break;
1640
1641                 case ICMD_IUSHRCONST: /* ..., value  ==> ..., value >>> constant      */
1642                                       /* val.i = constant                             */
1643
1644                         d = reg_of_var(iptr->dst, REG_NULL);
1645                         x86_64_emit_ishiftconst(X86_64_SHR, src, iptr);
1646                         break;
1647
1648                 case ICMD_LSHL:       /* ..., val1, val2  ==> ..., val1 << val2       */
1649
1650                         d = reg_of_var(iptr->dst, REG_NULL);
1651                         x86_64_emit_lshift(X86_64_SHL, src, iptr);
1652                         break;
1653
1654                 case ICMD_LSHLCONST:  /* ..., value  ==> ..., value << constant       */
1655                                       /* val.i = constant                             */
1656
1657                         d = reg_of_var(iptr->dst, REG_NULL);
1658                         x86_64_emit_lshiftconst(X86_64_SHL, src, iptr);
1659                         break;
1660
1661                 case ICMD_LSHR:       /* ..., val1, val2  ==> ..., val1 >> val2       */
1662
1663                         d = reg_of_var(iptr->dst, REG_NULL);
1664                         x86_64_emit_lshift(X86_64_SAR, src, iptr);
1665                         break;
1666
1667                 case ICMD_LSHRCONST:  /* ..., value  ==> ..., value >> constant       */
1668                                       /* val.i = constant                             */
1669
1670                         d = reg_of_var(iptr->dst, REG_NULL);
1671                         x86_64_emit_lshiftconst(X86_64_SAR, src, iptr);
1672                         break;
1673
1674                 case ICMD_LUSHR:      /* ..., val1, val2  ==> ..., val1 >>> val2      */
1675
1676                         d = reg_of_var(iptr->dst, REG_NULL);
1677                         x86_64_emit_lshift(X86_64_SHR, src, iptr);
1678                         break;
1679
1680                 case ICMD_LUSHRCONST: /* ..., value  ==> ..., value >>> constant      */
1681                                       /* val.i = constant                             */
1682
1683                         d = reg_of_var(iptr->dst, REG_NULL);
1684                         x86_64_emit_lshiftconst(X86_64_SHR, src, iptr);
1685                         break;
1686
1687                 case ICMD_IAND:       /* ..., val1, val2  ==> ..., val1 & val2        */
1688
1689                         d = reg_of_var(iptr->dst, REG_NULL);
1690                         x86_64_emit_ialu(X86_64_AND, src, iptr);
1691                         break;
1692
1693                 case ICMD_IANDCONST:  /* ..., value  ==> ..., value & constant        */
1694                                       /* val.i = constant                             */
1695
1696                         d = reg_of_var(iptr->dst, REG_NULL);
1697                         x86_64_emit_ialuconst(X86_64_AND, src, iptr);
1698                         break;
1699
1700                 case ICMD_LAND:       /* ..., val1, val2  ==> ..., val1 & val2        */
1701
1702                         d = reg_of_var(iptr->dst, REG_NULL);
1703                         x86_64_emit_lalu(X86_64_AND, src, iptr);
1704                         break;
1705
1706                 case ICMD_LANDCONST:  /* ..., value  ==> ..., value & constant        */
1707                                       /* val.l = constant                             */
1708
1709                         d = reg_of_var(iptr->dst, REG_NULL);
1710                         x86_64_emit_laluconst(X86_64_AND, src, iptr);
1711                         break;
1712
1713                 case ICMD_IOR:        /* ..., val1, val2  ==> ..., val1 | val2        */
1714
1715                         d = reg_of_var(iptr->dst, REG_NULL);
1716                         x86_64_emit_ialu(X86_64_OR, src, iptr);
1717                         break;
1718
1719                 case ICMD_IORCONST:   /* ..., value  ==> ..., value | constant        */
1720                                       /* val.i = constant                             */
1721
1722                         d = reg_of_var(iptr->dst, REG_NULL);
1723                         x86_64_emit_ialuconst(X86_64_OR, src, iptr);
1724                         break;
1725
1726                 case ICMD_LOR:        /* ..., val1, val2  ==> ..., val1 | val2        */
1727
1728                         d = reg_of_var(iptr->dst, REG_NULL);
1729                         x86_64_emit_lalu(X86_64_OR, src, iptr);
1730                         break;
1731
1732                 case ICMD_LORCONST:   /* ..., value  ==> ..., value | constant        */
1733                                       /* val.l = constant                             */
1734
1735                         d = reg_of_var(iptr->dst, REG_NULL);
1736                         x86_64_emit_laluconst(X86_64_OR, src, iptr);
1737                         break;
1738
1739                 case ICMD_IXOR:       /* ..., val1, val2  ==> ..., val1 ^ val2        */
1740
1741                         d = reg_of_var(iptr->dst, REG_NULL);
1742                         x86_64_emit_ialu(X86_64_XOR, src, iptr);
1743                         break;
1744
1745                 case ICMD_IXORCONST:  /* ..., value  ==> ..., value ^ constant        */
1746                                       /* val.i = constant                             */
1747
1748                         d = reg_of_var(iptr->dst, REG_NULL);
1749                         x86_64_emit_ialuconst(X86_64_XOR, src, iptr);
1750                         break;
1751
1752                 case ICMD_LXOR:       /* ..., val1, val2  ==> ..., val1 ^ val2        */
1753
1754                         d = reg_of_var(iptr->dst, REG_NULL);
1755                         x86_64_emit_lalu(X86_64_XOR, src, iptr);
1756                         break;
1757
1758                 case ICMD_LXORCONST:  /* ..., value  ==> ..., value ^ constant        */
1759                                       /* val.l = constant                             */
1760
1761                         d = reg_of_var(iptr->dst, REG_NULL);
1762                         x86_64_emit_laluconst(X86_64_XOR, src, iptr);
1763                         break;
1764
1765
1766                 case ICMD_IINC:       /* ..., value  ==> ..., value + constant        */
1767                                       /* op1 = variable, val.i = constant             */
1768
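                        /* +1 and -1 use the short inc/dec encodings; any other
                           constant is added with an immediate, either directly on
                           the stack slot or in the register.                         */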
1769                         var = &(locals[iptr->op1][TYPE_INT]);
1770                         d = var->regoff;
1771                         if (var->flags & INMEMORY) {
1772                                 if (iptr->val.i == 1) {
1773                                         x86_64_incl_membase(REG_SP, d * 8);
1774  
1775                                 } else if (iptr->val.i == -1) {
1776                                         x86_64_decl_membase(REG_SP, d * 8);
1777
1778                                 } else {
1779                                         x86_64_alul_imm_membase(X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1780                                 }
1781
1782                         } else {
1783                                 if (iptr->val.i == 1) {
1784                                         x86_64_incl_reg(d);
1785  
1786                                 } else if (iptr->val.i == -1) {
1787                                         x86_64_decl_reg(d);
1788
1789                                 } else {
1790                                         x86_64_alul_imm_reg(X86_64_ADD, iptr->val.i, d);
1791                                 }
1792                         }
1793                         break;
1794
1795
1796                 /* floating operations ************************************************/
1797
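                /* FNEG/DNEG flip the IEEE sign bit by xoring with a mask constant
                   placed in the data segment and loaded RIP-relative; the
                   displacement is the dseg offset minus the distance from mcodebase
                   to the end of the (9 byte) load instruction.                       */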
1798                 case ICMD_FNEG:       /* ..., value  ==> ..., - value                 */
1799
1800                         var_to_reg_flt(s1, src, REG_FTMP1);
1801                         d = reg_of_var(iptr->dst, REG_FTMP3);
1802                         a = dseg_adds4(0x80000000);
1803                         M_FLTMOVE(s1, d);
1804                         x86_64_movss_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, REG_FTMP2);
1805                         x86_64_xorps_reg_reg(REG_FTMP2, d);
1806                         store_reg_to_var_flt(iptr->dst, d);
1807                         break;
1808
1809                 case ICMD_DNEG:       /* ..., value  ==> ..., - value                 */
1810
1811                         var_to_reg_flt(s1, src, REG_FTMP1);
1812                         d = reg_of_var(iptr->dst, REG_FTMP3);
1813                         a = dseg_adds8(0x8000000000000000);
1814                         M_FLTMOVE(s1, d);
1815                         x86_64_movd_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, REG_FTMP2);
1816                         x86_64_xorpd_reg_reg(REG_FTMP2, d);
1817                         store_reg_to_var_flt(iptr->dst, d);
1818                         break;
1819
1820                 case ICMD_FADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
1821
1822                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1823                         var_to_reg_flt(s2, src, REG_FTMP2);
1824                         d = reg_of_var(iptr->dst, REG_FTMP3);
1825                         if (s1 == d) {
1826                                 x86_64_addss_reg_reg(s2, d);
1827                         } else if (s2 == d) {
1828                                 x86_64_addss_reg_reg(s1, d);
1829                         } else {
1830                                 M_FLTMOVE(s1, d);
1831                                 x86_64_addss_reg_reg(s2, d);
1832                         }
1833                         store_reg_to_var_flt(iptr->dst, d);
1834                         break;
1835
1836                 case ICMD_DADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
1837
1838                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1839                         var_to_reg_flt(s2, src, REG_FTMP2);
1840                         d = reg_of_var(iptr->dst, REG_FTMP3);
1841                         if (s1 == d) {
1842                                 x86_64_addsd_reg_reg(s2, d);
1843                         } else if (s2 == d) {
1844                                 x86_64_addsd_reg_reg(s1, d);
1845                         } else {
1846                                 M_FLTMOVE(s1, d);
1847                                 x86_64_addsd_reg_reg(s2, d);
1848                         }
1849                         store_reg_to_var_flt(iptr->dst, d);
1850                         break;
1851
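                /* for the non-commutative float ops (sub, div) the second operand is
                   first copied to REG_FTMP2 whenever it already sits in the
                   destination register, since the SSE scalar instructions overwrite
                   their destination.                                                 */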
1852                 case ICMD_FSUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
1853
1854                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1855                         var_to_reg_flt(s2, src, REG_FTMP2);
1856                         d = reg_of_var(iptr->dst, REG_FTMP3);
1857                         if (s2 == d) {
1858                                 M_FLTMOVE(s2, REG_FTMP2);
1859                                 s2 = REG_FTMP2;
1860                         }
1861                         M_FLTMOVE(s1, d);
1862                         x86_64_subss_reg_reg(s2, d);
1863                         store_reg_to_var_flt(iptr->dst, d);
1864                         break;
1865
1866                 case ICMD_DSUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
1867
1868                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1869                         var_to_reg_flt(s2, src, REG_FTMP2);
1870                         d = reg_of_var(iptr->dst, REG_FTMP3);
1871                         if (s2 == d) {
1872                                 M_FLTMOVE(s2, REG_FTMP2);
1873                                 s2 = REG_FTMP2;
1874                         }
1875                         M_FLTMOVE(s1, d);
1876                         x86_64_subsd_reg_reg(s2, d);
1877                         store_reg_to_var_flt(iptr->dst, d);
1878                         break;
1879
1880                 case ICMD_FMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1881
1882                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1883                         var_to_reg_flt(s2, src, REG_FTMP2);
1884                         d = reg_of_var(iptr->dst, REG_FTMP3);
1885                         if (s1 == d) {
1886                                 x86_64_mulss_reg_reg(s2, d);
1887                         } else if (s2 == d) {
1888                                 x86_64_mulss_reg_reg(s1, d);
1889                         } else {
1890                                 M_FLTMOVE(s1, d);
1891                                 x86_64_mulss_reg_reg(s2, d);
1892                         }
1893                         store_reg_to_var_flt(iptr->dst, d);
1894                         break;
1895
1896                 case ICMD_DMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1897
1898                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1899                         var_to_reg_flt(s2, src, REG_FTMP2);
1900                         d = reg_of_var(iptr->dst, REG_FTMP3);
1901                         if (s1 == d) {
1902                                 x86_64_mulsd_reg_reg(s2, d);
1903                         } else if (s2 == d) {
1904                                 x86_64_mulsd_reg_reg(s1, d);
1905                         } else {
1906                                 M_FLTMOVE(s1, d);
1907                                 x86_64_mulsd_reg_reg(s2, d);
1908                         }
1909                         store_reg_to_var_flt(iptr->dst, d);
1910                         break;
1911
1912                 case ICMD_FDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1913
1914                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1915                         var_to_reg_flt(s2, src, REG_FTMP2);
1916                         d = reg_of_var(iptr->dst, REG_FTMP3);
1917                         if (s2 == d) {
1918                                 M_FLTMOVE(s2, REG_FTMP2);
1919                                 s2 = REG_FTMP2;
1920                         }
1921                         M_FLTMOVE(s1, d);
1922                         x86_64_divss_reg_reg(s2, d);
1923                         store_reg_to_var_flt(iptr->dst, d);
1924                         break;
1925
1926                 case ICMD_DDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1927
1928                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1929                         var_to_reg_flt(s2, src, REG_FTMP2);
1930                         d = reg_of_var(iptr->dst, REG_FTMP3);
1931                         if (s2 == d) {
1932                                 M_FLTMOVE(s2, REG_FTMP2);
1933                                 s2 = REG_FTMP2;
1934                         }
1935                         M_FLTMOVE(s1, d);
1936                         x86_64_divsd_reg_reg(s2, d);
1937                         store_reg_to_var_flt(iptr->dst, d);
1938                         break;
1939
1940                 case ICMD_I2F:       /* ..., value  ==> ..., (float) value            */
1941
1942                         var_to_reg_int(s1, src, REG_ITMP1);
1943                         d = reg_of_var(iptr->dst, REG_FTMP1);
1944                         x86_64_cvtsi2ss_reg_reg(s1, d);
1945                         store_reg_to_var_flt(iptr->dst, d);
1946                         break;
1947
1948                 case ICMD_I2D:       /* ..., value  ==> ..., (double) value           */
1949
1950                         var_to_reg_int(s1, src, REG_ITMP1);
1951                         d = reg_of_var(iptr->dst, REG_FTMP1);
1952                         x86_64_cvtsi2sd_reg_reg(s1, d);
1953                         store_reg_to_var_flt(iptr->dst, d);
1954                         break;
1955
1956                 case ICMD_L2F:       /* ..., value  ==> ..., (float) value            */
1957
1958                         var_to_reg_int(s1, src, REG_ITMP1);
1959                         d = reg_of_var(iptr->dst, REG_FTMP1);
1960                         x86_64_cvtsi2ssq_reg_reg(s1, d);
1961                         store_reg_to_var_flt(iptr->dst, d);
1962                         break;
1963                         
1964                 case ICMD_L2D:       /* ..., value  ==> ..., (double) value           */
1965
1966                         var_to_reg_int(s1, src, REG_ITMP1);
1967                         d = reg_of_var(iptr->dst, REG_FTMP1);
1968                         x86_64_cvtsi2sdq_reg_reg(s1, d);
1969                         store_reg_to_var_flt(iptr->dst, d);
1970                         break;
1971                         
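                /* the truncating conversions below rely on cvttss2si/cvttsd2si
                   returning the "integer indefinite" value (0x80000000 resp.
                   0x8000000000000000) for NaN and out-of-range inputs; when that
                   sentinel is seen, the asm_builtin_* helper is called to produce
                   the saturated result the JVM spec demands.                         */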
1972                 case ICMD_F2I:       /* ..., value  ==> ..., (int) value              */
1973
1974                         var_to_reg_flt(s1, src, REG_FTMP1);
1975                         d = reg_of_var(iptr->dst, REG_ITMP1);
1976                         x86_64_cvttss2si_reg_reg(s1, d);
1977                         x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, d);    /* corner cases */
1978                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1979                         x86_64_jcc(X86_64_CC_NE, a);
1980                         M_FLTMOVE(s1, REG_FTMP1);
1981                         x86_64_mov_imm_reg((s8) asm_builtin_f2i, REG_ITMP2);
1982                         x86_64_call_reg(REG_ITMP2);
1983                         M_INTMOVE(REG_RESULT, d);
1984                         store_reg_to_var_int(iptr->dst, d);
1985                         break;
1986
1987                 case ICMD_D2I:       /* ..., value  ==> ..., (int) value              */
1988
1989                         var_to_reg_flt(s1, src, REG_FTMP1);
1990                         d = reg_of_var(iptr->dst, REG_ITMP1);
1991                         x86_64_cvttsd2si_reg_reg(s1, d);
1992                         x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, d);    /* corner cases */
1993                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
1994                         x86_64_jcc(X86_64_CC_NE, a);
1995                         M_FLTMOVE(s1, REG_FTMP1);
1996                         x86_64_mov_imm_reg((s8) asm_builtin_d2i, REG_ITMP2);
1997                         x86_64_call_reg(REG_ITMP2);
1998                         M_INTMOVE(REG_RESULT, d);
1999                         store_reg_to_var_int(iptr->dst, d);
2000                         break;
2001
2002                 case ICMD_F2L:       /* ..., value  ==> ..., (long) value             */
2003
2004                         var_to_reg_flt(s1, src, REG_FTMP1);
2005                         d = reg_of_var(iptr->dst, REG_ITMP1);
2006                         x86_64_cvttss2siq_reg_reg(s1, d);
2007                         x86_64_mov_imm_reg(0x8000000000000000, REG_ITMP2);
2008                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, d);     /* corner cases */
2009                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2010                         x86_64_jcc(X86_64_CC_NE, a);
2011                         M_FLTMOVE(s1, REG_FTMP1);
2012                         x86_64_mov_imm_reg((s8) asm_builtin_f2l, REG_ITMP2);
2013                         x86_64_call_reg(REG_ITMP2);
2014                         M_INTMOVE(REG_RESULT, d);
2015                         store_reg_to_var_int(iptr->dst, d);
2016                         break;
2017
2018                 case ICMD_D2L:       /* ..., value  ==> ..., (long) value             */
2019
2020                         var_to_reg_flt(s1, src, REG_FTMP1);
2021                         d = reg_of_var(iptr->dst, REG_ITMP1);
2022                         x86_64_cvttsd2siq_reg_reg(s1, d);
2023                         x86_64_mov_imm_reg(0x8000000000000000, REG_ITMP2);
2024                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, d);     /* corner cases */
2025                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2026                         x86_64_jcc(X86_64_CC_NE, a);
2027                         M_FLTMOVE(s1, REG_FTMP1);
2028                         x86_64_mov_imm_reg((s8) asm_builtin_d2l, REG_ITMP2);
2029                         x86_64_call_reg(REG_ITMP2);
2030                         M_INTMOVE(REG_RESULT, d);
2031                         store_reg_to_var_int(iptr->dst, d);
2032                         break;
2033
2034                 case ICMD_F2D:       /* ..., value  ==> ..., (double) value           */
2035
2036                         var_to_reg_flt(s1, src, REG_FTMP1);
2037                         d = reg_of_var(iptr->dst, REG_FTMP3);
2038                         x86_64_cvtss2sd_reg_reg(s1, d);
2039                         store_reg_to_var_flt(iptr->dst, d);
2040                         break;
2041
2042                 case ICMD_D2F:       /* ..., value  ==> ..., (float) value            */
2043
2044                         var_to_reg_flt(s1, src, REG_FTMP1);
2045                         d = reg_of_var(iptr->dst, REG_FTMP3);
2046                         x86_64_cvtsd2ss_reg_reg(s1, d);
2047                         store_reg_to_var_flt(iptr->dst, d);
2048                         break;
2049
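                /* the fcmp/dcmp variants are branch-free: d is zeroed, ucomiss/
                   ucomisd set the flags, and conditional moves select +1 or -1;
                   the parity flag is set only for unordered (NaN) operands, so the
                   trailing cmovp installs the NaN result of the l/g variant.         */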
2050                 case ICMD_FCMPL:      /* ..., val1, val2  ==> ..., val1 fcmpl val2    */
2051                                           /* == => 0, < => 1, > => -1 */
2052
2053                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
2054                         var_to_reg_flt(s2, src, REG_FTMP2);
2055                         d = reg_of_var(iptr->dst, REG_ITMP3);
2056                         x86_64_alu_reg_reg(X86_64_XOR, d, d);
2057                         x86_64_mov_imm_reg(1, REG_ITMP1);
2058                         x86_64_mov_imm_reg(-1, REG_ITMP2);
2059                         x86_64_ucomiss_reg_reg(s1, s2);
2060                         x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2061                         x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2062                         x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP2, d);    /* treat unordered as GT */
2063                         store_reg_to_var_int(iptr->dst, d);
2064                         break;
2065
2066                 case ICMD_FCMPG:      /* ..., val1, val2  ==> ..., val1 fcmpg val2    */
2067                                           /* == => 0, < => 1, > => -1 */
2068
2069                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
2070                         var_to_reg_flt(s2, src, REG_FTMP2);
2071                         d = reg_of_var(iptr->dst, REG_ITMP3);
2072                         x86_64_alu_reg_reg(X86_64_XOR, d, d);
2073                         x86_64_mov_imm_reg(1, REG_ITMP1);
2074                         x86_64_mov_imm_reg(-1, REG_ITMP2);
2075                         x86_64_ucomiss_reg_reg(s1, s2);
2076                         x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2077                         x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2078                         x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP1, d);    /* treat unordered as LT */
2079                         store_reg_to_var_int(iptr->dst, d);
2080                         break;
2081
2082                 case ICMD_DCMPL:      /* ..., val1, val2  ==> ..., val1 fcmpl val2    */
2083                                           /* == => 0, < => 1, > => -1 */
2084
2085                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
2086                         var_to_reg_flt(s2, src, REG_FTMP2);
2087                         d = reg_of_var(iptr->dst, REG_ITMP3);
2088                         x86_64_alu_reg_reg(X86_64_XOR, d, d);
2089                         x86_64_mov_imm_reg(1, REG_ITMP1);
2090                         x86_64_mov_imm_reg(-1, REG_ITMP2);
2091                         x86_64_ucomisd_reg_reg(s1, s2);
2092                         x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2093                         x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2094                         x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP2, d);    /* treat unordered as GT */
2095                         store_reg_to_var_int(iptr->dst, d);
2096                         break;
2097
2098                 case ICMD_DCMPG:      /* ..., val1, val2  ==> ..., val1 fcmpg val2    */
2099                                           /* == => 0, < => 1, > => -1 */
2100
2101                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
2102                         var_to_reg_flt(s2, src, REG_FTMP2);
2103                         d = reg_of_var(iptr->dst, REG_ITMP3);
2104                         x86_64_alu_reg_reg(X86_64_XOR, d, d);
2105                         x86_64_mov_imm_reg(1, REG_ITMP1);
2106                         x86_64_mov_imm_reg(-1, REG_ITMP2);
2107                         x86_64_ucomisd_reg_reg(s1, s2);
2108                         x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2109                         x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2110                         x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP1, d);    /* treat unordered as LT */
2111                         store_reg_to_var_int(iptr->dst, d);
2112                         break;
2113
2114
2115                 /* memory operations **************************************************/
2116
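/* gen_bound_check: if bounds checking is enabled, compare the index against the
   array's size field and branch to the array-bounds exception stub when the index
   is not below it (unsigned compare, so negative indices are caught too); the jump
   target 0 is a placeholder that mcode_addxboundrefs records for later patching.  */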
2117 #define gen_bound_check \
2118     if (checkbounds) { \
2119         x86_64_alul_membase_reg(X86_64_CMP, s1, OFFSET(java_arrayheader, size), s2); \
2120         x86_64_jcc(X86_64_CC_AE, 0); \
2121         mcode_addxboundrefs(mcodeptr); \
2122     }
2123
2124                 case ICMD_ARRAYLENGTH: /* ..., arrayref  ==> ..., (int) length        */
2125
2126                         var_to_reg_int(s1, src, REG_ITMP1);
2127                         d = reg_of_var(iptr->dst, REG_ITMP3);
2128                         gen_nullptr_check(s1);
2129                         x86_64_movl_membase_reg(s1, OFFSET(java_arrayheader, size), d);
2130                         store_reg_to_var_int(iptr->dst, d);
2131                         break;
2132
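                /* the array accesses index with a scale of log2(element size):
                   3 for 8-byte refs/longs/doubles, 2 for int/float, 1 for char/
                   short, 0 for byte; sub-int elements are widened on load (movzwq
                   for char, movswq/movsbq for short and byte).                       */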
2133                 case ICMD_AALOAD:     /* ..., arrayref, index  ==> ..., value         */
2134
2135                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2136                         var_to_reg_int(s2, src, REG_ITMP2);
2137                         d = reg_of_var(iptr->dst, REG_ITMP3);
2138                         if (iptr->op1 == 0) {
2139                                 gen_nullptr_check(s1);
2140                                 gen_bound_check;
2141                         }
2142                         x86_64_mov_memindex_reg(OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2143                         store_reg_to_var_int(iptr->dst, d);
2144                         break;
2145
2146                 case ICMD_LALOAD:     /* ..., arrayref, index  ==> ..., value         */
2147
2148                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2149                         var_to_reg_int(s2, src, REG_ITMP2);
2150                         d = reg_of_var(iptr->dst, REG_ITMP3);
2151                         if (iptr->op1 == 0) {
2152                                 gen_nullptr_check(s1);
2153                                 gen_bound_check;
2154                         }
2155                         x86_64_mov_memindex_reg(OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2156                         store_reg_to_var_int(iptr->dst, d);
2157                         break;
2158
2159                 case ICMD_IALOAD:     /* ..., arrayref, index  ==> ..., value         */
2160
2161                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2162                         var_to_reg_int(s2, src, REG_ITMP2);
2163                         d = reg_of_var(iptr->dst, REG_ITMP3);
2164                         if (iptr->op1 == 0) {
2165                                 gen_nullptr_check(s1);
2166                                 gen_bound_check;
2167                         }
2168                         x86_64_movl_memindex_reg(OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2169                         store_reg_to_var_int(iptr->dst, d);
2170                         break;
2171
2172                 case ICMD_FALOAD:     /* ..., arrayref, index  ==> ..., value         */
2173
2174                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2175                         var_to_reg_int(s2, src, REG_ITMP2);
2176                         d = reg_of_var(iptr->dst, REG_FTMP3);
2177                         if (iptr->op1 == 0) {
2178                                 gen_nullptr_check(s1);
2179                                 gen_bound_check;
2180                         }
2181                         x86_64_movss_memindex_reg(OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2182                         store_reg_to_var_flt(iptr->dst, d);
2183                         break;
2184
2185                 case ICMD_DALOAD:     /* ..., arrayref, index  ==> ..., value         */
2186
2187                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2188                         var_to_reg_int(s2, src, REG_ITMP2);
2189                         d = reg_of_var(iptr->dst, REG_FTMP3);
2190                         if (iptr->op1 == 0) {
2191                                 gen_nullptr_check(s1);
2192                                 gen_bound_check;
2193                         }
2194                         x86_64_movsd_memindex_reg(OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2195                         store_reg_to_var_flt(iptr->dst, d);
2196                         break;
2197
2198                 case ICMD_CALOAD:     /* ..., arrayref, index  ==> ..., value         */
2199
2200                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2201                         var_to_reg_int(s2, src, REG_ITMP2);
2202                         d = reg_of_var(iptr->dst, REG_ITMP3);
2203                         if (iptr->op1 == 0) {
2204                                 gen_nullptr_check(s1);
2205                                 gen_bound_check;
2206                         }
2207                         x86_64_movzwq_memindex_reg(OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2208                         store_reg_to_var_int(iptr->dst, d);
2209                         break;
2210
2211                 case ICMD_SALOAD:     /* ..., arrayref, index  ==> ..., value         */
2212
2213                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2214                         var_to_reg_int(s2, src, REG_ITMP2);
2215                         d = reg_of_var(iptr->dst, REG_ITMP3);
2216                         if (iptr->op1 == 0) {
2217                                 gen_nullptr_check(s1);
2218                                 gen_bound_check;
2219                         }
2220                         x86_64_movswq_memindex_reg(OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2221                         store_reg_to_var_int(iptr->dst, d);
2222                         break;
2223
2224                 case ICMD_BALOAD:     /* ..., arrayref, index  ==> ..., value         */
2225
2226                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2227                         var_to_reg_int(s2, src, REG_ITMP2);
2228                         d = reg_of_var(iptr->dst, REG_ITMP3);
2229                         if (iptr->op1 == 0) {
2230                                 gen_nullptr_check(s1);
2231                                 gen_bound_check;
2232                         }
2233                         x86_64_movsbq_memindex_reg(OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2234                         store_reg_to_var_int(iptr->dst, d);
2235                         break;
2236
2237
2238                 case ICMD_AASTORE:    /* ..., arrayref, index, value  ==> ...         */
2239
2240                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2241                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2242                         if (iptr->op1 == 0) {
2243                                 gen_nullptr_check(s1);
2244                                 gen_bound_check;
2245                         }
2246                         var_to_reg_int(s3, src, REG_ITMP3);
2247                         x86_64_mov_reg_memindex(s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2248                         break;
2249
2250                 case ICMD_LASTORE:    /* ..., arrayref, index, value  ==> ...         */
2251
2252                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2253                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2254                         if (iptr->op1 == 0) {
2255                                 gen_nullptr_check(s1);
2256                                 gen_bound_check;
2257                         }
2258                         var_to_reg_int(s3, src, REG_ITMP3);
2259                         x86_64_mov_reg_memindex(s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2260                         break;
2261
2262                 case ICMD_IASTORE:    /* ..., arrayref, index, value  ==> ...         */
2263
2264                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2265                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2266                         if (iptr->op1 == 0) {
2267                                 gen_nullptr_check(s1);
2268                                 gen_bound_check;
2269                         }
2270                         var_to_reg_int(s3, src, REG_ITMP3);
2271                         x86_64_movl_reg_memindex(s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2272                         break;
2273
2274                 case ICMD_FASTORE:    /* ..., arrayref, index, value  ==> ...         */
2275
2276                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2277                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2278                         if (iptr->op1 == 0) {
2279                                 gen_nullptr_check(s1);
2280                                 gen_bound_check;
2281                         }
2282                         var_to_reg_flt(s3, src, REG_FTMP3);
2283                         x86_64_movss_reg_memindex(s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2284                         break;
2285
2286                 case ICMD_DASTORE:    /* ..., arrayref, index, value  ==> ...         */
2287
2288                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2289                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2290                         if (iptr->op1 == 0) {
2291                                 gen_nullptr_check(s1);
2292                                 gen_bound_check;
2293                         }
2294                         var_to_reg_flt(s3, src, REG_FTMP3);
2295                         x86_64_movsd_reg_memindex(s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2296                         break;
2297
2298                 case ICMD_CASTORE:    /* ..., arrayref, index, value  ==> ...         */
2299
2300                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2301                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2302                         if (iptr->op1 == 0) {
2303                                 gen_nullptr_check(s1);
2304                                 gen_bound_check;
2305                         }
2306                         var_to_reg_int(s3, src, REG_ITMP3);
2307                         x86_64_movw_reg_memindex(s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2308                         break;
2309
2310                 case ICMD_SASTORE:    /* ..., arrayref, index, value  ==> ...         */
2311
2312                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2313                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2314                         if (iptr->op1 == 0) {
2315                                 gen_nullptr_check(s1);
2316                                 gen_bound_check;
2317                         }
2318                         var_to_reg_int(s3, src, REG_ITMP3);
2319                         x86_64_movw_reg_memindex(s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2320                         break;
2321
2322                 case ICMD_BASTORE:    /* ..., arrayref, index, value  ==> ...         */
2323
2324                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2325                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2326                         if (iptr->op1 == 0) {
2327                                 gen_nullptr_check(s1);
2328                                 gen_bound_check;
2329                         }
2330                         var_to_reg_int(s3, src, REG_ITMP3);
2331                         x86_64_movb_reg_memindex(s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2332                         break;
2333
2334
2335                 case ICMD_PUTSTATIC:  /* ..., value  ==> ...                          */
2336                                       /* op1 = type, val.a = fieldinfo pointer        */
2337
2338                         a = dseg_addaddress(&(((fieldinfo *)(iptr->val.a))->value));
2339 /*                      x86_64_mov_imm_reg(0, REG_ITMP2); */
2340 /*                      dseg_adddata(mcodeptr); */
2341 /*                      x86_64_mov_membase_reg(REG_ITMP2, a, REG_ITMP3); */
2342                         x86_64_mov_membase_reg(RIP, -(((s8) mcodeptr + 7) - (s8) mcodebase) + a, REG_ITMP2);
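                        /* RIP-relative load of the field address from the data segment: 'a' is the
                           dseg offset of the slot holding &field->value, and mcodeptr + 7 marks the
                           end of this 7-byte mov, since RIP-relative displacements are measured from
                           the next instruction */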
2343                         switch (iptr->op1) {
2344                                 case TYPE_INT:
2345                                         var_to_reg_int(s2, src, REG_ITMP1);
2346                                         x86_64_movl_reg_membase(s2, REG_ITMP2, 0);
2347                                         break;
2348                                 case TYPE_LNG:
2349                                 case TYPE_ADR:
2350                                         var_to_reg_int(s2, src, REG_ITMP1);
2351                                         x86_64_mov_reg_membase(s2, REG_ITMP2, 0);
2352                                         break;
2353                                 case TYPE_FLT:
2354                                         var_to_reg_flt(s2, src, REG_FTMP1);
2355                                         x86_64_movss_reg_membase(s2, REG_ITMP2, 0);
2356                                         break;
2357                                 case TYPE_DBL:
2358                                         var_to_reg_flt(s2, src, REG_FTMP1);
2359                                         x86_64_movsd_reg_membase(s2, REG_ITMP2, 0);
2360                                         break;
2361                                 default: panic("internal error");
2362                                 }
2363                         break;
2364
2365                 case ICMD_GETSTATIC:  /* ...  ==> ..., value                          */
2366                                       /* op1 = type, val.a = fieldinfo pointer        */
2367
2368                         a = dseg_addaddress(&(((fieldinfo *)(iptr->val.a))->value));
2369 /*                      x86_64_mov_imm_reg(0, REG_ITMP2); */
2370 /*                      dseg_adddata(mcodeptr); */
2371 /*                      x86_64_mov_membase_reg(REG_ITMP2, a, REG_ITMP3); */
2372                         x86_64_mov_membase_reg(RIP, -(((s8) mcodeptr + 7) - (s8) mcodebase) + a, REG_ITMP2);
2373                         switch (iptr->op1) {
2374                                 case TYPE_INT:
2375                                         d = reg_of_var(iptr->dst, REG_ITMP1);
2376                                         x86_64_movl_membase_reg(REG_ITMP2, 0, d);
2377                                         store_reg_to_var_int(iptr->dst, d);
2378                                         break;
2379                                 case TYPE_LNG:
2380                                 case TYPE_ADR:
2381                                         d = reg_of_var(iptr->dst, REG_ITMP1);
2382                                         x86_64_mov_membase_reg(REG_ITMP2, 0, d);
2383                                         store_reg_to_var_int(iptr->dst, d);
2384                                         break;
2385                                 case TYPE_FLT:
2386                                         d = reg_of_var(iptr->dst, REG_ITMP1);
2387                                         x86_64_movss_membase_reg(REG_ITMP2, 0, d);
2388                                         store_reg_to_var_flt(iptr->dst, d);
2389                                         break;
2390                                 case TYPE_DBL:                          
2391                                         d = reg_of_var(iptr->dst, REG_ITMP1);
2392                                         x86_64_movsd_membase_reg(REG_ITMP2, 0, d);
2393                                         store_reg_to_var_flt(iptr->dst, d);
2394                                         break;
2395                                 default: panic("internal error");
2396                                 }
2397                         break;
2398
2399                 case ICMD_PUTFIELD:   /* ..., value  ==> ...                          */
2400                                       /* op1 = type, val.a = fieldinfo pointer        */
2401
2402                         a = ((fieldinfo *)(iptr->val.a))->offset;
2403                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2404                         switch (iptr->op1) {
2405                                 case TYPE_INT:
2406                                         var_to_reg_int(s2, src, REG_ITMP2);
2407                                         gen_nullptr_check(s1);
2408                                         x86_64_movl_reg_membase(s2, s1, a);
2409                                         break;
2410                                 case TYPE_LNG:
2411                                 case TYPE_ADR:
2412                                         var_to_reg_int(s2, src, REG_ITMP2);
2413                                         gen_nullptr_check(s1);
2414                                         x86_64_mov_reg_membase(s2, s1, a);
2415                                         break;
2416                                 case TYPE_FLT:
2417                                         var_to_reg_flt(s2, src, REG_FTMP2);
2418                                         gen_nullptr_check(s1);
2419                                         x86_64_movss_reg_membase(s2, s1, a);
2420                                         break;
2421                                 case TYPE_DBL:
2422                                         var_to_reg_flt(s2, src, REG_FTMP2);
2423                                         gen_nullptr_check(s1);
2424                                         x86_64_movsd_reg_membase(s2, s1, a);
2425                                         break;
2426                                 default: panic("internal error");
2427                                 }
2428                         break;
2429
2430                 case ICMD_GETFIELD:   /* ...  ==> ..., value                          */
2431                                       /* op1 = type, val.a = fieldinfo pointer        */
2432
2433                         a = ((fieldinfo *)(iptr->val.a))->offset;
2434                         var_to_reg_int(s1, src, REG_ITMP1);
2435                         switch (iptr->op1) {
2436                                 case TYPE_INT:
2437                                         d = reg_of_var(iptr->dst, REG_ITMP1);
2438                                         gen_nullptr_check(s1);
2439                                         x86_64_movl_membase_reg(s1, a, d);
2440                                         store_reg_to_var_int(iptr->dst, d);
2441                                         break;
2442                                 case TYPE_LNG:
2443                                 case TYPE_ADR:
2444                                         d = reg_of_var(iptr->dst, REG_ITMP1);
2445                                         gen_nullptr_check(s1);
2446                                         x86_64_mov_membase_reg(s1, a, d);
2447                                         store_reg_to_var_int(iptr->dst, d);
2448                                         break;
2449                                 case TYPE_FLT:
2450                                         d = reg_of_var(iptr->dst, REG_FTMP1);
2451                                         gen_nullptr_check(s1);
2452                                         x86_64_movss_membase_reg(s1, a, d);
2453                                         store_reg_to_var_flt(iptr->dst, d);
2454                                         break;
2455                                 case TYPE_DBL:                          
2456                                         d = reg_of_var(iptr->dst, REG_FTMP1);
2457                                         gen_nullptr_check(s1);
2458                                         x86_64_movsd_membase_reg(s1, a, d);
2459                                         store_reg_to_var_flt(iptr->dst, d);
2460                                         break;
2461                                 default: panic("internal error");
2462                                 }
2463                         break;
2464
2465
2466                 /* branch operations **************************************************/
2467
2468 /*  #define ALIGNCODENOP {if((int)((long)mcodeptr&7)){M_NOP;}} */
2469 #define ALIGNCODENOP do {} while (0)
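/* code alignment is currently disabled: ALIGNCODENOP expands to nothing, whereas the
   commented-out version above emitted a NOP whenever mcodeptr was not 8-byte aligned */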
2470
2471                 case ICMD_ATHROW:       /* ..., objectref ==> ... (, objectref)       */
2472
2473                         var_to_reg_int(s1, src, REG_ITMP1);
2474                         M_INTMOVE(s1, REG_ITMP1_XPTR);      /* pass the exception pointer in REG_ITMP1_XPTR */
2475
2476                         x86_64_call_imm(0);                 /* call the next instruction: pushes current PC */
2477                         x86_64_pop_reg(REG_ITMP2_XPC);      /* pop it into REG_ITMP2_XPC (exception PC)     */
2478
2479                         x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
2480                         x86_64_jmp_reg(REG_ITMP3);
2481                         ALIGNCODENOP;
2482                         break;
2483
2484                 case ICMD_GOTO:         /* ... ==> ...                                */
2485                                         /* op1 = target JavaVM pc                     */
2486
2487                         x86_64_jmp_imm(0);
2488                         mcode_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2489                         ALIGNCODENOP;
2490                         break;
2491
2492                 case ICMD_JSR:          /* ... ==> ...                                */
2493                                         /* op1 = target JavaVM pc                     */
2494
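                        /* JSR is emitted as a call so that the address of the following instruction
                           is pushed; the call displacement is patched to the target block below */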
2495                         x86_64_call_imm(0);
2496                         mcode_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2497                         break;
2498                         
2499                 case ICMD_RET:          /* ... ==> ...                                */
2500                                         /* op1 = local variable                       */
2501
2502                         var = &(locals[iptr->op1][TYPE_ADR]);
2503                         var_to_reg_int(s1, var, REG_ITMP1);
2504                         x86_64_jmp_reg(s1);
2505                         break;
2506
2507                 case ICMD_IFNULL:       /* ..., value ==> ...                         */
2508                                         /* op1 = target JavaVM pc                     */
2509
2510                         if (src->flags & INMEMORY) {
2511                                 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
2512
2513                         } else {
2514                                 x86_64_test_reg_reg(src->regoff, src->regoff);
2515                         }
2516                         x86_64_jcc(X86_64_CC_E, 0);
2517                         mcode_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2518                         break;
2519
2520                 case ICMD_IFNONNULL:    /* ..., value ==> ...                         */
2521                                         /* op1 = target JavaVM pc                     */
2522
2523                         if (src->flags & INMEMORY) {
2524                                 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
2525
2526                         } else {
2527                                 x86_64_test_reg_reg(src->regoff, src->regoff);
2528                         }
2529                         x86_64_jcc(X86_64_CC_NE, 0);
2530                         mcode_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2531                         break;
2532
2533                 case ICMD_IFEQ:         /* ..., value ==> ...                         */
2534                                         /* op1 = target JavaVM pc, val.i = constant   */
2535
2536                         x86_64_emit_ifcc(X86_64_CC_E, src, iptr);
2537                         break;
2538
2539                 case ICMD_IFLT:         /* ..., value ==> ...                         */
2540                                         /* op1 = target JavaVM pc, val.i = constant   */
2541
2542                         x86_64_emit_ifcc(X86_64_CC_L, src, iptr);
2543                         break;
2544
2545                 case ICMD_IFLE:         /* ..., value ==> ...                         */
2546                                         /* op1 = target JavaVM pc, val.i = constant   */
2547
2548                         x86_64_emit_ifcc(X86_64_CC_LE, src, iptr);
2549                         break;
2550
2551                 case ICMD_IFNE:         /* ..., value ==> ...                         */
2552                                         /* op1 = target JavaVM pc, val.i = constant   */
2553
2554                         x86_64_emit_ifcc(X86_64_CC_NE, src, iptr);
2555                         break;
2556
2557                 case ICMD_IFGT:         /* ..., value ==> ...                         */
2558                                         /* op1 = target JavaVM pc, val.i = constant   */
2559
2560                         x86_64_emit_ifcc(X86_64_CC_G, src, iptr);
2561                         break;
2562
2563                 case ICMD_IFGE:         /* ..., value ==> ...                         */
2564                                         /* op1 = target JavaVM pc, val.i = constant   */
2565
2566                         x86_64_emit_ifcc(X86_64_CC_GE, src, iptr);
2567                         break;
2568
2569                 case ICMD_IF_LEQ:       /* ..., value ==> ...                         */
2570                                         /* op1 = target JavaVM pc, val.l = constant   */
2571
2572                         x86_64_emit_if_lcc(X86_64_CC_E, src, iptr);
2573                         break;
2574
2575                 case ICMD_IF_LLT:       /* ..., value ==> ...                         */
2576                                         /* op1 = target JavaVM pc, val.l = constant   */
2577
2578                         x86_64_emit_if_lcc(X86_64_CC_L, src, iptr);
2579                         break;
2580
2581                 case ICMD_IF_LLE:       /* ..., value ==> ...                         */
2582                                         /* op1 = target JavaVM pc, val.l = constant   */
2583
2584                         x86_64_emit_if_lcc(X86_64_CC_LE, src, iptr);
2585                         break;
2586
2587                 case ICMD_IF_LNE:       /* ..., value ==> ...                         */
2588                                         /* op1 = target JavaVM pc, val.l = constant   */
2589
2590                         x86_64_emit_if_lcc(X86_64_CC_NE, src, iptr);
2591                         break;
2592
2593                 case ICMD_IF_LGT:       /* ..., value ==> ...                         */
2594                                         /* op1 = target JavaVM pc, val.l = constant   */
2595
2596                         x86_64_emit_if_lcc(X86_64_CC_G, src, iptr);
2597                         break;
2598
2599                 case ICMD_IF_LGE:       /* ..., value ==> ...                         */
2600                                         /* op1 = target JavaVM pc, val.l = constant   */
2601
2602                         x86_64_emit_if_lcc(X86_64_CC_GE, src, iptr);
2603                         break;
2604
2605                 case ICMD_IF_ICMPEQ:    /* ..., value, value ==> ...                  */
2606                                         /* op1 = target JavaVM pc                     */
2607
2608                         x86_64_emit_if_icmpcc(X86_64_CC_E, src, iptr);
2609                         break;
2610
2611                 case ICMD_IF_LCMPEQ:    /* ..., value, value ==> ...                  */
2612                 case ICMD_IF_ACMPEQ:    /* op1 = target JavaVM pc                     */
2613
2614                         x86_64_emit_if_lcmpcc(X86_64_CC_E, src, iptr);
2615                         break;
2616
2617                 case ICMD_IF_ICMPNE:    /* ..., value, value ==> ...                  */
2618                                         /* op1 = target JavaVM pc                     */
2619
2620                         x86_64_emit_if_icmpcc(X86_64_CC_NE, src, iptr);
2621                         break;
2622
2623                 case ICMD_IF_LCMPNE:    /* ..., value, value ==> ...                  */
2624                 case ICMD_IF_ACMPNE:    /* op1 = target JavaVM pc                     */
2625
2626                         x86_64_emit_if_lcmpcc(X86_64_CC_NE, src, iptr);
2627                         break;
2628
2629                 case ICMD_IF_ICMPLT:    /* ..., value, value ==> ...                  */
2630                                         /* op1 = target JavaVM pc                     */
2631
2632                         x86_64_emit_if_icmpcc(X86_64_CC_L, src, iptr);
2633                         break;
2634
2635                 case ICMD_IF_LCMPLT:    /* ..., value, value ==> ...                  */
2636                                         /* op1 = target JavaVM pc                     */
2637
2638                         x86_64_emit_if_lcmpcc(X86_64_CC_L, src, iptr);
2639                         break;
2640
2641                 case ICMD_IF_ICMPGT:    /* ..., value, value ==> ...                  */
2642                                         /* op1 = target JavaVM pc                     */
2643
2644                         x86_64_emit_if_icmpcc(X86_64_CC_G, src, iptr);
2645                         break;
2646
2647                 case ICMD_IF_LCMPGT:    /* ..., value, value ==> ...                  */
2648                                         /* op1 = target JavaVM pc                     */
2649
2650                         x86_64_emit_if_lcmpcc(X86_64_CC_G, src, iptr);
2651                         break;
2652
2653                 case ICMD_IF_ICMPLE:    /* ..., value, value ==> ...                  */
2654                                         /* op1 = target JavaVM pc                     */
2655
2656                         x86_64_emit_if_icmpcc(X86_64_CC_LE, src, iptr);
2657                         break;
2658
2659                 case ICMD_IF_LCMPLE:    /* ..., value, value ==> ...                  */
2660                                         /* op1 = target JavaVM pc                     */
2661
2662                         x86_64_emit_if_lcmpcc(X86_64_CC_LE, src, iptr);
2663                         break;
2664
2665                 case ICMD_IF_ICMPGE:    /* ..., value, value ==> ...                  */
2666                                         /* op1 = target JavaVM pc                     */
2667
2668                         x86_64_emit_if_icmpcc(X86_64_CC_GE, src, iptr);
2669                         break;
2670
2671                 case ICMD_IF_LCMPGE:    /* ..., value, value ==> ...                  */
2672                                         /* op1 = target JavaVM pc                     */
2673
2674                         x86_64_emit_if_lcmpcc(X86_64_CC_GE, src, iptr);
2675                         break;
2676
2677                 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST                           */
2678
2679                 case ICMD_ELSE_ICONST:  /* handled by IFxx_ICONST                     */
2680                         break;
2681
2682                 case ICMD_IFEQ_ICONST:  /* ..., value ==> ..., constant               */
2683                                         /* val.i = constant                           */
2684
2685                         var_to_reg_int(s1, src, REG_ITMP1);
2686                         d = reg_of_var(iptr->dst, REG_ITMP3);
2687                         s3 = iptr->val.i;
2688                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2689                                 if (s1 == d) {
2690                                         M_INTMOVE(s1, REG_ITMP1);
2691                                         s1 = REG_ITMP1;
2692                                 }
2693                                 x86_64_movl_imm_reg(iptr[1].val.i, d);
2694                         }
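                        /* branchless select: when an ELSE_ICONST follows, d has been preloaded with
                           its constant; the testl/cmov below overwrite d with val.i if value == 0 */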
2695                         x86_64_movl_imm_reg(s3, REG_ITMP2);
2696                         x86_64_testl_reg_reg(s1, s1);
2697                         x86_64_cmovccl_reg_reg(X86_64_CC_E, REG_ITMP2, d);
2698                         store_reg_to_var_int(iptr->dst, d);
2699                         break;
2700
2701                 case ICMD_IFNE_ICONST:  /* ..., value ==> ..., constant               */
2702                                         /* val.i = constant                           */
2703
2704                         var_to_reg_int(s1, src, REG_ITMP1);
2705                         d = reg_of_var(iptr->dst, REG_ITMP3);
2706                         s3 = iptr->val.i;
2707                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2708                                 if (s1 == d) {
2709                                         M_INTMOVE(s1, REG_ITMP1);
2710                                         s1 = REG_ITMP1;
2711                                 }
2712                                 x86_64_movl_imm_reg(iptr[1].val.i, d);
2713                         }
2714                         x86_64_movl_imm_reg(s3, REG_ITMP2);
2715                         x86_64_testl_reg_reg(s1, s1);
2716                         x86_64_cmovccl_reg_reg(X86_64_CC_NE, REG_ITMP2, d);
2717                         store_reg_to_var_int(iptr->dst, d);
2718                         break;
2719
2720                 case ICMD_IFLT_ICONST:  /* ..., value ==> ..., constant               */
2721                                         /* val.i = constant                           */
2722
2723                         var_to_reg_int(s1, src, REG_ITMP1);
2724                         d = reg_of_var(iptr->dst, REG_ITMP3);
2725                         s3 = iptr->val.i;
2726                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2727                                 if (s1 == d) {
2728                                         M_INTMOVE(s1, REG_ITMP1);
2729                                         s1 = REG_ITMP1;
2730                                 }
2731                                 x86_64_movl_imm_reg(iptr[1].val.i, d);
2732                         }
2733                         x86_64_movl_imm_reg(s3, REG_ITMP2);
2734                         x86_64_testl_reg_reg(s1, s1);
2735                         x86_64_cmovccl_reg_reg(X86_64_CC_L, REG_ITMP2, d);
2736                         store_reg_to_var_int(iptr->dst, d);
2737                         break;
2738
2739                 case ICMD_IFGE_ICONST:  /* ..., value ==> ..., constant               */
2740                                         /* val.i = constant                           */
2741
2742                         var_to_reg_int(s1, src, REG_ITMP1);
2743                         d = reg_of_var(iptr->dst, REG_ITMP3);
2744                         s3 = iptr->val.i;
2745                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2746                                 if (s1 == d) {
2747                                         M_INTMOVE(s1, REG_ITMP1);
2748                                         s1 = REG_ITMP1;
2749                                 }
2750                                 x86_64_movl_imm_reg(iptr[1].val.i, d);
2751                         }
2752                         x86_64_movl_imm_reg(s3, REG_ITMP2);
2753                         x86_64_testl_reg_reg(s1, s1);
2754                         x86_64_cmovccl_reg_reg(X86_64_CC_GE, REG_ITMP2, d);
2755                         store_reg_to_var_int(iptr->dst, d);
2756                         break;
2757
2758                 case ICMD_IFGT_ICONST:  /* ..., value ==> ..., constant               */
2759                                         /* val.i = constant                           */
2760
2761                         var_to_reg_int(s1, src, REG_ITMP1);
2762                         d = reg_of_var(iptr->dst, REG_ITMP3);
2763                         s3 = iptr->val.i;
2764                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2765                                 if (s1 == d) {
2766                                         M_INTMOVE(s1, REG_ITMP1);
2767                                         s1 = REG_ITMP1;
2768                                 }
2769                                 x86_64_movl_imm_reg(iptr[1].val.i, d);
2770                         }
2771                         x86_64_movl_imm_reg(s3, REG_ITMP2);
2772                         x86_64_testl_reg_reg(s1, s1);
2773                         x86_64_cmovccl_reg_reg(X86_64_CC_G, REG_ITMP2, d);
2774                         store_reg_to_var_int(iptr->dst, d);
2775                         break;
2776
2777                 case ICMD_IFLE_ICONST:  /* ..., value ==> ..., constant               */
2778                                         /* val.i = constant                           */
2779
2780                         var_to_reg_int(s1, src, REG_ITMP1);
2781                         d = reg_of_var(iptr->dst, REG_ITMP3);
2782                         s3 = iptr->val.i;
2783                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2784                                 if (s1 == d) {
2785                                         M_INTMOVE(s1, REG_ITMP1);
2786                                         s1 = REG_ITMP1;
2787                                 }
2788                                 x86_64_movl_imm_reg(iptr[1].val.i, d);
2789                         }
2790                         x86_64_movl_imm_reg(s3, REG_ITMP2);
2791                         x86_64_testl_reg_reg(s1, s1);
2792                         x86_64_cmovccl_reg_reg(X86_64_CC_LE, REG_ITMP2, d);
2793                         store_reg_to_var_int(iptr->dst, d);
2794                         break;
2795
2796
2797                 case ICMD_IRETURN:      /* ..., retvalue ==> ...                      */
2798                 case ICMD_LRETURN:
2799                 case ICMD_ARETURN:
2800
2801 #ifdef USE_THREADS
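                        /* exit the monitor before returning: reload the synchronized object
                           (presumably saved at 8 * maxmemuse by the method prologue) and pass it
                           to builtin_monitorexit */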
2802                         if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2803                                 x86_64_mov_membase_reg(REG_SP, 8 * maxmemuse, argintregs[0]);
2804                                 x86_64_mov_imm_reg((s8) builtin_monitorexit, REG_ITMP1);
2805                                 x86_64_call_reg(REG_ITMP1);
2806                         }
2807 #endif
2808                         var_to_reg_int(s1, src, REG_RESULT);
2809                         M_INTMOVE(s1, REG_RESULT);
2810                         goto nowperformreturn;
2811
2812                 case ICMD_FRETURN:      /* ..., retvalue ==> ...                      */
2813                 case ICMD_DRETURN:
2814
2815 #ifdef USE_THREADS
2816                         if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2817                                 x86_64_mov_membase_reg(REG_SP, 8 * maxmemuse, argintregs[0]);
2818                                 x86_64_mov_imm_reg((s8) builtin_monitorexit, REG_ITMP1);
2819                                 x86_64_call_reg(REG_ITMP1);
2820                         }
2821 #endif
2822                         var_to_reg_flt(s1, src, REG_FRESULT);
2823                         M_FLTMOVE(s1, REG_FRESULT);
2824                         goto nowperformreturn;
2825
2826                 case ICMD_RETURN:      /* ...  ==> ...                                */
2827
2828 #ifdef USE_THREADS
2829                         if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2830                                 x86_64_mov_membase_reg(REG_SP, 8 * maxmemuse, argintregs[0]);
2831                                 x86_64_mov_imm_reg((s8) builtin_monitorexit, REG_ITMP1);
2832                                 x86_64_call_reg(REG_ITMP1);
2833                         }
2834 #endif
2835
2836 nowperformreturn:
2837                         {
2838                         int r, p;
2839                         
2840                         p = parentargs_base;
2841                         
2842                         /* call trace function */
2843                         if (runverbose) {
2844                                 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
2845
2846                                 x86_64_mov_reg_membase(REG_RESULT, REG_SP, 0 * 8);
2847                                 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, 1 * 8);
2848
2849                                 x86_64_mov_imm_reg((s8) method, argintregs[0]);
2850                                 x86_64_mov_reg_reg(REG_RESULT, argintregs[1]);
2851                                 M_FLTMOVE(REG_FRESULT, argfltregs[0]);
2852                                 M_FLTMOVE(REG_FRESULT, argfltregs[1]);
2853
2854                                 x86_64_mov_imm_reg((s8) builtin_displaymethodstop, REG_ITMP1);
2855                                 x86_64_call_reg(REG_ITMP1);
2856
2857                                 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_RESULT);
2858                                 x86_64_movq_membase_reg(REG_SP, 1 * 8, REG_FRESULT);
2859
2860                                 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
2861                         }
2862
2863                         /* restore saved registers                                        */
2864                         for (r = savintregcnt - 1; r >= maxsavintreguse; r--) {
2865                                 p--; x86_64_mov_membase_reg(REG_SP, p * 8, savintregs[r]);
2866                         }
2867                         for (r = savfltregcnt - 1; r >= maxsavfltreguse; r--) {
2868                                 p--; x86_64_movq_membase_reg(REG_SP, p * 8, savfltregs[r]);
2869                         }
2870
2871                         /* deallocate stack                                               */
2872                         if (parentargs_base) {
2873                                 x86_64_alu_imm_reg(X86_64_ADD, parentargs_base * 8, REG_SP);
2874                         }
2875
2876                         x86_64_ret();
2877                         ALIGNCODENOP;
2878                         }
2879                         break;
2880
2881
2882                 case ICMD_TABLESWITCH:  /* ..., index ==> ...                         */
2883                         {
2884                                 s4 i, l, *s4ptr;
2885                                 void **tptr;
2886
2887                                 tptr = (void **) iptr->target;
2888
2889                                 s4ptr = iptr->val.a;
2890                                 l = s4ptr[1];                          /* low     */
2891                                 i = s4ptr[2];                          /* high    */
2892
2893                                 var_to_reg_int(s1, src, REG_ITMP1);
2894                                 M_INTMOVE(s1, REG_ITMP1);
2895                                 if (l != 0) {
2896                                         x86_64_alul_imm_reg(X86_64_SUB, l, REG_ITMP1);
2897                                 }
2898                                 i = i - l + 1;
2899
2900                 /* range check */
2901                                 x86_64_alul_imm_reg(X86_64_CMP, i - 1, REG_ITMP1);
2902                                 x86_64_jcc(X86_64_CC_A, 0);
2903
2904                 /* mcode_addreference(BlockPtrOfPC(s4ptr[0]), mcodeptr); */
2905                                 mcode_addreference((basicblock *) tptr[0], mcodeptr);
2906
2907                                 /* build jump table top down and use address of lowest entry */
2908
2909                 /* s4ptr += 3 + i; */
2910                                 tptr += i;
2911
2912                                 while (--i >= 0) {
2913                                         /* dseg_addtarget(BlockPtrOfPC(*--s4ptr)); */
2914                                         dseg_addtarget((basicblock *) tptr[0]); 
2915                                         --tptr;
2916                                 }
2917
2918                                 /* length of dataseg after last dseg_addtarget is used by load */
2919
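                                /* dispatch: REG_ITMP2 is patched by dseg_adddata to the base address
                                   from which data segment entries are reached via negative offsets;
                                   the lowest table entry lies at -dseglen, so the entry for the
                                   adjusted index is loaded from -dseglen + 8 * REG_ITMP1 and jumped
                                   to indirectly */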
2920                                 x86_64_mov_imm_reg(0, REG_ITMP2);
2921                                 dseg_adddata(mcodeptr);
2922                                 x86_64_mov_memindex_reg(-dseglen, REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
2923                                 x86_64_jmp_reg(REG_ITMP1);
2924                                 ALIGNCODENOP;
2925                         }
2926                         break;
2927
2928
2929                 case ICMD_LOOKUPSWITCH: /* ..., key ==> ...                           */
2930                         {
2931                                 s4 i, l, val, *s4ptr;
2932                                 void **tptr;
2933
2934                                 tptr = (void **) iptr->target;
2935
2936                                 s4ptr = iptr->val.a;
2937                                 l = s4ptr[0];                          /* default  */
2938                                 i = s4ptr[1];                          /* count    */
2939                         
2940                                 MCODECHECK((i<<2)+8);
2941                                 var_to_reg_int(s1, src, REG_ITMP1);    /* reg compare should always be faster */
2942                                 while (--i >= 0) {
2943                                         s4ptr += 2;
2944                                         ++tptr;
2945
2946                                         val = s4ptr[0];
2947                                         x86_64_alul_imm_reg(X86_64_CMP, val, s1);
2948                                         x86_64_jcc(X86_64_CC_E, 0);
2949                                         /* mcode_addreference(BlockPtrOfPC(s4ptr[1]), mcodeptr); */
2950                                         mcode_addreference((basicblock *) tptr[0], mcodeptr); 
2951                                 }
2952
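                                /* no key matched: jump to the default target (displacement patched below) */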
2953                                 x86_64_jmp_imm(0);
2954                                 /* mcode_addreference(BlockPtrOfPC(l), mcodeptr); */
2955                         
2956                                 tptr = (void **) iptr->target;
2957                                 mcode_addreference((basicblock *) tptr[0], mcodeptr);
2958
2959                                 ALIGNCODENOP;
2960                         }
2961                         break;
2962
2963
2964                 case ICMD_BUILTIN3:     /* ..., arg1, arg2, arg3 ==> ...              */
2965                                         /* op1 = return type, val.a = function pointer*/
2966                         s3 = 3;
2967                         goto gen_method;
2968
2969                 case ICMD_BUILTIN2:     /* ..., arg1, arg2 ==> ...                    */
2970                                         /* op1 = return type, val.a = function pointer*/
2971                         s3 = 2;
2972                         goto gen_method;
2973
2974                 case ICMD_BUILTIN1:     /* ..., arg1 ==> ...                          */
2975                                         /* op1 = return type, val.a = function pointer*/
2976                         s3 = 1;
2977                         goto gen_method;
2978
2979                 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ...            */
2980                                         /* op1 = arg count, val.a = method pointer    */
2981
2982                 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2983                                         /* op1 = arg count, val.a = method pointer    */
2984
2985                 case ICMD_INVOKEVIRTUAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
2986                                         /* op1 = arg count, val.a = method pointer    */
2987
2988                 case ICMD_INVOKEINTERFACE:/*.., objectref, [arg1, [arg2 ...]] ==> ... */
2989                                         /* op1 = arg count, val.a = method pointer    */
2990
2991                         s3 = iptr->op1;
2992
2993 gen_method: {
2994                         methodinfo   *m;
2995                         classinfo    *ci;
2996                         stackptr     tmpsrc;
2997                         int iarg = 0;
2998                         int farg = 0;
2999
3000                         MCODECHECK((s3 << 1) + 64);
3001
3002                         tmpsrc = src;
3003                         s2 = s3;
3004
3005                         /* first pass: count the integer and floating point arguments            */
3006                         for (; --s3 >= 0; src = src->prev) {
3007                                 IS_INT_LNG_TYPE(src->type) ? iarg++ : farg++;
3008                         }
3009
3010                         src = tmpsrc;
3011                         s3 = s2;
3012
3013                         s2 = ((iarg > intreg_argnum) ? iarg - intreg_argnum : 0) + ((farg > fltreg_argnum) ? farg - fltreg_argnum : 0);
3014
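                        /* second pass: move each argument into its argument register or, once the
                           registers are used up, store it into the outgoing stack area (s2 counts
                           down through the stack slots) */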
3015                         for (; --s3 >= 0; src = src->prev) {
3016                                 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
3017                                 if (src->varkind == ARGVAR) {
3018                                         if (IS_INT_LNG_TYPE(src->type)) {
3019                                                 if (iarg >= intreg_argnum) {
3020                                                         s2--;
3021                                                 }
3022                                         } else {
3023                                                 if (farg >= fltreg_argnum) {
3024                                                         s2--;
3025                                                 }
3026                                         }
3027                                         continue;
3028                                 }
3029
3030                                 if (IS_INT_LNG_TYPE(src->type)) {
3031                                         if (iarg < intreg_argnum) {
3032                                                 s1 = argintregs[iarg];
3033                                                 var_to_reg_int(d, src, s1);
3034                                                 M_INTMOVE(d, s1);
3035
3036                                         } else {
3037                                                 var_to_reg_int(d, src, REG_ITMP1);
3038                                                 s2--;
3039                                                 x86_64_mov_reg_membase(d, REG_SP, s2 * 8);
3040                                         }
3041
3042                                 } else {
3043                                         if (farg < fltreg_argnum) {
3044                                                 s1 = argfltregs[farg];
3045                                                 var_to_reg_flt(d, src, s1);
3046                                                 M_FLTMOVE(d, s1);
3047
3048                                         } else {
3049                                                 var_to_reg_flt(d, src, REG_FTMP1);
3050                                                 s2--;
3051                                                 x86_64_movq_reg_membase(d, REG_SP, s2 * 8);
3052                                         }
3053                                 }
3054                         } /* end of for */
3055
3056                         m = iptr->val.a;
3057                         switch (iptr->opc) {
3058                                 case ICMD_BUILTIN3:
3059                                 case ICMD_BUILTIN2:
3060                                 case ICMD_BUILTIN1:
3061
3062                                         a = (s8) m;
3063                                         d = iptr->op1;
3064
3065                                         x86_64_mov_imm_reg(a, REG_ITMP1);
3066                                         x86_64_call_reg(REG_ITMP1);
3067                                         break;
3068
3069                                 case ICMD_INVOKESTATIC:
3070
3071                                         a = (s8) m->stubroutine;
3072                                         d = m->returntype;
3073
3074                                         x86_64_mov_imm_reg(a, REG_ITMP2);
3075                                         x86_64_call_reg(REG_ITMP2);
3076                                         break;
3077
3078                                 case ICMD_INVOKESPECIAL:
3079
3080                                         a = (s8) m->stubroutine;
3081                                         d = m->returntype;
3082
3083                                         gen_nullptr_check(argintregs[0]);    /* first argument is the object pointer */
3084                                         x86_64_mov_membase_reg(argintregs[0], 0, REG_ITMP2);    /* touch the object so a null receiver raises a hardware fault */
3085                                         x86_64_mov_imm_reg(a, REG_ITMP2);
3086                                         x86_64_call_reg(REG_ITMP2);
3087                                         break;
3088
3089                                 case ICMD_INVOKEVIRTUAL:
3090
3091                                         d = m->returntype;
3092
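                                        /* virtual dispatch: load the vftbl pointer from the object
                                           header, fetch the method pointer at table[vftblindex] and
                                           call it */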
3093                                         gen_nullptr_check(argintregs[0]);
3094                                         x86_64_mov_membase_reg(argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3095                                         x86_64_mov_membase32_reg(REG_ITMP2, OFFSET(vftbl, table[0]) + sizeof(methodptr) * m->vftblindex, REG_ITMP1);
3096                                         x86_64_call_reg(REG_ITMP1);
3097                                         break;
3098
3099                                 case ICMD_INVOKEINTERFACE:
3100
3101                                         ci = m->class;
3102                                         d = m->returntype;
3103
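                                        /* interface dispatch: vftbl->interfacetable[-ci->index] yields
                                           the per-interface method table; entry (m - ci->methods) is
                                           loaded and called */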
3104                                         gen_nullptr_check(argintregs[0]);
3105                                         x86_64_mov_membase_reg(argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3106                                         x86_64_mov_membase_reg(REG_ITMP2, OFFSET(vftbl, interfacetable[0]) - sizeof(methodptr) * ci->index, REG_ITMP2);
3107                                         x86_64_mov_membase32_reg(REG_ITMP2, sizeof(methodptr) * (m - ci->methods), REG_ITMP1);
3108                                         x86_64_call_reg(REG_ITMP1);
3109                                         break;
3110
3111                                 default:
3112                                         d = 0;
3113                                         sprintf(logtext, "Unknown ICMD-Command: %d", iptr->opc);
3114                                         error();
3115                                 }
3116
3117                         /* d contains return type */
3118
3119                         if (d != TYPE_VOID) {
3120                                 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3121                                         s1 = reg_of_var(iptr->dst, REG_RESULT);
3122                                         M_INTMOVE(REG_RESULT, s1);
3123                                         store_reg_to_var_int(iptr->dst, s1);
3124
3125                                 } else {
3126                                         s1 = reg_of_var(iptr->dst, REG_FRESULT);
3127                                         M_FLTMOVE(REG_FRESULT, s1);
3128                                         store_reg_to_var_flt(iptr->dst, s1);
3129                                 }
3130                         }
3131                         }
3132                         break;
3133
3134
3135                 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult            */
3136
3137                                       /* op1:   0 == array, 1 == class                */
3138                                       /* val.a: (classinfo*) superclass               */
3139
3140 /*          superclass is an interface:
3141  *
3142  *          return (sub != NULL) &&
3143  *                 (sub->vftbl->interfacetablelength > super->index) &&
3144  *                 (sub->vftbl->interfacetable[-super->index] != NULL);
3145  *
3146  *          superclass is a class:
3147  *
3148  *          return ((sub != NULL) && (0
3149  *                  <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3150  *                  super->vftbl->diffval));
3151  */
3152
3153                         {
3154                         classinfo *super = (classinfo*) iptr->val.a;
3155                         
3156                         var_to_reg_int(s1, src, REG_ITMP1);
3157                         d = reg_of_var(iptr->dst, REG_ITMP3);
3158                         if (s1 == d) {
3159                                 M_INTMOVE(s1, REG_ITMP1);
3160                                 s1 = REG_ITMP1;
3161                         }
3162                         x86_64_alu_reg_reg(X86_64_XOR, d, d);
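                        /* d is preset to 0 (false); for a null reference the forward jcc below skips
                           the entire type test, whose byte length is accumulated by hand in 'a' */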
3163                         if (iptr->op1) {                               /* class/interface */
3164                                 if (super->flags & ACC_INTERFACE) {        /* interface       */
3165                                         x86_64_test_reg_reg(s1, s1);
3166
3167                                         /* TODO: clean up this calculation */
3168                                         a = 3;    /* mov_membase_reg */
3169                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3170
3171                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3172                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetablelength));
3173                                         
3174                                         a += 3;    /* sub */
3175                                         CALCOFFSETBYTES(a, 0, super->index);
3176                                         
3177                                         a += 3;    /* test */
3178
3179                                         a += 6;    /* jcc */
3180                                         a += 3;    /* mov_membase_reg */
3181                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3182
3183                                         a += 3;    /* test */
3184                                         a += 4;    /* setcc */
3185
3186                                         x86_64_jcc(X86_64_CC_E, a);
3187
3188                                         x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3189                                         x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetablelength), REG_ITMP2);
3190                                         x86_64_alu_imm_reg(X86_64_SUB, super->index, REG_ITMP2);
3191                                         x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3192
3193                                         /* TODO: clean up this calculation */
3194                                         a = 0;
3195                                         a += 3;    /* mov_membase_reg */
3196                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3197
3198                                         a += 3;    /* test */
3199                                         a += 4;    /* setcc */
3200
3201                                         x86_64_jcc(X86_64_CC_LE, a);
3202                                         x86_64_mov_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP1);
3203                                         x86_64_test_reg_reg(REG_ITMP1, REG_ITMP1);
3204                                         x86_64_setcc_reg(X86_64_CC_NE, d);
3205
3206                                 } else {                                   /* class           */
3207                                         x86_64_test_reg_reg(s1, s1);
3208
3209                                         /* TODO: clean up this calculation */
3210                                         a = 3;    /* mov_membase_reg */
3211                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3212
3213                                         a += 10;   /* mov_imm_reg */
3214
3215                                         a += 2;    /* movl_membase_reg - only if REG_ITMP1 == RAX */
3216                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, baseval));
3217                                         
3218                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3219                                         CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3220                                         
3221                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3222                                         CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3223                                         
3224                                         a += 3;    /* sub */
3225                                         a += 3;    /* xor */
3226                                         a += 3;    /* cmp */
3227                                         a += 4;    /* setcc */
3228
3229                                         x86_64_jcc(X86_64_CC_E, a);
3230
3231                                         x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3232                                         x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3233                                         x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, baseval), REG_ITMP1);
3234                                         x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP3);
3235                                         x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3236                                         x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP3, REG_ITMP1);
3237                                         x86_64_alu_reg_reg(X86_64_XOR, d, d);
3238                                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
3239                                         x86_64_setcc_reg(X86_64_CC_BE, d);
3240                                 }
3241                         }
3242                         else
3243                                 panic("internal error: no inlined array instanceof");
3244                         }
3245                         store_reg_to_var_int(iptr->dst, d);
3246                         break;
3247
3248                 case ICMD_CHECKCAST:  /* ..., objectref ==> ..., objectref            */
3249
3250                                       /* op1:   0 == array, 1 == class                */
3251                                       /* val.a: (classinfo*) superclass               */
3252
3253 /*          superclass is an interface:
3254  *
3255  *          OK if ((sub == NULL) ||
3256  *                 (sub->vftbl->interfacetablelength > super->index) &&
3257  *                 (sub->vftbl->interfacetable[-super->index] != NULL));
3258  *
3259  *          superclass is a class:
3260  *
3261  *          OK if ((sub == NULL) || (0
3262  *                 <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3263  *                 super->vftbl->diffval));
3264  */
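/* note: the class test below is done with unsigned arithmetic: a single
   unsigned compare of (sub->vftbl->baseval - super->vftbl->baseval) against
   super->vftbl->diffval covers both bounds at once */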
3265
3266                         {
3267                         classinfo *super = (classinfo*) iptr->val.a;
3268                         
3269                         d = reg_of_var(iptr->dst, REG_ITMP3);
3270                         var_to_reg_int(s1, src, d);
3271                         if (iptr->op1) {                               /* class/interface */
3272                                 if (super->flags & ACC_INTERFACE) {        /* interface       */
3273                                         x86_64_test_reg_reg(s1, s1);
3274
3275                                         /* TODO: clean up this calculation */
3276                                         a = 3;    /* mov_membase_reg */
3277                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3278
3279                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3280                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetablelength));
3281
3282                                         a += 3;    /* sub */
3283                                         CALCOFFSETBYTES(a, 0, super->index);
3284
3285                                         a += 3;    /* test */
3286                                         a += 6;    /* jcc */
3287
3288                                         a += 3;    /* mov_membase_reg */
3289                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3290
3291                                         a += 3;    /* test */
3292                                         a += 6;    /* jcc */
3293
3294                                         x86_64_jcc(X86_64_CC_E, a);
3295
3296                                         x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3297                                         x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetablelength), REG_ITMP2);
3298                                         x86_64_alu_imm_reg(X86_64_SUB, super->index, REG_ITMP2);
3299                                         x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3300                                         x86_64_jcc(X86_64_CC_LE, 0);
3301                                         mcode_addxcastrefs(mcodeptr);
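                                        /* the 0 displacement above is a placeholder; the branch is
                                           recorded and later patched to the ClassCastException stub */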
3302                                         x86_64_mov_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP2);
3303                                         x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3304                                         x86_64_jcc(X86_64_CC_E, 0);
3305                                         mcode_addxcastrefs(mcodeptr);
3306
3307                                 } else {                                     /* class           */
3308                                         x86_64_test_reg_reg(s1, s1);
3309
3310                                         /* TODO: clean up this calculation */
3311                                         a = 3;    /* mov_membase_reg */
3312                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3313                                         a += 10;   /* mov_imm_reg */
3314                                         a += 2;    /* movl_membase_reg - only if REG_ITMP1 == RAX */
3315                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, baseval));
3316
3317                                         if (d != REG_ITMP3) {
3318                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3319                                                 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3320                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3321                                                 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3322                                                 a += 3;    /* sub */
3323                                                 
3324                                         } else {
3325                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3326                                                 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3327                                                 a += 3;    /* sub */
3328                                                 a += 10;   /* mov_imm_reg */
3329                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3330                                                 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3331                                         }
3332
3333                                         a += 3;    /* cmp */
3334                                         a += 6;    /* jcc */
3335
3336                                         x86_64_jcc(X86_64_CC_E, a);
3337
3338                                         x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3339                                         x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3340                                         x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, baseval), REG_ITMP1);
3341                                         if (d != REG_ITMP3) {
3342                                                 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP3);
3343                                                 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3344                                                 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP3, REG_ITMP1);
3345
3346                                         } else {
3347                                                 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP2);
3348                                                 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
3349                                                 x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3350                                                 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3351                                         }
3352                                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
3353                                         x86_64_jcc(X86_64_CC_A, 0);    /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3354                                         mcode_addxcastrefs(mcodeptr);
3355                                 }
3356
3357                         } else
3358                                 panic("internal error: no inlined array checkcast");
3359                         }
3360                         M_INTMOVE(s1, d);
3361                         store_reg_to_var_int(iptr->dst, d);
3362                         break;
3363
3364                 case ICMD_CHECKASIZE:  /* ..., size ==> ..., size                     */
3365
3366                         if (src->flags & INMEMORY) {
3367                                 x86_64_alul_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
3368                                 
3369                         } else {
3370                                 x86_64_testl_reg_reg(src->regoff, src->regoff);
3371                         }
3372                         x86_64_jcc(X86_64_CC_L, 0);
3373                         mcode_addxcheckarefs(mcodeptr);
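                        /* the branch is recorded and later patched to point at the
                           NegativeArraySizeException stub generated below */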
3374                         break;
3375
3376                 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref  */
3377                                          /* op1 = dimension, val.a = array descriptor */
3378
3379                         /* check for negative sizes and copy sizes to stack if necessary  */
3380
3381                         MCODECHECK((iptr->op1 << 1) + 64);
3382
3383                         for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3384                                 var_to_reg_int(s2, src, REG_ITMP1);
3385                                 x86_64_testl_reg_reg(s2, s2);
3386                                 x86_64_jcc(X86_64_CC_L, 0);
3387                                 mcode_addxcheckarefs(mcodeptr);
3388
3389                                 /* copy sizes to stack (argument numbers >= INT_ARG_CNT)      */
3390
3391                                 if (src->varkind != ARGVAR) {
3392                                         x86_64_mov_reg_membase(s2, REG_SP, (s1 + intreg_argnum) * 8);
3393                                 }
3394                         }
3395
3396                         /* a0 = dimension count */
3397                         x86_64_mov_imm_reg(iptr->op1, argintregs[0]);
3398
3399                         /* a1 = arraydescriptor */
3400                         x86_64_mov_imm_reg((s8) iptr->val.a, argintregs[1]);
3401
3402                         /* a2 = pointer to dimensions = stack pointer */
3403                         x86_64_mov_reg_reg(REG_SP, argintregs[2]);
3404
3405                         x86_64_mov_imm_reg((s8) (builtin_nmultianewarray), REG_ITMP1);
3406                         x86_64_call_reg(REG_ITMP1);
3407
3408                         s1 = reg_of_var(iptr->dst, REG_RESULT);
3409                         M_INTMOVE(REG_RESULT, s1);
3410                         store_reg_to_var_int(iptr->dst, s1);
3411                         break;
3412
3413                 default: sprintf(logtext, "Unknown pseudo command: %d", iptr->opc);
3414                          error();
3415         } /* switch */
3416                 
3417         } /* for instruction */
3418                 
3419         /* copy values to interface registers */
3420
3421         src = bptr->outstack;
3422         len = bptr->outdepth;
3423         MCODECHECK(64+len);
3424         while (src) {
3425                 len--;
3426                 if (src->varkind != STACKVAR) {
3427                         s2 = src->type;
3428                         if (IS_FLT_DBL_TYPE(s2)) {
3429                                 var_to_reg_flt(s1, src, REG_FTMP1);
3430                                 if (!(interfaces[len][s2].flags & INMEMORY)) {
3431                                         M_FLTMOVE(s1, interfaces[len][s2].regoff);
3432
3433                                 } else {
3434                                         x86_64_movq_reg_membase(s1, REG_SP, 8 * interfaces[len][s2].regoff);
3435                                 }
3436
3437                         } else {
3438                                 var_to_reg_int(s1, src, REG_ITMP1);
3439                                 if (!(interfaces[len][s2].flags & INMEMORY)) {
3440                                         M_INTMOVE(s1, interfaces[len][s2].regoff);
3441
3442                                 } else {
3443                                         x86_64_mov_reg_membase(s1, REG_SP, interfaces[len][s2].regoff * 8);
3444                                 }
3445                         }
3446                 }
3447                 src = src->prev;
3448         }
3449         } /* if (bptr -> flags >= BBREACHED) */
3450         } /* for basic block */
3451
3452         /* bptr -> mpc = (int)((u1*) mcodeptr - mcodebase); */
3453
3454         {
3455
3456         /* generate bound check stubs */
3457         u1 *xcodeptr = NULL;
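        /* xcodeptr remembers the first stub emitted; when the method has no
           exception handlers, later bound check branches are resolved to that
           stub (rewound by 10 + 10 + 3 bytes to include its XPC setup) instead
           of emitting a new one */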
3458         
3459         for (; xboundrefs != NULL; xboundrefs = xboundrefs->next) {
3460                 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3461                         gen_resolvebranch(mcodebase + xboundrefs->branchpos, 
3462                                 xboundrefs->branchpos, xcodeptr - mcodebase - (10 + 10 + 3));
3463                         continue;
3464                 }
3465
3466                 gen_resolvebranch(mcodebase + xboundrefs->branchpos, 
3467                                   xboundrefs->branchpos, mcodeptr - mcodebase);
3468
3469                 MCODECHECK(8);
3470
3471                 x86_64_mov_imm_reg(0, REG_ITMP2_XPC);    /* 10 bytes */
3472                 dseg_adddata(mcodeptr);
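                /* the 0 above is patched at code installation time (recorded by
                   dseg_adddata), so that after the add below REG_ITMP2_XPC holds
                   the address of the failing instruction */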
3473                 x86_64_mov_imm_reg(xboundrefs->branchpos - 6, REG_ITMP1);    /* 10 bytes */
3474                 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC);    /* 3 bytes */
3475
3476                 if (xcodeptr != NULL) {
3477                         x86_64_jmp_imm((xcodeptr - mcodeptr) - 5);
3478
3479                 } else {
3480                         xcodeptr = mcodeptr;
3481
3482                         x86_64_mov_imm_reg((s8) proto_java_lang_ArrayIndexOutOfBoundsException, REG_ITMP1_XPTR);
3483                         x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3484                         x86_64_jmp_reg(REG_ITMP3);
3485                 }
3486         }
3487
3488         /* generate negative array size check stubs */
3489         xcodeptr = NULL;
3490         
3491         for (; xcheckarefs != NULL; xcheckarefs = xcheckarefs->next) {
3492                 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3493                         gen_resolvebranch(mcodebase + xcheckarefs->branchpos, 
3494                                 xcheckarefs->branchpos, xcodeptr - mcodebase - (10 + 10 + 3));
3495                         continue;
3496                 }
3497
3498                 gen_resolvebranch(mcodebase + xcheckarefs->branchpos, 
3499                                   xcheckarefs->branchpos, mcodeptr - mcodebase);
3500
3501                 MCODECHECK(8);
3502
3503                 x86_64_mov_imm_reg(0, REG_ITMP2_XPC);    /* 10 bytes */
3504                 dseg_adddata(mcodeptr);
3505                 x86_64_mov_imm_reg(xcheckarefs->branchpos - 6, REG_ITMP1);    /* 10 bytes */
3506                 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC);    /* 3 bytes */
3507
3508                 if (xcodeptr != NULL) {
3509                         x86_64_jmp_imm((xcodeptr - mcodeptr) - 5);
3510
3511                 } else {
3512                         xcodeptr = mcodeptr;
3513
3514                         x86_64_mov_imm_reg((s8) proto_java_lang_NegativeArraySizeException, REG_ITMP1_XPTR);
3515                         x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3516                         x86_64_jmp_reg(REG_ITMP3);
3517                 }
3518         }
3519
3520         /* generate cast check stubs */
3521         xcodeptr = NULL;
3522         
3523         for (; xcastrefs != NULL; xcastrefs = xcastrefs->next) {
3524                 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3525                         gen_resolvebranch(mcodebase + xcastrefs->branchpos, 
3526                                 xcastrefs->branchpos, xcodeptr - mcodebase - (10 + 10 + 3));
3527                         continue;
3528                 }
3529
3530                 gen_resolvebranch(mcodebase + xcastrefs->branchpos, 
3531                                   xcastrefs->branchpos, mcodeptr - mcodebase);
3532
3533                 MCODECHECK(8);
3534
3535                 x86_64_mov_imm_reg(0, REG_ITMP2_XPC);    /* 10 bytes */
3536                 dseg_adddata(mcodeptr);
3537                 x86_64_mov_imm_reg(xcastrefs->branchpos - 6, REG_ITMP1);    /* 10 bytes */
3538                 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC);    /* 3 bytes */
3539
3540                 if (xcodeptr != NULL) {
3541                         x86_64_jmp_imm((xcodeptr - mcodeptr) - 5);
3542                 
3543                 } else {
3544                         xcodeptr = mcodeptr;
3545
3546                         x86_64_mov_imm_reg((s8) proto_java_lang_ClassCastException, REG_ITMP1_XPTR);
3547                         x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3548                         x86_64_jmp_reg(REG_ITMP3);
3549                 }
3550         }
3551
3552 #ifdef SOFTNULLPTRCHECK
3553         /* generate null pointer check stubs */
3554         xcodeptr = NULL;
3555         
3556         for (; xnullrefs != NULL; xnullrefs = xnullrefs->next) {
3557                 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3558                         gen_resolvebranch(mcodebase + xnullrefs->branchpos, 
3559                                 xnullrefs->branchpos, xcodeptr - mcodebase - (10 + 10 + 3));
3560                         continue;
3561                 }
3562
3563                 gen_resolvebranch(mcodebase + xnullrefs->branchpos, 
3564                                   xnullrefs->branchpos, mcodeptr - mcodebase);
3565
3566                 MCODECHECK(8);
3567
3568                 x86_64_mov_imm_reg(0, REG_ITMP2_XPC);    /* 10 bytes */
3569                 dseg_adddata(mcodeptr);
3570                 x86_64_mov_imm_reg(xnullrefs->branchpos - 6, REG_ITMP1);    /* 10 bytes */
3571                 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC);    /* 3 bytes */
3572
3573                 if (xcodeptr != NULL) {
3574                         x86_64_jmp_imm((xcodeptr - mcodeptr) - 5);
3575                 
3576                 } else {
3577                         xcodeptr = mcodeptr;
3578
3579                         x86_64_mov_imm_reg((s8) proto_java_lang_NullPointerException, REG_ITMP1_XPTR);
3580                         x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3581                         x86_64_jmp_reg(REG_ITMP3);
3582                 }
3583         }
3584
3585 #endif
3586         }
3587
3588         mcode_finish((int)((u1*) mcodeptr - mcodebase));
3589 }
3590
3591
3592 /* function createcompilerstub *************************************************
3593
3594         creates a stub routine which calls the compiler
3595         
3596 *******************************************************************************/
3597
3598 #define COMPSTUBSIZE 23
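/* 23 bytes = 10 + 10 + 3: two mov_imm_reg (10 bytes each) plus the indirect
   jmp emitted below (assumed to need a REX prefix) */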
3599
3600 u1 *createcompilerstub(methodinfo *m)
3601 {
3602         u1 *s = CNEW(u1, COMPSTUBSIZE);     /* memory to hold the stub            */
3603         mcodeptr = s;                       /* code generation pointer            */
3604
3605                                             /* code for the stub                  */
3606         x86_64_mov_imm_reg((s8) m, REG_ITMP1); /* pass method pointer to compiler */
3607         x86_64_mov_imm_reg((s8) asm_call_jit_compiler, REG_ITMP3);/* load address */
3608         x86_64_jmp_reg(REG_ITMP3);          /* jump to compiler                   */
3609
3610 #ifdef STATISTICS
3611         count_cstub_len += COMPSTUBSIZE;
3612 #endif
3613
3614         return (u1*) s;
3615 }
3616
3617
3618 /* function removecompilerstub *************************************************
3619
3620      deletes a compiler stub from memory (simply by freeing it)
3621
3622 *******************************************************************************/
3623
3624 void removecompilerstub(u1 *stub) 
3625 {
3626         CFREE(stub, COMPSTUBSIZE);
3627 }
3628
3629 /* function: createnativestub **************************************************
3630
3631         creates a stub routine which calls a native method
3632
3633 *******************************************************************************/
3634
3635 #define NATIVESTUBSIZE 420
3636
3637 u1 *createnativestub(functionptr f, methodinfo *m)
3638 {
3639         u1 *s = CNEW(u1, NATIVESTUBSIZE);   /* memory to hold the stub            */
3640         mcodeptr = s;                       /* make macros work                   */
3641
3642         reg_init();
3643
3644         if (runverbose) {
3645                 int p, l, s1;
3646
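                /* reserve one slot for the method pointer, six for the integer
                   and eight for the float argument registers (only four float
                   registers are actually saved below) */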
3647                 x86_64_alu_imm_reg(X86_64_SUB, (6 + 8 + 1) * 8, REG_SP);
3648
3649                 x86_64_mov_reg_membase(argintregs[0], REG_SP, 1 * 8);
3650                 x86_64_mov_reg_membase(argintregs[1], REG_SP, 2 * 8);
3651                 x86_64_mov_reg_membase(argintregs[2], REG_SP, 3 * 8);
3652                 x86_64_mov_reg_membase(argintregs[3], REG_SP, 4 * 8);
3653                 x86_64_mov_reg_membase(argintregs[4], REG_SP, 5 * 8);
3654                 x86_64_mov_reg_membase(argintregs[5], REG_SP, 6 * 8);
3655
3656                 x86_64_movq_reg_membase(argfltregs[0], REG_SP, 7 * 8);
3657                 x86_64_movq_reg_membase(argfltregs[1], REG_SP, 8 * 8);
3658                 x86_64_movq_reg_membase(argfltregs[2], REG_SP, 9 * 8);
3659                 x86_64_movq_reg_membase(argfltregs[3], REG_SP, 10 * 8);
3660 /*              x86_64_movq_reg_membase(argfltregs[4], REG_SP, 11 * 8); */
3661 /*              x86_64_movq_reg_membase(argfltregs[5], REG_SP, 12 * 8); */
3662 /*              x86_64_movq_reg_membase(argfltregs[6], REG_SP, 13 * 8); */
3663 /*              x86_64_movq_reg_membase(argfltregs[7], REG_SP, 14 * 8); */
3664
3665                 descriptor2types(m);                     /* set paramcount and paramtypes */
3666
3667                 /* copy float arguments into integer registers (shifting the integer arguments up) so builtin_trace_args can also print the raw bits of the floats passed */
3668                 for (p = 0, l = 0; p < m->paramcount; p++) {
3669                         if (IS_FLT_DBL_TYPE(m->paramtypes[p])) {
3670                                 for (s1 = (m->paramcount > intreg_argnum) ? intreg_argnum - 2 : m->paramcount - 2; s1 >= p; s1--) {
3671                                         x86_64_mov_reg_reg(argintregs[s1], argintregs[s1 + 1]);
3672                                 }
3673
3674                                 x86_64_movd_freg_reg(argfltregs[l], argintregs[p]);
3675                                 l++;
3676                         }
3677                 }
3678
3679                 x86_64_mov_imm_reg((s8) m, REG_ITMP2);
3680                 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, 0 * 8);
3681 /*              x86_64_mov_imm_reg(asm_builtin_trace, REG_ITMP1); */
3682                 x86_64_mov_imm_reg((s8) builtin_trace_args, REG_ITMP1);
3683                 x86_64_call_reg(REG_ITMP1);
3684
3685                 x86_64_mov_membase_reg(REG_SP, 1 * 8, argintregs[0]);
3686                 x86_64_mov_membase_reg(REG_SP, 2 * 8, argintregs[1]);
3687                 x86_64_mov_membase_reg(REG_SP, 3 * 8, argintregs[2]);
3688                 x86_64_mov_membase_reg(REG_SP, 4 * 8, argintregs[3]);
3689                 x86_64_mov_membase_reg(REG_SP, 5 * 8, argintregs[4]);
3690                 x86_64_mov_membase_reg(REG_SP, 6 * 8, argintregs[5]);
3691
3692                 x86_64_movq_membase_reg(REG_SP, 7 * 8, argfltregs[0]);
3693                 x86_64_movq_membase_reg(REG_SP, 8 * 8, argfltregs[1]);
3694                 x86_64_movq_membase_reg(REG_SP, 9 * 8, argfltregs[2]);
3695                 x86_64_movq_membase_reg(REG_SP, 10 * 8, argfltregs[3]);
3696 /*              x86_64_movq_membase_reg(REG_SP, 11 * 8, argfltregs[4]); */
3697 /*              x86_64_movq_membase_reg(REG_SP, 12 * 8, argfltregs[5]); */
3698 /*              x86_64_movq_membase_reg(REG_SP, 13 * 8, argfltregs[6]); */
3699 /*              x86_64_movq_membase_reg(REG_SP, 14 * 8, argfltregs[7]); */
3700
3701                 x86_64_alu_imm_reg(X86_64_ADD, (6 + 8 + 1) * 8, REG_SP);
3702         }
3703
3704         x86_64_alu_imm_reg(X86_64_SUB, 7 * 8, REG_SP);    /* keep stack 16-byte aligned */
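        /* six slots for the float registers saved below plus one slot of
           padding to preserve the 16-byte alignment */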
3705
3706         /* save callee saved float registers */
3707         x86_64_movq_reg_membase(XMM15, REG_SP, 0 * 8);
3708         x86_64_movq_reg_membase(XMM14, REG_SP, 1 * 8);
3709         x86_64_movq_reg_membase(XMM13, REG_SP, 2 * 8);
3710         x86_64_movq_reg_membase(XMM12, REG_SP, 3 * 8);
3711         x86_64_movq_reg_membase(XMM11, REG_SP, 4 * 8);
3712         x86_64_movq_reg_membase(XMM10, REG_SP, 5 * 8);
3713
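        /* shift the integer arguments up by one register; the first argument
           slot is needed for the environment pointer passed below */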
3714         x86_64_mov_reg_reg(argintregs[4], argintregs[5]);
3715         x86_64_mov_reg_reg(argintregs[3], argintregs[4]);
3716         x86_64_mov_reg_reg(argintregs[2], argintregs[3]);
3717         x86_64_mov_reg_reg(argintregs[1], argintregs[2]);
3718         x86_64_mov_reg_reg(argintregs[0], argintregs[1]);
3719
3720         x86_64_mov_imm_reg((s8) &env, argintregs[0]);
3721
3722         x86_64_mov_imm_reg((s8) f, REG_ITMP1);
3723         x86_64_call_reg(REG_ITMP1);
3724
3725         /* restore callee saved registers */
3726         x86_64_movq_membase_reg(REG_SP, 0 * 8, XMM15);
3727         x86_64_movq_membase_reg(REG_SP, 1 * 8, XMM14);
3728         x86_64_movq_membase_reg(REG_SP, 2 * 8, XMM13);
3729         x86_64_movq_membase_reg(REG_SP, 3 * 8, XMM12);
3730         x86_64_movq_membase_reg(REG_SP, 4 * 8, XMM11);
3731         x86_64_movq_membase_reg(REG_SP, 5 * 8, XMM10);
3732
3733         x86_64_alu_imm_reg(X86_64_ADD, 7 * 8, REG_SP);    /* keep stack 16-byte aligned */
3734
3735         if (runverbose) {
3736                 x86_64_alu_imm_reg(X86_64_SUB, 3 * 8, REG_SP);    /* keep stack 16-byte aligned */
3737
3738                 x86_64_mov_reg_membase(REG_RESULT, REG_SP, 0 * 8);
3739                 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, 1 * 8);
3740
3741                 x86_64_mov_imm_reg((s8) m, argintregs[0]);
3742                 x86_64_mov_reg_reg(REG_RESULT, argintregs[1]);
3743                 M_FLTMOVE(REG_FRESULT, argfltregs[0]);
3744                 M_FLTMOVE(REG_FRESULT, argfltregs[1]);
3745
3746 /*              x86_64_mov_imm_reg(asm_builtin_exittrace, REG_ITMP1); */
3747                 x86_64_mov_imm_reg((s8) builtin_displaymethodstop, REG_ITMP1);
3748                 x86_64_call_reg(REG_ITMP1);
3749
3750                 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_RESULT);
3751                 x86_64_movq_membase_reg(REG_SP, 1 * 8, REG_FRESULT);
3752
3753                 x86_64_alu_imm_reg(X86_64_ADD, 3 * 8, REG_SP);    /* keep stack 16-byte aligned */
3754         }
3755
3756         x86_64_mov_imm_reg((s8) &exceptionptr, REG_ITMP3);
3757         x86_64_mov_membase_reg(REG_ITMP3, 0, REG_ITMP3);
3758         x86_64_test_reg_reg(REG_ITMP3, REG_ITMP3);
3759         x86_64_jcc(X86_64_CC_NE, 1);
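        /* a pending exception skips the 1-byte ret below and falls through
           into the exception handling code */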
3760
3761         x86_64_ret();
3762
3763         x86_64_mov_reg_reg(REG_ITMP3, REG_ITMP1_XPTR);
3764         x86_64_mov_imm_reg((s8) &exceptionptr, REG_ITMP3);
3765         x86_64_alu_reg_reg(X86_64_XOR, REG_ITMP2, REG_ITMP2);
3766         x86_64_mov_reg_membase(REG_ITMP2, REG_ITMP3, 0);    /* clear exception pointer */
3767
3768         x86_64_mov_membase_reg(REG_SP, 0, REG_ITMP2_XPC);    /* get return address from stack */
3769         x86_64_alu_imm_reg(X86_64_SUB, 3, REG_ITMP2_XPC);    /* rewind over the callq so XPC points at the call site */
3770
3771         x86_64_mov_imm_reg((s8) asm_handle_nat_exception, REG_ITMP3);
3772         x86_64_jmp_reg(REG_ITMP3);
3773
3774 #if 0
3775         {
3776                 static int stubprinted;
3777                 if (!stubprinted)
3778                         printf("stubsize: %d\n", ((long)mcodeptr - (long) s));
3779                 stubprinted = 1;
3780         }
3781 #endif
3782
3783 #ifdef STATISTICS
3784         count_nstub_len += NATIVESTUBSIZE;
3785 #endif
3786
3787         return (u1*) s;
3788 }
3789
3790
3791
3792 /* function: removenativestub **************************************************
3793
3794     removes a previously created native-stub from memory
3795     
3796 *******************************************************************************/
3797
3798 void removenativestub(u1 *stub)
3799 {
3800         CFREE(stub, NATIVESTUBSIZE);
3801 }
3802
3803
3804
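/* emit a 32-bit ALU instruction for all combinations of in-memory and
   in-register operands; REG_ITMP1 serves as scratch when the operands and
   the destination all live on the stack */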
3805 void x86_64_emit_ialu(s4 alu_op, stackptr src, instruction *iptr)
3806 {
3807         s4 s1 = src->prev->regoff;
3808         s4 s2 = src->regoff;
3809         s4 d = iptr->dst->regoff;
3810
3811         if (iptr->dst->flags & INMEMORY) {
3812                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
3813                         if (s2 == d) {
3814                                 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
3815                                 x86_64_alul_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
3816
3817                         } else if (s1 == d) {
3818                                 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
3819                                 x86_64_alul_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
3820
3821                         } else {
3822                                 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
3823                                 x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, REG_ITMP1);
3824                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
3825                         }
3826
3827                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
3828                         if (s2 == d) {
3829                                 x86_64_alul_reg_membase(alu_op, s1, REG_SP, d * 8);
3830
3831                         } else {
3832                                 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
3833                                 x86_64_alul_reg_reg(alu_op, s1, REG_ITMP1);
3834                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
3835                         }
3836
3837                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
3838                         if (s1 == d) {
3839                                 x86_64_alul_reg_membase(alu_op, s2, REG_SP, d * 8);
3840                                                 
3841                         } else {
3842                                 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
3843                                 x86_64_alul_reg_reg(alu_op, s2, REG_ITMP1);
3844                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
3845                         }
3846
3847                 } else {
3848                         x86_64_movl_reg_membase(s1, REG_SP, d * 8);
3849                         x86_64_alul_reg_membase(alu_op, s2, REG_SP, d * 8);
3850                 }
3851
3852         } else {
3853                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
3854                         x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
3855                         x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, d);
3856
3857                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
3858                         M_INTMOVE(s1, d);
3859                         x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, d);
3860
3861                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
3862                         M_INTMOVE(s2, d);
3863                         x86_64_alul_membase_reg(alu_op, REG_SP, s1 * 8, d);
3864
3865                 } else {
3866                         if (s2 == d) {
3867                                 x86_64_alul_reg_reg(alu_op, s1, d);
3868
3869                         } else {
3870                                 M_INTMOVE(s1, d);
3871                                 x86_64_alul_reg_reg(alu_op, s2, d);
3872                         }
3873                 }
3874         }
3875 }
3876
3877
3878
3879 void x86_64_emit_lalu(s4 alu_op, stackptr src, instruction *iptr)
3880 {
3881         s4 s1 = src->prev->regoff;
3882         s4 s2 = src->regoff;
3883         s4 d = iptr->dst->regoff;
3884
3885         if (iptr->dst->flags & INMEMORY) {
3886                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
3887                         if (s2 == d) {
3888                                 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
3889                                 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
3890
3891                         } else if (s1 == d) {
3892                                 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
3893                                 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
3894
3895                         } else {
3896                                 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
3897                                 x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, REG_ITMP1);
3898                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
3899                         }
3900
3901                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
3902                         if (s2 == d) {
3903                                 x86_64_alu_reg_membase(alu_op, s1, REG_SP, d * 8);
3904
3905                         } else {
3906                                 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
3907                                 x86_64_alu_reg_reg(alu_op, s1, REG_ITMP1);
3908                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
3909                         }
3910
3911                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
3912                         if (s1 == d) {
3913                                 x86_64_alu_reg_membase(alu_op, s2, REG_SP, d * 8);
3914                                                 
3915                         } else {
3916                                 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
3917                                 x86_64_alu_reg_reg(alu_op, s2, REG_ITMP1);
3918                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
3919                         }
3920
3921                 } else {
3922                         x86_64_mov_reg_membase(s1, REG_SP, d * 8);
3923                         x86_64_alu_reg_membase(alu_op, s2, REG_SP, d * 8);
3924                 }
3925
3926         } else {
3927                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
3928                         x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
3929                         x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, d);
3930
3931                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
3932                         M_INTMOVE(s1, d);
3933                         x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, d);
3934
3935                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
3936                         M_INTMOVE(s2, d);
3937                         x86_64_alu_membase_reg(alu_op, REG_SP, s1 * 8, d);
3938
3939                 } else {
3940                         if (s2 == d) {
3941                                 x86_64_alu_reg_reg(alu_op, s1, d);
3942
3943                         } else {
3944                                 M_INTMOVE(s1, d);
3945                                 x86_64_alu_reg_reg(alu_op, s2, d);
3946                         }
3947                 }
3948         }
3949 }
3950
3951
3952
3953 void x86_64_emit_ialuconst(s4 alu_op, stackptr src, instruction *iptr)
3954 {
3955         s4 s1 = src->regoff;
3956         s4 d = iptr->dst->regoff;
3957
3958         if (iptr->dst->flags & INMEMORY) {
3959                 if (src->flags & INMEMORY) {
3960                         if (s1 == d) {
3961                                 x86_64_alul_imm_membase(alu_op, iptr->val.i, REG_SP, d * 8);
3962
3963                         } else {
3964                                 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
3965                                 x86_64_alul_imm_reg(alu_op, iptr->val.i, REG_ITMP1);
3966                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
3967                         }
3968
3969                 } else {
3970                         x86_64_movl_reg_membase(s1, REG_SP, d * 8);
3971                         x86_64_alul_imm_membase(alu_op, iptr->val.i, REG_SP, d * 8);
3972                 }
3973
3974         } else {
3975                 if (src->flags & INMEMORY) {
3976                         x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
3977                         x86_64_alul_imm_reg(alu_op, iptr->val.i, d);
3978
3979                 } else {
3980                         M_INTMOVE(s1, d);
3981                         x86_64_alul_imm_reg(alu_op, iptr->val.i, d);
3982                 }
3983         }
3984 }
3985
3986
3987
3988 void x86_64_emit_laluconst(s4 alu_op, stackptr src, instruction *iptr)
3989 {
3990         s4 s1 = src->regoff;
3991         s4 d = iptr->dst->regoff;
3992
3993         if (iptr->dst->flags & INMEMORY) {
3994                 if (src->flags & INMEMORY) {
3995                         if (s1 == d) {
3996                                 if (x86_64_is_imm32(iptr->val.l)) {
3997                                         x86_64_alu_imm_membase(alu_op, iptr->val.l, REG_SP, d * 8);
3998
3999                                 } else {
4000                                         x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4001                                         x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4002                                 }
4003
4004                         } else {
4005                                 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4006
4007                                 if (x86_64_is_imm32(iptr->val.l)) {
4008                                         x86_64_alu_imm_reg(alu_op, iptr->val.l, REG_ITMP1);
4009
4010                                 } else {
4011                                         x86_64_mov_imm_reg(iptr->val.l, REG_ITMP2);
4012                                         x86_64_alu_reg_reg(alu_op, REG_ITMP2, REG_ITMP1);
4013                                 }
4014                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4015                         }
4016
4017                 } else {
4018                         x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4019
4020                         if (x86_64_is_imm32(iptr->val.l)) {
4021                                 x86_64_alu_imm_membase(alu_op, iptr->val.l, REG_SP, d * 8);
4022
4023                         } else {
4024                                 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4025                                 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4026                         }
4027                 }
4028
4029         } else {
4030                 if (src->flags & INMEMORY) {
4031                         x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4032
4033                 } else {
4034                         M_INTMOVE(s1, d);
4035                 }
4036
4037                 if (x86_64_is_imm32(iptr->val.l)) {
4038                         x86_64_alu_imm_reg(alu_op, iptr->val.l, d);
4039
4040                 } else {
4041                         x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4042                         x86_64_alu_reg_reg(alu_op, REG_ITMP1, d);
4043                 }
4044         }
4045 }
4046
4047
4048
4049 void x86_64_emit_ishift(s4 shift_op, stackptr src, instruction *iptr)
4050 {
4051         s4 s1 = src->prev->regoff;
4052         s4 s2 = src->regoff;
4053         s4 d = iptr->dst->regoff;
4054
4055         M_INTMOVE(RCX, REG_ITMP1);    /* save RCX */
4056         if (iptr->dst->flags & INMEMORY) {
4057                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4058                         if (s1 == d) {
4059                                 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4060                                 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4061
4062                         } else {
4063                                 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4064                                 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4065                                 x86_64_shiftl_reg(shift_op, REG_ITMP2);
4066                                 x86_64_movl_reg_membase(REG_ITMP2, REG_SP, d * 8);
4067                         }
4068
4069                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4070                         x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4071                         x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4072                         x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4073
4074                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4075                         if (s1 == d) {
4076                                 M_INTMOVE(s2, RCX);
4077                                 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4078
4079                         } else {
4080                                 M_INTMOVE(s2, RCX);
4081                                 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4082                                 x86_64_shiftl_reg(shift_op, REG_ITMP2);
4083                                 x86_64_movl_reg_membase(REG_ITMP2, REG_SP, d * 8);
4084                         }
4085
4086                 } else {
4087                         M_INTMOVE(s2, RCX);
4088                         x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4089                         x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4090                 }
4091                 M_INTMOVE(REG_ITMP1, RCX);    /* restore RCX */
4092
4093         } else {
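                /* the shift count must end up in CL; if the destination is RCX
                   itself, compute into REG_ITMP3 and move the result back below */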
4094                 if (d == RCX) {
4095                         d = REG_ITMP3;
4096                 }
4097                                         
4098                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4099                         x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4100                         x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4101                         x86_64_shiftl_reg(shift_op, d);
4102
4103                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4104                         M_INTMOVE(s1, d);    /* maybe src is RCX */
4105                         x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4106                         x86_64_shiftl_reg(shift_op, d);
4107
4108                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4109                         M_INTMOVE(s2, RCX);
4110                         x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4111                         x86_64_shiftl_reg(shift_op, d);
4112
4113                 } else {
4114                         if (s1 == RCX) {
4115                                 M_INTMOVE(s1, d);
4116                                 M_INTMOVE(s2, RCX);
4117
4118                         } else {
4119                                 M_INTMOVE(s2, RCX);
4120                                 M_INTMOVE(s1, d);
4121                         }
4122                         x86_64_shiftl_reg(shift_op, d);
4123                 }
4124
4125                 if (iptr->dst->regoff == RCX) {
4126                         M_INTMOVE(REG_ITMP3, RCX);    /* d was redirected to REG_ITMP3 above */
4127
4128                 } else {
4129                         M_INTMOVE(REG_ITMP1, RCX);    /* restore RCX */
4130                 }
4131         }
4132 }
4133
4134
4135
4136 void x86_64_emit_lshift(s4 shift_op, stackptr src, instruction *iptr)
4137 {
4138         s4 s1 = src->prev->regoff;
4139         s4 s2 = src->regoff;
4140         s4 d = iptr->dst->regoff;
4141
4142         M_INTMOVE(RCX, REG_ITMP1);    /* save RCX */
4143         if (iptr->dst->flags & INMEMORY) {
4144                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4145                         if (s1 == d) {
4146                                 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4147                                 x86_64_shift_membase(shift_op, REG_SP, d * 8);
4148
4149                         } else {
4150                                 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4151                                 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4152                                 x86_64_shift_reg(shift_op, REG_ITMP2);
4153                                 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, d * 8);
4154                         }
4155
4156                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4157                         x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4158                         x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4159                         x86_64_shift_membase(shift_op, REG_SP, d * 8);
4160
4161                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4162                         if (s1 == d) {
4163                                 M_INTMOVE(s2, RCX);
4164                                 x86_64_shift_membase(shift_op, REG_SP, d * 8);
4165
4166                         } else {
4167                                 M_INTMOVE(s2, RCX);
4168                                 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4169                                 x86_64_shift_reg(shift_op, REG_ITMP2);
4170                                 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, d * 8);
4171                         }
4172
4173                 } else {
4174                         M_INTMOVE(s2, RCX);
4175                         x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4176                         x86_64_shift_membase(shift_op, REG_SP, d * 8);
4177                 }
4178                 M_INTMOVE(REG_ITMP1, RCX);    /* restore RCX */
4179
4180         } else {
4181                 if (d == RCX) {
4182                         d = REG_ITMP3;
4183                 }
4184
4185                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4186                         x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4187                         x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4188                         x86_64_shift_reg(shift_op, d);
4189
4190                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4191                         M_INTMOVE(s1, d);    /* maybe src is RCX */
4192                         x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4193                         x86_64_shift_reg(shift_op, d);
4194
4195                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4196                         M_INTMOVE(s2, RCX);
4197                         x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4198                         x86_64_shift_reg(shift_op, d);
4199
4200                 } else {
4201                         if (s1 == RCX) {
4202                                 M_INTMOVE(s1, d);
4203                                 M_INTMOVE(s2, RCX);
4204                         } else {
4205                                 M_INTMOVE(s2, RCX);
4206                                 M_INTMOVE(s1, d);
4207                         }
4208                         x86_64_shift_reg(shift_op, d);
4209                 }
4210
4211                 if (iptr->dst->regoff == RCX) {
4212                         M_INTMOVE(REG_ITMP3, RCX);    /* d was redirected to REG_ITMP3 above */
4213
4214                 } else {
4215                         M_INTMOVE(REG_ITMP1, RCX);    /* restore RCX */
4216                 }
4217         }
4218 }
4219
4220
4221
4222 void x86_64_emit_ishiftconst(s4 shift_op, stackptr src, instruction *iptr)
4223 {
4224         s4 s1 = src->regoff;
4225         s4 d = iptr->dst->regoff;
4226
4227         if ((src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
4228                 if (s1 == d) {
4229                         x86_64_shiftl_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
4230
4231                 } else {
4232                         x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4233                         x86_64_shiftl_imm_reg(shift_op, iptr->val.i, REG_ITMP1);
4234                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
4235                 }
4236
4237         } else if ((src->flags & INMEMORY) && !(iptr->dst->flags & INMEMORY)) {
4238                 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4239                 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, d);
4240                                 
4241         } else if (!(src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
4242                 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4243                 x86_64_shiftl_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
4244
4245         } else {
4246                 M_INTMOVE(s1, d);
4247                 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, d);
4248         }
4249 }
4250
4251
4252
4253 void x86_64_emit_lshiftconst(s4 shift_op, stackptr src, instruction *iptr)
4254 {
4255         s4 s1 = src->regoff;
4256         s4 d = iptr->dst->regoff;
4257
4258         if ((src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
4259                 if (s1 == d) {
4260                         x86_64_shift_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
4261
4262                 } else {
4263                         x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4264                         x86_64_shift_imm_reg(shift_op, iptr->val.i, REG_ITMP1);
4265                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4266                 }
4267
4268         } else if ((src->flags & INMEMORY) && !(iptr->dst->flags & INMEMORY)) {
4269                 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4270                 x86_64_shift_imm_reg(shift_op, iptr->val.i, d);
4271                                 
4272         } else if (!(src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
4273                 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4274                 x86_64_shift_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
4275
4276         } else {
4277                 M_INTMOVE(s1, d);
4278                 x86_64_shift_imm_reg(shift_op, iptr->val.i, d);
4279         }
4280 }
4281
4282
4283
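/* compare the operand against the instruction's immediate and emit a
   conditional branch; the 0 displacement is a placeholder resolved later
   via mcode_addreference */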
4284 void x86_64_emit_ifcc(s4 if_op, stackptr src, instruction *iptr)
4285 {
4286         if (src->flags & INMEMORY) {
4287                 x86_64_alul_imm_membase(X86_64_CMP, iptr->val.i, REG_SP, src->regoff * 8);
4288
4289         } else {
4290                 x86_64_alul_imm_reg(X86_64_CMP, iptr->val.i, src->regoff);
4291         }
4292         x86_64_jcc(if_op, 0);
4293         mcode_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
4294 }
4295
4296
4297
4298 void x86_64_emit_if_lcc(s4 if_op, stackptr src, instruction *iptr)
4299 {
4300         s4 s1 = src->regoff;
4301
4302         if (src->flags & INMEMORY) {
4303                 if (x86_64_is_imm32(iptr->val.l)) {
4304                         x86_64_alu_imm_membase(X86_64_CMP, iptr->val.l, REG_SP, s1 * 8);
4305
4306                 } else {
4307                         x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4308                         x86_64_alu_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
4309                 }
4310
4311         } else {
4312                 if (x86_64_is_imm32(iptr->val.l)) {
4313                         x86_64_alu_imm_reg(X86_64_CMP, iptr->val.l, s1);
4314
4315                 } else {
4316                         x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4317                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP1, s1);
4318                 }
4319         }
4320         x86_64_jcc(if_op, 0);
4321         mcode_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
4322 }
4323
4324
4325
4326 void x86_64_emit_if_icmpcc(s4 if_op, stackptr src, instruction *iptr)
4327 {
4328         s4 s1 = src->prev->regoff;
4329         s4 s2 = src->regoff;
4330
4331         if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4332                 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4333                 x86_64_alul_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
4334
4335         } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4336                 x86_64_alul_membase_reg(X86_64_CMP, REG_SP, s2 * 8, s1);
4337
4338         } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4339                 x86_64_alul_reg_membase(X86_64_CMP, s2, REG_SP, s1 * 8);
4340
4341         } else {
4342                 x86_64_alul_reg_reg(X86_64_CMP, s2, s1);
4343         }
4344         x86_64_jcc(if_op, 0);
4345         mcode_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
4346 }
4347
4348
4349
4350 void x86_64_emit_if_lcmpcc(s4 if_op, stackptr src, instruction *iptr)
4351 {
4352         s4 s1 = src->prev->regoff;
4353         s4 s2 = src->regoff;
4354
4355         if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4356                 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4357                 x86_64_alu_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
4358
4359         } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4360                 x86_64_alu_membase_reg(X86_64_CMP, REG_SP, s2 * 8, s1);
4361
4362         } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4363                 x86_64_alu_reg_membase(X86_64_CMP, s2, REG_SP, s1 * 8);
4364
4365         } else {
4366                 x86_64_alu_reg_reg(X86_64_CMP, s2, s1);
4367         }
4368         x86_64_jcc(if_op, 0);
4369         mcode_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
4370 }
4371
4372
4373
4374 #if 1
4375
4376 /*
4377  * mov ops
4378  */
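/* the emitters below use x86_64_emit_rex(W, reg, index, base) to produce the
   REX prefix (0100WRXB): W = 1 selects 64-bit operand size (movl variants pass
   W = 0), the remaining bits extend the ModRM/SIB register numbers */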
4379 void x86_64_mov_reg_reg(s8 reg, s8 dreg) {
4380         x86_64_emit_rex(1,(reg),0,(dreg));
4381         *(mcodeptr++) = 0x89;
4382         x86_64_emit_reg((reg),(dreg));
4383 }
4384
4385
4386 void x86_64_mov_imm_reg(s8 imm, s8 reg) {
4387         x86_64_emit_rex(1,0,0,(reg));
4388         *(mcodeptr++) = 0xb8 + ((reg) & 0x07);
4389         x86_64_emit_imm64((imm));
4390 }
4391
4392
4393 void x86_64_movl_imm_reg(s8 imm, s8 reg) {
4394         x86_64_emit_rex(0,0,0,(reg));
4395         *(mcodeptr++) = 0xb8 + ((reg) & 0x07);
4396         x86_64_emit_imm32((imm));
4397 }
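/*
 * Worked encoding example (a sketch; RAX is assumed to be register
 * number 0, as the register macros used in this file suggest):
 *
 *   x86_64_mov_imm_reg(0x1122334455667788, RAX)
 *     -> 48                          rex.w
 *        b8                          0xb8 + (0 & 0x07)
 *        88 77 66 55 44 33 22 11     imm64, little endian
 *
 * i.e. "mov rax, 0x1122334455667788"; the movl variant above drops rex.w
 * and emits only a 32-bit immediate.
 */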
4398
4399
4400 void x86_64_mov_membase_reg(s8 basereg, s8 disp, s8 reg) {
4401         x86_64_emit_rex(1,(reg),0,(basereg));
4402         *(mcodeptr++) = 0x8b;
4403         x86_64_emit_membase((basereg),(disp),(reg));
4404 }
4405
4406
4407 void x86_64_movl_membase_reg(s8 basereg, s8 disp, s8 reg) {
4408         x86_64_emit_rex(0,(reg),0,(basereg));
4409         *(mcodeptr++) = 0x8b;
4410         x86_64_emit_membase((basereg),(disp),(reg));
4411 }
4412
4413
4414 /*
4415  * variant for INVOKEVIRTUAL/INVOKEINTERFACE: the membase displacement is
4416  * always encoded as a full 32-bit immediate, so the instruction length stays constant and patchable
4417  */
4418 void x86_64_mov_membase32_reg(s8 basereg, s8 disp, s8 reg) {
4419         x86_64_emit_rex(1,(reg),0,(basereg));
4420         *(mcodeptr++) = 0x8b;
4421         x86_64_address_byte(2, (reg), (basereg));
4422         x86_64_emit_imm32((disp));
4423 }
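/*
 * A hedged illustration of why the forced 32-bit displacement matters: at
 * an INVOKEVIRTUAL/INVOKEINTERFACE site the vftbl offset may well fit into
 * 8 bits, but the instruction must keep a constant length so that the
 * displacement bytes can be found and patched later.  The offset and the
 * temporary registers below are assumptions, not the real call site:
 */
#if 0
	x86_64_mov_membase_reg(REG_ITMP2, 0, REG_ITMP2);         /* load vftbl pointer (offset 0 assumed) */
	x86_64_mov_membase32_reg(REG_ITMP2, 3 * 8, REG_ITMP1);   /* small offset, still a 4-byte disp     */
	x86_64_call_reg(REG_ITMP1);
#endif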
4424
4425
4426 void x86_64_mov_reg_membase(s8 reg, s8 basereg, s8 disp) {
4427         x86_64_emit_rex(1,(reg),0,(basereg));
4428         *(mcodeptr++) = 0x89;
4429         x86_64_emit_membase((basereg),(disp),(reg));
4430 }
4431
4432
4433 void x86_64_movl_reg_membase(s8 reg, s8 basereg, s8 disp) {
4434         x86_64_emit_rex(0,(reg),0,(basereg));
4435         *(mcodeptr++) = 0x89;
4436         x86_64_emit_membase((basereg),(disp),(reg));
4437 }
4438
4439
4440 void x86_64_mov_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4441         x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4442         *(mcodeptr++) = 0x8b;
4443         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4444 }
4445
4446
4447 void x86_64_movl_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4448         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4449         *(mcodeptr++) = 0x8b;
4450         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4451 }
4452
4453
4454 void x86_64_mov_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4455         x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4456         *(mcodeptr++) = 0x89;
4457         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4458 }
4459
4460
4461 void x86_64_movl_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4462         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4463         *(mcodeptr++) = 0x89;
4464         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4465 }
4466
4467
4468 void x86_64_movw_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4469         *(mcodeptr++) = 0x66;
4470         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4471         *(mcodeptr++) = 0x89;
4472         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4473 }
4474
4475
4476 void x86_64_movb_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4477         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4478         *(mcodeptr++) = 0x88;
4479         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4480 }
4481
4482
4483 void x86_64_mov_imm_membase(s8 imm, s8 basereg, s8 disp) {
4484         x86_64_emit_rex(1,0,0,(basereg));
4485         *(mcodeptr++) = 0xc7;
4486         x86_64_emit_membase((basereg),(disp),0);
4487         x86_64_emit_imm32((imm));
4488 }
4489
4490
4491 void x86_64_movl_imm_membase(s8 imm, s8 basereg, s8 disp) {
4492         x86_64_emit_rex(0,0,0,(basereg));
4493         *(mcodeptr++) = 0xc7;
4494         x86_64_emit_membase((basereg),(disp),0);
4495         x86_64_emit_imm32((imm));
4496 }
4497
4498
4499 void x86_64_movsbq_reg_reg(s8 reg, s8 dreg) {
4500         x86_64_emit_rex(1,(dreg),0,(reg));
4501         *(mcodeptr++) = 0x0f;
4502         *(mcodeptr++) = 0xbe;
4503         /* 0f be is a load-form opcode: ModRM.reg names the destination, so dreg is passed first here */
4504         x86_64_emit_reg((dreg),(reg));
4505 }
4506
4507
4508 void x86_64_movsbq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4509         x86_64_emit_rex(1,(dreg),0,(basereg));
4510         *(mcodeptr++) = 0x0f;
4511         *(mcodeptr++) = 0xbe;
4512         x86_64_emit_membase((basereg),(disp),(dreg));
4513 }
4514
4515
4516 void x86_64_movswq_reg_reg(s8 reg, s8 dreg) {
4517         x86_64_emit_rex(1,(dreg),0,(reg));
4518         *(mcodeptr++) = 0x0f;
4519         *(mcodeptr++) = 0xbf;
4520         /* 0f bf is a load-form opcode: ModRM.reg names the destination, so dreg is passed first here */
4521         x86_64_emit_reg((dreg),(reg));
4522 }
4523
4524
4525 void x86_64_movswq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4526         x86_64_emit_rex(1,(dreg),0,(basereg));
4527         *(mcodeptr++) = 0x0f;
4528         *(mcodeptr++) = 0xbf;
4529         x86_64_emit_membase((basereg),(disp),(dreg));
4530 }
4531
4532
4533 void x86_64_movslq_reg_reg(s8 reg, s8 dreg) {
4534         x86_64_emit_rex(1,(dreg),0,(reg));
4535         *(mcodeptr++) = 0x63;
4536         /* 63 (movsxd) is a load-form opcode: ModRM.reg names the destination, so dreg is passed first here */
4537         x86_64_emit_reg((dreg),(reg));
4538 }
4539
4540
4541 void x86_64_movslq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4542         x86_64_emit_rex(1,(dreg),0,(basereg));
4543         *(mcodeptr++) = 0x63;
4544         x86_64_emit_membase((basereg),(disp),(dreg));
4545 }
4546
4547
4548 void x86_64_movzwq_reg_reg(s8 reg, s8 dreg) {
4549         x86_64_emit_rex(1,(dreg),0,(reg));
4550         *(mcodeptr++) = 0x0f;
4551         *(mcodeptr++) = 0xb7;
4552         /* 0f b7 is a load-form opcode: ModRM.reg names the destination, so dreg is passed first here */
4553         x86_64_emit_reg((dreg),(reg));
4554 }
4555
4556
4557 void x86_64_movzwq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4558         x86_64_emit_rex(1,(dreg),0,(basereg));
4559         *(mcodeptr++) = 0x0f;
4560         *(mcodeptr++) = 0xb7;
4561         x86_64_emit_membase((basereg),(disp),(dreg));
4562 }
4563
4564
4565 void x86_64_movswq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4566         x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4567         *(mcodeptr++) = 0x0f;
4568         *(mcodeptr++) = 0xbf;
4569         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4570 }
4571
4572
4573 void x86_64_movsbq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4574         x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4575         *(mcodeptr++) = 0x0f;
4576         *(mcodeptr++) = 0xbe;
4577         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4578 }
4579
4580
4581 void x86_64_movzwq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4582         x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4583         *(mcodeptr++) = 0x0f;
4584         *(mcodeptr++) = 0xb7;
4585         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4586 }
4587
4588
4589
4590 /*
4591  * alu operations
4592  */
4593 void x86_64_alu_reg_reg(s8 opc, s8 reg, s8 dreg) {
4594         x86_64_emit_rex(1,(reg),0,(dreg));
4595         *(mcodeptr++) = (((opc)) << 3) + 1;
4596         x86_64_emit_reg((reg),(dreg));
4597 }
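/*
 * The "(opc << 3) + 1" scheme relies on the x86 ALU-group numbering
 * (ADD=0, OR=1, ADC=2, SBB=3, AND=4, SUB=5, XOR=6, CMP=7), which the
 * X86_64_* constants are assumed to follow.  Worked example
 * (rcx = 1, rdx = 2):
 *
 *   x86_64_alu_reg_reg(X86_64_CMP, RCX, RDX)
 *     -> 48        rex.w
 *        39        (7 << 3) + 1, "cmp r/m64, r64"
 *        ca        ModRM: mod=11, reg=rcx, r/m=rdx
 *
 * The "(opc << 3) + 3" membase_reg variants below select the opposite
 * operand direction (r64 <- r/m64), e.g. 0x3b for cmp.
 */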
4598
4599
4600 void x86_64_alul_reg_reg(s8 opc, s8 reg, s8 dreg) {
4601         x86_64_emit_rex(0,(reg),0,(dreg));
4602         *(mcodeptr++) = (((opc)) << 3) + 1;
4603         x86_64_emit_reg((reg),(dreg));
4604 }
4605
4606
4607 void x86_64_alu_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp) {
4608         x86_64_emit_rex(1,(reg),0,(basereg));
4609         *(mcodeptr++) = (((opc)) << 3) + 1;
4610         x86_64_emit_membase((basereg),(disp),(reg));
4611 }
4612
4613
4614 void x86_64_alul_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp) {
4615         x86_64_emit_rex(0,(reg),0,(basereg));
4616         *(mcodeptr++) = (((opc)) << 3) + 1;
4617         x86_64_emit_membase((basereg),(disp),(reg));
4618 }
4619
4620
4621 void x86_64_alu_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg) {
4622         x86_64_emit_rex(1,(reg),0,(basereg));
4623         *(mcodeptr++) = (((opc)) << 3) + 3;
4624         x86_64_emit_membase((basereg),(disp),(reg));
4625 }
4626
4627
4628 void x86_64_alul_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg) {
4629         x86_64_emit_rex(0,(reg),0,(basereg));
4630         *(mcodeptr++) = (((opc)) << 3) + 3;
4631         x86_64_emit_membase((basereg),(disp),(reg));
4632 }
4633
4634
4635 void x86_64_alu_imm_reg(s8 opc, s8 imm, s8 dreg) {
4636         if (x86_64_is_imm8(imm)) {
4637                 x86_64_emit_rex(1,0,0,(dreg));
4638                 *(mcodeptr++) = 0x83;
4639                 x86_64_emit_reg((opc),(dreg));
4640                 x86_64_emit_imm8((imm));
4641         } else {
4642                 x86_64_emit_rex(1,0,0,(dreg));
4643                 *(mcodeptr++) = 0x81;
4644                 x86_64_emit_reg((opc),(dreg));
4645                 x86_64_emit_imm32((imm));
4646         }
4647 }
4648
4649
4650 void x86_64_alul_imm_reg(s8 opc, s8 imm, s8 dreg) {
4651         if (x86_64_is_imm8(imm)) {
4652                 x86_64_emit_rex(0,0,0,(dreg));
4653                 *(mcodeptr++) = 0x83;
4654                 x86_64_emit_reg((opc),(dreg));
4655                 x86_64_emit_imm8((imm));
4656         } else {
4657                 x86_64_emit_rex(0,0,0,(dreg));
4658                 *(mcodeptr++) = 0x81;
4659                 x86_64_emit_reg((opc),(dreg));
4660                 x86_64_emit_imm32((imm));
4661         }
4662 }
4663
4664
4665 void x86_64_alu_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
4666         if (x86_64_is_imm8(imm)) {
4667                 x86_64_emit_rex(1,0,0,(basereg));    /* basereg belongs in the rex.b (r/m) slot */
4668                 *(mcodeptr++) = 0x83;
4669                 x86_64_emit_membase((basereg),(disp),(opc));
4670                 x86_64_emit_imm8((imm));
4671         } else {
4672                 x86_64_emit_rex(1,0,0,(basereg));
4673                 *(mcodeptr++) = 0x81;
4674                 x86_64_emit_membase((basereg),(disp),(opc));
4675                 x86_64_emit_imm32((imm));
4676         }
4677 }
4678
4679
4680 void x86_64_alul_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
4681         if (x86_64_is_imm8(imm)) {
4682                 x86_64_emit_rex(0,0,0,(basereg));    /* basereg belongs in the rex.b (r/m) slot */
4683                 *(mcodeptr++) = 0x83;
4684                 x86_64_emit_membase((basereg),(disp),(opc));
4685                 x86_64_emit_imm8((imm));
4686         } else {
4687                 x86_64_emit_rex(0,0,0,(basereg));
4688                 *(mcodeptr++) = 0x81;
4689                 x86_64_emit_membase((basereg),(disp),(opc));
4690                 x86_64_emit_imm32((imm));
4691         }
4692 }
4693
4694
4695 void x86_64_test_reg_reg(s8 reg, s8 dreg) {
4696         x86_64_emit_rex(1,(reg),0,(dreg));
4697         *(mcodeptr++) = 0x85;
4698         x86_64_emit_reg((reg),(dreg));
4699 }
4700
4701
4702 void x86_64_testl_reg_reg(s8 reg, s8 dreg) {
4703         x86_64_emit_rex(0,(reg),0,(dreg));
4704         *(mcodeptr++) = 0x85;
4705         x86_64_emit_reg((reg),(dreg));
4706 }
4707
4708
4709 void x86_64_test_imm_reg(s8 imm, s8 reg) {
4710         *(mcodeptr++) = 0xf7;
4711         x86_64_emit_reg(0,(reg));
4712         x86_64_emit_imm32((imm));
4713 }
4714
4715
4716 void x86_64_testw_imm_reg(s8 imm, s8 reg) {
4717         *(mcodeptr++) = 0x66;
4718         *(mcodeptr++) = 0xf7;
4719         x86_64_emit_reg(0,(reg));
4720         x86_64_emit_imm16((imm));
4721 }
4722
4723
4724 void x86_64_testb_imm_reg(s8 imm, s8 reg) {
4725         *(mcodeptr++) = 0xf6;
4726         x86_64_emit_reg(0,(reg));
4727         x86_64_emit_imm8((imm));
4728 }
4729
4730
4731 void x86_64_lea_membase_reg(s8 basereg, s8 disp, s8 reg) {
4732         x86_64_emit_rex(1,(reg),0,(basereg));
4733         *(mcodeptr++) = 0x8d;
4734         x86_64_emit_membase((basereg),(disp),(reg));
4735 }
4736
4737
4738 void x86_64_leal_membase_reg(s8 basereg, s8 disp, s8 reg) {
4739         x86_64_emit_rex(0,(reg),0,(basereg));
4740         *(mcodeptr++) = 0x8d;
4741         x86_64_emit_membase((basereg),(disp),(reg));
4742 }
4743
4744
4745
4746 /*
4747  * inc, dec operations
4748  */
4749 void x86_64_inc_reg(s8 reg) {
4750         x86_64_emit_rex(1,0,0,(reg));
4751         *(mcodeptr++) = 0xff;
4752         x86_64_emit_reg(0,(reg));
4753 }
4754
4755
4756 void x86_64_incl_reg(s8 reg) {
4757         x86_64_emit_rex(0,0,0,(reg));
4758         *(mcodeptr++) = 0xff;
4759         x86_64_emit_reg(0,(reg));
4760 }
4761
4762
4763 void x86_64_inc_membase(s8 basereg, s8 disp) {
4764         x86_64_emit_rex(1,0,0,(basereg));    /* basereg belongs in the rex.b (r/m) slot */
4765         *(mcodeptr++) = 0xff;
4766         x86_64_emit_membase((basereg),(disp),0);
4767 }
4768
4769
4770 void x86_64_incl_membase(s8 basereg, s8 disp) {
4771         x86_64_emit_rex(0,0,0,(basereg));
4772         *(mcodeptr++) = 0xff;
4773         x86_64_emit_membase((basereg),(disp),0);
4774 }
4775
4776
4777 void x86_64_dec_reg(s8 reg) {
4778         x86_64_emit_rex(1,0,0,(reg));
4779         *(mcodeptr++) = 0xff;
4780         x86_64_emit_reg(1,(reg));
4781 }
4782
4783         
4784 void x86_64_decl_reg(s8 reg) {
4785         x86_64_emit_rex(0,0,0,(reg));
4786         *(mcodeptr++) = 0xff;
4787         x86_64_emit_reg(1,(reg));
4788 }
4789
4790         
4791 void x86_64_dec_membase(s8 basereg, s8 disp) {
4792         x86_64_emit_rex(1,0,0,(basereg));
4793         *(mcodeptr++) = 0xff;
4794         x86_64_emit_membase((basereg),(disp),1);
4795 }
4796
4797
4798 void x86_64_decl_membase(s8 basereg, s8 disp) {
4799         x86_64_emit_rex(0,0,0,(basereg));
4800         *(mcodeptr++) = 0xff;
4801         x86_64_emit_membase((basereg),(disp),1);
4802 }
4803
4804
4805
4806
4807 void x86_64_cltd() {
4808     *(mcodeptr++) = 0x99;
4809 }
4810
4811
4812 void x86_64_cqto() {
4813         x86_64_emit_rex(1,0,0,0);
4814         *(mcodeptr++) = 0x99;
4815 }
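/*
 * cltd/cqto sign-extend eax into edx (rax into rdx) and are used right
 * before the idiv instructions further below.  A hedged sketch of the
 * usual division sequence (the register choice is an assumption; the real
 * IDIV/IREM ICMD cases appear earlier in this file):
 */
#if 0
	x86_64_cqto();           /* rdx:rax <- sign-extended rax       */
	x86_64_idiv_reg(RCX);    /* rax <- quotient, rdx <- remainder  */
#endif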
4816
4817
4818
4819 void x86_64_imul_reg_reg(s8 reg, s8 dreg) {
4820         x86_64_emit_rex(1,(dreg),0,(reg));
4821         *(mcodeptr++) = 0x0f;
4822         *(mcodeptr++) = 0xaf;
4823         x86_64_emit_reg((dreg),(reg));
4824 }
4825
4826
4827 void x86_64_imull_reg_reg(s8 reg, s8 dreg) {
4828         x86_64_emit_rex(0,(dreg),0,(reg));
4829         *(mcodeptr++) = 0x0f;
4830         *(mcodeptr++) = 0xaf;
4831         x86_64_emit_reg((dreg),(reg));
4832 }
4833
4834
4835 void x86_64_imul_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4836         x86_64_emit_rex(1,(dreg),0,(basereg));
4837         *(mcodeptr++) = 0x0f;
4838         *(mcodeptr++) = 0xaf;
4839         x86_64_emit_membase((basereg),(disp),(dreg));
4840 }
4841
4842
4843 void x86_64_imull_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4844         x86_64_emit_rex(0,(dreg),0,(basereg));
4845         *(mcodeptr++) = 0x0f;
4846         *(mcodeptr++) = 0xaf;
4847         x86_64_emit_membase((basereg),(disp),(dreg));
4848 }
4849
4850
4851 void x86_64_imul_imm_reg(s8 imm, s8 dreg) {
4852         if (x86_64_is_imm8((imm))) {
4853                 x86_64_emit_rex(1,(dreg),0,(dreg));
4854                 *(mcodeptr++) = 0x6b;
4855                 x86_64_emit_reg((dreg),(dreg));    /* 6b/69 put the destination in ModRM.reg, so dreg fills both fields */
4856                 x86_64_emit_imm8((imm));
4857         } else {
4858                 x86_64_emit_rex(1,(dreg),0,(dreg));
4859                 *(mcodeptr++) = 0x69;
4860                 x86_64_emit_reg((dreg),(dreg));
4861                 x86_64_emit_imm32((imm));
4862         }
4863 }
4864
4865
4866 void x86_64_imul_imm_reg_reg(s8 imm, s8 reg, s8 dreg) {
4867         if (x86_64_is_imm8((imm))) {
4868                 x86_64_emit_rex(1,(dreg),0,(reg));
4869                 *(mcodeptr++) = 0x6b;
4870                 x86_64_emit_reg((dreg),(reg));
4871                 x86_64_emit_imm8((imm));
4872         } else {
4873                 x86_64_emit_rex(1,(dreg),0,(reg));
4874                 *(mcodeptr++) = 0x69;
4875                 x86_64_emit_reg((dreg),(reg));
4876                 x86_64_emit_imm32((imm));
4877         }
4878 }
4879
4880
4881 void x86_64_imull_imm_reg_reg(s8 imm, s8 reg, s8 dreg) {
4882         if (x86_64_is_imm8((imm))) {
4883                 x86_64_emit_rex(0,(dreg),0,(reg));
4884                 *(mcodeptr++) = 0x6b;
4885                 x86_64_emit_reg((dreg),(reg));
4886                 x86_64_emit_imm8((imm));
4887         } else {
4888                 x86_64_emit_rex(0,(dreg),0,(reg));
4889                 *(mcodeptr++) = 0x69;
4890                 x86_64_emit_reg((dreg),(reg));
4891                 x86_64_emit_imm32((imm));
4892         }
4893 }
4894
4895
4896 void x86_64_imul_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg) {
4897         if (x86_64_is_imm8((imm))) {
4898                 x86_64_emit_rex(1,(dreg),0,(basereg));
4899                 *(mcodeptr++) = 0x6b;
4900                 x86_64_emit_membase((basereg),(disp),(dreg));
4901                 x86_64_emit_imm8((imm));
4902         } else {
4903                 x86_64_emit_rex(1,(dreg),0,(basereg));
4904                 *(mcodeptr++) = 0x69;
4905                 x86_64_emit_membase((basereg),(disp),(dreg));
4906                 x86_64_emit_imm32((imm));
4907         }
4908 }
4909
4910
4911 void x86_64_imull_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg) {
4912         if (x86_64_is_imm8((imm))) {
4913                 x86_64_emit_rex(0,(dreg),0,(basereg));
4914                 *(mcodeptr++) = 0x6b;
4915                 x86_64_emit_membase((basereg),(disp),(dreg));
4916                 x86_64_emit_imm8((imm));
4917         } else {
4918                 x86_64_emit_rex(0,(dreg),0,(basereg));
4919                 *(mcodeptr++) = 0x69;
4920                 x86_64_emit_membase((basereg),(disp),(dreg));
4921                 x86_64_emit_imm32((imm));
4922         }
4923 }
4924
4925
4926 void x86_64_idiv_reg(s8 reg) {
4927         x86_64_emit_rex(1,0,0,(reg));
4928         *(mcodeptr++) = 0xf7;
4929         x86_64_emit_reg(7,(reg));
4930 }
4931
4932
4933 void x86_64_idivl_reg(s8 reg) {
4934         x86_64_emit_rex(0,0,0,(reg));
4935         *(mcodeptr++) = 0xf7;
4936         x86_64_emit_reg(7,(reg));
4937 }
4938
4939
4940
4941 void x86_64_ret() {
4942     *(mcodeptr++) = 0xc3;
4943 }
4944
4945
4946
4947 /*
4948  * shift ops
4949  */
4950 void x86_64_shift_reg(s8 opc, s8 reg) {
4951         x86_64_emit_rex(1,0,0,(reg));
4952         *(mcodeptr++) = 0xd3;
4953         x86_64_emit_reg((opc),(reg));
4954 }
4955
4956
4957 void x86_64_shiftl_reg(s8 opc, s8 reg) {
4958         x86_64_emit_rex(0,0,0,(reg));
4959         *(mcodeptr++) = 0xd3;
4960         x86_64_emit_reg((opc),(reg));
4961 }
4962
4963
4964 void x86_64_shift_membase(s8 opc, s8 basereg, s8 disp) {
4965         x86_64_emit_rex(1,0,0,(basereg));
4966         *(mcodeptr++) = 0xd3;
4967         x86_64_emit_membase((basereg),(disp),(opc));
4968 }
4969
4970
4971 void x86_64_shiftl_membase(s8 opc, s8 basereg, s8 disp) {
4972         x86_64_emit_rex(0,0,0,(basereg));
4973         *(mcodeptr++) = 0xd3;
4974         x86_64_emit_membase((basereg),(disp),(opc));
4975 }
4976
4977
4978 void x86_64_shift_imm_reg(s8 opc, s8 imm, s8 dreg) {
4979         if ((imm) == 1) {
4980                 x86_64_emit_rex(1,0,0,(dreg));
4981                 *(mcodeptr++) = 0xd1;
4982                 x86_64_emit_reg((opc),(dreg));
4983         } else {
4984                 x86_64_emit_rex(1,0,0,(dreg));
4985                 *(mcodeptr++) = 0xc1;
4986                 x86_64_emit_reg((opc),(dreg));
4987                 x86_64_emit_imm8((imm));
4988         }
4989 }
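/*
 * For the shift group, opc is the /digit field of the d3/d1/c1 opcodes;
 * the usual group-2 numbering (SHL=4, SHR=5, SAR=7) is assumed for the
 * X86_64_* shift constants.  Worked example (rsi = 6):
 *
 *   x86_64_shift_imm_reg(X86_64_SHL, 3, RSI)
 *     -> 48 c1 e6 03        "shl rsi, 3"
 *
 * (imm != 1 selects 0xc1; ModRM: mod=11, reg=/4, r/m=rsi.)
 */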
4990
4991
4992 void x86_64_shiftl_imm_reg(s8 opc, s8 imm, s8 dreg) {
4993         if ((imm) == 1) {
4994                 x86_64_emit_rex(0,0,0,(dreg));
4995                 *(mcodeptr++) = 0xd1;
4996                 x86_64_emit_reg((opc),(dreg));
4997         } else {
4998                 x86_64_emit_rex(0,0,0,(dreg));
4999                 *(mcodeptr++) = 0xc1;
5000                 x86_64_emit_reg((opc),(dreg));
5001                 x86_64_emit_imm8((imm));
5002         }
5003 }
5004
5005
5006 void x86_64_shift_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
5007         if ((imm) == 1) {
5008                 x86_64_emit_rex(1,0,0,(basereg));
5009                 *(mcodeptr++) = 0xd1;
5010                 x86_64_emit_membase((basereg),(disp),(opc));
5011         } else {
5012                 x86_64_emit_rex(1,0,0,(basereg));
5013                 *(mcodeptr++) = 0xc1;
5014                 x86_64_emit_membase((basereg),(disp),(opc));
5015                 x86_64_emit_imm8((imm));
5016         }
5017 }
5018
5019
5020 void x86_64_shiftl_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
5021         if ((imm) == 1) {
5022                 x86_64_emit_rex(0,0,0,(basereg));
5023                 *(mcodeptr++) = 0xd1;
5024                 x86_64_emit_membase((basereg),(disp),(opc));
5025         } else {
5026                 x86_64_emit_rex(0,0,0,(basereg));
5027                 *(mcodeptr++) = 0xc1;
5028                 x86_64_emit_membase((basereg),(disp),(opc));
5029                 x86_64_emit_imm8((imm));
5030         }
5031 }
5032
5033
5034
5035 /*
5036  * jump operations
5037  */
5038 void x86_64_jmp_imm(s8 imm) {
5039         *(mcodeptr++) = 0xe9;
5040         x86_64_emit_imm32((imm));
5041 }
5042
5043
5044 void x86_64_jmp_reg(s8 reg) {
5045         x86_64_emit_rex(0,0,0,(reg));
5046         *(mcodeptr++) = 0xff;
5047         x86_64_emit_reg(4,(reg));
5048 }
5049
5050
5051 void x86_64_jcc(s8 opc, s8 imm) {
5052         *(mcodeptr++) = 0x0f;
5053         *(mcodeptr++) = (0x80 + x86_64_cc_map[(opc)]);
5054         x86_64_emit_imm32((imm));
5055 }
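/*
 * x86_64_jcc emits "0f 8x" plus a rel32.  The translation through
 * x86_64_cc_map is assumed to yield the standard Jcc nibbles (e.g.
 * x86_64_cc_map[X86_64_CC_E] == 0x4, giving 0f 84, "je rel32").  As in
 * the emit_if helpers above, callers emit a zero placeholder and register
 * the branch for later patching:
 */
#if 0
	x86_64_jcc(X86_64_CC_E, 0);
	mcode_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
#endif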
5056
5057
5058
5059 /*
5060  * conditional set and move operations
5061  */
5062
5063 /* always emit a rex byte (even an empty 0x40): registers 4-7 then select spl/bpl/sil/dil instead of ah/ch/dh/bh, and rex.b reaches r8b-r15b */
5064 void x86_64_setcc_reg(s8 opc, s8 reg) {
5065         *(mcodeptr++) = (0x40 | (((reg) >> 3) & 0x01));
5066         *(mcodeptr++) = 0x0f;
5067         *(mcodeptr++) = (0x90 + x86_64_cc_map[(opc)]);
5068         x86_64_emit_reg(0,(reg));
5069 }
5070
5071
5072 /* hand-coded rex prefix: rex.b is needed when basereg is one of r8-r15 */
5073 void x86_64_setcc_membase(s8 opc, s8 basereg, s8 disp) {
5074         *(mcodeptr++) = (0x40 | (((basereg) >> 3) & 0x01));
5075         *(mcodeptr++) = 0x0f;
5076         *(mcodeptr++) = (0x90 + x86_64_cc_map[(opc)]);
5077         x86_64_emit_membase((basereg),(disp),0);
5078 }
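/*
 * Concrete illustration of the rex note above (assuming
 * x86_64_cc_map[X86_64_CC_E] == 0x4 and RSI == 6):
 *
 *   x86_64_setcc_reg(X86_64_CC_E, RSI)
 *     -> 40 0f 94 c6        "sete sil"
 *
 * Without the leading 0x40 the same ModRM byte would address dh instead
 * of sil.
 */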
5079
5080
5081 void x86_64_cmovcc_reg_reg(s8 opc, s8 reg, s8 dreg) {
5082         x86_64_emit_rex(1,(dreg),0,(reg));
5083         *(mcodeptr++) = 0x0f;
5084         *(mcodeptr++) = (0x40 + x86_64_cc_map[(opc)]);
5085         x86_64_emit_reg((dreg),(reg));
5086 }
5087
5088
5089 void x86_64_cmovccl_reg_reg(s8 opc, s8 reg, s8 dreg) {
5090         x86_64_emit_rex(0,(dreg),0,(reg));
5091         *(mcodeptr++) = 0x0f;
5092         *(mcodeptr++) = (0x40 + x86_64_cc_map[(opc)]);
5093         x86_64_emit_reg((dreg),(reg));
5094 }
5095
5096
5097
5098 void x86_64_neg_reg(s8 reg) {
5099         x86_64_emit_rex(1,0,0,(reg));
5100         *(mcodeptr++) = 0xf7;
5101         x86_64_emit_reg(3,(reg));
5102 }
5103
5104
5105 void x86_64_negl_reg(s8 reg) {
5106         x86_64_emit_rex(0,0,0,(reg));
5107         *(mcodeptr++) = 0xf7;
5108         x86_64_emit_reg(3,(reg));
5109 }
5110
5111
5112 void x86_64_neg_membase(s8 basereg, s8 disp) {
5113         x86_64_emit_rex(1,0,0,(basereg));
5114         *(mcodeptr++) = 0xf7;
5115         x86_64_emit_membase((basereg),(disp),3);
5116 }
5117
5118
5119 void x86_64_negl_membase(s8 basereg, s8 disp) {
5120         x86_64_emit_rex(0,0,0,(basereg));
5121         *(mcodeptr++) = 0xf7;
5122         x86_64_emit_membase((basereg),(disp),3);
5123 }
5124
5125
5126
5127 void x86_64_push_imm(s8 imm) {
5128         *(mcodeptr++) = 0x68;
5129         x86_64_emit_imm32((imm));
5130 }
5131
5132
5133 void x86_64_pop_reg(s8 reg) {
5134         x86_64_emit_rex(0,0,0,(reg));
5135         *(mcodeptr++) = 0x58 + (0x07 & (reg));
5136 }
5137
5138
5139 void x86_64_xchg_reg_reg(s8 reg, s8 dreg) {
5140         x86_64_emit_rex(1,(reg),0,(dreg));
5141         *(mcodeptr++) = 0x87;
5142         x86_64_emit_reg((reg),(dreg));
5143 }
5144
5145
5146 void x86_64_nop() {
5147     *(mcodeptr++) = 0x90;
5148 }
5149
5150
5151
5152 /*
5153  * call instructions
5154  */
5155 void x86_64_call_reg(s8 reg) {
5156         x86_64_emit_rex(1,0,0,(reg));
5157         *(mcodeptr++) = 0xff;
5158         x86_64_emit_reg(2,(reg));
5159 }
5160
5161
5162 void x86_64_call_imm(s8 imm) {
5163         *(mcodeptr++) = 0xe8;
5164         x86_64_emit_imm32((imm));
5165 }
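/*
 * 0xe8 takes a rel32, i.e. the immediate is relative to the address right
 * after the 5-byte call instruction.  A hedged sketch of a direct call
 * (the pointer arithmetic and the "target" variable are assumptions, not
 * copied from the real call sites earlier in this file):
 */
#if 0
	x86_64_call_imm((u1 *) target - (mcodeptr + 5));    /* 1 opcode byte + 4 displacement bytes */
#endif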
5166
5167
5168
5169 /*
5170  * floating point instructions (SSE2)
5171  */
5172 void x86_64_addsd_reg_reg(s8 reg, s8 dreg) {
5173         *(mcodeptr++) = 0xf2;
5174         x86_64_emit_rex(0,(dreg),0,(reg));
5175         *(mcodeptr++) = 0x0f;
5176         *(mcodeptr++) = 0x58;
5177         x86_64_emit_reg((dreg),(reg));
5178 }
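/*
 * Note that the mandatory prefix (f2/f3/66) is emitted before the rex
 * prefix, as the SSE2 encodings require.  Worked example (assuming
 * x86_64_emit_rex emits nothing when no rex bits are needed, and that
 * xmm0/xmm1 are register numbers 0/1):
 *
 *   x86_64_addsd_reg_reg(1, 0)
 *     -> f2 0f 58 c1        "addsd xmm0, xmm1"
 */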
5179
5180
5181 void x86_64_addss_reg_reg(s8 reg, s8 dreg) {
5182         *(mcodeptr++) = 0xf3;
5183         x86_64_emit_rex(0,(dreg),0,(reg));
5184         *(mcodeptr++) = 0x0f;
5185         *(mcodeptr++) = 0x58;
5186         x86_64_emit_reg((dreg),(reg));
5187 }
5188
5189
5190 void x86_64_cvtsi2ssq_reg_reg(s8 reg, s8 dreg) {
5191         *(mcodeptr++) = 0xf3;
5192         x86_64_emit_rex(1,(dreg),0,(reg));
5193         *(mcodeptr++) = 0x0f;
5194         *(mcodeptr++) = 0x2a;
5195         x86_64_emit_reg((dreg),(reg));
5196 }
5197
5198
5199 void x86_64_cvtsi2ss_reg_reg(s8 reg, s8 dreg) {
5200         *(mcodeptr++) = 0xf3;
5201         x86_64_emit_rex(0,(dreg),0,(reg));
5202         *(mcodeptr++) = 0x0f;
5203         *(mcodeptr++) = 0x2a;
5204         x86_64_emit_reg((dreg),(reg));
5205 }
5206
5207
5208 void x86_64_cvtsi2sdq_reg_reg(s8 reg, s8 dreg) {
5209         *(mcodeptr++) = 0xf2;
5210         x86_64_emit_rex(1,(dreg),0,(reg));
5211         *(mcodeptr++) = 0x0f;
5212         *(mcodeptr++) = 0x2a;
5213         x86_64_emit_reg((dreg),(reg));
5214 }
5215
5216
5217 void x86_64_cvtsi2sd_reg_reg(s8 reg, s8 dreg) {
5218         *(mcodeptr++) = 0xf2;
5219         x86_64_emit_rex(0,(dreg),0,(reg));
5220         *(mcodeptr++) = 0x0f;
5221         *(mcodeptr++) = 0x2a;
5222         x86_64_emit_reg((dreg),(reg));
5223 }
5224
5225
5226 void x86_64_cvtss2sd_reg_reg(s8 reg, s8 dreg) {
5227         *(mcodeptr++) = 0xf3;
5228         x86_64_emit_rex(0,(dreg),0,(reg));
5229         *(mcodeptr++) = 0x0f;
5230         *(mcodeptr++) = 0x5a;
5231         x86_64_emit_reg((dreg),(reg));
5232 }
5233
5234
5235 void x86_64_cvtsd2ss_reg_reg(s8 reg, s8 dreg) {
5236         *(mcodeptr++) = 0xf2;
5237         x86_64_emit_rex(0,(dreg),0,(reg));
5238         *(mcodeptr++) = 0x0f;
5239         *(mcodeptr++) = 0x5a;
5240         x86_64_emit_reg((dreg),(reg));
5241 }
5242
5243
5244 void x86_64_cvttss2siq_reg_reg(s8 reg, s8 dreg) {
5245         *(mcodeptr++) = 0xf3;
5246         x86_64_emit_rex(1,(dreg),0,(reg));
5247         *(mcodeptr++) = 0x0f;
5248         *(mcodeptr++) = 0x2c;
5249         x86_64_emit_reg((dreg),(reg));
5250 }
5251
5252
5253 void x86_64_cvttss2si_reg_reg(s8 reg, s8 dreg) {
5254         *(mcodeptr++) = 0xf3;
5255         x86_64_emit_rex(0,(dreg),0,(reg));
5256         *(mcodeptr++) = 0x0f;
5257         *(mcodeptr++) = 0x2c;
5258         x86_64_emit_reg((dreg),(reg));
5259 }
5260
5261
5262 void x86_64_cvttsd2siq_reg_reg(s8 reg, s8 dreg) {
5263         *(mcodeptr++) = 0xf2;
5264         x86_64_emit_rex(1,(dreg),0,(reg));
5265         *(mcodeptr++) = 0x0f;
5266         *(mcodeptr++) = 0x2c;
5267         x86_64_emit_reg((dreg),(reg));
5268 }
5269
5270
5271 void x86_64_cvttsd2si_reg_reg(s8 reg, s8 dreg) {
5272         *(mcodeptr++) = 0xf2;
5273         x86_64_emit_rex(0,(dreg),0,(reg));
5274         *(mcodeptr++) = 0x0f;
5275         *(mcodeptr++) = 0x2c;
5276         x86_64_emit_reg((dreg),(reg));
5277 }
5278
5279
5280 void x86_64_divss_reg_reg(s8 reg, s8 dreg) {
5281         *(mcodeptr++) = 0xf3;
5282         x86_64_emit_rex(0,(dreg),0,(reg));
5283         *(mcodeptr++) = 0x0f;
5284         *(mcodeptr++) = 0x5e;
5285         x86_64_emit_reg((dreg),(reg));
5286 }
5287
5288
5289 void x86_64_divsd_reg_reg(s8 reg, s8 dreg) {
5290         *(mcodeptr++) = 0xf2;
5291         x86_64_emit_rex(0,(dreg),0,(reg));
5292         *(mcodeptr++) = 0x0f;
5293         *(mcodeptr++) = 0x5e;
5294         x86_64_emit_reg((dreg),(reg));
5295 }
5296
5297
5298 void x86_64_movd_reg_freg(s8 reg, s8 freg) {
5299         *(mcodeptr++) = 0x66;
5300         x86_64_emit_rex(1,(freg),0,(reg));
5301         *(mcodeptr++) = 0x0f;
5302         *(mcodeptr++) = 0x6e;
5303         x86_64_emit_reg((freg),(reg));
5304 }
5305
5306
5307 void x86_64_movd_freg_reg(s8 freg, s8 reg) {
5308         *(mcodeptr++) = 0x66;
5309         x86_64_emit_rex(1,(freg),0,(reg));
5310         *(mcodeptr++) = 0x0f;
5311         *(mcodeptr++) = 0x7e;
5312         x86_64_emit_reg((freg),(reg));
5313 }
5314
5315
5316 void x86_64_movd_reg_membase(s8 reg, s8 basereg, s8 disp) {
5317         *(mcodeptr++) = 0x66;
5318         x86_64_emit_rex(0,(reg),0,(basereg));
5319         *(mcodeptr++) = 0x0f;
5320         *(mcodeptr++) = 0x7e;
5321         x86_64_emit_membase((basereg),(disp),(reg));
5322 }
5323
5324
5325 void x86_64_movd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
5326         *(mcodeptr++) = 0x66;
5327         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
5328         *(mcodeptr++) = 0x0f;
5329         *(mcodeptr++) = 0x7e;
5330         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
5331 }
5332
5333
5334 void x86_64_movd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5335         *(mcodeptr++) = 0x66;
5336         x86_64_emit_rex(1,(dreg),0,(basereg));
5337         *(mcodeptr++) = 0x0f;
5338         *(mcodeptr++) = 0x6e;
5339         x86_64_emit_membase((basereg),(disp),(dreg));
5340 }
5341
5342
5343 void x86_64_movdl_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5344         *(mcodeptr++) = 0x66;
5345         x86_64_emit_rex(0,(dreg),0,(basereg));
5346         *(mcodeptr++) = 0x0f;
5347         *(mcodeptr++) = 0x6e;
5348         x86_64_emit_membase((basereg),(disp),(dreg));
5349 }
5350
5351
5352 void x86_64_movd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
5353         *(mcodeptr++) = 0x66;
5354         x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
5355         *(mcodeptr++) = 0x0f;
5356         *(mcodeptr++) = 0x6e;
5357         x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
5358 }
5359
5360
5361 void x86_64_movq_reg_reg(s8 reg, s8 dreg) {
5362         *(mcodeptr++) = 0xf3;
5363         x86_64_emit_rex(0,(dreg),0,(reg));
5364         *(mcodeptr++) = 0x0f;
5365         *(mcodeptr++) = 0x7e;
5366         x86_64_emit_reg((dreg),(reg));
5367 }
5368
5369
5370 void x86_64_movq_reg_membase(s8 reg, s8 basereg, s8 disp) {
5371         *(mcodeptr++) = 0x66;
5372         x86_64_emit_rex(0,(reg),0,(basereg));
5373         *(mcodeptr++) = 0x0f;
5374         *(mcodeptr++) = 0xd6;
5375         x86_64_emit_membase((basereg),(disp),(reg));
5376 }
5377
5378
5379 void x86_64_movq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5380         *(mcodeptr++) = 0xf3;
5381         x86_64_emit_rex(0,(dreg),0,(basereg));
5382         *(mcodeptr++) = 0x0f;
5383         *(mcodeptr++) = 0x7e;
5384         x86_64_emit_membase((basereg),(disp),(dreg));
5385 }
5386
5387
5388 void x86_64_movss_reg_reg(s8 reg, s8 dreg) {
5389         *(mcodeptr++) = 0xf3;
5390         x86_64_emit_rex(0,(reg),0,(dreg));
5391         *(mcodeptr++) = 0x0f;
5392         *(mcodeptr++) = 0x10;
5393         x86_64_emit_reg((reg),(dreg));
5394 }
5395
5396
5397 void x86_64_movsd_reg_reg(s8 reg, s8 dreg) {
5398         *(mcodeptr++) = 0xf2;
5399         x86_64_emit_rex(0,(reg),0,(dreg));
5400         *(mcodeptr++) = 0x0f;
5401         *(mcodeptr++) = 0x10;
5402         x86_64_emit_reg((reg),(dreg));
5403 }
5404
5405
5406 void x86_64_movss_reg_membase(s8 reg, s8 basereg, s8 disp) {
5407         *(mcodeptr++) = 0xf3;
5408         x86_64_emit_rex(0,(reg),0,(basereg));
5409         *(mcodeptr++) = 0x0f;
5410         *(mcodeptr++) = 0x11;
5411         x86_64_emit_membase((basereg),(disp),(reg));
5412 }
5413
5414
5415 void x86_64_movsd_reg_membase(s8 reg, s8 basereg, s8 disp) {
5416         *(mcodeptr++) = 0xf2;
5417         x86_64_emit_rex(0,(reg),0,(basereg));
5418         *(mcodeptr++) = 0x0f;
5419         *(mcodeptr++) = 0x11;
5420         x86_64_emit_membase((basereg),(disp),(reg));
5421 }
5422
5423
5424 void x86_64_movss_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5425         *(mcodeptr++) = 0xf3;
5426         x86_64_emit_rex(0,(dreg),0,(basereg));
5427         *(mcodeptr++) = 0x0f;
5428         *(mcodeptr++) = 0x10;
5429         x86_64_emit_membase((basereg),(disp),(dreg));
5430 }
5431
5432
5433 void x86_64_movlps_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5434         x86_64_emit_rex(0,(dreg),0,(basereg));
5435         *(mcodeptr++) = 0x0f;
5436         *(mcodeptr++) = 0x12;
5437         x86_64_emit_membase((basereg),(disp),(dreg));
5438 }
5439
5440
5441 void x86_64_movsd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5442         *(mcodeptr++) = 0xf2;
5443         x86_64_emit_rex(0,(dreg),0,(basereg));
5444         *(mcodeptr++) = 0x0f;
5445         *(mcodeptr++) = 0x10;
5446         x86_64_emit_membase((basereg),(disp),(dreg));
5447 }
5448
5449
5450 void x86_64_movlpd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5451         *(mcodeptr++) = 0x66;
5452         x86_64_emit_rex(0,(dreg),0,(basereg));
5453         *(mcodeptr++) = 0x0f;
5454         *(mcodeptr++) = 0x12;
5455         x86_64_emit_membase((basereg),(disp),(dreg));
5456 }
5457
5458
5459 void x86_64_movss_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
5460         *(mcodeptr++) = 0xf3;
5461         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
5462         *(mcodeptr++) = 0x0f;
5463         *(mcodeptr++) = 0x11;
5464         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
5465 }
5466
5467
5468 void x86_64_movsd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
5469         *(mcodeptr++) = 0xf2;
5470         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
5471         *(mcodeptr++) = 0x0f;
5472         *(mcodeptr++) = 0x11;
5473         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
5474 }
5475
5476
5477 void x86_64_movss_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
5478         *(mcodeptr++) = 0xf3;
5479         x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
5480         *(mcodeptr++) = 0x0f;
5481         *(mcodeptr++) = 0x10;
5482         x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
5483 }
5484
5485
5486 void x86_64_movsd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
5487         *(mcodeptr++) = 0xf2;
5488         x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
5489         *(mcodeptr++) = 0x0f;
5490         *(mcodeptr++) = 0x10;
5491         x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
5492 }
5493
5494
5495 void x86_64_mulss_reg_reg(s8 reg, s8 dreg) {
5496         *(mcodeptr++) = 0xf3;
5497         x86_64_emit_rex(0,(dreg),0,(reg));
5498         *(mcodeptr++) = 0x0f;
5499         *(mcodeptr++) = 0x59;
5500         x86_64_emit_reg((dreg),(reg));
5501 }
5502
5503
5504 void x86_64_mulsd_reg_reg(s8 reg, s8 dreg) {
5505         *(mcodeptr++) = 0xf2;
5506         x86_64_emit_rex(0,(dreg),0,(reg));
5507         *(mcodeptr++) = 0x0f;
5508         *(mcodeptr++) = 0x59;
5509         x86_64_emit_reg((dreg),(reg));
5510 }
5511
5512
5513 void x86_64_subss_reg_reg(s8 reg, s8 dreg) {
5514         *(mcodeptr++) = 0xf3;
5515         x86_64_emit_rex(0,(dreg),0,(reg));
5516         *(mcodeptr++) = 0x0f;
5517         *(mcodeptr++) = 0x5c;
5518         x86_64_emit_reg((dreg),(reg));
5519 }
5520
5521
5522 void x86_64_subsd_reg_reg(s8 reg, s8 dreg) {
5523         *(mcodeptr++) = 0xf2;
5524         x86_64_emit_rex(0,(dreg),0,(reg));
5525         *(mcodeptr++) = 0x0f;
5526         *(mcodeptr++) = 0x5c;
5527         x86_64_emit_reg((dreg),(reg));
5528 }
5529
5530
5531 void x86_64_ucomiss_reg_reg(s8 reg, s8 dreg) {
5532         x86_64_emit_rex(0,(dreg),0,(reg));
5533         *(mcodeptr++) = 0x0f;
5534         *(mcodeptr++) = 0x2e;
5535         x86_64_emit_reg((dreg),(reg));
5536 }
5537
5538
5539 void x86_64_ucomisd_reg_reg(s8 reg, s8 dreg) {
5540         *(mcodeptr++) = 0x66;
5541         x86_64_emit_rex(0,(dreg),0,(reg));
5542         *(mcodeptr++) = 0x0f;
5543         *(mcodeptr++) = 0x2e;
5544         x86_64_emit_reg((dreg),(reg));
5545 }
5546
5547
5548 void x86_64_xorps_reg_reg(s8 reg, s8 dreg) {
5549         x86_64_emit_rex(0,(dreg),0,(reg));
5550         *(mcodeptr++) = 0x0f;
5551         *(mcodeptr++) = 0x57;
5552         x86_64_emit_reg((dreg),(reg));
5553 }
5554
5555
5556 void x86_64_xorps_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5557         x86_64_emit_rex(0,(dreg),0,(basereg));
5558         *(mcodeptr++) = 0x0f;
5559         *(mcodeptr++) = 0x57;
5560         x86_64_emit_membase((basereg),(disp),(dreg));
5561 }
5562
5563
5564 void x86_64_xorpd_reg_reg(s8 reg, s8 dreg) {
5565         *(mcodeptr++) = 0x66;
5566         x86_64_emit_rex(0,(dreg),0,(reg));
5567         *(mcodeptr++) = 0x0f;
5568         *(mcodeptr++) = 0x57;
5569         x86_64_emit_reg((dreg),(reg));
5570 }
5571
5572
5573 void x86_64_xorpd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5574         *(mcodeptr++) = 0x66;
5575         x86_64_emit_rex(0,(dreg),0,(basereg));
5576         *(mcodeptr++) = 0x0f;
5577         *(mcodeptr++) = 0x57;
5578         x86_64_emit_membase((basereg),(disp),(dreg));
5579 }
5580
5581 #endif
5582
5583 /*
5584  * These are local overrides for various environment variables in Emacs.
5585  * Please do not remove this and leave it at the end of the file, where
5586  * Emacs will automagically detect them.
5587  * ---------------------------------------------------------------------
5588  * Local variables:
5589  * mode: c
5590  * indent-tabs-mode: t
5591  * c-basic-offset: 4
5592  * tab-width: 4
5593  * End:
5594  */