1 /* jit/x86_64/codegen.c - machine code generator for x86_64
2
3    Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003
4    Institut f. Computersprachen, TU Wien
5    R. Grafl, A. Krall, C. Kruegel, C. Oates, R. Obermaisser, M. Probst,
6    S. Ring, E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich,
7    J. Wenninger
8
9    This file is part of CACAO.
10
11    This program is free software; you can redistribute it and/or
12    modify it under the terms of the GNU General Public License as
13    published by the Free Software Foundation; either version 2, or (at
14    your option) any later version.
15
16    This program is distributed in the hope that it will be useful, but
17    WITHOUT ANY WARRANTY; without even the implied warranty of
18    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
19    General Public License for more details.
20
21    You should have received a copy of the GNU General Public License
22    along with this program; if not, write to the Free Software
23    Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
24    02111-1307, USA.
25
26    Contact: cacao@complang.tuwien.ac.at
27
28    Authors: Andreas Krall
29             Christian Thalinger
30
31    $Id: codegen.c 1064 2004-05-16 15:36:36Z twisti $
32
33 */
34
35
36 #define _POSIX_C_SOURCE 199506L
37 #define _XOPEN_SOURCE
38 #define _XOPEN_SOURCE_EXTENDED
39
40 #include <stdio.h>
41 #include <signal.h>
42 #include "types.h"
43 #include "main.h"
44 #include "codegen.h"
45 #include "jit.h"
46 #include "reg.h"
47 #include "parse.h"
48 #include "builtin.h"
49 #include "asmpart.h"
50 #include "jni.h"
51 #include "loader.h"
52 #include "tables.h"
53 #include "native.h"
54 #include "methodtable.h"
55
56 /* include independent code generation stuff */
57 #include "codegen.inc"
58 #include "reg.inc"
59
60
61 /* register description - array ***********************************************/
62
63 /* #define REG_RES   0         reserved register for OS or code generator     */
64 /* #define REG_RET   1         return value register                          */
65 /* #define REG_EXC   2         exception value register (only old jit)        */
66 /* #define REG_SAV   3         (callee) saved register                        */
67 /* #define REG_TMP   4         scratch temporary register (caller saved)      */
68 /* #define REG_ARG   5         argument register (caller saved)               */
69
70 /* #define REG_END   -1        last entry in tables                           */
71
72 int nregdescint[] = {
73     REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
74     REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
75     REG_END
76 };
77
78
79 int nregdescfloat[] = {
80         /*      REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP, */
81         /*      REG_RES, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV, REG_SAV, */
82     REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
83     REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
84     REG_END
85 };
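
/* Both arrays are presumably indexed by hardware register number (0 = RAX,
   1 = RCX, 2 = RDX, 3 = RBX, 4 = RSP, 5 = RBP, 6 = RSI, 7 = RDI, 8-15 =
   R8-R15 for the integer table; XMM0-XMM15 for the float table).  Under that
   reading RAX is the return register, RSP is reserved, and
   RDI/RSI/RDX/RCX/R8/R9 are the six System V AMD64 argument registers. */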
86
87
88 /* additional functions and macros to generate code ***************************/
89
90 #define BlockPtrOfPC(pc)  ((basicblock *) iptr->target)
91
92
93 #ifdef STATISTICS
94 #define COUNT_SPILLS count_spills++
95 #else
96 #define COUNT_SPILLS
97 #endif
98
99
100 #define CALCOFFSETBYTES(var, reg, val) \
101     if ((s4) (val) < -128 || (s4) (val) > 127) (var) += 4; \
102     else if ((s4) (val) != 0) (var) += 1; \
103     else if ((reg) == RBP || (reg) == RSP || (reg) == R12 || (reg) == R13) (var) += 1;
104
105
106 #define CALCIMMEDIATEBYTES(var, val) \
107     if ((s4) (val) < -128 || (s4) (val) > 127) (var) += 4; \
108     else (var) += 1;
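
/* Illustrative sketch of what these macros compute: they estimate the number
   of extra bytes an x86-64 instruction needs for a base+offset operand or an
   immediate.  A value that fits into a signed byte costs 1 byte, otherwise 4;
   a zero displacement is free except for the base registers RBP, RSP, R12 and
   R13, whose encodings always require an explicit displacement or SIB byte.
   For example:

       int len = 0;
       CALCOFFSETBYTES(len, RSP, 0);        (len becomes 1)
       CALCIMMEDIATEBYTES(len, 1000);       (len becomes 5)
*/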
109
110
111 /* gen_nullptr_check(objreg) */
112
113 #define gen_nullptr_check(objreg) \
114         if (checknull) { \
115         x86_64_test_reg_reg((objreg), (objreg)); \
116         x86_64_jcc(X86_64_CC_E, 0); \
117             codegen_addxnullrefs(mcodeptr); \
118         }
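
/* The conditional jump above is emitted with a zero displacement as a
   placeholder; codegen_addxnullrefs() records the current code position so
   that the branch can later be resolved to the null pointer exception code
   (a sketch of the mechanism, analogous to the branchref resolution done per
   basic block in codegen() below). */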
119
120
121 /* MCODECHECK(icnt) */
122
123 #define MCODECHECK(icnt) \
124         if ((mcodeptr + (icnt)) > (u1*) mcodeend) mcodeptr = (u1*) codegen_increase((u1*) mcodeptr)
125
126 /* M_INTMOVE:
127     generates an integer move from register reg to register dreg.
128     if both are the same integer register, no code will be generated.
129 */
130
131 #define M_INTMOVE(reg,dreg) \
132     if ((reg) != (dreg)) { \
133         x86_64_mov_reg_reg((reg),(dreg)); \
134     }
135
136
137 /* M_FLTMOVE:
138     generates a floating-point move from register reg to register dreg.
139     if both are the same float register, no code will be generated.
140 */
141
142 #define M_FLTMOVE(reg,dreg) \
143     if ((reg) != (dreg)) { \
144         x86_64_movq_reg_reg((reg),(dreg)); \
145     }
146
147
148 /* var_to_reg_xxx:
149     these macros generate code to fetch data from a pseudo-register
150     into a real register.
151     If the pseudo-register has actually been assigned to a real
152     register, no code will be emitted, since following operations
153     can use this register directly.
154
155     v: pseudo-register to be fetched from
156     tempnr: temporary register to be used if v is actually spilled to ram
157
158     regnr: receives the register number where the operand can be found
159            after fetching (this will be either tempnr or the register
160            number already assigned to v)
161 */
162
163 #define var_to_reg_int(regnr,v,tempnr) \
164     if ((v)->flags & INMEMORY) { \
165         COUNT_SPILLS; \
166         if ((v)->type == TYPE_INT) { \
167             x86_64_movl_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
168         } else { \
169             x86_64_mov_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
170         } \
171         regnr = tempnr; \
172     } else { \
173         regnr = (v)->regoff; \
174     }
175
176
177
178 #define var_to_reg_flt(regnr,v,tempnr) \
179     if ((v)->flags & INMEMORY) { \
180         COUNT_SPILLS; \
181         if ((v)->type == TYPE_FLT) { \
182             x86_64_movlps_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
183         } else { \
184             x86_64_movlpd_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
185         } \
186 /*        x86_64_movq_membase_reg(REG_SP, (v)->regoff * 8, tempnr);*/ \
187         regnr = tempnr; \
188     } else { \
189         regnr = (v)->regoff; \
190     }
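
/* Typical usage (illustrative, mirroring the ICMD_ISTORE case further below):
   fetch the source operand into a scratch register and then use the returned
   register number in the actual instruction, e.g.

       var_to_reg_int(s1, src, REG_ITMP1);
       x86_64_mov_reg_membase(s1, REG_SP, var->regoff * 8);
*/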
191
192
193 /* reg_of_var:
194     This function determines the register to which the result of an operation
195     should go, when it is ultimately intended to store the result in
196     pseudo-register v.
197     If v is assigned to an actual register, this register will be returned.
198     Otherwise (when v is spilled) this function returns tempregnum.
199     If not already done, regoff and flags are set in the stack location.
200 */
201
202 static int reg_of_var(stackptr v, int tempregnum)
203 {
204         varinfo      *var;
205
206         switch (v->varkind) {
207         case TEMPVAR:
208                 if (!(v->flags & INMEMORY))
209                         return(v->regoff);
210                 break;
211         case STACKVAR:
212                 var = &(interfaces[v->varnum][v->type]);
213                 v->regoff = var->regoff;
214                 if (!(var->flags & INMEMORY))
215                         return(var->regoff);
216                 break;
217         case LOCALVAR:
218                 var = &(locals[v->varnum][v->type]);
219                 v->regoff = var->regoff;
220                 if (!(var->flags & INMEMORY))
221                         return(var->regoff);
222                 break;
223         case ARGVAR:
224                 v->regoff = v->varnum;
225                 if (IS_FLT_DBL_TYPE(v->type)) {
226                         if (v->varnum < FLT_ARG_CNT) {
227                                 v->regoff = argfltregs[v->varnum];
228                                 return(argfltregs[v->varnum]);
229                         }
230                 } else {
231                         if (v->varnum < INT_ARG_CNT) {
232                                 v->regoff = argintregs[v->varnum];
233                                 return(argintregs[v->varnum]);
234                         }
235                 }
236                 v->regoff -= INT_ARG_CNT;
237                 break;
238         }
239         v->flags |= INMEMORY;
240         return tempregnum;
241 }
242
243
244 /* store_reg_to_var_xxx:
245     These macros generate the code to store the result of an operation
246     back into a spilled pseudo-variable.
247     If the pseudo-variable has not been spilled in the first place, they
248     generate nothing.
249
250     sptr ......... pseudo-variable (stack slot)
251     tempregnum ... number of the temporary register as returned by
252                    reg_of_var.
253 */
254
255 #define store_reg_to_var_int(sptr, tempregnum) \
256     if ((sptr)->flags & INMEMORY) { \
257         COUNT_SPILLS; \
258         x86_64_mov_reg_membase(tempregnum, REG_SP, (sptr)->regoff * 8); \
259     }
260
261
262 #define store_reg_to_var_flt(sptr, tempregnum) \
263     if ((sptr)->flags & INMEMORY) { \
264          COUNT_SPILLS; \
265          x86_64_movq_reg_membase(tempregnum, REG_SP, (sptr)->regoff * 8); \
266     }
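
/* Putting the pieces together, an ICMD handler typically looks like this
   (this particular sequence is taken from the ICMD_L2I case below):

       var_to_reg_int(s1, src, REG_ITMP1);
       d = reg_of_var(iptr->dst, REG_ITMP3);
       M_INTMOVE(s1, d);
       store_reg_to_var_int(iptr->dst, d);
*/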
267
268
269 /* NullPointerException signal handler for hardware null pointer check */
270
271 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
272 {
273         sigset_t nsig;
274         /*      int      instr; */
275         /*      long     faultaddr; */
276
277         struct ucontext *_uc = (struct ucontext *) _p;
278         struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
279         struct sigaction act;
280         java_objectheader *xptr;
281
282         /* Reset signal handler - necessary for SysV, does no harm for BSD */
283
284         
285 /*      instr = *((int*)(sigctx->rip)); */
286 /*      faultaddr = sigctx->sc_regs[(instr >> 16) & 0x1f]; */
287
288 /*      if (faultaddr == 0) { */
289         act.sa_sigaction = (void *) catch_NullPointerException; /* reinstall handler */
290         act.sa_flags = SA_SIGINFO;
291         sigaction(sig, &act, NULL);
292         
293         sigemptyset(&nsig);
294         sigaddset(&nsig, sig);
295         sigprocmask(SIG_UNBLOCK, &nsig, NULL);               /* unblock signal    */
296
297         xptr = new_exception(string_java_lang_NullPointerException);
298
299         sigctx->rax = (u8) xptr;                             /* REG_ITMP1_XPTR    */
300         sigctx->r10 = sigctx->rip;                           /* REG_ITMP2_XPC     */
301         sigctx->rip = (u8) asm_handle_exception;
302
303         return;
304
305 /*      } else { */
306 /*              faultaddr += (long) ((instr << 16) >> 16); */
307 /*              fprintf(stderr, "faulting address: 0x%08x\n", faultaddr); */
308 /*              panic("Stack overflow"); */
309 /*      } */
310 }
311
312
313 /* ArithmeticException signal handler for hardware divide by zero check */
314
315 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
316 {
317         sigset_t nsig;
318
319         struct ucontext *_uc = (struct ucontext *) _p;
320         struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
321         struct sigaction act;
322         java_objectheader *xptr;
323
324         /* Reset signal handler - necessary for SysV, does no harm for BSD */
325
326         act.sa_sigaction = (void *) catch_ArithmeticException; /* reinstall handler */
327         act.sa_flags = SA_SIGINFO;
328         sigaction(sig, &act, NULL);
329
330         sigemptyset(&nsig);
331         sigaddset(&nsig, sig);
332         sigprocmask(SIG_UNBLOCK, &nsig, NULL);               /* unblock signal    */
333
334         xptr = new_exception_message(string_java_lang_ArithmeticException,
335                                                                  string_java_lang_ArithmeticException_message);
336
337         sigctx->rax = (s8) xptr;                             /* REG_ITMP1_XPTR    */
338         sigctx->r10 = sigctx->rip;                           /* REG_ITMP2_XPC     */
339         sigctx->rip = (s8) asm_handle_exception;
340
341         return;
342 }
343
344
345 void init_exceptions(void)
346 {
347         struct sigaction act;
348
349         /* install signal handlers we need to convert to exceptions */
350
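        /* Note: the SIGSEGV/SIGBUS handlers are only installed when software
           null checks are disabled (!checknull), i.e. null dereferences are
           then detected via the hardware fault; SIGFPE is always caught for
           the hardware divide-by-zero check. */
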
351         if (!checknull) {
352 #if defined(SIGSEGV)
353                 act.sa_sigaction = (void *) catch_NullPointerException;
354                 act.sa_flags = SA_SIGINFO;
355                 sigaction(SIGSEGV, &act, NULL);
356 #endif
357
358 #if defined(SIGBUS)
359                 act.sa_sigaction = (void *) catch_NullPointerException;
360                 act.sa_flags = SA_SIGINFO;
361                 sigaction(SIGBUS, &act, NULL);
362 #endif
363         }
364
365         act.sa_sigaction = (void *) catch_ArithmeticException;
366         act.sa_flags = SA_SIGINFO;
367         sigaction(SIGFPE, &act, NULL);
368 }
369
370
371 /* function codegen ************************************************************
372
373         generates machine code
374
375 *******************************************************************************/
376
377 /* global code generation pointer */
378
379 u1 *mcodeptr;
380
381
382 void codegen()
383 {
384         int  len, s1, s2, s3, d;
385         s8   a;
386         stackptr    src;
387         varinfo     *var;
388         basicblock  *bptr;
389         instruction *iptr;
390
391         xtable *ex;
392
393         {
394         int p, pa, t, l, r;
395
396         savedregs_num = 0;
397
398         /* space to save used callee saved registers */
399
400         savedregs_num += (savintregcnt - maxsavintreguse);
401         savedregs_num += (savfltregcnt - maxsavfltreguse);
402
403         parentargs_base = maxmemuse + savedregs_num;
404
405 #if defined(USE_THREADS)           /* space to save argument of monitor_enter */
406
407         if (checksync && (method->flags & ACC_SYNCHRONIZED))
408                 parentargs_base++;
409
410 #endif
411
412     /* keep stack 16-byte aligned for calls into libc */
413
414         if (!isleafmethod || runverbose) {
415                 if ((parentargs_base % 2) == 0) {
416                         parentargs_base++;
417                 }
418         }
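
        /* A minimal sketch of the reasoning: on entry rsp is 16-byte aligned
           minus the 8 byte return address pushed by the caller, so making the
           frame an odd number of 8 byte slots restores 16-byte alignment
           before any further calls. */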
419
420         /* create method header */
421
422         (void) dseg_addaddress(method);                         /* MethodPointer  */
423         (void) dseg_adds4(parentargs_base * 8);                 /* FrameSize      */
424
425 #if defined(USE_THREADS)
426
427         /* IsSync contains the offset relative to the stack pointer for the
428            argument of monitor_exit used in the exception handler. Since the
429            offset could be zero and would then be mistaken for the 'not
430            synchronized' flag, it is offset by one.
431         */
432
433         if (checksync && (method->flags & ACC_SYNCHRONIZED))
434                 (void) dseg_adds4((maxmemuse + 1) * 8);             /* IsSync         */
435         else
436
437 #endif
438
439         (void) dseg_adds4(0);                                   /* IsSync         */
440                                                
441         (void) dseg_adds4(isleafmethod);                        /* IsLeaf         */
442         (void) dseg_adds4(savintregcnt - maxsavintreguse);      /* IntSave        */
443         (void) dseg_adds4(savfltregcnt - maxsavfltreguse);      /* FltSave        */
444         (void) dseg_adds4(exceptiontablelength);                /* ExTableSize    */
445
446         /* create exception table */
447
448         for (ex = extable; ex != NULL; ex = ex->down) {
449                 dseg_addtarget(ex->start);
450                 dseg_addtarget(ex->end);
451                 dseg_addtarget(ex->handler);
452                 (void) dseg_addaddress(ex->catchtype);
453         }
454         
455         /* initialize mcode variables */
456         
457         mcodeptr = (u1*) mcodebase;
458         mcodeend = (s4*) (mcodebase + mcodesize);
459         MCODECHECK(128 + mparamcount);
460
461         /* create stack frame (if necessary) */
462
463         if (parentargs_base) {
464                 x86_64_alu_imm_reg(X86_64_SUB, parentargs_base * 8, REG_SP);
465         }
466
467         /* save return address and used callee saved registers */
468
469         p = parentargs_base;
470         for (r = savintregcnt - 1; r >= maxsavintreguse; r--) {
471                 p--; x86_64_mov_reg_membase(savintregs[r], REG_SP, p * 8);
472         }
473         for (r = savfltregcnt - 1; r >= maxsavfltreguse; r--) {
474                 p--; x86_64_movq_reg_membase(savfltregs[r], REG_SP, p * 8);
475         }
476
477         /* save monitorenter argument */
478
479 #if defined(USE_THREADS)
480         if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
481                 if (method->flags & ACC_STATIC) {
482                         x86_64_mov_imm_reg((s8) class, REG_ITMP1);
483                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, maxmemuse * 8);
484
485                 } else {
486                         x86_64_mov_reg_membase(argintregs[0], REG_SP, maxmemuse * 8);
487                 }
488         }                       
489 #endif
490
491         /* copy argument registers to stack and call trace function with pointer
492            to arguments on stack.
493         */
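        /* The (6 + 8 + 1 + 1) * 8 byte scratch area presumably covers the 6
           integer argument registers, the 8 float argument registers (only 4
           are actually saved here), one slot for the method pointer at offset
           0 and one slot of padding that keeps the stack 16-byte aligned. */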
494         if (runverbose) {
495                 x86_64_alu_imm_reg(X86_64_SUB, (6 + 8 + 1 + 1) * 8, REG_SP);
496
497                 x86_64_mov_reg_membase(argintregs[0], REG_SP, 1 * 8);
498                 x86_64_mov_reg_membase(argintregs[1], REG_SP, 2 * 8);
499                 x86_64_mov_reg_membase(argintregs[2], REG_SP, 3 * 8);
500                 x86_64_mov_reg_membase(argintregs[3], REG_SP, 4 * 8);
501                 x86_64_mov_reg_membase(argintregs[4], REG_SP, 5 * 8);
502                 x86_64_mov_reg_membase(argintregs[5], REG_SP, 6 * 8);
503
504                 x86_64_movq_reg_membase(argfltregs[0], REG_SP, 7 * 8);
505                 x86_64_movq_reg_membase(argfltregs[1], REG_SP, 8 * 8);
506                 x86_64_movq_reg_membase(argfltregs[2], REG_SP, 9 * 8);
507                 x86_64_movq_reg_membase(argfltregs[3], REG_SP, 10 * 8);
508 /*              x86_64_movq_reg_membase(argfltregs[4], REG_SP, 11 * 8); */
509 /*              x86_64_movq_reg_membase(argfltregs[5], REG_SP, 12 * 8); */
510 /*              x86_64_movq_reg_membase(argfltregs[6], REG_SP, 13 * 8); */
511 /*              x86_64_movq_reg_membase(argfltregs[7], REG_SP, 14 * 8); */
512
513                 for (p = 0, l = 0; p < mparamcount; p++) {
514                         t = mparamtypes[p];
515
516                         if (IS_FLT_DBL_TYPE(t)) {
517                                 for (s1 = (mparamcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : mparamcount - 2; s1 >= p; s1--) {
518                                         x86_64_mov_reg_reg(argintregs[s1], argintregs[s1 + 1]);
519                                 }
520
521                                 x86_64_movd_freg_reg(argfltregs[l], argintregs[p]);
522                                 l++;
523                         }
524                 }
525
526                 x86_64_mov_imm_reg((s8) method, REG_ITMP2);
527                 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, 0 * 8);
528                 x86_64_mov_imm_reg((s8) builtin_trace_args, REG_ITMP1);
529                 x86_64_call_reg(REG_ITMP1);
530
531                 x86_64_mov_membase_reg(REG_SP, 1 * 8, argintregs[0]);
532                 x86_64_mov_membase_reg(REG_SP, 2 * 8, argintregs[1]);
533                 x86_64_mov_membase_reg(REG_SP, 3 * 8, argintregs[2]);
534                 x86_64_mov_membase_reg(REG_SP, 4 * 8, argintregs[3]);
535                 x86_64_mov_membase_reg(REG_SP, 5 * 8, argintregs[4]);
536                 x86_64_mov_membase_reg(REG_SP, 6 * 8, argintregs[5]);
537
538                 x86_64_movq_membase_reg(REG_SP, 7 * 8, argfltregs[0]);
539                 x86_64_movq_membase_reg(REG_SP, 8 * 8, argfltregs[1]);
540                 x86_64_movq_membase_reg(REG_SP, 9 * 8, argfltregs[2]);
541                 x86_64_movq_membase_reg(REG_SP, 10 * 8, argfltregs[3]);
542 /*              x86_64_movq_membase_reg(REG_SP, 11 * 8, argfltregs[4]); */
543 /*              x86_64_movq_membase_reg(REG_SP, 12 * 8, argfltregs[5]); */
544 /*              x86_64_movq_membase_reg(REG_SP, 13 * 8, argfltregs[6]); */
545 /*              x86_64_movq_membase_reg(REG_SP, 14 * 8, argfltregs[7]); */
546
547                 x86_64_alu_imm_reg(X86_64_ADD, (6 + 8 + 1 + 1) * 8, REG_SP);
548         }
549
550         /* take arguments out of register or stack frame */
551
552         for (p = 0, l = 0, s1 = 0, s2 = 0; p < mparamcount; p++) {
553                 t = mparamtypes[p];
554                 var = &(locals[l][t]);
555                 l++;
556                 if (IS_2_WORD_TYPE(t))    /* increment local counter for 2 word types */
557                         l++;
558                 if (var->type < 0) {
559                         if (IS_INT_LNG_TYPE(t)) {
560                                 s1++;
561                         } else {
562                                 s2++;
563                         }
564                         continue;
565                 }
566                 r = var->regoff; 
567                 if (IS_INT_LNG_TYPE(t)) {                    /* integer args          */
568                         if (s1 < INT_ARG_CNT) {                /* register arguments    */
569                                 if (!(var->flags & INMEMORY)) {      /* reg arg -> register   */
570                                         M_INTMOVE(argintregs[s1], r);
571
572                                 } else {                             /* reg arg -> spilled    */
573                                     x86_64_mov_reg_membase(argintregs[s1], REG_SP, r * 8);
574                                 }
575                         } else {                                 /* stack arguments       */
576                                 pa = s1 - INT_ARG_CNT;
577                                 if (s2 >= FLT_ARG_CNT) {
578                                         pa += s2 - FLT_ARG_CNT;
579                                 }
580                                 if (!(var->flags & INMEMORY)) {      /* stack arg -> register */ 
581                                         x86_64_mov_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, r);    /* + 8 for return address */
582                                 } else {                             /* stack arg -> spilled  */
583                                         x86_64_mov_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1);    /* + 8 for return address */
584                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, r * 8);
585                                 }
586                         }
587                         s1++;
588
589                 } else {                                     /* floating args         */   
590                         if (s2 < FLT_ARG_CNT) {                /* register arguments    */
591                                 if (!(var->flags & INMEMORY)) {      /* reg arg -> register   */
592                                         M_FLTMOVE(argfltregs[s2], r);
593
594                                 } else {                                         /* reg arg -> spilled    */
595                                         x86_64_movq_reg_membase(argfltregs[s2], REG_SP, r * 8);
596                                 }
597
598                         } else {                                 /* stack arguments       */
599                                 pa = s2 - FLT_ARG_CNT;
600                                 if (s1 >= INT_ARG_CNT) {
601                                         pa += s1 - INT_ARG_CNT;
602                                 }
603                                 if (!(var->flags & INMEMORY)) {      /* stack-arg -> register */
604                                         x86_64_movq_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, r);
605
606                                 } else {
607                                         x86_64_movq_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
608                                         x86_64_movq_reg_membase(REG_FTMP1, REG_SP, r * 8);
609                                 }
610                         }
611                         s2++;
612                 }
613         }  /* end for */
614
615         /* call monitorenter function */
616
617 #if defined(USE_THREADS)
618         if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
619                 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
620                 x86_64_mov_imm_reg((s8) builtin_monitorenter, REG_ITMP1);
621                 x86_64_call_reg(REG_ITMP1);
622         }                       
623 #endif
624         }
625
626         /* end of header generation */
627
628         /* walk through all basic blocks */
629         for (/* bbs = block_count, */ bptr = block; /* --bbs >= 0 */ bptr != NULL; bptr = bptr->next) {
630
631                 bptr->mpc = (int)((u1*) mcodeptr - mcodebase);
632
633                 if (bptr->flags >= BBREACHED) {
634
635                 /* branch resolving */
636
637                 branchref *brefs;
638                 for (brefs = bptr->branchrefs; brefs != NULL; brefs = brefs->next) {
639                         gen_resolvebranch((u1*) mcodebase + brefs->branchpos, 
640                                           brefs->branchpos, bptr->mpc);
641                 }
642
643                 /* copy interface registers to their destination */
644
645                 src = bptr->instack;
646                 len = bptr->indepth;
647                 MCODECHECK(64+len);
648                 while (src != NULL) {
649                         len--;
650                         if ((len == 0) && (bptr->type != BBTYPE_STD)) {
651                                 if (bptr->type == BBTYPE_SBR) {
652                                         d = reg_of_var(src, REG_ITMP1);
653                                         x86_64_pop_reg(d);
654                                         store_reg_to_var_int(src, d);
655
656                                 } else if (bptr->type == BBTYPE_EXH) {
657                                         d = reg_of_var(src, REG_ITMP1);
658                                         M_INTMOVE(REG_ITMP1, d);
659                                         store_reg_to_var_int(src, d);
660                                 }
661
662                         } else {
663                                 d = reg_of_var(src, REG_ITMP1);
664                                 if ((src->varkind != STACKVAR)) {
665                                         s2 = src->type;
666                                         if (IS_FLT_DBL_TYPE(s2)) {
667                                                 s1 = interfaces[len][s2].regoff;
668                                                 if (!(interfaces[len][s2].flags & INMEMORY)) {
669                                                         M_FLTMOVE(s1, d);
670
671                                                 } else {
672                                                         x86_64_movq_membase_reg(REG_SP, s1 * 8, d);
673                                                 }
674                                                 store_reg_to_var_flt(src, d);
675
676                                         } else {
677                                                 s1 = interfaces[len][s2].regoff;
678                                                 if (!(interfaces[len][s2].flags & INMEMORY)) {
679                                                         M_INTMOVE(s1, d);
680
681                                                 } else {
682                                                         x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
683                                                 }
684                                                 store_reg_to_var_int(src, d);
685                                         }
686                                 }
687                         }
688                         src = src->prev;
689                 }
690
691                 /* walk through all instructions */
692                 
693                 src = bptr->instack;
694                 len = bptr->icount;
695                 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
696
697                         MCODECHECK(64);   /* an instruction usually needs < 64 bytes      */
698                         switch (iptr->opc) {
699
700                         case ICMD_NOP:    /* ...  ==> ...                                 */
701                                 break;
702
703                         case ICMD_NULLCHECKPOP: /* ..., objectref  ==> ...                */
704                                 if (src->flags & INMEMORY) {
705                                         x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
706
707                                 } else {
708                                         x86_64_test_reg_reg(src->regoff, src->regoff);
709                                 }
710                                 x86_64_jcc(X86_64_CC_E, 0);
711                                 codegen_addxnullrefs(mcodeptr);
712                                 break;
713
714                 /* constant operations ************************************************/
715
716                 case ICMD_ICONST:     /* ...  ==> ..., constant                       */
717                                       /* op1 = 0, val.i = constant                    */
718
719                         d = reg_of_var(iptr->dst, REG_ITMP1);
720                         if (iptr->val.i == 0) {
721                                 x86_64_alu_reg_reg(X86_64_XOR, d, d);
722                         } else {
723                                 x86_64_movl_imm_reg(iptr->val.i, d);
724                         }
725                         store_reg_to_var_int(iptr->dst, d);
726                         break;
727
728                 case ICMD_ACONST:     /* ...  ==> ..., constant                       */
729                                       /* op1 = 0, val.a = constant                    */
730
731                         d = reg_of_var(iptr->dst, REG_ITMP1);
732                         if (iptr->val.a == 0) {
733                                 x86_64_alu_reg_reg(X86_64_XOR, d, d);
734                         } else {
735                                 x86_64_mov_imm_reg((s8) iptr->val.a, d);
736                         }
737                         store_reg_to_var_int(iptr->dst, d);
738                         break;
739
740                 case ICMD_LCONST:     /* ...  ==> ..., constant                       */
741                                       /* op1 = 0, val.l = constant                    */
742
743                         d = reg_of_var(iptr->dst, REG_ITMP1);
744                         if (iptr->val.l == 0) {
745                                 x86_64_alu_reg_reg(X86_64_XOR, d, d);
746                         } else {
747                                 x86_64_mov_imm_reg(iptr->val.l, d);
748                         }
749                         store_reg_to_var_int(iptr->dst, d);
750                         break;
751
752                 case ICMD_FCONST:     /* ...  ==> ..., constant                       */
753                                       /* op1 = 0, val.f = constant                    */
754
755                         d = reg_of_var(iptr->dst, REG_FTMP1);
756                         a = dseg_addfloat(iptr->val.f);
757                         x86_64_movdl_membase_reg(RIP, -(((s8) mcodeptr + ((d > 7) ? 9 : 8)) - (s8) mcodebase) + a, d);
758                         store_reg_to_var_flt(iptr->dst, d);
759                         break;
760                 
761                 case ICMD_DCONST:     /* ...  ==> ..., constant                       */
762                                       /* op1 = 0, val.d = constant                    */
763
764                         d = reg_of_var(iptr->dst, REG_FTMP1);
765                         a = dseg_adddouble(iptr->val.d);
766                         x86_64_movd_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, d);
767                         store_reg_to_var_flt(iptr->dst, d);
768                         break;
769
770
771                 /* load/store operations **********************************************/
772
773                 case ICMD_ILOAD:      /* ...  ==> ..., content of local variable      */
774                                       /* op1 = local variable                         */
775
776                         d = reg_of_var(iptr->dst, REG_ITMP1);
777                         if ((iptr->dst->varkind == LOCALVAR) &&
778                             (iptr->dst->varnum == iptr->op1)) {
779                                 break;
780                         }
781                         var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
782                         if (var->flags & INMEMORY) {
783                                 x86_64_movl_membase_reg(REG_SP, var->regoff * 8, d);
784                                 store_reg_to_var_int(iptr->dst, d);
785
786                         } else {
787                                 if (iptr->dst->flags & INMEMORY) {
788                                         x86_64_mov_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
789
790                                 } else {
791                                         M_INTMOVE(var->regoff, d);
792                                 }
793                         }
794                         break;
795
796                 case ICMD_LLOAD:      /* ...  ==> ..., content of local variable      */
797                 case ICMD_ALOAD:      /* op1 = local variable                         */
798
799                         d = reg_of_var(iptr->dst, REG_ITMP1);
800                         if ((iptr->dst->varkind == LOCALVAR) &&
801                             (iptr->dst->varnum == iptr->op1)) {
802                                 break;
803                         }
804                         var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
805                         if (var->flags & INMEMORY) {
806                                 x86_64_mov_membase_reg(REG_SP, var->regoff * 8, d);
807                                 store_reg_to_var_int(iptr->dst, d);
808
809                         } else {
810                                 if (iptr->dst->flags & INMEMORY) {
811                                         x86_64_mov_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
812
813                                 } else {
814                                         M_INTMOVE(var->regoff, d);
815                                 }
816                         }
817                         break;
818
819                 case ICMD_FLOAD:      /* ...  ==> ..., content of local variable      */
820                 case ICMD_DLOAD:      /* op1 = local variable                         */
821
822                         d = reg_of_var(iptr->dst, REG_FTMP1);
823                         if ((iptr->dst->varkind == LOCALVAR) &&
824                             (iptr->dst->varnum == iptr->op1)) {
825                                 break;
826                         }
827                         var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
828                         if (var->flags & INMEMORY) {
829                                 x86_64_movq_membase_reg(REG_SP, var->regoff * 8, d);
830                                 store_reg_to_var_flt(iptr->dst, d);
831
832                         } else {
833                                 if (iptr->dst->flags & INMEMORY) {
834                                         x86_64_movq_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
835
836                                 } else {
837                                         M_FLTMOVE(var->regoff, d);
838                                 }
839                         }
840                         break;
841
842                 case ICMD_ISTORE:     /* ..., value  ==> ...                          */
843                 case ICMD_LSTORE:     /* op1 = local variable                         */
844                 case ICMD_ASTORE:
845
846                         if ((src->varkind == LOCALVAR) &&
847                             (src->varnum == iptr->op1)) {
848                                 break;
849                         }
850                         var = &(locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
851                         if (var->flags & INMEMORY) {
852                                 var_to_reg_int(s1, src, REG_ITMP1);
853                                 x86_64_mov_reg_membase(s1, REG_SP, var->regoff * 8);
854
855                         } else {
856                                 var_to_reg_int(s1, src, var->regoff);
857                                 M_INTMOVE(s1, var->regoff);
858                         }
859                         break;
860
861                 case ICMD_FSTORE:     /* ..., value  ==> ...                          */
862                 case ICMD_DSTORE:     /* op1 = local variable                         */
863
864                         if ((src->varkind == LOCALVAR) &&
865                             (src->varnum == iptr->op1)) {
866                                 break;
867                         }
868                         var = &(locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
869                         if (var->flags & INMEMORY) {
870                                 var_to_reg_flt(s1, src, REG_FTMP1);
871                                 x86_64_movq_reg_membase(s1, REG_SP, var->regoff * 8);
872
873                         } else {
874                                 var_to_reg_flt(s1, src, var->regoff);
875                                 M_FLTMOVE(s1, var->regoff);
876                         }
877                         break;
878
879
880                 /* pop/dup/swap operations ********************************************/
881
882                 /* attention: doubles and longs occupy only one stack entry in CACAO ICMDs */
883
884                 case ICMD_POP:        /* ..., value  ==> ...                          */
885                 case ICMD_POP2:       /* ..., value, value  ==> ...                   */
886                         break;
887
888 #define M_COPY(from,to) \
889                 d = reg_of_var(to, REG_ITMP1); \
890                         if ((from->regoff != to->regoff) || \
891                             ((from->flags ^ to->flags) & INMEMORY)) { \
892                                 if (IS_FLT_DBL_TYPE(from->type)) { \
893                                         var_to_reg_flt(s1, from, d); \
894                                         M_FLTMOVE(s1, d); \
895                                         store_reg_to_var_flt(to, d); \
896                                 } else { \
897                                         var_to_reg_int(s1, from, d); \
898                                         M_INTMOVE(s1, d); \
899                                         store_reg_to_var_int(to, d); \
900                                 } \
901                         }
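
                /* M_COPY copies one stack slot 'from' into slot 'to',
                   choosing a float or integer move depending on the type,
                   and emits nothing when source and destination already
                   share the same register or memory location. */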
902
903                 case ICMD_DUP:        /* ..., a ==> ..., a, a                         */
904                         M_COPY(src, iptr->dst);
905                         break;
906
907                 case ICMD_DUP_X1:     /* ..., a, b ==> ..., b, a, b                   */
908
909                         M_COPY(src,       iptr->dst->prev->prev);
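                        /* note: falls through to ICMD_DUP2, which emits the
                           remaining two copies */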
910
911                 case ICMD_DUP2:       /* ..., a, b ==> ..., a, b, a, b                */
912
913                         M_COPY(src,       iptr->dst);
914                         M_COPY(src->prev, iptr->dst->prev);
915                         break;
916
917                 case ICMD_DUP2_X1:    /* ..., a, b, c ==> ..., b, c, a, b, c          */
918
919                         M_COPY(src->prev,       iptr->dst->prev->prev->prev);
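                        /* note: falls through to ICMD_DUP_X2 for the
                           remaining copies */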
920
921                 case ICMD_DUP_X2:     /* ..., a, b, c ==> ..., c, a, b, c             */
922
923                         M_COPY(src,             iptr->dst);
924                         M_COPY(src->prev,       iptr->dst->prev);
925                         M_COPY(src->prev->prev, iptr->dst->prev->prev);
926                         M_COPY(src, iptr->dst->prev->prev->prev);
927                         break;
928
929                 case ICMD_DUP2_X2:    /* ..., a, b, c, d ==> ..., c, d, a, b, c, d    */
930
931                         M_COPY(src,                   iptr->dst);
932                         M_COPY(src->prev,             iptr->dst->prev);
933                         M_COPY(src->prev->prev,       iptr->dst->prev->prev);
934                         M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
935                         M_COPY(src,       iptr->dst->prev->prev->prev->prev);
936                         M_COPY(src->prev, iptr->dst->prev->prev->prev->prev->prev);
937                         break;
938
939                 case ICMD_SWAP:       /* ..., a, b ==> ..., b, a                      */
940
941                         M_COPY(src, iptr->dst->prev);
942                         M_COPY(src->prev, iptr->dst);
943                         break;
944
945
946                 /* integer operations *************************************************/
947
948                 case ICMD_INEG:       /* ..., value  ==> ..., - value                 */
949
950                         d = reg_of_var(iptr->dst, REG_NULL);
951                         if (iptr->dst->flags & INMEMORY) {
952                                 if (src->flags & INMEMORY) {
953                                         if (src->regoff == iptr->dst->regoff) {
954                                                 x86_64_negl_membase(REG_SP, iptr->dst->regoff * 8);
955
956                                         } else {
957                                                 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
958                                                 x86_64_negl_reg(REG_ITMP1);
959                                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
960                                         }
961
962                                 } else {
963                                         x86_64_movl_reg_membase(src->regoff, REG_SP, iptr->dst->regoff * 8);
964                                         x86_64_negl_membase(REG_SP, iptr->dst->regoff * 8);
965                                 }
966
967                         } else {
968                                 if (src->flags & INMEMORY) {
969                                         x86_64_movl_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
970                                         x86_64_negl_reg(d);
971
972                                 } else {
973                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
974                                         x86_64_negl_reg(iptr->dst->regoff);
975                                 }
976                         }
977                         break;
978
979                 case ICMD_LNEG:       /* ..., value  ==> ..., - value                 */
980
981                         d = reg_of_var(iptr->dst, REG_NULL);
982                         if (iptr->dst->flags & INMEMORY) {
983                                 if (src->flags & INMEMORY) {
984                                         if (src->regoff == iptr->dst->regoff) {
985                                                 x86_64_neg_membase(REG_SP, iptr->dst->regoff * 8);
986
987                                         } else {
988                                                 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
989                                                 x86_64_neg_reg(REG_ITMP1);
990                                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
991                                         }
992
993                                 } else {
994                                         x86_64_mov_reg_membase(src->regoff, REG_SP, iptr->dst->regoff * 8);
995                                         x86_64_neg_membase(REG_SP, iptr->dst->regoff * 8);
996                                 }
997
998                         } else {
999                                 if (src->flags & INMEMORY) {
1000                                         x86_64_mov_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1001                                         x86_64_neg_reg(iptr->dst->regoff);
1002
1003                                 } else {
1004                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
1005                                         x86_64_neg_reg(iptr->dst->regoff);
1006                                 }
1007                         }
1008                         break;
1009
1010                 case ICMD_I2L:        /* ..., value  ==> ..., value                   */
1011
1012                         d = reg_of_var(iptr->dst, REG_ITMP3);
1013                         if (src->flags & INMEMORY) {
1014                                 x86_64_movslq_membase_reg(REG_SP, src->regoff * 8, d);
1015
1016                         } else {
1017                                 x86_64_movslq_reg_reg(src->regoff, d);
1018                         }
1019                         store_reg_to_var_int(iptr->dst, d);
1020                         break;
1021
1022                 case ICMD_L2I:        /* ..., value  ==> ..., value                   */
1023
1024                         var_to_reg_int(s1, src, REG_ITMP1);
1025                         d = reg_of_var(iptr->dst, REG_ITMP3);
1026                         M_INTMOVE(s1, d);
1027                         store_reg_to_var_int(iptr->dst, d);
1028                         break;
1029
1030                 case ICMD_INT2BYTE:   /* ..., value  ==> ..., value                   */
1031
1032                         d = reg_of_var(iptr->dst, REG_ITMP3);
1033                         if (src->flags & INMEMORY) {
1034                                 x86_64_movsbq_membase_reg(REG_SP, src->regoff * 8, d);
1035
1036                         } else {
1037                                 x86_64_movsbq_reg_reg(src->regoff, d);
1038                         }
1039                         store_reg_to_var_int(iptr->dst, d);
1040                         break;
1041
1042                 case ICMD_INT2CHAR:   /* ..., value  ==> ..., value                   */
1043
1044                         d = reg_of_var(iptr->dst, REG_ITMP3);
1045                         if (src->flags & INMEMORY) {
1046                                 x86_64_movzwq_membase_reg(REG_SP, src->regoff * 8, d);
1047
1048                         } else {
1049                                 x86_64_movzwq_reg_reg(src->regoff, d);
1050                         }
1051                         store_reg_to_var_int(iptr->dst, d);
1052                         break;
1053
1054                 case ICMD_INT2SHORT:  /* ..., value  ==> ..., value                   */
1055
1056                         d = reg_of_var(iptr->dst, REG_ITMP3);
1057                         if (src->flags & INMEMORY) {
1058                                 x86_64_movswq_membase_reg(REG_SP, src->regoff * 8, d);
1059
1060                         } else {
1061                                 x86_64_movswq_reg_reg(src->regoff, d);
1062                         }
1063                         store_reg_to_var_int(iptr->dst, d);
1064                         break;
1065
1066
1067                 case ICMD_IADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
1068
1069                         d = reg_of_var(iptr->dst, REG_NULL);
1070                         x86_64_emit_ialu(X86_64_ADD, src, iptr);
1071                         break;
1072
1073                 case ICMD_IADDCONST:  /* ..., value  ==> ..., value + constant        */
1074                                       /* val.i = constant                             */
1075
1076                         d = reg_of_var(iptr->dst, REG_NULL);
1077                         x86_64_emit_ialuconst(X86_64_ADD, src, iptr);
1078                         break;
1079
1080                 case ICMD_LADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
1081
1082                         d = reg_of_var(iptr->dst, REG_NULL);
1083                         x86_64_emit_lalu(X86_64_ADD, src, iptr);
1084                         break;
1085
1086                 case ICMD_LADDCONST:  /* ..., value  ==> ..., value + constant        */
1087                                       /* val.l = constant                             */
1088
1089                         d = reg_of_var(iptr->dst, REG_NULL);
1090                         x86_64_emit_laluconst(X86_64_ADD, src, iptr);
1091                         break;
1092
1093                 case ICMD_ISUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
1094
1095                         d = reg_of_var(iptr->dst, REG_NULL);
1096                         if (iptr->dst->flags & INMEMORY) {
1097                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1098                                         if (src->prev->regoff == iptr->dst->regoff) {
1099                                                 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1100                                                 x86_64_alul_reg_membase(X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1101
1102                                         } else {
1103                                                 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1104                                                 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1105                                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1106                                         }
1107
1108                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1109                                         M_INTMOVE(src->prev->regoff, REG_ITMP1);
1110                                         x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1111                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1112
1113                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1114                                         if (src->prev->regoff == iptr->dst->regoff) {
1115                                                 x86_64_alul_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1116
1117                                         } else {
1118                                                 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1119                                                 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1120                                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1121                                         }
1122
1123                                 } else {
1124                                         x86_64_movl_reg_membase(src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1125                                         x86_64_alul_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1126                                 }
1127
1128                         } else {
1129                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1130                                         x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, d);
1131                                         x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1132
1133                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1134                                         M_INTMOVE(src->prev->regoff, d);
1135                                         x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1136
1137                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1138                                         /* workaround for reg alloc */
1139                                         if (src->regoff == iptr->dst->regoff) {
1140                                                 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1141                                                 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1142                                                 M_INTMOVE(REG_ITMP1, d);
1143
1144                                         } else {
1145                                                 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, d);
1146                                                 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, d);
1147                                         }
1148
1149                                 } else {
1150                                         /* workaround for reg alloc */
1151                                         if (src->regoff == iptr->dst->regoff) {
1152                                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1153                                                 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1154                                                 M_INTMOVE(REG_ITMP1, d);
1155
1156                                         } else {
1157                                                 M_INTMOVE(src->prev->regoff, d);
1158                                                 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, d);
1159                                         }
1160                                 }
1161                         }
1162                         break;
1163
1164                 case ICMD_ISUBCONST:  /* ..., value  ==> ..., value - constant        */
1165                                       /* val.i = constant                             */
1166
1167                         d = reg_of_var(iptr->dst, REG_NULL);
1168                         x86_64_emit_ialuconst(X86_64_SUB, src, iptr);
1169                         break;
1170
1171                 case ICMD_LSUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
1172
1173                         d = reg_of_var(iptr->dst, REG_NULL);
1174                         if (iptr->dst->flags & INMEMORY) {
1175                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1176                                         if (src->prev->regoff == iptr->dst->regoff) {
1177                                                 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1178                                                 x86_64_alu_reg_membase(X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1179
1180                                         } else {
1181                                                 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1182                                                 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1183                                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1184                                         }
1185
1186                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1187                                         M_INTMOVE(src->prev->regoff, REG_ITMP1);
1188                                         x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1189                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1190
1191                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1192                                         if (src->prev->regoff == iptr->dst->regoff) {
1193                                                 x86_64_alu_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1194
1195                                         } else {
1196                                                 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1197                                                 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1198                                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1199                                         }
1200
1201                                 } else {
1202                                         x86_64_mov_reg_membase(src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1203                                         x86_64_alu_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1204                                 }
1205
1206                         } else {
1207                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1208                                         x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, d);
1209                                         x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1210
1211                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1212                                         M_INTMOVE(src->prev->regoff, d);
1213                                         x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1214
1215                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1216                                         /* workaround for reg alloc */
1217                                         if (src->regoff == iptr->dst->regoff) {
1218                                                 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1219                                                 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1220                                                 M_INTMOVE(REG_ITMP1, d);
1221
1222                                         } else {
1223                                                 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, d);
1224                                                 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, d);
1225                                         }
1226
1227                                 } else {
1228                                         /* workaround for reg alloc */
1229                                         if (src->regoff == iptr->dst->regoff) {
1230                                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1231                                                 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1232                                                 M_INTMOVE(REG_ITMP1, d);
1233
1234                                         } else {
1235                                                 M_INTMOVE(src->prev->regoff, d);
1236                                                 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, d);
1237                                         }
1238                                 }
1239                         }
1240                         break;
1241
1242                 case ICMD_LSUBCONST:  /* ..., value  ==> ..., value - constant        */
1243                                       /* val.l = constant                             */
1244
1245                         d = reg_of_var(iptr->dst, REG_NULL);
1246                         x86_64_emit_laluconst(X86_64_SUB, src, iptr);
1247                         break;
1248
1249                 case ICMD_IMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1250
1251                         d = reg_of_var(iptr->dst, REG_NULL);
1252                         if (iptr->dst->flags & INMEMORY) {
1253                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1254                                         x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1255                                         x86_64_imull_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1256                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1257
1258                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1259                                         x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1260                                         x86_64_imull_reg_reg(src->prev->regoff, REG_ITMP1);
1261                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1262
1263                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1264                                         x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1265                                         x86_64_imull_reg_reg(src->regoff, REG_ITMP1);
1266                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1267
1268                                 } else {
1269                                         M_INTMOVE(src->prev->regoff, REG_ITMP1);
1270                                         x86_64_imull_reg_reg(src->regoff, REG_ITMP1);
1271                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1272                                 }
1273
1274                         } else {
1275                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1276                                         x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1277                                         x86_64_imull_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1278
1279                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1280                                         M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1281                                         x86_64_imull_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1282
1283                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1284                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
1285                                         x86_64_imull_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1286
1287                                 } else {
1288                                         if (src->regoff == iptr->dst->regoff) {
1289                                                 x86_64_imull_reg_reg(src->prev->regoff, iptr->dst->regoff);
1290
1291                                         } else {
1292                                                 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1293                                                 x86_64_imull_reg_reg(src->regoff, iptr->dst->regoff);
1294                                         }
1295                                 }
1296                         }
1297                         break;
1298
1299                 case ICMD_IMULCONST:  /* ..., value  ==> ..., value * constant        */
1300                                       /* val.i = constant                             */
1301
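                         /* For a register destination, a multiplication by 2 is
                            strength-reduced to an add of the register with itself;
                            otherwise the three-operand form of imul
                            (immediate * source -> destination) is used, so no extra
                            move is needed. */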
1302                         d = reg_of_var(iptr->dst, REG_NULL);
1303                         if (iptr->dst->flags & INMEMORY) {
1304                                 if (src->flags & INMEMORY) {
1305                                         x86_64_imull_imm_membase_reg(iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1306                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1307
1308                                 } else {
1309                                         x86_64_imull_imm_reg_reg(iptr->val.i, src->regoff, REG_ITMP1);
1310                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1311                                 }
1312
1313                         } else {
1314                                 if (src->flags & INMEMORY) {
1315                                         x86_64_imull_imm_membase_reg(iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1316
1317                                 } else {
1318                                         if (iptr->val.i == 2) {
1319                                                 M_INTMOVE(src->regoff, iptr->dst->regoff);
1320                                                 x86_64_alul_reg_reg(X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1321
1322                                         } else {
1323                                                 x86_64_imull_imm_reg_reg(iptr->val.i, src->regoff, iptr->dst->regoff);    /* 3 cycles */
1324                                         }
1325                                 }
1326                         }
1327                         break;
1328
1329                 case ICMD_LMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1330
1331                         d = reg_of_var(iptr->dst, REG_NULL);
1332                         if (iptr->dst->flags & INMEMORY) {
1333                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1334                                         x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1335                                         x86_64_imul_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1336                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1337
1338                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1339                                         x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1340                                         x86_64_imul_reg_reg(src->prev->regoff, REG_ITMP1);
1341                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1342
1343                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1344                                         x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1345                                         x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1346                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1347
1348                                 } else {
1349                                         x86_64_mov_reg_reg(src->prev->regoff, REG_ITMP1);
1350                                         x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1351                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1352                                 }
1353
1354                         } else {
1355                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1356                                         x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1357                                         x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1358
1359                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1360                                         M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1361                                         x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1362
1363                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1364                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
1365                                         x86_64_imul_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1366
1367                                 } else {
1368                                         if (src->regoff == iptr->dst->regoff) {
1369                                                 x86_64_imul_reg_reg(src->prev->regoff, iptr->dst->regoff);
1370
1371                                         } else {
1372                                                 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1373                                                 x86_64_imul_reg_reg(src->regoff, iptr->dst->regoff);
1374                                         }
1375                                 }
1376                         }
1377                         break;
1378
1379                 case ICMD_LMULCONST:  /* ..., value  ==> ..., value * constant        */
1380                                       /* val.l = constant                             */
1381
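                         /* imul only accepts a sign-extended 32-bit immediate, so a
                            constant that fails x86_64_is_imm32() is first materialized
                            with a 10-byte mov imm64 into a register and the
                            multiplication is then done register-to-register. */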
1382                         d = reg_of_var(iptr->dst, REG_NULL);
1383                         if (iptr->dst->flags & INMEMORY) {
1384                                 if (src->flags & INMEMORY) {
1385                                         if (x86_64_is_imm32(iptr->val.l)) {
1386                                                 x86_64_imul_imm_membase_reg(iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1387
1388                                         } else {
1389                                                 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1390                                                 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1391                                         }
1392                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1393                                         
1394                                 } else {
1395                                         if (x86_64_is_imm32(iptr->val.l)) {
1396                                                 x86_64_imul_imm_reg_reg(iptr->val.l, src->regoff, REG_ITMP1);
1397
1398                                         } else {
1399                                                 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1400                                                 x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1401                                         }
1402                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1403                                 }
1404
1405                         } else {
1406                                 if (src->flags & INMEMORY) {
1407                                         if (x86_64_is_imm32(iptr->val.l)) {
1408                                                 x86_64_imul_imm_membase_reg(iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1409
1410                                         } else {
1411                                                 x86_64_mov_imm_reg(iptr->val.l, iptr->dst->regoff);
1412                                                 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1413                                         }
1414
1415                                 } else {
1416                                         /* x * 2 is common: strength-reduce it to an add */
1417                                         if (iptr->val.l == 2) {
1418                                                 M_INTMOVE(src->regoff, iptr->dst->regoff);
1419                                                 x86_64_alul_reg_reg(X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1420
1421                                         } else {
1422                                                 if (x86_64_is_imm32(iptr->val.l)) {
1423                                                         x86_64_imul_imm_reg_reg(iptr->val.l, src->regoff, iptr->dst->regoff);    /* 4 cycles */
1424
1425                                                 } else {
1426                                                         x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1427                                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
1428                                                         x86_64_imul_reg_reg(REG_ITMP1, iptr->dst->regoff);
1429                                                 }
1430                                         }
1431                                 }
1432                         }
1433                         break;
1434
1435                 case ICMD_IDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1436
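                         /* idiv has fixed register operands: the dividend is expected
                            in %eax (sign-extended into %edx by cltd), the quotient
                            comes back in %eax and the remainder in %edx.  The compare
                            against 0x80000000 / -1 below skips the division entirely
                            for INT_MIN / -1, which would otherwise raise #DE; since
                            %eax already holds the dividend, the skipped path leaves the
                            JVM-mandated result INT_MIN in place.  The hard-coded jcc
                            displacements must match the byte sizes of the emitted
                            instructions they jump over (noted in the comments). */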
1437                         d = reg_of_var(iptr->dst, REG_NULL);
1438                         if (src->prev->flags & INMEMORY) {
1439                                 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, RAX);
1440
1441                         } else {
1442                                 M_INTMOVE(src->prev->regoff, RAX);
1443                         }
1444                         
1445                         if (src->flags & INMEMORY) {
1446                                 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1447
1448                         } else {
1449                                 M_INTMOVE(src->regoff, REG_ITMP3);
1450                         }
1451
1452                         x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, RAX);    /* check as described in jvm spec */
1453                         x86_64_jcc(X86_64_CC_NE, 4 + 6);
1454                         x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP3);      /* 4 bytes */
1455                         x86_64_jcc(X86_64_CC_E, 3 + 1 + 3);                  /* 6 bytes */
1456
1457                         x86_64_mov_reg_reg(RDX, REG_ITMP2);    /* save %rdx, because it's an argument register */
1458                         x86_64_cltd();
1459                         x86_64_idivl_reg(REG_ITMP3);
1460
1461                         if (iptr->dst->flags & INMEMORY) {
1462                                 x86_64_mov_reg_membase(RAX, REG_SP, iptr->dst->regoff * 8);
1463                                 x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1464
1465                         } else {
1466                                 M_INTMOVE(RAX, iptr->dst->regoff);
1467
1468                                 if (iptr->dst->regoff != RDX) {
1469                                         x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1470                                 }
1471                         }
1472                         break;
1473
1474                 case ICMD_IREM:       /* ..., val1, val2  ==> ..., val1 % val2        */
1475
1476                         d = reg_of_var(iptr->dst, REG_NULL);
1477                         if (src->prev->flags & INMEMORY) {
1478                                 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, RAX);
1479
1480                         } else {
1481                                 M_INTMOVE(src->prev->regoff, RAX);
1482                         }
1483                         
1484                         if (src->flags & INMEMORY) {
1485                                 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1486
1487                         } else {
1488                                 M_INTMOVE(src->regoff, REG_ITMP3);
1489                         }
1490
1491                         x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, RAX);    /* check as described in jvm spec */
1492                         x86_64_jcc(X86_64_CC_NE, 2 + 4 + 6);
1493                         x86_64_alul_reg_reg(X86_64_XOR, RDX, RDX);           /* 2 bytes */
1494                         x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP3);      /* 4 bytes */
1495                         x86_64_jcc(X86_64_CC_E, 3 + 1 + 3);                  /* 6 bytes */
1496
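                         /* The guard above mirrors ICMD_IDIV, but here the result is
                            taken from %edx (the remainder).  %edx is cleared on the
                            INT_MIN path before the conditional skip, so when the
                            divisor is -1 and the idiv is bypassed the required result
                            0 is already in place. */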
1497                         x86_64_mov_reg_reg(RDX, REG_ITMP2);    /* save %rdx, because it's an argument register */
1498                         x86_64_cltd();
1499                         x86_64_idivl_reg(REG_ITMP3);
1500
1501                         if (iptr->dst->flags & INMEMORY) {
1502                                 x86_64_mov_reg_membase(RDX, REG_SP, iptr->dst->regoff * 8);
1503                                 x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1504
1505                         } else {
1506                                 M_INTMOVE(RDX, iptr->dst->regoff);
1507
1508                                 if (iptr->dst->regoff != RDX) {
1509                                         x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1510                                 }
1511                         }
1512                         break;
1513
1514                 case ICMD_IDIVPOW2:   /* ..., value  ==> ..., value >> constant       */
1515                                       /* val.i = constant                             */
1516
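                         /* Truncating division by 2^val.i: an arithmetic shift alone
                            would round toward negative infinity, so for a negative
                            dividend the bias (2^n - 1) is added first via lea, selected
                            branchlessly with cmovle.  Example: x = -7, n = 2 gives
                            (-7 + 3) >> 2 = -1 (= -7 / 4 truncated), whereas a bare
                            -7 >> 2 would give -2. */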
1517                         var_to_reg_int(s1, src, REG_ITMP1);
1518                         d = reg_of_var(iptr->dst, REG_ITMP3);
1519                         M_INTMOVE(s1, REG_ITMP1);
1520                         x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1521                         x86_64_leal_membase_reg(REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1522                         x86_64_cmovccl_reg_reg(X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1523                         x86_64_shiftl_imm_reg(X86_64_SAR, iptr->val.i, REG_ITMP1);
1524                         x86_64_mov_reg_reg(REG_ITMP1, d);
1525                         store_reg_to_var_int(iptr->dst, d);
1526                         break;
1527
1528                 case ICMD_IREMPOW2:   /* ..., value  ==> ..., value % constant        */
1529                                       /* val.i = constant                             */
1530
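                         /* Here val.i appears to hold the mask 2^n - 1 (note the AND
                            with -1 - val.i, i.e. ~mask), not the shift count.  The
                            sequence computes x - ((x < 0 ? x + mask : x) & ~mask),
                            which is the Java remainder x % 2^n with the sign of the
                            dividend.  Example: x = -7, mask = 3 gives
                            -7 - ((-7 + 3) & ~3) = -7 - (-4) = -3. */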
1531                         var_to_reg_int(s1, src, REG_ITMP1);
1532                         d = reg_of_var(iptr->dst, REG_ITMP3);
1533                         M_INTMOVE(s1, REG_ITMP1);
1534                         x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1535                         x86_64_leal_membase_reg(REG_ITMP1, iptr->val.i, REG_ITMP2);
1536                         x86_64_cmovccl_reg_reg(X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1537                         x86_64_alul_imm_reg(X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1538                         x86_64_alul_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
1539                         x86_64_mov_reg_reg(REG_ITMP1, d);
1540                         store_reg_to_var_int(iptr->dst, d);
1541                         break;
1542
1543
1544                 case ICMD_LDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1545
1546                         d = reg_of_var(iptr->dst, REG_NULL);
1547                         if (src->prev->flags & INMEMORY) {
1548                                 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1549
1550                         } else {
1551                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1552                         }
1553                         
1554                         if (src->flags & INMEMORY) {
1555                                 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1556
1557                         } else {
1558                                 M_INTMOVE(src->regoff, REG_ITMP3);
1559                         }
1560
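                         /* 64-bit variant of ICMD_IDIV.  0x8000000000000000 does not
                            fit a sign-extended 32-bit immediate, so the sentinel is
                            loaded into REG_ITMP2 and compared register-to-register;
                            cqto/idiv then operate on the full quadword. */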
1561                         x86_64_mov_imm_reg(0x8000000000000000LL, REG_ITMP2);    /* check as described in jvm spec */
1562                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
1563                         x86_64_jcc(X86_64_CC_NE, 4 + 6);
1564                         x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP3);          /* 4 bytes */
1565                         x86_64_jcc(X86_64_CC_E, 3 + 2 + 3);                     /* 6 bytes */
1566
1567                         x86_64_mov_reg_reg(RDX, REG_ITMP2);    /* save %rdx, because it's an argument register */
1568                         x86_64_cqto();
1569                         x86_64_idiv_reg(REG_ITMP3);
1570
1571                         if (iptr->dst->flags & INMEMORY) {
1572                                 x86_64_mov_reg_membase(RAX, REG_SP, iptr->dst->regoff * 8);
1573                                 x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1574
1575                         } else {
1576                                 M_INTMOVE(RAX, iptr->dst->regoff);
1577
1578                                 if (iptr->dst->regoff != RDX) {
1579                                         x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1580                                 }
1581                         }
1582                         break;
1583
1584                 case ICMD_LREM:       /* ..., val1, val2  ==> ..., val1 % val2        */
1585
1586                         d = reg_of_var(iptr->dst, REG_NULL);
1587                         if (src->prev->flags & INMEMORY) {
1588                                 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1589
1590                         } else {
1591                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1592                         }
1593                         
1594                         if (src->flags & INMEMORY) {
1595                                 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1596
1597                         } else {
1598                                 M_INTMOVE(src->regoff, REG_ITMP3);
1599                         }
1600
1601                         x86_64_mov_imm_reg(0x8000000000000000LL, REG_ITMP2);    /* check as described in jvm spec */
1602                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
1603                         x86_64_jcc(X86_64_CC_NE, 2 + 4 + 6);
1604                         x86_64_alul_reg_reg(X86_64_XOR, RDX, RDX);              /* 2 bytes */
1605                         x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP3);          /* 4 bytes */
1606                         x86_64_jcc(X86_64_CC_E, 3 + 2 + 3);                     /* 6 bytes */
1607
1608                         x86_64_mov_reg_reg(RDX, REG_ITMP2);    /* save %rdx, because it's an argument register */
1609                         x86_64_cqto();
1610                         x86_64_idiv_reg(REG_ITMP3);
1611
1612                         if (iptr->dst->flags & INMEMORY) {
1613                                 x86_64_mov_reg_membase(RDX, REG_SP, iptr->dst->regoff * 8);
1614                                 x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1615
1616                         } else {
1617                                 M_INTMOVE(RDX, iptr->dst->regoff);
1618
1619                                 if (iptr->dst->regoff != RDX) {
1620                                         x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1621                                 }
1622                         }
1623                         break;
1624
1625                 case ICMD_LDIVPOW2:   /* ..., value  ==> ..., value >> constant       */
1626                                       /* val.i = constant                             */
1627
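                         /* This case and ICMD_LREMPOW2 below mirror the 32-bit
                            ICMD_IDIVPOW2 / ICMD_IREMPOW2 sequences, using the quadword
                            lea/cmov/shift/alu forms; the bias and mask tricks are the
                            same. */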
1628                         var_to_reg_int(s1, src, REG_ITMP1);
1629                         d = reg_of_var(iptr->dst, REG_ITMP3);
1630                         M_INTMOVE(s1, REG_ITMP1);
1631                         x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1632                         x86_64_lea_membase_reg(REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1633                         x86_64_cmovcc_reg_reg(X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1634                         x86_64_shift_imm_reg(X86_64_SAR, iptr->val.i, REG_ITMP1);
1635                         x86_64_mov_reg_reg(REG_ITMP1, d);
1636                         store_reg_to_var_int(iptr->dst, d);
1637                         break;
1638
1639                 case ICMD_LREMPOW2:   /* ..., value  ==> ..., value % constant        */
1640                                       /* val.l = constant                             */
1641
1642                         var_to_reg_int(s1, src, REG_ITMP1);
1643                         d = reg_of_var(iptr->dst, REG_ITMP3);
1644                         M_INTMOVE(s1, REG_ITMP1);
1645                         x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1646                         x86_64_lea_membase_reg(REG_ITMP1, iptr->val.i, REG_ITMP2);
1647                         x86_64_cmovcc_reg_reg(X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1648                         x86_64_alu_imm_reg(X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1649                         x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
1650                         x86_64_mov_reg_reg(REG_ITMP1, d);
1651                         store_reg_to_var_int(iptr->dst, d);
1652                         break;
1653
1654                 case ICMD_ISHL:       /* ..., val1, val2  ==> ..., val1 << val2       */
1655
1656                         d = reg_of_var(iptr->dst, REG_NULL);
1657                         x86_64_emit_ishift(X86_64_SHL, src, iptr);
1658                         break;
1659
1660                 case ICMD_ISHLCONST:  /* ..., value  ==> ..., value << constant       */
1661                                       /* val.i = constant                             */
1662
1663                         d = reg_of_var(iptr->dst, REG_NULL);
1664                         x86_64_emit_ishiftconst(X86_64_SHL, src, iptr);
1665                         break;
1666
1667                 case ICMD_ISHR:       /* ..., val1, val2  ==> ..., val1 >> val2       */
1668
1669                         d = reg_of_var(iptr->dst, REG_NULL);
1670                         x86_64_emit_ishift(X86_64_SAR, src, iptr);
1671                         break;
1672
1673                 case ICMD_ISHRCONST:  /* ..., value  ==> ..., value >> constant       */
1674                                       /* val.i = constant                             */
1675
1676                         d = reg_of_var(iptr->dst, REG_NULL);
1677                         x86_64_emit_ishiftconst(X86_64_SAR, src, iptr);
1678                         break;
1679
1680                 case ICMD_IUSHR:      /* ..., val1, val2  ==> ..., val1 >>> val2      */
1681
1682                         d = reg_of_var(iptr->dst, REG_NULL);
1683                         x86_64_emit_ishift(X86_64_SHR, src, iptr);
1684                         break;
1685
1686                 case ICMD_IUSHRCONST: /* ..., value  ==> ..., value >>> constant      */
1687                                       /* val.i = constant                             */
1688
1689                         d = reg_of_var(iptr->dst, REG_NULL);
1690                         x86_64_emit_ishiftconst(X86_64_SHR, src, iptr);
1691                         break;
1692
1693                 case ICMD_LSHL:       /* ..., val1, val2  ==> ..., val1 << val2       */
1694
1695                         d = reg_of_var(iptr->dst, REG_NULL);
1696                         x86_64_emit_lshift(X86_64_SHL, src, iptr);
1697                         break;
1698
1699                 case ICMD_LSHLCONST:  /* ..., value  ==> ..., value << constant       */
1700                                       /* val.i = constant                             */
1701
1702                         d = reg_of_var(iptr->dst, REG_NULL);
1703                         x86_64_emit_lshiftconst(X86_64_SHL, src, iptr);
1704                         break;
1705
1706                 case ICMD_LSHR:       /* ..., val1, val2  ==> ..., val1 >> val2       */
1707
1708                         d = reg_of_var(iptr->dst, REG_NULL);
1709                         x86_64_emit_lshift(X86_64_SAR, src, iptr);
1710                         break;
1711
1712                 case ICMD_LSHRCONST:  /* ..., value  ==> ..., value >> constant       */
1713                                       /* val.i = constant                             */
1714
1715                         d = reg_of_var(iptr->dst, REG_NULL);
1716                         x86_64_emit_lshiftconst(X86_64_SAR, src, iptr);
1717                         break;
1718
1719                 case ICMD_LUSHR:      /* ..., val1, val2  ==> ..., val1 >>> val2      */
1720
1721                         d = reg_of_var(iptr->dst, REG_NULL);
1722                         x86_64_emit_lshift(X86_64_SHR, src, iptr);
1723                         break;
1724
1725                 case ICMD_LUSHRCONST: /* ..., value  ==> ..., value >>> constant      */
1726                                       /* val.l = constant                             */
1727
1728                         d = reg_of_var(iptr->dst, REG_NULL);
1729                         x86_64_emit_lshiftconst(X86_64_SHR, src, iptr);
1730                         break;
1731
1732                 case ICMD_IAND:       /* ..., val1, val2  ==> ..., val1 & val2        */
1733
1734                         d = reg_of_var(iptr->dst, REG_NULL);
1735                         x86_64_emit_ialu(X86_64_AND, src, iptr);
1736                         break;
1737
1738                 case ICMD_IANDCONST:  /* ..., value  ==> ..., value & constant        */
1739                                       /* val.i = constant                             */
1740
1741                         d = reg_of_var(iptr->dst, REG_NULL);
1742                         x86_64_emit_ialuconst(X86_64_AND, src, iptr);
1743                         break;
1744
1745                 case ICMD_LAND:       /* ..., val1, val2  ==> ..., val1 & val2        */
1746
1747                         d = reg_of_var(iptr->dst, REG_NULL);
1748                         x86_64_emit_lalu(X86_64_AND, src, iptr);
1749                         break;
1750
1751                 case ICMD_LANDCONST:  /* ..., value  ==> ..., value & constant        */
1752                                       /* val.l = constant                             */
1753
1754                         d = reg_of_var(iptr->dst, REG_NULL);
1755                         x86_64_emit_laluconst(X86_64_AND, src, iptr);
1756                         break;
1757
1758                 case ICMD_IOR:        /* ..., val1, val2  ==> ..., val1 | val2        */
1759
1760                         d = reg_of_var(iptr->dst, REG_NULL);
1761                         x86_64_emit_ialu(X86_64_OR, src, iptr);
1762                         break;
1763
1764                 case ICMD_IORCONST:   /* ..., value  ==> ..., value | constant        */
1765                                       /* val.i = constant                             */
1766
1767                         d = reg_of_var(iptr->dst, REG_NULL);
1768                         x86_64_emit_ialuconst(X86_64_OR, src, iptr);
1769                         break;
1770
1771                 case ICMD_LOR:        /* ..., val1, val2  ==> ..., val1 | val2        */
1772
1773                         d = reg_of_var(iptr->dst, REG_NULL);
1774                         x86_64_emit_lalu(X86_64_OR, src, iptr);
1775                         break;
1776
1777                 case ICMD_LORCONST:   /* ..., value  ==> ..., value | constant        */
1778                                       /* val.l = constant                             */
1779
1780                         d = reg_of_var(iptr->dst, REG_NULL);
1781                         x86_64_emit_laluconst(X86_64_OR, src, iptr);
1782                         break;
1783
1784                 case ICMD_IXOR:       /* ..., val1, val2  ==> ..., val1 ^ val2        */
1785
1786                         d = reg_of_var(iptr->dst, REG_NULL);
1787                         x86_64_emit_ialu(X86_64_XOR, src, iptr);
1788                         break;
1789
1790                 case ICMD_IXORCONST:  /* ..., value  ==> ..., value ^ constant        */
1791                                       /* val.i = constant                             */
1792
1793                         d = reg_of_var(iptr->dst, REG_NULL);
1794                         x86_64_emit_ialuconst(X86_64_XOR, src, iptr);
1795                         break;
1796
1797                 case ICMD_LXOR:       /* ..., val1, val2  ==> ..., val1 ^ val2        */
1798
1799                         d = reg_of_var(iptr->dst, REG_NULL);
1800                         x86_64_emit_lalu(X86_64_XOR, src, iptr);
1801                         break;
1802
1803                 case ICMD_LXORCONST:  /* ..., value  ==> ..., value ^ constant        */
1804                                       /* val.l = constant                             */
1805
1806                         d = reg_of_var(iptr->dst, REG_NULL);
1807                         x86_64_emit_laluconst(X86_64_XOR, src, iptr);
1808                         break;
1809
1810
1811                 case ICMD_IINC:       /* ..., value  ==> ..., value + constant        */
1812                                       /* op1 = variable, val.i = constant             */
1813
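                         /* The local is updated in place: +1 / -1 use the shorter
                            incl / decl encodings, anything else an addl with an
                            immediate.  For a spilled local the stack slot at
                            regoff * 8 off REG_SP is modified directly, so no
                            load/store pair is needed. */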
1814                         var = &(locals[iptr->op1][TYPE_INT]);
1815                         d = var->regoff;
1816                         if (var->flags & INMEMORY) {
1817                                 if (iptr->val.i == 1) {
1818                                         x86_64_incl_membase(REG_SP, d * 8);
1819  
1820                                 } else if (iptr->val.i == -1) {
1821                                         x86_64_decl_membase(REG_SP, d * 8);
1822
1823                                 } else {
1824                                         x86_64_alul_imm_membase(X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1825                                 }
1826
1827                         } else {
1828                                 if (iptr->val.i == 1) {
1829                                         x86_64_incl_reg(d);
1830  
1831                                 } else if (iptr->val.i == -1) {
1832                                         x86_64_decl_reg(d);
1833
1834                                 } else {
1835                                         x86_64_alul_imm_reg(X86_64_ADD, iptr->val.i, d);
1836                                 }
1837                         }
1838                         break;
1839
1840
1841                 /* floating operations ************************************************/
1842
1843                 case ICMD_FNEG:       /* ..., value  ==> ..., - value                 */
1844
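                         /* FNEG/DNEG flip the sign bit by XORing with a mask
                            (0x80000000 resp. 0x8000000000000000) that is placed in the
                            data segment and loaded RIP-relative.  The displacement
                            -(((s8) mcodeptr + 9) - (s8) mcodebase) + a equals
                            (mcodebase + a) - (mcodeptr + 9), i.e. the distance from the
                            end of the load instruction (presumably 9 bytes long) back
                            to the dseg entry a, which lies at a negative offset below
                            the method start. */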
1845                         var_to_reg_flt(s1, src, REG_FTMP1);
1846                         d = reg_of_var(iptr->dst, REG_FTMP3);
1847                         a = dseg_adds4(0x80000000);
1848                         M_FLTMOVE(s1, d);
1849                         x86_64_movss_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, REG_FTMP2);
1850                         x86_64_xorps_reg_reg(REG_FTMP2, d);
1851                         store_reg_to_var_flt(iptr->dst, d);
1852                         break;
1853
1854                 case ICMD_DNEG:       /* ..., value  ==> ..., - value                 */
1855
1856                         var_to_reg_flt(s1, src, REG_FTMP1);
1857                         d = reg_of_var(iptr->dst, REG_FTMP3);
1858                         a = dseg_adds8(0x8000000000000000);
1859                         M_FLTMOVE(s1, d);
1860                         x86_64_movd_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, REG_FTMP2);
1861                         x86_64_xorpd_reg_reg(REG_FTMP2, d);
1862                         store_reg_to_var_flt(iptr->dst, d);
1863                         break;
1864
1865                 case ICMD_FADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
1866
1867                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1868                         var_to_reg_flt(s2, src, REG_FTMP2);
1869                         d = reg_of_var(iptr->dst, REG_FTMP3);
1870                         if (s1 == d) {
1871                                 x86_64_addss_reg_reg(s2, d);
1872                         } else if (s2 == d) {
1873                                 x86_64_addss_reg_reg(s1, d);
1874                         } else {
1875                                 M_FLTMOVE(s1, d);
1876                                 x86_64_addss_reg_reg(s2, d);
1877                         }
1878                         store_reg_to_var_flt(iptr->dst, d);
1879                         break;
1880
1881                 case ICMD_DADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
1882
1883                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1884                         var_to_reg_flt(s2, src, REG_FTMP2);
1885                         d = reg_of_var(iptr->dst, REG_FTMP3);
1886                         if (s1 == d) {
1887                                 x86_64_addsd_reg_reg(s2, d);
1888                         } else if (s2 == d) {
1889                                 x86_64_addsd_reg_reg(s1, d);
1890                         } else {
1891                                 M_FLTMOVE(s1, d);
1892                                 x86_64_addsd_reg_reg(s2, d);
1893                         }
1894                         store_reg_to_var_flt(iptr->dst, d);
1895                         break;
1896
1897                 case ICMD_FSUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
1898
1899                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1900                         var_to_reg_flt(s2, src, REG_FTMP2);
1901                         d = reg_of_var(iptr->dst, REG_FTMP3);
1902                         if (s2 == d) {
1903                                 M_FLTMOVE(s2, REG_FTMP2);
1904                                 s2 = REG_FTMP2;
1905                         }
1906                         M_FLTMOVE(s1, d);
1907                         x86_64_subss_reg_reg(s2, d);
1908                         store_reg_to_var_flt(iptr->dst, d);
1909                         break;
1910
1911                 case ICMD_DSUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
1912
1913                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1914                         var_to_reg_flt(s2, src, REG_FTMP2);
1915                         d = reg_of_var(iptr->dst, REG_FTMP3);
1916                         if (s2 == d) {
1917                                 M_FLTMOVE(s2, REG_FTMP2);
1918                                 s2 = REG_FTMP2;
1919                         }
1920                         M_FLTMOVE(s1, d);
1921                         x86_64_subsd_reg_reg(s2, d);
1922                         store_reg_to_var_flt(iptr->dst, d);
1923                         break;
1924
1925                 case ICMD_FMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1926
1927                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1928                         var_to_reg_flt(s2, src, REG_FTMP2);
1929                         d = reg_of_var(iptr->dst, REG_FTMP3);
1930                         if (s1 == d) {
1931                                 x86_64_mulss_reg_reg(s2, d);
1932                         } else if (s2 == d) {
1933                                 x86_64_mulss_reg_reg(s1, d);
1934                         } else {
1935                                 M_FLTMOVE(s1, d);
1936                                 x86_64_mulss_reg_reg(s2, d);
1937                         }
1938                         store_reg_to_var_flt(iptr->dst, d);
1939                         break;
1940
1941                 case ICMD_DMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1942
1943                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1944                         var_to_reg_flt(s2, src, REG_FTMP2);
1945                         d = reg_of_var(iptr->dst, REG_FTMP3);
1946                         if (s1 == d) {
1947                                 x86_64_mulsd_reg_reg(s2, d);
1948                         } else if (s2 == d) {
1949                                 x86_64_mulsd_reg_reg(s1, d);
1950                         } else {
1951                                 M_FLTMOVE(s1, d);
1952                                 x86_64_mulsd_reg_reg(s2, d);
1953                         }
1954                         store_reg_to_var_flt(iptr->dst, d);
1955                         break;
1956
1957                 case ICMD_FDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1958
1959                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1960                         var_to_reg_flt(s2, src, REG_FTMP2);
1961                         d = reg_of_var(iptr->dst, REG_FTMP3);
1962                         if (s2 == d) {
1963                                 M_FLTMOVE(s2, REG_FTMP2);
1964                                 s2 = REG_FTMP2;
1965                         }
1966                         M_FLTMOVE(s1, d);
1967                         x86_64_divss_reg_reg(s2, d);
1968                         store_reg_to_var_flt(iptr->dst, d);
1969                         break;
1970
1971                 case ICMD_DDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1972
1973                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1974                         var_to_reg_flt(s2, src, REG_FTMP2);
1975                         d = reg_of_var(iptr->dst, REG_FTMP3);
1976                         if (s2 == d) {
1977                                 M_FLTMOVE(s2, REG_FTMP2);
1978                                 s2 = REG_FTMP2;
1979                         }
1980                         M_FLTMOVE(s1, d);
1981                         x86_64_divsd_reg_reg(s2, d);
1982                         store_reg_to_var_flt(iptr->dst, d);
1983                         break;
1984
1985                 case ICMD_I2F:       /* ..., value  ==> ..., (float) value            */
1986
1987                         var_to_reg_int(s1, src, REG_ITMP1);
1988                         d = reg_of_var(iptr->dst, REG_FTMP1);
1989                         x86_64_cvtsi2ss_reg_reg(s1, d);
1990                         store_reg_to_var_flt(iptr->dst, d);
1991                         break;
1992
1993                 case ICMD_I2D:       /* ..., value  ==> ..., (double) value           */
1994
1995                         var_to_reg_int(s1, src, REG_ITMP1);
1996                         d = reg_of_var(iptr->dst, REG_FTMP1);
1997                         x86_64_cvtsi2sd_reg_reg(s1, d);
1998                         store_reg_to_var_flt(iptr->dst, d);
1999                         break;
2000
2001                 case ICMD_L2F:       /* ..., value  ==> ..., (float) value            */
2002
2003                         var_to_reg_int(s1, src, REG_ITMP1);
2004                         d = reg_of_var(iptr->dst, REG_FTMP1);
2005                         x86_64_cvtsi2ssq_reg_reg(s1, d);
2006                         store_reg_to_var_flt(iptr->dst, d);
2007                         break;
2008                         
2009                 case ICMD_L2D:       /* ..., value  ==> ..., (double) value           */
2010
2011                         var_to_reg_int(s1, src, REG_ITMP1);
2012                         d = reg_of_var(iptr->dst, REG_FTMP1);
2013                         x86_64_cvtsi2sdq_reg_reg(s1, d);
2014                         store_reg_to_var_flt(iptr->dst, d);
2015                         break;
2016                         
2017                 case ICMD_F2I:       /* ..., value  ==> ..., (int) value              */
2018
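                         /* cvttss2si (and the sd/q variants in the cases below) return
                            the "integer indefinite" value 0x80000000 resp.
                            0x8000000000000000 when the source is NaN or out of range.
                            The compare detects that sentinel and falls back to the
                            asm_builtin_* helper, which produces the exact Java
                            saturation semantics; 'a' is the byte length of that
                            fallback code, so the jcc skips it when the fast conversion
                            already succeeded. */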
2019                         var_to_reg_flt(s1, src, REG_FTMP1);
2020                         d = reg_of_var(iptr->dst, REG_ITMP1);
2021                         x86_64_cvttss2si_reg_reg(s1, d);
2022                         x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, d);    /* corner cases */
2023                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2024                         x86_64_jcc(X86_64_CC_NE, a);
2025                         M_FLTMOVE(s1, REG_FTMP1);
2026                         x86_64_mov_imm_reg((s8) asm_builtin_f2i, REG_ITMP2);
2027                         x86_64_call_reg(REG_ITMP2);
2028                         M_INTMOVE(REG_RESULT, d);
2029                         store_reg_to_var_int(iptr->dst, d);
2030                         break;
2031
2032                 case ICMD_D2I:       /* ..., value  ==> ..., (int) value              */
2033
2034                         var_to_reg_flt(s1, src, REG_FTMP1);
2035                         d = reg_of_var(iptr->dst, REG_ITMP1);
2036                         x86_64_cvttsd2si_reg_reg(s1, d);
2037                         x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, d);    /* corner cases */
2038                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2039                         x86_64_jcc(X86_64_CC_NE, a);
2040                         M_FLTMOVE(s1, REG_FTMP1);
2041                         x86_64_mov_imm_reg((s8) asm_builtin_d2i, REG_ITMP2);
2042                         x86_64_call_reg(REG_ITMP2);
2043                         M_INTMOVE(REG_RESULT, d);
2044                         store_reg_to_var_int(iptr->dst, d);
2045                         break;
2046
2047                 case ICMD_F2L:       /* ..., value  ==> ..., (long) value             */
2048
2049                         var_to_reg_flt(s1, src, REG_FTMP1);
2050                         d = reg_of_var(iptr->dst, REG_ITMP1);
2051                         x86_64_cvttss2siq_reg_reg(s1, d);
2052                         x86_64_mov_imm_reg(0x8000000000000000, REG_ITMP2);
2053                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, d);     /* corner cases */
2054                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2055                         x86_64_jcc(X86_64_CC_NE, a);
2056                         M_FLTMOVE(s1, REG_FTMP1);
2057                         x86_64_mov_imm_reg((s8) asm_builtin_f2l, REG_ITMP2);
2058                         x86_64_call_reg(REG_ITMP2);
2059                         M_INTMOVE(REG_RESULT, d);
2060                         store_reg_to_var_int(iptr->dst, d);
2061                         break;
2062
2063                 case ICMD_D2L:       /* ..., value  ==> ..., (long) value             */
2064
2065                         var_to_reg_flt(s1, src, REG_FTMP1);
2066                         d = reg_of_var(iptr->dst, REG_ITMP1);
2067                         x86_64_cvttsd2siq_reg_reg(s1, d);
2068                         x86_64_mov_imm_reg(0x8000000000000000, REG_ITMP2);
2069                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, d);     /* corner cases */
2070                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2071                         x86_64_jcc(X86_64_CC_NE, a);
2072                         M_FLTMOVE(s1, REG_FTMP1);
2073                         x86_64_mov_imm_reg((s8) asm_builtin_d2l, REG_ITMP2);
2074                         x86_64_call_reg(REG_ITMP2);
2075                         M_INTMOVE(REG_RESULT, d);
2076                         store_reg_to_var_int(iptr->dst, d);
2077                         break;
2078
2079                 case ICMD_F2D:       /* ..., value  ==> ..., (double) value           */
2080
2081                         var_to_reg_flt(s1, src, REG_FTMP1);
2082                         d = reg_of_var(iptr->dst, REG_FTMP3);
2083                         x86_64_cvtss2sd_reg_reg(s1, d);
2084                         store_reg_to_var_flt(iptr->dst, d);
2085                         break;
2086
2087                 case ICMD_D2F:       /* ..., value  ==> ..., (float) value            */
2088
2089                         var_to_reg_flt(s1, src, REG_FTMP1);
2090                         d = reg_of_var(iptr->dst, REG_FTMP3);
2091                         x86_64_cvtsd2ss_reg_reg(s1, d);
2092                         store_reg_to_var_flt(iptr->dst, d);
2093                         break;
2094
2095                 case ICMD_FCMPL:      /* ..., val1, val2  ==> ..., val1 fcmpl val2    */
2096                                       /* val1 > val2 => 1, == => 0, val1 < val2 => -1 */
2097
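                         /* Branchless compare: d is zeroed and the candidates 1 / -1
                            are kept in REG_ITMP1 / REG_ITMP2; ucomiss sets the flags
                            and the cmovs pick the result, so that, as the fcmp/dcmp
                            bytecodes require, val1 > val2 yields 1 and val1 < val2
                            yields -1.  The parity cmov handles the unordered (NaN)
                            case, which the four variants below bias toward -1 (..CMPL)
                            or +1 (..CMPG). */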
2098                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
2099                         var_to_reg_flt(s2, src, REG_FTMP2);
2100                         d = reg_of_var(iptr->dst, REG_ITMP3);
2101                         x86_64_alu_reg_reg(X86_64_XOR, d, d);
2102                         x86_64_mov_imm_reg(1, REG_ITMP1);
2103                         x86_64_mov_imm_reg(-1, REG_ITMP2);
2104                         x86_64_ucomiss_reg_reg(s1, s2);
2105                         x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2106                         x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2107                         x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP2, d);    /* unordered (NaN) => -1 */
2108                         store_reg_to_var_int(iptr->dst, d);
2109                         break;
2110
2111                 case ICMD_FCMPG:      /* ..., val1, val2  ==> ..., val1 fcmpg val2    */
2112                                       /* val1 > val2 => 1, == => 0, val1 < val2 => -1 */
2113
2114                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
2115                         var_to_reg_flt(s2, src, REG_FTMP2);
2116                         d = reg_of_var(iptr->dst, REG_ITMP3);
2117                         x86_64_alu_reg_reg(X86_64_XOR, d, d);
2118                         x86_64_mov_imm_reg(1, REG_ITMP1);
2119                         x86_64_mov_imm_reg(-1, REG_ITMP2);
2120                         x86_64_ucomiss_reg_reg(s1, s2);
2121                         x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2122                         x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2123                         x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP1, d);    /* unordered (NaN) => 1 */
2124                         store_reg_to_var_int(iptr->dst, d);
2125                         break;
2126
2127                 case ICMD_DCMPL:      /* ..., val1, val2  ==> ..., val1 dcmpl val2    */
2128                                       /* val1 > val2 => 1, == => 0, val1 < val2 => -1 */
2129
2130                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
2131                         var_to_reg_flt(s2, src, REG_FTMP2);
2132                         d = reg_of_var(iptr->dst, REG_ITMP3);
2133                         x86_64_alu_reg_reg(X86_64_XOR, d, d);
2134                         x86_64_mov_imm_reg(1, REG_ITMP1);
2135                         x86_64_mov_imm_reg(-1, REG_ITMP2);
2136                         x86_64_ucomisd_reg_reg(s1, s2);
2137                         x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2138                         x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2139                         x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP2, d);    /* unordered (NaN) => -1 */
2140                         store_reg_to_var_int(iptr->dst, d);
2141                         break;
2142
2143                 case ICMD_DCMPG:      /* ..., val1, val2  ==> ..., val1 dcmpg val2    */
2144                                       /* val1 > val2 => 1, == => 0, val1 < val2 => -1 */
2145
2146                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
2147                         var_to_reg_flt(s2, src, REG_FTMP2);
2148                         d = reg_of_var(iptr->dst, REG_ITMP3);
2149                         x86_64_alu_reg_reg(X86_64_XOR, d, d);
2150                         x86_64_mov_imm_reg(1, REG_ITMP1);
2151                         x86_64_mov_imm_reg(-1, REG_ITMP2);
2152                         x86_64_ucomisd_reg_reg(s1, s2);
2153                         x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2154                         x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2155                         x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP1, d);    /* unordered (NaN) => 1 */
2156                         store_reg_to_var_int(iptr->dst, d);
2157                         break;
2158
2159
2160                 /* memory operations **************************************************/
2161
2162 #define gen_bound_check \
2163     if (checkbounds) { \
2164         x86_64_alul_membase_reg(X86_64_CMP, s1, OFFSET(java_arrayheader, size), s2); \
2165         x86_64_jcc(X86_64_CC_AE, 0); \
2166         codegen_addxboundrefs(mcodeptr, s2); \
2167     }
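/* gen_bound_check compares the index register against the array's size field;
   CC_AE (unsigned >=) catches both index >= length and negative indices seen as
   huge unsigned values.  The jcc is emitted with displacement 0 and
   codegen_addxboundrefs records its position, so the branch can later be patched
   to point at the out-of-bounds exception stub.  checkbounds allows the checks
   to be switched off. */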
2168
2169                 case ICMD_ARRAYLENGTH: /* ..., arrayref  ==> ..., (int) length        */
2170
2171                         var_to_reg_int(s1, src, REG_ITMP1);
2172                         d = reg_of_var(iptr->dst, REG_ITMP3);
2173                         gen_nullptr_check(s1);
2174                         x86_64_movl_membase_reg(s1, OFFSET(java_arrayheader, size), d);
2175                         store_reg_to_var_int(iptr->dst, d);
2176                         break;
2177
2178                 case ICMD_AALOAD:     /* ..., arrayref, index  ==> ..., value         */
2179
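                         /* The array loads use scaled-index addressing:
                            base + (index << scale) + OFFSET(java_xxxarray, data[0]),
                            with scale 3 for 8-byte elements (refs, long, double) and
                            scale 2 for 4-byte ones (int, float).  The null and bounds
                            checks are only emitted when iptr->op1 == 0, apparently the
                            case where earlier analysis could not remove them. */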
2180                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2181                         var_to_reg_int(s2, src, REG_ITMP2);
2182                         d = reg_of_var(iptr->dst, REG_ITMP3);
2183                         if (iptr->op1 == 0) {
2184                                 gen_nullptr_check(s1);
2185                                 gen_bound_check;
2186                         }
2187                         x86_64_mov_memindex_reg(OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2188                         store_reg_to_var_int(iptr->dst, d);
2189                         break;
2190
2191                 case ICMD_LALOAD:     /* ..., arrayref, index  ==> ..., value         */
2192
2193                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2194                         var_to_reg_int(s2, src, REG_ITMP2);
2195                         d = reg_of_var(iptr->dst, REG_ITMP3);
2196                         if (iptr->op1 == 0) {
2197                                 gen_nullptr_check(s1);
2198                                 gen_bound_check;
2199                         }
2200                         x86_64_mov_memindex_reg(OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2201                         store_reg_to_var_int(iptr->dst, d);
2202                         break;
2203
2204                 case ICMD_IALOAD:     /* ..., arrayref, index  ==> ..., value         */
2205
2206                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2207                         var_to_reg_int(s2, src, REG_ITMP2);
2208                         d = reg_of_var(iptr->dst, REG_ITMP3);
2209                         if (iptr->op1 == 0) {
2210                                 gen_nullptr_check(s1);
2211                                 gen_bound_check;
2212                         }
2213                         x86_64_movl_memindex_reg(OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2214                         store_reg_to_var_int(iptr->dst, d);
2215                         break;
2216
2217                 case ICMD_FALOAD:     /* ..., arrayref, index  ==> ..., value         */
2218
2219                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2220                         var_to_reg_int(s2, src, REG_ITMP2);
2221                         d = reg_of_var(iptr->dst, REG_FTMP3);
2222                         if (iptr->op1 == 0) {
2223                                 gen_nullptr_check(s1);
2224                                 gen_bound_check;
2225                         }
2226                         x86_64_movss_memindex_reg(OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2227                         store_reg_to_var_flt(iptr->dst, d);
2228                         break;
2229
2230                 case ICMD_DALOAD:     /* ..., arrayref, index  ==> ..., value         */
2231
2232                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2233                         var_to_reg_int(s2, src, REG_ITMP2);
2234                         d = reg_of_var(iptr->dst, REG_FTMP3);
2235                         if (iptr->op1 == 0) {
2236                                 gen_nullptr_check(s1);
2237                                 gen_bound_check;
2238                         }
2239                         x86_64_movsd_memindex_reg(OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2240                         store_reg_to_var_flt(iptr->dst, d);
2241                         break;
2242
2243                 case ICMD_CALOAD:     /* ..., arrayref, index  ==> ..., value         */
2244
2245                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2246                         var_to_reg_int(s2, src, REG_ITMP2);
2247                         d = reg_of_var(iptr->dst, REG_ITMP3);
2248                         if (iptr->op1 == 0) {
2249                                 gen_nullptr_check(s1);
2250                                 gen_bound_check;
2251                         }
2252                         x86_64_movzwq_memindex_reg(OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2253                         store_reg_to_var_int(iptr->dst, d);
2254                         break;                  
2255
2256                 case ICMD_SALOAD:     /* ..., arrayref, index  ==> ..., value         */
2257
2258                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2259                         var_to_reg_int(s2, src, REG_ITMP2);
2260                         d = reg_of_var(iptr->dst, REG_ITMP3);
2261                         if (iptr->op1 == 0) {
2262                                 gen_nullptr_check(s1);
2263                                 gen_bound_check;
2264                         }
2265                         x86_64_movswq_memindex_reg(OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2266                         store_reg_to_var_int(iptr->dst, d);
2267                         break;
2268
2269                 case ICMD_BALOAD:     /* ..., arrayref, index  ==> ..., value         */
2270
2271                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2272                         var_to_reg_int(s2, src, REG_ITMP2);
2273                         d = reg_of_var(iptr->dst, REG_ITMP3);
2274                         if (iptr->op1 == 0) {
2275                                 gen_nullptr_check(s1);
2276                                 gen_bound_check;
2277                         }
2278                         x86_64_movsbq_memindex_reg(OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2279                         store_reg_to_var_int(iptr->dst, d);
2280                         break;
2281
2282
2283                 case ICMD_AASTORE:    /* ..., arrayref, index, value  ==> ...         */
2284
2285                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2286                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2287                         if (iptr->op1 == 0) {
2288                                 gen_nullptr_check(s1);
2289                                 gen_bound_check;
2290                         }
2291                         var_to_reg_int(s3, src, REG_ITMP3);
2292                         x86_64_mov_reg_memindex(s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2293                         break;
2294
2295                 case ICMD_LASTORE:    /* ..., arrayref, index, value  ==> ...         */
2296
2297                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2298                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2299                         if (iptr->op1 == 0) {
2300                                 gen_nullptr_check(s1);
2301                                 gen_bound_check;
2302                         }
2303                         var_to_reg_int(s3, src, REG_ITMP3);
2304                         x86_64_mov_reg_memindex(s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2305                         break;
2306
2307                 case ICMD_IASTORE:    /* ..., arrayref, index, value  ==> ...         */
2308
2309                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2310                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2311                         if (iptr->op1 == 0) {
2312                                 gen_nullptr_check(s1);
2313                                 gen_bound_check;
2314                         }
2315                         var_to_reg_int(s3, src, REG_ITMP3);
2316                         x86_64_movl_reg_memindex(s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2317                         break;
2318
2319                 case ICMD_FASTORE:    /* ..., arrayref, index, value  ==> ...         */
2320
2321                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2322                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2323                         if (iptr->op1 == 0) {
2324                                 gen_nullptr_check(s1);
2325                                 gen_bound_check;
2326                         }
2327                         var_to_reg_flt(s3, src, REG_FTMP3);
2328                         x86_64_movss_reg_memindex(s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2329                         break;
2330
2331                 case ICMD_DASTORE:    /* ..., arrayref, index, value  ==> ...         */
2332
2333                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2334                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2335                         if (iptr->op1 == 0) {
2336                                 gen_nullptr_check(s1);
2337                                 gen_bound_check;
2338                         }
2339                         var_to_reg_flt(s3, src, REG_FTMP3);
2340                         x86_64_movsd_reg_memindex(s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2341                         break;
2342
2343                 case ICMD_CASTORE:    /* ..., arrayref, index, value  ==> ...         */
2344
2345                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2346                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2347                         if (iptr->op1 == 0) {
2348                                 gen_nullptr_check(s1);
2349                                 gen_bound_check;
2350                         }
2351                         var_to_reg_int(s3, src, REG_ITMP3);
2352                         x86_64_movw_reg_memindex(s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2353                         break;
2354
2355                 case ICMD_SASTORE:    /* ..., arrayref, index, value  ==> ...         */
2356
2357                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2358                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2359                         if (iptr->op1 == 0) {
2360                                 gen_nullptr_check(s1);
2361                                 gen_bound_check;
2362                         }
2363                         var_to_reg_int(s3, src, REG_ITMP3);
2364                         x86_64_movw_reg_memindex(s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2365                         break;
2366
2367                 case ICMD_BASTORE:    /* ..., arrayref, index, value  ==> ...         */
2368
2369                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2370                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2371                         if (iptr->op1 == 0) {
2372                                 gen_nullptr_check(s1);
2373                                 gen_bound_check;
2374                         }
2375                         var_to_reg_int(s3, src, REG_ITMP3);
2376                         x86_64_movb_reg_memindex(s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2377                         break;
2378
2379
2380                 case ICMD_PUTSTATIC:  /* ..., value  ==> ...                          */
2381                                       /* op1 = type, val.a = fieldinfo pointer        */
2382
2383                         /* if class isn't yet initialized, do it */
2384                         if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2385                                 /* call helper function which patches this code */
2386                                 x86_64_mov_imm_reg((s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2387                                 x86_64_mov_imm_reg((s8) asm_check_clinit, REG_ITMP2);
2388                                 x86_64_call_reg(REG_ITMP2);
2389                         }
2390
2391                         a = dseg_addaddress(&(((fieldinfo *) iptr->val.a)->value));
2392 /*                      x86_64_mov_imm_reg(0, REG_ITMP2); */
2393 /*                      dseg_adddata(mcodeptr); */
2394 /*                      x86_64_mov_membase_reg(REG_ITMP2, a, REG_ITMP2); */
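                        /* RIP-relative load of the field address from the data
                         * segment: the displacement is taken relative to the end
                         * of this instruction, so the +7 apparently accounts for
                         * the mov's encoded length (REX + opcode + ModRM + disp32),
                         * while a is the data segment offset returned by
                         * dseg_addaddress (negative, as the data segment seems to
                         * be laid out below mcodebase). The commented-out sequence
                         * above is the older dseg_adddata variant of the same access.
                         */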
2395                         x86_64_mov_membase_reg(RIP, -(((s8) mcodeptr + 7) - (s8) mcodebase) + a, REG_ITMP2);
2396                         switch (iptr->op1) {
2397                         case TYPE_INT:
2398                                 var_to_reg_int(s2, src, REG_ITMP1);
2399                                 x86_64_movl_reg_membase(s2, REG_ITMP2, 0);
2400                                 break;
2401                         case TYPE_LNG:
2402                         case TYPE_ADR:
2403                                 var_to_reg_int(s2, src, REG_ITMP1);
2404                                 x86_64_mov_reg_membase(s2, REG_ITMP2, 0);
2405                                 break;
2406                         case TYPE_FLT:
2407                                 var_to_reg_flt(s2, src, REG_FTMP1);
2408                                 x86_64_movss_reg_membase(s2, REG_ITMP2, 0);
2409                                 break;
2410                         case TYPE_DBL:
2411                                 var_to_reg_flt(s2, src, REG_FTMP1);
2412                                 x86_64_movsd_reg_membase(s2, REG_ITMP2, 0);
2413                                 break;
2414                         default: panic("internal error");
2415                         }
2416                         break;
2417
2418                 case ICMD_GETSTATIC:  /* ...  ==> ..., value                          */
2419                                       /* op1 = type, val.a = fieldinfo pointer        */
2420
2421                         /* if class isn't yet initialized, do it */
2422                         if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2423                                 /* call helper function which patches this code */
2424                                 x86_64_mov_imm_reg((s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2425                                 x86_64_mov_imm_reg((s8) asm_check_clinit, REG_ITMP2);
2426                                 x86_64_call_reg(REG_ITMP2);
2427                         }
2428
2429                         a = dseg_addaddress(&(((fieldinfo *) iptr->val.a)->value));
2430 /*                      x86_64_mov_imm_reg(0, REG_ITMP2); */
2431 /*                      dseg_adddata(mcodeptr); */
2432 /*                      x86_64_mov_membase_reg(REG_ITMP2, a, REG_ITMP2); */
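                        /* same RIP-relative data segment access as in ICMD_PUTSTATIC above */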
2433                         x86_64_mov_membase_reg(RIP, -(((s8) mcodeptr + 7) - (s8) mcodebase) + a, REG_ITMP2);
2434                         switch (iptr->op1) {
2435                         case TYPE_INT:
2436                                 d = reg_of_var(iptr->dst, REG_ITMP1);
2437                                 x86_64_movl_membase_reg(REG_ITMP2, 0, d);
2438                                 store_reg_to_var_int(iptr->dst, d);
2439                                 break;
2440                         case TYPE_LNG:
2441                         case TYPE_ADR:
2442                                 d = reg_of_var(iptr->dst, REG_ITMP1);
2443                                 x86_64_mov_membase_reg(REG_ITMP2, 0, d);
2444                                 store_reg_to_var_int(iptr->dst, d);
2445                                 break;
2446                         case TYPE_FLT:
2447                                 d = reg_of_var(iptr->dst, REG_ITMP1);
2448                                 x86_64_movss_membase_reg(REG_ITMP2, 0, d);
2449                                 store_reg_to_var_flt(iptr->dst, d);
2450                                 break;
2451                         case TYPE_DBL:                          
2452                                 d = reg_of_var(iptr->dst, REG_ITMP1);
2453                                 x86_64_movsd_membase_reg(REG_ITMP2, 0, d);
2454                                 store_reg_to_var_flt(iptr->dst, d);
2455                                 break;
2456                         default: panic("internal error");
2457                         }
2458                         break;
2459
2460                 case ICMD_PUTFIELD:   /* ..., value  ==> ...                          */
2461                                       /* op1 = type, val.a = fieldinfo pointer        */
2462
2463                         /* if class isn't yet initialized, do it */
2464                         if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2465                                 /* call helper function which patches this code */
2466                                 x86_64_mov_imm_reg((s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2467                                 x86_64_mov_imm_reg((s8) asm_check_clinit, REG_ITMP2);
2468                                 x86_64_call_reg(REG_ITMP2);
2469                         }
2470
2471                         a = ((fieldinfo *)(iptr->val.a))->offset;
2472                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2473                         switch (iptr->op1) {
2474                                 case TYPE_INT:
2475                                         var_to_reg_int(s2, src, REG_ITMP2);
2476                                         gen_nullptr_check(s1);
2477                                         x86_64_movl_reg_membase(s2, s1, a);
2478                                         break;
2479                                 case TYPE_LNG:
2480                                 case TYPE_ADR:
2481                                         var_to_reg_int(s2, src, REG_ITMP2);
2482                                         gen_nullptr_check(s1);
2483                                         x86_64_mov_reg_membase(s2, s1, a);
2484                                         break;
2485                                 case TYPE_FLT:
2486                                         var_to_reg_flt(s2, src, REG_FTMP2);
2487                                         gen_nullptr_check(s1);
2488                                         x86_64_movss_reg_membase(s2, s1, a);
2489                                         break;
2490                                 case TYPE_DBL:
2491                                         var_to_reg_flt(s2, src, REG_FTMP2);
2492                                         gen_nullptr_check(s1);
2493                                         x86_64_movsd_reg_membase(s2, s1, a);
2494                                         break;
2495                                 default: panic ("internal error");
2496                                 }
2497                         break;
2498
2499                 case ICMD_GETFIELD:   /* ...  ==> ..., value                          */
2500                                       /* op1 = type, val.a = fieldinfo pointer        */
2501
2502                         a = ((fieldinfo *)(iptr->val.a))->offset;
2503                         var_to_reg_int(s1, src, REG_ITMP1);
2504                         switch (iptr->op1) {
2505                                 case TYPE_INT:
2506                                         d = reg_of_var(iptr->dst, REG_ITMP1);
2507                                         gen_nullptr_check(s1);
2508                                         x86_64_movl_membase_reg(s1, a, d);
2509                                         store_reg_to_var_int(iptr->dst, d);
2510                                         break;
2511                                 case TYPE_LNG:
2512                                 case TYPE_ADR:
2513                                         d = reg_of_var(iptr->dst, REG_ITMP1);
2514                                         gen_nullptr_check(s1);
2515                                         x86_64_mov_membase_reg(s1, a, d);
2516                                         store_reg_to_var_int(iptr->dst, d);
2517                                         break;
2518                                 case TYPE_FLT:
2519                                         d = reg_of_var(iptr->dst, REG_FTMP1);
2520                                         gen_nullptr_check(s1);
2521                                         x86_64_movss_membase_reg(s1, a, d);
2522                                         store_reg_to_var_flt(iptr->dst, d);
2523                                         break;
2524                                 case TYPE_DBL:                          
2525                                         d = reg_of_var(iptr->dst, REG_FTMP1);
2526                                         gen_nullptr_check(s1);
2527                                         x86_64_movsd_membase_reg(s1, a, d);
2528                                         store_reg_to_var_flt(iptr->dst, d);
2529                                         break;
2530                                 default: panic ("internal error");
2531                                 }
2532                         break;
2533
2534
2535                 /* branch operations **************************************************/
2536
2537 /*  #define ALIGNCODENOP {if((int)((long)mcodeptr&7)){M_NOP;}} */
2538 #define ALIGNCODENOP do {} while (0)
2539
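                /* ATHROW: the call with immediate 0 targets the very next
                 * instruction, so popping its return address yields the current
                 * PC; asm_handle_exception presumably expects that PC in
                 * REG_ITMP2_XPC together with the exception object in
                 * REG_ITMP1_XPTR.
                 */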
2540                 case ICMD_ATHROW:       /* ..., objectref ==> ... (, objectref)       */
2541
2542                         var_to_reg_int(s1, src, REG_ITMP1);
2543                         M_INTMOVE(s1, REG_ITMP1_XPTR);
2544
2545                         x86_64_call_imm(0); /* passing exception pointer                  */
2546                         x86_64_pop_reg(REG_ITMP2_XPC);
2547
2548                         x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
2549                         x86_64_jmp_reg(REG_ITMP3);
2550                         ALIGNCODENOP;
2551                         break;
2552
2553                 case ICMD_GOTO:         /* ... ==> ...                                */
2554                                         /* op1 = target JavaVM pc                     */
2555
2556                         x86_64_jmp_imm(0);
2557                         codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2558                         ALIGNCODENOP;
2559                         break;
2560
2561                 case ICMD_JSR:          /* ... ==> ...                                */
2562                                         /* op1 = target JavaVM pc                     */
2563
2564                         x86_64_call_imm(0);
2565                         codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2566                         break;
2567                         
2568                 case ICMD_RET:          /* ... ==> ...                                */
2569                                         /* op1 = local variable                       */
2570
2571                         var = &(locals[iptr->op1][TYPE_ADR]);
2572                         var_to_reg_int(s1, var, REG_ITMP1);
2573                         x86_64_jmp_reg(s1);
2574                         break;
2575
2576                 case ICMD_IFNULL:       /* ..., value ==> ...                         */
2577                                         /* op1 = target JavaVM pc                     */
2578
2579                         if (src->flags & INMEMORY) {
2580                                 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
2581
2582                         } else {
2583                                 x86_64_test_reg_reg(src->regoff, src->regoff);
2584                         }
2585                         x86_64_jcc(X86_64_CC_E, 0);
2586                         codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2587                         break;
2588
2589                 case ICMD_IFNONNULL:    /* ..., value ==> ...                         */
2590                                         /* op1 = target JavaVM pc                     */
2591
2592                         if (src->flags & INMEMORY) {
2593                                 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
2594
2595                         } else {
2596                                 x86_64_test_reg_reg(src->regoff, src->regoff);
2597                         }
2598                         x86_64_jcc(X86_64_CC_NE, 0);
2599                         codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2600                         break;
2601
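                /* The IFxx and IF_xCMPxx cases are emitted through the
                 * x86_64_emit_if*cc helpers (defined elsewhere), which apparently
                 * compare src against the constant in iptr->val (or against the
                 * second stack operand) and emit a jcc whose zero displacement is
                 * later patched via codegen_addreference, just like the explicit
                 * IFNULL/IFNONNULL cases above.
                 */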
2602                 case ICMD_IFEQ:         /* ..., value ==> ...                         */
2603                                         /* op1 = target JavaVM pc, val.i = constant   */
2604
2605                         x86_64_emit_ifcc(X86_64_CC_E, src, iptr);
2606                         break;
2607
2608                 case ICMD_IFLT:         /* ..., value ==> ...                         */
2609                                         /* op1 = target JavaVM pc, val.i = constant   */
2610
2611                         x86_64_emit_ifcc(X86_64_CC_L, src, iptr);
2612                         break;
2613
2614                 case ICMD_IFLE:         /* ..., value ==> ...                         */
2615                                         /* op1 = target JavaVM pc, val.i = constant   */
2616
2617                         x86_64_emit_ifcc(X86_64_CC_LE, src, iptr);
2618                         break;
2619
2620                 case ICMD_IFNE:         /* ..., value ==> ...                         */
2621                                         /* op1 = target JavaVM pc, val.i = constant   */
2622
2623                         x86_64_emit_ifcc(X86_64_CC_NE, src, iptr);
2624                         break;
2625
2626                 case ICMD_IFGT:         /* ..., value ==> ...                         */
2627                                         /* op1 = target JavaVM pc, val.i = constant   */
2628
2629                         x86_64_emit_ifcc(X86_64_CC_G, src, iptr);
2630                         break;
2631
2632                 case ICMD_IFGE:         /* ..., value ==> ...                         */
2633                                         /* op1 = target JavaVM pc, val.i = constant   */
2634
2635                         x86_64_emit_ifcc(X86_64_CC_GE, src, iptr);
2636                         break;
2637
2638                 case ICMD_IF_LEQ:       /* ..., value ==> ...                         */
2639                                         /* op1 = target JavaVM pc, val.l = constant   */
2640
2641                         x86_64_emit_if_lcc(X86_64_CC_E, src, iptr);
2642                         break;
2643
2644                 case ICMD_IF_LLT:       /* ..., value ==> ...                         */
2645                                         /* op1 = target JavaVM pc, val.l = constant   */
2646
2647                         x86_64_emit_if_lcc(X86_64_CC_L, src, iptr);
2648                         break;
2649
2650                 case ICMD_IF_LLE:       /* ..., value ==> ...                         */
2651                                         /* op1 = target JavaVM pc, val.l = constant   */
2652
2653                         x86_64_emit_if_lcc(X86_64_CC_LE, src, iptr);
2654                         break;
2655
2656                 case ICMD_IF_LNE:       /* ..., value ==> ...                         */
2657                                         /* op1 = target JavaVM pc, val.l = constant   */
2658
2659                         x86_64_emit_if_lcc(X86_64_CC_NE, src, iptr);
2660                         break;
2661
2662                 case ICMD_IF_LGT:       /* ..., value ==> ...                         */
2663                                         /* op1 = target JavaVM pc, val.l = constant   */
2664
2665                         x86_64_emit_if_lcc(X86_64_CC_G, src, iptr);
2666                         break;
2667
2668                 case ICMD_IF_LGE:       /* ..., value ==> ...                         */
2669                                         /* op1 = target JavaVM pc, val.l = constant   */
2670
2671                         x86_64_emit_if_lcc(X86_64_CC_GE, src, iptr);
2672                         break;
2673
2674                 case ICMD_IF_ICMPEQ:    /* ..., value, value ==> ...                  */
2675                                         /* op1 = target JavaVM pc                     */
2676
2677                         x86_64_emit_if_icmpcc(X86_64_CC_E, src, iptr);
2678                         break;
2679
2680                 case ICMD_IF_LCMPEQ:    /* ..., value, value ==> ...                  */
2681                 case ICMD_IF_ACMPEQ:    /* op1 = target JavaVM pc                     */
2682
2683                         x86_64_emit_if_lcmpcc(X86_64_CC_E, src, iptr);
2684                         break;
2685
2686                 case ICMD_IF_ICMPNE:    /* ..., value, value ==> ...                  */
2687                                         /* op1 = target JavaVM pc                     */
2688
2689                         x86_64_emit_if_icmpcc(X86_64_CC_NE, src, iptr);
2690                         break;
2691
2692                 case ICMD_IF_LCMPNE:    /* ..., value, value ==> ...                  */
2693                 case ICMD_IF_ACMPNE:    /* op1 = target JavaVM pc                     */
2694
2695                         x86_64_emit_if_lcmpcc(X86_64_CC_NE, src, iptr);
2696                         break;
2697
2698                 case ICMD_IF_ICMPLT:    /* ..., value, value ==> ...                  */
2699                                         /* op1 = target JavaVM pc                     */
2700
2701                         x86_64_emit_if_icmpcc(X86_64_CC_L, src, iptr);
2702                         break;
2703
2704                 case ICMD_IF_LCMPLT:    /* ..., value, value ==> ...                  */
2705                                         /* op1 = target JavaVM pc                     */
2706
2707                         x86_64_emit_if_lcmpcc(X86_64_CC_L, src, iptr);
2708                         break;
2709
2710                 case ICMD_IF_ICMPGT:    /* ..., value, value ==> ...                  */
2711                                         /* op1 = target JavaVM pc                     */
2712
2713                         x86_64_emit_if_icmpcc(X86_64_CC_G, src, iptr);
2714                         break;
2715
2716                 case ICMD_IF_LCMPGT:    /* ..., value, value ==> ...                  */
2717                                         /* op1 = target JavaVM pc                     */
2718
2719                         x86_64_emit_if_lcmpcc(X86_64_CC_G, src, iptr);
2720                         break;
2721
2722                 case ICMD_IF_ICMPLE:    /* ..., value, value ==> ...                  */
2723                                         /* op1 = target JavaVM pc                     */
2724
2725                         x86_64_emit_if_icmpcc(X86_64_CC_LE, src, iptr);
2726                         break;
2727
2728                 case ICMD_IF_LCMPLE:    /* ..., value, value ==> ...                  */
2729                                         /* op1 = target JavaVM pc                     */
2730
2731                         x86_64_emit_if_lcmpcc(X86_64_CC_LE, src, iptr);
2732                         break;
2733
2734                 case ICMD_IF_ICMPGE:    /* ..., value, value ==> ...                  */
2735                                         /* op1 = target JavaVM pc                     */
2736
2737                         x86_64_emit_if_icmpcc(X86_64_CC_GE, src, iptr);
2738                         break;
2739
2740                 case ICMD_IF_LCMPGE:    /* ..., value, value ==> ...                  */
2741                                         /* op1 = target JavaVM pc                     */
2742
2743                         x86_64_emit_if_lcmpcc(X86_64_CC_GE, src, iptr);
2744                         break;
2745
2746                 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST                           */
2747
2748                 case ICMD_ELSE_ICONST:  /* handled by IFxx_ICONST                     */
2749                         break;
2750
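                /* IFxx_ICONST: branch-free select. When the following instruction
                 * is ELSE_ICONST, d is preloaded with the else constant (s1 is
                 * copied aside first if it aliases d); the test then sets the
                 * flags and a cmov overwrites d with val.i only when the
                 * condition holds.
                 */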
2751                 case ICMD_IFEQ_ICONST:  /* ..., value ==> ..., constant               */
2752                                         /* val.i = constant                           */
2753
2754                         var_to_reg_int(s1, src, REG_ITMP1);
2755                         d = reg_of_var(iptr->dst, REG_ITMP3);
2756                         s3 = iptr->val.i;
2757                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2758                                 if (s1 == d) {
2759                                         M_INTMOVE(s1, REG_ITMP1);
2760                                         s1 = REG_ITMP1;
2761                                 }
2762                                 x86_64_movl_imm_reg(iptr[1].val.i, d);
2763                         }
2764                         x86_64_movl_imm_reg(s3, REG_ITMP2);
2765                         x86_64_testl_reg_reg(s1, s1);
2766                         x86_64_cmovccl_reg_reg(X86_64_CC_E, REG_ITMP2, d);
2767                         store_reg_to_var_int(iptr->dst, d);
2768                         break;
2769
2770                 case ICMD_IFNE_ICONST:  /* ..., value ==> ..., constant               */
2771                                         /* val.i = constant                           */
2772
2773                         var_to_reg_int(s1, src, REG_ITMP1);
2774                         d = reg_of_var(iptr->dst, REG_ITMP3);
2775                         s3 = iptr->val.i;
2776                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2777                                 if (s1 == d) {
2778                                         M_INTMOVE(s1, REG_ITMP1);
2779                                         s1 = REG_ITMP1;
2780                                 }
2781                                 x86_64_movl_imm_reg(iptr[1].val.i, d);
2782                         }
2783                         x86_64_movl_imm_reg(s3, REG_ITMP2);
2784                         x86_64_testl_reg_reg(s1, s1);
2785                         x86_64_cmovccl_reg_reg(X86_64_CC_NE, REG_ITMP2, d);
2786                         store_reg_to_var_int(iptr->dst, d);
2787                         break;
2788
2789                 case ICMD_IFLT_ICONST:  /* ..., value ==> ..., constant               */
2790                                         /* val.i = constant                           */
2791
2792                         var_to_reg_int(s1, src, REG_ITMP1);
2793                         d = reg_of_var(iptr->dst, REG_ITMP3);
2794                         s3 = iptr->val.i;
2795                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2796                                 if (s1 == d) {
2797                                         M_INTMOVE(s1, REG_ITMP1);
2798                                         s1 = REG_ITMP1;
2799                                 }
2800                                 x86_64_movl_imm_reg(iptr[1].val.i, d);
2801                         }
2802                         x86_64_movl_imm_reg(s3, REG_ITMP2);
2803                         x86_64_testl_reg_reg(s1, s1);
2804                         x86_64_cmovccl_reg_reg(X86_64_CC_L, REG_ITMP2, d);
2805                         store_reg_to_var_int(iptr->dst, d);
2806                         break;
2807
2808                 case ICMD_IFGE_ICONST:  /* ..., value ==> ..., constant               */
2809                                         /* val.i = constant                           */
2810
2811                         var_to_reg_int(s1, src, REG_ITMP1);
2812                         d = reg_of_var(iptr->dst, REG_ITMP3);
2813                         s3 = iptr->val.i;
2814                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2815                                 if (s1 == d) {
2816                                         M_INTMOVE(s1, REG_ITMP1);
2817                                         s1 = REG_ITMP1;
2818                                 }
2819                                 x86_64_movl_imm_reg(iptr[1].val.i, d);
2820                         }
2821                         x86_64_movl_imm_reg(s3, REG_ITMP2);
2822                         x86_64_testl_reg_reg(s1, s1);
2823                         x86_64_cmovccl_reg_reg(X86_64_CC_GE, REG_ITMP2, d);
2824                         store_reg_to_var_int(iptr->dst, d);
2825                         break;
2826
2827                 case ICMD_IFGT_ICONST:  /* ..., value ==> ..., constant               */
2828                                         /* val.i = constant                           */
2829
2830                         var_to_reg_int(s1, src, REG_ITMP1);
2831                         d = reg_of_var(iptr->dst, REG_ITMP3);
2832                         s3 = iptr->val.i;
2833                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2834                                 if (s1 == d) {
2835                                         M_INTMOVE(s1, REG_ITMP1);
2836                                         s1 = REG_ITMP1;
2837                                 }
2838                                 x86_64_movl_imm_reg(iptr[1].val.i, d);
2839                         }
2840                         x86_64_movl_imm_reg(s3, REG_ITMP2);
2841                         x86_64_testl_reg_reg(s1, s1);
2842                         x86_64_cmovccl_reg_reg(X86_64_CC_G, REG_ITMP2, d);
2843                         store_reg_to_var_int(iptr->dst, d);
2844                         break;
2845
2846                 case ICMD_IFLE_ICONST:  /* ..., value ==> ..., constant               */
2847                                         /* val.i = constant                           */
2848
2849                         var_to_reg_int(s1, src, REG_ITMP1);
2850                         d = reg_of_var(iptr->dst, REG_ITMP3);
2851                         s3 = iptr->val.i;
2852                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2853                                 if (s1 == d) {
2854                                         M_INTMOVE(s1, REG_ITMP1);
2855                                         s1 = REG_ITMP1;
2856                                 }
2857                                 x86_64_movl_imm_reg(iptr[1].val.i, d);
2858                         }
2859                         x86_64_movl_imm_reg(s3, REG_ITMP2);
2860                         x86_64_testl_reg_reg(s1, s1);
2861                         x86_64_cmovccl_reg_reg(X86_64_CC_LE, REG_ITMP2, d);
2862                         store_reg_to_var_int(iptr->dst, d);
2863                         break;
2864
2865
2866                 case ICMD_IRETURN:      /* ..., retvalue ==> ...                      */
2867                 case ICMD_LRETURN:
2868                 case ICMD_ARETURN:
2869
2870                         var_to_reg_int(s1, src, REG_RESULT);
2871                         M_INTMOVE(s1, REG_RESULT);
2872
2873 #if defined(USE_THREADS)
2874                         if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2875                                 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
2876                                 x86_64_mov_reg_membase(REG_RESULT, REG_SP, maxmemuse * 8);
2877                                 x86_64_mov_imm_reg((u8) builtin_monitorexit, REG_ITMP1);
2878                                 x86_64_call_reg(REG_ITMP1);
2879                                 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, REG_RESULT);
2880                         }
2881 #endif
2882
2883                         goto nowperformreturn;
2884
2885                 case ICMD_FRETURN:      /* ..., retvalue ==> ...                      */
2886                 case ICMD_DRETURN:
2887
2888                         var_to_reg_flt(s1, src, REG_FRESULT);
2889                         M_FLTMOVE(s1, REG_FRESULT);
2890
2891 #if defined(USE_THREADS)
2892                         if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2893                                 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
2894                                 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, maxmemuse * 8);
2895                                 x86_64_mov_imm_reg((u8) builtin_monitorexit, REG_ITMP1);
2896                                 x86_64_call_reg(REG_ITMP1);
2897                                 x86_64_movq_membase_reg(REG_SP, maxmemuse * 8, REG_FRESULT);
2898                         }
2899 #endif
2900
2901                         goto nowperformreturn;
2902
2903                 case ICMD_RETURN:      /* ...  ==> ...                                */
2904
2905 #if defined(USE_THREADS)
2906                         if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2907                                 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
2908                                 x86_64_mov_imm_reg((u8) builtin_monitorexit, REG_ITMP1);
2909                                 x86_64_call_reg(REG_ITMP1);
2910                         }
2911 #endif
2912
2913 nowperformreturn:
2914                         {
2915                         int r, p;
2916                         
2917                         p = parentargs_base;
2918                         
2919                         /* call trace function */
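                        /* the return value registers are spilled to two temporary
                           stack slots around the builtin_displaymethodstop call so
                           they survive the trace output */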
2920                         if (runverbose) {
2921                                 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
2922
2923                                 x86_64_mov_reg_membase(REG_RESULT, REG_SP, 0 * 8);
2924                                 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, 1 * 8);
2925
2926                                 x86_64_mov_imm_reg((s8) method, argintregs[0]);
2927                                 x86_64_mov_reg_reg(REG_RESULT, argintregs[1]);
2928                                 M_FLTMOVE(REG_FRESULT, argfltregs[0]);
2929                                 M_FLTMOVE(REG_FRESULT, argfltregs[1]);
2930
2931                                 x86_64_mov_imm_reg((s8) builtin_displaymethodstop, REG_ITMP1);
2932                                 x86_64_call_reg(REG_ITMP1);
2933
2934                                 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_RESULT);
2935                                 x86_64_movq_membase_reg(REG_SP, 1 * 8, REG_FRESULT);
2936
2937                                 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
2938                         }
2939
2940                         /* restore saved registers                                        */
2941                         for (r = savintregcnt - 1; r >= maxsavintreguse; r--) {
2942                                 p--; x86_64_mov_membase_reg(REG_SP, p * 8, savintregs[r]);
2943                         }
2944                         for (r = savfltregcnt - 1; r >= maxsavfltreguse; r--) {
2945                                 p--; x86_64_movq_membase_reg(REG_SP, p * 8, savfltregs[r]);
2946                         }
2947
2948                         /* deallocate stack                                               */
2949                         if (parentargs_base) {
2950                                 x86_64_alu_imm_reg(X86_64_ADD, parentargs_base * 8, REG_SP);
2951                         }
2952
2953                         x86_64_ret();
2954                         ALIGNCODENOP;
2955                         }
2956                         break;
2957
2958
2959                 case ICMD_TABLESWITCH:  /* ..., index ==> ...                         */
2960                         {
2961                                 s4 i, l, *s4ptr;
2962                                 void **tptr;
2963
2964                                 tptr = (void **) iptr->target;
2965
2966                                 s4ptr = iptr->val.a;
2967                                 l = s4ptr[1];                          /* low     */
2968                                 i = s4ptr[2];                          /* high    */
2969
2970                                 var_to_reg_int(s1, src, REG_ITMP1);
2971                                 M_INTMOVE(s1, REG_ITMP1);
2972                                 if (l != 0) {
2973                                         x86_64_alul_imm_reg(X86_64_SUB, l, REG_ITMP1);
2974                                 }
2975                                 i = i - l + 1;
2976
2977                 /* range check */
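                /* after subtracting the low bound, a single unsigned compare
                   suffices: an index below l wraps to a large unsigned value,
                   so CC_A (above) catches both under- and overflow of the range */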
2978                                 x86_64_alul_imm_reg(X86_64_CMP, i - 1, REG_ITMP1);
2979                                 x86_64_jcc(X86_64_CC_A, 0);
2980
2981                 /* codegen_addreference(BlockPtrOfPC(s4ptr[0]), mcodeptr); */
2982                                 codegen_addreference((basicblock *) tptr[0], mcodeptr);
2983
2984                                 /* build jump table top down and use address of lowest entry */
2985
2986                 /* s4ptr += 3 + i; */
2987                                 tptr += i;
2988
2989                                 while (--i >= 0) {
2990                                         /* dseg_addtarget(BlockPtrOfPC(*--s4ptr)); */
2991                                         dseg_addtarget((basicblock *) tptr[0]); 
2992                                         --tptr;
2993                                 }
2994
2995                                 /* length of dataseg after last dseg_addtarget is used by load */
2996
2997                                 x86_64_mov_imm_reg(0, REG_ITMP2);
2998                                 dseg_adddata(mcodeptr);
2999                                 x86_64_mov_memindex_reg(-dseglen, REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
3000                                 x86_64_jmp_reg(REG_ITMP1);
3001                                 ALIGNCODENOP;
3002                         }
3003                         break;
3004
3005
3006                 case ICMD_LOOKUPSWITCH: /* ..., key ==> ...                           */
3007                         {
3008                                 s4 i, l, val, *s4ptr;
3009                                 void **tptr;
3010
3011                                 tptr = (void **) iptr->target;
3012
3013                                 s4ptr = iptr->val.a;
3014                                 l = s4ptr[0];                          /* default  */
3015                                 i = s4ptr[1];                          /* count    */
3016                         
3017                                 MCODECHECK((i<<2)+8);
3018                                 var_to_reg_int(s1, src, REG_ITMP1);    /* reg compare should always be faster */
3019                                 while (--i >= 0) {
3020                                         s4ptr += 2;
3021                                         ++tptr;
3022
3023                                         val = s4ptr[0];
3024                                         x86_64_alul_imm_reg(X86_64_CMP, val, s1);
3025                                         x86_64_jcc(X86_64_CC_E, 0);
3026                                         /* codegen_addreference(BlockPtrOfPC(s4ptr[1]), mcodeptr); */
3027                                         codegen_addreference((basicblock *) tptr[0], mcodeptr); 
3028                                 }
3029
3030                                 x86_64_jmp_imm(0);
3031                                 /* codegen_addreference(BlockPtrOfPC(l), mcodeptr); */
3032                         
3033                                 tptr = (void **) iptr->target;
3034                                 codegen_addreference((basicblock *) tptr[0], mcodeptr);
3035
3036                                 ALIGNCODENOP;
3037                         }
3038                         break;
3039
3040
3041                 case ICMD_BUILTIN3:     /* ..., arg1, arg2, arg3 ==> ...              */
3042                                         /* op1 = return type, val.a = function pointer*/
3043                         s3 = 3;
3044                         goto gen_method;
3045
3046                 case ICMD_BUILTIN2:     /* ..., arg1, arg2 ==> ...                    */
3047                                         /* op1 = return type, val.a = function pointer*/
3048                         s3 = 2;
3049                         goto gen_method;
3050
3051                 case ICMD_BUILTIN1:     /* ..., arg1 ==> ...                          */
3052                                         /* op1 = return type, val.a = function pointer*/
3053                         s3 = 1;
3054                         goto gen_method;
3055
3056                 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ...            */
3057                                         /* op1 = arg count, val.a = method pointer    */
3058
3059                 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3060                                         /* op1 = arg count, val.a = method pointer    */
3061
3062                 case ICMD_INVOKEVIRTUAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3063                                         /* op1 = arg count, val.a = method pointer    */
3064
3065                 case ICMD_INVOKEINTERFACE:/*.., objectref, [arg1, [arg2 ...]] ==> ... */
3066                                         /* op1 = arg count, val.a = method pointer    */
3067
3068                         s3 = iptr->op1;
3069
3070 gen_method: {
3071                         methodinfo   *m;
3072                         classinfo    *ci;
3073                         stackptr     tmpsrc;
3074                         int iarg = 0;
3075                         int farg = 0;
3076
3077                         MCODECHECK((s3 << 1) + 64);
3078
3079                         tmpsrc = src;
3080                         s2 = s3;
3081
3082                         /* copy arguments to registers or stack locations; the first pass below only counts the integer and float arguments */
3083                         for (; --s3 >= 0; src = src->prev) {
3084                                 IS_INT_LNG_TYPE(src->type) ? iarg++ : farg++;
3085                         }
3086
3087                         src = tmpsrc;
3088                         s3 = s2;
3089
3090                         s2 = ((iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0) + ((farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0);    /* stack slots needed for arguments passed in memory */
3091
3092                         for (; --s3 >= 0; src = src->prev) {
3093                                 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
3094                                 if (src->varkind == ARGVAR) {
3095                                         if (IS_INT_LNG_TYPE(src->type)) {
3096                                                 if (iarg >= INT_ARG_CNT) {
3097                                                         s2--;
3098                                                 }
3099                                         } else {
3100                                                 if (farg >= FLT_ARG_CNT) {
3101                                                         s2--;
3102                                                 }
3103                                         }
3104                                         continue;
3105                                 }
3106
3107                                 if (IS_INT_LNG_TYPE(src->type)) {
3108                                         if (iarg < INT_ARG_CNT) {
3109                                                 s1 = argintregs[iarg];
3110                                                 var_to_reg_int(d, src, s1);
3111                                                 M_INTMOVE(d, s1);
3112
3113                                         } else {
3114                                                 var_to_reg_int(d, src, REG_ITMP1);
3115                                                 s2--;
3116                                                 x86_64_mov_reg_membase(d, REG_SP, s2 * 8);
3117                                         }
3118
3119                                 } else {
3120                                         if (farg < FLT_ARG_CNT) {
3121                                                 s1 = argfltregs[farg];
3122                                                 var_to_reg_flt(d, src, s1);
3123                                                 M_FLTMOVE(d, s1);
3124
3125                                         } else {
3126                                                 var_to_reg_flt(d, src, REG_FTMP1);
3127                                                 s2--;
3128                                                 x86_64_movq_reg_membase(d, REG_SP, s2 * 8);
3129                                         }
3130                                 }
3131                         } /* end of for */
3132
3133                         m = iptr->val.a;
3134                         switch (iptr->opc) {
3135                                 case ICMD_BUILTIN3:
3136                                 case ICMD_BUILTIN2:
3137                                 case ICMD_BUILTIN1:
3138
3139                                         a = (s8) m;
3140                                         d = iptr->op1;
3141
3142                                         x86_64_mov_imm_reg(a, REG_ITMP1);
3143                                         x86_64_call_reg(REG_ITMP1);
3144                                         break;
3145
3146                                 case ICMD_INVOKESTATIC:
3147
3148                                         a = (s8) m->stubroutine;
3149                                         d = m->returntype;
3150
3151                                         x86_64_mov_imm_reg(a, REG_ITMP2);
3152                                         x86_64_call_reg(REG_ITMP2);
3153                                         break;
3154
3155                                 case ICMD_INVOKESPECIAL:
3156
3157                                         a = (s8) m->stubroutine;
3158                                         d = m->returntype;
3159
3160                                         gen_nullptr_check(argintregs[0]);    /* first argument contains pointer */
3161                                         x86_64_mov_membase_reg(argintregs[0], 0, REG_ITMP2);    /* dereference the receiver so a null pointer faults in hardware */
3162                                         x86_64_mov_imm_reg(a, REG_ITMP2);
3163                                         x86_64_call_reg(REG_ITMP2);
3164                                         break;
3165
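                                /* INVOKEVIRTUAL dispatches through the vftbl: load
                                 * the vtable pointer from the object header, then
                                 * the method pointer at table[0] + vftblindex *
                                 * sizeof(methodptr). INVOKEINTERFACE below adds one
                                 * more indirection through the per-interface table,
                                 * addressed at a negative offset given by the
                                 * interface's index.
                                 */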
3166                                 case ICMD_INVOKEVIRTUAL:
3167
3168                                         d = m->returntype;
3169
3170                                         gen_nullptr_check(argintregs[0]);
3171                                         x86_64_mov_membase_reg(argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3172                                         x86_64_mov_membase32_reg(REG_ITMP2, OFFSET(vftbl, table[0]) + sizeof(methodptr) * m->vftblindex, REG_ITMP1);
3173                                         x86_64_call_reg(REG_ITMP1);
3174                                         break;
3175
3176                                 case ICMD_INVOKEINTERFACE:
3177
3178                                         ci = m->class;
3179                                         d = m->returntype;
3180
3181                                         gen_nullptr_check(argintregs[0]);
3182                                         x86_64_mov_membase_reg(argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3183                                         x86_64_mov_membase_reg(REG_ITMP2, OFFSET(vftbl, interfacetable[0]) - sizeof(methodptr) * ci->index, REG_ITMP2);
3184                                         x86_64_mov_membase32_reg(REG_ITMP2, sizeof(methodptr) * (m - ci->methods), REG_ITMP1);
3185                                         x86_64_call_reg(REG_ITMP1);
3186                                         break;
3187
3188                                 default:
3189                                         d = 0;
3190                                         error("Unknown ICMD-Command: %d", iptr->opc);
3191                                 }
3192
3193                         /* d contains return type */
3194
3195                         if (d != TYPE_VOID) {
3196                                 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3197                                         s1 = reg_of_var(iptr->dst, REG_RESULT);
3198                                         M_INTMOVE(REG_RESULT, s1);
3199                                         store_reg_to_var_int(iptr->dst, s1);
3200
3201                                 } else {
3202                                         s1 = reg_of_var(iptr->dst, REG_FRESULT);
3203                                         M_FLTMOVE(REG_FRESULT, s1);
3204                                         store_reg_to_var_flt(iptr->dst, s1);
3205                                 }
3206                         }
3207                         }
3208                         break;
3209
3210
3211                 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult            */
3212
3213                                       /* op1:   0 == array, 1 == class                */
3214                                       /* val.a: (classinfo*) superclass               */
3215
3216 /*          superclass is an interface:
3217  *
3218  *          return (sub != NULL) &&
3219  *                 (sub->vftbl->interfacetablelength > super->index) &&
3220  *                 (sub->vftbl->interfacetable[-super->index] != NULL);
3221  *
3222  *          superclass is a class:
3223  *
3224  *          return ((sub != NULL) && (0
3225  *                  <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3226  *                  super->vftbl->diffval));
3227  */
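/*          A rough C sketch of the interface test emitted below (names as in
 *          the comment above, purely illustrative):
 *
 *              d = 0;
 *              if (sub != NULL) {
 *                  vftbl *v = sub->vftbl;
 *                  if (v->interfacetablelength - super->index > 0)
 *                      d = (v->interfacetable[-super->index] != NULL);
 *              }
 *
 *          The byte count accumulated in a is the length of the inlined test,
 *          so the initial jcc taken on a NULL reference skips it completely
 *          and leaves the zeroed result register d untouched.
 */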
3228
3229                         {
3230                         classinfo *super = (classinfo*) iptr->val.a;
3231                         
3232                         var_to_reg_int(s1, src, REG_ITMP1);
3233                         d = reg_of_var(iptr->dst, REG_ITMP3);
3234                         if (s1 == d) {
3235                                 M_INTMOVE(s1, REG_ITMP1);
3236                                 s1 = REG_ITMP1;
3237                         }
3238                         x86_64_alu_reg_reg(X86_64_XOR, d, d);
3239                         if (iptr->op1) {                               /* class/interface */
3240                                 if (super->flags & ACC_INTERFACE) {        /* interface       */
3241                                         x86_64_test_reg_reg(s1, s1);
3242
3243                                         /* TODO: clean up this calculation */
3244                                         a = 3;    /* mov_membase_reg */
3245                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3246
3247                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3248                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetablelength));
3249                                         
3250                                         a += 3;    /* sub */
3251                                         CALCIMMEDIATEBYTES(a, super->index);
3252                                         
3253                                         a += 3;    /* test */
3254
3255                                         a += 6;    /* jcc */
3256                                         a += 3;    /* mov_membase_reg */
3257                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3258
3259                                         a += 3;    /* test */
3260                                         a += 4;    /* setcc */
3261
3262                                         x86_64_jcc(X86_64_CC_E, a);
3263
3264                                         x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3265                                         x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetablelength), REG_ITMP2);
3266                                         x86_64_alu_imm_reg(X86_64_SUB, super->index, REG_ITMP2);
3267                                         x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3268
3269                                         /* TODO: clean up this calculation */
3270                                         a = 0;
3271                                         a += 3;    /* mov_membase_reg */
3272                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3273
3274                                         a += 3;    /* test */
3275                                         a += 4;    /* setcc */
3276
3277                                         x86_64_jcc(X86_64_CC_LE, a);
3278                                         x86_64_mov_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP1);
3279                                         x86_64_test_reg_reg(REG_ITMP1, REG_ITMP1);
3280                                         x86_64_setcc_reg(X86_64_CC_NE, d);
3281
3282                                 } else {                                   /* class           */
3283                                         x86_64_test_reg_reg(s1, s1);
3284
3285                                         /* TODO: clean up this calculation */
3286                                         a = 3;    /* mov_membase_reg */
3287                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3288
3289                                         a += 10;   /* mov_imm_reg */
3290
3291                                         a += 2;    /* movl_membase_reg - only if REG_ITMP1 == RAX */
3292                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, baseval));
3293                                         
3294                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3295                                         CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3296                                         
3297                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3298                                         CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3299                                         
3300                                         a += 3;    /* sub */
3301                                         a += 3;    /* xor */
3302                                         a += 3;    /* cmp */
3303                                         a += 4;    /* setcc */
3304
3305                                         x86_64_jcc(X86_64_CC_E, a);
3306
3307                                         x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3308                                         x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3309                                         x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, baseval), REG_ITMP1);
3310                                         x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP3);
3311                                         x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3312                                         x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP3, REG_ITMP1);
3313                                         x86_64_alu_reg_reg(X86_64_XOR, d, d);
3314                                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
3315                                         x86_64_setcc_reg(X86_64_CC_BE, d);
3316                                 }
3317                         }
3318                         else
3319                                 panic("internal error: no inlined array instanceof");
3320                         }
3321                         store_reg_to_var_int(iptr->dst, d);
3322                         break;
3323
3324                 case ICMD_CHECKCAST:  /* ..., objectref ==> ..., objectref            */
3325
3326                                       /* op1:   0 == array, 1 == class                */
3327                                       /* val.a: (classinfo*) superclass               */
3328
3329 /*          superclass is an interface:
3330  *
3331  *          OK if ((sub == NULL) ||
3332  *                 (sub->vftbl->interfacetablelength > super->index) &&
3333  *                 (sub->vftbl->interfacetable[-super->index] != NULL));
3334  *
3335  *          superclass is a class:
3336  *
3337  *          OK if ((sub == NULL) || (0
3338  *                 <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3339  *                 super->vftbl->diffval));
3340  */
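/*          Unlike INSTANCEOF above, a failing test does not yield a boolean:
 *          every x86_64_jcc(..., 0) that is followed by
 *          codegen_addxcastrefs(mcodeptr) records an unresolved forward
 *          branch, which is later patched to reach the ClassCastException
 *          stub generated after the method body (see "generate cast check
 *          stubs" below).
 */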
3341
3342                         {
3343                         classinfo *super = (classinfo*) iptr->val.a;
3344                         
3345                         d = reg_of_var(iptr->dst, REG_ITMP3);
3346                         var_to_reg_int(s1, src, d);
3347                         if (iptr->op1) {                               /* class/interface */
3348                                 if (super->flags & ACC_INTERFACE) {        /* interface       */
3349                                         x86_64_test_reg_reg(s1, s1);
3350
3351                                         /* TODO: clean up this calculation */
3352                                         a = 3;    /* mov_membase_reg */
3353                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3354
3355                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3356                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetablelength));
3357
3358                                         a += 3;    /* sub */
3359                                         CALCIMMEDIATEBYTES(a, super->index);
3360
3361                                         a += 3;    /* test */
3362                                         a += 6;    /* jcc */
3363
3364                                         a += 3;    /* mov_membase_reg */
3365                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3366
3367                                         a += 3;    /* test */
3368                                         a += 6;    /* jcc */
3369
3370                                         x86_64_jcc(X86_64_CC_E, a);
3371
3372                                         x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3373                                         x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetablelength), REG_ITMP2);
3374                                         x86_64_alu_imm_reg(X86_64_SUB, super->index, REG_ITMP2);
3375                                         x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3376                                         x86_64_jcc(X86_64_CC_LE, 0);
3377                                         codegen_addxcastrefs(mcodeptr);
3378                                         x86_64_mov_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP2);
3379                                         x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3380                                         x86_64_jcc(X86_64_CC_E, 0);
3381                                         codegen_addxcastrefs(mcodeptr);
3382
3383                                 } else {                                     /* class           */
3384                                         x86_64_test_reg_reg(s1, s1);
3385
3386                                         /* TODO: clean up this calculation */
3387                                         a = 3;    /* mov_membase_reg */
3388                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3389                                         a += 10;   /* mov_imm_reg */
3390                                         a += 2;    /* movl_membase_reg - only if REG_ITMP1 == RAX */
3391                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, baseval));
3392
3393                                         if (d != REG_ITMP3) {
3394                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3395                                                 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3396                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3397                                                 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3398                                                 a += 3;    /* sub */
3399                                                 
3400                                         } else {
3401                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3402                                                 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3403                                                 a += 3;    /* sub */
3404                                                 a += 10;   /* mov_imm_reg */
3405                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3406                                                 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3407                                         }
3408
3409                                         a += 3;    /* cmp */
3410                                         a += 6;    /* jcc */
3411
3412                                         x86_64_jcc(X86_64_CC_E, a);
3413
3414                                         x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3415                                         x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3416                                         x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, baseval), REG_ITMP1);
3417                                         if (d != REG_ITMP3) {
3418                                                 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP3);
3419                                                 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3420                                                 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP3, REG_ITMP1);
3421
3422                                         } else {
3423                                                 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP2);
3424                                                 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
3425                                                 x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3426                                                 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3427                                         }
3428                                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
3429                                         x86_64_jcc(X86_64_CC_A, 0);    /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3430                                         codegen_addxcastrefs(mcodeptr);
3431                                 }
3432
3433                         } else
3434                                 panic("internal error: no inlined array checkcast");
3435                         }
3436                         M_INTMOVE(s1, d);
3437                         store_reg_to_var_int(iptr->dst, d);
3438                         break;
3439
3440                 case ICMD_CHECKASIZE:  /* ..., size ==> ..., size                     */
3441
3442                         if (src->flags & INMEMORY) {
3443                                 x86_64_alul_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
3444                                 
3445                         } else {
3446                                 x86_64_testl_reg_reg(src->regoff, src->regoff);
3447                         }
3448                         x86_64_jcc(X86_64_CC_L, 0);
3449                         codegen_addxcheckarefs(mcodeptr);
3450                         break;
3451
3452                 case ICMD_CHECKOOM:    /* ... ==> ...                                 */
3453
3454                         x86_64_test_reg_reg(REG_RESULT, REG_RESULT);
3455                         x86_64_jcc(X86_64_CC_E, 0);
3456                         codegen_addxoomrefs(mcodeptr);
3457                         break;
3458
3459                 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref  */
3460                                          /* op1 = dimension, val.a = array descriptor */
3461
3462                         /* check for negative sizes and copy sizes to stack if necessary  */
3463
3464                         MCODECHECK((iptr->op1 << 1) + 64);
3465
3466                         for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3467                                 var_to_reg_int(s2, src, REG_ITMP1);
3468                                 x86_64_testl_reg_reg(s2, s2);
3469                                 x86_64_jcc(X86_64_CC_L, 0);
3470                                 codegen_addxcheckarefs(mcodeptr);
3471
3472                                 /* copy sizes to stack (argument numbers >= INT_ARG_CNT)      */
3473
3474                                 if (src->varkind != ARGVAR) {
3475                                         x86_64_mov_reg_membase(s2, REG_SP, (s1 + INT_ARG_CNT) * 8);
3476                                 }
3477                         }
3478
3479                         /* a0 = dimension count */
3480                         x86_64_mov_imm_reg(iptr->op1, argintregs[0]);
3481
3482                         /* a1 = arraydescriptor */
3483                         x86_64_mov_imm_reg((s8) iptr->val.a, argintregs[1]);
3484
3485                         /* a2 = pointer to dimensions = stack pointer */
3486                         x86_64_mov_reg_reg(REG_SP, argintregs[2]);
3487
3488                         x86_64_mov_imm_reg((s8) (builtin_nmultianewarray), REG_ITMP1);
3489                         x86_64_call_reg(REG_ITMP1);
3490
3491                         s1 = reg_of_var(iptr->dst, REG_RESULT);
3492                         M_INTMOVE(REG_RESULT, s1);
3493                         store_reg_to_var_int(iptr->dst, s1);
3494                         break;
3495
3496                 default: error("Unknown pseudo command: %d", iptr->opc);
3497         } /* switch */
3498                 
3499         } /* for instruction */
3500                 
3501         /* copy values to interface registers */
3502
3503         src = bptr->outstack;
3504         len = bptr->outdepth;
3505         MCODECHECK(64+len);
3506         while (src) {
3507                 len--;
3508                 if ((src->varkind != STACKVAR)) {
3509                         s2 = src->type;
3510                         if (IS_FLT_DBL_TYPE(s2)) {
3511                                 var_to_reg_flt(s1, src, REG_FTMP1);
3512                                 if (!(interfaces[len][s2].flags & INMEMORY)) {
3513                                         M_FLTMOVE(s1, interfaces[len][s2].regoff);
3514
3515                                 } else {
3516                                         x86_64_movq_reg_membase(s1, REG_SP, 8 * interfaces[len][s2].regoff);
3517                                 }
3518
3519                         } else {
3520                                 var_to_reg_int(s1, src, REG_ITMP1);
3521                                 if (!(interfaces[len][s2].flags & INMEMORY)) {
3522                                         M_INTMOVE(s1, interfaces[len][s2].regoff);
3523
3524                                 } else {
3525                                         x86_64_mov_reg_membase(s1, REG_SP, interfaces[len][s2].regoff * 8);
3526                                 }
3527                         }
3528                 }
3529                 src = src->prev;
3530         }
3531         } /* if (bptr -> flags >= BBREACHED) */
3532         } /* for basic block */
3533
3534         /* bptr -> mpc = (int)((u1*) mcodeptr - mcodebase); */
3535
3536         {
3537
3538         /* generate bound check stubs */
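	/* Each codegen_addx*refs() call in the method body recorded a
	   conditional branch emitted with displacement 0.  The loops below
	   patch those branches: the first reference of each exception type
	   gets the full sequence (load the faulting PC into REG_ITMP2_XPC,
	   build the exception object, jump to asm_handle_exception); further
	   references emit only their own XPC load and branch to the shared
	   part (xcodeptr), or, if the method has no exception table, are
	   resolved straight into the first reference's sequence (xcodeptr
	   minus the byte length of the XPC setup). */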
3539
3540         u1 *xcodeptr = NULL;
3541         
3542         for (; xboundrefs != NULL; xboundrefs = xboundrefs->next) {
3543                 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3544                         gen_resolvebranch(mcodebase + xboundrefs->branchpos, 
3545                                 xboundrefs->branchpos, xcodeptr - mcodebase - (3 + 10 + 10 + 3));
3546                         continue;
3547                 }
3548
3549                 gen_resolvebranch(mcodebase + xboundrefs->branchpos, 
3550                                   xboundrefs->branchpos, mcodeptr - mcodebase);
3551
3552                 MCODECHECK(8);
3553
3554                 /* move index register into REG_ITMP1 */
3555                 x86_64_mov_reg_reg(xboundrefs->reg, REG_ITMP1);              /* 3 bytes  */
3556
3557                 x86_64_mov_imm_reg(0, REG_ITMP2_XPC);                        /* 10 bytes */
3558                 dseg_adddata(mcodeptr);
3559                 x86_64_mov_imm_reg(xboundrefs->branchpos - 6, REG_ITMP3);    /* 10 bytes */
3560                 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC);    /* 3 bytes  */
3561
3562                 if (xcodeptr != NULL) {
3563                         x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3564
3565                 } else {
3566                         xcodeptr = mcodeptr;
3567
3568                         x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3569                         x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3570                         x86_64_mov_imm_reg((s8) string_java_lang_ArrayIndexOutOfBoundsException, argintregs[0]);
3571                         x86_64_mov_reg_reg(REG_ITMP1, argintregs[1]);
3572                         x86_64_mov_imm_reg((s8) new_exception_int, REG_ITMP3);
3573                         x86_64_call_reg(REG_ITMP3);
3574                         x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3575                         x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3576
3577                         x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3578                         x86_64_jmp_reg(REG_ITMP3);
3579                 }
3580         }
3581
3582         /* generate negative array size check stubs */
3583
3584         xcodeptr = NULL;
3585         
3586         for (; xcheckarefs != NULL; xcheckarefs = xcheckarefs->next) {
3587                 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3588                         gen_resolvebranch(mcodebase + xcheckarefs->branchpos, 
3589                                 xcheckarefs->branchpos, xcodeptr - mcodebase - (10 + 10 + 3));
3590                         continue;
3591                 }
3592
3593                 gen_resolvebranch(mcodebase + xcheckarefs->branchpos, 
3594                                   xcheckarefs->branchpos, mcodeptr - mcodebase);
3595
3596                 MCODECHECK(8);
3597
3598                 x86_64_mov_imm_reg(0, REG_ITMP2_XPC);                         /* 10 bytes */
3599                 dseg_adddata(mcodeptr);
3600                 x86_64_mov_imm_reg(xcheckarefs->branchpos - 6, REG_ITMP3);    /* 10 bytes */
3601                 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC);     /* 3 bytes  */
3602
3603                 if (xcodeptr != NULL) {
3604                         x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3605
3606                 } else {
3607                         xcodeptr = mcodeptr;
3608
3609                         x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3610                         x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3611                         x86_64_mov_imm_reg((s8) string_java_lang_NegativeArraySizeException, argintregs[0]);
3612                         x86_64_mov_imm_reg((s8) new_exception, REG_ITMP3);
3613                         x86_64_call_reg(REG_ITMP3);
3614                         x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3615                         x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3616
3617                         x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3618                         x86_64_jmp_reg(REG_ITMP3);
3619                 }
3620         }
3621
3622         /* generate cast check stubs */
3623
3624         xcodeptr = NULL;
3625         
3626         for (; xcastrefs != NULL; xcastrefs = xcastrefs->next) {
3627                 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3628                         gen_resolvebranch(mcodebase + xcastrefs->branchpos, 
3629                                 xcastrefs->branchpos, xcodeptr - mcodebase - (10 + 10 + 3));
3630                         continue;
3631                 }
3632
3633                 gen_resolvebranch(mcodebase + xcastrefs->branchpos, 
3634                                   xcastrefs->branchpos, mcodeptr - mcodebase);
3635
3636                 MCODECHECK(8);
3637
3638                 x86_64_mov_imm_reg(0, REG_ITMP2_XPC);                        /* 10 bytes */
3639                 dseg_adddata(mcodeptr);
3640                 x86_64_mov_imm_reg(xcastrefs->branchpos - 6, REG_ITMP3);     /* 10 bytes */
3641                 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC);    /* 3 bytes  */
3642
3643                 if (xcodeptr != NULL) {
3644                         x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3645                 
3646                 } else {
3647                         xcodeptr = mcodeptr;
3648
3649                         x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3650                         x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3651                         x86_64_mov_imm_reg((s8) string_java_lang_ClassCastException, argintregs[0]);
3652                         x86_64_mov_imm_reg((s8) new_exception, REG_ITMP3);
3653                         x86_64_call_reg(REG_ITMP3);
3654                         x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3655                         x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3656
3657                         x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3658                         x86_64_jmp_reg(REG_ITMP3);
3659                 }
3660         }
3661
3662         /* generate oom check stubs */
3663
3664         xcodeptr = NULL;
3665         
3666         for (; xoomrefs != NULL; xoomrefs = xoomrefs->next) {
3667                 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3668                         gen_resolvebranch(mcodebase + xoomrefs->branchpos, 
3669                                 xoomrefs->branchpos, xcodeptr - mcodebase - (10 + 10 + 3));
3670                         continue;
3671                 }
3672
3673                 gen_resolvebranch(mcodebase + xoomrefs->branchpos, 
3674                                   xoomrefs->branchpos, mcodeptr - mcodebase);
3675
3676                 MCODECHECK(8);
3677
3678                 x86_64_mov_imm_reg(0, REG_ITMP2_XPC);                        /* 10 bytes */
3679                 dseg_adddata(mcodeptr);
3680                 x86_64_mov_imm_reg(xoomrefs->branchpos - 6, REG_ITMP1);     /* 10 bytes */
3681                 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC);    /* 3 bytes  */
3682
3683                 if (xcodeptr != NULL) {
3684                         x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3685                 
3686                 } else {
3687                         xcodeptr = mcodeptr;
3688
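			/* With native threads the exception pointer is reached through
			   builtin_get_exceptionptrptr() (presumably one cell per
			   thread); without threads the global _exceptionptr is used.
			   Either way the pending exception is moved into
			   REG_ITMP1_XPTR and the pointer cell is cleared. */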
3689 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3690                         x86_64_push_reg(REG_ITMP2_XPC);
3691                         x86_64_mov_imm_reg((u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3692                         x86_64_call_reg(REG_ITMP1);
3693                         x86_64_mov_membase_reg(REG_RESULT, 0, REG_ITMP3);
3694                         x86_64_mov_imm_membase(0, REG_RESULT, 0);
3695                         x86_64_mov_reg_reg(REG_ITMP3, REG_ITMP1_XPTR);
3696                         x86_64_pop_reg(REG_ITMP2_XPC);
3697 #else
3698                         x86_64_mov_imm_reg((u8) &_exceptionptr, REG_ITMP3);
3699                         x86_64_mov_membase_reg(REG_ITMP3, 0, REG_ITMP1_XPTR);
3700                         x86_64_mov_imm_membase(0, REG_ITMP3, 0);
3701 #endif
3702
3703                         x86_64_mov_imm_reg((u8) asm_handle_exception, REG_ITMP3);
3704                         x86_64_jmp_reg(REG_ITMP3);
3705                 }
3706         }
3707
3708         /* generate null pointer check stubs */
3709
3710         xcodeptr = NULL;
3711         
3712         for (; xnullrefs != NULL; xnullrefs = xnullrefs->next) {
3713                 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3714                         gen_resolvebranch(mcodebase + xnullrefs->branchpos, 
3715                                 xnullrefs->branchpos, xcodeptr - mcodebase - (10 + 10 + 3));
3716                         continue;
3717                 }
3718
3719                 gen_resolvebranch(mcodebase + xnullrefs->branchpos, 
3720                                   xnullrefs->branchpos, mcodeptr - mcodebase);
3721
3722                 MCODECHECK(8);
3723
3724                 x86_64_mov_imm_reg(0, REG_ITMP2_XPC);                        /* 10 bytes */
3725                 dseg_adddata(mcodeptr);
3726                 x86_64_mov_imm_reg(xnullrefs->branchpos - 6, REG_ITMP1);     /* 10 bytes */
3727                 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC);    /* 3 bytes  */
3728
3729                 if (xcodeptr != NULL) {
3730                         x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3731                 
3732                 } else {
3733                         xcodeptr = mcodeptr;
3734
3735                         x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3736                         x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3737                         x86_64_mov_imm_reg((s8) string_java_lang_NullPointerException, argintregs[0]);
3738                         x86_64_mov_imm_reg((s8) new_exception, REG_ITMP3);
3739                         x86_64_call_reg(REG_ITMP3);
3740                         x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3741                         x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3742
3743                         x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3744                         x86_64_jmp_reg(REG_ITMP3);
3745                 }
3746         }
3747
3748         }
3749
3750         codegen_finish((int)((u1*) mcodeptr - mcodebase));
3751 }
3752
3753
3754 /* function createcompilerstub *************************************************
3755
3756         creates a stub routine which calls the compiler
3757         
3758 *******************************************************************************/
3759
3760 #define COMPSTUBSIZE 23
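/* 23 bytes matches the three instructions emitted below: two 10-byte
   mov_imm_reg loads plus the 3-byte indirect jmp (byte sizes as noted for
   the exception stubs above) */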
3761
3762 u1 *createcompilerstub(methodinfo *m)
3763 {
3764         u1 *s = CNEW(u1, COMPSTUBSIZE);     /* memory to hold the stub            */
3765         mcodeptr = s;                       /* code generation pointer            */
3766
3767                                             /* code for the stub                  */
3768         x86_64_mov_imm_reg((s8) m, REG_ITMP1); /* pass method pointer to compiler */
3769         x86_64_mov_imm_reg((s8) asm_call_jit_compiler, REG_ITMP3);/* load address */
3770         x86_64_jmp_reg(REG_ITMP3);          /* jump to compiler                   */
3771
3772 #ifdef STATISTICS
3773         count_cstub_len += COMPSTUBSIZE;
3774 #endif
3775
3776         return (u1*) s;
3777 }
3778
3779
3780 /* function removecompilerstub *************************************************
3781
3782      deletes a compiler stub from memory (simply by freeing it)
3783
3784 *******************************************************************************/
3785
3786 void removecompilerstub(u1 *stub) 
3787 {
3788         CFREE(stub, COMPSTUBSIZE);
3789 }
3790
3791 /* function: createnativestub **************************************************
3792
3793         creates a stub routine which calls a native method
3794
3795 *******************************************************************************/
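/* Overview of the stub emitted below: run the class-initialization check for
   static methods, optionally trace the call (runverbose), shift the Java
   argument registers up to make room for the env pointer (and the class
   pointer for static methods), call the native function f, optionally trace
   the result, and finally test _exceptionptr so that a pending exception is
   forwarded to asm_handle_nat_exception. */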
3796
3797 #define NATIVESTUBSIZE 420
3798
3799 u1 *createnativestub(functionptr f, methodinfo *m)
3800 {
3801         u1 *s = CNEW(u1, NATIVESTUBSIZE);   /* memory to hold the stub            */
3802         int stackframesize;                 /* size of stackframe if needed       */
3803         mcodeptr = s;                       /* make macros work                   */
3804
3805         reg_init();
3806     descriptor2types(m);                /* set paramcount and paramtypes      */
3807
3808         /* if the method is static, check whether its class is initialized */
3809
3810         if (m->flags & ACC_STATIC) {
3811                 /* if class isn't yet initialized, do it */
3812                 if (!m->class->initialized) {
3813                         /* call helper function which patches this code */
3814                         x86_64_mov_imm_reg((u8) m->class, REG_ITMP1);
3815                         x86_64_mov_imm_reg((u8) asm_check_clinit, REG_ITMP2);
3816                         x86_64_call_reg(REG_ITMP2);
3817                 }
3818         }
3819
3820         if (runverbose) {
3821                 int p, l, s1;
3822
3823                 x86_64_alu_imm_reg(X86_64_SUB, (6 + 8 + 1) * 8, REG_SP);
3824
3825                 x86_64_mov_reg_membase(argintregs[0], REG_SP, 1 * 8);
3826                 x86_64_mov_reg_membase(argintregs[1], REG_SP, 2 * 8);
3827                 x86_64_mov_reg_membase(argintregs[2], REG_SP, 3 * 8);
3828                 x86_64_mov_reg_membase(argintregs[3], REG_SP, 4 * 8);
3829                 x86_64_mov_reg_membase(argintregs[4], REG_SP, 5 * 8);
3830                 x86_64_mov_reg_membase(argintregs[5], REG_SP, 6 * 8);
3831
3832                 x86_64_movq_reg_membase(argfltregs[0], REG_SP, 7 * 8);
3833                 x86_64_movq_reg_membase(argfltregs[1], REG_SP, 8 * 8);
3834                 x86_64_movq_reg_membase(argfltregs[2], REG_SP, 9 * 8);
3835                 x86_64_movq_reg_membase(argfltregs[3], REG_SP, 10 * 8);
3836 /*              x86_64_movq_reg_membase(argfltregs[4], REG_SP, 11 * 8); */
3837 /*              x86_64_movq_reg_membase(argfltregs[5], REG_SP, 12 * 8); */
3838 /*              x86_64_movq_reg_membase(argfltregs[6], REG_SP, 13 * 8); */
3839 /*              x86_64_movq_reg_membase(argfltregs[7], REG_SP, 14 * 8); */
3840
3841                 /* pass float arguments in integer registers so builtin_trace_args can show their raw bit patterns */
3842                 for (p = 0, l = 0; p < m->paramcount; p++) {
3843                         if (IS_FLT_DBL_TYPE(m->paramtypes[p])) {
3844                                 for (s1 = (m->paramcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : m->paramcount - 2; s1 >= p; s1--) {
3845                                         x86_64_mov_reg_reg(argintregs[s1], argintregs[s1 + 1]);
3846                                 }
3847
3848                                 x86_64_movd_freg_reg(argfltregs[l], argintregs[p]);
3849                                 l++;
3850                         }
3851                 }
3852
3853                 x86_64_mov_imm_reg((s8) m, REG_ITMP1);
3854                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, 0 * 8);
3855                 x86_64_mov_imm_reg((s8) builtin_trace_args, REG_ITMP1);
3856                 x86_64_call_reg(REG_ITMP1);
3857
3858                 x86_64_mov_membase_reg(REG_SP, 1 * 8, argintregs[0]);
3859                 x86_64_mov_membase_reg(REG_SP, 2 * 8, argintregs[1]);
3860                 x86_64_mov_membase_reg(REG_SP, 3 * 8, argintregs[2]);
3861                 x86_64_mov_membase_reg(REG_SP, 4 * 8, argintregs[3]);
3862                 x86_64_mov_membase_reg(REG_SP, 5 * 8, argintregs[4]);
3863                 x86_64_mov_membase_reg(REG_SP, 6 * 8, argintregs[5]);
3864
3865                 x86_64_movq_membase_reg(REG_SP, 7 * 8, argfltregs[0]);
3866                 x86_64_movq_membase_reg(REG_SP, 8 * 8, argfltregs[1]);
3867                 x86_64_movq_membase_reg(REG_SP, 9 * 8, argfltregs[2]);
3868                 x86_64_movq_membase_reg(REG_SP, 10 * 8, argfltregs[3]);
3869 /*              x86_64_movq_membase_reg(REG_SP, 11 * 8, argfltregs[4]); */
3870 /*              x86_64_movq_membase_reg(REG_SP, 12 * 8, argfltregs[5]); */
3871 /*              x86_64_movq_membase_reg(REG_SP, 13 * 8, argfltregs[6]); */
3872 /*              x86_64_movq_membase_reg(REG_SP, 14 * 8, argfltregs[7]); */
3873
3874                 x86_64_alu_imm_reg(X86_64_ADD, (6 + 8 + 1) * 8, REG_SP);
3875         }
3876
3877 #if 0
3878         x86_64_alu_imm_reg(X86_64_SUB, 7 * 8, REG_SP);    /* keep stack 16-byte aligned */
3879
3880         /* save callee saved float registers */
3881         x86_64_movq_reg_membase(XMM15, REG_SP, 0 * 8);
3882         x86_64_movq_reg_membase(XMM14, REG_SP, 1 * 8);
3883         x86_64_movq_reg_membase(XMM13, REG_SP, 2 * 8);
3884         x86_64_movq_reg_membase(XMM12, REG_SP, 3 * 8);
3885         x86_64_movq_reg_membase(XMM11, REG_SP, 4 * 8);
3886         x86_64_movq_reg_membase(XMM10, REG_SP, 5 * 8);
3887 #endif
3888
3889         /* save argument registers on stack -- if we have to */
3890         if ((m->flags & ACC_STATIC && m->paramcount > (INT_ARG_CNT - 2)) || m->paramcount > (INT_ARG_CNT - 1)) {
3891                 int i;
3892                 int paramshiftcnt = (m->flags & ACC_STATIC) ? 2 : 1;
3893                 int stackparamcnt = (m->paramcount > INT_ARG_CNT) ? m->paramcount - INT_ARG_CNT : 0;
3894
3895                 stackframesize = stackparamcnt + paramshiftcnt;
3896
3897                 /* keep stack 16-byte aligned */
3898                 if ((stackframesize % 2) == 0) stackframesize++;
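		/* the call into this stub pushed an 8-byte return address, so RSP is
		   16-byte aligned again only if the frame occupies an odd number of
		   8-byte slots (the x86-64 ABI expects 16-byte alignment at the next
		   call) */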
3899
3900                 x86_64_alu_imm_reg(X86_64_SUB, stackframesize * 8, REG_SP);
3901
3902                 /* copy stack arguments into new stack frame -- if any */
3903                 for (i = 0; i < stackparamcnt; i++) {
3904                         x86_64_mov_membase_reg(REG_SP, (stackparamcnt + 1 + i) * 8, REG_ITMP1);
3905                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
3906                 }
3907
3908                 if (m->flags & ACC_STATIC) {
3909                         x86_64_mov_reg_membase(argintregs[5], REG_SP, 1 * 8);
3910                         x86_64_mov_reg_membase(argintregs[4], REG_SP, 0 * 8);
3911
3912                 } else {
3913                         x86_64_mov_reg_membase(argintregs[5], REG_SP, 0 * 8);
3914                 }
3915
3916         } else {
3917                 /* keep stack 16-byte aligned -- this is essential for x86_64 */
3918                 x86_64_alu_imm_reg(X86_64_SUB, 8, REG_SP);
3919                 stackframesize = 1;
3920         }
3921
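	/* shift the Java arguments into the native calling convention: every
	   register argument moves up one slot to make room for the env pointer,
	   or two slots for static methods, where the class pointer takes the
	   place of the missing receiver */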
3922         if (m->flags & ACC_STATIC) {
3923                 x86_64_mov_reg_reg(argintregs[3], argintregs[5]);
3924                 x86_64_mov_reg_reg(argintregs[2], argintregs[4]);
3925                 x86_64_mov_reg_reg(argintregs[1], argintregs[3]);
3926                 x86_64_mov_reg_reg(argintregs[0], argintregs[2]);
3927
3928                 /* put class into second argument register */
3929                 x86_64_mov_imm_reg((s8) m->class, argintregs[1]);
3930
3931         } else {
3932                 x86_64_mov_reg_reg(argintregs[4], argintregs[5]);
3933                 x86_64_mov_reg_reg(argintregs[3], argintregs[4]);
3934                 x86_64_mov_reg_reg(argintregs[2], argintregs[3]);
3935                 x86_64_mov_reg_reg(argintregs[1], argintregs[2]);
3936                 x86_64_mov_reg_reg(argintregs[0], argintregs[1]);
3937         }
3938
3939         /* put env into first argument register */
3940         x86_64_mov_imm_reg((s8) &env, argintregs[0]);
3941
3942         x86_64_mov_imm_reg((s8) f, REG_ITMP1);
3943         x86_64_call_reg(REG_ITMP1);
3944
3945         /* remove stackframe if there is one */
3946         if (stackframesize) {
3947                 x86_64_alu_imm_reg(X86_64_ADD, stackframesize * 8, REG_SP);
3948         }
3949
3950         if (runverbose) {
3951                 x86_64_alu_imm_reg(X86_64_SUB, 3 * 8, REG_SP);    /* keep stack 16-byte aligned */
3952
3953                 x86_64_mov_reg_membase(REG_RESULT, REG_SP, 0 * 8);
3954                 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, 1 * 8);
3955
3956                 x86_64_mov_imm_reg((s8) m, argintregs[0]);
3957                 x86_64_mov_reg_reg(REG_RESULT, argintregs[1]);
3958                 M_FLTMOVE(REG_FRESULT, argfltregs[0]);
3959                 M_FLTMOVE(REG_FRESULT, argfltregs[1]);
3960
3961                 x86_64_mov_imm_reg((s8) builtin_displaymethodstop, REG_ITMP1);
3962                 x86_64_call_reg(REG_ITMP1);
3963
3964                 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_RESULT);
3965                 x86_64_movq_membase_reg(REG_SP, 1 * 8, REG_FRESULT);
3966
3967                 x86_64_alu_imm_reg(X86_64_ADD, 3 * 8, REG_SP);    /* keep stack 16-byte aligned */
3968         }
3969
3970 #if 0
3971         /* restore callee saved registers */
3972         x86_64_movq_membase_reg(REG_SP, 0 * 8, XMM15);
3973         x86_64_movq_membase_reg(REG_SP, 1 * 8, XMM14);
3974         x86_64_movq_membase_reg(REG_SP, 2 * 8, XMM13);
3975         x86_64_movq_membase_reg(REG_SP, 3 * 8, XMM12);
3976         x86_64_movq_membase_reg(REG_SP, 4 * 8, XMM11);
3977         x86_64_movq_membase_reg(REG_SP, 5 * 8, XMM10);
3978
3979         x86_64_alu_imm_reg(X86_64_ADD, 7 * 8, REG_SP);    /* keep stack 16-byte aligned */
3980 #endif
3981
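	/* check for a pending exception: the jcc skips the single-byte ret when
	   _exceptionptr is non-NULL; the exception is then moved into
	   REG_ITMP1_XPTR, the pointer cell is cleared and REG_ITMP2_XPC is set
	   to the return address minus the length of the call instruction before
	   jumping to asm_handle_nat_exception */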
3982         x86_64_mov_imm_reg((s8) &_exceptionptr, REG_ITMP3);
3983         x86_64_mov_membase_reg(REG_ITMP3, 0, REG_ITMP3);
3984         x86_64_test_reg_reg(REG_ITMP3, REG_ITMP3);
3985         x86_64_jcc(X86_64_CC_NE, 1);
3986
3987         x86_64_ret();
3988
3989         x86_64_mov_reg_reg(REG_ITMP3, REG_ITMP1_XPTR);
3990         x86_64_mov_imm_reg((s8) &_exceptionptr, REG_ITMP3);
3991         x86_64_alu_reg_reg(X86_64_XOR, REG_ITMP2, REG_ITMP2);
3992         x86_64_mov_reg_membase(REG_ITMP2, REG_ITMP3, 0);    /* clear exception pointer */
3993
3994         x86_64_mov_membase_reg(REG_SP, 0, REG_ITMP2_XPC);    /* get return address from stack */
3995         x86_64_alu_imm_reg(X86_64_SUB, 3, REG_ITMP2_XPC);    /* callq */
3996
3997         x86_64_mov_imm_reg((s8) asm_handle_nat_exception, REG_ITMP3);
3998         x86_64_jmp_reg(REG_ITMP3);
3999
4000 #if 0
4001         {
4002                 static int stubprinted;
4003                 if (!stubprinted)
4004                         printf("stubsize: %d\n", ((long)mcodeptr - (long) s));
4005                 stubprinted = 1;
4006         }
4007 #endif
4008
4009 #ifdef STATISTICS
4010         count_nstub_len += NATIVESTUBSIZE;
4011 #endif
4012
4013         return s;
4014 }
4015
4016
4017 /* function: removenativestub **************************************************
4018
4019     removes a previously created native-stub from memory
4020     
4021 *******************************************************************************/
4022
4023 void removenativestub(u1 *stub)
4024 {
4025         CFREE(stub, NATIVESTUBSIZE);
4026 }
4027
4028
4029 /* code generation functions */
4030
4031 void x86_64_emit_ialu(s4 alu_op, stackptr src, instruction *iptr)
4032 {
4033         s4 s1 = src->prev->regoff;
4034         s4 s2 = src->regoff;
4035         s4 d = iptr->dst->regoff;
4036
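	/* dispatch on where the two sources and the destination live: each may
	   be in a register or spilled to a stack slot (INMEMORY, regoff = slot
	   index); REG_ITMP1 serves as scratch whenever an operand and the
	   destination are both in memory.  The 64-bit and immediate variants
	   below follow the same pattern. */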
4037         if (iptr->dst->flags & INMEMORY) {
4038                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4039                         if (s2 == d) {
4040                                 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4041                                 x86_64_alul_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4042
4043                         } else if (s1 == d) {
4044                                 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4045                                 x86_64_alul_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4046
4047                         } else {
4048                                 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4049                                 x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, REG_ITMP1);
4050                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
4051                         }
4052
4053                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4054                         if (s2 == d) {
4055                                 x86_64_alul_reg_membase(alu_op, s1, REG_SP, d * 8);
4056
4057                         } else {
4058                                 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4059                                 x86_64_alul_reg_reg(alu_op, s1, REG_ITMP1);
4060                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
4061                         }
4062
4063                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4064                         if (s1 == d) {
4065                                 x86_64_alul_reg_membase(alu_op, s2, REG_SP, d * 8);
4066                                                 
4067                         } else {
4068                                 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4069                                 x86_64_alul_reg_reg(alu_op, s2, REG_ITMP1);
4070                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
4071                         }
4072
4073                 } else {
4074                         x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4075                         x86_64_alul_reg_membase(alu_op, s2, REG_SP, d * 8);
4076                 }
4077
4078         } else {
4079                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4080                         x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4081                         x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, d);
4082
4083                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4084                         M_INTMOVE(s1, d);
4085                         x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, d);
4086
4087                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4088                         M_INTMOVE(s2, d);
4089                         x86_64_alul_membase_reg(alu_op, REG_SP, s1 * 8, d);
4090
4091                 } else {
4092                         if (s2 == d) {
4093                                 x86_64_alul_reg_reg(alu_op, s1, d);
4094
4095                         } else {
4096                                 M_INTMOVE(s1, d);
4097                                 x86_64_alul_reg_reg(alu_op, s2, d);
4098                         }
4099                 }
4100         }
4101 }
4102
4103
4104
4105 void x86_64_emit_lalu(s4 alu_op, stackptr src, instruction *iptr)
4106 {
4107         s4 s1 = src->prev->regoff;
4108         s4 s2 = src->regoff;
4109         s4 d = iptr->dst->regoff;
4110
4111         if (iptr->dst->flags & INMEMORY) {
4112                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4113                         if (s2 == d) {
4114                                 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4115                                 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4116
4117                         } else if (s1 == d) {
4118                                 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4119                                 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4120
4121                         } else {
4122                                 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4123                                 x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, REG_ITMP1);
4124                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4125                         }
4126
4127                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4128                         if (s2 == d) {
4129                                 x86_64_alu_reg_membase(alu_op, s1, REG_SP, d * 8);
4130
4131                         } else {
4132                                 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4133                                 x86_64_alu_reg_reg(alu_op, s1, REG_ITMP1);
4134                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4135                         }
4136
4137                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4138                         if (s1 == d) {
4139                                 x86_64_alu_reg_membase(alu_op, s2, REG_SP, d * 8);
4140                                                 
4141                         } else {
4142                                 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4143                                 x86_64_alu_reg_reg(alu_op, s2, REG_ITMP1);
4144                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4145                         }
4146
4147                 } else {
4148                         x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4149                         x86_64_alu_reg_membase(alu_op, s2, REG_SP, d * 8);
4150                 }
4151
4152         } else {
4153                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4154                         x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4155                         x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, d);
4156
4157                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4158                         M_INTMOVE(s1, d);
4159                         x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, d);
4160
4161                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4162                         M_INTMOVE(s2, d);
4163                         x86_64_alu_membase_reg(alu_op, REG_SP, s1 * 8, d);
4164
4165                 } else {
4166                         if (s2 == d) {
4167                                 x86_64_alu_reg_reg(alu_op, s1, d);
4168
4169                         } else {
4170                                 M_INTMOVE(s1, d);
4171                                 x86_64_alu_reg_reg(alu_op, s2, d);
4172                         }
4173                 }
4174         }
4175 }
4176
4177
4178
4179 void x86_64_emit_ialuconst(s4 alu_op, stackptr src, instruction *iptr)
4180 {
4181         s4 s1 = src->regoff;
4182         s4 d = iptr->dst->regoff;
4183
4184         if (iptr->dst->flags & INMEMORY) {
4185                 if (src->flags & INMEMORY) {
4186                         if (s1 == d) {
4187                                 x86_64_alul_imm_membase(alu_op, iptr->val.i, REG_SP, d * 8);
4188
4189                         } else {
4190                                 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4191                                 x86_64_alul_imm_reg(alu_op, iptr->val.i, REG_ITMP1);
4192                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
4193                         }
4194
4195                 } else {
4196                         x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4197                         x86_64_alul_imm_membase(alu_op, iptr->val.i, REG_SP, d * 8);
4198                 }
4199
4200         } else {
4201                 if (src->flags & INMEMORY) {
4202                         x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4203                         x86_64_alul_imm_reg(alu_op, iptr->val.i, d);
4204
4205                 } else {
4206                         M_INTMOVE(s1, d);
4207                         x86_64_alul_imm_reg(alu_op, iptr->val.i, d);
4208                 }
4209         }
4210 }
4211
4212
4213
4214 void x86_64_emit_laluconst(s4 alu_op, stackptr src, instruction *iptr)
4215 {
4216         s4 s1 = src->regoff;
4217         s4 d = iptr->dst->regoff;
4218
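	/* x86-64 ALU instructions only accept 32-bit sign-extended immediates,
	   so a 64-bit constant that fails x86_64_is_imm32() is first loaded
	   into a temporary register */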
4219         if (iptr->dst->flags & INMEMORY) {
4220                 if (src->flags & INMEMORY) {
4221                         if (s1 == d) {
4222                                 if (x86_64_is_imm32(iptr->val.l)) {
4223                                         x86_64_alu_imm_membase(alu_op, iptr->val.l, REG_SP, d * 8);
4224
4225                                 } else {
4226                                         x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4227                                         x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4228                                 }
4229
4230                         } else {
4231                                 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4232
4233                                 if (x86_64_is_imm32(iptr->val.l)) {
4234                                         x86_64_alu_imm_reg(alu_op, iptr->val.l, REG_ITMP1);
4235
4236                                 } else {
4237                                         x86_64_mov_imm_reg(iptr->val.l, REG_ITMP2);
4238                                         x86_64_alu_reg_reg(alu_op, REG_ITMP2, REG_ITMP1);
4239                                 }
4240                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4241                         }
4242
4243                 } else {
4244                         x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4245
4246                         if (x86_64_is_imm32(iptr->val.l)) {
4247                                 x86_64_alu_imm_membase(alu_op, iptr->val.l, REG_SP, d * 8);
4248
4249                         } else {
4250                                 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4251                                 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4252                         }
4253                 }
4254
4255         } else {
4256                 if (src->flags & INMEMORY) {
4257                         x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4258
4259                 } else {
4260                         M_INTMOVE(s1, d);
4261                 }
4262
4263                 if (x86_64_is_imm32(iptr->val.l)) {
4264                         x86_64_alu_imm_reg(alu_op, iptr->val.l, d);
4265
4266                 } else {
4267                         x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4268                         x86_64_alu_reg_reg(alu_op, REG_ITMP1, d);
4269                 }
4270         }
4271 }
4272
4273
4274
4275 void x86_64_emit_ishift(s4 shift_op, stackptr src, instruction *iptr)
4276 {
4277         s4 s1 = src->prev->regoff;
4278         s4 s2 = src->regoff;
4279         s4 d = iptr->dst->regoff;
4280
4281         M_INTMOVE(RCX, REG_ITMP1);    /* save RCX */
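	/* x86-64 variable shifts take their count in CL, so RCX is saved above,
	   loaded with the shift count below and restored at the end; if the
	   destination itself is RCX, the result is built in REG_ITMP3 and moved
	   back afterwards */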
4282         if (iptr->dst->flags & INMEMORY) {
4283                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4284                         if (s1 == d) {
4285                                 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4286                                 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4287
4288                         } else {
4289                                 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4290                                 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4291                                 x86_64_shiftl_reg(shift_op, REG_ITMP2);
4292                                 x86_64_movl_reg_membase(REG_ITMP2, REG_SP, d * 8);
4293                         }
4294
4295                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4296                         x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4297                         x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4298                         x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4299
4300                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4301                         if (s1 == d) {
4302                                 M_INTMOVE(s2, RCX);
4303                                 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4304
4305                         } else {
4306                                 M_INTMOVE(s2, RCX);
4307                                 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4308                                 x86_64_shiftl_reg(shift_op, REG_ITMP2);
4309                                 x86_64_movl_reg_membase(REG_ITMP2, REG_SP, d * 8);
4310                         }
4311
4312                 } else {
4313                         M_INTMOVE(s2, RCX);
4314                         x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4315                         x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4316                 }
4317                 M_INTMOVE(REG_ITMP1, RCX);    /* restore RCX */
4318
4319         } else {
4320                 if (d == RCX) {
4321                         d = REG_ITMP3;
4322                 }
4323                                         
4324                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4325                         x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4326                         x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4327                         x86_64_shiftl_reg(shift_op, d);
4328
4329                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4330                         M_INTMOVE(s1, d);    /* maybe src is RCX */
4331                         x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4332                         x86_64_shiftl_reg(shift_op, d);
4333
4334                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4335                         M_INTMOVE(s2, RCX);
4336                         x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4337                         x86_64_shiftl_reg(shift_op, d);
4338
4339                 } else {
4340                         if (s1 == RCX) {
4341                                 M_INTMOVE(s1, d);
4342                                 M_INTMOVE(s2, RCX);
4343
4344                         } else {
4345                                 M_INTMOVE(s2, RCX);
4346                                 M_INTMOVE(s1, d);
4347                         }
4348                         x86_64_shiftl_reg(shift_op, d);
4349                 }
4350
4351                 if (iptr->dst->regoff == RCX) {    /* result was computed in REG_ITMP3 above */
4352                         M_INTMOVE(REG_ITMP3, RCX);
4353
4354                 } else {
4355                         M_INTMOVE(REG_ITMP1, RCX);    /* restore RCX */
4356                 }
4357         }
4358 }
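
/*
 * Note on x86_64_emit_ishift above and x86_64_emit_lshift below: variable
 * shifts on x86-64 take their count only in CL (opcode 0xd3 /digit), so the
 * helpers park the old RCX in REG_ITMP1, route the shift count through RCX
 * and restore RCX afterwards.  When the destination itself is RCX, the result
 * is staged in REG_ITMP3 first and moved back at the end.
 */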
4359
4360
4361
4362 void x86_64_emit_lshift(s4 shift_op, stackptr src, instruction *iptr)
4363 {
4364         s4 s1 = src->prev->regoff;
4365         s4 s2 = src->regoff;
4366         s4 d = iptr->dst->regoff;
4367
4368         M_INTMOVE(RCX, REG_ITMP1);    /* save RCX */
4369         if (iptr->dst->flags & INMEMORY) {
4370                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4371                         if (s1 == d) {
4372                                 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4373                                 x86_64_shift_membase(shift_op, REG_SP, d * 8);
4374
4375                         } else {
4376                                 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4377                                 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4378                                 x86_64_shift_reg(shift_op, REG_ITMP2);
4379                                 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, d * 8);
4380                         }
4381
4382                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4383                         x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4384                         x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4385                         x86_64_shift_membase(shift_op, REG_SP, d * 8);
4386
4387                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4388                         if (s1 == d) {
4389                                 M_INTMOVE(s2, RCX);
4390                                 x86_64_shift_membase(shift_op, REG_SP, d * 8);
4391
4392                         } else {
4393                                 M_INTMOVE(s2, RCX);
4394                                 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4395                                 x86_64_shift_reg(shift_op, REG_ITMP2);
4396                                 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, d * 8);
4397                         }
4398
4399                 } else {
4400                         M_INTMOVE(s2, RCX);
4401                         x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4402                         x86_64_shift_membase(shift_op, REG_SP, d * 8);
4403                 }
4404                 M_INTMOVE(REG_ITMP1, RCX);    /* restore RCX */
4405
4406         } else {
4407                 if (d == RCX) {
4408                         d = REG_ITMP3;
4409                 }
4410
4411                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4412                         x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4413                         x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4414                         x86_64_shift_reg(shift_op, d);
4415
4416                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4417                         M_INTMOVE(s1, d);    /* maybe src is RCX */
4418                         x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4419                         x86_64_shift_reg(shift_op, d);
4420
4421                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4422                         M_INTMOVE(s2, RCX);
4423                         x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4424                         x86_64_shift_reg(shift_op, d);
4425
4426                 } else {
4427                         if (s1 == RCX) {
4428                                 M_INTMOVE(s1, d);
4429                                 M_INTMOVE(s2, RCX);
4430                         } else {
4431                                 M_INTMOVE(s2, RCX);
4432                                 M_INTMOVE(s1, d);
4433                         }
4434                         x86_64_shift_reg(shift_op, d);
4435                 }
4436
4437                 if (iptr->dst->regoff == RCX) {    /* result was computed in REG_ITMP3 above */
4438                         M_INTMOVE(REG_ITMP3, RCX);
4439
4440                 } else {
4441                         M_INTMOVE(REG_ITMP1, RCX);    /* restore RCX */
4442                 }
4443         }
4444 }
4445
4446
4447
4448 void x86_64_emit_ishiftconst(s4 shift_op, stackptr src, instruction *iptr)
4449 {
4450         s4 s1 = src->regoff;
4451         s4 d = iptr->dst->regoff;
4452
4453         if ((src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
4454                 if (s1 == d) {
4455                         x86_64_shiftl_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
4456
4457                 } else {
4458                         x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4459                         x86_64_shiftl_imm_reg(shift_op, iptr->val.i, REG_ITMP1);
4460                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
4461                 }
4462
4463         } else if ((src->flags & INMEMORY) && !(iptr->dst->flags & INMEMORY)) {
4464                 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4465                 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, d);
4466                                 
4467         } else if (!(src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
4468                 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4469                 x86_64_shiftl_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
4470
4471         } else {
4472                 M_INTMOVE(s1, d);
4473                 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, d);
4474         }
4475 }
4476
4477
4478
4479 void x86_64_emit_lshiftconst(s4 shift_op, stackptr src, instruction *iptr)
4480 {
4481         s4 s1 = src->regoff;
4482         s4 d = iptr->dst->regoff;
4483
4484         if ((src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
4485                 if (s1 == d) {
4486                         x86_64_shift_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
4487
4488                 } else {
4489                         x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4490                         x86_64_shift_imm_reg(shift_op, iptr->val.i, REG_ITMP1);
4491                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4492                 }
4493
4494         } else if ((src->flags & INMEMORY) && !(iptr->dst->flags & INMEMORY)) {
4495                 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4496                 x86_64_shift_imm_reg(shift_op, iptr->val.i, d);
4497                                 
4498         } else if (!(src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
4499                 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4500                 x86_64_shift_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
4501
4502         } else {
4503                 M_INTMOVE(s1, d);
4504                 x86_64_shift_imm_reg(shift_op, iptr->val.i, d);
4505         }
4506 }
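
/*
 * The *shiftconst variants above encode the shift count as an immediate
 * (0xc1 /digit ib, or 0xd1 /digit for a count of one -- see
 * x86_64_shift_imm_reg further down), so unlike the variable-count helpers
 * they never have to save and restore RCX.
 */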
4507
4508
4509
4510 void x86_64_emit_ifcc(s4 if_op, stackptr src, instruction *iptr)
4511 {
4512         if (src->flags & INMEMORY) {
4513                 x86_64_alul_imm_membase(X86_64_CMP, iptr->val.i, REG_SP, src->regoff * 8);
4514
4515         } else {
4516                 x86_64_alul_imm_reg(X86_64_CMP, iptr->val.i, src->regoff);
4517         }
4518         x86_64_jcc(if_op, 0);
4519         codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
4520 }
4521
4522
4523
4524 void x86_64_emit_if_lcc(s4 if_op, stackptr src, instruction *iptr)
4525 {
4526         s4 s1 = src->regoff;
4527
4528         if (src->flags & INMEMORY) {
4529                 if (x86_64_is_imm32(iptr->val.l)) {
4530                         x86_64_alu_imm_membase(X86_64_CMP, iptr->val.l, REG_SP, s1 * 8);
4531
4532                 } else {
4533                         x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4534                         x86_64_alu_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
4535                 }
4536
4537         } else {
4538                 if (x86_64_is_imm32(iptr->val.l)) {
4539                         x86_64_alu_imm_reg(X86_64_CMP, iptr->val.l, s1);
4540
4541                 } else {
4542                         x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4543                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP1, s1);
4544                 }
4545         }
4546         x86_64_jcc(if_op, 0);
4547         codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
4548 }
4549
4550
4551
4552 void x86_64_emit_if_icmpcc(s4 if_op, stackptr src, instruction *iptr)
4553 {
4554         s4 s1 = src->prev->regoff;
4555         s4 s2 = src->regoff;
4556
4557         if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4558                 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4559                 x86_64_alul_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
4560
4561         } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4562                 x86_64_alul_membase_reg(X86_64_CMP, REG_SP, s2 * 8, s1);
4563
4564         } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4565                 x86_64_alul_reg_membase(X86_64_CMP, s2, REG_SP, s1 * 8);
4566
4567         } else {
4568                 x86_64_alul_reg_reg(X86_64_CMP, s2, s1);
4569         }
4570         x86_64_jcc(if_op, 0);
4571         codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
4572 }
4573
4574
4575
4576 void x86_64_emit_if_lcmpcc(s4 if_op, stackptr src, instruction *iptr)
4577 {
4578         s4 s1 = src->prev->regoff;
4579         s4 s2 = src->regoff;
4580
4581         if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4582                 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4583                 x86_64_alu_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
4584
4585         } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4586                 x86_64_alu_membase_reg(X86_64_CMP, REG_SP, s2 * 8, s1);
4587
4588         } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4589                 x86_64_alu_reg_membase(X86_64_CMP, s2, REG_SP, s1 * 8);
4590
4591         } else {
4592                 x86_64_alu_reg_reg(X86_64_CMP, s2, s1);
4593         }
4594         x86_64_jcc(if_op, 0);
4595         codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
4596 }
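
/*
 * All four conditional-branch helpers above emit the compare followed by
 * x86_64_jcc with a zero rel32 placeholder; codegen_addreference() records
 * the branch site against the target basic block so the displacement can be
 * filled in once the final address of that block is known.
 */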
4597
4598
4599
4600 #if 1
4601
4602 /*
4603  * mov ops
4604  */
4605 void x86_64_mov_reg_reg(s8 reg, s8 dreg) {
4606         x86_64_emit_rex(1,(reg),0,(dreg));
4607         *(mcodeptr++) = 0x89;
4608         x86_64_emit_reg((reg),(dreg));
4609 }
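
/*
 * Operand encoding convention used throughout this file: x86_64_emit_reg(a, b)
 * places a in the ModRM reg field and b in the r/m field.  For the store-form
 * opcode 0x89 used above the source lives in the reg field, so e.g. a move of
 * rax into rcx comes out as 48 89 c1.
 */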
4610
4611
4612 void x86_64_mov_imm_reg(s8 imm, s8 reg) {
4613         x86_64_emit_rex(1,0,0,(reg));
4614         *(mcodeptr++) = 0xb8 + ((reg) & 0x07);
4615         x86_64_emit_imm64((imm));
4616 }
4617
4618
4619 void x86_64_movl_imm_reg(s8 imm, s8 reg) {
4620         x86_64_emit_rex(0,0,0,(reg));
4621         *(mcodeptr++) = 0xb8 + ((reg) & 0x07);
4622         x86_64_emit_imm32((imm));
4623 }
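
/*
 * x86_64_mov_imm_reg always writes the full 64-bit immediate (REX.W, 0xb8+rd,
 * imm64), independent of the value, while x86_64_movl_imm_reg writes a 32-bit
 * immediate that the CPU zero-extends into the upper half of the register.
 */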
4624
4625
4626 void x86_64_mov_membase_reg(s8 basereg, s8 disp, s8 reg) {
4627         x86_64_emit_rex(1,(reg),0,(basereg));
4628         *(mcodeptr++) = 0x8b;
4629         x86_64_emit_membase((basereg),(disp),(reg));
4630 }
4631
4632
4633 void x86_64_movl_membase_reg(s8 basereg, s8 disp, s8 reg) {
4634         x86_64_emit_rex(0,(reg),0,(basereg));
4635         *(mcodeptr++) = 0x8b;
4636         x86_64_emit_membase((basereg),(disp),(reg));
4637 }
4638
4639
4640 /*
4641  * variant used for INVOKEVIRTUAL/INVOKEINTERFACE: the displacement is always
4642  * emitted as a full 32-bit immediate, so the instruction length stays constant
4643  */
4644 void x86_64_mov_membase32_reg(s8 basereg, s8 disp, s8 reg) {
4645         x86_64_emit_rex(1,(reg),0,(basereg));
4646         *(mcodeptr++) = 0x8b;
4647         x86_64_address_byte(2, (reg), (basereg));
4648         x86_64_emit_imm32((disp));
4649 }
4650
4651
4652 void x86_64_mov_reg_membase(s8 reg, s8 basereg, s8 disp) {
4653         x86_64_emit_rex(1,(reg),0,(basereg));
4654         *(mcodeptr++) = 0x89;
4655         x86_64_emit_membase((basereg),(disp),(reg));
4656 }
4657
4658
4659 void x86_64_movl_reg_membase(s8 reg, s8 basereg, s8 disp) {
4660         x86_64_emit_rex(0,(reg),0,(basereg));
4661         *(mcodeptr++) = 0x89;
4662         x86_64_emit_membase((basereg),(disp),(reg));
4663 }
4664
4665
4666 void x86_64_mov_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4667         x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4668         *(mcodeptr++) = 0x8b;
4669         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4670 }
4671
4672
4673 void x86_64_movl_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4674         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4675         *(mcodeptr++) = 0x8b;
4676         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4677 }
4678
4679
4680 void x86_64_mov_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4681         x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4682         *(mcodeptr++) = 0x89;
4683         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4684 }
4685
4686
4687 void x86_64_movl_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4688         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4689         *(mcodeptr++) = 0x89;
4690         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4691 }
4692
4693
4694 void x86_64_movw_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4695         *(mcodeptr++) = 0x66;
4696         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4697         *(mcodeptr++) = 0x89;
4698         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4699 }
4700
4701
4702 void x86_64_movb_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4703         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4704         *(mcodeptr++) = 0x88;
4705         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4706 }
4707
4708
4709 void x86_64_mov_imm_membase(s8 imm, s8 basereg, s8 disp) {
4710         x86_64_emit_rex(1,0,0,(basereg));
4711         *(mcodeptr++) = 0xc7;
4712         x86_64_emit_membase((basereg),(disp),0);
4713         x86_64_emit_imm32((imm));
4714 }
4715
4716
4717 void x86_64_movl_imm_membase(s8 imm, s8 basereg, s8 disp) {
4718         x86_64_emit_rex(0,0,0,(basereg));
4719         *(mcodeptr++) = 0xc7;
4720         x86_64_emit_membase((basereg),(disp),0);
4721         x86_64_emit_imm32((imm));
4722 }
4723
4724
4725 void x86_64_movsbq_reg_reg(s8 reg, s8 dreg) {
4726         x86_64_emit_rex(1,(dreg),0,(reg));
4727         *(mcodeptr++) = 0x0f;
4728         *(mcodeptr++) = 0xbe;
4729         /* dreg goes into the ModRM reg field: this opcode encodes its destination there, unlike 0x89 */
4730         x86_64_emit_reg((dreg),(reg));
4731 }
4732
4733
4734 void x86_64_movsbq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4735         x86_64_emit_rex(1,(dreg),0,(basereg));
4736         *(mcodeptr++) = 0x0f;
4737         *(mcodeptr++) = 0xbe;
4738         x86_64_emit_membase((basereg),(disp),(dreg));
4739 }
4740
4741
4742 void x86_64_movswq_reg_reg(s8 reg, s8 dreg) {
4743         x86_64_emit_rex(1,(dreg),0,(reg));
4744         *(mcodeptr++) = 0x0f;
4745         *(mcodeptr++) = 0xbf;
4746         /* dreg goes into the ModRM reg field: this opcode encodes its destination there, unlike 0x89 */
4747         x86_64_emit_reg((dreg),(reg));
4748 }
4749
4750
4751 void x86_64_movswq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4752         x86_64_emit_rex(1,(dreg),0,(basereg));
4753         *(mcodeptr++) = 0x0f;
4754         *(mcodeptr++) = 0xbf;
4755         x86_64_emit_membase((basereg),(disp),(dreg));
4756 }
4757
4758
4759 void x86_64_movslq_reg_reg(s8 reg, s8 dreg) {
4760         x86_64_emit_rex(1,(dreg),0,(reg));
4761         *(mcodeptr++) = 0x63;
4762         /* dreg goes into the ModRM reg field: this opcode encodes its destination there, unlike 0x89 */
4763         x86_64_emit_reg((dreg),(reg));
4764 }
4765
4766
4767 void x86_64_movslq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4768         x86_64_emit_rex(1,(dreg),0,(basereg));
4769         *(mcodeptr++) = 0x63;
4770         x86_64_emit_membase((basereg),(disp),(dreg));
4771 }
4772
4773
4774 void x86_64_movzwq_reg_reg(s8 reg, s8 dreg) {
4775         x86_64_emit_rex(1,(dreg),0,(reg));
4776         *(mcodeptr++) = 0x0f;
4777         *(mcodeptr++) = 0xb7;
4778         /* dreg goes into the ModRM reg field: this opcode encodes its destination there, unlike 0x89 */
4779         x86_64_emit_reg((dreg),(reg));
4780 }
4781
4782
4783 void x86_64_movzwq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4784         x86_64_emit_rex(1,(dreg),0,(basereg));
4785         *(mcodeptr++) = 0x0f;
4786         *(mcodeptr++) = 0xb7;
4787         x86_64_emit_membase((basereg),(disp),(dreg));
4788 }
4789
4790
4791 void x86_64_movswq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4792         x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4793         *(mcodeptr++) = 0x0f;
4794         *(mcodeptr++) = 0xbf;
4795         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4796 }
4797
4798
4799 void x86_64_movsbq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4800         x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4801         *(mcodeptr++) = 0x0f;
4802         *(mcodeptr++) = 0xbe;
4803         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4804 }
4805
4806
4807 void x86_64_movzwq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4808         x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4809         *(mcodeptr++) = 0x0f;
4810         *(mcodeptr++) = 0xb7;
4811         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4812 }
4813
4814
4815
4816 /*
4817  * alu operations
4818  */
4819 void x86_64_alu_reg_reg(s8 opc, s8 reg, s8 dreg) {
4820         x86_64_emit_rex(1,(reg),0,(dreg));
4821         *(mcodeptr++) = (((opc)) << 3) + 1;
4822         x86_64_emit_reg((reg),(dreg));
4823 }
4824
4825
4826 void x86_64_alul_reg_reg(s8 opc, s8 reg, s8 dreg) {
4827         x86_64_emit_rex(0,(reg),0,(dreg));
4828         *(mcodeptr++) = (((opc)) << 3) + 1;
4829         x86_64_emit_reg((reg),(dreg));
4830 }
4831
4832
4833 void x86_64_alu_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp) {
4834         x86_64_emit_rex(1,(reg),0,(basereg));
4835         *(mcodeptr++) = (((opc)) << 3) + 1;
4836         x86_64_emit_membase((basereg),(disp),(reg));
4837 }
4838
4839
4840 void x86_64_alul_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp) {
4841         x86_64_emit_rex(0,(reg),0,(basereg));
4842         *(mcodeptr++) = (((opc)) << 3) + 1;
4843         x86_64_emit_membase((basereg),(disp),(reg));
4844 }
4845
4846
4847 void x86_64_alu_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg) {
4848         x86_64_emit_rex(1,(reg),0,(basereg));
4849         *(mcodeptr++) = (((opc)) << 3) + 3;
4850         x86_64_emit_membase((basereg),(disp),(reg));
4851 }
4852
4853
4854 void x86_64_alul_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg) {
4855         x86_64_emit_rex(0,(reg),0,(basereg));
4856         *(mcodeptr++) = (((opc)) << 3) + 3;
4857         x86_64_emit_membase((basereg),(disp),(reg));
4858 }
4859
4860
4861 void x86_64_alu_imm_reg(s8 opc, s8 imm, s8 dreg) {
4862         if (x86_64_is_imm8(imm)) {
4863                 x86_64_emit_rex(1,0,0,(dreg));
4864                 *(mcodeptr++) = 0x83;
4865                 x86_64_emit_reg((opc),(dreg));
4866                 x86_64_emit_imm8((imm));
4867         } else {
4868                 x86_64_emit_rex(1,0,0,(dreg));
4869                 *(mcodeptr++) = 0x81;
4870                 x86_64_emit_reg((opc),(dreg));
4871                 x86_64_emit_imm32((imm));
4872         }
4873 }
4874
4875
4876 void x86_64_alul_imm_reg(s8 opc, s8 imm, s8 dreg) {
4877         if (x86_64_is_imm8(imm)) {
4878                 x86_64_emit_rex(0,0,0,(dreg));
4879                 *(mcodeptr++) = 0x83;
4880                 x86_64_emit_reg((opc),(dreg));
4881                 x86_64_emit_imm8((imm));
4882         } else {
4883                 x86_64_emit_rex(0,0,0,(dreg));
4884                 *(mcodeptr++) = 0x81;
4885                 x86_64_emit_reg((opc),(dreg));
4886                 x86_64_emit_imm32((imm));
4887         }
4888 }
4889
4890
4891 void x86_64_alu_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
4892         if (x86_64_is_imm8(imm)) {
4893                 x86_64_emit_rex(1,(basereg),0,0);
4894                 *(mcodeptr++) = 0x83;
4895                 x86_64_emit_membase((basereg),(disp),(opc));
4896                 x86_64_emit_imm8((imm));
4897         } else {
4898                 x86_64_emit_rex(1,(basereg),0,0);
4899                 *(mcodeptr++) = 0x81;
4900                 x86_64_emit_membase((basereg),(disp),(opc));
4901                 x86_64_emit_imm32((imm));
4902         }
4903 }
4904
4905
4906 void x86_64_alul_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
4907         if (x86_64_is_imm8(imm)) {
4908                 x86_64_emit_rex(0,(basereg),0,0);
4909                 *(mcodeptr++) = 0x83;
4910                 x86_64_emit_membase((basereg),(disp),(opc));
4911                 x86_64_emit_imm8((imm));
4912         } else {
4913                 x86_64_emit_rex(0,(basereg),0,0);
4914                 *(mcodeptr++) = 0x81;
4915                 x86_64_emit_membase((basereg),(disp),(opc));
4916                 x86_64_emit_imm32((imm));
4917         }
4918 }
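
/*
 * ALU opcode scheme used above: (opc << 3) + 1 is the "r/m, reg" form and
 * (opc << 3) + 3 the "reg, r/m" form; for immediates the same opc number is
 * reused as the /digit in the ModRM reg field, with 0x83 taking a
 * sign-extended imm8 and 0x81 a full imm32.  As a worked example (assuming
 * the X86_64_ADD opcode constant defined alongside X86_64_CMP):
 *
 *     x86_64_alu_imm_reg(X86_64_ADD, 8, REG_SP);
 *
 * emits 48 83 c4 08, i.e. addq $8, %rsp.
 */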
4919
4920
4921 void x86_64_test_reg_reg(s8 reg, s8 dreg) {
4922         x86_64_emit_rex(1,(reg),0,(dreg));
4923         *(mcodeptr++) = 0x85;
4924         x86_64_emit_reg((reg),(dreg));
4925 }
4926
4927
4928 void x86_64_testl_reg_reg(s8 reg, s8 dreg) {
4929         x86_64_emit_rex(0,(reg),0,(dreg));
4930         *(mcodeptr++) = 0x85;
4931         x86_64_emit_reg((reg),(dreg));
4932 }
4933
4934
4935 void x86_64_test_imm_reg(s8 imm, s8 reg) {
4936         x86_64_emit_rex(1,0,0,(reg));    /* was missing: REX.W selects the 64-bit test, REX.B reaches r8-r15 */
        *(mcodeptr++) = 0xf7;
4937         x86_64_emit_reg(0,(reg));
4938         x86_64_emit_imm32((imm));
4939 }
4940
4941
4942 void x86_64_testw_imm_reg(s8 imm, s8 reg) {
4943         *(mcodeptr++) = 0x66;
4944         *(mcodeptr++) = 0xf7;
4945         x86_64_emit_reg(0,(reg));
4946         x86_64_emit_imm16((imm));
4947 }
4948
4949
4950 void x86_64_testb_imm_reg(s8 imm, s8 reg) {
4951         *(mcodeptr++) = 0xf6;
4952         x86_64_emit_reg(0,(reg));
4953         x86_64_emit_imm8((imm));
4954 }
4955
4956
4957 void x86_64_lea_membase_reg(s8 basereg, s8 disp, s8 reg) {
4958         x86_64_emit_rex(1,(reg),0,(basereg));
4959         *(mcodeptr++) = 0x8d;
4960         x86_64_emit_membase((basereg),(disp),(reg));
4961 }
4962
4963
4964 void x86_64_leal_membase_reg(s8 basereg, s8 disp, s8 reg) {
4965         x86_64_emit_rex(0,(reg),0,(basereg));
4966         *(mcodeptr++) = 0x8d;
4967         x86_64_emit_membase((basereg),(disp),(reg));
4968 }
4969
4970
4971
4972 /*
4973  * inc, dec operations
4974  */
4975 void x86_64_inc_reg(s8 reg) {
4976         x86_64_emit_rex(1,0,0,(reg));
4977         *(mcodeptr++) = 0xff;
4978         x86_64_emit_reg(0,(reg));
4979 }
4980
4981
4982 void x86_64_incl_reg(s8 reg) {
4983         x86_64_emit_rex(0,0,0,(reg));
4984         *(mcodeptr++) = 0xff;
4985         x86_64_emit_reg(0,(reg));
4986 }
4987
4988
4989 void x86_64_inc_membase(s8 basereg, s8 disp) {
4990         x86_64_emit_rex(1,(basereg),0,0);
4991         *(mcodeptr++) = 0xff;
4992         x86_64_emit_membase((basereg),(disp),0);
4993 }
4994
4995
4996 void x86_64_incl_membase(s8 basereg, s8 disp) {
4997         x86_64_emit_rex(0,(basereg),0,0);
4998         *(mcodeptr++) = 0xff;
4999         x86_64_emit_membase((basereg),(disp),0);
5000 }
5001
5002
5003 void x86_64_dec_reg(s8 reg) {
5004         x86_64_emit_rex(1,0,0,(reg));
5005         *(mcodeptr++) = 0xff;
5006         x86_64_emit_reg(1,(reg));
5007 }
5008
5009         
5010 void x86_64_decl_reg(s8 reg) {
5011         x86_64_emit_rex(0,0,0,(reg));
5012         *(mcodeptr++) = 0xff;
5013         x86_64_emit_reg(1,(reg));
5014 }
5015
5016         
5017 void x86_64_dec_membase(s8 basereg, s8 disp) {
5018         x86_64_emit_rex(1,(basereg),0,0);
5019         *(mcodeptr++) = 0xff;
5020         x86_64_emit_membase((basereg),(disp),1);
5021 }
5022
5023
5024 void x86_64_decl_membase(s8 basereg, s8 disp) {
5025         x86_64_emit_rex(0,(basereg),0,0);
5026         *(mcodeptr++) = 0xff;
5027         x86_64_emit_membase((basereg),(disp),1);
5028 }
5029
5030
5031
5032
5033 void x86_64_cltd() {
5034     *(mcodeptr++) = 0x99;
5035 }
5036
5037
5038 void x86_64_cqto() {
5039         x86_64_emit_rex(1,0,0,0);
5040         *(mcodeptr++) = 0x99;
5041 }
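
/*
 * cltd (0x99) sign-extends EAX into EDX:EAX and cqto (REX.W 0x99) sign-extends
 * RAX into RDX:RAX; they set up the dividend that the idiv/idivl emitters
 * below expect.
 */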
5042
5043
5044
5045 void x86_64_imul_reg_reg(s8 reg, s8 dreg) {
5046         x86_64_emit_rex(1,(dreg),0,(reg));
5047         *(mcodeptr++) = 0x0f;
5048         *(mcodeptr++) = 0xaf;
5049         x86_64_emit_reg((dreg),(reg));
5050 }
5051
5052
5053 void x86_64_imull_reg_reg(s8 reg, s8 dreg) {
5054         x86_64_emit_rex(0,(dreg),0,(reg));
5055         *(mcodeptr++) = 0x0f;
5056         *(mcodeptr++) = 0xaf;
5057         x86_64_emit_reg((dreg),(reg));
5058 }
5059
5060
5061 void x86_64_imul_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5062         x86_64_emit_rex(1,(dreg),0,(basereg));
5063         *(mcodeptr++) = 0x0f;
5064         *(mcodeptr++) = 0xaf;
5065         x86_64_emit_membase((basereg),(disp),(dreg));
5066 }
5067
5068
5069 void x86_64_imull_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5070         x86_64_emit_rex(0,(dreg),0,(basereg));
5071         *(mcodeptr++) = 0x0f;
5072         *(mcodeptr++) = 0xaf;
5073         x86_64_emit_membase((basereg),(disp),(dreg));
5074 }
5075
5076
5077 void x86_64_imul_imm_reg(s8 imm, s8 dreg) {
5078         if (x86_64_is_imm8((imm))) {
5079                 x86_64_emit_rex(1,0,0,(dreg));
5080                 *(mcodeptr++) = 0x6b;
5081                 x86_64_emit_reg(0,(dreg));
5082                 x86_64_emit_imm8((imm));
5083         } else {
5084                 x86_64_emit_rex(1,0,0,(dreg));
5085                 *(mcodeptr++) = 0x69;
5086                 x86_64_emit_reg(0,(dreg));
5087                 x86_64_emit_imm32((imm));
5088         }
5089 }
5090
5091
5092 void x86_64_imul_imm_reg_reg(s8 imm, s8 reg, s8 dreg) {
5093         if (x86_64_is_imm8((imm))) {
5094                 x86_64_emit_rex(1,(dreg),0,(reg));
5095                 *(mcodeptr++) = 0x6b;
5096                 x86_64_emit_reg((dreg),(reg));
5097                 x86_64_emit_imm8((imm));
5098         } else {
5099                 x86_64_emit_rex(1,(dreg),0,(reg));
5100                 *(mcodeptr++) = 0x69;
5101                 x86_64_emit_reg((dreg),(reg));
5102                 x86_64_emit_imm32((imm));
5103         }
5104 }
5105
5106
5107 void x86_64_imull_imm_reg_reg(s8 imm, s8 reg, s8 dreg) {
5108         if (x86_64_is_imm8((imm))) {
5109                 x86_64_emit_rex(0,(dreg),0,(reg));
5110                 *(mcodeptr++) = 0x6b;
5111                 x86_64_emit_reg((dreg),(reg));
5112                 x86_64_emit_imm8((imm));
5113         } else {
5114                 x86_64_emit_rex(0,(dreg),0,(reg));
5115                 *(mcodeptr++) = 0x69;
5116                 x86_64_emit_reg((dreg),(reg));
5117                 x86_64_emit_imm32((imm));
5118         }
5119 }
5120
5121
5122 void x86_64_imul_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg) {
5123         if (x86_64_is_imm8((imm))) {
5124                 x86_64_emit_rex(1,(dreg),0,(basereg));
5125                 *(mcodeptr++) = 0x6b;
5126                 x86_64_emit_membase((basereg),(disp),(dreg));
5127                 x86_64_emit_imm8((imm));
5128         } else {
5129                 x86_64_emit_rex(1,(dreg),0,(basereg));
5130                 *(mcodeptr++) = 0x69;
5131                 x86_64_emit_membase((basereg),(disp),(dreg));
5132                 x86_64_emit_imm32((imm));
5133         }
5134 }
5135
5136
5137 void x86_64_imull_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg) {
5138         if (x86_64_is_imm8((imm))) {
5139                 x86_64_emit_rex(0,(dreg),0,(basereg));
5140                 *(mcodeptr++) = 0x6b;
5141                 x86_64_emit_membase((basereg),(disp),(dreg));
5142                 x86_64_emit_imm8((imm));
5143         } else {
5144                 x86_64_emit_rex(0,(dreg),0,(basereg));
5145                 *(mcodeptr++) = 0x69;
5146                 x86_64_emit_membase((basereg),(disp),(dreg));
5147                 x86_64_emit_imm32((imm));
5148         }
5149 }
5150
5151
5152 void x86_64_idiv_reg(s8 reg) {
5153         x86_64_emit_rex(1,0,0,(reg));
5154         *(mcodeptr++) = 0xf7;
5155         x86_64_emit_reg(7,(reg));
5156 }
5157
5158
5159 void x86_64_idivl_reg(s8 reg) {
5160         x86_64_emit_rex(0,0,0,(reg));
5161         *(mcodeptr++) = 0xf7;
5162         x86_64_emit_reg(7,(reg));
5163 }
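
/*
 * 0xf7 /7 divides RDX:RAX (EDX:EAX for the 32-bit form) by the operand and
 * leaves the quotient in RAX/EAX and the remainder in RDX/EDX, which is where
 * the div and rem results are picked up from.
 */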
5164
5165
5166
5167 void x86_64_ret() {
5168     *(mcodeptr++) = 0xc3;
5169 }
5170
5171
5172
5173 /*
5174  * shift ops
5175  */
5176 void x86_64_shift_reg(s8 opc, s8 reg) {
5177         x86_64_emit_rex(1,0,0,(reg));
5178         *(mcodeptr++) = 0xd3;
5179         x86_64_emit_reg((opc),(reg));
5180 }
5181
5182
5183 void x86_64_shiftl_reg(s8 opc, s8 reg) {
5184         x86_64_emit_rex(0,0,0,(reg));
5185         *(mcodeptr++) = 0xd3;
5186         x86_64_emit_reg((opc),(reg));
5187 }
5188
5189
5190 void x86_64_shift_membase(s8 opc, s8 basereg, s8 disp) {
5191         x86_64_emit_rex(1,0,0,(basereg));
5192         *(mcodeptr++) = 0xd3;
5193         x86_64_emit_membase((basereg),(disp),(opc));
5194 }
5195
5196
5197 void x86_64_shiftl_membase(s8 opc, s8 basereg, s8 disp) {
5198         x86_64_emit_rex(0,0,0,(basereg));
5199         *(mcodeptr++) = 0xd3;
5200         x86_64_emit_membase((basereg),(disp),(opc));
5201 }
5202
5203
5204 void x86_64_shift_imm_reg(s8 opc, s8 imm, s8 dreg) {
5205         if ((imm) == 1) {
5206                 x86_64_emit_rex(1,0,0,(dreg));
5207                 *(mcodeptr++) = 0xd1;
5208                 x86_64_emit_reg((opc),(dreg));
5209         } else {
5210                 x86_64_emit_rex(1,0,0,(dreg));
5211                 *(mcodeptr++) = 0xc1;
5212                 x86_64_emit_reg((opc),(dreg));
5213                 x86_64_emit_imm8((imm));
5214         }
5215 }
5216
5217
5218 void x86_64_shiftl_imm_reg(s8 opc, s8 imm, s8 dreg) {
5219         if ((imm) == 1) {
5220                 x86_64_emit_rex(0,0,0,(dreg));
5221                 *(mcodeptr++) = 0xd1;
5222                 x86_64_emit_reg((opc),(dreg));
5223         } else {
5224                 x86_64_emit_rex(0,0,0,(dreg));
5225                 *(mcodeptr++) = 0xc1;
5226                 x86_64_emit_reg((opc),(dreg));
5227                 x86_64_emit_imm8((imm));
5228         }
5229 }
5230
5231
5232 void x86_64_shift_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
5233         if ((imm) == 1) {
5234                 x86_64_emit_rex(1,0,0,(basereg));
5235                 *(mcodeptr++) = 0xd1;
5236                 x86_64_emit_membase((basereg),(disp),(opc));
5237         } else {
5238                 x86_64_emit_rex(1,0,0,(basereg));
5239                 *(mcodeptr++) = 0xc1;
5240                 x86_64_emit_membase((basereg),(disp),(opc));
5241                 x86_64_emit_imm8((imm));
5242         }
5243 }
5244
5245
5246 void x86_64_shiftl_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
5247         if ((imm) == 1) {
5248                 x86_64_emit_rex(0,0,0,(basereg));
5249                 *(mcodeptr++) = 0xd1;
5250                 x86_64_emit_membase((basereg),(disp),(opc));
5251         } else {
5252                 x86_64_emit_rex(0,0,0,(basereg));
5253                 *(mcodeptr++) = 0xc1;
5254                 x86_64_emit_membase((basereg),(disp),(opc));
5255                 x86_64_emit_imm8((imm));
5256         }
5257 }
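
/*
 * Shift encodings used above: 0xd3 /digit shifts by CL, 0xc1 /digit ib by an
 * immediate, and 0xd1 /digit by one (saving the immediate byte).  The CL-only
 * rule for variable counts is what forces x86_64_emit_ishift and
 * x86_64_emit_lshift to shuffle RCX around.
 */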
5258
5259
5260
5261 /*
5262  * jump operations
5263  */
5264 void x86_64_jmp_imm(s8 imm) {
5265         *(mcodeptr++) = 0xe9;
5266         x86_64_emit_imm32((imm));
5267 }
5268
5269
5270 void x86_64_jmp_reg(s8 reg) {
5271         x86_64_emit_rex(0,0,0,(reg));
5272         *(mcodeptr++) = 0xff;
5273         x86_64_emit_reg(4,(reg));
5274 }
5275
5276
5277 void x86_64_jcc(s8 opc, s8 imm) {
5278         *(mcodeptr++) = 0x0f;
5279         *(mcodeptr++) = (0x80 + (opc));
5280         x86_64_emit_imm32((imm));
5281 }
5282
5283
5284
5285 /*
5286  * conditional set and move operations
5287  */
5288
5289 /* always emit a REX byte: without one, byte-register codes 4-7 mean ah/ch/dh/bh instead of spl/bpl/sil/dil, and r8b-r15b need REX.B */
5290 void x86_64_setcc_reg(s8 opc, s8 reg) {
5291         *(mcodeptr++) = (0x40 | (((reg) >> 3) & 0x01));
5292         *(mcodeptr++) = 0x0f;
5293         *(mcodeptr++) = (0x90 + (opc));
5294         x86_64_emit_reg(0,(reg));
5295 }
5296
5297
5298 /* always emit a REX byte: without one, byte-register codes 4-7 mean ah/ch/dh/bh instead of spl/bpl/sil/dil, and r8b-r15b need REX.B */
5299 void x86_64_setcc_membase(s8 opc, s8 basereg, s8 disp) {
5300         *(mcodeptr++) = (0x40 | (((basereg) >> 3) & 0x01));
5301         *(mcodeptr++) = 0x0f;
5302         *(mcodeptr++) = (0x90 + (opc));
5303         x86_64_emit_membase((basereg),(disp),0);
5304 }
5305
5306
5307 void x86_64_cmovcc_reg_reg(s8 opc, s8 reg, s8 dreg) {
5308         x86_64_emit_rex(1,(dreg),0,(reg));
5309         *(mcodeptr++) = 0x0f;
5310         *(mcodeptr++) = (0x40 + (opc));
5311         x86_64_emit_reg((dreg),(reg));
5312 }
5313
5314
5315 void x86_64_cmovccl_reg_reg(s8 opc, s8 reg, s8 dreg) {
5316         x86_64_emit_rex(0,(dreg),0,(reg));
5317         *(mcodeptr++) = 0x0f;
5318         *(mcodeptr++) = (0x40 + (opc));
5319         x86_64_emit_reg((dreg),(reg));
5320 }
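
/*
 * setcc (0x0f 0x90+cc) writes only the low byte of its operand, so the
 * surrounding code has to widen the result if a full register value is
 * needed; cmovcc (0x0f 0x40+cc) takes its destination in the ModRM reg field
 * like the other load-form opcodes.
 */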
5321
5322
5323
5324 void x86_64_neg_reg(s8 reg) {
5325         x86_64_emit_rex(1,0,0,(reg));
5326         *(mcodeptr++) = 0xf7;
5327         x86_64_emit_reg(3,(reg));
5328 }
5329
5330
5331 void x86_64_negl_reg(s8 reg) {
5332         x86_64_emit_rex(0,0,0,(reg));
5333         *(mcodeptr++) = 0xf7;
5334         x86_64_emit_reg(3,(reg));
5335 }
5336
5337
5338 void x86_64_neg_membase(s8 basereg, s8 disp) {
5339         x86_64_emit_rex(1,0,0,(basereg));
5340         *(mcodeptr++) = 0xf7;
5341         x86_64_emit_membase((basereg),(disp),3);
5342 }
5343
5344
5345 void x86_64_negl_membase(s8 basereg, s8 disp) {
5346         x86_64_emit_rex(0,0,0,(basereg));
5347         *(mcodeptr++) = 0xf7;
5348         x86_64_emit_membase((basereg),(disp),3);
5349 }
5350
5351
5352
5353 void x86_64_push_imm(s8 imm) {
5354         *(mcodeptr++) = 0x68;
5355         x86_64_emit_imm32((imm));
5356 }
5357
5358
5359 void x86_64_pop_reg(s8 reg) {
5360         x86_64_emit_rex(0,0,0,(reg));
5361         *(mcodeptr++) = 0x58 + (0x07 & (reg));
5362 }
5363
5364
5365 void x86_64_xchg_reg_reg(s8 reg, s8 dreg) {
5366         x86_64_emit_rex(1,(reg),0,(dreg));
5367         *(mcodeptr++) = 0x87;
5368         x86_64_emit_reg((reg),(dreg));
5369 }
5370
5371
5372 void x86_64_nop() {
5373     *(mcodeptr++) = 0x90;
5374 }
5375
5376
5377
5378 /*
5379  * call instructions
5380  */
5381 void x86_64_call_reg(s8 reg) {
5382         x86_64_emit_rex(1,0,0,(reg));
5383         *(mcodeptr++) = 0xff;
5384         x86_64_emit_reg(2,(reg));
5385 }
5386
5387
5388 void x86_64_call_imm(s8 imm) {
5389         *(mcodeptr++) = 0xe8;
5390         x86_64_emit_imm32((imm));
5391 }
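
/*
 * 0xe8 (call) and 0xe9 (jmp, above) take a rel32 measured from the end of the
 * instruction, so the immediate passed in is a displacement, not an absolute
 * target; x86_64_call_reg (0xff /2) is the absolute indirect form.
 */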
5392
5393
5394
5395 /*
5396  * floating point instructions (SSE2)
5397  */
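
/*
 * For the SSE instructions below the mandatory 0xf2/0xf3/0x66 prefix must come
 * before the REX byte, and REX must immediately precede the 0x0f escape; the
 * emitters keep exactly that order.
 */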
5398 void x86_64_addsd_reg_reg(s8 reg, s8 dreg) {
5399         *(mcodeptr++) = 0xf2;
5400         x86_64_emit_rex(0,(dreg),0,(reg));
5401         *(mcodeptr++) = 0x0f;
5402         *(mcodeptr++) = 0x58;
5403         x86_64_emit_reg((dreg),(reg));
5404 }
5405
5406
5407 void x86_64_addss_reg_reg(s8 reg, s8 dreg) {
5408         *(mcodeptr++) = 0xf3;
5409         x86_64_emit_rex(0,(dreg),0,(reg));
5410         *(mcodeptr++) = 0x0f;
5411         *(mcodeptr++) = 0x58;
5412         x86_64_emit_reg((dreg),(reg));
5413 }
5414
5415
5416 void x86_64_cvtsi2ssq_reg_reg(s8 reg, s8 dreg) {
5417         *(mcodeptr++) = 0xf3;
5418         x86_64_emit_rex(1,(dreg),0,(reg));
5419         *(mcodeptr++) = 0x0f;
5420         *(mcodeptr++) = 0x2a;
5421         x86_64_emit_reg((dreg),(reg));
5422 }
5423
5424
5425 void x86_64_cvtsi2ss_reg_reg(s8 reg, s8 dreg) {
5426         *(mcodeptr++) = 0xf3;
5427         x86_64_emit_rex(0,(dreg),0,(reg));
5428         *(mcodeptr++) = 0x0f;
5429         *(mcodeptr++) = 0x2a;
5430         x86_64_emit_reg((dreg),(reg));
5431 }
5432
5433
5434 void x86_64_cvtsi2sdq_reg_reg(s8 reg, s8 dreg) {
5435         *(mcodeptr++) = 0xf2;
5436         x86_64_emit_rex(1,(dreg),0,(reg));
5437         *(mcodeptr++) = 0x0f;
5438         *(mcodeptr++) = 0x2a;
5439         x86_64_emit_reg((dreg),(reg));
5440 }
5441
5442
5443 void x86_64_cvtsi2sd_reg_reg(s8 reg, s8 dreg) {
5444         *(mcodeptr++) = 0xf2;
5445         x86_64_emit_rex(0,(dreg),0,(reg));
5446         *(mcodeptr++) = 0x0f;
5447         *(mcodeptr++) = 0x2a;
5448         x86_64_emit_reg((dreg),(reg));
5449 }
5450
5451
5452 void x86_64_cvtss2sd_reg_reg(s8 reg, s8 dreg) {
5453         *(mcodeptr++) = 0xf3;
5454         x86_64_emit_rex(0,(dreg),0,(reg));
5455         *(mcodeptr++) = 0x0f;
5456         *(mcodeptr++) = 0x5a;
5457         x86_64_emit_reg((dreg),(reg));
5458 }
5459
5460
5461 void x86_64_cvtsd2ss_reg_reg(s8 reg, s8 dreg) {
5462         *(mcodeptr++) = 0xf2;
5463         x86_64_emit_rex(0,(dreg),0,(reg));
5464         *(mcodeptr++) = 0x0f;
5465         *(mcodeptr++) = 0x5a;
5466         x86_64_emit_reg((dreg),(reg));
5467 }
5468
5469
5470 void x86_64_cvttss2siq_reg_reg(s8 reg, s8 dreg) {
5471         *(mcodeptr++) = 0xf3;
5472         x86_64_emit_rex(1,(dreg),0,(reg));
5473         *(mcodeptr++) = 0x0f;
5474         *(mcodeptr++) = 0x2c;
5475         x86_64_emit_reg((dreg),(reg));
5476 }
5477
5478
5479 void x86_64_cvttss2si_reg_reg(s8 reg, s8 dreg) {
5480         *(mcodeptr++) = 0xf3;
5481         x86_64_emit_rex(0,(dreg),0,(reg));
5482         *(mcodeptr++) = 0x0f;
5483         *(mcodeptr++) = 0x2c;
5484         x86_64_emit_reg((dreg),(reg));
5485 }
5486
5487
5488 void x86_64_cvttsd2siq_reg_reg(s8 reg, s8 dreg) {
5489         *(mcodeptr++) = 0xf2;
5490         x86_64_emit_rex(1,(dreg),0,(reg));
5491         *(mcodeptr++) = 0x0f;
5492         *(mcodeptr++) = 0x2c;
5493         x86_64_emit_reg((dreg),(reg));
5494 }
5495
5496
5497 void x86_64_cvttsd2si_reg_reg(s8 reg, s8 dreg) {
5498         *(mcodeptr++) = 0xf2;
5499         x86_64_emit_rex(0,(dreg),0,(reg));
5500         *(mcodeptr++) = 0x0f;
5501         *(mcodeptr++) = 0x2c;
5502         x86_64_emit_reg((dreg),(reg));
5503 }
5504
5505
5506 void x86_64_divss_reg_reg(s8 reg, s8 dreg) {
5507         *(mcodeptr++) = 0xf3;
5508         x86_64_emit_rex(0,(dreg),0,(reg));
5509         *(mcodeptr++) = 0x0f;
5510         *(mcodeptr++) = 0x5e;
5511         x86_64_emit_reg((dreg),(reg));
5512 }
5513
5514
5515 void x86_64_divsd_reg_reg(s8 reg, s8 dreg) {
5516         *(mcodeptr++) = 0xf2;
5517         x86_64_emit_rex(0,(dreg),0,(reg));
5518         *(mcodeptr++) = 0x0f;
5519         *(mcodeptr++) = 0x5e;
5520         x86_64_emit_reg((dreg),(reg));
5521 }
5522
5523
5524 void x86_64_movd_reg_freg(s8 reg, s8 freg) {
5525         *(mcodeptr++) = 0x66;
5526         x86_64_emit_rex(1,(freg),0,(reg));
5527         *(mcodeptr++) = 0x0f;
5528         *(mcodeptr++) = 0x6e;
5529         x86_64_emit_reg((freg),(reg));
5530 }
5531
5532
5533 void x86_64_movd_freg_reg(s8 freg, s8 reg) {
5534         *(mcodeptr++) = 0x66;
5535         x86_64_emit_rex(1,(freg),0,(reg));
5536         *(mcodeptr++) = 0x0f;
5537         *(mcodeptr++) = 0x7e;
5538         x86_64_emit_reg((freg),(reg));
5539 }
5540
5541
5542 void x86_64_movd_reg_membase(s8 reg, s8 basereg, s8 disp) {
5543         *(mcodeptr++) = 0x66;
5544         x86_64_emit_rex(0,(reg),0,(basereg));
5545         *(mcodeptr++) = 0x0f;
5546         *(mcodeptr++) = 0x7e;
5547         x86_64_emit_membase((basereg),(disp),(reg));
5548 }
5549
5550
5551 void x86_64_movd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
5552         *(mcodeptr++) = 0x66;
5553         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
5554         *(mcodeptr++) = 0x0f;
5555         *(mcodeptr++) = 0x7e;
5556         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
5557 }
5558
5559
5560 void x86_64_movd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5561         *(mcodeptr++) = 0x66;
5562         x86_64_emit_rex(1,(dreg),0,(basereg));
5563         *(mcodeptr++) = 0x0f;
5564         *(mcodeptr++) = 0x6e;
5565         x86_64_emit_membase((basereg),(disp),(dreg));
5566 }
5567
5568
5569 void x86_64_movdl_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5570         *(mcodeptr++) = 0x66;
5571         x86_64_emit_rex(0,(dreg),0,(basereg));
5572         *(mcodeptr++) = 0x0f;
5573         *(mcodeptr++) = 0x6e;
5574         x86_64_emit_membase((basereg),(disp),(dreg));
5575 }
5576
5577
5578 void x86_64_movd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
5579         *(mcodeptr++) = 0x66;
5580         x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
5581         *(mcodeptr++) = 0x0f;
5582         *(mcodeptr++) = 0x6e;
5583         x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
5584 }
5585
5586
5587 void x86_64_movq_reg_reg(s8 reg, s8 dreg) {
5588         *(mcodeptr++) = 0xf3;
5589         x86_64_emit_rex(0,(dreg),0,(reg));
5590         *(mcodeptr++) = 0x0f;
5591         *(mcodeptr++) = 0x7e;
5592         x86_64_emit_reg((dreg),(reg));
5593 }
5594
5595
5596 void x86_64_movq_reg_membase(s8 reg, s8 basereg, s8 disp) {
5597         *(mcodeptr++) = 0x66;
5598         x86_64_emit_rex(0,(reg),0,(basereg));
5599         *(mcodeptr++) = 0x0f;
5600         *(mcodeptr++) = 0xd6;
5601         x86_64_emit_membase((basereg),(disp),(reg));
5602 }
5603
5604
5605 void x86_64_movq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5606         *(mcodeptr++) = 0xf3;
5607         x86_64_emit_rex(0,(dreg),0,(basereg));
5608         *(mcodeptr++) = 0x0f;
5609         *(mcodeptr++) = 0x7e;
5610         x86_64_emit_membase((basereg),(disp),(dreg));
5611 }
5612
5613
5614 void x86_64_movss_reg_reg(s8 reg, s8 dreg) {
5615         *(mcodeptr++) = 0xf3;
5616         x86_64_emit_rex(0,(reg),0,(dreg));
5617         *(mcodeptr++) = 0x0f;
5618         *(mcodeptr++) = 0x10;
5619         x86_64_emit_reg((reg),(dreg));
5620 }
5621
5622
5623 void x86_64_movsd_reg_reg(s8 reg, s8 dreg) {
5624         *(mcodeptr++) = 0xf2;
5625         x86_64_emit_rex(0,(reg),0,(dreg));
5626         *(mcodeptr++) = 0x0f;
5627         *(mcodeptr++) = 0x10;
5628         x86_64_emit_reg((reg),(dreg));
5629 }
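
/*
 * Caution (an observation, not a verified bug report): with the 0x10 load-form
 * opcode the ModRM reg field is the destination, so x86_64_movss_reg_reg and
 * x86_64_movsd_reg_reg as written appear to copy dreg into reg -- the reverse
 * of every other reg_reg emitter here (compare x86_64_movq_reg_reg above,
 * which passes dreg first).  Callers should double-check the direction.
 */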
5630
5631
5632 void x86_64_movss_reg_membase(s8 reg, s8 basereg, s8 disp) {
5633         *(mcodeptr++) = 0xf3;
5634         x86_64_emit_rex(0,(reg),0,(basereg));
5635         *(mcodeptr++) = 0x0f;
5636         *(mcodeptr++) = 0x11;
5637         x86_64_emit_membase((basereg),(disp),(reg));
5638 }
5639
5640
5641 void x86_64_movsd_reg_membase(s8 reg, s8 basereg, s8 disp) {
5642         *(mcodeptr++) = 0xf2;
5643         x86_64_emit_rex(0,(reg),0,(basereg));
5644         *(mcodeptr++) = 0x0f;
5645         *(mcodeptr++) = 0x11;
5646         x86_64_emit_membase((basereg),(disp),(reg));
5647 }
5648
5649
5650 void x86_64_movss_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5651         *(mcodeptr++) = 0xf3;
5652         x86_64_emit_rex(0,(dreg),0,(basereg));
5653         *(mcodeptr++) = 0x0f;
5654         *(mcodeptr++) = 0x10;
5655         x86_64_emit_membase((basereg),(disp),(dreg));
5656 }
5657
5658
5659 void x86_64_movlps_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5660         x86_64_emit_rex(0,(dreg),0,(basereg));
5661         *(mcodeptr++) = 0x0f;
5662         *(mcodeptr++) = 0x12;
5663         x86_64_emit_membase((basereg),(disp),(dreg));
5664 }
5665
5666
5667 void x86_64_movsd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5668         *(mcodeptr++) = 0xf2;
5669         x86_64_emit_rex(0,(dreg),0,(basereg));
5670         *(mcodeptr++) = 0x0f;
5671         *(mcodeptr++) = 0x10;
5672         x86_64_emit_membase((basereg),(disp),(dreg));
5673 }
5674
5675
5676 void x86_64_movlpd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5677         *(mcodeptr++) = 0x66;
5678         x86_64_emit_rex(0,(dreg),0,(basereg));
5679         *(mcodeptr++) = 0x0f;
5680         *(mcodeptr++) = 0x12;
5681         x86_64_emit_membase((basereg),(disp),(dreg));
5682 }
5683
5684
5685 void x86_64_movss_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
5686         *(mcodeptr++) = 0xf3;
5687         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
5688         *(mcodeptr++) = 0x0f;
5689         *(mcodeptr++) = 0x11;
5690         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
5691 }
5692
5693
5694 void x86_64_movsd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
5695         *(mcodeptr++) = 0xf2;
5696         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
5697         *(mcodeptr++) = 0x0f;
5698         *(mcodeptr++) = 0x11;
5699         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
5700 }
5701
5702
5703 void x86_64_movss_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
5704         *(mcodeptr++) = 0xf3;
5705         x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
5706         *(mcodeptr++) = 0x0f;
5707         *(mcodeptr++) = 0x10;
5708         x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
5709 }
5710
5711
5712 void x86_64_movsd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
5713         *(mcodeptr++) = 0xf2;
5714         x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
5715         *(mcodeptr++) = 0x0f;
5716         *(mcodeptr++) = 0x10;
5717         x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
5718 }
5719
5720
5721 void x86_64_mulss_reg_reg(s8 reg, s8 dreg) {
5722         *(mcodeptr++) = 0xf3;
5723         x86_64_emit_rex(0,(dreg),0,(reg));
5724         *(mcodeptr++) = 0x0f;
5725         *(mcodeptr++) = 0x59;
5726         x86_64_emit_reg((dreg),(reg));
5727 }
5728
5729
5730 void x86_64_mulsd_reg_reg(s8 reg, s8 dreg) {
5731         *(mcodeptr++) = 0xf2;
5732         x86_64_emit_rex(0,(dreg),0,(reg));
5733         *(mcodeptr++) = 0x0f;
5734         *(mcodeptr++) = 0x59;
5735         x86_64_emit_reg((dreg),(reg));
5736 }
5737
5738
5739 void x86_64_subss_reg_reg(s8 reg, s8 dreg) {
5740         *(mcodeptr++) = 0xf3;
5741         x86_64_emit_rex(0,(dreg),0,(reg));
5742         *(mcodeptr++) = 0x0f;
5743         *(mcodeptr++) = 0x5c;
5744         x86_64_emit_reg((dreg),(reg));
5745 }
5746
5747
5748 void x86_64_subsd_reg_reg(s8 reg, s8 dreg) {
5749         *(mcodeptr++) = 0xf2;
5750         x86_64_emit_rex(0,(dreg),0,(reg));
5751         *(mcodeptr++) = 0x0f;
5752         *(mcodeptr++) = 0x5c;
5753         x86_64_emit_reg((dreg),(reg));
5754 }
5755
5756
5757 void x86_64_ucomiss_reg_reg(s8 reg, s8 dreg) {
5758         x86_64_emit_rex(0,(dreg),0,(reg));
5759         *(mcodeptr++) = 0x0f;
5760         *(mcodeptr++) = 0x2e;
5761         x86_64_emit_reg((dreg),(reg));
5762 }
5763
5764
5765 void x86_64_ucomisd_reg_reg(s8 reg, s8 dreg) {
5766         *(mcodeptr++) = 0x66;
5767         x86_64_emit_rex(0,(dreg),0,(reg));
5768         *(mcodeptr++) = 0x0f;
5769         *(mcodeptr++) = 0x2e;
5770         x86_64_emit_reg((dreg),(reg));
5771 }
5772
5773
5774 void x86_64_xorps_reg_reg(s8 reg, s8 dreg) {
5775         x86_64_emit_rex(0,(dreg),0,(reg));
5776         *(mcodeptr++) = 0x0f;
5777         *(mcodeptr++) = 0x57;
5778         x86_64_emit_reg((dreg),(reg));
5779 }
5780
5781
5782 void x86_64_xorps_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5783         x86_64_emit_rex(0,(dreg),0,(basereg));
5784         *(mcodeptr++) = 0x0f;
5785         *(mcodeptr++) = 0x57;
5786         x86_64_emit_membase((basereg),(disp),(dreg));
5787 }
5788
5789
5790 void x86_64_xorpd_reg_reg(s8 reg, s8 dreg) {
5791         *(mcodeptr++) = 0x66;
5792         x86_64_emit_rex(0,(dreg),0,(reg));
5793         *(mcodeptr++) = 0x0f;
5794         *(mcodeptr++) = 0x57;
5795         x86_64_emit_reg((dreg),(reg));
5796 }
5797
5798
5799 void x86_64_xorpd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5800         *(mcodeptr++) = 0x66;
5801         x86_64_emit_rex(0,(dreg),0,(basereg));
5802         *(mcodeptr++) = 0x0f;
5803         *(mcodeptr++) = 0x57;
5804         x86_64_emit_membase((basereg),(disp),(dreg));
5805 }
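
/*
 * xorps and xorpd compute the same bitwise XOR on the 128-bit register; xorps
 * is simply one byte shorter because it needs no 0x66 prefix.  XORing a
 * register with itself zeroes it, and XORing with a sign-bit mask loaded via
 * the membase forms is the usual way a JIT negates a float or double.
 */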
5806
5807 #endif
5808
5809 /*
5810  * These are local overrides for various environment variables in Emacs.
5811  * Please do not remove this and leave it at the end of the file, where
5812  * Emacs will automagically detect them.
5813  * ---------------------------------------------------------------------
5814  * Local variables:
5815  * mode: c
5816  * indent-tabs-mode: t
5817  * c-basic-offset: 4
5818  * tab-width: 4
5819  * End:
5820  */