1 /* jit/x86_64/codegen.c - machine code generator for x86_64
2
3    Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003
4    Institut f. Computersprachen, TU Wien
5    R. Grafl, A. Krall, C. Kruegel, C. Oates, R. Obermaisser, M. Probst,
6    S. Ring, E. Steiner, C. Thalinger, D. Thuernbeck, P. Tomsich,
7    J. Wenninger
8
9    This file is part of CACAO.
10
11    This program is free software; you can redistribute it and/or
12    modify it under the terms of the GNU General Public License as
13    published by the Free Software Foundation; either version 2, or (at
14    your option) any later version.
15
16    This program is distributed in the hope that it will be useful, but
17    WITHOUT ANY WARRANTY; without even the implied warranty of
18    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
19    General Public License for more details.
20
21    You should have received a copy of the GNU General Public License
22    along with this program; if not, write to the Free Software
23    Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
24    02111-1307, USA.
25
26    Contact: cacao@complang.tuwien.ac.at
27
28    Authors: Andreas Krall
29             Christian Thalinger
30
31    $Id: codegen.c 1126 2004-06-03 21:35:05Z twisti $
32
33 */
34
35
36 #include <stdio.h>
37 #include <signal.h>
38 #include "types.h"
39 #include "main.h"
40 #include "codegen.h"
41 #include "jit.h"
42 #include "reg.h"
43 #include "parse.h"
44 #include "builtin.h"
45 #include "asmpart.h"
46 #include "jni.h"
47 #include "loader.h"
48 #include "tables.h"
49 #include "native.h"
50
51 /* include independent code generation stuff */
52 #include "codegen.inc"
53 #include "reg.inc"
54
55
57 /* register description - array ***********************************************/
57
58 /* #define REG_RES   0         reserved register for OS or code generator     */
59 /* #define REG_RET   1         return value register                          */
60 /* #define REG_EXC   2         exception value register (only old jit)        */
61 /* #define REG_SAV   3         (callee) saved register                        */
62 /* #define REG_TMP   4         scratch temporary register (caller saved)      */
63 /* #define REG_ARG   5         argument register (caller saved)               */
64
65 /* #define REG_END   -1        last entry in tables                           */
66
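/* Note: nregdescint is indexed by hardware register number, i.e. rax, rcx,
   rdx, rbx, rsp, rbp, rsi, rdi, r8 .. r15.  rsp (index 4) is reserved,
   rcx/rdx/rsi/rdi/r8/r9 are the six SysV integer argument registers, and
   r10/r11 are kept reserved, apparently as code generator temporaries
   (cf. r10 = REG_ITMP2_XPC in the signal handlers below).  nregdescfloat is
   indexed by xmm register number the same way; only xmm0..xmm3 appear to be
   used for argument passing (cf. the argfltregs[0..3] saves in the verbose
   call tracing code). */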
67 int nregdescint[] = {
68     REG_RET, REG_ARG, REG_ARG, REG_TMP, REG_RES, REG_SAV, REG_ARG, REG_ARG,
69     REG_ARG, REG_ARG, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV,
70     REG_END
71 };
72
73
74 int nregdescfloat[] = {
75         /*      REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP, */
76         /*      REG_RES, REG_RES, REG_RES, REG_SAV, REG_SAV, REG_SAV, REG_SAV, REG_SAV, */
77     REG_ARG, REG_ARG, REG_ARG, REG_ARG, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
78     REG_RES, REG_RES, REG_RES, REG_TMP, REG_TMP, REG_TMP, REG_TMP, REG_TMP,
79     REG_END
80 };
81
82
83 /* additional functions and macros to generate code ***************************/
84
85 #define BlockPtrOfPC(pc)  ((basicblock *) iptr->target)
86
87
88 #ifdef STATISTICS
89 #define COUNT_SPILLS count_spills++
90 #else
91 #define COUNT_SPILLS
92 #endif
93
94
95 #define CALCOFFSETBYTES(var, reg, val) \
96     if ((s4) (val) < -128 || (s4) (val) > 127) (var) += 4; \
97     else if ((s4) (val) != 0) (var) += 1; \
98     else if ((reg) == RBP || (reg) == RSP || (reg) == R12 || (reg) == R13) (var) += 1;
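/* Note on the last case: even with a zero displacement, rbp/r13 as base
   register still need an explicit 8-bit displacement byte (mod=00 with
   these registers selects rip-relative/disp32 addressing instead), and
   rsp/r12 need an extra SIB byte, so one additional byte is accounted
   for. */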
99
100
101 #define CALCIMMEDIATEBYTES(var, val) \
102     if ((s4) (val) < -128 || (s4) (val) > 127) (var) += 4; \
103     else (var) += 1;
104
105
106 /* gen_nullptr_check(objreg) */
107
108 #define gen_nullptr_check(objreg) \
109         if (checknull) { \
110         x86_64_test_reg_reg((objreg), (objreg)); \
111         x86_64_jcc(X86_64_CC_E, 0); \
112             codegen_addxnullrefs(mcodeptr); \
113         }
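/* With software null checks enabled (checknull), this emits a test of the
   object register and a forward conditional jump; the jump is recorded via
   codegen_addxnullrefs so its target (the exception stub) can be patched in
   later.  With checknull disabled, null dereferences are caught instead by
   the SIGSEGV/SIGBUS handler installed in init_exceptions() below. */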
114
115
116 #define gen_div_check(v) \
117     if (checknull) { \
118         if ((v)->flags & INMEMORY) { \
119             x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8); \
120         } else { \
121             x86_64_test_reg_reg(src->regoff, src->regoff); \
122         } \
123         x86_64_jcc(X86_64_CC_E, 0); \
124         codegen_addxdivrefs(mcodeptr); \
125     }
126
127
128 /* MCODECHECK(icnt) */
129
130 #define MCODECHECK(icnt) \
131         if ((mcodeptr + (icnt)) > (u1*) mcodeend) mcodeptr = (u1*) codegen_increase((u1*) mcodeptr)
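/* Ensures that at least icnt bytes of code memory remain (mcodeptr is a
   byte pointer); codegen_increase() from codegen.inc is expected to enlarge
   the buffer and return the new write position. */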
132
133 /* M_INTMOVE:
134     generates an integer-move from register a to b.
135     If a and b are the same int-register, no code will be generated.
136 */ 
137
138 #define M_INTMOVE(reg,dreg) \
139     if ((reg) != (dreg)) { \
140         x86_64_mov_reg_reg((reg),(dreg)); \
141     }
142
143
144 /* M_FLTMOVE:
145     generates a floating-point-move from register a to b.
146     If a and b are the same float-register, no code will be generated.
147 */ 
148
149 #define M_FLTMOVE(reg,dreg) \
150     if ((reg) != (dreg)) { \
151         x86_64_movq_reg_reg((reg),(dreg)); \
152     }
153
154
155 /* var_to_reg_xxx:
156     this function generates code to fetch data from a pseudo-register
157     into a real register. 
158     If the pseudo-register has actually been assigned to a real 
159     register, no code will be emitted, since following operations
160     can use this register directly.
161     
162     v: pseudoregister to be fetched from
163     tempregnum: temporary register to be used if v is actually spilled to ram
164
165     return: the register number, where the operand can be found after 
166             fetching (this will be either tempregnum or the register
167             number already given to v)
168 */
169
170 #define var_to_reg_int(regnr,v,tempnr) \
171     if ((v)->flags & INMEMORY) { \
172         COUNT_SPILLS; \
173         if ((v)->type == TYPE_INT) { \
174             x86_64_movl_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
175         } else { \
176             x86_64_mov_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
177         } \
178         regnr = tempnr; \
179     } else { \
180         regnr = (v)->regoff; \
181     }
182
183
184
185 #define var_to_reg_flt(regnr,v,tempnr) \
186     if ((v)->flags & INMEMORY) { \
187         COUNT_SPILLS; \
188         if ((v)->type == TYPE_FLT) { \
189             x86_64_movlps_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
190         } else { \
191             x86_64_movlpd_membase_reg(REG_SP, (v)->regoff * 8, tempnr); \
192         } \
193 /*        x86_64_movq_membase_reg(REG_SP, (v)->regoff * 8, tempnr);*/ \
194         regnr = tempnr; \
195     } else { \
196         regnr = (v)->regoff; \
197     }
198
199
200 /* reg_of_var:
201     This function determines the register to which the result of an
202     operation should go when it is ultimately intended to be stored in
203     pseudoregister v.
204     If v is assigned to an actual register, this register will be returned.
205     Otherwise (when v is spilled) this function returns tempregnum.
206     If not already done, regoff and flags are set in the stack location.
207 */        
208
209 static int reg_of_var(stackptr v, int tempregnum)
210 {
211         varinfo      *var;
212
213         switch (v->varkind) {
214         case TEMPVAR:
215                 if (!(v->flags & INMEMORY))
216                         return(v->regoff);
217                 break;
218         case STACKVAR:
219                 var = &(interfaces[v->varnum][v->type]);
220                 v->regoff = var->regoff;
221                 if (!(var->flags & INMEMORY))
222                         return(var->regoff);
223                 break;
224         case LOCALVAR:
225                 var = &(locals[v->varnum][v->type]);
226                 v->regoff = var->regoff;
227                 if (!(var->flags & INMEMORY))
228                         return(var->regoff);
229                 break;
230         case ARGVAR:
231                 v->regoff = v->varnum;
232                 if (IS_FLT_DBL_TYPE(v->type)) {
233                         if (v->varnum < FLT_ARG_CNT) {
234                                 v->regoff = argfltregs[v->varnum];
235                                 return(argfltregs[v->varnum]);
236                         }
237                 } else {
238                         if (v->varnum < INT_ARG_CNT) {
239                                 v->regoff = argintregs[v->varnum];
240                                 return(argintregs[v->varnum]);
241                         }
242                 }
243                 v->regoff -= INT_ARG_CNT;
244                 break;
245         }
246         v->flags |= INMEMORY;
247         return tempregnum;
248 }
249
250
251 /* store_reg_to_var_xxx:
252     This function generates the code to store the result of an operation
253     back into a spilled pseudo-variable.
254     If the pseudo-variable has not been spilled in the first place, this 
255     function will generate nothing.
256     
257     v ............ Pseudovariable
258     tempregnum ... Number of the temporary registers as returned by
259                    reg_of_var.
260 */      
261
262 #define store_reg_to_var_int(sptr, tempregnum) \
263     if ((sptr)->flags & INMEMORY) { \
264         COUNT_SPILLS; \
265         x86_64_mov_reg_membase(tempregnum, REG_SP, (sptr)->regoff * 8); \
266     }
267
268
269 #define store_reg_to_var_flt(sptr, tempregnum) \
270     if ((sptr)->flags & INMEMORY) { \
271          COUNT_SPILLS; \
272          x86_64_movq_reg_membase(tempregnum, REG_SP, (sptr)->regoff * 8); \
273     }
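/* Typical usage of the pseudo-register helpers above, as seen in the ICMD
   cases below (illustrative sketch only, mirroring e.g. the ICMD_L2I case):

       var_to_reg_int(s1, src, REG_ITMP1);       fetch source into a register
       d = reg_of_var(iptr->dst, REG_ITMP3);     pick the destination register
       ... emit the actual operation from s1 into d ...
       store_reg_to_var_int(iptr->dst, d);       spill d back if dst lives on
                                                 the stack
*/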
274
275
276 /* NullPointerException signal handler for hardware null pointer check */
277
278 void catch_NullPointerException(int sig, siginfo_t *siginfo, void *_p)
279 {
280         sigset_t nsig;
281         /*      int      instr; */
282         /*      long     faultaddr; */
283
284         struct ucontext *_uc = (struct ucontext *) _p;
285         struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
286         struct sigaction act;
287         java_objectheader *xptr;
288
289         /* Reset signal handler - necessary for SysV, does no harm for BSD */
290
291         
292 /*      instr = *((int*)(sigctx->rip)); */
293 /*      faultaddr = sigctx->sc_regs[(instr >> 16) & 0x1f]; */
294
295 /*      if (faultaddr == 0) { */
296         act.sa_sigaction = (void *) catch_NullPointerException; /* reinstall handler */
297         act.sa_flags = SA_SIGINFO;
298         sigaction(sig, &act, NULL);
299         
300         sigemptyset(&nsig);
301         sigaddset(&nsig, sig);
302         sigprocmask(SIG_UNBLOCK, &nsig, NULL);               /* unblock signal    */
303
304         xptr = new_exception(string_java_lang_NullPointerException);
305
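        /* Rewrite the saved context: when the handler returns, execution
           resumes in asm_handle_exception with the exception object in rax
           (REG_ITMP1_XPTR) and the address of the faulting instruction in r10
           (REG_ITMP2_XPC). */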
306         sigctx->rax = (u8) xptr;                             /* REG_ITMP1_XPTR    */
307         sigctx->r10 = sigctx->rip;                           /* REG_ITMP2_XPC     */
308         sigctx->rip = (u8) asm_handle_exception;
309
310         return;
311
312 /*      } else { */
313 /*              faultaddr += (long) ((instr << 16) >> 16); */
314 /*              fprintf(stderr, "faulting address: 0x%08x\n", faultaddr); */
315 /*              panic("Stack overflow"); */
316 /*      } */
317 }
318
319
320 /* ArithmeticException signal handler for hardware divide by zero check */
321
322 void catch_ArithmeticException(int sig, siginfo_t *siginfo, void *_p)
323 {
324         sigset_t nsig;
325
326         struct ucontext *_uc = (struct ucontext *) _p;
327         struct sigcontext *sigctx = (struct sigcontext *) &_uc->uc_mcontext;
328         struct sigaction act;
329         java_objectheader *xptr;
330
331         /* Reset signal handler - necessary for SysV, does no harm for BSD */
332
333         act.sa_sigaction = (void *) catch_ArithmeticException; /* reinstall handler */
334         act.sa_flags = SA_SIGINFO;
335         sigaction(sig, &act, NULL);
336
337         sigemptyset(&nsig);
338         sigaddset(&nsig, sig);
339         sigprocmask(SIG_UNBLOCK, &nsig, NULL);               /* unblock signal    */
340
341         xptr = new_exception_message(string_java_lang_ArithmeticException,
342                                                                  string_java_lang_ArithmeticException_message);
343
344         sigctx->rax = (u8) xptr;                             /* REG_ITMP1_XPTR    */
345         sigctx->r10 = sigctx->rip;                           /* REG_ITMP2_XPC     */
346         sigctx->rip = (u8) asm_handle_exception;
347
348         return;
349 }
350
351
352 void init_exceptions(void)
353 {
354         struct sigaction act;
355
356         /* install signal handlers we need to convert to exceptions */
357
358         if (!checknull) {
359 #if defined(SIGSEGV)
360                 act.sa_sigaction = (void *) catch_NullPointerException;
361                 act.sa_flags = SA_SIGINFO;
362                 sigaction(SIGSEGV, &act, NULL);
363 #endif
364
365 #if defined(SIGBUS)
366                 act.sa_sigaction = (void *) catch_NullPointerException;
367                 act.sa_flags = SA_SIGINFO;
368                 sigaction(SIGBUS, &act, NULL);
369 #endif
370         }
371
372         act.sa_sigaction = (void *) catch_ArithmeticException;
373         act.sa_flags = SA_SIGINFO;
374         sigaction(SIGFPE, &act, NULL);
375 }
376
377
378 /* function codegen ***********************************************************
379
380         generates machine code
381
382 *******************************************************************************/
383
384 /* global code generation pointer */
385
386 u1 *mcodeptr;
387
388
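/* codegen() first builds the method header and exception table in the data
   segment, then emits the prologue (stack frame, callee-saved registers,
   monitorenter argument, optional verbose-call tracing, moving the arguments
   into their assigned locations), and finally walks the basic blocks,
   translating one ICMD at a time in the big switch below. */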
389 void codegen()
390 {
391         int  len, s1, s2, s3, d;
392         s8   a;
393         stackptr    src;
394         varinfo     *var;
395         basicblock  *bptr;
396         instruction *iptr;
397
398         xtable *ex;
399
400         {
401         int p, pa, t, l, r;
402
403         savedregs_num = 0;
404
405         /* space to save used callee saved registers */
406
407         savedregs_num += (savintregcnt - maxsavintreguse);
408         savedregs_num += (savfltregcnt - maxsavfltreguse);
409
410         parentargs_base = maxmemuse + savedregs_num;
411
412 #if defined(USE_THREADS)           /* space to save argument of monitor_enter */
413
414         if (checksync && (method->flags & ACC_SYNCHRONIZED))
415                 parentargs_base++;
416
417 #endif
418
419     /* keep stack 16-byte aligned for calls into libc */
420
421         if (!isleafmethod || runverbose) {
422                 if ((parentargs_base % 2) == 0) {
423                         parentargs_base++;
424                 }
425         }
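        /* The frame consists of 8-byte slots and the caller's call instruction
           has already pushed an 8-byte return address, so an odd number of
           additional slots brings the stack pointer back to a 16-byte boundary,
           as the SysV AMD64 ABI requires at call sites.  Leaf methods that are
           not traced make no calls, so they skip this. */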
426
427         /* create method header */
428
429         (void) dseg_addaddress(method);                         /* MethodPointer  */
430         (void) dseg_adds4(parentargs_base * 8);                 /* FrameSize      */
431
432 #if defined(USE_THREADS)
433
434         /* IsSync contains the offset relative to the stack pointer for the
435            argument of monitor_exit used in the exception handler. Since an
436            offset of zero could not be distinguished from the 'not
437            synchronized' case, the stored value is biased by one.
438         */
439
440         if (checksync && (method->flags & ACC_SYNCHRONIZED))
441                 (void) dseg_adds4((maxmemuse + 1) * 8);             /* IsSync         */
442         else
443
444 #endif
445
446         (void) dseg_adds4(0);                                   /* IsSync         */
447                                                
448         (void) dseg_adds4(isleafmethod);                        /* IsLeaf         */
449         (void) dseg_adds4(savintregcnt - maxsavintreguse);      /* IntSave        */
450         (void) dseg_adds4(savfltregcnt - maxsavfltreguse);      /* FltSave        */
451         (void) dseg_adds4(exceptiontablelength);                /* ExTableSize    */
452
453         /* create exception table */
454
455         for (ex = extable; ex != NULL; ex = ex->down) {
456                 dseg_addtarget(ex->start);
457                 dseg_addtarget(ex->end);
458                 dseg_addtarget(ex->handler);
459                 (void) dseg_addaddress(ex->catchtype);
460         }
461         
462         /* initialize mcode variables */
463         
464         mcodeptr = (u1*) mcodebase;
465         mcodeend = (s4*) (mcodebase + mcodesize);
466         MCODECHECK(128 + mparamcount);
467
468         /* create stack frame (if necessary) */
469
470         if (parentargs_base) {
471                 x86_64_alu_imm_reg(X86_64_SUB, parentargs_base * 8, REG_SP);
472         }
473
474         /* save return address and used callee saved registers */
475
476         p = parentargs_base;
477         for (r = savintregcnt - 1; r >= maxsavintreguse; r--) {
478                 p--; x86_64_mov_reg_membase(savintregs[r], REG_SP, p * 8);
479         }
480         for (r = savfltregcnt - 1; r >= maxsavfltreguse; r--) {
481                 p--; x86_64_movq_reg_membase(savfltregs[r], REG_SP, p * 8);
482         }
483
484         /* save monitorenter argument */
485
486 #if defined(USE_THREADS)
487         if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
488                 if (method->flags & ACC_STATIC) {
489                         x86_64_mov_imm_reg((s8) class, REG_ITMP1);
490                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, maxmemuse * 8);
491
492                 } else {
493                         x86_64_mov_reg_membase(argintregs[0], REG_SP, maxmemuse * 8);
494                 }
495         }                       
496 #endif
497
498         /* copy argument registers to stack and call trace function with pointer
499            to arguments on stack.
500         */
501         if (runverbose) {
502                 x86_64_alu_imm_reg(X86_64_SUB, (6 + 8 + 1 + 1) * 8, REG_SP);
503
504                 x86_64_mov_reg_membase(argintregs[0], REG_SP, 1 * 8);
505                 x86_64_mov_reg_membase(argintregs[1], REG_SP, 2 * 8);
506                 x86_64_mov_reg_membase(argintregs[2], REG_SP, 3 * 8);
507                 x86_64_mov_reg_membase(argintregs[3], REG_SP, 4 * 8);
508                 x86_64_mov_reg_membase(argintregs[4], REG_SP, 5 * 8);
509                 x86_64_mov_reg_membase(argintregs[5], REG_SP, 6 * 8);
510
511                 x86_64_movq_reg_membase(argfltregs[0], REG_SP, 7 * 8);
512                 x86_64_movq_reg_membase(argfltregs[1], REG_SP, 8 * 8);
513                 x86_64_movq_reg_membase(argfltregs[2], REG_SP, 9 * 8);
514                 x86_64_movq_reg_membase(argfltregs[3], REG_SP, 10 * 8);
515 /*              x86_64_movq_reg_membase(argfltregs[4], REG_SP, 11 * 8); */
516 /*              x86_64_movq_reg_membase(argfltregs[5], REG_SP, 12 * 8); */
517 /*              x86_64_movq_reg_membase(argfltregs[6], REG_SP, 13 * 8); */
518 /*              x86_64_movq_reg_membase(argfltregs[7], REG_SP, 14 * 8); */
519
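                /* builtin_trace_args is a plain C function, so the floating point
                   arguments are moved into integer argument registers here
                   (presumably so they can be passed as plain 64-bit words); the
                   inner loop shifts the remaining integer argument registers up
                   by one to free the register at position p. */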
520                 for (p = 0, l = 0; p < mparamcount; p++) {
521                         t = mparamtypes[p];
522
523                         if (IS_FLT_DBL_TYPE(t)) {
524                                 for (s1 = (mparamcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : mparamcount - 2; s1 >= p; s1--) {
525                                         x86_64_mov_reg_reg(argintregs[s1], argintregs[s1 + 1]);
526                                 }
527
528                                 x86_64_movd_freg_reg(argfltregs[l], argintregs[p]);
529                                 l++;
530                         }
531                 }
532
533                 x86_64_mov_imm_reg((s8) method, REG_ITMP2);
534                 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, 0 * 8);
535                 x86_64_mov_imm_reg((s8) builtin_trace_args, REG_ITMP1);
536                 x86_64_call_reg(REG_ITMP1);
537
538                 x86_64_mov_membase_reg(REG_SP, 1 * 8, argintregs[0]);
539                 x86_64_mov_membase_reg(REG_SP, 2 * 8, argintregs[1]);
540                 x86_64_mov_membase_reg(REG_SP, 3 * 8, argintregs[2]);
541                 x86_64_mov_membase_reg(REG_SP, 4 * 8, argintregs[3]);
542                 x86_64_mov_membase_reg(REG_SP, 5 * 8, argintregs[4]);
543                 x86_64_mov_membase_reg(REG_SP, 6 * 8, argintregs[5]);
544
545                 x86_64_movq_membase_reg(REG_SP, 7 * 8, argfltregs[0]);
546                 x86_64_movq_membase_reg(REG_SP, 8 * 8, argfltregs[1]);
547                 x86_64_movq_membase_reg(REG_SP, 9 * 8, argfltregs[2]);
548                 x86_64_movq_membase_reg(REG_SP, 10 * 8, argfltregs[3]);
549 /*              x86_64_movq_membase_reg(REG_SP, 11 * 8, argfltregs[4]); */
550 /*              x86_64_movq_membase_reg(REG_SP, 12 * 8, argfltregs[5]); */
551 /*              x86_64_movq_membase_reg(REG_SP, 13 * 8, argfltregs[6]); */
552 /*              x86_64_movq_membase_reg(REG_SP, 14 * 8, argfltregs[7]); */
553
554                 x86_64_alu_imm_reg(X86_64_ADD, (6 + 8 + 1 + 1) * 8, REG_SP);
555         }
556
557         /* take arguments out of register or stack frame */
558
559         for (p = 0, l = 0, s1 = 0, s2 = 0; p < mparamcount; p++) {
560                 t = mparamtypes[p];
561                 var = &(locals[l][t]);
562                 l++;
563                 if (IS_2_WORD_TYPE(t))    /* increment local counter for 2 word types */
564                         l++;
565                 if (var->type < 0) {
566                         if (IS_INT_LNG_TYPE(t)) {
567                                 s1++;
568                         } else {
569                                 s2++;
570                         }
571                         continue;
572                 }
573                 r = var->regoff; 
574                 if (IS_INT_LNG_TYPE(t)) {                    /* integer args          */
575                         if (s1 < INT_ARG_CNT) {                /* register arguments    */
576                                 if (!(var->flags & INMEMORY)) {      /* reg arg -> register   */
577                                         M_INTMOVE(argintregs[s1], r);
578
579                                 } else {                             /* reg arg -> spilled    */
580                                     x86_64_mov_reg_membase(argintregs[s1], REG_SP, r * 8);
581                                 }
582                         } else {                                 /* stack arguments       */
583                                 pa = s1 - INT_ARG_CNT;
584                                 if (s2 >= FLT_ARG_CNT) {
585                                         pa += s2 - FLT_ARG_CNT;
586                                 }
587                                 if (!(var->flags & INMEMORY)) {      /* stack arg -> register */ 
588                                         x86_64_mov_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, r);    /* + 8 for return address */
589                                 } else {                             /* stack arg -> spilled  */
590                                         x86_64_mov_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, REG_ITMP1);    /* + 8 for return address */
591                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, r * 8);
592                                 }
593                         }
594                         s1++;
595
596                 } else {                                     /* floating args         */   
597                         if (s2 < FLT_ARG_CNT) {                /* register arguments    */
598                                 if (!(var->flags & INMEMORY)) {      /* reg arg -> register   */
599                                         M_FLTMOVE(argfltregs[s2], r);
600
601                                 } else {                                         /* reg arg -> spilled    */
602                                         x86_64_movq_reg_membase(argfltregs[s2], REG_SP, r * 8);
603                                 }
604
605                         } else {                                 /* stack arguments       */
606                                 pa = s2 - FLT_ARG_CNT;
607                                 if (s1 >= INT_ARG_CNT) {
608                                         pa += s1 - INT_ARG_CNT;
609                                 }
610                                 if (!(var->flags & INMEMORY)) {      /* stack-arg -> register */
611                                         x86_64_movq_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, r);
612
613                                 } else {
614                                         x86_64_movq_membase_reg(REG_SP, (parentargs_base + pa) * 8 + 8, REG_FTMP1);
615                                         x86_64_movq_reg_membase(REG_FTMP1, REG_SP, r * 8);
616                                 }
617                         }
618                         s2++;
619                 }
620         }  /* end for */
621
622         /* call monitorenter function */
623
624 #if defined(USE_THREADS)
625         if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
626                 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
627                 x86_64_mov_imm_reg((s8) builtin_monitorenter, REG_ITMP1);
628                 x86_64_call_reg(REG_ITMP1);
629         }                       
630 #endif
631         }
632
633         /* end of header generation */
634
635         /* walk through all basic blocks */
636         for (bptr = block; bptr != NULL; bptr = bptr->next) {
637
638                 bptr->mpc = (u4) ((u1 *) mcodeptr - mcodebase);
639
640                 if (bptr->flags >= BBREACHED) {
641
642                 /* branch resolving */
643
644                 branchref *brefs;
645                 for (brefs = bptr->branchrefs; brefs != NULL; brefs = brefs->next) {
646                         gen_resolvebranch((u1*) mcodebase + brefs->branchpos, 
647                                           brefs->branchpos, bptr->mpc);
648                 }
649
650                 /* copy interface registers to their destination */
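                /* Values that are live on the operand stack across the block
                   boundary ("interface registers") are moved into the locations
                   assigned for this block.  For subroutine targets (BBTYPE_SBR)
                   the return address is popped off the machine stack; for
                   exception handlers (BBTYPE_EXH) the exception object arrives
                   in REG_ITMP1. */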
651
652                 src = bptr->instack;
653                 len = bptr->indepth;
654                 MCODECHECK(64+len);
655                 while (src != NULL) {
656                         len--;
657                         if ((len == 0) && (bptr->type != BBTYPE_STD)) {
658                                 if (bptr->type == BBTYPE_SBR) {
659                                         d = reg_of_var(src, REG_ITMP1);
660                                         x86_64_pop_reg(d);
661                                         store_reg_to_var_int(src, d);
662
663                                 } else if (bptr->type == BBTYPE_EXH) {
664                                         d = reg_of_var(src, REG_ITMP1);
665                                         M_INTMOVE(REG_ITMP1, d);
666                                         store_reg_to_var_int(src, d);
667                                 }
668
669                         } else {
670                                 d = reg_of_var(src, REG_ITMP1);
671                                 if ((src->varkind != STACKVAR)) {
672                                         s2 = src->type;
673                                         if (IS_FLT_DBL_TYPE(s2)) {
674                                                 s1 = interfaces[len][s2].regoff;
675                                                 if (!(interfaces[len][s2].flags & INMEMORY)) {
676                                                         M_FLTMOVE(s1, d);
677
678                                                 } else {
679                                                         x86_64_movq_membase_reg(REG_SP, s1 * 8, d);
680                                                 }
681                                                 store_reg_to_var_flt(src, d);
682
683                                         } else {
684                                                 s1 = interfaces[len][s2].regoff;
685                                                 if (!(interfaces[len][s2].flags & INMEMORY)) {
686                                                         M_INTMOVE(s1, d);
687
688                                                 } else {
689                                                         x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
690                                                 }
691                                                 store_reg_to_var_int(src, d);
692                                         }
693                                 }
694                         }
695                         src = src->prev;
696                 }
697
698                 /* walk through all instructions */
699                 
700                 src = bptr->instack;
701                 len = bptr->icount;
702                 for (iptr = bptr->iinstr; len > 0; src = iptr->dst, len--, iptr++) {
703
704                         MCODECHECK(64);   /* an instruction usually needs < 64 bytes      */
705                         switch (iptr->opc) {
706
707                         case ICMD_NOP:    /* ...  ==> ...                                 */
708                                 break;
709
710                         case ICMD_NULLCHECKPOP: /* ..., objectref  ==> ...                */
711                                 if (src->flags & INMEMORY) {
712                                         x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
713
714                                 } else {
715                                         x86_64_test_reg_reg(src->regoff, src->regoff);
716                                 }
717                                 x86_64_jcc(X86_64_CC_E, 0);
718                                 codegen_addxnullrefs(mcodeptr);
719                                 break;
720
721                 /* constant operations ************************************************/
722
723                 case ICMD_ICONST:     /* ...  ==> ..., constant                       */
724                                       /* op1 = 0, val.i = constant                    */
725
726                         d = reg_of_var(iptr->dst, REG_ITMP1);
727                         if (iptr->val.i == 0) {
728                                 x86_64_alu_reg_reg(X86_64_XOR, d, d);
729                         } else {
730                                 x86_64_movl_imm_reg(iptr->val.i, d);
731                         }
732                         store_reg_to_var_int(iptr->dst, d);
733                         break;
734
735                 case ICMD_ACONST:     /* ...  ==> ..., constant                       */
736                                       /* op1 = 0, val.a = constant                    */
737
738                         d = reg_of_var(iptr->dst, REG_ITMP1);
739                         if (iptr->val.a == 0) {
740                                 x86_64_alu_reg_reg(X86_64_XOR, d, d);
741                         } else {
742                                 x86_64_mov_imm_reg((s8) iptr->val.a, d);
743                         }
744                         store_reg_to_var_int(iptr->dst, d);
745                         break;
746
747                 case ICMD_LCONST:     /* ...  ==> ..., constant                       */
748                                       /* op1 = 0, val.l = constant                    */
749
750                         d = reg_of_var(iptr->dst, REG_ITMP1);
751                         if (iptr->val.l == 0) {
752                                 x86_64_alu_reg_reg(X86_64_XOR, d, d);
753                         } else {
754                                 x86_64_mov_imm_reg(iptr->val.l, d);
755                         }
756                         store_reg_to_var_int(iptr->dst, d);
757                         break;
758
759                 case ICMD_FCONST:     /* ...  ==> ..., constant                       */
760                                       /* op1 = 0, val.f = constant                    */
761
762                         d = reg_of_var(iptr->dst, REG_FTMP1);
763                         a = dseg_addfloat(iptr->val.f);
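                        /* The displacement is the distance between the constant
                           (at the negative data segment offset a) and the end of
                           this load instruction, since rip-relative addressing is
                           relative to the address of the following instruction.
                           The instruction length is accounted for as 8 bytes, or 9
                           when the destination is xmm8..xmm15 (d > 7) and needs an
                           extra REX prefix. */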
764                         x86_64_movdl_membase_reg(RIP, -(((s8) mcodeptr + ((d > 7) ? 9 : 8)) - (s8) mcodebase) + a, d);
765                         store_reg_to_var_flt(iptr->dst, d);
766                         break;
767                 
768                 case ICMD_DCONST:     /* ...  ==> ..., constant                       */
769                                       /* op1 = 0, val.d = constant                    */
770
771                         d = reg_of_var(iptr->dst, REG_FTMP1);
772                         a = dseg_adddouble(iptr->val.d);
773                         x86_64_movd_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, d);
774                         store_reg_to_var_flt(iptr->dst, d);
775                         break;
776
777
778                 /* load/store operations **********************************************/
779
780                 case ICMD_ILOAD:      /* ...  ==> ..., content of local variable      */
781                                       /* op1 = local variable                         */
782
783                         d = reg_of_var(iptr->dst, REG_ITMP1);
784                         if ((iptr->dst->varkind == LOCALVAR) &&
785                             (iptr->dst->varnum == iptr->op1)) {
786                                 break;
787                         }
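                        /* opc - ICMD_ILOAD yields the TYPE_* index of the local
                           variable: the load opcodes ILOAD..ALOAD are laid out in
                           the same order as the TYPE_INT..TYPE_ADR constants.  The
                           same trick is used for the store opcodes below. */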
788                         var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
789                         if (var->flags & INMEMORY) {
790                                 x86_64_movl_membase_reg(REG_SP, var->regoff * 8, d);
791                                 store_reg_to_var_int(iptr->dst, d);
792
793                         } else {
794                                 if (iptr->dst->flags & INMEMORY) {
795                                         x86_64_mov_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
796
797                                 } else {
798                                         M_INTMOVE(var->regoff, d);
799                                 }
800                         }
801                         break;
802
803                 case ICMD_LLOAD:      /* ...  ==> ..., content of local variable      */
804                 case ICMD_ALOAD:      /* op1 = local variable                         */
805
806                         d = reg_of_var(iptr->dst, REG_ITMP1);
807                         if ((iptr->dst->varkind == LOCALVAR) &&
808                             (iptr->dst->varnum == iptr->op1)) {
809                                 break;
810                         }
811                         var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
812                         if (var->flags & INMEMORY) {
813                                 x86_64_mov_membase_reg(REG_SP, var->regoff * 8, d);
814                                 store_reg_to_var_int(iptr->dst, d);
815
816                         } else {
817                                 if (iptr->dst->flags & INMEMORY) {
818                                         x86_64_mov_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
819
820                                 } else {
821                                         M_INTMOVE(var->regoff, d);
822                                 }
823                         }
824                         break;
825
826                 case ICMD_FLOAD:      /* ...  ==> ..., content of local variable      */
827                 case ICMD_DLOAD:      /* op1 = local variable                         */
828
829                         d = reg_of_var(iptr->dst, REG_FTMP1);
830                         if ((iptr->dst->varkind == LOCALVAR) &&
831                             (iptr->dst->varnum == iptr->op1)) {
832                                 break;
833                         }
834                         var = &(locals[iptr->op1][iptr->opc - ICMD_ILOAD]);
835                         if (var->flags & INMEMORY) {
836                                 x86_64_movq_membase_reg(REG_SP, var->regoff * 8, d);
837                                 store_reg_to_var_flt(iptr->dst, d);
838
839                         } else {
840                                 if (iptr->dst->flags & INMEMORY) {
841                                         x86_64_movq_reg_membase(var->regoff, REG_SP, iptr->dst->regoff * 8);
842
843                                 } else {
844                                         M_FLTMOVE(var->regoff, d);
845                                 }
846                         }
847                         break;
848
849                 case ICMD_ISTORE:     /* ..., value  ==> ...                          */
850                 case ICMD_LSTORE:     /* op1 = local variable                         */
851                 case ICMD_ASTORE:
852
853                         if ((src->varkind == LOCALVAR) &&
854                             (src->varnum == iptr->op1)) {
855                                 break;
856                         }
857                         var = &(locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
858                         if (var->flags & INMEMORY) {
859                                 var_to_reg_int(s1, src, REG_ITMP1);
860                                 x86_64_mov_reg_membase(s1, REG_SP, var->regoff * 8);
861
862                         } else {
863                                 var_to_reg_int(s1, src, var->regoff);
864                                 M_INTMOVE(s1, var->regoff);
865                         }
866                         break;
867
868                 case ICMD_FSTORE:     /* ..., value  ==> ...                          */
869                 case ICMD_DSTORE:     /* op1 = local variable                         */
870
871                         if ((src->varkind == LOCALVAR) &&
872                             (src->varnum == iptr->op1)) {
873                                 break;
874                         }
875                         var = &(locals[iptr->op1][iptr->opc - ICMD_ISTORE]);
876                         if (var->flags & INMEMORY) {
877                                 var_to_reg_flt(s1, src, REG_FTMP1);
878                                 x86_64_movq_reg_membase(s1, REG_SP, var->regoff * 8);
879
880                         } else {
881                                 var_to_reg_flt(s1, src, var->regoff);
882                                 M_FLTMOVE(s1, var->regoff);
883                         }
884                         break;
885
886
887                 /* pop/dup/swap operations ********************************************/
888
889                 /* attention: doubles and longs are only one entry in CACAO ICMDs    */
890
891                 case ICMD_POP:        /* ..., value  ==> ...                          */
892                 case ICMD_POP2:       /* ..., value, value  ==> ...                   */
893                         break;
894
895 #define M_COPY(from,to) \
896                 d = reg_of_var(to, REG_ITMP1); \
897                         if ((from->regoff != to->regoff) || \
898                             ((from->flags ^ to->flags) & INMEMORY)) { \
899                                 if (IS_FLT_DBL_TYPE(from->type)) { \
900                                         var_to_reg_flt(s1, from, d); \
901                                         M_FLTMOVE(s1, d); \
902                                         store_reg_to_var_flt(to, d); \
903                                 } else { \
904                                         var_to_reg_int(s1, from, d); \
905                                         M_INTMOVE(s1, d); \
906                                         store_reg_to_var_int(to, d); \
907                                 } \
908                         }
909
910                 case ICMD_DUP:        /* ..., a ==> ..., a, a                         */
911                         M_COPY(src, iptr->dst);
912                         break;
913
914                 case ICMD_DUP_X1:     /* ..., a, b ==> ..., b, a, b                   */
915
916                         M_COPY(src,       iptr->dst->prev->prev); /* fall through */
917
918                 case ICMD_DUP2:       /* ..., a, b ==> ..., a, b, a, b                */
919
920                         M_COPY(src,       iptr->dst);
921                         M_COPY(src->prev, iptr->dst->prev);
922                         break;
923
924                 case ICMD_DUP2_X1:    /* ..., a, b, c ==> ..., b, c, a, b, c          */
925
926                         M_COPY(src->prev,       iptr->dst->prev->prev->prev); /* fall through */
927
928                 case ICMD_DUP_X2:     /* ..., a, b, c ==> ..., c, a, b, c             */
929
930                         M_COPY(src,             iptr->dst);
931                         M_COPY(src->prev,       iptr->dst->prev);
932                         M_COPY(src->prev->prev, iptr->dst->prev->prev);
933                         M_COPY(src, iptr->dst->prev->prev->prev);
934                         break;
935
936                 case ICMD_DUP2_X2:    /* ..., a, b, c, d ==> ..., c, d, a, b, c, d    */
937
938                         M_COPY(src,                   iptr->dst);
939                         M_COPY(src->prev,             iptr->dst->prev);
940                         M_COPY(src->prev->prev,       iptr->dst->prev->prev);
941                         M_COPY(src->prev->prev->prev, iptr->dst->prev->prev->prev);
942                         M_COPY(src,       iptr->dst->prev->prev->prev->prev);
943                         M_COPY(src->prev, iptr->dst->prev->prev->prev->prev->prev);
944                         break;
945
946                 case ICMD_SWAP:       /* ..., a, b ==> ..., b, a                      */
947
948                         M_COPY(src, iptr->dst->prev);
949                         M_COPY(src->prev, iptr->dst);
950                         break;
951
952
953                 /* integer operations *************************************************/
954
955                 case ICMD_INEG:       /* ..., value  ==> ..., - value                 */
956
957                         d = reg_of_var(iptr->dst, REG_NULL);
958                         if (iptr->dst->flags & INMEMORY) {
959                                 if (src->flags & INMEMORY) {
960                                         if (src->regoff == iptr->dst->regoff) {
961                                                 x86_64_negl_membase(REG_SP, iptr->dst->regoff * 8);
962
963                                         } else {
964                                                 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
965                                                 x86_64_negl_reg(REG_ITMP1);
966                                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
967                                         }
968
969                                 } else {
970                                         x86_64_movl_reg_membase(src->regoff, REG_SP, iptr->dst->regoff * 8);
971                                         x86_64_negl_membase(REG_SP, iptr->dst->regoff * 8);
972                                 }
973
974                         } else {
975                                 if (src->flags & INMEMORY) {
976                                         x86_64_movl_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
977                                         x86_64_negl_reg(d);
978
979                                 } else {
980                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
981                                         x86_64_negl_reg(iptr->dst->regoff);
982                                 }
983                         }
984                         break;
985
986                 case ICMD_LNEG:       /* ..., value  ==> ..., - value                 */
987
988                         d = reg_of_var(iptr->dst, REG_NULL);
989                         if (iptr->dst->flags & INMEMORY) {
990                                 if (src->flags & INMEMORY) {
991                                         if (src->regoff == iptr->dst->regoff) {
992                                                 x86_64_neg_membase(REG_SP, iptr->dst->regoff * 8);
993
994                                         } else {
995                                                 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
996                                                 x86_64_neg_reg(REG_ITMP1);
997                                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
998                                         }
999
1000                                 } else {
1001                                         x86_64_mov_reg_membase(src->regoff, REG_SP, iptr->dst->regoff * 8);
1002                                         x86_64_neg_membase(REG_SP, iptr->dst->regoff * 8);
1003                                 }
1004
1005                         } else {
1006                                 if (src->flags & INMEMORY) {
1007                                         x86_64_mov_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1008                                         x86_64_neg_reg(iptr->dst->regoff);
1009
1010                                 } else {
1011                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
1012                                         x86_64_neg_reg(iptr->dst->regoff);
1013                                 }
1014                         }
1015                         break;
1016
1017                 case ICMD_I2L:        /* ..., value  ==> ..., value                   */
1018
1019                         d = reg_of_var(iptr->dst, REG_ITMP3);
1020                         if (src->flags & INMEMORY) {
1021                                 x86_64_movslq_membase_reg(REG_SP, src->regoff * 8, d);
1022
1023                         } else {
1024                                 x86_64_movslq_reg_reg(src->regoff, d);
1025                         }
1026                         store_reg_to_var_int(iptr->dst, d);
1027                         break;
1028
1029                 case ICMD_L2I:        /* ..., value  ==> ..., value                   */
1030
1031                         var_to_reg_int(s1, src, REG_ITMP1);
1032                         d = reg_of_var(iptr->dst, REG_ITMP3);
1033                         M_INTMOVE(s1, d);
1034                         store_reg_to_var_int(iptr->dst, d);
1035                         break;
1036
1037                 case ICMD_INT2BYTE:   /* ..., value  ==> ..., value                   */
1038
1039                         d = reg_of_var(iptr->dst, REG_ITMP3);
1040                         if (src->flags & INMEMORY) {
1041                                 x86_64_movsbq_membase_reg(REG_SP, src->regoff * 8, d);
1042
1043                         } else {
1044                                 x86_64_movsbq_reg_reg(src->regoff, d);
1045                         }
1046                         store_reg_to_var_int(iptr->dst, d);
1047                         break;
1048
1049                 case ICMD_INT2CHAR:   /* ..., value  ==> ..., value                   */
1050
1051                         d = reg_of_var(iptr->dst, REG_ITMP3);
1052                         if (src->flags & INMEMORY) {
1053                                 x86_64_movzwq_membase_reg(REG_SP, src->regoff * 8, d);
1054
1055                         } else {
1056                                 x86_64_movzwq_reg_reg(src->regoff, d);
1057                         }
1058                         store_reg_to_var_int(iptr->dst, d);
1059                         break;
1060
1061                 case ICMD_INT2SHORT:  /* ..., value  ==> ..., value                   */
1062
1063                         d = reg_of_var(iptr->dst, REG_ITMP3);
1064                         if (src->flags & INMEMORY) {
1065                                 x86_64_movswq_membase_reg(REG_SP, src->regoff * 8, d);
1066
1067                         } else {
1068                                 x86_64_movswq_reg_reg(src->regoff, d);
1069                         }
1070                         store_reg_to_var_int(iptr->dst, d);
1071                         break;
1072
1073
1074                 case ICMD_IADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
1075
1076                         d = reg_of_var(iptr->dst, REG_NULL);
1077                         x86_64_emit_ialu(X86_64_ADD, src, iptr);
1078                         break;
1079
1080                 case ICMD_IADDCONST:  /* ..., value  ==> ..., value + constant        */
1081                                       /* val.i = constant                             */
1082
1083                         d = reg_of_var(iptr->dst, REG_NULL);
1084                         x86_64_emit_ialuconst(X86_64_ADD, src, iptr);
1085                         break;
1086
1087                 case ICMD_LADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
1088
1089                         d = reg_of_var(iptr->dst, REG_NULL);
1090                         x86_64_emit_lalu(X86_64_ADD, src, iptr);
1091                         break;
1092
1093                 case ICMD_LADDCONST:  /* ..., value  ==> ..., value + constant        */
1094                                       /* val.l = constant                             */
1095
1096                         d = reg_of_var(iptr->dst, REG_NULL);
1097                         x86_64_emit_laluconst(X86_64_ADD, src, iptr);
1098                         break;
1099
1100                 case ICMD_ISUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
1101
1102                         d = reg_of_var(iptr->dst, REG_NULL);
1103                         if (iptr->dst->flags & INMEMORY) {
1104                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1105                                         if (src->prev->regoff == iptr->dst->regoff) {
1106                                                 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1107                                                 x86_64_alul_reg_membase(X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1108
1109                                         } else {
1110                                                 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1111                                                 x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1112                                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1113                                         }
1114
1115                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1116                                         M_INTMOVE(src->prev->regoff, REG_ITMP1);
1117                                         x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1118                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1119
1120                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1121                                         if (src->prev->regoff == iptr->dst->regoff) {
1122                                                 x86_64_alul_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1123
1124                                         } else {
1125                                                 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1126                                                 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1127                                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1128                                         }
1129
1130                                 } else {
1131                                         x86_64_movl_reg_membase(src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1132                                         x86_64_alul_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1133                                 }
1134
1135                         } else {
1136                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1137                                         x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, d);
1138                                         x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1139
1140                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1141                                         M_INTMOVE(src->prev->regoff, d);
1142                                         x86_64_alul_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1143
1144                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1145                                         /* workaround for reg alloc: if src and dst share a register,
                                                loading the first operand into dst would clobber src, so
                                                compute in REG_ITMP1 first */
1146                                         if (src->regoff == iptr->dst->regoff) {
1147                                                 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1148                                                 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1149                                                 M_INTMOVE(REG_ITMP1, d);
1150
1151                                         } else {
1152                                                 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, d);
1153                                                 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, d);
1154                                         }
1155
1156                                 } else {
1157                                         /* workaround for reg alloc */
1158                                         if (src->regoff == iptr->dst->regoff) {
1159                                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1160                                                 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1161                                                 M_INTMOVE(REG_ITMP1, d);
1162
1163                                         } else {
1164                                                 M_INTMOVE(src->prev->regoff, d);
1165                                                 x86_64_alul_reg_reg(X86_64_SUB, src->regoff, d);
1166                                         }
1167                                 }
1168                         }
1169                         break;
1170
1171                 case ICMD_ISUBCONST:  /* ..., value  ==> ..., value - constant        */
1172                                       /* val.i = constant                             */
1173
1174                         d = reg_of_var(iptr->dst, REG_NULL);
1175                         x86_64_emit_ialuconst(X86_64_SUB, src, iptr);
1176                         break;
1177
1178                 case ICMD_LSUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
1179
1180                         d = reg_of_var(iptr->dst, REG_NULL);
1181                         if (iptr->dst->flags & INMEMORY) {
1182                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1183                                         if (src->prev->regoff == iptr->dst->regoff) {
1184                                                 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1185                                                 x86_64_alu_reg_membase(X86_64_SUB, REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1186
1187                                         } else {
1188                                                 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1189                                                 x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1190                                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1191                                         }
1192
1193                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1194                                         M_INTMOVE(src->prev->regoff, REG_ITMP1);
1195                                         x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, REG_ITMP1);
1196                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1197
1198                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1199                                         if (src->prev->regoff == iptr->dst->regoff) {
1200                                                 x86_64_alu_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1201
1202                                         } else {
1203                                                 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1204                                                 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1205                                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1206                                         }
1207
1208                                 } else {
1209                                         x86_64_mov_reg_membase(src->prev->regoff, REG_SP, iptr->dst->regoff * 8);
1210                                         x86_64_alu_reg_membase(X86_64_SUB, src->regoff, REG_SP, iptr->dst->regoff * 8);
1211                                 }
1212
1213                         } else {
1214                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1215                                         x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, d);
1216                                         x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1217
1218                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1219                                         M_INTMOVE(src->prev->regoff, d);
1220                                         x86_64_alu_membase_reg(X86_64_SUB, REG_SP, src->regoff * 8, d);
1221
1222                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1223                                         /* workaround for reg alloc */
1224                                         if (src->regoff == iptr->dst->regoff) {
1225                                                 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1226                                                 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1227                                                 M_INTMOVE(REG_ITMP1, d);
1228
1229                                         } else {
1230                                                 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, d);
1231                                                 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, d);
1232                                         }
1233
1234                                 } else {
1235                                         /* workaround for reg alloc */
1236                                         if (src->regoff == iptr->dst->regoff) {
1237                                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1238                                                 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, REG_ITMP1);
1239                                                 M_INTMOVE(REG_ITMP1, d);
1240
1241                                         } else {
1242                                                 M_INTMOVE(src->prev->regoff, d);
1243                                                 x86_64_alu_reg_reg(X86_64_SUB, src->regoff, d);
1244                                         }
1245                                 }
1246                         }
1247                         break;
1248
1249                 case ICMD_LSUBCONST:  /* ..., value  ==> ..., value - constant        */
1250                                       /* val.l = constant                             */
1251
1252                         d = reg_of_var(iptr->dst, REG_NULL);
1253                         x86_64_emit_laluconst(X86_64_SUB, src, iptr);
1254                         break;
1255
1256                 case ICMD_IMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1257
1258                         d = reg_of_var(iptr->dst, REG_NULL);
1259                         if (iptr->dst->flags & INMEMORY) {
1260                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1261                                         x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1262                                         x86_64_imull_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1263                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1264
1265                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1266                                         x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1267                                         x86_64_imull_reg_reg(src->prev->regoff, REG_ITMP1);
1268                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1269
1270                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1271                                         x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1272                                         x86_64_imull_reg_reg(src->regoff, REG_ITMP1);
1273                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1274
1275                                 } else {
1276                                         M_INTMOVE(src->prev->regoff, REG_ITMP1);
1277                                         x86_64_imull_reg_reg(src->regoff, REG_ITMP1);
1278                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1279                                 }
1280
1281                         } else {
1282                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1283                                         x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1284                                         x86_64_imull_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1285
1286                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1287                                         M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1288                                         x86_64_imull_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1289
1290                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1291                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
1292                                         x86_64_imull_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1293
1294                                 } else {
1295                                         if (src->regoff == iptr->dst->regoff) {
1296                                                 x86_64_imull_reg_reg(src->prev->regoff, iptr->dst->regoff);
1297
1298                                         } else {
1299                                                 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1300                                                 x86_64_imull_reg_reg(src->regoff, iptr->dst->regoff);
1301                                         }
1302                                 }
1303                         }
1304                         break;
1305
1306                 case ICMD_IMULCONST:  /* ..., value  ==> ..., value * constant        */
1307                                       /* val.i = constant                             */
1308
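                        /* multiply by an immediate; the register/register case strength-
                           reduces value * 2 to an add, otherwise the three-operand form
                           of imul (imm * src -> dst) is used                             */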
1309                         d = reg_of_var(iptr->dst, REG_NULL);
1310                         if (iptr->dst->flags & INMEMORY) {
1311                                 if (src->flags & INMEMORY) {
1312                                         x86_64_imull_imm_membase_reg(iptr->val.i, REG_SP, src->regoff * 8, REG_ITMP1);
1313                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1314
1315                                 } else {
1316                                         x86_64_imull_imm_reg_reg(iptr->val.i, src->regoff, REG_ITMP1);
1317                                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1318                                 }
1319
1320                         } else {
1321                                 if (src->flags & INMEMORY) {
1322                                         x86_64_imull_imm_membase_reg(iptr->val.i, REG_SP, src->regoff * 8, iptr->dst->regoff);
1323
1324                                 } else {
1325                                         if (iptr->val.i == 2) {
1326                                                 M_INTMOVE(src->regoff, iptr->dst->regoff);
1327                                                 x86_64_alul_reg_reg(X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1328
1329                                         } else {
1330                                                 x86_64_imull_imm_reg_reg(iptr->val.i, src->regoff, iptr->dst->regoff);    /* 3 cycles */
1331                                         }
1332                                 }
1333                         }
1334                         break;
1335
1336                 case ICMD_LMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1337
1338                         d = reg_of_var(iptr->dst, REG_NULL);
1339                         if (iptr->dst->flags & INMEMORY) {
1340                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1341                                         x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1342                                         x86_64_imul_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1343                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1344
1345                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1346                                         x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1347                                         x86_64_imul_reg_reg(src->prev->regoff, REG_ITMP1);
1348                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1349
1350                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1351                                         x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1352                                         x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1353                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1354
1355                                 } else {
1356                                         x86_64_mov_reg_reg(src->prev->regoff, REG_ITMP1);
1357                                         x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1358                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1359                                 }
1360
1361                         } else {
1362                                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1363                                         x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1364                                         x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1365
1366                                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
1367                                         M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1368                                         x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1369
1370                                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
1371                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
1372                                         x86_64_imul_membase_reg(REG_SP, src->prev->regoff * 8, iptr->dst->regoff);
1373
1374                                 } else {
1375                                         if (src->regoff == iptr->dst->regoff) {
1376                                                 x86_64_imul_reg_reg(src->prev->regoff, iptr->dst->regoff);
1377
1378                                         } else {
1379                                                 M_INTMOVE(src->prev->regoff, iptr->dst->regoff);
1380                                                 x86_64_imul_reg_reg(src->regoff, iptr->dst->regoff);
1381                                         }
1382                                 }
1383                         }
1384                         break;
1385
1386                 case ICMD_LMULCONST:  /* ..., value  ==> ..., value * constant        */
1387                                       /* val.l = constant                             */
1388
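                        /* 64-bit imul only accepts a 32-bit sign-extended immediate, so a
                           constant outside that range is first loaded into a scratch
                           register with a 64-bit mov                                      */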
1389                         d = reg_of_var(iptr->dst, REG_NULL);
1390                         if (iptr->dst->flags & INMEMORY) {
1391                                 if (src->flags & INMEMORY) {
1392                                         if (x86_64_is_imm32(iptr->val.l)) {
1393                                                 x86_64_imul_imm_membase_reg(iptr->val.l, REG_SP, src->regoff * 8, REG_ITMP1);
1394
1395                                         } else {
1396                                                 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1397                                                 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, REG_ITMP1);
1398                                         }
1399                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1400                                         
1401                                 } else {
1402                                         if (x86_64_is_imm32(iptr->val.l)) {
1403                                                 x86_64_imul_imm_reg_reg(iptr->val.l, src->regoff, REG_ITMP1);
1404
1405                                         } else {
1406                                                 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1407                                                 x86_64_imul_reg_reg(src->regoff, REG_ITMP1);
1408                                         }
1409                                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, iptr->dst->regoff * 8);
1410                                 }
1411
1412                         } else {
1413                                 if (src->flags & INMEMORY) {
1414                                         if (x86_64_is_imm32(iptr->val.l)) {
1415                                                 x86_64_imul_imm_membase_reg(iptr->val.l, REG_SP, src->regoff * 8, iptr->dst->regoff);
1416
1417                                         } else {
1418                                                 x86_64_mov_imm_reg(iptr->val.l, iptr->dst->regoff);
1419                                                 x86_64_imul_membase_reg(REG_SP, src->regoff * 8, iptr->dst->regoff);
1420                                         }
1421
1422                                 } else {
1423                                         /* should match in many cases */
1424                                         if (iptr->val.l == 2) {
1425                                                 M_INTMOVE(src->regoff, iptr->dst->regoff);
1426                                                 x86_64_alul_reg_reg(X86_64_ADD, iptr->dst->regoff, iptr->dst->regoff);
1427
1428                                         } else {
1429                                                 if (x86_64_is_imm32(iptr->val.l)) {
1430                                                         x86_64_imul_imm_reg_reg(iptr->val.l, src->regoff, iptr->dst->regoff);    /* 4 cycles */
1431
1432                                                 } else {
1433                                                         x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
1434                                                         M_INTMOVE(src->regoff, iptr->dst->regoff);
1435                                                         x86_64_imul_reg_reg(REG_ITMP1, iptr->dst->regoff);
1436                                                 }
1437                                         }
1438                                 }
1439                         }
1440                         break;
1441
1442                 case ICMD_IDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1443
1444                         d = reg_of_var(iptr->dst, REG_NULL);
1445                         if (src->prev->flags & INMEMORY) {
1446                                 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, RAX);
1447
1448                         } else {
1449                                 M_INTMOVE(src->prev->regoff, RAX);
1450                         }
1451                         
1452                         if (src->flags & INMEMORY) {
1453                                 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1454
1455                         } else {
1456                                 M_INTMOVE(src->regoff, REG_ITMP3);
1457                         }
1458                         gen_div_check(src);
1459
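                        /* guard the corner case INT_MIN / -1: idiv would raise a divide
                           error (#DE), but the JVM spec requires the result INT_MIN, so
                           the division is skipped and %eax is left untouched; the jump
                           distances are the hand-counted byte lengths of the skipped
                           instructions                                                   */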
1460                         x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, RAX);    /* check as described in jvm spec */
1461                         x86_64_jcc(X86_64_CC_NE, 4 + 6);
1462                         x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP3);      /* 4 bytes */
1463                         x86_64_jcc(X86_64_CC_E, 3 + 1 + 3);                  /* 6 bytes */
1464
1465                         x86_64_mov_reg_reg(RDX, REG_ITMP2);    /* save %rdx, because it's an argument register */
1466                         x86_64_cltd();
1467                         x86_64_idivl_reg(REG_ITMP3);
1468
1469                         if (iptr->dst->flags & INMEMORY) {
1470                                 x86_64_mov_reg_membase(RAX, REG_SP, iptr->dst->regoff * 8);
1471                                 x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1472
1473                         } else {
1474                                 M_INTMOVE(RAX, iptr->dst->regoff);
1475
1476                                 if (iptr->dst->regoff != RDX) {
1477                                         x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1478                                 }
1479                         }
1480                         break;
1481
1482                 case ICMD_IREM:       /* ..., val1, val2  ==> ..., val1 % val2        */
1483
1484                         d = reg_of_var(iptr->dst, REG_NULL);
1485                         if (src->prev->flags & INMEMORY) {
1486                                 x86_64_movl_membase_reg(REG_SP, src->prev->regoff * 8, RAX);
1487
1488                         } else {
1489                                 M_INTMOVE(src->prev->regoff, RAX);
1490                         }
1491                         
1492                         if (src->flags & INMEMORY) {
1493                                 x86_64_movl_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1494
1495                         } else {
1496                                 M_INTMOVE(src->regoff, REG_ITMP3);
1497                         }
1498                         gen_div_check(src);
1499
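                        /* guard the corner case INT_MIN % -1: the idiv is skipped and %edx,
                           zeroed by the xor below, already holds the required remainder 0  */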
1500                         x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, RAX);    /* check as described in jvm spec */
1501                         x86_64_jcc(X86_64_CC_NE, 2 + 4 + 6);
1502                         x86_64_alul_reg_reg(X86_64_XOR, RDX, RDX);           /* 2 bytes */
1503                         x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP3);      /* 4 bytes */
1504                         x86_64_jcc(X86_64_CC_E, 3 + 1 + 3);                  /* 6 bytes */
1505
1506                         x86_64_mov_reg_reg(RDX, REG_ITMP2);    /* save %rdx, because it's an argument register */
1507                         x86_64_cltd();
1508                         x86_64_idivl_reg(REG_ITMP3);
1509
1510                         if (iptr->dst->flags & INMEMORY) {
1511                                 x86_64_mov_reg_membase(RDX, REG_SP, iptr->dst->regoff * 8);
1512                                 x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1513
1514                         } else {
1515                                 M_INTMOVE(RDX, iptr->dst->regoff);
1516
1517                                 if (iptr->dst->regoff != RDX) {
1518                                         x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1519                                 }
1520                         }
1521                         break;
1522
1523                 case ICMD_IDIVPOW2:   /* ..., value  ==> ..., value >> constant       */
1524                                       /* val.i = constant                             */
1525
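                        /* signed division by 2^val.i: an arithmetic shift alone rounds toward
                           minus infinity, so negative values are biased by (2^val.i - 1) first;
                           the cmovle applies the bias branchlessly                             */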
1526                         var_to_reg_int(s1, src, REG_ITMP1);
1527                         d = reg_of_var(iptr->dst, REG_ITMP3);
1528                         M_INTMOVE(s1, REG_ITMP1);
1529                         x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1530                         x86_64_leal_membase_reg(REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1531                         x86_64_cmovccl_reg_reg(X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1532                         x86_64_shiftl_imm_reg(X86_64_SAR, iptr->val.i, REG_ITMP1);
1533                         x86_64_mov_reg_reg(REG_ITMP1, d);
1534                         store_reg_to_var_int(iptr->dst, d);
1535                         break;
1536
1537                 case ICMD_IREMPOW2:   /* ..., value  ==> ..., value % constant        */
1538                                       /* val.i = constant                             */
1539
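                        /* remainder for a power-of-two divisor (val.i is the mask 2^n - 1):
                           round the value toward zero to a multiple of 2^n (bias negative
                           values, then clear the low bits) and subtract it from the original
                           value, so the result keeps the sign of the dividend               */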
1540                         var_to_reg_int(s1, src, REG_ITMP1);
1541                         d = reg_of_var(iptr->dst, REG_ITMP3);
1542                         M_INTMOVE(s1, REG_ITMP1);
1543                         x86_64_alul_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1544                         x86_64_leal_membase_reg(REG_ITMP1, iptr->val.i, REG_ITMP2);
1545                         x86_64_cmovccl_reg_reg(X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1546                         x86_64_alul_imm_reg(X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1547                         x86_64_alul_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
1548                         x86_64_mov_reg_reg(REG_ITMP1, d);
1549                         store_reg_to_var_int(iptr->dst, d);
1550                         break;
1551
1552
1553                 case ICMD_LDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1554
1555                         d = reg_of_var(iptr->dst, REG_NULL);
1556                         if (src->prev->flags & INMEMORY) {
1557                                 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1558
1559                         } else {
1560                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1561                         }
1562                         
1563                         if (src->flags & INMEMORY) {
1564                                 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1565
1566                         } else {
1567                                 M_INTMOVE(src->regoff, REG_ITMP3);
1568                         }
1569                         gen_div_check(src);
1570
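                        /* same LONG_MIN / -1 guard as in ICMD_IDIV; the 64-bit most negative
                           value does not fit into a 32-bit immediate and is loaded into a
                           scratch register for the compare                                  */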
1571                         x86_64_mov_imm_reg(0x8000000000000000LL, REG_ITMP2);    /* check as described in jvm spec */
1572                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
1573                         x86_64_jcc(X86_64_CC_NE, 4 + 6);
1574                         x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP3);          /* 4 bytes */
1575                         x86_64_jcc(X86_64_CC_E, 3 + 2 + 3);                     /* 6 bytes */
1576
1577                         x86_64_mov_reg_reg(RDX, REG_ITMP2);    /* save %rdx, because it's an argument register */
1578                         x86_64_cqto();
1579                         x86_64_idiv_reg(REG_ITMP3);
1580
1581                         if (iptr->dst->flags & INMEMORY) {
1582                                 x86_64_mov_reg_membase(RAX, REG_SP, iptr->dst->regoff * 8);
1583                                 x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1584
1585                         } else {
1586                                 M_INTMOVE(RAX, iptr->dst->regoff);
1587
1588                                 if (iptr->dst->regoff != RDX) {
1589                                         x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1590                                 }
1591                         }
1592                         break;
1593
1594                 case ICMD_LREM:       /* ..., val1, val2  ==> ..., val1 % val2        */
1595
1596                         d = reg_of_var(iptr->dst, REG_NULL);
1597                         if (src->prev->flags & INMEMORY) {
1598                                 x86_64_mov_membase_reg(REG_SP, src->prev->regoff * 8, REG_ITMP1);
1599
1600                         } else {
1601                                 M_INTMOVE(src->prev->regoff, REG_ITMP1);
1602                         }
1603                         
1604                         if (src->flags & INMEMORY) {
1605                                 x86_64_mov_membase_reg(REG_SP, src->regoff * 8, REG_ITMP3);
1606
1607                         } else {
1608                                 M_INTMOVE(src->regoff, REG_ITMP3);
1609                         }
1610                         gen_div_check(src);
1611
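                        /* same guard as in ICMD_LDIV; when the division is skipped, %rdx has
                           already been zeroed and holds the required remainder 0            */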
1612                         x86_64_mov_imm_reg(0x8000000000000000LL, REG_ITMP2);    /* check as described in jvm spec */
1613                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
1614                         x86_64_jcc(X86_64_CC_NE, 2 + 4 + 6);
1615                         x86_64_alul_reg_reg(X86_64_XOR, RDX, RDX);              /* 2 bytes */
1616                         x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP3);          /* 4 bytes */
1617                         x86_64_jcc(X86_64_CC_E, 3 + 2 + 3);                     /* 6 bytes */
1618
1619                         x86_64_mov_reg_reg(RDX, REG_ITMP2);    /* save %rdx, because it's an argument register */
1620                         x86_64_cqto();
1621                         x86_64_idiv_reg(REG_ITMP3);
1622
1623                         if (iptr->dst->flags & INMEMORY) {
1624                                 x86_64_mov_reg_membase(RDX, REG_SP, iptr->dst->regoff * 8);
1625                                 x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1626
1627                         } else {
1628                                 M_INTMOVE(RDX, iptr->dst->regoff);
1629
1630                                 if (iptr->dst->regoff != RDX) {
1631                                         x86_64_mov_reg_reg(REG_ITMP2, RDX);    /* restore %rdx */
1632                                 }
1633                         }
1634                         break;
1635
1636                 case ICMD_LDIVPOW2:   /* ..., value  ==> ..., value >> constant       */
1637                                       /* val.i = constant                             */
1638
1639                         var_to_reg_int(s1, src, REG_ITMP1);
1640                         d = reg_of_var(iptr->dst, REG_ITMP3);
1641                         M_INTMOVE(s1, REG_ITMP1);
1642                         x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1643                         x86_64_lea_membase_reg(REG_ITMP1, (1 << iptr->val.i) - 1, REG_ITMP2);
1644                         x86_64_cmovcc_reg_reg(X86_64_CC_LE, REG_ITMP2, REG_ITMP1);
1645                         x86_64_shift_imm_reg(X86_64_SAR, iptr->val.i, REG_ITMP1);
1646                         x86_64_mov_reg_reg(REG_ITMP1, d);
1647                         store_reg_to_var_int(iptr->dst, d);
1648                         break;
1649
1650                 case ICMD_LREMPOW2:   /* ..., value  ==> ..., value % constant        */
1651                                       /* val.l = constant                             */
1652
1653                         var_to_reg_int(s1, src, REG_ITMP1);
1654                         d = reg_of_var(iptr->dst, REG_ITMP3);
1655                         M_INTMOVE(s1, REG_ITMP1);
1656                         x86_64_alu_imm_reg(X86_64_CMP, -1, REG_ITMP1);
1657                         x86_64_lea_membase_reg(REG_ITMP1, iptr->val.i, REG_ITMP2);
1658                         x86_64_cmovcc_reg_reg(X86_64_CC_G, REG_ITMP1, REG_ITMP2);
1659                         x86_64_alu_imm_reg(X86_64_AND, -1 - (iptr->val.i), REG_ITMP2);
1660                         x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
1661                         x86_64_mov_reg_reg(REG_ITMP1, d);
1662                         store_reg_to_var_int(iptr->dst, d);
1663                         break;
1664
1665                 case ICMD_ISHL:       /* ..., val1, val2  ==> ..., val1 << val2       */
1666
1667                         d = reg_of_var(iptr->dst, REG_NULL);
1668                         x86_64_emit_ishift(X86_64_SHL, src, iptr);
1669                         break;
1670
1671                 case ICMD_ISHLCONST:  /* ..., value  ==> ..., value << constant       */
1672                                       /* val.i = constant                             */
1673
1674                         d = reg_of_var(iptr->dst, REG_NULL);
1675                         x86_64_emit_ishiftconst(X86_64_SHL, src, iptr);
1676                         break;
1677
1678                 case ICMD_ISHR:       /* ..., val1, val2  ==> ..., val1 >> val2       */
1679
1680                         d = reg_of_var(iptr->dst, REG_NULL);
1681                         x86_64_emit_ishift(X86_64_SAR, src, iptr);
1682                         break;
1683
1684                 case ICMD_ISHRCONST:  /* ..., value  ==> ..., value >> constant       */
1685                                       /* val.i = constant                             */
1686
1687                         d = reg_of_var(iptr->dst, REG_NULL);
1688                         x86_64_emit_ishiftconst(X86_64_SAR, src, iptr);
1689                         break;
1690
1691                 case ICMD_IUSHR:      /* ..., val1, val2  ==> ..., val1 >>> val2      */
1692
1693                         d = reg_of_var(iptr->dst, REG_NULL);
1694                         x86_64_emit_ishift(X86_64_SHR, src, iptr);
1695                         break;
1696
1697                 case ICMD_IUSHRCONST: /* ..., value  ==> ..., value >>> constant      */
1698                                       /* val.i = constant                             */
1699
1700                         d = reg_of_var(iptr->dst, REG_NULL);
1701                         x86_64_emit_ishiftconst(X86_64_SHR, src, iptr);
1702                         break;
1703
1704                 case ICMD_LSHL:       /* ..., val1, val2  ==> ..., val1 << val2       */
1705
1706                         d = reg_of_var(iptr->dst, REG_NULL);
1707                         x86_64_emit_lshift(X86_64_SHL, src, iptr);
1708                         break;
1709
1710                 case ICMD_LSHLCONST:  /* ..., value  ==> ..., value << constant       */
1711                                       /* val.i = constant                             */
1712
1713                         d = reg_of_var(iptr->dst, REG_NULL);
1714                         x86_64_emit_lshiftconst(X86_64_SHL, src, iptr);
1715                         break;
1716
1717                 case ICMD_LSHR:       /* ..., val1, val2  ==> ..., val1 >> val2       */
1718
1719                         d = reg_of_var(iptr->dst, REG_NULL);
1720                         x86_64_emit_lshift(X86_64_SAR, src, iptr);
1721                         break;
1722
1723                 case ICMD_LSHRCONST:  /* ..., value  ==> ..., value >> constant       */
1724                                       /* val.i = constant                             */
1725
1726                         d = reg_of_var(iptr->dst, REG_NULL);
1727                         x86_64_emit_lshiftconst(X86_64_SAR, src, iptr);
1728                         break;
1729
1730                 case ICMD_LUSHR:      /* ..., val1, val2  ==> ..., val1 >>> val2      */
1731
1732                         d = reg_of_var(iptr->dst, REG_NULL);
1733                         x86_64_emit_lshift(X86_64_SHR, src, iptr);
1734                         break;
1735
1736                 case ICMD_LUSHRCONST: /* ..., value  ==> ..., value >>> constant      */
1737                                       /* val.l = constant                             */
1738
1739                         d = reg_of_var(iptr->dst, REG_NULL);
1740                         x86_64_emit_lshiftconst(X86_64_SHR, src, iptr);
1741                         break;
1742
1743                 case ICMD_IAND:       /* ..., val1, val2  ==> ..., val1 & val2        */
1744
1745                         d = reg_of_var(iptr->dst, REG_NULL);
1746                         x86_64_emit_ialu(X86_64_AND, src, iptr);
1747                         break;
1748
1749                 case ICMD_IANDCONST:  /* ..., value  ==> ..., value & constant        */
1750                                       /* val.i = constant                             */
1751
1752                         d = reg_of_var(iptr->dst, REG_NULL);
1753                         x86_64_emit_ialuconst(X86_64_AND, src, iptr);
1754                         break;
1755
1756                 case ICMD_LAND:       /* ..., val1, val2  ==> ..., val1 & val2        */
1757
1758                         d = reg_of_var(iptr->dst, REG_NULL);
1759                         x86_64_emit_lalu(X86_64_AND, src, iptr);
1760                         break;
1761
1762                 case ICMD_LANDCONST:  /* ..., value  ==> ..., value & constant        */
1763                                       /* val.l = constant                             */
1764
1765                         d = reg_of_var(iptr->dst, REG_NULL);
1766                         x86_64_emit_laluconst(X86_64_AND, src, iptr);
1767                         break;
1768
1769                 case ICMD_IOR:        /* ..., val1, val2  ==> ..., val1 | val2        */
1770
1771                         d = reg_of_var(iptr->dst, REG_NULL);
1772                         x86_64_emit_ialu(X86_64_OR, src, iptr);
1773                         break;
1774
1775                 case ICMD_IORCONST:   /* ..., value  ==> ..., value | constant        */
1776                                       /* val.i = constant                             */
1777
1778                         d = reg_of_var(iptr->dst, REG_NULL);
1779                         x86_64_emit_ialuconst(X86_64_OR, src, iptr);
1780                         break;
1781
1782                 case ICMD_LOR:        /* ..., val1, val2  ==> ..., val1 | val2        */
1783
1784                         d = reg_of_var(iptr->dst, REG_NULL);
1785                         x86_64_emit_lalu(X86_64_OR, src, iptr);
1786                         break;
1787
1788                 case ICMD_LORCONST:   /* ..., value  ==> ..., value | constant        */
1789                                       /* val.l = constant                             */
1790
1791                         d = reg_of_var(iptr->dst, REG_NULL);
1792                         x86_64_emit_laluconst(X86_64_OR, src, iptr);
1793                         break;
1794
1795                 case ICMD_IXOR:       /* ..., val1, val2  ==> ..., val1 ^ val2        */
1796
1797                         d = reg_of_var(iptr->dst, REG_NULL);
1798                         x86_64_emit_ialu(X86_64_XOR, src, iptr);
1799                         break;
1800
1801                 case ICMD_IXORCONST:  /* ..., value  ==> ..., value ^ constant        */
1802                                       /* val.i = constant                             */
1803
1804                         d = reg_of_var(iptr->dst, REG_NULL);
1805                         x86_64_emit_ialuconst(X86_64_XOR, src, iptr);
1806                         break;
1807
1808                 case ICMD_LXOR:       /* ..., val1, val2  ==> ..., val1 ^ val2        */
1809
1810                         d = reg_of_var(iptr->dst, REG_NULL);
1811                         x86_64_emit_lalu(X86_64_XOR, src, iptr);
1812                         break;
1813
1814                 case ICMD_LXORCONST:  /* ..., value  ==> ..., value ^ constant        */
1815                                       /* val.l = constant                             */
1816
1817                         d = reg_of_var(iptr->dst, REG_NULL);
1818                         x86_64_emit_laluconst(X86_64_XOR, src, iptr);
1819                         break;
1820
1821
1822                 case ICMD_IINC:       /* ..., value  ==> ..., value + constant        */
1823                                       /* op1 = variable, val.i = constant             */
1824
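                        /* add the constant directly to the local, in memory or in its register;
                           +1 and -1 use the shorter inc/dec encodings                           */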
1825                         var = &(locals[iptr->op1][TYPE_INT]);
1826                         d = var->regoff;
1827                         if (var->flags & INMEMORY) {
1828                                 if (iptr->val.i == 1) {
1829                                         x86_64_incl_membase(REG_SP, d * 8);
1830  
1831                                 } else if (iptr->val.i == -1) {
1832                                         x86_64_decl_membase(REG_SP, d * 8);
1833
1834                                 } else {
1835                                         x86_64_alul_imm_membase(X86_64_ADD, iptr->val.i, REG_SP, d * 8);
1836                                 }
1837
1838                         } else {
1839                                 if (iptr->val.i == 1) {
1840                                         x86_64_incl_reg(d);
1841  
1842                                 } else if (iptr->val.i == -1) {
1843                                         x86_64_decl_reg(d);
1844
1845                                 } else {
1846                                         x86_64_alul_imm_reg(X86_64_ADD, iptr->val.i, d);
1847                                 }
1848                         }
1849                         break;
1850
1851
1852                 /* floating operations ************************************************/
1853
1854                 case ICMD_FNEG:       /* ..., value  ==> ..., - value                 */
1855
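                        /* flip the sign bit by xoring with a 0x80000000 mask kept in the data
                           segment; the data segment is emitted in front of the code, so the
                           operand is addressed rip-relative, 9 being the length of the movss  */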
1856                         var_to_reg_flt(s1, src, REG_FTMP1);
1857                         d = reg_of_var(iptr->dst, REG_FTMP3);
1858                         a = dseg_adds4(0x80000000);
1859                         M_FLTMOVE(s1, d);
1860                         x86_64_movss_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, REG_FTMP2);
1861                         x86_64_xorps_reg_reg(REG_FTMP2, d);
1862                         store_reg_to_var_flt(iptr->dst, d);
1863                         break;
1864
1865                 case ICMD_DNEG:       /* ..., value  ==> ..., - value                 */
1866
1867                         var_to_reg_flt(s1, src, REG_FTMP1);
1868                         d = reg_of_var(iptr->dst, REG_FTMP3);
1869                         a = dseg_adds8(0x8000000000000000);
1870                         M_FLTMOVE(s1, d);
1871                         x86_64_movd_membase_reg(RIP, -(((s8) mcodeptr + 9) - (s8) mcodebase) + a, REG_FTMP2);
1872                         x86_64_xorpd_reg_reg(REG_FTMP2, d);
1873                         store_reg_to_var_flt(iptr->dst, d);
1874                         break;
1875
1876                 case ICMD_FADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
1877
1878                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1879                         var_to_reg_flt(s2, src, REG_FTMP2);
1880                         d = reg_of_var(iptr->dst, REG_FTMP3);
1881                         if (s1 == d) {
1882                                 x86_64_addss_reg_reg(s2, d);
1883                         } else if (s2 == d) {
1884                                 x86_64_addss_reg_reg(s1, d);
1885                         } else {
1886                                 M_FLTMOVE(s1, d);
1887                                 x86_64_addss_reg_reg(s2, d);
1888                         }
1889                         store_reg_to_var_flt(iptr->dst, d);
1890                         break;
1891
1892                 case ICMD_DADD:       /* ..., val1, val2  ==> ..., val1 + val2        */
1893
1894                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1895                         var_to_reg_flt(s2, src, REG_FTMP2);
1896                         d = reg_of_var(iptr->dst, REG_FTMP3);
1897                         if (s1 == d) {
1898                                 x86_64_addsd_reg_reg(s2, d);
1899                         } else if (s2 == d) {
1900                                 x86_64_addsd_reg_reg(s1, d);
1901                         } else {
1902                                 M_FLTMOVE(s1, d);
1903                                 x86_64_addsd_reg_reg(s2, d);
1904                         }
1905                         store_reg_to_var_flt(iptr->dst, d);
1906                         break;
1907
1908                 case ICMD_FSUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
1909
1910                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1911                         var_to_reg_flt(s2, src, REG_FTMP2);
1912                         d = reg_of_var(iptr->dst, REG_FTMP3);
1913                         if (s2 == d) {
1914                                 M_FLTMOVE(s2, REG_FTMP2);
1915                                 s2 = REG_FTMP2;
1916                         }
1917                         M_FLTMOVE(s1, d);
1918                         x86_64_subss_reg_reg(s2, d);
1919                         store_reg_to_var_flt(iptr->dst, d);
1920                         break;
1921
1922                 case ICMD_DSUB:       /* ..., val1, val2  ==> ..., val1 - val2        */
1923
1924                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1925                         var_to_reg_flt(s2, src, REG_FTMP2);
1926                         d = reg_of_var(iptr->dst, REG_FTMP3);
1927                         if (s2 == d) {
1928                                 M_FLTMOVE(s2, REG_FTMP2);
1929                                 s2 = REG_FTMP2;
1930                         }
1931                         M_FLTMOVE(s1, d);
1932                         x86_64_subsd_reg_reg(s2, d);
1933                         store_reg_to_var_flt(iptr->dst, d);
1934                         break;
1935
1936                 case ICMD_FMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1937
1938                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1939                         var_to_reg_flt(s2, src, REG_FTMP2);
1940                         d = reg_of_var(iptr->dst, REG_FTMP3);
1941                         if (s1 == d) {
1942                                 x86_64_mulss_reg_reg(s2, d);
1943                         } else if (s2 == d) {
1944                                 x86_64_mulss_reg_reg(s1, d);
1945                         } else {
1946                                 M_FLTMOVE(s1, d);
1947                                 x86_64_mulss_reg_reg(s2, d);
1948                         }
1949                         store_reg_to_var_flt(iptr->dst, d);
1950                         break;
1951
1952                 case ICMD_DMUL:       /* ..., val1, val2  ==> ..., val1 * val2        */
1953
1954                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1955                         var_to_reg_flt(s2, src, REG_FTMP2);
1956                         d = reg_of_var(iptr->dst, REG_FTMP3);
1957                         if (s1 == d) {
1958                                 x86_64_mulsd_reg_reg(s2, d);
1959                         } else if (s2 == d) {
1960                                 x86_64_mulsd_reg_reg(s1, d);
1961                         } else {
1962                                 M_FLTMOVE(s1, d);
1963                                 x86_64_mulsd_reg_reg(s2, d);
1964                         }
1965                         store_reg_to_var_flt(iptr->dst, d);
1966                         break;
1967
1968                 case ICMD_FDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1969
1970                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1971                         var_to_reg_flt(s2, src, REG_FTMP2);
1972                         d = reg_of_var(iptr->dst, REG_FTMP3);
1973                         if (s2 == d) {
1974                                 M_FLTMOVE(s2, REG_FTMP2);
1975                                 s2 = REG_FTMP2;
1976                         }
1977                         M_FLTMOVE(s1, d);
1978                         x86_64_divss_reg_reg(s2, d);
1979                         store_reg_to_var_flt(iptr->dst, d);
1980                         break;
1981
1982                 case ICMD_DDIV:       /* ..., val1, val2  ==> ..., val1 / val2        */
1983
1984                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
1985                         var_to_reg_flt(s2, src, REG_FTMP2);
1986                         d = reg_of_var(iptr->dst, REG_FTMP3);
1987                         if (s2 == d) {
1988                                 M_FLTMOVE(s2, REG_FTMP2);
1989                                 s2 = REG_FTMP2;
1990                         }
1991                         M_FLTMOVE(s1, d);
1992                         x86_64_divsd_reg_reg(s2, d);
1993                         store_reg_to_var_flt(iptr->dst, d);
1994                         break;
1995
1996                 case ICMD_I2F:       /* ..., value  ==> ..., (float) value            */
1997
1998                         var_to_reg_int(s1, src, REG_ITMP1);
1999                         d = reg_of_var(iptr->dst, REG_FTMP1);
2000                         x86_64_cvtsi2ss_reg_reg(s1, d);
2001                         store_reg_to_var_flt(iptr->dst, d);
2002                         break;
2003
2004                 case ICMD_I2D:       /* ..., value  ==> ..., (double) value           */
2005
2006                         var_to_reg_int(s1, src, REG_ITMP1);
2007                         d = reg_of_var(iptr->dst, REG_FTMP1);
2008                         x86_64_cvtsi2sd_reg_reg(s1, d);
2009                         store_reg_to_var_flt(iptr->dst, d);
2010                         break;
2011
2012                 case ICMD_L2F:       /* ..., value  ==> ..., (float) value            */
2013
2014                         var_to_reg_int(s1, src, REG_ITMP1);
2015                         d = reg_of_var(iptr->dst, REG_FTMP1);
2016                         x86_64_cvtsi2ssq_reg_reg(s1, d);
2017                         store_reg_to_var_flt(iptr->dst, d);
2018                         break;
2019                         
2020                 case ICMD_L2D:       /* ..., value  ==> ..., (double) value           */
2021
2022                         var_to_reg_int(s1, src, REG_ITMP1);
2023                         d = reg_of_var(iptr->dst, REG_FTMP1);
2024                         x86_64_cvtsi2sdq_reg_reg(s1, d);
2025                         store_reg_to_var_flt(iptr->dst, d);
2026                         break;
2027                         
2028                 case ICMD_F2I:       /* ..., value  ==> ..., (int) value              */
2029
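                        /* cvttss2si delivers 0x80000000 for NaN and out-of-range values; in
                           that case fall into a call of asm_builtin_f2i, which produces the
                           result required by the JVM spec, otherwise jump over it (a is the
                           byte length of the skipped code)                                  */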
2030                         var_to_reg_flt(s1, src, REG_FTMP1);
2031                         d = reg_of_var(iptr->dst, REG_ITMP1);
2032                         x86_64_cvttss2si_reg_reg(s1, d);
2033                         x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, d);    /* corner cases */
2034                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2035                         x86_64_jcc(X86_64_CC_NE, a);
2036                         M_FLTMOVE(s1, REG_FTMP1);
2037                         x86_64_mov_imm_reg((s8) asm_builtin_f2i, REG_ITMP2);
2038                         x86_64_call_reg(REG_ITMP2);
2039                         M_INTMOVE(REG_RESULT, d);
2040                         store_reg_to_var_int(iptr->dst, d);
2041                         break;
2042
2043                 case ICMD_D2I:       /* ..., value  ==> ..., (int) value              */
2044
2045                         var_to_reg_flt(s1, src, REG_FTMP1);
2046                         d = reg_of_var(iptr->dst, REG_ITMP1);
2047                         x86_64_cvttsd2si_reg_reg(s1, d);
2048                         x86_64_alul_imm_reg(X86_64_CMP, 0x80000000, d);    /* corner cases */
2049                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2050                         x86_64_jcc(X86_64_CC_NE, a);
2051                         M_FLTMOVE(s1, REG_FTMP1);
2052                         x86_64_mov_imm_reg((s8) asm_builtin_d2i, REG_ITMP2);
2053                         x86_64_call_reg(REG_ITMP2);
2054                         M_INTMOVE(REG_RESULT, d);
2055                         store_reg_to_var_int(iptr->dst, d);
2056                         break;
2057
2058                 case ICMD_F2L:       /* ..., value  ==> ..., (long) value             */
2059
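                        /* as in ICMD_F2I, but the 64-bit conversion delivers
                           0x8000000000000000 for the corner cases            */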
2060                         var_to_reg_flt(s1, src, REG_FTMP1);
2061                         d = reg_of_var(iptr->dst, REG_ITMP1);
2062                         x86_64_cvttss2siq_reg_reg(s1, d);
2063                         x86_64_mov_imm_reg(0x8000000000000000, REG_ITMP2);
2064                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, d);     /* corner cases */
2065                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2066                         x86_64_jcc(X86_64_CC_NE, a);
2067                         M_FLTMOVE(s1, REG_FTMP1);
2068                         x86_64_mov_imm_reg((s8) asm_builtin_f2l, REG_ITMP2);
2069                         x86_64_call_reg(REG_ITMP2);
2070                         M_INTMOVE(REG_RESULT, d);
2071                         store_reg_to_var_int(iptr->dst, d);
2072                         break;
2073
2074                 case ICMD_D2L:       /* ..., value  ==> ..., (long) value             */
2075
2076                         var_to_reg_flt(s1, src, REG_FTMP1);
2077                         d = reg_of_var(iptr->dst, REG_ITMP1);
2078                         x86_64_cvttsd2siq_reg_reg(s1, d);
2079                         x86_64_mov_imm_reg(0x8000000000000000, REG_ITMP2);
2080                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, d);     /* corner cases */
2081                         a = ((s1 == REG_FTMP1) ? 0 : 5) + 10 + 3 + ((REG_RESULT == d) ? 0 : 3);
2082                         x86_64_jcc(X86_64_CC_NE, a);
2083                         M_FLTMOVE(s1, REG_FTMP1);
2084                         x86_64_mov_imm_reg((s8) asm_builtin_d2l, REG_ITMP2);
2085                         x86_64_call_reg(REG_ITMP2);
2086                         M_INTMOVE(REG_RESULT, d);
2087                         store_reg_to_var_int(iptr->dst, d);
2088                         break;
2089
2090                 case ICMD_F2D:       /* ..., value  ==> ..., (double) value           */
2091
2092                         var_to_reg_flt(s1, src, REG_FTMP1);
2093                         d = reg_of_var(iptr->dst, REG_FTMP3);
2094                         x86_64_cvtss2sd_reg_reg(s1, d);
2095                         store_reg_to_var_flt(iptr->dst, d);
2096                         break;
2097
2098                 case ICMD_D2F:       /* ..., value  ==> ..., (float) value            */
2099
2100                         var_to_reg_flt(s1, src, REG_FTMP1);
2101                         d = reg_of_var(iptr->dst, REG_FTMP3);
2102                         x86_64_cvtsd2ss_reg_reg(s1, d);
2103                         store_reg_to_var_flt(iptr->dst, d);
2104                         break;
2105
2106                 case ICMD_FCMPL:      /* ..., val1, val2  ==> ..., val1 fcmpl val2    */
2107                                           /* == => 0, < => 1, > => -1 */
2108
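                        /* d starts at 0, +1 and -1 are preloaded, ucomiss sets the flags and
                           the cmovs pick the result; the parity flag marks an unordered (NaN)
                           compare, and FCMPL/FCMPG differ only in which value is selected for
                           that case                                                           */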
2109                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
2110                         var_to_reg_flt(s2, src, REG_FTMP2);
2111                         d = reg_of_var(iptr->dst, REG_ITMP3);
2112                         x86_64_alu_reg_reg(X86_64_XOR, d, d);
2113                         x86_64_mov_imm_reg(1, REG_ITMP1);
2114                         x86_64_mov_imm_reg(-1, REG_ITMP2);
2115                         x86_64_ucomiss_reg_reg(s1, s2);
2116                         x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2117                         x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2118                         x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP2, d);    /* treat unordered as GT */
2119                         store_reg_to_var_int(iptr->dst, d);
2120                         break;
2121
2122                 case ICMD_FCMPG:      /* ..., val1, val2  ==> ..., val1 fcmpg val2    */
2123                                           /* == => 0, < => 1, > => -1 */
2124
2125                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
2126                         var_to_reg_flt(s2, src, REG_FTMP2);
2127                         d = reg_of_var(iptr->dst, REG_ITMP3);
2128                         x86_64_alu_reg_reg(X86_64_XOR, d, d);
2129                         x86_64_mov_imm_reg(1, REG_ITMP1);
2130                         x86_64_mov_imm_reg(-1, REG_ITMP2);
2131                         x86_64_ucomiss_reg_reg(s1, s2);
2132                         x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2133                         x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2134                         x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP1, d);    /* treat unordered as LT */
2135                         store_reg_to_var_int(iptr->dst, d);
2136                         break;
2137
2138                 case ICMD_DCMPL:      /* ..., val1, val2  ==> ..., val1 fcmpl val2    */
2139                                           /* == => 0, < => 1, > => -1 */
2140
2141                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
2142                         var_to_reg_flt(s2, src, REG_FTMP2);
2143                         d = reg_of_var(iptr->dst, REG_ITMP3);
2144                         x86_64_alu_reg_reg(X86_64_XOR, d, d);
2145                         x86_64_mov_imm_reg(1, REG_ITMP1);
2146                         x86_64_mov_imm_reg(-1, REG_ITMP2);
2147                         x86_64_ucomisd_reg_reg(s1, s2);
2148                         x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2149                         x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2150                         x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP2, d);    /* treat unordered as GT */
2151                         store_reg_to_var_int(iptr->dst, d);
2152                         break;
2153
2154                 case ICMD_DCMPG:      /* ..., val1, val2  ==> ..., val1 fcmpg val2    */
2155                                           /* == => 0, < => 1, > => -1 */
2156
2157                         var_to_reg_flt(s1, src->prev, REG_FTMP1);
2158                         var_to_reg_flt(s2, src, REG_FTMP2);
2159                         d = reg_of_var(iptr->dst, REG_ITMP3);
2160                         x86_64_alu_reg_reg(X86_64_XOR, d, d);
2161                         x86_64_mov_imm_reg(1, REG_ITMP1);
2162                         x86_64_mov_imm_reg(-1, REG_ITMP2);
2163                         x86_64_ucomisd_reg_reg(s1, s2);
2164                         x86_64_cmovcc_reg_reg(X86_64_CC_B, REG_ITMP1, d);
2165                         x86_64_cmovcc_reg_reg(X86_64_CC_A, REG_ITMP2, d);
2166                         x86_64_cmovcc_reg_reg(X86_64_CC_P, REG_ITMP1, d);    /* treat unordered as LT */
2167                         store_reg_to_var_int(iptr->dst, d);
2168                         break;
2169
2170
2171                 /* memory operations **************************************************/
2172
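/* compare the index (s2) against the array length stored in the arrayheader of s1;
   the unsigned "above or equal" branch also catches negative indices, and its
   displacement is left 0 here and patched later through codegen_addxboundrefs     */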
2173 #define gen_bound_check \
2174     if (checkbounds) { \
2175         x86_64_alul_membase_reg(X86_64_CMP, s1, OFFSET(java_arrayheader, size), s2); \
2176         x86_64_jcc(X86_64_CC_AE, 0); \
2177         codegen_addxboundrefs(mcodeptr, s2); \
2178     }
2179
2180                 case ICMD_ARRAYLENGTH: /* ..., arrayref  ==> ..., (int) length        */
2181
2182                         var_to_reg_int(s1, src, REG_ITMP1);
2183                         d = reg_of_var(iptr->dst, REG_ITMP3);
2184                         gen_nullptr_check(s1);
2185                         x86_64_movl_membase_reg(s1, OFFSET(java_arrayheader, size), d);
2186                         store_reg_to_var_int(iptr->dst, d);
2187                         break;
2188
2189                 case ICMD_AALOAD:     /* ..., arrayref, index  ==> ..., value         */
2190
2191                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2192                         var_to_reg_int(s2, src, REG_ITMP2);
2193                         d = reg_of_var(iptr->dst, REG_ITMP3);
2194                         if (iptr->op1 == 0) {
2195                                 gen_nullptr_check(s1);
2196                                 gen_bound_check;
2197                         }
2198                         x86_64_mov_memindex_reg(OFFSET(java_objectarray, data[0]), s1, s2, 3, d);
2199                         store_reg_to_var_int(iptr->dst, d);
2200                         break;
2201
2202                 case ICMD_LALOAD:     /* ..., arrayref, index  ==> ..., value         */
2203
2204                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2205                         var_to_reg_int(s2, src, REG_ITMP2);
2206                         d = reg_of_var(iptr->dst, REG_ITMP3);
2207                         if (iptr->op1 == 0) {
2208                                 gen_nullptr_check(s1);
2209                                 gen_bound_check;
2210                         }
2211                         x86_64_mov_memindex_reg(OFFSET(java_longarray, data[0]), s1, s2, 3, d);
2212                         store_reg_to_var_int(iptr->dst, d);
2213                         break;
2214
2215                 case ICMD_IALOAD:     /* ..., arrayref, index  ==> ..., value         */
2216
2217                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2218                         var_to_reg_int(s2, src, REG_ITMP2);
2219                         d = reg_of_var(iptr->dst, REG_ITMP3);
2220                         if (iptr->op1 == 0) {
2221                                 gen_nullptr_check(s1);
2222                                 gen_bound_check;
2223                         }
2224                         x86_64_movl_memindex_reg(OFFSET(java_intarray, data[0]), s1, s2, 2, d);
2225                         store_reg_to_var_int(iptr->dst, d);
2226                         break;
2227
2228                 case ICMD_FALOAD:     /* ..., arrayref, index  ==> ..., value         */
2229
2230                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2231                         var_to_reg_int(s2, src, REG_ITMP2);
2232                         d = reg_of_var(iptr->dst, REG_FTMP3);
2233                         if (iptr->op1 == 0) {
2234                                 gen_nullptr_check(s1);
2235                                 gen_bound_check;
2236                         }
2237                         x86_64_movss_memindex_reg(OFFSET(java_floatarray, data[0]), s1, s2, 2, d);
2238                         store_reg_to_var_flt(iptr->dst, d);
2239                         break;
2240
2241                 case ICMD_DALOAD:     /* ..., arrayref, index  ==> ..., value         */
2242
2243                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2244                         var_to_reg_int(s2, src, REG_ITMP2);
2245                         d = reg_of_var(iptr->dst, REG_FTMP3);
2246                         if (iptr->op1 == 0) {
2247                                 gen_nullptr_check(s1);
2248                                 gen_bound_check;
2249                         }
2250                         x86_64_movsd_memindex_reg(OFFSET(java_doublearray, data[0]), s1, s2, 3, d);
2251                         store_reg_to_var_flt(iptr->dst, d);
2252                         break;
2253
2254                 case ICMD_CALOAD:     /* ..., arrayref, index  ==> ..., value         */
2255
2256                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2257                         var_to_reg_int(s2, src, REG_ITMP2);
2258                         d = reg_of_var(iptr->dst, REG_ITMP3);
2259                         if (iptr->op1 == 0) {
2260                                 gen_nullptr_check(s1);
2261                                 gen_bound_check;
2262                         }
2263                         x86_64_movzwq_memindex_reg(OFFSET(java_chararray, data[0]), s1, s2, 1, d);
2264                         store_reg_to_var_int(iptr->dst, d);
2265                         break;                  
2266
2267                 case ICMD_SALOAD:     /* ..., arrayref, index  ==> ..., value         */
2268
2269                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2270                         var_to_reg_int(s2, src, REG_ITMP2);
2271                         d = reg_of_var(iptr->dst, REG_ITMP3);
2272                         if (iptr->op1 == 0) {
2273                                 gen_nullptr_check(s1);
2274                                 gen_bound_check;
2275                         }
2276                         x86_64_movswq_memindex_reg(OFFSET(java_shortarray, data[0]), s1, s2, 1, d);
2277                         store_reg_to_var_int(iptr->dst, d);
2278                         break;
2279
2280                 case ICMD_BALOAD:     /* ..., arrayref, index  ==> ..., value         */
2281
2282                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2283                         var_to_reg_int(s2, src, REG_ITMP2);
2284                         d = reg_of_var(iptr->dst, REG_ITMP3);
2285                         if (iptr->op1 == 0) {
2286                                 gen_nullptr_check(s1);
2287                                 gen_bound_check;
2288                         }
2289                         x86_64_movsbq_memindex_reg(OFFSET(java_bytearray, data[0]), s1, s2, 0, d);
2290                         store_reg_to_var_int(iptr->dst, d);
2291                         break;
2292
2293
2294                 case ICMD_AASTORE:    /* ..., arrayref, index, value  ==> ...         */
2295
2296                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2297                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2298                         if (iptr->op1 == 0) {
2299                                 gen_nullptr_check(s1);
2300                                 gen_bound_check;
2301                         }
2302                         var_to_reg_int(s3, src, REG_ITMP3);
2303                         x86_64_mov_reg_memindex(s3, OFFSET(java_objectarray, data[0]), s1, s2, 3);
2304                         break;
2305
2306                 case ICMD_LASTORE:    /* ..., arrayref, index, value  ==> ...         */
2307
2308                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2309                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2310                         if (iptr->op1 == 0) {
2311                                 gen_nullptr_check(s1);
2312                                 gen_bound_check;
2313                         }
2314                         var_to_reg_int(s3, src, REG_ITMP3);
2315                         x86_64_mov_reg_memindex(s3, OFFSET(java_longarray, data[0]), s1, s2, 3);
2316                         break;
2317
2318                 case ICMD_IASTORE:    /* ..., arrayref, index, value  ==> ...         */
2319
2320                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2321                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2322                         if (iptr->op1 == 0) {
2323                                 gen_nullptr_check(s1);
2324                                 gen_bound_check;
2325                         }
2326                         var_to_reg_int(s3, src, REG_ITMP3);
2327                         x86_64_movl_reg_memindex(s3, OFFSET(java_intarray, data[0]), s1, s2, 2);
2328                         break;
2329
2330                 case ICMD_FASTORE:    /* ..., arrayref, index, value  ==> ...         */
2331
2332                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2333                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2334                         if (iptr->op1 == 0) {
2335                                 gen_nullptr_check(s1);
2336                                 gen_bound_check;
2337                         }
2338                         var_to_reg_flt(s3, src, REG_FTMP3);
2339                         x86_64_movss_reg_memindex(s3, OFFSET(java_floatarray, data[0]), s1, s2, 2);
2340                         break;
2341
2342                 case ICMD_DASTORE:    /* ..., arrayref, index, value  ==> ...         */
2343
2344                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2345                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2346                         if (iptr->op1 == 0) {
2347                                 gen_nullptr_check(s1);
2348                                 gen_bound_check;
2349                         }
2350                         var_to_reg_flt(s3, src, REG_FTMP3);
2351                         x86_64_movsd_reg_memindex(s3, OFFSET(java_doublearray, data[0]), s1, s2, 3);
2352                         break;
2353
2354                 case ICMD_CASTORE:    /* ..., arrayref, index, value  ==> ...         */
2355
2356                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2357                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2358                         if (iptr->op1 == 0) {
2359                                 gen_nullptr_check(s1);
2360                                 gen_bound_check;
2361                         }
2362                         var_to_reg_int(s3, src, REG_ITMP3);
2363                         x86_64_movw_reg_memindex(s3, OFFSET(java_chararray, data[0]), s1, s2, 1);
2364                         break;
2365
2366                 case ICMD_SASTORE:    /* ..., arrayref, index, value  ==> ...         */
2367
2368                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2369                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2370                         if (iptr->op1 == 0) {
2371                                 gen_nullptr_check(s1);
2372                                 gen_bound_check;
2373                         }
2374                         var_to_reg_int(s3, src, REG_ITMP3);
2375                         x86_64_movw_reg_memindex(s3, OFFSET(java_shortarray, data[0]), s1, s2, 1);
2376                         break;
2377
2378                 case ICMD_BASTORE:    /* ..., arrayref, index, value  ==> ...         */
2379
2380                         var_to_reg_int(s1, src->prev->prev, REG_ITMP1);
2381                         var_to_reg_int(s2, src->prev, REG_ITMP2);
2382                         if (iptr->op1 == 0) {
2383                                 gen_nullptr_check(s1);
2384                                 gen_bound_check;
2385                         }
2386                         var_to_reg_int(s3, src, REG_ITMP3);
2387                         x86_64_movb_reg_memindex(s3, OFFSET(java_bytearray, data[0]), s1, s2, 0);
2388                         break;
2389
2390
2391                 case ICMD_PUTSTATIC:  /* ..., value  ==> ...                          */
2392                                       /* op1 = type, val.a = fieldinfo pointer       */
2393
2394                         /* if class isn't yet initialized, do it */
2395                         if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2396                                 /* call helper function which patches this code */
2397                                 x86_64_mov_imm_reg((s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2398                                 x86_64_mov_imm_reg((s8) asm_check_clinit, REG_ITMP2);
2399                                 x86_64_call_reg(REG_ITMP2);
2400                         }
2401
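                             /* load the address of the static field from the data        */
                             /* segment with a RIP-relative mov: the data segment is      */
                             /* emitted in front of the method code, so the displacement  */
                             /* is the (negative) dseg offset a minus the distance from   */
                             /* mcodebase to the end of this 7 byte instruction           */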
2402                         a = dseg_addaddress(&(((fieldinfo *) iptr->val.a)->value));
2403 /*                      x86_64_mov_imm_reg(0, REG_ITMP2); */
2404 /*                      dseg_adddata(mcodeptr); */
2405 /*                      x86_64_mov_membase_reg(REG_ITMP2, a, REG_ITMP2); */
2406                         x86_64_mov_membase_reg(RIP, -(((s8) mcodeptr + 7) - (s8) mcodebase) + a, REG_ITMP2);
2407                         switch (iptr->op1) {
2408                         case TYPE_INT:
2409                                 var_to_reg_int(s2, src, REG_ITMP1);
2410                                 x86_64_movl_reg_membase(s2, REG_ITMP2, 0);
2411                                 break;
2412                         case TYPE_LNG:
2413                         case TYPE_ADR:
2414                                 var_to_reg_int(s2, src, REG_ITMP1);
2415                                 x86_64_mov_reg_membase(s2, REG_ITMP2, 0);
2416                                 break;
2417                         case TYPE_FLT:
2418                                 var_to_reg_flt(s2, src, REG_FTMP1);
2419                                 x86_64_movss_reg_membase(s2, REG_ITMP2, 0);
2420                                 break;
2421                         case TYPE_DBL:
2422                                 var_to_reg_flt(s2, src, REG_FTMP1);
2423                                 x86_64_movsd_reg_membase(s2, REG_ITMP2, 0);
2424                                 break;
2425                         default: panic("internal error");
2426                         }
2427                         break;
2428
2429                 case ICMD_GETSTATIC:  /* ...  ==> ..., value                          */
2430                                       /* op1 = type, val.a = fieldinfo pointer       */
2431
2432                         /* if class isn't yet initialized, do it */
2433                         if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2434                                 /* call helper function which patches this code */
2435                                 x86_64_mov_imm_reg((s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2436                                 x86_64_mov_imm_reg((s8) asm_check_clinit, REG_ITMP2);
2437                                 x86_64_call_reg(REG_ITMP2);
2438                         }
2439
2440                         a = dseg_addaddress(&(((fieldinfo *) iptr->val.a)->value));
2441 /*                      x86_64_mov_imm_reg(0, REG_ITMP2); */
2442 /*                      dseg_adddata(mcodeptr); */
2443 /*                      x86_64_mov_membase_reg(REG_ITMP2, a, REG_ITMP2); */
2444                         x86_64_mov_membase_reg(RIP, -(((s8) mcodeptr + 7) - (s8) mcodebase) + a, REG_ITMP2);
2445                         switch (iptr->op1) {
2446                         case TYPE_INT:
2447                                 d = reg_of_var(iptr->dst, REG_ITMP1);
2448                                 x86_64_movl_membase_reg(REG_ITMP2, 0, d);
2449                                 store_reg_to_var_int(iptr->dst, d);
2450                                 break;
2451                         case TYPE_LNG:
2452                         case TYPE_ADR:
2453                                 d = reg_of_var(iptr->dst, REG_ITMP1);
2454                                 x86_64_mov_membase_reg(REG_ITMP2, 0, d);
2455                                 store_reg_to_var_int(iptr->dst, d);
2456                                 break;
2457                         case TYPE_FLT:
2458                                 d = reg_of_var(iptr->dst, REG_ITMP1);
2459                                 x86_64_movss_membase_reg(REG_ITMP2, 0, d);
2460                                 store_reg_to_var_flt(iptr->dst, d);
2461                                 break;
2462                         case TYPE_DBL:                          
2463                                 d = reg_of_var(iptr->dst, REG_ITMP1);
2464                                 x86_64_movsd_membase_reg(REG_ITMP2, 0, d);
2465                                 store_reg_to_var_flt(iptr->dst, d);
2466                                 break;
2467                         default: panic("internal error");
2468                         }
2469                         break;
2470
2471                 case ICMD_PUTFIELD:   /* ..., value  ==> ...                          */
2472                                       /* op1 = type, val.a = fieldinfo pointer        */
2473
2474                         /* if class isn't yet initialized, do it */
2475                         if (!((fieldinfo *) iptr->val.a)->class->initialized) {
2476                                 /* call helper function which patches this code */
2477                                 x86_64_mov_imm_reg((s8) ((fieldinfo *) iptr->val.a)->class, REG_ITMP1);
2478                                 x86_64_mov_imm_reg((s8) asm_check_clinit, REG_ITMP2);
2479                                 x86_64_call_reg(REG_ITMP2);
2480                         }
2481
2482                         a = ((fieldinfo *)(iptr->val.a))->offset;
2483                         var_to_reg_int(s1, src->prev, REG_ITMP1);
2484                         switch (iptr->op1) {
2485                                 case TYPE_INT:
2486                                         var_to_reg_int(s2, src, REG_ITMP2);
2487                                         gen_nullptr_check(s1);
2488                                         x86_64_movl_reg_membase(s2, s1, a);
2489                                         break;
2490                                 case TYPE_LNG:
2491                                 case TYPE_ADR:
2492                                         var_to_reg_int(s2, src, REG_ITMP2);
2493                                         gen_nullptr_check(s1);
2494                                         x86_64_mov_reg_membase(s2, s1, a);
2495                                         break;
2496                                 case TYPE_FLT:
2497                                         var_to_reg_flt(s2, src, REG_FTMP2);
2498                                         gen_nullptr_check(s1);
2499                                         x86_64_movss_reg_membase(s2, s1, a);
2500                                         break;
2501                                 case TYPE_DBL:
2502                                         var_to_reg_flt(s2, src, REG_FTMP2);
2503                                         gen_nullptr_check(s1);
2504                                         x86_64_movsd_reg_membase(s2, s1, a);
2505                                         break;
2506                                 default: panic("internal error");
2507                                 }
2508                         break;
2509
2510                 case ICMD_GETFIELD:   /* ...  ==> ..., value                          */
2511                                       /* op1 = type, val.a = fieldinfo pointer        */
2512
2513                         a = ((fieldinfo *)(iptr->val.a))->offset;
2514                         var_to_reg_int(s1, src, REG_ITMP1);
2515                         switch (iptr->op1) {
2516                                 case TYPE_INT:
2517                                         d = reg_of_var(iptr->dst, REG_ITMP1);
2518                                         gen_nullptr_check(s1);
2519                                         x86_64_movl_membase_reg(s1, a, d);
2520                                         store_reg_to_var_int(iptr->dst, d);
2521                                         break;
2522                                 case TYPE_LNG:
2523                                 case TYPE_ADR:
2524                                         d = reg_of_var(iptr->dst, REG_ITMP1);
2525                                         gen_nullptr_check(s1);
2526                                         x86_64_mov_membase_reg(s1, a, d);
2527                                         store_reg_to_var_int(iptr->dst, d);
2528                                         break;
2529                                 case TYPE_FLT:
2530                                         d = reg_of_var(iptr->dst, REG_FTMP1);
2531                                         gen_nullptr_check(s1);
2532                                         x86_64_movss_membase_reg(s1, a, d);
2533                                         store_reg_to_var_flt(iptr->dst, d);
2534                                         break;
2535                                 case TYPE_DBL:                          
2536                                         d = reg_of_var(iptr->dst, REG_FTMP1);
2537                                         gen_nullptr_check(s1);
2538                                         x86_64_movsd_membase_reg(s1, a, d);
2539                                         store_reg_to_var_flt(iptr->dst, d);
2540                                         break;
2541                                 default: panic("internal error");
2542                                 }
2543                         break;
2544
2545
2546                 /* branch operations **************************************************/
2547
2548 /*  #define ALIGNCODENOP {if((int)((long)mcodeptr&7)){M_NOP;}} */
2549 #define ALIGNCODENOP do {} while (0)
2550
2551                 case ICMD_ATHROW:       /* ..., objectref ==> ... (, objectref)       */
2552
2553                         var_to_reg_int(s1, src, REG_ITMP1);
2554                         M_INTMOVE(s1, REG_ITMP1_XPTR);
2555
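                             /* the call targets the next instruction, so it only pushes  */
                             /* the current PC; the pop moves it into REG_ITMP2_XPC as    */
                             /* the exception PC expected by asm_handle_exception         */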
2556                         x86_64_call_imm(0); /* passing exception pointer                  */
2557                         x86_64_pop_reg(REG_ITMP2_XPC);
2558
2559                         x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
2560                         x86_64_jmp_reg(REG_ITMP3);
2561                         ALIGNCODENOP;
2562                         break;
2563
2564                 case ICMD_GOTO:         /* ... ==> ...                                */
2565                                         /* op1 = target JavaVM pc                     */
2566
2567                         x86_64_jmp_imm(0);
2568                         codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2569                         ALIGNCODENOP;
2570                         break;
2571
2572                 case ICMD_JSR:          /* ... ==> ...                                */
2573                                         /* op1 = target JavaVM pc                     */
2574
2575                         x86_64_call_imm(0);
2576                         codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2577                         break;
2578                         
2579                 case ICMD_RET:          /* ... ==> ...                                */
2580                                         /* op1 = local variable                       */
2581
2582                         var = &(locals[iptr->op1][TYPE_ADR]);
2583                         var_to_reg_int(s1, var, REG_ITMP1);
2584                         x86_64_jmp_reg(s1);
2585                         break;
2586
2587                 case ICMD_IFNULL:       /* ..., value ==> ...                         */
2588                                         /* op1 = target JavaVM pc                     */
2589
2590                         if (src->flags & INMEMORY) {
2591                                 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
2592
2593                         } else {
2594                                 x86_64_test_reg_reg(src->regoff, src->regoff);
2595                         }
2596                         x86_64_jcc(X86_64_CC_E, 0);
2597                         codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2598                         break;
2599
2600                 case ICMD_IFNONNULL:    /* ..., value ==> ...                         */
2601                                         /* op1 = target JavaVM pc                     */
2602
2603                         if (src->flags & INMEMORY) {
2604                                 x86_64_alu_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
2605
2606                         } else {
2607                                 x86_64_test_reg_reg(src->regoff, src->regoff);
2608                         }
2609                         x86_64_jcc(X86_64_CC_NE, 0);
2610                         codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
2611                         break;
2612
2613                 case ICMD_IFEQ:         /* ..., value ==> ...                         */
2614                                         /* op1 = target JavaVM pc, val.i = constant   */
2615
2616                         x86_64_emit_ifcc(X86_64_CC_E, src, iptr);
2617                         break;
2618
2619                 case ICMD_IFLT:         /* ..., value ==> ...                         */
2620                                         /* op1 = target JavaVM pc, val.i = constant   */
2621
2622                         x86_64_emit_ifcc(X86_64_CC_L, src, iptr);
2623                         break;
2624
2625                 case ICMD_IFLE:         /* ..., value ==> ...                         */
2626                                         /* op1 = target JavaVM pc, val.i = constant   */
2627
2628                         x86_64_emit_ifcc(X86_64_CC_LE, src, iptr);
2629                         break;
2630
2631                 case ICMD_IFNE:         /* ..., value ==> ...                         */
2632                                         /* op1 = target JavaVM pc, val.i = constant   */
2633
2634                         x86_64_emit_ifcc(X86_64_CC_NE, src, iptr);
2635                         break;
2636
2637                 case ICMD_IFGT:         /* ..., value ==> ...                         */
2638                                         /* op1 = target JavaVM pc, val.i = constant   */
2639
2640                         x86_64_emit_ifcc(X86_64_CC_G, src, iptr);
2641                         break;
2642
2643                 case ICMD_IFGE:         /* ..., value ==> ...                         */
2644                                         /* op1 = target JavaVM pc, val.i = constant   */
2645
2646                         x86_64_emit_ifcc(X86_64_CC_GE, src, iptr);
2647                         break;
2648
2649                 case ICMD_IF_LEQ:       /* ..., value ==> ...                         */
2650                                         /* op1 = target JavaVM pc, val.l = constant   */
2651
2652                         x86_64_emit_if_lcc(X86_64_CC_E, src, iptr);
2653                         break;
2654
2655                 case ICMD_IF_LLT:       /* ..., value ==> ...                         */
2656                                         /* op1 = target JavaVM pc, val.l = constant   */
2657
2658                         x86_64_emit_if_lcc(X86_64_CC_L, src, iptr);
2659                         break;
2660
2661                 case ICMD_IF_LLE:       /* ..., value ==> ...                         */
2662                                         /* op1 = target JavaVM pc, val.l = constant   */
2663
2664                         x86_64_emit_if_lcc(X86_64_CC_LE, src, iptr);
2665                         break;
2666
2667                 case ICMD_IF_LNE:       /* ..., value ==> ...                         */
2668                                         /* op1 = target JavaVM pc, val.l = constant   */
2669
2670                         x86_64_emit_if_lcc(X86_64_CC_NE, src, iptr);
2671                         break;
2672
2673                 case ICMD_IF_LGT:       /* ..., value ==> ...                         */
2674                                         /* op1 = target JavaVM pc, val.l = constant   */
2675
2676                         x86_64_emit_if_lcc(X86_64_CC_G, src, iptr);
2677                         break;
2678
2679                 case ICMD_IF_LGE:       /* ..., value ==> ...                         */
2680                                         /* op1 = target JavaVM pc, val.l = constant   */
2681
2682                         x86_64_emit_if_lcc(X86_64_CC_GE, src, iptr);
2683                         break;
2684
2685                 case ICMD_IF_ICMPEQ:    /* ..., value, value ==> ...                  */
2686                                         /* op1 = target JavaVM pc                     */
2687
2688                         x86_64_emit_if_icmpcc(X86_64_CC_E, src, iptr);
2689                         break;
2690
2691                 case ICMD_IF_LCMPEQ:    /* ..., value, value ==> ...                  */
2692                 case ICMD_IF_ACMPEQ:    /* op1 = target JavaVM pc                     */
2693
2694                         x86_64_emit_if_lcmpcc(X86_64_CC_E, src, iptr);
2695                         break;
2696
2697                 case ICMD_IF_ICMPNE:    /* ..., value, value ==> ...                  */
2698                                         /* op1 = target JavaVM pc                     */
2699
2700                         x86_64_emit_if_icmpcc(X86_64_CC_NE, src, iptr);
2701                         break;
2702
2703                 case ICMD_IF_LCMPNE:    /* ..., value, value ==> ...                  */
2704                 case ICMD_IF_ACMPNE:    /* op1 = target JavaVM pc                     */
2705
2706                         x86_64_emit_if_lcmpcc(X86_64_CC_NE, src, iptr);
2707                         break;
2708
2709                 case ICMD_IF_ICMPLT:    /* ..., value, value ==> ...                  */
2710                                         /* op1 = target JavaVM pc                     */
2711
2712                         x86_64_emit_if_icmpcc(X86_64_CC_L, src, iptr);
2713                         break;
2714
2715                 case ICMD_IF_LCMPLT:    /* ..., value, value ==> ...                  */
2716                                         /* op1 = target JavaVM pc                     */
2717
2718                         x86_64_emit_if_lcmpcc(X86_64_CC_L, src, iptr);
2719                         break;
2720
2721                 case ICMD_IF_ICMPGT:    /* ..., value, value ==> ...                  */
2722                                         /* op1 = target JavaVM pc                     */
2723
2724                         x86_64_emit_if_icmpcc(X86_64_CC_G, src, iptr);
2725                         break;
2726
2727                 case ICMD_IF_LCMPGT:    /* ..., value, value ==> ...                  */
2728                                         /* op1 = target JavaVM pc                     */
2729
2730                         x86_64_emit_if_lcmpcc(X86_64_CC_G, src, iptr);
2731                         break;
2732
2733                 case ICMD_IF_ICMPLE:    /* ..., value, value ==> ...                  */
2734                                         /* op1 = target JavaVM pc                     */
2735
2736                         x86_64_emit_if_icmpcc(X86_64_CC_LE, src, iptr);
2737                         break;
2738
2739                 case ICMD_IF_LCMPLE:    /* ..., value, value ==> ...                  */
2740                                         /* op1 = target JavaVM pc                     */
2741
2742                         x86_64_emit_if_lcmpcc(X86_64_CC_LE, src, iptr);
2743                         break;
2744
2745                 case ICMD_IF_ICMPGE:    /* ..., value, value ==> ...                  */
2746                                         /* op1 = target JavaVM pc                     */
2747
2748                         x86_64_emit_if_icmpcc(X86_64_CC_GE, src, iptr);
2749                         break;
2750
2751                 case ICMD_IF_LCMPGE:    /* ..., value, value ==> ...                  */
2752                                         /* op1 = target JavaVM pc                     */
2753
2754                         x86_64_emit_if_lcmpcc(X86_64_CC_GE, src, iptr);
2755                         break;
2756
2757                 /* (value xx 0) ? IFxx_ICONST : ELSE_ICONST                           */
2758
2759                 case ICMD_ELSE_ICONST:  /* handled by IFxx_ICONST                     */
2760                         break;
2761
2762                 case ICMD_IFEQ_ICONST:  /* ..., value ==> ..., constant               */
2763                                         /* val.i = constant                           */
2764
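                             /* branchless conditional constant: d is preloaded with the  */
                             /* ELSE constant (if an ELSE_ICONST follows), the IF         */
                             /* constant is held in REG_ITMP2, and a cmov selects it      */
                             /* when the tested condition is true                         */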
2765                         var_to_reg_int(s1, src, REG_ITMP1);
2766                         d = reg_of_var(iptr->dst, REG_ITMP3);
2767                         s3 = iptr->val.i;
2768                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2769                                 if (s1 == d) {
2770                                         M_INTMOVE(s1, REG_ITMP1);
2771                                         s1 = REG_ITMP1;
2772                                 }
2773                                 x86_64_movl_imm_reg(iptr[1].val.i, d);
2774                         }
2775                         x86_64_movl_imm_reg(s3, REG_ITMP2);
2776                         x86_64_testl_reg_reg(s1, s1);
2777                         x86_64_cmovccl_reg_reg(X86_64_CC_E, REG_ITMP2, d);
2778                         store_reg_to_var_int(iptr->dst, d);
2779                         break;
2780
2781                 case ICMD_IFNE_ICONST:  /* ..., value ==> ..., constant               */
2782                                         /* val.i = constant                           */
2783
2784                         var_to_reg_int(s1, src, REG_ITMP1);
2785                         d = reg_of_var(iptr->dst, REG_ITMP3);
2786                         s3 = iptr->val.i;
2787                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2788                                 if (s1 == d) {
2789                                         M_INTMOVE(s1, REG_ITMP1);
2790                                         s1 = REG_ITMP1;
2791                                 }
2792                                 x86_64_movl_imm_reg(iptr[1].val.i, d);
2793                         }
2794                         x86_64_movl_imm_reg(s3, REG_ITMP2);
2795                         x86_64_testl_reg_reg(s1, s1);
2796                         x86_64_cmovccl_reg_reg(X86_64_CC_NE, REG_ITMP2, d);
2797                         store_reg_to_var_int(iptr->dst, d);
2798                         break;
2799
2800                 case ICMD_IFLT_ICONST:  /* ..., value ==> ..., constant               */
2801                                         /* val.i = constant                           */
2802
2803                         var_to_reg_int(s1, src, REG_ITMP1);
2804                         d = reg_of_var(iptr->dst, REG_ITMP3);
2805                         s3 = iptr->val.i;
2806                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2807                                 if (s1 == d) {
2808                                         M_INTMOVE(s1, REG_ITMP1);
2809                                         s1 = REG_ITMP1;
2810                                 }
2811                                 x86_64_movl_imm_reg(iptr[1].val.i, d);
2812                         }
2813                         x86_64_movl_imm_reg(s3, REG_ITMP2);
2814                         x86_64_testl_reg_reg(s1, s1);
2815                         x86_64_cmovccl_reg_reg(X86_64_CC_L, REG_ITMP2, d);
2816                         store_reg_to_var_int(iptr->dst, d);
2817                         break;
2818
2819                 case ICMD_IFGE_ICONST:  /* ..., value ==> ..., constant               */
2820                                         /* val.i = constant                           */
2821
2822                         var_to_reg_int(s1, src, REG_ITMP1);
2823                         d = reg_of_var(iptr->dst, REG_ITMP3);
2824                         s3 = iptr->val.i;
2825                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2826                                 if (s1 == d) {
2827                                         M_INTMOVE(s1, REG_ITMP1);
2828                                         s1 = REG_ITMP1;
2829                                 }
2830                                 x86_64_movl_imm_reg(iptr[1].val.i, d);
2831                         }
2832                         x86_64_movl_imm_reg(s3, REG_ITMP2);
2833                         x86_64_testl_reg_reg(s1, s1);
2834                         x86_64_cmovccl_reg_reg(X86_64_CC_GE, REG_ITMP2, d);
2835                         store_reg_to_var_int(iptr->dst, d);
2836                         break;
2837
2838                 case ICMD_IFGT_ICONST:  /* ..., value ==> ..., constant               */
2839                                         /* val.i = constant                           */
2840
2841                         var_to_reg_int(s1, src, REG_ITMP1);
2842                         d = reg_of_var(iptr->dst, REG_ITMP3);
2843                         s3 = iptr->val.i;
2844                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2845                                 if (s1 == d) {
2846                                         M_INTMOVE(s1, REG_ITMP1);
2847                                         s1 = REG_ITMP1;
2848                                 }
2849                                 x86_64_movl_imm_reg(iptr[1].val.i, d);
2850                         }
2851                         x86_64_movl_imm_reg(s3, REG_ITMP2);
2852                         x86_64_testl_reg_reg(s1, s1);
2853                         x86_64_cmovccl_reg_reg(X86_64_CC_G, REG_ITMP2, d);
2854                         store_reg_to_var_int(iptr->dst, d);
2855                         break;
2856
2857                 case ICMD_IFLE_ICONST:  /* ..., value ==> ..., constant               */
2858                                         /* val.i = constant                           */
2859
2860                         var_to_reg_int(s1, src, REG_ITMP1);
2861                         d = reg_of_var(iptr->dst, REG_ITMP3);
2862                         s3 = iptr->val.i;
2863                         if (iptr[1].opc == ICMD_ELSE_ICONST) {
2864                                 if (s1 == d) {
2865                                         M_INTMOVE(s1, REG_ITMP1);
2866                                         s1 = REG_ITMP1;
2867                                 }
2868                                 x86_64_movl_imm_reg(iptr[1].val.i, d);
2869                         }
2870                         x86_64_movl_imm_reg(s3, REG_ITMP2);
2871                         x86_64_testl_reg_reg(s1, s1);
2872                         x86_64_cmovccl_reg_reg(X86_64_CC_LE, REG_ITMP2, d);
2873                         store_reg_to_var_int(iptr->dst, d);
2874                         break;
2875
2876
2877                 case ICMD_IRETURN:      /* ..., retvalue ==> ...                      */
2878                 case ICMD_LRETURN:
2879                 case ICMD_ARETURN:
2880
2881                         var_to_reg_int(s1, src, REG_RESULT);
2882                         M_INTMOVE(s1, REG_RESULT);
2883
2884 #if defined(USE_THREADS)
2885                         if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
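                                     /* reload the monitor object as first argument, park  */
                                     /* the return value in its stack slot across the      */
                                     /* builtin_monitorexit call and restore it afterwards */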
2886                                 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
2887                                 x86_64_mov_reg_membase(REG_RESULT, REG_SP, maxmemuse * 8);
2888                                 x86_64_mov_imm_reg((u8) builtin_monitorexit, REG_ITMP1);
2889                                 x86_64_call_reg(REG_ITMP1);
2890                                 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, REG_RESULT);
2891                         }
2892 #endif
2893
2894                         goto nowperformreturn;
2895
2896                 case ICMD_FRETURN:      /* ..., retvalue ==> ...                      */
2897                 case ICMD_DRETURN:
2898
2899                         var_to_reg_flt(s1, src, REG_FRESULT);
2900                         M_FLTMOVE(s1, REG_FRESULT);
2901
2902 #if defined(USE_THREADS)
2903                         if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2904                                 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
2905                                 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, maxmemuse * 8);
2906                                 x86_64_mov_imm_reg((u8) builtin_monitorexit, REG_ITMP1);
2907                                 x86_64_call_reg(REG_ITMP1);
2908                                 x86_64_movq_membase_reg(REG_SP, maxmemuse * 8, REG_FRESULT);
2909                         }
2910 #endif
2911
2912                         goto nowperformreturn;
2913
2914                 case ICMD_RETURN:      /* ...  ==> ...                                */
2915
2916 #if defined(USE_THREADS)
2917                         if (checksync && (method->flags & ACC_SYNCHRONIZED)) {
2918                                 x86_64_mov_membase_reg(REG_SP, maxmemuse * 8, argintregs[0]);
2919                                 x86_64_mov_imm_reg((u8) builtin_monitorexit, REG_ITMP1);
2920                                 x86_64_call_reg(REG_ITMP1);
2921                         }
2922 #endif
2923
2924 nowperformreturn:
2925                         {
2926                         int r, p;
2927                         
2928                         p = parentargs_base;
2929                         
2930                         /* call trace function */
2931                         if (runverbose) {
2932                                 x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
2933
2934                                 x86_64_mov_reg_membase(REG_RESULT, REG_SP, 0 * 8);
2935                                 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, 1 * 8);
2936
2937                                 x86_64_mov_imm_reg((s8) method, argintregs[0]);
2938                                 x86_64_mov_reg_reg(REG_RESULT, argintregs[1]);
2939                                 M_FLTMOVE(REG_FRESULT, argfltregs[0]);
2940                                 M_FLTMOVE(REG_FRESULT, argfltregs[1]);
2941
2942                                 x86_64_mov_imm_reg((s8) builtin_displaymethodstop, REG_ITMP1);
2943                                 x86_64_call_reg(REG_ITMP1);
2944
2945                                 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_RESULT);
2946                                 x86_64_movq_membase_reg(REG_SP, 1 * 8, REG_FRESULT);
2947
2948                                 x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
2949                         }
2950
2951                         /* restore saved registers                                        */
2952                         for (r = savintregcnt - 1; r >= maxsavintreguse; r--) {
2953                                 p--; x86_64_mov_membase_reg(REG_SP, p * 8, savintregs[r]);
2954                         }
2955                         for (r = savfltregcnt - 1; r >= maxsavfltreguse; r--) {
2956                                 p--; x86_64_movq_membase_reg(REG_SP, p * 8, savfltregs[r]);
2957                         }
2958
2959                         /* deallocate stack                                               */
2960                         if (parentargs_base) {
2961                                 x86_64_alu_imm_reg(X86_64_ADD, parentargs_base * 8, REG_SP);
2962                         }
2963
2964                         x86_64_ret();
2965                         ALIGNCODENOP;
2966                         }
2967                         break;
2968
2969
2970                 case ICMD_TABLESWITCH:  /* ..., index ==> ...                         */
2971                         {
2972                                 s4 i, l, *s4ptr;
2973                                 void **tptr;
2974
2975                                 tptr = (void **) iptr->target;
2976
2977                                 s4ptr = iptr->val.a;
2978                                 l = s4ptr[1];                          /* low     */
2979                                 i = s4ptr[2];                          /* high    */
2980
2981                                 var_to_reg_int(s1, src, REG_ITMP1);
2982                                 M_INTMOVE(s1, REG_ITMP1);
2983                                 if (l != 0) {
2984                                         x86_64_alul_imm_reg(X86_64_SUB, l, REG_ITMP1);
2985                                 }
2986                                 i = i - l + 1;
2987
2988                 /* range check: after the subtraction of l, indices below the range wrap to large unsigned values, so the single unsigned compare (ja) catches both ends and branches to the default target */
2989                                 x86_64_alul_imm_reg(X86_64_CMP, i - 1, REG_ITMP1);
2990                                 x86_64_jcc(X86_64_CC_A, 0);
2991
2992                 /* codegen_addreference(BlockPtrOfPC(s4ptr[0]), mcodeptr); */
2993                                 codegen_addreference((basicblock *) tptr[0], mcodeptr);
2994
2995                                 /* build jump table top down and use address of lowest entry */
2996
2997                 /* s4ptr += 3 + i; */
2998                                 tptr += i;
2999
3000                                 while (--i >= 0) {
3001                                         /* dseg_addtarget(BlockPtrOfPC(*--s4ptr)); */
3002                                         dseg_addtarget((basicblock *) tptr[0]); 
3003                                         --tptr;
3004                                 }
3005
3006                                 /* length of dataseg after last dseg_addtarget is used by load */
3007
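                                     /* the 0 immediate is a placeholder: dseg_adddata     */
                                     /* records this position so the data segment address  */
                                     /* can be patched in after code generation; the       */
                                     /* indexed load below then fetches the jump target at */
                                     /* dseg offset -dseglen + index * 8                   */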
3008                                 x86_64_mov_imm_reg(0, REG_ITMP2);
3009                                 dseg_adddata(mcodeptr);
3010                                 x86_64_mov_memindex_reg(-dseglen, REG_ITMP2, REG_ITMP1, 3, REG_ITMP1);
3011                                 x86_64_jmp_reg(REG_ITMP1);
3012                                 ALIGNCODENOP;
3013                         }
3014                         break;
3015
3016
3017                 case ICMD_LOOKUPSWITCH: /* ..., key ==> ...                           */
3018                         {
3019                                 s4 i, l, val, *s4ptr;
3020                                 void **tptr;
3021
3022                                 tptr = (void **) iptr->target;
3023
3024                                 s4ptr = iptr->val.a;
3025                                 l = s4ptr[0];                          /* default  */
3026                                 i = s4ptr[1];                          /* count    */
3027                         
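                                     /* a lookupswitch is emitted as a linear sequence of  */
                                     /* compare-and-branch pairs, one per table entry,     */
                                     /* followed by an unconditional jump to the default   */
                                     /* target                                             */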
3028                                 MCODECHECK((i<<2)+8);
3029                                 var_to_reg_int(s1, src, REG_ITMP1);    /* reg compare should always be faster */
3030                                 while (--i >= 0) {
3031                                         s4ptr += 2;
3032                                         ++tptr;
3033
3034                                         val = s4ptr[0];
3035                                         x86_64_alul_imm_reg(X86_64_CMP, val, s1);
3036                                         x86_64_jcc(X86_64_CC_E, 0);
3037                                         /* codegen_addreference(BlockPtrOfPC(s4ptr[1]), mcodeptr); */
3038                                         codegen_addreference((basicblock *) tptr[0], mcodeptr); 
3039                                 }
3040
3041                                 x86_64_jmp_imm(0);
3042                                 /* codegen_addreference(BlockPtrOfPC(l), mcodeptr); */
3043                         
3044                                 tptr = (void **) iptr->target;
3045                                 codegen_addreference((basicblock *) tptr[0], mcodeptr);
3046
3047                                 ALIGNCODENOP;
3048                         }
3049                         break;
3050
3051
3052                 case ICMD_BUILTIN3:     /* ..., arg1, arg2, arg3 ==> ...              */
3053                                         /* op1 = return type, val.a = function pointer*/
3054                         s3 = 3;
3055                         goto gen_method;
3056
3057                 case ICMD_BUILTIN2:     /* ..., arg1, arg2 ==> ...                    */
3058                                         /* op1 = return type, val.a = function pointer*/
3059                         s3 = 2;
3060                         goto gen_method;
3061
3062                 case ICMD_BUILTIN1:     /* ..., arg1 ==> ...                          */
3063                                         /* op1 = return type, val.a = function pointer*/
3064                         s3 = 1;
3065                         goto gen_method;
3066
3067                 case ICMD_INVOKESTATIC: /* ..., [arg1, [arg2 ...]] ==> ...            */
3068                                         /* op1 = arg count, val.a = method pointer    */
3069
3070                 case ICMD_INVOKESPECIAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3071                                         /* op1 = arg count, val.a = method pointer    */
3072
3073                 case ICMD_INVOKEVIRTUAL:/* ..., objectref, [arg1, [arg2 ...]] ==> ... */
3074                                         /* op1 = arg count, val.a = method pointer    */
3075
3076                 case ICMD_INVOKEINTERFACE:/*.., objectref, [arg1, [arg2 ...]] ==> ... */
3077                                         /* op1 = arg count, val.a = method pointer    */
3078
3079                         s3 = iptr->op1;
3080
3081 gen_method: {
3082                         methodinfo   *m;
3083                         classinfo    *ci;
3084                         stackptr     tmpsrc;
3085                         int iarg = 0;
3086                         int farg = 0;
3087
3088                         MCODECHECK((s3 << 1) + 64);
3089
3090                         tmpsrc = src;
3091                         s2 = s3;
3092
3093                         /* count the integer and floating point arguments                 */
3094                         for (; --s3 >= 0; src = src->prev) {
3095                                 IS_INT_LNG_TYPE(src->type) ? iarg++ : farg++;
3096                         }
3097
3098                         src = tmpsrc;
3099                         s3 = s2;
3100
3101                         s2 = ((iarg > INT_ARG_CNT) ? iarg - INT_ARG_CNT : 0) + ((farg > FLT_ARG_CNT) ? farg - FLT_ARG_CNT : 0);    /* stack slots for overflow arguments */
3102
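                             /* copy arguments to their argument registers or stack slots */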
3103                         for (; --s3 >= 0; src = src->prev) {
3104                                 IS_INT_LNG_TYPE(src->type) ? iarg-- : farg--;
3105                                 if (src->varkind == ARGVAR) {
3106                                         if (IS_INT_LNG_TYPE(src->type)) {
3107                                                 if (iarg >= INT_ARG_CNT) {
3108                                                         s2--;
3109                                                 }
3110                                         } else {
3111                                                 if (farg >= FLT_ARG_CNT) {
3112                                                         s2--;
3113                                                 }
3114                                         }
3115                                         continue;
3116                                 }
3117
3118                                 if (IS_INT_LNG_TYPE(src->type)) {
3119                                         if (iarg < INT_ARG_CNT) {
3120                                                 s1 = argintregs[iarg];
3121                                                 var_to_reg_int(d, src, s1);
3122                                                 M_INTMOVE(d, s1);
3123
3124                                         } else {
3125                                                 var_to_reg_int(d, src, REG_ITMP1);
3126                                                 s2--;
3127                                                 x86_64_mov_reg_membase(d, REG_SP, s2 * 8);
3128                                         }
3129
3130                                 } else {
3131                                         if (farg < FLT_ARG_CNT) {
3132                                                 s1 = argfltregs[farg];
3133                                                 var_to_reg_flt(d, src, s1);
3134                                                 M_FLTMOVE(d, s1);
3135
3136                                         } else {
3137                                                 var_to_reg_flt(d, src, REG_FTMP1);
3138                                                 s2--;
3139                                                 x86_64_movq_reg_membase(d, REG_SP, s2 * 8);
3140                                         }
3141                                 }
3142                         } /* end of for */
3143
3144                         m = iptr->val.a;
3145                         switch (iptr->opc) {
3146                                 case ICMD_BUILTIN3:
3147                                 case ICMD_BUILTIN2:
3148                                 case ICMD_BUILTIN1:
3149
3150                                         a = (s8) m;
3151                                         d = iptr->op1;
3152
3153                                         x86_64_mov_imm_reg(a, REG_ITMP1);
3154                                         x86_64_call_reg(REG_ITMP1);
3155                                         break;
3156
3157                                 case ICMD_INVOKESTATIC:
3158
3159                                         a = (s8) m->stubroutine;
3160                                         d = m->returntype;
3161
3162                                         x86_64_mov_imm_reg(a, REG_ITMP2);
3163                                         x86_64_call_reg(REG_ITMP2);
3164                                         break;
3165
3166                                 case ICMD_INVOKESPECIAL:
3167
3168                                         a = (s8) m->stubroutine;
3169                                         d = m->returntype;
3170
3171                                         gen_nullptr_check(argintregs[0]);    /* first argument contains pointer */
3172                                         x86_64_mov_membase_reg(argintregs[0], 0, REG_ITMP2);    /* access memory for hardware nullptr */
3173                                         x86_64_mov_imm_reg(a, REG_ITMP2);
3174                                         x86_64_call_reg(REG_ITMP2);
3175                                         break;
3176
3177                                 case ICMD_INVOKEVIRTUAL:
3178
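                                             /* virtual dispatch: load the vftbl pointer   */
                                             /* from the object header and call the entry  */
                                             /* stored at the method's fixed vftbl index   */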
3179                                         d = m->returntype;
3180
3181                                         gen_nullptr_check(argintregs[0]);
3182                                         x86_64_mov_membase_reg(argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3183                                         x86_64_mov_membase32_reg(REG_ITMP2, OFFSET(vftbl, table[0]) + sizeof(methodptr) * m->vftblindex, REG_ITMP1);
3184                                         x86_64_call_reg(REG_ITMP1);
3185                                         break;
3186
3187                                 case ICMD_INVOKEINTERFACE:
3188
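                                             /* interface dispatch: load the vftbl, fetch   */
                                             /* the per-interface table (indexed negatively */
                                             /* by the interface index) and call the        */
                                             /* method's entry within that table            */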
3189                                         ci = m->class;
3190                                         d = m->returntype;
3191
3192                                         gen_nullptr_check(argintregs[0]);
3193                                         x86_64_mov_membase_reg(argintregs[0], OFFSET(java_objectheader, vftbl), REG_ITMP2);
3194                                         x86_64_mov_membase_reg(REG_ITMP2, OFFSET(vftbl, interfacetable[0]) - sizeof(methodptr) * ci->index, REG_ITMP2);
3195                                         x86_64_mov_membase32_reg(REG_ITMP2, sizeof(methodptr) * (m - ci->methods), REG_ITMP1);
3196                                         x86_64_call_reg(REG_ITMP1);
3197                                         break;
3198
3199                                 default:
3200                                         d = 0;
3201                                         error("Unknown ICMD-Command: %d", iptr->opc);
3202                                 }
3203
3204                         /* d contains return type */
3205
3206                         if (d != TYPE_VOID) {
3207                                 if (IS_INT_LNG_TYPE(iptr->dst->type)) {
3208                                         s1 = reg_of_var(iptr->dst, REG_RESULT);
3209                                         M_INTMOVE(REG_RESULT, s1);
3210                                         store_reg_to_var_int(iptr->dst, s1);
3211
3212                                 } else {
3213                                         s1 = reg_of_var(iptr->dst, REG_FRESULT);
3214                                         M_FLTMOVE(REG_FRESULT, s1);
3215                                         store_reg_to_var_flt(iptr->dst, s1);
3216                                 }
3217                         }
3218                         }
3219                         break;
3220
3221
3222                 case ICMD_INSTANCEOF: /* ..., objectref ==> ..., intresult            */
3223
3224                                       /* op1:   0 == array, 1 == class                */
3225                                       /* val.a: (classinfo*) superclass               */
3226
3227 /*          superclass is an interface:
3228  *
3229  *          return (sub != NULL) &&
3230  *                 (sub->vftbl->interfacetablelength > super->index) &&
3231  *                 (sub->vftbl->interfacetable[-super->index] != NULL);
3232  *
3233  *          superclass is a class:
3234  *
3235  *          return ((sub != NULL) && (0
3236  *                  <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3237  *                  super->vftbl->diffval));
3238  */
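                        /*
                         * The test is emitted inline.  `a' pre-computes the byte length of
                         * the instruction sequence that follows, so the forward conditional
                         * jump taken for a NULL reference skips the whole check and leaves
                         * the zeroed result register untouched.  The class case relies on an
                         * unsigned compare: 0 <= (baseval(sub) - baseval(super)) <= diffval
                         * holds exactly when the unsigned difference is below or equal.
                         */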
3239
3240                         {
3241                         classinfo *super = (classinfo*) iptr->val.a;
3242                         
3243                         var_to_reg_int(s1, src, REG_ITMP1);
3244                         d = reg_of_var(iptr->dst, REG_ITMP3);
3245                         if (s1 == d) {
3246                                 M_INTMOVE(s1, REG_ITMP1);
3247                                 s1 = REG_ITMP1;
3248                         }
3249                         x86_64_alu_reg_reg(X86_64_XOR, d, d);
3250                         if (iptr->op1) {                               /* class/interface */
3251                                 if (super->flags & ACC_INTERFACE) {        /* interface       */
3252                                         x86_64_test_reg_reg(s1, s1);
3253
3254                                         /* TODO: clean up this calculation */
3255                                         a = 3;    /* mov_membase_reg */
3256                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3257
3258                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3259                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetablelength));
3260                                         
3261                                         a += 3;    /* sub */
3262                                         CALCIMMEDIATEBYTES(a, super->index);
3263                                         
3264                                         a += 3;    /* test */
3265
3266                                         a += 6;    /* jcc */
3267                                         a += 3;    /* mov_membase_reg */
3268                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3269
3270                                         a += 3;    /* test */
3271                                         a += 4;    /* setcc */
3272
3273                                         x86_64_jcc(X86_64_CC_E, a);
3274
3275                                         x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3276                                         x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetablelength), REG_ITMP2);
3277                                         x86_64_alu_imm_reg(X86_64_SUB, super->index, REG_ITMP2);
3278                                         x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3279
3280                                         /* TODO: clean up this calculation */
3281                                         a = 0;
3282                                         a += 3;    /* mov_membase_reg */
3283                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3284
3285                                         a += 3;    /* test */
3286                                         a += 4;    /* setcc */
3287
3288                                         x86_64_jcc(X86_64_CC_LE, a);
3289                                         x86_64_mov_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP1);
3290                                         x86_64_test_reg_reg(REG_ITMP1, REG_ITMP1);
3291                                         x86_64_setcc_reg(X86_64_CC_NE, d);
3292
3293                                 } else {                                   /* class           */
3294                                         x86_64_test_reg_reg(s1, s1);
3295
3296                                         /* TODO: clean up this calculation */
3297                                         a = 3;    /* mov_membase_reg */
3298                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3299
3300                                         a += 10;   /* mov_imm_reg */
3301
3302                                         a += 2;    /* movl_membase_reg - only if REG_ITMP1 == RAX */
3303                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, baseval));
3304                                         
3305                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3306                                         CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3307                                         
3308                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3309                                         CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3310                                         
3311                                         a += 3;    /* sub */
3312                                         a += 3;    /* xor */
3313                                         a += 3;    /* cmp */
3314                                         a += 4;    /* setcc */
3315
3316                                         x86_64_jcc(X86_64_CC_E, a);
3317
3318                                         x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3319                                         x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3320                                         x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, baseval), REG_ITMP1);
3321                                         x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP3);
3322                                         x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3323                                         x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP3, REG_ITMP1);
3324                                         x86_64_alu_reg_reg(X86_64_XOR, d, d);
3325                                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
3326                                         x86_64_setcc_reg(X86_64_CC_BE, d);
3327                                 }
3328                         }
3329                         else
3330                                 panic("internal error: no inlined array instanceof");
3331                         }
3332                         store_reg_to_var_int(iptr->dst, d);
3333                         break;
3334
3335                 case ICMD_CHECKCAST:  /* ..., objectref ==> ..., objectref            */
3336
3337                                       /* op1:   0 == array, 1 == class                */
3338                                       /* val.a: (classinfo*) superclass               */
3339
3340 /*          superclass is an interface:
3341  *
3342  *          OK if ((sub == NULL) ||
3343  *                 (sub->vftbl->interfacetablelength > super->index) &&
3344  *                 (sub->vftbl->interfacetable[-super->index] != NULL));
3345  *
3346  *          superclass is a class:
3347  *
3348  *          OK if ((sub == NULL) || (0
3349  *                 <= (sub->vftbl->baseval - super->vftbl->baseval) <=
3350  *                 super->vftbl->diffval));
3351  */
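                        /*
                         * Same inline test as for ICMD_INSTANCEOF, but instead of producing
                         * a boolean the failing paths branch (offsets patched later) to a
                         * ClassCastException stub registered via codegen_addxcastrefs.  A
                         * NULL reference passes the check unchanged.
                         */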
3352
3353                         {
3354                         classinfo *super = (classinfo*) iptr->val.a;
3355                         
3356                         d = reg_of_var(iptr->dst, REG_ITMP3);
3357                         var_to_reg_int(s1, src, d);
3358                         if (iptr->op1) {                               /* class/interface */
3359                                 if (super->flags & ACC_INTERFACE) {        /* interface       */
3360                                         x86_64_test_reg_reg(s1, s1);
3361
3362                                         /* TODO: clean up this calculation */
3363                                         a = 3;    /* mov_membase_reg */
3364                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3365
3366                                         a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3367                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetablelength));
3368
3369                                         a += 3;    /* sub */
3370                                         CALCIMMEDIATEBYTES(a, super->index);
3371
3372                                         a += 3;    /* test */
3373                                         a += 6;    /* jcc */
3374
3375                                         a += 3;    /* mov_membase_reg */
3376                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*));
3377
3378                                         a += 3;    /* test */
3379                                         a += 6;    /* jcc */
3380
3381                                         x86_64_jcc(X86_64_CC_E, a);
3382
3383                                         x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3384                                         x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetablelength), REG_ITMP2);
3385                                         x86_64_alu_imm_reg(X86_64_SUB, super->index, REG_ITMP2);
3386                                         x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3387                                         x86_64_jcc(X86_64_CC_LE, 0);
3388                                         codegen_addxcastrefs(mcodeptr);
3389                                         x86_64_mov_membase_reg(REG_ITMP1, OFFSET(vftbl, interfacetable[0]) - super->index * sizeof(methodptr*), REG_ITMP2);
3390                                         x86_64_test_reg_reg(REG_ITMP2, REG_ITMP2);
3391                                         x86_64_jcc(X86_64_CC_E, 0);
3392                                         codegen_addxcastrefs(mcodeptr);
3393
3394                                 } else {                                     /* class           */
3395                                         x86_64_test_reg_reg(s1, s1);
3396
3397                                         /* TODO: clean up this calculation */
3398                                         a = 3;    /* mov_membase_reg */
3399                                         CALCOFFSETBYTES(a, s1, OFFSET(java_objectheader, vftbl));
3400                                         a += 10;   /* mov_imm_reg */
3401                                         a += 2;    /* movl_membase_reg - only if REG_ITMP1 == RAX */
3402                                         CALCOFFSETBYTES(a, REG_ITMP1, OFFSET(vftbl, baseval));
3403
3404                                         if (d != REG_ITMP3) {
3405                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3406                                                 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3407                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3408                                                 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3409                                                 a += 3;    /* sub */
3410                                                 
3411                                         } else {
3412                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3413                                                 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, baseval));
3414                                                 a += 3;    /* sub */
3415                                                 a += 10;   /* mov_imm_reg */
3416                                                 a += 3;    /* movl_membase_reg - only if REG_ITMP2 == R10 */
3417                                                 CALCOFFSETBYTES(a, REG_ITMP2, OFFSET(vftbl, diffval));
3418                                         }
3419
3420                                         a += 3;    /* cmp */
3421                                         a += 6;    /* jcc */
3422
3423                                         x86_64_jcc(X86_64_CC_E, a);
3424
3425                                         x86_64_mov_membase_reg(s1, OFFSET(java_objectheader, vftbl), REG_ITMP1);
3426                                         x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3427                                         x86_64_movl_membase_reg(REG_ITMP1, OFFSET(vftbl, baseval), REG_ITMP1);
3428                                         if (d != REG_ITMP3) {
3429                                                 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP3);
3430                                                 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3431                                                 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP3, REG_ITMP1);
3432
3433                                         } else {
3434                                                 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, baseval), REG_ITMP2);
3435                                                 x86_64_alu_reg_reg(X86_64_SUB, REG_ITMP2, REG_ITMP1);
3436                                                 x86_64_mov_imm_reg((s8) super->vftbl, REG_ITMP2);
3437                                                 x86_64_movl_membase_reg(REG_ITMP2, OFFSET(vftbl, diffval), REG_ITMP2);
3438                                         }
3439                                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP2, REG_ITMP1);
3440                                         x86_64_jcc(X86_64_CC_A, 0);    /* (u) REG_ITMP1 > (u) REG_ITMP2 -> jump */
3441                                         codegen_addxcastrefs(mcodeptr);
3442                                 }
3443
3444                         } else
3445                                 panic("internal error: no inlined array checkcast");
3446                         }
3447                         M_INTMOVE(s1, d);
3448                         store_reg_to_var_int(iptr->dst, d);
3449                         break;
3450
3451                 case ICMD_CHECKASIZE:  /* ..., size ==> ..., size                     */
3452
3453                         if (src->flags & INMEMORY) {
3454                                 x86_64_alul_imm_membase(X86_64_CMP, 0, REG_SP, src->regoff * 8);
3455                                 
3456                         } else {
3457                                 x86_64_testl_reg_reg(src->regoff, src->regoff);
3458                         }
3459                         x86_64_jcc(X86_64_CC_L, 0);
3460                         codegen_addxcheckarefs(mcodeptr);
3461                         break;
3462
3463                 case ICMD_CHECKEXCEPTION:    /* ... ==> ...                           */
3464
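                        /* a NULL (zero) result from the preceding builtin call indicates a
                           pending exception, so branch to the exception stub in that case */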
3465                         x86_64_test_reg_reg(REG_RESULT, REG_RESULT);
3466                         x86_64_jcc(X86_64_CC_E, 0);
3467                         codegen_addxexceptionrefs(mcodeptr);
3468                         break;
3469
3470                 case ICMD_MULTIANEWARRAY:/* ..., cnt1, [cnt2, ...] ==> ..., arrayref  */
3471                                          /* op1 = dimension, val.a = array descriptor */
3472
3473                         /* check for negative sizes and copy sizes to stack if necessary  */
3474
3475                         MCODECHECK((iptr->op1 << 1) + 64);
3476
3477                         for (s1 = iptr->op1; --s1 >= 0; src = src->prev) {
3478                                 var_to_reg_int(s2, src, REG_ITMP1);
3479                                 x86_64_testl_reg_reg(s2, s2);
3480                                 x86_64_jcc(X86_64_CC_L, 0);
3481                                 codegen_addxcheckarefs(mcodeptr);
3482
3483                                 /* copy sizes to stack (argument numbers >= INT_ARG_CNT)      */
3484
3485                                 if (src->varkind != ARGVAR) {
3486                                         x86_64_mov_reg_membase(s2, REG_SP, (s1 + INT_ARG_CNT) * 8);
3487                                 }
3488                         }
3489
3490                         /* a0 = dimension count */
3491                         x86_64_mov_imm_reg(iptr->op1, argintregs[0]);
3492
3493                         /* a1 = arraydescriptor */
3494                         x86_64_mov_imm_reg((s8) iptr->val.a, argintregs[1]);
3495
3496                         /* a2 = pointer to dimensions = stack pointer */
3497                         x86_64_mov_reg_reg(REG_SP, argintregs[2]);
3498
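                        /* call builtin_nmultianewarray(dimension count, array descriptor,
                           pointer to the dimension values on the stack) */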
3499                         x86_64_mov_imm_reg((s8) (builtin_nmultianewarray), REG_ITMP1);
3500                         x86_64_call_reg(REG_ITMP1);
3501
3502                         s1 = reg_of_var(iptr->dst, REG_RESULT);
3503                         M_INTMOVE(REG_RESULT, s1);
3504                         store_reg_to_var_int(iptr->dst, s1);
3505                         break;
3506
3507                 default: error("Unknown pseudo command: %d", iptr->opc);
3508         } /* switch */
3509                 
3510         } /* for instruction */
3511                 
3512         /* copy values to interface registers */
3513
3514         src = bptr->outstack;
3515         len = bptr->outdepth;
3516         MCODECHECK(64+len);
3517         while (src) {
3518                 len--;
3519                 if ((src->varkind != STACKVAR)) {
3520                         s2 = src->type;
3521                         if (IS_FLT_DBL_TYPE(s2)) {
3522                                 var_to_reg_flt(s1, src, REG_FTMP1);
3523                                 if (!(interfaces[len][s2].flags & INMEMORY)) {
3524                                         M_FLTMOVE(s1, interfaces[len][s2].regoff);
3525
3526                                 } else {
3527                                         x86_64_movq_reg_membase(s1, REG_SP, 8 * interfaces[len][s2].regoff);
3528                                 }
3529
3530                         } else {
3531                                 var_to_reg_int(s1, src, REG_ITMP1);
3532                                 if (!(interfaces[len][s2].flags & INMEMORY)) {
3533                                         M_INTMOVE(s1, interfaces[len][s2].regoff);
3534
3535                                 } else {
3536                                         x86_64_mov_reg_membase(s1, REG_SP, interfaces[len][s2].regoff * 8);
3537                                 }
3538                         }
3539                 }
3540                 src = src->prev;
3541         }
3542         } /* if (bptr -> flags >= BBREACHED) */
3543         } /* for basic block */
3544
3545         /* bptr -> mpc = (int)((u1*) mcodeptr - mcodebase); */
3546
3547         {
3548
3549         /* generate bound check stubs */
3550
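        /* Each stub below reconstructs the faulting PC in REG_ITMP2_XPC (a base
           address filled in via dseg_adddata plus the recorded branch position),
           builds the exception object and jumps to asm_handle_exception.  When
           the method has no exception table the stub body is emitted only once
           and later branches are resolved directly to it via xcodeptr.  The
           remaining stub generators follow the same pattern with different
           exception classes. */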
3551         u1 *xcodeptr = NULL;
3552         
3553         for (; xboundrefs != NULL; xboundrefs = xboundrefs->next) {
3554                 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3555                         gen_resolvebranch(mcodebase + xboundrefs->branchpos, 
3556                                                           xboundrefs->branchpos,
3557                                                           xcodeptr - mcodebase - (3 + 10 + 10 + 3));
3558                         continue;
3559                 }
3560
3561                 gen_resolvebranch(mcodebase + xboundrefs->branchpos, 
3562                                   xboundrefs->branchpos,
3563                                                   mcodeptr - mcodebase);
3564
3565                 MCODECHECK(8);
3566
3567                 /* move index register into REG_ITMP1 */
3568                 x86_64_mov_reg_reg(xboundrefs->reg, REG_ITMP1);              /* 3 bytes  */
3569
3570                 x86_64_mov_imm_reg(0, REG_ITMP2_XPC);                        /* 10 bytes */
3571                 dseg_adddata(mcodeptr);
3572                 x86_64_mov_imm_reg(xboundrefs->branchpos - 6, REG_ITMP3);    /* 10 bytes */
3573                 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC);    /* 3 bytes  */
3574
3575                 if (xcodeptr != NULL) {
3576                         x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3577
3578                 } else {
3579                         xcodeptr = mcodeptr;
3580
3581                         x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3582                         x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3583                         x86_64_mov_imm_reg((s8) string_java_lang_ArrayIndexOutOfBoundsException, argintregs[0]);
3584                         x86_64_mov_reg_reg(REG_ITMP1, argintregs[1]);
3585                         x86_64_mov_imm_reg((s8) new_exception_int, REG_ITMP3);
3586                         x86_64_call_reg(REG_ITMP3);
3587                         x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3588                         x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3589
3590                         x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3591                         x86_64_jmp_reg(REG_ITMP3);
3592                 }
3593         }
3594
3595         /* generate negative array size check stubs */
3596
3597         xcodeptr = NULL;
3598         
3599         for (; xcheckarefs != NULL; xcheckarefs = xcheckarefs->next) {
3600                 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3601                         gen_resolvebranch(mcodebase + xcheckarefs->branchpos, 
3602                                                           xcheckarefs->branchpos,
3603                                                           xcodeptr - mcodebase - (10 + 10 + 3));
3604                         continue;
3605                 }
3606
3607                 gen_resolvebranch(mcodebase + xcheckarefs->branchpos, 
3608                                   xcheckarefs->branchpos,
3609                                                   mcodeptr - mcodebase);
3610
3611                 MCODECHECK(8);
3612
3613                 x86_64_mov_imm_reg(0, REG_ITMP2_XPC);                         /* 10 bytes */
3614                 dseg_adddata(mcodeptr);
3615                 x86_64_mov_imm_reg(xcheckarefs->branchpos - 6, REG_ITMP3);    /* 10 bytes */
3616                 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC);     /* 3 bytes  */
3617
3618                 if (xcodeptr != NULL) {
3619                         x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3620
3621                 } else {
3622                         xcodeptr = mcodeptr;
3623
3624                         x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3625                         x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3626                         x86_64_mov_imm_reg((s8) string_java_lang_NegativeArraySizeException, argintregs[0]);
3627                         x86_64_mov_imm_reg((s8) new_exception, REG_ITMP3);
3628                         x86_64_call_reg(REG_ITMP3);
3629                         x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3630                         x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3631
3632                         x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3633                         x86_64_jmp_reg(REG_ITMP3);
3634                 }
3635         }
3636
3637         /* generate cast check stubs */
3638
3639         xcodeptr = NULL;
3640         
3641         for (; xcastrefs != NULL; xcastrefs = xcastrefs->next) {
3642                 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3643                         gen_resolvebranch(mcodebase + xcastrefs->branchpos, 
3644                                                           xcastrefs->branchpos,
3645                                                           xcodeptr - mcodebase - (10 + 10 + 3));
3646                         continue;
3647                 }
3648
3649                 gen_resolvebranch(mcodebase + xcastrefs->branchpos, 
3650                                   xcastrefs->branchpos,
3651                                                   mcodeptr - mcodebase);
3652
3653                 MCODECHECK(8);
3654
3655                 x86_64_mov_imm_reg(0, REG_ITMP2_XPC);                        /* 10 bytes */
3656                 dseg_adddata(mcodeptr);
3657                 x86_64_mov_imm_reg(xcastrefs->branchpos - 6, REG_ITMP3);     /* 10 bytes */
3658                 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC);    /* 3 bytes  */
3659
3660                 if (xcodeptr != NULL) {
3661                         x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3662                 
3663                 } else {
3664                         xcodeptr = mcodeptr;
3665
3666                         x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3667                         x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3668                         x86_64_mov_imm_reg((s8) string_java_lang_ClassCastException, argintregs[0]);
3669                         x86_64_mov_imm_reg((s8) new_exception, REG_ITMP3);
3670                         x86_64_call_reg(REG_ITMP3);
3671                         x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3672                         x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3673
3674                         x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3675                         x86_64_jmp_reg(REG_ITMP3);
3676                 }
3677         }
3678
3679         /* generate divide by zero check stubs */
3680
3681         xcodeptr = NULL;
3682         
3683         for (; xdivrefs != NULL; xdivrefs = xdivrefs->next) {
3684                 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3685                         gen_resolvebranch(mcodebase + xdivrefs->branchpos, 
3686                                                           xdivrefs->branchpos,
3687                                                           xcodeptr - mcodebase - (10 + 10 + 3));
3688                         continue;
3689                 }
3690
3691                 gen_resolvebranch(mcodebase + xdivrefs->branchpos, 
3692                                   xdivrefs->branchpos,
3693                                                   mcodeptr - mcodebase);
3694
3695                 MCODECHECK(8);
3696
3697                 x86_64_mov_imm_reg(0, REG_ITMP2_XPC);                        /* 10 bytes */
3698                 dseg_adddata(mcodeptr);
3699                 x86_64_mov_imm_reg(xdivrefs->branchpos - 6, REG_ITMP3);      /* 10 bytes */
3700                 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP3, REG_ITMP2_XPC);    /* 3 bytes  */
3701
3702                 if (xcodeptr != NULL) {
3703                         x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3704                 
3705                 } else {
3706                         xcodeptr = mcodeptr;
3707
3708                         x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3709                         x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3710                         x86_64_mov_imm_reg((u8) string_java_lang_ArithmeticException, argintregs[0]);
3711                         x86_64_mov_imm_reg((u8) string_java_lang_ArithmeticException_message, argintregs[1]);
3712                         x86_64_mov_imm_reg((u8) new_exception, REG_ITMP3);
3713                         x86_64_call_reg(REG_ITMP3);
3714                         x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3715                         x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3716
3717                         x86_64_mov_imm_reg((u8) asm_handle_exception, REG_ITMP3);
3718                         x86_64_jmp_reg(REG_ITMP3);
3719                 }
3720         }
3721
3722         /* generate exception check stubs */
3723
3724         xcodeptr = NULL;
3725         
3726         for (; xexceptionrefs != NULL; xexceptionrefs = xexceptionrefs->next) {
3727                 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3728                         gen_resolvebranch(mcodebase + xexceptionrefs->branchpos, 
3729                                                           xexceptionrefs->branchpos,
3730                                                           xcodeptr - mcodebase - (10 + 10 + 3));
3731                         continue;
3732                 }
3733
3734                 gen_resolvebranch(mcodebase + xexceptionrefs->branchpos, 
3735                                   xexceptionrefs->branchpos,
3736                                                   mcodeptr - mcodebase);
3737
3738                 MCODECHECK(8);
3739
3740                 x86_64_mov_imm_reg(0, REG_ITMP2_XPC);                        /* 10 bytes */
3741                 dseg_adddata(mcodeptr);
3742                 x86_64_mov_imm_reg(xexceptionrefs->branchpos - 6, REG_ITMP1);     /* 10 bytes */
3743                 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC);    /* 3 bytes  */
3744
3745                 if (xcodeptr != NULL) {
3746                         x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3747                 
3748                 } else {
3749                         xcodeptr = mcodeptr;
3750
3751 #if defined(USE_THREADS) && defined(NATIVE_THREADS)
3752                         x86_64_push_reg(REG_ITMP2_XPC);
3753                         x86_64_mov_imm_reg((u8) &builtin_get_exceptionptrptr, REG_ITMP1);
3754                         x86_64_call_reg(REG_ITMP1);
3755                         x86_64_mov_membase_reg(REG_RESULT, 0, REG_ITMP3);
3756                         x86_64_mov_imm_membase(0, REG_RESULT, 0);
3757                         x86_64_mov_reg_reg(REG_ITMP3, REG_ITMP1_XPTR);
3758                         x86_64_pop_reg(REG_ITMP2_XPC);
3759 #else
3760                         x86_64_mov_imm_reg((u8) &_exceptionptr, REG_ITMP3);
3761                         x86_64_mov_membase_reg(REG_ITMP3, 0, REG_ITMP1_XPTR);
3762                         x86_64_mov_imm_membase(0, REG_ITMP3, 0);
3763 #endif
3764
3765                         x86_64_mov_imm_reg((u8) asm_handle_exception, REG_ITMP3);
3766                         x86_64_jmp_reg(REG_ITMP3);
3767                 }
3768         }
3769
3770         /* generate null pointer check stubs */
3771
3772         xcodeptr = NULL;
3773         
3774         for (; xnullrefs != NULL; xnullrefs = xnullrefs->next) {
3775                 if ((exceptiontablelength == 0) && (xcodeptr != NULL)) {
3776                         gen_resolvebranch(mcodebase + xnullrefs->branchpos, 
3777                                                           xnullrefs->branchpos,
3778                                                           xcodeptr - mcodebase - (10 + 10 + 3));
3779                         continue;
3780                 }
3781
3782                 gen_resolvebranch(mcodebase + xnullrefs->branchpos, 
3783                                   xnullrefs->branchpos,
3784                                                   mcodeptr - mcodebase);
3785
3786                 MCODECHECK(8);
3787
3788                 x86_64_mov_imm_reg(0, REG_ITMP2_XPC);                        /* 10 bytes */
3789                 dseg_adddata(mcodeptr);
3790                 x86_64_mov_imm_reg(xnullrefs->branchpos - 6, REG_ITMP1);     /* 10 bytes */
3791                 x86_64_alu_reg_reg(X86_64_ADD, REG_ITMP1, REG_ITMP2_XPC);    /* 3 bytes  */
3792
3793                 if (xcodeptr != NULL) {
3794                         x86_64_jmp_imm(xcodeptr - mcodeptr - 5);
3795                 
3796                 } else {
3797                         xcodeptr = mcodeptr;
3798
3799                         x86_64_alu_imm_reg(X86_64_SUB, 2 * 8, REG_SP);
3800                         x86_64_mov_reg_membase(REG_ITMP2_XPC, REG_SP, 0 * 8);
3801                         x86_64_mov_imm_reg((s8) string_java_lang_NullPointerException, argintregs[0]);
3802                         x86_64_mov_imm_reg((s8) new_exception, REG_ITMP3);
3803                         x86_64_call_reg(REG_ITMP3);
3804                         x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_ITMP2_XPC);
3805                         x86_64_alu_imm_reg(X86_64_ADD, 2 * 8, REG_SP);
3806
3807                         x86_64_mov_imm_reg((s8) asm_handle_exception, REG_ITMP3);
3808                         x86_64_jmp_reg(REG_ITMP3);
3809                 }
3810         }
3811
3812         }
3813
3814         codegen_finish((int)((u1*) mcodeptr - mcodebase));
3815 }
3816
3817
3818 /* function createcompilerstub *************************************************
3819
3820         creates a stub routine which calls the compiler
3821         
3822 *******************************************************************************/
3823
3824 #define COMPSTUBSIZE 23
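/* 23 = 10 + 10 + 3: two mov imm64,reg loads plus the indirect jump emitted below */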
3825
3826 u1 *createcompilerstub(methodinfo *m)
3827 {
3828         u1 *s = CNEW(u1, COMPSTUBSIZE);     /* memory to hold the stub            */
3829         mcodeptr = s;                       /* code generation pointer            */
3830
3831                                             /* code for the stub                  */
3832         x86_64_mov_imm_reg((s8) m, REG_ITMP1); /* pass method pointer to compiler */
3833         x86_64_mov_imm_reg((s8) asm_call_jit_compiler, REG_ITMP3);/* load address */
3834         x86_64_jmp_reg(REG_ITMP3);          /* jump to compiler                   */
3835
3836 #ifdef STATISTICS
3837         count_cstub_len += COMPSTUBSIZE;
3838 #endif
3839
3840         return (u1*) s;
3841 }
3842
3843
3844 /* function removecompilerstub *************************************************
3845
3846      deletes a compiler stub from memory (simply by freeing it)
3847
3848 *******************************************************************************/
3849
3850 void removecompilerstub(u1 *stub) 
3851 {
3852         CFREE(stub, COMPSTUBSIZE);
3853 }
3854
3855 /* function: createnativestub **************************************************
3856
3857         creates a stub routine which calls a native method
3858
3859 *******************************************************************************/
3860
3861 #define NATIVESTUBSIZE 420
3862
3863 u1 *createnativestub(functionptr f, methodinfo *m)
3864 {
3865         u1 *s = CNEW(u1, NATIVESTUBSIZE);   /* memory to hold the stub            */
3866         int stackframesize;                 /* size of stackframe if needed       */
3867         mcodeptr = s;                       /* make macros work                   */
3868
3869         reg_init();
3870         descriptor2types(m);                /* set paramcount and paramtypes      */
3871
3872         /* if function is static, check for initialized */
3873
3874         if (m->flags & ACC_STATIC) {
3875                 /* if class isn't yet initialized, do it */
3876                 if (!m->class->initialized) {
3877                         /* call helper function which patches this code */
3878                         x86_64_mov_imm_reg((u8) m->class, REG_ITMP1);
3879                         x86_64_mov_imm_reg((u8) asm_check_clinit, REG_ITMP2);
3880                         x86_64_call_reg(REG_ITMP2);
3881                 }
3882         }
3883
3884         if (runverbose) {
3885                 int p, l, s1;
3886
3887                 x86_64_alu_imm_reg(X86_64_SUB, (6 + 8 + 1) * 8, REG_SP);
3888
3889                 x86_64_mov_reg_membase(argintregs[0], REG_SP, 1 * 8);
3890                 x86_64_mov_reg_membase(argintregs[1], REG_SP, 2 * 8);
3891                 x86_64_mov_reg_membase(argintregs[2], REG_SP, 3 * 8);
3892                 x86_64_mov_reg_membase(argintregs[3], REG_SP, 4 * 8);
3893                 x86_64_mov_reg_membase(argintregs[4], REG_SP, 5 * 8);
3894                 x86_64_mov_reg_membase(argintregs[5], REG_SP, 6 * 8);
3895
3896                 x86_64_movq_reg_membase(argfltregs[0], REG_SP, 7 * 8);
3897                 x86_64_movq_reg_membase(argfltregs[1], REG_SP, 8 * 8);
3898                 x86_64_movq_reg_membase(argfltregs[2], REG_SP, 9 * 8);
3899                 x86_64_movq_reg_membase(argfltregs[3], REG_SP, 10 * 8);
3900 /*              x86_64_movq_reg_membase(argfltregs[4], REG_SP, 11 * 8); */
3901 /*              x86_64_movq_reg_membase(argfltregs[5], REG_SP, 12 * 8); */
3902 /*              x86_64_movq_reg_membase(argfltregs[6], REG_SP, 13 * 8); */
3903 /*              x86_64_movq_reg_membase(argfltregs[7], REG_SP, 14 * 8); */
3904
3905                 /* move float arguments into integer registers so builtin_trace_args can print their bit patterns */
3906                 for (p = 0, l = 0; p < m->paramcount; p++) {
3907                         if (IS_FLT_DBL_TYPE(m->paramtypes[p])) {
3908                                 for (s1 = (m->paramcount > INT_ARG_CNT) ? INT_ARG_CNT - 2 : m->paramcount - 2; s1 >= p; s1--) {
3909                                         x86_64_mov_reg_reg(argintregs[s1], argintregs[s1 + 1]);
3910                                 }
3911
3912                                 x86_64_movd_freg_reg(argfltregs[l], argintregs[p]);
3913                                 l++;
3914                         }
3915                 }
3916
3917                 x86_64_mov_imm_reg((s8) m, REG_ITMP1);
3918                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, 0 * 8);
3919                 x86_64_mov_imm_reg((s8) builtin_trace_args, REG_ITMP1);
3920                 x86_64_call_reg(REG_ITMP1);
3921
3922                 x86_64_mov_membase_reg(REG_SP, 1 * 8, argintregs[0]);
3923                 x86_64_mov_membase_reg(REG_SP, 2 * 8, argintregs[1]);
3924                 x86_64_mov_membase_reg(REG_SP, 3 * 8, argintregs[2]);
3925                 x86_64_mov_membase_reg(REG_SP, 4 * 8, argintregs[3]);
3926                 x86_64_mov_membase_reg(REG_SP, 5 * 8, argintregs[4]);
3927                 x86_64_mov_membase_reg(REG_SP, 6 * 8, argintregs[5]);
3928
3929                 x86_64_movq_membase_reg(REG_SP, 7 * 8, argfltregs[0]);
3930                 x86_64_movq_membase_reg(REG_SP, 8 * 8, argfltregs[1]);
3931                 x86_64_movq_membase_reg(REG_SP, 9 * 8, argfltregs[2]);
3932                 x86_64_movq_membase_reg(REG_SP, 10 * 8, argfltregs[3]);
3933 /*              x86_64_movq_membase_reg(REG_SP, 11 * 8, argfltregs[4]); */
3934 /*              x86_64_movq_membase_reg(REG_SP, 12 * 8, argfltregs[5]); */
3935 /*              x86_64_movq_membase_reg(REG_SP, 13 * 8, argfltregs[6]); */
3936 /*              x86_64_movq_membase_reg(REG_SP, 14 * 8, argfltregs[7]); */
3937
3938                 x86_64_alu_imm_reg(X86_64_ADD, (6 + 8 + 1) * 8, REG_SP);
3939         }
3940
3941 #if 0
3942         x86_64_alu_imm_reg(X86_64_SUB, 7 * 8, REG_SP);    /* keep stack 16-byte aligned */
3943
3944         /* save callee saved float registers */
3945         x86_64_movq_reg_membase(XMM15, REG_SP, 0 * 8);
3946         x86_64_movq_reg_membase(XMM14, REG_SP, 1 * 8);
3947         x86_64_movq_reg_membase(XMM13, REG_SP, 2 * 8);
3948         x86_64_movq_reg_membase(XMM12, REG_SP, 3 * 8);
3949         x86_64_movq_reg_membase(XMM11, REG_SP, 4 * 8);
3950         x86_64_movq_reg_membase(XMM10, REG_SP, 5 * 8);
3951 #endif
3952
3953         /* save argument registers on stack -- if we have to */
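        /* the native call receives the JNI environment pointer (and, for static
           methods, the class pointer) as extra leading arguments, so Java
           arguments that no longer fit into the integer argument registers have
           to be shifted onto the stack here */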
3954         if ((m->flags & ACC_STATIC && m->paramcount > (INT_ARG_CNT - 2)) || m->paramcount > (INT_ARG_CNT - 1)) {
3955                 int i;
3956                 int paramshiftcnt = (m->flags & ACC_STATIC) ? 2 : 1;
3957                 int stackparamcnt = (m->paramcount > INT_ARG_CNT) ? m->paramcount - INT_ARG_CNT : 0;
3958
3959                 stackframesize = stackparamcnt + paramshiftcnt;
3960
3961                 /* keep stack 16-byte aligned */
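                /* the call that entered this stub already pushed the return
                   address, so an odd number of 8-byte slots brings RSP back to a
                   16-byte boundary for the native call */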
3962                 if ((stackframesize % 2) == 0) stackframesize++;
3963
3964                 x86_64_alu_imm_reg(X86_64_SUB, stackframesize * 8, REG_SP);
3965
3966                 /* copy stack arguments into new stack frame -- if any */
3967                 for (i = 0; i < stackparamcnt; i++) {
3968                         x86_64_mov_membase_reg(REG_SP, (stackparamcnt + 1 + i) * 8, REG_ITMP1);
3969                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, (paramshiftcnt + i) * 8);
3970                 }
3971
3972                 if (m->flags & ACC_STATIC) {
3973                         x86_64_mov_reg_membase(argintregs[5], REG_SP, 1 * 8);
3974                         x86_64_mov_reg_membase(argintregs[4], REG_SP, 0 * 8);
3975
3976                 } else {
3977                         x86_64_mov_reg_membase(argintregs[5], REG_SP, 0 * 8);
3978                 }
3979
3980         } else {
3981                 /* keep stack 16-byte aligned -- this is essential for x86_64 */
3982                 x86_64_alu_imm_reg(X86_64_SUB, 8, REG_SP);
3983                 stackframesize = 1;
3984         }
3985
3986         if (m->flags & ACC_STATIC) {
3987                 x86_64_mov_reg_reg(argintregs[3], argintregs[5]);
3988                 x86_64_mov_reg_reg(argintregs[2], argintregs[4]);
3989                 x86_64_mov_reg_reg(argintregs[1], argintregs[3]);
3990                 x86_64_mov_reg_reg(argintregs[0], argintregs[2]);
3991
3992                 /* put class into second argument register */
3993                 x86_64_mov_imm_reg((s8) m->class, argintregs[1]);
3994
3995         } else {
3996                 x86_64_mov_reg_reg(argintregs[4], argintregs[5]);
3997                 x86_64_mov_reg_reg(argintregs[3], argintregs[4]);
3998                 x86_64_mov_reg_reg(argintregs[2], argintregs[3]);
3999                 x86_64_mov_reg_reg(argintregs[1], argintregs[2]);
4000                 x86_64_mov_reg_reg(argintregs[0], argintregs[1]);
4001         }
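        /* JNI convention: the native function expects the JNIEnv pointer as its
           first argument and, for static methods, the class as its second, hence
           the register shuffle above */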
4002
4003         /* put env into first argument register */
4004         x86_64_mov_imm_reg((s8) &env, argintregs[0]);
4005
4006         x86_64_mov_imm_reg((s8) f, REG_ITMP1);
4007         x86_64_call_reg(REG_ITMP1);
4008
4009         /* remove stackframe if there is one */
4010         if (stackframesize) {
4011                 x86_64_alu_imm_reg(X86_64_ADD, stackframesize * 8, REG_SP);
4012         }
4013
4014         if (runverbose) {
4015                 x86_64_alu_imm_reg(X86_64_SUB, 3 * 8, REG_SP);    /* keep stack 16-byte aligned */
4016
4017                 x86_64_mov_reg_membase(REG_RESULT, REG_SP, 0 * 8);
4018                 x86_64_movq_reg_membase(REG_FRESULT, REG_SP, 1 * 8);
4019
4020                 x86_64_mov_imm_reg((s8) m, argintregs[0]);
4021                 x86_64_mov_reg_reg(REG_RESULT, argintregs[1]);
4022                 M_FLTMOVE(REG_FRESULT, argfltregs[0]);
4023                 M_FLTMOVE(REG_FRESULT, argfltregs[1]);
4024
4025                 x86_64_mov_imm_reg((s8) builtin_displaymethodstop, REG_ITMP1);
4026                 x86_64_call_reg(REG_ITMP1);
4027
4028                 x86_64_mov_membase_reg(REG_SP, 0 * 8, REG_RESULT);
4029                 x86_64_movq_membase_reg(REG_SP, 1 * 8, REG_FRESULT);
4030
4031                 x86_64_alu_imm_reg(X86_64_ADD, 3 * 8, REG_SP);    /* keep stack 16-byte aligned */
4032         }
4033
4034 #if 0
4035         /* restore callee saved registers */
4036         x86_64_movq_membase_reg(REG_SP, 0 * 8, XMM15);
4037         x86_64_movq_membase_reg(REG_SP, 1 * 8, XMM14);
4038         x86_64_movq_membase_reg(REG_SP, 2 * 8, XMM13);
4039         x86_64_movq_membase_reg(REG_SP, 3 * 8, XMM12);
4040         x86_64_movq_membase_reg(REG_SP, 4 * 8, XMM11);
4041         x86_64_movq_membase_reg(REG_SP, 5 * 8, XMM10);
4042
4043         x86_64_alu_imm_reg(X86_64_ADD, 7 * 8, REG_SP);    /* keep stack 16-byte aligned */
4044 #endif
4045
4046         x86_64_mov_imm_reg((s8) &_exceptionptr, REG_ITMP3);
4047         x86_64_mov_membase_reg(REG_ITMP3, 0, REG_ITMP3);
4048         x86_64_test_reg_reg(REG_ITMP3, REG_ITMP3);
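        /* if an exception is pending, skip the 1-byte ret below and take the
           exception path */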
4049         x86_64_jcc(X86_64_CC_NE, 1);
4050
4051         x86_64_ret();
4052
4053         x86_64_mov_reg_reg(REG_ITMP3, REG_ITMP1_XPTR);
4054         x86_64_mov_imm_reg((s8) &_exceptionptr, REG_ITMP3);
4055         x86_64_alu_reg_reg(X86_64_XOR, REG_ITMP2, REG_ITMP2);
4056         x86_64_mov_reg_membase(REG_ITMP2, REG_ITMP3, 0);    /* clear exception pointer */
4057
4058         x86_64_mov_membase_reg(REG_SP, 0, REG_ITMP2_XPC);    /* get return address from stack */
4059         x86_64_alu_imm_reg(X86_64_SUB, 3, REG_ITMP2_XPC);    /* rewind into the caller's callq so XPC points at the call site */
4060
4061         x86_64_mov_imm_reg((s8) asm_handle_nat_exception, REG_ITMP3);
4062         x86_64_jmp_reg(REG_ITMP3);
4063
4064 #if 0
4065         {
4066                 static int stubprinted;
4067                 if (!stubprinted)
4068                         printf("stubsize: %d\n", ((long)mcodeptr - (long) s));
4069                 stubprinted = 1;
4070         }
4071 #endif
4072
4073 #ifdef STATISTICS
4074         count_nstub_len += NATIVESTUBSIZE;
4075 #endif
4076
4077         return s;
4078 }
4079
4080
4081 /* function: removenativestub **************************************************
4082
4083     removes a previously created native-stub from memory
4084     
4085 *******************************************************************************/
4086
4087 void removenativestub(u1 *stub)
4088 {
4089         CFREE(stub, NATIVESTUBSIZE);
4090 }
4091
4092
4093 /* code generation functions */
4094
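/*
 * The helpers below handle every combination of source operands and destination
 * residing in a register or on the stack (INMEMORY flag), going through REG_ITMP1
 * and REG_ITMP2 whenever an x86-64 instruction cannot take two memory operands.
 */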
4095 void x86_64_emit_ialu(s4 alu_op, stackptr src, instruction *iptr)
4096 {
4097         s4 s1 = src->prev->regoff;
4098         s4 s2 = src->regoff;
4099         s4 d = iptr->dst->regoff;
4100
4101         if (iptr->dst->flags & INMEMORY) {
4102                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4103                         if (s2 == d) {
4104                                 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4105                                 x86_64_alul_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4106
4107                         } else if (s1 == d) {
4108                                 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4109                                 x86_64_alul_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4110
4111                         } else {
4112                                 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4113                                 x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, REG_ITMP1);
4114                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
4115                         }
4116
4117                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4118                         if (s2 == d) {
4119                                 x86_64_alul_reg_membase(alu_op, s1, REG_SP, d * 8);
4120
4121                         } else {
4122                                 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4123                                 x86_64_alul_reg_reg(alu_op, s1, REG_ITMP1);
4124                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
4125                         }
4126
4127                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4128                         if (s1 == d) {
4129                                 x86_64_alul_reg_membase(alu_op, s2, REG_SP, d * 8);
4130                                                 
4131                         } else {
4132                                 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4133                                 x86_64_alul_reg_reg(alu_op, s2, REG_ITMP1);
4134                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
4135                         }
4136
4137                 } else {
4138                         x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4139                         x86_64_alul_reg_membase(alu_op, s2, REG_SP, d * 8);
4140                 }
4141
4142         } else {
4143                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4144                         x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4145                         x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, d);
4146
4147                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4148                         M_INTMOVE(s1, d);
4149                         x86_64_alul_membase_reg(alu_op, REG_SP, s2 * 8, d);
4150
4151                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4152                         M_INTMOVE(s2, d);
4153                         x86_64_alul_membase_reg(alu_op, REG_SP, s1 * 8, d);
4154
4155                 } else {
4156                         if (s2 == d) {
4157                                 x86_64_alul_reg_reg(alu_op, s1, d);
4158
4159                         } else {
4160                                 M_INTMOVE(s1, d);
4161                                 x86_64_alul_reg_reg(alu_op, s2, d);
4162                         }
4163                 }
4164         }
4165 }
4166
4167
4168
4169 void x86_64_emit_lalu(s4 alu_op, stackptr src, instruction *iptr)
4170 {
4171         s4 s1 = src->prev->regoff;
4172         s4 s2 = src->regoff;
4173         s4 d = iptr->dst->regoff;
4174
4175         if (iptr->dst->flags & INMEMORY) {
4176                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4177                         if (s2 == d) {
4178                                 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4179                                 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4180
4181                         } else if (s1 == d) {
4182                                 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4183                                 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4184
4185                         } else {
4186                                 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4187                                 x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, REG_ITMP1);
4188                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4189                         }
4190
4191                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4192                         if (s2 == d) {
4193                                 x86_64_alu_reg_membase(alu_op, s1, REG_SP, d * 8);
4194
4195                         } else {
4196                                 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4197                                 x86_64_alu_reg_reg(alu_op, s1, REG_ITMP1);
4198                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4199                         }
4200
4201                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4202                         if (s1 == d) {
4203                                 x86_64_alu_reg_membase(alu_op, s2, REG_SP, d * 8);
4204                                                 
4205                         } else {
4206                                 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4207                                 x86_64_alu_reg_reg(alu_op, s2, REG_ITMP1);
4208                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4209                         }
4210
4211                 } else {
4212                         x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4213                         x86_64_alu_reg_membase(alu_op, s2, REG_SP, d * 8);
4214                 }
4215
4216         } else {
4217                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4218                         x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4219                         x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, d);
4220
4221                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4222                         M_INTMOVE(s1, d);
4223                         x86_64_alu_membase_reg(alu_op, REG_SP, s2 * 8, d);
4224
4225                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4226                         M_INTMOVE(s2, d);
4227                         x86_64_alu_membase_reg(alu_op, REG_SP, s1 * 8, d);
4228
4229                 } else {
4230                         if (s2 == d) {
4231                                 x86_64_alu_reg_reg(alu_op, s1, d);
4232
4233                         } else {
4234                                 M_INTMOVE(s1, d);
4235                                 x86_64_alu_reg_reg(alu_op, s2, d);
4236                         }
4237                 }
4238         }
4239 }
4240
4241
4242
4243 void x86_64_emit_ialuconst(s4 alu_op, stackptr src, instruction *iptr)
4244 {
4245         s4 s1 = src->regoff;
4246         s4 d = iptr->dst->regoff;
4247
4248         if (iptr->dst->flags & INMEMORY) {
4249                 if (src->flags & INMEMORY) {
4250                         if (s1 == d) {
4251                                 x86_64_alul_imm_membase(alu_op, iptr->val.i, REG_SP, d * 8);
4252
4253                         } else {
4254                                 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4255                                 x86_64_alul_imm_reg(alu_op, iptr->val.i, REG_ITMP1);
4256                                 x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
4257                         }
4258
4259                 } else {
4260                         x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4261                         x86_64_alul_imm_membase(alu_op, iptr->val.i, REG_SP, d * 8);
4262                 }
4263
4264         } else {
4265                 if (src->flags & INMEMORY) {
4266                         x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4267                         x86_64_alul_imm_reg(alu_op, iptr->val.i, d);
4268
4269                 } else {
4270                         M_INTMOVE(s1, d);
4271                         x86_64_alul_imm_reg(alu_op, iptr->val.i, d);
4272                 }
4273         }
4274 }
4275
4276
4277
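/* x86-64 ALU instructions only encode 32-bit sign-extended immediates, so long
   constants outside that range are first loaded into a temporary register. */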
4278 void x86_64_emit_laluconst(s4 alu_op, stackptr src, instruction *iptr)
4279 {
4280         s4 s1 = src->regoff;
4281         s4 d = iptr->dst->regoff;
4282
4283         if (iptr->dst->flags & INMEMORY) {
4284                 if (src->flags & INMEMORY) {
4285                         if (s1 == d) {
4286                                 if (x86_64_is_imm32(iptr->val.l)) {
4287                                         x86_64_alu_imm_membase(alu_op, iptr->val.l, REG_SP, d * 8);
4288
4289                                 } else {
4290                                         x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4291                                         x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4292                                 }
4293
4294                         } else {
4295                                 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4296
4297                                 if (x86_64_is_imm32(iptr->val.l)) {
4298                                         x86_64_alu_imm_reg(alu_op, iptr->val.l, REG_ITMP1);
4299
4300                                 } else {
4301                                         x86_64_mov_imm_reg(iptr->val.l, REG_ITMP2);
4302                                         x86_64_alu_reg_reg(alu_op, REG_ITMP2, REG_ITMP1);
4303                                 }
4304                                 x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4305                         }
4306
4307                 } else {
4308                         x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4309
4310                         if (x86_64_is_imm32(iptr->val.l)) {
4311                                 x86_64_alu_imm_membase(alu_op, iptr->val.l, REG_SP, d * 8);
4312
4313                         } else {
4314                                 x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4315                                 x86_64_alu_reg_membase(alu_op, REG_ITMP1, REG_SP, d * 8);
4316                         }
4317                 }
4318
4319         } else {
4320                 if (src->flags & INMEMORY) {
4321                         x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4322
4323                 } else {
4324                         M_INTMOVE(s1, d);
4325                 }
4326
4327                 if (x86_64_is_imm32(iptr->val.l)) {
4328                         x86_64_alu_imm_reg(alu_op, iptr->val.l, d);
4329
4330                 } else {
4331                         x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4332                         x86_64_alu_reg_reg(alu_op, REG_ITMP1, d);
4333                 }
4334         }
4335 }
4336
4337
4338
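/*
 * Variable shift counts have to be in CL on x86-64, so the shift emitters
 * below move the count into RCX; the previous contents of RCX are saved in
 * REG_ITMP1 (or the result is built in REG_ITMP3 when RCX itself is the
 * destination) and restored afterwards.
 */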
4339 void x86_64_emit_ishift(s4 shift_op, stackptr src, instruction *iptr)
4340 {
4341         s4 s1 = src->prev->regoff;
4342         s4 s2 = src->regoff;
4343         s4 d = iptr->dst->regoff;
4344
4345         M_INTMOVE(RCX, REG_ITMP1);    /* save RCX */
4346         if (iptr->dst->flags & INMEMORY) {
4347                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4348                         if (s1 == d) {
4349                                 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4350                                 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4351
4352                         } else {
4353                                 x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4354                                 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4355                                 x86_64_shiftl_reg(shift_op, REG_ITMP2);
4356                                 x86_64_movl_reg_membase(REG_ITMP2, REG_SP, d * 8);
4357                         }
4358
4359                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4360                         x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4361                         x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4362                         x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4363
4364                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4365                         if (s1 == d) {
4366                                 M_INTMOVE(s2, RCX);
4367                                 x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4368
4369                         } else {
4370                                 M_INTMOVE(s2, RCX);
4371                                 x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4372                                 x86_64_shiftl_reg(shift_op, REG_ITMP2);
4373                                 x86_64_movl_reg_membase(REG_ITMP2, REG_SP, d * 8);
4374                         }
4375
4376                 } else {
4377                         M_INTMOVE(s2, RCX);
4378                         x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4379                         x86_64_shiftl_membase(shift_op, REG_SP, d * 8);
4380                 }
4381                 M_INTMOVE(REG_ITMP1, RCX);    /* restore RCX */
4382
4383         } else {
4384                 if (d == RCX) {
4385                         d = REG_ITMP3;
4386                 }
4387                                         
4388                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4389                         x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4390                         x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4391                         x86_64_shiftl_reg(shift_op, d);
4392
4393                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4394                         M_INTMOVE(s1, d);    /* maybe src is RCX */
4395                         x86_64_movl_membase_reg(REG_SP, s2 * 8, RCX);
4396                         x86_64_shiftl_reg(shift_op, d);
4397
4398                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4399                         M_INTMOVE(s2, RCX);
4400                         x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4401                         x86_64_shiftl_reg(shift_op, d);
4402
4403                 } else {
4404                         if (s1 == RCX) {
4405                                 M_INTMOVE(s1, d);
4406                                 M_INTMOVE(s2, RCX);
4407
4408                         } else {
4409                                 M_INTMOVE(s2, RCX);
4410                                 M_INTMOVE(s1, d);
4411                         }
4412                         x86_64_shiftl_reg(shift_op, d);
4413                 }
4414
4415                 if (d == RCX) {
4416                         M_INTMOVE(REG_ITMP3, RCX);
4417
4418                 } else {
4419                         M_INTMOVE(REG_ITMP1, RCX);    /* restore RCX */
4420                 }
4421         }
4422 }
4423
4424
4425
4426 void x86_64_emit_lshift(s4 shift_op, stackptr src, instruction *iptr)
4427 {
4428         s4 s1 = src->prev->regoff;
4429         s4 s2 = src->regoff;
4430         s4 d = iptr->dst->regoff;
4431
4432         M_INTMOVE(RCX, REG_ITMP1);    /* save RCX */
4433         if (iptr->dst->flags & INMEMORY) {
4434                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4435                         if (s1 == d) {
4436                                 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4437                                 x86_64_shift_membase(shift_op, REG_SP, d * 8);
4438
4439                         } else {
4440                                 x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4441                                 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4442                                 x86_64_shift_reg(shift_op, REG_ITMP2);
4443                                 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, d * 8);
4444                         }
4445
4446                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4447                         x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4448                         x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4449                         x86_64_shift_membase(shift_op, REG_SP, d * 8);
4450
4451                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4452                         if (s1 == d) {
4453                                 M_INTMOVE(s2, RCX);
4454                                 x86_64_shift_membase(shift_op, REG_SP, d * 8);
4455
4456                         } else {
4457                                 M_INTMOVE(s2, RCX);
4458                                 x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP2);
4459                                 x86_64_shift_reg(shift_op, REG_ITMP2);
4460                                 x86_64_mov_reg_membase(REG_ITMP2, REG_SP, d * 8);
4461                         }
4462
4463                 } else {
4464                         M_INTMOVE(s2, RCX);
4465                         x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4466                         x86_64_shift_membase(shift_op, REG_SP, d * 8);
4467                 }
4468                 M_INTMOVE(REG_ITMP1, RCX);    /* restore RCX */
4469
4470         } else {
4471                 if (d == RCX) {
4472                         d = REG_ITMP3;
4473                 }
4474
4475                 if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4476                         x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4477                         x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4478                         x86_64_shift_reg(shift_op, d);
4479
4480                 } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4481                         M_INTMOVE(s1, d);    /* maybe src is RCX */
4482                         x86_64_mov_membase_reg(REG_SP, s2 * 8, RCX);
4483                         x86_64_shift_reg(shift_op, d);
4484
4485                 } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4486                         M_INTMOVE(s2, RCX);
4487                         x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4488                         x86_64_shift_reg(shift_op, d);
4489
4490                 } else {
4491                         if (s1 == RCX) {
4492                                 M_INTMOVE(s1, d);
4493                                 M_INTMOVE(s2, RCX);
4494                         } else {
4495                                 M_INTMOVE(s2, RCX);
4496                                 M_INTMOVE(s1, d);
4497                         }
4498                         x86_64_shift_reg(shift_op, d);
4499                 }
4500
4501                 if (d == RCX) {
4502                         M_INTMOVE(REG_ITMP3, RCX);
4503
4504                 } else {
4505                         M_INTMOVE(REG_ITMP1, RCX);    /* restore RCX */
4506                 }
4507         }
4508 }
4509
4510
4511
4512 void x86_64_emit_ishiftconst(s4 shift_op, stackptr src, instruction *iptr)
4513 {
4514         s4 s1 = src->regoff;
4515         s4 d = iptr->dst->regoff;
4516
4517         if ((src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
4518                 if (s1 == d) {
4519                         x86_64_shiftl_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
4520
4521                 } else {
4522                         x86_64_movl_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4523                         x86_64_shiftl_imm_reg(shift_op, iptr->val.i, REG_ITMP1);
4524                         x86_64_movl_reg_membase(REG_ITMP1, REG_SP, d * 8);
4525                 }
4526
4527         } else if ((src->flags & INMEMORY) && !(iptr->dst->flags & INMEMORY)) {
4528                 x86_64_movl_membase_reg(REG_SP, s1 * 8, d);
4529                 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, d);
4530                                 
4531         } else if (!(src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
4532                 x86_64_movl_reg_membase(s1, REG_SP, d * 8);
4533                 x86_64_shiftl_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
4534
4535         } else {
4536                 M_INTMOVE(s1, d);
4537                 x86_64_shiftl_imm_reg(shift_op, iptr->val.i, d);
4538         }
4539 }
4540
4541
4542
4543 void x86_64_emit_lshiftconst(s4 shift_op, stackptr src, instruction *iptr)
4544 {
4545         s4 s1 = src->regoff;
4546         s4 d = iptr->dst->regoff;
4547
4548         if ((src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
4549                 if (s1 == d) {
4550                         x86_64_shift_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
4551
4552                 } else {
4553                         x86_64_mov_membase_reg(REG_SP, s1 * 8, REG_ITMP1);
4554                         x86_64_shift_imm_reg(shift_op, iptr->val.i, REG_ITMP1);
4555                         x86_64_mov_reg_membase(REG_ITMP1, REG_SP, d * 8);
4556                 }
4557
4558         } else if ((src->flags & INMEMORY) && !(iptr->dst->flags & INMEMORY)) {
4559                 x86_64_mov_membase_reg(REG_SP, s1 * 8, d);
4560                 x86_64_shift_imm_reg(shift_op, iptr->val.i, d);
4561                                 
4562         } else if (!(src->flags & INMEMORY) && (iptr->dst->flags & INMEMORY)) {
4563                 x86_64_mov_reg_membase(s1, REG_SP, d * 8);
4564                 x86_64_shift_imm_membase(shift_op, iptr->val.i, REG_SP, d * 8);
4565
4566         } else {
4567                 M_INTMOVE(s1, d);
4568                 x86_64_shift_imm_reg(shift_op, iptr->val.i, d);
4569         }
4570 }
4571
4572
4573
4574 void x86_64_emit_ifcc(s4 if_op, stackptr src, instruction *iptr)
4575 {
4576         if (src->flags & INMEMORY) {
4577                 x86_64_alul_imm_membase(X86_64_CMP, iptr->val.i, REG_SP, src->regoff * 8);
4578
4579         } else {
4580                 x86_64_alul_imm_reg(X86_64_CMP, iptr->val.i, src->regoff);
4581         }
4582         x86_64_jcc(if_op, 0);
4583         codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
4584 }
4585
4586
4587
4588 void x86_64_emit_if_lcc(s4 if_op, stackptr src, instruction *iptr)
4589 {
4590         s4 s1 = src->regoff;
4591
4592         if (src->flags & INMEMORY) {
4593                 if (x86_64_is_imm32(iptr->val.l)) {
4594                         x86_64_alu_imm_membase(X86_64_CMP, iptr->val.l, REG_SP, s1 * 8);
4595
4596                 } else {
4597                         x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4598                         x86_64_alu_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
4599                 }
4600
4601         } else {
4602                 if (x86_64_is_imm32(iptr->val.l)) {
4603                         x86_64_alu_imm_reg(X86_64_CMP, iptr->val.l, s1);
4604
4605                 } else {
4606                         x86_64_mov_imm_reg(iptr->val.l, REG_ITMP1);
4607                         x86_64_alu_reg_reg(X86_64_CMP, REG_ITMP1, s1);
4608                 }
4609         }
4610         x86_64_jcc(if_op, 0);
4611         codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
4612 }
4613
4614
4615
4616 void x86_64_emit_if_icmpcc(s4 if_op, stackptr src, instruction *iptr)
4617 {
4618         s4 s1 = src->prev->regoff;
4619         s4 s2 = src->regoff;
4620
4621         if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4622                 x86_64_movl_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4623                 x86_64_alul_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
4624
4625         } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4626                 x86_64_alul_membase_reg(X86_64_CMP, REG_SP, s2 * 8, s1);
4627
4628         } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4629                 x86_64_alul_reg_membase(X86_64_CMP, s2, REG_SP, s1 * 8);
4630
4631         } else {
4632                 x86_64_alul_reg_reg(X86_64_CMP, s2, s1);
4633         }
4634         x86_64_jcc(if_op, 0);
4635         codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
4636 }
4637
4638
4639
4640 void x86_64_emit_if_lcmpcc(s4 if_op, stackptr src, instruction *iptr)
4641 {
4642         s4 s1 = src->prev->regoff;
4643         s4 s2 = src->regoff;
4644
4645         if ((src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4646                 x86_64_mov_membase_reg(REG_SP, s2 * 8, REG_ITMP1);
4647                 x86_64_alu_reg_membase(X86_64_CMP, REG_ITMP1, REG_SP, s1 * 8);
4648
4649         } else if ((src->flags & INMEMORY) && !(src->prev->flags & INMEMORY)) {
4650                 x86_64_alu_membase_reg(X86_64_CMP, REG_SP, s2 * 8, s1);
4651
4652         } else if (!(src->flags & INMEMORY) && (src->prev->flags & INMEMORY)) {
4653                 x86_64_alu_reg_membase(X86_64_CMP, s2, REG_SP, s1 * 8);
4654
4655         } else {
4656                 x86_64_alu_reg_reg(X86_64_CMP, s2, s1);
4657         }
4658         x86_64_jcc(if_op, 0);
4659         codegen_addreference(BlockPtrOfPC(iptr->op1), mcodeptr);
4660 }
4661
4662
4663
4664 #if 1
4665
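/*
 * A note on the encoding helpers used below (inferred from the call sites in
 * this file, not verified against the headers): x86_64_emit_rex(W, reg,
 * index, base) builds the REX prefix 0100WRXB, where W selects 64 bit
 * operand size and R/X/B carry bit 3 of the ModRM reg field, the SIB index
 * and the ModRM r/m (base) register; x86_64_emit_reg(reg, rm) builds a
 * register-direct ModRM byte.  Sketch of the resulting bytes, assuming the
 * usual register constants (RAX = 0, R15 = 15, REG_SP = RSP = 4) and
 * X86_64_SUB being the standard ALU group number 5:
 *
 *     x86_64_mov_reg_reg(RAX, R15);            -> 49 89 c7     mov %rax, %r15
 *     x86_64_alu_imm_reg(X86_64_SUB, 8, REG_SP); -> 48 83 ec 08  sub $8, %rsp
 */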
4666 /*
4667  * mov ops
4668  */
4669 void x86_64_mov_reg_reg(s8 reg, s8 dreg) {
4670         x86_64_emit_rex(1,(reg),0,(dreg));
4671         *(mcodeptr++) = 0x89;
4672         x86_64_emit_reg((reg),(dreg));
4673 }
4674
4675
4676 void x86_64_mov_imm_reg(s8 imm, s8 reg) {
4677         x86_64_emit_rex(1,0,0,(reg));
4678         *(mcodeptr++) = 0xb8 + ((reg) & 0x07);
4679         x86_64_emit_imm64((imm));
4680 }
4681
4682
4683 void x86_64_movl_imm_reg(s8 imm, s8 reg) {
4684         x86_64_emit_rex(0,0,0,(reg));
4685         *(mcodeptr++) = 0xb8 + ((reg) & 0x07);
4686         x86_64_emit_imm32((imm));
4687 }
4688
4689
4690 void x86_64_mov_membase_reg(s8 basereg, s8 disp, s8 reg) {
4691         x86_64_emit_rex(1,(reg),0,(basereg));
4692         *(mcodeptr++) = 0x8b;
4693         x86_64_emit_membase((basereg),(disp),(reg));
4694 }
4695
4696
4697 void x86_64_movl_membase_reg(s8 basereg, s8 disp, s8 reg) {
4698         x86_64_emit_rex(0,(reg),0,(basereg));
4699         *(mcodeptr++) = 0x8b;
4700         x86_64_emit_membase((basereg),(disp),(reg));
4701 }
4702
4703
4704 /*
4705  * this one is used by INVOKEVIRTUAL/INVOKEINTERFACE so that the membase
4706  * displacement always has a constant length of 32 bit
4707  */
4708 void x86_64_mov_membase32_reg(s8 basereg, s8 disp, s8 reg) {
4709         x86_64_emit_rex(1,(reg),0,(basereg));
4710         *(mcodeptr++) = 0x8b;
4711         x86_64_address_byte(2, (reg), (basereg));
4712         x86_64_emit_imm32((disp));
4713 }
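/*
 * Because x86_64_address_byte(2, ...) forces mod = 10, the instruction above
 * always carries a 4 byte displacement even for small values, so its length
 * is fixed -- presumably so the displacement can be patched in later without
 * changing the code size.
 */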
4714
4715
4716 void x86_64_mov_reg_membase(s8 reg, s8 basereg, s8 disp) {
4717         x86_64_emit_rex(1,(reg),0,(basereg));
4718         *(mcodeptr++) = 0x89;
4719         x86_64_emit_membase((basereg),(disp),(reg));
4720 }
4721
4722
4723 void x86_64_movl_reg_membase(s8 reg, s8 basereg, s8 disp) {
4724         x86_64_emit_rex(0,(reg),0,(basereg));
4725         *(mcodeptr++) = 0x89;
4726         x86_64_emit_membase((basereg),(disp),(reg));
4727 }
4728
4729
4730 void x86_64_mov_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4731         x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4732         *(mcodeptr++) = 0x8b;
4733         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4734 }
4735
4736
4737 void x86_64_movl_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4738         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4739         *(mcodeptr++) = 0x8b;
4740         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4741 }
4742
4743
4744 void x86_64_mov_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4745         x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4746         *(mcodeptr++) = 0x89;
4747         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4748 }
4749
4750
4751 void x86_64_movl_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4752         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4753         *(mcodeptr++) = 0x89;
4754         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4755 }
4756
4757
4758 void x86_64_movw_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4759         *(mcodeptr++) = 0x66;
4760         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4761         *(mcodeptr++) = 0x89;
4762         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4763 }
4764
4765
4766 void x86_64_movb_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
4767         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
4768         *(mcodeptr++) = 0x88;
4769         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4770 }
4771
4772
4773 void x86_64_mov_imm_membase(s8 imm, s8 basereg, s8 disp) {
4774         x86_64_emit_rex(1,0,0,(basereg));
4775         *(mcodeptr++) = 0xc7;
4776         x86_64_emit_membase((basereg),(disp),0);
4777         x86_64_emit_imm32((imm));
4778 }
4779
4780
4781 void x86_64_movl_imm_membase(s8 imm, s8 basereg, s8 disp) {
4782         x86_64_emit_rex(0,0,0,(basereg));
4783         *(mcodeptr++) = 0xc7;
4784         x86_64_emit_membase((basereg),(disp),0);
4785         x86_64_emit_imm32((imm));
4786 }
4787
4788
4789 void x86_64_movsbq_reg_reg(s8 reg, s8 dreg) {
4790         x86_64_emit_rex(1,(dreg),0,(reg));
4791         *(mcodeptr++) = 0x0f;
4792         *(mcodeptr++) = 0xbe;
4793         /* dreg comes first: 0f be (movsx) encodes the destination in the ModRM reg field */
4794         x86_64_emit_reg((dreg),(reg));
4795 }
4796
4797
4798 void x86_64_movsbq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4799         x86_64_emit_rex(1,(dreg),0,(basereg));
4800         *(mcodeptr++) = 0x0f;
4801         *(mcodeptr++) = 0xbe;
4802         x86_64_emit_membase((basereg),(disp),(dreg));
4803 }
4804
4805
4806 void x86_64_movswq_reg_reg(s8 reg, s8 dreg) {
4807         x86_64_emit_rex(1,(dreg),0,(reg));
4808         *(mcodeptr++) = 0x0f;
4809         *(mcodeptr++) = 0xbf;
4810         /* dreg comes first: 0f bf (movsx) encodes the destination in the ModRM reg field */
4811         x86_64_emit_reg((dreg),(reg));
4812 }
4813
4814
4815 void x86_64_movswq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4816         x86_64_emit_rex(1,(dreg),0,(basereg));
4817         *(mcodeptr++) = 0x0f;
4818         *(mcodeptr++) = 0xbf;
4819         x86_64_emit_membase((basereg),(disp),(dreg));
4820 }
4821
4822
4823 void x86_64_movslq_reg_reg(s8 reg, s8 dreg) {
4824         x86_64_emit_rex(1,(dreg),0,(reg));
4825         *(mcodeptr++) = 0x63;
4826         /* dreg comes first: 63 (movsxd) encodes the destination in the ModRM reg field */
4827         x86_64_emit_reg((dreg),(reg));
4828 }
4829
4830
4831 void x86_64_movslq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4832         x86_64_emit_rex(1,(dreg),0,(basereg));
4833         *(mcodeptr++) = 0x63;
4834         x86_64_emit_membase((basereg),(disp),(dreg));
4835 }
4836
4837
4838 void x86_64_movzwq_reg_reg(s8 reg, s8 dreg) {
4839         x86_64_emit_rex(1,(dreg),0,(reg));
4840         *(mcodeptr++) = 0x0f;
4841         *(mcodeptr++) = 0xb7;
4842         /* dreg comes first: 0f b7 (movzx) encodes the destination in the ModRM reg field */
4843         x86_64_emit_reg((dreg),(reg));
4844 }
4845
4846
4847 void x86_64_movzwq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
4848         x86_64_emit_rex(1,(dreg),0,(basereg));
4849         *(mcodeptr++) = 0x0f;
4850         *(mcodeptr++) = 0xb7;
4851         x86_64_emit_membase((basereg),(disp),(dreg));
4852 }
4853
4854
4855 void x86_64_movswq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4856         x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4857         *(mcodeptr++) = 0x0f;
4858         *(mcodeptr++) = 0xbf;
4859         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4860 }
4861
4862
4863 void x86_64_movsbq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4864         x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4865         *(mcodeptr++) = 0x0f;
4866         *(mcodeptr++) = 0xbe;
4867         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4868 }
4869
4870
4871 void x86_64_movzwq_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 reg) {
4872         x86_64_emit_rex(1,(reg),(indexreg),(basereg));
4873         *(mcodeptr++) = 0x0f;
4874         *(mcodeptr++) = 0xb7;
4875         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
4876 }
4877
4878
4879
4880 /*
4881  * alu operations
4882  */
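/*
 * The integer ALU group shares one encoding scheme: opc is the usual group
 * number (add = 0, or = 1, adc = 2, sbb = 3, and = 4, sub = 5, xor = 6,
 * cmp = 7).  (opc << 3) + 1 is the "r/m, reg" opcode, (opc << 3) + 3 the
 * "reg, r/m" opcode, and the immediate forms use 0x81 /opc (imm32) or the
 * shorter 0x83 /opc (sign-extended imm8).
 */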
4883 void x86_64_alu_reg_reg(s8 opc, s8 reg, s8 dreg) {
4884         x86_64_emit_rex(1,(reg),0,(dreg));
4885         *(mcodeptr++) = (((opc)) << 3) + 1;
4886         x86_64_emit_reg((reg),(dreg));
4887 }
4888
4889
4890 void x86_64_alul_reg_reg(s8 opc, s8 reg, s8 dreg) {
4891         x86_64_emit_rex(0,(reg),0,(dreg));
4892         *(mcodeptr++) = (((opc)) << 3) + 1;
4893         x86_64_emit_reg((reg),(dreg));
4894 }
4895
4896
4897 void x86_64_alu_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp) {
4898         x86_64_emit_rex(1,(reg),0,(basereg));
4899         *(mcodeptr++) = (((opc)) << 3) + 1;
4900         x86_64_emit_membase((basereg),(disp),(reg));
4901 }
4902
4903
4904 void x86_64_alul_reg_membase(s8 opc, s8 reg, s8 basereg, s8 disp) {
4905         x86_64_emit_rex(0,(reg),0,(basereg));
4906         *(mcodeptr++) = (((opc)) << 3) + 1;
4907         x86_64_emit_membase((basereg),(disp),(reg));
4908 }
4909
4910
4911 void x86_64_alu_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg) {
4912         x86_64_emit_rex(1,(reg),0,(basereg));
4913         *(mcodeptr++) = (((opc)) << 3) + 3;
4914         x86_64_emit_membase((basereg),(disp),(reg));
4915 }
4916
4917
4918 void x86_64_alul_membase_reg(s8 opc, s8 basereg, s8 disp, s8 reg) {
4919         x86_64_emit_rex(0,(reg),0,(basereg));
4920         *(mcodeptr++) = (((opc)) << 3) + 3;
4921         x86_64_emit_membase((basereg),(disp),(reg));
4922 }
4923
4924
4925 void x86_64_alu_imm_reg(s8 opc, s8 imm, s8 dreg) {
4926         if (x86_64_is_imm8(imm)) {
4927                 x86_64_emit_rex(1,0,0,(dreg));
4928                 *(mcodeptr++) = 0x83;
4929                 x86_64_emit_reg((opc),(dreg));
4930                 x86_64_emit_imm8((imm));
4931         } else {
4932                 x86_64_emit_rex(1,0,0,(dreg));
4933                 *(mcodeptr++) = 0x81;
4934                 x86_64_emit_reg((opc),(dreg));
4935                 x86_64_emit_imm32((imm));
4936         }
4937 }
4938
4939
4940 void x86_64_alul_imm_reg(s8 opc, s8 imm, s8 dreg) {
4941         if (x86_64_is_imm8(imm)) {
4942                 x86_64_emit_rex(0,0,0,(dreg));
4943                 *(mcodeptr++) = 0x83;
4944                 x86_64_emit_reg((opc),(dreg));
4945                 x86_64_emit_imm8((imm));
4946         } else {
4947                 x86_64_emit_rex(0,0,0,(dreg));
4948                 *(mcodeptr++) = 0x81;
4949                 x86_64_emit_reg((opc),(dreg));
4950                 x86_64_emit_imm32((imm));
4951         }
4952 }
4953
4954
4955 void x86_64_alu_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
4956         if (x86_64_is_imm8(imm)) {
4957                 x86_64_emit_rex(1,0,0,(basereg));
4958                 *(mcodeptr++) = 0x83;
4959                 x86_64_emit_membase((basereg),(disp),(opc));
4960                 x86_64_emit_imm8((imm));
4961         } else {
4962                 x86_64_emit_rex(1,0,0,(basereg));
4963                 *(mcodeptr++) = 0x81;
4964                 x86_64_emit_membase((basereg),(disp),(opc));
4965                 x86_64_emit_imm32((imm));
4966         }
4967 }
4968
4969
4970 void x86_64_alul_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
4971         if (x86_64_is_imm8(imm)) {
4972                 x86_64_emit_rex(0,0,0,(basereg));
4973                 *(mcodeptr++) = 0x83;
4974                 x86_64_emit_membase((basereg),(disp),(opc));
4975                 x86_64_emit_imm8((imm));
4976         } else {
4977                 x86_64_emit_rex(0,0,0,(basereg));
4978                 *(mcodeptr++) = 0x81;
4979                 x86_64_emit_membase((basereg),(disp),(opc));
4980                 x86_64_emit_imm32((imm));
4981         }
4982 }
4983
4984
4985 void x86_64_test_reg_reg(s8 reg, s8 dreg) {
4986         x86_64_emit_rex(1,(reg),0,(dreg));
4987         *(mcodeptr++) = 0x85;
4988         x86_64_emit_reg((reg),(dreg));
4989 }
4990
4991
4992 void x86_64_testl_reg_reg(s8 reg, s8 dreg) {
4993         x86_64_emit_rex(0,(reg),0,(dreg));
4994         *(mcodeptr++) = 0x85;
4995         x86_64_emit_reg((reg),(dreg));
4996 }
4997
4998
4999 void x86_64_test_imm_reg(s8 imm, s8 reg) {
5000         *(mcodeptr++) = 0xf7;
5001         x86_64_emit_reg(0,(reg));
5002         x86_64_emit_imm32((imm));
5003 }
5004
5005
5006 void x86_64_testw_imm_reg(s8 imm, s8 reg) {
5007         *(mcodeptr++) = 0x66;
5008         *(mcodeptr++) = 0xf7;
5009         x86_64_emit_reg(0,(reg));
5010         x86_64_emit_imm16((imm));
5011 }
5012
5013
5014 void x86_64_testb_imm_reg(s8 imm, s8 reg) {
5015         *(mcodeptr++) = 0xf6;
5016         x86_64_emit_reg(0,(reg));
5017         x86_64_emit_imm8((imm));
5018 }
5019
5020
5021 void x86_64_lea_membase_reg(s8 basereg, s8 disp, s8 reg) {
5022         x86_64_emit_rex(1,(reg),0,(basereg));
5023         *(mcodeptr++) = 0x8d;
5024         x86_64_emit_membase((basereg),(disp),(reg));
5025 }
5026
5027
5028 void x86_64_leal_membase_reg(s8 basereg, s8 disp, s8 reg) {
5029         x86_64_emit_rex(0,(reg),0,(basereg));
5030         *(mcodeptr++) = 0x8d;
5031         x86_64_emit_membase((basereg),(disp),(reg));
5032 }
5033
5034
5035
5036 /*
5037  * inc, dec operations
5038  */
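/*
 * inc and dec share opcode 0xff; the ModRM reg field selects the operation:
 * /0 = inc, /1 = dec.
 */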
5039 void x86_64_inc_reg(s8 reg) {
5040         x86_64_emit_rex(1,0,0,(reg));
5041         *(mcodeptr++) = 0xff;
5042         x86_64_emit_reg(0,(reg));
5043 }
5044
5045
5046 void x86_64_incl_reg(s8 reg) {
5047         x86_64_emit_rex(0,0,0,(reg));
5048         *(mcodeptr++) = 0xff;
5049         x86_64_emit_reg(0,(reg));
5050 }
5051
5052
5053 void x86_64_inc_membase(s8 basereg, s8 disp) {
5054         x86_64_emit_rex(1,0,0,(basereg));
5055         *(mcodeptr++) = 0xff;
5056         x86_64_emit_membase((basereg),(disp),0);
5057 }
5058
5059
5060 void x86_64_incl_membase(s8 basereg, s8 disp) {
5061         x86_64_emit_rex(0,0,0,(basereg));
5062         *(mcodeptr++) = 0xff;
5063         x86_64_emit_membase((basereg),(disp),0);
5064 }
5065
5066
5067 void x86_64_dec_reg(s8 reg) {
5068         x86_64_emit_rex(1,0,0,(reg));
5069         *(mcodeptr++) = 0xff;
5070         x86_64_emit_reg(1,(reg));
5071 }
5072
5073         
5074 void x86_64_decl_reg(s8 reg) {
5075         x86_64_emit_rex(0,0,0,(reg));
5076         *(mcodeptr++) = 0xff;
5077         x86_64_emit_reg(1,(reg));
5078 }
5079
5080         
5081 void x86_64_dec_membase(s8 basereg, s8 disp) {
5082         x86_64_emit_rex(1,0,0,(basereg));
5083         *(mcodeptr++) = 0xff;
5084         x86_64_emit_membase((basereg),(disp),1);
5085 }
5086
5087
5088 void x86_64_decl_membase(s8 basereg, s8 disp) {
5089         x86_64_emit_rex(0,0,0,(basereg));
5090         *(mcodeptr++) = 0xff;
5091         x86_64_emit_membase((basereg),(disp),1);
5092 }
5093
5094
5095
5096
5097 void x86_64_cltd() {
5098     *(mcodeptr++) = 0x99;
5099 }
5100
5101
5102 void x86_64_cqto() {
5103         x86_64_emit_rex(1,0,0,0);
5104         *(mcodeptr++) = 0x99;
5105 }
5106
5107
5108
5109 void x86_64_imul_reg_reg(s8 reg, s8 dreg) {
5110         x86_64_emit_rex(1,(dreg),0,(reg));
5111         *(mcodeptr++) = 0x0f;
5112         *(mcodeptr++) = 0xaf;
5113         x86_64_emit_reg((dreg),(reg));
5114 }
5115
5116
5117 void x86_64_imull_reg_reg(s8 reg, s8 dreg) {
5118         x86_64_emit_rex(0,(dreg),0,(reg));
5119         *(mcodeptr++) = 0x0f;
5120         *(mcodeptr++) = 0xaf;
5121         x86_64_emit_reg((dreg),(reg));
5122 }
5123
5124
5125 void x86_64_imul_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5126         x86_64_emit_rex(1,(dreg),0,(basereg));
5127         *(mcodeptr++) = 0x0f;
5128         *(mcodeptr++) = 0xaf;
5129         x86_64_emit_membase((basereg),(disp),(dreg));
5130 }
5131
5132
5133 void x86_64_imull_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5134         x86_64_emit_rex(0,(dreg),0,(basereg));
5135         *(mcodeptr++) = 0x0f;
5136         *(mcodeptr++) = 0xaf;
5137         x86_64_emit_membase((basereg),(disp),(dreg));
5138 }
5139
5140
5141 void x86_64_imul_imm_reg(s8 imm, s8 dreg) {
5142         if (x86_64_is_imm8((imm))) {
5143                 x86_64_emit_rex(1,0,0,(dreg));
5144                 *(mcodeptr++) = 0x6b;
5145                 x86_64_emit_reg(0,(dreg));
5146                 x86_64_emit_imm8((imm));
5147         } else {
5148                 x86_64_emit_rex(1,0,0,(dreg));
5149                 *(mcodeptr++) = 0x69;
5150                 x86_64_emit_reg(0,(dreg));
5151                 x86_64_emit_imm32((imm));
5152         }
5153 }
5154
5155
5156 void x86_64_imul_imm_reg_reg(s8 imm, s8 reg, s8 dreg) {
5157         if (x86_64_is_imm8((imm))) {
5158                 x86_64_emit_rex(1,(dreg),0,(reg));
5159                 *(mcodeptr++) = 0x6b;
5160                 x86_64_emit_reg((dreg),(reg));
5161                 x86_64_emit_imm8((imm));
5162         } else {
5163                 x86_64_emit_rex(1,(dreg),0,(reg));
5164                 *(mcodeptr++) = 0x69;
5165                 x86_64_emit_reg((dreg),(reg));
5166                 x86_64_emit_imm32((imm));
5167         }
5168 }
5169
5170
5171 void x86_64_imull_imm_reg_reg(s8 imm, s8 reg, s8 dreg) {
5172         if (x86_64_is_imm8((imm))) {
5173                 x86_64_emit_rex(0,(dreg),0,(reg));
5174                 *(mcodeptr++) = 0x6b;
5175                 x86_64_emit_reg((dreg),(reg));
5176                 x86_64_emit_imm8((imm));
5177         } else {
5178                 x86_64_emit_rex(0,(dreg),0,(reg));
5179                 *(mcodeptr++) = 0x69;
5180                 x86_64_emit_reg((dreg),(reg));
5181                 x86_64_emit_imm32((imm));
5182         }
5183 }
5184
5185
5186 void x86_64_imul_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg) {
5187         if (x86_64_is_imm8((imm))) {
5188                 x86_64_emit_rex(1,(dreg),0,(basereg));
5189                 *(mcodeptr++) = 0x6b;
5190                 x86_64_emit_membase((basereg),(disp),(dreg));
5191                 x86_64_emit_imm8((imm));
5192         } else {
5193                 x86_64_emit_rex(1,(dreg),0,(basereg));
5194                 *(mcodeptr++) = 0x69;
5195                 x86_64_emit_membase((basereg),(disp),(dreg));
5196                 x86_64_emit_imm32((imm));
5197         }
5198 }
5199
5200
5201 void x86_64_imull_imm_membase_reg(s8 imm, s8 basereg, s8 disp, s8 dreg) {
5202         if (x86_64_is_imm8((imm))) {
5203                 x86_64_emit_rex(0,(dreg),0,(basereg));
5204                 *(mcodeptr++) = 0x6b;
5205                 x86_64_emit_membase((basereg),(disp),(dreg));
5206                 x86_64_emit_imm8((imm));
5207         } else {
5208                 x86_64_emit_rex(0,(dreg),0,(basereg));
5209                 *(mcodeptr++) = 0x69;
5210                 x86_64_emit_membase((basereg),(disp),(dreg));
5211                 x86_64_emit_imm32((imm));
5212         }
5213 }
5214
5215
5216 void x86_64_idiv_reg(s8 reg) {
5217         x86_64_emit_rex(1,0,0,(reg));
5218         *(mcodeptr++) = 0xf7;
5219         x86_64_emit_reg(7,(reg));
5220 }
5221
5222
5223 void x86_64_idivl_reg(s8 reg) {
5224         x86_64_emit_rex(0,0,0,(reg));
5225         *(mcodeptr++) = 0xf7;
5226         x86_64_emit_reg(7,(reg));
5227 }
5228
5229
5230
5231 void x86_64_ret() {
5232     *(mcodeptr++) = 0xc3;
5233 }
5234
5235
5236
5237 /*
5238  * shift ops
5239  */
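/*
 * The shift group is likewise selected via the ModRM reg field (rol = 0,
 * ror = 1, rcl = 2, rcr = 3, shl = 4, shr = 5, sar = 7): 0xd3 /opc shifts by
 * CL, 0xc1 /opc by an imm8, and 0xd1 /opc is the one byte shorter
 * "shift by 1" form used below when imm == 1.
 */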
5240 void x86_64_shift_reg(s8 opc, s8 reg) {
5241         x86_64_emit_rex(1,0,0,(reg));
5242         *(mcodeptr++) = 0xd3;
5243         x86_64_emit_reg((opc),(reg));
5244 }
5245
5246
5247 void x86_64_shiftl_reg(s8 opc, s8 reg) {
5248         x86_64_emit_rex(0,0,0,(reg));
5249         *(mcodeptr++) = 0xd3;
5250         x86_64_emit_reg((opc),(reg));
5251 }
5252
5253
5254 void x86_64_shift_membase(s8 opc, s8 basereg, s8 disp) {
5255         x86_64_emit_rex(1,0,0,(basereg));
5256         *(mcodeptr++) = 0xd3;
5257         x86_64_emit_membase((basereg),(disp),(opc));
5258 }
5259
5260
5261 void x86_64_shiftl_membase(s8 opc, s8 basereg, s8 disp) {
5262         x86_64_emit_rex(0,0,0,(basereg));
5263         *(mcodeptr++) = 0xd3;
5264         x86_64_emit_membase((basereg),(disp),(opc));
5265 }
5266
5267
5268 void x86_64_shift_imm_reg(s8 opc, s8 imm, s8 dreg) {
5269         if ((imm) == 1) {
5270                 x86_64_emit_rex(1,0,0,(dreg));
5271                 *(mcodeptr++) = 0xd1;
5272                 x86_64_emit_reg((opc),(dreg));
5273         } else {
5274                 x86_64_emit_rex(1,0,0,(dreg));
5275                 *(mcodeptr++) = 0xc1;
5276                 x86_64_emit_reg((opc),(dreg));
5277                 x86_64_emit_imm8((imm));
5278         }
5279 }
5280
5281
5282 void x86_64_shiftl_imm_reg(s8 opc, s8 imm, s8 dreg) {
5283         if ((imm) == 1) {
5284                 x86_64_emit_rex(0,0,0,(dreg));
5285                 *(mcodeptr++) = 0xd1;
5286                 x86_64_emit_reg((opc),(dreg));
5287         } else {
5288                 x86_64_emit_rex(0,0,0,(dreg));
5289                 *(mcodeptr++) = 0xc1;
5290                 x86_64_emit_reg((opc),(dreg));
5291                 x86_64_emit_imm8((imm));
5292         }
5293 }
5294
5295
5296 void x86_64_shift_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
5297         if ((imm) == 1) {
5298                 x86_64_emit_rex(1,0,0,(basereg));
5299                 *(mcodeptr++) = 0xd1;
5300                 x86_64_emit_membase((basereg),(disp),(opc));
5301         } else {
5302                 x86_64_emit_rex(1,0,0,(basereg));
5303                 *(mcodeptr++) = 0xc1;
5304                 x86_64_emit_membase((basereg),(disp),(opc));
5305                 x86_64_emit_imm8((imm));
5306         }
5307 }
5308
5309
5310 void x86_64_shiftl_imm_membase(s8 opc, s8 imm, s8 basereg, s8 disp) {
5311         if ((imm) == 1) {
5312                 x86_64_emit_rex(0,0,0,(basereg));
5313                 *(mcodeptr++) = 0xd1;
5314                 x86_64_emit_membase((basereg),(disp),(opc));
5315         } else {
5316                 x86_64_emit_rex(0,0,0,(basereg));
5317                 *(mcodeptr++) = 0xc1;
5318                 x86_64_emit_membase((basereg),(disp),(opc));
5319                 x86_64_emit_imm8((imm));
5320         }
5321 }
5322
5323
5324
5325 /*
5326  * jump operations
5327  */
5328 void x86_64_jmp_imm(s8 imm) {
5329         *(mcodeptr++) = 0xe9;
5330         x86_64_emit_imm32((imm));
5331 }
5332
5333
5334 void x86_64_jmp_reg(s8 reg) {
5335         x86_64_emit_rex(0,0,0,(reg));
5336         *(mcodeptr++) = 0xff;
5337         x86_64_emit_reg(4,(reg));
5338 }
5339
5340
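/*
 * Emits 0x0f, 0x80 + cc with a rel32 operand.  The branch emitters above
 * call this with imm == 0 and then record the location via
 * codegen_addreference(), which presumably patches in the real displacement
 * once the target block's address is known.
 */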
5341 void x86_64_jcc(s8 opc, s8 imm) {
5342         *(mcodeptr++) = 0x0f;
5343         *(mcodeptr++) = (0x80 + (opc));
5344         x86_64_emit_imm32((imm));
5345 }
5346
5347
5348
5349 /*
5350  * conditional set and move operations
5351  */
5352
5353 /* we need the rex byte to get all low bytes */
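/* (without a REX prefix, r/m encodings 4-7 select ah/ch/dh/bh; any REX
   prefix -- even the otherwise empty 0x40 -- switches them to
   spl/bpl/sil/dil, and REX.B extends the range to r8b-r15b) */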
5354 void x86_64_setcc_reg(s8 opc, s8 reg) {
5355         *(mcodeptr++) = (0x40 | (((reg) >> 3) & 0x01));
5356         *(mcodeptr++) = 0x0f;
5357         *(mcodeptr++) = (0x90 + (opc));
5358         x86_64_emit_reg(0,(reg));
5359 }
5360
5361
5362 /* we need the rex byte to get all low bytes */
5363 void x86_64_setcc_membase(s8 opc, s8 basereg, s8 disp) {
5364         *(mcodeptr++) = (0x40 | (((basereg) >> 3) & 0x01));
5365         *(mcodeptr++) = 0x0f;
5366         *(mcodeptr++) = (0x90 + (opc));
5367         x86_64_emit_membase((basereg),(disp),0);
5368 }
5369
5370
5371 void x86_64_cmovcc_reg_reg(s8 opc, s8 reg, s8 dreg) {
5372         x86_64_emit_rex(1,(dreg),0,(reg));
5373         *(mcodeptr++) = 0x0f;
5374         *(mcodeptr++) = (0x40 + (opc));
5375         x86_64_emit_reg((dreg),(reg));
5376 }
5377
5378
5379 void x86_64_cmovccl_reg_reg(s8 opc, s8 reg, s8 dreg) {
5380         x86_64_emit_rex(0,(dreg),0,(reg));
5381         *(mcodeptr++) = 0x0f;
5382         *(mcodeptr++) = (0x40 + (opc));
5383         x86_64_emit_reg((dreg),(reg));
5384 }
5385
5386
5387
5388 void x86_64_neg_reg(s8 reg) {
5389         x86_64_emit_rex(1,0,0,(reg));
5390         *(mcodeptr++) = 0xf7;
5391         x86_64_emit_reg(3,(reg));
5392 }
5393
5394
5395 void x86_64_negl_reg(s8 reg) {
5396         x86_64_emit_rex(0,0,0,(reg));
5397         *(mcodeptr++) = 0xf7;
5398         x86_64_emit_reg(3,(reg));
5399 }
5400
5401
5402 void x86_64_neg_membase(s8 basereg, s8 disp) {
5403         x86_64_emit_rex(1,0,0,(basereg));
5404         *(mcodeptr++) = 0xf7;
5405         x86_64_emit_membase((basereg),(disp),3);
5406 }
5407
5408
5409 void x86_64_negl_membase(s8 basereg, s8 disp) {
5410         x86_64_emit_rex(0,0,0,(basereg));
5411         *(mcodeptr++) = 0xf7;
5412         x86_64_emit_membase((basereg),(disp),3);
5413 }
5414
5415
5416
5417 void x86_64_push_imm(s8 imm) {
5418         *(mcodeptr++) = 0x68;
5419         x86_64_emit_imm32((imm));
5420 }
5421
5422
5423 void x86_64_pop_reg(s8 reg) {
5424         x86_64_emit_rex(0,0,0,(reg));
5425         *(mcodeptr++) = 0x58 + (0x07 & (reg));
5426 }
5427
5428
5429 void x86_64_xchg_reg_reg(s8 reg, s8 dreg) {
5430         x86_64_emit_rex(1,(reg),0,(dreg));
5431         *(mcodeptr++) = 0x87;
5432         x86_64_emit_reg((reg),(dreg));
5433 }
5434
5435
5436 void x86_64_nop() {
5437     *(mcodeptr++) = 0x90;
5438 }
5439
5440
5441
5442 /*
5443  * call instructions
5444  */
5445 void x86_64_call_reg(s8 reg) {
5446         x86_64_emit_rex(1,0,0,(reg));
5447         *(mcodeptr++) = 0xff;
5448         x86_64_emit_reg(2,(reg));
5449 }
5450
5451
5452 void x86_64_call_imm(s8 imm) {
5453         *(mcodeptr++) = 0xe8;
5454         x86_64_emit_imm32((imm));
5455 }
5456
5457
5458
5459 /*
5460  * floating point instructions (SSE2)
5461  */
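/*
 * The mandatory 0xf2/0xf3/0x66 prefix of these SSE2 instructions has to come
 * before the REX byte, which in turn must immediately precede the 0x0f
 * escape -- hence the prefix is emitted first in every helper below.
 */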
5462 void x86_64_addsd_reg_reg(s8 reg, s8 dreg) {
5463         *(mcodeptr++) = 0xf2;
5464         x86_64_emit_rex(0,(dreg),0,(reg));
5465         *(mcodeptr++) = 0x0f;
5466         *(mcodeptr++) = 0x58;
5467         x86_64_emit_reg((dreg),(reg));
5468 }
5469
5470
5471 void x86_64_addss_reg_reg(s8 reg, s8 dreg) {
5472         *(mcodeptr++) = 0xf3;
5473         x86_64_emit_rex(0,(dreg),0,(reg));
5474         *(mcodeptr++) = 0x0f;
5475         *(mcodeptr++) = 0x58;
5476         x86_64_emit_reg((dreg),(reg));
5477 }
5478
5479
5480 void x86_64_cvtsi2ssq_reg_reg(s8 reg, s8 dreg) {
5481         *(mcodeptr++) = 0xf3;
5482         x86_64_emit_rex(1,(dreg),0,(reg));
5483         *(mcodeptr++) = 0x0f;
5484         *(mcodeptr++) = 0x2a;
5485         x86_64_emit_reg((dreg),(reg));
5486 }
5487
5488
5489 void x86_64_cvtsi2ss_reg_reg(s8 reg, s8 dreg) {
5490         *(mcodeptr++) = 0xf3;
5491         x86_64_emit_rex(0,(dreg),0,(reg));
5492         *(mcodeptr++) = 0x0f;
5493         *(mcodeptr++) = 0x2a;
5494         x86_64_emit_reg((dreg),(reg));
5495 }
5496
5497
5498 void x86_64_cvtsi2sdq_reg_reg(s8 reg, s8 dreg) {
5499         *(mcodeptr++) = 0xf2;
5500         x86_64_emit_rex(1,(dreg),0,(reg));
5501         *(mcodeptr++) = 0x0f;
5502         *(mcodeptr++) = 0x2a;
5503         x86_64_emit_reg((dreg),(reg));
5504 }
5505
5506
5507 void x86_64_cvtsi2sd_reg_reg(s8 reg, s8 dreg) {
5508         *(mcodeptr++) = 0xf2;
5509         x86_64_emit_rex(0,(dreg),0,(reg));
5510         *(mcodeptr++) = 0x0f;
5511         *(mcodeptr++) = 0x2a;
5512         x86_64_emit_reg((dreg),(reg));
5513 }
5514
5515
5516 void x86_64_cvtss2sd_reg_reg(s8 reg, s8 dreg) {
5517         *(mcodeptr++) = 0xf3;
5518         x86_64_emit_rex(0,(dreg),0,(reg));
5519         *(mcodeptr++) = 0x0f;
5520         *(mcodeptr++) = 0x5a;
5521         x86_64_emit_reg((dreg),(reg));
5522 }
5523
5524
5525 void x86_64_cvtsd2ss_reg_reg(s8 reg, s8 dreg) {
5526         *(mcodeptr++) = 0xf2;
5527         x86_64_emit_rex(0,(dreg),0,(reg));
5528         *(mcodeptr++) = 0x0f;
5529         *(mcodeptr++) = 0x5a;
5530         x86_64_emit_reg((dreg),(reg));
5531 }
5532
5533
5534 void x86_64_cvttss2siq_reg_reg(s8 reg, s8 dreg) {
5535         *(mcodeptr++) = 0xf3;
5536         x86_64_emit_rex(1,(dreg),0,(reg));
5537         *(mcodeptr++) = 0x0f;
5538         *(mcodeptr++) = 0x2c;
5539         x86_64_emit_reg((dreg),(reg));
5540 }
5541
5542
5543 void x86_64_cvttss2si_reg_reg(s8 reg, s8 dreg) {
5544         *(mcodeptr++) = 0xf3;
5545         x86_64_emit_rex(0,(dreg),0,(reg));
5546         *(mcodeptr++) = 0x0f;
5547         *(mcodeptr++) = 0x2c;
5548         x86_64_emit_reg((dreg),(reg));
5549 }
5550
5551
5552 void x86_64_cvttsd2siq_reg_reg(s8 reg, s8 dreg) {
5553         *(mcodeptr++) = 0xf2;
5554         x86_64_emit_rex(1,(dreg),0,(reg));
5555         *(mcodeptr++) = 0x0f;
5556         *(mcodeptr++) = 0x2c;
5557         x86_64_emit_reg((dreg),(reg));
5558 }
5559
5560
5561 void x86_64_cvttsd2si_reg_reg(s8 reg, s8 dreg) {
5562         *(mcodeptr++) = 0xf2;
5563         x86_64_emit_rex(0,(dreg),0,(reg));
5564         *(mcodeptr++) = 0x0f;
5565         *(mcodeptr++) = 0x2c;
5566         x86_64_emit_reg((dreg),(reg));
5567 }
5568
5569
5570 void x86_64_divss_reg_reg(s8 reg, s8 dreg) {
5571         *(mcodeptr++) = 0xf3;
5572         x86_64_emit_rex(0,(dreg),0,(reg));
5573         *(mcodeptr++) = 0x0f;
5574         *(mcodeptr++) = 0x5e;
5575         x86_64_emit_reg((dreg),(reg));
5576 }
5577
5578
5579 void x86_64_divsd_reg_reg(s8 reg, s8 dreg) {
5580         *(mcodeptr++) = 0xf2;
5581         x86_64_emit_rex(0,(dreg),0,(reg));
5582         *(mcodeptr++) = 0x0f;
5583         *(mcodeptr++) = 0x5e;
5584         x86_64_emit_reg((dreg),(reg));
5585 }
5586
5587
5588 void x86_64_movd_reg_freg(s8 reg, s8 freg) {
5589         *(mcodeptr++) = 0x66;
5590         x86_64_emit_rex(1,(freg),0,(reg));
5591         *(mcodeptr++) = 0x0f;
5592         *(mcodeptr++) = 0x6e;
5593         x86_64_emit_reg((freg),(reg));
5594 }
5595
5596
5597 void x86_64_movd_freg_reg(s8 freg, s8 reg) {
5598         *(mcodeptr++) = 0x66;
5599         x86_64_emit_rex(1,(freg),0,(reg));
5600         *(mcodeptr++) = 0x0f;
5601         *(mcodeptr++) = 0x7e;
5602         x86_64_emit_reg((freg),(reg));
5603 }
5604
5605
5606 void x86_64_movd_reg_membase(s8 reg, s8 basereg, s8 disp) {
5607         *(mcodeptr++) = 0x66;
5608         x86_64_emit_rex(0,(reg),0,(basereg));
5609         *(mcodeptr++) = 0x0f;
5610         *(mcodeptr++) = 0x7e;
5611         x86_64_emit_membase((basereg),(disp),(reg));
5612 }
5613
5614
5615 void x86_64_movd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
5616         *(mcodeptr++) = 0x66;
5617         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
5618         *(mcodeptr++) = 0x0f;
5619         *(mcodeptr++) = 0x7e;
5620         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
5621 }
5622
5623
5624 void x86_64_movd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5625         *(mcodeptr++) = 0x66;
5626         x86_64_emit_rex(1,(dreg),0,(basereg));
5627         *(mcodeptr++) = 0x0f;
5628         *(mcodeptr++) = 0x6e;
5629         x86_64_emit_membase((basereg),(disp),(dreg));
5630 }
5631
5632
5633 void x86_64_movdl_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5634         *(mcodeptr++) = 0x66;
5635         x86_64_emit_rex(0,(dreg),0,(basereg));
5636         *(mcodeptr++) = 0x0f;
5637         *(mcodeptr++) = 0x6e;
5638         x86_64_emit_membase((basereg),(disp),(dreg));
5639 }
5640
5641
5642 void x86_64_movd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
5643         *(mcodeptr++) = 0x66;
5644         x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
5645         *(mcodeptr++) = 0x0f;
5646         *(mcodeptr++) = 0x6e;
5647         x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
5648 }
5649
5650
5651 void x86_64_movq_reg_reg(s8 reg, s8 dreg) {
5652         *(mcodeptr++) = 0xf3;
5653         x86_64_emit_rex(0,(dreg),0,(reg));
5654         *(mcodeptr++) = 0x0f;
5655         *(mcodeptr++) = 0x7e;
5656         x86_64_emit_reg((dreg),(reg));
5657 }
5658
5659
5660 void x86_64_movq_reg_membase(s8 reg, s8 basereg, s8 disp) {
5661         *(mcodeptr++) = 0x66;
5662         x86_64_emit_rex(0,(reg),0,(basereg));
5663         *(mcodeptr++) = 0x0f;
5664         *(mcodeptr++) = 0xd6;
5665         x86_64_emit_membase((basereg),(disp),(reg));
5666 }
5667
5668
5669 void x86_64_movq_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5670         *(mcodeptr++) = 0xf3;
5671         x86_64_emit_rex(0,(dreg),0,(basereg));
5672         *(mcodeptr++) = 0x0f;
5673         *(mcodeptr++) = 0x7e;
5674         x86_64_emit_membase((basereg),(disp),(dreg));
5675 }
5676
5677
5678 void x86_64_movss_reg_reg(s8 reg, s8 dreg) {
5679         *(mcodeptr++) = 0xf3;
5680         x86_64_emit_rex(0,(reg),0,(dreg));
5681         *(mcodeptr++) = 0x0f;
5682         *(mcodeptr++) = 0x10;
5683         x86_64_emit_reg((reg),(dreg));
5684 }
5685
5686
5687 void x86_64_movsd_reg_reg(s8 reg, s8 dreg) {
5688         *(mcodeptr++) = 0xf2;
5689         x86_64_emit_rex(0,(reg),0,(dreg));
5690         *(mcodeptr++) = 0x0f;
5691         *(mcodeptr++) = 0x10;
5692         x86_64_emit_reg((reg),(dreg));
5693 }
5694
5695
5696 void x86_64_movss_reg_membase(s8 reg, s8 basereg, s8 disp) {
5697         *(mcodeptr++) = 0xf3;
5698         x86_64_emit_rex(0,(reg),0,(basereg));
5699         *(mcodeptr++) = 0x0f;
5700         *(mcodeptr++) = 0x11;
5701         x86_64_emit_membase((basereg),(disp),(reg));
5702 }
5703
5704
5705 void x86_64_movsd_reg_membase(s8 reg, s8 basereg, s8 disp) {
5706         *(mcodeptr++) = 0xf2;
5707         x86_64_emit_rex(0,(reg),0,(basereg));
5708         *(mcodeptr++) = 0x0f;
5709         *(mcodeptr++) = 0x11;
5710         x86_64_emit_membase((basereg),(disp),(reg));
5711 }
5712
5713
5714 void x86_64_movss_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5715         *(mcodeptr++) = 0xf3;
5716         x86_64_emit_rex(0,(dreg),0,(basereg));
5717         *(mcodeptr++) = 0x0f;
5718         *(mcodeptr++) = 0x10;
5719         x86_64_emit_membase((basereg),(disp),(dreg));
5720 }
5721
5722
5723 void x86_64_movlps_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5724         x86_64_emit_rex(0,(dreg),0,(basereg));
5725         *(mcodeptr++) = 0x0f;
5726         *(mcodeptr++) = 0x12;
5727         x86_64_emit_membase((basereg),(disp),(dreg));
5728 }
5729
5730
5731 void x86_64_movsd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5732         *(mcodeptr++) = 0xf2;
5733         x86_64_emit_rex(0,(dreg),0,(basereg));
5734         *(mcodeptr++) = 0x0f;
5735         *(mcodeptr++) = 0x10;
5736         x86_64_emit_membase((basereg),(disp),(dreg));
5737 }
5738
5739
5740 void x86_64_movlpd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5741         *(mcodeptr++) = 0x66;
5742         x86_64_emit_rex(0,(dreg),0,(basereg));
5743         *(mcodeptr++) = 0x0f;
5744         *(mcodeptr++) = 0x12;
5745         x86_64_emit_membase((basereg),(disp),(dreg));
5746 }
5747
5748
5749 void x86_64_movss_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
5750         *(mcodeptr++) = 0xf3;
5751         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
5752         *(mcodeptr++) = 0x0f;
5753         *(mcodeptr++) = 0x11;
5754         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
5755 }
5756
5757
5758 void x86_64_movsd_reg_memindex(s8 reg, s8 disp, s8 basereg, s8 indexreg, s8 scale) {
5759         *(mcodeptr++) = 0xf2;
5760         x86_64_emit_rex(0,(reg),(indexreg),(basereg));
5761         *(mcodeptr++) = 0x0f;
5762         *(mcodeptr++) = 0x11;
5763         x86_64_emit_memindex((reg),(disp),(basereg),(indexreg),(scale));
5764 }
5765
5766
5767 void x86_64_movss_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
5768         *(mcodeptr++) = 0xf3;
5769         x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
5770         *(mcodeptr++) = 0x0f;
5771         *(mcodeptr++) = 0x10;
5772         x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
5773 }
5774
5775
5776 void x86_64_movsd_memindex_reg(s8 disp, s8 basereg, s8 indexreg, s8 scale, s8 dreg) {
5777         *(mcodeptr++) = 0xf2;
5778         x86_64_emit_rex(0,(dreg),(indexreg),(basereg));
5779         *(mcodeptr++) = 0x0f;
5780         *(mcodeptr++) = 0x10;
5781         x86_64_emit_memindex((dreg),(disp),(basereg),(indexreg),(scale));
5782 }
5783
5784
5785 void x86_64_mulss_reg_reg(s8 reg, s8 dreg) {
5786         *(mcodeptr++) = 0xf3;
5787         x86_64_emit_rex(0,(dreg),0,(reg));
5788         *(mcodeptr++) = 0x0f;
5789         *(mcodeptr++) = 0x59;
5790         x86_64_emit_reg((dreg),(reg));
5791 }
5792
5793
5794 void x86_64_mulsd_reg_reg(s8 reg, s8 dreg) {
5795         *(mcodeptr++) = 0xf2;
5796         x86_64_emit_rex(0,(dreg),0,(reg));
5797         *(mcodeptr++) = 0x0f;
5798         *(mcodeptr++) = 0x59;
5799         x86_64_emit_reg((dreg),(reg));
5800 }
5801
5802
5803 void x86_64_subss_reg_reg(s8 reg, s8 dreg) {
5804         *(mcodeptr++) = 0xf3;
5805         x86_64_emit_rex(0,(dreg),0,(reg));
5806         *(mcodeptr++) = 0x0f;
5807         *(mcodeptr++) = 0x5c;
5808         x86_64_emit_reg((dreg),(reg));
5809 }
5810
5811
5812 void x86_64_subsd_reg_reg(s8 reg, s8 dreg) {
5813         *(mcodeptr++) = 0xf2;
5814         x86_64_emit_rex(0,(dreg),0,(reg));
5815         *(mcodeptr++) = 0x0f;
5816         *(mcodeptr++) = 0x5c;
5817         x86_64_emit_reg((dreg),(reg));
5818 }
5819
5820
5821 void x86_64_ucomiss_reg_reg(s8 reg, s8 dreg) {
5822         x86_64_emit_rex(0,(dreg),0,(reg));
5823         *(mcodeptr++) = 0x0f;
5824         *(mcodeptr++) = 0x2e;
5825         x86_64_emit_reg((dreg),(reg));
5826 }
5827
5828
5829 void x86_64_ucomisd_reg_reg(s8 reg, s8 dreg) {
5830         *(mcodeptr++) = 0x66;
5831         x86_64_emit_rex(0,(dreg),0,(reg));
5832         *(mcodeptr++) = 0x0f;
5833         *(mcodeptr++) = 0x2e;
5834         x86_64_emit_reg((dreg),(reg));
5835 }
5836
5837
5838 void x86_64_xorps_reg_reg(s8 reg, s8 dreg) {
5839         x86_64_emit_rex(0,(dreg),0,(reg));
5840         *(mcodeptr++) = 0x0f;
5841         *(mcodeptr++) = 0x57;
5842         x86_64_emit_reg((dreg),(reg));
5843 }
5844
5845
5846 void x86_64_xorps_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5847         x86_64_emit_rex(0,(dreg),0,(basereg));
5848         *(mcodeptr++) = 0x0f;
5849         *(mcodeptr++) = 0x57;
5850         x86_64_emit_membase((basereg),(disp),(dreg));
5851 }
5852
5853
5854 void x86_64_xorpd_reg_reg(s8 reg, s8 dreg) {
5855         *(mcodeptr++) = 0x66;
5856         x86_64_emit_rex(0,(dreg),0,(reg));
5857         *(mcodeptr++) = 0x0f;
5858         *(mcodeptr++) = 0x57;
5859         x86_64_emit_reg((dreg),(reg));
5860 }
5861
5862
5863 void x86_64_xorpd_membase_reg(s8 basereg, s8 disp, s8 dreg) {
5864         *(mcodeptr++) = 0x66;
5865         x86_64_emit_rex(0,(dreg),0,(basereg));
5866         *(mcodeptr++) = 0x0f;
5867         *(mcodeptr++) = 0x57;
5868         x86_64_emit_membase((basereg),(disp),(dreg));
5869 }
5870
5871 #endif
5872
5873 /*
5874  * These are local overrides for various environment variables in Emacs.
5875  * Please do not remove this and leave it at the end of the file, where
5876  * Emacs will automagically detect them.
5877  * ---------------------------------------------------------------------
5878  * Local variables:
5879  * mode: c
5880  * indent-tabs-mode: t
5881  * c-basic-offset: 4
5882  * tab-width: 4
5883  * End:
5884  */